# -*- coding: cp936 -*-
import struct, sqlite3, datetime, os, math
import datetime


def days2time(days):
    """Convert a SWMM decimal-day timestamp to a datetime object.

    The input counts decimal days such that days == 1 maps to midnight of
    1899-12-31 (fractional parts become hours/minutes/seconds).
    """
    epoch = datetime.datetime(1899, 12, 31)
    return epoch + datetime.timedelta(days=days - 1)
    
RECORDSIZE = 4 # size in bytes of one record in the SWMM binary file (4-byte int/float)

def swmm2sqlite(fpath, spath, tolerance=0.001):
    """Convert a SWMM 5 binary output file into a sqlite database.

    fpath     -- path to the SWMM binary output (.out) file
    spath     -- path of the sqlite database to create; aborts (returns
                 None) if this file already exists
    tolerance -- while writing computed_results, a new value that differs
                 from the previously stored value of the same time series
                 by less than this amount is not stored again (the old
                 value is carried forward), shrinking the database

    Tables created: report_variable, summary, object_id_names,
    subcatchment/node/link_properties, computed_results.

    Returns 1 or 2 on basic file problems, a nonzero error code when the
    header checks fail, otherwise a list of the parsed header parameters
    (with the input file and sqlite connection left open).

    NOTE(review): the units inserted into report_variable are hard-coded
    US units and ignore the flow-unit code read from the file -- confirm
    against the SWMM interfacing guide.
    """
    if os.path.exists(spath):
        print '%s exists! Please delete the exisitng file or use a new name.' % spath
        return
    Fout = open(fpath, "rb")
    conn = sqlite3.connect(spath)
    c = conn.cursor()
    # Favor speed over durability; the database can simply be rebuilt from
    # the .out file if the process is interrupted.
    c.execute('PRAGMA synchronous=OFF')
    c.execute('PRAGMA temp_store = MEMORY')
    
    #create the variable table: maps (object_type, code) -> (name, unit)
    c.execute('create table report_variable (code text, object_type text, name text, unit text)')
##    conn.commit()
    object_type='subcatchment'
    param = 'rainfall snow_depth eva_inf_loss runoff_rate groundwater_outflow water_table_elevation'
    units = 'in/hr in in/hr mgd mgd ft'
    for idx, (p, u) in enumerate(zip(param.split(), units.split())):
        c.execute('insert into report_variable values (?, ?, ?, ?)', [idx, object_type, p, u])
        
    object_type='node'
    param = 'depth head volume linflow tinflow loss'
    units = 'ft ft ft3 mgd mgd mgd'
    for idx, (p, u) in enumerate(zip(param.split(), units.split())):
        c.execute('insert into report_variable values (?, ?, ?, ?)', [idx, object_type, p, u])

    object_type='link'
    param = 'flow depth velocity fnumber capacity'
    units = 'mgd ft fps na na'
    for idx, (p, u) in enumerate(zip(param.split(), units.split())):
        c.execute('insert into report_variable values (?, ?, ?, ?)', [idx, object_type, p, u])

##    conn.commit()


    # NOTE(review): open() raises on failure and never returns None, so
    # this guard is dead code; kept as-is.
    if Fout is None: return 2
    #  // --- check that file contains at least 14 records
    Fout.seek(0, 2)
    if Fout.tell()<14*RECORDSIZE:
        Fout.close()
        return 1
        
    #// --- read parameters from end of file (the 5-record trailer)
    Fout.seek(-5*RECORDSIZE, 2)
    offset0 = struct.unpack('i', Fout.read(RECORDSIZE))[0] #the byte position where the Object Properties section of the file begins (4-byte integer) 
    StartPos = struct.unpack('i', Fout.read(RECORDSIZE))[0] #the byte position where the Computed Results section of the file begins (4-byte integer)  
    SWMM_Nperiods = struct.unpack('i', Fout.read(RECORDSIZE))[0] #the total number of reporting periods contained in the Computed Results section of the file (4-byte integer) 
    errCode = struct.unpack('i', Fout.read(RECORDSIZE))[0] #the error code status of the simulation, where 0 indicates no errors (4-byte integer)  
    magic1 = struct.unpack('i', Fout.read(RECORDSIZE))[0] #the same identifying number, 516114522, that appears as the very first record in the file (4-byte integer).  

    #// --- read magic number from beginning of file
    Fout.seek(0)
    magic2 = struct.unpack('i', Fout.read(RECORDSIZE))[0]

    #// --- perform error checks
    if (magic1 != magic2):err = 1
    elif (errCode != 0): err = 1;
    elif (SWMM_Nperiods == 0): err = 1
    else: err = 0

     # // --- quit if errors found
    if (err > 0 ):
        Fout.close()
        return err;
    
    #// --- otherwise read additional parameters from start of file
    version = struct.unpack('i', Fout.read(RECORDSIZE))[0] #the version number of the engine (currently 50005)  
    SWMM_FlowUnits = struct.unpack('i', Fout.read(RECORDSIZE))[0] #a code number for the flow units that are in effect where 0 = CFS, 1 = GPM, 2 = MGD, 3 = CMS, 4 = LPS, and 5 = LPD  
    SWMM_Nsubcatch = struct.unpack('i', Fout.read(RECORDSIZE))[0] #the number of subcatchments in the project reported on  
    SWMM_Nnodes = struct.unpack('i', Fout.read(RECORDSIZE))[0] #the number of nodes in the project reported on 
    SWMM_Nlinks = struct.unpack('i', Fout.read(RECORDSIZE))[0] #the number of links in the project reported on  
    SWMM_Npolluts = struct.unpack('i', Fout.read(RECORDSIZE))[0] #the number of pollutants in the project.
    values = dict(zip('SWMM_FlowUnits SWMM_Nsubcatch SWMM_Nnodes SWMM_Nlinks SWMM_Npolluts'.split(), [SWMM_FlowUnits, SWMM_Nsubcatch, SWMM_Nnodes, SWMM_Nlinks, SWMM_Npolluts]))
    #writing summary table: one (name, value) row per project-level count
    
    sql = '''create table if not exists summary  (name text, value int) '''
    c.execute(sql)
##    conn.commit()
    sql = 'insert into summary values (?, ?)'
    for k,v in values.items():
        c.execute(sql, [k, v])
    conn.commit()
    #writing ids: maps object names to 1-based indices per object type
    sql = '''create table if not exists object_id_names  (name text, type text, idx int) '''
    c.execute(sql)
    conn.commit()
    sql = 'insert into object_id_names values (?, ?, ?)'
    def insert_objectid(object_type, count):
        # Each object name is stored as a 4-byte length followed by that
        # many bytes; indices are 1-based and later serve as the idx
        # column of computed_results.
        for idx in range(1, count + 1):
            s_len = struct.unpack('i', Fout.read(RECORDSIZE))[0]
            name = Fout.read(s_len)
            c.execute(sql, [name, object_type, idx])

##            conn.commit()
    insert_objectid('subcatchment', SWMM_Nsubcatch)
    insert_objectid('node', SWMM_Nnodes)
    insert_objectid('link', SWMM_Nlinks)
    insert_objectid('pollutant', SWMM_Npolluts)
                
    #writing the object properties
    #insert_subcatchment
    def insert_summary(object_type, ct):
        # Reads the property block for one object type: a property count,
        # the property code records, then one float per property per
        # object.  The target table <object_type>_properties must already
        # exist with (1 + no_prop) columns.
        no_prop = struct.unpack('i', Fout.read(RECORDSIZE))[0]
        print 'no of properties for ', object_type, ' is ', no_prop
        for i in range(no_prop):
            code_prop = struct.unpack('i', Fout.read(RECORDSIZE))[0]
            print '    code name: ', code_prop
        sql = 'insert into %s_properties values(%s)' % (object_type, ','.join(['?']*(no_prop + 1)))
        for i in range(ct):
            record = [i + 1]
            for j in range(no_prop):
                record.append(struct.unpack('f', Fout.read(RECORDSIZE))[0])
            
            c.execute(sql, record)
    c.execute('create table subcatchment_properties (idx int, area float)')
    c.execute('create table node_properties (idx int, type float, invert float, max_depth float)')
    c.execute('create table link_properties (idx int, type float, us_inv float, ds_inv float, max_depth float, length float)')
    insert_summary('subcatchment', SWMM_Nsubcatch)
    insert_summary('node', SWMM_Nnodes)
    insert_summary('link', SWMM_Nlinks)
            
        


                
    
    
    # Size of the whole object-properties section, matching the tables
    # created above: subcatchments have 1 property (count + code + N
    # values = N+2 records), nodes 3 (3N+4), links 5 (5N+6).
    offset = (SWMM_Nsubcatch+2) * RECORDSIZE + (3*SWMM_Nnodes+4) * RECORDSIZE  + (5*SWMM_Nlinks+6) * RECORDSIZE #
    offset = offset0 + offset
    
    #move to the start of the reporting variables
    Fout.seek(offset)
    SubcatchVars = struct.unpack('i', Fout.read(RECORDSIZE))[0] # Number of subcatchment variables (currently 6 + number of pollutants).
    Fout.seek(SubcatchVars*RECORDSIZE, 1)

    NodeVars = struct.unpack('i', Fout.read(RECORDSIZE))[0] # Node variables
    Fout.seek(NodeVars*RECORDSIZE, 1)
    LinkVars = struct.unpack('i', Fout.read(RECORDSIZE))[0]  # Link variables
    Fout.seek(LinkVars*RECORDSIZE, 1)
    
    SysVars = struct.unpack('i', Fout.read(RECORDSIZE))[0] # System variables

    #// --- read data just before start of output results
    offset = StartPos - 3*RECORDSIZE
    Fout.seek(offset)
    ##The start date and time of the simulation, expressed as an 8-byte double precision number representing the number of decimal days since the start of January 1, 1900 (i.e., since midnight of December 31, 1899).  
    SWMM_StartDate = struct.unpack('d', Fout.read(8))[0]
    ## The time interval between reporting periods in seconds, expressed as a 4-byte integer number.  
    SWMM_ReportStep = struct.unpack('i', Fout.read(RECORDSIZE))[0]

    #create result table: one row per stored (object, variable, time) value
    c.execute('create table computed_results (idx int, datetime float, code int, value float, object_type text)')
##    conn.commit()
    
    def insert_results_for_one_time_step(object_type, var_ct, object_ct, dt, ref_record=None):
        '''compare with the ref_record, if the difference is too small, the record will not be saved.

        Reads var_ct floats for each of object_ct objects at time dt and
        inserts them into computed_results.  A value within ``tolerance``
        of the last stored value of the same series is skipped; when a
        jump occurs after skipped values, the previous point is written
        too so the step is not smoothed away.  Returns the new reference
        records -- per object, a list of [dt, wasWritten, value] per
        variable -- to be fed back in on the next call.
        '''
        sql = 'insert into computed_results values (?, ?, ?, ?, ?)'
        records = []
        for i in range(1, object_ct+1):
            record = []
            for j in range(var_ct):
                v = struct.unpack('f', Fout.read(4))[0]
                isAdded = False
                if ref_record:
                    v0 = ref_record[i-1][j][2]
                    if math.fabs(v0 - v)<tolerance:
                        #skip this record; carry the old value forward
                        v = v0
                    else:
                        #If there is a sudden jump, the previous point should be captured too.
                        c.execute(sql, [i, dt, j, v, object_type])
                        if ref_record[i-1][j][1]:
                            #already added
                            pass
                        else:
                            #should add this point too
                            c.execute(sql, [i, ref_record[i-1][j][0], j, v0, object_type])
                        isAdded = True
                else:
                    # First period (no reference yet): always store.
                    c.execute(sql, [i, dt, j, v, object_type])
                record.append([dt, isAdded ,v])
            records.append(record)
        return records
##        conn.commit()
    BytesPerPeriod = 2*RECORDSIZE + (SWMM_Nsubcatch*SubcatchVars + SWMM_Nnodes*NodeVars +  SWMM_Nlinks*LinkVars + SysVars)*RECORDSIZE
    sub_ref_record = None    
    node_ref_record = None
    link_ref_record = None
    system_ref_record = None
    for l in range(SWMM_Nperiods):
        # Each period starts with an 8-byte decimal-day timestamp,
        # followed by subcatchment, node, link and system results in order.
        dt = struct.unpack('d', Fout.read(8))[0]
        t1 = datetime.datetime(1899, 12, 31)
        t2 = t1 + datetime.timedelta(days=dt)
##        print 'writing results for timestep: %s' % t2.isoformat()
##        print 'dt=%s, position: %s' % (dt, Fout.tell())
        sub_ref_record = insert_results_for_one_time_step('subcatchment', SubcatchVars, SWMM_Nsubcatch, dt, sub_ref_record)
        
        node_ref_record = insert_results_for_one_time_step('node', NodeVars, SWMM_Nnodes, dt, node_ref_record)
        
        link_ref_record = insert_results_for_one_time_step('link', LinkVars, SWMM_Nlinks, dt, link_ref_record)
        
        system_ref_record = insert_results_for_one_time_step('system', SysVars, 1, dt, system_ref_record)
        conn.commit()
        
                
        
    # NOTE(review): recomputed here but unused -- duplicate of the value
    # computed before the loop.
    BytesPerPeriod = 2*RECORDSIZE + (SWMM_Nsubcatch*SubcatchVars + SWMM_Nnodes*NodeVars +  SWMM_Nlinks*LinkVars + SysVars)*RECORDSIZE
    #inserting simulation results
    return [Fout,StartPos,SWMM_Nperiods,err,version,SWMM_FlowUnits,SWMM_Nsubcatch,SWMM_Nnodes,SWMM_Nlinks,SWMM_Npolluts,SubcatchVars,NodeVars,LinkVars,SysVars,SWMM_StartDate,SWMM_ReportStep,BytesPerPeriod]
    #// --- return with file left open

    # NOTE(review): unreachable -- the list above is always returned first.
    return err


def compact_computed_results(spath, tol):
    """Remove consecutive near-duplicate rows from computed_results.

    For every time series (idx, object_type, code) the rows are scanned in
    datetime order; a row whose value differs from the last *kept* value by
    less than ``tol`` is deleted, except that the final row of each series
    is always kept so the series end point survives.

    Parameters:
        spath -- path to a sqlite database produced by swmm2sqlite()
        tol   -- absolute tolerance below which two values count as equal
    """
    conn = sqlite3.connect(spath)
    c = conn.cursor()
    c.execute('PRAGMA synchronous=OFF')
    c.execute('PRAGMA temp_store = MEMORY')

    # Index speeds up the per-series selects below.
    sql = 'create index idx_results on computed_results (idx, datetime, code, object_type)'
    c.execute(sql)
    # Materialize the series list first so the cursor can be reused.
    c.execute('select distinct idx, object_type, code from computed_results')
    ts_list = c.fetchall()
    sql = 'select rowid, value from computed_results where idx=? and object_type=? and code=? order by datetime'
    for r in ts_list:
        c.execute(sql, r)
        last_kept = None   # last (rowid, value) that was kept
        last_seen = None   # last (rowid, value) iterated, kept or removed
        remove_list = []
        for row in c.fetchall():
            if last_kept is None:
                # BUGFIX: the original never assigned the first row, so the
                # comparison below never ran and nothing was ever removed.
                last_kept = row
            elif math.fabs(row[1] - last_kept[1]) < tol:
                remove_list.append(row[0])
            else:
                last_kept = row
            last_seen = row
        if len(remove_list) > 0:
            # BUGFIX: compare against the last row *seen*; the original
            # compared the last kept row, whose rowid can never appear in
            # remove_list, so the final point of a series could be lost.
            if last_seen[0] == remove_list[-1]:
                #The last point should be kept
                remove_list = remove_list[:-1]
            for rowid in remove_list:
                c.execute('delete from computed_results where rowid=?', [rowid])
            conn.commit()
        print('%s records removed for :%s' % (len(remove_list), str(r)))
    conn.close()
        
                    

def sqlite2tsf(spath, out, object_type, object_name):
    """Export every report variable of one object to a tab-separated .tsf file.

    Looks up the object's index by name, groups its computed_results rows
    by variable code, and writes one (header, data, blank line) section per
    variable that has an entry in report_variable.  Returns without writing
    anything when the object name is unknown.
    """
    conn = sqlite3.connect(spath)
    cur = conn.cursor()
    # Resolve the object's integer index from its name.
    obj_idx = None
    cur.execute('select idx from object_id_names where name=? and type=?', [object_name, object_type])
    for row in cur:
        obj_idx = row[0]
    if obj_idx is None:
        return
    # Variable schema (code, name, unit) for this object type.
    cur.execute('select code, name, unit from report_variable where object_type=?', [object_type])
    fields = [row for row in cur]
    print(fields)
    # Group the computed results by variable code.
    series = {}
    cur.execute('select datetime, code, value from computed_results where idx=? and object_type=? order by datetime, code', [obj_idx, object_type])
    for stamp, code, value in cur:
        series.setdefault(code, []).append([stamp, value])

    lines = []
    for code, points in series.items():
        names = [f[1] for f in fields if str(f[0]) == str(code)]
        units = [f[2] for f in fields if str(f[0]) == str(code)]
        # Variables with no unit definition are skipped entirely.
        if units:
            lines.append(['IDs:', object_name])   # id row
            lines.append(['Time'] + names)        # variable row
            lines.append([''] + units)            # unit row
            for stamp, value in points:
                lines.append([days2time(stamp).strftime('%m/%d/%Y %I:%M:%S %p'), value])
            lines.append([])
    f = open(out, 'w')
    for row in lines:
        f.write('%s\n' % '\t'.join([str(x) for x in row]))
    f.close()
        
        
                          


# Batch driver: convert each SWMM .out file to a sqlite database, then
# export the selected link time series to .tsf files.
outputs = 'MK_2009_10_30.out MK_2010_06_26.out MK_2010_7_18.out'
# Label -> link name of the flow meters to export (invariant, hoisted out
# of the loop).
meters = {'T6': '0035T0528:0035T0518', 'T11': '0034T0307:0034T0338', 'T12': '0034T0392:0034T0391', 'T1': '0016T0462:0016C0461'}
for swmm in outputs.split():
    out = swmm.replace('.out', '.sqlite')
    swmm2sqlite(swmm, out)
    #compact_computed_results(out, 0.001)
    for label, link_name in meters.items():
        sqlite2tsf(out, '%s-%s.tsf' % (label, swmm), 'link', link_name)
