#!/usr/bin/python
# Created by Juan C. Espinoza <jucar.espinoza@gmail.com> April 1, 2010

import os, time, traceback
import cPickle
import argparse
from glob import glob
from joblib import Parallel, delayed
from lisn_utils import *

# Root of the raw-data tree; realtime() globs files under <year>/<station code>/
lisn_path = '/data1/lisn/'
# Default output directory for realtime products (plots, rinex, pickles);
# overridable with the -o/--output option
TMP = '/data/temp/realtime'
# Station networks whose gps instruments are processed (joined into the
# network= argument of request.stations() in main())
networks    = ['LISN', 'UNIVAP']
# Shared client used by main() to fetch the station list
# NOTE(review): presumably queries the LISN web service — confirm in lisn_utils
request = LISNRequest(debug=False)

def realtime(opts, dt, site):
    
    print site['code']
    dt0 = dt-timedelta(1)
    kwargs = {}
    for bub in gps_names_new.values():
        ext     = bub[1]
        if ext in ('scn','pos', 'tec'): continue
        old_ext = bub[0].split('%s')[-1]
        files = glob(os.path.join(lisn_path, '%s' % dt.year, site['code'], 
                                  '*%s*%s' % (dt0.strftime('%y%m%d'),
                                              old_ext)))
        files += glob(os.path.join(lisn_path, '%s' % dt.year, site['code'], 
                                   '*%s*%s' % (dt.strftime('%y%m%d'),
                                               old_ext)))            
        files.sort()
        files   = files[-94:]
        if files:
            fileout = os.path.join(opts.output, '%s.%s' % (site['code'], ext))
            if ext in ('s4'):
                try:
                    cat = cat_files(fileout, files, None, 30, ext, old_ext,
                                    daily=False)                    
                    data = S4Data(fileout, station=site)
                except:
                    cat = False
    
                if cat:
                    figname  = os.path.join(opts.output, site['code']+'_s4.png')
                    data.plot('epoch', 's4', figname=figname, figsize=(8, 4), 
                              s4legend=True, pkwargs={'ms':4, 'marker':'.', 'ls':''})                        
            elif ext in ('nvd', 'lb2', 'obs'):
                updated  = False
                data     = False
                rnx      = [False]
                rnx_name = fileout.replace(ext, '%02do' % (dt.year%100))
                last_dt  = dt0
                try:
                    if os.path.exists(rnx_name):
                        data    = RNXData(rnx_name, station=site)
                        last_dt = data[-1].epoch
                        print 'Found old rinex %d records last time:%s' % (len(data), last_dt)
                        files = [f for f in files \
                                 if GPSDateTime(f.split('/')[-1], filename=True)>last_dt]
                    if cat_files(fileout, files, None, 30, ext, old_ext,
                                     daily=False):
                        kwargs['xyz'] = gps.lla2xyz(*site['location'])
                        if ext=='nvd':
                            rnx = nvd_to_rnx(fileout, site['code'], opts.output,
                                                 date=dt0, interval=30,
                                                 alt_name=site['code'], **kwargs)
                        elif ext=='lb2':
                            rnx = lb2_to_rnx(fileout, site['code'], opts.output,
                                                 date=dt0, interval=30,
                                                 alt_name=site['code'], **kwargs)
                        elif ext=='obs':
                            rnx = obs_to_rnx(fileout, site['code'], opts.output,
                                                 date=dt0, interval=30,
                                                 alt_name=site['code'], **kwargs)
                    if data and rnx[0]:
                        data.merge(RNXData(rnx[0], station=site))
                        updated = True
                    elif not data and rnx[0]:
                        data = RNXData(rnx[0], station=site)
                    elif not data and not rnx[0]:
                        continue
    
                    if data.date.year<2000:
                        continue
                    print 'Processing final rinex %d records' % len(data)
                    if updated:
                        sv=data.save(opts.output, site['code'], True, 24)
                    bias_file = '%s.bias' % site['code']
                    bias_file = os.path.join(opts.path, 'biases', bias_file)
                    bias_list = {}
                    if os.path.exists(bias_file):
                        for line in open(bias_file):
                            bias_list[GPSDateTime(line.split(',')[0])] = float(line.split(',')[1].strip())
                    bias_dt = GPSDateTime(*(dt - timedelta(1)).timetuple()[:3])
                    if bias_dt in bias_list:
                        bias = bias_list[bias_dt]
                    else:
                        bias = None
                    data.calctec(path=opts.path, rec_bias=bias)
                    figname = os.path.join(opts.output, site['code']+'_tec.png')
                    data.plot('epoch', 'eqTEC', figname=figname, colormap='lat', marks=True)
                    cPickle.dump(data, open(os.path.join(opts.output, site['code']+'.pkl'), 'wb'))
                except:
                    if opts.force:
                        continue
                    else:
                        traceback.print_exc()
                        break
            elif ext=='rnx':
                updated  = False
                data     = False
                rnx      = False
                rnx_name = fileout.replace(ext, '%02do' % (dt.year%100))
                last_dt  = dt0
                try:
                    if os.path.exists(rnx_name):
                        data    = RNXData(rnx_name, station=site)
                        last_dt = data[-1].epoch
                        print 'Found old rinex %d records last time:%s' % (len(data), last_dt)
                        files = [f for f in files \
                                 if GPSDateTime(f.split('/')[-1], filename=True)>last_dt]
                    if files: 
                        rnx = RNXData(files[0], station=site)
                        for f in files[1:]:
                            rnx.merge(RNXData(f, station=site))                        
                    if data and rnx:
                        data.merge(rnx)
                        updated = True
                    elif not data and rnx:
                        data = rnx
                        updated = True
                    elif not data and not rnx:
                        continue
    
                    if data.date.year<2000:
                        continue
                    print 'Processing final rinex %d records' % len(data)
                    if updated:
                        data.save(opts.output, site['code'], True, 24, seconds=(0,30))
                    bias_file = '%s.bias' % site['code']
                    bias_file = os.path.join(opts.path, 'biases', bias_file)
                    bias_list = {}
                    if os.path.exists(bias_file):
                        for line in open(bias_file):
                            bias_list[GPSDateTime(line.split(',')[0])] = float(line.split(',')[1].strip())
                    bias_dt = GPSDateTime(*(dt - timedelta(1)).timetuple()[:3])
                    if bias_dt in bias_list:
                        bias = bias_list[bias_dt]
                    else:
                        bias = None
                    data.calctec(path=opts.path, rec_bias=bias)#, bad_prn=['G32'])
                    figname = os.path.join(opts.output, site['code']+'_tec.png')
                    data.plot('epoch', 'eqTEC', figname=figname, colormap='lat', marks=True)
                    cPickle.dump(data, os.path.join(opts.output, site['code']+'.pkl'))
                except:
                    if opts.force:
                        continue
                    else:
                        traceback.print_exc()
                        break

def main():
    '''
    Script to generate tec plots in realtime from 15 min raw data
    '''

    # command line interface (docstring doubles as the --help description)
    parser = argparse.ArgumentParser(description=main.func_doc)
    parser.add_argument('-s', '--sites', action='store',
                        help='List of sites, default all stations')
    parser.add_argument('-o', '--output', action='store', default=TMP,
                        help='Output path to files and figures, default: current directory')
    parser.add_argument('-p', '--path', action='store',
                        default='/data/sources/gps_scripts/',
                        help='Path for almanac and bias folders, default: current directory')
    parser.add_argument('-F', '--force', action='store_true',
                        help='Force execution non stop on errors.')

    args = parser.parse_args()
    run_dt = GPSDateTime()

    # every gps station of the configured networks, optionally restricted
    # to the comma separated codes given with -s/--sites
    stations = request.stations(instrument='gps', network=','.join(networks))
    if args.sites:
        wanted = args.sites.split(',')
        stations = [stn for stn in stations if stn['code'] in wanted]

    # process the stations in parallel, 8 workers
    Parallel(n_jobs=8)(delayed(realtime)(args, run_dt, stn)
                       for stn in stations)
    
#Execute main routine
if __name__ == "__main__":
    t0 = time.clock()
    print '\n Start at %s' % GPSDateTime.now()
    main()
    print '\n Finish at %s' % GPSDateTime.now(), time.clock()-t0