#!/usr/bin/env python

import os,re
import numpy
import pickle
from datetime import datetime, timedelta
from netCDF4 import Dataset
import numpy as np
from INFILE import siteid, sitelat, sitelon, maindir

# NOTE(review): export_flag is not referenced anywhere in this file chunk --
# confirm whether it is read by another module before removing.
export_flag = False
# When True, average a 3x3 box of grid points around the station instead of
# sampling the single nearest point (see main()).
use_averaging = True
# Base URL for the MDL HRMOS gridded QPF grib2 files downloaded by convert_files().
hrmos_addr = 'http://www.mdl.nws.noaa.gov/~qpf/grib/'


def main():
    os.chdir('%s/hrmos_qpf' % maindir)
    outfiles, filepfx = convert_files()

    sites = ['KPNS','KBIL','KSDF','KOTH','KSYR','KAUS','KBUR','KEWR','KDLH','KICT']
    sitelats = [30.47,45.7831,38.1858,43.4163,43.1146,30.2026,34.1965,40.6899,46.8389,37.6528]
    sitelons = [-87.2,-108.5524,-85.7421,-124.2467,-76.1119,-97.6681,-118.3542,-74.1782,-92.1800,-97.4286]
    for site,sitelt,siteln in zip(sites,sitelats,sitelons):
        # 360 degree longitude correction
        if siteln < 0:
            siteln = 360. + siteln

        # Find the location of our station
        xpt, ypt = get_coords(outfiles[-1],sitelt,siteln)

        # If using averaging, get the surrounding points
        if use_averaging:
            print "Using averaging..."
            xpts = 3*[xpt-1] + 3*[xpt] + 3*[xpt+1]
            ypts = 3*[ypt+1,ypt,ypt-1]
            print xpts
            print ypts
        else:
            xpts = [xpt]
            ypts = [ypt]


        # now get the data for that location
        data = get_data(outfiles,xpts,ypts)

        # Only plot if it's the real site
        if site == siteid:
            plot_output(datad)
        # Dump the archive of the data
        now = datetime.now()
        fcst_date = datetime(now.year,now.month,now.day) + timedelta(days=1)

        if not os.path.exists('%s_hrmos_dict.pickle' % site.upper()):
            outdict = {}
        else:
            outdict = pickle.load(open('%s_hrmos_dict.pickle' % site.upper(),'r'))
        for model in data.keys():
            if model not in outdict.keys():
                outdict[model] = {}
            outdict[model][fcst_date] = data[model]

        pickle.dump(outdict,open('%s_hrmos_dict.pickle' % site.upper(),'w'))



def get_data(filelist,xs,ys):
    # Loop through each file and get the value for each point
    datad = {}
    for file in filelist:
        ncin = Dataset(file)

        # Need to figure out which variable it is
        if file.startswith('dc'):
            varid = 'APCP_surface'
            prodid = 'DC Precip'        
        else:
            varid = 'var0_1_227_surface'
            prodid = 'PW Precip'


        # Now sort out the times
        timelist = [datetime(1970,1,1,0)+timedelta(seconds=int(d)) for d in ncin.variables['time'][:]]

        # We want the 6Z to 6Z values
        fcst_times = []
        now = datetime.now()
        tom = datetime(now.year,now.month,now.day,6) + timedelta(hours=24)
        curtime = tom
        while curtime < tom + timedelta(days=1):
            curtime = curtime + timedelta(hours=6)
            fcst_times.append(curtime)
        #print "Forecast times:", fcst_times

        # Are fed a list of x and y coordinates
        # Take the average of the values at each of those coordiantes
        # (if use_averaging == False, then only one point will be present)
        totalval = 0.0
        for time,idx in zip(timelist,range(len(timelist))):
            if time in fcst_times:
                valuelist = []
                for x,y in zip(xs,ys):
                    valuelist.append(ncin.variables[varid][idx,y,x])
                value = np.mean(valuelist)
                totalval = totalval + value
        # convert from mm to inches
        totalval = totalval * 0.0393701
        print prodid, totalval
        datad[prodid] = totalval
    return datad




def get_coords(infile, sitelat, sitelon):
    # Using the sitelat and sitelon, get the x-y coordinates
    # of that point
    print "Starting coordinate search..."
    # Load the latitude and longitude grids
    innc = Dataset(infile,'r')
    lats = innc.variables['latitude'][:,:]
    lons = innc.variables['longitude'][:,:]
    ylist = range(np.shape(lats)[1])
    xlist = range(np.shape(lats)[0])
    xmat,ymat = np.meshgrid(ylist,xlist)

    orig_shape = np.shape(lats)
    column_shape = orig_shape[0] * orig_shape[1]
    latcol = np.reshape(lats,column_shape)
    loncol = np.reshape(lons,column_shape)
    ycol = np.reshape(ymat,column_shape)
    xcol = np.reshape(xmat,column_shape)


    print "0,0"
    print latcol[0],loncol[0]
    print sitelat, sitelon
    #raw_input()
    mind = 100.
    miny = 0
    minx = 0
    minlat = 0
    minlon = 0
    for lat,lon,y,x in zip(latcol,loncol,ycol,xcol):
        #print lat,lon,x,y
        #raw_input()
        xdist = np.sqrt((sitelat-lat)**2 + (sitelon-lon)**2)
        #if xdist < 0.25:
        #    print lat,lon,x,y,xdist

        if xdist < mind:
            mind = xdist
            miny = y
            minx = x
            minlat = lat
            minlon = lon

    print "NEAREST POINT"
    print miny, minx, minlat, minlon, mind
    #raw_input()
    return minx, miny



def convert_files():
    # Get the current time
    now = datetime.utcnow()
    fcst_start = datetime(now.year,now.month,now.day,now.hour)


    # Find the files through our time of interest
    if now.hour >= 14 or now.hour <=2:
        # We are looking a the 12Z forecast
        filepfx = fcst_start.replace(hour=12)
    else:
        filepfx = fcst_start.replace(hour=0)


    print "Looking for forecast starting at", filepfx
    # Cleanup first
    os.system('rm hrqpf*_4km')
    os.system('rm *.nc')

    # Try downloading
    os.system('wget %s/hrqpf_%02dz_PWP06_grib2_4km' % (hrmos_addr,filepfx.hour))
    os.system('wget %s/hrqpf_%02dz_DCP06_grib2_4km' % (hrmos_addr,filepfx.hour))


    # convert to netcdf
    allfiles = os.listdir('.')
    gribfiles = [f for f in allfiles if f.endswith('_4km')]
    gribfiles.sort()
    outfiles = []
    for file in gribfiles:
        if re.search('PWP',file):
            outfile = 'pwp_values.nc'
        else:
            outfile = 'dcp_values.nc'

        #print file, outfile
        # Do the conversion
        os.system('/usr/local/bin/wgrib2 %s -netcdf %s' % (file,outfile))
        outfiles.append(outfile)
    return outfiles, filepfx


if __name__ == '__main__':
    # Run the full download -> convert -> extract -> archive pipeline when
    # executed as a script.
    main()

