#!/usr/bin/env python

import os
import numpy as np
from scipy.ndimage.interpolation import map_coordinates
from netCDF4 import Dataset
import matplotlib
import urllib
import re
from datetime import datetime, timedelta

# Leaving this to true by default, may add command line options later.
# When exporting, figures are written to disk rather than shown on screen.
export_flag = True

# Averaging will take the average value of the closest point to the lat lon
# and the 8 surrounding points
use_averaging = True

if export_flag:
    # Select the non-interactive Agg backend BEFORE pyplot is imported
    # (pyplot is imported lazily inside plot_output)
    matplotlib.use('agg')

# Site/location and directory configuration comes from the project-local
# INFILE module; siteid is the operational site whose figure gets exported.
from INFILE import siteid, sitelat, sitelon, webdir, maindir
import sys
sys.path.append('%s/data_parsers' % maindir)
#siteid = 'KMSY'
#sitelat = 29.98
#sitelon = -90.25
op_site = siteid
# NCEP HPC 6-hourly probabilistic QPF grib files are served from here
hpc_ftp = 'ftp://ftp.hpc.ncep.noaa.gov/pqpf/conus/pqpf_6hr'




def main():
    """Fetch/convert the latest HPC PQPF grids and plot percentile
    box-and-whisker forecasts for each site in the demo list."""
    prev_dir = os.getcwd()
    os.chdir('%s/hpc_pqpf' % maindir)

    if export_flag:
        # Auto-run mode: download the newest grib files and convert them
        outfiles, fcst_start = convert_files()
    else:
        # Offline/debug mode: reuse whatever netCDF files are already here
        outfiles = [name for name in os.listdir('.') if name.endswith('.nc')]
        fcst_start = datetime(2012, 8, 24, 12)

    outfiles.sort()

    sites = ['KPNS','KBIL','KSDF','KAST','KSYR','KAUS','KBUR','KEWR','KDLH','KICT']
    sitelats = [30.47,45.7831,38.1858,46.15695,43.1146,30.2026,34.1965,40.6899,46.8389,37.6528]
    sitelons = [-87.2,-108.5524,-85.7421,-123.8833,-76.1119,-97.6681,-118.3542,-74.1782,-92.1800,-97.4286]
    for site, stn_lat, stn_lon in zip(sites, sitelats, sitelons):
        # The grid longitudes run 0-360, so shift western-hemisphere values
        if stn_lon < 0:
            stn_lon = 360. + stn_lon

        # Locate the grid cell nearest the station
        xpt, ypt = get_coords(outfiles[-1], stn_lat, stn_lon)

        if use_averaging:
            print("Using averaging...")
            # Build the 3x3 neighborhood centered on (xpt, ypt)
            xpts = 3 * [xpt - 1] + 3 * [xpt] + 3 * [xpt + 1]
            ypts = 3 * [ypt + 1, ypt, ypt - 1]
            print(xpts)
            print(ypts)
        else:
            xpts = [xpt]
            ypts = [ypt]

        # Pull the (possibly averaged) values from every file and plot
        data = get_data(outfiles, xpts, ypts)
        plot_output(data, fcst_start, site)

    os.chdir(prev_dir)
    # And cleanup
    #if export_flag:
    #    os.system('rm *.nc')
    #    os.system('rm *.grb')

def plot_output(datad, fcst_start, siteidcur):
    """Draw box-and-whisker plots of HPC precip percentiles for one site.

    Left panel: one box per 6-hr accumulation period (extreme percentiles
    dropped).  Right panel: the summed daily total.  The figure is saved
    and moved to the web directory for the operational site when
    export_flag is set, otherwise shown interactively.  Daily totals are
    also appended to a per-site pickle archive.

    datad      -- dict of product id ('pctile_PP_fHHH' / 'pqpf_fHHH') ->
                  precip value in kg/m^2 (mm); converted to inches here
    fcst_start -- datetime of the forecast cycle start (00Z or 12Z)
    siteidcur  -- station identifier used for titles and filenames
    """
    # Only the percentile products feed the plots; the pqpf_* accumulation
    # products in datad are not currently drawn.
    allkeys = datad.keys()
    pctilelist = [d for d in allkeys if d.startswith('pctile')]
    pctilelist.sort()

    import matplotlib.pyplot as plt
    import matplotlib.gridspec as gridspec
    from matplotlib.patches import Polygon

    # Bucket the percentile products by forecast hour, converting mm -> in.
    alllists = []
    datelist = []
    for hour in range(6, 54, 6):
        hrlist = [d for d in pctilelist if d.endswith('f%03d' % hour)]
        hrlist.sort()
        pltlist = [(datad[key] * 39.3701 / 1000.) for key in hrlist]
        # Zero out trace amounts below a hundredth of an inch
        pltlist = [p if (p > 0.01) else 0.00 for p in pltlist]
        # Sorted keys run 05,10,25,50,75,90,95 -- drop the two extremes,
        # leaving 5 percentiles per hour
        alllists.append(pltlist[1:-1])
        datelist.append(fcst_start + timedelta(hours=hour))
    date_labels = [r.strftime('%d/%HZ') for r in datelist]

    # Shade everything outside the 24-hr "day of interest" window.  For a
    # 12Z cycle that window is forecast hours 24-48 (boxes 4-7); for a 00Z
    # cycle it is hours 6-24 (boxes 1-4).
    if fcst_start.hour == 12:
        vleft = 3.5
        vright = 7.5
        indleft = 3
        indright = 7
        endday = fcst_start + timedelta(hours=24)
    else:
        vleft = 0.0
        indleft = 0
        vright = 4.5
        indright = 4
        endday = fcst_start

    # Total daily accumulation for each of the 5 retained percentiles
    totals = []
    for r in range(5):
        curvals = [time[r] for time in alllists]
        totals.append(np.sum(curvals[indleft:indright]))

    plt.figure(figsize=(12, 11))
    gs = gridspec.GridSpec(1, 2, width_ratios=[3, 1])
    ax1 = plt.subplot(gs[0])
    bp = plt.boxplot(alllists, vert=1)
    plt.hold(True)  # hold already defaults to True on this matplotlib era
    plt.axvspan(0.0, vleft, facecolor='0.8', alpha=0.40)
    plt.axvspan(vright, 9., facecolor='0.8', alpha=0.40)
    plt.xticks(range(1, len(alllists) + 1), date_labels)
    plt.grid()
    plt.ylabel('Prev. 6-hr accumulation (in.)')
    plt.xlabel('End time of accumulation')
    plt.title('HPC precip. amount confidence intervals for %s' % siteidcur.upper())

    # Restyle the default boxplot lines and fill the boxes
    boxColor = 'darkseagreen'
    plt.setp(bp['boxes'], color='black', linewidth=2)
    plt.setp(bp['whiskers'], color='black', linewidth=2)
    plt.setp(bp['fliers'], color='black', linewidth=2)
    plt.setp(bp['medians'], color='blue', linewidth=2)
    plt.setp(bp['caps'], color='black', linewidth=2)

    for i in range(len(alllists)):
        box = bp['boxes'][i]
        boxX = []
        boxY = []
        # A box path has 5 vertices (4 corners plus closure)
        for j in range(5):
            boxX.append(box.get_xdata()[j])
            boxY.append(box.get_ydata()[j])
        boxCoords = list(zip(boxX, boxY))
        boxPolygon = Polygon(boxCoords, facecolor=boxColor)
        ax1.add_patch(boxPolygon)

    # Right panel: distribution of the daily totals
    ax2 = plt.subplot(gs[1])
    bpt = plt.boxplot(totals, vert=1)
    plt.grid()
    plt.title('Total')
    plt.xticks([1], [endday.strftime('%m/%d/%Y')])
    plt.setp(bpt['boxes'], color='black', linewidth=2)
    plt.setp(bpt['whiskers'], color='black', linewidth=2)
    plt.setp(bpt['fliers'], color='black', linewidth=2)
    plt.setp(bpt['medians'], color='blue', linewidth=2)
    plt.setp(bpt['caps'], color='black', linewidth=2)

    for i in range(1):
        box = bpt['boxes'][i]
        boxX = []
        boxY = []
        for j in range(5):
            boxX.append(box.get_xdata()[j])
            boxY.append(box.get_ydata()[j])
        boxCoords = list(zip(boxX, boxY))
        boxPolygon = Polygon(boxCoords, facecolor=boxColor)
        ax2.add_patch(boxPolygon)

    plt.tight_layout()
    if export_flag:
        # Only export the operational site's figure to the web directory
        if siteidcur.upper() == op_site.upper():
            print("Plotting and exporting for: %s" % siteidcur.upper())
            plt.savefig('%s_hpc_pctiles.png' % siteidcur.upper(), bbox_inches='tight')
            os.system('mv %s_hpc_pctiles.png %s' % (siteidcur.upper(), webdir))
    else:
        plt.show()

    # Archive the daily totals, keyed by day.  Binary mode plus context
    # managers fix the original's text-mode, never-closed file handles.
    import pickle
    archive_name = '%s_hpc_archive.pickle' % siteidcur.upper()
    if os.path.exists(archive_name):
        with open(archive_name, 'rb') as fin:
            archived = pickle.load(fin)
    else:
        archived = {}
    archived[endday] = totals
    with open(archive_name, 'wb') as fout:
        pickle.dump(archived, fout)
   

def get_data(filelist, xs, ys):
    """Extract the mean precip value at the given grid points from each file.

    filelist -- netCDF filenames; the product id is the filename minus '.nc'
    xs, ys   -- parallel lists of grid column/row indices (a single point,
                or the 3x3 neighborhood when averaging is enabled)

    Returns a dict mapping product id -> mean 'APCP_surface' value over
    those points.
    """
    datad = {}
    for fname in filelist:
        prodid = fname[:-3]  # strip the '.nc' suffix
        ncin = Dataset(fname)
        try:
            # Values are read eagerly here, so the file can be closed
            # before the mean is taken.
            valuelist = [ncin.variables['APCP_surface'][0, y, x]
                         for x, y in zip(xs, ys)]
        finally:
            # The original leaked one open dataset per file; always close.
            ncin.close()
        datad[prodid] = np.mean(valuelist)
    return datad

def get_coords(infile, sitelat, sitelon):
    """Find the grid (x, y) indices of the point nearest (sitelat, sitelon).

    Nearness is the naive Euclidean distance in degrees (no great-circle
    correction), matching the original scan.  Replaces the original
    pure-Python loop over every grid cell with a vectorized argmin.

    Returns (x, y) = (column, row) indices, or (0, 0) when no grid point
    lies strictly within 100 degrees (the original search's starting
    threshold, preserved here).
    """
    print("Starting coordinate search...")
    # Load the latitude and longitude grids, closing the file promptly
    innc = Dataset(infile, 'r')
    try:
        lats = innc.variables['latitude'][:, :]
        lons = innc.variables['longitude'][:, :]
    finally:
        innc.close()

    print("0,0")
    print("%s %s" % (lats[0, 0], lons[0, 0]))
    print("%s %s" % (sitelat, sitelon))

    # Vectorized nearest-point search; argmin returns the first occurrence
    # of the minimum, matching the original strict-< scan order.
    dist = np.sqrt((sitelat - lats) ** 2 + (sitelon - lons) ** 2)
    flat_idx = np.argmin(dist)
    mind = dist.flat[flat_idx]

    if mind >= 100.:
        # Nothing beat the original 100-degree starting threshold; the old
        # loop fell through with its zero-initialized trackers.
        print("NEAREST POINT")
        print("%s %s %s %s %s" % (0, 0, 0, 0, 100.))
        return 0, 0

    miny, minx = np.unravel_index(flat_idx, lats.shape)
    print("NEAREST POINT")
    print("%s %s %s %s %s" % (miny, minx, lats[miny, minx], lons[miny, minx], mind))
    return minx, miny


     

def convert_files():
    """Download the latest HPC 6-hourly PQPF/percentile grib files and
    convert them to netCDF with wgrib2.

    Returns (outfiles, filepfx): the netCDF filenames written and the
    datetime of the forecast cycle they belong to.
    """
    now = datetime.utcnow()
    fcst_start = datetime(now.year, now.month, now.day, now.hour)

    # HPC issues 00Z and 12Z cycles; allow ~2 hours of processing lag.
    if now.hour >= 14 or now.hour <= 2:
        # Use the 12Z run.  BUG FIX: between 00Z and 02Z the most recent
        # 12Z run is from the PREVIOUS day -- the original pointed at a
        # same-day 12Z cycle that did not exist yet.
        filepfx = fcst_start.replace(hour=12)
        if now.hour <= 2:
            filepfx = filepfx - timedelta(days=1)
    else:
        filepfx = fcst_start.replace(hour=0)

    print("Looking for forecast starting at %s" % filepfx)
    # Clear out grib files left over from the previous run
    os.system('rm *.grb')

    # One product prefix for the PQPF grid plus one per percentile grid
    prefixes = ['pqpf_p06i_conus'] + \
               ['prcntil_p06i_%02dpt_conus' % p for p in (5, 10, 25, 50, 75, 90, 95)]

    # We need files every 6 hours out to 48 hours
    stamp = filepfx.strftime('%Y%m%d%H')
    for fhour in range(6, 54, 6):
        for pfx in prefixes:
            os.system("wget %s/%s_%sf%03d.grb" % (hpc_ftp, pfx, stamp, fhour))

    # Convert everything we downloaded to netCDF
    allfiles = os.listdir('.')
    gribfiles = [f for f in allfiles if f.endswith('.grb')]
    gribfiles.sort()
    outfiles = []
    for fname in gribfiles:
        if fname.startswith('pqpf'):
            outfile = "pqpf_" + fname[-8:-4] + '.nc'
        else:
            # Pull the percentile number out of e.g. 'prcntil_p06i_50pt_...'
            pctile = int(re.search(r'_(\d{2})pt_', fname).groups()[0])
            outfile = "pctile_%02d_" % pctile
            outfile = outfile + fname[-8:-4] + '.nc'
        outfiles.append(outfile)
        os.system('/usr/local/bin/wgrib2 %s -netcdf %s' % (fname, outfile))

    return outfiles, filepfx



# Run only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()

