#!/usr/bin/env python
# Function to pull the latest USL forecast

def scrub_USL(siteid):
    # Read the html from the page
    import urllib, re, os, cPickle, sys
    from datetime import datetime, timedelta
    sys.path.append('..')
    from INFILE import maindir
    

    # Try to load the existing directory
    if os.path.exists('%s/site_data/%s_USL12Z_fcst.pickle' % (maindir,siteid)):
        fcst_dict_12Z = cPickle.load(open('%s/site_data/%s_USL12Z_fcst.pickle' % (maindir,siteid),'r'))
        fcst_dict_22Z = cPickle.load(open('%s/site_data/%s_USL22Z_fcst.pickle' % (maindir,siteid),'r'))

    else:
        fcst_dict_12Z = {}

        fcst_dict_22Z = {}


    now = datetime.utcnow()
    # figure out where we are
    if fcst_dict_12Z.keys() != [] and fcst_dict_22Z.keys() != []:
        # get the most recent date
        grabs_12Z = fcst_dict_12Z.keys()
        grabs_12Z.sort()
        last_grabbed_12Z = grabs_12Z[-1]
        grabs_22Z = fcst_dict_22Z.keys()
        grabs_22Z.sort()
        last_grabbed_22Z = grabs_22Z[-1]

        if last_grabbed_12Z == last_grabbed_22Z:
            last_grabbed = (last_grabbed_22Z - timedelta(hours=24)).replace(hour=22)
        else:
            last_grabbed = (last_grabbed_12Z - timedelta(hours=24)).replace(hour=12)
        #if last_grabbed_12Z > last_grabbed_22Z:
        #    last_grabbed = last_grabbed_12Z
        #else:
        #    last_grabbed = last_grabbed_22Z
        tograb = []
    else:
        last_grabbed = USL_startdate
        tograb = [USL_startdate]


    #last_grabbed = last_grabbed - timedelta(hours=24)
    print "LAST GRAB", last_grabbed
    
    #print "Latest fcst", last_grabbed_12Z
    #print fcst_dict_12Z[last_grabbed_12Z]['high'], fcst_dict_12Z[last_grabbed_12Z]['low']

    #raw_input()
    # Find all times since then that are valid
    if last_grabbed.hour == 12:
        curtime = last_grabbed + timedelta(hours=10)
    elif last_grabbed.hour == 22:
        curtime = last_grabbed + timedelta(hours=14)

    while curtime < now:
        tograb.append(curtime)
        if curtime.hour == 12:
            curtime = curtime + timedelta(hours=10)
        elif curtime.hour == 22:
            curtime = curtime + timedelta(hours=14)

    print "To grab:", tograb
    #raw_input()


    for modtime in tograb:
        base_page = 'http://www.microclimates.org/forecast/%s/%s.html' % (siteid.upper(),modtime.strftime('%Y%m%d_%H'))
        try:
            infile = urllib.urlopen(base_page)
        except:
            print "Couldn't find web output--model may not have run."
            exit(1)
        info = infile.read().split('<tr>')
        infile.close()
        #print info
        for block in info:
            if re.search('&deg;F</td>',block):
                #print "Possibly?"
                try:
                    split_block = block.split('<td>')
                    #print split_block
                    high = int(re.search('(-?\d{1,3})',split_block[1]).groups()[0])
                    low = int(re.search('(-?\d{1,3})',split_block[2]).groups()[0])
                    max_wind = int(re.search('(\d{1,3})',split_block[3]).groups()[0])
                    precip = float(re.search('(\d{1,3}.\d{2})',split_block[4]).groups()[0])
                    #print high, low, winds, precip
                except:
                    pass




        # For now just grab the data we need
        # Remove the '3hourly' from UTC
        #fcst_dict['UTC'] = fcst_dict['UTC'][1:]
        # Compute the forecast date
        fcst_date = modtime + timedelta(hours=24)

        #print modtime.hour, fcst_date.replace(hour=0), high,low,max_wind,precip
        #raw_input()
        # Now try to export
        fcst_date_0 = fcst_date.replace(hour=0)
        if modtime.hour == 12:
            print "12Z fcst:", fcst_date_0, high, low, max_wind, precip
            if fcst_date_0 not in fcst_dict_12Z.keys():
                fcst_dict_12Z[fcst_date_0] = {}
            fcst_dict_12Z[fcst_date_0]['high'] = float(high)
            fcst_dict_12Z[fcst_date_0]['low'] = float(low)
            fcst_dict_12Z[fcst_date_0]['wind'] = float(max_wind)
            fcst_dict_12Z[fcst_date_0]['precip'] = float(precip)
        if modtime.hour == 22:
            if fcst_date_0 not in fcst_dict_22Z.keys():
                fcst_dict_22Z[fcst_date_0] = {}
            fcst_dict_22Z[fcst_date_0]['high'] = float(high)
            fcst_dict_22Z[fcst_date_0]['low'] = float(low)
            fcst_dict_22Z[fcst_date_0]['wind'] = float(max_wind)
            fcst_dict_22Z[fcst_date_0]['precip'] = float(precip)
           
    # Dump the resulting dictionary
    cPickle.dump(fcst_dict_12Z,open('%s/site_data/%s_USL12Z_fcst.pickle' % (maindir,siteid), 'w'))
    cPickle.dump(fcst_dict_22Z,open('%s/site_data/%s_USL22Z_fcst.pickle' % (maindir,siteid), 'w'))
 


def scrub_ALL_USL(siteid):
    # Get the hourly forecast from the latest USL
    from datetime import datetime, timedelta
    from profile_class import Profile
    import urllib
    import re
    import numpy as np
    wdir_dict = {'N'    : 0.,
                 'NNE'  : 22.5,
                 'NE'   : 45.,
                 'ENE'  : 67.5,
                 'E'    : 90.,
                 'ESE'  : 112.5,
                 'SE'   : 135.,
                 'SSE'  : 157.5,
                 'S'    : 180.,
                 'SSW'  : 202.5,
                 'SW'   : 225.,
                 'WSW'  : 247.5,
                 'W'    : 270.,
                 'WNW'  : 292.5,
                 'NW'   : 315.,
                 'NNW'  : 337.5}


    # Some logic to figure out the most recent time
    curtime = datetime.utcnow()
    if curtime.hour > 13 and curtime.hour < 23:
        modtime = datetime(curtime.year,curtime.month,curtime.day,12)
        modtimes = [modtime]
    elif curtime.hour >=23:
        modtime = datetime(curtime.year,curtime.month,curtime.day,22)
        modtimes = [modtime,modtime-timedelta(hours=10)]
    else:
        curtime = curtime - timedelta(days=1)
        modtime = datetime(curtime.year,curtime.month,curtime.day,22)
        modtimes = [modtime,modtime-timedelta(hours=10)]


    #print modtime.strftime('%Y%m%d_%H')
    outdir = {}
    for modtime in modtimes:
        base_page = 'http://www.microclimates.org/forecast/%s/%s.html' % (siteid.upper(),modtime.strftime('%Y%m%d_%H'))
        try:
            infile = urllib.urlopen(base_page)
        except:
            print "Couldn't find web output--model may not have run."
            exit(1)
        info = infile.read().split('<tr>')

        USL_out_dict = {}
        USL_maxmin_dict = {}

        curProfile = Profile()
        for block in info:
            if re.search('&deg;F</td>',block):
                # The hunt for the max and min
                print "Possibly?"
                try:
                    split_block = block.split('<td>')
                    #print split_block
                    high = int(re.search('(-?\d{1,3})',split_block[1]).groups()[0])
                    low = int(re.search('(-?\d{1,3})',split_block[2]).groups()[0])
                    max_wind = int(re.search('(\d{1,3})',split_block[3]).groups()[0])
                    total_precip = float(re.search('(\d{1,3}.\d{2})',split_block[4]).groups()[0])
                    #print high, low, winds, precip
                    USL_maxmin_dict[datetime.now().date() + timedelta(days=1)] = [high,low,1]               
                except:
                    continue

            block = re.sub('<th scope="row" class="nobg3">','',block)   
            block = re.sub('<th scope="row" class="nobg">','',block)   
            block = re.sub('</th>',',',block)
            block = re.sub('</td>',',',block)
            block = re.sub('</tr>','',block)
            block = re.sub('<td>','',block)
            block = re.sub('<td class="hr3">','',block)
            block = re.sub('\n','',block)
            del curProfile
            curProfile = Profile()
            try:
                date,Temp,Dewpt,RH,Tsoil,Wdir,Wspd,SkyCover,Rnet,Precip,dummy = block.split(',')
                if re.search('Time',block):
                    #print "Not an ob line"
                    continue
                #print block
            except:
                #print "Not an ob line"
                continue

            # Now that we have our obs, let's sort them
            if len(date.strip()) > 4:
                # This line contains a new date
                curtime = datetime.strptime(date.strip()+str(curtime.year),'%d %b %H%M%Y')
            else:
                curtime = curtime.replace(hour=int(date.strip()[0:2]))

            # Now also grab wind direction to get U and V components
            wdir_num = wdir_dict[Wdir.strip()]

            uwnd = float(Wspd) * (-1 * np.sin(wdir_num * np.pi/180.))
            vwnd = float(Wspd) * (-1 * np.cos(wdir_num * np.pi/180.))

            curProfile.tmpc = (float(Temp) - 32) * 5./9.
            curProfile.sknt = float(Wspd)
            curProfile.uwnd = uwnd
            curProfile.vwnd = vwnd
            # check on precip
            if Precip.strip() != '':
                curProfile.p01m = float(Precip)
            else:
                curProfile.p01m = 0.0

            #print curtime, float(Temp), float(Wspd), Precip

            USL_out_dict[curtime] = curProfile
        outdir[modtimes.index(modtime)] = (USL_out_dict,USL_maxmin_dict)

    if len(outdir.keys()) > 1:
        return outdir[0][0],outdir[0][1],outdir[1][0], outdir[1][1]
    else:
        return outdir[0][0], outdir[0][1],{},{}
        

# Script entry: resolve the configured site from INFILE (one directory up)
# and refresh its USL forecast pickles.  Note this runs on import -- there
# is no __main__ guard.  scrub_USL reads USL_startdate via this
# module-level import.
import sys
sys.path.append('..')
from INFILE import siteid, USL_startdate
scrub_USL(siteid.upper())  
#scrub_ALL_USL(siteid.upper())
