# standard library
import datetime
import os
import string
import tempfile
import urllib2
import user
from HTMLParser import HTMLParser
from StringIO import StringIO as sio
from xml.etree import ElementTree as et

# third-party
import Image
import matplotlib.pyplot as plt
import numpy
import tables  # PyTables; used by main()/get_todays_rose but was missing
from lxml import etree

if os.sys.argv[0]:  # change cwd for working in pywin
    # run from the script's own directory so relative paths
    # (e.g. '../server_data/zones.xml') resolve the same way everywhere
    os.chdir(os.path.split(os.sys.argv[0])[0])

# guessing the dropbox folder location (output directory for plots/xml)
dropbox_folder = os.path.join(user.home, 'Dropbox/Public/avy-app')

# colors for various danger conditions:
# rgb tuple (as read from the rose gif) -> (danger level name, odd digit code)
color_dict = {
    (  4,  2,  4): ('extreme',     9),
    (252,  2,  4): ('high',        7),
    (252,154, 52): ('considerable',5),
    (252,254, 52): ('moderate',    3),
    (  4,254,  4): ('low',         1)}

# reverse the dictionary: digit code -> rgb tuple (Python 2 iteritems)
rev_color_dict = dict((i[1][1],i[0]) for i in color_dict.iteritems())

# weekday labels for plot x-axis ticks (datetime.weekday(): Mon == 0)
weekdays = 'Mon Tue Wed Thu Fri Sat Sun'.split()

# a lookup from zone names to zone short_names (for web urls)
UT_zones = {
    'Logan':'logan',
    'Ogden':'ogden',
    'Salt Lake City':'slc',
    'Provo':'provo',
    'Manti Skyline':'skyline',
    'Western Uinta':'uintas',
    'Moab':'moab' }

# types of data in the rose xml file -- one element name per line,
# consumed by UT_Avy_Adviso.to_xml via strip()/split('\n')
xml_attrs='''
    zone_name
    zone_number
    created_by
    created_date
    updated_by
    updated_date
    danger_level_name
    danger_advice
    danger_likelihood
    danger_avalanche_size
    recommended_action
    watches_and_warnings
    watches_and_warnings_expiry_date
    rose_date0
    rose_colors0
    rose_dots0'''

def gchart(istring='135135135133133133133135'):
    '''Build the Google-chart color string for a rose plot.

    istring: 24 danger-level digits (3 rings x 8 sectors, ring-major);
    returns three '|'-joined groups of lowercase rrggbb hex colors
    (inner, middle, outer rings), joined by commas.
    '''
    levels = [int(ch) for ch in istring]
    rgbs = [rev_color_dict[lvl] for lvl in levels]
    # BUG FIX: the original called struct.pack(...).encode('hex'), but the
    # `struct` module was never imported (NameError at call time); printf-style
    # formatting produces the identical lowercase hex string.
    hexes = ['%02x%02x%02x' % rgb for rgb in rgbs]
    return ','.join(['|'.join(hexes[2:24:3]),
                     '|'.join(hexes[1:24:3]),
                     '|'.join(hexes[0:24:3])])


def dtime(date, time):
    '''Build a datetime from SNOTEL strings: date 'YYYY-MM-DD', time 'HHMM'.

    Only the hour part of *time* is used (minutes are discarded).
    '''
    # unpack explicitly instead of concatenating a map() result -- clearer,
    # and keeps working when map() returns an iterator (Python 3)
    year, month, day = [int(part) for part in date.split('-')]
    hour = int(time[:2])
    return datetime.datetime(year, month, day, hour)

def date_str(dt, sep='-'):
    '''Format a date/datetime as zero-padded YYYY<sep>MM<sep>DD.'''
    # str.zfill replaces string.zfill: the `string` module functions are
    # deprecated (and removed in Python 3); output is identical.
    return sep.join((str(dt.year).zfill(4),
                     str(dt.month).zfill(2),
                     str(dt.day).zfill(2)))

# reference epoch (1960-01-01 00:00) -- presumably for offsetting SNOTEL
# timestamps; no use visible in this file -- TODO confirm
DATE_REF = dtime('1960-1-1','0000')


def snow_depth(site='628', state='ut', days=7):
    ''' return (date_array, data_array) scraped from a SNOTEL hourly report

    site: SNOTEL station id as a string
    state: two-letter state code
    days: how many days of hourly history to request
    '''
    #http://www.wcc.nrcs.usda.gov/nwcc/view
    #http://www.wcc.nrcs.usda.gov/nwcc/sntl-datarpt.jsp?site=628&days=2&state=ut
    url = 'http://www.wcc.nrcs.usda.gov/nwcc/sntl-datarpt.jsp?site='+\
        site+'&days='+str(days)+'&state='+state
    print url
    text = urllib2.urlopen(url).read()
    parser = SNOTEL_parser()
    parser.feed(text)
    # row[0]/row[1] are date/time strings; slices drop header/footer rows
    # and the leading date/time + trailing columns of each row
    data = [map(float,row[2:-2]) for row in parser.data[2:-2]]
    dates = [dtime(row[0],row[1]) for row in parser.data[2:-2]]
    data_arr = numpy.array(data)
    # the report uses large negative sentinels (e.g. -99.9) for missing data
    data_arr[data_arr<-99] = numpy.nan
    date_arr = numpy.array(dates, object)
    return date_arr, data_arr 


def zone_snotel_ids(zone_name):
    '''
    Return the SNOTEL site ids (list of strings) for *zone_name* from
    zones.xml, [] if the zone has the 'TODO' placeholder, or None if
    the zone is not found.
    '''
    fp = '../server_data/zones.xml'
    # context manager closes the file handle (the original leaked it)
    with open(fp) as f:
        root = etree.fromstring(f.read())
    # iterate children directly; getchildren() is deprecated
    for z in root:
        if z.find('zone_name').text == zone_name:
            txt = z.find('snotel_ids').text
            return [] if txt == 'TODO' else txt.split()
    return None
    
def plot_zone(zone_name):
    '''Fetch SNOTEL data for every site in the zone and plot the averages.'''
    site_ids = zone_snotel_ids(zone_name)
    results = [snow_depth(site_id) for site_id in site_ids]
    dates, datas = zip(*results)
    plot_snow_depth(dates, datas, zone_name)

def plot_snow_depth(dates, datas, zone_name):
    '''Plot averaged snow depth (blue, left axis) and temperature (red,
    right axis) and save <zone_name>.png into the dropbox folder.'''
    # column 1 is depth, column 3 is temperature in each site's array
    # NOTE(review): assumes every site returned identical timestamps -- confirm
    depths = numpy.vstack([data[:,1] for data in datas])
    temps = numpy.vstack([data[:,3] for data in datas])
    date = dates[0].tolist()
    # one x tick per midnight
    idxs = [date.index(dt) for dt in date if dt.hour==0]
    delta = datetime.timedelta(0.5)
    ticks = [date[i] for i in idxs]
    ticklabs = [date[i]+delta for i in idxs]  # NOTE(review): computed but unused
    days = [date[i].day for i in idxs]
    wkdys = [date[i].weekday() for i in idxs]
    wdays = [weekdays[wdy]+' '+str(dy) for wdy,dy in zip(wkdys,days)]

    # forward-fill NaNs in place before averaging across sites
    for depth in depths:
        rem_nan(depth)
        
    plt.clf()        
    plt.figure(figsize=(6,3))
    depth = numpy.average(depths,0)
    #for depth, date in zip(depths,dates):
    plt.plot(date, smooth(depth,9), 'b')
    plt.grid(ls='-', alpha=.1)

    loc, lab = plt.xticks(ticks, wdays)
    plt.setp(lab, ha='left')
    
    plt.ylabel('Snow Depth (in)')
    plt.ylim(0,None)
    # second y-axis shares the x-axis for the temperature trace
    plt.twinx()

    # re-apply the ticks on the twinned axes
    loc, lab = plt.xticks(ticks, wdays)
    plt.setp(lab, ha='left')
    
    #for temp, date in zip(temps,dates):
    temp = numpy.average(temps,0)
    plt.plot(date, smooth(temp,9), 'r')
        
    plt.ylabel(u'Temperature (\u00b0F)')
    
    #plt.gca().yaxis.get_majorticklabels()[-1] = ''
    

    
    plt.savefig(os.path.join(dropbox_folder, zone_name+'.png'))


def rem_nan(a):
    '''Forward-fill NaN entries of the 1-D array *a* in place.

    A leading NaN is seeded from the first non-NaN value; every other NaN
    takes the value of its left neighbor.
    '''
    leading_nan = numpy.isnan(a[0])
    if leading_nan:
        a[0] = a[~numpy.isnan(a)][0]
    for idx in range(1, len(a)):
        if numpy.isnan(a[idx]):
            a[idx] = a[idx - 1]

def mk_px():
    '''Return the 24 (x, y) pixel positions to sample on the rose image.

    Eight sectors, three rings each (outer to inner), offset 0.35 of a
    sector from vertical around a fixed image center.
    '''
    import math
    cx, cy = 85, 87
    radii = (65, 48, 31)
    sector = 2. / 8. * math.pi
    points = []
    for octant in range(8):
        angle = octant * sector + 0.35 * sector
        for radius in radii:
            rr = radius - 8
            points.append((math.sin(angle) * rr + cx,
                           -math.cos(angle) * rr + cy))
    return points


##def plot():
##    import pylab
##    x,y = zip(*mk_px())
##    pylab.cla()
##    pylab.imshow(pylab.imread(r'C:\Users\Paul\Dropbox\Bauer\errors\rose.bmp'))
##    pylab.plot(x,y)
##    pylab.savefig(r'c:\users\paul\pictures\rose_order.png')


class Zone:
    '''One forecast zone record parsed from a <zone> element of zones.xml.'''
    def __init__(self, root):
        find = root.find
        self.zone_name = find('zone_name').text
        self.short_name = find('short_name').text
        self.state = find('state').text
        ids_text = find('snotel_ids').text
        if ids_text == 'TODO':
            # placeholder for zones with no stations entered yet
            self.snotel_ids = []
        else:
            self.snotel_ids = [int(tok) for tok in ids_text.split()]

class Avy:
    ''' class that serves as a container for all avy zone data

    Parses zones.xml and exposes each Zone both via self.zones and as an
    attribute named by the zone's short name (e.g. self.slc).
    '''
    def __init__(self):
        # NOTE(review): hard-coded absolute path -- consider parameterizing
        self.src_fp = r'C:\Users\Paul\workspace\avy-app\server_data\zones.xml'
        # context manager closes the file handle (the original leaked it)
        with open(self.src_fp) as f:
            self.root = etree.fromstring(f.read())
        self.zones = [Zone(e) for e in self.root.iterchildren(tag='zone')]
        # expose each zone as an attribute keyed by its short name
        for z in self.zones:
            setattr(self, z.short_name, z)



class CO_Avy_Adviso:
    '''Parsed advisory data for one Colorado avalanche zone (data.xml feed).'''
    # NOTE(review): evaluated at class-definition time, so merely importing
    # this module reads zones.xml from disk via Avy() -- consider deferring
    name_dict = dict((z.short_name, z.zone_name) for z in Avy().zones)
    def __init__(self, short_name):
        # short_name: key into name_dict, e.g. 'slc'
        src = 'http://avalanche.state.co.us/data.xml'
        # TODO read this only once
        self.root = etree.fromstring(urllib2.urlopen(src).read())
        long_name = self.name_dict[short_name]
        for node in self.root.iterchildren(tag='zone'):
            #print node.find('zone_name').text, long_name
            if long_name == node.find('zone_name').text:
                self.created_by = node.find('created_by').text
                self.rose_colors = node.find('rose_colors0').text #rose_date0
                # rose_date0 is formatted m/d/y
                m,d,y = map(int, node.find('rose_date0').text.split('/'))
                self.date = datetime.date(y,m,d)
                danger_advice = node.find('danger_advice').text
                # NOTE(review): likelihood and size are parsed but never used
                danger_likelihood = node.find('danger_likelihood').text
                danger_avalanche_size = node.find('danger_avalanche_size').text
                recommended_action = node.find('recommended_action').text
                self.statement = ' '.join((danger_advice, recommended_action))
                # only the first matching zone is used
                return



class UT_Avy_Adviso:
    ''' object containing parsed data from the Utah Avalanche Center website '''

    def __init__(self, short_name):
        # short_name: zone key used in the advisory url, e.g. 'slc'
        self.date = None          # datetime of the advisory (set by load)
        self.rose_colors = None   # 24-digit danger-level string (set by load)
        self.url = 'http://utahavalanchecenter.org/advisory/'+short_name
        self.loaded = False
        self.load()

    def load(self):
        '''Download and parse the advisory page, populating the attributes.'''
        self.text = urllib2.urlopen(self.url).read()
        self.parser = UAC_parser()
        self.parser.feed(self.text)
        if not self.parser.img_urls:
            # no rose image published -- fill with placeholder values;
            # rose_gif still gets a YYYYMMDD prefix so the date parse below works
            self.rose_colors = '0'*24
            dt = datetime.datetime.today()
            self.rose_gif = date_str(dt, sep='')
            self.created_by = 'NA'
            self.statement = 'NA'
        else:
            self.rose_gif = 'http://utahavalanchecenter.org'+self.parser.img_urls[0]
            self.rose_colors = rose_colors(self.rose_gif)
            # BUG FIX: lstrip('Forecaster:') strips a *character set* and could
            # eat leading letters of the name; strip the literal prefix instead
            who = self.parser.forecaster[0].strip()
            if who.startswith('Forecaster:'):
                who = who[len('Forecaster:'):].strip()
            self.created_by = who
            # (removed an unused 'BOTTOM' heading-index lookup that could
            # raise IndexError and whose result was never read)
            self.statement = self.parser.advisos[1].strip()
        # the rose filename starts with YYYYMMDD; recover the advisory date
        daystr = self.rose_gif.split('/')[-1].split('-')[0]
        y,m,d = map(int, (daystr[:4], daystr[4:6], daystr[6:]))
        self.date = datetime.datetime(y,m,d)
        self.loaded = True

    def to_xml(self):
        '''Serialize the attributes named in xml_attrs (those present) to XML.'''
        attrs = [a.strip() for a in xml_attrs.strip().split('\n')]
        root = etree.Element('zone')
        for attr in attrs:
            if attr in dir(self):
                elem = etree.SubElement(root, attr)
                elem.text = getattr(self, attr)
        return etree.tostring(root, pretty_print=True)



class SNOTEL_parser(HTMLParser):
    '''
    Pull the hourly snow-depth table (id="dataTable") out of a SNOTEL
    report page; rows accumulate in self.data as lists of cell strings.
    '''
    def __init__(self):
        HTMLParser.__init__(self)
        self.data = None    # list of rows once the target table is found
        self._go = False    # True while inside the dataTable element

    def handle_starttag(self, tag, attrs):
        if tag == 'table':
            if ('id', "dataTable") in attrs:
                self.data = []
                self._go = True
            return
        if not self._go:
            return
        if tag == 'tr':
            self.data.append([])       # start a new row
        elif tag == 'td':
            self.data[-1].append('')   # start a new (empty) cell

    def handle_data(self, data):
        # append text only once at least one cell exists in the current row
        if not (self._go and self.data and self.data[-1]):
            return
        self.data[-1][-1] += data.strip()

    def handle_endtag(self, tag):
        if tag == 'table':
            self._go = False


class UAC_parser(HTMLParser):
    ''' pull rose image urls, advisory text, headings and the forecaster
    name out of a Utah Avalanche Center advisory page '''

    def __init__(self):
        HTMLParser.__init__(self)
        self.advisos = []    # advisory body text chunks
        self.headings = []   # heading text chunks
        self.img_urls = []   # candidate danger-rose image urls (.gif)
        self.forecaster = [] # forecaster name chunks
        self.reader = None   # list currently receiving character data

    def handle_starttag(self, tag, attrs):
        if tag == 'table':
            if ('class', 'advisory-component') in attrs:
                self.advisos.append('')
                self.reader = self.advisos
            elif ('class', 'advisory-container') in attrs:
                self.headings.append('')
                self.reader = self.headings
        elif tag == 'img':
            # only images inside an advisory table count as rose candidates
            if self.reader is self.advisos:
                src = dict(attrs)['src']
                if src.endswith('.gif'):
                    self.img_urls.append(src)
        elif tag == 'span' and ('class', 'forecaster') in attrs:
            self.forecaster.append('')
            self.reader = self.forecaster

    def handle_data(self, data):
        target = self.reader
        if target is not None:
            target[-1] += data

    def handle_endtag(self, tag):
        if tag == 'table':
            if self.reader is self.advisos:
                # an advisory table closed; following text belongs to headings
                self.headings.append('')
                self.reader = self.headings
            elif self.reader is self.headings:
                self.reader = None
        elif tag == 'span':
            self.reader = None



def rose_colors(img_url):
    '''Download the rose image and encode its 24 sampled quadrant colors
    as a string of danger-level digits (via color_dict / mk_px).'''
    raw = urllib2.urlopen(img_url).read()
    img = Image.open(sio(raw), mode='r').convert('RGB')
    digits = [str(color_dict[img.getpixel(pt)][1]) for pt in mk_px()]
    return ''.join(digits)



def smooth(x, window_len=11, window='hanning'):
    """Smooth a 1-D signal by convolving it with a window.

    The signal is extended with reflected copies of itself at both ends so
    the transients at the boundaries are minimized; the returned array has
    the same length as the input.

    input:
        x: 1-D numpy array (the input signal)
        window_len: the dimension of the smoothing window; should be an odd integer
        window: the type of window from 'flat', 'hanning', 'hamming', 'bartlett',
            'blackman'; a flat window produces a moving average

    output:
        the smoothed signal (same length as x); x itself if window_len < 3

    raises:
        ValueError for non-1-D input, input shorter than window_len, or an
        unknown window name

    see also:
        numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman,
        numpy.convolve, scipy.signal.lfilter

    TODO: the window parameter could be the window itself if an array instead of a string
    """
    # parenthesized raise works in Python 2 and 3 (the old `raise E, "msg"`
    # form is Python-2-only syntax)
    if x.ndim != 1:
        raise ValueError("smooth only accepts 1 dimension arrays.")
    if x.size < window_len:
        raise ValueError("Input vector needs to be bigger than window size.")
    if window_len < 3:
        return x
    if window not in ('flat', 'hanning', 'hamming', 'bartlett', 'blackman'):
        # message also fixes the original typo "is on of"
        raise ValueError("Window is one of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'")

    # reflect the signal about each endpoint to pad for the convolution
    s = numpy.r_[2*x[0]-x[window_len-1::-1], x, 2*x[-1]-x[-1:-window_len:-1]]
    if window == 'flat':  # moving average
        w = numpy.ones(window_len, 'd')
    else:
        # look the window function up by name instead of eval()
        w = getattr(numpy, window)(window_len)

    y = numpy.convolve(w/w.sum(), s, mode='same')
    return y[window_len:-window_len+1]


def get_todays_rose(zone, h5file):
    '''Look up today's rose row for *zone* in the pytables file.

    Returns the first matching row, or None when nothing is stored for today.
    '''
    today = date_str(datetime.datetime.today())
    rose_tbl = h5file.getNode('/zones/' + zone.short_name + '/rose')
    matches = rose_tbl[rose_tbl.col('date') == today]
    return matches[0] if matches.size else None


def main(fn=os.path.join(dropbox_folder, 'avy_data.xml')):

    txt = '<?xml version="1.0" ?>\n<zones></zones>'
    h5file = tables.openFile('avy-app.h5', mode='r')
    avy = Avy()
    root = etree.fromstring(txt)
    for zone in avy.zones:
        print zone.short_name
        attrib = dict((('short_name',zone.short_name),))
        zone_node = etree.SubElement(root, 'zone', attrib=attrib) 
        
        etree.SubElement(zone_node, 'zone_name').text = zone.zone_name
        row = get_todays_rose(zone, h5file)
        for k,v in zip(row.dtype.names, row):
            etree.SubElement(zone_node, k).text = str(v)
        #root.append(etree.fromstring(elem))
    h5file.close()
    res = etree.tostring(root, pretty_print=True)
    open(fn,'w').write(res)