import logging
from ovzdusi_entities import *
from ovzdusi_util import SELC, CacheManager
from xml.etree import ElementTree as ET
from datetime import datetime, timedelta, tzinfo
from google.appengine.api import urlfetch, memcache
from google.appengine.ext import db

# Leftover from an earlier inline cron check; GAE cron requests are
# identified by the X-AppEngine-Cron header.
# if request.headers.get("X-AppEngine-Cron") == "true" :

logging.getLogger().setLevel(logging.WARNING)

# Maps measure-point code -> MeasurePoint entity. Populated in real_main()
# once the target region's header row is found; read by ParseData().
allMeasurePointsDict = {}

def profile_main():
    """Run real_main() under cProfile and log the top cumulative-time stats.

    Alternative entry point for profiling; swap the module-level call to
    real_main() for this one when investigating performance.
    """
    import cProfile, pstats, StringIO
    profiler = cProfile.Profile()
    profiler = profiler.runctx("real_main()", globals(), locals())
    report = StringIO.StringIO()
    stats = pstats.Stats(profiler, stream=report)
    # Sort by cumulative time ("time" is the other useful key) and keep
    # the 200 hottest entries; print_callees()/print_callers() could be
    # added here for deeper digging.
    stats.sort_stats("cumulative").print_stats(200)
    logging.debug("Profile data:\n%s", report.getvalue())


def ColumnIndexPM10(theads):
    """Return the zero-based data-cell index of the PM10 column.

    theads -- list of ElementTree <th> elements from the header row.

    Each header may span several data columns, so the colspan values are
    accumulated to map the header position to the flat cell index used in
    the data rows. The PM10 header is recognised by "PM" in its text with
    a first child element whose text is "10" (the subscript).

    Returns the accumulated index; if no PM10 header is found an error is
    logged and the accumulated total is still returned.
    """
    result = -1
    foundPM = False
    for h in theads:
        result += int(h.get("colspan", 1))
        sub = list(h)
        # h.text may be None when the header cell only wraps child tags;
        # guard before the substring test (bug fix: previously raised
        # TypeError on such cells).
        if h.text and "PM" in h.text and len(sub) > 0 and sub[0].text == "10":
            foundPM = True
            break
    if not foundPM:
        logging.error("Cannot find PM10 index.")
    return result


def ParseDateTime(theads):
    """Parse the report's datetime from the second header cell.

    theads -- list of ElementTree <th> elements; theads[1].text is expected
              in the fixed-position format "DD.MM.YYYY HH..." (minutes and
              anything after position 13 are ignored).

    Returns a naive datetime(year, month, day, hour).
    Raises ValueError when the header row has fewer than two cells
    (bug fix: previously it logged the error but fell through and crashed
    with a bare IndexError on theads[1]).
    """
    if len(theads) < 2:
        logging.error("Cannot find the cell for date time.")
        raise ValueError("Cannot find the cell for date time.")
    full = theads[1].text
    # Fixed-width slicing: "01.02.2013 14:00" -> day=01 month=02 year=2013 hour=14
    day = int(full[:2])
    month = int(full[3:5])
    year = int(full[6:10])
    hour = int(full[11:13])
    return datetime(year, month, day, hour)

    
def ParseData(row, indexPM10, dt):
    """Parse one data <tr> into a Pollution entity.

    row       -- ElementTree element of a table row; cell 0 holds an <a>
                 child with the station code, cell 1 the station name, and
                 cell ``indexPM10`` the PM10 value (possibly wrapped in a
                 child element such as <span>).
    indexPM10 -- zero-based index of the PM10 cell (see ColumnIndexPM10).
    dt        -- measurement datetime of the whole report.

    Returns a Pollution entity with PM10 set (0.0 when the value is missing
    or unparseable), or None when code/name cannot be parsed or the station
    is not a tracked measure point in allMeasurePointsDict.
    """
    cells = list(row)
    if len(cells) <= indexPM10:
        logging.error("Cannot find the cell for PM10.")
    code = None
    name = None
    pm10 = None
    for index, d in enumerate(cells):
        if index == 0:
            # Station code is the text of the first child (<a>), if any.
            a = list(d)
            if a is not None and len(a) > 0:
                code = a[0].text
        if index == 1:
            name = d.text
        if index == indexPM10:
            # The value may be wrapped in a child element; fall back to
            # the cell's own text otherwise.
            span = list(d)
            if span is not None and len(span) > 0:
                pm10 = span[0].text
            else:
                pm10 = d.text
    if code is None:
        logging.error("Cannot parse code.")
        return None
    if name is None:
        logging.error("Cannot parse name.")
        return None
    # Only stations registered as measure points are stored.
    if code not in allMeasurePointsDict:
        return None
    mp = allMeasurePointsDict[code]
    p = Pollution(date = dt, measurePoint = mp)
    if pm10 is None:
        p.PM10 = 0.0
    else:
        try:
            # The Czech page uses a decimal comma.
            p.PM10 = float(pm10.replace(',', '.'))
        except ValueError:
            logging.error("Cannot parse PM10=" + pm10)
            # Bug fix: treat an unparseable value the same as a missing one
            # so the entity is never returned with PM10 left unset.
            p.PM10 = 0.0
    return p


def IsTimeToFetch():
    """Decide whether real_main() should fetch a new report.

    Throttling is currently disabled and every cron tick fetches. The
    intended (disabled) policy was: first retry 50 minutes after the last
    successful measurement, then on every 5-minute cron tick:

        lastMeasure = memcache.get("last.measure")
        if lastMeasure is None:
            return True
        return datetime.now(SELC()) - lastMeasure > timedelta(minutes=50)
    """
    return True


def FixPreviousData(currentMeasureDate):
    """Backfill zero-valued Pollution rows for hours the feed skipped.

    Compares currentMeasureDate against the newest stored measurement and,
    for every whole hour in between that lacks a row for some measure
    point, stores a Pollution entity with PM10 = 0.0 (later repairable by
    UpdateZeroValues).

    Returns the list of newly stored filler entities, or None when there
    is no previous measurement or the gap is at most one hour.
    """
    filled = []
    latest = db.GqlQuery("select * from Pollution order by date desc").fetch(1)
    if latest is None or not latest:
        logging.warning("lastMeasure does not exists")
        return
    previousDate = latest[0].date
    gap = currentMeasureDate - previousDate
    gapHours = gap.days * 24 + gap.seconds // 3600
    if gapHours <= 1:
        return
    measurePoints = CacheManager().getMeasurePoints()
    existing = db.GqlQuery(
        "select * from Pollution where date > :1 and date < :2 order by date desc, measurePoint",
        previousDate, currentMeasureDate,
    ).fetch(len(measurePoints) * gapHours)

    # Truncate to the hour, then walk each missing hourly slot in the gap
    # (currentMeasureDate itself is stored by the caller).
    baseHour = datetime(previousDate.year, previousDate.month,
                        previousDate.day, previousDate.hour)
    for offset in range(1, gapHours):
        slot = baseHour + timedelta(hours = offset)
        for mp in measurePoints:
            if PollutionExists(existing, mp, slot):
                continue
            filler = Pollution(date = slot, measurePoint = mp)
            filler.PM10 = 0.0
            filler.put()
            filled.append(filler)

    return filled


def PollutionExists(pollutions, mp, dt):
    """Return True if pollutions already holds a record for measure point
    mp (matched by code) at datetime dt."""
    return any(p.measurePoint.code == mp.code and p.date == dt
               for p in pollutions)


def UpdateZeroValues(parsedPollution):
    """Repair zero placeholders using a re-served report.

    Some values from a previous report might not have been served; such
    missing values were stored as 0.0 by the FixPreviousData routine. CHMU
    may update an already published report (before publishing the next
    hour's) and serve the values that were missing, so stored 0.0 rows for
    this report's datetime are re-checked here and updated from the freshly
    parsed data.

    parsedPollution -- non-empty list of freshly parsed Pollution entities,
                       all sharing the same date.

    Returns the list of stored entities that were updated and re-put.
    """
    updated = []
    stored = db.GqlQuery(
        "select * from Pollution where date = :1", parsedPollution[0].date,
    ).fetch(len(allMeasurePointsDict))
    for existing in stored:
        if existing.PM10 != 0.0:
            continue
        for fresh in parsedPollution:
            if fresh.PM10 > 0.0 and existing.measurePoint.code == fresh.measurePoint.code:
                existing.PM10 = fresh.PM10
                existing.put()
                updated.append(existing)
    return updated


def real_main():
    """Fetch the CHMU hourly air-quality page, parse the PM10 values for
    the Moravian-Silesian region ("Moravskoslezsk...") and persist them.

    Flow: fetch the XHTML page -> locate the data table -> find the region
    header row (which also yields the report datetime and PM10 column
    index) -> parse each following data row -> either repair zero values
    of an already stored report, or backfill gap hours and store the new
    measurements, refreshing the caches either way.
    """
    if not IsTimeToFetch():
        return

    url = "http://portal.chmi.cz/files/portal/docs/uoco/web_generator/actual_hour_data_CZ.html"
    result = urlfetch.fetch(url, deadline=60)
    httpResult = result.status_code
    if httpResult != 200:
        logging.error("Cannot fetch the url. Http status code is %s." % httpResult)
        return
    # The page is XHTML, so every tag lives in the XHTML namespace.
    ns = "http://www.w3.org/1999/xhtml"
    xpTable = "{%(x)s}body/{%(x)s}div/{%(x)s}div/{%(x)s}table" % {"x":ns}
    xpTr = "{%s}tr" % ns
    xpTh = "{%s}th" % ns
    tree = ET.fromstring(result.content)
    table = tree.find(xpTable)
    # Bug fix: ElementTree elements must be tested with "is None", not
    # ==/!= None (deprecated comparison / truthiness quirks).
    if table is None:
        logging.error("Cannot find table. XPath: " + xpTable)
        return
    rows = list(table.findall(xpTr))
    # findall() returns a list, never None, so an emptiness check suffices.
    if not rows:
        logging.error("Cannot find rows. XPath: " + xpTr)
        return
    flagFoundKraj = False
    indexPM10 = -1
    dt = datetime.min
    parsedPollution = []

    for r in rows:
        if flagFoundKraj:
            # Header rows inside the data section separate regions; a row
            # containing any <th> is not data. It also terminates our
            # region, but parsing simply yields no further matches.
            if r.find(xpTh) is not None:
                continue
            p = ParseData(r, indexPM10, dt)
            if p is not None:
                parsedPollution.append(p)
            continue

        heads = list(r.findall(xpTh))
        # Guard heads[0].text against None (a th may wrap only child tags).
        if len(heads) > 0 and heads[0].text and "Moravskoslezsk" in heads[0].text:
            flagFoundKraj = True
            dt = ParseDateTime(heads)
            indexPM10 = ColumnIndexPM10(heads)
            allMeasurePointsList = CacheManager().getMeasurePoints()
            for mp in allMeasurePointsList:
                allMeasurePointsDict[mp.code] = mp

    if not flagFoundKraj:
        logging.error("Cannot find Kraj")

    if len(parsedPollution) > 0:
        query = db.GqlQuery("select __key__ from Pollution where date = :1", dt)
        exists = query.fetch(1)
        if len(exists) > 0:
            # Report for this hour already stored: CHMU may have back-filled
            # previously missing values, so refresh the zero entries.
            updated = UpdateZeroValues(parsedPollution)
            if len(updated) > 0:
                CacheManager().updateWeekDataZeros(updated)
        else:
            # New report: backfill any skipped hours, store the data, and
            # remember the fetch time for (currently disabled) throttling.
            fixed = FixPreviousData(dt)
            db.put(parsedPollution)
            memcache.set("last.measure", datetime.now(SELC()))
            if fixed is None:
                CacheManager().updateData(parsedPollution)
            else:
                CacheManager().updateData(fixed + parsedPollution)


real_main()
