# Create your views here.
from django.http import HttpResponse, HttpResponseRedirect
from django.utils.translation import ugettext as _
from google.appengine.api import urlfetch
from google.appengine.api import taskqueue
from django.utils import feedgenerator, simplejson
from django.template import Context, loader, RequestContext
from google.appengine.ext import db
from google.appengine.ext.db import NotSavedError, Error
from geo import geotypes

from marovi.models import *
from marovi.constants import *

from collections import defaultdict

from pyExcelerator import *

import pyExcelerator

import logging, math, datetime


def send_range_to_datastore(request, new_res):
    """
    Walks the lat/lng grid at the requested resolution and, for every
    grid cell, fetches the 0.25-resolution GeoDataPt points that fall
    inside that cell's bounding box.

    Args:
        request: Django HttpRequest (only used for routing).
        new_res: the new grid resolution in whole degrees (string or int).

    Returns:
        HttpResponse acknowledging completion.
    """
    int_new_res = int(new_res)
    
    lat_lower_bound = -60
    lat_upper_bound = 50
    lng_lower_bound = -170 
    lng_upper_bound = 170 
    
    # now we loop
    for i in range(lat_lower_bound,lat_upper_bound,int_new_res):
        # BUG FIX: the longitude cursor was initialised once *outside*
        # this loop, so after the first latitude row consumed it the
        # inner while-loop never ran again.  It must restart at the
        # western bound for every row.
        int_lng = lng_lower_bound
        
        # Skip the very first (south-western) row, as the original
        # guard intended.
        if i == lat_lower_bound:
            continue 
        
        while  (int_lng + int_new_res ) <= lng_upper_bound:
            # advance the longitude cursor one cell east
            int_lng = int_lng + int_new_res
            
            # define the box here
            # NOTE(review): nw/ne mix latitude (i) with longitude
            # (int_lng) and the log labels don't match the variables;
            # kept as-is — confirm against geotypes.Box's
            # (south, west, north, east) ordering before relying on it.
            sw_coord = i - int_new_res
            se_coord = i
            nw_coord = int_lng - i
            ne_coord = int_lng
            logging.info("s %d w %d e %d n %d" % (sw_coord, se_coord, nw_coord, ne_coord ) )
            # now get the points now that we get the box
            point_list_qry = GeoDataPt.bounding_box_fetch(
                        GeoDataPt.all().filter('resolution =', '0.25'),
                         geotypes.Box(sw_coord, nw_coord, se_coord, ne_coord),
                        max_results=1000 )
            
            for pdata in point_list_qry:
                logging.info("temp: " + pdata.measured_value )
    
    return HttpResponse("new resolution completed")

def display_loads(request):
    """
    Render the load-history page listing every DataLoad, newest first.
    """
    loads_qry = DataLoad.all()
    loads_qry.order('-created')

    template = loader.get_template('marovi/loadHistory.html')
    context = Context({
      'loads':loads_qry, 
    })
    return HttpResponse(template.render(context))

def process_detail_file(request):
    """
    Process the master file with lat/lngs and resolutions 
    """
    blob_pts_ibe = None
    resolution = None
    day = None
    year = None
    hour = None
    load_number = None
    
    try:
        blob_pts_ibe = request.FILES['sourceFile']
        resolution = request.POST['resolution']
        day = request.POST['day']
        year = request.POST['year']
        hour = request.POST['hour']
        load_number = request.POST['loadNumber']
        
    except KeyError,(strError):
        logging.error("Error: %s"% strError)
        return HttpResponse("err: %s" % strError)
    
    
    # ---------------
    #  process each line
    # ---------------
    int_order = 0
    for line in blob_pts_ibe:
        int_order = int_order + 1
        p_points = line.split(' ')
        logging.info("processing value: %s" % p_points[1])
        t = taskqueue.Task(
                      url='/marovi/processMicroDataWorker/', 
                      params={'value': p_points[1],
                              'order': p_points[0],
                              'resolution': resolution,
                              'day': day,
                              'year':year,
                              'hour':hour,
                              'load':load_number,
                              }
                      
        )
        t.add(queue_name="geo-backgroud-proc")
        # send to 
    
    
    return HttpResponse("OK")


def process_master_file(request):
    """
    Process the master file with lat/lngs and resolutions 
    """
    logging.info("processing maste file")
    blob_file = None
    resolution = None
    mater_type = None
    try:
        blob_file = request.FILES['masterFile']
        resolution = request.POST['resolution']
        mater_type = request.POST['type']
    except KeyError,(strError):
        logging.error("Error: %s"% strError)
        return HttpResponse("Err: %s" % strError )
    
    # first check if resolution exist prior to loading
#    res_list_qry = MasterLatLngRes.all(keys_only = True)
#    res_list_qry.filter("resolution =", resolution).get()
#    
#    if res_list is not None:
#        db.delete(res_list)
    
    # now read the file and process lat,lng combinations
    #------- end of processing file -----------#
    total_processed = 0
    for line in blob_file:
        total_processed += 1
        p_dict = line.split(' ')
        
#        logging.info("terms lat: %s, long: %s" % p_dict[0], 
#                                                  p_dict[1].strip() )
        t = taskqueue.Task(
                  url='/marovi/processMasterWorker/', 
                  params={'value': p_dict[1],
                          'type': mater_type,
                          'resolution': resolution,
                          'order' : p_dict[0],
                          }
                  
                  )
        t.add(queue_name="geo-backgroud-proc")
        
    
    
    
    # End of looping through the file
    return HttpResponse("OK total files %d" % total_processed )
    

def process_excel(request):
    """
    Reads from an excel file
    """
    blob_pts_ibe = None
    str_starting_time = None
    load_name = None
    try:
        blob_pts_ibe = request.FILES['sourceFile']
        str_starting_time = request.POST['initTimeStamp']
        load_name = request.POST['loadName']
    except KeyError,(strError):
        logging.error("Error: %s"% strError)
    
    # temporal date transform  time.strptime("16/6/1981", "%d/%m/%Y")
    dt_init = datetime.datetime.strptime(str_starting_time, '%d/%m/%Y')
    
    logging.info("1. reading the file")
    book = pyExcelerator.parse_xls(blob_pts_ibe)
    
    logging.info("2. getting el data")
    data = book[0][1]

    # ahora procesamos cada fila
    parsed_dictionary = defaultdict(lambda: None, book[0][1])
    
    # number of columns
    number_of_rows = len(parsed_dictionary)/3

    # create a new load
    load_obj = DataLoad(load_id=load_name,
                        total_entities=int(number_of_rows), 
                        )
    load_key = load_obj.put()

    
    # for now we set the timedate of the begining
    #date_init = datetime.date(2011,05,29)
    
    for i in range(0,number_of_rows):
        # si existe
        if parsed_dictionary[i,0]:
            logging.info("zip code: %s municipi: %s" % (parsed_dictionary[i,0], parsed_dictionary[i,2]) )
            
            # ------------------------------------------------
            # temporarily disabling the time increment
            # ------------------------------------------------
            #dt_init = dt_init + datetime.timedelta(hours=3)
            
            if parsed_dictionary[i,2] == '--':
                continue
            
            str_dt_init = dt_init.strftime(DATE_FORMAT)
            
            logging.info("date formatted: %s" % str_dt_init )
            # sends to background processing for each row of the file
            t = taskqueue.Task(
                      url='/marovi/processGribWorker/', 
                      params={'lat': parsed_dictionary[i,0],
                              'lng': parsed_dictionary[i,1],
                              'temp': parsed_dictionary[i,2],
                              'ts': str_dt_init,
                              'pkey':load_key.id(),
                              }
                      
                      )
            t.add(queue_name="geo-backgroud-proc")

    return HttpResponse("OK")

def display_by_day(request, p_day, p_month, p_year):
    """
    Renders the heat-map layer with every GeoDataPt taken on the given
    day (URL params: day, month, year).
    """
    logging.info("obtaining d: %s/%s/%s" % (p_day, p_month, p_year) )
    
    str_lower_bound = "%s/%s/%s 00:00:00" % (p_day, p_month, p_year) 
    str_upper_bound = "%s/%s/%s 23:59:59" % (p_day, p_month, p_year) 
    
    logging.info("upper bound: %s" % str_upper_bound)
    
    # Parse both ends of the day.
    dt_init = datetime.datetime.strptime(str_lower_bound, '%d/%m/%Y %H:%M:%S')
    dt_end = datetime.datetime.strptime(str_upper_bound, '%d/%m/%Y %H:%M:%S')
    
    # BUG FIX: the original '>' filter excluded points taken exactly at
    # midnight; the day is the closed range [00:00:00, 23:59:59].
    qry = GeoDataPt.all().filter('dt_dim >=', dt_init) 
    qry.filter('dt_dim <=',dt_end)
    pts = qry.fetch(5000)
    
    # Use len() of the fetched list instead of qry.count(), which
    # re-ran the whole query against the datastore just for logging.
    logging.info("total results: %d" % len(pts) )
    
    c = Context({             
      'points':pts,
      'max':100, 
    })
   
    t = loader.get_template('marovi/heatmapLayer.html')
    return HttpResponse(t.render(c))
    
def display_all_data(request):
    """
    Renders the heat-map layer with up to 5000 points and the maximum
    measured value used as the color scale ceiling.
    """
    logging.info("printing the lat and longs")
    
    pts = GeoDataPt.all().fetch(5000)
    
    d_max = GeoDataPt.all().order('-measured_value').get()
    
    # BUG FIX: with an empty datastore d_max is None and the attribute
    # access below raised AttributeError; fall back to 0.
    max_value = round(d_max.measured_value) if d_max is not None else 0
    
    c = Context({             
      'points':pts,
      'max':max_value,
    })
   
    t = loader.get_template('marovi/heatmapLayer.html')
    return HttpResponse(t.render(c))


def display_rev_geocoding(request):
    """
    Example for doing a reverse geocoding and getting the boundary conds
    for the map
    """
    str_latlng = None
    # first get the latlong parameter
    try:
        str_latlng = request.META['X-AppEngine-CityLatLong']
    except KeyError, (strErr):
        logging.error("Exc in key error")
        str_latlng = "41.40163300058821,2.1845370964705877"
    
    logging.info("latlng from gae:" +  str_latlng )
    
    lat_pos_arr = str_latlng.split(',')
    
    # next do the webservice call for reverse geocoding
    url = "http://maps.googleapis.com/maps/api/geocode/json?latlng=";
    url = url + str_latlng + '&sensor=false';
    
    # lets call the webservice from Google MAPS
    result = urlfetch.fetch(url)
    
    sw_lat = ""
    sw_lng = ""
    ne_lat = ""
    ne_lng = ""
    
    # now process the input from the webservice call
    if result.status_code == 200:
        logging.info("ok - receiving")
        # if you want to see the response uncomment this line
#        logging.info("response: %s" % result.content )
        data = simplejson.loads(result.content)

        for r in data['results']:
            geometry = r['geometry']
            viewport = geometry['viewport']
            sw = viewport['southwest'];
            ne = viewport['northeast'];
            # now for each point get the boundary conditions
            sw_lat = sw['lat']
            sw_lng = sw['lng']
            
            ne_lat = ne['lat']
            ne_lng = ne['lng']
    
    str_resp = str(sw_lat) + ',' + str(sw_lng) + ',' + str(ne_lat) + ',' + str(ne_lng)
    c = Context({             
      'pos_str':str_resp,
      'lat': lat_pos_arr[0],
      'lng':lat_pos_arr[1],
    })
    
    t = loader.get_template('marovi/coord_map.html')
    return HttpResponse(t.render(c))

def processDataGeoPoint(request):
    """
    Converts a given stagging point to a geo model point
    """
    
    # receive data point
    stg_pt_id = None
    
    try:
        stg_pt_id = request.POST['id']
    except KeyError, (strErr):
        logging.error("Error receiving params %s" % strErr )
        return HttpResponse("Err: %s" % strErr )
     
    int_id = int(stg_pt_id)
    
    obj = GeoDataPt.get_by_id( int_id )
    
    # now create the geomodel 
#    p = GeoDataPt( location=db.GeoPt(obj.gpPos.lat, obj.gpPos.lon) )
#    p.measured_value = obj.measured_value
#    p.latitude = obj.gpPos.lat
#    p.longitude = obj.gpPos.lon
#    p.update_location()
#    p.put()
    
    return HttpResponse("OK")

def dataAggregatorWorker(request):
    """
    Bounding box analyzer: reads four corner coordinates and a
    resolution from the POST body and fetches the GeoDataPt entities
    inside that box.

    POST params: sw, se, nw, ne (corner values) and resolution
    (currently read but unused below).

    Returns:
        HttpResponse "done", or an error response on missing params.
    """
    
    str_sw = None
    str_se = None
    str_nw = None
    str_ne = None
    str_resolution = None
    
    # now process the input
    try:
        str_sw = request.POST['sw']
        str_se = request.POST['se']
        str_nw  = request.POST['nw']
        str_ne = request.POST['ne']
        str_resolution = request.POST['resolution']
    except KeyError, (strErr):
        logging.error("Error receiving params %s" % strErr )
        return HttpResponse("Err: %s" % strErr )
    
    # now create the bounding box
    # NOTE(review): the "'bar >', 5" filter looks like leftover sample
    # code from the geomodel docs ("Rich query!") — confirm what the
    # real property filter should be.
    # NOTE(review): the argument order (ne, se, nw, sw) does not match
    # geotypes.Box's documented (north, east, south, west) naming —
    # verify before trusting the results.
    results = GeoDataPt.bounding_box_fetch(
              GeoDataPt.all().filter('bar >', 5),  # Rich query!
              geotypes.Box(float(str_ne), float(str_se) ,float(str_nw) ,
                           float(str_sw)),
              max_results=1000)
    # TODO: now for each result do a count
    
    return HttpResponse("done")    
    

def agg_temp_data_by_res(request, resolution, day, year, hour):
    """
    First aggregation pass: finds the staged 'lat' rows in TempDataLoad
    for the given resolution and timestamp and enqueues one
    processETLWorker task per row.

    Args (from the URL): resolution (decimal part, e.g. '25' for 0.25),
    day (day of year), year, hour.
    """
    logging.info("res: %s" % resolution)
    data_qry = TempDataLoad.all()
    data_qry.filter("resolution =", '0.' + resolution)
    data_qry.filter("title =", 'lat')
    
    dateStamped = '%s%s%s' % (year,day,hour)
    
    # %j is day of the year.
    dt_converted = datetime.datetime.strptime(dateStamped, '%Y%j%H') 
    
    # BUG FIX: the property name was missing (filter(" =", ...)), which
    # is an invalid datastore filter; match on dateStamped like the
    # rest of the pipeline (see processETLWorker).
    data_qry.filter("dateStamped =", dt_converted)
    
    logging.info("before the loop")
    
    for data in data_qry.run(limit=2000):
        # send to queue for processing
        logging.info("inside the loop")
        str_value = '%.3f' % data.value
        t = taskqueue.Task(
                      url='/marovi/processETLWorker/', 
                      params={'value': str_value,
                              'order': data.order,
                              'res': '0.'+ resolution,
                              'date': dateStamped,
                              }
                      
                      )
        t.add(queue_name="geo-backgroud-proc")
    
    return HttpResponse("OK Send to Procesing")

def processETLWorker(request):
    """ First transform from temp data """
    lat_value = None
    order = None
    res = None
    dateStamped = None
    
    # process the post
    try:
        lat_value = request.POST['value']
        order = request.POST['order']
        res = request.POST['res']  
        dateStamped = request.POST['date']
    except KeyError, (strErr):
        logging.error("Error receiving params %s" % strErr )
        return HttpResponse("BAD")
    
    logging.debug("date incoming: %s" % dateStamped )
    
    # reconverting the dates
    dt_converted = datetime.datetime.strptime(dateStamped, '%Y%j%H')  
    int_order = int(order)
    
    data_qry = TempDataLoad.all()
    data_qry.filter("resolution =", res)
    data_qry.filter("title =", 'lng')
    data_qry.filter("order =", int_order)
#    data_qry.filter("dateStamped =", dt_converted )
    
    # get the value
    lng_tmp_val = data_qry.get()
    
    if lng_tmp_val is None:
        logging.error("no longitude found")
        return HttpResponse("Bad")
    
    # now get actual value
    data_qry = TempDataLoad.all()
    data_qry.filter("resolution =", res)
    data_qry.filter("title =", 'value')
    data_qry.filter("order =", int_order)
    data_qry.filter("dateStamped =", dt_converted )
    
    ptr_value = data_qry.get()
    
    if ptr_value is None:
        logging.error("Real value not found")
        return HttpResponse("Error")
    
    f_lat_val = float(lat_value)
    
    # now save to the datastore
    geo_entity = GeoDataPt(location=db.GeoPt(f_lat_val, lng_tmp_val.value),
                           resolution=res,
                           measured_value=ptr_value.value,
                           latitude=f_lat_val,
                           longitude=lng_tmp_val.value,
                           dt_dim=dt_converted,
                            )
    geo_entity.location = db.GeoPt(f_lat_val, lng_tmp_val.value)
    geo_entity.update_location()
    geo_entity.put()

    # cleaning up
    #db.delete(lng_tmp_val)

    # create first the lats
    return HttpResponse("OK")

def processAggStats(request):
    """ Process all data points and summarize stats """
    # TODO: aggregation not implemented yet; this endpoint currently
    # just acknowledges the request.
    return HttpResponse("OK")


def processMicroDataWorker(request):
    """
    Details for the different resolution types
    """
    value = None
    res = None
    order = None
    day = None
    year = None
    hour = None
    load_str = None
    
    try:
        value = request.POST['value']
        res = request.POST['resolution']
        order = request.POST['order']
        day = request.POST['day']
        year = request.POST['year']  
        hour = request.POST['hour']
        load_str = request.POST['load']
    except KeyError, (strErr):
        logging.error("Error receiving params %s" % strErr )
        return HttpResponse("BAD")
    
    str_starting = "%s/%s%s" % (day, hour, year)
    #---------------------------------------------
    # date and time where %j is day of the year
    #---------------------------------------------
    dt_init = datetime.datetime.strptime(str_starting, '%j/%H%Y')
    
    if value.find('--') != -1:
        return HttpResponse("BAD")
    
    f_value = float(value)
    i_order = int(order)
    
    TempDataLoad(value=f_value, 
                 order=i_order, 
                 resolution=res,
                 load = load_str,
                 title= 'value',
                 dateStamped=dt_init).put()
    
    return HttpResponse("OK")

def processMasterLatLngWorker(request):
    """
    Process the file with the lat,lon 
    """
    value = None
    mastertype = None
    order = None
    res = None
    
    try:
        value = request.POST['value']
        mastertype = request.POST['type']
        order = request.POST['order']
        res = request.POST['resolution']
    except KeyError, (strErr):
        logging.error("Error receiving params %s" % strErr )
        return HttpResponse("BAD")
    
    
    # transformation
    f_value = 0
    i_order = 0
    try:
        f_lvalue = float(value)
        i_order = int(order)
    except ValueError, (strErr):
        logging.error("ConversionError: %s" % strErr)
        return HttpResponse("BAD %s" % strErr )
    
    # now save to datastore
    obj_key = TempDataLoad(value=f_lvalue,
                           order=i_order,
                           resolution=res,
                           title=mastertype,  
                  ).put()
#    MasterLatLngRes(latitude=f_lati,
#                    longitude=f_longi,
#                    position=i_order,
#                    resolution=res).put()
    
    return HttpResponse("OK")

def processGribWorker(request):
    """
    background processing thread in charge of parallel processing and processing all the data points
    once it is called from the excel loading routine
    """
    lat = None
    lng = None
    p_value = None
    timeStamp = None
    str_parent_key = None
    
    try:
        lat = request.POST['lat']
        lng = request.POST['lng']
        p_value  = request.POST['temp']
        timeStamp = request.POST['ts']
        str_parent_key = request.POST['pkey']
    except KeyError, (strErr):
        logging.error("Error receiving params %s" % strErr )
        return HttpResponse("Err: %s" % strErr )
        
    dt_init = None
    
    logging.info('queue timestamp: %s and float %s' % (timeStamp, p_value) )
    
    f_value = float(0)
    f_lati = 0
    f_longi = 0
    int_pkey = 0
    
    logging.info("latitude: %s and longitude: %s" % (lat,lng) )
    
    try:
        f_value = float(p_value)
        f_lati = float(lat)
        f_longi = float(lng)
        int_pkey = int(str_parent_key)
    except ValueError, (strErr):
        logging.error("ConversionError: %s" % strErr)
    
    try:
        dt_init = datetime.datetime.strptime(timeStamp, DATE_FORMAT)
    except ValueError, (strErr):
        logging.error("DatetimeError: %s" % strErr)

    logging.info("before persisting")
    
    # temporarily disabling the point filter
    p_obj = None
    #p_obj = StaggingPt.all().filter('dt_taken =',dt_init).get()
    
    parent_key = db.Key.from_path('DataLoad', int_pkey)
    
    # process datetime  dt_taken=dt_init
#    stg_pt = GeoDataPt( parent=parent_key,
#                         measured_value=f_value,
#                         dt_taken=dt_init,
#                         audit_dt_updated=datetime.datetime.now()
#                          )
    geo_entity = GeoDataPt(location=db.GeoPt(f_lati, f_longi),
                           resolution='0.25',
                           measured_value=f_value,
                           latitude=f_lati,
                           longitude=f_longi,
                           dt_dim=dt_init,
                            )
    geo_entity.location = db.GeoPt(f_lati, f_longi)
    geo_entity.update_location()
    
    try:
        geo_entity.put()
    except NotSavedError, (strErr):
        logging.error("Not Saved Error: " + strErr)

    
#    stg_pt.gpPos = db.GeoPt(f_lati,f_longi)
#    stg_pt.latitude = f_lati
#    stg_pt.longitude = f_longi
    
    # check to see if point has already being saved
#    if not p_obj and not f_lati == float(0) :
#        try:
#            stg_pt.put()
#        except db.Error, (strErr):
#            logging.error('ErrorSaving: %s' % strErr)
        
    logging.info("saved stagging")
    
    return HttpResponse("done")
    