from google.appengine.ext import db
#import table_schema as ts  # NOTE(review): `ts.get_fields` is called below but this import is commented out -- the module will raise NameError unless `ts` is injected elsewhere; confirm and restore if needed.
import logging
from google.appengine.api import memcache

# Maps the schema's field types to the ExtJS reader/column types.
EXT_TYPES_MAPPING = {'str': 'String', 'float': 'Float', 'datetime': 'date', 'location': 'String'}

def get_ext_store_meta(tableid, sortinfo=None, paging=None, flds=None):
    '''Return the metaData block of a table for an ExtJS data store.

    tableid  -- kind name; its schema is fetched via ts.get_fields() when
                *flds* is not supplied.
    sortinfo -- {'field': ..., 'direction': ...}; defaults to ascending
                sort on the first field.
    paging   -- {'start': ..., 'limit': ...}; falsy/missing values default
                to start=0, limit=1000.
    flds     -- pre-fetched field dicts (field_name/field_type/field_label).
    '''
    # Fixed: the old signature used mutable defaults (paging={}, flds=[])
    # which are shared across calls.
    if paging is None:
        paging = {}
    if not flds:
        flds = ts.get_fields(tableid)
    fields = []
    for fld in flds:
        ext_type = EXT_TYPES_MAPPING[fld['field_type']]
        d = {'name': fld['field_name'], 'type': ext_type,
             'header': fld['field_label'], 'dataIndex': fld['field_name']}
        if ext_type == 'date':
            # ExtJS needs an explicit parse format for date columns.
            d['dateFormat'] = 'Y-m-d'
        fields.append(d)
    if not sortinfo:
        sortinfo = {
                   "field": fields[0]['name'],
                   "direction": "ASC"
                }
    # Default paging when the caller supplied none (or falsy values).
    page_start = paging.get('start') or 0
    page_limit = paging.get('limit') or 1000
    return {
                "idProperty": "ID",
                "root": "rows",
                "totalProperty": "total",
                "successProperty": "success",
                "fields": fields,
                # used by store to set its sortInfo
                "sortInfo": sortinfo,
                # paging data. BUG FIX: the original returned paging['start'] /
                # paging['limit'] directly (KeyError when the keys are absent)
                # and ignored the computed defaults above.
                "start": page_start,
                "limit": page_limit,
                'groupField': 'project_name'

                # custom property
                #"foo": "bar"
            }




def get_records(options, table_model):
    '''Query *table_model* with GQL built from *options* and return the rows.

    options keys used: 'where' (GQL WHERE clause, required), 'sort', 'dir',
    'limit', 'start' (required keys -- accessed with [] below), and
    optionally 'to_dt'.  Results are cached in memcache keyed on
    str(options) + kind name; returns a dict with 'rows', 'total',
    'fields', 'formatted_fields' and 'time_fields'.
    '''
    logging.info('getting records from table %s' % table_model.kind())
    logging.info(options)
    # Cache key combines the full options dict and the kind name.
    # NOTE(review): str(options) depends on dict ordering -- presumably
    # acceptable here; confirm if cache misses matter.
    mkey = str(options) + str(table_model.kind())
    result = memcache.get(str(mkey))
    if result:
        logging.info('query table, hit memcache')
        return result
    tableid = table_model.kind()
    fields = []            # all field names, in schema order
    formatted_fields = {}  # field name -> printf-style display format
    time_fields = []       # names of datetime fields (need ISO formatting)
    
    for fld in ts.get_fields(tableid):
        fields.append(fld['field_name'])
        if fld['field_format'] and fld['field_type'] in ['int', 'float']:
            formatted_fields[fld['field_name']] = fld['field_format']
        if fld['field_type'] in ['datetime']:
            time_fields.append(fld['field_name'])
    logging.info('time fields: %s' % str(time_fields)) 
    
    # sql1 is the paged/sorted query; sql2 (below) is the bare WHERE clause
    # used only to count the total number of matching rows.
    sql1 = '%(where)s' % options
    if options['sort']:
        if options.get('to_dt'):
            # form_date is an inequality filter, so GQL requires it to be
            # the first ORDER BY property.
            sql1 = sql1 + ' ORDER BY form_date, %(sort)s ' % options
        else:
            sql1 = sql1 + ' ORDER BY %(sort)s ' % options
        if options['dir']:
            sql1 = sql1 + ' %(dir)s ' % options
    if options['limit']:
        sql1 = sql1 + ' LIMIT %(limit)s ' % options
    if options['start']:
        sql1 = sql1 + ' OFFSET %(start)s '  % options
    logging.info('where clause: %s' % sql1) 
    sql2 = '%(where)s' % options
    logging.info('total where clause: %s' % sql2)
    total = table_model.gql(sql2).count()
    rows = []
    # Flatten each entity into a plain dict; keep the datastore key so the
    # client can reference the entity later.
    for r in table_model.gql(sql1):
        row = {}
        for fld in fields:
            row[fld] = getattr(r, fld)
        row['google_key'] = str(r.key())
        logging.info('add record:')
        logging.info(row)
        rows.append(row)
    
    results = {'rows': rows, 'total': total, 'fields': fields, 'formatted_fields': formatted_fields, 'time_fields': time_fields}
    memcache.set(mkey, results)
    logging.info('query table, miss memcache')
    return results

def get_ext_store(options, table_model):
    '''Query a table and return an ExtJS store payload.

    options     -- dict with 'tableid', optional 'start'/'limit'/'sort'/'dir',
                   plus whatever get_records() requires ('where', ...).
    table_model -- the db.Model/Expando class to query.

    Returns {'metaData': ..., 'success': True, 'total': ..., 'rows': ...}.
    '''
    tableid = options['tableid']
    result = get_records(options, table_model)
    rows = result['rows']
    total = result['total']
    formatted_fields = result['formatted_fields']
    time_fields = result['time_fields']
    for row in rows:
        # Apply numeric display formats (e.g. '%.2f') to non-empty values.
        for fld, fmt in formatted_fields.items():
            if row[fld]:
                row[fld] = fmt % float(row[fld])
        # Render datetime values as ISO dates (YYYY-MM-DD).
        for fld in time_fields:
            if row[fld]:
                try:
                    row[fld] = row[fld].isoformat().split('T')[0]
                except AttributeError:
                    # BUG FIX: was a bare `except:` that swallowed everything
                    # (even KeyboardInterrupt/SystemExit); keep the original
                    # best-effort behaviour only for values lacking isoformat().
                    pass

    paging = {'start': options.get('start'), 'limit': options.get('limit')}
    sortinfo = {'field': options.get('sort'), 'direction': options.get('dir')}
    meta = get_ext_store_meta(tableid, sortinfo, paging)

    results = {'metaData': meta, 'success': True, 'total': total, 'rows': rows}

    return results

def insert_records(records, table_model):
    '''
    There is no check for the field name and type.
    Assuming the records are already validated and checked for those errors.
    '''
    # The whole-table caches are now stale; drop everything up front.
    memcache.flush_all()
    logging.info('flush cache for insert')

    # Build one entity per record, copying every key/value pair onto it.
    entities = []
    for rec in records:
        entity = table_model()
        for name, value in rec.items():
            setattr(entity, name, value)
        entities.append(entity)

    # Record the batch in a Transaction audit entity: 'new' before the
    # datastore write, 'complete' once db.put() has succeeded.
    txn = Transaction()
    txn.description = '%s records imported into %s' % (len(records), table_model.kind())
    txn.status = 'new'
    txn.put()
    db.put(entities)
    txn.status = 'complete'
    txn.put()

    logging.info('%s records imported into %s' % (len(records), table_model.kind()))


        

def process_query(query, fields=None):
    '''
    convert the results of a query into a json object
    '''
    # Flatten each entity to a plain dict of its declared fields.
    # (The *fields* parameter is accepted but unused, as in the original.)
    out = []
    for entity in query:
        out.append(dict((name, getattr(entity, name)) for name in entity.fields().keys()))
    return out
        
class dlz_config(db.Expando):
    '''Schema-less kind; presumably holds app configuration entries -- confirm usage.'''
    pass
class dlz_connection(db.Expando):
    '''
    The mapping between iFormBuilder, GAE and Fusion Table.
    Schema-less: properties are assigned dynamically.
    '''
    pass    
    
class compaction_form(db.Expando):
    '''Schema-less kind for compaction form submissions -- fields come from the table schema.'''
    pass

class compaction_test(db.Expando):
    '''Schema-less kind for compaction test records -- fields come from the table schema.'''
    pass

class asphalt_form(db.Expando):
    '''Schema-less kind for asphalt form submissions -- fields come from the table schema.'''
    pass

class asphalt_test(db.Expando):
    '''Schema-less kind for asphalt test records -- fields come from the table schema.'''
    pass
class dlz_project_list(db.Expando):
    '''Schema-less kind; presumably the list of projects (rows are grouped by project_name elsewhere).'''
    pass

class dlz_table_schema(db.Model):
    '''One entity per field of a user table: name, label, type, format and order.'''
    table_name = db.StringProperty()       # kind name the field belongs to
    field_name = db.StringProperty()       # attribute name on the entity
    field_label = db.StringProperty()      # human-readable column header
    field_type = db.StringProperty()       # e.g. 'str', 'float', 'datetime', 'location'
    field_format = db.StringProperty()     # printf-style display format, e.g. '%.2f'
    field_position = db.IntegerProperty()  # column ordering
    exportable = db.StringProperty()       # NOTE(review): flag stored as a string -- confirm expected values
    comment = db.StringProperty()
     
     

class Transaction(db.Model):
    '''Audit record for a batch datastore operation (see insert_records).'''
    ctime = db.DateTimeProperty(auto_now_add=True)  # creation time, set once
    # BUG FIX: mtime used auto_now_add, so it never changed after creation;
    # auto_now refreshes the timestamp on every put() (e.g. the status
    # transition 'new' -> 'complete' in insert_records).
    mtime = db.DateTimeProperty(auto_now=True)
    status = db.StringProperty()       # 'new' -> 'complete'
    description = db.StringProperty()  # human-readable summary of the batch
    owner = db.UserProperty()
    
class SqlRequest(db.Model):
    '''A SQL statement queued/executed as part of a Transaction.'''
    transaction_ref = db.ReferenceProperty(Transaction)  # owning Transaction
    ctime = db.DateTimeProperty(auto_now_add=True)  # creation time, set once
    # BUG FIX: mtime used auto_now_add, so it never changed after creation;
    # auto_now refreshes the timestamp on every put().
    mtime = db.DateTimeProperty(auto_now=True)
    status = db.StringProperty()
    sql = db.StringProperty()          # NOTE(review): StringProperty caps at 500 chars -- TextProperty may be safer for long SQL
    description = db.StringProperty()
    owner = db.UserProperty()
    


class Email(db.Model):
    '''
    Save all the notification emails sent from iFormBuilder.
    '''
    ctime = db.DateTimeProperty(auto_now_add=True)  # time the entity was stored
    plaintext_content = db.TextProperty()  # plain-text body
    html_content = db.TextProperty()       # HTML body
    sender = db.StringProperty()
    to = db.StringProperty()
    cc = db.StringProperty()
    subject = db.StringProperty()
    date = db.StringProperty()  # original message date, kept as the raw header string

    
    

    
    

class IformDatabaseLog(db.Model):
    '''
    Log the failed activities of downloading data from the iForm database.
    '''
    ctime = db.DateTimeProperty(auto_now_add=True)  # creation time, set once
    # BUG FIX: mtime used auto_now_add, so it never changed after creation;
    # auto_now refreshes the timestamp on every put().
    mtime = db.DateTimeProperty(auto_now=True)
    table = db.StringProperty()        # affected table / kind name
    status = db.StringProperty()
    description = db.StringProperty()  # failure details
    user = db.UserProperty()    


