import StringIO, logging, csv, model, datetime
import table_schema as ts
from google.appengine.ext import db
from google.appengine.api import memcache
from gae_db_manager import GaeDbManager

def date_convert1(text):
    '''
    Convert a date string into a datetime.datetime object.

    This parser is registered in DATE_FORMAT_FN under the 'mm/dd/yyyy'
    key, but the original implementation only split on '-' and assumed
    a yyyy-mm-dd field order, so real mm/dd/yyyy values raised
    ValueError.  Accept both forms: a '/'-separated string is parsed as
    mm/dd/yyyy, while the legacy '-'-separated yyyy-mm-dd form keeps
    working unchanged.
    '''
    if '/' in text:
        m, d, y = text.split('/')
    else:
        y, m, d = text.split('-')
    return datetime.datetime(int(y), int(m), int(d))

def date_convert2(text):
    '''
    Convert a 'yyyy-mm-dd hh:mm:ss' string into a datetime.datetime object.
    '''
    date_part, time_part = text.split()
    y, m, d = date_part.split('-')
    h, mm, s = time_part.split(':')
    # bug fix: the original tuple was (y, m, d, h, m, s) -- it reused `m`
    # (the month) in the minutes slot and silently discarded the parsed
    # minutes `mm`.
    return datetime.datetime(*[int(x) for x in (y, m, d, h, mm, s)])

# Maps a field_format string to the function that parses a csv cell in
# that format into a datetime (used by get_conversion_function for
# fields whose field_type is 'datetime').
DATE_FORMAT_FN = {'mm/dd/yyyy': date_convert1, 'yyyy-mm-dd hh:mm:ss': date_convert2 }
def get_conversion_function(field):
    '''
    Return a callable that converts a raw csv string value according to
    the field definition.

    field is a dictionary:
    {
        field_name: xxx
        field_type: float/int/str/datetime
        field_format: mm/dd/yyyy  (only consulted for datetime fields)
        etc...
    }

    Unrecognized field_type values fall back to an identity function.
    '''
    simple_types = {'float': float, 'int': int, 'str': str}

    ftype = field['field_type']
    if ftype in simple_types:
        return simple_types[ftype]
    if ftype == 'datetime':
        # the concrete parser depends on the declared date format
        return DATE_FORMAT_FN[field['field_format']]
    return lambda v: v


def get_csv_file(csv_file):
    '''
    Parse an uploaded csv file into a list of rows.

    csv_file: the raw csv text uploaded via the form file control.
    Returns a list in which each item is one line of the csv file,
    itself a list of column values.
    '''
    buf = StringIO.StringIO(csv_file)
    rows = list(csv.reader(buf))
    buf.close()
    return rows


def insert_records(csv_list, table_model, header_idx=0, fields=None):
    '''
    Import a parsed csv list into a datastore model via GaeDbManager.

    csv_list: list of rows as produced by get_csv_file().
    table_model: gae datastore model class.
    header_idx: index of the header line, e.g. 0 means the first line is
        the header.  Any line before the header line is ignored; every
        line after it becomes one record keyed by the header values.
    fields: the schema of the table, a list of field dicts as defined by
        the dlz_table_schema table.  Currently unused -- the per-field
        validation/conversion that consumed it has been disabled; the
        parameter is kept for interface compatibility with callers.
    '''
    header = csv_list[header_idx]
    # one dict per data row, mapping header name -> raw cell value;
    # note zip() silently truncates rows shorter or longer than the header
    results = [dict(zip(header, row)) for row in csv_list[header_idx + 1:]]
    gdm = GaeDbManager(table_model.kind())
    gdm.insert_raw(results)


def upload_compaction_form(csv_file):
    '''
    Replace all compaction_form records with the contents of csv_file.

    csv_file: raw csv text uploaded via the form file control.
    '''
    table_model = model.compaction_form
    fields = ts.get_fields('compaction_form')
    # parse the upload *before* wiping the table, so a malformed csv
    # does not destroy the existing data with nothing to replace it
    csv_list = get_csv_file(csv_file)
    db.delete(table_model.all())
    insert_records(csv_list, table_model, 0, fields)

def upload_asphalt_form(csv_file):
    '''
    Replace all asphalt_form records with the contents of csv_file.

    csv_file: raw csv text uploaded via the form file control.
    '''
    table_model = model.asphalt_form
    # bug fix: this previously fetched the schema for 'compaction_form';
    # every other upload_* helper loads its own table's schema.
    fields = ts.get_fields('asphalt_form')
    # parse the upload *before* wiping the table, so a malformed csv
    # does not destroy the existing data with nothing to replace it
    csv_list = get_csv_file(csv_file)
    db.delete(table_model.all())
    insert_records(csv_list, table_model, 0, fields)
    
def upload_dlz_connection(csv_file):
    '''
    Replace all dlz_connection records with the contents of csv_file.

    csv_file: raw csv text uploaded via the form file control.
    '''
    table_model = model.dlz_connection
    fields = ts.get_fields('dlz_connection')
    # parse the upload *before* wiping the table, so a malformed csv
    # does not destroy the existing data with nothing to replace it
    csv_list = get_csv_file(csv_file)
    db.delete(table_model.all())
    insert_records(csv_list, table_model, 0, fields)

def upload_dlz_config(csv_file):
    '''
    Replace all dlz_config records with the contents of csv_file.

    csv_file: raw csv text uploaded via the form file control.
    '''
    table_model = model.dlz_config
    fields = ts.get_fields('dlz_config')
    # parse the upload *before* wiping the table, so a malformed csv
    # does not destroy the existing data with nothing to replace it
    csv_list = get_csv_file(csv_file)
    db.delete(table_model.all())
    insert_records(csv_list, table_model, 0, fields)

def upload_compaction_test(csv_file):
    '''
    Replace all compaction_test records with the contents of csv_file.

    csv_file: raw csv text uploaded via the form file control.
    '''
    table_model = model.compaction_test
    fields = ts.get_fields('compaction_test')
    # parse the upload *before* wiping the table, so a malformed csv
    # does not destroy the existing data with nothing to replace it
    csv_list = get_csv_file(csv_file)
    db.delete(table_model.all())
    insert_records(csv_list, table_model, 0, fields)

def upload_asphalt_test(csv_file):
    '''
    Replace all asphalt_test records with the contents of csv_file.

    csv_file: raw csv text uploaded via the form file control.
    '''
    table_model = model.asphalt_test
    fields = ts.get_fields('asphalt_test')
    # parse the upload *before* wiping the table, so a malformed csv
    # does not destroy the existing data with nothing to replace it
    csv_list = get_csv_file(csv_file)
    db.delete(table_model.all())
    insert_records(csv_list, table_model, 0, fields)
    
    
def upload_dlz_project_list(csv_file):
    '''
    Replace all dlz_project_list records with the contents of csv_file.

    csv_file: raw csv text uploaded via the form file control.
    '''
    table_model = model.dlz_project_list
    fields = ts.get_fields('dlz_project_list')
    # parse the upload *before* wiping the table, so a malformed csv
    # does not destroy the existing data with nothing to replace it
    csv_list = get_csv_file(csv_file)
    db.delete(table_model.all())
    insert_records(csv_list, table_model, 0, fields)
   
def upload_email_list(csv_file):
    '''
    Replace all Email records with the contents of csv_file.

    csv_file: raw csv text uploaded via the form file control.
    '''
    table_model = model.Email
    # NOTE(review): this reuses the 'dlz_project_list' schema for the
    # Email model -- looks like a copy/paste leftover; confirm whether a
    # dedicated email schema exists in dlz_table_schema before changing.
    fields = ts.get_fields('dlz_project_list')
    # parse the upload *before* wiping the table, so a malformed csv
    # does not destroy the existing data with nothing to replace it
    csv_list = get_csv_file(csv_file)
    db.delete(table_model.all())
    insert_records(csv_list, table_model, 0, fields)
def upload_dlz_table_schema(csv_file):
    '''
    Upload the dlz_table_schema table.

    Unlike the other upload_* helpers, the field schema here is
    hard-coded instead of fetched via ts.get_fields.
    '''
    table_model = model.dlz_table_schema
    csv_list = get_csv_file(csv_file)

    # (field_name, field_label, field_type) triples; field_position is
    # assigned in steps of 100 following the declaration order
    columns = [('field_name', 'Field Name', 'str'),
               ('field_label', 'Field Label', 'str'),
               ('field_type', 'Field Type', 'str'),
               ('field_position', 'Field Position', 'int'),
               ('exportable', 'Exportable', 'str'),
               ('table_name', 'Table Name', 'str'),
               ('comment', 'comment', 'str')]
    fields = [{'field_name': name,
               'field_label': label,
               'field_type': ftype,
               'field_format': '',
               'field_position': (idx + 1) * 100,
               'table_name': 'dlz_table_schema',
               'comment': ''}
              for idx, (name, label, ftype) in enumerate(columns)]

    db.delete(table_model.all())
    insert_records(csv_list, table_model, 0, fields)
    