# -*- coding: utf-8 -*-

import logging, logging.handlers

def getConfiguredLogger(name):
    """Return a logger for *name*, configuring it on first use.

    A logger with no handlers is assumed to be unconfigured; it gets a
    RotatingFileHandler writing to <request.folder>/logs/app.log at DEBUG
    level. Subsequent calls with the same name return the already
    configured logger untouched.

    :param name: logger name (web2py passes the application name)
    :returns: the configured logging.Logger instance
    """
    logger = logging.getLogger(name)
    if not logger.handlers:
        # First call for this name: attach a rotating file handler.
        import os
        log_format = "%(asctime)s %(levelname)s %(process)s %(thread)s %(funcName)s():%(lineno)d %(message)s"
        # NOTE(review): relies on web2py's global `request` being in scope.
        logPath = os.path.join(request.folder, 'logs')

        if not os.path.exists(logPath):
            os.makedirs(logPath)

        # maxBytes=1024 is tiny (1 KiB per file, 2 backups) -- presumably a
        # development setting; raise it for production.
        handler = logging.handlers.RotatingFileHandler(
            os.path.join(logPath, 'app.log'), maxBytes=1024, backupCount=2)
        handler.setFormatter(logging.Formatter(log_format))
        handler.setLevel(logging.DEBUG)

        logger.addHandler(handler)
        logger.setLevel(logging.DEBUG)

        # Test entry (lazy %-args: the message is only built if emitted):
        logger.info('%s logger created', name)
    else:
        # Test entry:
        logger.info('%s already exists', name)
    return logger

# Module-level logger for this application. The logger is keyed on the
# web2py application name, so each app gets its own handler configuration.
logger = getConfiguredLogger(request.application)


#########################################################################
## This scaffolding model makes your app work on Google App Engine too
## File is released under public domain and you can use without limitations
#########################################################################

## if SSL/HTTPS is properly configured and you want all HTTP requests to
## be redirected to HTTPS, uncomment the line below:
# request.requires_https()

# Database connection: SQLite locally, Google Datastore when deployed on GAE.
if not request.env.web2py_runtime_gae:
    ## if NOT running on Google App Engine use SQLite or other DB
    db = DAL('sqlite://storage.sqlite')
else:
    ## connect to Google BigTable (optional 'google:datastore://namespace')
    db = DAL('google:datastore')
    ## on GAE the filesystem is not writable, so sessions and tickets
    ## are stored in the datastore instead
    session.connect(request, response, db = db)
    ## or store session in Memcache, Redis, etc.
    ## from gluon.contrib.memdb import MEMDB
    ## from google.appengine.api.memcache import Client
    ## session.connect(request, response, db = MEMDB(Client()))

## by default give a view/generic.extension to all actions from localhost
## none otherwise. a pattern can be 'controller/function.extension'
response.generic_patterns = ['*'] if request.is_local else []
## (optional) optimize handling of static files
# response.optimize_css = 'concat,minify,inline'
# response.optimize_js = 'concat,minify,inline'

#########################################################################
## Here is sample code if you need for
## - email capabilities
## - authentication (registration, login, logout, ... )
## - authorization (role based authorization)
## - services (xml, csv, json, xmlrpc, jsonrpc, amf, rss)
## - old style crud actions
## (more options discussed in gluon/tools.py)
#########################################################################

from gluon.tools import Auth, Crud, Service, PluginManager, prettydate
auth = Auth(db, hmac_key=Auth.get_or_create_key())
crud, service, plugins = Crud(db), Service(), PluginManager()

## create all tables needed by auth if not custom tables
auth.define_tables()

## configure email
mail = auth.settings.mailer
# FIX: the original `'logging' or 'smtp.gmail.com:587'` always evaluated to
# 'logging' (a non-empty string is truthy), leaving the SMTP host as dead
# code. 'logging' makes web2py dump outgoing mail to the console; for
# production set a real host, e.g. 'smtp.gmail.com:587'.
mail.settings.server = 'logging'
mail.settings.sender = 'you@gmail.com'
mail.settings.login = 'username:password'

## configure auth policy
auth.settings.registration_requires_verification = False
auth.settings.registration_requires_approval = False
auth.settings.reset_password_requires_verification = True

## if you need to use OpenID, Facebook, MySpace, Twitter, Linkedin, etc.
## register with janrain.com, write your domain:api_key in private/janrain.key
from gluon.contrib.login_methods.rpx_account import use_janrain
use_janrain(auth, filename='private/janrain.key')

#########################################################################
## Define your tables below (or better in another model file) for example
##
## >>> db.define_table('mytable',Field('myfield','string'))
##
## Fields can be 'string','text','password','integer','double','boolean'
##       'date','time','datetime','blob','upload', 'reference TABLENAME'
## There is an implicit 'id integer autoincrement' field
## Consult manual for more options, validators, etc.
##
## More API examples for controllers:
##
## >>> db.mytable.insert(myfield='value')
## >>> rows=db(db.mytable.myfield=='value').select(db.mytable.ALL)
## >>> for row in rows: print row.id, row.myfield
#########################################################################

import os
import shutil

def storeDataFile(file, filename = None, path = None):
    """web2py custom_store callback: save an uploaded stream to disk.

    The incoming `path` argument is deliberately ignored: files always go
    under uploads/files/<user id>, giving each user a private directory.

    :param file: open file-like object containing the upload data
    :param filename: destination file name (web2py always supplies one,
        despite the None default required by the callback signature)
    :param path: ignored -- see above
    :returns: the stored filename
    """
    # NOTE(review): assumes an authenticated user (session.auth.user);
    # confirm this is only reachable behind auth.
    path = os.path.join(request.folder, 'uploads', 'files', str(session.auth.user.id))

    if not os.path.exists(path):
        os.makedirs(path)

    # `with` replaces the original open/try/finally and guarantees the
    # destination handle is closed even if the copy raises.
    with open(os.path.join(path, filename), 'wb') as destination:
        shutil.copyfileobj(file, destination)

    return filename
    

# Initial design: this custom retrieve is for NON-ADMIN users only and may
# only fetch files belonging to the logged-in user.
# Admin-level file listing and selection/processing will be handled separately.
def retrieveDataFile(filename = None, path = None):
    """web2py custom_retrieve callback (non-admin users).

    Ignores the supplied `path` and resolves the file only inside the
    logged-in user's private upload directory, so users cannot read each
    other's files. Returns (filename, open binary stream); the caller is
    responsible for closing the stream.
    """
    userDir = os.path.join(request.folder, 'uploads', 'files',
                           str(session.auth.user.id))
    stream = open(os.path.join(userDir, filename), 'rb')
    return (filename, stream)
    

def deleteDataFile(filename = None, path = None):
    """web2py custom_delete callback.

    The `path` argument is ignored: deletion is confined to the logged-in
    user's private upload directory. Returns the deleted filename.
    """
    target = os.path.join(request.folder, 'uploads', 'files',
                          str(session.auth.user.id), filename)
    os.remove(target)
    return filename
    
    
    
# File-ownership records: one row per uploaded data file, linking the file
# on disk to the auth_user who owns it.
db.define_table('files',
                Field('owner_id', db.auth_user, required=True, default=1, notnull=True, label='Owner',
                      requires=IS_IN_DB(db, db.auth_user.id, '%(first_name)s %(last_name)s - %(email)s')), # Used by SQLFORM, owner_id must reference a valid id in auth_user table
                Field('data_file_name','string', required=True, notnull=True, writable=False),
                Field('data_file_description','text'),
                Field('data_file_extension', 'string', required=True, notnull=True),
                # 'upload' field wired to the custom store/retrieve/delete
                # callbacks above, so files live in per-user directories;
                # autodelete removes the disk file when the row is deleted.
                Field('data_file', 'upload', uploadfield=True, custom_store = storeDataFile, custom_retrieve = retrieveDataFile, custom_delete = deleteDataFile, label='Data File', autodelete=True, comment = 'Allowed: SHP|SHX|DBF|PRJ|XML|TXT'),
                Field('data_file_path', 'string', required=True, notnull=True),
                Field('upload_datetime','datetime', required=True, notnull=True, writable=False))

db.files.data_file.requires = IS_UPLOAD_FILENAME(extension='^(shp)|(shx)|(dbf)|(prj)|(xml)|(txt)$')



#db.define_table('testtable', Field('owner_id', db.auth_user, required=True, default=1, notnull=True, label='Owner'))
#db.files.drop()  #Jinson's note: to let the DAL handle both metadata and actual db alteration operations, make sure those statements come AFTER the table definition
#db.executesql('drop table files') # the way to execute raw sql
