#!/usr/bin/env python

import fnmatch
import os, sys
import time, json
from datetime import datetime, timedelta

class CacheHandler ( object ):
	"""
	CacheHandler

	Generic handler for cache backends.

	Keeps an in-memory index of the known keys ( ``self._keys`` ) so that
	pattern based ``search`` / ``delete`` work on every backend; concrete
	backends implement the ``_set`` / ``_get`` / ``_delete`` primitives.
	"""

	@staticmethod
	def create ( type_name, dct ):
		"""
		create ( type_name, dct )

		Factory: return a concrete handler for ``type_name`` ( one of
		'memcache', 'redis', 'file', 'dbm' ) configured from ``dct``.

		Raises ``Exception`` when ``type_name`` names no known backend.
		"""
		if not type_name: type_name = ""

		cls = { "memcache" : MemCacheHandler,
			"redis"    : RedisHandler,
			"file"     : FileHandler,
			"dbm"      : DBMHandler
		      }.get ( type_name.lower () )

		if not cls: raise Exception ( "CacheHandler Not Found: '%s'" % type_name )

		if type_name == 'memcache':
			servers = dct.get ( 'servers', [ '127.0.0.1' ] )
			if not isinstance ( servers, list ): servers = [ servers ]
			return cls ( servers )

		if type_name == 'redis':
			return cls ()

		if type_name == 'file':
			return cls ( path = dct.get ( "path", "/tmp" ) )

		if type_name == 'dbm':
			return cls ( dbm = dct.get ( "dbm" ), table_name = dct.get ( "table_name", "cache" ) )

	def __init__ ( self ):
		self._keys = {}
		self._handler = None
		# BUGFIX: default timeout used by set()/set_multi() when the
		# caller passes -1.  It was never initialised, so any call with
		# the default timeout raised AttributeError.  0 means "never
		# expire", matching the backends' own _set() default.
		self._timeout = 0

	# {{{ internal functions
	def _init ( self ):
		# Re-populate the key index from the persisted '__keys__' entry.
		keys = self.get ( "__keys__" )
		if not keys: return

		for key in keys:
			self._keys [ key ] = 1

	def _save_keys ( self ):
		# Persist the key index so it survives across processes.
		self._set ( "__keys__", self.keys () )

	def _get_keys ( self ):
		# list() keeps the Python 2 return type on Python 3 too.
		return list ( self._keys.keys () )
	# }}}

	def set ( self, key, val, timeout = -1 ):
		"""
		set ( key, val, timeout = -1 )

		Set a new ``key`` with value ``val`` that will last ``timeout``
		seconds ( -1 means: use the handler default ).
		"""

		self._keys [ key ] = 1

		self._save_keys ()

		if timeout == -1: timeout = self._timeout

		return self._set ( key, val, timeout )

	def set_multi ( self, dct, timeout = -1 ):
		"""
		set_multi ( dct, timeout = -1 )

		Set all keys and values present in the ``dct`` provided.
		All keys will last ``timeout`` seconds ( -1: handler default ).
		"""

		if timeout == -1: timeout = self._timeout

		for k in dct.keys ():
			self._keys [ k ] = 1

		self._set_multi ( dct, timeout = timeout )

	def get ( self, key, def_val = None ):
		"""Return the value stored for ``key``, or ``def_val`` on a miss."""
		return self._get ( key, def_val )

	def get_multi ( self, keys ):
		"""Return a dict with the values of all the requested ``keys``."""
		return self._get_multi ( keys )

	def delete ( self, pattern ):
		"""Delete every known key matching the fnmatch ``pattern``."""
		keys = self.search ( pattern )

		res = False

		for key in keys:
			if key in self._keys: del self._keys [ key ]

		res = self._delete ( keys )

		self._save_keys ()

		return res

	def keys ( self ):
		"""Return the list of currently known keys."""
		return self._get_keys ()

	def search ( self, pattern ):
		"""Return the known keys matching the fnmatch ``pattern``."""
		return fnmatch.filter ( self.keys (), pattern )

	def clear_expired ( self ):
		"""Hook for backends that need explicit expiry ( no-op here )."""
		pass

# {{{ MemCacheHandler ( CacheHandler )
class MemCacheHandler ( CacheHandler ):
	"""
	Cache backend storing values in memcached through ``pylibmc``.

	Positional/keyword arguments are passed straight to
	``pylibmc.Client`` ( typically the server list ).
	"""

	def __init__ ( self, * args, ** kwargs ):
		super ( MemCacheHandler, self ).__init__ ()

		# BUGFIX: the previous ``try: ... except: raise`` + dead ``pass``
		# re-raised every error anyway; the guard added nothing, so it
		# is removed.  Import lazily so the module loads without pylibmc.
		import pylibmc
		self._handler = pylibmc.Client ( * args, ** kwargs )

		self._init ()

	def _set ( self, key, val, timeout = 0 ):
		# pylibmc: timeout 0 means "never expire".
		return self._handler.set ( key, val, timeout )

	def _set_multi ( self, dct, timeout = 0 ):
		return self._handler.set_multi ( dct, timeout )

	def _delete ( self, keys ):
		return self._handler.delete_multi ( keys )

	def _get ( self, key, def_val = None ):
		# pylibmc returns None on a miss; map it to ``def_val``.
		res = self._handler.get ( key )
		if res is None: return def_val

		return res

	def _get_multi ( self, keys ):
		return self._handler.get_multi ( keys )
# }}}
# {{{ RedisHandler ( CacheHandler )
class RedisHandler ( CacheHandler ):
	"""
	Cache backend storing JSON-serialised values in Redis.

	Redis itself is the authoritative key store, so ``_save_keys`` is a
	no-op and ``_get_keys`` asks the server instead of the local index.
	"""

	def __init__ ( self, * args, ** kwargs ):
		super ( RedisHandler, self ).__init__ ()

		# BUGFIX: dead ``except: raise`` + unreachable ``pass`` removed -
		# it re-raised every error anyway.  Import lazily so the module
		# loads without the redis package installed.
		import redis
		self._handler = redis.StrictRedis ( ** kwargs )

		self._init ()

	def _get_keys ( self ):
		# Rebuild the local index from the server's key space.
		keys = self._handler.keys ()
		self._keys = {}
		for k in keys:
			self._keys [ k ] = 1
		return keys

	def _save_keys ( self ):
		# Nothing to do: redis already persists its own key space.
		pass

	def _set ( self, key, val, timeout = 0 ):
		val = json.dumps ( val )

		if timeout:
			# SETEX: store value and expiry in a single round trip.
			return self._handler.setex ( key, timeout, val )

		return self._handler.set ( key, val )

	def _set_multi ( self, dct, timeout = 0 ):
		# BUGFIX: ``items`` instead of the Python2-only ``iteritems``
		# so the handler keeps working on Python 3.
		for k, v in dct.items ():
			self._set ( k, v, timeout )

	def _delete ( self, keys ):
		return self._handler.delete ( * keys )

	def _get ( self, key, def_val = None ):
		res = self._handler.get ( key )
		if res is None: return def_val

		return json.loads ( res )

	def _get_multi ( self, keys ):
		res = {}
		for k in keys:
			res [ k ] = self._get ( k )

		return res
# }}}
# {{{ FileHandler ( CacheHandler )
class FileHandler ( CacheHandler ):
	"""
	Cache backend storing every value as a JSON file under ``path``.

	Expiry is tracked in a ``__timeouts__`` file holding a key -> seconds
	map; a value older than its timeout ( judged by file mtime ) is
	deleted on read.  A ``<key>.lock`` file created with O_EXCL provides
	mutual exclusion between processes.
	"""

	def __init__ ( self, * args, ** kwargs ):
		super ( FileHandler, self ).__init__ ()

		self.path = kwargs.get ( 'path', '.' )
		self.timeouts = self._get ( '__timeouts__', {} )

		self._init ()

	def _set ( self, key, val, timeout = 0 ):
		"""Write ``val`` as JSON into the file named ``key``."""

		self.__lock ( key )

		try:
			# BUGFIX: ``with`` + ``finally`` so neither the file handle
			# nor the lock file leaks when the write raises.
			with open ( os.path.join ( self.path, key ), 'w' ) as f:
				f.write ( json.dumps ( val ) )
		finally:
			self.__unlock ( key )

		if timeout > 0:
			self.timeouts [ key ] = timeout

		# The bookkeeping entries never record their own timeout.
		if key not in ( '__keys__', '__timeouts__' ):
			self._save_timeouts ()

		return True

	def _set_multi ( self, dct, timeout = 0 ):
		# BUGFIX: ``items`` instead of Python2-only ``iteritems``.
		for k, v in dct.items ():
			self._set ( k, v, timeout )

		return True

	def _delete ( self, keys ):
		self.__lock ( '__timeouts__' )

		try:
			for k in keys:
				# BUGFIX: ``dict.has_key`` is Python2-only; use ``in``.
				if k in self.timeouts:
					del self.timeouts [ k ]
				fname = os.path.join ( self.path, k )
				if os.path.exists ( fname ):
					os.unlink ( fname )
		finally:
			self.__unlock ( '__timeouts__' )

		self._save_timeouts ()

		return True

	def _get ( self, key, def_val = None ):
		path = os.path.join ( self.path, key )

		if not os.path.exists ( path ):
			return def_val

		self.__lock ( key )

		s = os.stat ( path )

		mtime = datetime.fromtimestamp ( s.st_mtime )
		timeout = None

		if key not in ( '__keys__', '__timeouts__' ):
			timeout = self.timeouts.get ( key )

		if timeout is not None:
			if mtime + timedelta ( 0, timeout ) < datetime.now ():
				# Expired: drop the entry and report a miss.  Unlock
				# first - _delete() takes the '__timeouts__' lock.
				self.__unlock ( key )
				self._delete ( [ key ] )
				return def_val

		try:
			with open ( path, 'r' ) as f:
				# Narrowed from a bare ``except``: only a malformed
				# JSON payload falls back to {}; OS errors propagate.
				try:
					val = json.loads ( f.read () )
				except ValueError:
					val = {}
		finally:
			self.__unlock ( key )

		return val

	def _get_multi ( self, keys ):
		res = {}
		for k in keys:
			res [ k ] = self._get ( k )

		return res

	def _save_timeouts ( self ):
		# Persist the expiry map next to the cached values.
		self._set ( '__timeouts__', self.timeouts )

	def __lock ( self, key ):
		"""Acquire ``<key>.lock`` via O_EXCL creation, retrying ~5s."""
		fname = "%s.lock" % key
		times = 0
		while True:
			if times > 500:
				raise Exception ( "FileCache: Lock timeout!" )
			try:
				# NOTE(review): the fd is kept in a single attribute, so
				# nested locks would clobber each other - current call
				# paths never nest; confirm before adding new ones.
				self.lock = os.open ( os.path.join ( self.path, fname ), os.O_CREAT | os.O_EXCL | os.O_RDWR )
				return
			except OSError:
				# Somebody else holds the lock: back off and retry.
				time.sleep ( 0.01 )
				times += 1

	def __unlock ( self, key ):
		fname = "%s.lock" % key
		os.close ( self.lock )
		os.unlink ( os.path.join ( self.path, fname ) )
# }}}
# {{{ DBMHandler ( CacheHandler )
class DBMHandler ( CacheHandler ):
	"""
	Cache backend storing JSON-serialised values in a SQL table through a
	project DBManager instance ( the ``dbm`` keyword argument ).

	Table example::

		CREATE TABLE `cache` (
  			`id` varchar(250) NOT NULL,
  			`value` text,
  			`created` datetime DEFAULT NULL,
  			`timeout` int(10) unsigned DEFAULT '0',

  			PRIMARY KEY (`id`),
  			KEY `created` (`created`)
		);

	"""

	def __init__ ( self, * args, ** kwargs ):
		super ( DBMHandler, self ).__init__ ()

		self._handler = kwargs [ 'dbm' ]
		self._table_name = kwargs [ 'table_name' ]

		self._init ()

	def _set ( self, key, val, timeout = 0 ):
		# The table itself is the key index ( see _get_keys ), so the
		# '__keys__' meta entry is never stored.
		if key == '__keys__': return

		if val:
			# NOTE(review): a missing timeout is forced to 5 seconds -
			# presumably to guarantee eventual expiry; confirm before
			# changing this default.
			if not timeout: timeout = 5

			dct = {
				"id" : key,
				"value" : json.dumps ( val ),
				"timeout" : timeout,
				"created" : self._handler.raw ( "NOW()" )
			}

			cond = self._handler.qb ()
			cond.where ( 'id = ?', key )

			return self._handler.replace ( self._table_name, dct, cond, debug = True )
		else:
			# Falsy values are treated as deletions.
			return self._delete ( [ key ] )

	def _set_multi ( self, dct, timeout = 0 ):
		# BUGFIX: ``items`` instead of Python2-only ``iteritems``.
		for k, v in dct.items ():
			self._set ( k, v, timeout )

	def _delete ( self, keys ):
		cond = self._handler.qb ()
		cond.where_in ( 'id', keys )

		return self._handler.delete ( self._table_name, cond, debug = True )

	def _get ( self, key, def_val = None ):
		if key == '__keys__': return

		self._clear_expired ()
		cond = self._handler.qb ()
		cond.select ( 'value' )
		cond.table ( self._table_name )
		cond.where ( 'id = ?', key )
		val = self._handler.single_row ( cond, debug = True )

		if not val: return def_val

		return json.loads ( str ( val [ 'value' ] ) )

	def _get_multi ( self, keys ):
		self._clear_expired ()
		res = {}
		cond = self._handler.qb ()
		# BUGFIX: the table has no 'key' column ( see the schema in the
		# class docstring ) - the primary key column is 'id'.
		cond.select ( 'id', 'value' )
		cond.table ( self._table_name )
		cond.where_in ( 'id', keys )
		values = self._handler.all_rows ( cond, debug = True )
		if not values: return {}

		for row in values:
			res [ row [ 'id' ] ] = json.loads ( str ( row [ 'value' ] ) )

		return res

	def _clear_expired ( self ):
		# timeout == -1 marks entries that never expire.
		cond = self._handler.qb ()
		cond.where ( 'created + INTERVAL timeout SECOND < NOW() AND timeout <> -1' )
		return self._handler.delete ( self._table_name, cond )

	def _save_keys ( self ):
		# The table is the authoritative key store - nothing to persist.
		pass

	def _get_keys ( self ):
		self._clear_expired ()
		cond = self._handler.qb ()
		cond.select ( 'id' )
		cond.table ( self._table_name )
		rows = self._handler.all_rows ( cond )
		if not rows: return []

		return [ x [ 'id' ] for x in rows ]
# }}}

if __name__ == '__main__':
	# Manual smoke test for the cache handlers.
	# NOTE(review): Python 2 syntax ( print statements ); the default
	# backend needs a live memcached on 127.0.0.1.
	"""
	from os3.dbm.dbmanager import DBManager
	
	dbm = DBManager ( DBManager.DBM_TYPE_MYSQL )
	dbm.connect ( user = 'root', db = 'test' )

	m = CacheHandler.create ( "dbm", dbm = dbm, table_name = 'cache' )
	"""

	m = CacheHandler.create ( "memcache", { 'servers' : '127.0.0.1' } )
	#m = CacheHandler.create ( "redis" )
	#m = CacheHandler.create ( "file", path = "/ramdisk/cache" )

	# May be non-empty when the backend already holds a '__keys__' entry
	# from a previous run.
	print "KEY0: ", m.keys ()

	m.set ( "ciao", "mamma" )
	m.set ( "piazza", "pazza" )
	m.set ( "pre-aaa", "pazza" )
	m.set ( "pre-bbb", "pazza" )
	m.set ( "pre-ccc", "pazza" )

	m.set_multi ( { "a" : "1", "b": 2, "c" : 3 } )

	m.delete ( "piazza" )

	# search()/delete() match keys with fnmatch patterns.
	print m.search ( "pre-*" )

	m.delete ( "aaaa*" )
	m.delete ( "pre-*" )

	print m.get ( "a" )
	print "KEY1: ", m.keys ()