import bsddb
import os
import hashlib
import md5
from utils import cdb_debug

class CdbMemcache:
	"""Process-wide in-memory cache with a megabyte size budget.

	Entries live in the class-level dict CACHE, so every instance in the
	process shares one cache. Sizes are accounted in megabytes
	(len(value) / 1e6); once the budget would be exceeded, further puts
	are rejected and logged.
	"""
	# Shared across ALL instances — this is a class attribute on purpose.
	CACHE = {}

	def __init__(self, max_size, use_cache):
		# Fix: honor the use_cache flag. It was previously accepted and
		# silently ignored (can_use was hard-coded to True).
		self.can_use = use_cache
		self.max_size = max_size        # capacity budget, in megabytes
		self.current_size = 0           # megabytes of values actually stored

	def is_key_exist(self, key):
		"""Return True iff caching is enabled and key is present."""
		if not self.can_use:
			return False
		return key in self.__class__.CACHE

	def get_value(self, key):
		"""Return the cached value for key, or False if caching is disabled.

		Raises KeyError when the key is absent — callers are expected to
		check is_key_exist() first (see CdbStore.get_value).
		"""
		if not self.can_use:
			return False
		return self.__class__.CACHE[key]

	def put_value(self, key, value):
		"""Store value under key unless it would exceed the size budget."""
		if not self.can_use:
			return
		size_mb = float(len(value)) / 1000000.0
		# Fix: account for the size only when the value is actually stored.
		# Previously current_size grew on every put, including rejected
		# ones, so the cache could claim to be full while holding little.
		if self.current_size + size_mb < self.max_size:
			self.current_size += size_mb
			self.__class__.CACHE[key] = value
		else:
			cdb_debug('CACHE ERROR: CACHE IS FULL!')

class CdbStore:
	"""Key/value store hash-partitioned across N Berkeley DB b-tree files,
	fronted by the shared in-memory CdbMemcache.

	Keys are routed to a bucket by md5(key) mod bucket-count, so the same
	key always lands in the same file as long as max_buckets is stable.
	"""

	def __init__(self, data_dir, max_buckets):
		# One b-tree file per bucket: data_dir/bucket0 .. bucketN-1,
		# created on demand (bsddb 'c' flag).
		self.dbs = []
		for i in xrange(max_buckets):
			dbname = os.path.join(data_dir, 'bucket' + str(i))
			db = bsddb.btopen(dbname, 'c')
			self.dbs.append(db)
		cdb_debug('All Buckets Ready!')
		cache_max_size = 500   # default 500mb
		use_cache = True
		self.cache = CdbMemcache(cache_max_size, use_cache)
		cdb_debug('CDB Memcache Ready!')

	def choose_bucket_id(self, key, number):
		"""Map key to a bucket index in [0, number) via md5 hashing."""
		key_md5 = hashlib.md5(key)
		# Fix: int() instead of the Python-2-only long() — on Python 2
		# int() auto-promotes to long for 128-bit values, so the result
		# is identical, and the code also runs on Python 3.
		hashval = int(key_md5.hexdigest(), 16)
		return hashval % number

	def get_value(self, key):
		"""Return the value for key, consulting the cache first.

		Returns None when the key exists in neither the cache nor its
		bucket. Disk hits are promoted into the cache.
		"""
		if self.cache.is_key_exist(key):
			cdb_debug('DATA STORE: fetching key ' + key + ' on cache')
			return self.cache.get_value(key)
		cdb_debug('DATA STORE: key not found in CACHE')
		bucket_id = self.choose_bucket_id(key, len(self.dbs))
		db = self.dbs[bucket_id]
		if key in db:
			data = db[key]
			self.cache.put_value(key, data)   # promote disk hit to cache
			return data
		return None

	def put_value(self, key, data):
		"""Write data under key to its bucket (and the cache), then sync.

		sync() after every write trades throughput for durability.
		"""
		self.cache.put_value(key, data)
		bucket_id = self.choose_bucket_id(key, len(self.dbs))
		db = self.dbs[bucket_id]
		db[key] = data
		db.sync()

	def close_all(self):
		"""Close every bucket database; safe to call more than once."""
		for db in self.dbs:
			db.close()
		# Fix: drop the stale handles so a repeated close_all() is a
		# no-op instead of closing already-closed databases.
		self.dbs = []

