# -*- Mode: Python; tab-width: 4 -*-

# ================
# WORK IN PROGRESS
# ================

# LRU file cache.  Will this speed up performance?  [if it does, we
# should then investigate using avl for the LRU 'priority queue']
#
# cache entry:
# cache[filename] = [last_access, filename, contents]
# (last_access is first so we can sort by that field)
#

import stat
import time

class file_cache:
	"""LRU cache of whole-file contents, layered over a filesystem object.

	Cache entry layout: cache[filename] = [last_access, filename, contents]
	(last_access first so entries can be ordered by recency of use).
	Only 'rb' opens are cached; other modes pass straight through.
	"""

	def __init__ (self,
				  filesystem,
				  max_file_size=1024 * 1024,
				  max_total_size = 10 * 1024 * 1024
				  ):
		# filesystem: any object with an open(filename, mode) method
		# returning a file-like object (e.g. filesys.os_filesystem).
		self.cache = {}
		self.filesystem = filesystem
		self.max_file_size = max_file_size		# largest single file we will cache
		self.max_total_size = max_total_size	# cap on sum of cached contents
		self.total_size = 0						# current sum of cached contents

	def open (self, filename, mode='rb'):
		"""Return a file-like object for <filename>, serving 'rb' from cache."""
		# We will handle only 'rb' opens; everything else passes through.
		if mode != 'rb':
			return self.filesystem.open (filename, mode)
		if filename in self.cache:
			entry = self.cache[filename]
			entry[0] = time.time()		# refresh recency for LRU ordering
			return cached_file (entry[2])
		# suck the whole file up (close the handle; the original leaked it).
		real_file = self.filesystem.open (filename, 'rb')
		data = real_file.read()
		real_file.close()
		size = len(data)
		# BUGFIX: max_file_size was previously ignored.  Files larger than
		# that limit are served but never cached, so one huge file cannot
		# evict the entire working set.
		if size <= self.max_file_size:
			self.make_room (size)
			self.total_size = self.total_size + size
			self.cache[filename] = [
				time.time(),
				filename,
				data
				]
		return cached_file (data)

	def make_room (self, size):
		"""Evict least-recently-used entries until <size> bytes fit."""
		if self.total_size + size > self.max_total_size:
			# Order by last_access only (oldest first).  An explicit key
			# avoids comparing filenames/contents on timestamp ties, and
			# sorted() copes with dict.values() being a view in py3.
			victims = sorted (self.cache.values(), key=lambda entry: entry[0])
			# how much do we need to trim?
			trim = (self.total_size + size) - self.max_total_size
			for access, name, data in victims:
				if trim <= 0:
					break
				# keep removing files until there's room
				trim = trim - len(data)
				del self.cache [name]
				self.total_size = self.total_size - len(data)

class cached_file:
	"""Read-only file-like wrapper around an in-memory buffer (str or bytes)."""

	def __init__ (self, data):
		self.data = data	# the full contents, shared with the cache entry
		self.pos = 0		# current read offset

	def read (self, amount=None):
		"""Return up to <amount> bytes from the current position.

		With amount=None, return everything remaining.  At EOF, return an
		empty buffer of the same type as the underlying data.
		"""
		ld = len(self.data)
		pos = self.pos
		if amount is None:			# BUGFIX: identity test, not '== None'
			amount = ld - pos
		if pos < ld:
			chunk = min (amount, ld - pos)
			self.pos = pos + chunk
			return self.data[pos:self.pos]
		# BUGFIX: empty slice of self.data, so bytes buffers yield b''
		# rather than the str '' (both are falsy, so loop callers that
		# test 'if not d' behave identically).
		return self.data[0:0]

	def close (self):
		# Nothing to release; the buffer belongs to the cache.
		pass

	def __len__ (self):
		return len(self.data)

import time
class timer:
	"""Trivial stopwatch: remembers its creation time.

	end() reports the wall-clock seconds elapsed since construction;
	it may be called repeatedly.
	"""

	def __init__ (self):
		# mark the moment the stopwatch was started
		self.start = time.time()

	def end (self):
		# seconds elapsed since __init__ ran
		now = time.time()
		return now - self.start

if __name__ == '__main__':
	# Crude benchmark: read 500 randomly-chosen .py files in 512-byte
	# chunks, first through the cache, then with plain open(), and print
	# the elapsed time for each.  Finally spot-check that a cached read
	# returns exactly the same bytes as a direct read.
	# NOTE: print is used in single-argument call form so this runs under
	# both Python 2 and Python 3 (the rest of the module permitting).
	import random
	import glob
	import filesys
	fs = filesys.os_filesystem ('.', '/')
	file_list = glob.glob ('./*.py')
	cache = file_cache (fs, 1024 * 50, 1024 * 100)
	print ('caching version...')
	t = timer()
	for x in range(500):
		f = random.choice (file_list)
		fd = cache.open (f)
		while 1:
			d = fd.read(512)
			if not d:
				break
		fd.close()		# BUGFIX: descriptors were never closed
	print (t.end())
	print ('cache size %d' % cache.total_size)
	print ('non-caching version...')
	t = timer()
	for x in range(500):
		f = random.choice (file_list)
		fd = open (f, 'rb')
		while 1:
			d = fd.read(512)
			if not d:
				break
		fd.close()		# BUGFIX: descriptors were never closed
	print (t.end())
	# sanity check: cached contents must match the real file exactly
	f = random.choice (file_list)
	cached_fd = cache.open (f, 'rb')
	real_fd = open (f, 'rb')
	if cached_fd.read() != real_fd.read():
		print ('HEY! they are not the same!!!')
	cached_fd.close()
	real_fd.close()
