#!/usr/bin/python


###################################################################################
#   Copyright 2010 Jason Hoover
#
#
#   This file is part of AutoCache.
#
#   AutoCache is free software: you can redistribute it and/or modify
#   it under the terms of the GNU Lesser General Public License as published by
#   the Free Software Foundation, either version 3 of the License, or
#   (at your option) any later version.
#
#   AutoCache is distributed in the hope that it will be useful,
#   but WITHOUT ANY WARRANTY; without even the implied warranty of
#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#   GNU General Public License for more details.

#   You should have received a copy of the GNU Lesser General Public License
#   along with AutoCache.  If not, see <http://www.gnu.org/licenses/>.
###################################################################################



######################################
# ac-cache - Cache management functions. Depends on ac-common.
#
# Function Name		Description				Usage			Returns
#
# ac_checkup		Checks status of a given URL		ac_checkup(url)		Local URL if cached. False if invalid/stale. 
# ac_validate		Checks integrity of a cached object 	ac_validate(hash)	True or False
# ac_update		Updates a URL into the cache		ac_update(url)		Nothing
# ac_queue		Adds/Retrieves downloads from a queue   ac_queue([url])		Nothing if URL set, next URL from queue if unset.
# ac_dlcleanup		Cleans up the downloads folder.		ac_dlcleanup()		Nothing
# ac_remove		Removes a cache object directory.	ac_remove(path)		Nothing
# ac_cachesize		Fast handler of cache size queries.	ac_cachesize([adjust])  Nothing

import time
import cPickle
import urllib2
import fcntl

import ac_common
from ac_common import *

#############################
# ac_checkup - Check the freshness of a cache object.
# Will call ac_update to update a cached file.
#############################

def ac_checkup(url):

	# Check the freshness of a cache object for the given URL.
	# Returns the local (cache_web) URL when the cached copy can be served,
	# or False when the object is missing, stale or could not be verified.

	# url_hash - Saves us from doing an md5 over and over..
	# filename - The filename portion of the URL being evaluated.
	# fullpath - Full path to the cache object directory.

	url_hash=ac_urltohash(url)
	filename=ac_urltofile(url)
	fullpath=ac_settings["cache_dir"]+url_hash+"/"

	# See if the cache object exists at all; bail out early if not.

	if not os.path.isdir(fullpath):
		ac_log.debug("Object: "+url_hash+" does not exist in cache, at all.")
		return False

	ac_log.debug("Cached file: " +url_hash+" exists.")

	try:
		# If the file in question has been served up less than trust_short
		# seconds ago, serve our cached version without re-checking the origin.

		if ac_fileage(fullpath+"access_log") < ac_settings["trust_short"]:

			ac_log.debug(" Cached version of " + url_hash + " was served less than " + str(ac_settings["trust_short"]) + " seconds ago. Using cached version.")

			# Update the access timestamp so subsequent checkups stay on the fast path.

			os.utime(fullpath+"access_log",None)
			return ac_settings["cache_web"]+url_hash+filename

	except OSError:
		ac_log.warning("Could not read access_log for: "+url_hash)
		return False

	# Attempt to use the metadata to decide whether our copy is still current.

	try:
		# Read the object's local and remote metadata into memory.
		# Close the metadata handle explicitly instead of leaking it.

		metadata_file=open(fullpath+"metadata")
		try:
			cache_metadata=cPickle.load(metadata_file)
		finally:
			metadata_file.close()

		url_metadata=dict(urllib2.urlopen(url).info())

		# If the URL's mtime is not the same as ours, ours is probably the wrong file.
		if cache_metadata["last-modified"] != url_metadata["last-modified"]:
			ac_log.debug("Remote date: " + url_metadata["last-modified"] + " but cached file: " + cache_metadata["last-modified"])
			return False

		# If the URL has a different size, then wrong file.
		elif cache_metadata["content-length"] != url_metadata["content-length"]:
			ac_log.debug("Remote size: " + url_metadata["content-length"] + " but cached file: " + cache_metadata["content-length"])
			return False

		# If the server is serving a txt file instead of an exe, something is wrong here.
		elif cache_metadata["content-type"] != url_metadata["content-type"]:
			ac_log.debug("Remote content: "+ url_metadata["content-type"] + " but cached type: "+ cache_metadata["content-type"])
			return False

		# Otherwise, our cache is good!
		else:
			ac_log.debug("Cached version of "+url_hash+" good! Sending ours.")
			return ac_settings["cache_web"]+url_hash+filename

	except KeyError:

		# A key from one of the metadata sets was missing.
		# str() the dicts here - concatenating them raw raised TypeError inside this handler.
		ac_log.warning("Value missing from metadata in "+url_hash+".\n Local metadata contents: "+str(cache_metadata)+"\n Remote metadata contents: "+str(url_metadata))
		return False

	except urllib2.HTTPError:

		# urllib2 got a 404 or the like. HTTPError is a subclass of URLError,
		# so it must be caught first - the old order made this branch unreachable.
		ac_log.debug("Recieved error code for: "+url+" sadly returning original result..")
		return False

	except urllib2.URLError:

		# This means urllib2 couldn't reach the host.
		ac_log.debug("Could not reach host for: "+url+" sadly returning original result..")
		return False

	except (IOError, OSError):

		# The metadata file was missing or unreadable (open() raises IOError in py2).
		ac_log.warning("Could not read metadata for: "+url_hash)
		return False

#######################################
# ac_validate - Validates the metadata for a given url hash.
# Note that this does not validate the file against the URL, but instead does a local sanity check.
# Use ac_checkup for remote tests. This is purely an I/O intensive cache object validation
# subroutine, to be used once after a completed download and periodically after that.
#######################################

def ac_validate(url_hash):

	# Locally sanity-check a cached object's file against its stored metadata.
	# This does not contact the origin URL (use ac_checkup for that); it only
	# verifies size and mtime. Returns True on success, False on any mismatch
	# or read problem.

	ac_log.debug("Going to validate: "+url_hash)

	fullpath=ac_settings["cache_dir"]+url_hash+"/"

	try:
		# Read the object's metadata into memory, closing the handle afterwards.
		metadata_file=open(fullpath+"metadata")
		try:
			cache_metadata=cPickle.load(metadata_file)
		finally:
			metadata_file.close()

		# Derive the cached filename from the original URL stored in the metadata.

		filename=fullpath+ac_urltofile(cache_metadata["original-url"])

		# Validate size..

		if int(os.stat(filename).st_size) != int(cache_metadata["content-length"]):
			ac_log.info("Metadata size is: " + cache_metadata["content-length"] + " but cached file: " + str(os.stat(filename).st_size))
			return False

		# Validate local-cache mtime.
		#
		# This part is a little tricky.
		# Mtimes on systems are stored in local time, so you have to convert them to gmtime (GMT/UTC).
		# Web provided mtimes are always provided in GMT/UTC.
		# url_metadata time format comes as: "%a, %d %b %Y %H:%M:%S %Z" e.g. "Sun, 18 Apr 2010 06:37:55 GMT"

		if time.strptime(cache_metadata["last-modified"],"%a, %d %b %Y %H:%M:%S %Z") != time.gmtime(os.stat(filename).st_mtime):
			ac_log.info("Metadata date: " + str(time.strptime(cache_metadata["last-modified"],"%a, %d %b %Y %H:%M:%S %Z")) + " but cached file: " + str( time.gmtime( os.stat(filename).st_mtime) ) )
			return False

	except KeyError:
		ac_log.warning("Some metadata values for " +url_hash+" were missing!")
		return False

	except ValueError:
		ac_log.warning("Some metadata values for "+url_hash+" were corrupt!")
		return False

	except (IOError, OSError):
		# os.stat raises OSError (not IOError) in py2 - catch both so a file
		# that vanished mid-validation doesn't crash the caller.
		ac_log.error("Some serious problem reading metadata or the stored file for: "+url_hash)
		return False

	ac_log.debug("Validation of "+url_hash+" passed!")
	return True

##################
# ac_update -  Removes the old destination cache dir and runs a loop of urls from the 
# queue to download. Blocks until finished and should be called as a threaded object.
# Its primary function is to update files in the cache. Takes URLs as arguments.
##################

def ac_update(url):

	# Refresh the cache object for a URL: remove the stale copy, queue the URL,
	# then drain the download queue (respecting dl_slots) via wget subprocesses.
	# Blocks until the queue is drained, so call it as a threaded object.

	# Make sure URL is a string, so we don't have to do it later. You know, in case we get a Null or something stupid.

	url=str(url)

	# Make sure the URL is still reachable before attempting to update it. Otherwise we might wind up removing an
	# object that's just temporarily 404.

	try:
		urllib2.urlopen(url).close()
	except Exception:
		ac_log.info("Target URL " + url + " was unreachable or not found, not queuing, removing or downloading.")
		return

	url_hash=ac_urltohash(url)

	# Make sure the object dir holds only a sane number of files (metadata, the file itself,
	# access_log and the in-flight download). It may be missing the metadata, which is why we
	# can't know the contained file names, so we blindly remove the files in the directory.
	# Also note that this function should -never- be called unless the file is really out of date!

	if not os.path.isdir(ac_settings["cache_dir"]+url_hash):
		ac_log.debug("Cache dir: "+str(url_hash)+" already gone or nonexistant.")

	elif len(os.listdir(ac_settings["cache_dir"]+url_hash)) <  5:

		# Don't panic if this fails. Some other process may have done the deed for us.
		try:
			# Subtract the cache size from our current status before killing the directory.
			ac_cachesize(-ac_du(ac_settings["cache_dir"]+url_hash))
			ac_remove(ac_settings["cache_dir"]+url_hash)
		except Exception:
			ac_log.warning("Something went wrong when I tried to remove: "+str(ac_settings["cache_dir"]+url_hash))

	else:
		ac_log.warning("Did not delete cache dir: "+url_hash+" for sanity reasons!")

	ac_queue(url)

	# Don't even bother with the first part of the download loop if there's no more slots.

	if len(os.listdir(ac_settings["state_dir"]+"download/locks")) > ac_settings["dl_slots"]:
		ac_log.debug("No more free download slots, not even bothering to start a download.")
		ac_dlcleanup()
		return

	# Load the first object from the queue using ac_queue before starting our loop.

	url=ac_queue()

	# Until we've run out of URLs...

	while url:
		ac_log.info("Downloading: "+url)

		# Hash the file, but don't quite bother with the metadata quite yet.

		download_fn	= ac_urltofile(url)
		download_hash	= ac_urltohash(url)

		# Other processes can blindly add stuff to the queue as requests come in, so we can have duplicates there.
		# If the object is being downloaded or already in the cache, skip it.

		if os.path.isdir(ac_settings["cache_dir"]+download_hash) or os.path.isfile(ac_settings["state_dir"]+"download/locks/"+download_hash):
			ac_log.debug("Download for: "+url+" was already completed or in progress. Skipping.")
			url=ac_queue()
			continue

		# If we can't get the metadata, the file may have gone missing in the time it took to get to it.

		try:
			download_metadata	= dict(urllib2.urlopen(url).info())
		except Exception:
			ac_log.debug("Unable to retrieve metadata for: "+url)
			url=ac_queue()
			continue

		# Make sure this doesn't put us over budget.

		try:
			if int(download_metadata["content-length"])+ac_cachesize() > ac_settings["cache_mblimit"]:
				ac_log.warning("Cache size would go (or is) over cache_mblimit: "+str(ac_cachesize())+"/"+str(ac_settings["cache_mblimit"]))
				url=ac_queue()
				continue

			elif ac_df(ac_settings["cache_dir"]) > ac_settings["cache_pctlimit"]:
				ac_log.warning("Cache size over cache_pctlimit: "+str(ac_df(ac_settings["cache_dir"]))+"/"+str(ac_settings["cache_pctlimit"]))
				url=ac_queue()
				continue
		except Exception:
			# Propagate so the thread dies loudly. (A dead 'return' that sat
			# after this raise was removed - it could never execute.)
			ac_log.error("Got some horrible error trying to determine availible space. Crying to mommy.")
			raise

		# Confirm the availibility of a slot. Return from the function if none are availible.

		if len(os.listdir(ac_settings["state_dir"]+"download/locks")) > ac_settings["dl_slots"]:
			ac_log.debug("No more download slots availible. Terminating this thread and putting URL back in queue.")
			ac_dlcleanup()
			ac_queue(url)
			return
		else:

			# Try to open a lock for the download. <statedir>/download/locks/<urlhash>
			try:
				download_lock=open(ac_settings["state_dir"]+"download/locks/"+download_hash,'w')
				fcntl.flock(download_lock,fcntl.LOCK_EX|fcntl.LOCK_NB)

			# Failing that, see if you can clean up the download directory and move on to the next file.
			except IOError:
				ac_log.warning("Unable to create a lock for: "+url)
				ac_dlcleanup()
				url=ac_queue()
				continue

		# Build <statedir>/download/active/<urlhash>/ so that we can move it into
		# the cache when we're finished.

		download_dir=ac_settings["state_dir"]+"download/active/"+download_hash+"/"
		ac_log.debug("Making download dir: "+download_dir)
		os.mkdir(download_dir)

		# Add the URL source to the metadata.

		download_metadata["original-url"]=url

		# Put our metadata into the file. Close the handle explicitly so the
		# pickle is flushed to disk before wget (and ac_validate) run.

		metadata_file=open(download_dir+"metadata",'wb')
		cPickle.dump(download_metadata,metadata_file)
		metadata_file.close()

		# Start a wget subprocess. The arguments, in order:
		#	<path to wget>
		#	<destination filename, just to be safe>
		#	<the rate limit in kb's - zero implies no rate limit>
		#	<the output logfile for wget>
		#	<the URL>

		ac_log.debug("Starting wget for: "+url+" in "+download_dir)

		subprocess.call([
				ac_settings["dl_exec"],
				"--output-document="+download_dir+download_fn,
				"--limit-rate="+str(ac_settings["dl_bwlimit"])+"k",
				"--output-file="+download_dir+"wget.log",
				url
			])

		# Make the access_log file in the dir.

		open(download_dir+"access_log",'a').close()

		# For now, assume the download went okay.

		try:
			ac_log.debug("Moving: "+download_hash+" into cache_dir")
			os.rename(download_dir,ac_settings["cache_dir"]+download_hash)
		except OSError:
			ac_log.error("Unable to move object into cache : "+download_hash)

			# Clean up the downloaded version.

			ac_remove(download_dir)

		# Make sure everything is okay, clean up if not.

		if not ac_validate(download_hash):
			ac_log.error("Hash object: "+download_hash+" did not validate! Removing.")
			ac_remove(ac_settings["cache_dir"]+download_hash)
		else:
			# Only count the object's size when it actually made it into the cache -
			# running ac_du on the just-removed directory used to blow up here.
			ac_cachesize(ac_du(ac_settings["cache_dir"]+download_hash))

		# Close and delete our particular lock.

		download_lock.close()
		os.remove(ac_settings["state_dir"]+"download/locks/"+download_hash)
		ac_log.info("Sucessfully cached "+url+ " as "+download_hash)

		url=ac_queue()


##############################
# ac_queue reads and writes the queue.
# I made this into a function because having it all in ac_download was too fucking much to be readable.
# I might re-add it later, but that'd take time and effort.
#############################

def ac_queue(url=""):

	# Append *url* to the persistent download queue, or - when called with no
	# argument - pop and return the next queued URL (False when the queue is empty).

	try:

		# Lock the queue file. We hold a full exclusive lock because the read and
		# the write below must be atomic. Mode is 'r+b' (read/write binary without
		# truncating); the old 'r+w+b' only worked by accident of fopen() parsing.

		dl_queuefile=open(ac_settings["state_dir"]+"queue",'r+b')
		fcntl.flock(dl_queuefile,fcntl.LOCK_EX)
		dl_queuefile.seek(0)

		if url != "":

			ac_log.debug("Adding "+url+" to queue...")

			# Read the queue as a list object so that if it's empty, you can still append to it.

			try:
				dl_queue=list(cPickle.load(dl_queuefile))

			except EOFError:
				# cPickle.load raises EOFError if the file is empty. Just presume blank if so.
				dl_queue=[]

			# Add this URL to the queue and write it back.
			dl_queue.append(url)
			dl_queuefile.seek(0)
			cPickle.dump(dl_queue,dl_queuefile,cPickle.HIGHEST_PROTOCOL)

		else:

			ac_log.debug("Retrieving next object from queue...")

			try:
				dl_queue=list(cPickle.load(dl_queuefile))

			except EOFError:
				# A totally empty queue file (possibly destroyed by a user).
				dl_queue = False

			# Take the first URL out of the list; False signals an empty queue.

			if dl_queue:
				url=dl_queue.pop(0)
			else:
				url=False

			# Write the shortened queue back and drop any stale trailing bytes
			# left over from the previous, longer pickle.
			dl_queuefile.seek(0)
			cPickle.dump(dl_queue,dl_queuefile,cPickle.HIGHEST_PROTOCOL)
			dl_queuefile.truncate()

		# Closing the file also releases the flock. If something was added to the
		# queue, it doesn't matter what we return; on a query we return the result.

		dl_queuefile.close()
		return(url)

	except IOError:

		# This handles the loss or reset of a queue file, in case the user deletes it.
		# NOTE: a stray debugging 'raise' used to sit at the top of this handler and
		# made the entire recovery path below unreachable.
		ac_log.warning("Queue file destroyed/vanished/catastrophie. Attempting to make a new one.")

		# Attempt to recreate the file.
		try:
			# Running ac_stateinit reconfigures the state DIR.
			ac_stateinit()
		except Exception:
			ac_log.critical("Queue file lost and unable to recreate. Unrecoverable.")
			raise IOError("Queue file lost and unable to recreate, Unrecoverable.")

		# Nothing sensible to hand back after a reset: report an empty queue.
		return False

##################
# ac_dlcleanup - This function is responsible for cleaning up the download directory and is
# run once upon import. In order for us to make sure the download directory is in a sane state,
# we have to actually lock it. This does not have to be a function and can be mained in later.
################# 

def ac_dlcleanup():

	# Sweep stale locks and dead downloads out of the download directory.
	# Skips the sweep entirely if one ran within the last trust_short seconds.

	cleanup_log=ac_settings["state_dir"]+"lastcleanup"

	# No timestamp file at all means the state dir needs (re)initialising.

	if not os.path.isfile(cleanup_log):
		ac_stateinit()

	if ac_fileage(cleanup_log) < ac_settings["trust_short"]:

		ac_log.debug("Download directory was cleaned less than time set in trust_short. Not cleaning again.")

		# Refresh the timestamp and bail out.
		os.utime(cleanup_log,None)
		return

	ac_log.debug("Cleaning up download directory.")

	# Shorthand paths used throughout the sweep below.

	active_dir=ac_settings["state_dir"]+"download/active/"
	locks_dir=ac_settings["state_dir"]+"download/locks/"

	# Nothing in either directory means there is nothing to do.

	if not (os.listdir(active_dir) or os.listdir(locks_dir)):
		ac_log.debug("No old downloads to clean up, good!")
		return

	# Pass one: remove any lock that no live downloader is still holding.

	for lock_name in os.listdir(locks_dir):

		# A nonblocking exclusive flock succeeds only on abandoned locks;
		# flock raises IOError when the downloader still holds it.

		try:
			candidate=open(locks_dir+lock_name)
			fcntl.flock(candidate,fcntl.LOCK_EX|fcntl.LOCK_NB)
			candidate.close()
			ac_log.info("Removing dead download lock "+lock_name)
			os.remove(locks_dir+lock_name)
		except IOError:
			ac_log.debug("Download "+lock_name+" still in progress.")

	# Pass two: every live download should have a matching lock. Anything in
	# the active dir without one is dead - we just removed the stale locks.

	for dl_name in os.listdir(active_dir):
		if not os.path.isfile(locks_dir+dl_name):
			ac_log.info("Removing dead download directory "+dl_name)
			ac_remove(active_dir+dl_name)

	os.utime(cleanup_log,None)

		
#########################
# ac_remove - This function removes a target cache directory. It does not return anything, there's
# many different scenarios under which this needs to be done, so a subroutine was made for it.
#########################

def ac_remove(cache_dir):

	# Remove every file in a target cache directory, then the directory itself.
	# Best-effort: failures are logged rather than raised, since many callers
	# invoke this on directories another process may already have cleaned up.

	ac_log.debug("Going to destroy: "+cache_dir)

	try:
		for each_file in os.listdir(cache_dir):

			# Keep going on individual failures so we still try the rmdir at the end.

			try:
				os.remove(cache_dir+"/"+each_file)

			except OSError:
				ac_log.warning("The file: "+each_file+" from "+cache_dir+" vanished or could not be removed!")

		os.rmdir(cache_dir)

	except OSError:
		# A stray debugging 'raise' used to sit here, making this warning
		# unreachable and contradicting the best-effort contract above.
		ac_log.warning("The cache folder: "+cache_dir+" vanished or could not be removed! ")

########################
# ac_cachesize - Updates the size of the cache or returns its current size.
# looks a lot like ac_queue, doesn't it?
#####################

def ac_cachesize(size=None):

	# Return the current cache size (size=None), or adjust the persisted size
	# by *size* bytes and return the new total. Backed by the flock'd
	# <state_dir>/cachesize file so concurrent processes stay consistent.

	try:

		# Open and lock the cache size file; 'r+b' reads/writes without truncating.

		cache_sizefile=open(ac_settings["state_dir"]+"cachesize",'r+b')
		fcntl.flock(cache_sizefile,fcntl.LOCK_EX)
		cache_sizefile.seek(0)

		# Check the truthiness of our cache size file, if out of date, update!

		if ac_fileage(ac_settings["state_dir"]+"cachesize") > ac_settings["trust_long"]:

			cPickle.dump(ac_du(ac_settings["cache_dir"]),cache_sizefile,cPickle.HIGHEST_PROTOCOL)
			cache_sizefile.seek(0)
			ac_log.debug("Cache size file too old, updated.")

		# If not told to do something to the cachesize, return the current value.

		if size is None:
			size=int(cPickle.load(cache_sizefile))
			cache_sizefile.close()
			ac_log.debug("Cache size is now: "+str(size))
			return size

		# Otherwise, adjust the size by the amount given.

		else:
			ac_log.debug("Adjusting cache size file by: "+str(size))

			# Make sure we read this as an INT.
			size=int(cPickle.load(cache_sizefile))+size
			cache_sizefile.seek(0)
			cPickle.dump(size,cache_sizefile,cPickle.HIGHEST_PROTOCOL)
			cache_sizefile.close()
			return size

	except (TypeError, EOFError, IOError):
		# The file contained invalid data - or could not be opened at all.
		ac_log.error("Had some serious trouble reading cachesize file. Resetting.")

		# Do a stateinit, just in case.
		ac_stateinit()

		# Reopen from scratch: if the original open() itself failed,
		# cache_sizefile was never bound, and reusing it here raised NameError.
		# 'wb' also truncates any corrupt leftovers.
		size=ac_du(ac_settings["cache_dir"])
		cache_sizefile=open(ac_settings["state_dir"]+"cachesize",'wb')
		cPickle.dump(size,cache_sizefile,cPickle.HIGHEST_PROTOCOL)
		cache_sizefile.close()
		ac_log.debug("Cache size is now: "+str(size))
		return size

###################
# ac_fileage - Return age of a file in seconds.
# Subtract time.mktime(time.localtime()) from the stat.st_mtime to get the difference.
###################

def ac_fileage(filename):
	# Return the age of *filename* in whole seconds: the current local time
	# (truncated to seconds via mktime/localtime) minus the file's mtime.
	now = time.mktime(time.localtime())
	return int(now - os.stat(filename).st_mtime)
