#!/usr/bin/env python
import readline, urllib, re, base64, pprint, asyncore, sys, threading, random, os
import file, config, server, log, toc
from serialize import pack, unpack
import pdb

# NOTE(review): appears unused by live code — referenced only by the
# commented-out query_list() below; kept for compatibility.
universal_list = {}

class async_request(asyncore.file_dispatcher):
	"""The asynchronous request class that will multiplex requests as per the asyncore python module.

	Opens url+params immediately (the open itself is blocking) and hands
	the reply file object to asyncore; once the peer closes the
	connection, the accumulated body is delivered to `callback` as a
	single dict.
	"""

	def __init__(self, url, params, callback, sockmap=None, name=None, extra=None):
		# install our opener so the request carries this peer's identity
		urllib._urlopener = PomaresOpener()
		self.urlobj = urllib.urlopen(url+params)
		asyncore.file_dispatcher.__init__(self, self.urlobj, map=sockmap)
		self.buffer = ''  # accumulates the reply body across reads
		self.url = url
		self.params = params
		self.name = name  # optional filename this request relates to
		self.callback = callback  # invoked once, with the complete reply
		self.extra = extra  # opaque payload handed back to the callback

	def handle_close(self):
		self.close()

	#def handle_error(self):
		#add code to handle errors

	def handle_read(self):
		# drain in 1K slices; an empty read means EOF, i.e. the peer
		# finished sending, so fire the callback with the full reply
		_buffer = self.read(1024)
		if len(_buffer)  > 0:
			self.buffer += _buffer
		else:
			self.handle_close()
			log.log('done %s%s' % (self.url, self.params))
			pomares_id = None

			# the replying peer identifies itself via this header
			if self.urlobj.info().has_key('Pomares-ID'):
				pomares_id = self.urlobj.info()['Pomares-ID']

			self.callback({'buffer':self.buffer, 'url':self.url, 'url_params':self.params, 
					'filename':self.name, 'pomares_id':pomares_id,
					'status':self.urlobj.code, 'extra':self.extra,
			})

	def writable(self):
		# read-only dispatcher: we never have outgoing data
		return False

	def readable(self):
		return True

def parse_url(url):
	"""Split url into a (base_url, port, pomar) tuple, or None when invalid.

	base_url keeps the scheme and host, port is an int (defaults to 80
	when the url carries no explicit port) and pomar is the path
	component ('' when absent).
	"""
	if url is None:
		return None

	r = re.compile(r'^(http[s]?://[a-zA-Z0-9_\-.]+)(:[0-9]+)*(/?[a-zA-Z0-9_\-/]*)?$')
	if r.match(url):
		base_url, port, pomar = r.findall(url)[0]
		# bug fix: the old code set port to the int 80 and then sliced it
		# with port[1:], raising TypeError for every url without an
		# explicit port; only strip the leading ':' when one was captured
		if port == '':
			port = 80
		else:
			port = int(port[1:])

		return (base_url, port, pomar)

	return None

def update_list(params):
	"""(Callback) Updates the TOC when given a LIST reply buffer.

	For every (pomares_id, list_version) pair in the reply: when the
	peer's url is already known, request a PLIST delta for the version
	range we are missing; otherwise request a RESOLV so we can learn the
	peer's url first.
	"""

	if params['status'] != 200:
		log.log('got http status %d, not proceeding...' % params['status'])
		return params['status']

	reply = unpack(params['buffer'])
	url = params['url']
	pomares_id = params['pomares_id']
	pomar = parse_url(url)[2]

	if not reply.has_key('LIST'):
		return None

	tocs = toc.TOC()
	resolv = toc.Resolver()
	worker = RequestWorker()

	"update url timestamp"
	resolv.update(pomares_id, '%s:%d' % parse_url(url)[:2])

	for recvd_pomares_id, recvd_list_version in reply['LIST']:
		recvd_url = resolv.resolve(recvd_pomares_id)
		recvd_parsed_url = parse_url(recvd_url)

		if recvd_url:
			if recvd_parsed_url:
				# last list version we already hold for this peer/pomar
				recvd_from_version = tocs.lastVersionFor(recvd_pomares_id, pomar=pomar)
			else:
				log.log('recvd_url not valid: %s' % recvd_url)
				continue

			
			# never seen this peer's list before
			if recvd_from_version is None:
				recvd_from_version = 0 

			# normalize to (older, newer) before comparing
			recvd_versions = (recvd_from_version, recvd_list_version)
			recvd_versions = (min(recvd_versions), max(recvd_versions))

			if recvd_versions[0] < recvd_versions[1]:
				# the peer has entries we are missing: fetch the delta
				new_request = '?request=PLIST&from=%d&to=%d' % recvd_versions
				worker.push(('%s%s' %(recvd_url,pomar), new_request, update_plist))
		else:
			# unknown peer: mark it pending and ask the sender to resolve it
			resolv.update(recvd_pomares_id, None)
			new_request = '?request=RESOLV&pomares_id=%s' % recvd_pomares_id
			worker.push((url, new_request, update_resolv))


	worker.start()

def update_plist(params):
	"""(Callback) Updates the TOC when given a PLIST reply buffer.

	Each PLIST entry describes one file published by some peer.  Entries
	whose peer id cannot be resolved yet trigger a RESOLV request, but
	are still recorded in the TOC (matching the original behavior).
	"""

	if params['status'] != 200:
		log.log('got http status %d, not proceeding...' % params['status'])
		return params['status']

	reply = unpack(params['buffer'])
	url = params['url']
	pomares_id = params['pomares_id']

	if not reply.has_key('PLIST'):
		return None

	tocs = toc.TOC()
	resolv = toc.Resolver()
	worker = RequestWorker()

	# update url timestamp
	resolv.update(pomares_id, '%s:%d' % parse_url(url)[:2])

	# perf: parse_url(url) is loop-invariant; it was previously re-parsed
	# on every PLIST entry
	base_url, port, pomar = parse_url(url)

	for recvd_filename, recvd_size, recvd_hash, recvd_pomares_id, recvd_path, recvd_listversion in reply['PLIST']:
		recvd_url = resolv.resolve(recvd_pomares_id)

		if recvd_url is None:
			# unknown peer: mark it pending and ask the sender to resolve it
			resolv.update(recvd_pomares_id, None)
			new_request = '?request=RESOLV&pomares_id=%s' % recvd_pomares_id
			worker.push((url, new_request, update_resolv))


		tocs.update({'filename':recvd_filename, 'size':recvd_size, 'hash':recvd_hash, 'uuid':recvd_pomares_id,
			'dirname':recvd_path, 'listversion':recvd_listversion}, 
		pomar) 

	worker.start()

def update_resolv(params):
	"""(Callback) Updates the TOC when given a RESOLV reply buffer.

	Refreshes the timestamp of the answering peer, then stores the
	id -> url mapping the peer resolved for us.
	"""
	status = params['status']
	if status != 200:
		log.log('got http status %d, not proceeding...' % status)
		return status

	reply = unpack(params['buffer'])

	if not reply.has_key('RESOLV'):
		return None

	resolver = toc.Resolver()

	# update url timestamp for the peer that answered
	base, port = parse_url(params['url'])[:2]
	resolver.update(params['pomares_id'], '%s:%d' % (base, port))

	# record the newly-resolved peer address
	resolved_id, resolved_url = reply['RESOLV']
	resolver.update(resolved_id, resolved_url)



def update_file(params):
	"""(Callback) Writes a chunk to its file when given a FILE reply buffer.

	params['extra'] carries (worker, current_chunk): the FileWorker that
	issued the request and the chunk number it asked for.  A verified
	chunk is written at its offset and retired from worker.chunks; on any
	mismatch or failure the chunk is requeued (wip=False) and the peer is
	penalized in the TOC.
	"""

	if params['status'] != 200:
		log.log('got http status %d, not proceeding...' % params['status'])
		return

	reply = unpack(params['buffer'])
	url = params['url']
	pomares_id = params['pomares_id']
	worker, current_chunk = params['extra']


	if not reply.has_key('FILE'):
		return None

	resolv = toc.Resolver()
	tocs = toc.TOC()

	# update url timestamp
	resolv.update(pomares_id, '%s:%d' % parse_url(url)[:2])

	# bug fix: chunk_number was assigned only inside the try block, so a
	# failure while unpacking reply['FILE'] made the except handler itself
	# raise NameError; default to the chunk we actually requested
	chunk_number = current_chunk
	try:
		# renamed locals: `buffer` and `hash` shadowed builtins
		chunk_number, chunk_data, chunk_hash = reply['FILE']
		chunk_data = base64.b64decode(chunk_data)

		chunk_size, wip = worker.chunks[chunk_number]
		if current_chunk == chunk_number and file.hash_buffer(chunk_data) == chunk_hash:
			# verified: write at the chunk's byte offset and retire it
			worker.file_obj.update(chunk_data, chunk_number*config.chunk_size)

			del worker.chunks[chunk_number] 
			tocs.peerSuccess(pomares_id, worker.hash)

		else:
			# wrong chunk or corrupted payload: requeue and penalize
			worker.chunks[chunk_number] = (chunk_size, False)
			tocs.peerSuccess(pomares_id, worker.hash, False)

	except Exception:
		# narrowed from a bare except; requeue the chunk so another
		# attempt can pick it up, and penalize the peer
		current_size, current_wip = worker.chunks[chunk_number]
		worker.chunks[chunk_number] = (current_size, False)
		tocs.peerSuccess(pomares_id, worker.hash, False)
		log.log('could not write FILE chunk %d for %s' % (chunk_number, worker.file_obj.path))

	
def chunk_list(file_size):
	"""Calculate number of possible chunks.

	Returns (count, sizes) where sizes holds the byte size of every
	chunk: full config.chunk_size chunks plus one trailing partial chunk
	when file_size is not an exact multiple.
	"""
	# renamed from `list`, which shadowed the builtin; // makes the floor
	# division the original relied on explicit
	chunks = [config.chunk_size]*(file_size//config.chunk_size)
	remainder = file_size%config.chunk_size
	if remainder > 0:
		chunks.append(remainder)

	return len(chunks), chunks


#def request_obj(type, url, args=None):
#	"""Returns a request tuple with the appropriate url and generated parameters."""
#	params='request=%s' % type
#
#	if args:
#		params='request=%s%s' % (type,  ''.join(['&args=%s'% a for a in args]) )
#		
#	return (url, params)

def refresh_list(*args):
	"""Sends a request and refreshes a very rudimentary list of files."""
	worker = RequestWorker()
	# queue one LIST request per configured peer, then let the worker run
	for peer in config.hosts:
		worker.push((peer, '?request=LIST', update_list))

	worker.start()

def list_files(*args):
	"""Print a list of files."""
	t = toc.TOC()
	t.cursor.execute("""select filename, size, hash, uuid, dirname, listversion, pomar 
			from toc group by filename having max(listversion)"""
	)
	for entry in t.cursor.fetchall():
		print entry 
	

def add_dir(path, toc_obj, pomar='/'):
	"""Adds contents of directory path to the toc."""

	# new entries get a version one past the last one we published
	previous_version = toc_obj.lastVersionFor(config.my_uuid, pomar)
	if previous_version is None:
		previous_version = 0
	version = previous_version + 1

	# file.list() maps content hash -> (pathname, size)
	for digest, info in file.list(path).items():
		pathname = info[0]
		toc_obj.update({'filename':os.path.basename(pathname),
			'size':info[1],
			'hash':digest,
			'uuid':config.my_uuid,
			'dirname':os.path.dirname(pathname),
			'listversion':version
			},
		pomar)
	
def pomar_add(args):
	"""Creates a new pomar and adds files from a pathname"""
	args = args[0].split(' ')
	print 'this might take a while depeding on your cpu and file sizes'
	tocs = toc.TOC()
	add_dir(args[1], tocs, args[0])
	tocs.updatePomarPath(args[0], args[1])

def get_files(args):
	"""Sends a FILE request where args is the number of a file in the list.
	Starts a FileWorker thread for this."""
	#print args
	tocs = toc.TOC()
	filehash = args[0]
	results = tocs.whoHas(filehash)

	uuid, filename, filesize, dirname, pomar = results[0]
	download_dir = os.path.join(config.download_path, dirname)
	relpath = os.path.relpath(download_dir, config.download_path) 
	if re.compile('^\.\.').match(relpath) or os.path.isfile(download_dir):
		print 'invalid final download dir: %s' % download_dir
		return None

	try:
		os.mkdir(download_dir)
	except:
		log.log('final download dir already exists: %s' % download_dir)


	download_filepath = os.path.join(download_dir, filename)
	relpath = os.path.relpath(download_filepath, config.download_path) 
	
	if re.compile('^\.\.').match(relpath) or os.path.isfile(download_filepath):
		print 'invalid or final file already exists in download path: %s' % download_filepath
		return None
	
	f = FileWorker((download_filepath, filesize, filehash))
	f.start()
		
	

def status(*args):
	"""(Command stub) Placeholder for the 'status' shell command; not implemented yet."""
	pass

def byebye(*args):
	"""(Command stub) Placeholder for the 'quit' shell command; not implemented yet."""
	pass

def help(*args):
	"""(Command) Print the names of all available shell commands."""
	print allow_cmds.keys()



# Shell command table: command name -> (argument regex, handler).
# parse_input() matches the regex against the text after the command word
# and passes findall()'s result list to the handler.
allow_cmds = {'list':(' *', list_files), 'refresh':(' *', refresh_list), 
				'get':('[a-zA-Z0-9\.\-_]+', get_files), 'status':(' *',status), 
				'quit':(' *',byebye), 'help':(' *',help), 'pomar_add':('.*', pomar_add),}

def parse_input(input):
	"""Parses the input string and evaluates against the allow_cmds dictionary 
	by executing existing key/value matches.

	The first whitespace-separated token selects the command; the rest of
	the line is matched against the command's argument regex and the
	findall() result is handed to the handler.
	"""
	req_args = re.compile('\ *').split(input)
	if allow_cmds.has_key(req_args[0]):
		fmt, func = allow_cmds[req_args[0]]
		parsed_args = re.compile(fmt).findall(' '.join(req_args[1:]))
		# leftover debug prints of parsed_args/req_args removed: they
		# cluttered the interactive shell's output
		if parsed_args:
			func(parsed_args)

#def query_list(file, chunk_number):
#	"""Return a request tuple with the url, requested file and chunk."""
#	results = universal_list[(file)]
#
#	return [ request_obj('FILE', host, [results[host], chunk_number]) for host in results.keys()]
		

class FileWorker(threading.Thread):
	"""A Thread that will work on all download requests for a filename.

	The waiting queue holds chunk numbers; each pop() turns the next
	chunk into an async FILE request against a peer that has the file,
	and update_file() (via 'extra') writes the result back through this
	worker.
	"""

	def __init__(self, file_info):
		threading.Thread.__init__(self)
		self.name , self.size, self.hash = file_info
		self.file_obj = file.File(self.name, self.size, self.hash)
		self.map = {} #current asyncore map being looped
		self.queue = [] #waiting queue
		# chunk_number -> (chunk_size, work_in_progress flag)
		self.chunks = dict( [ (chunk_number,(chunk_size, False)) for chunk_number, 
					chunk_size in enumerate(chunk_list(self.size)[1]) ] )
		self.tocs = None
		self.resolv = None

	def push(self, request):
		"""Push a chunk number onto the waiting queue."""
		self.queue.append(request)

	def pop(self):
		"""Pop the next chunk from the waiting queue and issue an async
		FILE request for it; returns None when the queue is empty."""
		try:
			current_chunk = self.queue.pop(0)
			#put current chunk as wip...
			current_size, current_wip = self.chunks[current_chunk]
			# pick a peer that has this file and resolve it to a url
			uuid, pomar, _ = self.tocs.peerFor(self.hash)
			url = self.resolv.resolve(uuid)

			self.chunks[current_chunk] = (current_size, True)
			url = '%s%s' % (url, pomar)
			params = '?request=FILE&hash=%s&chunk=%d' % (self.hash, current_chunk)
			
			# update_file() gets (self, current_chunk) back via 'extra'
			request = async_request(url, params, update_file, sockmap=self.map, name=self.name, extra=(self, current_chunk)) 
			return request
		except IndexError:
			return None


	def run(self):
		print 'starting thread %s' % self.name
			
		# TOC/Resolver are created here (not __init__) so they live in
		# this thread
		self.tocs = toc.TOC()
		self.resolv = toc.Resolver()

		# queue every chunk of the file for download
		for chunk in self.chunks:
			self.push(chunk)

		while self.pop() is not None:
			self.pop() #popping twice for now...
			log.log('working')
			asyncore.loop(map=self.map)

		log.log('thread %s is dead' % self.name)
		print 'finishing thread %s' % self.name
			
class RequestWorker(threading.Thread):
	"""A Thread that will work on all other requests.

	Requests are (url, params, callback) tuples; run() pops them into an
	asyncore map and loops until the queue is drained.
	"""
	def __init__(self):
		threading.Thread.__init__(self)
		self.map = {} #current asyncore map being looped
		self.queue = [] #waiting queue

	def push(self, request):
		"""Push a request made of a (url, params, callback) tuple."""
		self.queue.append(request)

	def pop(self):
		"""Pop from the waiting queue and register an async request for it.

		Returns the request tuple, or None when the queue is empty or the
		request could not be opened.
		"""
		try:
			request = self.queue.pop(0)
		except IndexError:
			# queue drained
			return None

		url, params, callback = request
		try:
			#create request here. it might need to be reachable
			async_request(url, params, callback, sockmap=self.map)
		except Exception:
			# bug fix: the old bare except silently conflated connection
			# failures with an empty queue; log the failure instead
			log.log('could not open request %s%s' % (url, params))
			return None
		return request

	def run(self):
		log.log('starting request thread')
		#need to write a pop() managing method that roundrobins throught different peers
		while self.pop() is not None:
			self.pop()
			asyncore.loop(map=self.map)

	def __del__(self):
		log.log('request thread dying...')


class PomaresOpener(urllib.FancyURLopener):
	"""URL opener identifying this peer to others.

	security fix: the Pomares-ID used to be smuggled into the User-Agent
	value via an embedded CRLF (header injection); send it as a proper
	header with addheader() instead.  Receivers still see the same
	'Pomares-ID' header.
	"""
	version = 'Pomares/SVN'

	def __init__(self, *args, **kwargs):
		urllib.FancyURLopener.__init__(self, *args, **kwargs)
		self.addheader('Pomares-ID', config.my_uuid)


if __name__ == '__main__':
	# start the local peer server thread, prime the TOC from known
	# peers, then drop into the interactive command shell
	s = server.Server()
	s.start()

	refresh_list()

	while True:
		input = raw_input('--> ')
		parse_input(input)


