

##jython patch
#import pwd, grp
#def key_error(*args, **kw): raise KeyError()
#pwd.getpwuid = key_error
#pwd.getgrgid = key_error
#grp.getgrgid = key_error


from urlparse import urlsplit
import httplib
import os
import sys
import threading

from pyftpdlib import ftpserver

import vkontakte
from vkontakte import Vkontakte
from string import printable




## {{{ http://code.activestate.com/recipes/577360/ (r1)

def concurrent_map(func, data):
    """
    Similar to the builtin function map(). But spawn a thread for each argument
    and apply `func` concurrently.

    Note: unlike map(), we cannot take an iterable argument. `data` should be an
    indexable sequence.
    """
    count = len(data)
    slots = [None] * count

    # each worker writes its result into its own slot, so output order
    # matches input order regardless of thread scheduling
    def worker(index):
        slots[index] = func(data[index])

    workers = [threading.Thread(target=worker, args=(k,)) for k in range(count)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()

    return slots
## end of http://code.activestate.com/recipes/577360/ }}}






class AdvancedVkontakte(Vkontakte):
	"""Vkontakte client that also fetches the file size of every result URL,
	using one thread per URL (via concurrent_map)."""

	def __init__(self, pool_size=100):
		# pool_size is kept for interface compatibility; sizes are fetched
		# with one thread per URL, no pooling is currently done.
		Vkontakte.__init__(self)

	def search(self, query, page_nr=0):
		"""Search one result page and return pairs of (entry, size-in-bytes)."""
		entries = list(Vkontakte.search(self, query, page_nr))
		urls = [entry['url'] for entry in entries]
		return zip(entries, concurrent_map(self._getsize, urls))

	def complete_search(self, query):
		"""Search `query` on pages 0..9 concurrently and yield, flattened,
		each entry dict followed by its size (same alternation as before)."""
		# bugfix: this used to call `concurent_map` (typo -> NameError) and
		# mapped self.search over range(10), i.e. it passed the page number
		# as the query and never used `query` at all.
		pages = concurrent_map(lambda page: self.search(query, page), range(10))
		for page_results in pages:
			for pair in page_results:
				for item in pair:
					yield item

	def _getsize(self, url):
		# HEAD request: get content-length without downloading the body
		u = urlsplit(url)
		conn = httplib.HTTPConnection(u.netloc)
		conn.request('HEAD', u.path, headers=vkontakte.HEADERS)
		resp_obj = conn.getresponse()
		return long(dict(resp_obj.getheaders()).get('content-length', 0))

	def download(self, url):
		"""GET `url` and return the raw httplib response object (file-like)."""
		u = urlsplit(url)
		conn = httplib.HTTPConnection(u.netloc)
		conn.request('GET', u.path, headers=vkontakte.HEADERS)
		return conn.getresponse()


class HTTPConnection2File:
	"""Wrap an httplib response so it can be served like a local file object."""

	def __init__(self, obj, filename):
		# make it more file obj like
		self.obj = obj
		# bugfix: report the wrapper as open and provide close() below, so the
		# caller's "if not f.closed: f.close()" cleanup actually releases the
		# HTTP response instead of leaking it (old code faked closed=True and
		# had no close() method at all).
		self.closed = False
		# strip the leading path component so .name looks like a plain file name
		self.name = filename.split(os.sep, 1)[-1]

	def seek(self, arg):
		# the download is streamed sequentially; seeking is silently ignored
		pass

	def read(self, bytes):
		return self.obj.read(bytes)

	def close(self):
		# close the underlying HTTP response exactly once
		if not self.closed:
			self.closed = True
			self.obj.close()
	

def abstracted_fs_factory(avk, on_block, on_unblock):
	class MyAbstractedFS(ftpserver.AbstractedFS):
	
		def _save_to_file(self, filename, url, size):
			f = open(filename, 'w')
			f.write(url + ' ' + str(size))
			f.close()
	
		def _load_from_file(self, filename):
			f = open(filename, 'r')
			read = f.read()
			f.close()
			return read.split(' ')
		
	
		def open(self, filename, mode):
			if not filename.endswith('.mp3'):
				return ftpserver.AbstractedFS.open(self, filename, mode)
			url, size = self._load_from_file(filename)
			obj = avk.download(url)
			return HTTPConnection2File(obj, filename)
			
		def listdir(self, path):
		
			ftp_path = self.fs2ftp(path)
			
			if not ftp_path == '/' and not self.isdir(os.path.join(path, 'more')):
			
				query = ftp_path.split('/')[1]
				page = ftp_path.count('/' + 'more')
			
				if page < 10:
					self.mkdir(os.path.join(path, 'more'))
				
				on_block()
				print '\tfetching "' + query + '", page ' + str(page)
				results = avk.search(query, page)
				on_unblock()
				
				m3u = open(os.path.join(path, 'all.m3u'), 'w')
				m3u.write("#EXTM3U\n")
				
				for entry, size in results:
				
					filename = entry['artist'] + ' - ' + entry['title'] + '.mp3'
			
					for a in '<>:"/\|?*':
						#filename = filename.replace(a, 'X')
						continue
					
					if len(filter(lambda x: x in printable, filename)) != len(filename):
						continue

					## i am not 100% sure we need this
					#try:
					#	unicode(filename, 'utf-8')
					#except UnicodeEncodeError:
					#	print 'ignoring', filename
					#	continue
						
						
					try:
						self._save_to_file(os.path.join(path, filename), entry['url'], size)
					except IOError, exc: #filename to long?
						print '\terror writing:', exc
					else:
						minutes, seconds = entry['duration'].split(':')
						seconds = int(seconds) + int(minutes)*60
						m3u.write('#EXTINF:'+str(seconds)+','+entry['artist'] + ' - ' + entry['title'] + '\n')
						m3u.write(entry['url'] + '\n')
				
				m3u.close()
		
			return ftpserver.AbstractedFS.listdir(self, path)
	
	
		def _stat(self, stat_type, path):
			s = getattr(ftpserver.AbstractedFS, stat_type)(self, path)
			if not path.endswith('.mp3'):
				return s
			if self.isfile(path):
				class Empty: pass
				obj = Empty()
				for attr in dir(s):
					if attr.startswith('st_'):
						setattr(obj, attr, getattr(s, attr))
				obj.st_size = self._load_from_file(path)[1]
				return obj
			return s

		def lstat(self, path):
			return self._stat('lstat', path)

		def stat(self, path):
			return self._stat('stat', path)
	
		def getsize(self, path):
			if self.isfile(path):
				return self._load_from_file(path)[1]
			return ftpserver.AbstractedFS.getsize(self, path)
	
	return MyAbstractedFS


def get_ftpd(ftp_dir, avk, on_block, on_unblock, address=("127.0.0.1", 2121)):
	"""Configure and return an anonymous FTP server rooted at ftp_dir."""
	# small buffer because otherwise the .read over network blocks the mainloop for too long
	ftpserver.FileProducer.buffer_size = 128
	handler = ftpserver.FTPHandler
	handler.abstracted_fs = abstracted_fs_factory(avk, on_block, on_unblock)
	auth = ftpserver.DummyAuthorizer()
	auth.add_anonymous(ftp_dir, perm="elrdmfa")
	handler.authorizer = auth
	return ftpserver.FTPServer(address, handler)














