# NOTE(review): signature only — body is not visible in this view, so behavior is
# undocumented here. Name suggests a current-timestamp helper; confirm in full source.
# Naming is camelCase rather than PEP 8 snake_case, consistent with the rest of the file.
def currentTime():

# Configuration container for a crawl run (wave size, timing limits, etc.).
# NOTE(review): all method bodies are elided in this view; comments reflect only what
# the signatures show. The __init__ signature on the next line is itself truncated
# (ends with a trailing comma) — consult the full source for the remaining parameters.
class Parameters:
	def __init__( self, wave_size=100, time_inc=1, time_limit=10,
	def __str__(self):
	# NOTE(review): presumably persists this Parameters object to file_name — confirm.
	def save(self, file_name ):
	# NOTE(review): a returnSelf() accessor is unusual in Python; check whether callers
	# need it (e.g. for proxying) before removing.
	def returnSelf(self):

# Single-process web crawler: seeds a wave of URLs, chooses/processes them, and saves
# output, repeating until done. Base class for MultiCrawler and ServerCrawler below.
# NOTE(review): all method bodies are elided in this view — comments below describe
# only what the signatures themselves show.
class SoloCrawler:
	def __init__(self):
	# WARNING(review): mutable default arguments (master_list={}, seed_list=[]) are
	# evaluated once and shared across calls in Python — prefer None sentinels.
	# Verify against the full bodies before changing (callers may rely on identity).
	def runUntilDone(self, path, parameters, master_list={}, seed_list=[], current_wave=None, overwrite_existing_files=False ):
	def saveState(self, filename):
	# WARNING(review): same mutable-default issue as runUntilDone above.
	def _initCrawlState(self, parameters, master_list={}, seed_list=[], current_wave=None ):
	def _initOutputFiles( self, path, parameters, seed_list, overwrite_existing_files=False ):
	# NOTE(review): ServerCrawler overrides this with an extra `depth` parameter —
	# see the signature-mismatch note on that class.
	def _checkIfDone(self):
	def _countCompletedUrls(self):
	def _chooseUrls(self):
	# Extension point: MultiCrawler and ClientCrawler override _processUrls.
	def _processUrls(self, urls):
	def _processResults(self, results):
	def _saveOutput(self, path, results ):

# Parallel variant of SoloCrawler: overrides only URL processing, presumably to fan
# work out across a worker pool of pool_size processes/threads (pool_size=None likely
# means "library default", e.g. CPU count) — confirm against the full body.
# NOTE(review): method bodies are elided in this view.
class MultiCrawler( SoloCrawler ):
	def __init__(self, pool_size=None ):
	def _processUrls(self, urls):

# Server side of a distributed crawl: exposes work/result queues to remote
# ClientCrawler processes via multiprocessing.managers (see IS_Manager below),
# authenticated with password pw on the given port.
# NOTE(review): method bodies are elided in this view; comments reflect signatures only.
class ServerCrawler( SoloCrawler ):
	def __init__(self, port, pw, batch_size=20 ):
	# WARNING(review): mutable default arguments ({} / []) shared across calls — same
	# issue as in SoloCrawler; prefer None sentinels after checking the full bodies.
	def _initCrawlState(self, parameters, master_list={}, seed_list=[], current_wave=None ):
#	def _initCrawlState(self, parameters, server_info, master_list={}, seed_list=[], current_wave=None, batch_size=20 ):
		# Local BaseManager subclass; presumably the queue getters below are registered
		# on it so clients can fetch proxies remotely — confirm in full source.
		class IS_Manager(managers.BaseManager): pass
	# NOTE(review): these look like Java-style getters, but here they plausibly exist
	# to give IS_Manager named callables to register for remote access — do not
	# replace with properties without checking that registration.
	def get_clients_queue(self): return self._clients_queue
	def get_process_queue(self): return self._process_queue
	def get_results_queue(self): return self._results_queue
	def get_activity_queue(self): return self._activity_queue
	def get_terminate_queue(self): return self._terminate_queue
#	def get_process_url_function(self): return self.parameters.process_url_function
	def get_parameters(self): return self.parameters
#	def runUntilDone(self, path, parameters, master_list={}, seed_list=[], current_wave=None, overwrite_existing_files=False ):
	def runUntilDone(self, path, parameters, master_list={}, seed_list=[], current_wave=None, overwrite_existing_files=False ):
	# WARNING(review): adds a required `depth` parameter, breaking the base-class
	# signature _checkIfDone(self) — any polymorphic call through SoloCrawler will
	# raise TypeError. Consider depth=None with a default, after checking callers.
	def _checkIfDone(self, depth):
	def _print_queue_status(self, header=False):
	def _shutDown(self):
	def _chooseUrls(self):
	def _getResults(self):

# Client side of a distributed crawl: connects to a ServerCrawler at ip:port with
# password pw, pulls URL batches, and processes them locally (pool_size workers).
# NOTE(review): method bodies are elided in this view. Unlike the other classes this
# one is written `class ClientCrawler()` with empty parentheses — harmless but
# inconsistent; it also does NOT inherit from SoloCrawler despite sharing method names.
class ClientCrawler():
	def __init__(self, ip, port, pw, pool_size=None):
		# Mirror of the server-side manager subclass; presumably used to connect()
		# and obtain the registered queue proxies — confirm in full source.
		class IS_Manager(managers.BaseManager): pass
	def runUntilDone(self):
	def _processUrls(self, urls):

# NOTE(review): bodies for all of the following module-level helpers are elided in
# this view; comments reflect only what the signatures show.
# Presumably extracts outgoing links ("edges") from a page's text — confirm.
def findEdges( url, text ):
# Presumably fetches the content of url — confirm.
def downloadUrl( url ):
# WARNING(review): tuple-unpacking parameters `( (url, params) )` are Python 2 only
# (removed in Python 3 by PEP 3113). This file cannot run under Python 3 as written;
# porting requires unpacking inside the body instead.
def defaultProcessUrl( (url, params) ):
# No-op URL processor — same Python-2-only tuple parameter as above.
def null_processor( (url, params) ):
# NOTE(review): takes `self` yet is defined at module level — likely assigned onto an
# object (e.g. Parameters) as a default decision hook; verify before renaming.
def default_decider(self):
# Thread wrapper that runs func(url, params); the name suggests it is used to enforce
# a time limit on URL processing (see _processUrl's time_limit below) — confirm, as
# Python threads cannot actually be forcibly interrupted; typically the caller just
# joins with a timeout and abandons the thread.
# NOTE(review): method bodies are elided in this view.
class InterruptableThread(threading.Thread):
	def __init__(self, url, params, func):
	def run(self):
# Presumably the per-worker entry point: runs func on (url, params) bounded by
# time_limit, likely via InterruptableThread above — confirm in full source.
# WARNING(review): tuple-unpacking parameter is Python 2 only (PEP 3113); the single
# tuple argument is the usual Py2 idiom for multiprocessing.Pool.map compatibility.
def _processUrl( (time_limit, url, params, func) ):
