#!/usr/bin/python
# Tested with python 2.4, 2.5 and 2.6

import urllib, urllib2, urlparse, sys, os, socket, re, cookielib, time, math
import imp, glob, gzip, zipfile, zlib, StringIO, threading
from httplib import HTTPException
from sgmllib import SGMLParser

__all__ = ["config", "plugins", "get_with_retry", "is_valid_proxy_url",
	"get_files_from_link", "yapget_version", "get_links", "find_file", "PersistentConfig"]

def get_url_from_line(line):
	"""Extract the href target from an HTML line (lowercased, quotes normalized)"""
	normalized = line.lower().replace('"', "'")
	return normalized.split("href='")[-1].split("'")[0]

def get_freespace(folder=None):
	"""Get free space, in bytes, on the drive holding folder (defaults to cwd)"""
	if not folder:
		folder = os.path.realpath(os.path.curdir)
	if 'statvfs' in dir(os):
		# POSIX: block size * blocks available to an unprivileged user
		s = os.statvfs(folder)
		return s.f_bsize * s.f_bavail
	else:
		# Windows has no statvfs; fall back to the win32api extension
		import win32api
		drive = os.path.splitdrive(os.path.realpath(folder))[0]
		(sectors_per_cluster, bytes_per_sector, total_free_clusters, total_clusters) = win32api.GetDiskFreeSpace(drive)
		return sectors_per_cluster * bytes_per_sector * total_free_clusters

def get_title(buffer):
	"""Get title string from buffer (lowercased), or None if no <title> tag"""
	for raw_line in buffer.splitlines():
		lowered = raw_line.lower()
		if '<title>' in lowered:
			# first match wins
			return lowered.split('<title>')[1].split('</title>')[0]
	return None

def get_js_var(line):
	"""Get JavaScript variable value from a 'var name = value;' line"""
	assignment = line.split('var')[1].split(';')[0]
	return assignment.split('=')[-1].strip()

def is_valid_proxy_url(url):
	"""Check if a URL describes a valid proxy URL in the format of http_proxy variable"""
	parts = urlparse.urlparse(url)
	if parts[0] != 'http':
		return False
	if not parts[1]:
		return False
	# the network location must still contain a host after any user:pass@ prefix
	return bool(urllib.splituser(parts[1])[1])

def find_file(fname):
	"""Find a file in a number of default locations.

	Looks next to the executable first, then in the user's home directory,
	and returns the absolute path of the first accessible match.  When no
	match exists, returns where the file SHOULD be created: next to the
	executable on Windows, in the home directory elsewhere.
	"""
	_dirs = [
		os.path.split(sys.argv[0])[0], # where our executable is
		os.path.expanduser('~'), # user's home
	]
	for file_dir in _dirs:
		file_in_dir = os.path.join(file_dir, fname)
		# BUG FIX: 'os.W_OK or os.R_OK' always evaluated to os.W_OK (truthy
		# short-circuit); access modes combine as a bitmask with '|'
		if os.path.isfile(file_in_dir) and os.access(file_dir, os.W_OK | os.R_OK):
			return os.path.abspath(file_in_dir)
	if sys.platform.startswith('win') and os.access(_dirs[0], os.W_OK | os.R_OK):
		return os.path.abspath(os.path.join(_dirs[0], fname))
	else:
		return os.path.abspath(os.path.join(_dirs[1], fname))

class StatusReport:
	"""Base class for reporting download progress.

	Every callback is a no-op; subclasses override the ones they care
	about.  Used directly as the 'quiet' reporter.
	"""
	def report_start(self, url):
		"""Called once before a URL download begins"""
		pass

	def report_finish(self, fname):
		"""Called once after a file has been saved"""
		pass

	def report_progress(self, percent_str, counter, total_len_str, speed_str, eta_str):
		"""Called repeatedly with pre-formatted progress strings"""
		pass

	def report_completion(self, total_len_str, speed_str, time_str):
		"""Called when a transfer finishes normally"""
		pass

	def report_error(self, str):
		"""Called with an error message"""
		pass

	def report_warning(self, str):
		"""Called with a warning message"""
		pass

class StatusReportConsole:
	"""Print error messages to console.

	Same interface as StatusReport: progress goes to stdout (rewritten in
	place with a carriage return), errors go to stderr.
	"""
	def report_start(self, url):
		print "Getting: ", url

	def report_finish(self, fname):
		print "Saved: ", fname

	def report_progress(self, percent_str, counter, total_len_str, speed_str, eta_str):
		# leading '\r' keeps the progress on one continuously-updated line
		sys.stdout.write('\rRetrieving: %5s%% (%8s of %s) at %8s/s ETA %s ' % (percent_str, counter, total_len_str, speed_str, eta_str))
		sys.stdout.flush()

	def report_completion(self, total_len_str, speed_str, time_str):
		# trailing spaces blank out leftovers of the longer progress line
		sys.stdout.write('\rGot %s at %8s/s during %s                                \n' % (total_len_str, speed_str, time_str))
		sys.stdout.flush()

	def report_error(self, str):
		sys.stderr.write("Error: %s\n" % str)

	def report_warning(self, str):
		print str

class PersistentConfig:
	"""Save and restore configuration from config file"""
	main_sec = 'main' # the single ini-file section used for all settings

	def __init__(self, conf_file):
		import ConfigParser
		self.parser = ConfigParser.SafeConfigParser()
		self.conf_file = find_file(conf_file)
		# option name -> default value; written out when missing
		self._defaults = [
			['user', ''],
			['password', ''],
			['retries', '3'],
			['overwrite', 'False'],
			['timeout', '120'],
			['dl_dir', '.'],
			['cookie_dir', '.'],
			['no_proxy', 'False'],
			['explicit_proxy', ''],
		]

	def _validate_config(self):
		"""Add any missing section/options and persist the file if changed"""
		modified = False
		if not self.parser.has_section(self.__class__.main_sec):
			self.parser.add_section(self.__class__.main_sec)
			modified = True
		
		for vals in self._defaults:
			if not self.parser.has_option(self.__class__.main_sec, vals[0]):
				self.set(*vals)
				modified = True
		
		if modified:
			self.write()

	def set(self, setting, value, section=main_sec):
		"""Store a setting; unicode values are encoded to UTF-8 first"""
		if type(value) == type(u''):
			value = value.encode('utf-8')
		self.parser.set(section, setting, value)

	def get(self, setting, section=main_sec):
		"""Read a setting back, decoded from UTF-8"""
		value = self.parser.get(section, setting)
		return value.decode('utf-8')

	def getboolean(self, setting):
		# BUG FIX: 'main_sec' is a class attribute and is NOT visible as a
		# bare name inside a method body - it must be read through self
		return self.parser.getboolean(self.main_sec, setting)

	def read(self):
		"""Load the config file, creating it with defaults when missing"""
		if os.path.isfile(self.conf_file):
			self.parser.read(self.conf_file)
			self._validate_config()
		else:
			# BUG FIX: same class-attribute scoping problem as getboolean();
			# the bare 'main_sec' raised NameError on first run
			self.parser.add_section(self.main_sec)
			for vals in self._defaults:
				self.set(*vals)
			self.write()

	def write(self):
		"""Persist the current settings to disk"""
		fd = open(self.conf_file, 'w')
		try:
			self.parser.write(fd)
		finally:
			fd.close()

	def __getitem__(self, value):
		return self.get(value)

	def __setitem__(self, value, setting):
		return self.set(value, setting)

	def _str2bool(self, str):
		# only the exact string 'True' counts as true (matches what set() writes)
		return str == "True"

	def apply(self):
		"""Push the stored settings into the global runtime config object"""
		config.set_dl_dir(self['dl_dir'])
		config.username = self['user']
		config.passwd = self['password']
		config.set_tmo(self['timeout'])
		config.set_retries(self['retries'])
		config.overwrite_existing = self._str2bool(self['overwrite'])
		config.init_comm(self['cookie_dir'], self._str2bool(self['no_proxy']), self['explicit_proxy'])

class Configuration:
	"""Keep track of our configuration received from user.

	A single module-level instance ('config' below) is shared by every
	download helper in this file.
	"""
	def __init__(self):
		self.username = None # site credentials, consumed by plugins
		self.passwd = None
		self.cookies_loaded = False # True once the jar was read from disk
		self.cookies = None # cookielib.LWPCookieJar, built in _set_cookiedir()
		self.cookie_file = 'cookies.lwp'
		self.retries = 3
		self.report = None # a StatusReport-like object for progress/errors
		self.dl_dir = '.'
		self.cookie_dir = '.'
		self.stop_getting = False # set externally to interrupt downloads
		self.overwrite_existing = False
		self.tmo = 15.0 # network timeout, seconds
		self.no_proxy = False
		self.explicit_proxy = None

	def initialize(self, username, passwd, retries, report, overwrite, dl_dir, tmo):
		"""Apply the user-supplied command-line settings"""
		self.username = username
		self.passwd = passwd
		self.report = report
		self.overwrite_existing = overwrite
		self.set_retries(retries)
		self.set_dl_dir(dl_dir)
		self.set_tmo(tmo)

	def set_retries(self, retries):
		self.retries = int(retries)
		
	def set_dl_dir(self, dl_dir):
		# create the download directory on demand; keep it as an absolute path
		if not os.path.isdir(dl_dir):
			os.makedirs(dl_dir)
		self.dl_dir = os.path.abspath(dl_dir)

	def set_tmo(self, tmo):
		# the timeout applies process-wide to all new sockets
		self.tmo = float(tmo)
		socket.setdefaulttimeout(self.tmo)

	def init_comm(self, cookie_dir, no_proxy, explicit_proxy = None):
		"""Initialize all urllib2 stuff"""
		self._set_cookiedir(cookie_dir)
		self.no_proxy = no_proxy
		self.explicit_proxy = explicit_proxy

		# proxy precedence: explicit proxy > forced no-proxy > urllib2 default
		if explicit_proxy:
			opener = urllib2.build_opener(urllib2.ProxyHandler({'http':explicit_proxy}), urllib2.HTTPCookieProcessor(self.cookies))
		else:
			if no_proxy:
				opener = urllib2.build_opener(urllib2.ProxyHandler({}), urllib2.HTTPCookieProcessor(self.cookies))
			else:
				opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookies))

		# Internet Explorer 6.0 on XP - the only static header we are missing is Keep-Alive
		opener.addheaders = [
			('Accept', '*/*'),
			('Accept-Language', 'en-us'),
			('Accept-Encoding', 'gzip, deflate'),
			('User-Agent', 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 2.0.50727; .NET CLR 1.1.4322)'),
			('Cache-Control', 'no-cache'),
			]
	
		# Now all calls to urllib2.urlopen will use our opener
		urllib2.install_opener(opener)

	def _set_cookiedir(self, cookie_dir):
		"""Point the cookie jar at a new directory, reloading it if already loaded"""
		self.cookies = cookielib.LWPCookieJar(os.path.join(cookie_dir, self.cookie_file))
		self.cookie_dir = cookie_dir
		if self.cookies_loaded:
			# the old jar was loaded; force a reload from the new location
			self.cookies_loaded = False
			CookiesLoaded()

	def __getstate__(self):
		# pickle support: opener and jar are rebuilt in __setstate__
		return (self.username, self.passwd, self.cookies_loaded, self.cookie_file, self.retries, self.dl_dir, self.cookie_dir, self.overwrite_existing, self.tmo, self.no_proxy, self.explicit_proxy)

	def __setstate__(self, state):
		self.username, self.passwd, self.cookies_loaded, self.cookie_file, self.retries, self.dl_dir, self.cookie_dir, self.overwrite_existing, self.tmo, self.no_proxy, self.explicit_proxy = state
		self.set_tmo(self.tmo)
		self.set_dl_dir(self.dl_dir)
		self.init_comm(self.cookie_dir, self.no_proxy, self.explicit_proxy)

config = Configuration() # module-level singleton shared by all download helpers

def CookiesLoaded():
	"""Ensure the cookie jar has been loaded from disk; return the load status"""
	if config.cookies_loaded:
		return True
	try:
		config.cookies.load()
		config.cookies_loaded = True
	except IOError:
		# first run: no cookie file exists yet
		config.report.report_warning("Warning: No cookie file present")
	return config.cookies_loaded

class CantResume(Exception):
	"""An exception that is thrown when we can't resume a download"""
	pass

class NoAuth(Exception):
	"""An exception that is thrown when authentication failed"""
	pass

class CantDownload(Exception):
	"""An exception that is thrown when a download cannot proceed (e.g. no disk space)"""
	pass

class TemporarilyUnavailable(Exception):
	"""An exception that is thrown when the URL is temporarily unavailable"""
	pass

def find_attr(data, attr):
	"""Find the attribute value from a URL or a raw query string"""
	query = data
	if query.startswith('http://'):
		# full URL given: search only its query string
		query = urlparse.urlparse(query)[4]
	for pair in query.split('&'):
		key, val = urllib.splitvalue(pair)
		if key == attr:
			return val
	return None

def html2ascii(text):
	"""Returns the ASCII decoded version of the given HTML string"""
	# order matters: '&amp;' is decoded last, so entities produced by its
	# decoding are not expanded a second time
	for entity, char in (('&quot;', '"'), ('&gt;', '>'), ('&lt;', '<'), ('&amp;', '&')):
		text = text.replace(entity, char)
	return text

class URLLister(SGMLParser):
	"""Collect anchor, image and script URLs from an HTML page"""
	def reset(self):
		SGMLParser.reset(self)
		self.urls = []
		self.image_urls = []
		self.script_urls = []

	def _attr_values(self, attrs, wanted):
		# entity-decoded values of every occurrence of the wanted attribute
		return [html2ascii(val) for name, val in attrs if name == wanted]

	def start_a(self, attrs):
		self.urls.extend(self._attr_values(attrs, 'href'))

	def start_img(self, attrs):
		self.image_urls.extend(self._attr_values(attrs, 'src'))

	def start_script(self, attrs):
		self.script_urls.extend(self._attr_values(attrs, 'src'))

class PostLister(SGMLParser):
	"""Collect hidden form inputs and the form action from an HTML page"""
	def reset(self):
		SGMLParser.reset(self)
		self.vars = {}
		self.action = None

	def start_input(self, attrs):
		# only hidden inputs declared exactly as (type, name, value) are kept
		if len(attrs) == 3 and attrs[0] == ('type', 'hidden'):
			self.vars[attrs[1][1]] = attrs[2][1]

	def start_form(self, attrs):
		candidates = [val for name, val in attrs if name == 'action']
		# keep the last non-empty action attribute, if any
		if candidates and candidates[-1]:
			self.action = candidates[-1]

class IFrameLister(SGMLParser):
	"""Class for getting all the iframe links on an HTML page"""
	def reset(self):
		SGMLParser.reset(self)
		self.urls = []

	def start_iframe(self, attrs):
		for name, val in attrs:
			if name == 'src':
				self.urls.append(html2ascii(val))

class MetaLister(SGMLParser):
	"""Class for getting all the meta data on page"""
	def reset(self):
		SGMLParser.reset(self)
		self.metas = {}

	def start_meta(self, attrs):
		# duplicate attribute names keep the last value, like the original loop
		attr_map = dict(attrs)
		equiv = attr_map.get('http-equiv')
		content = attr_map.get('content')
		if equiv and content:
			self.metas[equiv] = content

class Form:
	"""Plain container describing a single HTML form"""
	def __init__(self):
		# hidden input name -> value pairs collected for this form
		self.vars = {}
		# submit target; None until an 'action' attribute is seen
		self.action = None
		self.id = ''
		self.title = ''
		self.name = ''

class FormLister(SGMLParser):
	"""Class for getting all the Forms on an HTML page"""
	def reset(self):
		SGMLParser.reset(self)
		self.forms = []
		self.curr_form = None

	def start_input(self, attrs):
		# hidden inputs belong to the most recently opened form
		if self.curr_form is None:
			return
		if len(attrs) == 3 and attrs[0] == ('type', 'hidden'):
			self.curr_form.vars[attrs[1][1]] = attrs[2][1]

	def start_form(self, attrs):
		form = Form()
		# mirror every form attribute (id, name, title, action, ...) onto the object
		for attr_name, attr_val in attrs:
			setattr(form, attr_name.lower(), attr_val)
		self.curr_form = form
		self.forms.append(form)

def sleep_interruptible(sleep_secs):
	"""Sleep for sleep_secs seconds one second at a time.

	Returns False immediately (clearing the flag) if config.stop_getting
	is set during the wait, True after the full sleep.
	"""
	sleep_secs = int(sleep_secs)
	minutes, seconds = divmod(sleep_secs, 60)
	if minutes > 0 and seconds > 0:
		msg = 'Site blocked us for %d minutes %d seconds, going to sleep.' % (minutes, seconds)
	elif minutes > 0:
		msg = 'Site blocked us for %d minutes, going to sleep.' % minutes
	else:
		msg = 'Site blocked us for %d seconds, going to sleep.' % seconds
	config.report.report_warning(msg)
	for _ in xrange(sleep_secs + 1):
		if config.stop_getting:
			config.stop_getting = False
			return False
		time.sleep(1)
	return True

def parse_cookie_content(data):
	"""Parse content data of HTTP meta tag for Set-Cookie.

	Returns [domain, expires, path, 'name=value', ...]; the first three
	entries are None when the corresponding attribute is absent.
	"""
	res = [None, None, None]
	for valpair in data.split(';'):
		valpair = valpair.strip()
		if '=' not in valpair:
			# value-less flags like 'HttpOnly' used to raise ValueError
			continue
		# split only on the first '=' so values containing '=' survive
		name, val = valpair.split('=', 1)
		if name == 'domain':
			res[0] = val
		elif name == 'expires':
			res[1] = val
		elif name == 'path':
			res[2] = val
		else:
			res.append('%s=%s' % (name, val))
	return res

def add_manual_cookie(name, value, domain, path=None):
	"""Add a cookie that data for it is manually entered"""
	expiry = int(time.time()) + 3600 # keep the manual cookie alive for an hour
	cookie = cookielib.Cookie(
		version=0, name=name, value=value,
		port=None, port_specified=False,
		domain=domain, domain_specified=True,
		domain_initial_dot=domain.startswith('.'),
		path=path, path_specified=bool(path),
		secure=False, expires=expiry, discard=False,
		comment=None, comment_url=None, rest={})
	config.cookies.set_cookie(cookie)

def get_links(url, filter = ''):
	"""Get all the anchor and image links on a page matching the filter regex.

	Anchor URLs come first, then image URLs, in document order with
	duplicates removed.  Membership is tracked in a set instead of
	rescanning the result list, and the loop variable no longer shadows
	the 'url' parameter.
	"""
	parser = URLLister()
	parser.feed(urllib2.urlopen(url).read())
	pattern = re.compile(filter)
	result = []
	seen = set()
	for link in parser.urls + parser.image_urls:
		if link not in seen and pattern.search(link):
			seen.add(link)
			result.append(link)
	parser.close()
	return result

def get_iframes(url, filter = ''):
	"""Get all the iframe links on page"""
	parser = IFrameLister()
	parser.feed(urllib2.urlopen(url).read())
	pattern = re.compile(filter)
	result = []
	for iframe_url in parser.urls:
		# keep document order, skip duplicates and non-matching links
		if pattern.search(iframe_url) and iframe_url not in result:
			result.append(iframe_url)
	parser.close()
	return result

def open_unique_file(fname, index = 0):
	"""Open a file for writing, appending _N to the name while one exists.

	Returns (open file object, full path).  When overwriting is allowed
	the first candidate name is used as-is.
	"""
	while True:
		if index:
			local_file = os.path.join(config.dl_dir, "%s_%d" % (fname, index))
		else:
			local_file = os.path.join(config.dl_dir, fname)
		if config.overwrite_existing or not os.path.exists(local_file):
			return file(local_file, 'wb'), local_file
		index += 1

def creat_unique_directory(directory, index = 0):
	"""Create a directory, appending _N to the name while one exists.

	Returns the path actually created (or reused when overwriting).
	"""
	while True:
		if index:
			local_dir = os.path.join(config.dl_dir, "%s_%d" % (directory, index))
		else:
			local_dir = os.path.join(config.dl_dir, directory)
		if config.overwrite_existing:
			if not os.path.isdir(local_dir):
				os.mkdir(local_dir)
			return local_dir
		if not os.path.exists(local_dir):
			os.mkdir(local_dir)
			return local_dir
		index += 1

_const_1k = 1024 # bytes per (binary) kilobyte
_const_initial_block_size = 10 * _const_1k # first read size before adaptive sizing kicks in
_const_epsilon = 0.0001 # smallest time delta considered non-zero when computing rates

def new_block_size(before, after, bytes):
	"""Calculate new block size based on previous block size.

	The next read is the observed rate (bytes per second of the last
	read), clamped to [bytes/2, bytes*2] and never below 1.
	"""
	lower = max(bytes / 2.0, 1.0)
	upper = max(bytes * 2.0, 1.0)
	elapsed = after - before
	if elapsed < _const_epsilon:
		# the read was effectively instantaneous: grow as fast as allowed
		return int(upper)
	rate = bytes / elapsed
	return int(min(max(rate, lower), upper))

def optimum_k_exp(num_bytes):
	"""Get optimum 1k exponent to represent a number of bytes"""
	global _const_1k
	if num_bytes == 0:
		return 0
	# int() instead of the Python-2-only long(): the result is a small
	# suffix-table index either way, with the same numeric value
	return int(math.log(num_bytes, _const_1k))

def format_bytes(num_bytes):
	"""Get optimum representation of number of bytes"""
	global _const_1k
	try:
		exp = optimum_k_exp(num_bytes)
		suffix = 'bkMGTPEZY'[exp]
	except IndexError:
		# value too large for the suffix table
		sys.exit('Error: internal error formatting number of bytes.')
	if exp == 0:
		return '%s%s' % (num_bytes, suffix)
	return '%.2f%s' % (num_bytes / float(_const_1k ** exp), suffix)

def format_time(start, end):
	"""Format time difference and return it in string format as MM:SS.

	Differences of more than 99 minutes render as '--:--'.
	"""
	elapsed = end - start
	if elapsed / 60 > 99:
		return '--:--'
	return '%02d:%02d' % (elapsed / 60, elapsed % 60)

def calc_eta(start, now, total, current):
	"""Calculate ETA and return it in string format as MM:SS.

	Returns '--:--' when no data has arrived yet, when no measurable time
	has passed, or when the estimate exceeds 99 minutes.
	"""
	dif = now - start
	if current == 0 or dif < _const_epsilon:
		return '--:--'
	rate = float(current) / dif
	# int() instead of the Python-2-only long(); the value is identical
	eta = int((total - current) / rate)
	eta_mins = eta / 60
	eta_secs = eta % 60
	if eta_mins > 99:
		return '--:--'
	return '%02d:%02d' % (eta_mins, eta_secs)

def calc_speed(start, now, bytes):
	"""Calculate speed and return it in string format"""
	elapsed = now - start
	if bytes == 0 or elapsed < _const_epsilon:
		# nothing transferred yet, or no measurable time has passed
		return 'N/A b'
	return format_bytes(bytes / float(elapsed))

def get_url_data(remote_url, is_post = False, post_data = {}, referer=None):
	"""Get URL and perform retries if necessary, also closes the connection.

	Returns the (decompressed) page body as a string, or None when the
	download was interrupted, hit a socket error, or the body could not
	be decompressed.
	"""
	req = urllib2.Request(remote_url)
	req.add_header("Host", req.get_host())
	if referer:
		req.add_header("Referer", referer)

	if is_post:
		response = urllib2.urlopen(req, urllib.urlencode(post_data))
	else:
		response = urllib2.urlopen(req)

	if config.stop_getting:
		config.stop_getting = False
		return None

	# the content-encoding header tells us how the body is compressed
	content_enc = response.headers.dict.get("content-encoding")

	try:
		got = response.read()
	except socket.error, msg:
		config.report.report_warning(msg)
		return None

	if content_enc == 'gzip' or got.startswith('\x1f\x8b'):
		try:
			return gzip.GzipFile(fileobj=StringIO.StringIO(got)).read()
		except IOError, msg:
			config.report.report_warning(msg)
			return None
	elif content_enc == 'deflate':
		# BUG FIX: Content-Encoding 'deflate' is a zlib/deflate stream, not
		# a zip archive.  Some servers also send a raw deflate stream
		# without the zlib header, hence the second attempt.
		try:
			try:
				return zlib.decompress(got)
			except zlib.error:
				return zlib.decompress(got, -zlib.MAX_WBITS)
		except zlib.error, msg:
			config.report.report_warning(msg)
			return None
	elif got.startswith('\x50\x4b\x03\x04'):
		# a real zip archive: ZipFile.read() needs a member name (the old
		# bare .read() call raised TypeError); return the first member
		try:
			arch = zipfile.ZipFile(StringIO.StringIO(got))
			return arch.read(arch.namelist()[0])
		except zipfile.BadZipfile, msg:
			config.report.report_warning(msg)
			return None
	else:
		return got

def download_file(dst_file, response):
	"""Download data from http stream and save it to a file.

	dst_file - an open, writable binary file object
	response - a urllib2 response ready to be read
	Returns the number of bytes written.  Stops early (without raising)
	on a socket error or when config.stop_getting is set; the caller is
	responsible for resuming.
	"""
	byte_counter = 0
	block_size = _const_initial_block_size
	start_time = time.time()
	try:
		total_len = long(response.headers.dict["content-length"])
		total_len_str = format_bytes(total_len)
	except KeyError:
		# server sent no Content-Length: no percentage/ETA available
		total_len = None
		total_len_str = 'N/A'
	
	after = time.time()
	while True:
		if total_len is not None:
			percent = float(byte_counter) / float(total_len) * 100.0
			percent_str = '%.1f' % percent
			eta_str = calc_eta(start_time, time.time(), total_len, byte_counter)
		else:
			percent_str = '---.-'
			eta_str = '--:--'
		
		counter = format_bytes(byte_counter)
		speed_str = calc_speed(start_time, time.time(), byte_counter)
		config.report.report_progress(percent_str, counter, total_len_str, speed_str, eta_str)
		
		if config.stop_getting:
			break
		before = time.time()
		try:
			data = response.read(block_size)
		except socket.error, msg:
			# connection dropped mid-transfer; report and bail out
			config.report.report_warning(msg)
			break
		after = time.time()
		dl_bytes = len(data)
		if dl_bytes == 0:
			break
		byte_counter += dl_bytes
		dst_file.write(data)
		# adapt the next read size to the observed throughput of this one
		block_size = new_block_size(before, after, dl_bytes)

	speed_str = calc_speed(start_time, after, byte_counter)
	time_str = format_time(start_time, after)
	if not config.stop_getting:
		config.report.report_completion(total_len_str, speed_str, time_str)
	return byte_counter

_min_disk_space = 201 * 1000 * 1000 # 201Mb - refuse to start an unsized download below this free space

def download_file_with_retries(fname, remote_url, is_post = False, post_data = {}, referer=None):
	"""Download a file and perform retries if necessary, also closes the connection.

	fname - preferred local name; made unique via open_unique_file().
	Returns the response content-type (or None).  Raises CantDownload when
	disk space is insufficient, and CantResume when a Range request needed
	to finish an interrupted transfer is not honored by the server.
	"""
	req = urllib2.Request(remote_url)
	req.add_header("Host", req.get_host())
	if referer:
		req.add_header("Referer", referer)

	if is_post:
		response = urllib2.urlopen(req, urllib.urlencode(post_data))
	else:
		response = urllib2.urlopen(req)

	content_type = None
	try:
		content_type = response.headers.dict["content-type"]
	except: pass

	dst_file, filename = open_unique_file(fname)
	try:
		if config.stop_getting:
			config.stop_getting = False
			return
		
		try:
			total_size = long(response.headers.dict["content-length"])
		except KeyError:
			# no Content-Length: download in one shot, resume is impossible
			config.report.report_warning("Could not get content size. Would not be able to resume download.")
			if get_freespace(filename) < _min_disk_space:
				raise CantDownload("Not enough space on hard disk.")
				
			download_file(dst_file, response)
			return None
		
		got = 0
		while got < total_size:
			if config.stop_getting:
				config.stop_getting = False
				return None
			if got != 0:
				# a previous attempt stopped short: resume with a Range request
				if get_freespace(filename) < (total_size - got) + 1 * 1000 * 1000:
					raise CantDownload("Not enough space on hard disk.")
				config.report.report_warning("Retrying %s from %d" % (filename, got))
				req = urllib2.Request(response.geturl())
				req.add_header("Range", "bytes=%d-" % got)
				req.add_header("Host", req.get_host())
				response = urllib2.urlopen(req)
				if response.code != 206: # server does not support partial requests
					dst_file.close()
					os.unlink(filename)
					raise CantResume("Server does not support resumes")
			got += download_file(dst_file, response)
	finally:
		if not dst_file.closed:
			dst_file.close()
	return content_type

def get_img_url(url, filter):
	"""Get URL of an image based on some filter"""
	parser = URLLister()
	parser.feed(urllib2.urlopen(url).read())
	res = ''
	for candidate in parser.image_urls:
		# substring match, first hit wins
		if filter in candidate:
			res = candidate
			break
	parser.close()
	return res

def get_filename_from_url(url):
	"""Get the filename part from the URL"""
	parsed = urlparse.urlparse(url)
	query, path = parsed[4], parsed[2]
	if query:
		# prefer the last query-string value (e.g. ...?file=name)
		local_file = query.split('=')[-1]
	else:
		local_file = path.split('/')[-1]
	# strip a trailing '.htm'/'.html' extension and anything after it
	loc = local_file.find('.htm')
	if loc != -1:
		local_file = local_file[:loc]
	return local_file

def cookie_present(site):
	"""Check if we have cookies for a certain site"""
	# iterating the jar yields Cookie objects; the enumerate index was unused
	for cookie in config.cookies:
		if site in cookie.domain:
			return True
	return False

def get_valid_name(name):
	"""Get valid file/directory name"""
	# characters that are illegal in filenames are dropped; '&' and '+'
	# get readable substitutes
	replacements = [
		('&', 'and'), (':', ''), ('|', ''), ('/', ''), ('\\', ''),
		('<', ''), ('>', ''), ('"', ''), ('?', ''), ('*', ''), ('+', '_'),
	]
	for old, new in replacements:
		name = name.replace(old, new)
	return name

def unzip_file(fname):
	"""Unzip a single file and delete the original zip afterwards.

	Archive members are extracted flat (directory components stripped)
	next to the zip.  On a corrupt archive the function returns without
	touching the zip.  The archive handle and every output file are now
	closed explicitly instead of being leaked.
	"""
	directory = os.path.split(fname)[0]
	if directory == '':
		directory = '.'

	arch_file = zipfile.ZipFile(fname, 'r')
	try:
		if arch_file.testzip(): # either the archive is corrupt, or we can't read it...
			return

		for name in arch_file.namelist():
			member = os.path.split(name)[1]
			if member:
				out = open(os.path.join(directory, member), 'wb')
				try:
					out.write(arch_file.read(name))
				finally:
					out.close()
	finally:
		arch_file.close()

	os.unlink(fname)

def unrar_file(fname):
	"""Unrar a single file and delete the original rar afterwards"""
	import rarfile

	directory = os.path.split(fname)[0]
	if not directory:
		directory = '.'

	arch_file = rarfile.RarFile(fname, 'r')
	# extract members flat, dropping any directory components
	for name in arch_file.namelist():
		member = os.path.split(name)[1]
		if member:
			file(os.path.join(directory, member), 'wb').write(arch_file.read(name))
	arch_file.close()
	os.unlink(fname)

def extract_file(fname):
	"""Extract a single archive, ignoring extraction errors (best effort)"""
	extension = os.path.splitext(fname)[1].lower()
	try:
		if extension == '.zip':
			unzip_file(fname)
		elif extension == '.rar':
			unrar_file(fname)
	except Exception: # a corrupt archive must not abort the batch; bare
		pass          # except also trapped KeyboardInterrupt/SystemExit

def extract_all(fname):
	"""Extract the file and if it contains more archives in it, extract those as well"""
	extract_file(fname)
	directory = os.path.split(fname)[0]
	if directory == '':
		directory = '.'
	# archives unpacked from fname land next to it; zips first, then rars
	for pattern in ('*.zip', '*.rar'):
		for nested in glob.glob(os.path.join(directory, pattern)):
			extract_file(nested)

def get_files_from_link(url):
	"""Get all the files we support from a URL.

	Pending links are recorded in 'downloading.txt', which is removed once
	every link has been attempted.
	"""
	links = get_links(url, plugins.url_filter)
	stat_file = file("downloading.txt", "w")
	stat_file.writelines([l + os.linesep for l in links])
	stat_file.close()

	for l in links:
		get_with_retry(l)
	os.unlink("downloading.txt")

def proccess_iframes_from_link(url):
	"""Get all the files we support from iframes in a URL"""
	for iframe_url in get_iframes(url, plugins.url_filter):
		get_with_retry(iframe_url)

def get_from_source_link(url):
	"""Get files from URL containing the links.

	Downloads everything into a fresh directory named after the source
	page; 'downloading.txt' inside it tracks the pending links and is
	removed when done.  config.dl_dir is restored afterwards.
	"""
	links = get_links(url, plugins.url_filter)
	# directory name: last path component of the URL, without '.htm(l)'
	new_dir = urlparse.urlparse(url)[2].split(".htm")[0].split('/')[-1]
	
	created_dir = creat_unique_directory(new_dir)
	stat_file = file(os.path.join(created_dir, 'downloading.txt'), 'w')
	curr_dl_dir = config.dl_dir
	try:
		# temporarily redirect all downloads into the new directory
		config.dl_dir = created_dir
		for l in links:
			stat_file.write(l + os.linesep)
		stat_file.close()

		for l in links:
			get_with_retry(l)
	finally:
		if not stat_file.closed:
			stat_file.close()
		os.unlink(os.path.join(created_dir, 'downloading.txt'))
		config.dl_dir = curr_dl_dir

def remove_cookies(plugin):
	"""Remove the cookies that this plugin wants to block"""
	for domain in plugin.cookies_to_block:
		# CookieJar.clear raises KeyError when nothing matches the domain;
		# the old bare except also hid unrelated errors
		try:
			config.cookies.clear(domain=domain)
		except KeyError:
			pass

failed_lock = threading.Lock() # serializes appends to the failed-urls file across downloader threads

def get_with_retry(url, fname = None):
	"""Attempt to get a URL. Do retries if necessary.

	Dispatches to the first plugin whose url_matches() accepts the URL.
	Transient failures (socket errors, HTTP 5xx, failed resumes, temporary
	unavailability) are retried up to config.retries times; fatal ones
	(HTTP 4xx, authentication, disk full) abort immediately.  URLs that
	could not be fetched are appended to a '*failed.txt' file.
	"""
	getter_plugin = None
	for plugin in plugins.get():
		if plugin.url_matches(url):
			getter_plugin = plugin
			break
	
	if getter_plugin == None:
		config.report.report_error("Unsupported URL provided: %s" % url)
		return
	
	config.report.report_start(url)
	curr = 0
	while curr < config.retries:
		try:
			# drop cookies the plugin wants blocked before every attempt
			remove_cookies(getter_plugin)
			getter_plugin.get(url)
			return
		except socket.error, msg:
			config.report.report_error(msg)
			curr += 1
			time.sleep(1)
			continue
		except CantDownload, msg:
			# e.g. out of disk space - retrying cannot help
			config.report.report_error(msg)
			break
		except CantResume, msg:
			config.report.report_error(msg)
			curr += 1
			continue
		except urllib2.HTTPError, e:
			config.report.report_error(str(e))
			if e.code >= 400 and e.code < 500:
				# client errors are permanent, do not retry
				break
			else:
				curr += 1
				time.sleep(1)
				continue
		except urllib2.URLError, msg:
			config.report.report_error(msg.reason)
			curr += 1
			time.sleep(1)
			continue
		except HTTPException, arg:
			config.report.report_error(repr(arg))
			curr += 1
			time.sleep(1)
			continue
		except NoAuth, msg:
			config.report.report_error(msg)
			break
		except TemporarilyUnavailable, msg:
			config.report.report_error(msg)
			curr += 1
			time.sleep(1)
			continue
	
	# all retries exhausted (or a fatal error): record the failed URL
	if fname == None:
		fname = 'failed.txt'
	else:
		fname = '%s_failed.txt' % fname
	
	failed_lock.acquire()
	try:
		fd = file(fname, 'a')
		fd.write(url)
		fd.write(os.linesep)
		fd.close()
	except: pass
	finally: failed_lock.release()

	config.report.report_error("Unable to get: %s" % url)

yapget_version = "1.4" # reported by --version; exported via __all__

class Plugins:
	"""Class for managing plugins. Note that this class has to be defined _AFTER_ all classes/functions used by plugins"""
	def __init__(self):
		# names from this module that get injected into every plugin module
		self._export_list = {}
		# List all the globals we need to export to a plugin
		_list = globals()
		_exclude_list = ['self', 'Configuration',
				'imp', 'SGMLParser', 'Plugins']
		for _item in _list:
			if not _item.startswith('_') and _item not in _exclude_list:
				self._export_list[_item] = _list[_item]

		self._plugins = []
		self.url_filter = '' # regex matching every URL any plugin supports
		self.sites = '' # human-readable listing of supported sites
		_py_desc = None
		_pyc_desc = None
		_pyo_desc = None

		# pick out the imp suffix descriptions for the module types we load
		for attrs in imp.get_suffixes():
			if attrs[0] == '.py':
				_py_desc = attrs
			elif attrs[0] == '.pyc':
				_pyc_desc = attrs
			elif attrs[0] == '.pyo':
				_pyo_desc = attrs

		self._descriptions = [_py_desc, _pyc_desc, _pyo_desc]

	def get(self):
		"""Return the list of loaded plugin objects"""
		return self._plugins

	def _prepare_plugin(self, m):
		"""Inject our globals into module m and register the plugins it declares"""
		#setattr(sys.modules[__name__], name, m) # make the module visible

		for item in self._export_list:
			setattr(m, item, self._export_list[item])
		# a plugin module exposes either 'modules' (URL-handling plugins,
		# which contribute to url_filter) or 'sources' (which do not)
		if 'modules' in dir(m):
			mods = m.modules
			add_url = True
		else:
			mods = m.sources
			add_url = False

		for plugin in mods:
			attrs = dir(plugin)
			# every plugin must implement the full version-2 interface
			if 'get' not in attrs \
				or 'url_matches' not in attrs \
				or 'version' not in attrs \
				or 'supported_sites' not in attrs \
				or 'supported_urls' not in attrs \
				or 'supports_resume' not in attrs \
				or 'max_concurrent' not in attrs \
				or 'param_names' not in attrs \
				or 'param_vals' not in attrs \
				or 'name' not in attrs \
				or 'cookies_to_block' not in attrs:
				print '%s plugin does not have all properties' % repr(plugin)
				continue
			if plugin.version() != 2:
				print '%s plugin is of unsupported version (%d)' % (plugin.name, plugin.version())
				continue
			self._plugins.append(plugin)
			for name in plugin.supported_sites:
				self.sites += '\t%s\n' % name
			if add_url:
				self.url_filter += '|'.join(plugin.supported_urls) + '|'

	def load_plugins(self, directory):
		"""Load all modules from directory"""
		imp.acquire_lock()
		for attrs in self._descriptions:
			if not attrs:
				continue
			for module in glob.glob(os.path.join(directory, '*' + attrs[0])):
				# mangle the name so we don't clash with something that already exists
				name = '_yapget_plugin__' + os.path.split(module)[1].replace(attrs[0], '')
				if name in sys.modules: # already got it
					continue
				fp = file(module, attrs[1])
				m = imp.load_module(name, fp, module, attrs)
				fp.close()
				contents = dir(m)
				if 'modules' not in contents and 'sources' not in contents:
					del m
					continue
				self._prepare_plugin(m)
		imp.release_lock()

		# tidy the accumulated strings built up by _prepare_plugin
		self.url_filter = self.url_filter.strip(' |').replace('?', '\\?')
		self.sites = self.sites.strip(' \n\t')

	def initialize(self):
		"""Search the standard locations for 'plugins' directories and load them"""
		_dirs = [
			os.path.split(sys.argv[0])[0], # where our executable is
			'/usr/local/yapget',
			'/usr/yapget',
			os.path.expanduser('~'),
		]
		for plugin_dir in _dirs:
			file_in_dir = os.path.join(plugin_dir, 'plugins')
			if os.path.isdir(file_in_dir):
				self.load_plugins(file_in_dir)
		# the catch-all plugin goes last so real plugins get first pick
		self._plugins.append(DefaultPlugin())

plugins = Plugins() # shared plugin registry; populated by plugins.initialize() in main()

class DefaultPlugin:
	"""This is a default plugin that gets all the URLs that our plugins did not support"""
	def __init__(self):
		self.supported_sites = []
		self.supported_urls = self.supported_sites # same (empty) list object
		self.cookies_to_block = []
		self.supports_resume = False
		self.max_concurrent = 0
		self.param_names = []
		self.param_vals = {}
		self.name = 'Default'

	def get(self, url):
		"""Just get a file from URL"""
		fname = get_valid_name(url.split('/')[-1])
		download_file_with_retries(fname, url)
		config.report.report_finish(fname)

	def url_matches(self, url):
		"""Handle only URLs that no other plugin already claims"""
		return not re.search(plugins.url_filter, url)

	def version(self):
		"""Return plugin version - when interface changes, the version changes"""
		return 2

def main():
	"""Application main.

	Parses the command line, configures the global 'config' object and
	dispatches to the batch-file / single-URL / source-page download
	modes.  Returns the process exit status (0 on success).
	"""
	plugins.initialize()
	# the usage string doubles as the plugin/site listing shown by --help
	usage="usage: %prog [options]\nDownload files using " + str(len(plugins._plugins)) + " plugins:\n\t" + plugins.sites + \
"""\nUse proxy if http_proxy is set (must be 'http://username:password@host:port'),
or registry settings on Windows
Note that rapidshare authentication over proxy is not supported (but D/L is)
If some of the downloads fail, a text file with failed urls will be created"""

	from optparse import OptionParser

	parser = OptionParser(usage=usage, version="%prog " + yapget_version)
	parser.add_option("-s", "--source-url", dest="source_url",
		help="get links to download from a URL", metavar="URL")
	parser.add_option("-i", "--input-file", dest="fname",
		help="get links to download from a file", metavar="FILE")
	parser.add_option("-u", "--url", dest="url_name",
		help="get a single url", metavar="URL")
	parser.add_option("-l", "--login", dest="username",
		help="rapidshare/flazx username for authentication " \
		+ "(only needed the first time you download from rapidshare/flazx)",
		metavar="USERNAME")
	parser.add_option("-p", "--password", dest="passwd",
		help="rapidshare/flazx password for authentication " \
		"(only needed the first time you download from rapidshare/flazx)",
		metavar="PASSWORD")
	parser.add_option("-n", "--no-proxy", action="store_true", dest="no_proxy",
		help="do not use proxy even if such information is present",
		default=False)
	parser.add_option("-t", "--timeout", dest="tmo",
		help="timeout value for network operations (default 60)",
		default=60, metavar="SECONDS")
	parser.add_option("-r", "--retries", dest="retries",
		help="how many times to retry before giving up on a file (default 3)",
		default=3)
	parser.add_option("-c", "--cookie-dir", dest="cookie_dir",
		help="where to get/save cookie file (default '.')",
		default='.', metavar="DIRECTORY")
	parser.add_option("-q", "--quiet", action="store_true", dest="quiet",
		help="do not print anything to the console",
		default=False)
	parser.add_option("-d", "--dl-dir", dest="dl_dir",
		help="which directory to download files to (will be created if does not exist) (default '.')",
		default='.', metavar="DIRECTORY")
	parser.add_option("-f", "--force-overwrite", action="store_true", dest="overwrite",
		help="overwrite file if exists (default is to create a unique filename/directory)",
		default=False)
	parser.add_option("-P", "--explicit-proxy", dest="explicit_proxy",
		help="specify explicitely a proxy - must be in format of http_proxy",
		metavar='PROXY')
	(options, args) = parser.parse_args()

	if options.fname:
		options.fname = os.path.abspath(options.fname)
	if options.quiet:
		reporter = StatusReport() # silent reporter: every callback is a no-op
	else:
		reporter = StatusReportConsole()

	if options.explicit_proxy and not is_valid_proxy_url(options.explicit_proxy):
		reporter.report_error('Invalid explicit proxy specified')
		return 1

	config.initialize(options.username, options.passwd, options.retries, reporter, options.overwrite, options.dl_dir, options.tmo)
	
	config.init_comm(options.cookie_dir, options.no_proxy, options.explicit_proxy)
	try:
		if options.fname:
			# batch mode: one http URL per line in the input file
			for line in file(options.fname, 'r').readlines():
				line = line.strip(' \r\n\t')
				if urlparse.urlparse(line)[0] == 'http':
					try: get_with_retry(line, options.fname)
					except Exception, msg:
						config.report.report_error("Exception occured: %s" % str(msg))
		elif options.url_name:
			get_with_retry(options.url_name)
		elif options.source_url:
			get_files_from_link(options.source_url)
		else:
			parser.print_help()
			return 1
	except ValueError, msg:
		config.report.report_error("Invalid argument: %s" % msg)
		return 1
	return 0

if __name__ == "__main__":
	ret = 0
	try:
		ret = main()
	except KeyboardInterrupt:
		# Ctrl-C: report and fall through to exit with the last known status
		print "Quiting"
	sys.exit(ret)

