# FeedFetcher.py
#
# Copyright (c) 2006 Michael Hobbs
#
# This file is part of iTorrent.
#
# iTorrent is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# iTorrent is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with iTorrent; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA


import os.path
import time
from StringIO import StringIO
from email.Utils import parsedate
import xml.dom.minidom as minidom
import xml.sax as sax

from BitTorrent.bencode import bdecode
from BitTorrent.ConvertedMetainfo import ConvertedMetainfo
from BitTorrent import BTFailure

import utils
import RawHTTPClient
from RawHTTPClient import CONNECT, DATA, CLOSED, FAILURE
from FeedStore import FeedStore, Record
from KeepAlive import FeedKeepAlive
from BTControl import BTHashCheck, LargestFileInfo


# This should be set to the number of concurrent network connections in iTunes:
NUM_PREFETCHES = 3

# MIME type and file extension used by isTorrent() to recognize BitTorrent enclosures.
TORRENT_TYPE = 'application/x-bittorrent'
TORRENT_EXT = '.torrent'
# Give up on an enclosure once its record.numFailures exceeds this.
MAX_FAILURE = 3  # TODO: Make this configurable


# TODO: should we reset numFailures in the feed record if the published date changes?
class FeedFetcher(object):
	"""Downloads an RSS feed, rewrites its enclosure URLs to point at this
	proxy, and prefetches the first chunk of each enclosure so iTunes gets
	data promptly.

	Bookkeeping dicts, all keyed by the feed (RSS) URL:
	  keepAlives          -- FeedKeepAlive objects keeping the iTunes
	                         connection alive while we work
	  clients             -- RawHTTPClient connections downloading feed XML
	  torrentFileFetchers -- one TorrentFileFetcher per feed, downloading the
	                         .torrent files the feed references
	  prefetchers         -- per feed, a dict of {enclosure URL: prefetcher}
	                         for in-flight enclosure prefetches
	"""

	def __init__(self, rawServer, feedStore, torrentFetcher, display, config):
		super(FeedFetcher, self).__init__()
		self.rawServer = rawServer
		self.feedStore = feedStore
		self.torrentFetcher = torrentFetcher
		self.display = display
		self.config = config
		self.keepAlives = {}
		self.clients = {}
		self.torrentFileFetchers = {}
		self.prefetchers = {}

	def fetch(self, rssUrl, writer, host):
		"""Begin downloading the feed at rssUrl.

		The transformed feed XML is eventually written to writer; host is
		used when rewriting enclosure URLs to point back at this proxy.
		"""
		self.keepAlives[rssUrl] = FeedKeepAlive(writer, self.rawServer)
		self.keepAlives[rssUrl].start()
		dataBuffer = StringIO()
		client = RawHTTPClient.get(rssUrl, self.config.get('http_proxy'), self.rawServer, self.callback, dataBuffer, rssUrl, writer, host)
		self.clients[rssUrl] = client

	def stop(self, rssUrl):
		"""Cancel all activity for rssUrl: keep-alive, feed download,
		.torrent downloads and enclosure prefetches."""
		if rssUrl in self.keepAlives:
			self.keepAlives.pop(rssUrl).stop()
		if rssUrl in self.clients:
			self.clients.pop(rssUrl).close()
		if rssUrl in self.torrentFileFetchers:
			self.torrentFileFetchers.pop(rssUrl).stop()
		if rssUrl in self.prefetchers:
			[prefetcher.stop() for prefetcher in self.prefetchers.pop(rssUrl).values()]
		# end if

	def callback(self, event, data, dataBuffer, rssUrl, writer, host):
		"""RawHTTPClient event handler for the feed download.

		Returns True to keep the connection open, False to drop it. On
		CLOSED the accumulated XML is parsed and handed to
		transformEnclosures(); on a parse error or FAILURE, an error string
		is written to writer so iTunes registers an error.
		"""
		if event == CONNECT:
			return True
		if event == DATA:
			# Accumulate the feed XML until the connection closes.
			dataBuffer.write(data)
			return True
		if event == CLOSED:
			del self.clients[rssUrl]
			dataBuffer.seek(0, 0)
			try:
				dom = minidom.parseString(dataBuffer.read(), sax.make_parser())
			except sax.SAXException, e:
				message = 'Error reading RSS feed %s: %s' % (rssUrl, e)
				self.display.error(message)
				self.keepAlives.pop(rssUrl).stop()
				# Write any bogus xml text to writer to cause iTunes to register an error.
				writer.write('error: ' + message)
				writer.close()
				return False
			self.transformEnclosures(dom, rssUrl, writer, host)
			return False
		if event == FAILURE:
			message = 'Connection to RSS feed %s failed: %s' % (rssUrl, data)
			del self.clients[rssUrl]
			self.display.error(message)
			self.keepAlives.pop(rssUrl).stop()
			# TODO: why would writer ever be closed? (Maybe if iTunes closes connection?)
			if not writer.isClosed():
				# Write any bogus xml text to writer to cause iTunes to register an error.
				writer.write('error: ' + message)
				writer.close()
			return False
		# end if

	def transformEnclosures(self, dom, rssUrl, writer, host):
		"""Rewrite each item's first enclosure to point at this proxy, then
		start a TorrentFileFetcher for the torrent enclosures."""
		rss = dom.documentElement
		channel = rss.getElementsByTagName('channel')[0]
		enclosures = []
		for item in channel.getElementsByTagName('item'):
			itemEnclosures = item.getElementsByTagName('enclosure')
			if itemEnclosures:
				enclosure = itemEnclosures[0]
				url = enclosure.getAttribute('url').encode('utf8')
				enclosures.append((url, enclosure))
				# Torrent enclosures are rewritten later, by
				# TorrentFileFetcher.transformEnclosure(), once the .torrent
				# metadata is available.
				if not isTorrent(url, enclosure):
					self.transformEnclosure(url, enclosure, host)
				# end if
			# end if
		# Sort enclosures to find the most recent ones first.
		enclosures.sort(lambda a, b: -cmp(getPubDate(a[1]), getPubDate(b[1])))
		torrentFileFetcher = TorrentFileFetcher(self, enclosures, host)
		self.torrentFileFetchers[rssUrl] = torrentFileFetcher
		torrentFileFetcher.start(self.torrentFilesFetched, enclosures, dom, rssUrl, writer)

	def transformEnclosure(self, url, enclosure, host):
		"""Create/update the feed-store record for a plain (non-torrent)
		enclosure and rewrite its URL to go through this proxy."""
		# TODO: don't transform the enclosure if we won't prefetch it (if record.sent)
		# TODO: refactor this with the other transformEnclosure()
		# TODO: Catch value errors on other fields as well
		try:
			length = int(enclosure.getAttribute('length').encode('utf8'))
		except ValueError:
			# Set length to zero and hope that we can use HTTP Content-Length header later.
			length = 0
		channel = getChannel(enclosure)
		title = getTitle(enclosure)
		record = self.feedStore.get(url)
		if not record:
			record = Record(url, length, channel, title)
			self.feedStore[url] = record
		else:
			# Don't reset length based on enclosure value. It may be wrong. (The
			# Content-Length received while prefetching is usually more accurate.)
			if record.length == 0:
				record.length = length
			record.channel = channel
			record.title = title
		record.published = getPubDate(enclosure)
		record.partialPath = utils.getPartialPathName(url)
		record.lastSeen = time.time()
		enclosure.setAttribute('url', utils.getEnclosureURL(url, host))

	def getPrefetcher(self, url, enclosure, rssUrl):
		"""Create and register the appropriate prefetcher for the enclosure:
		a TorrentPrefetcher for torrents, a StandardPrefetcher otherwise."""
		if isTorrent(url, enclosure):
			prefetcher = TorrentPrefetcher(self, url)
		else:
			prefetcher = StandardPrefetcher(self, url)
		self.prefetchers.setdefault(rssUrl, {})[url] = prefetcher
		return prefetcher

	def torrentFilesFetched(self, enclosures, dom, rssUrl, writer):
		"""Continuation for TorrentFileFetcher: write the transformed feed
		XML to iTunes, then prefetch the newest NUM_PREFETCHES enclosures."""
		del self.torrentFileFetchers[rssUrl]
		writer.write(dom.documentElement.toxml(encoding='utf-8'))
		for _ in range(min(NUM_PREFETCHES, len(enclosures))):
			url, enclosure = enclosures.pop(0)
			# Skip problematic .torrent files
			# NOTE(review): record may be None if no feed-store entry was ever
			# created for this url — confirm TorrentFileFetcher always records
			# torrent enclosures before this runs.
			record = self.feedStore.get(url)
			if isTorrent(url, enclosure) and not record.torrentFile:
				continue
			self.getPrefetcher(url, enclosure, rssUrl)
		if rssUrl not in self.prefetchers:
			# No enclosures?!
			self.keepAlives.pop(rssUrl).stop()
			writer.close()
			return
		# We can't call prefetcher.start() in the loop above, since it might call
		# prefetchedFirst() immediately, which will cause problems if it's called before
		# all of the prefetchers are accumulated.
		for url, prefetcher in self.prefetchers[rssUrl].items():
			prefetcher.start(self.prefetchedFirst, url, enclosures, rssUrl, writer)
		# end for

	def prefetchedFirst(self, url, enclosures, rssUrl, writer):
		"""Continuation for the first batch of prefetchers; once the batch is
		empty, close the writer and start the remaining prefetches."""
		del self.prefetchers[rssUrl][url]
		if self.prefetchers[rssUrl]:
			return
		del self.prefetchers[rssUrl]
		# The first NUM_PREFETCHES enclosures have been fetched. Close the writer so
		# that iTunes doesn't stay waiting while we prefetch the remaining enclosures.
		self.keepAlives.pop(rssUrl).stop()
		writer.close()
		prefetchers = []
		for _ in range(min(NUM_PREFETCHES, len(enclosures))):
			url, enclosure = enclosures.pop(0)
			prefetcher = self.getPrefetcher(url, enclosure, rssUrl)
			prefetchers.append((url, prefetcher))
		# Don't call prefetcher.start() in the loop above, since it might call
		# prefetchedOther() immediately, which will remove keys from enclosures and
		# cause the loop iteration to fail.
		for url, prefetcher in prefetchers:
			prefetcher.start(self.prefetchedOther, url, enclosures, rssUrl)
		# end for

	def prefetchedOther(self, url, enclosures, rssUrl):
		"""Continuation for background prefetchers: each completion starts the
		next queued enclosure, keeping NUM_PREFETCHES downloads in flight."""
		del self.prefetchers[rssUrl][url]
		if not self.prefetchers[rssUrl]:
			del self.prefetchers[rssUrl]
		if not enclosures:
			return
		url, enclosure = enclosures.pop(0)
		prefetcher = self.getPrefetcher(url, enclosure, rssUrl)
		prefetcher.start(self.prefetchedOther, url, enclosures, rssUrl)


class TorrentFileFetcher(object):
	"""Downloads the .torrent files referenced by a feed's enclosures (at
	most NUM_PREFETCHES concurrently) and rewrites those enclosures to point
	at this proxy.

	Bookkeeping dicts, keyed by .torrent URL:
	  clients -- active RawHTTPClient connections
	  active  -- the enclosure DOM node for each in-flight download
	"""

	def __init__(self, feedFetcher, enclosures, host):
		super(TorrentFileFetcher, self).__init__()
		self.rawServer = feedFetcher.rawServer
		self.feedStore = feedFetcher.feedStore
		self.display = feedFetcher.display
		self.config = feedFetcher.config
		self.host = host
		# TODO: make sure these dicts stay clean
		# Only torrent enclosures are handled here; plain enclosures were
		# already transformed by FeedFetcher.transformEnclosure().
		self.enclosures = [(url, enclosure) for url, enclosure in enclosures if isTorrent(url, enclosure)]
		self.clients = {}
		self.active = {}

	def start(self, callback, *args, **kwargs):
		"""Start downloading queued .torrent files. callback(*args, **kwargs)
		fires when every queued file has been handled (immediately if nothing
		needs to be downloaded)."""
		while self.enclosures and len(self.clients) < NUM_PREFETCHES:
			url, enclosure = self.enclosures.pop(0)
			# Don't refetch the .torrent unless we encountered problems fetching
			# or the published date has changed.
			record = self.feedStore.get(url)
			if record and record.torrentFile and os.path.exists(record.torrentFile) and record.numFailures == 0 and record.published == getPubDate(enclosure):
				# Reuse the .torrent already on disk.
				f = file(record.torrentFile, 'rb')
				self.transformEnclosure(url, enclosure, f.read())
				f.close()
				continue
			# Don't bother to do anything with the enclosure if it has failed too often.
			if record and record.numFailures > MAX_FAILURE:
				# TODO: should we completely remove the <item> if we can't get the .torrent file?
				continue
			self.display.info('Fetching .torrent file: ' + url)
			dataBuffer = StringIO()
			client = RawHTTPClient.get(url, self.config.get('http_proxy'), self.rawServer, self.callback, url, dataBuffer, callback, args, kwargs)
			self.clients[url] = client
			self.active[url] = enclosure
		if not self.active:
			# Nothing in flight; everything was served from disk or skipped.
			callback(*args, **kwargs)
			return
		# end if

	def stop(self):
		"""Abort every in-flight .torrent download."""
		[client.close() for client in self.clients.values()]

	def fetchMore(self, callback, args, kwargs):
		"""Start the next queued download; fire callback once the queue and
		the in-flight set are both empty."""
		if not self.enclosures:
			if not self.active:
				callback(*args, **kwargs)
			return
		url, enclosure = self.enclosures.pop(0)
		self.display.info('Fetching .torrent file: ' + url)
		dataBuffer = StringIO()
		client = RawHTTPClient.get(url, self.config.get('http_proxy'), self.rawServer, self.callback, url, dataBuffer, callback, args, kwargs)
		self.clients[url] = client
		self.active[url] = enclosure

	def callback(self, event, data, url, dataBuffer, callback, args, kwargs):
		"""RawHTTPClient event handler for one .torrent download. Returns
		True to keep the connection open, False to drop it."""
		if event == CONNECT:
			return True
		if event == DATA:
			dataBuffer.write(data)
			return True
		if event == CLOSED:
			del self.clients[url]
			enclosure = self.active.pop(url)
			dataBuffer.seek(0, 0)
			self.transformEnclosure(url, enclosure, dataBuffer.read())
			self.fetchMore(callback, args, kwargs)
			return False
		if event == FAILURE:
			del self.clients[url]
			enclosure = self.active.pop(url)
			message = 'Download of .torrent file %s failed: %s' % (url, data)
			# TODO: should we completely remove the <item> if we can't get the .torrent file?
			self.failTorrent(url, enclosure, message)
			self.fetchMore(callback, args, kwargs)
			return False
		# end if

	def transformEnclosure(self, url, enclosure, data):
		"""Parse .torrent data, save it to disk, update the feed store, and
		rewrite the enclosure to advertise the torrent's largest file via
		this proxy. On parse failure the enclosure is marked failed."""
		# TODO: Remove this call from here:
		from BitTorrent.ConvertedMetainfo import set_filesystem_encoding
		set_filesystem_encoding('', None)
		try:
			metainfo = ConvertedMetainfo(bdecode(data))
		except BTFailure, e:
			message = 'Error reading .torrent file %s: %s' % (url, e)
			self.failTorrent(url, enclosure, message)
			return
		torrentFileName = utils.getTorrentPathName(url, metainfo.name_fs)
		torrentFile = file(torrentFileName, 'wb')
		torrentFile.write(data)
		torrentFile.close()
		self.updateFeed(url, enclosure, metainfo.total_bytes, torrentFileName)
		# Advertise the size of the torrent's largest file, since that is
		# what will be served to iTunes.
		largestInfo = LargestFileInfo(metainfo)
		enclosure.setAttribute('length', str(largestInfo.length))
		# Strip type from enclosure, since it will confuse iTunes. It will look at the
		# extension, so we need to copy that over.
		# We need to save a copy of the type, though, since we may not be able to tell
		# that this is a BitTorrent from the URL alone.
		type = enclosure.getAttribute('type')
		enclosure.removeAttribute('type')
		enclosure.setAttribute('orig-type', type)
		enclosure.setAttribute('url', utils.getTorrentURL(url, self.host, largestInfo.name))

	def updateFeed(self, url, enclosure, length, torrentFile):
		"""Create or update the feed-store record for url and return it.
		A length of 0 leaves any previously recorded length untouched."""
		channel = getChannel(enclosure)
		title = getTitle(enclosure)
		record = self.feedStore.get(url)
		if not record:
			record = Record(url, length, channel, title)
			self.feedStore[url] = record
		else:
			if length > 0:
				record.length = length
			record.channel = channel
			record.title = title
		record.torrentFile = torrentFile
		record.published = getPubDate(enclosure)
		record.lastSeen = time.time()
		return record

	def failTorrent(self, url, enclosure, message):
		"""Log the error and mark the enclosure's record as failed, clearing
		any stale torrentFile reference."""
		self.display.error(message)
		record = self.updateFeed(url, enclosure, 0, None)
		record.failed = True


class TorrentPrefetcher(object):
	"""Prefetches the first piece of a torrent enclosure so that partial
	data is on disk before iTunes requests it."""

	def __init__(self, feedFetcher, url):
		super(TorrentPrefetcher, self).__init__()
		self.rawServer = feedFetcher.rawServer
		self.torrentFetcher = feedFetcher.torrentFetcher
		self.display = feedFetcher.display
		self.config = feedFetcher.config
		self.url = url
		self.record = feedFetcher.feedStore[url]
		# Bug fix: close the .torrent file even if bdecode()/ConvertedMetainfo()
		# raises; previously the handle leaked on a corrupt .torrent file.
		torrentFile = file(self.record.torrentFile, 'rb')
		try:
			self.metainfo = ConvertedMetainfo(bdecode(torrentFile.read()))
		finally:
			torrentFile.close()

	def start(self, callback, *args, **kwargs):
		"""Prefetch the first piece unless it is unnecessary; callback is
		always invoked (immediately when skipping, otherwise via finished())."""
		# Skip when the enclosure was already sent, has failed too often, or
		# the first piece is already on disk and passes its hash check.
		if self.record.sent or self.record.numFailures > MAX_FAILURE or \
				(self.record.partialPath and BTHashCheck(self.config, self.metainfo, self.record.partialPath).checkPiece(0)):
			callback(*args, **kwargs)
			return
		self.display.info('Prefetching first piece of torrent: ' + self.record.url)
		self.torrentFetcher.prefetch(self.url, self.finished, callback, args, kwargs)

	def stop(self):
		"""Abort the prefetch."""
		self.torrentFetcher.stop(self.url)

	def finished(self, pathName, callback, args, kwargs):
		"""Completion handler for TorrentFetcher.prefetch().

		pathName will be None if the download failed.
		"""
		# No need to update record.numFailures, since the TorrentFetcher will do it
		# for us.
		# TODO: Update record.numFailures here and in iTorrent, not in TorrentFetcher
		if not pathName:
			message = 'Prefetch of first piece of torrent %s failed' % (self.record.url)
			self.display.error(message)
		else:
			self.record.partialPath = pathName
		callback(*args, **kwargs)


class StandardPrefetcher(object):
	"""Prefetches the first 2KB of a plain (non-torrent) enclosure so that a
	partial file exists on disk before iTunes requests it."""

	def __init__(self, feedFetcher, url):
		super(StandardPrefetcher, self).__init__()
		self.rawServer = feedFetcher.rawServer
		self.display = feedFetcher.display
		self.config = feedFetcher.config
		self.url = url
		self.record = feedFetcher.feedStore[url]
		self.client = None
		self.dataBuffer = StringIO()

	def start(self, callback, *args, **kwargs):
		"""Begin the prefetch; callback(*args, **kwargs) fires when done, or
		immediately if no prefetch is needed."""
		# Already delivered, or too many failures: nothing to do.
		if self.record.sent or self.record.numFailures > MAX_FAILURE:
			callback(*args, **kwargs)
			return
		# A partial file of at least 2KB already on disk also counts as done.
		partial = self.record.partialPath
		if partial and os.path.exists(partial) and os.path.getsize(partial) >= 2048:
			callback(*args, **kwargs)
			return
		self.display.info('Prefetching first 2KB of enclosure: ' + self.url)
		self.client = RawHTTPClient.get(self.url, self.config.get('http_proxy'), self.rawServer, self.callback, callback, args, kwargs)

	def stop(self):
		"""Abort the prefetch connection."""
		self.client.close()

	def callback(self, event, data, callback, args, kwargs):
		"""RawHTTPClient event handler; returns True while the connection
		should stay open, False to drop it."""
		if event == CONNECT:
			# The Content-Length header is more trustworthy than the feed's
			# declared enclosure length, so record it when present.
			if 'Content-Length' in data:
				self.record.length = int(data['Content-Length'])
			return True
		if event == DATA:
			self.dataBuffer.write(data)
			if self.dataBuffer.tell() < 2048:
				return True
			# We have the first 2KB; save it and stop downloading.
			self.saveDataBuffer()
			callback(*args, **kwargs)
			return False
		if event == CLOSED:
			self.saveDataBuffer()
			callback(*args, **kwargs)
			return False
		if event == FAILURE:
			self.record.numFailures += 1
			self.display.error('Prefetch of enclosure file %s failed: %s' % (self.url, data))
			callback(*args, **kwargs)
			return False
		# end if

	def saveDataBuffer(self):
		"""Write the buffered bytes to the partial-file path and reset the
		failure count."""
		partialFile = file(utils.getPartialPathName(self.url), 'wb')
		partialFile.write(self.dataBuffer.getvalue())
		partialFile.close()
		self.record.numFailures = 0


def getText(node):
	"""Concatenate the direct text children of node, UTF-8 encoded and
	stripped of surrounding whitespace."""
	pieces = [child.data for child in node.childNodes if child.nodeType == child.TEXT_NODE]
	return ''.join(pieces).encode('utf8').strip()

def isTorrent(url, enclosure):
	"""Return True if the enclosure appears to reference a .torrent file.

	Checks, in order: the declared MIME type, the saved original type
	('orig-type', written when the enclosure was transformed), the URL's
	extension, and finally the extension of the URL's base path.
	"""
	mimeType = enclosure.getAttribute('type').encode('utf8')
	if mimeType == TORRENT_TYPE:
		return True
	mimeType = enclosure.getAttribute('orig-type').encode('utf8')
	if mimeType == TORRENT_TYPE:
		return True
	ext = os.path.splitext(url)[1]
	if ext == TORRENT_EXT:
		return True
	# Bug fix: URL_BASE_RE may not match every URL; previously a failed match
	# raised AttributeError on match.group(1).
	match = utils.URL_BASE_RE.match(url)
	if match:
		ext = os.path.splitext(match.group(1))[1]
		if ext == TORRENT_EXT:
			return True
	return False

def getChannel(enclosure):
	"""Return the title text of the channel that contains enclosure
	(enclosure's grandparent node)."""
	channel = enclosure.parentNode.parentNode
	titleNode = channel.getElementsByTagName('title')[0]
	return getText(titleNode)

def getTitle(enclosure):
	"""Return the title text of the <item> that owns enclosure, or None if
	the item has no <title> element."""
	titles = enclosure.parentNode.getElementsByTagName('title')
	if titles:
		return getText(titles[0])
	return None

def getPubDate(enclosure):
	"""Return the owning item's <pubDate> as a Unix timestamp, or None if
	the item has no pubDate or the date cannot be parsed."""
	item = enclosure.parentNode
	pubDates = item.getElementsByTagName('pubDate')
	if not pubDates:
		return None
	parsed = parsedate(getText(pubDates[0]))
	if parsed is None:
		# Bug fix: parsedate() returns None for malformed dates; previously
		# time.mktime(None) raised a TypeError and aborted feed processing.
		return None
	return time.mktime(parsed)
