import os, random, sys, traceback, time, urllib2, urllib

from multiprocessing import Process, JoinableQueue, Event
from datetime import datetime, timedelta

import feedparser

import lib, sql

from xdh_logging import initLog, LogWriter
from settings import settings
from cache import cache
from nothing import Nothing
from tags import get_length
from podcast_track import PodcastTrack, PodcastTrackError
from MPD import MPD

# Module-level logger for all podcast worker processes.
log = initLog("podcast")

# Sentinel queue item telling PodUpdater to build the initial play
# schedule; deliberately unlikely to collide with any real filename.
INITIALIZE = "-/<({[|INITIALIZE|]})>\-"

class PodCalculator(Process):
	"""Worker pool that recalculates per-feed track-length statistics.

	Feed titles are pushed onto the class-level ``Q``; each worker pulls
	a title, recomputes the harmonic/arithmetic/geometric mean lengths
	of that feed's tracks, writes the results back to the feed row and
	refreshes the cached global podcast mean/gap values.
	"""

	Q = JoinableQueue()	# feed titles awaiting recalculation
	started = Event()	# set while the worker pool is considered active

	@classmethod
	def startup(cls):
		"""Spawn ``settings.data.num_processes`` workers unless already running."""
		if not cls.is_running():
			cls.started.set()
			cls.workers = []
			for i in xrange(settings.data.num_processes):
				worker = Nothing()
				setattr(worker, "running", Event())
				setattr(worker, "process", cls(worker.running))
				cls.workers.append(worker)

	@classmethod
	def is_running(cls):
		"""Return True when work is queued or any live worker is mid-task."""
		if not hasattr(cls, "workers"):
			return False

		# NOTE(review): this evaluates True whenever the queue is non-empty
		# OR ``started`` is clear -- confirm the second half is intentional.
		running = not (cls.Q.empty() and cls.started.is_set())

		for worker in cls.workers:
			if worker.process.is_alive():
				running |= worker.running.is_set()

		return running

	@classmethod
	def shutdown(cls):
		"""Send a STOP sentinel per live worker, drain queues, reset state."""
		if not hasattr(cls, "workers"):
			return

		for worker in cls.workers:
			if worker.process.is_alive():
				log.debug(u"Sending stop to {name} Queue.".format(name = cls.__name__))
				cls.Q.put(lib.STOP)

		cls.Q.join()

		for worker in cls.workers:
			if worker.process.is_alive():
				worker.process.join()

		del cls.workers

		sql.Q.join()

		cls.started.clear()


	def __init__(self, running):
		# Name the class explicitly: super(self.__class__, ...) recurses
		# forever if this class is ever subclassed.
		super(PodCalculator, self).__init__()

		self.running = running

		self.start()

	def initialize(self, feed):
		"""Load the feed row (url/author) for ``feed`` and reset stats."""
		self.feed = feed
		self.stats = []

		for feed in sql.PodcastFeedData.get(self.feed):
			self.url = feed.url
			self.author = feed.author


	def process(self):
		"""Recompute the mean track lengths for the current feed."""
		sql.Q.join()

		lengths = sql.PodcastTrackData.get_lengths(self.feed)
		self.count = len(lengths)
		start = datetime.today()
		self.hMean, self.aMean, self.gMean = lib.getMeans(lengths)
		diff = datetime.today() - start
		self.write_stat("Recalculate", "duration", diff.total_seconds())
		self.write_stat("Recalculate", "count", self.count)
		self.write_stat("Recalculate", "amean", self.aMean)
		self.write_stat("Recalculate", "hmean", self.hMean)
		self.write_stat("Recalculate", "gmean", self.gMean)

		log.info("<{podcast}> Recalculated, {num} tracks for an average of {sec} seconds.".format(podcast = self.feed, num = self.count, sec = self.gMean))

	def update_db(self):
		"""Write the recalculated means back and refresh cached globals."""
		data = [{
			"title": self.feed,
			"url": self.url,
			"author": self.author,
			"num_tracks": self.count,
			"amean": self.aMean,
			"hmean": self.hMean,
			"gmean": self.gMean,
		}]

		sql.process(sql.PodcastFeedData, "update", data)

		sql.Q.join()

		# The original issued get_weighted_length() twice in a row and
		# stored it under the same key both times; one query suffices.
		cache["podcast_mean"] = sql.PodcastFeedData.get_weighted_length()
		cache["podcast_gap"] = sql.PodcastFeedData.get_podcast_gap()

	def finish_stats(self):
		"""Flush the accumulated stat rows to the database."""
		sql.process(sql.PodFeedStatData, "insert", self.stats)

	def write_stat(self, category, step, value):
		"""Queue one stat row for the current feed; non-positive values are skipped."""
		if value > 0:
			data = {
				'timestamp': datetime.today(),
				'title': self.feed,
				'category': category,
				'step': step,
				'value': value,
			}

			self.stats.append(data)

	def run(self):
		"""Worker loop: consume feed titles until lib.STOP arrives."""
		try:
			while True:
				item = self.__class__.Q.get()
				if item == lib.STOP:
					self.__class__.Q.task_done()
					log.debug("Got stop from {name} Queue.".format(name = self.__class__.__name__))
					break

				self.running.set()

				if not self.__class__.started.is_set():
					self.__class__.started.set()

				self.initialize(item)

				self.process()

				self.update_db()
				self.finish_stats()

				self.__class__.Q.task_done()

				self.running.clear()
		except Exception as e:
			log.error("{name}: {msg}".format(name = e.__class__.__name__, msg = e))
			log.error("".join(traceback.format_exception(*sys.exc_info())))


class PodUpdater(Process):
	"""Worker that schedules the next podcast play.

	Queue items are either the INITIALIZE sentinel (compute the first
	schedule) or the filename of a track that has just been played
	(record the play, trigger a feed recalculation and schedule the
	next play date/track in the cache).
	"""

	Q = JoinableQueue()	# filenames / INITIALIZE sentinel awaiting handling
	started = Event()	# set while the worker pool is considered active
	ready = Event()		# set once do_first() has primed the schedule

	@classmethod
	def startup(cls):
		"""Spawn the single updater worker unless already running."""
		if not cls.is_running():
			cls.started.set()
			cls.workers = []
			for i in xrange(1):
				worker = Nothing()
				setattr(worker, "running", Event())
				setattr(worker, "process", cls(worker.running))
				cls.workers.append(worker)


	@classmethod
	def do_first(cls):
		"""Prime the schedule: initialize it, or re-queue a stale track."""
		if "podcast_date" not in cache or "podcast_track" not in cache or "podcast_played" not in cache:
			cls.Q.put(INITIALIZE)

		elif datetime.today() > cache["podcast_date"] and cache["podcast_track"] not in MPD and cache["podcast_played"] == True:
			# NOTE(review): the flag is already True when this branch runs,
			# so the assignment below is a no-op -- confirm whether the test
			# or the assignment was meant to use False.
			cache["podcast_played"] = True
			cls.Q.put(cache["podcast_track"])

	@classmethod
	def is_running(cls):
		"""Return True when work is queued or any live worker is mid-task."""
		if not hasattr(cls, "workers"):
			return False

		# NOTE(review): this evaluates True whenever the queue is non-empty
		# OR ``started`` is clear -- confirm the second half is intentional.
		running = not (cls.Q.empty() and cls.started.is_set())

		for worker in cls.workers:
			if worker.process.is_alive():
				running |= worker.running.is_set()

		return running

	@classmethod
	def shutdown(cls):
		"""Send a STOP sentinel per live worker, drain queues, reset state."""
		if not hasattr(cls, "workers"):
			return

		for worker in cls.workers:
			if worker.process.is_alive():
				log.debug(u"Sending stop to {name} Queue.".format(name = cls.__name__))
				cls.Q.put(lib.STOP)

		cls.Q.join()

		for worker in cls.workers:
			if worker.process.is_alive():
				worker.process.join()

		del cls.workers

		sql.Q.join()

		cls.started.clear()


	def __init__(self, running):
		# Name the class explicitly: super(self.__class__, ...) recurses
		# forever if this class is ever subclassed.
		super(PodUpdater, self).__init__()

		self.running = running

		self.start()

	def initialize(self, filename = None):
		"""Record whether this is the initial run; wrap a played track."""
		self.initial = filename is None
		if not self.initial:
			self.track = PodcastTrack(filename)

	def process(self):
		"""Mark the played track (if any) and compute the next play date."""
		if not self.initial:
			# Re-write the track row with a fresh "played" timestamp.
			for track in sql.PodcastTrackData.get(self.track.filename):
				data = [{
					"filename": track.filename,
					"feed": track.feed,
					"author": track.author,
					"title": track.title,
					"added": track.added,
					"published": track.published,
					"played": datetime.today(),
					"length": track.length,
				}]
			sql.process(sql.PodcastTrackData, "update", data)

			try:
				PodCalculator.Q.put(self.track.feed)

				PodCalculator.Q.join()
				sql.Q.join()
			except PodcastTrackError:
				pass

			shift = timedelta(seconds = sql.PodcastFeedData.get_podcast_gap())

		else:
			# First run: shift by a random fraction of the gap so podcasts
			# do not always land at the same offset.
			shift = timedelta(seconds = (random.random() * sql.PodcastFeedData.get_podcast_gap()))

		can_do = True
		new_date = datetime.today() + shift
		if new_date.strftime("%a") in settings.podcast.days:
			old_hour = new_date.hour
			if old_hour not in settings.podcast.days[new_date.strftime("%a")]:
				if old_hour > settings.podcast.days[new_date.strftime("%a")][-1]:
					# Past the last allowed hour today: advance to the next
					# allowed day, then to its first allowed hour.
					days = 1
					while (new_date + timedelta(days = days)).strftime("%a") not in settings.podcast.days and days <= settings.podcast.max:
						days += 1
					if days > settings.podcast.max:
						# No allowed day within range; mirrors the guard in
						# the else branch below (previously this fell
						# through to a KeyError on the hour lookup).
						can_do = False
					else:
						new_date += timedelta(days = days)
						hour_diff = settings.podcast.days[new_date.strftime("%a")][0] - new_date.hour
						new_date += timedelta(seconds = hour_diff * 60 * 60)
				else:
					# Bump forward to the next allowed hour today.
					ndx = 0
					while settings.podcast.days[new_date.strftime("%a")][ndx] < new_date.hour:
						ndx += 1
					# datetime attributes are read-only; the original
					# ``new_date.hour = ...`` raised AttributeError.
					new_date = new_date.replace(hour = settings.podcast.days[new_date.strftime("%a")][ndx])
			else:
				pass # This means that the date is within the podcast day/hour range.
		else:
			days = 1
			while (new_date + timedelta(days = days)).strftime("%a") not in settings.podcast.days and days <= settings.podcast.max:
				days += 1
			if days > settings.podcast.max:
				can_do = False
			else:
				new_date += timedelta(days = days)
				hour_diff = settings.podcast.days[new_date.strftime("%a")][0] - new_date.hour
				new_date += timedelta(seconds = hour_diff * 60 * 60)

		if can_do:
			cache["podcast_date"] = new_date
			cache["podcast_track"] = sql.PodcastTrackData.next()
			cache["podcast_played"] = False
		else:
			# Scheduling failed: drop any stale schedule entries.
			if "podcast_date" in cache:
				del cache["podcast_date"]
			if "podcast_track" in cache:
				del cache["podcast_track"]
			if "podcast_played" in cache:
				del cache["podcast_played"]

	def run(self):
		"""Worker loop: consume queue items until lib.STOP arrives."""
		try:
			while True:
				item = self.__class__.Q.get()
				if item == lib.STOP:
					self.__class__.Q.task_done()
					log.debug("Got stop from {name} Queue.".format(name = self.__class__.__name__))
					break

				self.running.set()

				if not self.__class__.started.is_set():
					self.__class__.started.set()

				if item == INITIALIZE:
					self.initialize()
				else:
					self.initialize(item)

				self.process()

				self.__class__.Q.task_done()

				self.running.clear()
		except Exception as e:
			log.error("{name}: {msg}".format(name = e.__class__.__name__, msg = e))
			log.error("".join(traceback.format_exception(*sys.exc_info())))

class PodDownloader(Process):
	"""Worker that downloads podcast enclosure files to disk.

	Queue items are ``(feed, url, published, author, title, is_last)``
	tuples produced by PodWorker; ``is_last`` triggers a PodCalculator
	pass for the feed once its final file is handled.
	"""

	Q = JoinableQueue()	# download job tuples awaiting processing
	started = Event()	# set while the worker pool is considered active

	@classmethod
	def startup(cls):
		"""Spawn the single downloader worker unless already running."""
		if not cls.is_running():
			cls.started.set()
			cls.workers = []
			for i in xrange(1):
				worker = Nothing()
				setattr(worker, "running", Event())
				setattr(worker, "process", cls(worker.running))
				cls.workers.append(worker)

	@classmethod
	def is_running(cls):
		"""Return True when work is queued or any live worker is mid-task."""
		if not hasattr(cls, "workers"):
			return False

		# NOTE(review): this evaluates True whenever the queue is non-empty
		# OR ``started`` is clear -- confirm the second half is intentional.
		running = not (cls.Q.empty() and cls.started.is_set())

		for worker in cls.workers:
			if worker.process.is_alive():
				running |= worker.running.is_set()

		return running

	@classmethod
	def shutdown(cls):
		"""Send a STOP sentinel per live worker, drain queues, reset state."""
		if not hasattr(cls, "workers"):
			return

		for worker in cls.workers:
			if worker.process.is_alive():
				log.debug(u"Sending stop to {name} Queue.".format(name = cls.__name__))
				cls.Q.put(lib.STOP)

		cls.Q.join()

		for worker in cls.workers:
			if worker.process.is_alive():
				worker.process.join()

		del cls.workers

		sql.Q.join()

		cls.started.clear()


	def __init__(self, running):
		# Name the class explicitly: super(self.__class__, ...) recurses
		# forever if this class is ever subclassed.
		super(PodDownloader, self).__init__()

		self.running = running

		self.start()

	def get_filename_from_url(self, href):
		"""Return the URL-unquoted basename of ``href``'s path component."""
		urlpath = urllib2.urlparse.urlparse(href).path
		unquoted = urllib.unquote_plus(urllib2.unquote(urlpath)).decode("utf8")
		return unquoted.split('/')[-1]

	def path_join(self, orig, ext):
		"""Join two path parts, creating the resulting directory if needed."""
		path = os.path.join(orig, ext)
		if not os.path.exists(path):
			os.mkdir(path)
		return path

	@property
	def path(self):
		"""Directory that holds this feed's files (created on demand)."""
		path = self.path_join(settings.path.music_base, settings.path.podcast)
		return lib.fix_filename(self.path_join(path, self.feed), reverse = True)

	def initialize(self, feed, url, published, author, title, is_last):
		"""Stash one download job's metadata and reset its stat buffer."""
		self.feed = feed
		self.url = url
		self.published = published
		self.is_last = is_last
		self.author = author
		self.title = title
		self.filename = os.path.join(self.path, self.get_filename_from_url(self.url))
		self.filepath = lib.fix_filename(self.filename)
		self.stats = []

	def process(self):
		"""Fetch the enclosure, skipping files already fully downloaded.

		Size-compares against the remote Content-Length to detect partial
		downloads; any failure is logged as a warning and the job dropped.
		"""
		try:
			remote = urllib2.urlopen(self.url)
			if os.path.exists(self.filepath):
				remote_size = int(remote.info().getheaders("Content-Length")[0])
				local_size = os.stat(self.filepath).st_size

				if remote_size == local_size:
					log.info(u"<{podcast}> file \"{filename}\" already exists.".format(podcast = self.feed, filename = self.filename))
					self.update_db()
					return
				else:
					log.info(u"<{podcast}> incomplete file \"{filename}\" being redownloaded.".format(podcast = self.feed, filename = self.filename))

			start = datetime.today()
			with open(self.filepath, "wb") as local:
				local.write(remote.read())
			diff = datetime.today() - start
			self.write_stat(self.filename, "download duration", diff.total_seconds())

		except Exception as e:
			log.warning(u"<{podcast}> file \"{filename}\" download from \"{url}\" caused {errtype}: {error}".format(podcast = self.feed, filename = self.filename, url = self.url, errtype = e.__class__.__name__, error = e))
			return

		self.update_db()


	def update_db(self):
		"""Insert the track row unless the filename is already recorded."""
		if sql.PodcastTrackData.exists(self.filename):
			pass
		else:
			sql.process(sql.PodcastTrackData, "insert", [{
				"filename": self.filename,
				"feed": self.feed,
				"title": self.title,
				"author": self.author,
				"added": datetime.today(),
				"published": self.published,
				"played": None,
				"length": get_length(lib.fix_filename(self.filepath)),
			}])

	def finish_stats(self):
		"""Flush the accumulated stat rows to the database."""
		sql.process(sql.PodFeedStatData, "insert", self.stats)

	def write_stat(self, category, step, value):
		"""Queue one stat row for the current feed; non-positive values are skipped."""
		if value > 0:
			data = {
				'timestamp': datetime.today(),
				'title': self.feed,
				'category': category,
				'step': step,
				'value': value,
			}

			self.stats.append(data)

	def run(self):
		"""Worker loop: consume download jobs until lib.STOP arrives."""
		try:
			while True:
				item = self.__class__.Q.get()
				if item == lib.STOP:
					self.__class__.Q.task_done()
					log.debug("Got stop from {name} Queue.".format(name = self.__class__.__name__))
					break

				self.running.set()

				if not self.__class__.started.is_set():
					self.__class__.started.set()

				log.info(u"Downloading {url} for podcast <{feed}>.".format(feed = item[0], url = item[1]))

				self.initialize(*item)

				self.process()

				self.finish_stats()

				# The last file of a feed triggers a stats recalculation.
				if self.is_last:
					PodCalculator.Q.put(self.feed)

				self.__class__.Q.task_done()

				self.running.clear()
		except Exception as e:
			log.error("{name}: {msg}".format(name = e.__class__.__name__, msg = e))
			log.error("".join(traceback.format_exception(*sys.exc_info())))


class PodWorker(Process):
	"""Worker pool that parses podcast feeds and queues new downloads.

	Queue items are ``(title, url)`` tuples. Each worker parses the feed
	with feedparser, walks its entries, queues missing enclosures on
	PodDownloader, deletes stale files and prunes old database rows.
	"""

	Q = JoinableQueue()	# (title, url) tuples awaiting processing
	started = Event()	# set while the worker pool is considered active

	@classmethod
	def startup(cls):
		"""Spawn ``settings.data.num_processes`` workers unless already running."""
		if not cls.is_running():
			cls.started.set()
			cls.workers = []
			for i in xrange(settings.data.num_processes):
				worker = Nothing()
				setattr(worker, "running", Event())
				setattr(worker, "process", cls(worker.running))
				cls.workers.append(worker)

	@classmethod
	def is_running(cls):
		"""Return True when work is queued or any live worker is mid-task."""
		if not hasattr(cls, "workers"):
			return False

		# NOTE(review): this evaluates True whenever the queue is non-empty
		# OR ``started`` is clear -- confirm the second half is intentional.
		running = not (cls.Q.empty() and cls.started.is_set())

		for worker in cls.workers:
			if worker.process.is_alive():
				running |= worker.running.is_set()

		return running

	@classmethod
	def shutdown(cls):
		"""Send a STOP sentinel per live worker, drain queues, reset state."""
		if not hasattr(cls, "workers"):
			return

		for worker in cls.workers:
			if worker.process.is_alive():
				log.debug(u"Sending stop to {name} Queue.".format(name = cls.__name__))
				cls.Q.put(lib.STOP)

		cls.Q.join()

		for worker in cls.workers:
			if worker.process.is_alive():
				worker.process.join()

		del cls.workers

		sql.Q.join()

		cls.started.clear()


	def __init__(self, running):
		# Name the class explicitly: super(self.__class__, ...) recurses
		# forever if this class is ever subclassed.
		super(PodWorker, self).__init__()

		self.running = running

		self.start()

	def get_podcast_author(self):
		"""Return the feed-level author, or None when absent."""
		if "author" in self.feed["feed"]:
			return self.feed["feed"]["author"]
		else:
			return None

	def get_published_parsed(self, item):
		"""Set and return self.date from the entry's published timestamp.

		Returns None when dates are ignored by config or unavailable.
		"""
		if hasattr(self.config, "ignore_dates") and self.config.ignore_dates == True:
			self.date = None
		elif "published_parsed" in item:
			self.date = datetime.fromtimestamp(time.mktime(item["published_parsed"]))
		else:
			self.date = None

		return self.date

	def get_author(self, item):
		"""Return the entry author, falling back to the feed author."""
		if "author" in item:
			return item["author"]
		else:
			return self.podcast_author

	def get_title(self, item):
		"""Return the entry title, synthesising one from the date if needed."""
		if "title" in item:
			return item["title"]
		elif self.date is not None:
			# The original referenced an undefined local ``date`` here,
			# raising NameError; get_published_parsed stores it on self.
			return "{title} for {date}".format(title = self.podcast_title, date = self.date.strftime("%d %b %Y"))
		else:
			return self.podcast_title

	def get_filename_from_url(self, href):
		"""Return the URL-unquoted basename of ``href``'s path component."""
		urlpath = urllib2.urlparse.urlparse(href).path
		unquoted = urllib.unquote_plus(urllib2.unquote(urlpath)).decode("utf8")
		return unquoted.split('/')[-1]


	def get_file(self, href, files, published, author, title):
		"""Examine one entry link; queue it for download when new.

		Returns the local filename for music links, None otherwise.
		``files`` is accepted for interface compatibility but unused.
		"""
		if settings.re.music_ext.search(href.lower()) is None:
			return None

		else:
			filename = os.path.join(self.path, self.get_filename_from_url(href))
			filepath = lib.fix_filename(filename)

			if not sql.PodcastTrackData.exists(filename):
				self.to_download.append((self.title, href, published, author, title))

			self.valid_files.append((os.path.basename(filename), published))

			if not sql.PodcastTrackData.is_played(lib.fix_filename(filepath, reverse = True)):
				self.count += 1
			return filename

	def cleanup_files(self):
		"""Delete on-disk files that are no longer part of the feed."""
		actual_files = os.listdir(lib.fix_filename(self.path))
		self.bad_files = (set(actual_files) - set([filename for filename, published in self.valid_files])) | set(sql.PodcastTrackData.get_deletable_files(self.title))

		self.del_count = len(self.bad_files)

		for filename in self.bad_files:
			filepath = lib.fix_filename(os.path.join(self.path, filename))
			if not os.path.isdir(filepath):
				try:
					os.remove(filepath)
					log.info(u"<{podcast}> old file \"{filename}\" removed.".format(podcast = self.title, filename = filename))
				except OSError:
					# Best effort: a vanished/locked file is not fatal.
					pass

	def get_entry(self, item):
		"""Process one feed entry, collecting its downloadable files."""
		files = []

		date = self.get_published_parsed(item)

		author = self.get_author(item)

		title = self.get_title(item)

		for link in item["links"]:
			filename = self.get_file(link["href"], files, date, author, title)
			if filename is not None:
				files.append(filename)

		if len(files) == 0:
			return None

		self.entry_count += 1
		self.file_count += len(files)

	def update_db(self):
		"""Ensure the feed row exists and prune expired track rows."""
		if not sql.PodcastFeedData.exists(self.title):
			sql.process(sql.PodcastFeedData, "insert", [{
				"title": self.title,
				"author": self.podcast_author,
				"url": self.url,
				"num_tracks": 0,
				"amean": 0,
				"hmean": 0,
				"gmean": 0,
			}]) # Fills in an incomplete entry so that individual tracks can be loaded.

		dels = [{"filename": filename} for filename in sql.PodcastTrackData.gen_old_tracks(self.title)]

		self.db_del_count = len(dels)

		if len(dels) > 0:
			sql.process(sql.PodcastTrackData, "delete", dels)
			PodCalculator.Q.put(self.title)


	def initialize(self, title, url):
		"""Parse the feed and reset all per-feed working state."""
		self.title = title
		self.url = url
		self.stats = []

		log.info(u"<{podcast}> parsing feed from \"{url}\".".format(podcast = self.title, url = url))
		start = datetime.today()
		self.feed = feedparser.parse(url)
		diff = datetime.today() - start
		self.write_stat("Feed", "duration", diff.total_seconds())
		self.write_stat("Feed", "number of items", len(self.feed["items"]))

		self.date = datetime.today()
		self.max_date = datetime.today() - timedelta(days = settings.podcast.max)

		self.valid_files = []
		self.bad_files = set()

		self.count = 0
		for feed in sql.PodcastFeedData.get(self.title):
			self.count = feed.num_tracks

		# Locate this feed's config entry across all podcast categories.
		self.config = [feed for cat in settings.podcast.categories for feed in getattr(settings.podcast.pods, cat) if feed.url == url][0]

		self.podcast_author = self.get_podcast_author()

		self.to_download = []

		self.file_count = 0
		self.del_count = 0
		self.db_del_count = 0
		self.entry_count = 0

	def process(self):
		"""Walk the feed's entries, then clean up and queue downloads."""
		item_iter = iter(self.feed["items"])

		try:
			# Consume entries while they are recent enough (dated feeds)
			# or until enough unplayed tracks exist (undated feeds).
			while (self.date is not None and self.date > self.max_date) or (self.date is None and self.count < settings.podcast.max):
				self.get_entry(item_iter.next())

		except StopIteration:
			pass

		if self.entry_count > 0:
			self.cleanup_files()

		self.update_db()

		# Tag the final job so PodDownloader can trigger a recalculation.
		for item in self.to_download:
			is_last = item == self.to_download[-1]
			item = list(item)
			item.append(is_last)
			PodDownloader.Q.put(tuple(item))


	def path_join(self, orig, ext):
		"""Join two path parts, creating the resulting directory if needed."""
		path = os.path.join(orig, ext)
		if not os.path.exists(path):
			os.mkdir(path)
		return path

	@property
	def path(self):
		"""Directory that holds this feed's files (created on demand)."""
		path = self.path_join(settings.path.music_base, settings.path.podcast)
		return lib.fix_filename(self.path_join(path, self.title), reverse = True)

	def run(self):
		"""Worker loop: consume (title, url) tuples until lib.STOP arrives."""
		try:
			while True:
				item = self.__class__.Q.get()
				if item == lib.STOP:
					self.__class__.Q.task_done()
					log.debug("Got stop from {name} Queue.".format(name = self.__class__.__name__))
					break

				self.running.set()

				if not self.__class__.started.is_set():
					self.__class__.started.set()

				log.info(u"Starting processing for podcast: <{title}> {url}.".format(title = item[0], url = item[1]))

				self.initialize(*item)

				self.process()

				self.finish_stats()

				log.info(u"Finished processinig for podcast: <{title}> {url}.".format(title = item[0], url = item[1]))

				self.__class__.Q.task_done()

				self.running.clear()
		except Exception as e:
			log.error("{name}: {msg}".format(name = e.__class__.__name__, msg = e))
			log.error("".join(traceback.format_exception(*sys.exc_info())))

	def finish_stats(self):
		"""Record summary counters and flush all stat rows to the database."""
		self.write_stat("Files", "count", self.file_count)
		self.write_stat("Files", "deleted", self.del_count)
		self.write_stat("Database", "delete", self.db_del_count)
		self.write_stat("Entry", "count", self.entry_count)

		sql.process(sql.PodFeedStatData, "insert", self.stats)

	def write_stat(self, category, step, value):
		"""Queue one stat row for the current feed; non-positive values are skipped."""
		if value > 0:
			data = {
				'timestamp': datetime.today(),
				'title': self.title,
				'category': category,
				'step': step,
				'value': value,
			}

			self.stats.append(data)


class Podcast(object):
	"""Facade coordinating the podcast worker pools.

	Drives feed regeneration (PodWorker/PodDownloader), schedule
	maintenance (PodUpdater/PodCalculator) and playlist injection into
	MPD based on the cached schedule.
	"""

	def __init__(self):
		self.started = False	# True while a regeneration pass is in flight

	def __enter__(self):
		# The calculator/updater pools run for the facade's whole lifetime.
		PodCalculator.startup()
		PodUpdater.startup()
		return self

	def __exit__(self, exc_type, exc_value, exc_tb):
		PodWorker.shutdown()
		PodDownloader.shutdown()
		PodUpdater.shutdown()
		PodCalculator.shutdown()

		sql.Q.join()

		if exc_type is not None:
			log.error("{name}: {msg}".format(name = exc_value.__class__.__name__, msg = exc_value))
			log.error("".join(traceback.format_exception(*sys.exc_info())))

	def calc(self):
		"""Queue every feed for a stats recalculation, in random order."""
		feeds = [podcast.title for group in settings.podcast.pods.__dict__.values() for podcast in group]
		random.shuffle(feeds)
		for feed in feeds:
			PodCalculator.Q.put(feed)

	def start(self, feeds = None):
		"""Begin a regeneration pass over all feeds (or just ``feeds``)."""
		PodWorker.startup()
		PodDownloader.startup()

		if feeds is None:
			pods = [(podcast.title, podcast.url) for group in settings.podcast.pods.__dict__.values() for podcast in group]
		else:
			pods = [(podcast.title, podcast.url) for group in settings.podcast.pods.__dict__.values() for podcast in group if podcast.title in feeds]

		random.shuffle(pods)

		for pod in pods:
			PodWorker.Q.put(pod)

		PodWorker.started.set()
		PodDownloader.started.set()
		self.started = True
		log.info("Starting podcast feed processing.")

	def stop(self):
		"""Finish a regeneration pass and refresh cached schedule state."""
		PodWorker.shutdown()
		PodDownloader.shutdown()

		log.info("Podcast feed processing completed.")
		cache["podcast_regenerated"] = datetime.today()
		# The original queried get_weighted_length() twice in a row and
		# stored the same key both times; a single query suffices.
		cache["podcast_mean"] = sql.PodcastFeedData.get_weighted_length()
		cache["podcast_gap"] = sql.PodcastFeedData.get_podcast_gap()
		MPD.update = True
		self.started = False
		if not PodUpdater.ready.is_set():
			PodUpdater.do_first()
			PodUpdater.ready.set()

	@property
	def playable(self):
		"""True when the current day/hour is inside the allowed window."""
		ret = False
		date = datetime.today()
		if date.strftime("%a") in settings.podcast.days and date.hour in settings.podcast.days[date.strftime("%a")]:
			ret = True
		return ret

	@property
	def can_regenerate(self):
		"""True when a (daily) regeneration pass should be started."""
		if self.started:
			ret = False
		elif "podcast_regenerated" not in cache:
			ret = True
		elif datetime.today().strftime("%Y-%m-%d") != cache["podcast_regenerated"].strftime("%Y-%m-%d"):
			ret = True
#		elif datetime.today().hour in settings.podcast.regenerate:
#			ret = True
		else:
			ret = False

		return ret

	@property
	def is_starting(self):
		"""True when the updater still needs its initial do_first() kick."""
		if "podcast_regenerated" not in cache:
			ret = False
		elif self.started:
			ret = False
		elif PodUpdater.ready.is_set():
			ret = False
		else:
			ret = True

		return ret

	@property
	def regeneration_complete(self):
		"""True when a started pass has fully drained its workers."""
		if not self.started:
			ret = False
		elif self.running:
			ret = False
		else:
			ret = True

		return ret

	@property
	def can_inject(self):
		"""True when the scheduled track should be injected into MPD now."""
		if MPD.update:
			ret = False
		elif self.started:
			ret = False
		elif not self.playable:
			ret = False
		elif "podcast_date" not in cache:
			ret = False
		elif "podcast_track" not in cache:
			ret = False
		elif "podcast_played" not in cache:
			ret = False
		elif cache["podcast_played"] == True:
			ret = False
		elif datetime.today() < cache["podcast_date"]:
			ret = False
		elif cache["podcast_track"] in MPD:
			ret = False
		else:
			ret = True

		return ret

	def check(self):
		"""Periodic tick: regenerate, finish, or inject as appropriate."""
		if self.can_regenerate:
			self.start()

		elif self.is_starting:
			PodUpdater.do_first()
			PodUpdater.ready.set()

		if self.regeneration_complete:
			self.stop()

		if self.can_inject:
			if len(MPD.all_files.find("file", cache["podcast_track"])) > 0:
				track = PodcastTrack(cache["podcast_track"])
				if "nextsong" in MPD.status:
					MPD.insert(MPD.status["nextsong"], track.filename)
				else:
					MPD.append(track.filename)
				log.info(u"Injected Podcast <{title}> track \"{filename}\" into playlist.".format(title = lib.safe_unicode(track.feed), filename = lib.safe_unicode(track.filename)))
				cache["podcast_played"] = True
			else:
				# MPD no longer knows the file; treat it as played so the
				# updater schedules a replacement.
				self.played(cache["podcast_track"])

	def played(self, filename):
		"""Report that ``filename`` finished playing."""
		self.__class__.putPlayedQ(filename)

	@classmethod
	def putPlayedQ(cls, filename):
		"""Hand a played filename to the updater queue."""
		PodUpdater.Q.put(filename)

	def is_podcast(self, filename):
		"""True when ``filename`` is a known podcast track."""
		return sql.PodcastTrackData.exists(filename)

	@property
	def running(self):
		"""True while either regeneration pool still has work."""
		return PodWorker.is_running() | PodDownloader.is_running()

	@classmethod
	def process(cls, feeds = None):
		"""Run one complete regeneration pass, blocking until finished."""
		with LogWriter():
			with sql.RowWriter():
				with cls() as p:
					p.start(feeds)
					while p.running:
						time.sleep(1)
					p.stop()

