from __future__ import division

import gc
import math
import os
import Queue
import random
import shelve
import shutil
import sys
import time
import traceback
from datetime import datetime
from multiprocessing import JoinableQueue, Process, Event, Pipe

import lib
import sql
from cache import cache
from catfile import CatFile
from MPD import MPD
from nothing import Nothing
from settings import settings
from tags import get_tag, get_mod, get_length, get_cat, get_flags
from tpd import TPD
from track import Track
from xdh_logging import initLog, LogWriter

# Module-level logger shared by every class in this file.
log = initLog("tracklist")

class FileWorker(Process):
	"""
	Worker process that consumes (path, adds, upds, dels) batches from a shared
	queue and turns them into SQL insert/update/delete operations for the track
	tables, recording per-directory statistics along the way.
	"""

	# Shared work queue and "has ever begun processing" flag for the pool.
	Q = JoinableQueue()
	started = Event()

	@classmethod
	def startup(cls):
		"""Spawn the pool of worker processes if none is currently running."""
		if not cls.is_running():
			cls.started.clear()
			cls.workers = []
			for i in xrange(settings.data.num_processes):
				worker = Nothing()
				setattr(worker, "running", Event())
				setattr(worker, "process", FileWorker(worker.running))
				cls.workers.append(worker)

	@classmethod
	def is_running(cls):
		"""
		Return True while there is queued work, processing has not yet begun,
		or any live worker is in the middle of a batch.
		"""
		if not hasattr(cls, "workers"):
			return False

		# Pending if the queue is non-empty, or nothing has started yet.
		running = not (cls.Q.empty() and cls.started.is_set())

		for worker in cls.workers:
			if worker.process.is_alive():
				running |= worker.running.is_set()

		return running

	@classmethod
	def shutdown(cls):
		"""Send one STOP sentinel per live worker, drain the queue, and join."""
		if not hasattr(cls, "workers"):
			return

		for worker in cls.workers:
			if worker.process.is_alive():
				log.debug("Sending stop to {name} Queue.".format(name = cls.__name__))
				cls.Q.put(lib.STOP)

		cls.Q.join()

		for worker in cls.workers:
			if worker.process.is_alive():
				worker.process.join()

		del cls.workers
		cls.started.clear()

	def __init__(self, running):
		"""Attach the shared 'busy' Event and immediately start the process."""
		super(FileWorker, self).__init__()
		self.running = running
		self.start()

	def write_stat(self, category, step, value):
		"""Append one stat row for this directory; non-positive values are skipped."""
		if value > 0:
			self.stats.append({
				'timestamp': datetime.today(),
				'path': self.path,
				"category": category,
				"step": step,
				"value": value,
			})

	def make_track(self, filename, proc_type):
		"""
		Makes the dictionaries to load the sql.TrackData and sql.TrackFlagData tables.

		proc_type "add" -> (track, flags); "upd" -> (track, flags, db filename);
		"del" -> a single delete descriptor dict.
		"""
		db_filename = lib.fix_filename(filename, reverse = True)

		if proc_type == "del":
			return {"filename": db_filename, "category": Track(filename).category}

		track = {
			"filename": db_filename,
			"path": self.path,
			# New files are stamped now; updates keep their original added time.
			"added": datetime.today() if proc_type == "add" else Track(filename).added,
			"modified": get_mod(filename),
			"category": get_cat(filename),
			"length": get_length(filename)
		}

		flags = [{"filename": db_filename, "flag": flag} for flag in get_flags(filename)]

		if proc_type == "add":
			return track, flags
		return track, flags, db_filename

	def _count_stats(self, rows, key, proc_type):
		"""Tally rows by rows[key] and write one stat per distinct value."""
		counts = {}
		for row in rows:
			counts[row[key]] = counts.get(row[key], 0) + 1
		for value, count in counts.items():
			self.write_stat(value, proc_type, count)

	def cat_stats(self, tracks, proc_type):
		"""Record per-category counts for a batch of track rows."""
		self._count_stats(tracks, 'category', proc_type)

	def flag_stats(self, track_flags, proc_type):
		"""Record per-flag counts for a batch of track-flag rows."""
		self._count_stats(track_flags, 'flag', proc_type)

	def process_adds(self):
		"""Insert brand-new tracks and their flags."""
		if self.adds is None:
			return

		sql_tracks = []
		sql_flags = []

		for filename in self.adds:
			track, flags = self.make_track(lib.fix_filename(filename), "add")
			sql_tracks.append(track)
			sql_flags.extend(flags)

		self.cat_stats(sql_tracks, "insert")
		self.flag_stats(sql_flags, "insert")

		sql.process(sql.TrackData, "insert", sql_tracks)
		sql.process(sql.TrackFlagData, "insert", sql_flags)

	def process_upds(self):
		"""Re-insert changed tracks; their flags are wiped and re-inserted."""
		if self.upds is None:
			return

		sql_tracks = []
		sql_flags = []
		sql_files = []

		for filename in self.upds:
			track, flags, db_filename = self.make_track(lib.fix_filename(filename), "upd")
			sql_tracks.append(track)
			sql_flags.extend(flags)
			sql_files.append(db_filename)

		self.cat_stats(sql_tracks, "update")
		self.flag_stats(sql_flags, "update")

		sql.process(sql.TrackData, "insert", sql_tracks)
		sql.process(sql.TrackFlagData, "delete", sql_files)
		sql.process(sql.TrackFlagData, "insert", sql_flags)

	def process_dels(self):
		"""Delete tracks that no longer exist on disk."""
		if self.dels is None:
			return

		sql_tracks = [self.make_track(lib.fix_filename(filename), "del") for filename in self.dels]

		self.cat_stats(sql_tracks, "delete")

		sql.process(sql.TrackData, "delete", sql_tracks)

	def process(self):
		"""Run adds/updates/deletes for the current batch and flush stats."""
		start = datetime.today()
		self.process_adds()
		self.process_upds()
		self.process_dels()
		diff = datetime.today() - start
		self.write_stat("Files", "duration", diff.total_seconds())

		sql.process(sql.DirStatData, "insert", self.stats)

		log.info(u"Files for directory \"{path}\" finished: {adds} adds, {upds} updates, {dels} deletes.".format(path = lib.safe_unicode(self.path), adds = 0 if self.adds is None else len(self.adds), upds = 0 if self.upds is None else len(self.upds), dels = 0 if self.dels is None else len(self.dels)))

	def load_initial(self, path, adds, upds, dels):
		"""Reset per-batch state; adds/upds/dels may each be None."""
		self.path = path
		self.adds = adds
		self.upds = upds
		self.dels = dels

		self.stats = []

	def run(self):
		"""Main loop: pull batches until the STOP sentinel arrives."""
		try:
			while True:
				item = FileWorker.Q.get()
				gc.collect()
				if item == lib.STOP:
					FileWorker.Q.task_done()
					log.debug("Got stop from {name} Queue.".format(name = FileWorker.__name__))
					break
				log.debug("Got data from {name} Queue.".format(name = FileWorker.__name__))

				self.running.set()

				if not FileWorker.started.is_set():
					FileWorker.started.set()

				self.load_initial(*item)
				self.process()

				FileWorker.Q.task_done()

				self.running.clear()
		except Exception as e:  # "as" form works on Python 2.6+ and Python 3.
			log.error("{name}: {msg}".format(name = e.__class__.__name__, msg = e))
			log.error("".join(traceback.format_exception(*sys.exc_info())))


class PathWorker(Process):
	"""
	This does the bulk of the processing of an individual directory. Once started, it scans the directory, sends off sub-directories
	to be processed, and generates the batch of SQL inserts, updates, and deletes to update the entire database.
	"""

	# Shared work queue and "has ever begun processing" flag for the pool.
	Q = JoinableQueue()
	started = Event()

	@classmethod
	def startup(cls):
		"""Spawn the worker pool and seed the queue with the music root."""
		if not cls.is_running():
			cls.started.clear()
			cls.workers = []
			for i in xrange(settings.data.num_processes):
				worker = Nothing()
				setattr(worker, "running", Event())
				setattr(worker, "process", PathWorker(worker.running))
				cls.workers.append(worker)

			log.debug("Sending data to {name} Queue.".format(name = cls.__name__))
			cls.Q.put([settings.path.music_base])

	@classmethod
	def is_running(cls):
		"""Return True while queued work remains or any live worker is busy."""
		if not hasattr(cls, "workers"):
			return False

		# Pending if the queue is non-empty, or processing has not yet begun.
		running = not (cls.Q.empty() and cls.started.is_set())

		for worker in cls.workers:
			if worker.process.is_alive():
				running |= worker.running.is_set()

		return running

	@classmethod
	def shutdown(cls):
		"""Send one STOP per live worker, drain the queue, and join everyone."""
		if not hasattr(cls, "workers"):
			return

		for worker in cls.workers:
			if worker.process.is_alive():
				log.debug("Sending stop to {name} Queue.".format(name = cls.__name__))
				cls.Q.put(lib.STOP)

		cls.Q.join()

		for worker in cls.workers:
			if worker.process.is_alive():
				worker.process.join()

		del cls.workers
		cls.started.clear()

	def __init__(self, running):
		"""
		Initialize the worker process with basic communications and coordination details, then start it. Note -- worker processes
		are designed to be able to be reused over and over by the system as it processes directories.
		"""
		super(PathWorker, self).__init__()
		self.running = running
		self.start()

	def process_dir(self, filename, file_path):
		"""Record a sub-directory for later queueing."""
		self.num_dirs += 1
		self.actual_dirs.append(file_path)

	def process_file(self, filename, file_path):
		"""Classify a music file as an add (unknown) or an update (mtime changed)."""
		self.num_files += 1

		self.actual_files.append(file_path)

		if sql.TrackData.exists(file_path):
			track = Track(file_path)

			if track.modified != get_mod(track.filename):
				if self.upds is None:
					self.upds = []
				self.upds.append(file_path)

		else:
			if self.adds is None:
				self.adds = []
			self.adds.append(file_path)

	def process_entry(self, filename):
		"""Dispatch one directory entry to the dir or file handler."""
		filename = os.path.join(self.path, filename)

		# NOTE(review): if `filename` is absolute, this join simply yields
		# `filename` again -- confirm the dir_path/path interplay is intended.
		file_path = os.path.join(self.dir_path, filename)

		# The podcast directory is managed elsewhere; never scan it here.
		if settings.path.podcast == file_path:
			return

		# Place directories onto the queue in order to have them get processed as well.
		if os.path.isdir(filename):
			self.process_dir(filename, file_path)

		elif os.path.isfile(filename) and settings.re.music_ext.search(filename.lower()) is not None:
			self.process_file(filename, file_path)

	def prep_deletes(self):
		"""Queue deletes for DB rows whose files/dirs no longer exist on disk."""
		bad_files = set(sql.TrackData.get_path(self.path)) - set(self.actual_files)

		if bad_files:
			if self.dels is None:
				self.dels = []
			self.dels.extend(bad_files)

		bad_dirs = set(sql.DirData.get_subdirs(self.path)) - set(self.actual_dirs)

		if bad_dirs:
			self.del_dirs.extend([{"filepath": filepath} for filepath in bad_dirs])

	def write_stat(self, category, step, value, parent = False):
		"""
		Append one stat row; with parent=True it is attributed to the parent
		directory (falling back to this directory at the root). Non-positive
		values are skipped.
		"""
		if value > 0:
			self.stats.append({
				'timestamp': datetime.today(),
				'path': self.dir_path if not parent or self.parent is None else self.parent,
				"category": category,
				"step": step,
				"value": value,
			})

	def process_stats(self):
		"""Write the per-directory summary stats."""
		self.write_stat("Directory", "subdirs", self.num_dirs)
		self.write_stat("Directory", "files", self.num_files)
		self.write_stat("Directory", "duration", self.duration)

	def _with_retries(self, operation):
		"""
		Run operation(), retrying up to 5 times on OSError with a sleep in
		between (flaky network mounts); re-raise after the final failure.
		Was a bare `except:` -- narrowed so real bugs surface immediately.
		"""
		timeouts = 0
		while True:
			try:
				return operation()
			except OSError:
				if timeouts > 5:
					raise
				timeouts += 1
				time.sleep(settings.data.sleep)

	def get_path_modified(self):
		"""Return this directory's mtime as a datetime, retrying on OS errors."""
		return self._with_retries(lambda: datetime.fromtimestamp(os.path.getmtime(self.path)))

	def make_add_dir(self):
		"""Queue a DirData insert when this directory is new or its mtime changed."""
		path_modified = self.get_path_modified()

		for dir_data in sql.DirData.get(self.dir_path):
			if dir_data is None or dir_data.modified != path_modified:
				self.add_dirs.append({"filepath": self.dir_path, "parent": self.parent, "modified": path_modified})

	def get_entries(self):
		"""Return this directory's listing, retrying on OS errors."""
		return self._with_retries(lambda: os.listdir(self.path))

	def process_entries(self):
		"""Walk the directory entries in random order (spreads worker load)."""
		entries = self.get_entries()

		random.shuffle(entries)
		for entry in entries:
			self.process_entry(entry)

	def send_dirs(self):
		"""Push collected sub-directory batches onto the PathWorker queue."""
		if len(self.sub_dirs) > 0:
			log.debug("Sending data to {name} Queue.".format(name = PathWorker.__name__))
			random.shuffle(self.sub_dirs)

			for batch in self.sub_dirs:
				PathWorker.Q.put(batch)

	def send_files(self):
		"""Push collected file batches onto the FileWorker queue."""
		if len(self.file_blocks) > 0:
			log.debug("Sending data to {name} Queue.".format(name = FileWorker.__name__))
			random.shuffle(self.file_blocks)

			for batch in self.file_blocks:
				FileWorker.Q.put(batch)

	def init_path(self, path):
		"""Reset per-directory scan state for `path`."""
		self.path = path
		self.dir_path = lib.fix_filename(self.path, reverse = True)

		# An empty reverse-fixed path means this directory is the music root.
		if len(self.dir_path) == 0:
			self.parent = None
		else:
			self.parent = lib.fix_filename(os.path.dirname(self.path), reverse = True)

		self.adds = None
		self.upds = None
		self.dels = None

		self.num_dirs = 0
		self.num_files = 0

		self.actual_files = []
		self.actual_dirs = []

	def process(self, path):
		"""Scan one directory: detect adds/upds/dels and collect work batches."""
		self.init_path(path)

		start = datetime.today()

		self.make_add_dir()
		self.process_entries()
		self.prep_deletes()

		if self.adds is not None or self.upds is not None or self.dels is not None:
			self.file_blocks.append((self.dir_path, self.adds, self.upds, self.dels))

		self.sub_dirs.append(self.actual_dirs)

		diff = datetime.today() - start
		self.duration = diff.total_seconds()

		self.process_stats()

		log.info(u"Directory \"{path}\" finished: {dirs} dirs, {files} files.".format(path = lib.safe_unicode(self.dir_path), dirs = self.num_dirs, files = self.num_files))

	def process_parent_sql(self):
		"""Flush queued directory inserts and deletes to SQL."""
		# SQL directory inserts...
		sql.process(sql.DirData, "insert", self.add_dirs)

		# SQL directory deletes...
		sql.process(sql.DirData, "delete", self.del_dirs)

	def do_sends(self):
		"""Forward collected work to the directory and file queues."""
		# Send the next batch of directories to the queue.
		self.send_dirs()

		# Send the files to the file filter process' queue.
		self.send_files()

	def process_parent_stats(self, duration):
		"""Attribute batch-level stats to the parent directory (skipped at root)."""
		if self.parent is not None: # Don't worry about these stats if at root directory.
			self.write_stat("Sub Directories", "insert", len(self.add_dirs), parent = True)
			self.write_stat("Sub Directories", "delete", len(self.del_dirs), parent = True)
			self.write_stat("Sub Directories", "duration", duration, parent = True)

	def initialize(self):
		"""Reset per-batch accumulators. (Was space-indented: a TabError on Python 3.)"""
		self.add_dirs = []
		self.del_dirs = []

		self.sub_dirs = []
		self.file_blocks = []

		self.stats = []

	def run(self):
		"""Main loop: pull path batches until the STOP sentinel arrives."""
		try:
			while True:
				paths = PathWorker.Q.get()
				gc.collect()

				if paths == lib.STOP:
					PathWorker.Q.task_done()
					log.debug("Got stop from {name} Queue.".format(name = PathWorker.__name__))
					break
				log.debug("Got data from {name} Queue.".format(name = PathWorker.__name__))

				if not PathWorker.started.is_set():
					PathWorker.started.set()

				self.running.set()
				start = datetime.today()

				self.initialize()

				for path in paths:
					self.process(path)

				self.process_parent_sql()
				self.do_sends()

				diff = datetime.today() - start
				self.process_parent_stats(diff.total_seconds())

				# send stats...
				sql.process(sql.DirStatData, "insert", self.stats)

				PathWorker.Q.task_done()
				self.running.clear()
		except Exception as e:  # "as" form works on Python 2.6+ and Python 3.
			log.error("{name}: {msg}".format(name = e.__class__.__name__, msg = e))
			log.error("".join(traceback.format_exception(*sys.exc_info())))


class TrackList(object):
	"""
	Manages the updating of the entire tracklist comprised of multiple trackfiles.
	This relies upon mutagen for being able to read the tag information straight
	from the music data files, and libmpd2 to access the mpd server directly. If
	necessary, it will force the mpd server to update itself accordingly.
	"""

	def __enter__(self):
		return self

	def __exit__(self, exc_type, exc_value, tback):
		"""Shut both worker pools down; log any exception that ended the block."""
		# Renamed params: `type` shadowed the builtin in the original.
		FileWorker.shutdown()
		PathWorker.shutdown()

		sql.Q.join()

		if exc_type is not None:
			log.error("{name}: {msg}".format(name = exc_value.__class__.__name__, msg = exc_value))
			log.error("".join(traceback.format_exception(*sys.exc_info())))

	def start(self):
		"""Mark MPD for update and spin up both worker pools."""
		MPD.update = True

		PathWorker.startup()
		FileWorker.startup()

	def stop(self):
		"""Tear the pools down, then recalculate tracks-per-day."""
		PathWorker.shutdown()
		FileWorker.shutdown()

		log.info("Recalculating Tracks Per Day.")
		TPD.calc()

	@property
	def running(self):
		# `|` (not `or`) keeps the original behaviour of always evaluating
		# both sides; is_running() has no side effects, so either works.
		return PathWorker.is_running() | FileWorker.is_running()

	@classmethod
	def process(cls, music_base_path = settings.path.music_base):
		"""
		Run a full scan and block until all workers go idle.

		NOTE(review): music_base_path is accepted for compatibility but is
		currently unused -- PathWorker.startup always seeds the queue with
		settings.path.music_base. Confirm before wiring it through.
		"""
		with TrackList() as manager:
			manager.start()

			while manager.running:
				time.sleep(settings.data.sleep)

			manager.stop()
