# -*- coding: utf-8 -*-
__author__      = "Damien Lebrun"
__date__        = "$23-Nov-2008 12:43:58$"
__revision__    = "$Id: builder.py 372 2008-12-03 10:51:36Z dinoboff $"

from common import UtilsError, Command ,fix_eol, replace_tokens, FileList, get_yui_compressor
from subprocess import Popen, PIPE
import settings
import datetime
import docutils
import fileinput
import os
import re
import shutil
import urllib
import zipfile
import wikir

class BuildError(UtilsError):
	"""Raised when the extension cannot be cleaned, built or packaged."""
	pass

class Builder(Command):
	"""
	Helper to create and package a Vanilla extension.

	Reads the extension details from default.php, copies the files matching
	the include/exclude patterns into a "build" directory, optionally
	minifies CSS/JS assets with the YUI compressor, fixes line endings and
	substitutes @@TOKEN@@ place-holders, converts reStructuredText docs to
	HTML and wiki markup, and finally zips everything into "dist".
	"""
	# Matches a whole wiki heading line such as "== Title ==".
	# Group 1 is the run of '=' characters; it is reused to demote
	# heading levels in _get_wiki_from_rst_file.
	RE_WIKI_HEADER = re.compile(r"^(=+)(.*)(\1)$", re.U | re.M)
	def __init__(self,
				 name=None,
				 version=u'n/a',
				 dist_name=None,
				 license=u'GPLv2',
				 licence_url=u'http://www.gnu.org/licenses/old-licenses/gpl-2.0.html',
				 minify=True,
				 wiki_source='readme.rst',
				 include_patterns=settings.INCLUDE_PATTERNS,
				 exclude_patterns=settings.EXCLUDE_PATTERNS,
				 *args, **kwargs):
		"""
		All options are optional when the current path is the directory
		containing an extension (with its default.php file).

		name             -- extension name; normally read from default.php.
		version          -- extension version; normally read from default.php.
		dist_name        -- base name of the zip archive; defaults to a
		                    normalized form of the extension name.
		license          -- licence name exposed to the templates as LICENCE.
		licence_url      -- licence URL exposed as LICENCE_URL.
		minify           -- when True, run CSS/JS assets through the YUI
		                    compressor during build().
		wiki_source      -- rst file converted to wiki markup for the docs.
		include_patterns -- iterable of (prefix, pattern) pairs to package.
		exclude_patterns -- iterable of (prefix, pattern) pairs to skip.

		NOTE(review): the parameter names mix the "license" and "licence"
		spellings; both are part of the public interface and kept as-is.
		"""
		kwargs['logger_name'] = 'vanillafriends.builder'
		super(Builder, self).__init__(*args, **kwargs)
		self.name = name
		self.version = version
		# Version currently published on Lussumo.com; updated by
		# get_extension_details() / get_details_from_lussumo().
		self.old_version = 'n/a'
		self.dist_name = dist_name
		self.minify = minify
		self.wiki_source = wiki_source
		self.include_patterns = include_patterns
		self.exclude_patterns = exclude_patterns
		self.details = {}
		# Useful variables for the file templates (@@LICENCE@@, @@YEAR@@...).
		self.details['LICENCE'] = license
		self.details['LICENCE_URL'] = licence_url
		self.details['YEAR'] = str(datetime.date.today().year)

	def build(self):
		"""
		Package the extension: prepare build/dist directories, copy the
		selected files, minify assets, substitute tokens, generate the
		docs and zip the build directory into dist/.

		TODO: testing extension before building it.
		"""
		self.log.info(u'Building the extensions located in "%s"...' % self.base_dir)
		build_ext_dir = self.prepare()
		# Only warn (do not abort) when this version is already published.
		if self.version == self.old_version:
			self.log.warn(u'Version %s is already hosted on Lussumo.com.' % self.version)
		
		for file_path in self.find_files_to_pack():
			# FIXME: log message typo -- should read '"%s" will be packed.'
			self.log.debug(u'"%s" will packed.' % file_path)
			self._copy_file(file_path, build_ext_dir)
		
		if self.minify:
			# NOTE: the method name is misspelled ("minfy") but it is part
			# of the class's public interface, so it is kept unchanged.
			self.minfy_assets(build_ext_dir)
		
		# Fix EOLs and replace @@TOKEN@@ place-holders BEFORE generating
		# the docs, so the rst sources are already substituted.
		self.prepare_file()
		self.build_wiki_doc(build_ext_dir)
		self.build_doc()
		
		# _normalize_name is presumably inherited from Command -- confirm.
		zip_name = '-'.join((self.dist_name, self._normalize_name(self.version)))
		zip_path = os.path.join(self.base_dir, 'dist', zip_name)
		self.log.info(u'making dist/%s.zip' % zip_name)
		self.create_package(zip_path,os.path.join(self.base_dir, 'build'))
		self.log.info(u'%s version %s is built !' % (self.name,self.version,))

	def clean(self, paths=('build', 'dist',)):
		"""
		Remove files/directories created by a previous build.

		paths -- iterable of paths relative to base_dir to remove.

		Raise BuildError when it can't remove old build files or
		directories.

		NOTE(review): on Python 2, shutil.rmtree and os.remove raise
		OSError, which is NOT a subclass of IOError -- such failures would
		escape this handler. Verify whether `except IOError` should be
		`except (IOError, OSError)` (or EnvironmentError).
		"""
		try:
			for path in paths:
				full_path = os.path.join(self.base_dir, path)
				if os.path.exists(full_path):
					self.log.info(u'Removing "%s"...' % path)
					if os.path.isdir(full_path):
						shutil.rmtree(full_path)
					else:
						os.remove(full_path)
		except IOError, e:
			msg = u'IOError when trying to clean up the extension\'s last build: %s' % str(e)
			self.log.fatal(msg)
			raise BuildError(msg)

	def find_files_to_pack(self):
		"""
		Find all the files to pack according to the file patterns defined
		in the include_patterns and exclude_patterns properties.

		Side effect: stores the FileList on self.file_list.
		Returns the list of matching file paths.
		"""
		file_list = FileList()
		file_list.findall(dir=self.base_dir)
		for prefix, pattern in self.include_patterns:
			pattern, prefix = self._norm_patterns(pattern, prefix)
			file_list.include_pattern(
				pattern, anchor=False, prefix=prefix)
		for prefix, pattern in self.exclude_patterns:
			pattern, prefix = self._norm_patterns(pattern, prefix)
			file_list.exclude_pattern(
				pattern, anchor=False, prefix=prefix)
		self.file_list = file_list
		return file_list.files

	def prepare(self, build_dir=u'build', dist_dir=u'dist'):
		"""
		Clean the old build, then re-create the build and dist directories.
		Returns the path to the directory the files should be copied to
		(base_dir/build/<dist_name>).

		Raise a BuildError exception if it can't create the directories.

		NOTE(review): os.makedirs raises OSError on Python 2, which this
		IOError handler would not catch -- verify the intended exception.
		"""
		# Also fills in self.name / self.version / self.dist_name.
		self.get_extension_details()
		self.log.info(u'Building "%s" version %s.' % (self.name, self.version))

		build_ext_dir = os.path.join(build_dir, self.dist_name)
		dirs = (build_ext_dir, build_dir, dist_dir,)
		self.clean(paths=dirs)
		try:
			for dir in dirs:
				path = os.path.join(self.base_dir, dir)
				if not os.path.exists(path):
					self.log.info(u'Creating "%s" directory' % dir)
					os.makedirs(path)
		except IOError, e:
			msg = u'IOError when trying to create the build directories: %s' % str(e)
			self.log.fatal(msg)
			raise BuildError(msg)
		
		return os.path.join(self.base_dir, build_ext_dir)

	def prepare_file(self, build_dir='build'):
		"""
		Fix end of line and replace tokens (extension details like
		@@AUTHOR@@) in the build folder.

		Raise BuildError if it can't open or update the files.
		"""
		
		f = FileList()
		f.findall(dir=os.path.join(self.base_dir, build_dir))
		
		try:
			# Pass 1: normalize line endings in the targeted files.
			for pattern in settings.EOLFIX_TARGET_PATTERN:
				f.include_pattern(pattern, anchor=False)
			for file_path in f.files:
				fix_eol(file_path)

			# Pass 2: reset the match list and substitute @@TOKEN@@
			# place-holders using self.details.
			f.files = []
			for pattern in settings.TOKKEN_REPLACEMENT_TARGET_PATTERN:
				f.include_pattern(pattern,anchor=False)
			for file_path in f.files:
				replace_tokens(file_path, self.details)
		except UtilsError, e:
			msg = u'Could not prepare files: %s.' % str(e)
			self.log.fatal(msg)
			raise BuildError(msg)

	def get_extension_details(self):
		"""
		Get the extension name and version from default.php and check them
		against the lussumo.com add-on website.

		Updates self.name, self.version, self.dist_name, self.old_version
		and self.details; returns self.details.

		Raise BuildError when it can't open default.php or can't find the
		extension name and version in default.php.
		"""
		default_path = os.path.join(self.base_dir, 'default.php')
		count = 0
		details = {}
		detail_search = re.compile(
			"^\s*(Extension Name|Extension Url|Description|Version|Author|Author Url):(.+)$"
			, re.UNICODE)
		try:
			for line in fileinput.input(files=(default_path,)):
				# Decode using the project-wide charset (Python 2 bytes).
				line = unicode(line, settings.FILE_CHARSET)
				result = detail_search.match(line)
				if result:
					key, value = result.groups()
					value = value.strip()
					if key == 'Extension Name':
						details['EXT_NAME'] = value
					elif key == 'Extension Url':
						details['EXT_URL'] = value
					elif key == 'Description':
						details['DESCRIPTION'] = value
					elif key == 'Version':
						details['VERSION'] = value
					elif key == 'Author':
						details['AUTHOR'] = value
					elif key == 'Author Url':
						details['AUTHOR_URL'] = value
				count += 1
				# The details header is expected within the first 15 lines.
				if count >= 15:
					break
			fileinput.close()
		except IOError, e:
			msg = u'Could not open default.php and get the the extension details.\n%s' % str(e)
			self.log.fatal(msg)
			raise BuildError(msg)
		try:
			self.name = details['EXT_NAME']
			self.version = details['VERSION']
			if self.dist_name:
				details['DIST_NAME'] = self.dist_name
			else:
				self.dist_name = details['DIST_NAME'] = self._normalize_name(self.name)
			# ValueError here means the extension was not found (empty
			# tuple returned and unpacked) -- caught below.
			self.old_version, app_url = self.get_details_from_lussumo(self.name)
		except KeyError:
			# FIXME: the format string has no placeholder, so the
			# '% default_path' raises TypeError ("not all arguments
			# converted...") instead of producing this message. Should be
			# u'Could not find extension details in %s' % default_path.
			msg = u'Could not find extensions details in default.php' % default_path
			self.log.fatal(msg)
			raise BuildError(msg)
		except BuildError, e:
			# Fetching from Lussumo failed: log and carry on building.
			self.log.warn(str(e))
		except ValueError:
			self.log.warn(u'Cannot find extension (%s) on Lussumo.com' % self.name)

		self.details.update(details)
		return self.details

	def get_details_from_lussumo(self, extension_name):
		"""
		Gets the extension details from lussumo.com.

		Return a tuple with the current version of the extension on
		lussumo.com, and the url of the extension page.
		Return an empty tuple if the extension can't be found on
		Lussumo.com (callers unpacking two values then get a ValueError).

		Raise BuildError if it can't fetch the webpage.
		"""
		params = urllib.urlencode({'Extension': extension_name})
		try:
			# LUSSUMO_UPDATECHECK_URL is a format string taking the
			# urlencoded query -- presumably something like '...?%s'.
			f = urllib.urlopen(settings.LUSSUMO_UPDATECHECK_URL % params)
		except IOError, e:
			raise BuildError(u'Could not open the page'
							 'about the extension on Lussumo.com. %s.' % str(e))

		# Expected response body: "<version>|<extension page url>".
		resp = re.compile("([-\.\d\w]+)\|(.+)", re.UNICODE).match(f.read())
		if resp:
			return resp.groups()
		else:
			return ()

	def _norm_patterns(self, pattern, prefix=None):
		"""
		Normalize the prefix and pattern of files (using correct slashes
		for the platform) and remove the current-directory symbol from the
		prefix (. and ./).

		Returns a (pattern, prefix) tuple; prefix may become None.
		"""
		cur_dir = ('.', '.' + os.path.sep)
		pattern = os.path.normpath(pattern)
		if prefix:
			prefix = os.path.normpath(prefix)
			if prefix in cur_dir:
				# Prefix is the current directory: drop it entirely.
				prefix = None
			elif self.base_dir in cur_dir:
				# base_dir IS the current directory: keep prefix relative.
				prefix = os.path.normpath(prefix)
			else:
				# Anchor the prefix under the extension's base directory.
				prefix = os.path.join(self.base_dir, prefix)
				prefix = os.path.normpath(prefix)
		return (pattern, prefix)

	def _copy_file(self, src, base_dst):
		"""
		Copy a file but conserve the path between base_dir and the file
		in base_dst (i.e. preserve the relative directory layout).

		Returns the destination path (shutil.copy2 return value).
		Raise BuildError when the destination directory can't be created.
		"""
		src = os.path.normpath(src)
		try:
			# Strip the base_dir prefix to get the path relative to it.
			rel_src = src.replace(self.base_dir, '', 1)
			dst = os.path.join(
				base_dst,
				rel_src.strip(os.path.sep))

			root_dst = os.path.split(dst)[0]
			if not os.path.exists(root_dst):
				os.makedirs(root_dst)
		except IOError, e:
			msg = 'Could not create the archive: %s.' % str(e)
			self.log.fatal(msg)
			raise BuildError(msg)
		self.log.debug('Copying %s to %s' % (src,dst,))
		return shutil.copy2(src,dst)

	def create_package(self, dst_file, src_dir, mode='w'):
		"""
		Create a zip package (dst_file + '.zip') of the src_dir content.

		Raise a BuildError when it can't create the archive.

		TODO: find a way to create empty directory in the archive
		"""
		# FIXME: log message typo -- '"%s.zp"' should be '"%s.zip"'.
		self.log.info('Creating "%s.zp"...' % dst_file)
		try:
			dst_file = dst_file + '.zip'
			z = zipfile.ZipFile(dst_file.encode(settings.FILE_CHARSET), mode, zipfile.ZIP_DEFLATED)
			src_dir = os.path.normpath(src_dir)
			for dir, sub_dirs, files in os.walk(src_dir):
				for file in files:
					file_path = os.path.join(dir,file,)
					# Archive names are relative to src_dir, encoded with
					# the project charset (zipfile wants bytes on py2).
					z.write(file_path,
							file_path
								.replace(src_dir + os.path.sep,'',1)
								.encode(settings.FILE_CHARSET))
			z.close()
		except zipfile.error, e:
			msg = 'Could not create the archive: %s.' % str(e)
			self.log.fatal(msg)
			raise BuildError(msg)
	
	def minfy_assets(self, base_dst, yuicompressor_path=None):
		# Minify every *.css / *.js asset of base_dir through the YUI
		# compressor (java -jar), writing the results under base_dst.
		# Best-effort: missing compressor or java only logs a warning.
		# NOTE: method name typo ("minfy") is part of the public interface.
		
		if yuicompressor_path is None:
			yuicompressor_path = get_yui_compressor()
			if not yuicompressor_path:
				self.log.warn('Could not find Yui-compressor.')
				return
		
		if not os.path.exists(yuicompressor_path):
			self.log.warn('Could not find Yui-compressor at "%s".', yuicompressor_path)
			return
		
		args = ['java', '-jar', yuicompressor_path, '--charset', 'UTF-8']
		
		asset_list = FileList()
		asset_list.findall(self.base_dir)
		asset_list.include_pattern('*.css')
		asset_list.include_pattern('*.js')
		for dir, pattern in settings.EXCLUDE_PATTERNS:
			asset_list.exclude_pattern(pattern, prefix=dir)
			
		try:
			for file_path in asset_list.files:
				# Fresh copy of the base args for each file.
				file_args = args[:]
				file_args.append(file_path)
				# Feed the source through stdin; minified output and any
				# error text come back as the (stdout, stderr) tuple.
				command = Popen(file_args, stdout=PIPE, stdin=PIPE)
				minified = command.communicate(input=open(file_path, 'rb').read())
				if command.returncode == 0:
					# NOTE(review): assumes FileList exposes base_dir after
					# findall() -- confirm against common.FileList.
					rel_path = file_path.replace(asset_list.base_dir, '').strip(os.sep)
					dst = os.path.join(base_dst, rel_path)
					self.log.debug('Creating "%s" with...\n %s\n', dst, minified[0])
					open(dst, 'wb').write(minified[0])
				else:
					self.log.warn('Could not minify "%s":\n"%s"\n', file_path, minified[1])
		except OSError:
			# Popen raises OSError when the 'java' executable is missing.
			self.log.warn('Could not minified the css and js files.'
						  'Make sure Java is installed and in your Path.')
			
	def build_doc(self, build_dir='build'):
		# Convert every *.rst file in the build directory to an .html file
		# (using TEMPLATE_DOC) and delete the rst source afterwards.
		# Raise BuildError on any failure.
		# NOTE(review): the local name `list` shadows the builtin.
		try:
			list = FileList()
			list.findall(os.path.join(self.base_dir, build_dir))
			list.include_pattern('*.rst')
			
			for file_path in list.files:
				html_doc = self._get_html_from_rst_file(
					file_path,
					settings.TEMPLATE_DOC)
				
				doc_path = os.path.splitext(file_path)[0] + '.html'
				open(doc_path, 'wb').write(html_doc.encode('utf-8'))
				os.unlink(file_path)
		except Exception, e:
			raise BuildError('could not convert rst doc to html: %s.' % str(e))

	def build_wiki_doc(self, src_dir, dist_dir='dist'):
		# Convert the wiki_source rst file (if any) to wiki markup and
		# write it to dist/<dist_name>.wiki. Raise BuildError on failure.
		try:
			if self.wiki_source:
				src_wiki = os.path.join(src_dir, self.wiki_source)
				if os.path.exists(src_wiki):
					wiki_content = self._get_wiki_from_rst_file(
						src_wiki, settings.TEMPLATE_WIKI)
					
					wiki_path = os.path.join(
						self.base_dir, dist_dir, self.dist_name + '.wiki')
					open(wiki_path, 'wb').write(wiki_content)
		except Exception, e:
			raise BuildError('could not create wiki doc: %s.' % str(e))

	def _get_html_from_rst_file(self, file_path, template='%(DOC_BODY)s', level=2):
		# Render a utf-8 rst file to an HTML body with docutils and
		# interpolate it (plus self.details) into `template`.
		# NOTE(review): only `import docutils` appears at the top of the
		# file; this relies on docutils.core being importable without an
		# explicit `import docutils.core` -- confirm.
		settings_overrides = {
			'output_encoding': 'unicode',
			'input_encoding': 'unicode',
			'initial_header_level': level
		}
		raw_doc = unicode(open(file_path, 'rb').read(), 'utf-8')
		template_data = self.details.copy()
		template_data['DOC_BODY'] = docutils.core.publish_parts(
			source=raw_doc,	writer_name='html',
			settings_overrides=settings_overrides)['html_body']
		return template % template_data

	def _get_wiki_from_rst_file(self, file_path, template='%(DOC_BODY)s', level=2):
		# Render a utf-8 rst file to wiki markup with wikir, demote the
		# headings by (level - 1), and interpolate the result (plus
		# self.details) into `template`.
		settings_overrides = {
			'output_encoding': 'unicode',
			'input_encoding': 'unicode'
		}
		raw_doc = unicode(open(file_path, 'rb').read(), 'utf-8')
		wiki_doc = wikir.publish_string(
			raw_doc,settings_overrides=settings_overrides)
		
		if level > 1:
			# Append '=' signs to both sides of every heading, e.g. with
			# level=2: "= Title =" becomes "== Title ==".
			header_replacement = '=' * (level - 1)
			wiki_doc = self.RE_WIKI_HEADER.sub(
				r"\1%s\2\1%s" % (header_replacement, header_replacement),
				wiki_doc)
		
		template_data = self.details.copy()
		template_data['DOC_BODY'] = wiki_doc
		return template % template_data