# -*- coding: utf-8 -*-
import sys #system functions (write, etc)
import os
import urllib #url functions (download, encode, etc)
import zipfile #zip compression decompression
import bz2 #bzip compression decompression
import re #regular expressions
import Image #image management
import cPickle #serialization
import xmlrpclib #xmlrpc web service management
import time #get time info
import shutil #ultis library
import pdb #debug using pdb.set_trace()
import threading #threads

import libUtilities

#constants
D = 8  #NOTE(review): not referenced in this file -- presumably used by importers; confirm before removing

#regexes/urls for the series Directory (GetCatalog)
baseUrl = "http://www.mangaeden.com"
directoryUrl = "http://static2.mangaeden.com/data.js"
directoryDataRegex1 = "manga_list_En(.+)]];"  #captures the English manga-list javascript blob
directoryDataRegex2 = "\[.+?href=\"(/En/.+?)\"" #captures each serie's relative /En/... url

#regexes for GetSerieInfo (scraped from a serie's page)
serieNameRegex = "manga_title\">(.+?)<"
serieDescriptionRegex = "(Status:.+?)<div" #caller replaces <br /> -> \n and strips <b> tags
serieLastChapterRegex = "aaData.+?\"([0-9]+)\"" #caller uses match element [1]

#regexes for ViewSerieChapters -- concatenated into one pattern
#capturing (chapter number, relative url, title)
preChapterRegex = "\[\"([0-9]+)\".+?<a href=\"(/En/"
postChapterRegex =".+?)\">(.+?)</a"

#regexes for DownloadChapter: next-page link and page image url
nextRegex = "id=\"imageDiv.+?chatbox_wrapper.+?href=\"(.+?)\""
imageRegex = "id=\"imageDiv.+?chatbox_wrapper.+?<img.+?src=\"(.+?)\""

#general settings
configFile = "config.dat"  #serialized 'following' dict (see CheckConfig)
configBinary = False  #serialize the config as text rather than binary
directoryFile = "directory.dat"  #cached catalog written by GetCatalog
dirSeries = "Series"  #root directory for downloaded chapters

#module state
following = {}  #id -> Serie; loaded from configFile by CheckConfig
started = False  #flipped once CheckConfig has run (see Loaded)

class Serie:
	# A followed manga series.  The values below are class-level defaults;
	# the rest of the module assigns per-instance values directly
	# (see GetSerieInfo and AddSerie).
	id = 0  # numeric identifier, unique key in the 'following' dict
	name = "New Serie"  # human-readable title scraped from the serie page
	description = "Description of serie"  # status/description text, HTML stripped
	url = ""  # relative url of the serie page (joined with baseUrl)
	lastdownloaded = "-1"  # chapter number of the newest chapter fetched locally
	lastavaliable = "-1"  # chapter number of the newest chapter on the site
	lastupdated = ""  # timestamp of the last metadata refresh (libUtilities.Now())

def DownloadAllSeriesLastChapters():
	print "download all series last chapters"
	if libUtilities.logEnabled:
		pdb.set_trace()
	
	toUpdate = list(following.values())
	i = 1
	for serie in toUpdate:
		while threading.active_count() >5:
			i = -1
			#do nothing but wait
		
		nThread = threading.Thread(None,DownloadSerieLastChapters,serie.name,[serie.id])
		nThread.start()
		#DownloadSerieLastChapters(serie.id)
	
	print "Waiting fo all threads to end activity..."
	while threading.active_count() > 1:
		time.sleep(0.25)
	print "Finished!"

def DownloadSerieLastChapters(id):
	#pdb.set_trace()
	print "download serie %d last chapters" % (id)
	if not id in following:
		return -1
	
	UpdateSerie(id)
	serie = following[id]
	
	start = float(serie.lastdownloaded)+0.1
	end = float(serie.lastavaliable)
	
	DownloadSerieChapters(id,start,end)

def DownloadSerieChapters(id,start,end):
	if libUtilities.logEnabled:
		pdb.set_trace()
	if not id in following:
		return -1
	if start > end:
		return -2
		
	#pdb.set_trace()
	serie = following[id]
	chapters = ViewSerieChapters(id)
	
	chapterList = []
	for chapter in chapters:
		if float(chapter[0]) >= start and float(chapter[0]) <= end:
			chapterList.append(chapter)
			
	#pdb.set_trace()
	chapterList.reverse()
	print "Downloading serie %d from %s to %s" % (id,chapterList[0][0],chapterList[-1][0])

	for chapter in chapterList:
		DownloadChapter("%s%s" % (baseUrl,chapter[1]))
		
	#pdb.set_trace()
	if end > float(serie.lastdownloaded):
		serie.lastdownloaded = end
		
	libUtilities.Serialize(following,configFile,configBinary)

def DownloadChapter(url):
	#pdb.set_trace()
	#get serie name and chapter
	urlTokens = url.split("/")
	serie = urlTokens[-3]
	chapter = urlTokens[-2]
	
	#create path
	seriePath = dirSeries+"/"+serie
	chapterPath = seriePath+"/"+chapter
	if not os.path.exists(seriePath):
		os.mkdir(seriePath)
	if not os.path.exists(chapterPath):
		os.mkdir(chapterPath)
		
	count = 0
	image = [0]
	next = [url]
	
	while len(next) > 0:
		count += 1
		
		print "Downloading page",count
		#download page
		res = libUtilities.DownloadFileS(next[0]).replace("\n","")
		
		#download image
		image = re.findall(imageRegex,res)
		next = re.findall(nextRegex,res)
		
		ext = ""
		if image[0].find(".jpg") >= 0:
			ext = "jpg"
		elif image[0].find(".png") >= 0:
			ext = "png"
		elif image[0].find(".gif") >= 0:
			ext = "gif"
		
		filePath = "%s/%s.%s" % (chapterPath,str(count).zfill(2),ext)
		libUtilities.DownloadFile(image[0],filePath)
		print "Downloaded",image[0]
		print "as",filePath
		
		#prepare next url
		if len(next) > 0:
			if next[0].find(chapter) >=0:
				next[0] = baseUrl + next[0]
				print "next"
				print ""
			else:
				next.remove(next[0])
				print "exit"
	
	#pdb.set_trace()
	filePath = "%s/%s.zip" % (seriePath,chapter.zfill(4))
	print "Zipping chapter as",filePath
	newZip = zipfile.ZipFile(filePath,"w")
	images = os.listdir(chapterPath)
	images.sort()
	for image in images:
		newZip.write("%s/%s" % (chapterPath,image))
		print "Writing",image
		
	newZip.close()
	print "Zip closed"
	
	shutil.rmtree(chapterPath)
	print "Temporary files deleted"
	
	return count

def ViewSerieChapters(id):
	"""Scrape the chapter list of serie *id* from its page.

	Returns a list of (number, relative url, title) tuples, newest first,
	with the newest entry's number replaced by the cached 'lastavaliable'
	value.  Returns -1 when the serie is not followed.
	"""
	if id not in following:
		return -1

	serie = following[id]
	page = libUtilities.DownloadFileS(baseUrl + serie.url).replace("\n", "")
	pattern = preChapterRegex + postChapterRegex
	chapters = re.findall(pattern, page)
	if chapters:
		newest = chapters[0]
		chapters[0] = (serie.lastavaliable, newest[1], newest[2])

	return chapters

def RemoveSerie(id):
	"""Stop following serie *id*; id == -1 removes every serie.

	Returns the number of series removed, or -1 when *id* is unknown.
	The updated config is persisted immediately.
	"""
	global following
	if id == -1:
		removedCount = len(following)
		following = {}
		libUtilities.Serialize(following, configFile, configBinary)
		return removedCount
	if id not in following:
		return -1
	del following[id]
	libUtilities.Serialize(following, configFile, configBinary)
	return 1

def UpdateSerie(id):
	"""Re-scrape the metadata of serie *id*, or of every serie if id == -1.

	Returns the list of refreshed Serie objects, or -1 when *id* is
	unknown.  The config is persisted after the refresh.
	"""
	if id == -1:
		for key in following.keys():
			following[key] = GetSerieInfo(following[key].url, True)
		libUtilities.Serialize(following, configFile, configBinary)
		# BUG FIX: the original returned inside the for loop, so only the
		# first serie was ever refreshed when updating all.
		return following.values()
	elif id not in following:
		return -1
	else:
		serieInfo = GetSerieInfo(following[id].url, True)
		following[id] = serieInfo
		libUtilities.Serialize(following, configFile, configBinary)
		return [serieInfo]

def ViewSerie(id):
	"""Return the Serie object for *id*, or -1 when it is not followed."""
	return following[id] if id in following else -1

def AddSerie(newSerie, lastDownloaded = "-1"):
	"""Start following *newSerie*.

	lastDownloaded seeds the download bookmark.  Returns 0 on success or
	-1 when a serie with the same id is already followed; the config is
	persisted immediately on success.
	"""
	if newSerie.id in following:
		return -1
	newSerie.lastdownloaded = lastDownloaded
	following[newSerie.id] = newSerie
	libUtilities.Serialize(following, configFile, configBinary)
	return 0

def GetFromCatalog(catalog_id = 0):
	"""Look up a serie by catalog index in the cached directory file.

	Returns the matching catalog entry, 0 when no entry has that index,
	or -1 when the directory file has not been downloaded yet.
	"""
	if not os.path.exists(directoryFile):
		return -1
	for entry in libUtilities.Deserialize(directoryFile):
		if int(entry[0]) == catalog_id:
			return entry
	return 0

def GetSerieInfo(url, force = False):
	"""Return a Serie with metadata scraped from the page at *url*.

	If the url matches an already-followed serie that object is reused
	(and refreshed in place when force is True); otherwise a new Serie
	is created with the next free id.
	"""
	newSerie = None
	# Reuse the existing object when the serie is already followed.
	for item in following.values():
		if item.url.lower() == url.lower():
			newSerie = item
			break
	if newSerie is None or force:
		res = libUtilities.DownloadFileS(baseUrl+url).replace("\n","")
		if newSerie is None:
			# BUG FIX: the original only created a Serie when force was
			# False, so force=True on an unknown url crashed below.
			newSerie = Serie()
			# BUG FIX: dict.values()[-1] depends on arbitrary Python 2
			# dict ordering and could hand out duplicate ids; use the
			# maximum existing id instead.
			newSerie.id = 1 if len(following) == 0 else max(s.id for s in following.values()) + 1
		# Scrape name, description and newest chapter from the page.
		newSerie.name = re.findall(serieNameRegex,res)[0]
		newSerie.description = re.findall(serieDescriptionRegex,res)[0].replace("<br />","\n").replace("<b>","").replace("</b>","")
		newSerie.lastavaliable = re.findall(serieLastChapterRegex,res)[1]
		newSerie.url = url
		newSerie.lastupdated = libUtilities.Now()

	return newSerie

def Loaded():
	"""Lazily load the persisted config the first time it is needed."""
	if started:
		return
	CheckConfig()

def Following():
	"""Return the dict of followed series, loading the config on first use."""
	Loaded()
	return following

def List(text = ""):
	"""Return the cached catalog, optionally filtered by serie name.

	*text* is matched case-insensitively as a substring of each entry's
	name.  Returns -1 when the directory file has not been downloaded.
	"""
	if not os.path.exists(directoryFile):
		return -1
	avaliable = libUtilities.Deserialize(directoryFile)
	if text == "":
		return avaliable
	needle = text.lower()
	return [entry for entry in avaliable if entry[1].lower().find(needle) >= 0]

def CheckConfig():
	"""Load (or create) the persisted config and ensure the Series directory.

	Populates the module-level 'following' dict and marks the module as
	started so Loaded() does not run this again.
	"""
	# BUG FIX: 'started' was missing from the global declaration, so the
	# assignment below created a local and Loaded() re-ran CheckConfig on
	# every single call.
	global following, started
	if not os.path.exists(configFile):
		libUtilities.Serialize(following,configFile,configBinary)
		libUtilities.Log("Config not found, created new one")
	else:
		following = libUtilities.Deserialize(configFile)
		libUtilities.Log("Config found, loaded.")
	if not os.path.exists(dirSeries):
		os.mkdir(dirSeries)
		libUtilities.Log("Series directory not found. Created new one")
	started = True

def GetCatalog():
	"""Download and parse the full series directory, persist and return it.

	Each returned entry is [position, decoded serie name, relative url];
	urls with a numeric path component (chapter links) are filtered out.
	The parsed catalog is serialized to directoryFile.
	"""
	libUtilities.Log("Downloading directory from "+directoryUrl)
	directory = libUtilities.DownloadFileS(directoryUrl)
	libUtilities.Log("Downloaded %d bytes" % (len(directory)))
	match = re.findall(directoryDataRegex1,directory)
	match = re.findall(directoryDataRegex2,match[0])
	match.sort()
	# Walk backwards so deleting entries does not shift unvisited indexes.
	for pos in range(len(match)-1,-1,-1):
		data = match[pos].split("/")
		if len(data) > 4 and data[-2].isdigit():
			# BUG FIX: remove-by-value could delete an earlier duplicate
			# of an equal string in the sorted list; delete by index.
			del match[pos]
		else:
			match[pos] = [pos,urllib.unquote_plus(data[2]).replace("\\u002D","-"),match[pos]]

	libUtilities.Serialize(match,directoryFile)
	libUtilities.Log("Saved %d series" % (len(match)))
	return match
