#!C:\Python26

import os.path
from pluginManage import Plugin


import BeautifulSoup as bs
import bs4						# Yep, using two different versions of BeautifulSoup. Eventually I plan to convert to bs4 entirely
import sys
import time
import re

import cPickle

import logmanager
import threading
import Queue

import urllib

# OHHHHHHH GOD EVERYTHING NEEDS REFACTORING/COMPLETE REWRITE

# This is the oldest and, as such, the shittiest xadownloader plugin.
# Basically it needs a complete rewrite.




class getDA(Plugin):
	# DeviantArt scraper plugin: walks each configured artist's gallery and
	# downloads every deviation (image/flash/story) plus any commentary text.

	capabilities			=	"wScraper"										# Used for detecting if any random .py file is a plugin file
	version				=	1.0											# Used for detecting available configuration options
	pluginName			=	"DAscrape"										# Plugin Name variable
	siteName			=	"DeviantArt"									# Value for populating "Art Site" dropdown
	nameSourceEN			=	1											# Determines if Name Source input box is enabled (Boolean, 0-1)
	artistListEN			=	1											# Determines if Artist List input box is enabled (Boolean, 0-1)
	overwriteModes			=	["Check Folders", "Check Files w/Memory and Time",
						"Check Files w/Memory", "Check Files", "Overwrite Files"]				# Overwrite-mode choices offered in the dropdown for this plugin
	retrievalHandles		=	5											# How many retrieval threads to use

	DLFolder			=	"DeviantArt"									# Download root (re-rooted under config.downloadDir in main())
	logFileName			=	"LogFile.txt"
	successFileName			=	"Success.txt"
	pickleFileName			=	"Pickle.dat"									# Per-artist memory of already-downloaded links

	cookieCapable = 1						# Plugin supports cookie-based DA login

	waitTimer = 2.5							# Seconds to sleep between HTTP requests (crude rate limit)

	def checkCookie(self, cookiejar):
		"""Report whether *cookiejar* holds both DeviantArt login cookies.

		The jar is rendered to a string and probed for the `auth` and
		`auth_secure` cookies. Returns a human-readable status string.
		"""
		jarText = "%s" % cookiejar
		patterns = ("<Cookie auth=[_%0-9a-z]*? for \.deviantart\.com/>",
				"<Cookie auth_secure=[_%0-9a-z]*? for \.deviantart\.com/>")
		matches = [re.search(pattern, jarText, re.IGNORECASE) for pattern in patterns]

		if all(matches):
			return "Have DA Cookies:\n	%s\n	%s" % (matches[0].group(0), matches[1].group(0))
		return "Do not have DA login Cookies"

	def getCookie(self, userName, password, opener):
		"""Attempt to log in to DeviantArt through *opener*.

		POSTs the login form; the opener's cookie jar picks up the auth
		cookies on success. Returns "Logged In" or "Login Failed".
		"""
		formFields = {
			"username"	: userName,
			"password"	: password,
			"remember_me"	: "1",
			"ref"		: "https://www.deviantart.com/users/loggedin",
			"action"	: "Log In",
		}
		page = opener.open('https://www.deviantart.com/users/login', urllib.urlencode(formFields))
		pagetext = page.read()
		page.close()

		# DA serves an error banner on bad credentials; its absence means success.
		if re.search("The username or password you entered was incorrect", pagetext):
			return "Login Failed"
		return "Logged In"



	def getGalleryLst(self, aName, getHandle, config):
		"""Scrape every art-page link from *aName*'s main and scraps galleries.

		aName      - artist user name (forms the "<name>.deviantart.com" host)
		getHandle  - page retriever exposing getpage(url) -> html or "Failed"
		config     - run configuration (used here for the progress bars)

		Returns a list of unique art-page URLs, or [] on any failure.
		"""
		debugSave = False					# Flip on to dump each gallery page + link list to disk

		artlinks = []

		config.progBars.pulseSubBar()
		galOffset = 0

		# Main gallery first, then the "scraps" gallery.
		subGalleries = [".deviantart.com/gallery/?catpath=/&offset=", ".deviantart.com/gallery/?catpath=scraps&offset="]
		for gallery in subGalleries:
			loopCounter = 0					# page index; set to -1 to stop paging

			while loopCounter >= 0:

				config.progBars.pulseSubBar()

				time.sleep(self.waitTimer)		# rate-limit so we do not hammer DA

				turl = "http://%s%s%s" % (aName, gallery, loopCounter * 24)		# 24 thumbnails per gallery page

				self.log.info("Getting = " + turl)

				mpgctnt = getHandle.getpage(turl)
				config.progBars.pulseSubBar()

				# BUGFIX: bail out BEFORE the debug dump, so the "Failed"
				# sentinel is never written to disk as if it were page content.
				if mpgctnt == "Failed":
					self.log.error("Cannot get Page %s" % turl)
					return []

				tempTxtPath = None
				if debugSave:
					tempHtmlPath = os.path.join(self.DLFolder, aName, "test %d.html" % (loopCounter + galOffset * 10))
					tempTxtPath = os.path.join(self.DLFolder, aName, "test %d.txt" % (loopCounter + galOffset * 10))

					fp = open(tempHtmlPath, "wb")
					try:
						fp.write(mpgctnt.encode("utf-8"))
					finally:
						fp.close()

				soup = bs4.BeautifulSoup(''.join(mpgctnt), "lxml")

				config.progBars.pulseSubBar()

				mainSection = soup.find('div', attrs={"name" : "gmi-ResourceStream", "id" : "gmi-ResourceStream"})

				if not mainSection:		# Happens when we have an empty gallery. E.g. a DA user with no posted art
					self.log.error("User %s seems to have no art!" % aName)
					return []

				# NOTE(review): findAllNext scans the rest of the document,
				# not just mainSection's children - presumably intentional
				# since the stream div precedes all thumbnails. Verify on DA.
				links = mainSection.findAllNext("a", class_="t")

				config.progBars.pulseSubBar()

				linksOnThisPage = 0		# new links on this page (zero means we ran off the end of the gallery)
				staleLinks = 0			# links already collected from an earlier page

				fp = None
				if debugSave:
					fp = open(tempTxtPath, "wb")

				try:
					for link in links:
						if link["href"] not in artlinks:		# skip duplicates
							artlinks.append(link["href"])
							linksOnThisPage += 1
							if fp:
								fp.write(link["href"])
								fp.write("\n")
						else:
							staleLinks += 1
				finally:
					if fp:					# BUGFIX: close the debug file even if a link lookup raises
						fp.close()

				self.log.info("Found %s links on this page, %d old links" % (linksOnThisPage, staleLinks))

				config.progBars.pulseSubBar()

				if linksOnThisPage:
					loopCounter += 1		# more pages to fetch
				else:
					loopCounter = -1		# nothing new - done with this sub-gallery

			galOffset += 1
		self.log.info("Found %s links" % len(artlinks))
		return artlinks



	def _saveWithRetry(self, filePath, data, mode, sourceUrl):
		"""Write *data* to *filePath* (open mode *mode*), retrying up to 3 times.

		sourceUrl is only used for error context. Returns True on success,
		False after repeated failures.
		"""
		errs = 0
		while errs <= 3:
			try:
				fp = open(filePath, mode)
				try:
					fp.write(data)
				finally:
					fp.close()
				return True
			except IOError:
				errs += 1
				self.log.critical("Error attempting to save file - %s" % filePath)
			except Exception:
				# Unexpected failure (e.g. wrong data type). The old code
				# retried this forever; bound it like the IOError path.
				errs += 1
				self.log.critical("Error saving file - what?")
				self.log.critical("Source URL: %s" % sourceUrl)
				self.log.critical("%s" % type(data))
				import traceback
				self.log.critical(traceback.format_exc())
		self.log.critical("Could not open file for writing!")
		return False

	def getpic(self, link, config, getHandle, daName):
		"""Download one art page: the image/file itself plus any commentary.

		link      - URL of the DA art page
		config    - run configuration (overwrite mode)
		getHandle - page retriever exposing getpage(url) -> content or "Failed"
		daName    - artist name (target sub-folder under self.DLFolder)

		Returns "Downloaded", "Exists" or "Failed".
		"""
		# BUGFIX: keep the page URL - 'link' is reused for soup tags below,
		# so the old error logging printed a tag (or None) instead of the URL.
		pageUrl = link

		mpgctnt = getHandle.getpage(pageUrl)					# Get Webpage
		if mpgctnt == "Failed":
			self.log.error("cannot get page")
			return "Failed"

		soup = bs.BeautifulSoup(''.join(mpgctnt))

		imgurl = ""

		link = soup.find("a", attrs={"id" : "download-button"})
		if link:							# Try for DDL (works for flash and most stories too)
			imgurl = link["href"]
			self.log.info("Found DDL Link! - %s", imgurl)

		if not imgurl:							# Try for non-DDL pic
			link = soup.find("img", attrs={"name" : "gmi-ResViewSizer_fullimg", "id" : "gmi-ResViewSizer_fullimg"})
			if link:
				imgurl = link["src"]
				self.log.info("Whoops, had to manually extract Img URL - %s", imgurl)

		if not imgurl:							# Try for video (last "b"-class anchor redirects to the file)
			self.log.info("Trying for Video Link")
			try:
				link = soup.findAll("a", attrs={"class" : "b"})[-1]
				if link:
					urlAddr = link["href"]
					linkHandle = urllib.urlopen(urlAddr)
					imgurl = linkHandle.geturl()
			except (IndexError, KeyError, IOError):		# no anchors / no href / network error
				imgurl = ""

		if imgurl == "":
			self.log.critical("OH NOES!!! No image on page = %s", pageUrl)
			return "Failed"							# Return Fail

		regx3 = re.compile("http://.+/")					# Pull out filename only
		fname = regx3.sub("", imgurl)
		self.log.info("			Filename = " + fname)
		self.log.info("			FileURL = " + imgurl)

		# Sanitize filename
		fname = "".join([x for x in fname if x.isalpha() or x.isdigit() or x in "_-.()"])
		filePath = os.path.join(self.DLFolder, daName, fname)

		if os.access(filePath, os.W_OK) and config.ovwMode != "Overwrite Files":
			self.log.info("Exists, skipping...")
			return "Exists"

		imgdat = getHandle.getpage(imgurl)					# Request Image
		if imgdat == "Failed":
			self.log.error("cannot get image")
			return "Failed"

		# BUGFIX: the old retry loops exited silently when open() succeeded
		# but write() raised (fp was truthy). _saveWithRetry retries properly.
		if not self._saveWithRetry(filePath, imgdat, "wb", pageUrl):
			return "Failed"

		commentary = soup.find("div", attrs={"class" : "text block"})
		if commentary:
			# Save the text that accompanies the image to a html file
			accSoup = bs.BeautifulSoup()
			htmlTag = bs.Tag(accSoup, "html")
			bodyTag = bs.Tag(accSoup, "body")
			accSoup.insert(0, htmlTag)
			htmlTag.insert(0, bodyTag)
			bodyTag.insert(0, commentary.extract())

			accFilePath = os.path.join(self.DLFolder, daName, "%s%s" % (fname, ".accFile.html"))
			if not self._saveWithRetry(accFilePath, accSoup.prettify(), "w", pageUrl):
				return "Failed"

		self.log.info("Successfully got: " + imgurl)
		return "Downloaded"							# Return Success



	def getDA(self, daName, config, getHandles):									# Main Function Call - Uses other functions
		"""Download *daName*'s complete gallery (entry point for one artist).

		Creates the artist folder, consults the per-artist pickle of
		previously-fetched links (depending on config.ovwMode), scrapes the
		gallery link list, downloads each new page, then rewrites the pickle
		with everything now on disk.

		Returns the list of links that failed to download ([] on skip/error).
		"""
		artistDir = os.path.join(self.DLFolder, daName)

		if os.access(artistDir, os.W_OK) and config.ovwMode == "Check Folders":
			self.log.info("Folder PyDir/%s/ Exists: " % daName)
			self.log.info("Assuming page has already been downloaded, skipping")
			return []

		if not os.access(artistDir, os.W_OK):
			try:
				os.mkdir(artistDir)
				self.log.info("Created folder " + daName)
				self.log.info("Beginning Download")
			except OSError:
				self.log.info("Cannot Make working directory %s/. Do you have write Permissions? %s" % (daName, sys.exc_info()[0]))
				return []

		picklePath = os.path.join(artistDir, self.pickleFileName)

		existingLinks = []
		if "Check Files w/Memory" in config.ovwMode:
			try:
				pickleFile = open(picklePath, "rb")
				try:
					pickleFileCtnt = cPickle.load(pickleFile)
				finally:
					pickleFile.close()

				previousScanTime = 0

				if isinstance(pickleFileCtnt, list):
					existingLinks = pickleFileCtnt		# old pickle format: bare list of links
				elif isinstance(pickleFileCtnt, dict):
					existingLinks = pickleFileCtnt["links"]
					previousScanTime = pickleFileCtnt["date"]
				# Any other structure means a previous run died mid-download
				# or the pickle is broken; leave existingLinks empty so we
				# rescan the whole gallery.

				if (time.time() - previousScanTime) < (60 * 60 * 24 * 3) and "Check Files w/Memory and Time" in config.ovwMode:
					self.log.info("Artist %s has been scanned within the last three days - Skipping" % daName)
					return []

			except Exception:
				# Missing or unreadable pickle - treat as a first run.
				self.log.info("No Pickle file - Is this the first run on this username?")
				existingLinks = []

		artLinksAll = self.getGalleryLst(daName, getHandles, config)		# Scrape!

		if not artLinksAll:
			self.log.critical("Artist has no content? - %s" % daName)
			return []

		artlinks = []
		previouslyDoneLinks = []
		for link in artLinksAll:
			if link in existingLinks:
				previouslyDoneLinks.append(link)
			else:
				artlinks.append(link)

		# Nuke the pickle so that if we crash mid-run, the next run rescans everything.
		pickleFile = open(picklePath, "wb")
		try:
			cPickle.dump("", pickleFile)
		finally:
			pickleFile.close()

		self.log.info("%s%s%s%s%s" % ("- - - - For Artist : ", daName, " at : ", time.ctime(time.time()), " - - - -"))
		self.log.info("- - - Total Pages: %s - - -" % len(artlinks))
		self.log.info("- - - %s Pages previously Retrieved - - -" % len(previouslyDoneLinks))

		successlinks = []
		existlinks = []
		faillinks = []

		config.progBars.setSubBarLen(len(artlinks))

		for numinfo, link in enumerate(artlinks, 1):
			success = self.getpic(link, config, getHandles, daName)		# Scrape Pictures!
			self.log.info("%s%s%s%s" % ("On page ", numinfo, " of ", len(artlinks)))
			config.progBars.incSubBar()
			if success == "Downloaded":
				successlinks.append(link)
			elif success == "Exists":
				existlinks.append(link)
			else:
				faillinks.append(link)

			time.sleep(self.waitTimer)		# rate limit between page downloads

		if faillinks:
			self.log.info("%s%s%s" % ("- - - Failed Pages: ", len(faillinks), " - - -"))
			for link in faillinks:
				self.log.info(link)
		else:
			self.log.info("- - - No Failed Pages - - -")

		# Everything now on disk: fresh downloads, already-present files,
		# and links remembered from previous runs.
		sucessfulLinks = successlinks + existlinks + previouslyDoneLinks

		# BUGFIX: the old code closed the (already closed) nuke-pickle handle
		# a second time here; just open the final pickle directly.
		pickleFile = open(picklePath, "wb")
		try:
			cPickle.dump({"date" : time.time(), "links" : sucessfulLinks}, pickleFile)
		finally:
			pickleFile.close()

		return faillinks

	def childThread(self, config, getHandle, nameQueue, failedQueue):
		"""Worker-thread body: pull artist names off *nameQueue* until empty.

		Each name is fully processed via getDA(); any failed links are
		pushed onto *failedQueue* (as a list) for main() to report.
		Exits when the queue is drained or config.stopChildThreads is set.
		"""
		while not config.stopChildThreads:
			try:
				name = nameQueue.get_nowait()
			except Queue.Empty:
				# BUGFIX: the old call passed extra positional args to
				# log.info() with no format placeholders in the message,
				# which breaks logging-style %-formatting. Build the
				# message explicitly instead.
				self.log.info("Thread %s queue is empty. Exiting" % threading.currentThread())
				break

			config.progBars.addSubProgressBar()

			self.log.info("Getting %s's Gallery" % name)
			faillinks = self.getDA(name, config, getHandle)

			if faillinks:
				failedQueue.put(faillinks)

			self.log.info("Done")

			config.progBars.removeSubProgressBar()

			config.progBars.incMainBar()


	def main(self, config, getHandles):

		self.log = logmanager.ThreadLog(self.retrievalHandles)


		self.DLFolder = os.path.join(config.downloadDir, self.DLFolder)

		if not os.access(self.DLFolder, os.W_OK):
			try:
				os.mkdir(self.DLFolder)
			except:
				self.log.error("Need write Permissions")
				return




		print "Starting"


		config.progBars.setupMainBar(len(config.aList))

		nameQueue = Queue.Queue()
		failedQueue = Queue.Queue()

		for name in config.aList:
			nameQueue.put(name)
		# print nameQueue

		threadNumber = 0
		threads = []

		print "Starting Threads"
		config.stopChildThreads = False

		for getHandle in getHandles:
			temp = threading.Thread(target=self.childThread, name = "Thread %d" % threadNumber, args=(config, getHandle, nameQueue, failedQueue))
			threadNumber += 1
			temp.daemon = True
			temp.start()
			threads.append(temp)

		import time
		alive = True

		while not nameQueue.empty() and alive:
			alive = False
			for childThread in threads:
				alive = alive or childThread.isAlive()		# Do not exit untill all child threads have returned

			time.sleep(0.1)

		totalfaillinks = []
		while not failedQueue.empty():
			totalfaillinks.append(failedQueue.get())

		if len(totalfaillinks) > 0:
			self.log.error("%s%s%s" % ("- - - - - - - - - - -Overall Failed Pages: ", len(totalfaillinks), " - - - - - - - - - - -"))
			self.log.error("Done!")
			self.log.error("")


		for link in totalfaillinks:
			if link:
				self.log.error(link)


		logmanager.clearThreadLog(self.log)

		return




