#!C:\Python26\python.exe


from pluginManage import Plugin


import BeautifulSoup
import bs4
import sys
import time
import re
import urlparse
import os.path
import cPickle

import urllib

import traceback


import logmanager
import logging


class getHF(Plugin):
	"""Scraper plugin for the Hentai-Foundry art site.

	Logs in (the site gates content behind session cookies), walks an
	artist's paged "pictures" and "scraps" listings to collect links to
	the individual art pages, then downloads each image / flash file
	into ``<downloadDir>/<DLFolder>/<artistName>/``.

	Written for Python 2 (urllib.urlencode, urlparse, cPickle).
	"""

	capabilities = "wScraper"				# Used for detecting if any random .py file is a plugin file
	version = 1.0						# Used for detecting available configuration options
	pluginName = "HFscrape"					# Plugin Name variable
	siteName = "Hentai-Foundry"				# Value for populating "Art Site" dropbox
	nameSource = ["Single Name", "Name List File"]		# namesource value passed to plugin
	nameSourceEN = 1					# Determines if Name Source input box is enabled (Boolean, 0-1)
	artistNameEN = 1					# Determines if Artist Name input box is enabled (Boolean, 0-1)
	artistListEN = 1					# Determines if Artist List input box is enabled (Boolean, 0-1)
	overwriteModes = ["Check Folders", "Check Files with Memory", "Check Files", "Overwrite Files"]	# Choices for the overwrite-mode dropbox
	retrievalHandles = 1					# How many http retrieval handles the app should allocate

	DLFolder = "Hentai Foundry"				# Per-site download subdirectory; main() joins config.downloadDir onto it
	logFileName = "LogFile.txt"				# Per-artist text log file name
	pickleFileName = "Pickle.dat"				# Per-artist memory of already-seen page links ("Check Files with Memory" mode)

	cookieCapable = 1					# Tells the host app this plugin supports cookie-based login

	genericlog = logging.getLogger("Main.Plugin")		# Logger used before the per-thread log exists (login phase)

	def checkCookie(self, cookiejar):
		"""Return a human-readable status string describing whether
		*cookiejar* holds both HF session cookies (YII_CSRF_TOKEN and
		PHPSESSID for www.hentai-foundry.com).

		Works on the cookie jar's string representation, so any object
		whose repr lists cookies in the standard cookielib format works.
		"""
		jarText = "%s" % cookiejar
		YII_CSRF_TOKEN = re.search(r"<Cookie YII_CSRF_TOKEN=[\w%]*? for www\.hentai-foundry\.com\/>", jarText)
		PHPSESSID = re.search(r"<Cookie PHPSESSID=[\w%]*? for www\.hentai-foundry\.com\/>", jarText)

		if YII_CSRF_TOKEN and PHPSESSID:
			return "Have HF Cookies:\n	%s\n	%s" % (YII_CSRF_TOKEN.group(0), PHPSESSID.group(0))

		return "Do not have HF login Cookie"

	def getCookie(self, userName, password, opener):
		"""Log in to HF through *opener* (a urllib2-style opener carrying
		a cookie jar).

		Fetches the age-gate page ("enterAgree=1") to obtain the CSRF
		token hidden in the login form, then POSTs the credentials.
		Returns "Logged In" on success, "Login Failed" otherwise.
		"""
		try:
			self.genericlog.info("Getting Entrance Cookie")
			page = opener.open('http://www.hentai-foundry.com/?enterAgree=1')
			soup = bs4.BeautifulSoup(''.join(page), "lxml")
			hiddenInput = soup.find('input', attrs={"name": "YII_CSRF_TOKEN"})

			if not hiddenInput:
				# No CSRF token on the page - layout changed or we were blocked.
				return "Login Failed"

			self.genericlog.info("Got Entrance Cookie, logging in")
			logondict = {
				"YII_CSRF_TOKEN": hiddenInput["value"],
				"LoginForm[username]": userName,
				"LoginForm[password]": password,
				"LoginForm[rememberMe]": "1",
			}

			params = urllib.urlencode(logondict)
			page = opener.open('http://www.hentai-foundry.com/site/login', params)
			pagetext = page.read()
			page.close()

			# The site returns the login page either way; scan for the failure message.
			if not re.search(r"Incorrect username or password\.", pagetext):
				return "Logged In"
			return "Login Failed"

		except Exception:
			# Network or parse error: report failure rather than crashing the host app.
			self.genericlog.error("Error while logging in")
			traceback.print_exc()
			return "Login Failed"

	def getGalleryLst(self, userName, getHandles):
		"""Walk the paged "pictures" and "scraps" listings for *userName*.

		Returns a de-duplicated list of absolute URLs to individual art
		pages, or the string "Failed" if a listing page could not be
		fetched. Pagination stops when a page yields no new art links.
		"""
		artlinks = []
		userName = userName.strip()

		subGalleries = ["http://www.hentai-foundry.com/pictures/user/%s/page/%s",
				"http://www.hentai-foundry.com/pictures/user/%s/scraps/page/%s"]

		for gallery in subGalleries:
			pageNumber = 1
			while pageNumber > 0:
				turl = gallery % (userName, pageNumber)
				self.log.info("Getting = " + turl)
				mpgctnt = getHandles.getpage(turl)
				if mpgctnt == "Failed":
					self.log.error("Cannot get Page")
					return "Failed"

				soup = bs4.BeautifulSoup(''.join(mpgctnt), "lxml")

				artcnt = 0							# New art pages found on this listing page

				for imageLink in soup.find_all('img', class_="thumb"):
					try:
						link = imageLink.find_parent()["href"]		# The thumbnail's enclosing <a>
						fullLink = urlparse.urljoin("http://www.hentai-foundry.com/", link)
					except KeyError:					# Parent had no href (e.g. a named anchor)
						continue
					# Keep only art-page links, skipping duplicates across pages/galleries.
					if "pictures" in fullLink and fullLink not in artlinks:
						artcnt += 1
						artlinks.append(fullLink)

				# A page with no new links means we walked past the last page.
				pageNumber += 1
				if artcnt == 0:
					pageNumber = 0

		self.log.info("Found %s links" % (len(artlinks)))
		return artlinks

	def getpic(self, link, getHandles, artistName):
		"""Download the media file from one art page.

		Resolves the media URL (direct <img> on the pictures.* content
		subdomain, an <embed> flash object, or an <img> on a linked
		sub-page), then saves it under DLFolder/artistName/.

		Returns "Downloaded" on success, "Exists" when the file is
		already present (and overwrite is off), or "Failed".
		"""
		mpgctnt = getHandles.getpage(link)					# Get Webpage
		if mpgctnt == "Failed":
			self.log.error("cannot get page")
			return "Failed"

		soup = bs4.BeautifulSoup(mpgctnt, "lxml")

		contentDiv = soup.find('div', attrs={"class": "box", "id": "yw0"})	# Image should be in the first <div>
		boxDiv = contentDiv.findNext("div", attrs={"class": "boxbody"})
		imgLink = boxDiv.findNext("img")

		imageURL = ""

		# Case 1: a plain <img> hosted on the pictures.* content subdomain.
		if imgLink:
			try:
				if "pictures" in imgLink["src"]:
					imageURL = imgLink["src"]
					self.log.info("%s%s" % ("Found Image URL : ", imageURL))
			except KeyError:						# <img> without a src attribute
				self.log.error("Malformed image tag on page: %s" % link)

		# Case 2: flash content embedded on the page.
		if not imageURL:
			flashContent = boxDiv.findNext("div", attrs={"id": "flash"})
			if flashContent:
				imageURL = flashContent.findNext("embed")["src"]
				self.log.info("%s%s" % ("Found Flash URL : ", imageURL))

		# Case 3: the media sits on a linked sub-page; fetch it and look again.
		if not imageURL:
			subPageLink = boxDiv.findNext("a")
			if subPageLink:
				tempLink = urlparse.urljoin("http://www.hentai-foundry.com/", subPageLink["href"])
				redirLinkFound = False

				self.log.info("Image is on a sub-Page; Fetching sub page")
				mpgctnt = getHandles.getpage(tempLink)

				if mpgctnt != "Failed":
					# Was BS3 (BeautifulSoup.BeautifulSoup); use bs4 like the rest of the plugin.
					soup = bs4.BeautifulSoup(mpgctnt, "lxml")
					imgLink = soup.find('div', attrs={"class": "box"}).findNext("div", attrs={"class": "boxbody"}).findNext("img")

					if imgLink:
						if "pictures" in imgLink["src"]:
							imageURL = imgLink["src"]
							redirLinkFound = True
							self.log.info("%s%s" % ("Found Image URL : ", imageURL))

				if not redirLinkFound:
					self.log.error("Failed to retreive image!")
					return "Failed"
			else:
				return "Failed"

		if imageURL == "":
			self.log.error("OH NOES!!! No image on page = " + link)
			return "Failed"

		# Derive a filename and extension from the media URL.
		fTypeRegx = re.compile(r"http://.+?\.com.*/.*?\.")
		fNameRegex = re.compile(r"http://.+/")
		ftype = fTypeRegx.sub("", imageURL)					# Extension without the dot (e.g. "jpg")
		fname = fNameRegex.sub("", imageURL)					# Basename of the URL

		if ftype == imageURL:							# URL had no extension; assume jpeg.
			ftype = "jpg"							# Was ".jpg", which produced "title..jpg" below.

		imgTitleContainer = contentDiv.findNext("span", class_="imageTitle")
		imgTitle = None

		if imgTitleContainer.contents:
			imgTitle = imgTitleContainer.contents[0]
		else:
			self.log.info("No Title for Image!")

		self.log.info("			Filename			= %s" % fname)
		self.log.info("				Page Image Title	= %s" % imgTitle)
		self.log.info("				FileURL			= %s" % imageURL)
		self.log.info("			FileType			= %s" % ftype)

		if imgTitle != None:
			# Prefer the page's title as the filename, stripped of unsafe characters.
			fname = "%s.%s" % (re.sub(r'[^a-zA-Z0-9\-_.() ]', '', imgTitle), ftype)

		filePath = os.path.join(self.DLFolder, artistName, fname)

		if (not os.access(filePath, os.W_OK)) or self.config.ovwMode == "Overwrite Files":
			imgdath = getHandles.getpage(imageURL)				# Request the actual media bytes

			if imgdath == "Failed":
				self.log.error("cannot get image")
				return "Failed"

			errs = 0
			fp = None

			# Retry the open/write up to 4 times before giving up.
			while not fp:
				try:
					fp = open(filePath, "wb")			# Open file for saving image (Binary)
					fp.write(imgdath)				# Write Image to File
					fp.close()
				except IOError:
					try:
						fp.close()
					except Exception:
						pass
					# Reset so a failed *write* also retries; previously a
					# half-written file was reported as "Downloaded".
					fp = None
					errs += 1
					self.log.critical("Error attempting to save image file - %s" % filePath)
					if errs > 3:
						self.log.critical("Could not open file for writing!")
						return "Failed"

			self.log.info("Successfully got: %s" % imageURL)
			return "Downloaded"
		else:
			self.log.info("Exists, skipping...")
			return "Exists"

	def getArtist(self, artistName, getHandles, config):
		"""Download one artist's whole gallery (per-artist entry point).

		Creates DLFolder/artistName/, collects all art-page links,
		downloads each, and writes summary logs plus a pickle of seen
		links (consumed by the "Check Files with Memory" mode).

		Returns the list of links whose download failed (possibly empty).
		"""
		faillinks = []									# Always defined, so the final return is safe

		logFile = os.path.join(self.DLFolder, artistName, self.logFileName)
		self.log.info(logFile)

		config.progBars.addSubProgressBar()
		# NOTE: main() already joined config.downloadDir onto self.DLFolder.
		# The original re-joined it here on EVERY call, corrupting the path
		# (downloadDir prepended once per artist); that join is removed.

		if os.access(os.path.join(self.DLFolder, artistName), os.W_OK) and self.config.ovwMode == "Check Folders":
			self.log.info("Folder PyDir/" + artistName + "/ Exists: ")
			self.log.info("Assuming page has already been downloaded, skipping")

		else:
			if not os.access(os.path.join(self.DLFolder, artistName), os.W_OK):
				try:
					os.mkdir(os.path.join(self.DLFolder, artistName))
					self.log.info("Created folder " + artistName)
					self.log.info("Beginning Download")
				except OSError:
					self.log.critical("Cannot Make working directory %s/. Do you have write Permissions? %s" % (artistName, sys.exc_info()[0]))
					# Was `return 0`, which made main() crash on list + int.
					return faillinks

			self.log.info("Input username: " + artistName)				# Show the account that's being scraped

			artLinksAll = self.getGalleryLst(artistName, getHandles)		# Walk gallery pages; list of art-page links or "Failed"

			if artLinksAll != "Failed":
				if self.config.ovwMode == "Check Files with Memory":		# Pickle file speeds up re-runs on walked galleries.
					try:
						pickleFile = open(os.path.join(self.DLFolder, artistName, self.pickleFileName), "rb")
						existingLinks = cPickle.load(pickleFile)
						pickleFile.close()

						# Truncate the pickle now: if we crash mid-gallery,
						# the next run checks everything again.
						pickleFile = open(os.path.join(self.DLFolder, artistName, self.pickleFileName), "w")
						pickleFile.close()

						artlinks = []
						for link in artLinksAll:
							if link in existingLinks:
								self.log.info("Already Have: " + link)
							else:
								artlinks.append(link)

					except Exception:
						# Best-effort: missing/corrupt pickle just means "check everything".
						self.log.info("No Pickle file - Is this the first run on this username?")
						artlinks = artLinksAll
				else:
					artlinks = artLinksAll

				# Placeholder pickle while the download runs (see truncation note above).
				pickleFile = open(os.path.join(self.DLFolder, artistName, self.pickleFileName), "wb")
				cPickle.dump("", pickleFile)
				pickleFile.close()

				successlinks = []
				existlinks = []
				numinfo = 1

				config.progBars.setSubBarLen(len(artlinks))			# Set up the local progress bar

				for link in artlinks:
					success = self.getpic(link, getHandles, artistName)	# Download the actual picture
					self.log.info("%s%s%s%s" % ("On page ", numinfo, " of ", len(artlinks)))
					numinfo += 1
					config.progBars.incSubBar()
					if success == "Downloaded":
						successlinks.append(link)
					elif success == "Exists":
						existlinks.append(link)
					else:
						faillinks.append(link)

				# NOTE(review): this extra increment after the loop looks like it
				# overshoots the sub-bar by one - kept as the host app may expect it.
				config.progBars.incSubBar()

				# Dump the summary to the per-artist log file.
				logptr = open(os.path.join(self.DLFolder, artistName, "log.txt"), "a")

				logptr.write("\n\n\n- - - - - - - - - - - Total Pages: %s - - - - - - - - - - -\n" % (len(artlinks)))
				for link in artlinks:
					logptr.write(link + "\n")

				logptr.write("%s%s%s" % ("\n\n\n- - - - - - - - - - - Successful Pages: ", len(successlinks), " - - - - - - - - - - -\n"))
				for link in successlinks:
					logptr.write("\n		" + link)
				logptr.write("%s%s%s" % ("\n\n\n- - - - - - - - - - - Skipped Pages: ", len(existlinks), " - - - - - - - - - - -\n"))
				for link in existlinks:
					logptr.write("\n		" + link)
				logptr.write("%s%s%s" % ("\n\n\n- - - - - - - - - - - Failed Pages: ", len(faillinks), " - - - - - - - - - - -\n"))
				for link in faillinks:
					logptr.write("\n		" + link)

				# And the same summary to the thread log.
				self.log.info("")
				self.log.info("%s%s%s" % ("- - - - - - - - - - - Successful Pages: ", len(successlinks), " - - - - - - - - - - -"))
				self.log.info("")
				for link in successlinks:
					self.log.info(link)

				self.log.info("")
				self.log.info("%s%s%s" % ("- - - - - - - - - - - Skipped Pages: ", len(existlinks), " - - - - - - - - - - -"))
				self.log.info("")
				for link in existlinks:
					self.log.info(link)

				self.log.info("")
				self.log.info("%s%s%s" % ("- - - - - - - - - - - Failed Pages: ", len(faillinks), " - - - - - - - - - - -"))
				self.log.info("")
				for link in faillinks:
					self.log.info(link)

			else:
				self.log.error("Could not get pages of artist: " + artistName)
				logptr = open(os.path.join(self.DLFolder, artistName, "log.txt"), "a")

				logptr.write("Could not get  pages of artist: " + artistName + "\n\n")

			logptr.close()

			# Recreate the pickle with the full link list now that the gallery finished.
			pickleFile = open(os.path.join(self.DLFolder, artistName, self.pickleFileName), "wb")
			cPickle.dump(artLinksAll, pickleFile)
			pickleFile.close()
		config.progBars.removeSubProgressBar()
		return faillinks

	def main(self, config, getHandles):
		"""Plugin entry point: download every artist listed in config.aList
		and append all pages that failed to "Failed Pages.txt"."""
		self.log = logmanager.ThreadLog(self.retrievalHandles)

		self.config = config
		self.log.info("Starting")

		# Resolve the download root ONCE, from the class-level constant so
		# a second run on the same instance cannot double-join the path.
		self.DLFolder = os.path.join(self.config.downloadDir, getHF.DLFolder)

		if not os.access(self.DLFolder, os.W_OK):				# Create the general site directory
			try:
				os.mkdir(self.DLFolder)
			except OSError:
				self.log.error("Need write Permissions")
				return

		config.progBars.setupMainBar(len(config.aList))				# Initialise progress bars

		totalfaillinks = []
		for name in self.config.aList:
			name = name.strip()
			self.log.info(name)
			faillinks = self.getArtist(name, getHandles, config)		# Returns pages that failed, for logging
			if faillinks != None:
				totalfaillinks = totalfaillinks + faillinks
			config.progBars.incMainBar()

		faillogptr = open("Failed Pages.txt", "a")				# NOTE: relative to the CWD, not DLFolder

		self.log.info("")
		self.log.info("%s%s%s" % ("- - - - - - - - - - -Overall Failed Pages: ", len(totalfaillinks), " - - - - - - - - - - -"))
		self.log.info("")

		# Header previously said "Successful Pages" although the list holds the failures.
		faillogptr.write("%s%s%s%s%s" % ("\n\n\n- - - - - - - - - - - Failed Pages: ", len(totalfaillinks), " at ", time.ctime(time.time()), " - - - - - - - - - - -\n"))

		for link in totalfaillinks:
			faillogptr.write("\n		" + link)
		for link in totalfaillinks:
			self.log.info(link)

		faillogptr.close()
		self.log.info("")
		self.log.info("Done!")
		self.log.info("")

		logmanager.clearThreadLog(self.log)
