import os
import random
import re
import sys
import threading
import time
import urllib
import StringIO
from stat import ST_SIZE

from HTMLParser import HTMLParser

import pycurl

from django.conf import settings
from django.db import models
from django.db.models import Q

from petra.rsaccounts.models import *

# Create your models here.


"""
	This module contains all the data models used to represent and interact
	with a download item.

"""

class Download(models.Model):
	"""
		This class represents a downloadable item. It has attributes like:
			- Name of the download.
			- Resource's public URL.
			- Dates.
			- Download priority.
	"""
	#Attributes
	name = models.CharField(maxlength=200, blank=True, null=True)
	url = models.URLField()
	start_date = models.DateTimeField('Starting date', auto_now_add = True)
	end_date = models.DateTimeField('Ending date', auto_now = True)
	status = models.IntegerField( default = 0)
	priority = models.SmallIntegerField(default = 0)

	#Status constants
	STATUS_QUEUED = 0
	STATUS_DOWNLOADING = 1
	STATUS_FINISHED = 2

	def __str__(self):
		"""
			Return the resource URL when a Download object is used
			as a string.
		"""
		return self.url

	def alter_priority(self, priority):
		"""
			Change download priority by the given (possibly negative)
			delta and persist the change.
		"""
		self.priority += priority
		self.save()

	def LatestsDownloads( num = 10 ):
		"""
			Static method that will return the latest downloads. These downloads
			are those whose status is downloading or queued.
		"""
		return Download.objects.filter(Q(status=Download.STATUS_DOWNLOADING) | Q(status=Download.STATUS_QUEUED)).order_by("-priority", "start_date")[:num]

	def GetNextDownload():
		"""
			This method will be executed in a thread safe mode (the locking is
			implemented outside this method) and returns the candidate download
			item, or None when the queue is empty.
		"""
		#Bug fix: the original len()-ed the whole queued queryset and then ran
		#the same query a second time (with a literal 0 instead of the status
		#constant) just to pick one row. Fetch the single candidate row once.
		queued = list(Download.objects.filter(status=Download.STATUS_QUEUED).order_by("-priority", "start_date")[:1])
		if queued:
			return queued[0]
		return None

	def LatestsFinishedDownloads( num = 10 ):
		"""
			Returns an array containing at most `num` downloads with
			status finished, most recently finished first.
		"""
		return Download.objects.filter(Q(status =Download.STATUS_FINISHED)).order_by("-end_date")[:num]

	LatestsDownloads = staticmethod(LatestsDownloads)
	LatestsFinishedDownloads = staticmethod(LatestsFinishedDownloads)
	GetNextDownload = staticmethod(GetNextDownload)

	class Admin:
		list_filter = ('start_date', 'end_date', 'status')
		list_display = ('name', 'url', 'status', 'priority')
		#Bug fix: ('url') is just the string 'url' (Django would iterate it
		#character by character); a one-element tuple needs a trailing comma.
		search_fields = ('url',)
		fields = (
				('Basic info', {'fields' : ('name', 'url', 'status', 'priority',)}),
				('Dates info', {'fields' : ('start_date', 'end_date',), 'classes': 'collapse'}),
			)

class DownloadProgress(models.Model):
	"""
		This class represents the current progress of a download.
	"""
	#One-to-one link to the Download this row tracks; also used as the PK.
	download = models.OneToOneField(Download, primary_key = True)
	#Completion percentage in the range 0-100 (see DownloadWorker.progress).
	progress = models.PositiveSmallIntegerField(default = 0)

	def __str__(self):
		#Human-readable label (used by the admin and in worker logs).
		return "Download progress for %s" % self.download

	class Admin:
		#Register the model in the (old-style) Django admin with defaults.
		pass

class DownloadWorker(threading.Thread):
	"""
		Threading class that will ask the current downloads queue and
		will perform any download.

		It needs a valid rapidshare premium account to download an item.
	"""

	STATUS_IDLE 	= 0
	STATUS_WORKING 	= 1

	def __init__(self, semaph, worker_name):
		"""
			Constructor, receives a lock (semaphore), and a name to log
			what's doing this thread.
		"""
		threading.Thread.__init__(self)
		self.status = DownloadWorker.STATUS_WORKING
		self.lock = semaph
		self.name = worker_name

		self.current_download = None

		#Hardcoded: RS Account, solve this with the Account Manager
		#from petra.rsaccounts.models import *
		try:
			self.account = Account.objects.all()[0]
			self.account.authenticate()
		except:
			pass

		#LinkSolver
		self.linksolver = LinkSolver(self.account)

	def run(self):
		"""
			Run method, start the thread activity with this function.
			Performs the download, always is running until status changes
			or execution is aborted.
		"""
		while self.status == DownloadWorker.STATUS_WORKING:
			#Try to get the lock
			print "%s: trying to get a download" % self.name
			self.lock.acquire()

			#Get the Download object
			download = Download.GetNextDownload()

			if download is None:
				time.sleep(settings.WORKER_SLEEP_TIME)	
				#Release the resource
				self.lock.release()
				continue

			#Change status, now is downloading and create progress if not exists
			download.status = Download.STATUS_DOWNLOADING
			try:
				dp = download.downloadprogress
			except:
				dp = DownloadProgress(download = download)
				dp.save()

			download.save()

			self.current_download = download

			print "%s: selected to download ->  %s" % (self.name, download.name)

			#Release the lock
			self.lock.release()

			#Now start the download

			curl = pycurl.Curl()

			#Get a valid RS premium link
			premium_links = self.linksolver.resolve(self.current_download.url)
			if len(premium_links) > 0:
				URL = random.choice(premium_links)
			else:
				#There is an error, no premium links for this download
				self.current_download.status = Download.STATUS_QUEUED
                                self.current_download.save()
				time.sleep(settings.WORKER_SLEEP_TIME)
				print "%s: dying cause no premium link found" % self.name
				continue
				
			
			print "%s: obtained a link premium -> %s" % (self.name, URL)

			curl.setopt(pycurl.URL, URL)

 			#Check if we have to resume the download
                        if os.path.exists(os.path.join(settings.DOWNLOADS_DIR,os.path.basename(URL))):
                                #File exists, so we have to resume download
                                file_name = os.path.join(settings.DOWNLOADS_DIR,os.path.basename(URL))
                                offset = os.stat(file_name)[ST_SIZE]
                                curl.setopt(pycurl.RESUME_FROM, offset)
                                print "%s: resuming download from offset %d (bytes)" % (self.name, offset)


			fp = open(os.path.join(settings.DOWNLOADS_DIR,os.path.basename(URL)), "ab")		

			curl.setopt(pycurl.WRITEDATA, fp)
			curl.setopt(pycurl.FOLLOWLOCATION, 1)
                	curl.setopt(pycurl.NOPROGRESS, 0)
                	curl.setopt(pycurl.PROGRESSFUNCTION, self.progress)
                	curl.setopt(pycurl.MAXREDIRS, 5)
                	curl.setopt(pycurl.NOSIGNAL, 1)
			#Check config for DOWNLOAD_LIMIT param
                	if settings.DOWNLOAD_LIMIT and settings.DOWNLOAD_LIMIT <> -1:
                        	curl.setopt(pycurl.MAX_RECV_SPEED_LARGE, settings.DOWNLOAD_LIMIT / settings.MAX_WORKERS)

			#Set the valid cookie for RS Premium accounts
			curl.setopt(pycurl.COOKIEFILE, str(self.account.cookie_jar))

                	self.last_progress = 0
			print "%s: download started" % self.name
			try:
				curl.perform()
				curl.close()
			except Exception, e:
				print "%s: Error in download: %s" % (self.name, e)
				self.current_download.status = Download.STATUS_QUEUED
				self.current_download.save()

				fp.close()

				time.sleep(settings.WORKER_SLEEP_TIME)
				continue
				
			else:
				print "%s: download fininshed" % self.name

			self.current_download.status = Download.STATUS_FINISHED
			self.current_download.save()

			fp.close()

			time.sleep(settings.WORKER_SLEEP_TIME)

        def progress(self, download_t, download_d, upload_t, upload_d):
                """
                        Updates download status
                """
                if download_t == 0:
                        self.round = self.round + 0.1
                        if self.round >= 1.0:  self.round = 0.0
                else:
                        self.round = float(download_d) / float(download_t)
                new_round = int(self.round*100)
                if new_round > self.last_progress:
                        p = self.current_download.downloadprogress
                        p.progress = new_round
                        p.save()
			self.current_download.save()
                        self.last_progress = new_round

class LinkSolver:
	"""
		Resolve a public rapidshare link to a premiun one
	"""

	class LinkParser(HTMLParser):
		"""
			Internal class to parse html content and get
			premium links
		"""
		links = list()
		DETECTLINKS_REGEXP = re.compile("""document\.dlf\.action='(.+)';""")
		
		def handle_starttag(self, tag, attrs):
			if tag == "input":
				if type(attrs[0][1]) is str and attrs[0][1] == "radio":
					onclick = attrs[2][1]
					self.links.append(LinkSolver.LinkParser.DETECTLINKS_REGEXP.findall(onclick)[0])

		def get_links(self):
			return self.links

	def __init__(self, account):
		"""
			Constructor, get a RSAccount object
		"""
		self.account = account

	def resolve(self, link):
		"""
			Get a rapidshare anonymous link and return a  
			rapidshare premium link list	
		"""
		#First check the account
		if not self.account.cookie_jar:
			#Try to login
			if self.account.authenticate() <> -1:
				#Could not authenticate with a valid account
				print "Account not valid"
				return None

		#Now we are sure we have a cookie_jar for a valid(active or inactive) RS account 
		html = StringIO.StringIO()

		curl = pycurl.Curl()
		curl.setopt(pycurl.URL, str(link))
		curl.setopt(pycurl.WRITEFUNCTION, html.write)
		curl.perform()
		html.seek(0)
		html_content = html.read()
		html.close()
		curl.close()

		#Find the premium button
		index1 = html_content.find('action="')+8
		real_file = html_content[index1 : html_content.find('"',index1)]
		
		#Now get the real file with a premium account cookie
		html = StringIO.StringIO()
		
		curl = pycurl.Curl()
		login_form = {'dl.start': "PREMIUM" }
		login_form_data = urllib.urlencode(login_form)
		curl.setopt(pycurl.COOKIEFILE, str(self.account.cookie_jar))
		curl.setopt(pycurl.POSTFIELDS, login_form_data)
		curl.setopt(pycurl.URL, real_file)
		curl.setopt(pycurl.WRITEFUNCTION, html.write)

		curl.perform()
		html.seek(0)
		html_content = html.read()
		html.close()
		curl.close()
		
		parser = self.LinkParser()
		parser.links = list()
		parser.feed(html_content)

		return parser.get_links()
