import threading
import time
import socket
import httplib, mimetypes
import urlparse
import os
import nirvanix
import sys
import nirvanixutils

#################################################
# See the settings file for configurable values.#
#################################################
import settings

class RetryException(Exception):
	"""Raised when a network operation fails with a retryable error or has
	exhausted its retry budget.

	The offending error (an exception instance or a message string) is kept
	in self.error for callers that want details.
	"""
	def __init__(self, error):
		# Call the base initializer so args, pickling and the default repr
		# behave correctly (the original skipped this, leaving args empty).
		Exception.__init__(self, error)
		self.error = error

	def __str__(self):
		return repr(self.error)

class DownloadThread(threading.Thread):
	"""Downloads one byte range of a remote file over HTTP and writes it into
	a shared, seekable local file object, retrying transient failures.

	Several instances run in parallel, each owning a disjoint
	[start_pos, end_pos] slice of the same file.  Completion is signalled by
	decrementing nirvanix.activethreads; a node that keeps failing is pushed
	onto nirvanix.ex_queue so the caller can exclude it and re-route.
	"""
	def __init__(self, hostname, path, file, file_size, start_pos, end_pos):
		# hostname/path locate the remote resource; file is an already-open
		# file object shared with the caller (each thread seeks within its
		# own range before writing).  Offsets are absolute byte positions;
		# file_size is the total size of the remote file.
		threading.Thread.__init__(self)
		self.file = file
		self.hostname = hostname
		self.path = path
		self.file_size = file_size
		self.start_pos = start_pos
		self.end_pos = end_pos
		
	def run(self):
		
		retryCount = 0
		strerror = ""
		while retryCount < settings.number_of_retries:
			try:
				socket.setdefaulttimeout(60)
				if settings.debug:
					print "DEBUG: Thread: %s Connecting to %s" % (threading.current_thread().name, self.hostname)

				# Build the Range header for this thread's slice, clamping the
				# end to the total file size for the final chunk.
				# NOTE(review): on a retry the range is rebuilt from the
				# original start_pos, so bytes already received are fetched
				# (and rewritten) again -- confirm this is intended.
				header = {}
				if self.end_pos > self.file_size:
					header = {'Range': 'bytes=%d-%d' % (self.start_pos, self.file_size)}
				else:
					header = {'Range': 'bytes=%d-%d' % (self.start_pos, self.end_pos)}

				# Route through the http_proxy environment variable when set;
				# a proxied request must carry the absolute URL.
				proxy = os.getenv('http_proxy')
				if proxy == None:
					conn = httplib.HTTPConnection(self.hostname)
					conn.request('GET',self.path,None,header)
				else:
					proxy_url = urlparse.urlparse(proxy)
					conn = httplib.HTTPConnection(proxy_url.netloc)
					conn.request('GET',"http://" + self.hostname + self.path,None,header)

				resp = conn.getresponse()
				if settings.debug:
					print "DEBUG: HTTP Status: %d Start: %d End: %d File Size: %d" % (resp.status, self.start_pos, self.end_pos, self.file_size)
				# Only 200 (full content) and 206 (partial content) are
				# acceptable; anything else is treated as a node failure.
				if resp.status != 200 and resp.status != 206:
					raise httplib.BadStatusLine("Server error when communicating with node.") 
				bufsize = 1024*1024 # 1MB chunks are used to copy from the http stream to the file, if you run into memory trouble this can be reduced.
				dataRead = 0
				while 1:
					data = resp.read(bufsize)
					if settings.debug:
						print 'DEBUG: Writing to file at: %d' % (self.start_pos + dataRead)
					# Seek to the absolute offset before writing; other
					# threads write to disjoint offsets of the same file.
					self.file.seek(self.start_pos + dataRead)
					self.file.write(data)
					dataRead += len(data)
					# Did a full pass with no errors, reset retry counter.
					# NOTE(review): resetting after every chunk means a
					# persistently flaky connection can retry indefinitely --
					# confirm intended.
					retryCount = 0

					# Stop on EOF or once this thread's slice is complete.
					if len(data)==0:
						break
					if self.start_pos + dataRead == self.end_pos:
						break
				conn.close()
			except (socket.error, socket.timeout), strerror:
				if settings.debug:
					print "DEBUG: Socket error connecting to node, retrying. Error: %s" % (strerror)
				retryCount+=1
				# Back off linearly with the number of consecutive failures.
				time.sleep(settings.retry_sleep_time*retryCount)
				continue
			except httplib.BadStatusLine, e:
				# Bad/unexpected status: back off and retry like other errors.
				if settings.debug:
					print "DEBUG: ", e, e.line
				retryCount+=1
				time.sleep(settings.retry_sleep_time*retryCount)
				continue
			except httplib.HTTPException:
				# Error downloading file; this is likely a failed read or
				# timeout, so retry.
				if settings.debug:
					print "DEBUG: Http error connecting to node, retrying. Error: %s" % (strerror)
				retryCount+=1
				time.sleep(settings.retry_sleep_time*retryCount)
				continue
			else:
				# Exit from the error handling logic.
				break
		# We got here after exhausting retries: report the failing node so the
		# caller can rebuild its excluded-nodes list and re-route.
		if retryCount > 0:
			if settings.debug:
				print "DEBUG: Node unavailable, re-routing to a new node."
			nirvanix.ex_queue.put(self.hostname.split('.')[0])
		if settings.debug:
			print "DEBUG: DownloadThread complete"
		# Signal completion to the (module-global) active-thread counter.
		nirvanix.activethreads = nirvanix.activethreads - 1
		return

class Connection:
	"""HTTP(S) client for the Nirvanix web-service API.

	Handles plain form-encoded service calls (request / request_get /
	request_post) and chunked, resumable multipart/form-data file uploads
	(uploadfile).  Both paths honour the http_proxy environment variable.
	"""
	def __init__(self, base_url, show_percentage = True):
		# base_url is the service root, e.g. "http://services.nirvanix.com".
		# show_percentage toggles the console progress bar during uploads.
		self.base_url = base_url
		self.url = urlparse.urlparse(base_url)
		# Lazily-created persistent httplib connection used by post_multipart.
		self.host = None
		self.show_percentage = show_percentage

	def request_get(self, resource, args = None):
		# Convenience wrapper for a GET service call.
		return self.request(resource, "GET", args)

	def request_post(self, resource, args = None):
		# Convenience wrapper for a POST service call.
		return self.request(resource, "POST", args)

	def request(self, path, method = "GET", args = None):
		"""Send one request to the service host and return the raw body.

		path   -- URL path on the service host.
		method -- HTTP verb ("GET" or "POST").
		args   -- pre-encoded form data string.  NOTE(review): despite the
		          None default, args=None raises a TypeError on the
		          concatenation below -- confirm callers always pass a string.

		HTTP vs HTTPS is chosen from the scheme of base_url; requests are
		routed through http_proxy when that environment variable is set.
		"""
		webservice = None
		args = args + '\r\n'

		header = {'User-Agent': 'Nirvanix Python API', 'Host': self.url.netloc, 'Content-length': '%d' % len(args), 'Content-Type': 'application/x-www-form-urlencoded'}

		proxy = os.getenv('http_proxy')
		if proxy == None:
			if (self.url.scheme == 'http'):
				webservice = httplib.HTTPConnection(self.url.netloc)
				webservice.request(method,path,body=args,headers=header)
			else:
				webservice = httplib.HTTPSConnection(self.url.netloc)
				webservice.request(method,path,body=args,headers=header)
		else:
			# When proxying, the request line must carry the absolute URL.
			proxy_url = urlparse.urlparse(proxy)
			if (self.url.scheme == 'http'):
				webservice = httplib.HTTPConnection(proxy_url.netloc)
				webservice.request(method,"http://" + self.url.netloc + path,body=args,headers=header)
			else:
				webservice = httplib.HTTPSConnection(proxy_url.netloc)
				webservice.request(method,"https://" + self.url.netloc + path,body=args,headers=header)
				
		resp = webservice.getresponse()
		# Read and return the response body.
		if settings.debug:
			print "DEBUG: HTTP Response: ", resp.status, resp.msg
		return resp.read()
		   
	def post_multipart(self, host, selector, fields, files, start, end, length, retry=False):
		"""
		Post fields and files to an http host as multipart/form-data.
		fields is a sequence of (name, value) elements for regular form fields.
		files is a sequence of (name, filename, value) elements for data to be uploaded as files.
		start/end are the inclusive byte offsets of the file part and length
		is the total file size, for partial/resumable uploads.
		retry=True appends rangeOverwrite=true so the server accepts a
		re-send of a range it may already hold.
		Return (status, reason, body) from the server response.
		"""
		
		content_type, body = self.encode_multipart_formdata(fields, files, start, end, length)

		# Create the connection once and reuse it for subsequent chunks.
		if self.host == None:
			# NOTE(review): this first construction is unconditionally
			# replaced by the proxy-aware logic just below -- it looks
			# redundant; confirm before removing.
			version = sys.version[:3]
			# NOTE(review): sys.version[:3] is a string (e.g. "2.7"); on
			# CPython 2 a str never compares <= a float, so the timeout=20
			# branch is always taken -- confirm that is the intent.
			if version <= 1.5:
				socket.setdefaulttimeout(20)
				self.host = httplib.HTTPConnection(host)
			else:
				self.host = httplib.HTTPConnection(host, timeout=20)

			proxy = os.getenv('http_proxy')
			if proxy == None:
				version = sys.version[:3]
				if version <= 1.5:
					socket.setdefaulttimeout(20)
					self.host = httplib.HTTPConnection(host)
				else:
					self.host = httplib.HTTPConnection(host, timeout=20)
			else:
				proxy_url = urlparse.urlparse(proxy)
				version = sys.version[:3]
				if version <= 1.5:
					socket.setdefaulttimeout(20)
					self.host = httplib.HTTPConnection(proxy_url.netloc)
				else:
					self.host = httplib.HTTPConnection(proxy_url.netloc, timeout=20)

		headers = {
			'Content-Length': str(len(body)),
			'User-Agent': 'Nirvanix Python SDK',
			'Content-Type': content_type
		}
		url = selector
		if retry:
			# Tell the server to overwrite any bytes it already holds for
			# this range.
			url = url + "?rangeOverwrite=true"
		self.host.request('POST', url, body, headers)
		res = self.host.getresponse()
		if settings.debug:
			print "DEBUG: HTTP Response Status Code: " + str(res.status)
			print "DEBUG: HTTP Response Message: " + str(res.reason)
		return res.status, res.reason, res.read()

	def encode_multipart_formdata(self, fields, files, start, end, length, boundary = None, buf = None):
		"""
		fields is a sequence of (name, value) elements for regular form fields.
		files is a sequence of (name, filename, value) elements for data to be uploaded as files.
		start is the start of the file part, used for partial file uploads.
		end is the end of the file part, used for partial file uploads.
		length is the total length of the file.
		boundary can be set if you want to specify your own boundary.
		buf should only be set if you need to pre-pend some data to the stream; be aware of encoding issues.
		Return (content_type, body) ready for an httplib.HTTP instance.
		"""
		if boundary is None:
			boundary = '----------NvxBOUNDNvxBOUNDNvxBOUND'
		# Start from an empty unicode string; text parts accumulate as
		# unicode until the binary payload forces a raw buffer below.
		if buf is None:
			buf = u''
		# Loop through each pair, emitting a Content-Disposition part per field.
		for(key, value) in fields:
			buf += '--%s\r\n' % boundary
			buf += 'Content-Disposition: form-data; name="%s"' % key
			buf += '\r\n\r\n' + value + '\r\n'
		# loop through all files / file parts doing the upload.  Realistically this should be called
		# one at a time but (should) support multiples.
		for(key, filename, value) in files:
			buf += '--%s\r\n' % boundary
			buf += 'Content-Disposition: form-data; name="%s"; filename="%s"\r\n' % (key, filename)
			buf += 'Content-Type: %s\r\n' % self.get_content_type(filename)
			# Inclusive byte range of this part within the whole file.
			buf += 'Content-Range: %s-%s/%s\r\n\r\n' % (start, end, length)
			# Convert the accumulated text to a raw buffer before appending
			# the file bytes so no unicode encoding is applied to the data.
			if (value != None):
				buf = buffer(buf.encode('utf-8')) + buffer(value) 
		# Append the closing boundary after the last file.
		buf += '\r\n--%s--\r\n' % boundary
		content_type = 'multipart/form-data; boundary=%s' % boundary
		return content_type, str(buf)

	def get_footer(self, boundary = None):
		"""
		Generate the closing multipart boundary used to finish off the file.
		"""
		if boundary is None:
			boundary = '----------NvxBOUNDNvxBOUNDNvxBOUND'
		CRLF = '\r\n'
		L = []
		L.append('')
		L.append('--' + boundary + '--')
		L.append('')
		return CRLF.join(L)

	def get_content_type(self, filename):
		# Guess the MIME type from the filename; default to a binary stream.
		return mimetypes.guess_type(filename)[0] or 'application/octet-stream'

	#--- Upload file ---#

	def uploadfile(self, uploadHost, uploadToken, localFilename, destFolderPath, destFilename, callbackURL):
		"""Upload a local file to a Nirvanix upload node in 1MB chunks,
		retrying failed chunks with a range overwrite.

		uploadHost     -- the host that will do the upload for you.
		uploadToken    -- token authorising the upload.
		localFilename  -- path of the local file to send.
		destFolderPath -- destination folder on the remote side.
		destFilename   -- filename to store the data under remotely.
		callbackURL    -- optional notification URL; may be None.

		Raises RetryException when a chunk cannot be delivered within
		settings.number_of_retries attempts, or on a bad status line.
		"""
		url = "/upload.ashx"
		params = [('uploadtoken', uploadToken), ('destFolderPath', destFolderPath)]
		if (callbackURL != None):
			params.append(('callbackURL', callbackURL))
		f = file(localFilename, "rb")
		# 1 Mb is fairly efficient but may need to be changed based on memory or network speed.
		buffersize = 1024*1024
		start = f.tell()
		data = f.read(buffersize)
		filelen = os.path.getsize(localFilename)
		retryCount = 0
		# Draw the initial (empty) progress bar.
		if self.show_percentage:
			percent = 0
			count = 0
			kbpersec = 0
			sys.stdout.write("\r%-30s |%-20s|%3d%% %6dkb/sec" % (destFilename,'#'*count,percent*100,kbpersec))
			sys.stdout.flush()

		while retryCount < settings.number_of_retries:
			# Force post_multipart to open a fresh connection per attempt.
			self.host = None
			try:
				while data:
					start_time = time.time()
					if retryCount == 0:
						# Content-Range offsets are inclusive, hence the -1.
						datalen = len(data) - 1
						end = start + datalen
						if settings.debug:
							print "DEBUG: Uploading from byte: %d" % start
						httpstatus, httpreason, httpbody = self.post_multipart(uploadHost, url, params, [('FILE1',destFilename,data)],
							start, end, filelen)
						if httpstatus > 206:
							raise RetryException(httpreason)
						else:
							nirvanixutils.check_response(httpbody)
					else:
						# Retry: re-send the same range with rangeOverwrite=true.
						if settings.debug:
							print "DEBUG: Retrying from byte: %d" % start
						httpstatus, httpreason, httpbody = self.post_multipart(uploadHost, url, params, [('FILE1',destFilename,data)],
							start, end, filelen, True)
						if httpstatus > 206:
							raise RetryException(httpreason)
						else:
							nirvanixutils.check_response(httpbody)
					end_time = time.time()
					if self.show_percentage:
						percent = (float(end+1)/float(filelen))
						count = int(percent * 20)
						# NOTE(review): integer division followed by floor
						# division truncates the rate; display-only, but //
						# was probably meant to be / -- confirm.
						kbpersec = float(datalen / 1024) // float(end_time - start_time)
						sys.stdout.write("\r%-30s |%-20s|%3d%% %6dkb/sec" % (destFilename,'#'*count,percent*100,kbpersec))
						sys.stdout.flush()

					# Advance to the next chunk; a clean chunk resets the
					# retry budget.
					start = f.tell()
					data = f.read(buffersize)
					retryCount = 0
			except RetryException:
				if settings.debug:
					print "DEBUG: Error passing data."
				retryCount+=1
				# Linear back-off before retrying the failed chunk.
				time.sleep(settings.retry_sleep_time * retryCount)
				continue
			except socket.error, strerror:
				if settings.debug:
					print "DEBUG: Socket error connecting to node, retrying. Error: %s" % (strerror)
				retryCount+=1
				time.sleep(settings.retry_sleep_time * retryCount)
				continue
			except httplib.BadStatusLine, e:
				# A bad status line is not retried here; surface it to the caller.
				raise RetryException(e)
			except httplib.HTTPException:
				# Likely a failed read or timeout, so retry.
				# NOTE(review): strerror is only bound by a prior socket.error,
				# so this debug print can raise NameError -- confirm.
				if settings.debug:
					print "DEBUG: Http error connecting to node, retrying. Error: %s" % (strerror)
				retryCount+=1
				time.sleep(settings.retry_sleep_time * retryCount)
				continue
			else:
				# Whole file sent; exit the retry loop.
				break
		f.close()
		if retryCount > 0:
			raise RetryException("The maximum number of retries has been exceeded.")
