import asyncore, asynchat, socket, sys ,threading, os , os.path,traceback, time , base64 , email
import re, socket, urllib, urlparse
import httplib, urllib, socket
import Queue
import smtpd
import regxml
# NOTE(review): not referenced anywhere in this file -- presumably read by
# importers to limit listing to a single page; confirm against callers.
JUST_ONE_PAGE = True
__version__ = 'OWA WRAPPER 0.0.1'
# OWA/SMTP line terminator (CRLF) used for both reading and writing.
NEWLINE = TERMINATOR = '\r\n'
# Global timeout (seconds) applied to every socket this process opens.
socket.setdefaulttimeout(150)
class InvalidLogin(Exception):
	"""Raised when OWA rejects the supplied credentials at login time."""

class RetrievalError(Exception):
	"""Raised when an HTTP(S) exchange with the OWA server fails."""

# Toggle verbose request/response tracing in OWA.__request.
DEBUG = False
# NOTE: the duplicate socket.setdefaulttimeout(150) that used to live here
# was removed -- the module already sets it once near the top of the file.

# WebDAV PROPFIND body requesting the properties needed to build a message
# listing (message id, subject, folder flag, read state, size, URLs).
# The backslash continuations keep this a single string literal; the leading
# tabs of each continued line are part of the string and are harmless
# whitespace inside the XML.
LIST = "<?xml version='1.0'?>\
		<a:propfind xmlns:a='DAV:' xmlns:m='urn:schemas:mailheader:'  \
			xmlns:n='urn:schemas:httpmail:' xmlns:x='http://schemas.microsoft.com/exchange/'>\
			<a:prop>\
			<m:message-id/>\
			<m:subject/>\
			<a:isfolder/>\
			<n:read/>\
			<x:permanenturl/>\
			<a:displayname/>\
			<a:href/>\
			</a:prop>\
			</a:propfind>"

# Sample Exchange BMOVE request for reference:
#   BMOVE /pub2/folder1/ HTTP/1.1
#   Destination: http://www.contoso.com/pub2/folder2/
#   Host: www.contoso.com

# WebDAV MOVE body template; %s is filled with the destination href.
BMOVE= "<?xml version=\"1.0\" ?>\
			<D:move xmlns:D=\"DAV:\">\
			   <D:target>\
			<D:href>%s</D:href>\
			   </D:target>\
			</D:move>"
# Emptying the trash is done with the URL command /?Cmd=emptydeleteditems
class OWA(object):
	"Scraper that keeps track of getting and setting cookies."
	def __init__(self, domain, username, password):
		self.domain = domain
		self.username =  username
		self.password =  password
		self.is_logged_in = False
		self.base_href = None
		self.headers = {
						'User-Agent': 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X; en-US; rv:1.8.1.17) Gecko/20080829 Firefox/2.0.0.17',
						'Keep-Alive': '300',
						'Accept': 'text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5',
						'Connection': 'Keep-Alive'
						}
		self.keep_headers={'set-cookie':'Cookie','location':'_location'}
		self.host =  None
		self.conn = None
		self.secureconn = None
	def clone(self):
		pass
	def request(self,host,method,url,paramheaders,data,secure=True):
		#print "DATA:" , data
		headers = {}
		for name in self.headers:
			headers[name]= self.headers[name]
		for name in paramheaders:
			headers[name]= paramheaders[name]
		
		status, reason, data , hdrs= self.__request(host,method,url,headers,data,secure)
		if status > 399:
			raise RetrievalError
		if status==302:
			location = self.headers['_location']
			location = location + '/'
			protocol, host , url , parameters, query, fragment = urlparse.urlparse(location)
			return self.request(host,'GET',url,paramheaders,'')
		else:
			return status,reason, data	,hdrs

	def __request(self,host,method,url,headers,data,secure=True):
		
		
		try:
			if secure:
				if not self.secureconn or self.lasthost!=host:
					##print "Create secure connection"
					self.secureconn = httplib.HTTPSConnection(host)
					self.c = self.secureconn
			else:
				if not self.conn or self.lasthost!=host:
					##print "Create  connection"
					self.conn = httplib.HTTPConnection(host)
					self.c = self.conn
			self.lasthost = host
			if len(data)>0:
				headers['Content-Length'] = repr(len(data))
			headers['Host'] = host
			if method=='POST':
				##print 'POST'
				headers['Content-Type']='application/x-www-form-urlencoded'
			##print "Connecting...--------------------------------------------" 
			if DEBUG:
				print ">> ", method , host , url
				print  ">> " , data
			##print "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
			
			self.c.request(method, url, data, headers)
			response = self.c.getresponse() 
			status = response.status
			reason = response.reason
			data = response.read()
			headers =  response.getheaders()
			for name, value in headers:
				if name == 'set-cookie':
					cookies = value.split(' ')
					credentials = ''
					for cookie in cookies:
						if cookie.startswith('session') or cookie.startswith('cadata'):
							credentials+=cookie+' '
					self.headers['Cookie'] = credentials
				if name=='location':
					self.headers['_location'] = value
			if DEBUG:
				print "<< " ,status , reason
				#print headers
				print "<< " ,data
			return status, reason, data , headers
		except:
			err ="\n".join(traceback.format_exception(*sys.exc_info()))
			#if DEBUG:
				#print "---ERROR" , err
			raise RetrievalError



	def login(self):
		
		status, reason,html,hdrs =  self.request(self.domain,'POST','/exchweb/bin/auth/owaauth.dll',{},urllib.urlencode({
			'destination': urlparse.urljoin('https://'+self.domain, 'exchange'),
			'flags': '0',
			'username': self.username,
			'password': self.password,
			'SubmitCreds': 'Log On',
			'forcedownlevel': '0',
			'trusted': '4',
		}))
		##print html
		if 'You could not be logged on to Outlook Web Access' in html:
			raise InvalidLogin
		m = re.search(r'(?i)<BASE href="([^"]*)">', html)
		if not m:
			raise RetrievalError, "Couldn't find <base href> on page after logging in."
		self.base_href = m.group(1)
		##print "Base HREF:" , self.base_href
		self.is_logged_in = True
		return status, reason

	
		
	def getMessages(self, folder,firstPage=False):
		if not self.is_logged_in: self.login()
		protocol, host , burl , parameters, query, fragment = urlparse.urlparse(self.base_href)
		page = 1
		url = burl + urllib.quote(folder) + '/?Cmd=contents&SortBy=Received&SortOrder=descending&Page=%s'%page
		status, reason,html ,hdrs= self.request(self.domain,'GET',url,{}, '')
		messages = re.findall(r'(?i)NAME=MsgID value="([^"]*)"', html)
		if not firstPage:
			pages = re.findall(r'<B>&nbsp;of&nbsp;(.*)</B>',html)
			nr = 1
			if len(pages)==1:
				nr = int(pages[0])
				#if nr> 10:
				#	nr=10
				for i in range(2,nr+1):
					url = burl + urllib.quote(folder) + '/?Cmd=contents&SortBy=Received&SortOrder=descending&Page=%s'%i
					status, reason,html ,hdrs= self.request(self.domain,'GET',url,{}, '')
					msgs = re.findall(r'(?i)NAME=MsgID value="([^"]*)"', html)
					for msg in msgs:
						messages.append(msg)
		return messages
	
	def query(self,folder,data):
		#print "QUERY:", data
		##print "Inspecting " , msgid
		if not self.is_logged_in: self.login()
		protocol, host , burl , parameters, query, fragment = urlparse.urlparse(self.base_href)
		url = burl + urllib.quote(folder)  
		status, reason,html,hdrs = self.request(self.domain,'PROPFIND',url,{'Content-Type':'text/xml','Translate':'f'},data)
		return status, reason ,html, hdrs
		
	def getHeader(self,msgid):
		##print "Inspecting " , msgid
		if not self.is_logged_in: self.login()
		protocol, host , burl , parameters, query, fragment = urlparse.urlparse(self.base_href)
		url  = burl + msgid 
		status, reason,html,hdrs = self.request(self.domain,'HEAD',url,{'Translate':'f'}, '')
		return status, reason,html,hdrs
		
	def getMessage(self, msgid):
		"Returns the raw e-mail for the given message ID."
		##print "Fetching " , msgid
		if not self.is_logged_in: self.login()
		
		protocol, host , burl , parameters, query, fragment = urlparse.urlparse(self.base_href)
		#print burl
		#print msgid
		url  = burl + msgid +'?Cmd=body'
		status, reason,html ,hdrs= self.request(self.domain,'GET',url,{'Translate':'f'}, '')
		# Sending the "Translate=f" HTTP header tells Outlook to include
		# full e-mail headers. Figuring that out took way too long.
		return html

	def delete(self, msgid):
		"Deletes the e-mail with the given message ID."
		if not self.is_logged_in: self.login()
		protocol, host , burl , parameters, query, fragment = urlparse.urlparse(self.base_href)	
		url  = burl + msgid 
		status, reason,html ,hdrs  = self.request(self.domain ,'DELETE',url,{},'')

	def deleteMessage(self, msgid):
		"Deletes the e-mail with the given message ID."
		##print "Moving " , msgid , " to thrash folder"
		if not self.is_logged_in: self.login()
		protocol, host , burl , parameters, query, fragment = urlparse.urlparse(self.base_href)	
		url  = burl + msgid 
		status, reason,html ,hdrs  = self.request(self.domain ,'POST',url,{}, urllib.urlencode({
			'MsgId': msgid,
			'Cmd': 'delete',
			'ReadForm': '1',
		}))
		if status < 400:
			return True
		else:
			return False
	
	def sendMessage(self,subject,message):
		protocol, host , burl , parameters, query, fragment = urlparse.urlparse(self.base_href)
		tmp = urllib.urlencode({'x':subject})
		subject = tmp[2:]
		fullurl = burl + "/Drafts/"+subject+".EML"
		##print "Full:" , fullurl
		status, reason, html ,hdrs= self.request(self.domain,'PUT',fullurl,{'Content-Type':'message/rfc822'}, message)
		if status < 400:
			suburl = protocol+'://' + host + burl +"/##DavMailSubmissionURI##"
			return self.request(self.domain,'MOVE',fullurl,{'Destination':suburl},'')
		return status , reason ,html, hdrs
	def undeleteMessage(self,url):
		if not self.is_logged_in: self.login()
		protocol, host , burl , parameters, query, fragment = urlparse.urlparse(self.base_href)	
		index = url.rfind('/')
		name = url[0:index]
		furl  = burl + name
		status, reason,html ,hdrs  = self.request(self.domain ,'POST',furl,{}, urllib.urlencode({
			'MsgId': url,
			'Cmd': 'moveselect',
			'ReadForm': '1',
		}))
		if status < 400:
			status, reason,html ,hdrs  = self.request(self.domain ,'POST',burl,{}, urllib.urlencode({
				'Action': 'Apply',
				'Cmd': 'move',
				'FldID': '/Inbox',
			}))
			if status < 400:
				return True
			else:
				return False
		else:
			return False
	
	def emptyTrash(self):
		protocol, host , burl , parameters, query, fragment = urlparse.urlparse(self.base_href)
		tmp =  self.base_href+'/Deleted%20Items'
		
		
	def list(self,folder):
		if not self.is_logged_in: self.login()
		status, reason,html,hdrs = self.query(folder,LIST)
		parser = regxml.XMLParser()
		parser.parseXML(html)
		entries = parser.tag('a:response')
		result = []
		for entry in entries:
			isfoldertags = parser.tag('a:isfolder',entry)
			if not isfoldertags:
				print "Skip Message maybe folder"
				continue
			isFolder = int( parser.tag('a:isfolder',entry)[0].content)
			if isFolder:
				print "Skip Message folder"
				continue
			read = int( parser.tag('e:read',entry)[0].content)
			status = parser.tag('a:status',entry)[0].content
			if status=='HTTP/1.1 200 OK':
				id = parser.tag('d:message-id',entry)[0]
				uid = id.content
				if uid:
					uid = uid.replace("&lt;","<")
					uid = uid.replace("&gt;",">")
				href = parser.tag('a:href',entry)[0].content
				permanent = parser.tag('f:permanenturl',entry)[0].content
				protocol, host , burl , parameters, query, fragment = urlparse.urlparse(href)	
				index = burl.find( urllib.quote(folder))
				burl = burl[index-1:]
				length = parser.tag('a:getcontentlength',entry)[0].content
				if uid and href and length and permanent:
					#print uid, length, burl, permanent
					result.append((uid,length,burl,permanent,None))
		return result
		
