#!/usr/bin/env python
"""
Copyright (c) 2008, Chris Weisel
All rights reserved.

Redistribution and use in source and binary forms, with or without modification, 
are permitted provided that the following conditions are met:

    * Redistributions of source code must retain the above copyright notice, 
      this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright notice, 
      this list of conditions and the following disclaimer in the documentation 
      and/or other materials provided with the distribution.
    * Neither the name of the Space Hamsters Bot Project nor the names of its contributors 
      may be used to endorse or promote products derived from this software without 
      specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY 
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT 
SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT 
OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR 
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
"""

import urllib2, re, time, urllib
from BeautifulSoup import BeautifulSoup
import settings

# URL templates for the Space Hamsters game endpoints.
# planeturl: planet map viewport (planet id, x/y viewport origin, city id)
planeturl =  'http://www.spacehamsters.com/planetmap.aspx?pid=%d&x=%d&y=%d&nacid=%d'
# cityurl: city overview page, also selects the active city server-side
cityurl = 'http://www.spacehamsters.com/citymap.aspx?nacid=%d'
# lookurl / exploreurl: trigger a "dospec" (look) or "explore" action on a tile
lookurl =    'http://www.spacehamsters.com/planetbuild.aspx?pid=%d&x=%d&y=%d&rx=15&ry=8&z=3&op=dospec'
exploreurl = 'http://www.spacehamsters.com/planetbuild.aspx?pid=%d&x=%d&y=%d&rx=15&ry=8&z=3&op=explore'
# techsurl: research tree page; researchurl: start researching a tech
techsurl = 'http://www.spacehamsters.com/researchtree.aspx?tab=3'
researchurl = 'http://www.spacehamsters.com/researchops.aspx?sid=%d&tab=%d&action=start'

# Shared URL opener; main() rebuilds it with the session cookie after login.
opener = urllib2.build_opener()

def main():
	loggedin = 0
	cookies = {}
	
	while 1:
		try:
			if not loggedin:
				opener = urllib2.build_opener()
				login = opener.open('http://www.spacehamsters.com/logon.aspx')
				cookies["ASP.NET_SessionId"] = get_session_id(login.info()['Set-Cookie'])
				if cookies["ASP.NET_SessionId"] == None:
					raise Exception('bad session id')
				headers = [('Cookie', ';'.join([str(key) + '=' + str(cookies[key]) for key in cookies.keys()]))]
				print headers
				opener.addheaders = headers
				data = 'username=%s&password=%s&action=Logon' % settings.user
				opener.open('http://www.spacehamsters.com/logon.aspx', data)
				loggedin = 1
			
			for city in settings.cities:
				cityid = city[1]['id']
				for planetid in city[1]['planets']:
					x, y = 14, 7
					
					soup = BeautifulSoup(opener.open(planeturl % (planetid, x, y, cityid)))
					mapx, mapy = get_map_size(soup)
					
					while 1:
						print 'Get Map %s' % (repr((planetid, x, y, cityid)))
						soup = BeautifulSoup(opener.open(planeturl % (planetid, x, y, cityid)))
						if get_explorers(soup):
							if not do_looks(soup, planetid, city):
								if not do_explores(soup, planetid, city):
									x = x + 14
									if x > mapx and y > mapy:
										print 'city %s has nothing to do on planet #%d' % (city[0], planetid)
										break
									if x > mapx:
										x, y = 14, y + 7
						else:
							print 'city %s has no available explorers' % city[0]
							break
							
				do_research(cityid, settings.techs)
			print 'time for a nap (45s)'
			time.sleep(45)
		except Exception, inst:
			print "Exception, logging out and sleeping 30 seconds.  Details:"
			print type(inst)     # the exception instance
			print inst           # __str__ allows args to printed directly
			cookies = {}
			opener = urllib2.build_opener()
			loggedin = 0
			time.sleep(30)
			
def get_session_id(cookie):
	"""Extract the ASP.NET session id from a Set-Cookie header string.

	Returns the value of the ``ASP.NET_SessionId=...`` pair, or None if
	the header does not contain one.
	"""
	# Single pattern replaces the old five-fragment concatenation; the
	# four captures around the value were never used.
	m = re.search(r'ASP\.NET_SessionId=([a-z][a-z0-9_]*)',
	              cookie, re.IGNORECASE | re.DOTALL)
	if m:
		return m.group(1)
	return None
			
def get_explorers(soup):
	"""Return the number of explorers currently available.

	Finds the page text containing 'Explorers' and parses the first
	integer out of it (the second integer -- presumably the total --
	is matched but unused).
	"""
	exp_text = soup.find(text=re.compile('Explorers'))

	# "Explorers<char><ws><avail><ws><char><ws><total>"
	pattern = re.compile(r'Explorers.\s+(\d+)\s+.\s+(\d+)',
	                     re.IGNORECASE | re.DOTALL)
	match = pattern.search(exp_text)
	return int(match.group(1))
	
def get_map_size(soup):
	"""Parse the 'Map Size' text from a planet page.

	The pattern skips one leading integer before capturing the two
	dimensions.  Returns an (x, y) tuple of ints, or None when the
	expected text/pattern is not found (original behavior).
	"""
	mapsize = soup.find(text=re.compile('Map Size'))

	# Same semantics as the original five-fragment pattern.
	# FIX: the original body mixed space-indented lines into a
	# tab-indented file (a TabError under Python 3); normalized to tabs.
	rg = re.compile(r'.*?\d+.*?(\d+).*?(\d+)', re.IGNORECASE | re.DOTALL)
	m = rg.search(mapsize)
	if m:
		return (int(m.group(1)), int(m.group(2)))
	return None
	
	
def do_looks(soup, planetid, city):
	looks = soup.findAll(attrs={'onclick':re.compile('dospec')}, limit=1)
	if len(looks) > 0:
		look = looks[0]
		for attr in look.attrs:
			if attr[0] == u'onclick':
				re1='.*?'	# Non-greedy match on filler
				re2='(\\d+)'	# Integer Number 1
				re3='.*?'	# Non-greedy match on filler
				re4='(\\d+)'	# Integer Number 2

				rg = re.compile(re1+re2+re3+re4,re.IGNORECASE|re.DOTALL)
				m = rg.search(attr[1])

				x=int(m.group(1))
				y=int(m.group(2))

				opener.open(lookurl % (planetid, x, y))
				print 'city %s looked at planet #%d, x=%d, y=%d' % (city[0], planetid, x, y)
				
	return len(looks)
				
def do_explores(soup, planetid, city):
	explores = soup.findAll(attrs={'onclick':re.compile('explore')}, limit=1)
	if len(explores) > 0:
		explore = explores[0]
		for attr in explore.attrs:
			if attr[0] == u'onclick':
				re1='.*?'	# Non-greedy match on filler
				re2='(\\d+)'	# Integer Number 1
				re3='.*?'	# Non-greedy match on filler
				re4='(\\d+)'	# Integer Number 2

				rg = re.compile(re1+re2+re3+re4,re.IGNORECASE|re.DOTALL)
				m = rg.search(attr[1])

				x=int(m.group(1))
				y=int(m.group(2))

				opener.open(exploreurl % (planetid, x, y))
				print 'city %s explored at planet #%d, x=%d, y=%d' % (city[0], planetid, x, y)
	return len(explores)
	
def do_research(cityid, techs):
	opener.open(cityurl % cityid)
	for tech in techs:
		try:
			print 'city %s trying to research tech #%d' % (cityid, tech)
			opener.open(researchurl % (tech, 3))
		except Exception, ex:
			pass

# Run the bot loop when executed as a script (main() never returns).
if __name__ == '__main__':
	main()
		
