import urllib2, mimetools, urlparse
from BeautifulSoup import BeautifulSoup
from ConfigParser import SafeConfigParser as ConfigParser
import db

import os
CONFIG_FILE = os.path.join(os.environ['HOME'], '.wjs.ini')

URL = "https://secure.eu.playstation.com/psn/mypsn/trophies/"

ENCODE_TEMPLATE= """--%(boundary)s
Content-Disposition: form-data; name="%(name)s"

%(value)s
""".replace('\n','\r\n')
def encode_multipart_formdata(fields):
	"""
	Encode form fields as a multipart/form-data request body.

	fields is a mapping of field name -> value for regular form fields
	(a dict is required: iteritems() is used).
	Return (body, content_type) ready for httplib.HTTP instance.
	"""
	BOUNDARY = mimetools.choose_boundary()
	parts = [ENCODE_TEMPLATE % {
				'boundary': BOUNDARY,
				'name': str(key),
				'value': str(value),
			} for key, value in fields.iteritems()]
	# RFC 2046: every boundary line, including the closing one, ends with
	# CRLF -- the original wrote '\n\r' (LF-CR), which is malformed.
	parts.append('--%s--\r\n' % BOUNDARY)
	body = "".join(parts)
	content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
	return body, content_type

def connect(url):
	"""
	Log in through the PSN sign-in form found at *url*.

	Fetches the page, collects the inputs of the form with id "mainform"
	(substituting the configured login/password for the loginName and
	password fields), and POSTs them as multipart/form-data.  The useful
	result is the session cookie stored by the globally installed
	cookie-aware opener; the response page itself is discarded.
	"""
	global config
	response = urllib2.urlopen(urllib2.Request(url))
	soup = BeautifulSoup(response.read())
	form = soup.find("form", id="mainform")
	assert form
	values = {}
	for node in form.findAll("input"):
		try:
			name = str(node["name"])
			if name == "loginName":
				values[name] = config.get("psn", "login")
			elif name == "password":
				values[name] = config.get("psn", "password")
			else:
				values[name] = node["value"]
		except KeyError:
			# inputs lacking a name (or a value, for plain inputs)
			# are deliberately skipped
			pass
	body, content_type = encode_multipart_formdata(values)
	# the form action may be relative to the (possibly redirected) page URL
	action_url = urlparse.urljoin(response.geturl(), form['action'])
	request = urllib2.Request(action_url, body, {'Content-Type': content_type})
	# drain the response so the request completes; the original parsed it
	# with BeautifulSoup into an unused local, which was wasted work
	urllib2.urlopen(request).read()

def get(url):
	"""
	Fetch *url* and return it parsed as a BeautifulSoup tree.

	If the page embeds the PSN sign-in iframe, log in through it first
	and then re-fetch the original URL (now authenticated via cookies).
	"""
	request = urllib2.Request(url)
	soup = BeautifulSoup(urllib2.urlopen(request).read())
	signin = soup.find("iframe", id="psnSigninFrame")
	if signin:
		connect(signin["src"])
		soup = BeautifulSoup(urllib2.urlopen(request).read())
	return soup

def get_image(url):
	"""Download *url* and return the raw response body as a byte string."""
	return urllib2.urlopen(urllib2.Request(url)).read()

def update_trophies(url, platform):
	"""
	Scrape the trophy page at *url* and rebuild that game's trophy rows.

	The game row is located by exact name on *platform*, falling back to
	a full-text-search match, and created if neither finds it.  Existing
	trophies for the game are deleted and re-inserted from the page.
	"""
	soup = get(url)
	name = unicode(soup.find("div", "gameTitle").find("a").string)
	print "updating", name

	try:
		game = db.Game.select(db.AND(db.Game.q.name == name, db.Game.q.platform == platform)).getOne()
		print "game found (%s)" % game.name
	except db.SQLObjectNotFound:
		# no exact match: try a full-text match before creating a new row
		game = db.getFTSMatch(name)
		if game:
			assert(game.platform == platform)
			print "game found using FTS (%s)" % game.name
		else:
			game = db.Game(name = name, platform = platform)
			print "game created (%s)" % game.name
			assert game.name == name

	# fetch the game's logo only the first time the game is seen
	if not game.icon:
		image = soup.find("div", "gameLogoImage").find("img")['src']
		image = get_image(urlparse.urljoin(url, image))
		game.icon = image

	# trophies are rebuilt from scratch on every update
	db.Trophy.deleteMany(db.Trophy.q.game == game)

	index = 0
	for node in soup.findAll("div", "gameLevelListItem"):
		obtained, hidden = True, False
		image = node.find("div", "gameLevelImage").find("img")['src']
		# a padlock image marks a not-yet-obtained trophy
		# NOTE(review): uses find() > 0, not >= 0 -- assumes 'padlock'
		# never appears at index 0 of the src URL; confirm
		if image.find(u'padlock') > 0:
			image = None
			obtained = False
		else:
			image = get_image(urlparse.urljoin(url, image))
		type = node.find("div", "gameLevelTrophyType").find("img")['alt']
		title, date, desc = None, None, None
		for p in node.find("div", "gameLevelDetails").findAll("p"):
			try:
				class_ = str(p["class"])
				if class_ == 'title':
					title = unicode(p.string)
				elif class_ == 'date':
					date = unicode(p.string)
			except KeyError:
				# a <p> without a class attribute holds the description
				desc = unicode(p.string)
		# unobtained trophies titled '???' are PSN "hidden" trophies
		if not obtained and title == u'???':
			title = None
			hidden = True

		trophy = db.Trophy(name = title, type = type, game = game, description = desc,
			icon = image, date = date, obtained = obtained, hidden = hidden, order = index)
		index += 1
	print game.name, "updated", game.trophies.count(), "trophies added"

def update():
	"""Walk the trophy summary table on the main page and refresh each game."""
	soup = get(URL)
	table = soup.find("table", "psnTrophyTable")
	assert table
	platform = db.getPlatform(u'PS3')
	for cell in table.findAll("td", "col1"):
		link = cell.find("a")
		update_trophies(urlparse.urljoin(URL, link["href"]), platform)

def main():
	"""Load configuration, set up cookie handling, and run the update."""
	global config
	config = ConfigParser()
	config.read(CONFIG_FILE)
	db.init(config.get("dsn", "games"))

	# a cookie-aware opener is required so the PSN login session persists
	# across the requests made by connect()/get()
	urllib2.install_opener(urllib2.build_opener(urllib2.HTTPCookieProcessor()))

	db.enableFTS()
	try:
		update()
	finally:
		db.disableFTS()

if __name__ == '__main__':
	main()
