#revision 7 (Derek Leung)

import time; 
import pickle;
import urllib2; 
import urllib;
import re;
from HTMLParser import HTMLParser;
import os
import md5
import mimetypes
#from gzip import GzipFile
import cStringIO
from cPickle import loads,dumps
import cookielib
import copy    

class GoldmanParser(HTMLParser):
	"""Parse the Goldman conviction-list HTML table.

	Populates self.picks mapping a ticker string to a list of column
	strings, driven by a per-row cell counter (subElCount) that is reset
	on each table row and advanced on each 'snml' data cell.
	"""
	def __init__(self):
		HTMLParser.__init__(self)
		self.picks = {}       # ticker -> list of extracted column strings
		self.curStock = ''    # ticker of the table row currently being parsed
		self.subElCount = 0   # position of the current <td class=snml> cell within the row
	def handle_starttag(self, tag, attrs):
		# A <tr bgcolor=...> starts a new stock row (reset the counter);
		# each <td> whose first attribute value is 'snml' advances it.
		if len(attrs) == 0:
			return
		elif (tag == 'tr' and attrs[0][0]=='bgcolor'):
			self.subElCount = 0
		elif (tag == 'td' and attrs[0][1]=='snml'):
			self.subElCount += 1		 
	def handle_data(self, data):
		# Cells 2/3/5/7 carry the fields we keep (ticker, rating, and two
		# numeric columns).  After consuming cells 3/5/7 the counter is
		# bumped manually so extra text nodes in the same cell are skipped.
		if (self.subElCount == 2):
			if data.strip() != "":
				self.curStock=data 
				self.picks[self.curStock] = []
		elif (self.subElCount == 3):
			self.picks[self.curStock].append(data)
			self.subElCount += 1
		elif (self.subElCount == 5):
			# drops a fixed 3-char prefix — presumably a currency/label marker; TODO confirm
			self.picks[self.curStock].append(data[3:])
			self.subElCount +=1
		elif (self.subElCount == 7):
			# drops the final char — presumably a trailing '%' sign; TODO confirm
			self.picks[self.curStock].append(data[:-1])
			self.subElCount += 1

NDOLLARS = 1000;  # dollars allocated to each newly opened position
gCookie = 'GsLogin_prod=q6tYfgaSZ1m2k2WaHcKk3zyHKQR58hC6p7ZZ1LzSQ29NrQ==' #replace this string with a fresh GsLogin_prod session cookie; refreshed at runtime by getCleanGoldman()
def getCleanGoldman():
	global gCookie
	headersGoldman = {'Host':'www.goldman.com','User-Agent':'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.15) Gecko/2009102815 Ubuntu/9.04 (jaunty) Firefox/3.0.15', 'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-us,en;q=0.5', 'Keep-Alive':'300', 'Connection':'keep-alive', 'Referer':'https://www.goldman.com/gs/p/iris/product/latest?productid=4020', 'Cookie':gCookie}
	urlGoldman = "https://www.goldman.com/gs/p/iris/convictionlist?region=220010000"
	goldmanReq = urllib2.Request(urlGoldman, headers=headersGoldman)
	goldmanPage = urllib2.urlopen(goldmanReq)
	goldmanText = goldmanPage.read()
	goldmanHeaders = goldmanPage.info().headers
	for header in goldmanHeaders:
		if header.__contains__("GsLogin_prod"):
			gCookie = header.split(": ")[1].split("; ")[0]
	print gCookie
	print goldmanHeaders
	stripScript = re.compile("<script.*?>(.*?)</script>", flags=re.DOTALL)
	return stripScript.sub("", goldmanText)
def getDb():
	"""Open the persistent holdings database file for read/write.

	Returns a file handle opened in "r+" mode, positioned at the start;
	run() unpickles/pickles the holdings dict through it.  The file is
	created empty if it does not yet exist, since "r+" would otherwise
	raise IOError on the very first run.
	"""
	if not os.path.exists("goldmanDB.txt"):
		open("goldmanDB.txt", "w").close()
	return open("goldmanDB.txt", "r+")
def purchase(symbol, shares):
	print "purchase", symbol, shares
	symbol = symbol.replace('_', '')
	purch_url = "http://www.kaching.com/api/users/84709/portfolio/orders"
	purch_data = urllib.urlencode({'type':'market-order', 'action':'buy', 					 'symbol':symbol,'quantity':str(shares)})
	purch_headers = {'X-KaChing-ApiKey':'BFwdp7vHGQBzDBWU37XsHerjAcyvTnna',
			   'X-KaChing-RemoteKey':'blvd1248curfew'}
	purchase_req = urllib2.Request(purch_url,data=purch_data,
								   headers=purch_headers)
	purchase_info = urllib2.urlopen(purchase_req)
	#print purchase_info.read()
def sell(symbol, shares):
	print "sell", symbol, shares
	symbol = symbol.replace('_', '')
	sell_url = "http://www.kaching.com/api/users/84709/portfolio/orders"
	sell_data = urllib.urlencode({'type':'market-order', 'action':'sell', 					 'symbol':symbol,'quantity':str(shares)})
	sell_headers = {'X-KaChing-ApiKey':'BFwdp7vHGQBzDBWU37XsHerjAcyvTnna',
			   'X-KaChing-RemoteKey':'blvd1248curfew'}
	sell_req = urllib2.Request(sell_url,data=sell_data,
								   headers=sell_headers)
	sell_info = urllib2.urlopen(sell_req)
	#print sell_info.read() 
def run():
	"""Main polling loop: mirror Goldman's conviction list into the
	kaChing portfolio every 10 seconds, persisting holdings to disk.

	Per-company data from GoldmanParser is [rating, price, score]:
	buy ~$NDOLLARS worth when rated 'Buy' with score > 5, sell when the
	score drops below 5, the rating flips to 'Sell', or the company
	leaves the list entirely.
	"""
	db = getDb()
	try:
		storedCompanies = pickle.load(db)
	except EOFError:
		storedCompanies = {}  # empty/fresh database file
	while True:
		# Snapshot current holdings; anything still present after the
		# scan below has dropped off Goldman's list and must be sold.
		companiesCopy = copy.copy(storedCompanies.keys())
		goldmanParser = GoldmanParser()
		goldmanParser.feed(getCleanGoldman())
		for company in goldmanParser.picks:
			c_data = goldmanParser.picks[company]
			curOwned = company in companiesCopy
			if curOwned:
				companiesCopy.remove(company)
			if c_data[0] == 'Buy' and float(c_data[2]) > 5 and not curOwned:
				numShares = int(NDOLLARS / float(c_data[1]))
				purchase(company, numShares)
				storedCompanies[company] = numShares
			elif c_data[0] == 'Sell' and not curOwned:
				pass  # TODO: short-sell support
			elif ((float(c_data[2]) < 5) or c_data[0] == 'Sell') and curOwned:
				sell(company, storedCompanies[company])
				del storedCompanies[company]
		for removedCompany in companiesCopy:
			sell(removedCompany, storedCompanies[removedCompany])
			del storedCompanies[removedCompany]
		del companiesCopy
		# Rewrite the pickle from the start of the file.  The original
		# appended a new pickle each iteration, so the file grew without
		# bound and a restart reloaded the OLDEST snapshot instead of
		# the latest one.
		db.seek(0)
		pickle.dump(storedCompanies, db)
		db.truncate()
		db.flush()  # push buffer to the OS...
		os.fsync(db.fileno())  # ...and the OS buffer to disk
		time.sleep(10)
		

run()




	 
