import time; 
import pickle;
import urllib2; 
import urllib;
import re;
from HTMLParser import HTMLParser;
import os
import md5
import mimetypes
#from gzip import GzipFile
import cStringIO
from cPickle import loads,dumps
import cookielib
import copy    
import ystockquote
import datetime

class GoldmanParser(HTMLParser):
	"""Scrape the Goldman conviction-list HTML table into self.picks.

	self.picks maps a ticker symbol to a list of scraped cell values;
	downstream code (runGoldman) reads them as [rating, price, target].
	self.subElCount counts 'snml' cells within the current table row and
	is also bumped inside handle_data to step past cells we do not keep.
	"""
	def __init__(self):
		HTMLParser.__init__(self)
		self.picks = {}
		self.curStock = ''
		self.subElCount = 0

	def handle_starttag(self, tag, attrs):
		# Rows of interest carry a bgcolor attribute; data cells carry
		# an 'snml' attribute value. Unadorned tags are ignored.
		if not attrs:
			return
		first = attrs[0]
		if tag == 'tr' and first[0] == 'bgcolor':
			self.subElCount = 0
		elif tag == 'td' and first[1] == 'snml':
			self.subElCount += 1

	def handle_data(self, data):
		count = self.subElCount
		if count == 2:
			# Second 'snml' cell: the ticker symbol (skip whitespace-only
			# text nodes so stray whitespace doesn't clobber curStock).
			if data.strip():
				self.curStock = data
				self.picks[self.curStock] = []
		elif count == 3:
			# Rating cell (e.g. 'Buy' / 'Sell').
			self.picks[self.curStock].append(data)
			self.subElCount = count + 1
		elif count == 5:
			# Price cell; drops the first three chars (currency prefix,
			# presumably -- confirm against the live page).
			self.picks[self.curStock].append(data[3:])
			self.subElCount = count + 1
		elif count == 7:
			# Target cell; drops the trailing character.
			self.picks[self.curStock].append(data[:-1])
			self.subElCount = count + 1

# --- Strategy tuning constants and session state ---
NDOLLARS = 1000 #dollar budget per newly opened position (see runGoldman)
NSAMPLES = 100 #max moving-average samples retained per company (runConcavity)
TSELL = 0 #trend constants
TBUY = 1
DIS_THRESH = 2 #i.e. more than 50% of samples disagree with trend -- refine this figure
# Session cookie for goldman.com; refreshed in-place by getCleanGoldman()
# whenever the response carries a new GsLogin_prod Set-Cookie header.
gCookie = 'GsLogin_prod=1dU3tHZGFnfHrYKRHBKUMIW8BULP5/c0qQmPMliPtM7n9A==' #replace this string
def getCleanGoldman():
	global gCookie
	headersGoldman = {'Host':'www.goldman.com','User-Agent':'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.15) Gecko/2009102815 Ubuntu/9.04 (jaunty) Firefox/3.0.15', 'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-us,en;q=0.5', 'Keep-Alive':'300', 'Connection':'keep-alive', 'Referer':'https://www.goldman.com/gs/p/iris/product/latest?productid=4020', 'Cookie':gCookie}
	urlGoldman = "https://www.goldman.com/gs/p/iris/convictionlist?region=220010000"
	goldmanReq = urllib2.Request(urlGoldman, headers=headersGoldman)
	goldmanPage = urllib2.urlopen(goldmanReq)
	goldmanText = goldmanPage.read()
	goldmanHeaders = goldmanPage.info().headers
	for header in goldmanHeaders:
		if header.__contains__("GsLogin_prod"):
			gCookie = header.split(": ")[1].split("; ")[0]
	print gCookie
	print goldmanHeaders
	stripScript = re.compile("<script.*?>(.*?)</script>", flags=re.DOTALL)
	return stripScript.sub("", goldmanText)
def marketOpen():
	"""Return True iff Yahoo reports historical price data for GOOG
	dated today (used as a proxy for 'the market traded today').

	NOTE(review): assumes get_historical_prices returns more than one
	row (header + data) on a trading day -- confirm against ystockquote.
	"""
	today = datetime.date.today().strftime('%Y%m%d')
	rows = ystockquote.get_historical_prices('GOOG', today, today)
	return len(rows) > 1
def getHoldingsDb():
	"""Open the pickled holdings database (symbol -> shares held) for
	read/write. NOTE(review): 'r+' requires the file to already exist.
	"""
	path = "goldmanDB.txt"
	return open(path, "r+")
def getConcavityDb():
	"""Open the pickled moving-average database (symbol -> samples) for
	read/write. NOTE(review): 'r+' requires the file to already exist.
	"""
	path = "concavityDB.txt"
	return open(path, "r+")
def getConcavityList():
	"""Open the newline-separated symbol list used to seed the concavity
	strategy when not running in tandem with the Goldman strategy.
	"""
	path = "concavityList.txt"
	return open(path, "r+")
def purchase(symbol, shares):
	print "purchase", symbol, shares
	symbol = symbol.replace('_', '')
	purch_url = "http://www.kaching.com/api/users/84709/portfolio/orders"
	purch_data = urllib.urlencode({'type':'market-order', 'action':'buy', 					 'symbol':symbol,'quantity':str(shares)})
	purch_headers = {'X-KaChing-ApiKey':'BFwdp7vHGQBzDBWU37XsHerjAcyvTnna',
			   'X-KaChing-RemoteKey':'blvd1248curfew'}
	purchase_req = urllib2.Request(purch_url,data=purch_data,
								   headers=purch_headers)
	purchase_info = urllib2.urlopen(purchase_req)
	#print purchase_info.read()
def sell(symbol, shares):
	print "sell", symbol, shares
	symbol = symbol.replace('_', '')
	sell_url = "http://www.kaching.com/api/users/84709/portfolio/orders"
	sell_data = urllib.urlencode({'type':'market-order', 'action':'sell', 					 'symbol':symbol,'quantity':str(shares)})
	sell_headers = {'X-KaChing-ApiKey':'BFwdp7vHGQBzDBWU37XsHerjAcyvTnna',
			   'X-KaChing-RemoteKey':'blvd1248curfew'}
	sell_req = urllib2.Request(sell_url,data=sell_data,
								   headers=sell_headers)
	print sell_req
	sell_info = urllib2.urlopen(sell_req)
	time.sleep(5)
	#print sell_info.read() 
def runGoldman(sellOnly, storedCompanies):
	"""Trade on the scraped Goldman conviction list.

	sellOnly -- when True, never open new positions.
	storedCompanies -- dict symbol -> shares held; mutated in place.

	Buys a 'Buy'-rated pick with target > 5 if not already held; sells a
	held pick rated 'Sell' or with target < 5; and sells any holding that
	has dropped off the list entirely.
	"""
	remaining = copy.copy(storedCompanies.keys())
	parser = GoldmanParser()
	parser.feed(getCleanGoldman())
	for company in parser.picks:
		c_data = parser.picks[company]  # [rating, price, target]
		owned = company in remaining
		if owned:
			remaining.remove(company)
		rating = c_data[0]
		if rating == 'Buy' and float(c_data[2]) > 5 and not owned and not sellOnly:
			shares = int(NDOLLARS / float(c_data[1]))
			purchase(company, shares)
			storedCompanies[company] = shares
		elif rating == 'Sell' and not owned:
			pass  # would short-sell here
		elif (float(c_data[2]) < 5 or rating == 'Sell') and owned:  # high precedence sell indicator
			sell(company, storedCompanies[company])
			del storedCompanies[company]
	# Anything we still hold that vanished from the list gets sold.
	for dropped in remaining:
		sell(dropped, storedCompanies[dropped])
		del storedCompanies[dropped]
def runConcavity(storedCompanies, companyMA, useGoldman):
	if useGoldman:
		goldmanParser = GoldmanParser()
		goldmanParser.feed(getCleanGoldman())
		for company in goldmanParser.picks:
			print "CONCAVITY ", company
			if not companyMA.__contains__(company):
				companyMA[company] = []
	for company in companyMA:
		print "CONCAVITY ", company
		if len(companyMA[company]) > NSAMPLES:
			companyMA[company] = companyMA[company][1:] #eliminate oldest of 100 data points
		companyMA[company].append(ystockquote.get_200day_moving_avg(company)) #or 50-day
		prevDelta = None 
		trend = None
		numFollowers = 0 #Samples that align with trend
		numDissidents = 0 #Samples that don't align
		flippedConcav = False #Legitimate trend identified + concavity change
		for i in range(1, len(companyMA[company])):
			curDelta = int(companyMA[company][i]) - int(companyMA[company][i-1])
			if prevDelta != None:
				deltaDelta = curDelta - prevDelta
				if deltaDelta < 0:
					if trend == None:
						trend = TSELL
						numFollowers = 1
					elif trend == TSELL:
						numFollowers += 1
					else:
						numDissidents += 1
						if numDissidents > 0 and numFollowers/numDissidents < DIS_THRESH:
							flippedConcav = False
						elif numDissidents == 0 and numFollowers >= DIS_THRESH:
							flippedConcav = True 
						trend = TSELL
						numFollowers = 1
						numDissidents = 0
				else:
					if trend == None:
						trend = TBUY
						numFollowers = 1
					elif trend == TBUY:
						numFollowers += 1
					else:
						numDissidents += 1
						if numDissidents > 0 and numFollowers/numDissidents < DIS_THRESH:
							flippedConcav = False
						elif numDissidents == 0 and numFollowers >= DIS_THRESH:
							flippedConcav = True
						trend = TBUY
						numFollowers = 1
						numDissidents = 0
			prevDelta = curDelta
		if ((numDissidents > 0 and numFollowers/numDissidents > DIS_THRESH) or 
		(numDissidents == 0 and numFollowers >= DIS_THRESH)) and flippedConcav:
			if trend == TBUY:
				purchase(company, NSHARES)
				storedCompanies[company] = NSHARES
			elif storedCompanies.__contains__(company): #ensure we own before selling
				sell(company, storedCompanies[company])	
				storedCompanies.pop(company)	

def run(tandem):
	"""Main trading loop; runs forever.

	tandem -- when True, seed the concavity strategy from the Goldman
	list and also run the Goldman strategy in sell-only mode; when
	False, concavity symbols come from concavityList.txt.

	Sleeps through weekends and days the market didn't trade; otherwise
	runs the strategies once and checkpoints state to the pickle files.
	"""
	while True:
		curTime = time.localtime()
		# BUGFIX: the original tested curTime[1], which is tm_mon (the
		# month), so from May onward it slept 24h forever. tm_wday is
		# 0=Monday .. 6=Sunday, so > 4 means Saturday/Sunday.
		if curTime.tm_wday > 4: #weekend
			time.sleep(60*60*24)
			continue
		elif not marketOpen():
			if curTime[3] < 14: #2:00, assumed time when Yahoo releases day's data TODO: may have to modify
				time.sleep(60*60*(14-curTime[3]))
			else:
				time.sleep(60*60*24) #probably a holiday, wait until next day
			continue
		holdingsDb = getHoldingsDb()
		concDb = getConcavityDb()
		try:
			storedCompanies = pickle.load(holdingsDb)
		except EOFError:
			storedCompanies = {} #first run: empty holdings
		try:
			companyMA = pickle.load(concDb)
		except EOFError:
			companyMA = {}
			if not tandem:
				# Seed the watch list from the config file.
				concList = getConcavityList()
				for company in concList.read().split("\n"):
					if company != '':
						companyMA[company.strip()] = []
				concList.close()
		runConcavity(storedCompanies, companyMA, tandem)
		if tandem:
			runGoldman(True, storedCompanies)
		# BUGFIX: rewind and truncate before dumping. pickle.load left the
		# file position at end-of-data, so dump used to append a second
		# pickle and every later load read the original, stale state.
		concDb.seek(0)
		concDb.truncate()
		pickle.dump(companyMA, concDb)
		holdingsDb.seek(0)
		holdingsDb.truncate()
		pickle.dump(storedCompanies, holdingsDb)
		concDb.flush() #writes buffer to file
		os.fsync(concDb.fileno())
		concDb.close() #BUGFIX: handles were never closed (leaked every loop)
		holdingsDb.flush() #writes buffer to file
		os.fsync(holdingsDb.fileno())
		holdingsDb.close()
		time.sleep(60*60*24)
		#only run when market open
		#only run when market open

	
# Guard the entry point: the original called run(False) unconditionally at
# module level, which started the infinite trading loop on mere import.
if __name__ == "__main__":
	run(False) #standalone concavity strategy; symbols from concavityList.txt




	 
