import os,urllib, csv,stockdetails,datetime
from BeautifulSoup import BeautifulSoup
import subprocess

def timeStamped(fname, fmt='%Y-%m-%d-%H-%M-%S_{fname}'):
    """Prefix *fname* with the current timestamp.

    fmt is passed through strftime first, then has '{fname}'
    substituted, so callers may supply a custom pattern.
    """
    stamp = datetime.datetime.now().strftime(fmt)
    return stamp.format(fname=fname)

def tempDir():
    """Return the path of a '<TEMP>\\data\\' scratch directory, creating it
    on first use.

    Reads the Windows-style 'temp' environment variable and returns the
    data subdirectory path with a trailing backslash.  Raises TypeError
    if the 'temp' variable is unset (same as the original behavior).
    """
    # Look the env var up once instead of three separate getenv() calls.
    base = os.getenv("temp")
    data_dir = base + '\\data\\'
    # 'not os.path.exists(...)' replaces the deprecated '<> True' test.
    if not os.path.exists(data_dir):
        os.mkdir(data_dir)
    return data_dir

def open_ex(dir_path):
    """Open a Windows Explorer window at *dir_path*.

    Fire-and-forget: the Popen handle is discarded and no exit status is
    checked.
    """
    # Bug fix: the original ignored dir_path and always opened tempDir().
    # An argument list (not a concatenated shell string) keeps paths with
    # spaces intact.
    subprocess.Popen(['explorer', dir_path])
    return

def expirylist(s):
    """Scrape Yahoo Finance for the option expiry months of ticker *s*.

    Returns a list of expiry strings (e.g. '2012-09') taken from the
    'm=' query parameter of option-chain links on the quote page.
    Requires network access.
    """
    url = "http://finance.yahoo.com/q/op?s=" + s
    optionsPage = urllib.urlopen(url)
    soup = BeautifulSoup(optionsPage)
    expiries = []  # renamed: the original local shadowed the function name
    for link in soup.findAll('a'):
        # .get() replaces the deprecated Tag.has_key().  Note that
        # "'href' in link" would NOT be equivalent: in BeautifulSoup 3
        # __contains__ tests the tag's contents, not its attributes.
        href = link.get('href')
        if href is not None:
            if (href.count("/q/op?") > 0) and (href.count("m=") > 0):
                expiries.append(href.replace("/q/op?s=" + s + "&m=", ""))
    return expiries

#List of stocks to download data for.  Should be put in the command line or a text file.
stocks = ['GE','SU','TCK','C','BAC','JPM','INTC','GTY','HST','SPG','BPO','BAM']
#stocks = ['C','JPM']

# Open a timestamped CSV in the temp data dir ('wb' is the correct csv mode
# on Python 2).  NOTE(review): the file handle is never closed explicitly;
# the interpreter flushes it at exit.
f = csv.writer(open(tempDir() + timeStamped('options.csv'), 'wb'))
# NOTE(review): 'vol' appears twice in this header; the first occurrence is
# probably meant to be 'change' (Yahoo's option-table columns) -- confirm
# against the scraped page before renaming.
header_row = ['strike','opt_code','last_trade','vol','bid','offer','vol','open_interest','ticker','exp_date','spot_price','url']
f.writerow(header_row)

#Setup the urls where the data is.  <s> and <exp> are placeholders that get
#substituted per ticker / per expiry in the loop below.
url = "http://finance.yahoo.com/q/op?s=<s>&m=<exp>"
spot_url ="http://finance.yahoo.com/q?s=<s>"
outputdata = []
strDate = datetime.datetime.now()  # NOTE(review): unused below

#Download the data
for s in stocks:
    page = url.replace('<s>', s)
    spot = stockdetails.spot(s)  # current spot price via helper module
    exp = expirylist(s)          # expiry months available for this ticker
    for e in exp:
        fetch = page.replace('<exp>', e)
        optionsPage = urllib.urlopen(fetch)
        soup = BeautifulSoup(optionsPage)
        # Each matching <td> marks a data row; .parent.contents collects all
        # cells of that row.  yfnc_tabledata1 / yfnc_h are Yahoo's alternating
        # row classes, so both must be scraped to capture the full table.
        optionsTable = [[x.text for x in y.parent.contents] for y in soup.findAll('td', attrs={"class" : "yfnc_tabledata1", "nowrap" : "nowrap"})]
        optionsTable2 =[[x.text for x in y.parent.contents] for y in soup.findAll('td', attrs={"class" : "yfnc_h", "nowrap" : "nowrap"})]
        optionsTable.extend(optionsTable2)
        # Tag every row with the ticker, expiry, spot price and source URL
        # so the CSV rows are self-describing.
        for row in optionsTable:
            row.append(s)
            row.append(e)
            row.append(spot)
            row.append(fetch)
        outputdata.append(optionsTable)
        print "Quotes for expiry " + e + " downloaded for " + s

#Save the data out to csv files

for opt in outputdata:
    for row in opt:
        f.writerow(row)

# Pop open an Explorer window on the output directory when done.
open_ex(tempDir())