#!/usr/bin/env python
#coding=utf-8

from BeautifulSoup import BeautifulSoup 
import urllib,time, datetime
from pysqlite2 import dbapi2 as sqlite

import utils
from ex import NoDataToday 

tp_codes = [] # codes of stocks suspended from trading (停牌) today, i.e. no data available

def get_1day_data(soup, d=None):
    """Extract one day's OHLCV quote from a parsed Sina history page.

    Args:
        soup: BeautifulSoup tree of the stock's history page.
             (Original parameter was named `c` but the body referenced an
             undefined global `soup` -- fixed to use the argument.)
        d: ISO date string 'YYYY-MM-DD'. Defaults to today, evaluated at
           call time (the old default froze the date at import time).

    Returns:
        {d: {'open': ..., 'high': ..., 'close': ..., 'low': ..., 'volume': ...}}
        Values are the raw strings found in the table cells.

    Raises:
        NoDataToday: if the page has no row for date `d` (e.g. the stock
        was suspended from trading that day).
    """
    if d is None:
        d = utils.get_today_isostr()

    items = ['open', 'high', 'close', 'low', 'volume']

    # The date appears as a text node; its great-grandparent is the
    # table row holding that day's figures.
    t = soup.find(text=d)

    # No row for this date -> suspended / no data.
    if not t:
        raise NoDataToday(d)

    row = t.parent.parent.parent
    # Cells 1..5 of the row are open/high/close/low/volume, in that order.
    data = [td.string for td in row.contents[1:6]]

    return {d: dict(zip(items, data))}

def get_ndays_data(soup, ds):
    """Collect per-day quote data for every date in `ds`.

    `ds` is a list of ISO date strings, e.g. ['2007-05-01', '2007-06-2'].
    Returns a list with one result dict per date, in the same order.
    Propagates whatever get_1day_data raises for a missing date.
    """
    return [get_1day_data(soup, day) for day in ds]
    
c = utils.get_coder()
log = utils.get_logger()


while True:
    try:
        code = c.next()[0]
        if code == '600003':break
    except KeyError:
        break
    else:
        url = "http://biz.finance.sina.com.cn/company/history.php?symbol=sh%s"%code
        s = utils.get_soup(url)
        
        d1 = datetime.date(2007,6,29).isoformat()
        d2 = datetime.date(2007,6,28).isoformat()
        try:
            rs = get_ndays_data(s, [d1,d2])
        except ex_nothing_found:
            tp_codes.append(code)
        else:
            print rs
            time.sleep(0.2)
            
s = ','.join(tp_codes)
log.info('今天没有获得数据的股票: %s'%s)
