#coding=utf-8
# Fetch A-share stock codes and names from the Shanghai Stock Exchange website
from BeautifulSoup import BeautifulSoup 
import urllib

from pysqlite2 import dbapi2 as sqlite

# DDL for the table holding scraped listings: one row per security, with an
# auto-assigned integer surrogate key (SQLite INTEGER PRIMARY KEY).
create_sql = "create table codes(id INTEGER PRIMARY KEY,\
                                 code text not null,\
                                 name text not null)"
                                
# Parameterized insert; executed with (code, name) pairs from the scraper.
insert_sql = "insert into codes (code, name) values(?,?)"

def createdb(con):
    """Create the `codes` table on the given database connection."""
    cur = con.cursor()
    cur.execute(create_sql)
    cur.close()
    con.commit()
    
def insert_code(con, data):
    """Insert scraped rows into the `codes` table and commit.

    `data` is an iterable of (code, name) pairs, as produced by get_code().
    Uses executemany instead of a per-row execute loop: same behavior,
    one prepared statement reused across all rows.
    """
    c = con.cursor()
    c.executemany(insert_sql, data)
    c.close()
    con.commit()
    
def parse_row(row):
    """Extract [code, name] from one table row of the listing page.

    The code sits inside a nested tag in the second cell; the name is the
    text of the fourth cell (odd indices because BeautifulSoup keeps the
    whitespace text nodes between tags as siblings).
    """
    cells = row.contents
    return [cells[1].contents[0].string, cells[3].string]

def get_code(soup):
    """Collect [code, name] pairs from the listing table in `soup`.

    Locates the header cell containing '证券代码' (stock code) and walks the
    sibling rows that follow it.  BeautifulSoup keeps the whitespace text
    nodes between <tr> tags as siblings, so each data row is two siblings
    away from the previous one.
    """
    result = []

    t = unicode('证券代码', 'u8')
    row = soup.find(text=t).parent.parent
    while True:
        # Guard each hop separately: the original chained
        # `row.nextSibling.nextSibling` raises AttributeError when the first
        # nextSibling is already None (end of table) instead of stopping.
        row = row.nextSibling
        if row is not None:
            row = row.nextSibling
        if not row:
            break
        result.append(parse_row(row))
    return result

def next50(soup):
    """Follow the '下一页' (next page) link in `soup`.

    Returns the parsed soup of the next listing page, or None when the
    current page is the last one.
    """
    marker = unicode('下一页', 'u8')
    link_text = soup.find(text=marker)
    if not link_text:
        return None

    next_url = 'http://www.sse.com.cn' + link_text.parent['href']
    page = urllib.urlopen(next_url)
    return BeautifulSoup(page, fromEncoding='gb18030')


# Driver: create the database, fetch the first listing page, then follow
# the pagination links until no 'next page' link remains.
con = sqlite.connect('/home/jay/stock.db')
createdb(con)

url = 'http://www.sse.com.cn/sseportal/webapp/datapresent/SSEQueryStockInfoAct?reportName=BizCompStockInfoRpt&PRODUCTID=&PRODUCTJP=&PRODUCTNAME=&keyword=&tab_flg=&CURSOR=1'
page = urllib.urlopen(url)
soup = BeautifulSoup(page, fromEncoding='gb18030')

while soup:
    insert_code(con, get_code(soup))
    soup = next50(soup)

