# -*- coding: utf-8 -*-
'''
Convert the data-structure table on a given documentation page into a SQL DDL statement.
'''

import sys
import requests
from BeautifulSoup import BeautifulSoup

def parseContent(url):
    """Fetch the page at *url* and scrape the table definition from it.

    Returns a 3-tuple: (table name from the title h1, table comment from the
    introduction paragraph, list of rows where each row is the list of <td>
    texts from the api-table body).
    """
    response = requests.get(url)
    page = BeautifulSoup(response.content, fromEncoding=response.encoding)
    table_name = page.find('div', {'class': 'title-wrap'}).h1.text.strip()
    table_comment = page.find('p', {'class': 'introduction'}).text.strip()
    rows = [[cell.text for cell in row.findAll('td')]
            for row in page.find('table', {'class': 'api-table'}).tbody.findAll('tr')]
    return table_name, table_comment, rows

def parseDDL(content):
    ddl="USE TOPAPI;\nDROP TABLE IF EXISTS %s;\n"%content[0].upper()
    ddl=ddl+"CREATE TABLE `%s` (\n"%content[0].upper()
    for tr in content[2]:
        if tr[1]=='Number':type='int'
        elif tr[1]=='String':type='varchar(100)'
        elif tr[1]=='Date':type='datetime'
        elif tr[1]=='Boolean':type='tinyint(1)'
        else:type='varchar(20)'
        ddl=ddl+"`%s` %s COMMENT '%s',\n"%(tr[0],type,tr[4])
    ddl=ddl+"`population_tsmp` timestamp\n"
    #ddl=ddl[:ddl.rindex(',')]+'\n'
    ddl=ddl+")ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='%s';"%content[1]
    fs=''
    for i,li in enumerate(content[2]):
        fs=fs+li[0]+','
        if (i-4.0)%5==0:fs=fs+'\\\n'
    fs="/*\n'%s'\n*/\n"%fs[:-1]
    return (fs+ddl).encode('utf-8')

if __name__=='__main__':
    url=sys.argv[1]
    content=parseContent(url)
    print parseDDL(content)
