import requests
from bs4 import BeautifulSoup as bs
import xlwt 
from xlutils.copy import copy
import time
# Record the wall-clock start time; total elapsed runtime is printed at the end.
a = time.time()

def saveExcel(title, data):
    """Write a header row and data rows to ``person.xls``.

    Args:
        title: sequence of column headings, written into row 0.
        data: sequence of rows; each row is a sequence of cell values,
            written starting at row 1 (below the header).

    Side effects:
        Creates/overwrites ``person.xls`` in the current working directory.
    """
    wbt = xlwt.Workbook()
    sheet = wbt.add_sheet('人员信息表')
    # Header row at row 0.
    for col, heading in enumerate(title):
        sheet.write(0, col, heading)
    # Data rows start at row 1 to leave room for the header.
    for row, record in enumerate(data, start=1):
        for col, value in enumerate(record):
            sheet.write(row, col, value)
    wbt.save('person.xls')

# ---- Fetch the ranking page -------------------------------------------------
header = {
    "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3514.0 Safari/537.36"
}
url_string = 'http://www.zuihaodaxue.cn/zuihaodaxuepaiming2018.html'
# timeout= keeps the script from hanging forever on a stalled connection.
response = requests.get(url_string, headers=header, timeout=30)
# Fail fast on HTTP errors instead of silently parsing an error page.
response.raise_for_status()
response.encoding = 'utf-8'
soup = bs(response.text, 'html.parser')

# ---- Parse the header row ---------------------------------------------------
# Plain <th> cells carry their label in .string; a cell whose .string is None
# is assumed to wrap a <select> of indicator options, so every <option> label
# is collected as an extra column heading.
catalg = []
for cat in soup.find_all('thead'):
    for th in cat.find_all('th'):
        if th.string is not None:
            catalg.append(th.string)
        else:
            catalg.extend(
                opt.string
                for sel in th.find_all('select')
                for opt in sel.find_all('option')
            )

# ---- Parse the data rows ----------------------------------------------------
# One inner list per <tr>, holding the .string of each <td> cell
# (None for cells with nested markup, matching the original behavior).
data_big = [
    [td.string for td in row.find_all('td')]
    for body in soup.find_all('tbody')
    for row in body.find_all('tr')
]

saveExcel(catalg, data_big)
print(time.time()-a)

