from urllib import request
from bs4 import  BeautifulSoup
import csv
import time
# Fields to scrape from each tutor detail page.
def get_message(bf):
    """Extract one tutor's record from a parsed TutorDetails page.

    Parameters
    ----------
    bf : BeautifulSoup
        Parsed HTML of a TutorDetails.aspx page.

    Returns
    -------
    list[str]
        [college, tutor code, name, sex, title, email, academic
        experience, personal profile] in that order, or an empty list
        when the page is a placeholder (the spans still show the
        ASP.NET default text "Label") or an expected span is missing.
    """
    # ASP.NET Label span ids, in output-column order.
    span_ids = (
        "Labelxymc",   # college name
        "Labeldsdm",   # tutor code
        "Labeldsxm",   # tutor name
        "Labelxb",     # sex
        "Labelzc",     # title
        "Labelemail",  # email
        "Labelxsjl",   # academic experience
        "Labelgrjj",   # personal profile
    )
    values = []
    for span_id in span_ids:
        span = bf.find("span", {"id": span_id})
        if span is None:
            # Missing span (layout change / bad page): return an empty
            # record instead of raising AttributeError like the old code.
            return []
        values.append(str(span.get_text()))
    # An unused tutor id leaves the default "Label" text in the spans;
    # treat such placeholder pages as empty records.
    if values[0] == "Label":
        return []
    return values



# --- Scrape one tutor detail page and append the record to test1.csv ---
# NOTE(review): the tutor id is hard-coded to 2; presumably this was meant
# to loop over many ids — confirm with the original author.
with open("test1.csv", "w", newline='') as csvFile:  # created if absent
    writer = csv.writer(csvFile)
    # Header row kept disabled, as in the original:
    # writer.writerow(['所在学院', '导师代码', '导师姓名', '导师性别','导师职称','导师邮箱','导师学术经历','导师个人简介'])
    html = request.urlopen('http://222.197.183.99/TutorDetails.aspx?id=2')
    bf = BeautifulSoup(html.read().decode("utf-8"), 'lxml')
    # Renamed from `list`, which shadowed the builtin.
    teacher_list = get_message(bf)
    print(len(teacher_list))
    if teacher_list:  # skip placeholder / empty pages
        writer.writerow(teacher_list)
    print("导师信息读取完毕")
    time.sleep(0.2)  # throttle — be polite to the server between requests