import requests #请求模块
import datetime #时间日期模块
import lxml
from bs4 import BeautifulSoup #数据解析提取模块，需要提前使用pip下载
import json
import pandas as pd

# Today's date formatted as e.g. 20200420; used to name the output JSON file.
today = datetime.datetime.now().strftime('%Y%m%d')
#创建爬取数据函数
def crawl_wiki_data():
    """Fetch the contestant tables from the Baidu Baike page of
    "Youth With You Season 3".

    Returns:
        A list of bs4 ``<table>`` Tag objects on success, or ``None``
        when the request fails or the target ``<div>`` is not found.
    """
    # Browser-like User-Agent so the site does not reject the request.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'
    }
    url = 'https://baike.baidu.com/item/%E9%9D%92%E6%98%A5%E6%9C%89%E4%BD%A0%E7%AC%AC%E4%B8%89%E5%AD%A3/51140980'
    try:
        # timeout so a hung connection cannot block the script forever
        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'lxml')
        # The contestant list lives in a div identified by this data-uuid.
        divs = soup.find_all("div", attrs={"data-uuid": "go12lpqgpn"})
        if not divs:
            print('target div (data-uuid="go12lpqgpn") not found on page')
            return None
        return divs[0].find_all("table")
    except requests.RequestException as e:
        # Original code caught json.JSONDecodeError, but nothing here decodes
        # JSON — network/HTTP errors are what can actually occur.
        print(e)
        return None

def parse_wiki_data(table):
    """Extract contestant records from the scraped tables and dump them
    to ``<today>.json`` in the current directory.

    Each table row (after the header) yields a dict with the keys
    ``name``, ``link``, ``zone``, ``height``, ``weight``, ``company``.

    Args:
        table: list of bs4 ``<table>`` Tags returned by crawl_wiki_data().
    """
    # Re-parse the stringified tables so rows can be walked uniformly.
    soup = BeautifulSoup(str(table), 'lxml')
    all_trs = soup.find_all('tr')
    stars = []
    # Skip the first row, assumed to be the column header.
    for tr in all_trs[1:]:
        all_tds = tr.find_all('td')
        if len(all_tds) < 5:
            # malformed / spanning row — skip instead of crashing on indexing
            continue
        star = {}
        # name
        star["name"] = all_tds[0].text
        # link: absolute URL to the contestant's own Baike page, if any
        anchor = all_tds[0].find('a')
        if anchor is not None:
            star["link"] = 'https://baike.baidu.com' + anchor.get('href')
        else:
            star["link"] = 'https://baike.baidu.com'
        # birthplace
        star["zone"] = all_tds[1].text
        # height
        star["height"] = all_tds[2].text
        # weight
        star["weight"] = all_tds[3].text
        # agency/company
        star["company"] = all_tds[4].text
        stars.append(star)
    print('stars count:{0}'.format(len(stars)))
    # Persist as JSON; path spelled exactly as output_mh_mw() reads it
    # (the original used a raw string here, producing a different path).
    with open(".\\" + today + '.json', 'w', encoding="UTF-8") as file:
        # keep non-ASCII (Chinese) text readable in the output file
        json.dump(stars, file, ensure_ascii=False)

        
def output_mh_mw():
    """Load today's contestant JSON file and print mean weight and
    mean height of the contestants.

    Assumes height strings start with a 3-digit number (e.g. '166cm')
    and weight strings end with a 2-char unit (e.g. '52.5kg') — matches
    the slicing the original code applied; TODO confirm against live data.
    """
    player = pd.read_json(".\\" + today + '.json', encoding="UTF-8")
    # '166cm' -> 166: keep the first three characters only
    player["height_int"] = player["height"].str.slice(0, 3).astype(int)
    # '52.5kg' -> 52.5: drop the trailing two characters ('kg')
    player["weight_float"] = player["weight"].str.slice(0, -2).astype(float)
    print('mean_weight is %f' % player["weight_float"].mean())
    print('mean_height is %f' % player["height_int"].mean())


if __name__ == "__main__":
    table = crawl_wiki_data()
    # crawl_wiki_data() returns None/empty on failure; without this guard
    # an empty JSON file is written and output_mh_mw() dies with a KeyError.
    if table:
        parse_wiki_data(table=table)
        output_mh_mw()
    else:
        print('failed to fetch contestant tables; nothing to do')