import io
import sys
import requests
from lxml import etree
import pymysql

# Target URL: the Weibo real-time hot-search list
start_url = 'https://s.weibo.com/top/summary?cate=realtimehot'
# Names of the destination table and database for the scraped rows
tablename='t_weibohot'
databasename='resoudata'
# Browser identity and matching cookie; obtain via F12 -> Network tab
# Request headers
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.109 Safari/537.36',
    'cookie': 'SUB=_2AkMW3o4rf8NxqwJRmPwdy2zibIVzzQDEieKggn_wJRMxHRl-yT9jqkgDtRB6PV6gxAuDDhArHdiZJkcVVzQ3QtjgwQzg; '
          'SUBP=0033WrSXqPxfM72-Ws9jqgMF55529P9D9W5lCVhHvhRjPP7ys9zgIjOO; Apache=2518694645431.9116.1635909919107; '
          'SINAGLOBAL=2518694645431.9116.1635909919107; _s_tentry=passport.weibo.com; '
          'ULV=1635909919120:1:1:1:2518694645431.9116.1635909919107:'
}

sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8')  # force UTF-8 on stdout (some consoles default to a local codepage)

# Persist one hot-search entry into the database.
def saveDAO(db, number, topic, hot, href):
    """Insert a single hot-search row into `tablename` and commit.

    Parameters:
        db:     an open pymysql connection.
        number: rank shown on the page.
        topic:  topic title.
        hot:    numeric heat value.
        href:   absolute topic URL.

    The insert also records the current date/time via curdate()/curtime().
    """
    # The table name is a module-level constant (not user input), so string
    # interpolation is safe here; row values are bound as SQL parameters.
    hotsql = ("insert into " + tablename +
              " (ranking,topic,hot,url,daydate,mindate)"
              " values (%s,%s,%s,%s,curdate(),curtime())")
    cursor = db.cursor()
    try:
        # try/finally ensures the cursor is released even when execute()
        # raises — the original leaked the cursor on failure.
        cursor.execute(hotsql, (number, topic, hot, href))
    finally:
        cursor.close()
    db.commit()


# Issue the initial HTTP request and hand off to the parser on success.
def get(url):
    """Fetch `url`; if the server answers 200, run parse(url).

    NOTE(review): parse() re-requests the same URL itself, so the body
    fetched here is discarded — this call is effectively a reachability /
    status check. Kept as-is to preserve parse()'s interface.
    """
    response = requests.get(url, headers=headers)
    try:
        if response.status_code == 200:
            print("微博热搜请求成功")
            parse(url)
        else:
            print("网页请求失败了")
    finally:
        # The original never closed the response; release the connection
        # even if parse() raises.
        response.close()

# Scrape the Weibo hot-search entries and persist them to MySQL.
def parse(url):
    """Download the hot-search page, extract the rank / topic / heat / link
    columns, clean them, and insert every ranked entry via saveDAO().

    Alignment note: the first element of `topic` and `href` is the pinned
    (un-ranked) headline, which has no rank or heat value — so ranked row i
    pairs number[i] and hot[i] with topic[i + 1] and href[i + 1].
    """
    response = requests.get(url, headers=headers)
    try:
        # Build an lxml tree and pull the four columns of the table.
        selector = etree.HTML(response.text)
        number = [each.text for each in selector.cssselect('#pl_top_realtimehot > table > tbody > tr > td.td-01.ranktop')]  # rank
        topic = [each.text for each in selector.cssselect('#pl_top_realtimehot > table > tbody > tr > td.td-02 > a')]
        hot = [each.text for each in selector.cssselect('#pl_top_realtimehot > table > tbody > tr > td.td-02 > span')]
        href = [each.get("href") for each in selector.cssselect('#pl_top_realtimehot > table > tbody > tr > td.td-02 > a')]

        # Rows whose rank renders as "•" are promoted entries: drop them from
        # all four columns (topic/href carry the +1 pinned-row offset).
        while "•" in number:
            exclude_number_index = number.index("•")
            number.pop(exclude_number_index)
            topic.pop(exclude_number_index + 1)
            hot.pop(exclude_number_index)
            href.pop(exclude_number_index + 1)

        # Clean the heat column: keep only ASCII digits (codes 48-57, i.e.
        # 47 < ord(c) < 58) and convert to int; make hrefs absolute.
        for i in range(len(number)):
            hot[i] = ''.join(filter(lambda c: 47 < ord(c) < 58, hot[i]))
            hot[i] = int("".join(hot[i].split()))
            href[i + 1] = 'https://s.weibo.com' + href[i + 1]

        # Connect to the database; try/finally fixes the original's leak of
        # the connection when the save loop raised.
        db = pymysql.connect(host="localhost", user="root", password="root", database=databasename)
        try:
            # id is AUTO_INCREMENT; reset it so ids stay contiguous
            # (existing values are skipped by MySQL).
            cursor = db.cursor()
            # Renamed from `str`, which shadowed the builtin.
            reset_sql = "ALTER TABLE " + tablename + " AUTO_INCREMENT = 1"
            cursor.execute(reset_sql)
            cursor.close()
            db.commit()
            # Save and commit every ranked entry.
            try:
                for i in range(len(number)):
                    saveDAO(db, number[i], topic[i + 1], hot[i], href[i + 1])
            except Exception as ex:  # was BaseException — that also caught KeyboardInterrupt/SystemExit
                print(ex)
            else:
                db.commit()
        finally:
            db.close()
    finally:
        response.close()
# Script entry point: execution starts here.
if __name__ == "__main__":
    get(start_url)



