import requests
import pymysql
import time
import log_helper
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# MySQL connection used to persist crawled posts into the `weibo` database.
# NOTE(review): credentials and host are hard-coded in source and therefore
# exposed — move them to environment variables / a config file and rotate
# the password.
db = pymysql.connect(host='82.157.127.245',
                     user='root',
                     port=3306,
                     password='ydsungan0406',
                     database='weibo',
                     charset='utf8mb4')
cursor = db.cursor()
# Parameterized INSERT for one post; the column order must match the order
# of fields in the tuples appended to blog_cards further down the script.
sql = "insert into mblog(user_name, user_id, mblog_id, text, pic_urls, video_url, date) values (%s,%s,%s,%s,%s,%s,%s);"


# Headers attached to every weibo.com ajax request. The cookie string carries
# the login session (SUB / XSRF-TOKEN / WBPSESS ...); once it expires the API
# starts answering with ok < 0 ("cookies失效" branch below).
# NOTE(review): this session cookie is pasted in as a literal — it will need
# periodic manual refreshing and should not be committed to source control.
request_headers = {
    "accept": "application/json, text/plain, */*",
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.61 Safari/537.36",
    "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
    "referer": "https://weibo.com/u/7203149652/home?wvr=5",
    "connection": "close",
    "cookie": "SINAGLOBAL=3408463932239.498.1635578620037; UOR=,,cn.bing.com; SUBP=0033WrSXqPxfM725Ws9jqgMF55529P9D9WWMpsjhFWGvDNhUl8gSQh155JpX5KMhUgL.Fo-RShqfSK2peoM2dJLoIpeLxKML1--LB-80IsHV9c8790e0; ALF=1667291636; SSOLoginState=1635755659; SCF=Au3USWtE9DqSnmw3ffXUwx6DEvgbEZBgqMOGTtu_hrZkWjtWiDMLBH0BLO9DhHqLHVDU-eKJlOrKQtlKe5w7fCA.; SUB=_2A25Me9baDeRhGeNG71QU9S_NyTuIHXVv8U8SrDV8PUNbmtAKLXHXkW9NS0WHTC0bEVb358oARd4bSZX8RhJVpG3w; XSRF-TOKEN=yw4Jl8rxjr_7BtfdpW-z0awv; _s_tentry=weibo.com; Apache=4082797372840.7563.1635755715428; ULV=1635755715472:5:1:3:4082797372840.7563.1635755715428:1635687862369; WBPSESS=Dt2hbAUaXfkVprjyrAZT_Iwy3zlb3lXqwlPewyNU_kwAUtHQo5rkdRJiof8QDzo6PWGB-o-pEveUG7xp4dtVIVh0UjPZO3y7KMo-96wu4cWul-4P-UO1BdE4mc-t4Ud_VFx0hXynFowEyW726LkH3E_9K_9zFuKICGTNilVB3xpsQhmLTsMTaBL4660y11eBO_du9F_LHnFv6m6i9b8k9g=="

}
#hotSearch:get

# Endpoint returning the current hot-search (trending topics) sidebar list.
hot_search_url = "https://weibo.com/ajax/side/hotSearch"

hot_search_resp = requests.session().get(hot_search_url, headers=request_headers, verify=False)
# Guard against a missing payload: the original chained
# .get('data').get('realtime') raised AttributeError whenever 'data' was
# absent (e.g. expired cookie). Fall back to an empty list so the crawl
# loop below simply has nothing to do.
hot_search_data = hot_search_resp.json().get('data') or {}
realtime_list = hot_search_data.get('realtime') or []

# Endpoint queried once per trending topic, page by page.
search_url = "https://weibo.com/ajax/search/all"

# Timestamp of this crawl run; reused in the completion log line at the end.
time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
# Rows accumulated for bulk insertion into the mblog table.
blog_cards = []
def _extract_mblog_row(mblog):
    """Flatten one mblog dict into the tuple expected by the INSERT statement.

    Returns (user_name, user_id, mblog_id, text, pic_urls, video_url, date),
    where pic_urls is a comma-joined list of the original-size image URLs and
    video_url is non-empty only when the post links a video.weibo.com URL.
    """
    user = mblog.get("user") or {}
    # A video post carries its link in url_struct[0].long_url; long_url may
    # itself be missing, so default to "" before substring-testing it.
    video_url = ""
    url_structs = mblog.get("url_struct")
    if url_structs:
        long_url = url_structs[0].get("long_url") or ""
        if "video.weibo.com" in long_url:
            video_url = long_url
    pic_infos = mblog.get("pic_infos", {})
    pic_list = []
    for pic_id in mblog.get("pic_ids", []):
        info = pic_infos.get(pic_id)
        if info is None:
            continue
        original = info.get("original")
        if original is None:
            continue
        pic_list.append(original.get("url"))
    return (
        user.get("screen_name"),
        user.get("id"),
        mblog.get("mblogid"),
        mblog.get("text_raw"),
        ",".join(pic_list),
        video_url,
        mblog.get("created_at"),
    )


# Reuse one HTTP session for all search pages instead of building a new
# session per request.
search_session = requests.session()

for realtime in realtime_list:
    word = realtime.get("word")
    if not word:
        # Without a topic word we cannot build the containerid query.
        continue
    page = 1
    print("正在获取话题#{}#的相关微博".format(word))
    while True:
        params = {
            "containerid": "100103type=1&q=#" + word + "#&t=3",
            "page": page,
            "count": 20
        }

        try:
            payload = search_session.get(search_url, headers=request_headers,
                                         params=params, verify=False).json()
            status = payload.get("ok")
        except Exception:
            log_helper.error("话题#{}#的第{}页获取失败".format(word, page))
            # Advance past the failed page; the original bare `continue`
            # retried the same page forever on a persistent error.
            page += 1
            continue

        if status is None or status < 0:
            # Server rejected the request — almost certainly an expired cookie.
            log_helper.error("cookies失效")
            break
        data = payload.get("data")
        if data is None:
            # Same infinite-loop hazard as the except branch: advance first.
            page += 1
            continue

        cards = data.get("cards") or []
        if not cards:
            log_helper.error("话题#{}#评论获取完成".format(word))
            break

        for card in cards:
            # card_type 9 is a plain post; card_type 11 wraps a post inside
            # card_group (whose first entry may be a type-10 header we skip).
            mblog = None
            if card.get("card_type") == 9:
                mblog = card.get("mblog")
            elif card.get("card_type") == 11:
                group = card.get("card_group") or []
                if group and group[0].get("card_type") == 9:
                    mblog = group[0].get("mblog")
            # The original appended (user_screen_name, uid, mblogid, ...) for
            # every card_type != 7, but those three names were never assigned
            # anywhere (NameError) and the other fields leaked across cards.
            # Append only rows extracted from an actual mblog instead.
            if mblog is not None:
                blog_cards.append(_extract_mblog_row(mblog))
        page += 1

    # Persist this topic's rows. The original `if len(blog_cards) != 0: pass`
    # collected data but never wrote it to the database.
    if blog_cards:
        try:
            cursor.executemany(sql, blog_cards)
            db.commit()
        except Exception:
            db.rollback()
            log_helper.error("话题#{}#入库失败".format(word))
        blog_cards = []

# Completion marker, stamped with the run's start time.
# NOTE(review): this is informational, yet error() is the only log_helper
# call used in this file — confirm whether log_helper exposes an info level.
log_helper.error("{} 完成一次爬虫".format(time_str))
# Release the database resources opened at the top of the script.
cursor.close()
db.close()