import requests
import datetime
from pyquery import PyQuery
import os
from redis import StrictRedis
import json
import schedule
import time

# A single ranking entry.
class Data(object):
    """Holds one Bilibili ranking row: title, link url, and hot score."""

    def __init__(self):
        # Start from an empty record; the parser fills these fields in.
        self.title, self.url, self.hot = '', '', 0

# Fetch the page at url and archive a copy on disk.
def save_body(url):
    """Download *url*, save the HTML to bili/<today>.html, and return the body.

    Raises requests exceptions on network failure or timeout.
    """
    # A timeout keeps the scheduled job from hanging forever on a stalled
    # connection — requests.get has no default timeout.
    response = requests.get(url, timeout=30)
    body = response.content.decode('utf-8')
    # makedirs(exist_ok=True) avoids the check-then-create race of
    # the original exists()/mkdir() pair.
    os.makedirs('bili', exist_ok=True)
    path = os.path.join('bili', '{}.html'.format(datetime.date.today()))
    with open(path, 'w', encoding='utf-8') as f:
        f.write(body)
    return body

# Parse the ranking page body into a list of Data objects.
def get_data_list(body):
    """Extract title, url, and hot score for each rank item in *body*.

    Returns a list of Data. Logs the first entry as a sample when the
    list is non-empty.
    """
    hot_list = PyQuery(body)('.rank-list')('.rank-item')
    data_list = []
    for e in hot_list:
        data = Data()
        item = PyQuery(e)
        data.title = item('.info')('.title').text()
        data.url = 'https:' + item('.title').attr('href')
        # The first .data-box holds the hot score; keep the default 0
        # when the page layout yields none.
        boxes = item('.detail')('.data-box')
        if boxes:
            data.hot = PyQuery(boxes[0]).text()
        data_list.append(data)
    # Guard: the original unconditionally indexed data_list[0] and raised
    # IndexError whenever the page parsed to an empty ranking.
    if data_list:
        log('示例：第一条：', data_list[0].__dict__)
    return data_list

# Persist the parsed entries into redis.
def save_redis(redis, data_list):
    """Replace the 'bili_hot' redis list with *data_list*.

    Each entry is stored as a JSON dump of its __dict__. The delete and
    the pushes go through one pipeline (MULTI/EXEC), so concurrent readers
    never observe an empty or partially written list — the original issued
    delete + N rpush as separate commands.
    """
    pipe = redis.pipeline()
    pipe.delete('bili_hot')
    for x in data_list:
        pipe.rpush('bili_hot', json.dumps(x.__dict__, ensure_ascii=False))
    pipe.execute()


def log(*args, **kwargs):
    """Append a timestamped line to bili_log.txt.

    Accepts the same positional arguments and keyword arguments as print();
    kwargs are forwarded to print().
    """
    # time.localtime() already defaults to "now" — the original round-trip
    # through int(time.time()) was redundant. 'format' is also renamed so
    # it no longer shadows the builtin.
    fmt = '%B %d %Y %H:%M:%S'
    stamp = time.strftime(fmt, time.localtime())
    with open('bili_log.txt', 'a', encoding='utf-8') as f:
        print(stamp, *args, file=f, **kwargs)


def run():
    """Fetch the ranking page, parse it, and store the result in redis."""
    # Download and archive today's ranking page, then extract the entries.
    page = save_body('https://www.bilibili.com/ranking?spm_id_from=333.334.b_62616e6e65725f6c696e6b.1')
    entries = get_data_list(page)

    # Push the parsed entries into the local redis instance.
    client = StrictRedis(host='localhost', port=6379, decode_responses=True)
    save_redis(client, entries)

    log('B站热榜已经存入redis，List的key是bili_hot')


if __name__ == '__main__':
    # Scheduled job: refresh the ranking every day at 07:00.
    schedule.every().day.at("07:00").do(run)
    while True:
        schedule.run_pending()
        # Sleep between polls — the original bare loop busy-waited and
        # pinned a CPU core; a one-minute granularity is ample for a
        # once-daily job.
        time.sleep(60)
