# encoding: utf-8
"""
@author: 夏洛
@QQ: 1972386194
@file: 03-去重.py
"""
'''
Deduplication with Redis, using the set type.
(A Bloom filter is the other common approach for large-scale dedup.)

Two deduplication strategies:
    URL dedup   - keyed on the crawled address
    data dedup  - keyed on a specific piece of data

    Note: redis-py returns bytes by default, so call decode() when
    reading values back out.
'''
import requests
from base_request import Spiders
from lxml import etree
from loguru import logger
import time
import redis
# Module-level Redis connection shared by all Crawl instances.
# No arguments: uses the redis-py defaults (localhost:6379, db 0).
client = redis.Redis()
import hashlib

class Crawl(Spiders):
    """Scrape the 36kr latest-news feed, deduplicating titles via a Redis set.

    Each article title is hashed with MD5 and SADD-ed into the Redis set
    'xialuo3'; only previously-unseen titles are appended to data.txt.
    """

    def __init__(self):
        # Listing page that exposes the latest news items.
        self.url = 'https://36kr.com/information/web_news/latest/'
        # xpath() returns a list; unwrap the first element when present,
        # otherwise pass the falsy value through unchanged.
        self.maps = lambda x: x[0] if x else x

    def ma5_data(self, content):
        """Return the hex MD5 digest of *content* (a str).

        Hashing keeps the Redis set members small and fixed-length.
        (Name kept as-is for backward compatibility; 'md5_data' was intended.)
        """
        digest = hashlib.md5()
        digest.update(content.encode())
        return digest.hexdigest()

    def crawl(self):
        """Fetch the listing page once and persist any unseen titles.

        Stops at the first already-seen title: the rest of the run is
        handled by run()'s outer loop after a short back-off.
        """
        res = self.fetch(self.url)
        html = etree.HTML(res.text)
        items = html.xpath('//div[@class="information-flow-list"]/div')
        for item in items:
            title = self.maps(item.xpath('.//p[@class="title-wrapper ellipsis-2"]/a/text()'))
            if not title:
                # No title link in this card (layout change / ad slot):
                # skip it instead of crashing on list.encode().
                continue
            fingerprint = self.ma5_data(title)  # compress the key to save Redis memory
            # sadd returns 1 when the member is new, 0 when already present.
            if client.sadd('xialuo3', fingerprint):
                logger.info('可以入库{}'.format(title))
                self.save(title)
            else:
                # Duplicate seen: back off and return. The original code
                # called self.crawl() recursively here, which grew the call
                # stack without bound; run() already loops forever, so a
                # plain return is enough.
                time.sleep(5)
                logger.info('休息5秒钟')
                return

    def save(self, data):
        """Append one title to data.txt (UTF-8, CRLF-terminated)."""
        with open('data.txt', 'a', encoding='utf-8') as fh:
            fh.write(data)
            fh.write('\r\n')

    def run(self):
        """Run the crawler forever; crawl() sleeps when it hits seen data."""
        while True:
            logger.info('开始启动爬虫')
            self.crawl()

# Script entry point: build a crawler and run it until interrupted.
if __name__ == '__main__':
    spider = Crawl()
    spider.run()


