import time

from scrapy.dupefilters import BaseDupeFilter
from scrapy.utils.request import request_fingerprint

from squirrel_core.commons.scrapy_base import defaults
from squirrel_core.commons.utils.logger import Logging


logger = Logging()


class RFPDupeFilter(BaseDupeFilter):
    """Request duplicate filter backed by an external set-capable server.

    Fingerprints computed via scrapy's ``request_fingerprint`` are stored
    in ``server`` under ``key``; a request counts as "seen" when its
    fingerprint was already present in that set.
    """

    logger = logger

    def __init__(self, server, key, debug=False):
        """
        :param server: backend exposing ``set_add`` / ``set_del`` /
            ``set_clear`` (and ``set_keys`` for subclasses); may be ``None``
            when built via :meth:`from_settings` and attached later.
        :param key: name of the backend set that holds the fingerprints.
        :param debug: when True, log every duplicate instead of only the
            first one.
        """
        self.server = server
        self.key = key
        self.debug = debug
        # Emit the "no more duplicates will be shown" notice only once.
        self.logdupes = True

    @classmethod
    def from_settings(cls, settings):
        """Build a filter from crawler settings (server attached later)."""
        # Timestamped key keeps each run's fingerprint set distinct.
        key = defaults.DUPEFILTER_KEY % {'timestamp': int(time.time())}
        debug = settings.getbool('DUPEFILTER_DEBUG')
        return cls(None, key=key, debug=debug)

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy entry point: delegate to :meth:`from_settings`."""
        return cls.from_settings(crawler.settings)

    def delete_df(self, fingerprint):
        """Remove a single fingerprint from the backend set."""
        self.server.set_del(self.key, fingerprint)

    def request_seen(self, request, include_headers=None, s_urls=()):
        """Return True if ``request``'s fingerprint was already in the set.

        ``s_urls`` is accepted for interface compatibility with subclasses
        and is unused here. NOTE: the default was a mutable ``[]``; an empty
        tuple avoids the shared-mutable-default pitfall with identical
        membership-test behavior.
        """
        fp = self.request_fingerprint(request, include_headers)
        # set_add is truthy when the fingerprint was newly added,
        # so "seen" is the negation.
        added = self.server.set_add(self.key, fp)
        return not added

    def request_fingerprint(self, request, include_headers=None):
        """Compute the request fingerprint (delegates to scrapy's helper)."""
        return request_fingerprint(request, include_headers)

    def close(self, reason=''):
        """Drop the backend fingerprint set when the spider closes."""
        self.clear()

    def clear(self):
        """Clear all fingerprints stored under ``self.key``."""
        self.server.set_clear(self.key)

    def log(self, request, spider):
        """Log a filtered duplicate, throttling output unless in debug mode."""
        if self.debug:
            msg = "Filtered duplicate request: %(request)s"
            self.logger.debug(msg, {'request': request}, extra={'spider': spider})
        elif self.logdupes:
            msg = ("Filtered duplicate request %(request)s"
                   " - no more duplicates will be shown"
                   " (see DUPEFILTER_DEBUG to show all duplicates)")
            self.logger.debug(msg, {'request': request}, extra={'spider': spider})
            self.logdupes = False


class MemoryWrapperRFPDupeFilter(RFPDupeFilter):
    """RFPDupeFilter with an in-process ``set`` cache in front of the server.

    Known fingerprints (as ``bytes``) are mirrored in ``memory_set`` so most
    duplicate checks skip the server round trip. The cache's estimated size
    is capped at ``MAX_MEMORY_DUP_SIZE``; once full it stops growing and new
    fingerprints are checked against the server only.
    """

    logger = logger

    DUP_DATA_SIZE_BYTES = 32  # estimated size of one cached fingerprint entry
    MAX_MEMORY_DUP_SIZE = 5 * 1024 * 1024  # cap on the estimated cache size (5 MiB)

    def __init__(self, server, key, debug=False):
        super(MemoryWrapperRFPDupeFilter, self).__init__(server, key, debug)
        self.memory_set = set()       # local mirror of server fingerprints (bytes)
        self.memory_set_full = False  # True once the estimated size cap is hit
        self.memory_set_counter = 0   # entries added; counter * DUP_DATA_SIZE_BYTES ~ bytes
        self.init_dupe_filter_set()

    def init_dupe_filter_set(self):
        """Warm the local cache with up to ~1 MiB of server fingerprints.

        Best-effort: any backend failure leaves the cache cold but keeps the
        filter functional (every check then falls through to the server).
        """
        try:
            # Derive the entry limit from the shared size constant instead of
            # repeating a magic 32.
            limit = int(1 * 1024 * 1024 / self.DUP_DATA_SIZE_BYTES)
            keys = self.server.set_keys(self.key, limit=limit)
            self.memory_set = set(keys)
            self.memory_set_counter = len(self.memory_set)
        except Exception as err:
            # Log instead of silently swallowing so operators can see that
            # the warm-up failed; behavior is still best-effort.
            logger.debug('init_dupe_filter_set failed: {}'.format(err))

    def delete_df(self, fingerprint):
        """Remove a fingerprint from both the local cache and the server."""
        # Cache stores bytes; accept either str or bytes fingerprints.
        fp_bytes = fingerprint.encode() if isinstance(fingerprint, str) else fingerprint
        # discard() is a no-op when absent — narrower than the previous
        # catch-everything try/except around set.remove().
        self.memory_set.discard(fp_bytes)
        self.server.set_del(self.key, fingerprint)

    def request_seen(self, request, include_headers=None, s_urls=()):
        """Return True when the request is a duplicate.

        :param s_urls: URLs that bypass dedup entirely (always "not seen").
            NOTE: the default was a mutable ``[]``; an empty tuple is
            behavior-identical for the ``in`` test and not shared-mutable.
        """
        if request.url in s_urls:
            return False
        fp = self.request_fingerprint(request, include_headers)
        fp_bytes = fp.encode()  # hoisted: encode once, used for check and add
        memory_existed = fp_bytes in self.memory_set
        logger.info('Request:{} request_seen result {} from memory:{}'.format(
            request.url, fp, "existed" if memory_existed else "not existed"
        ))
        if memory_existed:
            return True
        # Not in the local cache: remember it (capacity permitting) and ask
        # the authoritative server set.
        self.add_memory_set(fp_bytes)
        added = self.server.set_add(self.key, fp)
        logger.debug(f'Request:{request.url} request_seen result {fp} from server:{"existed" if not added else "not existed"}')
        existed = not added
        return existed

    def clear(self):
        """Reset the local cache and clear the server-side set."""
        self.memory_set.clear()
        self.memory_set_counter = 0
        self.server.set_clear(self.key)

    def add_memory_set(self, dup):
        """Add a fingerprint (bytes) to the cache until the size cap is hit."""
        if not self.memory_set_full:
            self.memory_set.add(dup)
            self.memory_set_counter += 1
            if self.memory_set_counter * self.DUP_DATA_SIZE_BYTES > self.MAX_MEMORY_DUP_SIZE:
                self.memory_set_full = True