#coding=utf-8
import logging
import sys
from contextlib import closing

import MySQLdb
from scrapy.dupefilters import BaseDupeFilter

# Python 2 hack: re-expose sys.setdefaultencoding (hidden by site.py at
# interpreter startup) and force UTF-8 as the process-wide default codec.
# NOTE(review): this is a well-known anti-pattern — it globally masks
# str/unicode mixing bugs and does not exist on Python 3; confirm it is
# still needed before porting.
reload(sys)
sys.setdefaultencoding('utf-8')

# Fallback MySQL connection parameters used when DbDupeFilter is built
# without explicit values (see DbDupeFilter.__init__ defaults).
# NOTE(review): credentials hard-coded in source — consider moving them
# into Scrapy settings or the environment.
default_host = 'localhost'
default_user = 'root'
default_passwd = 'root1234'
default_db = 'scrapydb'
default_port = 3306
default_charset = 'utf8'

class DbDupeFilter(BaseDupeFilter):
    """Scrapy duplicate-request filter backed by a MySQL table.

    All previously seen URLs are loaded from ``table`` (column ``url``)
    once at startup; ``request_seen`` then answers from an in-memory set
    and flushes newly seen URLs back to the table in small batches.
    """

    @classmethod
    def from_settings(cls, settings):
        """Build the filter from Scrapy settings.

        Reads DUPEFILTER_DEBUG (bool) and DUPEFILTER_TABLE (table name);
        connection parameters fall back to the module-level defaults.
        """
        debug = settings.getbool('DUPEFILTER_DEBUG')
        table = settings["DUPEFILTER_TABLE"]
        return cls(table=table, debug=debug)

    def __init__(self, host=default_host, user=default_user, passwd=default_passwd,
                 db=default_db, port=default_port, charset=default_charset,
                 table=None, debug=False):
        self.host = host
        self.user = user
        self.passwd = passwd
        self.db = db
        self.port = port
        self.charset = charset
        self.table = table
        self.debug = debug
        self.logdupes = True
        # URLs first seen during this run that have not been flushed yet.
        self.new_url_set = set()
        self.logger = logging.getLogger(__name__)
        # Only hit the database when a complete connection config is given
        # (the filter may be constructed without a table, e.g. in tests).
        if host and user and passwd and db and port and table:
            self.seen_url_set = self.get_seen_request()

    def _connect(self):
        """Open a new MySQL connection using this filter's parameters."""
        return MySQLdb.connect(host=self.host, user=self.user, passwd=self.passwd,
                               db=self.db, port=self.port, charset=self.charset)

    def get_seen_request(self):
        """Load every URL already stored in ``self.table``.

        Returns:
            set of URL strings found in the table's ``url`` column.
        """
        self.logger.info("getting seen url list, fetching from %s..." % self.table)
        # The table name cannot be a bound parameter; it comes from crawler
        # settings (trusted config), not from scraped data.
        with closing(self._connect()) as conn:
            # BUGFIX: the original opened a managed cursor and then shadowed
            # it with a second raw conn.cursor(); now only one cursor exists
            # and it is reliably closed.
            with closing(conn.cursor()) as cur:
                cur.execute("select url from %s" % self.table)
                query_result = cur.fetchall()

        seen_url_set = set(row[0] for row in query_result)
        self.logger.info("there are %d seen url in the set..." % len(seen_url_set))
        return seen_url_set

    def request_seen(self, request):
        """Return True if ``request.url`` was already seen; record it otherwise.

        Newly seen URLs accumulate in ``self.new_url_set`` and are flushed
        to MySQL once more than one is pending.
        """
        url = request.url
        if url in self.seen_url_set:
            return True
        self.seen_url_set.add(url)
        self.new_url_set.add(url)
        if len(self.new_url_set) > 1:
            self.persist_new_url_set(self.new_url_set)

    def persist_new_url_set(self, url_set):
        """Insert every URL in ``url_set`` into ``self.table``.

        Uses a parameterized ``executemany`` — the original string-built
        INSERT was vulnerable to SQL injection / breakage on any URL
        containing a quote.  On a successful commit the set is cleared so
        the same URLs are not re-inserted by later flushes; on failure the
        batch is kept for a retry.
        """
        # Only the values are parameterized; %%s survives the table-name
        # interpolation and reaches the driver as its %s placeholder.
        sql_insert = ("insert into %s (url, createDtTm) "
                      "values (%%s, current_timestamp)" % self.table)
        try:
            with closing(self._connect()) as conn:
                with closing(conn.cursor()) as cur:
                    try:
                        cur.executemany(sql_insert, [(url,) for url in url_set])
                        conn.commit()
                    except MySQLdb.Error as e:
                        self.logger.fatal("Mysql insert error %d: %s" % (e.args[0], e.args[1]))
                        # Roll back while the connection is still open (the
                        # original rolled back after ``closing`` had already
                        # closed it, or on an unbound name if connect failed).
                        conn.rollback()
                        return
        except MySQLdb.Error as e:
            # Connection itself failed; keep the batch for a later retry.
            self.logger.fatal("Mysql connect error: %s" % e)
            return
        # Flushed successfully: drop the batch so it is not re-inserted.
        url_set.clear()

    def log(self, request, spider):
        """Log a filtered duplicate and bump the dupefilter/filtered stat.

        With DUPEFILTER_DEBUG every duplicate is logged; otherwise only the
        first one is, after which ``self.logdupes`` suppresses the rest.
        """
        if self.debug:
            msg = "Filtered duplicate request: %(request)s"
            self.logger.debug(msg, {'request': request}, extra={'spider': spider})
        elif self.logdupes:
            msg = ("Filtered duplicate request: %(request)s"
                   " - no more duplicates will be shown"
                   " (see DUPEFILTER_DEBUG to show all duplicates)")
            self.logger.debug(msg, {'request': request}, extra={'spider': spider})
            self.logdupes = False

        spider.crawler.stats.inc_value('dupefilter/filtered', spider=spider)