# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy.utils.project import get_project_settings
from twisted.enterprise import adbapi
from scrapy import log
import MySQLdb
import MySQLdb.cursors
import time

class GetProxyPipeline(object):
    """Scrapy item pipeline that persists scraped proxy records into MySQL.

    Database writes are dispatched through a Twisted adbapi connection
    pool, so they run in a thread pool and never block the reactor.
    """

    def __init__(self):
        # Read the MySQL connection parameters from the project settings.
        settings = get_project_settings()

        params = dict(
            host=settings.get('MYSQL_HOST'),
            db=settings.get('MYSQL_DB'),
            user=settings.get('MYSQL_USER'),
            passwd=settings.get('MYSQL_PWD'),
            # Explicit charset avoids mojibake with non-ASCII (e.g. Chinese)
            # location strings.
            charset='utf8',
            cursorclass=MySQLdb.cursors.DictCursor,
            use_unicode=False,
        )
        # ** expands the dict into keyword arguments (host=..., db=..., ...).
        self.dbpool = adbapi.ConnectionPool('MySQLdb', **params)

    def handle_error(self, e):
        # Errback for failed DB interactions: log the failure, drop nothing.
        # NOTE(review): scrapy.log is deprecated in modern Scrapy; migrate to
        # the stdlib logging module when the project upgrades.
        log.err(e)

    def process_proxy_item(self, tx, item):
        """Insert *item* into proxy_list unless its ip is already stored.

        Runs inside dbpool.runInteraction, so *tx* is a cursor bound to a
        single transaction.  NOTE(review): the SELECT-then-INSERT pair is
        racy under concurrent writers; a UNIQUE index on ip plus
        INSERT IGNORE would be safer -- confirm before changing the schema.
        """
        tx.execute("select id from proxy_list where ip = %s", [item['ip']])
        result = tx.fetchone()
        if not result:
            tx.execute(
                "insert into proxy_list (ip, port, type, location, protocol, source, good) values (%s,%s,%s,%s,%s,%s,%s)",
                (item['ip'], item['port'], item['type'], item['location'],
                 item['protocol'], item['source'], item['good']))
        else:
            # Single-argument parenthesized print works identically on
            # Python 2 and 3 (the bare print statement is a SyntaxError
            # on Python 3).
            print("proxy exists")

    def process_item(self, item, spider):
        """Schedule the asynchronous DB write and pass the item through."""
        query = self.dbpool.runInteraction(self.process_proxy_item, item)
        query.addErrback(self.handle_error)
        return item