# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
from CrawlerProxy.DBHelper import *

class CrawlerProxyPipeline(object):
    """Scrapy item pipeline that persists scraped proxy items into the
    ``proxies`` database table via the project's DBHelper.

    Enable it through the ITEM_PIPELINES setting (see module header).
    """

    def open_spider(self, spider):
        """Called once when the spider opens.

        Create a single DBHelper for the spider's lifetime instead of
        instantiating a new one for every item in process_item.
        """
        self.db_helper = DBHelper()

    def process_item(self, item, spider):
        """Insert one proxy item into the database and pass it on.

        :param item: scraped proxy item with keys ip, port, type,
            position, speed, last_check_time
        :param spider: the spider that produced the item
        :returns: the unchanged item, so later pipelines can process it
        """
        # id is passed as 0 so the database assigns it — presumably an
        # AUTO_INCREMENT column; TODO confirm against the proxies schema.
        sql = 'INSERT INTO proxies (id, ip, port, type, position, speed, \
        last_check_time) \
         VALUES (%s, %s, %s, %s, %s, %s, %s)'
        # Execute a single row. The outer one-element tuple matches what
        # DBHelper.insert expects — a sequence of parameter tuples.
        params = (
            (0, str(item['ip']), str(item['port']), str(item['type']),
                str(item['position']), str(item['speed']),
                str(item['last_check_time'])),
        )
        results = self.db_helper.insert(sql, params)
        # Log through the spider's logger rather than printing to stdout.
        spider.logger.debug('Inserted proxy, insert result: %s', results)

        return item
