# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import logging
from datetime import date, time

import pymysql


class HttpproxyPipeline(object):
    """Persist scraped proxy items into the SPIDER_PROXY MySQL table.

    For each item: insert a new row when the (IP, PORT) pair is unseen,
    otherwise refresh the existing row's metadata and UPDATE_DATE.
    """

    def process_item(self, item, spider):
        """Upsert one proxy item and return it unchanged (Scrapy contract).

        NOTE(review): a fresh connection is opened per item, matching the
        original behavior; for throughput, consider moving connection
        setup/teardown to open_spider/close_spider.
        """
        conn = pymysql.connect(host='192.168.124.150', port=3306, user='root',
                               password='root', db='blade', charset='utf8',
                               cursorclass=pymysql.cursors.DictCursor)
        try:
            # Context manager closes the cursor even on error (the original
            # leaked it); parameterized queries replace str.format-built SQL,
            # which was injectable from scraped (untrusted) values.
            with conn.cursor() as cursor:
                cursor.execute(
                    "select count(IP) isExist from SPIDER_PROXY"
                    " where IP = %s and PORT = %s",
                    (str(item['ip']), int(item['port'])))
                exist_ = cursor.fetchone()['isExist']
                if exist_ == 0:
                    cursor.execute(
                        "insert into SPIDER_PROXY (IP, PORT, ANONYMOUS, TYPE,"
                        " ADDRESS, SPEED, VERIFICATION_TIME, CREATE_DATE,"
                        " SOURCE, UPDATE_DATE)"
                        " values (%s, %s, %s, %s, %s, %s, %s, sysdate(), %s, sysdate())",
                        (str(item['ip']), int(item['port']),
                         str(item['anonymous']), str(item['type']),
                         str(item['address']), str(item['speed']),
                         str(item['verification_time']).strip(), '快代理'))
                else:
                    # BUG FIX: the original UPDATE had no WHERE clause and
                    # therefore rewrote EVERY row in SPIDER_PROXY whenever a
                    # duplicate proxy arrived. Scope it to this (IP, PORT).
                    cursor.execute(
                        "update SPIDER_PROXY set ANONYMOUS = %s, TYPE = %s,"
                        " ADDRESS = %s, SPEED = %s, VERIFICATION_TIME = %s,"
                        " SOURCE = %s, UPDATE_DATE = sysdate()"
                        " where IP = %s and PORT = %s",
                        (str(item['anonymous']), str(item['type']),
                         str(item['address']), str(item['speed']),
                         str(item['verification_time']).strip(), '快代理',
                         str(item['ip']), int(item['port'])))
            conn.commit()
        finally:
            conn.close()
        return item

