# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html

import logging
from urllib.parse import unquote

import pymysql
import redis

class FoodspiderPipeline(object):
    """Scrapy item pipeline that records each scraped food name in Redis.

    Every item's ``name`` field is added to the Redis set ``foodname:list``;
    a set is used so repeated crawls naturally de-duplicate the names.
    """

    # Redis key of the set holding all collected food names.
    REDIS_KEY = "foodname:list"

    def __init__(self):
        # Connection is held for the pipeline's whole lifetime and released
        # in close_spider().
        # NOTE(review): host/port are hard-coded; consider reading them from
        # the Scrapy settings instead — confirm with project configuration.
        self.conn = redis.Redis(host="127.0.0.1", port=6379)

    def process_item(self, item, spider):
        """Add ``item['name']`` to the Redis set and pass the item through.

        Parameters
        ----------
        item : mapping with a ``'name'`` key (raises KeyError otherwise,
            which lets Scrapy report the item as failed).
        spider : the running spider (unused here).

        Returns
        -------
        The unchanged item, so later pipeline stages still receive it.
        """
        name = item["name"]
        # Lazy %-style args: the message is only formatted if emitted.
        logging.getLogger(__name__).debug("collected food name: %s", name)
        self.conn.sadd(self.REDIS_KEY, name)
        return item

    def close_spider(self, spider):
        """Release the Redis connection when the spider finishes.

        Explicit cleanup here is more reliable than ``__del__``, whose
        invocation timing CPython does not guarantee.
        """
        self.conn.close()
