# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
'''
Item pipelines for this Scrapy project. Intended to persist scraped
items to MySQL (via PyMySQL), wired in through ITEM_PIPELINES.
'''
import pymysql.cursors


class ScrapylPipeline(object):
    """Pass-through item pipeline.

    Currently a no-op: every item is returned unchanged so that any
    later pipelines (and the feed exporters) still receive it.

    NOTE(review): an earlier draft opened a MySQL connection in
    ``__init__`` with hard-coded credentials (host/user/password
    inlined in source). That dead code has been removed; if MySQL
    persistence is reintroduced, read the connection parameters from
    Scrapy settings (``crawler.settings``) inside ``open_spider`` and
    close the connection in ``close_spider`` — never commit
    credentials to source control.
    """

    def process_item(self, item, spider):
        """Return *item* unmodified.

        Parameters
        ----------
        item : scrapy.Item or dict
            The scraped item handed over by the engine.
        spider : scrapy.Spider
            The spider that produced the item (unused here).
        """
        # Returning the item keeps it flowing to subsequent pipelines;
        # returning None (or raising DropItem) would discard it.
        return item
