# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html

import pymysql
from twisted.enterprise import adbapi
# from scrapy import log
import logging
log = logging.getLogger(__name__)

class BytravelPipeline(object):
    """Scrapy item pipeline that persists scraped items into MySQL.

    Opens a single pymysql connection when the spider starts, inserts one
    row per item into ``tb_bytravel``, and closes the connection when the
    spider finishes.
    """

    # Shared pymysql connection, created in open_spider().
    db = None
    # Most recent cursor used by process_item(); kept for API parity.
    cursor = None

    def open_spider(self, spider):
        """Connect to the MySQL server when the spider is opened.

        :param spider: the spider being opened (unused beyond the hook contract).
        """
        log.info('---MySQL_CONNECTING---连接到mysql服务器...---MySQL_CONNECTING---')
        # NOTE(review): credentials are hard-coded in source; prefer loading
        # them from Scrapy settings (crawler.settings) or the environment.
        self.db = pymysql.connect(host='agno.top', port=3306, user='db_scrapy_test',
                             passwd='db_scrapy_test', db='db_scrapy_test', charset='utf8')
        log.info("---MySQL_CONNECTED---数据库已连接---MySQL_CONNECTED---")

    def get_cursor(self, db):
        """Return a fresh cursor from the pipeline's connection.

        The ``db`` argument is accepted for backward compatibility but is
        ignored; the cursor always comes from ``self.db``.
        """
        return self.db.cursor()

    def close_spider(self, spider):
        """Close the MySQL connection when the spider is closed.

        :param spider: the spider being closed (unused beyond the hook contract).
        """
        self.db.close()

    def process_item(self, item, spider):
        """Insert one scraped item into ``tb_bytravel``.

        Uses a parameterized query (safe against SQL injection), commits on
        success, rolls back on failure so a failed insert does not leave the
        shared connection with an open transaction, and always closes the
        per-item cursor. Returns the item so later pipelines keep running.
        """
        cur = None
        try:
            self.cursor = cur = self.get_cursor(self.db)
            sql = ("INSERT INTO tb_bytravel(name, url, commont, img, img_details, "
                   "content, pro_name, pro_url, distinction) "
                   "values(%s, %s, %s, %s, %s, %s, %s, %s, %s)")
            cur.execute(sql, (item['name'], item['url'], item['commont'],
                              item['img'], item['img_details'], item['content'],
                              item['pro_name'], item['pro_url'], item['distinct']))
            self.db.commit()
        except Exception:
            # log.exception records the full traceback, unlike log.error(e).
            log.exception('failed to insert item into tb_bytravel')
            try:
                # Undo the failed transaction so the connection stays usable.
                self.db.rollback()
            except Exception:
                # Best-effort: the connection itself may already be broken.
                pass
        finally:
            # Close the cursor so each processed item does not leak one.
            if cur is not None:
                cur.close()
        return item