# -*- coding: utf-8 -*-
import pymysql


class MysqlPipelines(object):
    """Scrapy item pipeline that inserts each item as a row of `cmf_crawl`.

    The connection and cursor are injected (see ``from_settings``) so the
    pipeline itself stays testable without a live database.
    """

    def __init__(self, cursor, conn):
        # cursor: a DictCursor-style cursor; conn: the owning pymysql connection.
        self.cursor = cursor
        self.conn = conn

    @classmethod
    def from_settings(cls, settings):
        """Alternate constructor: build connection/cursor from Scrapy settings.

        Expects MYSQL_HOST / MYSQL_NAME / MYSQL_USER / MYSQL_PWD / MYSQL_PORT.
        """
        host = settings['MYSQL_HOST']
        db = settings['MYSQL_NAME']
        user = settings['MYSQL_USER']
        passwd = settings['MYSQL_PWD']
        port = settings['MYSQL_PORT']

        conn = pymysql.connect(host=host, port=port, user=user, passwd=passwd, db=db, charset='utf8')
        cursor = conn.cursor(cursor=pymysql.cursors.DictCursor)
        return cls(cursor, conn)

    # Called by Scrapy for every scraped item.
    def process_item(self, item, spider):
        """Insert `item` into cmf_crawl (one column per item field) and return it.

        Values are passed as query parameters; column names are taken verbatim
        from the item's field names. NOTE(review): field names are assumed to be
        trusted spider-defined identifiers — they are interpolated into the SQL.
        """
        if not item:
            # An empty item would produce "insert into cmf_crawl () values ()",
            # which is invalid MySQL — skip instead of crashing.
            return item

        keys = list(item)
        values = tuple(item[k] for k in keys)
        placeholders = ','.join(['%s'] * len(keys))
        columns = ','.join(keys)

        sql = "insert into cmf_crawl (" + columns + ") values (" + placeholders + ")"
        self.cursor.execute(sql, values)
        self.conn.commit()
        return item

    def close_spider(self, spider):
        """Release the cursor and connection when the spider finishes.

        Bug fix: the old check `not self.conn.ping() is None` was always False
        (pymysql's ping() returns None), so nothing was ever closed. Use the
        connection's `open` flag instead.
        """
        try:
            self.cursor.close()
        finally:
            if self.conn.open:
                self.conn.close()