# -*- coding: utf-8 -*-

__author__ = 'bitfeng'
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html


import sys
import os
import datetime
import logging
from commonfun import renameFile, joinpath
from scrapy import log
from scrapy import signals
from db_connect import MySQLConnect, PostgreSQLConnect, MongoDBConnect

# Python 2 idiom: site.py deletes sys.setdefaultencoding at startup; reloading
# the sys module restores it so implicit str<->unicode conversions use UTF-8.
reload(sys)
sys.setdefaultencoding('utf-8')


# 格式转换
class FormatItemPipeline(object):
    """Normalize item fields: collapse list values into plain strings.

    Empty lists become the literal string 'null', single-element lists become
    that element, and longer lists are joined with '|'.
    """

    def process_item(self, item, spider):
        """Return *item* with every list-valued field flattened to a string.

        spider is unused; it is part of the Scrapy pipeline signature.
        """
        for key in list(item.keys()):
            value = item[key]
            if not isinstance(value, list):
                continue
            # Drop ALL empty strings. The old code called .remove('') once,
            # which stripped only the first empty entry and let the rest leak
            # into the '|'-join (e.g. ['', '', 'x'] -> '|x').
            cleaned = [v for v in value if v != '']
            if not cleaned:
                item[key] = 'null'
            elif len(cleaned) == 1:
                item[key] = cleaned[0]
            else:
                item[key] = '|'.join(cleaned)
        return item


# MYSQL数据库，dbpool线程池，实现多线程插入数据
# SQL pipeline (MySQL or PostgreSQL). Inserts run on a dbpool thread pool so
# the crawl is not blocked by database latency.
class DBSQLPipeline(object):

    @classmethod
    def from_crawler(cls, crawler):
        """Standard Scrapy factory: build the pipeline from crawler settings."""
        return cls(crawler.settings)

    def __init__(self, settings):
        # DB_USE['PIPELINE'] selects the backend; anything other than
        # 'POSTGRESQL' falls back to MySQL.
        db = settings.get('DB_USE')['PIPELINE']
        if db == 'POSTGRESQL':
            self.dbpool = PostgreSQLConnect.createDbpool(settings.get('POSTGRESQL_URI'))
        else:
            self.dbpool = MySQLConnect.createDbpool(settings.get('MYSQL_URI'))
        self.base_path = settings.get('FILE_BASEPATH')

    def process_item(self, item, spider):
        """Queue an INSERT for every item except file and mongo items.

        FileItem / MongoItem are handled by the other pipelines in this module.
        """
        if item.__class__.__name__ not in ['FileItem', 'MongoItem']:
            query = self.dbpool.runInteraction(self._item_insert, item)
            query.addErrback(self.handle_error)
        return item

    def _item_insert(self, tx, item):
        """Build and execute an INSERT derived from the item itself.

        The table name is the lower-cased item class name and the columns are
        the item's field names, so new item types need no pipeline changes.
        Runs on a dbpool thread; ``tx`` is a DB-API cursor-like object.
        """
        table = item.__class__.__name__.lower()
        itemdict = dict(item)
        columns = list(itemdict.keys())
        sqli = ('insert into ' + table + '(' + ', '.join(columns) +
                ') values(' + ','.join(['%s'] * len(columns)) + ')')
        # Parameterized execute: the driver escapes the values.
        # (The old code rebuilt dict(item) a second time and used
        # setdefault(key, None) on keys taken from the same dict, which can
        # never fall back to None -- a plain lookup is equivalent.)
        tx.execute(sqli, [itemdict[col] for col in columns])

    def handle_error(self, e):
        log.err(e)


class MongoDBPipeline(object):
    """Upsert MongoItem payloads into MongoDB; all other items pass through."""

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy factory; also closes the connection when the spider stops."""
        pipeline = cls(crawler.settings)
        crawler.signals.connect(pipeline.close_spider, signals.spider_closed)
        return pipeline

    def __init__(self, settings):
        uri = settings.get('MONGODB_URI')
        self.conn = MongoDBConnect.createConn(uri)
        self.dbname = uri['database']

    def close_spider(self, spider):
        self.conn.close()

    def process_item(self, item, spider):
        """Upsert the item under _id=item['id']; skip non-MongoItem items."""
        if item.__class__.__name__ != 'MongoItem':
            return item
        # Collection defaults to the spider name unless the spider sets one.
        collection = getattr(spider, 'collection', None) or spider.name
        document = dict(item['value'])
        result = self.conn[self.dbname][collection].update_one(
            filter={'_id': item['id']},
            update={'$set': {'meta': item['meta'], item['key']: document}},
            upsert=True)
        if result.matched_count > 0:
            logging.log(logging.DEBUG, "Item already stored in mongodb: %s" % item['id'])
        return item


class FileStoragePipeline(object):
    """Write FileItem contents to disk under FILE_BASEPATH; skip other items."""

    @classmethod
    def from_crawler(cls, crawler):
        """Standard Scrapy factory: build the pipeline from crawler settings."""
        return cls(crawler.settings)

    def __init__(self, settings):
        self.base_path = settings.get('FILE_BASEPATH')

    def process_item(self, item, spider):
        """Persist item['content'] to <base>/<relativePath>/<name>.<suffix>.

        Existing files are never overwritten; a warning is logged instead.
        """
        if item.__class__.__name__ != 'FileItem':
            return item
        # A spider may override the configured base path with its own.
        base_path = getattr(spider, 'base_path', self.base_path)
        pathfile = joinpath([base_path, item['relativePath'],
                             renameFile(item['filename']) + '.' + item['suffix']])
        if os.path.isfile(pathfile):
            # Bug fix: the original message had a %s placeholder but never
            # interpolated the path, so the log line was useless.
            logging.log(logging.INFO, '[Warning] The file [%s] exists' % pathfile)
        else:
            dirname = os.path.dirname(pathfile)
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            # with-statement closes the handle even if write() raises.
            with open(pathfile, 'wb') as f:
                f.write(item['content'])
        return item


# class  qiyegongshi(object):
#
#     @classmethod
#     def from_crawler(cls, crawler):
#         pip = cls(crawler.settings)
#         return pip
#
#     def __init__(self, settings):
#         self.base_path = settings.get('FILE_BASEPATH')
#
#     def process_item(self, item, spider):
#
#         if item.__class__.__name__ == 'QiYeGongShi':
#             file_abspath = joinpath(self.base_path, item['company_name'])
#             # pathfile = joinpath([base_path, item['relativePath'], renameFile(item['filename'])+'.'+item['suffix']])
#             if os.path.isfile(file_abspath):
#                 logging.log(logging.INFO, '[Warning] The file [%s] exists')
#             # else:
#             # dirname = os.path.dirname(file_abspath)
#             # if not os.path.isdir(dirname):
#             #     os.makedirs(dirname)
#             f = open(file_abspath, 'ab')
#             f.write(item['content'])
#             f.close()
#         return item


