# -*- coding: utf-8 -*-

# Define your item pipelines here

import csv
import codecs
from string import Template
import MySQLdb
import MySQLdb.cursors
from twisted.enterprise import adbapi
from scrapy import log
from scrapy import signals
from scrapy.exporters import XmlItemExporter, CsvItemExporter
from scrapy.exceptions import DropItem
from car58.items import EasySpiderItem


# Duplicates filter
class DuplicatesPipeline(object):
    """Drop any item whose 'id' field was already seen during this crawl."""

    def __init__(self):
        # ids of items that have already passed through this pipeline
        self.ids_seen = set()

    def process_item(self, item, spider):
        item_id = item['id']
        if item_id not in self.ids_seen:
            self.ids_seen.add(item_id)
            return item
        raise DropItem("Duplicate item found: %s" % item)


class ContactPipeline(object):
    """Write the name/mobile/address of every item to <spider>_items.csv.

    Fix: the original kept a single ``self.writer`` attribute while the
    open files were keyed per spider in ``self.files`` — if two spiders
    ran in the same process, the second ``spider_opened`` clobbered the
    first spider's writer, and closed writers were never released.  The
    writers are now tracked per spider, mirroring ``self.files``.
    """

    # Column order of the exported CSV.
    fields_to_export = ['name', 'mobile', 'address']

    def __init__(self):
        self.files = {}    # spider -> open file handle
        self.writers = {}  # spider -> csv.writer bound to that file

    @classmethod
    def from_crawler(cls, crawler):
        """Wire the pipeline to spider open/close signals."""
        pipeline = cls()
        crawler.signals.connect(pipeline.spider_opened, signals.spider_opened)
        crawler.signals.connect(pipeline.spider_closed, signals.spider_closed)
        return pipeline

    def spider_opened(self, spider):
        # BOM so spreadsheet tools detect the file as UTF-8.
        file = open('%s_items.csv' % spider.name, 'wb')
        file.write(codecs.BOM_UTF8)
        self.files[spider] = file
        writer = csv.writer(file)
        writer.writerow(ContactPipeline.fields_to_export)
        self.writers[spider] = writer

    def process_item(self, item, spider):
        self.writers[spider].writerow(
            [item[field] for field in ContactPipeline.fields_to_export])
        return item

    def spider_closed(self, spider):
        # Drop the writer before closing its underlying file.
        self.writers.pop(spider, None)
        self.files.pop(spider).close()


class XmlExportPipeline(object):
    """Export every scraped item to <spider>_products.xml with XmlItemExporter."""

    def __init__(self):
        # open output files, keyed by spider
        self.files = {}

    @classmethod
    def from_crawler(cls, crawler):
        """Hook spider open/close signals so the exporter lifecycle matches."""
        pipeline = cls()
        hooks = ((pipeline.spider_opened, signals.spider_opened),
                 (pipeline.spider_closed, signals.spider_closed))
        for handler, signal in hooks:
            crawler.signals.connect(handler, signal)
        return pipeline

    def spider_opened(self, spider):
        out = open('%s_products.xml' % spider.name, 'w+b')
        self.files[spider] = out
        self.exporter = XmlItemExporter(out)
        self.exporter.start_exporting()

    def spider_closed(self, spider):
        self.exporter.finish_exporting()
        self.files.pop(spider).close()

    def process_item(self, item, spider):
        self.exporter.export_item(item)
        return item


class CsvExportPipeline(object):
    """Export selected item fields to <spider>_items.csv with CsvItemExporter."""

    # Column order of the exported CSV.
    fields_to_export = ['name', 'mobile', 'address']

    def __init__(self):
        # open output files, keyed by spider
        self.files = {}

    @classmethod
    def from_crawler(cls, crawler):
        """Hook spider open/close signals so the exporter lifecycle matches."""
        pipeline = cls()
        crawler.signals.connect(pipeline.spider_opened, signals.spider_opened)
        crawler.signals.connect(pipeline.spider_closed, signals.spider_closed)
        return pipeline

    def spider_opened(self, spider):
        out = open('%s_items.csv' % spider.name, 'wb')
        # BOM so spreadsheet tools detect the file as UTF-8.
        out.write(codecs.BOM_UTF8)
        self.files[spider] = out
        exporter = CsvItemExporter(out, include_headers_line=True, join_multivalued=',')
        exporter.fields_to_export = CsvExportPipeline.fields_to_export
        exporter.start_exporting()
        self.exporter = exporter

    def spider_closed(self, spider):
        self.exporter.finish_exporting()
        self.files.pop(spider).close()

    def process_item(self, item, spider):
        self.exporter.export_item(item)
        return item


# SQL statement templates used by MySQLPipeline.  $table_name and $fields
# are interpolated as raw strings via Template.substitute — that is only
# safe because they come from crawler settings, never from scraped data.
SQL_CREATE_TABLE = Template("CREATE TABLE IF NOT EXISTS $table_name(id int auto_increment primary key,$fields)")
SQL_INSERT = Template("INSERT INTO $table_name($fields) VALUES($values)")


class MySQLPipeline(object):
    """Asynchronously persist items to MySQL through twisted's adbapi pool.

    A table named after the spider is created when the spider opens; each
    item is inserted inside a thread-pool transaction so the reactor never
    blocks.  Fix: the original ``_insert`` formatted scraped values straight
    into the SQL string as ``"%s"`` — that broke on any value containing a
    double quote and was an SQL injection vector.  Values are now passed to
    the driver via parameter binding.
    """

    # All fields declared on the shared item class.
    fields = EasySpiderItem.fields

    def __init__(self, host, port, db, user, pwd, spider_name, item_order):
        # adbapi: non-blocking wrapper over a DB-API 2.0 driver; every
        # query runs in a thread pool instead of on the reactor thread.
        self.dbpool = adbapi.ConnectionPool('MySQLdb',
                                            host=host,
                                            port=port,
                                            db=db,
                                            user=user,
                                            passwd=pwd,
                                            cursorclass=MySQLdb.cursors.DictCursor,
                                            charset='utf8',
                                            use_unicode=True
                                            )
        self.spider_name = spider_name      # also used as the table name
        self.fields_to_export = item_order  # column order for INSERTs

    @classmethod
    def from_crawler(cls, crawler):
        """Build the pipeline from MYSQL_* / SPIDER_NAME / ITEM_ORDER settings."""
        pipeline = cls(
            host=crawler.settings.get('MYSQL_HOST', 'localhost'),
            port=crawler.settings.getint('MYSQL_PORT', 3306),
            db=crawler.settings.get('MYSQL_DB', 'easyspider'),
            user=crawler.settings.get('MYSQL_USER', 'root'),
            pwd=crawler.settings.get('MYSQL_PWD', '33535'),
            spider_name=crawler.settings.get('SPIDER_NAME'),
            item_order=crawler.settings.getlist('ITEM_ORDER'),
        )
        return pipeline

    def open_spider(self, spider):
        # Create the destination table up front; failures are only logged.
        query = self.dbpool.runInteraction(self._create_table)
        query.addErrback(self.handle_error)

    def close_spider(self, spider):
        pass

    def process_item(self, item, spider):
        # Run the INSERT in the adbapi thread pool; returns immediately.
        query = self.dbpool.runInteraction(self._insert, item)
        query.addErrback(self.handle_error)
        return item

    def _create_table(self, tx):
        # Every exported field becomes a varchar(256) column; the id
        # primary key comes from the SQL_CREATE_TABLE template.
        columns = ','.join([f + ' varchar(256)' for f in self.fields_to_export])
        sql = SQL_CREATE_TABLE.substitute(table_name=self.spider_name, fields=columns)
        tx.execute(sql)

    def _insert(self, tx, item):
        # Driver-side parameter binding: one %s placeholder per column,
        # values supplied separately so quoting/escaping is handled by
        # the MySQL driver instead of string formatting.
        sql = SQL_INSERT.substitute(table_name=self.spider_name,
                                    fields=','.join(self.fields_to_export),
                                    values=','.join(['%s'] * len(self.fields_to_export))
                                    )
        tx.execute(sql, [item[f] for f in self.fields_to_export])

    def handle_error(self, e):
        # Log the twisted Failure; failed items are not retried.
        log.err(e)


import datetime
import pymongo


class MongoPipeline(object):
    """Store scraped items in a MongoDB collection, stamping each with 'ts'.

    Fix: ``Collection.insert`` was deprecated in pymongo 3 and removed in
    pymongo 4; ``insert_one`` is the single-document replacement.  The old
    ``continue_on_error`` flag only ever applied to bulk inserts, so
    dropping it does not change behavior for one document.
    """

    def __init__(self, mongo_uri, mongo_db, mongo_collection):
        self.mongo_uri = mongo_uri
        self.mongo_db = mongo_db
        self.collection_name = mongo_collection

    @classmethod
    def from_crawler(cls, crawler):
        """Build the pipeline from MONGO_URI / MONGO_DB / MONGO_COLLECTION settings."""
        return cls(
            mongo_uri=crawler.settings.get('MONGO_URI'),
            mongo_db=crawler.settings.get('MONGO_DB', 'easyspider'),
            mongo_collection=crawler.settings.get('MONGO_COLLECTION')
        )

    def open_spider(self, spider):
        self.client = pymongo.MongoClient(self.mongo_uri)
        self.db = self.client[self.mongo_db]
        self.collection = self.db[self.collection_name]

    def close_spider(self, spider):
        self.client.close()

    def process_item(self, item, spider):
        # Copy first so the original item object is never mutated.
        doc = dict(item)
        # Insertion timestamp, stored as a string (matches original schema).
        doc['ts'] = str(datetime.datetime.now())
        self.collection.insert_one(doc)
        return doc

