import scrapy
import MySQLdb


class ValidateItem(object):
    """Drop items that are missing any of their declared fields."""

    def process_item(self, item, spider):
        """Return *item* unchanged, or drop it if a declared field is unset.

        Raises:
            scrapy.exceptions.DropItem: when any field declared on the item
                class is missing from the populated item.
        """
        for key in item.fields:  # dicts iterate keys; .keys() was redundant
            if key not in item:
                # The old print() wrote to stdout and bypassed logging;
                # Scrapy itself logs the DropItem reason, so it is dropped.
                raise scrapy.exceptions.DropItem('key %s is missing' % key)
        return item


class TextWriterPipeline:
    """Append each item's text, author, and tags to a plain-text file."""

    def open_spider(self, spider):
        """Open the output file when the spider starts."""
        # Explicit encoding: scraped text is rarely pure ASCII, and the
        # platform default codec could raise UnicodeEncodeError on write.
        self.file = open('items.txt', 'w', encoding='utf-8')

    def close_spider(self, spider):
        """Close the output file when the spider finishes."""
        self.file.close()

    def process_item(self, item, spider):
        """Write the item's known fields, one per line, then a blank line."""
        for key in ('text', 'author', 'tags'):
            self.file.write(f'{key}: {item[key]}\n')
        self.file.write('\n')
        return item


class MysqlPipeline:
    """Insert scraped items into a MySQL table.

    Connection credentials and the target table are read from the crawler
    settings: USER, PASSWORD, DATABASE, CHARSET, DBTABLE.
    """

    @classmethod
    def from_crawler(cls, crawler):
        """Alternate constructor: pull connection settings from the crawler."""
        settings = crawler.settings
        return cls(
            settings.get('USER'),
            settings.get('PASSWORD'),
            settings.get('DATABASE'),
            settings.get('CHARSET'),
            settings.get('DBTABLE'),
        )

    def __init__(self, user, password, database, charset, dbtable):
        self.user = user
        self.password = password
        self.database = database
        self.charset = charset
        self.dbtable = dbtable

    def open_spider(self, spider):
        """Open the database connection and cursor when the spider starts."""
        self.conn = MySQLdb.connect(user=self.user, password=self.password,
                                    database=self.database, charset=self.charset)
        self.cursor = self.conn.cursor()

    def close_spider(self, spider):
        """Commit pending rows, then release the cursor and connection.

        try/finally guarantees the cursor and connection are closed even if
        the commit raises; the original leaked the cursor entirely.
        """
        try:
            self.conn.commit()
        finally:
            self.cursor.close()
            self.conn.close()

    def process_item(self, item, spider):
        """Insert one item as a (text, author, tags) row and return it.

        Values are passed as execute() parameters so the driver escapes
        them. The table name cannot be parameterized; it comes from trusted
        settings (DBTABLE), not from scraped data, so the concatenation is
        not an injection vector.
        """
        sql = 'insert into ' + self.dbtable + ' values (%s, %s, %s)'
        self.cursor.execute(sql, (item['text'], item['author'], item['tags']))
        return item
