# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import sqlite3

class ReadfundPipeline(object):
    """Scrapy item pipeline that persists each scraped item into a SQLite table.

    The database file path and target table name are read from the crawler
    settings ``SQLITE_FILE`` and ``SQLITE_TABLE`` (see ``from_crawler``).
    The target table must already exist with columns matching the item fields.
    """

    def __init__(self, sqlite_file, sqlite_table):
        # Path to the SQLite database file and name of the destination table.
        self.sqlite_file = sqlite_file
        self.sqlite_table = sqlite_table

    @classmethod
    def from_crawler(cls, crawler):
        """Alternate constructor used by Scrapy: pull configuration from settings."""
        return cls(
            sqlite_file=crawler.settings.get('SQLITE_FILE'),
            sqlite_table=crawler.settings.get('SQLITE_TABLE'),
        )

    def open_spider(self, spider):
        """Open the database connection and cursor once when the spider starts."""
        self.con = sqlite3.connect(self.sqlite_file)
        self.cu = self.con.cursor()

    def process_item(self, item, spider):
        """Insert one item into the configured table and return it unchanged.

        Column names are taken from the item's keys; values are bound through
        '?' placeholders so they are escaped by the sqlite3 driver. The table
        and column names are interpolated directly into the SQL — they come
        from trusted settings and the item definition, not from scraped input.
        """
        columns = list(item.keys())
        insert_sql = "INSERT INTO {0} ({1}) VALUES ({2})".format(
            self.sqlite_table,
            ', '.join(columns),
            ', '.join(['?'] * len(columns)),
        )
        self.cu.execute(insert_sql, tuple(item.values()))
        # Commit per item: slower than batching, but nothing scraped so far
        # is lost if the spider crashes mid-run.
        self.con.commit()
        return item

    def close_spider(self, spider):
        """Release database resources when the spider finishes."""
        # Fix: the cursor was previously leaked; close it before the connection.
        self.cu.close()
        self.con.close()