import base64
import json
import os
import sqlite3
from os import path
from urllib import request
from urllib.request import urlopen


class DBPipeline(object):
    """Item pipeline that buffers scraped items and bulk-inserts them into SQLite.

    Items are accumulated in memory during the crawl and written in a single
    ``executemany`` when the spider closes. The target table is truncated when
    the spider opens, so each run fully replaces the previous contents.
    """

    def __init__(self, sqlite_file, sqlite_table):
        # Path of the SQLite database file and name of the target table.
        self.sqlite_file = sqlite_file
        self.sqlite_table = sqlite_table
        # Rows buffered until close_spider flushes them in one batch.
        self.insert_datas = []

    @classmethod
    def from_crawler(cls, crawler):
        """Build the pipeline from crawler settings.

        Reads SQLITE_FILE (required) and SQLITE_TABLE (default 'face_youma')
        from settings.py.  NOTE(review): assumes ``crawler.settings`` follows
        the Scrapy Settings API — confirm against the framework in use.
        """
        return cls(
            sqlite_file=crawler.settings.get('SQLITE_FILE'),  # taken from settings.py
            sqlite_table=crawler.settings.get('SQLITE_TABLE', 'face_youma')
        )

    def open_spider(self, spider):
        """Open the database connection and truncate the target table."""
        self.conn = sqlite3.connect(self.sqlite_file)
        self.cur = self.conn.cursor()

        # Table/column names cannot be bound as SQL parameters; the table name
        # comes from trusted settings, not user input.
        self.cur.execute("delete from {0}".format(self.sqlite_table))
        self.conn.commit()

    def close_spider(self, spider):
        """Flush all buffered rows in one batch and close the connection.

        try/finally guarantees the connection is closed even if the bulk
        insert fails partway through.
        """
        insert_sql = "insert into {0}({1}) values ({2})".format(
            self.sqlite_table,
            ', '.join(['username', 'imgurl', 'videourl', 'buf']),
            ', '.join(['?'] * 4))
        try:
            self.cur.executemany(insert_sql, self.insert_datas)
            self.conn.commit()
        finally:
            self.conn.close()
        print('一共' + str(len(self.insert_datas)) + '行')

    def process_item(self, item, spider):
        """Buffer one item's fields for the batched insert at spider close."""
        self.insert_datas.append(
            [item['username'], item['imgurl'], item['videourl'], item['buf']])
        return item

    def get_imgbase64str(self, url):
        """Download *url* and return its body as a base64-encoded ASCII string.

        Returns '' when the response body is empty.  Image payloads are raw
        bytes, so they are base64-encoded rather than decoded as UTF-8
        (decoding binary data as UTF-8 raises UnicodeDecodeError).
        """
        req = request.Request(url)
        # Context manager ensures the HTTP response is closed.
        with urlopen(req) as resp:
            content = resp.read()
        if content:
            return base64.b64encode(content).decode('ascii')
        return ''
