# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter

from scrapy_qimanwu.items import ScrapyQimanwuItem, ScrapyComicItem
import urllib.request
import os

# Load the project settings (DB credentials, LOCAL_PATH, etc.)
from scrapy.utils.project import get_project_settings


class ScrapyQimanwuPipeline:
    """Download every image of a chapter item to the local filesystem.

    Files are stored as <LOCAL_PATH>/<comic name>/<chapter name>/<i>.jpg,
    numbered from 1 in the order the image URLs appear in the item.
    """

    def process_item(self, item, spider):
        # Only chapter items carry images; other item types pass through.
        if isinstance(item, ScrapyQimanwuItem):
            settings = get_project_settings()
            local_path = settings['LOCAL_PATH']
            # The target directory is the same for every image of this item:
            # build it once (the original rebuilt and re-checked it per image).
            path = local_path + '/' + item['name'] + '/' + item['chaptername']
            # exist_ok avoids the check-then-create race of exists()+makedirs().
            os.makedirs(path, exist_ok=True)
            for i, img in enumerate(item['imgs'], start=1):
                filename = path + '/' + str(i) + '.jpg'
                # Skip images already downloaded on a previous run.
                if not os.path.exists(filename):
                    urllib.request.urlretrieve(img, filename)
        return item


import pymysql


class MysqlPipeline:
    """Persist chapter rows and their image URLs into MySQL.

    Expects `comics` rows to already exist (written by ComicPipeline);
    writes one `chapter` row per item and one `imgs` row per image URL.
    """

    def open_spider(self, spider):
        """Read the DB_* settings and open the database connection."""
        settings = get_project_settings()
        self.host = settings['DB_HOST']
        self.port = settings['DB_PORT']
        self.user = settings['DB_USER']
        self.password = settings['DB_PASSWORD']
        self.name = settings['DB_NAME']
        self.charset = settings['DB_CHARSET']
        self.connect()

    def connect(self):
        """Create the pymysql connection and cursor from the stored settings."""
        self.conn = pymysql.connect(host=self.host,
                                    port=self.port,
                                    user=self.user,
                                    password=self.password,
                                    db=self.name,
                                    charset=self.charset)
        self.cursor = self.conn.cursor()

    def process_item(self, item, spider):
        """Insert the chapter and its images; skip chapters already stored.

        Always returns the item unchanged so later pipelines still see it.
        """
        if isinstance(item, ScrapyQimanwuItem):
            # Parameterized queries throughout: the scraped values are
            # untrusted input — never .format() them into the SQL string
            # (the original was injectable and broke on embedded quotes).
            self.cursor.execute('select * from comics where name = %s',
                                (item['name'],))
            row = self.cursor.fetchone()
            if row is None:
                # Parent comic not registered: nothing to link the chapter
                # to. The original used a module `global comics_id` here,
                # which raised NameError (or reused a stale id) in this case.
                return item
            comics_id = row[0]

            # Has this chapter already been written on a previous run?
            self.cursor.execute('select * from chapter where chapterid = %s',
                                (item['chapterid'],))
            if self.cursor.fetchone() is not None:
                return item

            self.cursor.execute(
                'insert into chapter(chapterid,chaptername,href,xh,comics_id) '
                'values(%s,%s,%s,%s,%s)',
                (item['chapterid'], item['chaptername'], item['href'],
                 item['xh'], comics_id))
            # Auto-increment id of the chapter row just inserted.
            chapter_id = self.cursor.lastrowid
            self.conn.commit()

            name = item['name']
            for i, img in enumerate(item['imgs'], start=1):
                # Mirrors the on-disk layout used by ScrapyQimanwuPipeline.
                localpath = '/' + name + '/' + item['chaptername'] + '/' + str(i) + '.jpg'
                self.cursor.execute(
                    'insert into imgs(src,localpath,chapter_id) values(%s,%s,%s)',
                    (img, localpath, chapter_id))
            # One commit for the whole image batch instead of one per row.
            self.conn.commit()
        return item

    def close_spider(self, spider):
        """Release the cursor and connection when the spider finishes."""
        self.cursor.close()
        self.conn.close()


# Pipeline that registers each comic title in the `comics` table.
class ComicPipeline:
    """Insert a `comics` row for every new comic name (deduplicated)."""

    def open_spider(self, spider):
        """Read the DB_* settings and open the database connection."""
        settings = get_project_settings()
        self.host = settings['DB_HOST']
        self.port = settings['DB_PORT']
        self.user = settings['DB_USER']
        self.password = settings['DB_PASSWORD']
        self.name = settings['DB_NAME']
        self.charset = settings['DB_CHARSET']
        self.connect()

    def connect(self):
        """Create the pymysql connection and cursor from the stored settings."""
        self.conn = pymysql.connect(host=self.host,
                                    port=self.port,
                                    user=self.user,
                                    password=self.password,
                                    db=self.name,
                                    charset=self.charset)
        self.cursor = self.conn.cursor()

    def process_item(self, item, spider):
        """Insert the comic name if it is not already stored; pass item on."""
        if isinstance(item, ScrapyComicItem):
            comic_name = item['comic_name']
            # Parameterized query — the scraped title is untrusted input
            # (the original .format()-built SQL was injectable and broke
            # on titles containing quotes).
            self.cursor.execute('select * from comics where name = %s',
                                (comic_name,))
            if self.cursor.fetchone() is None:
                self.cursor.execute('insert into comics(name) values(%s)',
                                    (comic_name,))
                self.conn.commit()
        return item

    def close_spider(self, spider):
        """Release the cursor and connection when the spider finishes."""
        self.cursor.close()
        self.conn.close()
