# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import json

# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
from pymysql import cursors
import pymysql
from twisted.internet.defer import gatherResults
from scrapy.pipelines.files import FilesPipeline
from twisted.enterprise import adbapi
from scrapy import settings
import redis
from datetime import datetime
import os
# MySQL host for the pipeline's connection pool; overridable via the
# GLOBAL_HOST environment variable for deployment, defaults to localhost.
GLOBAL_HOST = os.getenv('GLOBAL_HOST', 'localhost')

class ScrapyServicePipeline:
    """Persist scraped note items into MySQL through a Twisted async pool.

    Every item carries a list of note dicts under ``item['data']``. All notes
    are written to the ``notes`` table; depending on the spider, each note is
    additionally linked to a keyword (``keyword_notes``) or a publisher
    (``publisher_notes``). When a tracked spider closes, its row in
    ``task_records`` is marked finished.
    """

    # Spiders whose completion is recorded in task_records on close.
    _TRACKED_SPIDERS = ('Id', 'note_by_keyword', 'note_by_poster_id')

    def __init__(self):
        # NOTE(review): credentials are hard-coded; consider moving them to
        # Scrapy settings or environment variables.
        dbparams = {
            'host': GLOBAL_HOST,
            'port': 3306,
            'user': 'alvin',
            'password': '123456',
            'database': 'auto_scrapy',
            'charset': 'utf8mb4',
            'cursorclass': cursors.DictCursor  # return rows as dicts
        }
        # Async writes: adbapi runs each interaction on a thread-pool
        # connection and commits for us (the sync equivalent would be
        # cursor.execute() + connection.commit()).
        self.db_pool = adbapi.ConnectionPool('pymysql', **dbparams)
        self.keyword = None    # cached spider.keyword (note_by_keyword runs)
        self.poster_id = None  # cached spider.poster_id (note_by_poster_id runs)

    def process_item(self, item, spider):
        """Dispatch item['data'] to the appropriate asynchronous DB inserts.

        Returns the list of note dicts so later pipeline stages can use it.
        """
        spider.logger.info('Processing item through pipeline')
        items = item['data']

        # BUGFIX: `query` used to be left unbound for unknown spider names,
        # raising UnboundLocalError at the addErrback call below.
        query = None
        if spider.name == 'Id':
            query = self.db_pool.runInteraction(self.insert_note, items, spider)
        elif spider.name == 'note_by_keyword':
            if self.keyword is None:
                self.keyword = spider.keyword
            query = self._insert_note_then(self.insert_keyword_note, items, spider)
        elif spider.name == 'note_by_poster_id':
            if self.poster_id is None:
                self.poster_id = spider.poster_id
            query = self._insert_note_then(self.insert_publisher_note, items, spider)

        if query is not None:
            query.addErrback(self.handle_error, item, spider)
        return items

    def _insert_note_then(self, follow_up, items, spider):
        """Run insert_note, then `follow_up`, strictly in that order.

        Chaining with addCallback guarantees the link-table insert only starts
        after the notes insert has completed; the chained Deferred itself is
        returned (gatherResults over the same Deferred twice, as the original
        code did, was redundant).
        """
        deferred = self.db_pool.runInteraction(self.insert_note, items, spider)

        def _run_follow_up(_result):
            # insert_note succeeded; now perform the dependent insert.
            spider.logger.info('notes insert finished; running %s', follow_up.__name__)
            return self.db_pool.runInteraction(follow_up, items, spider)

        return deferred.addCallback(_run_follow_up)

    def insert_publisher_note(self, cursor, items, spider):
        """Link each note to the cached publisher in `publisher_notes`."""
        sql = "INSERT INTO publisher_notes (publisher_id, publisher_note_id) VALUES (%s, %s)"
        for item in items:
            cursor.execute(sql, (self.poster_id, item['note_id']))

    def insert_note(self, cursor, items, spider):
        """Insert one full row per note dict into the `notes` table."""
        # Loop-invariant statement, hoisted out of the loop (byte-identical
        # to the original SQL string).
        sql = 'INSERT INTO notes(note_id, note_title, note_content, img_url_list, video_url, duration, collected_count, comment_count, liked_count, share_count, tag_list, ip_location, user_id, avatar, nickname, formatted_last_update_time) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)'
        for item in items:
            values = (
                item['note_id'],
                item['note_title'],
                # Original did json.loads(json.dumps(...)): an identity
                # round-trip for a plain string / JSON-serializable value.
                # Assumes note_content is a string — TODO confirm upstream.
                item['note_content'],
                json.dumps(item['img_url_list']),
                item['video_url'],
                item['duration'],
                item['collected_count'],
                item['comment_count'],
                item['liked_count'],
                item['share_count'],
                json.dumps(item['tag_list'], ensure_ascii=False),
                item['ip_location'],
                item['user_id'],
                item['avatar'],
                item['nickname'],
                item['formatted_last_update_time']
            )
            spider.logger.info(f'Inserting values: {values}')
            cursor.execute(sql, values)
            spider.logger.info("Insert note successful")

    def insert_keyword_note(self, cursor, items, spider):
        """Link each note to the cached keyword in `keyword_notes`."""
        sql = "INSERT INTO keyword_notes (keyword, keyword_note_id) VALUES (%s, %s)"
        for item in items:
            cursor.execute(sql, (self.keyword, item['note_id']))

    def final_db_update(self, data, spider):
        """Mark the task row identified by data['job_id'] as finished.

        `data` must contain 'job_id', 'status', 'result' and 'end_time'.
        """
        def do_update(cursor):
            job_id = data['job_id']
            status = data['status']
            # result is stored as a JSON string column.
            result_json = json.dumps(data['result'], ensure_ascii=False)
            sql = '''
                     UPDATE task_records 
                     SET status = %s, end_time = %s, result = %s 
                     WHERE task_id = %s
                 '''
            cursor.execute(sql, (status, data['end_time'], result_json, job_id))
            spider.logger.info(f'Updated task {job_id} status to {status}')

        # BUGFIX: failures of this final update were previously silent —
        # attach an errback so they at least reach the spider log.
        self.db_pool.runInteraction(do_update).addErrback(
            lambda failure: spider.logger.error(failure)
        )

    def close_spider(self, spider):
        """On close of a tracked spider, record its completion in the DB.

        The three original branches were byte-identical, so they are
        consolidated into a single membership test.
        """
        if spider.name not in self._TRACKED_SPIDERS:
            return
        # Completion timestamp, formatted for the DATETIME-style column.
        formatted_now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        self.final_db_update({
            'job_id': spider.job_id,
            'status': 'finished',
            'result': spider.final_result,
            'end_time': formatted_now,
        }, spider)

    def handle_error(self, error, item, spider):
        """Log a failed database interaction; swallows the failure."""
        spider.logger.error('=' * 10 + "error" + '=' * 10)
        spider.logger.error(error)
        spider.logger.error('=' * 10 + "error" + '=' * 10)
