# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
import logging
import traceback
from email.mime import image

from bs4 import BeautifulSoup, Comment
from tuiqiuspiderPro.utils.db_util import MysqlUtil
from tuiqiuspiderPro.utils.saveImge import SaveImg
from tuiqiuspiderPro.utils.base import delattrs


class TuiqiuspiderproPipeline:
    """Default no-op pipeline generated by `scrapy startproject`.

    It performs no processing; every item is handed on unchanged to the
    next enabled pipeline stage.
    """

    def process_item(self, item, spider):
        # Nothing to do — pass the item through untouched.
        return item


class MySqlPoolPipeline(object):
    """Persist scraped news items to MySQL.

    Responsibilities:
    - de-duplicate articles by title before inserting;
    - localise every embedded image via ``SaveImg`` and rewrite the HTML
      to point at the local copy;
    - resolve (or lazily create) the tag id in ``news_tag``.
    """

    # Connection pool, created per spider run in open_spider().
    pool = None

    def __init__(self):
        pass

    def open_spider(self, spider):
        # Acquire one pooled MySQL connection handle for the whole run.
        self.pool = MysqlUtil()

    def process_item(self, item, spider):
        """Insert `item` into the `news` table unless a row with the same
        title already exists.

        Always returns the item so downstream pipelines still receive it
        (Scrapy's pipeline contract — the original returned None, which
        would have fed None to any later pipeline stage).

        :type item: object
        """
        try:
            sql_select = """select news_id from news where title = %(new_title)s"""
            params_select = {'new_title': item['title']}
            flag = self.pool.get_one(sql_select, params_select)
            if flag:
                logging.info('The record already exists:{%s,title:%s,url:%s}', flag, item['title'], item['source_href'])
                return item
            sql_insert = """
            insert into news(title,author,publish_time,content,tag,source,source_href)
            values(%(title)s,%(author)s,%(publish_time)s,%(content)s,%(tag)s,%(source)s,%(source_href)s)
             """
            params = {'title': item['title'],
                      'author': item['author'],
                      'publish_time': item['publish_time'],
                      # Content is stored with images already localised.
                      'content': self.save_images(item['content'], spider),
                      'tag': self.get_tag_id(item['tag']),
                      'source': item['source'],
                      'source_href': item['source_href']
                      }
            self.pool.insert_one(sql_insert, params)
            self.pool.end("commit")
        except Exception as e:
            # Best-effort pipeline: log, dump the traceback, roll back,
            # and still pass the item along rather than kill the crawl.
            logging.error('Insert failed:[%s]', e)
            traceback.print_exc()
            self.pool.end("rollback")
        return item

    def get_tag_id(self, tag):
        """Return the id of `tag` in `news_tag`, inserting it if absent.

        Relies on ``insert_one`` returning the new row's auto-increment id
        (presumably lastrowid — TODO confirm against MysqlUtil).
        """
        row = self.pool.get_one("SELECT id FROM news_tag WHERE name=%s", tag)
        if row:
            return row['id']
        return self.pool.insert_one("INSERT INTO news_tag (name) values (%s)", tag)

    def close_spider(self, spider):
        # Release the pooled connection(s) when the spider finishes.
        self.pool.dispose()

    def save_images(self, content, spider):
        """Localise every image referenced in `content` and return the
        cleaned HTML.

        Fixes over the previous version:
        - the old loop ``return``-ed on its first iteration, so only the
          FIRST image of an article was ever downloaded; now all images
          are processed;
        - the fallback branch subscripted the accidentally imported
          ``email.mime.image`` module (``image['src']`` — a TypeError at
          runtime) instead of the current ``<img>`` tag;
        - articles with no images used to fall off the loop and return
          None; now the (cleaned) soup is always returned.
        """
        soup = BeautifulSoup(content, 'lxml')
        # Strip HTML comments so commented-out markup is not persisted.
        comments = soup.find_all(text=lambda text: isinstance(text, Comment))
        for comment in comments:
            comment.extract()
        for img in soup.find_all('img'):
            if spider.name == "dongqiudi":
                # dongqiudi keeps the real URL in `orig-src`, with a
                # resize query string we drop before downloading.
                orig_link = str(img.attrs['orig-src']).split('?')[0]
                local_link = SaveImg(orig_link)
                del img.attrs['orig-src']
                img.attrs['data-src'] = local_link
            elif spider.name in ("sina", "wangyi"):
                # These sites emit protocol-relative URLs ("//host/...").
                img['src'] = SaveImg("http:" + img['src'])
            else:
                img['src'] = SaveImg(img['src'])
        if spider.name == "sina":
            # sina content additionally has unwanted attributes stripped
            # and ideographic spaces (U+3000) removed.
            return str(delattrs(soup)).replace(chr(0x3000), "")
        return soup