# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
import re
import pymysql
from .settings import *
from lxml import html
from lxml.html.clean import Cleaner
class BaiduzhidaoPipeline:
    """Clean scraped Baidu Zhidao HTML content before it is stored.

    First strips unwanted tags, scripts and styles with lxml's ``Cleaner``,
    then applies a series of regex passes (``re_repl``) that remove long
    alphanumeric junk tokens, the '展开全部' filler text, stray digit/letter
    runs sandwiched between CJK characters, blank-line runs, and duplicated
    '#####' section separators.
    """

    # Tags removed entirely from the scraped HTML.
    _REMOVE_TAGS = frozenset(['a', 'font', 'div', 'span', 'img'])

    # Patterns compiled once at class-definition time instead of on every call.
    _RE_TOKEN = re.compile(r"[0-9a-z]{10,100}")            # long alphanumeric junk (ids/hashes)
    _RE_BLANK = re.compile(r'^$\n*', flags=re.MULTILINE)   # collapse runs of blank lines
    _RE_DIGITS = re.compile(r"([\u4e00-\u9fa5]{1}|，)\d{4}([\u4e00-\u9fa5]{1}|，)")
    _RE_LETTERS = re.compile(r"([\u4e00-\u9fa5]{1}|，)[a-z]{2,4}([\u4e00-\u9fa5]{1}|，)")
    # NOTE(review): the char class [\s|#####] matches whitespace, '|' and '#'
    # (duplicates are redundant) — kept as-is to preserve matching behavior.
    _RE_SEP = re.compile(r"#####[\s|#####]*#####")

    def re_repl(self, content):
        """Apply the regex clean-up passes to *content* and return the result."""
        content = self._RE_TOKEN.sub("", content).replace('展开全部', '')
        content = self._RE_BLANK.sub('\n', content)
        # Drop 4-digit numbers / 2-4 letter latin runs between CJK chars or '，',
        # keeping the surrounding characters.
        content = self._RE_DIGITS.sub(r"\1\2", content)
        content = self._RE_LETTERS.sub(r"\1\2", content)
        # Collapse repeated '#####' separators into a single one.
        content = self._RE_SEP.sub(r"#####", content)
        return content

    def process_item(self, item, spider):
        """Strip unwanted HTML from item['content'] and normalise the text."""
        # Build the Cleaner once per pipeline instance instead of per item
        # (lazy so lxml is only touched when items actually flow through).
        cleaner = getattr(self, '_cleaner', None)
        if cleaner is None:
            cleaner = Cleaner(remove_tags=self._REMOVE_TAGS)
            cleaner.javascript = True  # drop <script> elements / javascript: URLs
            cleaner.style = True       # drop <style> elements
            self._cleaner = cleaner
        cleaned = cleaner.clean_html(item['content'])
        item['content'] = self.re_repl(cleaned)
        return item


class BAIDUYSQLPIPLINES:
    """Persist scraped items into a MySQL table via pymysql.

    Connection parameters (MYSQL_HOST, MYSQL_USER, MYSQL_PASSWORD, MYSQL_DB,
    MYSQL_CHAR) come from the project's settings module via the star import
    at the top of this file.
    """

    def open_spider(self, spider):
        """Open the MySQL connection and cursor when the spider starts."""
        self.db = pymysql.connect(
            host=MYSQL_HOST,
            user=MYSQL_USER,
            password=MYSQL_PASSWORD,
            database=MYSQL_DB,
            charset=MYSQL_CHAR,
        )
        self.cursor = self.db.cursor()
        print('打开数据库连接')

    def process_item(self, item, spider):
        """Insert one item into the `db` table.

        Uses a parameterized query (pymysql escapes the values). On failure
        the transaction is rolled back and the error re-raised so Scrapy can
        report it, instead of leaving the connection in a broken state.
        """
        insert_sql = '''INSERT INTO db (title,keyword,content,url,sanjiceshi) VALUES(%s,%s,%s,%s,%s)'''
        values_list = [item['title'], item['keyword'], item['content'], item['url'], item['sanjiceshi']]
        try:
            self.cursor.execute(insert_sql, values_list)
            self.db.commit()
        except pymysql.MySQLError:
            # Undo the partial transaction so later items can still insert.
            self.db.rollback()
            raise
        print(item['title'] + '入库成功')
        return item

    def close_spider(self, spider):
        """Release the cursor and connection when the spider closes."""
        self.cursor.close()
        self.db.close()
        print('关闭数据库连接')