# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import pymysql


# useful for handling different item types with a single interface

class SchoolPipeline:
    """Scrapy item pipeline that persists each scraped news item.

    For every item it (1) writes the article text to a ``.txt`` file under
    ``./media/`` and (2) inserts a row into the ``app01_schoolnews`` MySQL
    table. The MySQL connection is opened once per spider run.
    """

    conn = None      # pymysql connection, created in open_spider
    cursor = None    # most recent cursor (guarded close in close_spider)
    new_Num = 0      # number of rows successfully inserted this run

    def open_spider(self, spider):
        """Open the MySQL connection when the spider starts."""
        print("网站信息开始收集...")
        self.conn = pymysql.Connect(
            user='root',
            password='root',
            host='localhost',
            port=3306,
            database='yu'
        )

    def process_item(self, item, spider):
        """Write the item to disk and insert it into MySQL.

        Returns the item unchanged so later pipelines can keep processing
        it (Scrapy requires process_item to return the item).
        """
        self.cursor = self.conn.cursor()

        # Context manager guarantees the file handle is closed; the original
        # kept it open on self.fp and leaked one descriptor per item.
        with open('./media/' + item['school'] + ' ' + item['Title'] + '.txt',
                  'w', encoding='utf-8') as fp:
            fp.write(item["Text"] + "\n 栏目：综合新闻 " + " 新闻来源：" + item['URL'] + " 发布时间：" + item["Time"])
        print("txt写入成功")

        try:
            # Parameterized query: the driver escapes the values, which
            # prevents SQL injection and breakage on quotes in scraped text.
            query = ("insert into app01_schoolnews "
                     "values (%s,%s,%s,%s,%s,%s,%s,%s,%s)")
            values = (
                item['school'], item['Time'], item['Col'], item['Title'],
                item['Text'], item['URL'], item['Provenance'], item['Heat'],
                item['FWLCount'])
            self.cursor.execute(query, values)
            self.conn.commit()
            print("插入数据库成功...")
            self.new_Num += 1
        except Exception as e:
            # Best-effort insert: log and continue so one bad row does not
            # abort the whole crawl (original behavior preserved).
            print("mysql连接异常...", e)
        finally:
            # Close the per-item cursor; pymysql cursors are cheap but
            # should not accumulate. close() is idempotent, so the extra
            # guard in close_spider stays safe.
            self.cursor.close()
        return item

    def close_spider(self, spider):
        """Close DB resources and report how many rows were inserted."""
        # Guards prevent AttributeError when the spider yielded no items
        # (cursor never created) or the connection failed to open.
        if self.cursor is not None:
            self.cursor.close()
        if self.conn is not None:
            self.conn.close()
        print("关闭数据库连接。共入库%d条数据..."%self.new_Num)