# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html

import codecs
import json
import codecs
import pymysql



class ScrapyNewsPipeline(object):
    """Export scraped items to ``news.json`` as a valid JSON array.

    Bug fixes vs. the original:
    - ``close_spier`` was a typo for Scrapy's ``close_spider`` hook, so the
      closing ``]`` was never written and the file handle leaked.
    - A comma was written *after* every item, leaving a trailing comma that
      made the output invalid JSON. Separators are now written *before*
      every item except the first.
    """

    def __init__(self):
        # codecs.open lets us pin the encoding explicitly and avoid
        # platform-default encoding surprises.
        self.file = codecs.open("news.json", "w", encoding="utf-8")
        self.file.write("[\n")
        # True until the first item is written; used to decide whether a
        # ",\n" separator must precede the next item.
        self.first_item = True

    def process_item(self, item, spider):
        """Append *item* to the JSON array and pass it to the next pipeline."""
        if not self.first_item:
            self.file.write(",\n")
        self.first_item = False
        # ensure_ascii=False keeps non-ASCII (e.g. Chinese) text readable.
        self.file.write(json.dumps(dict(item), ensure_ascii=False))
        # Scrapy convention: return the item so downstream pipelines run.
        return item

    def close_spider(self, spider):
        """Scrapy hook: terminate the JSON array and release the file."""
        self.file.write("\n]")
        self.file.close()

class ScrapyNewsMysqlPipeline(object):
    """Persist scraped news items into the ``ll_article`` MySQL table.

    Improvements vs. the original:
    - ``charset='utf8mb4'`` on the connection so non-latin1 (e.g. Chinese)
      titles/content are stored correctly.
    - A failed INSERT now rolls back and re-raises instead of leaving the
      connection in a dirty transaction state.
    - The cursor is closed alongside the connection in ``close_spider``.

    NOTE(review): credentials are hard-coded; consider moving them to
    Scrapy settings (``crawler.settings``) — kept as-is to preserve the
    zero-argument constructor callers rely on.
    """

    # Parameterized statement: values are bound by the driver, never
    # string-formatted into the SQL (prevents SQL injection).
    INSERT_SQL = (
        "insert into ll_article"
        "(ll_title,ll_time,ll_content,ll_cid,ll_rnum,ll_uid,ll_state,ll_pics)"
        " VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
    )

    def __init__(self):
        self.conn = pymysql.connect(
            host='127.0.0.1',
            user='root',
            passwd='123456',
            db='ll_newscms',
            charset='utf8mb4',
        )
        self.cursor = self.conn.cursor()

    def process_item(self, item, spider):
        """Insert *item* into ``ll_article``; roll back on failure."""
        params = (
            item['ll_title'],
            item['ll_time'],
            item['ll_content'],
            item['ll_cid'],
            item['ll_rnum'],
            item['ll_uid'],
            item['ll_state'],
            item['ll_pics'],
        )
        try:
            self.cursor.execute(self.INSERT_SQL, params)
            self.conn.commit()
        except pymysql.MySQLError:
            # Undo the partial transaction, then surface the error so
            # Scrapy logs it instead of silently losing items.
            self.conn.rollback()
            raise
        return item

    def close_spider(self, spider):
        """Scrapy hook: release the cursor and the DB connection."""
        self.cursor.close()
        self.conn.close()