# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymysql

class ZhihuPipeline(object):
    """Scrapy item pipeline that persists scraped Zhihu topics to MySQL.

    Opens a single pymysql connection when the spider starts and closes it
    when the spider finishes, instead of reconnecting for every item.
    """

    def open_spider(self, spider):
        # One connection for the whole crawl; a per-item connect (as the
        # original did) is wasteful and was never closed on error.
        self.connect = pymysql.connect(
            host="127.0.0.1",
            user="root",
            passwd="root",
            db="zhihu",
            use_unicode=True,
            charset="utf8",
        )

    def close_spider(self, spider):
        self.connect.close()

    def process_item(self, item, spider):
        """Insert each scraped topic into the `topic3` table.

        The item's "title", "href", "content" and "writer" fields are
        parallel lists; rows are aligned on "title", and a missing value
        in any of the other lists defaults to "".

        Returns the item unchanged so later pipelines still see it.
        """
        def field_at(name, i):
            # Per-index guard: the original compared whole-list lengths,
            # which dropped ALL values whenever a list was even one short.
            values = item[name]
            return values[i] if i < len(values) else ""

        rows = [
            (title,
             field_at("writer", i),
             field_at("href", i),
             field_at("content", i))
            for i, title in enumerate(item["title"])
        ]

        with self.connect.cursor() as cursor:
            # Parameterized query: the old string-concatenated SQL was
            # open to injection and broke on any quote in scraped text.
            cursor.executemany(
                "insert into topic3(title,writer,href,content) "
                "values(%s,%s,%s,%s)",
                rows,
            )
        # connect.query() never committed, so on InnoDB the inserts were
        # silently rolled back when the connection closed.
        self.connect.commit()
        return item
