# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
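#
# For example (the priority value 300 is an arbitrary choice; the dotted
# path matches this project's zhihu/pipelines.py):
#
#     ITEM_PIPELINES = {
#         'zhihu.pipelines.ZhihuPipeline': 300,
#     }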
import pymysql

from zhihu.items import AnswerItem, ArticleItem

class ZhihuPipeline(object):

    def __init__(self):
        # Local MySQL connection; utf8mb4 lets MySQL store the Chinese
        # text scraped from Zhihu without encoding errors.
        self.conn = pymysql.connect(host="127.0.0.1", user="root",
                                    passwd="root", db="zhihu",
                                    charset="utf8mb4")
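    # The INSERT statements below assume tables roughly like the following
    # (hypothetical DDL, inferred only from the column lists used in the
    # queries; adjust types and sizes to the real schema):
    #
    #     CREATE TABLE answer(qtitle TEXT, qcontent TEXT,
    #                         apeople VARCHAR(255), acontent TEXT);
    #     CREATE TABLE article(title VARCHAR(255), people VARCHAR(255),
    #                          content TEXT);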
    def process_item(self, item, spider):
        # Decide whether the item is an AnswerItem or an ArticleItem and
        # insert it into the matching table.
        if isinstance(item, AnswerItem):
            question_title = item['question_title'][0]
            question_content = item['question_content'][0]
            answer_people = item['answer_people'][0]
            answer_content = item['answer_content'][0]
            # Parameterized query: safe against quotes in the scraped text
            # and against SQL injection.
            sql = ("insert into answer(qtitle, qcontent, apeople, acontent) "
                   "values (%s, %s, %s, %s)")
            with self.conn.cursor() as cursor:
                cursor.execute(sql, (question_title, question_content,
                                     answer_people, answer_content))
            self.conn.commit()

        elif isinstance(item, ArticleItem):
            title = item['title']
            people = item['people']
            content = item['content']
            sql = ("insert into article(title, people, content) "
                   "values (%s, %s, %s)")
            with self.conn.cursor() as cursor:
                cursor.execute(sql, (title, people, content))
            self.conn.commit()

        # Return the item so any later pipelines can process it too.
        return item

    def close_spider(self, spider):
        # Scrapy calls this once when the spider finishes; release the
        # database connection here.
        self.conn.close()
