# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
import configparser

from itemadapter import ItemAdapter
import json,requests,time,os,random
from scrapy.utils.project import get_project_settings

class ProPipeline:
    """Scrapy item pipeline that reports each scraped item to an HTTP
    callback endpoint.

    The endpoint URL is read from ``config.ini`` (section ``Scrapy``,
    key ``Input_return_url``) once per crawl in :meth:`open_spider`.
    """

    def open_spider(self, spider):
        # Called once when the spider starts: parse config.ini a single
        # time instead of re-reading it for every item.
        cf = configparser.ConfigParser()
        cf.read('./config.ini', encoding='utf-8')
        self.input_return_url = cf.get("Scrapy", "Input_return_url")

    def process_item(self, item, spider):
        """POST the item's fields back to the configured endpoint.

        Always returns ``item`` so downstream pipelines still receive it
        (Scrapy contract: process_item must return an item or raise
        DropItem — never a bare bool).
        """
        params = {
            'rw': item['rw'],
            'pm': item['pm'],
            'id': item['id'],
            'bh': item['bh'],
            'zt': item['zt'],
        }
        print(params)
        print('Api执行后返回' + self.input_return_url)
        try:
            # NOTE(review): params= sends these as query-string arguments,
            # not a form body — confirm the endpoint expects that.
            # timeout keeps one hung callback from stalling the whole crawl.
            result = requests.post(self.input_return_url, params=params,
                                   timeout=10)
        except requests.RequestException as exc:
            # Best-effort reporting: log the failure but keep crawling.
            print('查询失败', exc)
            return item
        if result.text == "True":
            print('查询成功')
        else:
            print('查询失败')
        return item

    def close_spider(self, spider):
        # Called once when the spider finishes.
        print('爬虫结束...')
