# Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html

from scrapy import signals
import random
import csv


class WyRedisSpiderMiddleware:
    """Spider middleware that opens CSV output files when the spider starts
    and closes them when the spider finishes.

    The writer/file handles are attached to the spider instance
    (``spider.file1`` / ``spider.writer1``) so the spider's parse callbacks
    can write rows directly.
    """

    @classmethod
    def from_crawler(cls, crawler):
        """Build the middleware and hook both lifecycle signals.

        BUG FIX: ``spider_closed`` was previously never connected, so the
        CSV files were never closed (buffered rows could be lost).
        """
        s = cls()
        crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
        crawler.signals.connect(s.spider_closed, signal=signals.spider_closed)
        return s

    def spider_opened(self, spider):
        # Hospital output kept for reference; currently disabled.
        # self.fieldnames = ['province','city','name','address','telephone', 'hos_level', 'remarks', 'order_num','url']
        # self.filename = 'wy_hos.csv'
        # spider.file = open(self.filename, 'a', encoding='utf-8', newline='')
        # spider.writer = csv.DictWriter(spider.file, self.fieldnames)
        # spider.writer.writeheader()

        # Doctor output: open in append mode; newline='' is required by the
        # csv module to avoid blank lines on Windows.
        self.fieldnames1 = ['name','hospital', 'depart', 'disease_list', 'goodat', 'remarks','url']
        self.filename1 = 'wy_doc.csv'
        spider.file1 = open(self.filename1, 'a', encoding='utf-8', newline='')
        spider.writer1 = csv.DictWriter(spider.file1, self.fieldnames1)
        spider.writer1.writeheader()

    def spider_closed(self, spider):
        """Close any output files that spider_opened actually created.

        BUG FIX: the old code unconditionally closed ``spider.file``, which
        is never opened (that branch is commented out above), raising
        AttributeError. Guard with getattr so only existing handles close.
        """
        for attr in ('file', 'file1'):
            handle = getattr(spider, attr, None)
            if handle is not None:
                handle.close()


class WyRedisDownloaderMiddleware:
    """Downloader middleware that assigns a random User-Agent per request.

    Reads the pool of UA strings from the ``USER_AGENTS`` setting.
    """

    def __init__(self, user_agent):
        # Expected to be a non-empty sequence of User-Agent strings.
        self.user_agent = user_agent

    @classmethod
    def from_crawler(cls, crawler):
        """Instantiate from crawler settings (``USER_AGENTS`` list)."""
        return cls(
            user_agent=crawler.settings.get('USER_AGENTS')
        )

    def process_request(self, request, spider):
        """Set a randomly picked User-Agent header on the outgoing request.

        BUG FIX: ``random.choices`` returns a *list* (e.g. ``['UA']``), so
        the header was being set to a list instead of a single UA string.
        ``random.choice`` returns one element.
        """
        agent = random.choice(self.user_agent)
        request.headers['User-Agent'] = agent
        # Returning None lets Scrapy continue processing this request.
        return None
