import scrapy


class MiddleUseSpider(scrapy.Spider):
    """Practice spider for learning downloader middleware.

    Used to exercise UA spoofing and proxy rotation, which are applied
    in the project's downloader middleware rather than in this spider.
    """

    name = 'middle_use'
    # allowed_domains = ['www.xxx.com']
    start_urls = ['http://www.xxx.com/']

    def parse(self, response):
        """Dump the fetched page body to id.html so the result of the
        middleware (UA / proxy) can be inspected by eye."""
        with open('id.html', 'w', encoding='utf-8') as out:
            out.write(response.text)
