import scrapy
from scrapy import Request
import time


class UaSpider(scrapy.Spider):
    """Fetch a single Lagou job page and dump the response body to disk.

    Useful for inspecting what HTML the server returns for our
    User-Agent (e.g. to check for anti-bot interstitials).
    """
    name = 'ua'
    start_urls = ['https://www.lagou.com/jobs/3256114.html']

    def parse(self, response):
        # Use a context manager so the file handle is closed even if the
        # write raises; the original leaked the handle. Pin the encoding to
        # UTF-8 so non-ASCII page content cannot raise UnicodeEncodeError
        # on platforms whose locale default is not UTF-8.
        with open('xx.html', 'w', encoding='utf-8') as f:
            f.write(response.text)


class UaSpiderSlow(scrapy.Spider):
    """Load-test spider: enqueue 2000 near-identical requests.

    Each request hits the same quotes page with a distinct ``round``
    query parameter so every URL is unique (avoids dupefilter drops);
    the round index travels in ``meta`` so ``parse`` can report it.
    """
    name = 'uaslow'

    def start_requests(self):
        base = 'http://quotes.toscrape.com/page/1/?round=%s'
        for round_no in range(2000):
            print('yielding %s request' % round_no)
            yield Request(base % round_no, meta={'i': round_no})

    def parse(self, response):
        # Echo which queued round this response belongs to.
        round_no = response.meta['i']
        print('processing page %s' % round_no)


class ManyJobSpider(scrapy.Spider):
    """Scheduler-stress spider: queue many requests to one page.

    Generates 2000 requests distinguished only by a ``round`` query
    parameter (keeps each URL unique past the dupefilter) and tags each
    with its index via ``meta`` for traceability in ``parse``.
    """
    name = 'many'

    def start_requests(self):
        for idx in range(2000):
            print('yielding %s request' % idx)
            url = 'http://quotes.toscrape.com/page/1/?round=%s' % idx
            yield Request(url, meta={'i': idx})

    def parse(self, response):
        # Report completion of the request identified by its meta index.
        print('processing page %s' % response.meta['i'])
