import scrapy
from scrapy_splash import SplashRequest


class JsSpider(scrapy.Spider):
    """Spider that fetches JavaScript-rendered quote pages through Splash
    and saves each rendered page's HTML to a local file.
    """

    name = 'js'

    def start_requests(self):
        """Yield a SplashRequest for each JS-rendered quotes page.

        SplashRequest routes the fetch through the Splash rendering
        service so the page's JavaScript runs before ``parse`` sees
        the response. The default callback (``parse``) is used.
        """
        urls = ['http://quotes.toscrape.com/js/page/1/',
                'http://quotes.toscrape.com/js/page/2/',
                'http://quotes.toscrape.com/js/page/3/']
        for url in urls:
            yield SplashRequest(url)

    def parse(self, response):
        """Write the rendered HTML of *response* to ``page_<n>.html``.

        ``<n>`` is the last path segment of the request URL (the page
        number for these quotes.toscrape.com URLs).
        """
        num = response.url.rstrip('/').split('/')[-1]
        # Context manager guarantees the file is closed even if the
        # write raises; explicit UTF-8 avoids depending on the platform
        # default encoding (which can fail on non-ASCII page content).
        with open('page_%s.html' % num, 'w', encoding='utf-8') as ofile:
            ofile.write(response.text)

