import scrapy
{% if data_checked %}
from scraper.items import ScraperItem
from scraper.spiders.utils import get_selectors

class DataSpider(scrapy.Spider):
    """Spider generated from user-configured selectors.

    Each entry in ``data_items`` is (field_name, selector_type, selector);
    ``next_urls`` entries are (selector_type, selector) for pagination links.
    """
    name = 'data'
    {% if allowed_domains|length > 0 %}
    allowed_domains = {{ allowed_domains|safe }}
    {% endif %}
    start_urls = {{ start_urls|safe }}

    def parse(self, response):
        # Extract each configured field into its own list of values.
        {% for d in data_items %}
        {{ d[0] }} = get_selectors(response, '{{ d[1] }}', '{{ d[2] }}')
        {% endfor %}
        # Truncate every field list to the shortest one so zip() pairs
        # values row-by-row without silently dropping mismatched tails.
        min_len = min([{% for d in data_items %}len({{ d[0] }}),{% endfor %}])
        ls = list(zip(*[{% for d in data_items %}{{ d[0] }}[:min_len],{% endfor %}]))
        self.logger.debug('zipped rows: %s', ls)
        for _n in ls:
            # Pair field names with one row of extracted values.
            d = dict(zip([{% for d in data_items %}'{{ d[0] }}',{% endfor %}], _n))
            yield ScraperItem(**d)
        {% for d in next_urls %}
        urls = get_selectors(response, '{{ d[0] }}', '{{ d[1] }}')
        if urls:
            yield from response.follow_all(urls, callback=self.parse)
        {% endfor %}
{% endif %}

