# -*- coding: utf-8 -*-
import scrapy
from job.items import JobItem


class ZhilianSpider(scrapy.Spider):
    """Spider that scrapes Zhilian (智联招聘) job listings for one city.

    The target city can be supplied on the command line with
    ``scrapy crawl zhilian -a place=<city>``; it defaults to Hangzhou.
    NOTE(review): the start URL is a hard-coded internal IP
    (likely a local test mirror) — confirm before deploying.
    """

    name = 'zhilian'
    # Empty list disables offsite filtering, so every domain is allowed.
    allowed_domains = []

    def __init__(self, place=None, *args, **kwargs):
        """Store the target city.

        :param place: city name passed via ``-a place=...``; defaults
            to '杭州' (Hangzhou) when not given.
        """
        super().__init__(*args, **kwargs)
        self.place = place if place is not None else '杭州'
        # Use the spider's logger instead of print() so the message
        # shows up in Scrapy's normal log output.
        self.logger.info('target place: %s', self.place)

    def start_requests(self):
        """Yield the initial request(s) for the crawl."""
        urls = [
            'http://10.31.161.52/1.html',
        ]
        for url in urls:
            yield scrapy.Request(url, callback=self.parse)

    def parse(self, response):
        """Extract one JobItem per listing row on the results page.

        :param response: the downloaded listings page.
        :yields: ``JobItem`` with the job title in ``item['name']``
            (``None`` when the selector matches nothing).
        """
        self.logger.debug('response status: %s', response.status)
        jobs = response.css('#newlist_list_content_table > table')
        for job in jobs:
            item = JobItem()
            # .get() is the modern Scrapy alias for extract_first();
            # both return None when no node matches.
            item['name'] = job.css("td.zwmc > div > a::text").get()
            yield item


