#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Project: spd-sxmcc
"""
@author: lyndon
@time Created on 2018/11/19 22:53
@desc
"""
import re
import sys

from pyspider.libs.base_handler import *
from laccelllatitude.header_switch import HeadersSelector
# from pyspider.libs.header_switch import HeadersSelector

# Python 2-only compatibility shim: force the process-wide default string
# encoding to UTF-8 so implicit str<->unicode coercions of Chinese page text
# don't raise UnicodeDecodeError. `reload` and `sys.setdefaultencoding` do not
# exist as builtins in Python 3 — this whole file targets Python 2 (pyspider).
defaultencoding = 'utf-8'
if sys.getdefaultencoding() != defaultencoding:
    reload(sys)
    sys.setdefaultencoding(defaultencoding)


class Handler(BaseHandler):
    """Pyspider crawler for Taiyuan new-house listings on fang.com.

    Flow: on_start (daily) -> index_page (listing pages) -> detail_page
    (estate page) -> detail_page2 (final record extraction).
    """

    crawl_config = {
        "user_agent": "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",
        "timeout": 120,
        "connect_timeout": 60,
        "retries": 5,
        # "fetch_type": 'js',
        # "auto_recrawl": True,
    }

    @every(minutes=24 * 60)
    def on_start(self):
        """Seed the crawl with listing index pages; scheduled once per day."""
        header_slt = HeadersSelector()
        header = header_slt.select_header()  # obtain a fresh randomized header
        # header["X-Requested-With"] = "XMLHttpRequest"
        # NOTE: range(1, 2) crawls only page 1; widen the range to crawl more pages.
        for i in range(1, 2):
            orig_href = 'http://taiyuan.newhouse.fang.com/house/s/b9%s/?ctm=1.taiyuan.xf_search.page.%s' % (i, i + 1)
            # Headers must be passed to crawl() per-request; cookies come back
            # on response.cookies.
            self.crawl(orig_href, callback=self.index_page, validate_cert=False,
                       headers=header)

    @config(age=24 * 60 * 60)
    def index_page(self, response):
        """Collect estate detail-page links from a listing page and queue them."""
        urllst = [each.attr.href for each in response.doc('* .nlcd_name > a').items()]
        print(urllst)
        for url in urllst:
            self.crawl(url, callback=self.detail_page, validate_cert=False)

    @config(priority=2)
    def detail_page(self, response):
        """Follow every '.red01' link onward to the full detail page."""
        for each in response.doc('.red01').items():
            self.crawl(each.attr.href, callback=self.detail_page2, validate_cert=False)

    @config(priority=2)
    def detail_page2(self, response):
        """Extract the final record (price, equity, address, developer, plan).

        Returns a dict; scraping is best-effort — any field whose selector
        fails keeps its empty default instead of aborting the record.
        """
        # BUG FIX: previously only `developer` was pre-initialized, so if any
        # selector in the try-block raised, the remaining names were unbound
        # and the return statement itself raised NameError — the broad
        # `except Exception: pass` gave no protection at all.
        price = ""
        equity = ""
        address = []
        developer = []
        planinfo = []
        print('------------------')
        try:
            price = response.doc('.main-info-price > em').text()
            equity = response.doc('.cqnx_512 > p').text()
            address = response.doc('.list-right-text').text().split(' ')
            developer = response.doc('.list-right-text > a').text().split(' ')
            planinfo = response.doc('.list-right').text().split(' ')
        except Exception:
            pass  # best-effort: keep empty defaults for whatever failed

        return {
            "url": response.url,
            "title": response.doc('h1 > a').text(),
            "price": price,
            "equity": equity,
            "developer": developer,
            "address": address,
            "planinfo": planinfo,
        }


