#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Created on 2018-11-21 00:22:09
# Project: fangtianxia_ty_rsf

import re
import sys

from pyspider.libs.base_handler import *
# from pyspider.libs.header_switch import HeadersSelector
from spidermanager.header_switch import HeadersSelector

# Python 2 compatibility shim: force the process-wide default string
# encoding to UTF-8 so implicit str<->unicode coercions of Chinese page
# content don't raise UnicodeDecodeError.  `reload(sys)` is needed because
# site.py deletes `sys.setdefaultencoding` at startup.  On Python 3 the
# default encoding is already 'utf-8', so this branch is never entered.
defaultencoding = 'utf-8'
if sys.getdefaultencoding() != defaultencoding:
    reload(sys)
    sys.setdefaultencoding(defaultencoding)


class Handler(BaseHandler):
    """pyspider handler that scrapes second-hand housing (二手房) listings
    for Taiyuan from fang.com.

    Flow: on_start -> housing index (index_page) -> estate overview
    (detail_page) -> estate detail (detail_page2, returns the record).
    A randomly selected header set from HeadersSelector is threaded
    through every request via ``save={'my-header': header}``.
    """

    crawl_config = {
        "user_agent": "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",
        "timeout": 120,
        "connect_timeout": 60,
        "retries": 5,
        "fetch_type": 'js',
        "auto_recrawl": True,
    }

    @every(minutes=24 * 60)
    def on_start(self):
        """Daily entry point: seed the Taiyuan housing index page.

        The chosen header dict is stashed in ``save`` so callbacks can
        reuse the same headers for follow-up requests.
        """
        header_slt = HeadersSelector()
        header = header_slt.select_header()  # one header set per crawl run
        self.crawl('http://taiyuan.esf.fang.com/housing/', callback=self.index_page, validate_cert=False,
                   headers=header, fetch_type='js', save={'my-header': header})

    @config(age=10 * 24 * 60 * 60)
    def index_page(self, response):
        """Follow each estate title link and the 'next page' link."""
        header = response.save['my-header']
        for each in response.doc('.plotTit').items():
            # FIX: the original call omitted headers/save here, so
            # detail_page crashed on response.save['my-header']
            # (response.save is None when no ``save`` is passed).
            # Propagate both, consistent with every other crawl() call.
            self.crawl(each.attr.href, callback=self.detail_page, validate_cert=False,
                       fetch_type='js', headers=header, save={'my-header': header})
        # pyspider's crawl() accepts a list of URLs; here the (at most one)
        # pagination link '#PageControl1_hlk_next'.
        self.crawl([x.attr.href for x in response.doc('#PageControl1_hlk_next').items()], callback=self.index_page,
                   validate_cert=False, fetch_type='js', headers=header, save={'my-header': header})

    @config(priority=2)
    def detail_page(self, response):
        """From an estate overview page, follow the detail anchors."""
        header = response.save['my-header']
        for each in response.doc('#kesfxqxq_A01_03_01 > a').items():
            self.crawl(each.attr.href, callback=self.detail_page2, validate_cert=False, fetch_type='js',
                       headers=header)

    @config(priority=2)
    def detail_page2(self, response):
        """Scrape the final estate detail page and return the record.

        Returns a dict with the page url/title plus the parsed name,
        price lines, info/other attribute lines and traffic description.
        """
        # FIX: the original did ``except Exception: pass`` and then
        # referenced the possibly-unbound locals in the return dict,
        # raising NameError whenever parsing failed.  Pre-seed defaults
        # so a partial parse failure still yields a (partial) record.
        name = ''
        prices, infos, others, traffics = [], [], [], []
        try:
            name = response.doc('h1 > a').text()
            prices = [x.text() for x in response.doc('.con_left > .clearfix dd').items()]
            infos = [x.text() for x in response.doc('.clearfix > dd').items()]
            others = [x.text() for x in response.doc('.clearfix > dt').items()]
            traffics = [x.text() for x in response.doc('.floatl > dt').items()]
        except Exception:
            pass  # best-effort: keep whatever fields were parsed before the failure
        return {
            "url": response.url,
            "name": name,
            "title": response.doc('title').text(),
            "price": prices,
            "infos": infos,
            "others": others,
            "traffics": traffics,
        }
