# -*- coding: utf-8 -*-
from crawler import settings
from crawler.items import *
from crawler.core.service_owner import *
from crawler.core.dict_url_city import *
from crawler.core.dict_xpath import *
import scrapy, re, logging, json, os, uuid
import datetime

class EsfByOwner(scrapy.Spider):
    """
    Spider that crawls owner-published (non-agency) second-hand house
    listings from 58.com list pages and yields one EsfOfOwner item per
    listing row.

    @author Hongv
    """
    name = "ershoufang.owner.58.list"
    custom_settings = settings.MIDDLEWARES_OWNER_FIRST

    def __init__(self, urls_tag='ershoufang_owner_58_sd_jn', *args, **kwargs):
        """
        :param urls_tag: tag handed to EsfOwnerService to select which set
            of start URLs to crawl (default looks like Shandong/Jinan).
        """
        super(EsfByOwner, self).__init__(*args, **kwargs)
        # Service that supplies the start URLs for the configured tag.
        self.esf_service = EsfOwnerService(urls_tag=urls_tag)

    def start_requests(self):
        """Yield one unfiltered request per configured start URL."""
        start_urls = self.esf_service.get_crawler_urls()
        for url in start_urls:
            meta = {
                'download_timeout': self.custom_settings['DOWNLOAD_TIME'],
                # Everything before the '0/' path segment is used as the
                # referer for anti-crawler friendliness.
                'referer': url.split('0/')[0]
            }
            yield scrapy.Request(url, dont_filter=True, meta=meta, callback=self.list_parse)

    def list_parse(self, response):
        """
        Parse a 58.com list page and yield EsfOfOwner items.

        Requests redirected to callback.58.com landed on the anti-crawler
        verification page; those are logged and skipped.

        :raises Exception: when the response sub-domain cannot be mapped
            to a known city via URL_TO_CITY.
        """
        if 'callback.58.com' in response.url:
            # Captcha/verification page: record the original URL(s) so the
            # block can be investigated, then bail out.
            redirect_urls = response.meta.get('redirect_urls', '')
            logging.info('\n--->\n url: {0}\n NEED VERIFY \n<---'.format(redirect_urls))
            return

        # Resolve the Chinese city name from the sub-domain (e.g. jn.58.com).
        base_url = re.search(r'\w+\.58\.com', response.url).group()
        city_belong = URL_TO_CITY.get(base_url, '')
        if not city_belong:
            raise Exception('Not Found City Belong')

        items = response.xpath(ESF_OWNER_58['items'])
        for it in items:
            house_info = EsfOfOwner()
            # Pre-bind so the except handler can always log it, even when
            # the very first extraction inside the try raises.
            house_id = None
            try:
                house_id = it.xpath(ESF_OWNER_58['item_id']).extract_first()
                title = it.xpath(ESF_OWNER_58['title']).extract_first()
                baseinfo = it.xpath(ESF_OWNER_58['baseinfo']).extract()
                address = it.xpath(ESF_OWNER_58['address']).extract()
                total_price = it.xpath(ESF_OWNER_58['total_price']).extract_first()
                unit_price = it.xpath(ESF_OWNER_58['unit_price']).extract_first()
                publisher = it.xpath(ESF_OWNER_58['publisher']).extract()
                published_time = it.xpath(ESF_OWNER_58['published_time']).extract_first()
                detail_link = it.xpath(ESF_OWNER_58['detail_link']).extract_first()

                house_info['city_belong'], house_info['web_belong'] = city_belong, ESF_OWNER_58['web']
                house_info['house_id'] = re.search(r'esf_id:(\d+),', house_id).group(1)
                house_info['title'] = title.replace('\xa0', '')
                # Layout string like "3室2厅1卫" (rooms/halls/toilets); the
                # second alternative matches rows that omit the toilet count.
                for m in re.findall(r'(\d+)室(\d+)厅(\d+)卫|(\d+)室(\d+)厅', baseinfo[0]):
                    house_info['room'] = m[0] if m[0] else m[3]
                    house_info['hall'] = m[1] if m[1] else m[4]
                    house_info['toilet'] = m[2] if m[2] else ''
                # Built-up area, either "89.5㎡" or "89㎡".
                for m in re.findall(r'(\d+\.\d+)㎡|(\d+)㎡', baseinfo[1]):
                    house_info['builtuparea'] = m[0] if m[0] else m[1]
                house_info['orientation'] = baseinfo[-2].replace('\r\n', '') if len(baseinfo) >= 4 else ''
                house_info['floor'] = baseinfo[-1].replace('\r\n', '') if '层' in baseinfo[-1] else ''
                house_info['community_name'], house_info['district_belong'] = address[0], address[1]
                house_info['total_price'], house_info['unit_price'], house_info['publisher_name'] = total_price, unit_price, publisher[-1]
                house_info['published_time'] = published_time if published_time else ''
                # Detail links may be protocol-relative ("//..."): prepend the scheme.
                house_info['detail_url'] = detail_link if detail_link.startswith('http') else 'https:%s' % detail_link
                house_info['created_time'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")

                if not house_info['title'] or not house_info['publisher_name'] \
                        or not house_info['total_price'] or not house_info['unit_price']:
                    # No exception is active here, so use warning() —
                    # logging.exception() would append a bogus
                    # "NoneType: None" pseudo-traceback to the record.
                    logging.warning('\n*** ***\n Info Not Complete\n*** ***')
                yield house_info
            except Exception as e:
                logging.exception('\n*** ***\n ershoufang 58 owner \n {0}\n Exception {1}\n*** ***'.format(house_id, str(e)))