# coding: utf-8

import bs4
import scrapy
from scrapy.contrib.spiders.crawl import CrawlSpider, Rule
from ..items import LejuItem


class LejuSpider(CrawlSpider):
    """Scrape new-home (楼盘) listings from leju.com, rule set 2.

    Flow: ``parse`` fans out one request per city from a hard-coded city
    table -> ``parse_cityPage_url`` reads the pagination box and requests
    every result page -> ``parse_city_estate`` yields one ``LejuItem``
    per estate entry on a page.
    """
    # NOTE(review): this class overrides parse() and defines no Rules, so
    # CrawlSpider's link-following machinery is unused — plain scrapy.Spider
    # would be the conventional base. Also, the scrapy.contrib import path
    # used at the top of this file was removed in Scrapy 1.x; modern code
    # imports from scrapy.spiders — confirm the Scrapy version in use.

    name = 'leju2spider'
    allowed_domains = ['leju.com']
    start_urls = ['http://bj.leju.com/']

    def parse(self, response):
        """Seed one request per city from the hard-coded city table.

        Each entry is [city display name, city landing URL]; the short
        city code (e.g. 'ai') is derived from the URL and passed along in
        request meta for the downstream callbacks.
        """

        city_box = [
            ['安庆', 'http://house.leju.com/ai/new/#wt_source=nscan_dh2_zxkp'],
            ['包头', 'http://house.leju.com/bt/new/#wt_source=nscan_dh2_zxkp'],
            # NOTE(review): the Bozhou entry below does not match the
            # house.leju.com/<code>/new/ pattern, so its derived city_id is
            # the full URL and the pagination callback will build broken
            # follow-up URLs for it — verify whether it belongs in this list.
            ['亳州', 'http://bozhou.leju.com/exhibit/?wt_source=nscan_dh2_xfzx'],
            ['百色', 'http://house.leju.com/bc/new/#wt_source=nscan_dh2_zxkp'],
            ['常州', 'http://house.leju.com/cz/new/#wt_source=nscan_dh2_zxkp'],
            ['常德', 'http://house.leju.com/ca/new/#wt_source=nscan_dh2_zxkp'],
            ['沧州', 'http://house.leju.com/co/new/#wt_source=nscan_dh2_zxkp'],
            ['东营', 'http://house.leju.com/dy/new/#wt_source=nscan_dh2_zxkp'],
            ['广西', 'http://house.leju.com/gx/new/#wt_source=nscan_dh2_zxkp'],
            ['广州', 'http://house.leju.com/gz/new/#wt_source=nscan_dh2_zxkp'],
            ['惠州', 'http://house.leju.com/hv/new/#wt_source=nscan_dh2_zxkp'],
            ['怀化', 'http://house.leju.com/hh/new/#wt_source=nscan_dh2_zxkp'],
            ['菏泽', 'http://house.leju.com/eh/new/#wt_source=nscan_dh2_zxkp'],
            ['衡水', 'http://house.leju.com/hp/new/#wt_source=nscan_dh2_zxkp'],
            ['江门', 'http://house.leju.com/jm/new/#wt_source=nscan_dh2_zxkp'],
            ['江阴', 'http://house.leju.com/jy/new/#wt_source=nscan_dh2_zxkp'],
            ['嘉兴', 'http://house.leju.com/jx/new/#wt_source=nscan_dh2_zxkp'],
            # NOTE(review): Jilin below shares the 'jx' URL with Jiaxing
            # above — likely a copy-paste slip; confirm the correct code.
            ['吉林', 'http://house.leju.com/jx/new/#wt_source=nscan_dh2_zxkp'],
            ['揭阳', 'http://house.leju.com/je/new/#wt_source=nscan_dh2_zxkp'],
            ['开封', 'http://house.leju.com/kf/new/#wt_source=nscan_dh2_zxkp'],
            ['泸州', 'http://house.leju.com/lu/new/#wt_source=nscan_dh2_zxkp'],
            ['连云港', 'http://house.leju.com/yg/new/#wt_source=nscan_dh2_zxkp'],
            ['六安', 'http://house.leju.com/un/new/#wt_source=nscan_dh2_zxkp'],
            ['南京', 'http://house.leju.com/nj/new/#wt_source=nscan_dh2_zxkp'],
            ['平顶山', 'http://house.leju.com/pd/new/#wt_source=nscan_dh2_zxkp'],
            ['濮阳', 'http://house.leju.com/pz/new/#wt_source=nscan_dh2_zxkp'],
            ['青岛', 'http://house.leju.com/qd/new/#wt_source=nscan_dh2_zxkp'],
            ['清远', 'http://house.leju.com/qy/new/#wt_source=nscan_dh2_zxkp'],
            ['三亚', 'http://house.leju.com/sa/new/#wt_source=nscan_dh2_zxkp'],
            ['石家庄', 'http://house.leju.com/sj/new/#wt_source=nscan_dh2_zxkp'],
            ['三门峡', 'http://house.leju.com/mx/new/#wt_source=nscan_dh2_zxkp'],
            ['天津', 'http://house.leju.com/tj/new/#wt_source=nscan_dh2_zxkp'],
            ['泰安', 'http://house.leju.com/ta/new/#wt_source=nscan_dh2_zxkp'],
            ['泰兴', 'http://house.leju.com/tx/new/#wt_source=nscan_dh2_zxkp'],
            ['台州', 'http://house.leju.com/tz/new/#wt_source=nscan_dh2_zxkp'],
            ['温州', 'http://house.leju.com/wz/new/#wt_source=nscan_dh2_zxkp'],
            ['咸阳', 'http://house.leju.com/xy/new/#wt_source=nscan_dh2_zxkp'],
            ['许昌', 'http://house.leju.com/xu/new/#wt_source=nscan_dh2_zxkp'],
            ['信阳', 'http://house.leju.com/xb/new/#wt_source=nscan_dh2_zxkp'],
            ['西宁', 'http://house.leju.com/xn/new/#wt_source=nscan_dh2_zxkp'],
            ['岳阳', 'http://house.leju.com/yv/new/#wt_source=nscan_dh2_zxkp'],
            ['肇庆', 'http://house.leju.com/zq/new/#wt_source=nscan_dh2_zxkp'],
            ['株洲', 'http://house.leju.com/zz/new/#wt_source=nscan_dh2_zxkp'],
            ['镇江', 'http://house.leju.com/zd/new/#wt_source=nscan_dh2_zxkp'],
            ['淄博', 'http://house.leju.com/zb/new/#wt_source=nscan_dh2_zxkp']]

        # Fan out one request per city, carrying the city metadata in
        # request meta so later callbacks can attach it to items.
        for city, city_url in city_box:
            # Strip the fixed prefix/suffix to leave the short city code.
            city_id = (city_url
                       .replace('http://house.leju.com/', '')
                       .replace('/new/#wt_source=nscan_dh2_zxkp', ''))
            citys = {
                'website': '乐居网', 'web_url': 'leju.com',
                'city': city, 'city_id': city_id
            }
            yield scrapy.Request(city_url, callback=self.parse_cityPage_url, meta=citys)

    def parse_cityPage_url(self, response):
        """Discover the page count for a city and request every page.

        Reads the pagination box (``.b_pageBox``); the last link's href
        (``?page=N#...``) normally carries the final page number.
        """
        meta = response.meta
        city_id = meta['city_id']
        soup = bs4.BeautifulSoup(response.body, 'lxml')

        def _href_page(link):
            # Extract the bare page number from '?page=N#wt_source=...'.
            return link.get('href').replace('?page=', '').replace(
                    '#wt_source=pc_phb_down_fy', '')

        links = soup.select('.b_pageBox > a ')
        if not links:
            # Robustness fix: a city page without a pagination box used to
            # raise IndexError; treat it as a single page instead.
            num = 1
        else:
            page_num = _href_page(links[-1])
            if page_num == '2':
                # NOTE(review): when the trailing link reads '2' (presumably
                # a "next" arrow), the real last-page number appears to sit
                # on the second-to-last link — confirm against the markup.
                page_num = _href_page(links[-2])
            num = int(page_num.strip())

        for n in range(num):
            # BUG FIX: the original concatenation omitted the '/' between
            # the city code and 'new', yielding e.g. .../ainew/?page=1.
            site_url = 'http://house.leju.com/' + city_id + '/new/?page=' + str(
                    n + 1) + '#wt_source=pc_phb_down_fy'
            yield scrapy.Request(site_url,
                                 callback=self.parse_city_estate,
                                 meta=meta)

    def parse_city_estate(self, response):
        """Yield one LejuItem per estate listed on a result page.

        Each listing sits in a ``div.b_titBox01``: the <h2><a> holds the
        estate name and detail URL, and the sibling <h3> holds the area.
        """
        meta = response.meta
        city_id = meta['city_id']
        soup = bs4.BeautifulSoup(response.body, 'lxml')

        boxs = soup.find_all('div', class_='b_titBox01')
        for child in boxs:
            for col in child.find_all('h2'):
                estate = col.a.get_text()
                href = col.a.get('href')
                estate_url = 'http://house.leju.com' + href
                # Slug with tracking suffix and slashes stripped, e.g.
                # '/ai/foo/#wt_source=...' -> 'aifoo'.
                estate_id = href.replace('/#wt_source=pc_phb_list_lpbt', '').replace(
                        '/', '')
                # Drop the city code to leave the estate's own slug.
                estate_id2 = estate_id.replace(city_id, '')
                # First whitespace-separated token of the sibling <h3>
                # text; presumably the district/area name — TODO confirm.
                areas = col.find_next_siblings('h3')[0]
                area = areas.get_text().strip().split(' ')[0]

                item = LejuItem()
                item['website'] = meta['website']
                item['web_url'] = meta['web_url']
                item['city'] = meta['city']
                item['city_id'] = meta['city_id']
                item['area'] = area
                item['estate'] = estate
                item['estate_id'] = estate_id
                item['estate_id2'] = estate_id2
                item['estate_url'] = estate_url
                yield item
