# coding: utf-8

import bs4
from scrapy.contrib.spiders.crawl import CrawlSpider
import scrapy
from ..items import SoufItem


class Souf2Spider(CrawlSpider):
    """Fang.com (搜房网) new-house spider, rule set 2.

    Seeds one request per city from a hard-coded city table, walks each
    city's estate (楼盘) listing pages, follows every estate to its detail
    page and yields one ``SoufItem`` per estate.  When a detail page is
    missing or fails, a minimal item is still emitted so the estate is
    recorded.
    """

    name = 'souf2spider'
    allowed_domains = ['fang.com']
    start_urls = ['http://www.fang.com/SoufunFamily.htm']

    def parse(self, response):
        """Seed one listing request per city.

        Each ``citybox`` row is ``[city name, city id, listing url]``.
        A *fresh* meta dict is built per request so that downstream
        mutation of one request's meta can never leak into another.
        """

        citybox = [
            ['重庆', 'cq', 'http://newhouse.cq.fang.com/house/s/'],
            ['鞍山', 'anshan', 'http://newhouse.anshan.fang.com/house/s/'],
            ['珠海', 'zh', 'http://newhouse.zh.fang.com/house/s/'],
            ['淄博', 'zb', 'http://newhouse.zb.fang.com/house/s/'],
            ['漳州', 'zhangzhou', 'http://newhouse.zhangzhou.fang.com/house/s/'],
            ['中山', 'zs', 'http://newhouse.zs.fang.com/house/s/'],
            ['株洲', 'zhuzhou', 'http://newhouse.zhuzhou.fang.com/house/s/'],
            ['郑州', 'zz', 'http://newhouse.zz.fang.com/house/s/'],
            ['舟山', 'zhoushan', 'http://newhouse.zhoushan.fang.com/house/s/'],
            ['镇江', 'zhenjiang', 'http://newhouse.zhenjiang.fang.com/house/s/'],
            ['湛江', 'zj', 'http://newhouse.zj.fang.com/house/s/'],
            ['阳江', 'yangjiang', 'http://newhouse.yangjiang.fang.com/house/s/'],
            ['岳阳', 'yueyang', 'http://newhouse.yueyang.fang.com/house/s/'],
            ['盐城', 'yancheng', 'http://newhouse.yancheng.fang.com/house/s/'],
            ['银川', 'yinchuan', 'http://newhouse.yinchuan.fang.com/house/s/'],
            ['宜昌', 'yc', 'http://newhouse.yc.fang.com/house/s/'],
            ['烟台', 'yt', 'http://newhouse.yt.fang.com/house/s/'],
            ['扬州', 'yz', 'http://newhouse.yz.fang.com/house/s/'],
            ['邢台', 'xingtai', 'http://newhouse.xingtai.fang.com/house/s/'],
            ['徐州', 'xz', 'http://newhouse.xz.fang.com/house/s/'],
            ['襄阳', 'xiangyang', 'http://newhouse.xiangyang.fang.com/house/s/'],
            ['西安', 'xian', 'http://newhouse.xian.fang.com/house/s/'],
            ['西宁', 'xn', 'http://newhouse.xn.fang.com/house/s/'],
            ['厦门', 'xm', 'http://newhouse.xm.fang.com/house/s/'],
            ['湘潭', 'xt', 'http://newhouse.xt.fang.com/house/s/'],
            ['芜湖', 'wuhu', 'http://newhouse.wuhu.fang.com/house/s/'],
            ['潍坊', 'wf', 'http://newhouse.wf.fang.com/house/s/'],
            ['温州', 'wz', 'http://newhouse.wz.fang.com/house/s/'],
            ['乌鲁木齐', 'xj', 'http://newhouse.xj.fang.com/house/s/'],
            ['武汉', 'wuhan', 'http://newhouse.wuhan.fang.com/house/s/'],
            ['无锡', 'wuxi', 'http://newhouse.wuxi.fang.com/house/s/'],
            ['威海', 'weihai', 'http://newhouse.weihai.fang.com/house/s/'],
            ['通辽', 'tl', 'http://newhouse.tl.fang.com/house/s/'],
            ['泰州', 'taizhou', 'http://newhouse.taizhou.fang.com/house/s/'],
            ['台州', 'tz', 'http://newhouse.tz.fang.com/house/s/'],
            ['唐山', 'ts', 'http://newhouse.ts.fang.com/house/s/'],
            ['天津', 'tj', 'http://newhouse.tj.fang.com/house/s/'],
            ['太原', 'taiyuan', 'http://newhouse.taiyuan.fang.com/house/s/'],
            ['石家庄', 'sjz', 'http://newhouse.sjz.fang.com/house/s/'],
            ['沈阳', 'sy', 'http://newhouse.sy.fang.com/house/s/'],
            ['汕头', 'st', 'http://newhouse.st.fang.com/house/s/'],
            ['深圳', 'sz', 'http://newhouse.sz.fang.com/house/s/'],
            ['宿迁', 'sq', 'http://newhouse.sq.fang.com/house/s/'],
            ['苏州', 'suzhou', 'http://newhouse.suzhou.fang.com/house/s/'],
            ['三亚', 'sanya', 'http://newhouse.sanya.fang.com/house/s/'],
            ['青岛', 'qd', 'http://newhouse.qd.fang.com/house/s/'],
            ['泉州', 'qz', 'http://newhouse.qz.fang.com/house/s/'],
            ['秦皇岛', 'qhd', 'http://newhouse.qhd.fang.com/house/s/'],
            ['绍兴', 'sx', 'http://newhouse.sx.fang.com/house/s/'],
            ['南充', 'nanchong', 'http://newhouse.nanchong.fang.com/house/s/'],
            ['南昌', 'nc', 'http://newhouse.nc.fang.com/house/s/'],
            ['南通', 'nt', 'http://newhouse.nt.fang.com/house/s/'],
            ['宁波', 'nb', 'http://newhouse.nb.fang.com/house/s/'],
            ['南宁', 'nn', 'http://newhouse.nn.fang.com/house/s/'],
            ['马鞍山', 'mas', 'http://newhouse.mas.fang.com/house/s/'],
            ['绵阳', 'mianyang', 'http://newhouse.mianyang.fang.com/house/s/'],
            ['南京', 'nanjing', 'http://newhouse.nanjing.fang.com/house/s/'],
            ['临沂', 'linyi', 'http://newhouse.linyi.fang.com/house/s/'],
            ['乐山', 'leshan', 'http://newhouse.leshan.fang.com/house/s/'],
            ['聊城', 'lc', 'http://newhouse.lc.fang.com/house/s/'],
            ['廊坊', 'lf', 'http://newhouse.lf.fang.com/house/s/'],
            ['洛阳', 'ly', 'http://newhouse.ly.fang.com/house/s/'],
            ['柳州', 'liuzhou', 'http://newhouse.liuzhou.fang.com/house/s/'],
            ['连云港', 'lyg', 'http://newhouse.lyg.fang.com/house/s/'],
            ['兰州', 'lz', 'http://newhouse.lz.fang.com/house/s/'],
            ['昆山', 'ks', 'http://newhouse.ks.fang.com/house/s/'],
            ['昆明', 'km', 'http://newhouse.km.fang.com/house/s/'],
            ['济宁', 'jining', 'http://newhouse.jining.fang.com/house/s/'],
            ['江阴', 'jy', 'http://newhouse.jy.fang.com/house/s/'],
            ['九江', 'jiujiang', 'http://newhouse.jiujiang.fang.com/house/s/'],
            ['江门', 'jm', 'http://newhouse.jm.fang.com/house/s/'],
            ['济南', 'jn', 'http://newhouse.jn.fang.com/house/s/'],
            ['嘉兴', 'jx', 'http://newhouse.jx.fang.com/house/s/'],
            ['吉林', 'jl', 'http://newhouse.jl.fang.com/house/s/'],
            ['衡水', 'hs', 'http://newhouse.hs.fang.com/house/s/'],
            ['衡阳', 'hengyang', 'http://newhouse.hengyang.fang.com/house/s/'],
            ['湖州', 'huzhou', 'http://newhouse.huzhou.fang.com/house/s/'],
            ['呼和浩特', 'nm', 'http://newhouse.nm.fang.com/house/s/'],
            ['海南', 'hn', 'http://newhouse.hn.fang.com/house/s/'],
            ['合肥', 'hf', 'http://newhouse.hf.fang.com/house/s/'],
            ['淮安', 'huaian', 'http://newhouse.huaian.fang.com/house/s/'],
            ['哈尔滨', 'hrb', 'http://newhouse.hrb.fang.com/house/s/'],
            ['杭州', 'hz', 'http://newhouse.hz.fang.com/house/s/'],
            ['惠州', 'huizhou', 'http://newhouse.huizhou.fang.com/house/s/'],
            ['邯郸', 'hd', 'http://newhouse.hd.fang.com/house/s/'],
            ['广州', 'gz', 'http://newhouse.gz.fang.com/house/s/'],
            ['上海', 'sh', 'http://newhouse.sh.fang.com/house/s/'],
            ['桂林', 'guilin', 'http://newhouse.guilin.fang.com/house/s/'],
            ['贵阳', 'gy', 'http://newhouse.gy.fang.com/house/s/'],
            ['赣州', 'ganzhou', 'http://newhouse.ganzhou.fang.com/house/s/'],
            ['福州', 'fz', 'http://newhouse.fz.fang.com/house/s/'],
            ['佛山', 'fs', 'http://newhouse.fs.fang.com/house/s/'],
            ['大庆', 'daqing', 'http://newhouse.daqing.fang.com/house/s/'],
            ['东莞', 'dg', 'http://newhouse.dg.fang.com/house/s/'],
            ['大连', 'dl', 'http://newhouse.dl.fang.com/house/s/'],
            ['常州', 'cz', 'http://newhouse.cz.fang.com/house/s/'],
            ['长春', 'changchun', 'http://newhouse.changchun.fang.com/house/s/'],
            ['成都', 'cd', 'http://newhouse.cd.fang.com/house/s/'],
            ['长沙', 'cs', 'http://newhouse.cs.fang.com/house/s/'],
            ['承德', 'chengde', 'http://newhouse.chengde.fang.com/house/s/'],
            ['蚌埠', 'bengbu', 'http://newhouse.bengbu.fang.com/house/s/'],
            ['常熟', 'changshu', 'http://newhouse.changshu.fang.com/house/s/'],
            ['北海', 'bh', 'http://newhouse.bh.fang.com/house/s/'],
            ['保定', 'bd', 'http://newhouse.bd.fang.com/house/s/'],
            ['北京', 'bj', 'http://newhouse.fang.com/house/s/'],
            ['包头', 'bt', 'http://newhouse.bt.fang.com/house/s/'],
            ['昌吉', 'changji', 'http://esf.changji.fang.com/'],
            ['石河子', 'shihezi', 'http://esf.shihezi.fang.com/'],
            ['吴江', 'wj', 'http://esf.wj.fang.com/']

        ]

        for city, city_id, city_url in citybox:
            meta = {
                'website': '搜房网', 'web_url': 'fang.com',
                'city': city, 'city_id': city_id, 'city_url': city_url
            }
            yield scrapy.Request(city_url, callback=self.parse_city_area, meta=meta)

    def _item_from_meta(self, meta):
        """Build a ``SoufItem`` pre-filled from *meta* with blank detail fields.

        Callers overwrite the detail fields (``developer``, ``building_class``,
        ``decoration``, ``estate1``..``estate4``, ``area``, ...) when the
        detail page supplies them.
        """
        item = SoufItem()
        item['website'] = meta['website']
        item['web_url'] = meta['web_url']
        item['city'] = meta['city']
        item['city_id'] = meta['city_id']
        item['estate_class'] = meta['estate_class']
        item['estate'] = meta['estate']
        item['estate_id'] = meta['estate_id']
        item['estate_id2'] = meta['estate_id2']
        item['estate_url'] = meta['estate_url']
        item['address'] = meta['address']
        item['developer'] = ''
        item['building_class'] = ''
        item['decoration'] = ''
        item['estate1'] = ''
        item['estate2'] = ''
        item['estate3'] = ''
        item['estate4'] = ''
        item['area'] = meta['area']
        return item

    def parse_city_area(self, response):
        """Parse one page of a city's estate listing.

        Yields one request per estate, plus (first page only) a request for
        the city's self-operated (新房自营) listing, and follows the
        next-page link.
        """
        meta = response.meta
        city_url = meta['city_url']
        city = meta['city']
        city_id = meta['city_id']
        soup = bs4.BeautifulSoup(response.body, 'html5lib')
        # The last path segment is 's' only on the first listing page.
        current_page = response.url.split('/')[-2]

        # ---- first page only: check for a self-operated (自营) tab ----
        if current_page == 's':
            ziying = soup.select('.nhouse_list_nav > li')[1]
            dianshang = ziying.select('a')[0].get_text().split('(')[0].strip()
            if dianshang == '新房自营':
                dianshang_url = city_url.replace('/s/', '/dianshang/')
                yield scrapy.Request(dianshang_url, callback=self.parse_dianshang_estate, meta=dict(meta))

        # ---- estate list ----
        estatebox = soup.select('.nl_con > ul > li')
        for child in estatebox:
            # BUG FIX: the original mutated the single shared response.meta
            # dict for every estate, so concurrently scheduled requests could
            # all see the *last* estate's fields.  Each request now gets its
            # own copy.
            est_meta = dict(meta)
            estates = child.select('.nlcd_name')
            if estates:
                # ---- regular new-house entry ----
                link = estates[0].select('a')[0]
                estate = link.get_text().strip()
                estate_url = link.get('href').strip()
                estate_id = ''
                estate_id2 = estate_url.split('/')[2].replace('.fang.com', '')
                address = ''
                area = ''
                if child.select('.address'):
                    dizhi = child.select('.address > a')[0].get_text().strip()
                    if ']' in dizhi:
                        # "[district] street..." -> split into area + address.
                        address = '地址：' + dizhi.split(']')[1].strip()
                        area = dizhi.split(']')[0].strip() + ']'
                    else:
                        address = '地址：' + dizhi
                print('3333', area, estate, estate_id, estate_id2, estate_url, address)
                est_meta.update(estate=estate, estate_id=estate_id, estate_id2=estate_id2,
                                estate_url=estate_url, address=address, area=area,
                                estate_class='')
                # '/house/dianshang/' is the self-operated tab, not an estate.
                if estate_url != '/house/dianshang/':
                    yield scrapy.Request(estate_url, callback=self.parse_city_estate, meta=est_meta)
            else:
                # ---- sparse entry or second-hand (二手房) listing ----
                estates2 = child.select('.sslalone')[0]
                estate_id = estates2.get('id')
                estatelist = estates2.select('dl > dd')[0]
                estate = estatelist.a.get_text()
                estate_url = estatelist.a.get('href')
                estate_id2 = estate_url.split('/')[2].replace('.fang.com', '').replace('esf.', '')
                if estate_id2 == city_id:
                    # Sub-domain is just the city id; the real id is the
                    # first path segment instead.
                    estate_id2 = estate_url.split('/')[3]

                estate_class = estatelist.span.get_text()
                address = estates2.select('.add > a')[0].get_text()
                est_meta.update(estate=estate, estate_id=estate_id, estate_id2=estate_id2,
                                estate_url=estate_url, address=address, area='',
                                estate_class=estate_class)
                print('232323', estate, estate_id, estate_id2, estate_class, address)
                yield scrapy.Request(estate_url, callback=self.parse_city_estate, meta=est_meta)

        # ---- pagination ----
        pagebox = soup.select('.page > ul > li')
        if pagebox:
            page_links = pagebox[1].select('a')
            if page_links[-2].get_text().strip() == '下一页':
                next_num = page_links[-2].get('href').split('/')[3]
                next_url = city_url + next_num + '/'
                print(city, next_url)
                # Carry only the (unmutated) city-level meta forward.
                yield scrapy.Request(next_url, callback=self.parse_city_area, meta=dict(meta))

    def parse_dianshang_estate(self, response):
        """Parse one page of the self-operated (搜房自营) estate listing."""
        meta = response.meta
        city_id = meta['city_id']
        soup = bs4.BeautifulSoup(response.body, 'html5lib')

        estatebox = soup.select('.nl_con > ul > li')
        for child in estatebox:
            # Copy per request -- see parse_city_area for rationale.
            est_meta = dict(meta)
            link = child.select('.nlcd_name')[0].select('a')[0]
            estate = link.get_text().strip()
            estate_url = link.get('href').strip()
            estate_id = ''
            estate_id2 = estate_url.replace('http://', '').replace('.fang.com/', '').replace('/', '')
            dizhi = child.select('.address > a')[0].get_text().strip()
            address = '地址：' + dizhi.split(']')[1].strip()
            area = dizhi.split(']')[0].strip() + ']'
            print('电商', area, estate, estate_id, estate_id2, address, estate_url)
            est_meta.update(estate=estate, estate_id=estate_id, estate_id2=estate_id2,
                            estate_url=estate_url, address=address, area=area,
                            estate_class='')
            yield scrapy.Request(estate_url, callback=self.parse_city_estate, meta=est_meta)

        # ---- pagination ----
        pagebox = soup.select('.page > ul > li')
        if pagebox:
            for page_link in pagebox[1].select('a'):
                # Renamed from 'next', which shadowed the builtin.
                label = page_link.get_text().strip()
                if label == '下一页':
                    next_url = 'http://newhouse.' + city_id + '.fang.com' + page_link.get('href')
                    yield scrapy.Request(next_url, callback=self.parse_dianshang_estate, meta=dict(meta))

    def parse_city_estate(self, response):
        """Estate landing page: resolve the area and the detail-page URL.

        Depending on the navigation box, follows 楼盘详情/详细信息 (new-house
        detail), 小区详情 (second-hand detail), or yields a bare item when no
        detail page exists at all.
        """
        meta = response.meta
        soup = bs4.BeautifulSoup(response.body, 'html5lib')

        arealist = soup.find_all('ul', class_='tf')
        # Guard the [1] index too -- a single match used to raise IndexError.
        if len(arealist) > 1:
            area = arealist[1].find_all('li')[2].get_text().replace('楼盘', '').replace('>', '').strip()
            meta['area'] = area

        detal = soup.select('#orginalNaviBox > a')
        if not detal:
            # No navigation box at all: record the estate with what we have.
            item = self._item_from_meta(meta)
            yield item
            print('5555', meta['estate'], meta['estate_url'])
            return

        origin = detal[1].get_text().strip()
        origin_url = detal[1].get('href')
        if origin in ('楼盘详情', '详细信息'):
            meta['estate_id'] = origin_url.split('/')[-2].replace('house-xm', '')
            yield scrapy.Request(origin_url, callback=self.parse_get_estate_result, meta=meta,
                                 errback=lambda x: self.parse_error_result(x, meta))
            return

        origin1 = detal[2].get_text().strip()
        origin_url1 = detal[2].get('href')
        meta['estate_id'] = origin_url1.split('/')[-2].replace('house-xm', '')

        # Follow the detail page, the second-hand detail page, or store
        # the item directly when neither exists.
        # BUG FIX: the original tested `origin == '详细信息'` here, which is
        # unreachable in this branch; `origin1` was clearly intended.
        if origin1 in ('楼盘详情', '详细信息'):
            yield scrapy.Request(origin_url1, callback=self.parse_get_estate_result, meta=meta,
                                 errback=lambda x: self.parse_error_result(x, meta))
            return

        ershoufang_url = ''
        if origin == '小区详情':
            ershoufang_url = origin_url
        if origin1 == '小区详情':
            ershoufang_url = origin_url1
        if ershoufang_url:
            yield scrapy.Request(ershoufang_url, callback=self.parse_get_estate_result2, meta=meta,
                                 errback=lambda x: self.parse_error_result(x, meta))
        else:
            item = self._item_from_meta(meta)
            print('5555', meta['estate'], meta['estate_url'])
            yield item

    def parse_get_estate_result2(self, response):
        """Second-hand (小区详情) detail page: aliases and building class."""
        meta = response.meta
        soup = bs4.BeautifulSoup(response.body, 'html5lib')

        # ---- aliases (别名), up to four ----
        estate1 = estate2 = estate3 = estate4 = ''
        by_names = soup.select('.con_max')
        if by_names:
            names = by_names[0].get_text().replace('别名：', '').split(' ')
            # Pad with '' (not None) for consistency with parse_get_estate_result.
            names += [''] * (4 - len(names))
            estate1, estate2, estate3, estate4 = names[0], names[1], names[2], names[3]

        # ---- building class ----
        # BUG FIX: initialise -- `building_class` was unbound when the
        # detail list was missing or lacked the 建筑类别 row.
        building_class = ''
        # BUG FIX: find_all() takes a tag name, not a CSS selector; the
        # original call could never match anything.  select() is the CSS API.
        for child in soup.select('.inforwrap > dl > dd'):
            label = child.strong.get_text() if child.strong else ''
            if label == '建筑类别：':
                building_class = child.get_text()
            # NOTE(review): 小区地址/所属区域 rows exist here too, but the
            # original stored meta['address'] and area='' in the item, so the
            # scraped values are intentionally not used -- confirm intent.

        item = self._item_from_meta(meta)
        item['area'] = ''
        item['estate1'] = estate1
        item['estate2'] = estate2
        item['estate3'] = estate3
        item['estate4'] = estate4
        # BUG FIX: meta never contains 'developer'; meta['developer'] raised
        # KeyError for every item this method produced.
        item['developer'] = meta.get('developer', '')
        item['building_class'] = building_class
        item['decoration'] = ''
        yield item

    def parse_get_estate_result(self, response):
        """New-house detail page: aliases, class, decoration, developer, address."""
        meta = response.meta
        estate = meta['estate']
        area = meta['area']
        soup = bs4.BeautifulSoup(response.body, 'html5lib')

        # ---- aliases (别名), up to four ----
        estate1 = estate2 = estate3 = estate4 = ''
        by_names = soup.select('.h1_label')
        if by_names:
            names = by_names[0].get_text().replace('别名：', '').split(' ')
            names += [''] * (4 - len(names))
            estate1, estate2, estate3, estate4 = names[0], names[1], names[2], names[3]

        ee_detail = soup.find_all('ul', class_='list')
        if ee_detail:
            detail = ee_detail[0]
            # ---- building class ('bulid-type' is the site's own typo) ----
            builds = detail.select('.bulid-type')[0].get_text().strip().split(' ')
            build = ''
            for part in builds:
                # Reverse-joins the tokens with a trailing 、 as the original did.
                build = part.strip() + '、' + build

            # ---- estate class ----
            estate_class = detail.select('li')[0].select('.list-right')[0].get_text().strip()
            # ---- decoration ----
            decoration = detail.select('li')[3].select('.list-right')[0].get_text().strip()
            # ---- developer (two possible layouts) ----
            developers = detail.select('li')[6]
            dev = developers.select('.list-right-text')
            if dev:
                developer = '开发商：' + dev[0].get_text()
            else:
                developer = '开发商：' + developers.select('.list-right')[0].get_text()
            # ---- address (two possible layouts) ----
            addresss = detail.select('li')[7]
            adr = addresss.select('.list-right-text')
            if adr:
                address = '地址：' + adr[0].get_text()
            else:
                address = '地址：' + addresss.select('.list-right')[0].get_text()

            item = self._item_from_meta(meta)
            item['area'] = area
            item['estate_class'] = estate_class
            item['estate1'] = estate1
            item['estate2'] = estate2
            item['estate3'] = estate3
            item['estate4'] = estate4
            item['developer'] = developer
            item['address'] = address
            item['building_class'] = build
            item['decoration'] = decoration
            print('7777', area, estate, estate1, estate2, estate3, estate4, estate_class, build, decoration, address)
            yield item
        else:
            # ---- fallback: second-hand style detail block ----
            # BUG FIX: select(), not find_all(), for a CSS selector (same
            # defect as in parse_get_estate_result2).
            detail_dds = soup.select('.inforwrap > dl > dd')
            if detail_dds:
                # BUG FIX: initialise defaults -- these were unbound when the
                # matching row was absent, and the original read the never-set
                # meta['developer'] (KeyError) instead of the scraped value.
                building_class = ''
                decoration = ''
                developer = ''
                for child in detail_dds:
                    label = child.strong.get_text() if child.strong else ''
                    if label == '小区地址：':
                        address = child.get_text()
                    if label == '所属区域：':
                        area = child.get_text().split(' ')[0]
                    if label == '建筑类别：':
                        building_class = child.get_text()
                    if label == '项目特色：':
                        decoration = child.get_text()
                    if label == '开 发 商：':
                        developer = child.get_text()

                item = self._item_from_meta(meta)
                item['area'] = ''
                item['estate1'] = estate1
                item['estate2'] = estate2
                item['estate3'] = estate3
                item['estate4'] = estate4
                item['developer'] = developer
                item['building_class'] = building_class
                item['decoration'] = decoration
                yield item

    def parse_error_result(self, e, meta):
        """Errback: emit a minimal item so failed detail fetches still record the estate.

        *e* is the Twisted Failure passed by Scrapy; it is intentionally unused.
        """
        item = self._item_from_meta(meta)
        item['estate_class'] = ''
        item['address'] = ''
        print('error', meta['city'], meta['area'], meta['estate'])
        yield item
