#!/usr/bin/env python
# encoding=utf-8

import sys
import time

from pymongo import MongoClient
from scpy.logger import get_logger
from xtls.basecrawler import BaseCrawler
from xtls.timeparser import now
from xtls.util import BeautifulSoup

from config import *

# Python 2 encoding hack: force the process-wide default string encoding to
# UTF-8 so implicit str<->unicode conversions of the Chinese city names below
# do not raise UnicodeDecodeError. (Py2-only; reload() re-exposes the
# setdefaultencoding attribute that site.py deletes at startup.)
reload(sys)
sys.setdefaultencoding('utf-8')

__author__ = 'xlzd'
# Module-level logger named after this file.
logger = get_logger(__file__)
# Shared MongoDB connection; host/port come from config (star-imported above).
MONGO = MongoClient(MONGO_HOST, MONGO_PORT)
# City display name -> base URL of that city's office-listing site on fang.com.
# Crawled URLs are built by appending relative hrefs to these base URLs.
CITYS = {
    u'北京': 'http://office1.fang.com/',
    u'上海': 'http://office.sh.fang.com/',
    u'广州': 'http://office.gz.fang.com/',
    u'深圳': 'http://office.sz.fang.com/',
    u'天津': 'http://office.tj.fang.com/',
    u'重庆': 'http://office.cq.fang.com/',
    u'成都': 'http://office.cd.fang.com/',
    u'苏州': 'http://office.suzhou.fang.com/',
    u'武汉': 'http://office.wuhan.fang.com/',
    u'西安': 'http://office.xian.fang.com/',
    u'东莞': 'http://office.dg.fang.com/',
    u'杭州': 'http://office.hz.fang.com/',
    u'济南': 'http://office.jn.fang.com/',
    u'无锡': 'http://office.wuxi.fang.com/',
    u'郑州': 'http://office.zz.fang.com/',
    u'南昌': 'http://office.nc.fang.com/',
    u'青岛': 'http://office.qd.fang.com/',
    u'石家庄': 'http://office.sjz.fang.com/',
    u'南京': 'http://office.nanjing.fang.com/',
    u'大连': 'http://office.dl.fang.com/',
    u'昆明': 'http://office.km.fang.com/',
}


class RegionCrawler(BaseCrawler):
    """Crawler for one city's office-listing site on fang.com.

    On construction it fetches the city landing page and collects the list
    of region (district-of-city) links; ``run()`` then visits each region
    page, extracts the sub-district links, and upserts one document per
    (city, region, district) into Mongo ``officeDistrict``.
    """

    def __init__(self, city, url):
        """
        :param city: display name of the city (unicode), e.g. u'北京'
        :param url: base URL of the city's office site, ends with '/'
        """
        # BaseCrawler stores the kwargs as attributes (self.city, self.url,
        # self.regions) -- presumably; confirm against BaseCrawler.
        super(RegionCrawler, self).__init__(city=city, url=url, regions=[])
        self.find_all_region()

    def find_all_region(self):
        """Populate self.regions with (region_name, absolute_url) tuples."""
        # fang.com pages are GBK-encoded.
        soup = BeautifulSoup(self.get(self.url), from_encoding='gbk')

        region_div = soup.find('div', class_='qxName')
        if not region_div:
            # Page layout changed or request failed; leave regions empty.
            logger.info('region err at %s-%s' % (self.city, self.url))
            return
        for region_a in region_div.find_all('a'):
            region_name = region_a.getText().strip()
            if region_name == u'不限':  # "no limit" pseudo-region, skip
                continue
            # hrefs are site-relative like '/xxx/'; drop the leading '/'
            # so the join with self.url (which ends in '/') is clean.
            region_link = region_a['href'][1:]
            self.regions.append((region_name, self.url + region_link))

    def run(self):
        """Crawl every region page and upsert its districts into Mongo."""
        for region, link in self.regions:
            logger.info('now region %s-%s' % (region, link))
            soup = BeautifulSoup(self.get(link), from_encoding='gbk')
            district_p = soup.find('p', id='tagContent0')
            if not district_p:
                logger.info('district err at %s-%s-%s' % (self.city, region, link))
                # BUG FIX: without this continue the code fell through and
                # called district_p.find_all on None, raising AttributeError
                # and aborting the whole crawl.
                continue
            for district_a in district_p.find_all('a'):
                district_name = district_a.getText().strip()
                if district_name == u'不限':  # "no limit" pseudo-district, skip
                    continue
                district_link = district_a['href'][1:]

                item = {
                    # Deterministic _id makes the upsert idempotent across runs.
                    '_id': u'%s#%s#%s' % (self.city, region, district_name),
                    'city': self.city,
                    'region': region,
                    'district': district_name,
                    'url': self.url + district_link,
                    'updateTime': now()
                }
                MONGO[DB_NAME]['officeDistrict'].update_one(
                    filter={'_id': item['_id']},
                    update={'$set': item},
                    upsert=True
                )
            # Be polite to the server between region pages.
            time.sleep(1)


def main():
    """Run a RegionCrawler over every configured city, one after another."""
    for city_name, base_url in CITYS.iteritems():
        logger.info('now city : %s-%s' % (city_name, base_url))
        crawler = RegionCrawler(city_name, base_url)
        crawler.run()


# Script entry point: crawl all cities when executed directly.
if __name__ == '__main__':
    main()
