#!/usr/bin/env python
# -*- coding:utf-8 -*-
import re
import sys
import threading
import multiprocessing

from bs4 import BeautifulSoup
from scpy.logger import get_logger

import utils

# Module-level logger named after this file.
logger = get_logger(__file__)

# Python 2 hack: reload() re-exposes setdefaultencoding (removed from the
# sys namespace at startup) so implicit str<->unicode conversions of the
# Chinese text handled below use utf-8 instead of raising UnicodeDecodeError.
reload(sys)
sys.setdefaultencoding("utf-8")

# Matches runs of digits; used to extract the numeric price from listing text.
NUMBER_PATTERN = re.compile(ur'\d+')


def parse_item(item):
    """Parse one listing <div> from a fangjia.fang.com result page.

    Returns a dict with the community url (_id), name, address, price and
    price trend, or None when the item lacks the expected title markup.
    The 'city' and 'district' fields are left blank for the caller to fill.
    Parsing is best-effort: optional fields keep their defaults on failure.
    """
    item_result = {
        '_id': '',       # url
        'city': '',      # city (filled in by the caller)
        'district': '',  # district (filled in by the caller)
        'name': '',      # community name
        'address': '',   # address
        'price': 0,      # price (yuan per square meter)
        'trend': ''      # price trend, e.g. '+1.2%'
    }
    try:
        house_info = item.find('div', attrs={'class': 'house'}).find('dl')
        dt = house_info.find('dt')
        span = dt.find('span', attrs={'class': 'housetitle'})
        item_result['name'] = utils.deal_str(span.getText())
        href = span.find('a')['href']
        # Relative '/process/...' links need the site prefix to be usable as ids.
        if href.startswith('/process/'):
            href = 'http://fangjia.fang.com%s' % href
        item_result['_id'] = href
        try:
            item_result['address'] = dt.find('p', attrs={'class': 'mt8'}).find('span', attrs={'class': 'pl5'})['title']
        except Exception:
            pass  # address is optional; some listings omit it
    except Exception:
        # Without the title block there is nothing usable in this item.
        return None
    try:
        for dd in house_info.find_all('dd', attrs={'class': 'money mt30'}):
            try:
                text = utils.deal_str(dd.getText())
                if u'元/平米' in text:
                    # The first run of digits is the price in yuan/sqm.
                    item_result['price'] = float(NUMBER_PATTERN.findall(text)[0])
                elif u'%' in text:
                    # A 'red-up' css class marks a rising price, else falling.
                    sign = '+' if 'red-up' in dd.find('span')['class'] else '-'
                    item_result['trend'] = '%s%s' % (sign, text)
            except Exception:
                pass  # best-effort: skip malformed price/trend cells
    except Exception:
        pass  # no price section at all — keep the defaults
    return item_result


def find_next_page(soup):
    """Return the absolute url of the next result page, or None.

    The pager wraps the current page number in a <strong> tag; the
    <span class="floatl"> immediately after it links to the next page.
    Any missing/unexpected markup is treated as "no next page".
    """
    try:
        is_next = False
        pager = soup.find('p', attrs={'class': 'pages floatr'})
        for span in pager.find_all('span', attrs={'class': 'floatl'}):
            if is_next:
                return 'http://fangjia.fang.com%s' % span.find('a')['href']
            if span.find('strong'):
                # Current page found; the next span holds the next-page link.
                is_next = True
        return None
    except Exception:
        # No pager (single-page result) or unexpected markup.
        return None


def crawl_one_shangquan(shangquan):
    """Crawl every result page of one shangquan (business district).

    Follows the pager until there is no next page. Each listing is parsed,
    stamped with the shangquan's city/district/shangquan fields and the
    page's batch is saved to the 'fangjia' db/collection. Any exception
    aborts the remaining pages of this shangquan (logged, not re-raised).
    """
    logger.info('start a new shangquan : %s.' % shangquan['_id'])
    url = shangquan['url']
    while url:
        try:
            logger.info('now shangquan : %s, now page : %s.' % (shangquan['_id'], url))
            item_list = []
            # Pages are served as gb2312; decode leniently to drop bad bytes.
            content = utils.get_url_content(url).decode('gb2312', 'ignore')
            soup = BeautifulSoup(content, from_encoding='gb2312')
            div = soup.find('div', attrs={'class': 'list'})
            for item in div.find_all('div', attrs={'class': 'bkyellow'}):
                item_dict = parse_item(item)
                if not item_dict:
                    continue
                item_dict['city'] = shangquan['city']
                item_dict['district'] = shangquan['district']
                item_dict['shangquan'] = shangquan['shangquan']
                item_list.append(item_dict)
            count = utils.save_2_db(item_list, 'fangjia', 'fangjia')
            logger.info('saved %d / %d' % (count, len(item_list)))
            url = find_next_page(soup)
        except Exception as e:  # 'as' form works on py2.6+ and py3
            logger.info('exception at crawl_one_shangquan : %s' % e)
            break


def craw_city(city):
    """Crawl every shangquan record stored in the db for *city*."""
    logger.info('new city : ' + city)
    query = {'city': city}
    for shangquan in utils.select_from_db(query, 'fangjia', 'district'):
        crawl_one_shangquan(shangquan)


if __name__ == '__main__':
    # Cities whose housing-price data is crawled, one after another.
    citys = ['重庆',  '成都',  '芜湖',  '南京',  '宁波',  '泉州',  '兰州',  '武汉',  '镇江',  '扬州',  '唐山',
             '青岛',  '天津',  '昆明',  '洛阳',  '烟台',  '深圳',  '长沙',  '沈阳',  '北京',  '嘉兴',  '潍坊',
             '福州',  '中山',  '西安',  '江门',  '银川',  '惠州',  '济南',  '三亚',  '无锡',  '威海',  '南宁',
             '昆山',  '东莞',  '泰州',  '合肥',  '南昌',  '佛山',  '吉林',  '桂林',  '珠海',  '杭州',  '宜昌',
             '保定',  '南通',  '湖州',  '广州',  '太原',  '大连',  '上海',  '徐州',  '贵阳',  '郑州',  '海南',
             '苏州',  '常州',  '厦门',  '长春',  '哈尔滨',  '石家庄',  '呼和浩特',  '秦皇岛']
    for city_name in citys:
        craw_city(city_name)
    # Parallel alternative (one process per city), kept for reference:
    # processes = []
    # for city in citys:
    #     processes.append(multiprocessing.Process(target=craw_city, args=(city,), name='craw fangjia of %s' % city))
    # for process in processes:
    #     process.start()
    # for process in processes:
    #     process.join()
    logger.info('OVER')
