#!/usr/bin/env python3

import sys
import os
import time
import pymongo

import requests
from bs4 import BeautifulSoup

# Search-result pages 1..3 for short-term rentals ("duanzufang") in Beijing.
urls = ['http://bj.xiaozhu.com/search-duanzufang-p{}-0/'.format(str(page)) for page in range(1, 4)]

# Running record counter used as the listing id.
# NOTE(review): shadows the builtin `id`.
id = 0

def getencoding(response):
    """Return the best-guess character encoding for *response*.

    requests reports ISO-8859-1 when the HTTP headers declare no
    charset; in that case prefer an encoding declared in the page
    content itself, and fall back to the content-based guess.
    """
    if response.encoding != 'ISO-8859-1':
        return response.encoding
    declared = requests.utils.get_encodings_from_content(response.text)
    return declared[0] if declared else response.apparent_encoding


def getsex(flag):
    """Translate a host avatar's CSS class list into a gender label."""
    labels = {'member_ico1': '女', 'member_ico': '男'}
    if isinstance(flag, list) and len(flag) == 1:
        return labels.get(flag[0], '未知')
    return '未知'


def getdetail(url):
    """Fetch one listing detail page and scrape its fields.

    Returns a dict with keys pic/sex/name/address/price/title/photo,
    or an empty dict when the expected page elements are not found.
    """
    # Timeout so one dead host cannot hang the whole crawl.
    webdata = requests.get(url, timeout=10)
    webdata.encoding = getencoding(webdata)

    soup = BeautifulSoup(webdata.text, 'lxml')

    title = soup.select('body > div.wrap.clearfix.con_bg > div.con_l > div.pho_info > h4 > em')
    address = soup.select('body > div.wrap.clearfix.con_bg > div.con_l > div.pho_info > p > span.pr5')
    price = soup.select('div.day_l')
    pic = soup.select('img[id="curBigImage"]')
    photo = soup.select('div.js_box.clearfix > div.member_pic > a > img')
    name = soup.select('div.js_box.clearfix > div.w_240 > h6 > a')
    sex = soup.select('div.js_box.clearfix > div.member_pic > div')

    # zip stops at the shortest selector result, so a page missing any
    # element yields nothing; only the first zipped tuple is needed.
    for t, a, pr, pi, ph, n, s in zip(title, address, price, pic, photo, name, sex):
        return {
                'pic': pi.get('src'),
                'sex': getsex(s.get('class')),
                'name': n.get_text(),
                'address': a.get_text().replace('\n', '').strip(),
                'price': pr.get_text(),
                'title': t.get_text(),
                'photo': ph.get('src'),
                }
    # Original implicitly returned None here (its `data = {}` init was
    # dead), which crashed downstream savedb; return an empty dict so
    # callers can simply test truthiness.
    return {}

def getfang(url):
    """Fetch one search-result page and return the detail-page URLs of its listings."""
    # Timeout so one dead host cannot hang the whole crawl.
    webdata = requests.get(url, timeout=10)
    webdata.encoding = getencoding(webdata)

    soup = BeautifulSoup(webdata.text, 'lxml')

    # Each result card links to its detail page with class "resule_img_a"
    # (sic — the site's own spelling).
    return [a.get('href') for a in soup.select('a[class="resule_img_a"]')]

def savefile(path, content, id):
    """Append one listing record (dict from getdetail) to a text file.

    Format: "# <id>" followed by one tab-indented "key: value" line per
    field, then a blank separator line.
    """
    fields = ('title', 'address', 'price', 'photo', 'name', 'sex', 'pic')
    # Explicit UTF-8: the records contain Chinese text ('女'/'男', titles),
    # which crashes under a non-UTF-8 default locale encoding.
    with open(path, 'a', encoding='utf-8') as f:
        f.write("# %d" % id)
        for key in fields:
            f.write('\t%s: %s\n' % (key, content[key]))
        f.write('\n')


def savedb(content, id):
    """Insert one listing record into MongoDB (db 'xiaozhu', collection 'fang')."""
    print("# %d" % id)
    # NOTE(review): a new client is opened per call — confirm whether a
    # shared module-level client would be preferable.
    collection = pymongo.MongoClient('localhost', 27017)['xiaozhu']['fang']
    collection.insert_one(content)


# Select listings priced at 500 or above
def db_filter():
    """Print every stored listing whose nightly price is at least 500.

    The stored price string is sliced from index 1 before converting to
    int — presumably to drop a one-character currency symbol prefix;
    TODO(review): confirm against the scraped price format.
    """
    collection = pymongo.MongoClient('localhost', 27017)['xiaozhu']['fang']
    for record in collection.find():
        if int(record['price'][1:]) >= 500:
            print(record)


# Crawl each search page, scrape every listing's detail page, store the
# records in MongoDB, then report the expensive listings.  At most 300
# listings are processed.
count = 0
done = False
for url in urls:
    if done:
        break
    for fang in getfang(url):
        count += 1
        if count > 300:
            # The original bare `break` only left the inner loop, so
            # later pages were still fetched after the cap; stop the
            # whole crawl instead.
            done = True
            break
        print("get fang #%d detail.\n" % count)
        detail = getdetail(fang)
        # Skip pages where scraping found nothing rather than passing a
        # falsy record to savedb.
        if detail:
            savedb(detail, count)

db_filter()
