import json
import re
import requests
from requests.exceptions import RequestException
from multiprocessing import Pool
import random,time
from fake_useragent import UserAgent
# Module-level user-agent rotator; ua.random is read per request in
# get_one_page to vary the User-Agent header between fetches.
ua = UserAgent()


def get_one_page(url):
    """Fetch *url* and return the response body as text, or None on failure.

    Sends a randomized browser User-Agent and a site Referer to reduce the
    chance of the request being rejected by the server.

    Parameters
    ----------
    url : str
        Full listing-page URL to fetch.

    Returns
    -------
    str or None
        Response text on HTTP 200; None on a non-200 status or any
        requests-level error.
    """
    headers = {
        "Content-Type": "application/x-www-form-urlencoded",
        "User-Agent": ua.random,
        # Fixed typo: original said "http://sh,fang.ke.com/" (comma), which
        # is not a valid host; the scraped site (see main) is sh.fang.ke.com.
        "Referer": "http://sh.fang.ke.com/",
    }
    try:
        # Timeout keeps the scraper from hanging forever on a stalled server.
        response = requests.get(url, headers=headers, timeout=10)
        if response.status_code == 200:
            return response.text
        # Original returned response.text on both branches, making the status
        # check dead code; a non-200 page is a failure, mirroring the except path.
        return None
    except RequestException:
        return None


def parse_one_page(html, offset):
    """Parse one listing page's HTML and yield one dict per property entry.

    Parameters
    ----------
    html : str
        Raw HTML of listing page number *offset*.
    offset : int
        Page number; recorded as ``'pg<offset>'`` in each yielded dict.

    Yields
    ------
    dict
        Keys: page, name, image, status, type, address, link, area,
        label, price. 'area' is '暂无' when the room cell has no area span.
    """
    # One DOTALL regex capturing 12 groups per <li> listing entry.
    item_pattern = re.compile('<li class="resblock-list post_ulog_exposure_scroll has-results".*?>.*?title="('
                              + '.*?)".*?data-original="(.*?)".*?resblock-type".*?">(.*?)</span>.*?<span style="background.*?">(.*?)</span>.*?</div>.*?<i class="icon location-icon"></i>('
                              + '.*?)</a>.*?<a class="resblock-room".*?href="(.*?)".*?>(.*?)</a>.*?<div class="resblock-tag">.*?<span>(.*?)</span>.*?<span>(.*?)</span>.*?<span>('
                              + '.*?)</span>.*?<span>(.*?)</span>.*?</div>.*?number">(.*?)</span>.*?</li>', re.S)
    # Compiled once here — the original recompiled this inside the loop for
    # every matching item (and shadowed the outer `pattern` local doing so).
    area_pattern = re.compile('area">(.*?)</span>', re.S)

    for item in item_pattern.findall(html):
        # The room cell (group 7) only sometimes carries an area span;
        # fall back to the '暂无' ("not available") placeholder otherwise.
        if 'area' in item[6]:
            area = area_pattern.findall(item[6])[0]
        else:
            area = '暂无'
        yield {
            'page': 'pg' + str(offset),
            'name': item[0],
            'image': item[1],
            'status': item[2],
            'type': item[3],
            'address': item[4].strip(),
            'link': item[5],
            'area': area,
            'label': ','.join(item[7:11]),
            'price': item[11],
        }


def write_to_file(content):
    """Append *content* as one JSON line to the output file.

    Parameters
    ----------
    content : dict
        A parsed listing record (as yielded by parse_one_page).

    ensure_ascii=False keeps the Chinese text human-readable in the file.
    The with-statement closes the file automatically; the original's
    explicit f.close() inside the block was redundant and is removed.
    """
    with open("sh_fang_1-33.txt", 'a', encoding='utf-8') as f:
        f.write(json.dumps(content, ensure_ascii=False) + '\n')


def main(offset):
    """Scrape listing page *offset* and append every parsed item to the output file.

    Parameters
    ----------
    offset : int
        Page number appended to the listing URL as 'pg<offset>'.
    """
    url = 'https://sh.fang.ke.com/loupan/pg' + str(offset) + '/'
    # Alternate endpoint seen in the original (disabled):
    # url = 'https://sh.fang.ke.com/loupan/nht1pg' + str(offset) + '/'
    print(url)
    html = get_one_page(url)
    # get_one_page returns None on a failed request; the original passed None
    # straight into the regex parser, which raised TypeError. Skip the page.
    if html is None:
        return
    for item in parse_one_page(html, offset):
        print(item)
        write_to_file(item)


if __name__ == '__main__':
    # Scrape pages 34..99 sequentially, pausing a random 5-30 seconds
    # between pages to avoid hammering the server.
    page = 34
    while page < 100:
        main(page)
        time.sleep(random.randint(5, 30))
        page += 1
    # Parallel alternative kept for reference (disabled):
    # pool = Pool(4)
    # pool.map(main, [i for i in range(1, 2)])
