import json
import re
import time

import requests
from requests.exceptions import RequestException
from multiprocessing import Pool
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
# Randomized desktop User-Agent generator; a fresh UA per request helps
# avoid naive bot-blocking on the target site.
ua = UserAgent()
# Timestamp of this run (YYYYMMDDhhmm); used to name the output file so
# each scrape session writes to its own file.
filename = time.strftime('%Y%m%d%H%M', time.localtime(time.time()))
def get_one_page(url):
    """Fetch ``url`` and return its HTML body, or ``None`` on failure.

    Sends a GET with a randomized User-Agent and a Referer header to look
    like a normal browser visit.

    Parameters:
        url: absolute URL of the listing page to download.

    Returns:
        The response body as text when the request succeeds with HTTP 200;
        ``None`` on a network error or any non-200 status.  (The original
        code returned the body regardless of status, which made the
        status-code check dead code.)
    """
    headers = {"Content-Type": "application/x-www-form-urlencoded",
               "User-Agent": ua.random,
               "Referer": "https://wf.fang.anjuke.com/"}
    try:
        # timeout prevents one stalled connection from hanging the worker
        response = requests.get(url, headers=headers, timeout=10)
        print(response.status_code)
        if response.status_code == 200:
            return response.text
        return None
    except RequestException:
        return None


def parse_one_page(html, offset):
    """Yield one dict per property listing found in an Anjuke result page.

    Parameters:
        html: raw HTML of a listing page (as returned by ``get_one_page``).
        offset: page number; recorded in each yielded item under ``'page'``.

    Yields:
        dict with keys ``url``, ``page``, ``name``, ``address``, ``huxing``
        (floor-plan text with whitespace stripped) and ``price``.
        Yields nothing if the expected listing container is absent
        (layout change or an anti-bot placeholder page).
    """
    soup = BeautifulSoup(html, 'lxml')
    listall = soup.find(class_="key-list imglazyload")
    # find() returns None when the container is missing; the original code
    # then crashed with AttributeError on .find_all().  Bail out instead.
    if listall is None:
        return
    for i in listall.find_all(class_="item-mod"):
        yield {
                 'url' : i.find('a')['href'],
                 'page': 'pg'+ str(offset),
                 # \xa0 is a non-breaking space in the address text
                 'name': i.find(class_="items-name").text,
                 'address': i.find(class_="list-map").text.replace('\xa0',' '),
                 'huxing': i.find(class_="huxing").text.replace('\t','').replace('\n','').replace(' ',''),
                 'price': i.find(class_="favor-pos").text.replace('\n','').replace(' ',''),
        }


def write_to_file(content):
    """Append ``content`` as one JSON line to this run's output file.

    Parameters:
        content: JSON-serializable object (one scraped listing dict).

    Side effects:
        Appends to ``anjvke<timestamp>.txt`` in the working directory.
        ``ensure_ascii=False`` keeps Chinese text human-readable.
    """
    with open("anjvke"+filename+".txt", 'a', encoding='utf-8') as f:
        f.write(json.dumps(content, ensure_ascii=False) + '\n')
        # NOTE: the explicit f.close() was removed — the with-statement
        # already closes the file on exit.


def main(offset):
    """Scrape one listing page: fetch, parse, then print and persist each item.

    Parameters:
        offset: 1-based page number to scrape.
    """
    url = 'https://wf.fang.anjuke.com/loupan/all/p' + str(offset) + '/'
    print(url)
    html = get_one_page(url)
    # get_one_page returns None on a failed request; the original code
    # passed that None to BeautifulSoup and crashed.  Skip the page instead.
    if html is None:
        return
    for item in parse_one_page(html, offset):
        print(item)
        write_to_file(item)


if __name__ == '__main__':
    # Pool(1) keeps the scrape sequential (gentle on the server) while
    # reusing the Pool API; raise the worker count to parallelize.
    # The context manager terminates and joins the workers on exit —
    # the original leaked the pool (never close()d or join()ed).
    with Pool(1) as pool:
        pool.map(main, range(2, 4))
