import os
from multiprocessing import Pool
from fake_useragent import UserAgent
import requests
from bs4 import BeautifulSoup
import csv
import time

def getHTML(url, timeout=10):
    """Download *url* and return the parsed page.

    Sends a random User-Agent plus a fixed Referer to reduce the chance of
    the request being blocked by the site.

    Args:
        url: Page URL to fetch.
        timeout: Seconds to wait for the server (new parameter, default 10,
            so a stalled connection no longer hangs a worker forever).

    Returns:
        BeautifulSoup: the parsed HTML document.
    """
    ua = UserAgent().random
    header = {
        'Referer': 'https://www.tudinet.com/',
        'User-Agent': ua,
    }
    # NOTE(review): verify=False disables SSL certificate checks — confirm
    # this is intentional for this site.
    response = requests.get(url, headers=header, verify=False, timeout=timeout)
    # Explicit parser avoids bs4's GuessedAtParserWarning and keeps results
    # independent of which optional parsers happen to be installed.
    return BeautifulSoup(response.text, 'html.parser')
def out_csv(data):
    """Append one row to data1.csv.

    Args:
        data: Iterable of cell values forming a single CSV row.
    """
    # Context manager guarantees the handle is closed (and the row flushed)
    # even if the write raises; the original leaked the file object.
    with open('data1.csv', 'a', encoding='utf-8', newline='') as f:
        csv.writer(f).writerow(data)
def get_second_data(url):
    """Scrape the listing detail page at *url* and return its key fields.

    Returns:
        list[str]: [address, land use, company, deal time, deal price],
        each taken from the text after the full-width colon in the
        corresponding <li> entry of the two 'row hh-sort-text' lists.
    """
    html = getHTML(url)
    sections = html.find_all('ul', class_='row hh-sort-text')
    # Traverse each <ul> once instead of re-running find_all per field.
    basic_items = sections[0].find_all('li')
    deal_items = sections[1].find_all('li')

    def field(items, idx):
        # Entries look like "label：value"; keep the part after the colon.
        return items[idx].text.split("：")[1]

    return [
        field(basic_items, 0),  # address
        field(basic_items, 1),  # land use
        field(deal_items, 1),   # company
        field(deal_items, 4),   # deal time
        field(deal_items, 7),   # deal price
    ]
def get_data(data):
    """Extract every listing on a parsed result page and append it to data1.csv.

    Args:
        data: BeautifulSoup document of a list page; listings live in the
            <dl> elements under div.land-l-cont.
    """
    for entry in data.find("div", class_='land-l-cont').find_all('dl'):
        title = entry.find('a').text.strip()
        start_time = entry.find_all('p')[0].text.split("：")[1]
        area = entry.find_all('p')[2].text.split("：")[1]
        url = entry.a['href']
        row = [title, start_time, area]
        # Throttle detail-page requests to stay polite to the server.
        time.sleep(5)
        try:
            row += get_second_data(url)
        except Exception as exc:
            # Best effort: a malformed detail page must not abort the whole
            # listing — but the original bare `except: pass` also swallowed
            # KeyboardInterrupt/SystemExit and hid every failure. Log and
            # continue with the partial row instead.
            print("detail page failed:", url, exc)
        out_csv(row)

def run(fn):
    """Worker entry point: scrape result page *fn*+1 and persist its listings.

    Args:
        fn: Zero-based page index from the dispatcher loop.
    """
    print("子进程：", os.getpid())
    page_url = "https://www.tudinet.com/market-65-0-2-0/list-pg{}.html".format(fn + 1)
    page = getHTML(page_url)
    print(page_url)
    get_data(page)


if __name__ == "__main__":
    print("父进程：", os.getpid())
    # Pool of four worker processes; each task scrapes one result page.
    pool = Pool(4)
    for page_index in range(1, 31):
        pool.apply_async(run, args=(page_index,))
    pool.close()  # stop accepting new tasks
    pool.join()   # block until every worker has finished
