import requests
from bs4 import BeautifulSoup
import pandas as pd

def scrape_property_data(district_url, listing_selector='price_fix',
                         name_selector='.nlcd_name',
                         price_selector='.nhouse_price',
                         timeout=10):
    """Scrape new-home listings (name and price) from a fang.com district page.

    Args:
        district_url: URL of the district listing page.
        listing_selector: CSS selector for each listing container.
            NOTE(review): the default 'price_fix' matches a <price_fix> *tag*,
            not a class — the real container is likely a class selector such as
            '.nlc_details'; verify against the live page markup.
        name_selector: CSS selector for the listing name inside a container.
        price_selector: CSS selector for the price inside a container.
        timeout: seconds to wait before aborting the HTTP request.

    Returns:
        A list of ``{'name': ..., 'price': ...}`` dicts.  Listings missing
        either field are skipped instead of raising AttributeError.

    Raises:
        requests.HTTPError: if the server responds with a 4xx/5xx status.
        requests.Timeout: if the request exceeds ``timeout`` seconds.
    """
    response = requests.get(district_url, timeout=timeout)
    response.raise_for_status()
    # fang.com pages are served in GBK; requests frequently guesses
    # ISO-8859-1 from the headers, which garbles the Chinese text.
    # Trust the content-sniffed encoding instead.
    response.encoding = response.apparent_encoding
    soup = BeautifulSoup(response.text, 'html.parser')

    properties = []
    for listing in soup.select(listing_selector):
        name = listing.select_one(name_selector)
        price = listing.select_one(price_selector)
        if name is None or price is None:
            # Incomplete listing block — skip rather than crash on .text.
            continue
        properties.append({'name': name.text.strip(),
                           'price': price.text.strip()})

    return properties

# Beijing district name -> fang.com new-home listing URL.
# Replace with your own district names / URLs as needed.
districts = {
    '丰台': 'https://newhouse.fang.com/house/s/fengtai/',
    '西城': 'https://newhouse.fang.com/house/s/xicheng/',
    '东城': 'https://newhouse.fang.com/house/s/dongcheng/',
    '昌平': 'https://newhouse.fang.com/house/s/changping/',
    '大兴': 'https://newhouse.fang.com/house/s/daxing/',
    '通州': 'https://newhouse.fang.com/house/s/tongzhou/',
    '房山': 'https://newhouse.fang.com/house/s/fangshan/',
    '顺义': 'https://newhouse.fang.com/house/s/shunyi/',
    '石景山': 'https://newhouse.fang.com/house/s/shijingshan/',
    '密云': 'https://newhouse.fang.com/house/s/miyun/',
    '门头沟': 'https://newhouse.fang.com/house/s/mentougou/',
    '怀柔': 'https://newhouse.fang.com/house/s/huairou/',
    '延庆': 'https://newhouse.fang.com/house/s/yanqing/',
    '平谷': 'https://newhouse.fang.com/house/s/pinggu/',
    '朝阳': 'https://newhouse.fang.com/house/s/chaoyang/',
    '海淀': 'https://newhouse.fang.com/house/s/haidian/',
}

import os

# Scrape each district and persist its listings to <district>.csv in the
# current working directory.
for district, url in districts.items():
    data = scrape_property_data(url)
    if not data:
        # Likely a wrong CSS selector or a blocked request — flag it so an
        # empty CSV is not mistaken for "no listings in this district".
        print(f"Warning: no listings scraped for {district}; writing empty CSV.")
    df = pd.DataFrame(data)
    # utf-8-sig adds a BOM so Excel opens the Chinese text correctly.
    df.to_csv(f'{district}.csv', index=False, encoding='utf-8-sig')

print("数据抓取完成，并已保存为CSV文件。")
# Show where the CSV files landed (replaces the bare debug print of cwd).
print(f"Output directory: {os.getcwd()}")


