import requests
from bs4 import BeautifulSoup
import csv

# Browser-style User-Agent header sent with every request so the scrape
# looks like a normal desktop browser visit.
headers = {
    'User-Agent': (
        'Mozilla/5.0 (Windows NT 10.0; WOW64)'
        ' AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36'
    ),
}

# Scrape the first 20 result pages of Beijing second-hand house listings from
# anjuke.com and write one CSV row per listing.
#
# Fixes over the previous version:
#   * the page URL contained a stray space before '/#filtersort';
#   * the output file was re-opened with 'w+' inside the page loop, truncating
#     it on every iteration so only the final page's rows survived — it is now
#     opened once, before the loop;
#   * the csv.writer is created once instead of once per row, and the row list
#     no longer shadows the `requests` response variable (the old `r.clear()`
#     after writerow was dead work);
#   * the 'details-item' div is looked up once per house instead of four times.
#
# NOTE(review): the output filename keeps its original (misspelled) name
# 'house_bejing' so any downstream consumer of that file is unaffected.
with open('house_bejing', 'w', encoding='UTF-8', newline='') as csvfile:
    writer = csv.writer(csvfile)
    for page in range(1, 21):
        link = 'https://beijing.anjuke.com/sale/p' + str(page) + '/#filtersort'
        response = requests.get(link, headers=headers)
        soup = BeautifulSoup(response.text, 'lxml')
        # Each <li class="list-item"> on the results page holds one house.
        for house in soup.find_all('li', class_='list-item'):
            # First (and only relevant) details block for this listing.
            details = house.find('div', class_='details-item')
            row = [
                house.find('div', class_='house-title').a.text.strip(),  # listing name
                house.find('span', class_='price-det').text.strip(),     # total price
                house.find('span', class_='unit-price').text.strip(),    # price per unit area
                details.span.text,                                       # number of rooms
                details.contents[3].text,                                # floor area
                details.contents[5].text,                                # floor
                details.contents[7].text,                                # build year
                house.find('span', class_='brokername').text[1:],        # broker (drop leading char)
                house.find('span', class_='comm-address').text.strip(),  # address
            ]
            writer.writerow(row)





