import requests
from fake_useragent import UserAgent
from bs4 import BeautifulSoup
import csv
import time
import random


class LianjiaSpider(object):
    """Scrape second-hand housing listings in Changsha from lianjia.com
    and append the extracted rows to a local CSV file."""

    def __init__(self) -> None:
        # Page-number template; run() fills in the page index.
        self.url = 'https://cs.lianjia.com/ershoufang/pg{}/'
        # Attempt counter for get_html(); reset to 1 after each page.
        self.blog = 1
        # Request headers; run() injects a random User-Agent per page.
        self.header = {}

    def get_html(self, url, header):
        """Fetch *url* with *header*, retrying up to 3 attempts total.

        Returns the requests.Response on success, or None when every
        attempt fails.
        """
        if self.blog <= 3:
            try:
                # BUG FIX: the original called url.format(1) on an already
                # formatted URL (a no-op with wrong intent) and had no
                # timeout, so a stalled connection hung forever.
                return requests.get(url=url, headers=header, timeout=10)
            except requests.RequestException:
                self.blog += 1
                # BUG FIX: the original retry dropped the required `header`
                # argument (TypeError on any retry) and discarded the
                # recursive result, so even a successful retry returned None.
                return self.get_html(url, header)
        return None

    def parse_html(self, url, i):
        """Download page *url*, extract listing rows, append them to the CSV.

        i -- 1-based page index (kept for interface compatibility; the
             output file name does not depend on it).
        """
        html = self.get_html(url, self.header)
        if html is None:
            # BUG FIX: all retries failed — skip this page instead of
            # crashing on html.content (AttributeError on None).
            return

        soup = BeautifulSoup(html.content, 'html.parser')
        li_list = soup('li', class_='clear LOGVIEWDATA LOGCLICKDATA')

        rows = []
        for li in li_list:
            name = li('a', attrs={'data-el': 'region'})[0].text
            model = li('div', class_='houseInfo')[0].text
            total = li('div', class_='totalPrice')[0]('span')[0].text
            price = li('div', class_='unitPrice')[0]('span')[0].text

            # NOTE(review): area/direction/perfect/floor/address columns are
            # not parsed yet, so `name` fills those slots as a placeholder —
            # this matches the original output layout; confirm intent.
            rows.append((name, model, name, name, name,
                         name, name, total, price))

        # BUG FIX: the original called .format(i) on a path with no
        # placeholder — a misleading no-op that suggested per-page files.
        with open('../.cache-file/长沙二手房出售.csv', 'a', encoding='utf8', newline='') as f:
            csv.writer(f).writerows(rows)

    def run(self):
        """Crawl pages 1-10, pausing 1-3 seconds between requests."""
        for i in range(1, 11):
            # Fresh random User-Agent per page to reduce blocking risk.
            self.header = {'User-Agent': UserAgent().random}

            self.parse_html(self.url.format(i), i)

            time.sleep(random.randint(1, 3))

            # Reset the retry counter for the next page.
            self.blog = 1


if __name__ == '__main__':
    # Entry point: crawl all 10 listing pages.
    # Removed dead commented-out CSV-header code (delete, don't comment out).
    LianjiaSpider().run()
