# coding=utf-8
import requests
import xlrd
import xlwt

from bs4 import BeautifulSoup
from lxml import etree


def test_data():
    """Fetch the first page of second-hand listings from fangdd and save
    each listing id into column 0 of 'fangddID.xls'.

    Side effects: one HTTP POST to the fangdd list API and a file write
    of 'fangddID.xls' in the current working directory.
    """
    workbook = xlwt.Workbook()
    sheet1 = workbook.add_sheet('id')
    url = "https://shanghai.fangdd.com/web-api/data/fetchEsfList"
    body = {"cityId": 121, "condition": "p1_i2000"}
    # Timeout so a stalled server cannot hang the script forever;
    # raise_for_status gives a clear error instead of a cryptic JSON failure.
    r1 = requests.post(url, json=body, timeout=30)
    r1.raise_for_status()
    result = r1.json()['data']['list']
    # One row per listing; enumerate replaces the manual `m` counter.
    for row, item in enumerate(result):
        sheet1.write(row, 0, item['id'])
    workbook.save('fangddID.xls')


def loupan_Info():
    """Read listing ids from 'fangddID.xls', fetch each community's detail
    page, and write name/developer/property-company/district/households/
    build-time into 'fangdd.xls' (one row per id).

    Side effects: one HTTP GET per id; writes 'fangdd.xls' in the current
    working directory. Requires 'fangddID.xls' to exist (see test_data).
    """
    workbook_write = xlwt.Workbook()
    workbook = xlrd.open_workbook(r"fangddID.xls", formatting_info=False)
    sheet = workbook.sheet_by_index(0)
    # Column 0 holds the listing ids; xlrd reads numbers as floats, so cast.
    ids = [int(sheet.row_values(row)[0]) for row in range(sheet.nrows)]
    print(ids)
    sheet1 = workbook_write.add_sheet('loupanInfo')
    remaining = 2000  # countdown printed below as a crude progress indicator
    for row, loupan_id in enumerate(ids):
        loupan_url = "https://shanghai.fangdd.com/esf/n-%s.html?SSR_JSON=true" % (loupan_id)
        # Timeout prevents an indefinite hang; raise_for_status fails loudly
        # on HTTP errors instead of crashing later inside .json().
        r2 = requests.get(loupan_url, timeout=30)
        r2.raise_for_status()
        end_info = r2.json()['esfDetail']['xqDetail']
        sheet1.write(row, 0, end_info['name'])
        sheet1.write(row, 1, end_info['developer'])
        sheet1.write(row, 2, end_info['propertyCompany'])
        sheet1.write(row, 3, end_info['districtName'])
        sheet1.write(row, 4, end_info['totalHouseholds'])
        sheet1.write(row, 5, end_info['buildTime'])
        remaining -= 1
        print(remaining)
    workbook_write.save('fangdd.xls')


# Entry point: only runs the detail-fetch step. Note that loupan_Info()
# reads 'fangddID.xls', which is produced by test_data() — run that first
# if the file does not exist yet.
if __name__ == '__main__':
    loupan_Info()
