import requests
from bs4 import BeautifulSoup
import pandas as pd
import time
import os
from view import areaPieView, areaView, doubleView, houseBarView, housePlotView, likeView
from sql import save_model


def getUrls(pages=5):
	"""Build the listing-page URLs to scrape.

	Args:
		pages: number of result pages to generate (default 5, matching
			the original hard-coded limit).

	Returns:
		list[str]: one URL per page, ".../ershoufang/fengzequ1/pgN".
	"""
	baseUrl = "https://quanzhou.lianjia.com/ershoufang/fengzequ1/"
	# Lianjia paginates with a "pgN" path suffix, 1-based
	return ["{}pg{}".format(baseUrl, i) for i in range(1, pages + 1)]


# Desktop-browser User-Agent so the site serves the normal HTML page
# instead of rejecting the default python-requests UA.
headers = {
	"User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36"
}


def getHtml(url):
	"""Fetch *url* and return the response body as text.

	Returns an empty string on any network or HTTP error, so callers can
	treat a failed page as "no data" (parse_page handles empty input).
	"""
	try:
		# timeout keeps a stalled connection from hanging the whole scrape
		r = requests.get(url, headers=headers, timeout=10)
		r.raise_for_status()
		return r.text
	except requests.RequestException:
		# narrow except: only expected network/HTTP failures are swallowed
		# (the original bare `except:` hid even KeyboardInterrupt)
		return ""


def parse_page(html):
	"""Parse one listing page of HTML into rows of house data.

	Args:
		html: raw page HTML; "" means the fetch failed.

	Returns:
		list: one 14-element row per listing —
		[title, area name, area position, layout, size (sqm), direction,
		 decoration, floor, structure, followers, listed time,
		 total price, unit price, tag].
		Empty list when *html* is empty (the original returned None here,
		which forced callers to special-case the failure).
	"""
	if html == '':
		return []
	soup = BeautifulSoup(html, features="html.parser")
	data = []
	# NOTE(review): the .contents indices below depend on the site's exact
	# markup; if the page layout changes, these lookups will break.
	for item in soup.select("div .info"):
		title = item.contents[0].text
		areaName = item.contents[1].contents[0].contents[1].text
		areaPosition = item.contents[1].contents[0].contents[3].text
		# layout | size (sqm) | direction | decoration | floor | structure
		msgMix = str(item.contents[2].text).split("|")
		if len(msgMix) != 6:
			# skip only this malformed listing; the original `break`
			# silently dropped every listing after it on the page
			continue
		# followers / listing time
		msgMix2 = str(item.contents[3].text).split("/")
		tag = item.contents[4].text
		totalPrice = item.contents[5].contents[0].contents[1].text
		price = item.contents[5].contents[1].contents[0].text.replace("元/平", "").replace(',', "")
		data.append([title, areaName, areaPosition, msgMix[0], msgMix[1].replace("平米", ""),
		             msgMix[2], msgMix[3], msgMix[4],
		             msgMix[5], msgMix2[0].replace("人关注", ""), msgMix2[1],
		             totalPrice, price, tag])
	return data


def getData():
	"""Scrape every listing page and append the rows to data.csv.

	Removes any stale data.csv first, then fetches and parses each page,
	appending its rows headerless, with a polite delay between requests.
	"""
	if os.path.exists('data.csv'):
		os.remove('data.csv')
	# enumerate replaces the range(len(...)) index anti-pattern
	for pageNo, url in enumerate(getUrls(), start=1):
		html = getHtml(url)
		data = parse_page(html)
		if data:
			# utf-8 with BOM so Excel renders the Chinese text correctly;
			# header=False because draw() reassigns column names on read
			df = pd.DataFrame(data)
			df.to_csv("data.csv", mode="a", index=False, header=False, encoding="utf-8_sig")
		print("第{}页写入完毕！".format(pageNo))
		time.sleep(2)  # be polite to the server between requests


def draw():
	"""Visualize the scraped data and prepare it for the database.

	Reads data.csv (written headerless by getData), cleans it, renders
	the chart views, then relabels the columns to the MySQL schema.
	Does nothing when data.csv does not exist.
	"""
	if os.path.exists('data.csv'):
		# Chinese display names: title, name, area, layout, size,
		# direction, decoration, floor, structure, followers, time,
		# total price, unit price, tag
		columns = ["标题", "名称", "区域", "户型", "面积", "朝向", "装修", "楼层", "结构", "关注", "时间", "总价",
		"单价", "标签"]

		# data.csv has no header row (getData writes header=False), so read
		# it with header=None: the pandas default header=0 would consume the
		# first listing as column names and silently lose one row of data.
		df = pd.read_csv("data.csv", header=None, names=columns)
		df = df.dropna()
		df = df.drop_duplicates()
		areaView(df)
		areaPieView(df)
		housePlotView(df)
		houseBarView(df)
		doubleView(df)
		likeView(df)
		# English column names matching the database table schema
		table_columns = ['title', 'name', 'area', 'modal', 'extent', 'direct', 'decoration', 'floor', 'structure',
		                 'like', 'time', 'total_price', 'price', 'tag', ]
		df.columns = table_columns
		# write to MySQL (currently disabled)
		# save_model(df)


if __name__ == "__main__":
	# getData()  # uncomment to re-scrape before drawing
	draw()
