#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2021/1/10 11:13
# @Author  : amos
# @Site    :
# @File    : 365taofang.py
# @Software: PyCharm
import requests
from bs4 import BeautifulSoup
from django.conf import settings
from django.core.management.base import BaseCommand
from search.models import Residential,School,SchoolResidentialRel,Property
from django.db import transaction

class Command(BaseCommand):
    """Crawl school and residential-complex listings from nj.sell.house365.com
    and persist them via the ``search`` app's models.

    Run as a Django management command; ``handle`` kicks off the school crawl,
    which in turn crawls each school's linked residential complexes.
    """
    help = ""

    # Seconds before a request is abandoned, so one stalled response
    # cannot hang the whole crawl. (Original calls had no timeout.)
    REQUEST_TIMEOUT = 30

    def handle(self, *args, **options):
        """Management-command entry point: start the school crawl."""
        self.crawler_school()

    @staticmethod
    def _clean(text):
        """Strip the spaces and newlines that pad scraped text nodes."""
        return text.replace(" ", "").replace("\n", "")

    def crawler_school(self):
        """Scrape the paginated school list, store one ``School`` row per
        listing, then crawl that school's residential complexes.

        NOTE(review): starts at page 7 and, on that page, skips every entry
        except two named schools — this looks like hand-rolled resume logic
        from an interrupted run; confirm before re-running from scratch.
        Pagination stops when a page yields no listing items.
        """
        try:
            print("走一个！")
            page = 7
            while True:
                resp = requests.get(
                    "http://nj.sell.house365.com/school/n1-p{}.html".format(page),
                    timeout=self.REQUEST_TIMEOUT,
                )
                # BUG FIX: original wrote `resp.encoding == 'ISO-8859-1'`
                # (a no-op comparison) instead of an assignment.
                resp.encoding = 'ISO-8859-1'
                soup = BeautifulSoup(resp.content, "lxml")
                items = soup.find_all(class_="listItem clearfix school")
                if not items:
                    # Ran past the last page of results.
                    break

                for item in items:
                    url = item.find(class_="pl10").find('a')['href']
                    residential_num = int(
                        self._clean(item.find(class_="pl10").find('a').text).replace("个", "")
                    )
                    name = self._clean(
                        item.find(class_="listItem__title line1 fl").find('a').text
                    )
                    # Resume filter: on the restart page, skip rows that were
                    # already stored in the previous (interrupted) run.
                    if page == 7 and name not in ["浦口外国语学校高新分校", "马府街小学"]:
                        continue
                    region = self._clean(
                        item.find(class_="listItem__local pr18").find('a').text
                    )
                    # Last span holds "stage|nature" (e.g. grade level and
                    # public/private); absent separator means both unknown.
                    tmp = self._clean(
                        item.find(class_="listItem__content").find_all("span")[-1].text
                    ).split("|")
                    if len(tmp) > 1:
                        stage, nature = tmp[0], tmp[1]
                    else:
                        stage = nature = ""
                    School.objects.create(
                        name=name,
                        region=region,
                        stage=stage,
                        nature=nature,
                        residential_num=residential_num,
                        url=url,
                    )
                    print(url, residential_num, name)
                    self.crawler_residential(url, name)
                page += 1
        except Exception as e:
            print("出幺蛾子了！", e)

    def crawler_residential(self, residential_list_url, school_name):
        """Scrape every page of one school's residential-complex list and,
        atomically per complex, store a ``Residential`` row plus the
        ``SchoolResidentialRel`` link.

        :param residential_list_url: URL of the school's listing page; the
            paginated community URL is derived from its prefix.
        :param school_name: name recorded on each relation row.
        """
        try:
            page = 1
            while True:
                url = residential_list_url.split("community")[0] + \
                    "community_p{}.html".format(page)
                resp = requests.get(url, timeout=self.REQUEST_TIMEOUT)
                # BUG FIX: was `==` (no-op comparison), intended assignment.
                resp.encoding = 'ISO-8859-1'
                soup = BeautifulSoup(resp.content, "lxml")
                items = soup.find_all(class_="mainList_item_child clearfix")
                if not items:
                    break
                for item in items:
                    try:
                        url = item.find(class_="w300 cd1 fl").find('a')['href']
                        name = item.find(class_="mask").text
                        avg_price = item.find(class_="area f36").text
                        # Create the complex and its school link together so a
                        # failure leaves no half-written pair behind.
                        with transaction.atomic():
                            Residential.objects.create(
                                name=name,
                                url=url,
                                avg_price=avg_price,
                            )
                            SchoolResidentialRel.objects.create(
                                school_name=school_name,
                                residential_name=name,
                            )
                    except Exception:
                        # Best-effort per item: one malformed listing must not
                        # abort the page. (Was a bare `except:`, which also
                        # swallowed KeyboardInterrupt/SystemExit.)
                        continue
                page += 1
        except Exception as e:
            print("小区出幺蛾子了！", e)

# if __name__=="__main__":
#     crawler_school()
#     # crawler_house_detail(["http://nj.sell.house365.com/s_302194053.html"])
