#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2021/1/10 11:13
# @Author  : amos
# @Site    :
# @File    : 365taofang.py
# @Software: PyCharm
import requests
from bs4 import BeautifulSoup
from django.conf import settings
from django.core.management.base import BaseCommand
from search.models import Residential,School,SchoolResidentialRel,Property
from django.db import transaction
import multiprocessing
import json

class Command(BaseCommand):
    """Crawl 365taofang residential detail pages and backfill missing fields
    (households, park_num, complete_date, property info, description) on
    ``Residential`` rows whose ``park_num`` is still NULL.
    """
    help = ""

    def handle(self, *args, **options):
        """Django entry point: run the crawl once."""
        self.crawler_school()

    @staticmethod
    def _clean(text):
        """Strip spaces and newlines from a scraped text fragment."""
        return text.replace(" ", "").replace("\n", "")

    @staticmethod
    def _extract(getter, default):
        """Run *getter* (a zero-arg callable that navigates the parse tree)
        and return its result; fall back to *default* when the expected
        node is missing or its text cannot be converted.

        Replaces the original's repeated bare ``except:`` blocks with the
        narrow exceptions scraping actually raises: ``AttributeError``
        (``find`` returned ``None``), ``IndexError`` (missing cell),
        ``ValueError``/``TypeError`` (bad numeric text).
        """
        try:
            return getter()
        except (AttributeError, IndexError, ValueError, TypeError):
            return default

    def crawler_school(self):
        """Fetch each pending Residential's detail page, parse the info
        table, and persist the scraped attributes.  A failure on one row
        is logged and skipped so the crawl keeps going.
        """
        residential_s = Residential.objects.filter(park_num__isnull=True)
        count = 0
        for obj in residential_s:
            try:
                resp = requests.get(obj.url, timeout=10)
                # NOTE: the original had `resp.encoding == 'ISO-8859-1'`, a
                # no-op comparison.  Parsing uses the raw resp.content bytes,
                # so lxml sniffs the encoding itself; no assignment is needed.
                soup_info = BeautifulSoup(resp.content, "lxml")
                infos = soup_info.find(class_="villdetail-tab").find_all("tr")
                clean, extract = self._clean, self._extract

                # e.g. "1234户" -> 1234; default 0 when absent/unparsable
                obj.households = extract(
                    lambda: int(clean(infos[1].find_all(class_="vill-short")[0].text).replace("户", "")), 0)
                # e.g. "200个" -> 200
                obj.park_num = extract(
                    lambda: int(clean(infos[1].find_all(class_="vill-short")[1].text).replace("个", "")), 0)
                obj.complete_date = extract(
                    lambda: clean(infos[2].find_all(class_="vill-long")[0].text), "")
                obj.property_name = extract(
                    lambda: clean(infos[3].find_all(class_="vill-long")[0].text), "")
                # fee text looks like "0.8元/平米·月"; keep the number before "元"
                obj.property_fee = extract(
                    lambda: float(clean(infos[4].find_all(class_="vill-long")[0].text).split("元")[0]), 0.5)
                obj.developers = extract(
                    lambda: clean(infos[5].find_all(class_="vill-long")[0].text), "")
                obj.desc = extract(
                    lambda: clean(soup_info.find(class_="detail__mainCotetn__intro").text), "")

                obj.save()
                count += 1
                print(count)
            except Exception as e:
                # BUG FIX: the original put `continue` BEFORE this print,
                # making the error report unreachable — log first, then skip.
                print("小区出幺蛾子了！", e)
                continue

# if __name__=="__main__":
#     crawler_school()
#     # crawler_house_detail(["http://nj.sell.house365.com/s_302194053.html"])
