'''
@Author: ysz
@Date: 2019-12-26 10:13:48
@LastEditors  : ysz
@LastEditTime : 2020-01-14 09:21:24
@Description: 爬取租房信息
'''
import json
import requests
import re
import random
import time
import datetime
from lxml import etree
import os
import os.path

from mysqlhelp import MysqlHelp
from get_user_agent import User_Agent_Help
from common_object import Baixing

# Shared module-level helpers:
#   myhelp       — MySQL access wrapper (query_data / execute_sql) used by the scraper.
#   my_useragent — rotating User-Agent provider (currently unused in this chunk;
#                  presumably consumed by the Baixing base class — TODO confirm).
myhelp=MysqlHelp()
my_useragent=User_Agent_Help()

class Rent_House(Baixing):
    """Scraper for rental-housing detail pages on shenyang.baixing.com.

    Page iteration (``all_page_list``) is inherited from ``Baixing``; this
    class parses a single listing detail page and inserts the record into
    MySQL via the module-level ``myhelp`` helper.
    """

    # Regex template for the labelled meta fields on the detail page.
    # Each field looks like:  <div class='viewad-meta2-item '>...字段名：...<tag ...>value</tag>
    # (the pattern text is identical to the hand-written patterns it replaces)
    _META_TPL = "class='viewad-meta2-item '.*?{label}：.*?<{tag} .*?>(.*?)</{tag}"

    # Compiled once: extracts image URLs out of inline "background: url(...)" styles.
    _IMG_URL_PAT = re.compile(r'url\((.*?)\)')

    def _meta(self, page_text, label, tag='label', default='未知', strip=True):
        """Extract one labelled meta field from the raw detail-page HTML.

        :param page_text: raw HTML text of the detail page
        :param label: Chinese field label, e.g. ``'付款方式'``
        :param tag: HTML tag wrapping the value (``'label'`` or ``'a'``)
        :param default: value returned when the field is absent
        :param strip: strip surrounding whitespace from the match
        :return: the field value, or ``default`` when not found
        """
        pattern = self._META_TPL.format(label=label, tag=tag)
        found = re.findall(pattern, page_text, re.S)  # single scan (original scanned twice)
        if not found:
            return default
        return found[0].strip() if strip else found[0]

    # Fetch one detail page, parse it, and store the listing in MySQL.
    def get_detail_page(self, url):
        """Scrape a single rental listing and insert it into db_houserent_sh.

        Returns early (``None``) when the listing has been taken down or no
        contact phone number is displayed.

        :param url: absolute URL of the listing detail page
        """
        print('-----'*15)
        headers={
                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
                'Accept-Encoding': 'gzip, deflate, br',
                'Accept-Language': 'zh-CN,zh;q=0.9',
                'Cache-Control': 'max-age=0',
                # 'Connection': 'keep-alive',
                'Cookie': '__trackId=157708634659878; _ga=GA1.2.1771081173.1577086347; _gid=GA1.2.2007515902.1577086347; __admx_track_id=vfJpS1IJYFYMR3fbVh_ZGg; __admx_track_id.sig=Czr8QkwjeddFn8A7chX1oS7OPlQ; __city=shenyang; __s=3npmfnphm23e80a9r9j2ie3bf7; Hm_lvt_5a727f1b4acc5725516637e03b07d3d2=1577173801,1577235357,1577260336,1577326375; __sense_session_pv=4; _gat=1; Hm_lpvt_5a727f1b4acc5725516637e03b07d3d2=1577329005',
                'Host': 'shenyang.baixing.com',
                'Referer': 'https://shenyang.baixing.com/zhengzu/?page=2',
                'Sec-Fetch-Mode': 'navigate',
                'Sec-Fetch-Site': 'same-origin',
                'Sec-Fetch-User': '?1',
                'Upgrade-Insecure-Requests': '1',
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Safari/537.36',
            }
        response=requests.get(url=url,headers=headers)
        html_ele=etree.HTML(response.text)
        # Listing already closed ("this one is done, browse others") -> nothing to store.
        if '这条信息已经搞定了！刷刷其他信息，可以找到更好的！' in html_ele.xpath('//h4/text()'):
            return

        # Listing id embedded in the URL, e.g. ".../a12345.html" -> "12345"
        about_id=url.split('?')[0].split('/')[-1].split('.')[0].replace('a','')

        # Source URL of the listing
        about_url=url

        # Title (first <h1> inside the viewad-title div)
        title=html_ele.xpath('//div[@class="viewad-title"]/h1/text()')[0].strip()
        print(title)

        page_text=response.text

        # --- labelled meta fields (regex over the raw HTML) ---
        rent_way=self._meta(page_text,'租房类型',tag='a')           # 租房类型
        # Monthly price, "元" suffix removed
        money=html_ele.xpath('//div[@class="viewad-actions"]/span[1]/text()')[0].strip().replace('元','')
        payment_way=self._meta(page_text,'付款方式')                 # 付款方式
        print(payment_way,'付款方式')
        direction=self._meta(page_text,'房间朝向',strip=False)       # 房屋朝向

        # --- image URLs: three observed page layouts ---
        style_attrs=html_ele.xpath('//div[@class="featured-height"]/div/a/@style')
        if style_attrs:
            # URLs embedded in inline style attributes
            imglist=self._IMG_URL_PAT.findall(str(style_attrs))
        elif html_ele.xpath('//div[@class="full-height"]/div/img/@src'):
            # plain <img src=...>
            imglist=html_ele.xpath('//div[@class="full-height"]/div/img/@src')
        else:
            # fallback: full-height layout with style-embedded URLs
            imglist=self._IMG_URL_PAT.findall(str(html_ele.xpath('//div[@class="full-height"]/div/a/@style')))
        # Stored as a JSON array string (json.dumps never returns an empty
        # string, so the original dead "if not imgs" branch was removed).
        imgs=json.dumps(imglist)
        print(imgs)

        # Poster name (falls back to "未知" when absent)
        names=html_ele.xpath('//div[@class="poster-detail"]/a/text()')
        person_name=names[0] if names else '未知'

        # Contact phone; listings without a visible number are skipped.
        phones=html_ele.xpath('//div[@id="weixinPart"]/div[@class="infoPart"]/p[@id="mobileNumber"]/strong/text()')
        if not phones:
            return
        phone=phones[0]
        print(phone)

        configuration=self._meta(page_text,'房屋配置',default='')    # 房屋配置

        xq=self._meta(page_text,'小区名',strip=False)                # 所属小区

        # District / sub-district taken from the breadcrumb trail
        dz_1=html_ele.xpath('//ul[@class="search-crumbs"]/li[last()-2]/a/text()')[0]
        dz_2=html_ele.xpath('//ul[@class="search-crumbs"]/li[last()-1]/a/text()')[0]
        ssqy=dz_1.split('/')[0]+'/'+dz_2.split('/')[0]

        # Exact address, only present when the "具体地点：" label exists
        if "具体地点：" in html_ele.xpath('//div[@class="viewad-meta2-item "]/label[1]/text()'):
            # _meta defaults to '未知' even if the regex misses (the original
            # indexed [0] unconditionally and could raise IndexError here)
            wz=self._meta(page_text,'具体地点')
        else:
            wz='未知'

        # Description: either direct text, or nested <p><span> paragraphs
        desc_nodes=html_ele.xpath('//div[@class="viewad-text"]/text()')
        if desc_nodes:
            desc=desc_nodes[0]
        else:
            desc='\n'.join(html_ele.xpath('//div[@class="viewad-text"]//p/span/text()'))

        # Crawl timestamp (epoch seconds) and row status flag (1 = active)
        create_time=int(time.time())
        status=1

        # Dedup check — queried ONCE (the original ran the same DB query a
        # second time inside the if-condition).
        can_store=myhelp.query_data(title=title,phone=phone,xq=xq,money=money,configuration=configuration,desc=desc,rent_way=rent_way,house_type='rent')
        print(can_store,'能不能往数据库存储')
        if can_store:
            sql='insert into db_houserent_sh(ssqy,about_id,about_url,title,rent_way,money,payment_way,direction,`imgs`,owner,`configuration`,xq,xxdz,`fyms`,create_time,`status`,phone) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
            data=[ssqy,about_id,about_url,title,rent_way,money,payment_way,direction,imgs,person_name,configuration,xq,wz,desc,create_time,status,phone]
            house_id=myhelp.execute_sql(sql,data)
            print(house_id,'房源id')

# Script entry point: crawl the whole-apartment ("整租") rental index.
if __name__ == "__main__":
    # Listing index URL; the {} placeholder is filled with the page number
    # by the inherited pagination logic (Baixing.all_page_list).
    start_url = 'https://shenyang.baixing.com/zhengzu/?grfy=1&page={}'
    spider = Rent_House(start_url)
    spider.all_page_list()

