# -*-coding: Utf-8 -*-
# @File : spider.py
# author: Chimengmeng
# blog_url: https://www.cnblogs.com/dream-ze/
# Time: 2023/10/9
import ast
import json
import os
import random

import requests
from lxml import etree
from fake_useragent import UserAgent
from database import DbHandler, MysqlController
from main_spider.spider_ip import SpiderIp
from settings import db_settings


class BaseSpider():
    """Shared spider infrastructure: request headers, DB handles, proxy selection.

    A working proxy can come from three sources:
      * a local proxy-pool HTTP service at ``PROXY_POOL_BASE`` (``test_proxy_pool``)
      * the ``proxy_data`` MySQL table (``test_ip_mysql``)
      * a ``proxies.json`` file on disk (``test_ip_text``)
    """

    # Base URL of the local proxy-pool service (was hard-coded in two methods).
    PROXY_POOL_BASE = 'http://127.0.0.1:5010'
    # Seconds before a validation/pool request is abandoned (no timeout = hang forever).
    REQUEST_TIMEOUT = 10

    def __init__(self):
        self.headers = {
            'User-Agent': UserAgent().random,
            'Host': 'nb.anjuke.com',
            'Cookie': 'aQQ_ajkguid=C00D6399-8248-436D-B1DE-2E5156B7AB8E; sessid=1758F69D-268F-4C64-93F7-6EC033881D3F; ajk-appVersion=; seo_source_type=0; fzq_h=eb718b6816f2a3c34196cb88435116e6_1696837014211_7c849d6a42d640df91865d0489288694_47924990094821451283923248109006072713; id58=CrIej2UjrZiY3omsCqvAAg==; ctid=32; twe=2; 58tj_uuid=64f1d1f1-c349-4ee3-a7a5-525f6e2a8793; _ga=GA1.2.1788623706.1696900510; _gid=GA1.2.176830058.1696900510; als=0; fzq_js_anjuke_ershoufang_pc=a85df12604bdf5cbc7519075c99324b8_1696900528463_24; xxzl_cid=1ac5ff5ccd1a443d9b4fa1a74bd75828; xxzl_deviceid=kCs1agHNxoHpCS6XzAh64AtkYIPoALgpYwl0IKtgkbAFiLeskQXciqLYIFMqPLZL; new_session=1; init_refer=; new_uv=2; obtain_by=2; _gat=1; _ga_DYBJHZFBX2=GS1.2.1696904308.2.0.1696904308.0.0.0',
        }

        self.db_handler = DbHandler()           # CSV persistence backend
        self.mysql_handler = MysqlController()  # MySQL persistence backend
        self.spider_ip = SpiderIp()             # proxy harvester
        # self.proxy = self.test_ip()
        self.proxy = self.test_proxy_pool()

    def get_proxy(self):
        """Fetch one proxy record (a dict) from the local proxy-pool service."""
        return requests.get(
            f'{self.PROXY_POOL_BASE}/get/', timeout=self.REQUEST_TIMEOUT).json()

    def delete_proxy(self, proxy):
        """Ask the local proxy-pool service to drop an invalid *proxy*."""
        requests.get(
            f'{self.PROXY_POOL_BASE}/delete/?proxy={proxy}'.format(proxy),
            timeout=self.REQUEST_TIMEOUT)

    def test_proxy_pool(self):
        """Pick a proxy from the pool and validate it, retrying until one works.

        Returns a ``proxies`` mapping usable with ``requests``, e.g.
        ``{"http": "http://1.2.3.4:8080"}``.
        """
        # Fetch once and reuse: the original called get_proxy() twice, so the
        # record it printed was not the proxy it actually tested.
        record = self.get_proxy()
        print(record)
        proxy = record.get("proxy")
        print(proxy)
        proxies = {"http": "http://{}".format(proxy)}
        try:
            response = requests.get(
                'https://www.baidu.com', proxies=proxies,
                timeout=self.REQUEST_TIMEOUT)
            if response.status_code == 200:
                # Proxy works: use it.
                return proxies
            # Dead proxy: remove it from the pool and try another.
            self.delete_proxy(proxy)
            return self.test_proxy_pool()
        except Exception:
            # Unreachable proxy: remove it and try another.  The ``return`` is
            # essential -- without it the recursive result was discarded and
            # callers received None.
            self.delete_proxy(proxy)
            return self.test_proxy_pool()

    def pre_user_ip_mysql(self):
        """Return all rows of ``proxy_data``, harvesting fresh proxies if empty."""
        sql = 'select * from proxy_data'
        res = self.mysql_handler.query(sql, one=False)
        if not res:
            self.spider_ip.save_to_mysql()
            self.spider_ip.save_to_json()
            # Re-query so callers get the freshly harvested rows instead of the
            # stale empty result (which made random.choice() blow up).
            res = self.mysql_handler.query(sql, one=False)
        return res

    def pre_use_ip_text(self):
        """Load the proxies previously saved to ``proxies.json``."""
        file_path = os.path.join(db_settings.DATABASE_DIR, 'proxies.json')
        with open(file_path, 'r', encoding='utf-8') as f:
            data = json.load(f)
        return data

    def test_ip_mysql(self):
        """Validate a random MySQL-stored proxy; delete dead ones and retry."""
        res = self.pre_user_ip_mysql()
        can_use = dict(random.choice(res))
        proxy_str = can_use.get("proxy")
        # Stored as the repr of a dict, e.g. "{'http': 'http://1.2.3.4:80'}".
        proxy = ast.literal_eval(proxy_str) if proxy_str else None
        print(proxy)
        response = requests.get('https://www.baidu.com/', proxies=proxy,
                                timeout=self.REQUEST_TIMEOUT)
        print(' Request Headers:', response.request.headers)
        print('Proxy in Request Headers:', response.request.headers.get('Proxy'))
        if response.status_code == 200:
            return proxy
        else:
            id = can_use.get('id')
            # NOTE(review): value interpolated into SQL; id comes from our own
            # table so risk is low, but parameterized queries would be safer if
            # MysqlController supports them.
            sql = f'delete from proxy_data where id="{id}"'
            self.mysql_handler.exec(sql)
            print(f'已删除无效代理 :>>> {proxy}')
            # Propagate the retried proxy (was previously dropped -> None).
            return self.test_ip_mysql()

    def test_ip_text(self):
        """Validate a random JSON-stored proxy; retry on connection errors."""
        headers = {
            'User-Agent': UserAgent().random
        }
        ip_dict = self.pre_use_ip_text()
        proxy = random.choice(list(ip_dict.values()))
        try:
            response = requests.get('https://www.baidu.com/', proxies=proxy,
                                    headers=headers,
                                    timeout=self.REQUEST_TIMEOUT)
            if response.status_code == 200:
                return proxy
        except Exception as e:
            print('Error', e)
            # Propagate the retried proxy (was previously dropped -> None).
            return self.test_ip_text()


class Spider(BaseSpider):
    """Scrape second-hand house listings from Anjuke's Jiangbei district pages."""

    # Listing index; page 1 has no /pN/ suffix, pages 2+ append p{i}/.
    BASE_URL = 'https://nb.anjuke.com/sale/jiangbeia/'
    PAGE_RANGE = range(1, 10)

    # Column order shared by the CSV writer and the MySQL INSERT -- keeping it
    # in one place removes the triplicated field-by-field extraction.
    FIELDS = (
        'house_name',         # listing name
        'house_detail_url',   # detail-page link
        'house_detail',       # layout summary (title attribute)
        'house_area',         # floor area
        'house_create',       # build year
        'house_cover',        # cover image URL
        'house_price_total',  # total price (万)
        'house_pre_price',    # price per square meter
        'house_location',     # district/area/address joined with '-'
        'house_tag',          # tags joined with '-'
    )

    def pre_spider_url_list(self):
        """Build the list of listing-page URLs to crawl."""
        return [self.BASE_URL if i == 1 else f'{self.BASE_URL}p{i}/'
                for i in self.PAGE_RANGE]

    def _parse_house_div(self, div):
        """Extract one listing's fields from its card <div>.

        Raises IndexError on cards with a different layout (e.g. ads); the
        caller skips those.
        """
        house_location = ''
        for span in div.xpath('./a/div[2]/div[1]/section/div[2]/p[2]/span'):
            house_location += span.xpath('./text()')[0].strip() + '-'
        house_tag = ''
        for span in div.xpath('./a/div[2]/div[1]/section/div[3]/span'):
            house_tag += span.xpath('./text()')[0].strip() + '-'
        return {
            'house_name': div.xpath('./a/div[2]/div[1]/section/div[2]/p[1]/text()')[0],
            'house_detail_url': div.xpath('./a/@href')[0],
            'house_detail': div.xpath('./a/div[2]/div[1]/div[1]/h3/@title')[0],
            'house_area': div.xpath('./a/div[2]/div[1]/section/div[1]/p[2]/text()')[0].strip(),
            'house_create': div.xpath('./a/div[2]/div[1]/section/div[1]/p[5]/text()')[0].strip(),
            'house_cover': div.xpath('./a/div[1]/img/@src')[0],
            'house_price_total': div.xpath('./a/div[2]/div[2]/p[1]/span[1]/text()')[0].strip() + '万',
            'house_pre_price': div.xpath('./a/div[2]/div[2]/p[2]/text()')[0].strip(),
            'house_location': house_location,
            'house_tag': house_tag,
        }

    def spider_house_data_list(self):
        """Fetch every listing page and parse all house cards into dicts."""
        house_data_list = []
        for tag_url in self.pre_spider_url_list():
            response = requests.get(url=tag_url, headers=self.headers, proxies=self.proxy)
            tree = etree.HTML(response.text)
            div_lists = tree.xpath('/html/body/div[1]/div/div/section/section[3]/section[1]/section[2]/div')
            for div in div_lists:
                try:
                    house_data_list.append(self._parse_house_div(div))
                except Exception as e:
                    # Card with an unexpected layout -- skip it.
                    print(f"加载数据错误 {e}")
                    continue
        return house_data_list

    def save_to_csv(self, house_data_list=None):
        """Persist rows via the CSV handler.

        *house_data_list* lets callers reuse already-scraped data; when None
        (the default, matching the old behavior) the site is scraped anew.
        """
        if house_data_list is None:
            house_data_list = self.spider_house_data_list()
        for house_data in house_data_list:
            print(house_data)
            self.db_handler.save_row_data([house_data[k] for k in self.FIELDS])

    def save_to_mysql(self, house_data_list=None):
        """Persist rows into the ``house_info`` MySQL table (see save_to_csv)."""
        if house_data_list is None:
            house_data_list = self.spider_house_data_list()
        columns = ','.join(self.FIELDS)
        for house_data in house_data_list:
            print(house_data)
            # SECURITY NOTE: scraped (untrusted) text is interpolated straight
            # into the SQL string; a double quote in any field breaks -- or
            # injects into -- the statement.  Switch to parameterized queries
            # if MysqlController supports them.
            values = ','.join('"{}"'.format(house_data[k]) for k in self.FIELDS)
            sql = f'insert into house_info({columns}) values ({values})'
            self.mysql_handler.exec(sql)
        self.mysql_handler.close()

    def save_to_all(self):
        """Scrape once, then persist to both CSV and MySQL.

        Previously each save method re-scraped the whole site, doubling the
        network traffic and risking inconsistent snapshots.
        """
        house_data_list = self.spider_house_data_list()
        self.save_to_csv(house_data_list)
        self.save_to_mysql(house_data_list)


class SpiderDetail(BaseSpider):
    """Single-page Anjuke scraper plus (unfinished) 5i5j rental crawling."""

    # Column order shared by the CSV writer and the MySQL INSERT.
    FIELDS = (
        'house_name',         # listing name
        'house_detail_url',   # detail-page link
        'house_detail',       # layout summary (title attribute)
        'house_area',         # floor area
        'house_create',       # build year
        'house_cover',        # cover image URL
        'house_price_total',  # total price (万)
        'house_pre_price',    # price per square meter
        'house_location',     # district/area/address joined with '-'
        'house_tag',          # tags joined with '-'
    )

    def __init__(self):
        super().__init__()

    def pre_spider_index_url(self):
        """Build the 5i5j rental index URLs (page 1 has no /nN/ suffix)."""
        base = 'https://sh.5i5j.com/zufang/'
        return [base if i == 1 else f'{base}n{i}/' for i in range(1, 10)]

    def spider_data(self):
        """Smoke-test request against 5i5j through the configured proxy."""
        tag_url = 'https://sh.5i5j.com/zufang/'
        print(self.proxy)
        response = requests.get(url=tag_url, headers=self.headers, proxies=self.proxy)
        print(response.request.headers)

    def spider_index_page(self):
        """Crawl each 5i5j index page.  TODO(review): body not implemented yet."""
        index_url_list = self.pre_spider_index_url()
        for url in index_url_list:
            ...

    def _parse_house_div(self, div):
        """Extract one listing's fields from its card <div>.

        Raises IndexError on cards with a different layout (e.g. ads); the
        caller skips those.
        """
        house_location = ''
        for span in div.xpath('./a/div[2]/div[1]/section/div[2]/p[2]/span'):
            house_location += span.xpath('./text()')[0].strip() + '-'
        house_tag = ''
        for span in div.xpath('./a/div[2]/div[1]/section/div[3]/span'):
            house_tag += span.xpath('./text()')[0].strip() + '-'
        return {
            'house_name': div.xpath('./a/div[2]/div[1]/section/div[2]/p[1]/text()')[0],
            'house_detail_url': div.xpath('./a/@href')[0],
            'house_detail': div.xpath('./a/div[2]/div[1]/div[1]/h3/@title')[0],
            'house_area': div.xpath('./a/div[2]/div[1]/section/div[1]/p[2]/text()')[0].strip(),
            'house_create': div.xpath('./a/div[2]/div[1]/section/div[1]/p[5]/text()')[0].strip(),
            'house_cover': div.xpath('./a/div[1]/img/@src')[0],
            'house_price_total': div.xpath('./a/div[2]/div[2]/p[1]/span[1]/text()')[0].strip() + '万',
            'house_pre_price': div.xpath('./a/div[2]/div[2]/p[2]/text()')[0].strip(),
            'house_location': house_location,
            'house_tag': house_tag,
        }

    def spider_house_data_list(self):
        """Fetch the first Anjuke listing page and parse all house cards."""
        house_data_list = []
        tag_url = 'https://nb.anjuke.com/sale/jiangbeia/'
        response = requests.get(url=tag_url, headers=self.headers, proxies=self.proxy)
        tree = etree.HTML(response.text)
        div_lists = tree.xpath('/html/body/div[1]/div/div/section/section[3]/section[1]/section[2]/div')
        for div in div_lists:
            try:
                house_info = self._parse_house_div(div)
                print('已调用', house_info)
                house_data_list.append(house_info)
            except Exception as e:
                # Card with an unexpected layout -- skip it.
                print(f"加载数据错误 {e}")
                continue
        return house_data_list

    def save_to_csv(self, house_data_list=None):
        """Persist rows via the CSV handler.

        *house_data_list* lets callers reuse already-scraped data; when None
        (the default, matching the old behavior) the site is scraped anew.
        """
        if house_data_list is None:
            house_data_list = self.spider_house_data_list()
        print('调用后', house_data_list)
        for house_data in house_data_list:
            print(house_data)
            self.db_handler.save_row_data([house_data[k] for k in self.FIELDS])

    def save_to_mysql(self, house_data_list=None):
        """Persist rows into the ``house_info`` MySQL table (see save_to_csv)."""
        if house_data_list is None:
            house_data_list = self.spider_house_data_list()
        columns = ','.join(self.FIELDS)
        for house_data in house_data_list:
            print(house_data)
            # SECURITY NOTE: scraped (untrusted) text is interpolated straight
            # into the SQL string; a double quote in any field breaks -- or
            # injects into -- the statement.  Switch to parameterized queries
            # if MysqlController supports them.
            values = ','.join('"{}"'.format(house_data[k]) for k in self.FIELDS)
            sql = f'insert into house_info({columns}) values ({values})'
            self.mysql_handler.exec(sql)
        self.mysql_handler.close()

    def save_to_all(self):
        """Scrape once, then persist to both CSV and MySQL.

        Previously each save method re-scraped the page, doubling the network
        traffic and risking inconsistent snapshots.
        """
        house_data_list = self.spider_house_data_list()
        self.save_to_csv(house_data_list)
        self.save_to_mysql(house_data_list)


if __name__ == '__main__':
    # Script entry point: scrape the listings and persist to CSV + MySQL.
    spider = Spider()
    spider.save_to_all()
