#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2024/10/11 11:51
# @Author  : 王凯
# @File    : pdd_browser_spider.py
# @Project : scrapy_spider
import datetime
import json
import random
import re
import threading
import time
import uuid

import parsel
import requests
from DrissionPage import ChromiumOptions, SessionOptions, WebPage
from loguru import logger

from components.config import WFQ_SOURCE_MYSQL_CONFIG
from utils.db.mysqldb import MysqlDB
from utils.proxies_tools import get_company_ip_crawler_by_api

# Shared handle to the wfq source MySQL database; connection settings come
# from components.config.WFQ_SOURCE_MYSQL_CONFIG.
_db_conf = WFQ_SOURCE_MYSQL_CONFIG
wfq_source_db = MysqlDB(
    ip=_db_conf["MYSQL_IP"],
    port=_db_conf["MYSQL_PORT"],
    db=_db_conf["MYSQL_DB"],
    user_name=_db_conf["MYSQL_USER_NAME"],
    user_pass=_db_conf["MYSQL_USER_PASS"],
)


class PddBrowserSpider(object):
    """Crawls PDD (mobile.yangkeduo.com) mall pages with a real Chromium
    browser driven by DrissionPage.

    The egress IP is controlled through the SwitchyOmega extension; every
    goods-list response is captured by the network listener started in
    :meth:`run` and persisted by :class:`PDDBrowserSaver`.

    Use as a context manager::

        with PddBrowserSpider(cookie=..., proxy=...) as spider:
            spider.run()
    """

    base_url = "https://mobile.yangkeduo.com"

    def __init__(self, cookie=None, proxy=None, wx=True, ua=None):
        """
        :param cookie: cookie name -> value pairs injected for .yangkeduo.com.
        :param proxy: SwitchyOmega proxy config: ``host``/``port`` plus
            optional ``username``/``password``.
        :param wx: when ``ua`` is not given, True selects a WeChat-embedded
            Chrome user agent, False an iPhone Safari one.
        :param ua: explicit user-agent string; overrides ``wx``.
        """
        if cookie is None:
            cookie = {}
        if proxy is None:
            proxy = {}
        if ua is None:
            if wx:
                self.ua = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36 NetType/WIFI MicroMessenger/7.0.20.1781(0x6700143B) WindowsWechat(0x63090c11) XWEB/11275 Flue'
            else:
                self.ua = 'Mozilla/5.0 (iPhone; CPU iPhone OS 16_6 like Mac OS X) AppleWebKit/804.1.15 (KHTML, like Gecko) Version/16.6 Mobile/15E148 Safari/604.1'
        else:
            self.ua = ua
        self.cookie = cookie
        self.proxy = proxy
        self.wx = wx
        self.page = None  # set lazily by __enter__ / init()
        self.height = 1920
        self.width = 800
        self.one_page_interval = 30  # base seconds between two scroll "pages"
        self.one_mall_interval = 60 * 2  # seconds between two malls (reserved)
        # Recorded into the DB so a crawl can be traced back to the account used.
        self.device_id = self.cookie.get("pdd_user_id")

    def init(self):
        """Launch a fresh Chromium instance with the SwitchyOmega extension,
        log the direct-connection IP, then switch to ``self.proxy``.

        :return: the ready DrissionPage ``WebPage``.
        """
        co = ChromiumOptions().auto_port()
        co.set_pref('credentials_enable_service', False)  # suppress the "save password?" bubble
        co.set_argument("--disable-gpu")  # avoid GPU work while rendering pages
        co.set_argument('--hide-crash-restore-bubble')  # suppress the "restore pages?" bubble
        co.set_user_agent(user_agent=self.ua)
        co.set_argument('--window-size', f'{self.width},{self.height}')

        # 1. Load the (unpacked, local) SwitchyOmega extension.
        co.add_extension(r'C:\Users\wfq\Downloads\proxy_switchyomega-2.5.20-an+fx')
        so = SessionOptions()
        browser = WebPage(chromium_options=co, session_or_options=so)
        browser.clear_cache()

        # 2. Reset SwitchyOmega to direct connection and log the raw IP.
        self.switch_ip(browser)
        browser.get("https://ip.gs", retry=0)
        html_text = browser.ele('x://pre').text
        logger.success(f">>>>当前的ip {html_text}")

        # 3. Switch to the configured proxy.
        self.switch_ip(browser, proxy=self.proxy)
        return browser

    def switch_ip(self, browser, proxy: dict = None):
        """Configure SwitchyOmega inside the running browser.

        With ``proxy`` given, fill in the proxy profile (host/port plus
        optional username/password), apply it and activate the "proxy"
        profile; otherwise skip the extension tutorial and select direct
        connection.
        """
        if proxy:
            tab = browser.new_tab()
            tab.get("chrome-extension://padekgcemlokbadohgkifijomclgjgif/options.html#!/profile/proxy")
            tab.ele('x://input[@ng-model="proxyEditors[scheme].host"]').input(proxy['host'], clear=True)
            tab.ele('x://input[@ng-model="proxyEditors[scheme].port"]').input(proxy['port'], clear=True)

            if proxy.get('username'):
                # Authenticated proxy: open the "proxy login" dialog and fill it.
                tab.ele('x://button[@title="代理登录"]').click()
                tab.wait(1)
                tab.ele('x://input[@placeholder="用户名"]').input(proxy['username'], clear=True)
                tab.ele('x://input[@placeholder="密码"]').input(proxy['password'], clear=True)
                tab.wait(1)
                tab.ele('x://button[@ng-disabled="!authForm.$valid"]').click()
                tab.wait(1)

            tab.ele('x://a[@ng-click="applyOptions()"]').click()
            tab.wait(1)
            logger.info(f"切换代理成功 {proxy}")

            tab.get("chrome-extension://padekgcemlokbadohgkifijomclgjgif/popup/index.html#")
            tab.wait(1)
            tab.ele('x://span[text()="proxy"]').click()
        else:
            tab = browser.new_tab()
            tab.get("chrome-extension://padekgcemlokbadohgkifijomclgjgif/options.html#")
            if tab.ele('x://button[text()="跳过教程"]'):
                tab.ele('x://button[text()="跳过教程"]').click()
            tab.get("chrome-extension://padekgcemlokbadohgkifijomclgjgif/popup/index.html#")
            if tab.ele('x://span[text()="[直接连接]"]'):
                tab.ele('x://span[text()="[直接连接]"]').click()

        # Close the settings tab as long as another tab remains open.
        if len(browser.tab_ids) > 1:
            logger.debug(f"当前tab个数{len(browser.tab_ids)} {browser.tab_ids}")
            tab.close()

    def __enter__(self):
        self.page = self.init()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Deliberately leave the browser running after a run (quitting here
        # was disabled on purpose); nothing to clean up.
        pass

    def _update_shop(self, mall_id, extra=None):
        """Stamp last_crawler_time (plus optional extra columns) on the shop row.

        :param extra: additional column -> value pairs to merge into the update.
        """
        row = {"last_crawler_time": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")}
        if extra:
            row.update(extra)
        # NOTE(review): mall_id is interpolated into the WHERE clause; the ids
        # come from our own DB/API, but this is not injection-safe in general.
        wfq_source_db.update_smart("net_pdd_shop_info", row, f"mall_id = '{mall_id}'")

    def get_mall_info(self, mall_id=409099714):
        """Open a mall page, switch to "all goods" and keep scrolling so the
        network listener captures every goods-list response.

        Marks the shop row in net_pdd_shop_info as failed/crawled as it goes.

        :raises Exception: when no goods data is present (likely banned account).
        """
        mall_url = f"https://mobile.yangkeduo.com/mall_page.html?mall_id={mall_id}"
        logger.info(f"开始抓取店铺 {mall_url} ")
        self.page.get(mall_url)
        if self.page.ele('x://div[text()="该店铺已失效"]'):
            logger.error(f"店铺已失效 {mall_url}")
            self._update_shop(mall_id, {"flag": -1, "shop_status": "该店铺已失效"})
            return
        if self.page.ele('x://div[text()="该店铺正在上传商品"]'):
            logger.error(f"该店铺正在上传商品 {mall_url}")
            self._update_shop(mall_id, {"flag": -1, "shop_status": "该店铺正在上传商品"})
            return
        if self.page.ele('x://div[text()="品牌好货"]'):
            logger.error(f"品牌好货 {mall_url}")
            return
        self.page.ele('x://div[text()="全部商品"]').click()
        res = PDDBrowserSaver._resolve_raw_data(self.page.html, self.page.url)
        # _resolve_raw_data returns a falsy sentinel on parse failure; guard
        # before .get() so a failure raises cleanly instead of AttributeError.
        if not res or not (res.get("store") or {}).get("goodsSet"):
            raise Exception("没有商品, 可能是账号封了")
        times = 0
        while True:
            self.page.actions.scroll(delta_y=self.height * 4)
            if self.page.ele('x://div[text()="本店暂无更多商品"]'):
                logger.info("没有更多商品了")
                self._update_shop(mall_id, {"device_id": self.device_id})
                break
            if self.page.ele('x://div[text()="点击展开新疆西藏专属商品"]'):
                self._update_shop(mall_id, {"device_id": self.device_id})
                break
            times += 1
            if times > 50:
                logger.error("翻页超过50次，可能没有更多商品")
                self._update_shop(mall_id, {"device_id": self.device_id})
                break
            # Jitter the delay (25-35s) so scrolling does not look machine-timed.
            time.sleep(random.randint(-5, 5) + self.one_page_interval)
            logger.info("翻下一页")

    def run(self, mall_ids=None):
        """Inject cookies, start the packet listener, then crawl malls.

        :param mall_ids: explicit list of mall ids to crawl; when empty/None,
            ids are pulled one at a time from the internal "not yet crawled"
            API (the original behaviour).
        """
        if mall_ids is None:
            mall_ids = []

        for k, v in self.cookie.items():
            self.page.set.cookies({'name': k, 'value': v, 'domain': '.yangkeduo.com'})
        self.page.get("https://mobile.yangkeduo.com")
        self.page.listen.start(PDDBrowserSaver.allow_keywords)
        # Daemon thread: the saver loop never returns, so a non-daemon thread
        # would keep the process alive forever after run() finishes.
        threading.Thread(target=PDDBrowserSaver().thread_print, args=(self.page,), daemon=True).start()
        if mall_ids:
            # Bug fix: mall_ids used to be accepted but silently ignored.
            for mall_id in mall_ids:
                self.get_mall_info(mall_id)
                time.sleep(random.randrange(50, 120))
            return
        for _ in range(200):
            mall_id = requests.get('https://tmp.wfq2020.com/wangyi-captcha/api/pdd/proxy/get_not_crawler_shop').json()[0]['mall_id']
            print(mall_id)
            self.get_mall_info(mall_id)
            time.sleep(random.randrange(50, 120))


class PDDBrowserSaver(object):
    """Consumes network packets captured by the browser listener and persists
    each matching request/response into the net_pdd_proxy_log table."""

    # URL fragments worth persisting: the mall page HTML and the goods /
    # category / detail API endpoints.
    allow_keywords = ['/mall_page.html?', '/api/turing/mall/query_mall_category_list', "/api/turing/mall/query_cat_goods", "/api/turing/mall/query_mall_detail_info"]

    @staticmethod
    def _resolve_raw_data(html, url):
        """Extract and decode ``window.rawData = {...};`` from mall page HTML.

        :param html: raw page markup (may be empty or None).
        :param url: page URL, used only in log messages.
        :return: the decoded dict, or a falsy ``{}`` when the payload is
            missing or not valid JSON (previously ``False``, which crashed
            callers that immediately call ``.get()`` on the result).
        """
        # A plain regex on the raw markup is enough here; building a full
        # parsel/lxml tree just to run one pattern was wasted work.
        match = re.search(r"window\.rawData\s*=\s*(\{.*\});", html or "")
        if not match:
            logger.error(f"未获取到数据 {url}")
            return {}
        try:
            return json.loads(match.group(1))
        except ValueError as e:
            # Bug fix: json.loads used to run outside the try block, so an
            # invalid payload escaped as an uncaught exception.
            logger.error(f"解析数据失败: {e} {url}")
            return {}

    def thread_print(self, page_cls):
        """Blocking loop: read packets from ``page_cls``'s listener and insert
        one net_pdd_proxy_log row per packet whose URL matches allow_keywords.

        Meant to run on a background thread (see PddBrowserSpider.run).
        """
        logger.info("开始抓取数据")
        for packet in page_cls.listen.steps():
            if any(c in packet.url for c in self.allow_keywords):
                logger.debug(f"{packet.url}")
                item = {
                    "request_url": packet.url,
                    "base_url": packet.url.split("?")[0],
                    "mall_id": packet.url.split("mall_id=")[-1].split("&")[0],
                    "page_no": packet.url.split("page_no=")[-1].split("&")[0] if "page_no" in packet.url else None,
                    # API responses arrive as dicts; mall-page HTML is re-parsed
                    # for its embedded rawData and stored alongside the markup.
                    "response": packet.response.body if isinstance(packet.response.body, dict) else {"data": self._resolve_raw_data(packet.response.body, packet.url), "html": packet.response.body, },
                    "source": "web"
                }
                wfq_source_db.add_smart("net_pdd_proxy_log", item)


if __name__ == '__main__':
    # Manual entry point: crawl with one hard-coded account/proxy pair.
    # proxies = get_company_ip_crawler_by_api(static=True) or {"http": ''}
    # host = proxies.get('http').replace('http://squid:70226ff9ff818edbd816e8c06b76ef97@', "").split(":")[0]
    # host = "117.90.152.210"
    # host = "221.226.54.119"
    # host = "49.85.189.43"

    # NOTE(review): these are live session cookies (including PDDAccessToken);
    # hard-coded secrets should be moved to config/env before sharing this file.
    cookie = {
        "api_uid": "CiTat2cZsF5vVQBdD0GjAg==",
        "_nano_fp": "Xpmxlpmqn5Con5TbnT_zS0uEQugxxR7ku8LECJWe",
        "webp": "1",
        "jrpl": "btE1Il1HYlYWyu9xgkaGezBiqVYsaOHZ",
        "njrpl": "btE1Il1HYlYWyu9xgkaGezBiqVYsaOHZ",
        "dilx": "GDnM8oemKykW6whUWmg0O",
        "PDDAccessToken": "VY2IIMD6555KJPUXI3WQ2XHR2AUFZZZB2SH4X7K5BZU6QM4HQDKA1207d27",
        "pdd_user_id": "7363834308733",
        "pdd_user_uin": "MWEQSKWX434KTQKOFDHEWGVRAQ_GEXDA",
        "pdd_vds": "gaLBNLLenuOsIGtNELtxoxNGOdiIPmmdQeymPOnNibLmQLOIoNInQubbixNB"
    }

    # NOTE(review): proxy credentials are also hard-coded — same concern.
    proxy = {
        'host': 'isp.visitxiangtan.com',
        'port': 10004,
        'username': 'spwom2a0ca',
        'password': 'g9C_yfw5y2fIF7pWom'
    }

    # wx=False but an explicit WeChat UA is passed, so the UA argument wins.
    with PddBrowserSpider(
            proxy=proxy, cookie=cookie, wx=False,
            ua="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36 NetType/WIFI MicroMessenger/7.0.20.1781(0x6700143B) WindowsWechat(0x63090c11) XWEB/11275 Flue"
    ) as spider:
        spider.run()