# -*- coding: utf-8 -*-
# @Time     : 2025/9/18 上午12:23
# @Author   : LoneRanger
# @FileName : 得物混淆.py
# @Software : PyCharm
import execjs
import requests
import json
from datetime import datetime
import hashlib
from pymongo import MongoClient, UpdateOne
from time import time
from apscheduler.schedulers.blocking import BlockingScheduler


class DW_Spider(object):
    """Scraper for the Dewu (得物) PC pick-rule product feed.

    Workflow per run:
      1. Build a ``shumeiId`` device fingerprint from the current timestamp.
      2. Execute local JS (``traceparent.js`` / ``dewu.js``) via execjs to
         produce the ``traceparent`` header and the encrypted request body.
      3. POST to the feed endpoint, decrypt the response with the same JS,
         and upsert the parsed items into MongoDB incrementally.
    """

    def __init__(self):
        self.mongo_client = MongoClient()
        self.collection = self.mongo_client['py_spider']['DeWu']
        # Ensure the unique index is created at most once per process,
        # instead of on every save_to_mongodb() call.
        self._index_ready = False
        self.headers = {
            "Accept": "*/*",
            "Accept-Language": "zh-CN,zh;q=0.9",
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "Content-Type": "application/json",
            "Origin": "https://dewu.com",
            "Pragma": "no-cache",
            "Referer": "https://dewu.com/",
            "SK;": "",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-site",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36",
            # "adi": "YTA0MDdjZDU5MWI1NjBhMzw61PwqRZdjlOIMO7w7E6V8OSEB1HPe+/ru+/rcOww6kdwpVJWcOuwp7Do8KywopRaGMdQ8OCAX7DkMOjfsO2w7/CtcKow6ATaHsDwpwNBD9gwrVsdQ==",
            # "ltk": "F8KMw7PDrMKUCcKJwpvDt8KWNMKgwqnCoMKVRMOmwpbCnMKmNTXCk3vDn8OnXMOPKAvCv3LCrinCsMOCwqhrBcKdQWPCq8Oww5TDu8O1w7PDksK8",
            "sec-ch-ua": "\"Google Chrome\";v=\"137\", \"Chromium\";v=\"137\", \"Not/A)Brand\";v=\"24\"",
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": "\"Windows\"",
            "shumeiId": "",  # filled per-request in spider()
            "sks": "1,hdw4",
            # "traceparent": "00-f572a2db68906c957af6e4aebf11eeaa-5441cc9bfe978e9b-01"
        }

        self.url = "https://app.dewu.com/api/v1/h5/commodity-pick-interfaces/pc/pick-rule-result/feeds/info"

    # Fetch one page of feed data.
    def spider(self):
        """Fetch the feed and return the decrypted response as a dict.

        Raises:
            requests.HTTPError: if the endpoint returns a non-2xx status.
            requests.Timeout: if the request exceeds the timeout.
        """
        # shumeiId = <timestamp + fixed salt> + md5(same)[10:22]
        seed = datetime.now().strftime("%Y%m%d%H%M%S") + "1a9b23f4d04b6a17f937d22b4d0c2bed00c0"
        digest = hashlib.md5(seed.encode('utf-8')).hexdigest()
        shumeiId = seed + digest[10:22]

        with open('./traceparent.js', 'r', encoding='utf-8') as f:
            trace_js = f.read()

        self.headers['shumeiId'] = shumeiId
        self.headers['traceparent'] = execjs.compile(trace_js).call('get_traceparent')

        with open('./dewu.js', 'r', encoding='utf-8') as f:
            js_code = f.read()
        # Compile once and reuse the context for both encrypt and decrypt
        # (the original compiled the same source twice).
        dewu_ctx = execjs.compile(js_code)
        encrypted = dewu_ctx.call('get_data', shumeiId)
        payload = json.dumps({"data": encrypted}, separators=(',', ':'))

        # Explicit timeout so the hourly scheduled job can never hang forever;
        # fail fast on HTTP errors instead of feeding garbage to decrypt().
        response = requests.post(self.url, headers=self.headers, data=payload, timeout=30)
        response.raise_for_status()
        result = dewu_ctx.call('decrypt', response.text)
        return json.loads(result)

    # Parse the decrypted response.
    def parse(self, res):
        """Extract item dicts from a decrypted response.

        Args:
            res: decrypted response dict; items are read from res['data']['list'].

        Returns:
            list[dict]: one dict per product (empty list when the feed has no data).
        """
        data_list = res['data']['list']
        if not data_list:
            print('没有数据')
            return []  # empty list instead of implicit None; still falsy for callers
        item_list = []
        for data in data_list:
            data_dict = {
                "authPrice": data['authPrice'],              # authorized price
                "logoUrl": data['logoUrl'],                  # logo image URL
                "originPrice": data['originPrice'],          # original price
                "price": data['price'],                      # current price
                "propertyValueId": data['propertyValueId'],  # property value id
                "skuId": data['skuId'],                      # SKU id
                "spuId": data['spuId'],                      # SPU id
                "title": data['title'],                      # product title
                "utime": int(time() * 1000),                 # crawl timestamp (ms)
            }
            print(data_dict)
            item_list.append(data_dict)
        return item_list

    # Save data (incremental upsert).
    def save_to_mongodb(self, items):
        """Upsert items into MongoDB keyed on (skuId, propertyValueId).

        Existing documents are updated in place; new ones record a
        crawlFirstTime set only on first insert.
        """
        if not items:
            return
        # Compound unique index, created once per process.
        if not self._index_ready:
            self.collection.create_index(
                [("skuId", 1), ("propertyValueId", 1)],
                unique=True
            )
            self._index_ready = True
        # Assemble the bulk upsert operations.
        ops = []
        for it in items:
            _filter = {
                "skuId": it['skuId'],
                "propertyValueId": it['propertyValueId']
            }
            update_doc = {
                "$set": it,
                # Only stamped when the document is first inserted.
                "$setOnInsert": {"crawlFirstTime": it.get("utime", int(time() * 1000))},
            }
            ops.append(UpdateOne(_filter, update_doc, upsert=True))

        if ops:
            # Single unordered bulk write.
            res = self.collection.bulk_write(ops, ordered=False)
            print(f"已有:{res.matched_count} "
                  f"更新:{res.modified_count} "
                  f"新增:{len(res.upserted_ids)}")

    def main(self):
        """One full crawl cycle: fetch, parse, persist."""
        res = self.spider()
        items = self.parse(res)
        self.save_to_mongodb(items)


if __name__ == '__main__':
    spider = DW_Spider()
    # Run one crawl immediately so we don't wait an hour for the first pass.
    spider.main()
    # Then repeat every hour with a blocking scheduler.
    scheduler = BlockingScheduler()
    scheduler.add_job(spider.main, 'interval', hours=1)
    scheduler.start()
