# -*- coding: utf-8 -*-
# @Time     : 2025/9/16 下午7:12
# @Author   : LoneRanger
# @FileName : 得物女装爬取.py
# @Software : PyCharm
import requests
import json
import execjs
from pymongo import MongoClient, UpdateOne
from time import time
from apscheduler.schedulers.blocking import BlockingScheduler


class DW_Spider(object):
    """Scraper for Dewu (得物) women's clothing listings.

    Fetches signed feed pages from the Dewu pick-rule API, parses the
    product list, and incrementally upserts items into MongoDB keyed on
    (skuId, propertyValueId).
    """

    def __init__(self):
        # sign.js exposes function `p`, which computes the request `sign`
        # parameter from the payload dict.
        with open('sign.js', encoding='utf-8') as f:
            self.js_code = f.read()
        self.mongo_client = MongoClient()
        self.collection = self.mongo_client['py_spider']['DeWu']
        # Compound unique index for the incremental upserts; created once
        # here instead of on every save (create_index is idempotent, but
        # issuing it per batch was wasted round-trips).
        self.collection.create_index(
            [("skuId", 1), ("propertyValueId", 1)],
            unique=True
        )
        self.headers = {
            "Accept": "*/*",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "Content-Type": "application/json",
            "Origin": "https://www.dewu.com",
            "Pragma": "no-cache",
            "Referer": "https://www.dewu.com/",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-site",
            "User-Agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 18_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.5 Mobile/15E148 Safari/604.1 Edg/140.0.0.0"
        }
        self.cookies = {
            "dw_edge_er_cookie": "d9545d1f-3cee-a505-f16c-85c17efdbf10",
            "sajssdk_2015_cross_new_user": "1",
            "sensorsdata2015jssdkcross": "%7B%22distinct_id%22%3A%2219952152a6114b9-05a661e7b306404-4c657b58-1327104-19952152a62249d%22%2C%22first_id%22%3A%22%22%2C%22props%22%3A%7B%22%24latest_traffic_source_type%22%3A%22%E5%BC%95%E8%8D%90%E6%B5%81%E9%87%8F%22%2C%22%24latest_search_keyword%22%3A%22%E6%9C%AA%E5%8F%96%E5%88%B0%E5%80%BC%22%2C%22%24latest_referrer%22%3A%22https%3A%2F%2Fmail.qq.com%2F%22%7D%2C%22identities%22%3A%22eyIkaWRlbnRpdHlfY29va2llX2lkIjoiMTk5NTIxNTJhNjExNGI5LTA1YTY2MWU3YjMwNjQwNC00YzY1N2I1OC0xMzI3MTA0LTE5OTUyMTUyYTYyMjQ5ZCJ9%22%2C%22history_login_id%22%3A%7B%22name%22%3A%22%22%2C%22value%22%3A%22%22%7D%2C%22%24device_id%22%3A%2219952152a6114b9-05a661e7b306404-4c657b58-1327104-19952152a62249d%22%7D"
        }
        self.url = "https://app.dewu.com/api/v1/h5/commodity-pick-interfaces/pc/pick-rule-result/feeds/info"
        # Base query payload; `sign` is computed per request in spider().
        self.data = {
            "pickRuleId": 644468,
            "pageNum": 1,
            "pageSize": 24,
            "filterUnbid": True,
            "showCspu": True
        }

    def spider(self):
        """Fetch one feed page and return the decoded JSON response.

        Signs and serializes a *copy* of ``self.data`` so repeated
        (scheduled) calls keep working. The previous implementation
        rebound ``self.data`` to a JSON string after the first request,
        so every hourly run after the first signed and double-encoded a
        string instead of the payload dict.

        Raises:
            requests.HTTPError: on a non-2xx response.
        """
        payload = dict(self.data)
        payload['sign'] = execjs.compile(self.js_code).call('p', payload)
        # Compact separators: the body must match the form the sign was
        # computed over (no spaces).
        body = json.dumps(payload, separators=(',', ':'))
        response = requests.post(self.url, headers=self.headers,
                                 cookies=self.cookies, data=body)
        response.raise_for_status()
        return response.json()

    def parse(self, res):
        """Extract product fields from a feed response.

        Args:
            res: decoded JSON response with ``res['data']['list']``.

        Returns:
            A list of flat item dicts (empty when the feed has no data).
        """
        data_list = res['data']['list']
        if not data_list:
            print('没有数据')
            return []
        utime = int(time() * 1e3)  # crawl timestamp in ms, same for the batch
        item_list = []
        for data in data_list:
            data_dict = {
                "authPrice": data['authPrice'],              # authorized price
                "logoUrl": data['logoUrl'],                  # product logo image
                "originPrice": data['originPrice'],          # original price
                "price": data['price'],                      # current price
                "propertyValueId": data['propertyValueId'],  # variant/property id
                "skuId": data['skuId'],
                "spuId": data['spuId'],
                "title": data['title'],
                "utime": utime,
            }
            print(data_dict)
            item_list.append(data_dict)
        return item_list

    def save_to_mongodb(self, items):
        """Incrementally upsert *items* keyed on (skuId, propertyValueId).

        Existing documents are refreshed via ``$set``; first-seen
        documents additionally record ``crawlFirstTime``.
        """
        if not items:
            return
        ops = []
        for it in items:
            _filter = {
                "skuId": it['skuId'],
                "propertyValueId": it['propertyValueId']
            }
            update_doc = {
                "$set": it,
                # Stamp first-crawl time only when the document is inserted.
                "$setOnInsert": {"crawlFirstTime": it.get("utime", int(time() * 1e3))},
            }
            ops.append(UpdateOne(_filter, update_doc, upsert=True))
        # Single unordered bulk write: one round-trip, failures don't stop
        # the remaining operations.
        res = self.collection.bulk_write(ops, ordered=False)
        print(f"已有:{res.matched_count} "
              f"更新:{res.modified_count} "
              f"新增:{len(res.upserted_ids)}")

    def main(self):
        """Run one crawl cycle: fetch → parse → store."""
        res = self.spider()
        items = self.parse(res)
        self.save_to_mongodb(items)


if __name__ == '__main__':
    spider = DW_Spider()
    # Kick off one crawl immediately instead of waiting a full interval.
    spider.main()
    # Then repeat the crawl once every hour, blocking the main thread.
    hourly = BlockingScheduler()
    hourly.add_job(spider.main, 'interval', hours=1)
    hourly.start()