# 腾讯云市场爬虫

from bs4 import BeautifulSoup

import math
import json
import time
import random
import urllib3

from ._base import *

# 腾讯的SSL证书验证有问题，所以在requests里面设置了禁用验证，下面的代码可以避免Warning
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


# 下面的函数移植自腾讯的js文件
# 源文件链接：https://imgcache.qq.com/qcloud/market/scripts/release/pcSearch.28a902b57a3e92836b5b.js
def guid():
    """Return a pseudo-random GUID-shaped string (8-4-4-4-12 hex groups).

    Port of the generator in Tencent's pcSearch JS bundle, which uses the
    idiom ``((1 + Math.random()) * 0x10000).toString(16).substring(1)`` to
    produce 4 uniformly random hex digits per group.
    """
    def s4():
        # int(65536 * (1 + random.random())) is always five hex digits of the
        # form '1xxxx'.  Slice [3:] drops the '0x' prefix AND the constant
        # leading '1', leaving the 4 random digits — the previous [2:-1]
        # slice kept the constant '1' and discarded a random digit instead.
        return hex(int(65536 * (1 + random.random())))[3:]

    return s4() + s4() + "-" + s4() + "-" + s4() + "-" + s4() + "-" + s4() + s4() + s4()


class SpiderTencent(SpiderBase):
    """Spider for the Tencent Cloud marketplace (market.cloud.tencent.com)."""

    def __init__(self):
        super().__init__()
        self.source_url = 'https://market.cloud.tencent.com'

    def get_app_list(self, url=None, page=1):
        """Fetch one page of the product listing.

        :param url: unused; kept for signature compatibility with the other
            spiders in this package.
        :param page: 1-based page number to fetch.
        :return: tuple ``(product_list, total_page_count)``.
        """
        # Testing showed the backend caps ``count`` at 100 — larger values are
        # ineffective — so request exactly 100 per page; the total-page
        # calculation below relies on the same 100-per-page size.
        r = requests.post(
            self.source_url + '/ncgi/search/getSearch',
            params={'t': str(int(time.time())), 'uin': '', 'csrfCode': '', 'reqSeqId': guid()},
            data={'count': 100, 'page': page},
            headers={'referer': self.source_url + '/categories/' + ('' if page == 1 else '?page=' + str(page))},
            # Tencent's SSL certificate fails verification, hence verify=False;
            # the resulting InsecureRequestWarning is silenced at module import.
            verify=False)
        r_json = r.json()
        return r_json['data']['productSet'], int(math.ceil(r_json['data']['totalCount'] / 100))

    def get_app_info(self, item):
        """Build the info dict for one product.

        Accepts one element of the list returned by ``self.get_app_list()``
        (not a URL).  Differences from the JD Wanxiang spider's output:
          * ``tags``, ``label``, ``view``, ``favorite`` and ``shop_url`` are
            always None (shop_url could be fetched via /ncgi/capi, but it is
            not worth the extra request);
          * ``label_url`` is the categoryId (full URL: /categories/{categoryId});
          * ``price`` uses a different format;
          * ``rating`` is an int on a 50-point scale rather than 5;
          * an extra ``deliver`` key holds the delivery type: SAAS, MANUAL or API.
        """
        info_dict = {
            'url': item['productId'],
            'title': item['productName'],
            'tags': None,
            'intro': item['summary'],
            'price': [],
            'rating': int(item['score']),
            'label': None,
            'label_url': item['categoryId'],
            'shop': item['companyName'],
            'shop_url': None,
            'view': None,
            'buy': item['times'],
            'favorite': None,
            'comment': item['comments'],
            'deliver': item['deliverType']
        }
        r = requests.get(self.source_url + '/products/' + str(item['productId']), verify=False)
        soup = BeautifulSoup(r.text, 'lxml')
        # The price list is embedded as JSON in the buy form's data attribute.
        info_dict['price'] = json.loads(soup.find(class_='fn-buy-form')['data-pricelist'])
        return info_dict
