import time
import httpx
import execjs
import csv
# Import your own cookies from an external file; alternatively, comment out the
# next line and define `cookies` directly in __init__.
from secrecy import cookies


class ReviewsCrawler:
    """Crawl product reviews from Tmall's mtop review API and optionally save them to CSV.

    Workflow: init_js() compiles the external JS sign generator, get_json()
    signs and sends the request, parse_json() extracts the fields of interest,
    save_json() appends them to a CSV file.
    """

    def __init__(self, url):
        self.url = url
        # HTTP/2 client — the h5api endpoint negotiates h2.
        self.client = httpx.Client(http2=True)
        self.headers = {
            'authority': 'h5api.m.tmall.com',
            'referer': 'https://detail.tmall.com/',
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36 Edg/120.0.0.0',
        }
        self.cookies = cookies
        self.params = {
            'jsv': '2.7.2',
            'appKey': '12574478',
            't': str(int(time.time()*1000)),  # ms timestamp; refreshed again in create_sign()
            'sign': '',  # filled in by create_sign()
            'api': 'mtop.alibaba.review.list.for.new.pc.detail',
            'v': '1.0',
            'isSec': '0',
            'ecode': '0',
            'timeout': '10000',
            'ttid': '2022@taobao_litepc_9.17.0',
            'AntiFlood': 'true',
            'AntiCreep': 'true',
            'preventFallback': 'true',
            'data': '{"itemId":"742106313947","bizCode":"ali.china.tmall","channel":"pc_detail","pageSize":20,"pageNum":1}',  # "pageNum":1 controls pagination
        }
        self.json_data = {}   # raw JSON payload of the last response
        self.data_list = []   # accumulated review dicts
        self.ctx = None       # compiled JS context, set by init_js()

    def get_json(self):
        """Sign the request, fetch one page of reviews and store the parsed JSON."""
        self.create_sign()
        print(f'{self.params = }')
        response = self.client.get(
            url=self.url,
            params=self.params,
            cookies=self.cookies,
            headers=self.headers,
        )
        self.json_data = response.json()
        print(self.json_data)

    def parse_json(self):
        """Extract the interesting fields of each review into self.data_list."""
        for review in self.json_data['data']['module']['reviewVOList']:
            review_dict = {
                '购买商品类型': review['skuText'],
                '评论时间': review['reviewDate'],
                '评论内容': review['reviewWordContent'],
                '客服回复': review['reply'],
            }
            self.data_list.append(review_dict)
        print(self.data_list)

    def save_json(self):
        """Append collected reviews to CSV; write the header only for a new/empty file."""
        with open('../data/reviews.csv', 'a', newline="", encoding='utf-8-sig') as f:
            csv_writer = csv.DictWriter(f, fieldnames=['购买商品类型', '评论时间', '评论内容', '客服回复'])
            # Fix: the original wrote a header row on EVERY call while opening in
            # append mode, so repeated runs produced duplicate header lines.
            if f.tell() == 0:
                csv_writer.writeheader()
            csv_writer.writerows(self.data_list)

    def init_js(self):
        """Compile the external JS snippet that implements the mtop sign algorithm."""
        # Explicit encoding: the snippet (and its filename) contain non-ASCII text,
        # so relying on the platform default encoding is unsafe.
        with open('生成sign的代码片段.js', 'r', encoding='utf-8') as f:
            js_code = f.read()
        self.ctx = execjs.compile(js_code)

    def create_sign(self):
        """Compute the mtop `sign` parameter and store it in self.params.

        The sign is derived from token&t&appKey&data by the compiled JS;
        init_js() must have been called first.
        """
        # Fix: refresh the timestamp at sign time so `t` and `sign` stay in sync
        # even when the request is sent long after __init__ ran (the server
        # rejects stale timestamps).
        l = str(int(time.time() * 1000))
        self.params['t'] = l
        s = '12574478'  # appKey, duplicated from self.params
        data = self.params['data']
        # NOTE(review): presumably the first segment of the _m_h5_tk cookie —
        # confirm; it must match the cookie set in `cookies`.
        token = '0588f0b1319b65fe070882fba8654142'
        pre_data = token + "&" + l + "&" + s + "&" + data
        # Delegate to the JS snippet to generate the sign.
        sign = self.ctx.call('createSign', pre_data)
        # Place the sign into the request parameters.
        self.params['sign'] = sign

    def run(self):
        """Full pipeline: compile JS, fetch, parse (saving is disabled by default)."""
        self.init_js()
        self.get_json()
        self.parse_json()
        # self.save_json()


if __name__ == '__main__':

    REVIEW_URL = 'https://h5api.m.tmall.com/h5/mtop.alibaba.review.list.for.new.pc.detail/1.0/'
    # Only the first page is collected by default.
    crawler = ReviewsCrawler(REVIEW_URL)
    # If the response complains "被挤爆了" ("overloaded"), the cookie has
    # changed — set a fresh cookie and retry.
    crawler.run()
