import scrapy
import execjs

class JzwSpider(scrapy.Spider):
    """Spider for the MOHURD construction-enterprise registry (jzsc.mohurd.gov.cn).

    The list API returns an AES-encrypted hex string instead of plain JSON.
    Decryption is delegated to the site's own front-end CryptoJS routine,
    executed through PyExecJS (requires Node + the `crypto-js` npm package).
    """

    name = 'jzw'
    # allowed_domains = ['jzw.com']
    start_urls = ['http://jzsc.mohurd.gov.cn/api/webApi/dataservice/query/comp/list?pg=1&pgsz=15&total=450']

    # Decryption routine ported from the site's front-end bundle.
    # hex payload -> base64 -> AES-128-CBC decrypt (fixed key/IV, PKCS7) -> UTF-8.
    # NOTE: key/IV strings are part of the site's protocol — do not alter.
    _JS_DECRYPT = """
        var CryptoJS = require('crypto-js');

        function h(t) {
            var f = CryptoJS.enc.Utf8.parse("jo8j9wGw%6HbxfFn");
            var m = CryptoJS.enc.Utf8.parse("0123456789ABCDEF");

            var e = CryptoJS.enc.Hex.parse(t);
            var n = CryptoJS.enc.Base64.stringify(e);
            var a = CryptoJS.AES.decrypt(n, f, {
                iv: m,
                mode: CryptoJS.mode.CBC,
                padding: CryptoJS.pad.Pkcs7
            });
            return a.toString(CryptoJS.enc.Utf8);
        }
    """

    # Lazily-compiled execjs context, shared across calls: compiling the JS
    # program (and spawning the Node runtime) once instead of per response.
    _js_ctx = None

    def start_requests(self):
        """Issue the initial API request with the browser-like headers the
        endpoint expects (it rejects bare requests)."""
        headers = {
            "Accept": "application/json, text/plain, */*",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7,zh-TW;q=0.6",
            "accessToken": "",
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "Cookie": "Hm_lvt_b1b4b9ea61b6f1627192160766a9c55c=1655110849,1655900015",
            "Host": "jzsc.mohurd.gov.cn",
            "Pragma": "no-cache",
            "Referer": "http://jzsc.mohurd.gov.cn/data/company",
            "timeout": "30000",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/537.36",
        }
        # Reuse the class-level URL instead of duplicating the literal.
        yield scrapy.Request(url=self.start_urls[0], headers=headers)

    def parse(self, response):
        """Decrypt the encrypted response body and log the plaintext result."""
        data = response.text
        self.logger.debug("raw payload: %s", data)
        result = self.parse_data(data)
        self.logger.info("decrypted payload: %s", result)

    def parse_data(self, text):
        """Decrypt a hex-encoded AES payload via the embedded CryptoJS routine.

        :param text: hex string as returned by the API.
        :return: decrypted UTF-8 plaintext (JSON document as a string).
        """
        # Compile the JS program once and cache the context on the class.
        if JzwSpider._js_ctx is None:
            JzwSpider._js_ctx = execjs.compile(self._JS_DECRYPT)
        return JzwSpider._js_ctx.call('h', text)

