# -*- coding:utf8 -*-
import json
import requests
import base64
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.backends import default_backend


def decrypt_string(response1_base64, response2_base64, response3_base64):
    """Decrypt an AES-CBC payload delivered as three base64 fields.

    Parameters
    ----------
    response1_base64 : str
        Base64-encoded ciphertext.
    response2_base64 : str
        Base64-encoded 16-byte initialization vector.
    response3_base64 : str
        Base64-encoded AES key.

    Returns
    -------
    str
        UTF-8 plaintext with PKCS#7 padding removed.
    """
    ciphertext = base64.b64decode(response1_base64)
    iv = base64.b64decode(response2_base64)
    key = base64.b64decode(response3_base64)

    cipher = Cipher(
        algorithms.AES(key),
        modes.CBC(iv),
        backend=default_backend()
    )
    decryptor = cipher.decryptor()

    decrypted = decryptor.update(ciphertext) + decryptor.finalize()

    # Strip PKCS#7 padding. A valid padding byte is in 1..16; the original
    # check accepted 0, and `decrypted[:-0]` would have wiped the whole
    # plaintext. Also guard against an empty decryption result.
    if decrypted:
        padding_length = decrypted[-1]
        if 1 <= padding_length <= 16:
            decrypted = decrypted[:-padding_length]

    return decrypted.decode('utf-8')

# Headers for the initial GET to the itra.run homepage (used by run() to
# obtain the session cookie from the Set-Cookie response header).
header = {
    "Host": "itra.run",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:138.0) Gecko/20100101 Firefox/138.0",
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
    "Accept-Language": "zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2",
    "Accept-Encoding": "gzip, deflate, br, zstd",
}
# Headers for the form-encoded POSTs to the /api/runner/find endpoint.
# NOTE: run() mutates this dict at runtime by inserting a "Cookie" key.
login_header = {
    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    "accept-encoding": "gzip, deflate, br",
    "accept-language": "zh,zh-CN;q=0.9",
    "cache-control": "max-age=0",
    "content-type": "application/x-www-form-urlencoded",
    "upgrade-insecure-requests": "1",
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
    "Host": "itra.run"
}
def get_proxy():
    """Fetch one fresh HTTP proxy from the dmdaili API.

    Returns
    -------
    dict
        A requests-style proxies mapping routing both http and https
        traffic through the fetched "ip:port" proxy.
    """
    # NOTE(review): API key and password are hard-coded in the URL —
    # consider moving them to environment variables or configuration.
    resp = requests.get(
        "http://api.dmdaili.com/dmgetip.asp?apikey=062c68ee&pwd=2ceacf762585db3e1d24cbdb4ef11091&getnum=1&httptype=1&geshi=1&fenge=1&fengefu=&operate=all&setcity=scchengdu",
        timeout=10,  # avoid hanging indefinitely if the proxy API stalls
    )
    # Strip stray whitespace/newlines the API may append around "ip:port".
    p = resp.text.strip()
    proxies = {
        'http': f'http://{p}',
        'https': f'http://{p}'
    }
    return proxies


def _fetch_page(data, proxies):
    """POST one search page to itra.run and return the decrypted result dict."""
    response = requests.post(
        "https://itra.run/api/runner/find",
        data=data,
        headers=login_header,
        proxies=proxies,
        timeout=30,
    )
    content = json.loads(response.text)
    result = decrypt_string(
        content.get("response1", ""),
        content.get("response2", ""),
        content.get("response3", ""),
    )
    return json.loads(result)


def run(param):
    """Scrape every result page for a runner name from itra.run, then push
    the collected list to the dealData endpoint.

    Parameters
    ----------
    param : dict
        Expects keys "serialNumber", "web_type", "crawler_type" and
        "spider_config" with a "runner_name" entry.
    """
    proxies = get_proxy()
    runner_name = param.get("spider_config", {}).get("runner_name", "")

    # Prime the session cookie that the search API expects.
    ss = requests.get("https://itra.run/", headers=header, proxies=proxies, timeout=30)
    set_cookie = ss.headers.get('Set-Cookie')
    if set_cookie:
        # Guard: the original assigned None when the header was absent,
        # which makes requests drop the Cookie header entirely.
        login_header["Cookie"] = set_cookie

    data = {"name": runner_name, "start": "1", "count": "10"}
    result_dict = _fetch_page(data, proxies)
    runner_list = result_dict.get("Results", [])
    total = result_dict.get("ResultCount", 0)

    # Remaining page offsets: 11, 21, ... (the start=1 page was fetched above).
    for start in range(11, total, 10):
        proxies = get_proxy()  # fresh proxy per page to spread the load
        data["start"] = str(start)
        runner_list.extend(_fetch_page(data, proxies).get("Results", []))

    result_data = {
        "serialNumber": param.get("serialNumber"),
        "webType": param.get("web_type"),
        "crawlerType": param.get("crawler_type"),
        "data": str(runner_list),
    }
    try:
        requests.post(
            "http://132.232.166.135:8905/api/scrapy/dealData",
            json=result_data,
            headers={"Content-Type": "application/json"},
            timeout=30,
        )
        print("数据推送成功")  # "data pushed successfully"
    except Exception as e:
        # Best-effort push: log and continue rather than crash the crawler.
        print(e)

if __name__=="__main__":
    data = {
  "crawler_type": "runnername",
  "serialNumber": "222222",
  "spider_config": {
    "filterFiled": [
      "startTime",
      "address"
    ],
    "runner_name": "liu bo"
  },
  "web_type": "itra"
}
    run(data)

