'''_____________________________________________________________________
|[] R3DXPL0IT SHELL											|ROOT]|!"|
|"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""|"|
|CODED BY > R3DXPLOIT(JIMMY)										  | |
|EMAIL > RETURN_ROOT@PROTONMAIL.COM								   | |
|GITHUB > https://github.com/r3dxpl0it								| |
|WEB-PAGE > https://r3dxpl0it.Github.io							   |_|
|_____________________________________________________________________|/|
'''

import requests
import bs4
import argparse
import traceback

# Off-site links found while crawling (populated as a side effect of extractor()).
external_links = []
# Hrefs extractor() could not classify (populated as a side effect of extractor()).
unknown_links = []
# NOTE(review): this module-level list is shadowed by the local of the same name
# inside fuzzable_extract() and is never written to at module level.
fuzzables_links = []


def extractor(soup, host):
    """Collect same-site links from a parsed page.

    Walks every ``<a href=...>`` in *soup* and returns a deduplicated list of
    absolute URLs belonging to *host*.  Off-site links are appended to the
    module-level ``external_links`` list and unclassifiable hrefs to
    ``unknown_links`` (side effects, matching the rest of this module).

    :param soup: a ``bs4.BeautifulSoup`` parsed document (anything exposing
        ``find_all("a", href=True)`` works).
    :param host: the root URL (e.g. ``http://example.com``) used to absolutize
        relative hrefs.
    :return: list of absolute same-site URLs, in discovery order, no duplicates.
    """
    all_links = []
    for link in soup.find_all("a", href=True):
        href = link["href"]
        if href.startswith("/"):
            # Root-relative link: absolutize before the duplicate check.
            # (Bug fix: the original compared the raw href against a list
            # that stores the prefixed URL, so duplicates were kept.)
            full = host + href
            if full not in all_links:
                all_links.append(full)
        elif host in href:
            if href not in all_links:
                all_links.append(href)
        elif "http://" in host and "https://" + host.split("http://")[1] in href:
            # Same site reached over https.  (Bug fix: the match on the href
            # is part of the elif condition now — previously the branch
            # triggered on the host alone, making every later branch
            # unreachable whenever the host was http://.)
            if href not in all_links:
                all_links.append(href)
        elif (
            "http" not in href
            and "www" not in href
            and len(href) > 2
            and "#" not in href
        ):
            # Bare relative path such as "page.html".
            full = host + "/" + href
            if full not in all_links:
                all_links.append(full)
        elif len(href) > 6:
            external_links.append(href)
        else:
            unknown_links.append(href)
    return all_links


def fuzzable_extract(linklist):
    """Return the links that carry query parameters.

    A link is considered fuzzable when it contains an ``=`` (i.e. it has at
    least one ``key=value`` pair an attacker could tamper with).

    :param linklist: iterable of URL strings.
    :return: new list with only the fuzzable URLs, original order preserved.
    """
    return [url for url in linklist if "=" in url]


def xploit(link, host=None):
    """Fetch *link* and return the same-site links found on that page.

    :param link: URL to download.
    :param host: root URL used to absolutize relative hrefs; defaults to
        *link* itself (the top-level crawl case).
    :return: list of absolute same-site URLs, or an empty list when the
        request or parse fails.

    Best-effort by design: any failure is printed (with traceback) and an
    empty list is returned so the crawl can continue with the other links.
    """
    try:
        if host is None:
            host = link
        res = requests.get(link, allow_redirects=True)
        # NOTE(review): the "lxml" parser requires the third-party lxml
        # package to be installed — confirm it is a deployment requirement.
        soup = bs4.BeautifulSoup(res.text, "lxml")
        return extractor(soup, host)
    except Exception:
        # Deliberately broad: one bad URL must not abort the whole crawl.
        print(traceback.format_exc())
        print(f"\n\nThe Error URL: {link}\n\n")
        return []


def level2(linklist, host):
    """Crawl one level deeper: fetch every link in *linklist* and merge results.

    :param linklist: URLs discovered on the root page.
    :param host: root URL passed through to :func:`xploit` for absolutizing.
    :return: deduplicated list containing every link found on the child pages
        plus the child URLs themselves, in first-seen order.
    """
    collected = []
    seen = set()  # O(1) membership alongside the ordered result list
    for page_url in linklist:
        for found in xploit(page_url, host):
            if found not in seen:
                seen.add(found)
                collected.append(found)
        # The crawled page itself belongs in the result set too.
        if page_url not in seen:
            seen.add(page_url)
            collected.append(page_url)
    return collected


def get_urls(_url, deepcrawl=False, fuzzable=False, external=False):
    """Crawl *_url* and return a sorted list of the discovered links.

    :param _url: root URL; a missing scheme gets ``http://`` prepended.
    :param deepcrawl: also fetch each discovered link and collect its links.
    :param fuzzable: additionally ensure links with query parameters
        (``=`` present) are included in the result.
    :param external: additionally include the module-level ``external_links``
        accumulated by :func:`extractor`.
    :return: sorted list of URLs; empty list on any failure.

    Bug fix: the original tested ``len(links) > 1`` everywhere, so a page
    with exactly ONE discovered link printed "No Link Found" and dropped the
    link (same off-by-one in the fuzzable and external sections).
    """
    _link_list = []
    try:
        _results = set()
        if _url is None:
            # NOTE(review): quit() raises SystemExit, but the `finally:
            # return` below swallows it, so callers actually receive [].
            quit()
        if "http" not in _url:
            _url = "http://" + _url

        if deepcrawl:
            links = level2(xploit(_url), _url)
        else:
            links = xploit(_url)
        if links:
            _results.update(links)
        else:
            print("\n\nNo Link Found\n\n")

        if fuzzable and links:
            # Compute once (the original called fuzzable_extract twice).
            fuzz = fuzzable_extract(links)
            if fuzz:
                _results.update(fuzz)
            else:
                print("\n\nNo Fuzzable Link Found\n\n")

        if external:
            if external_links:
                _results.update(external_links)
            else:
                print("\n\nNo EXTERNAL Link Found\n\n")

        _link_list = sorted(_results)
    except Exception:
        print(traceback.format_exc())
    finally:
        # Intentional: always return a list, even after an exception.
        return _link_list


# if __name__ == "__main__" :
# 	banner = '''
#  _____________________________________________________________________
# |CODED BY > R3DXPLOIT(JIMMY)										  | |
# |GITHUB > https://github.com/r3dxpl0it								| |
# |_____________________________________________________________________|/|
# '''
# print(banner)
# parser = argparse.ArgumentParser()
# parser.add_argument('-u', '--url', help='root url', dest='url')
# parser.add_argument('-d', '--deepcrawl', help='crawl deaply', dest='deepcrawl', action='store_true')
# parser.add_argument('-f', '--fuzzable', help='extract fuzzable', dest='fuzzable', action='store_true')
# parser.add_argument('-e', '--external', help='extract external', dest='external', action='store_true')
# args = parser.parse_args()
# get_urls(args.url, args.deepcrawl, args.fuzzable, args.external)
