#
# Copyright 2013 - Tom Alessi
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


"""
Custom Burp Extension that facilitates automated spidered or targeted (specific urls) scans
using the Burp scanner tool.  This extension is meant to work with the Berper web application 
and will not function properly without a correctly configured Django/Berper environment.  
See http://www.berper.org for more details on setup, installation and configuration.
"""


import os
from django.conf import settings
os.environ['DJANGO_SETTINGS_MODULE'] = 'berper.settings'
from berperHelpers import berperzxJDBC

import datetime
import logging
import pytz
import re
import time
import urlparse
import uuid

from burp import IBurpExtender
from burp import IScannerListener
from burp import IBurpExtenderCallbacks
from burp import IHttpListener

from java.io import File
from java.net import URL


class BurpExtender(IBurpExtender, IScannerListener, IBurpExtenderCallbacks, IHttpListener):


    def __init__(self):
        """
        Constructor - set the initial scan flags and obtain the shared
        berper logger.
        """

        # Default scan method is passive (False).  A later DB lookup
        # (_is_active) may flip this to active (True).
        self.active = False

        # Default scan type is spidered (False).  A later DB lookup
        # (_is_targeted) may flip this to targeted (True).
        self.targeted = False

        # High level logging goes through the shared berper logger.
        # Logging is thread-safe so all threads share this one logger.
        # Log items specific to individual scans are stored separately
        # with each scan report by a dedicated report logger.
        self.main_logger = logging.getLogger('berper')
        self.main_logger.debug('Starting Burp Scanner Extension with new scan.')


    def	registerExtenderCallbacks(self, callbacks):

        self.callbacks = callbacks


        # Set our extension name
        self.callbacks.setExtensionName("Berper Extension")


        # Get arguments
        # If this is a request for help, just return so the user can setup 
        # their burp environment
        args = self._getArgs()

        if 'H' in args:
            print 'Extension executed in testing mode, normally.'
            return

        scan_id = args['I']
        report_uid = args['R']


        # The total number of requests for this scan.  We want to keep
        # track for reporting purposes
        self.num_requests = 0


        # Keep track of not found (404) urls for this scan.  We want to keep
        # track for reporting purposes so we can investigate these later
        self.not_found = []


        # Keep track of scanned URLs so we can report on them
        self.scanned_urls = []


        # Obtain the date path part of the report location and
        # create the full path
        date_path = self._create_report_location(report_uid)


        # Setup the report logger
        self.report_logger = self._create_logger(date_path,report_uid)


        # Get a DB connection
        # Obtain database parameters from DJango
        # If we cannot obtain a DB connection, quit
        host = settings.DATABASES['default']['HOST']
        user = settings.DATABASES['default']['USER']
        password = settings.DATABASES['default']['PASSWORD']
        name = settings.DATABASES['default']['NAME']
        self.bs = berperzxJDBC.sql(host,user,password,name,self.report_logger)
        if self.bs.status:
            _exit_burp()


        # Determine if this is an active or passive scan
        self._is_active(scan_id)


        # Determine if this is a targeted or spidered scan
        self._is_targeted(scan_id)


        # Obtain the current time so we can save the start time
        # and we'll know how long the scan took
        start = self._convert_date_utc(datetime.datetime.now())


        # Obtain the site we are scanning
        site = self._get_site(scan_id)


        # Obtain the HTTP and HTTPS ports for this scan
        # This is only required for spidered scans so that
        # we can add the host/port combinations to the global
        # scope
        if not self.targeted:
            (hport,sport) = self._get_ports(scan_id)
            

        # Obtain the seed URL for this scan (the starting point)
        # This is only required for spidered scans
        if not self.targeted:
            seed = self._get_seed(scan_id)


        # Add job to the DB queue so users know its running
        self._add_queue(scan_id,start,report_uid)

   
        # Include the host/port in scope (include both HTTP and HTTPS)
        # We only need this for spidered scans and the spider will only follow 
        # links that are in scope
        if not self.targeted:
            if hport:
                self.report_logger.info('Including in scope: http://%s:%s.' % (site,hport))
                self.callbacks.includeInScope(URL('http://%s:%s' % (site,hport)))

            if sport:
                self.report_logger.info('Including in scope: https://%s:%s.' % (site,sport))
                self.callbacks.includeInScope(URL('https://%s:%s' % (site,sport)))
        

        # Register a scanner listener 
        # This method is used to register a listener which will be notified of new issues 
        # that are reported by the Scanner tool.
        self.report_logger.info('Registering a scanner listener to listen for new issues.')
        self.slistener = self.callbacks.registerScannerListener(self)
 

        # Register an HTTP listener 
        # This method is used to register a listener which will be notified of requests 
        # and responses made by any Burp tool.  When requests or responses are detected, 
        # they are then intercepted by the processHttpMessage method and checked for 
        # vulnerabilities.
        self.report_logger.info('Registering an HTTP listener to listen for requests/responses from burp tools.')
        self.hlistener = self.callbacks.registerHttpListener(self)


        # Assume the time of the first request is right now
        # Once there has been zero activity on the scanner for 60
        # seconds, we'll quit
        self.last_request_time = datetime.datetime.now()


        # If this is a targeted scan, then process all configured URLs
        if self.targeted:

            # Obtain the URLs for this scan
            urls = self._obtain_urls(scan_id)
            
            # Start scanning them
            for url in urls:

                url = url[0]

                # Remove any whitespace
                url = url.strip()
                self.report_logger.info('Processing URL for targeted scan: %s.' % url)

                # We will assume that if a URL was added to berper, its
                # already been validated so we won't check it again for syntax

                # We need to obtain the port and whether or not this request
                # uses HTTPS.
                u = urlparse.urlparse(url)
                u_port = u.port
                u_scheme = u.scheme
                
                
                # Find out if this is HTTP or HTTPS
                if u_scheme == 'https':
                    self.report_logger.info('URL is using HTTPS.')
                    useHttps = True
                else:
                    self.report_logger.info('URL is using HTTP.')
                    useHttps = False

                # If there is no port listed, then it's likely 80 or 443
                # depending on whether it's HTTP or HTTPS
                if not u_port:
                    if useHttps:
                        u_port = '443'
                    else:
                        u_port = '80'
                self.report_logger.info('URL port is %s.' % u_port)


                # Create an HTTP GET request to the URL
                request = self.callbacks.getHelpers().buildHttpRequest(URL(url))
                self.report_logger.info('HTTP request created for URL: %s.' % url)

                # Issue the HTTP request to the URL
                # The response will be evaluated by the processHttpMessage method
                self.report_logger.info('Issuing HTTP request for site:%s, port:%s, useHttps:%s.' % (site,u_port,useHttps))
                self.callbacks.makeHttpRequest(site, int(u_port), useHttps, request)
        
        # This is a spidered scan, so send the spider the seed URL and proceed
        else:
            self.report_logger.info('Sending seed url to spider: http://%s:%s%s.' % (site,hport,seed))
            self.callbacks.sendToSpider(URL('http://%s:%s%s' % (site,hport,seed)))


        # Close the DB connection because scanning is most likely going to take a long
        # time and the connection will timeout.  We'll open a new one after scanning is
        # complete
        self.bs.close()


        # Check status every minute until we're done (for active scans only)
        while True:
            self.report_logger.info('Checking if scanner is finished.')
            now = datetime.datetime.now()
            diff = now - self.last_request_time
            if diff.seconds > 60:
                self.report_logger.info('Scanner is complete.')
                break
            else:
                self.report_logger.info('Scanner not complete, sleeping 60 seconds.')
                time.sleep(60)


        # -- All done with scanning! -- #


        # Open a new database connection
        self.bs = berperzxJDBC.sql(host,user,password,name,self.report_logger)


        # Obtain the finish time
        finish = self._convert_date_utc(datetime.datetime.now())


        # Save the report 
        self._save_report(scan_id,date_path,report_uid,start,finish)


        # Remove from DB queue
        self._remove_queue(scan_id,report_uid)


        # Get the recently created report id
        report_id = self._get_report_id(report_uid)
        

        # Save high level severity information
        self._save_sevs(report_id)


        # Save not found information
        self._save_not_found(report_id)
        

        # Save the scanned Urls information
        self._save_scanned_urls(report_id)


        # Save the requests made
        self._save_requests_made(report_id)


        # Disconnect from the DB server
        self.bs.close()


        # Exit Burp
        self.report_logger.info('Scanning complete.')
        self._exit_burp()
   

    def _getArgs(self):
        """
        Parse the Burp command line arguments into a dict.

        Arguments must look like -I<id>: a dash, one letter, then a value.
        Anything malformed triggers the usage message.
        """

        self.main_logger.info('Checking command line arguments.')

        parsed = {}
        for raw in self.callbacks.getCommandLineArguments():
            match = re.search('^\-(\w)([\S]+)',raw)
            if match:
                parsed[match.group(1)] = match.group(2)
            else:
                self._usage()

        # Help request: hand the args straight back so the caller can let
        # the user set up their burp environment
        if 'H' in parsed:
            return(parsed)

        # Verify that every required argument is present
        for flag,desc in (('I','scan'),('R','report')):
            if flag not in parsed:
                self.main_logger.error('Missing %s ID (arg %s), exiting.' % (desc,flag))
                self._usage()

        # Return formatted arguments to the caller
        return(parsed)


    def _usage(self):
        """
        Print out usage information when incorrect arguments are given
        """

        u = """
        USAGE:
         -I : Scan ID
         -R : Unique Report ID
        """

        print u
        self._exit_burp()


    def _convert_date_utc(self,d):
        """
        Convert a naive datetime in the Django-configured local timezone
        to a UTC timestamp string.
        """

        # Attach the Django timezone to the naive datetime, shift the
        # result to UTC, and render it as a string
        local_tz = pytz.timezone(settings.TIME_ZONE)
        utc_time = local_tz.localize(d).astimezone(pytz.timezone('Etc/UTC'))

        return utc_time.strftime("%Y-%m-%d %H:%M:%S")


    def _get_ports(self,scan_id):
        """
        Return the (hport,sport) HTTP/HTTPS port pair configured for the
        given scan.  Exits Burp on a DB error.
        """

        self.report_logger.info('Obtaining scan ports.')

        query = """
                SELECT hport,sport
                FROM main_port
                WHERE name_id=%s
              """ % scan_id
        (rowcount,rows) = self.bs.execute(query)
        if self.bs.status:
            self._exit_burp()

        # Only the first row matters: one port pair per scan
        (hport,sport) = (rows[0][0],rows[0][1])
        self.report_logger.info('Scan HTTP and HTTPS ports are: %s, %s.' % (hport,sport))

        return (hport,sport)


    def _get_seed(self,scan_id):
        """
        Return the seed Url (the spider's starting point) for a spidered
        scan.  Exits Burp on a DB error.
        """

        self.report_logger.info('Obtaining seed url for this scan.')

        query = """
                SELECT path
                FROM main_seed_url
                WHERE name_id=%s
              """ % scan_id
        (rowcount,rows) = self.bs.execute(query)
        if self.bs.status:
            self._exit_burp()

        # One seed per scan: take the first column of the first row
        seed = rows[0][0]
        self.report_logger.info('Seed url for this scan is: %s.' % seed)

        return seed


    def _get_site(self,scan_id):
        """
        Return the site (host) configured for the given scan.  Exits Burp
        on a DB error.
        """

        self.report_logger.info('Obtaining site for this scan.')

        query = """
                SELECT site
                FROM main_site
                WHERE name_id=%s
              """ % scan_id
        (rowcount,rows) = self.bs.execute(query)
        if self.bs.status:
            self._exit_burp()

        # One site per scan: take the first column of the first row
        site = rows[0][0]
        self.report_logger.info('Site for this scan is: %s.' % site)

        return site


    def _add_queue(self,scan_id,start,report_uid):
        """
        Insert this scan into the DB queue so users can see it running.
        Exits Burp on a DB error.
        """

        self.report_logger.info('Adding scan job to database queue.')

        insert = """
                INSERT INTO main_queue (name_id,start,report_uid)
                VALUES (%s,'%s','%s')
              """ % (scan_id,start,report_uid)
        self.bs.execute(insert)
        if self.bs.status:
            self._exit_burp()

        self.report_logger.info('Scan job added to database queue.')


    def _remove_queue(self,scan_id,report_uid):
        """
        Delete this scan's entry from the DB queue.
        """

        self.report_logger.info('Removing scan job from database queue.')

        delete = """
                DELETE FROM main_queue
                WHERE name_id=%s
                AND report_uid='%s'
              """ % (scan_id,report_uid)
        # NOTE(review): unlike the other DB helpers, self.bs.status is not
        # checked here, so a failed delete is still logged as a success.
        (rowcount,rows) = self.bs.execute(delete)
        self.report_logger.info('Scan job removed from database queue.')


    def _is_active(self,scan_id):
        """
        Set self.active from the DB (True:active, False:passive).  Exits
        Burp on a DB error.
        """

        self.report_logger.info('Checking scan method.')

        query = """
                SELECT active
                FROM main_scan_method
                WHERE name_id=%s
              """ % scan_id
        (rowcount,rows) = self.bs.execute(query)
        if self.bs.status:
            self._exit_burp()

        # First column of the first row holds the flag
        self.active = rows[0][0]
        self.report_logger.info('Scan method is active: %s.' % self.active)


    def _is_targeted(self,scan_id):
        """
        Set self.targeted from the DB (True:targeted, False:spidered).
        Exits Burp on a DB error.
        """

        self.report_logger.info('Checking scan type.')

        query = """
                SELECT targeted
                FROM main_scan_type
                WHERE name_id=%s
              """ % scan_id
        (rowcount,rows) = self.bs.execute(query)
        if self.bs.status:
            self._exit_burp()

        # First column of the first row holds the flag
        self.targeted = rows[0][0]
        self.report_logger.info('Scan type is: %s.' % self.targeted)


    def _obtain_urls(self,scan_id):
        """
        Return the rows of urls configured for a targeted scan.  Exits
        Burp on a DB error.
        """

        self.report_logger.info('Obtaining scan urls.')

        query = """
                SELECT url
                FROM main_tscan_list
                INNER JOIN main_url on main_tscan_list.url_id=main_url.id
                WHERE main_tscan_list.name_id=%s
              """ % scan_id
        (rowcount,rows) = self.bs.execute(query)
        if self.bs.status:
            self.report_logger.error('No scan urls found, exiting.')
            self._exit_burp()

        return rows


    def _create_report_location(self,report_uid):
        """
        Create a unique report directory (today's date plus report_uid)
        under settings.BURPREPORTS.  Exits Burp if the path cannot be
        created.  Returns only the date portion of the path.
        """

        self.main_logger.info('Creating file system report location for this scan.')

        # The report path is based on today's date and the unique report id
        date_path = time.strftime('%Y/%m/%d')
        report_path = '%s/%s' % (date_path,report_uid)
        full_path = '%s/%s' % (settings.BURPREPORTS,report_path)

        # Only create the report path if it does not already exist
        if not os.path.exists(full_path):
            try:
                os.makedirs(full_path)
            except Exception as e:
                self.main_logger.error('Could not create report location, exiting: %s.' % e)
                self._exit_burp()

        self.main_logger.info('Full scan report path - logging for this scan will be located here: %s/%s.' % (settings.BURPREPORTS,report_path))

        return date_path


    def _save_report(self,scan_id,date_path,report_uid,start,finish):
        """Record the completed report in the database and write the HTML
        report file to disk.
        """

        self.report_logger.info('Saving scan report.')

        # Save info in the DB
        insert = """
                INSERT INTO main_report (name_id,path,report_uid,start,finish)
                VALUES (%s,'%s','%s','%s','%s')
              """ % (scan_id,date_path,report_uid,start,finish)
        (rowcount,rows) = self.bs.execute(insert)

        # Passive scans can take a bit to finish, so poll the issue list
        # until it stops growing (or we run out of checks)
        issues = self.callbacks.getScanIssues(self.slistener)
        count = 0
        max_count = 5

        while count < max_count:
            time.sleep(5)
            current_issues = self.callbacks.getScanIssues(self.slistener)

            # Stop polling once the issue count is stable
            if len(issues) == len(current_issues):
                self.report_logger.info('Scan issues are through updating.')
                break

            self.report_logger.info('Scan issues are still updating, they were (%s) and now are (%s).  Check %s of %s.' % (len(issues),len(current_issues),count,max_count))
            issues = current_issues
            count += 1

        self.report_logger.info('Writing scan report to %s/%s/%s/report.html.' % (settings.BURPREPORTS,date_path,report_uid))
        out = File('%s/%s/%s/report.html' % (settings.BURPREPORTS,date_path,report_uid))
        self.callbacks.generateScanReport('HTML',issues,out)


    def _get_report_id(self,report_uid):
        """
        Return the database id of the report row matching report_uid.
        """

        self.report_logger.info('Obtaining report id.')

        query = """
                SELECT id
                FROM main_report
                WHERE report_uid='%s'
              """ % (report_uid)
        (rowcount,rows) = self.bs.execute(query)

        # report_uid is unique, so the first row is the one we want
        report_id = rows[0][0]
        self.report_logger.info('Report id is %s.' % report_id)

        return report_id


    def _check_not_found_path(self,path):
        """Return any master-list rows matching a not found url (empty
        when the url has not been saved yet)."""

        self.report_logger.info('Checking not found path in master list.')

        query = """
                SELECT id
                FROM main_not_found
                WHERE path='%s'
              """ % path
        (rowcount,rows) = self.bs.execute(query)

        return rows


    def _save_not_found_path(self,path):
        """Insert a not found url into the master list."""

        self.report_logger.info('Saving not found path.')

        insert = """
                INSERT INTO main_not_found (path)
                VALUES ('%s')
              """ % path
        self.bs.execute(insert)


    def _check_not_found_list(self,report_id,path):
        """Check if a not found url has already been recorded against a
        particular report.

        @param report_id: id of the report being saved.
        @param path: the not found url.
        @return: matching rows (empty when the url is not yet linked).
        """

        self.report_logger.info('Checking if not found url (%s) has already been saved for report id %s.' % (path,report_id))

        # BUGFIX: the join previously read "INNER JOIN main_not_found.id on"
        # which is invalid SQL (a join references a table, not a column).
        sql = """
                SELECT main_not_found_list.id
                FROM main_not_found_list
                INNER JOIN main_not_found on main_not_found_list.path_id=main_not_found.id
                WHERE report_id=%s
                AND path='%s'
              """ % (report_id,path)
        # BUGFIX: the already-interpolated sql was also passed a redundant
        # parameter tuple, unlike every other DB helper in this class.
        (rowcount,rows) = self.bs.execute(sql)
        return rows


    def _save_not_found(self,report_id):
        """Persist every not found (404) url collected during this scan.

        Each url is added to the master not found list if it is new, and
        then linked to this report if not already linked.
        """

        self.report_logger.info('Checking for not found urls.')

        if not self.not_found:
            self.report_logger.info('There were no not found urls found.')
            return

        self.report_logger.info('%s not found urls found.' % len(self.not_found))

        for path in self.not_found:

            # Make sure the url exists in the master list
            self.report_logger.info('Checking if not found path already exists in database: %s.' % path)
            if not self._check_not_found_path(path):
                # The URL does not exist in the master list so save it
                self.report_logger.info('Path does not exist, so adding it.')
                self._save_not_found_path(path)
            else:
                self.report_logger.info('Path already exists.')

            # Link the url to this report unless it is already linked
            self.report_logger.info('Checking if path is already associated with this report.')
            if self._check_not_found_list(report_id,path):
                self.report_logger.info('Path is already associated with this report.')
                continue

            self.report_logger.info('Path is not associated with this report so adding it.')
            link = """
                            INSERT INTO main_not_found_list (report_id,path_id)
                            VALUES (%s,
                                   (SELECT id from main_not_found WHERE path='%s')
                                   )
                          """ % (report_id,path)
            self.bs.execute(link)


    def _save_scanned_urls(self,report_id):
        """Record how many urls were scanned for this report."""

        self.report_logger.info('Saving the number of scanned urls found: %s.' % len(self.scanned_urls))

        insert = """
                INSERT INTO main_urls_scanned (report_id,urls)
                VALUES (%s,%s)
              """ % (report_id,len(self.scanned_urls))
        self.bs.execute(insert)


    def _save_requests_made(self,report_id):
        """Record how many HTTP requests were made for this report."""

        self.report_logger.debug('Saving the number of requests made: %s.' % self.num_requests)

        insert = """
                INSERT INTO main_requests_made (report_id,requests)
                VALUES (%s,%s)
              """ % (report_id,self.num_requests)
        self.bs.execute(insert)


    def _save_sevs(self,report_id):
        """Tally scan issues by severity and save the counts for reporting.

        There are 4 recognized severities:
            - Information
            - Low
            - Medium
            - High

        Any other severity reported by Burp is logged and ignored.

        @param report_id: id of the report being saved.
        """

        self.report_logger.info('Saving high level severity information.')

        issues = self.callbacks.getScanIssues(self.slistener)

        # Severity counters.  (The four unused scalar counters that
        # previously shadowed this dict have been removed.)
        sevs = {
            'Information':0,
            'Low':0,
            'Medium':0,
            'High':0
        }

        # Obtain severity levels
        for issue in issues:

            sev = issue.getSeverity()
            self.report_logger.debug('Issue found with severity: %s.' % sev)

            # If this severity does not exist in our list, ignore
            if sev in sevs:
                sevs[sev] += 1
            else:
                self.report_logger.debug('Issue found with unknown severity, not storing.')

        # Save severities in the DB
        sql = """
                INSERT INTO main_severity (report_id,information,low,medium,high)
                VALUES (%s,%s,%s,%s,%s)
              """ % (report_id,sevs['Information'],sevs['Low'],sevs['Medium'],sevs['High'])
        self.bs.execute(sql)


    def _create_logger(self,date_path,report_uid):
        """Create a custom local report logger for this scan.

        Attaches a FileHandler writing to out.txt inside the report
        directory and inherits the log level of the main berper logger.
        Returns the configured logger.
        """

        self.main_logger.info('Creating custom local logger in %s/%s/%s/out.txt.' % (settings.BURPREPORTS,date_path,report_uid))

        # Create a new logger
        # NOTE(review): logging.getLogger() with no name returns the ROOT
        # logger, not a fresh one -- repeated calls would stack additional
        # file handlers on it.  Confirm this runs only once per process.
        ll = logging.getLogger()

        # Set the logging level of the report logger to inherit from
        # the main berper webapp
        ll.setLevel(self.main_logger.getEffectiveLevel())

        # Set the formatter and handler
        formatter = logging.Formatter('%(levelname)s %(asctime)s %(module)s %(process)d %(threadName)s %(thread)d %(message)s')
        handler = logging.FileHandler('%s/%s/%s/out.txt' % (settings.BURPREPORTS,date_path,report_uid))
        handler.setFormatter(formatter)
        
        # Add the formatter/handler to the report logger
        ll.addHandler(handler)

        return ll


    def _exit_burp(self, force=None):
        """Exit Burp.

        @param force: when truthy, also terminate this script immediately
                      with sys.exit(0) instead of waiting for Burp.
        """

        self.main_logger.info('Exiting Burp.')

        # Instruct burp to quit (False = do not prompt the user)
        self.callbacks.exitSuite(False)

        # Exit this script, if requested
        if force:
            # BUGFIX: sys was never imported at module level, so this
            # branch previously raised a NameError instead of exiting.
            import sys
            sys.exit(0)


    def processHttpMessage(self, tool_flag,isRequest,messageInfo):
        """
        Called by the HttpListener each time an HTTP request or HTTP response is generated from a Burp tool

        Requests only refresh the inactivity timer and request counter.
        Responses from the Extender (targeted scans) or Spider (spidered
        scans) are recorded and handed to the active or passive scanner;
        404 responses are saved for later investigation and not scanned.

        @param tool_flag: Burp flag identifying the originating tool.
        @param isRequest: True for a request, False for a response.
        @param messageInfo: the message object supplied by Burp.
        """

        tool_name = self.callbacks.getToolName(tool_flag)
        self.report_logger.debug('processHttpMessage triggered: %s, %s, %s.' % (tool_name,isRequest,messageInfo))

        # We are only interested in analyzing responses for vulnerabilities
        # If a request comes through, save the time, we'll use this to know
        # when requests have stopped and we can quit
        if isRequest:

            self.report_logger.debug('This is a request, not launching new scan: %s.' % messageInfo)
            self.last_request_time = datetime.datetime.now()
            self.num_requests += 1

        else:

            # We are only interested in responses from the Extender (these would be direct HTTP requests sent during a 
            # targeted scan) or the Spider (links followed during a spidered scan).
            self.report_logger.debug('This is a response, checking if it came from the Extender or Spider: %s.' % messageInfo)

            if tool_name == 'Extender' or tool_name == 'Spider':

                self.report_logger.info('Processing HTTP response from Extender or Spider: %s.' % messageInfo)

                # Keep track of the URLs we are scanning
                # NOTE(review): getUrl/getStatusCode/getProtocol are assumed
                # to exist on messageInfo in this (legacy) Burp API -- confirm.
                if not messageInfo.getUrl() in self.scanned_urls:
                    self.scanned_urls.append(messageInfo.getUrl())


                # Save any 404 responses for later investigation.  We are only saving 404's from the Extender
                # or Spider because then we know they were directly referenced, and not queries from the scanner
                if messageInfo.getStatusCode() == 404:
                    self.report_logger.debug('This response is a 404, not proceeding to scan: %s.' % messageInfo)
                    self.not_found.append(messageInfo.getUrl())
                    
                    # No sense in scanning these
                    return


                # Check if this is HTTP or HTTPS
                if messageInfo.getProtocol() == 'https':
                    self.report_logger.debug('protocol is: https: %s.' % messageInfo)
                    useHttps = True
                else:
                    self.report_logger.debug('protocol is: http %s.' % messageInfo)
                    useHttps = False


                # Active Scan
                if self.active:
                    self.report_logger.info('Launching active scan: host:%s, port:%s, https:%s, path:%s.' % (
                                                                                                messageInfo.getHost(), 
                                                                                                messageInfo.getPort(), 
                                                                                                useHttps,
                                                                                                messageInfo.getUrl()
                                                                                                )
                                                                                            )
                    scan = self.callbacks.doActiveScan(messageInfo.getHost(), messageInfo.getPort(), useHttps, messageInfo.getRequest())
                else:
                    self.report_logger.info('Launching passive scan: host:%s, port:%s, https:%s, path: %s.' % (
                                                                                                messageInfo.getHost(), 
                                                                                                messageInfo.getPort(), 
                                                                                                useHttps,
                                                                                                messageInfo.getUrl()
                                                                                                )
                                                                                            )
                    self.callbacks.doPassiveScan(messageInfo.getHost(), messageInfo.getPort(), useHttps, messageInfo.getRequest(),messageInfo.getResponse())
