#!/usr/bin/python

#
# Copyright 2013 - Tom Alessi
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


"""Berper Scan Queue Runner

   Requirements:
    - A properly configured Berper environment (database, logging, configuration params)

"""

import os
import logging
import berper
import datetime
import Queue
import threading
import time
import traceback
import subprocess
import uuid
from optparse import OptionParser
from django.conf import settings
from berperHelpers import berperEmail
from berperHelpers import berperSQL


# -- GLOBALLY AVAILABLE -- #

# The path to save reports
# NOTE(review): unused in this file as shown -- presumably consumed elsewhere;
# confirm before removing.
report_path = ''

# Grab the berper webapp settings
# NOTE(review): django.conf.settings was imported above, before this variable
# is set; this works because Django resolves settings lazily, but setting the
# env var before the import would be clearer -- confirm.
os.environ['DJANGO_SETTINGS_MODULE'] = 'berper.settings'

# Setup an instance of the berper logger
# Logging is thread-safe so all threads will share the same logger
logger = logging.getLogger('berper')

# Parse Arguments
# (no custom options are defined, so this only provides --help / --version)
parser = OptionParser(description='berper scan queue runner', version='%prog 1.0')
(options, args) = parser.parse_args()

# -- END GLOBALLY AVAILABLE -- #


class Worker(threading.Thread):
    """Worker thread for running scans and saving data

    To avoid contention, each thread will have its own sql and mail helper instances
    The worker thread is responsible for the following:
        - Creating a unique ID for the scan (to make it easier to identify the thread)
        - Launching off Burp and giving it the ID of the scan, and the unique ID

    Burp will then be responsible for the following:
        - Updating the DB queue so that users know this scan is running
        - Obtaining information about the scan from the DB (active/passive, targeted/spidered, etc)
        - Running the scan
        - Saving the report and associated high level severity data
        - Updating the DB queue
    """

    def __init__(self, queue):
        """Store the shared job queue this worker will pull from."""
        threading.Thread.__init__(self)
        self.queue = queue


    def run(self):
        """Consume jobs from the queue until it is empty or a job fails.

        On any exception (a Burp/subprocess failure or anything else), log
        the traceback, optionally email it, and stop the thread -- it is
        preferable to have the worker die and someone investigate.
        """
        t_name = threading.current_thread().name
        t_pid = os.getpid()
        logger.info('Starting thread: %s with pid: %s' % (t_name,t_pid))

        # Create berperSQL and berperEmail instances
        # (one pair per thread to avoid contention)
        (bm,bs) = create_resources()

        while True:
            # Pop the next job off the queue and don't block if empty
            try:
                job = self.queue.get(False)
            except Queue.Empty:
                # Queue is empty, close down this thread.
                logger.info('Queue is empty, stopping thread %s' % t_name)
                break

            try:
                logger.debug('Job pulled off queue: %s' % job)

                # Generate a unique report ID
                report_id = uuid.uuid4()

                # Run burp and save start/stop time
                logger.info('Starting Burp and initiating scan %s...' % report_id)
                command = '%s %s -Djava.awt.headless=true -classpath %s:%s burp.StartBurp -I%s -R%s' % (
                     return_config(bm,bs,'java'),
                     return_config(bm,bs,'burp_memory'),
                     return_config(bm,bs,'mysql_java'),
                     return_config(bm,bs,'burpjar'),
                     job[0],
                     report_id
                )

                logger.info('Executing command: %s' % command)
                subprocess.check_call(command,shell=True)

                # Let the queue know I am done
                self.queue.task_done()

            except Exception:
                # Covers subprocess.CalledProcessError (most likely a problem
                # with Burp) and anything else we don't know how to handle;
                # both were previously handled identically, so they are merged.
                # Capture the traceback once -- it is reused for log and mail.
                error = traceback.format_exc()
                logger.error('Exception encountered with thread %s: %s' % (t_name,error))
                if settings.SMTP_NOTIFY_ERROR:
                    bm.send('Error','Exception encountered with thread %s: %s' % (t_name,error))

                # Mark the failed job done so the queue's unfinished-task
                # accounting stays consistent, then stop the thread.
                self.queue.task_done()
                break

        # Disconnect from the DB server before the thread exits
        bs.close()


def obtain_scans(bm,bs):
    """Return the ids of scans scheduled for the current day and time.

    Args:
        bm: berperEmail instance used to send error notifications
        bs: berperSQL instance used to run the query

    Returns:
        The rows returned by the query (scan ids), or whatever bs.execute
        yields on error (the caller treats a falsy result as "no scans").
    """

    # We need to look for jobs that are expected to run on the current day and time.
    # Capture "now" once so the time and day are guaranteed to agree
    # (two separate now() calls could straddle a minute or day boundary).
    now = datetime.datetime.now()
    scan_time = now.strftime('%H:%M:00')
    scan_day = now.strftime('%A').lower()

    logger.info('Looking for scans on %s and %s' % (scan_time,scan_day))

    # Create the query and execute.
    # scan_day is a column name, so it cannot be bound as a parameter; it is
    # safe to interpolate because it always comes from strftime('%A').
    # The time value is bound as a parameter (consistent with return_config).
    sql = """
            SELECT main_scan.id 
            FROM main_scan
            INNER JOIN main_schedule on main_scan.id=main_schedule.name_id
            WHERE time=%%s 
            AND %s=1
          """ % scan_day

    (rowcount,rows) = bs.execute(sql,(scan_time,))
    if bs.status != 0 and settings.SMTP_NOTIFY_ERROR:
        bm.send('Error','Error executing sql statement:\n%s\n\nERROR:\n%s' % (sql,bs.emessage))

    # return the data
    return rows


def create_resources():
    """Build the helper objects every caller needs.

    Returns:
        A (berperEmail.notify, berperSQL.sql) tuple: a mail helper for
        notifications and a SQL helper connected with the Django 'default'
        database credentials.  Both share the module-level logger.
    """

    # Database parameters come from the Django settings module
    db_conf = settings.DATABASES['default']

    # Mail helper for error/status notifications
    mailer = berperEmail.notify(
        settings.SMTP_HOST,
        settings.SMTP_SENDER,
        settings.SMTP_RECIPIENT,
        logger,
    )

    # SQL helper bound to the default database
    database = berperSQL.sql(
        db_conf['HOST'],
        db_conf['USER'],
        db_conf['PASSWORD'],
        db_conf['NAME'],
        logger,
    )

    return (mailer, database)


def return_config(bm,bs,option):
    """Return a single configuration value from the main_config table.

    Args:
        bm: berperEmail instance used to send error notifications
        bs: berperSQL instance used to run the query
        option: config_name to look up

    Returns:
        The config_value for the given option, or None if not found.
    """

    logger.info('Looking for option: %s' % option)

    # Create SQL statement (parameterized; the DB layer escapes the value)
    sql = """
           SELECT config_value
           FROM main_config
           WHERE config_name=%s"""

    # DB-API query parameters must be a sequence: '(option)' is just a
    # parenthesized string, so pass a one-element tuple instead.
    (rowcount,rows) = bs.execute(sql,(option,))
    if bs.status != 0 and settings.SMTP_NOTIFY_ERROR:
        bm.send('Error','Error executing sql statement:\n%s\n\nERROR:\n%s' % (sql,bs.emessage))

    # None when the option is absent
    if rowcount == 0:
        return None
    else:
        return rows[0][0]


def main():
    """Main program execution.

    Finds scans scheduled for the current day/time, queues them, and runs
    them with a pool of Worker threads (bounded by the 'scan_threads'
    config value).  Blocks until all workers have exited.
    """

    # Create berperSQL and berperEmail instances so we can do some work
    (bm,bs) = create_resources()

    # Check all domains and urls
    scans = obtain_scans(bm,bs)
    if not scans:
        logger.error('No new scans to run at this time')
        # Close the DB connection before exiting (previously leaked here)
        bs.close()
        exit(0)

    # Create a queue for the tests
    queue = Queue.Queue()

    # Add all tests to the queue
    for row in scans:
        queue.put(row)

    # Count the scans and set the number of threads
    scan_count = len(scans)
    logger.debug('Scan count: %s' % scan_count)

    # If there are fewer scans to run than the max thread count then don't
    # start as many threads (they'd just exit anyway with no work):
    # start the lesser of (scans to run) or (configured thread limit).
    config_max_threads = int(return_config(bm,bs,'scan_threads'))
    max_threads = min(scan_count, config_max_threads)

    # Start the workers.
    #   Notes:
    #   - Keep the main thread open until it is the only one left
    #   - Do not daemonize the threads nor join on the queue
    #   - If the threads experience exceptions, or if they detect
    #     that the queue is empty, they will die.
    #   - The above prevents the threads dying due to exceptions w/ the scanning
    #     jobs and the queue never emptying.  It is preferable to have the worker
    #     die and someone get alerted and investigate.
    for _ in range(max_threads):
        # Create a worker, pass it the queue and start it
        w = Worker(queue)
        w.start()

    # If all threads except this one are done, then quit
    # Check at 1 second intervals
    while threading.active_count() > 1:
        time.sleep(1)

    # All done
    logger.info('All done with scans')

    # Disconnect from the DB server
    bs.close()


# Entry point: run only when executed directly, never on import.
if __name__ == "__main__":
    main()
    # Exit explicitly with a success status
    exit(0)

