# uncompyle6 version 3.2.3
# Python bytecode 3.6 (3379)
# Decompiled from: Python 3.6.8 |Anaconda custom (64-bit)| (default, Feb 21 2019, 18:30:04) [MSC v.1916 64 bit (AMD64)]
# Embedded file name: wvs.py
__author__ = "sanyi"
# Global debug switch: when True, gevent/psycopg monkey-patching below is
# skipped so the process can be debugged with ordinary blocking I/O.
debug = False
if __name__ == "__main__":
    import ssl

    # Harden TLS by excluding weak cipher suites (3DES, RC4, SHA-1 CBC) from
    # the default cipher string used by newly created ssl contexts.
    # NOTE(review): ssl._DEFAULT_CIPHERS is a private CPython attribute and
    # is version-fragile -- confirm it exists on the target interpreter.
    ssl._DEFAULT_CIPHERS += ":!DES-CBC3-SHA:!TLS_RSA_WITH_3DES_EDE_CBC_SHA:!AES128-SHA:!AES256-SHA:!CAMELLIA128-SHA:!CAMELLIA256-SHA:!SEED-SHA:!ECDHE-RSA-DES-CBC3-SHA:!ECDHE-RSA-AES128-SHA:!ECDHE-RSA-AES256-SHA:!ECDHE-RSA-RC4-SHA:!RC4-SHA:!RC4-MD5"
    import signal

    if not debug:
        # Monkey-patch the stdlib for cooperative gevent I/O. This must run
        # before any module that touches sockets/threads is imported, which
        # is why these imports live here rather than at the top of the file.
        import gevent.monkey

        gevent.monkey.patch_all()
        import psycogreen.gevent

        # Make psycopg2 yield to the gevent hub during database waits.
        psycogreen.gevent.patch_psycopg()
    from helpers.application import on_premise_init
    from settings_provider.onpremise_settings import default_settings

    # Load on-premise configuration defaults before anything reads settings.
    on_premise_init(default_settings=default_settings)
import logging

__logger = logging.getLogger("service.master")
swagger_logger = logging.getLogger("swagger")
# Swagger is chatty at INFO; keep only warnings and above.
swagger_logger.setLevel(logging.WARN)


def go_master(run_state_signal):
    """Bootstrap and run the on-premise "master" node.

    Configures settings, builds the HTTP application (frontend API, status
    API, uploads, static files), then starts the scan broker, HTTP server,
    reporter, system updater, BXSS importer, mailers, worker checker and
    cleanup threads. Blocks until ``run_state_signal["shut_down"]`` becomes
    truthy (or the system updater stops running) and then shuts every
    subsystem down in order, logging but never propagating shutdown errors.

    :param run_state_signal: shared dict; a truthy "shut_down" key requests
        a cooperative shutdown (typically set from a signal handler).
    """
    from settings_provider import settings
    import time, os.path, tempfile, datetime
    from broker import Broker
    from broker.worker_manager.on_premise_multi_slave import WmOnPremiseStaticMultiSlave
    from server.routers.api import ApiRouter
    from server.routers.static import StaticRouter
    from server.application import Application
    from server.routers.upload import UploadRouter
    from api.handlers.uploads.targets_upload_handler import TargetsUploadHandler
    from helpers.licensing import get_local_license
    from bxss.importer import BXSSImporter
    from api.descriptors.frontend import data as frontend_api
    from api.descriptors.status import data as status_api
    from helpers.exceptions.api_error_handlers import api_error_handlers
    from helpers.status_cache.manual_browsing_requests_cache import (
        ManualBrowsingStatusCache,
    )
    from helpers.status_cache.scan_job_status_memory_cache import (
        ScanJobStatusMemoryCache,
    )
    from helpers.application.utils import (
        adjust_ssl_ca_bundle_path,
        create_system_updater,
        create_system_proxy_callback,
        adjust_version_info,
    )
    from api.handlers.frontend.account import Account
    from api.handlers.frontend.child_users import ChildUsers
    from api.handlers.frontend.intents import Intents
    from api.handlers.frontend.reports import Reports
    from api.handlers.frontend.results import Results
    from api.handlers.frontend.scanner import Scanner
    from api.handlers.frontend.scans import Scans
    from api.handlers.frontend.status import Status
    from api.handlers.frontend.target_groups import TargetGroups
    from api.handlers.frontend.targets import Targets
    from api.handlers.frontend.vulnerabilities import Vulnerabilities
    from api.handlers.frontend.notifications import Notifications
    from api.handlers.frontend.system_config import SystemConfig
    from api.handlers.frontend.issue_trackers import IssueTrackers
    from api.handlers.frontend.excluded_hours import ExcludedHours
    from api.handlers.frontend.workers import WorkerManager
    from api.handlers.status.scanstatus import ScanStatus
    from api.handlers.status.worker_master import WorkerMaster
    from api.handlers.status.dependency_server import DependencyServer
    from helpers.hashing.password import strategy_direct
    from helpers.rate_limiter import RateLimiter
    from broker.worker_manager.workers_storage import WorkersStorage
    from broker.worker_manager.jobs_storage import JobsStorage

    # Master-node policy: hashing strategies and per-credential brute-force
    # rate limiters (argument semantics defined by helpers.rate_limiter).
    settings.set("pause_scan_on_excluded_hours", True)
    settings.set("password_hashing_strategy", strategy_direct)
    settings.set("api_key_hashing_strategy", strategy_direct)
    settings.set("rt_api_key_brute_force", RateLimiter(0.5, 1, 10000))
    settings.set("rt_session_brute_force", RateLimiter(0.5, 1, 10000))
    settings.set("rt_upload_id_brute_force", RateLimiter(0.1, 1, 10000))
    settings.set("rt_login_brute_force", RateLimiter(0.1, 2, 10000))
    settings.set("rt_intent_brute_force", RateLimiter(0.1, 1, 10000))
    adjust_version_info()
    shard_name = "master"
    # Scan-state DB falls back to the system temp dir when no bxss storage
    # path is configured.
    settings.set(
        "storage.scan_state_db",
        settings.get("storage.bxss", default=tempfile.gettempdir()),
    )
    __logger.info("starting backend")
    __logger.info("getting license")
    try:
        get_local_license()
    except Exception as ex:
        # A bad/missing license is logged but does not abort startup.
        __logger.error("error reading license %s", ex)

    adjust_ssl_ca_bundle_path()
    __logger.info("creating system proxy callback")
    create_system_proxy_callback()
    __logger.info("creating system updater")
    system_updater = create_system_updater()
    if system_updater.first_run():
        # First run is update-only: the updater handled everything, exit now.
        __logger.info("Bye.")
        import sys

        sys.exit(0)
    settings.set("scan_status_cache", ScanJobStatusMemoryCache())
    __logger.info("registering session handlers")
    from server.routers.api.auth import (
        XAuthFactory,
        UiSessionAuth,
        ApiStatusAuth,
        ApiKeyAuth,
    )

    # One auth factory serves all routers; session brute-force limiter guards
    # credential checks.
    auth_factory = XAuthFactory(rate_limiter=settings.get("rt_session_brute_force"))
    auth_factory.register_auth_type(UiSessionAuth)
    auth_factory.register_auth_type(ApiStatusAuth)
    auth_factory.register_auth_type(ApiKeyAuth)
    UiSessionAuth.SESSION_SECRET = settings.get("server.frontend.session_secret")
    from helpers.mailer.sender.smtp import SmtpSender
    from helpers.mailer.threaded_mailer import ThreadedMailer
    from helpers.mailer.renderer.jinja import JinjaRenderer
    from helpers.mailer.settings_loader import on_premise_settings_loader
    from helpers.mailer.monthly import MonthlyMailer

    # Email templates live under the app dir when configured, else under CWD.
    if settings.get("wvs.app_dir"):
        email_templates_path = os.path.join(
            settings.get("wvs.app_dir"), "data", "templates", "emails"
        )
    else:
        email_templates_path = os.path.join(os.getcwd(), "data", "templates", "emails")
    settings.set("email_templates", email_templates_path)
    email_renderer = JinjaRenderer(email_templates_path)
    email_sender = SmtpSender(on_premise_settings_loader("master"))
    mailer_object = ThreadedMailer("master", email_sender, email_renderer)
    settings.set("mailer_object", mailer_object)
    import functools

    monthly_mailer = MonthlyMailer(
        generator=functools.partial(MonthlyMailer.on_premise_gen, shard=shard_name),
        mailer=email_sender,
        shard=shard_name,
    )
    settings.set("manual_browsing_storage", ManualBrowsingStatusCache())
    from helpers.uploads.target_actions import TargetUploadActionHandler
    from helpers.worker.status_manager import check_workers, WorkerStatusStorage

    worker_status_storage = WorkerStatusStorage()
    settings.set("worker_status_storage", worker_status_storage)
    settings.set("max_target_deletion_allowance", 2)
    from helpers.db_updates import update_targets_address_canonical_form

    # One-off schema/data migration before serving traffic.
    update_targets_address_canonical_form("master")
    from helpers.licensing.usage import update_license_usage

    update_license_usage()

    def create_on_premise_server():
        # Assemble the WSGI application: frontend API, uploads, status API
        # and (optionally) the static frontend, sharing one auth factory.
        __logger.info("creating the http server")
        routers = []
        front_api_router = ApiRouter(
            frontend_api,
            handler_classes=dict(
                Account=Account,
                ChildUsers=ChildUsers,
                Intents=Intents,
                Reports=Reports,
                Results=Results,
                Scanner=Scanner,
                Scans=Scans,
                Status=Status,
                TargetGroups=TargetGroups,
                Targets=Targets,
                Vulnerabilities=Vulnerabilities,
                Notifications=Notifications,
                SystemConfig=SystemConfig,
                IssueTrackers=IssueTrackers,
                ExcludedHours=ExcludedHours,
                WorkerManager=WorkerManager,
            ),
            filter_on_host=False,
            filter_on_schema=False,
            auth_factory=auth_factory,
        )
        routers.append(front_api_router)
        upload_router = UploadRouter(
            "/uploads",
            TargetsUploadHandler(
                settings["storage.targets"],
                TargetUploadActionHandler(
                    "master", file_storage=settings["storage.targets"]
                ),
            ),
        )
        routers.append(upload_router)
        status_api_router = ApiRouter(
            status_api,
            handler_classes=dict(
                ScanStatus=ScanStatus,
                WorkerMaster=WorkerMaster,
                DependencyServer=DependencyServer,
            ),
            filter_on_host=False,
            filter_on_schema=False,
            auth_factory=auth_factory,
        )
        routers.append(status_api_router)
        static_document_root = settings.get("server.static_document_root")
        if static_document_root:
            front_static_route = StaticRouter(
                static_document_root,
                index="index.html",
                extra_headers={"X-Frame-Options": "SAMEORIGIN"},
            )
            routers.append(front_static_route)
        return Application(routers, error_handlers=api_error_handlers)

    settings.set(
        "wvs_checks_file_path",
        os.path.join(settings["server"]["static_document_root"], "checks.json"),
    )
    ssl_certificate = settings.get("server.ssl.certificate")
    ssl_private_key = settings.get("server.ssl.private_key")
    is_https = False
    # BUG FIX: the decompiled control flow had lost its else-branches here,
    # so both warnings and the secure_cookie downgrade executed even when
    # HTTPS was configured correctly. Restored the branching implied by the
    # warning messages: HTTPS only when both files are configured AND exist.
    if ssl_certificate and ssl_private_key:
        if os.path.exists(ssl_certificate) and os.path.exists(ssl_private_key):
            is_https = True
        else:
            __logger.warning("SSL certificate is set but not present!")
            settings.set("secure_cookie", False)
    else:
        __logger.warning("SSL certificate is not set!")
        settings.set("secure_cookie", False)
    # Base URL the worker manager uses to reach the local status API.
    wvs_status_api_url = "https://localhost%s" if is_https else "http://localhost%s"
    server_port = settings.get("server.port")
    if server_port:
        wvs_status_api_url = wvs_status_api_url % (":%s" % server_port)
    else:
        # BUG FIX: without a configured port the literal "%s" placeholder
        # leaked into the URL; drop it instead.
        wvs_status_api_url = wvs_status_api_url % ""
    manual_browsing_storage = settings.get("manual_browsing_storage")
    wvs_status_api_url += "/status/v1/"
    __logger.info("status api on %s", wvs_status_api_url)
    worker_manager = WmOnPremiseStaticMultiSlave(
        shard_name,
        wvs_status_api_url,
        worker_storage=WorkersStorage("master"),
        jobs_storage=JobsStorage("master"),
        debug_mode=settings.get("wvs.debug_mode", False),
    )
    __logger.info("creating the broker")
    broker = Broker(
        shard_name,
        worker_manager,
        settings.get("continuous.profile.p1", "11111111-1111-1111-1111-111111111114"),
        settings.get("continuous.profile.p2", "11111111-1111-1111-1111-111111111118"),
        manual_browsing_storage=manual_browsing_storage,
    )
    from threading import Thread

    __logger.info("starting the broker")
    broker_thread = Thread(target=broker.loop)
    broker_thread.start()

    def create_server_instance():
        # Build the gevent WSGI server, with TLS key/cert when HTTPS is on.
        from gevent import pywsgi

        extra_args = {}
        if is_https:
            extra_args["keyfile"] = ssl_private_key
            extra_args["certfile"] = ssl_certificate
        application = create_on_premise_server()
        from helpers.pywsgi_extra import WSGIHandlerTcpNoDelay

        # BUG FIX(review): the decompiler dropped WSGIServer's positional
        # arguments -- `application` was built but never used and WSGIServer
        # cannot start without a listener. Reconstructed from the settings
        # this function already reads; confirm bind host/default ports
        # against the original binary.
        listen_port = int(server_port) if server_port else (443 if is_https else 80)
        return pywsgi.WSGIServer(
            ("0.0.0.0", listen_port),
            application,
            handler_class=WSGIHandlerTcpNoDelay,
            **extra_args
        )

    from reporter.reporter import Reporter

    def reporter_process():
        # Worker loop: poll for queued reports, generate them, back off on
        # errors; exits when shutdown is requested.
        __logger.info("reporter worker started")
        reporter = Reporter(
            "master",
            reports_local_storage=settings.get("storage.reports"),
            pdf_converter=settings.get("pdf_converter"),
        )
        while not run_state_signal.get("shut_down"):
            try:
                report_data = reporter.get_next_report()
                if not report_data:
                    time.sleep(5)
                else:
                    reporter.generate_report(report_data)
            except Exception as _e:
                __logger.exception("exception in main loop %s", _e)
                time.sleep(10)

        __logger.info("reporter worker stop.")

    server_instance = create_server_instance()
    __logger.info("starting the http server")
    server_thread = Thread(target=server_instance.serve_forever)
    server_thread.start()
    __logger.info("starting the reporter")
    reporter_thread = Thread(target=reporter_process)
    reporter_thread.start()
    __logger.info("starting the system updater")
    updater_thread = Thread(target=system_updater.run)
    updater_thread.start()
    __logger.info("starting the acumonitor thread")
    # App dir defaults to the parent of the current working directory.
    wvs_basedir = settings.get(
        "wvs.app_dir",
        default=os.path.normpath(os.getcwd() + os.path.sep + ".." + os.path.sep),
    )
    bxss_importer = BXSSImporter(
        wvs_app_dir=wvs_basedir,
        shard=shard_name,
        requests_proxies_cb=settings.get("system_proxy_cb", None),
    )
    acumonitor_thread = Thread(target=bxss_importer.loop)
    acumonitor_thread.start()
    __logger.info("starting the mailer thread")
    mailer_thread = Thread(target=mailer_object.run)
    mailer_thread.start()
    __logger.info("starting the monthly assessment thread")
    monthly_mailer_thread = Thread(target=monthly_mailer.run)
    monthly_mailer_thread.start()

    def worker_checker_process():
        # Health-check registered workers roughly once a minute, sleeping in
        # 1s slices so a shutdown request is noticed promptly.
        __logger.info("worker_checker started")
        while not run_state_signal.get("shut_down"):
            try:
                check_workers(worker_status_storage, "master")
            except Exception as _e:
                __logger.exception("exception in worker_checker main loop %s", _e)

            for _ in range(60):
                if run_state_signal.get("shut_down"):
                    break
                time.sleep(1)

        __logger.info("worker_checker stop.")

    worker_checker_thread = Thread(target=worker_checker_process)
    worker_checker_thread.start()
    time.sleep(1)
    __logger.info("system init completed.")
    from helpers.cleanup import PeriodicCleanupWorker
    from helpers.cleanup.on_premise_scan_state import CleanupScanState

    cleanup_worker = PeriodicCleanupWorker()
    cleanup_worker.add(CleanupScanState(datetime.timedelta(days=1)))
    cleanup_worker_thread = Thread(target=cleanup_worker.run)
    cleanup_worker_thread.start()
    # Main wait loop: leave on external shutdown request, or force one if
    # the system updater stopped on its own (e.g. an update needs a restart).
    while True:
        if run_state_signal.get("shut_down"):
            break
        if not system_updater.running:
            run_state_signal["shut_down"] = True
        time.sleep(1)

    # Orderly teardown; every step is best-effort so one failing subsystem
    # cannot block the rest of the shutdown.
    __logger.info("shutting down...")
    try:
        cleanup_worker.stop()
    except Exception as er:
        __logger.exception("shutdown failed with %s", er)

    __logger.info("shutting down broker")
    try:
        broker.shutdown()
    except Exception as er:
        __logger.exception("shutdown failed with %s", er)

    __logger.info("shutting bxss engine")
    try:
        bxss_importer.stop()
    except Exception as er:
        __logger.exception("shutdown failed with %s", er)

    __logger.info("shutting server instance")
    try:
        server_instance.close()
    except Exception as er:
        __logger.exception("shutdown failed with %s", er)

    __logger.info("shutting worker collection")
    try:
        worker_manager.shutdown()
    except Exception as er:
        __logger.exception("shutdown failed with %s", er)

    __logger.info("shutting update engine")
    try:
        system_updater.stop()
    except Exception as er:
        __logger.exception("shutdown failed with %s", er)

    __logger.info("shutting mailer thread")
    try:
        mailer_object.stop()
    except Exception as er:
        __logger.exception("shutdown failed with %s", er)

    __logger.info("shutting monthly mailer thread")
    try:
        monthly_mailer.stop()
    except Exception as er:
        __logger.exception("shutdown failed with %s", er)

    __logger.info("bye.")


if __name__ == "__main__":
    from settings_provider import settings

    settings.set("features.pause_resume", True)
    # Shared run-state dict polled by go_master/go_worker; the signal handler
    # flips "shut_down" to request a cooperative stop.
    sgn = dict()

    def signal_handler(_signal, _frame):
        # SIGINT -> cooperative shutdown (mutates the shared dict; no
        # rebinding, so no `global` statement is needed).
        __logger.debug("shutdown signal received:")
        sgn["shut_down"] = True

    signal.signal(signal.SIGINT, signal_handler)
    from helpers.single_instance import SingleInstance
    from helpers.hashing import md5
    import os

    # Guard against two instances of the same role running from the same
    # working directory.
    SingleInstance(md5(os.getcwd() + settings.get("engineonly", "0")))
    # BUG FIX: in the decompiled source this try-block was dedented to module
    # level, so merely importing this file executed it and raised (and then
    # swallowed) a NameError on `settings`/`sgn`. It belongs inside the
    # __main__ guard.
    try:
        if settings.get("engineonly") == "1":
            # "engineonly" == "1" runs this process as a scan worker instead
            # of the master.
            from on_premise_worker import go_worker

            go_worker(sgn)
        else:
            go_master(sgn)
    except Exception as e:
        logging.exception("application failed with %s", e)