# uncompyle6 version 3.2.3
# Python bytecode 3.6 (3379)
# Decompiled from: Python 3.6.8 |Anaconda custom (64-bit)| (default, Feb 21 2019, 18:30:04) [MSC v.1916 64 bit (AMD64)]
# Embedded file name: broker\worker_manager\base.py
import logging
from urllib.parse import urlencode
import time, datetime
from os.path import splitext
from sqlalchemy import select as sql_select
from db import Connection
from db.tables.targets import TargetRow
from db.tables.uploads import UploadRow
from db.tables.targets_configurations import TargetConfigurationRow
from settings_provider import settings
from api.classes.scan.auto import *
from helpers.encryption.exceptions import InvalidData
from helpers.hashing import sha256
from helpers.encryption.text_aes_cbc import decrypt
import os.path
from helpers.setup_types import *
from helpers.address import update_query_parameters
from broker.worker_manager.jobs_storage import JobsStorage
from broker.worker_manager.workers_storage import WorkersStorage
from helpers.hashing import hmac256hex
from scanners.constants import text_data_identifier
from scanners.scan_app.errors import *
from helpers.hashing.rand import get_random_string, ascii_lower_digits

# Module-level logger shared by all worker-manager implementations in this package.
logger = logging.getLogger("broker.worker_manager")


class WorkerManager:
    """Base interface for a pool of scan workers on one shard.

    Concrete subclasses implement the job lifecycle (``add_job``,
    ``abort_job``, ``pause_job``, ``delete_job``, ...) against a real
    backend.  This base class supplies:

    * scheduling start/end deadlines (``get_start_deadline``),
    * a best-effort retry queue for transient failures
      (``queue_job`` / ``process_queued_jobs``),
    * target preparation: decrypting stored credentials and turning
      uploaded dependencies into download links (``_prepare_target``).

    NOTE(review): the file header says this module was decompiled from
    bytecode (uncompyle6); several places below look like decompiler
    artifacts and are flagged inline rather than silently rewritten.
    """

    # One-letter type tags embedded at the front of dependency
    # descriptors (see generate_dependency_descriptor).
    DEPENDENCY_TARGET_UPLOADS = "u"
    DEPENDENCY_SCAN_STATUS_DB = "s"
    # Maximum delay before a job of the given schedule kind must have started.
    START_DEADLINE_INSTANT = datetime.timedelta(minutes=60)
    START_DEADLINE_SCHEDULED_FIXED = datetime.timedelta(minutes=5)
    START_DEADLINE_SCHEDULED = datetime.timedelta(hours=12)
    START_DEADLINE_CONTINUOUS = datetime.timedelta(hours=20)

    @classmethod
    def get_start_deadline(
        cls, schedule_start_date, schedule_time_sensitive, recurrence
    ):
        """Return the timedelta within which a scan must start.

        :param schedule_start_date: falsy for "start now" scans
        :param schedule_time_sensitive: truthy when the scheduled start
            time is strict (short deadline)
        :param recurrence: recurrence spec, or None for one-off scans
        :return: datetime.timedelta start deadline

        NOTE(review): the ``recurrence is None`` branch and the recurring
        branch resolve time-sensitive/non-sensitive identically; only the
        instant case differs.  START_DEADLINE_CONTINUOUS is never
        returned here — confirm whether that is intentional.
        """
        if recurrence is None:
            if not schedule_start_date:
                return cls.START_DEADLINE_INSTANT
            if schedule_time_sensitive:
                return cls.START_DEADLINE_SCHEDULED_FIXED
            return cls.START_DEADLINE_SCHEDULED
        else:
            if schedule_time_sensitive:
                return cls.START_DEADLINE_SCHEDULED_FIXED
            return cls.START_DEADLINE_SCHEDULED

    @classmethod
    def get_scan_end_deadline(cls):
        """Return the maximum wall-clock duration a scan may run before
        it is considered overdue (fixed at 7 days)."""
        return datetime.timedelta(days=7)

    def workers_maintenance(self):
        """
        Override this if there are maintenance steps required for workers
        :return:
        """
        pass

    @staticmethod
    def generate_s3_link(bucket_name, file_name, expiration_delta=600, connection=None):
        """Return a pre-signed S3 download link for ``file_name``.

        :param bucket_name: S3 bucket holding the object
        :param file_name: object key within the bucket
        :param expiration_delta: link lifetime in seconds
        :param connection: optional pre-built AWS connection
        :return: signed URL string (as produced by helpers.aws.s3.generate_link)
        """
        # Imported lazily so setups without the AWS helpers can still
        # import this module.
        from helpers.aws.s3 import generate_link

        return generate_link(bucket_name, file_name, expiration_delta, connection)

    @staticmethod
    def generate_dependency_descriptor(
        dependency_type, dependency, expiration_delta=1200
    ):
        """
        Build a signed, expiring descriptor for a worker-side download.

        Layout: type tag + 7 random chars + 8-hex-digit expiry unix
        timestamp + dependency id, followed by an HMAC-SHA256 hex digest
        of that prefix keyed with the ``uploads_salt`` setting.

        :param dependency_type: U(uploads), S(scan_state_file for resume)
            NOTE(review): the class constants are lowercase "u"/"s" —
            the docstring's uppercase letters appear to be informal.
        :param dependency: identifier of the dependency (e.g. upload_id)
        :param expiration_delta: validity window in seconds
        :return: descriptor string with appended HMAC
        """
        descriptor = "%s%s%08x%s" % (
            dependency_type,
            get_random_string(7, ascii_lower_digits),
            int(time.time() + expiration_delta),
            dependency,
        )
        return descriptor + hmac256hex(descriptor, settings.get("uploads_salt", ""))

    def __init__(
        self, shard_name, status_api_url, worker_storage, jobs_storage, debug_mode=False
    ):
        """
        :param shard_name: database shard this manager operates on
        :param status_api_url: base URL workers use to report status /
            fetch dependency downloads
        :param worker_storage: WorkersStorage-like backend
        :param jobs_storage: JobsStorage-like backend
        :param debug_mode: enables debugging behavior in subclasses
        """
        self.shard_name = shard_name
        # Scanning app identifiers this manager can run; filled by subclasses.
        self.supported_scanning_apps = set()
        self.workers_storage = worker_storage
        self.jobs_storage = jobs_storage
        self.status_api_url = status_api_url
        self.debug_mode = debug_mode
        # Pending retryable tasks: (func, expiration_ts, args, kwargs).
        self.work_queue = []

    def has_slots(self, scanning_app):
        """Return whether a worker slot is free for ``scanning_app``.

        Abstract; must be implemented by subclasses.
        """
        raise NotImplementedError()

    def queue_job(self, func, expiration_delta, *args, **kwargs):
        """
        This will enqueue a task to be retried until expiration timestamp
        :param func: callable to invoke on the next processing pass
        :param expiration_delta: seconds from now after which retries stop
        :param args: positional arguments for ``func``
        :param kwargs: keyword arguments for ``func``
        :return:
        """
        self.work_queue.append((func, time.time() + expiration_delta, args, kwargs))

    def process_queued_jobs(self):
        """
        This method should be called from time to time by the broker to allow the worker collection to do:
         maintenance steps, retry api requests, etc.

        Failed jobs are re-queued until their expiration timestamp passes;
        JobNotFound/WorkerNotFound are treated as terminal and dropped.
        :return:
        """
        # Swap out the queue first so jobs re-queued below run on the
        # NEXT pass instead of looping forever within this one.
        work_queue = list(self.work_queue)
        self.work_queue = []
        for job in work_queue:
            func, expiration, args, kwargs = job
            try:
                func(*args, **kwargs)
            except JobNotFound:
                # Job disappeared; nothing left to retry.
                pass
            except WorkerNotFound:
                # Worker disappeared; nothing left to retry.
                pass
            except Exception:
                # NOTE(review): duplicated word — message likely meant
                # "wm job failed".
                logger.exception("wm job failed failed")
                if time.time() < expiration:
                    self.work_queue.append((func, expiration, args, kwargs))
                else:
                    logger.error("aborting worker manager job")

    def _get_target_worker_id(self, target_id, scanning_app):
        """Look up the pinned worker id for a target/scanning app pair.

        Reads the target's "workers" configuration row (a mapping of
        scanning_app -> worker id).

        :param target_id: target primary key
        :param scanning_app: scanning application identifier
        :return: worker id, or None when no pinning is configured
        """
        with Connection(self.shard_name) as (db_connection):
            q = (
                sql_select((TargetConfigurationRow.value,))
                .where(TargetConfigurationRow.target_id == target_id)
                .where(TargetConfigurationRow.name == "workers")
            )
            value = db_connection.execute(q).scalar()
            if value:
                return value.get(scanning_app)
            return

    def _prepare_target(self, target_id):
        """
        Load a target row plus its configuration and build a ClsTarget.

        Decrypts stored credentials with a per-target secret (derived via
        sha256 from ``target_secret_salt``) and resolves uploaded
        dependencies — imported files, login sequence, client
        certificate — into download URLs appropriate for the setup type
        (on-premise descriptor URLs vs pre-signed S3 links).

        :param target_id: primary key of the target to prepare
        :return: ClsTarget, or None when the target does not exist

        NOTE(review): this module is decompiled; the nested if/else
        pyramid over configuration names below is almost certainly a
        flattened ``elif name == ...`` chain mangled by the decompiler
        (several branches are bare ``pass`` guards, and one ``if value:``
        branch is nested inside the falsy-``value`` else and is thus
        dead code).  The final ``return ClsTarget(...)`` is nested inside
        the non-on-premise client_certificate branch, so most code paths
        fall off the end and return None — confirm against the original
        source before relying on this method.
        """
        uploads_salt = settings.get("uploads_salt")
        with Connection(self.shard_name) as (db_connection):
            q = (
                sql_select((TargetRow,))
                .where(TargetRow.target_id == target_id)
                .where(TargetRow.deleted_at.is_(None))
            )
            target = db_connection.execute(q).fetchone()
            if not target:
                logger.warning("target not found %s", target_id)
                return
            # Per-target decryption key derived from the global salt.
            target_secret_password = settings.get("target_secret_salt")
            if target_secret_password:
                target_secret_password = sha256(target_id, target_secret_password)
            query = sql_select(
                (TargetConfigurationRow.name, TargetConfigurationRow.value)
            ).where(TargetConfigurationRow.target_id == target_id)
            login_sequence = None
            client_certificate = None
            tc = dict()  # decrypted configuration values, keyed by name
            imported_files = None
            for config in db_connection.execute(query).fetchall():
                name = config.name
                value = config.value
                if name == "login_sequence":
                    # Stored as {"upload_id": ...}; resolved to a link below.
                    login_sequence = value and value["upload_id"]
                elif name == "client_certificate":
                    client_certificate = value and value["upload_id"]
                elif name == "imported_files":
                    # Only accepted when the value is a list/tuple of upload ids.
                    imported_files = (
                        value and isinstance(value, (list, tuple)) and value
                    )
                else:
                    if name == "login":
                        pass
                    # NOTE(review): assumes ``value`` is a dict here; a
                    # non-dict value would raise on the subscript below.
                    if value["kind"] == "automatic":
                        try:
                            if target_secret_password:
                                value["credentials"]["password"] = decrypt(
                                    value["credentials"]["password"],
                                    target_secret_password,
                                ).decode("utf-8")
                        except (KeyError, InvalidData):
                            # Missing or plaintext credentials: keep as stored.
                            pass

                        tc[name] = value
                    else:
                        if name == "sensor_secret":
                            pass
                        if value:
                            try:
                                if target_secret_password:
                                    tc[name] = decrypt(
                                        value, target_secret_password
                                    ).decode("utf-8")
                            except InvalidData:
                                # Value was stored unencrypted.
                                tc[name] = value

                        else:
                            if name == "client_certificate_password":
                                pass
                            # NOTE(review): this ``if value:`` sits inside the
                            # branch where ``value`` is falsy, so it can never
                            # run — dead code, likely a decompiler artifact.
                            if value:
                                try:
                                    if target_secret_password:
                                        tc[name] = decrypt(
                                            value, target_secret_password
                                        ).decode("utf-8")
                                except InvalidData:
                                    tc[name] = value

                            else:
                                if (
                                    name == "ssh_credentials"
                                    and value
                                    and value.get("username")
                                    and value.get("password")
                                ):
                                    try:
                                        if target_secret_password:
                                            value["password"] = decrypt(
                                                value.get("password"),
                                                target_secret_password,
                                            ).decode("utf-8")
                                    except InvalidData:
                                        pass

                                    # NOTE(review): the ssh_key guard below is a
                                    # bare ``pass``, so decryption is attempted
                                    # even when ssh_key is absent; decrypt(None,
                                    # ...) behavior should be verified.
                                    if value.get("ssh_key"):
                                        pass
                                    try:
                                        if target_secret_password:
                                            value["ssh_key"] = decrypt(
                                                value.get("ssh_key"),
                                                target_secret_password,
                                            ).decode("utf-8")
                                    except InvalidData:
                                        pass

                                if value.get("key_password"):
                                    try:
                                        if target_secret_password:
                                            value["key_password"] = decrypt(
                                                value.get("key_password"),
                                                target_secret_password,
                                            ).decode("utf-8")
                                    except InvalidData:
                                        pass

                                    tc[name] = value
                                else:
                                    if name == "authentication":
                                        if value:
                                            pass
                                    if value.get("password"):
                                        try:
                                            if target_secret_password:
                                                value["password"] = decrypt(
                                                    value.get("password"),
                                                    target_secret_password,
                                                ).decode("utf-8")
                                        except InvalidData:
                                            pass

                                        tc[name] = value
                                    else:
                                        if name == "proxy":
                                            if value:
                                                pass
                                        if value.get("password"):
                                            try:
                                                if target_secret_password:
                                                    value["password"] = decrypt(
                                                        value.get("password"),
                                                        target_secret_password,
                                                    ).decode("utf-8")
                                            except InvalidData:
                                                pass

                                            tc[name] = value
                                        else:
                                            tc[name] = value

            # Resolve imported-file upload ids into download URLs,
            # dropping uploads that no longer exist or are inactive.
            if imported_files:
                q = (
                    sql_select((UploadRow,))
                    .where(UploadRow.upload_id.in_(imported_files))
                    .where(UploadRow.status.is_(True))
                )
                filtered_imported_files = []
                for upload in db_connection.execute(q).fetchall():
                    _, ext = splitext(upload.name)
                    if is_setup_type_on_premise_master():
                        _, ext = splitext(upload.name)  # NOTE(review): redundant re-split
                        dd = self.generate_dependency_descriptor(
                            self.DEPENDENCY_TARGET_UPLOADS, upload.upload_id
                        )
                        uu = (
                            self.status_api_url
                            + "downloads/"
                            + dd
                            + "?"
                            + urlencode(dict(ext=ext))
                        )
                        filtered_imported_files.append(uu)
                        print(uu)  # NOTE(review): debug leftover — consider logger.debug
                    else:
                        if has_feature(SetupFeatures.STORE_SCAN_DEPENDENCY_ON_S3):
                            extra_parameters = dict(ext=ext)
                            if uploads_salt:
                                extra_parameters["secret"] = sha256(
                                    target["owner_id"] + upload.upload_id, uploads_salt
                                )
                            link = self.generate_s3_link(
                                settings["storage.targets.s3.bucket"],
                                "targets/%s/%s%s"
                                % (target["owner_id"], upload.upload_id, ext),
                            )
                            filtered_imported_files.append(
                                update_query_parameters(link, extra_parameters)
                            )

                imported_files = filtered_imported_files
            # Resolve the login sequence upload: on-premise setups embed
            # the (decrypted) file contents; S3 setups return a link.
            if login_sequence:
                q = (
                    sql_select((UploadRow,))
                    .where(UploadRow.upload_id == login_sequence)
                    .where(UploadRow.status.is_(True))
                )
                upload = db_connection.execute(q).fetchone()
                if not upload:
                    logger.warning(
                        "login_sequence not found %s:%s", target_id, login_sequence
                    )
                    login_sequence = None
                else:
                    if is_setup_type_on_premise_master():
                        lsr_file_path = os.path.join(
                            settings.get("storage.targets"), login_sequence
                        )
                        try:
                            # NOTE(review): file handle is never closed; a
                            # ``with open(...)`` would be safer.
                            lsr_data = open(lsr_file_path, "rb").read()
                        except Exception as e:
                            # NOTE(review): uses the root ``logging`` module
                            # instead of the module-level ``logger``.
                            logging.exception("problems loading lsr file %s", e)
                            login_sequence = None
                        else:
                            if lsr_data:
                                if uploads_salt:
                                    secret = sha256(
                                        target["owner_id"] + login_sequence,
                                        uploads_salt,
                                    )
                                    try:
                                        lsr_data = (
                                            text_data_identifier
                                            + (
                                                decrypt(lsr_data, secret, b64=False)
                                            ).decode()
                                        )
                                    except InvalidData:
                                        lsr_data = (
                                            text_data_identifier + lsr_data.decode()
                                        )

                                # NOTE(review): when uploads_salt is set,
                                # lsr_data is already a prefixed str here, so
                                # ``.decode()`` would raise AttributeError and
                                # the prefix would be doubled; this line likely
                                # belongs in an ``else`` of ``if uploads_salt:``
                                # lost during decompilation.
                                lsr_data = text_data_identifier + lsr_data.decode()
                            login_sequence = lsr_data

                    else:
                        if has_feature(SetupFeatures.STORE_SCAN_DEPENDENCY_ON_S3):
                            _, ext = splitext(upload.name)
                            extra_parameters = dict(ext=ext)
                            if uploads_salt:
                                extra_parameters["secret"] = sha256(
                                    target["owner_id"] + login_sequence, uploads_salt
                                )
                            login_sequence = self.generate_s3_link(
                                settings["storage.targets.s3.bucket"],
                                "targets/%s/%s%s"
                                % (target["owner_id"], login_sequence, ext),
                            )
                            login_sequence = update_query_parameters(
                                login_sequence, extra_parameters
                            )
                        else:
                            login_sequence = None
            # Resolve the client certificate upload into a download URL.
            if client_certificate:
                q = (
                    sql_select((UploadRow,))
                    .where(UploadRow.upload_id == client_certificate)
                    .where(UploadRow.status.is_(True))
                )
                upload = db_connection.execute(q).fetchone()
                if not upload:
                    # NOTE(review): client_certificate is cleared before being
                    # logged, so the message always shows None for the id.
                    client_certificate = None
                    logger.warning(
                        "client_certificate not found %s:%s",
                        target_id,
                        client_certificate,
                    )
                else:
                    if is_setup_type_on_premise_master():
                        _, ext = splitext(upload.name)
                        dd = self.generate_dependency_descriptor(
                            self.DEPENDENCY_TARGET_UPLOADS, client_certificate
                        )
                        client_certificate = (
                            self.status_api_url
                            + "downloads/"
                            + dd
                            + "?"
                            + urlencode(dict(ext=ext))
                        )
                    else:
                        if has_feature(SetupFeatures.STORE_SCAN_DEPENDENCY_ON_S3):
                            _, ext = splitext(upload.name)
                            extra_parameters = dict(ext=ext)
                            if uploads_salt:
                                extra_parameters["secret"] = sha256(
                                    target["owner_id"] + client_certificate,
                                    uploads_salt,
                                )
                            client_certificate = self.generate_s3_link(
                                settings["storage.targets.s3.bucket"],
                                "targets/%s/%s%s"
                                % (target["owner_id"], client_certificate, ext),
                            )
                            client_certificate = update_query_parameters(
                                client_certificate, extra_parameters
                            )
                        else:
                            client_certificate = None
                        # NOTE(review): this return is only reachable on the
                        # non-on-premise client_certificate path; every other
                        # path returns None.  Almost certainly misplaced
                        # indentation from decompilation — the original likely
                        # returned at method level.
                        return ClsTarget(
                            address=target.address,
                            login_sequence=login_sequence,
                            client_certificate=client_certificate,
                            configuration=ClsTargetConfiguration(**tc),
                            imports=imported_files if imported_files else [],
                        )

    def add_job(
        self,
        scan_session_id,
        scan_key,
        allowed_targets,
        target_id,
        jobs,
        scanning_app,
        scan_id,
        **_
    ):
        """
        Attempt to start a job.  Abstract; subclasses implement the
        backend-specific dispatch.
        :param scan_id:
        :param scan_session_id:
        :param scan_key:
        :param allowed_targets:
        :param target_id:
        :param jobs:
        :param scanning_app:
        :param _: extra keyword arguments are accepted and ignored
        :return:
        """
        raise NotImplementedError()

    def check_single_job_status(self, scan_session_id, scanning_app):
        """
        Query the backend for the status of one job.  Abstract.
        :param scanning_app:
        :param scan_session_id:
        :return:
        """
        raise NotImplementedError()

    def abort_job(self, scan_session_id, scanning_app, reason=None):
        """
        Abort a running job.  Abstract.
        :param scanning_app:
        :param scan_session_id:
        :param reason:
        :return: true if job was lost and retrying aborting does not make sense
        """
        raise NotImplementedError()

    def pause_job(self, scan_session_id, scanning_app, reason=None):
        """
        Pause a running job.  Abstract.
        :param scanning_app:
        :param scan_session_id:
        :param reason:
        :return:
        """
        raise NotImplementedError()

    def delete_job(self, scan_session_id, scanning_app):
        """
        Remove a job from the backend.  Abstract.
        :param scanning_app:
        :param scan_session_id:
        :return:
        """
        raise NotImplementedError()

    def shutdown(self):
        """Flush the retry queue once before the manager goes away."""
        self.process_queued_jobs()

    def download_scan_state_db_from_worker(self, scan_session_job):
        """
        in some cases we may want to retrieve or do operations on the scan_state_db file generated by the worker
        :param scan_session_job:
        :return:
        """
        pass
