# uncompyle6 version 3.2.3
# Python bytecode 3.6 (3379)
# Decompiled from: Python 3.6.8 |Anaconda custom (64-bit)| (default, Feb 21 2019, 18:30:04) [MSC v.1916 64 bit (AMD64)]
# Embedded file name: api\handlers\status\scanstatus.py
__author__ = "Attila Gerendi (Sanyi)"
import hashlib, logging, os.path, errno, sqlalchemy, sqlalchemy.exc
from sqlalchemy import func as sql_func
from sqlalchemy import or_ as sql_or
from sqlalchemy import select as sql_select
from helpers.hashing import md5
from api.classes.status import *
from api.handlers.status import ScanStatusResourceHandler
from db import Connection
from db import FunctionWrapper
from db.tables.events import create_event, EventResources, EventSeverity
from db.tables.locations import LocationsTable, LocationRow
from db.tables.scan_session_vulns import ScanSessionVulnsTable, ScanSessionVulnRow
from db.tables.scan_sessions import ScanSessionRow, ScanSessionsTable
from db.tables.scan_session_jobs import ScanSessionJobRow
from helpers.scan_result_handling import (
    get_vulnerability_type,
    get_scan_data,
    get_target_id_for_domain,
    insert_target_vuln,
)
from helpers.address import extract_domain_from_address
from server.http_responses.http_status_codes import Http204, Http200
from server.http_responses.http_errors import Http409, Http401
from db.tables.targets import TargetRow, TargetsTable
from db.tables.target_vulns import TargetVulnRow
from settings_provider import settings
from helpers.future_events.base import FutureEvents
from helpers.setup_types import *

# Module-level logger for the scan-status API handlers.
logger = logging.getLogger("scan_status")

# Maps the short message "kind" reported by the WVS scanner to the event
# name stored in the events table; unknown kinds fall back to
# "scan_scanner_event" at the call site.
WVS_SCANNER_EVENTS = {
    "crawl_memlimit": "scan_wvs_crawl_memlimit",
    "aborted": "scan_wvs_aborted",
    "sensor": "scan_wvs_sensor_found",
    "no_sensor": "scan_wvs_sensor_not_found",
    "ls_error": "scan_wvs_ls_error",
    "au_error": "scan_wvs_au_error",
    "al_error": "scan_wvs_al_error",
    "crawling": "scan_wvs_crawling",
    "deep_scan": "scan_wvs_deep_scan",
    "scanning": "scan_wvs_scan_started",
    "finished": "scan_wvs_scan_finished",
    "scan_resumed": "scan_wvs_scan_resumed",
    "manual_browsing": "scan_wvs_manual_browsing",
}


def insert_location_placeholder(connection, scan_session_id, loc_id):
    """
    Insert an empty placeholder row into the locations table.

    Called whenever another row references a location (as source, parent,
    etc.) before the location itself has been received; the placeholder is
    later filled in by update_location_placeholder().

    :param connection: active DB connection
    :param scan_session_id: scan session the location belongs to
    :param loc_id: id of the not-yet-received location
    :return: None
    """
    query = LocationsTable.insert().values(
        loc_id=loc_id, scan_session_id=scan_session_id, name="", path=""
    )
    try:
        connection.execute(query)
    except sqlalchemy.exc.IntegrityError as err:
        # A primary-key violation means the row (placeholder or real) is
        # already there, which is fine; anything else is a real error.
        if "locations_pkey" not in str(err):
            raise


def update_location_placeholder(connection, scan_session_id, location):
    """
    Fill in a previously inserted placeholder row with the real location data.

    Retries the UPDATE while inserting placeholders for any source/parent
    location the row references that does not exist yet.

    :param connection: active DB connection
    :param scan_session_id: scan session the location belongs to
    :param location: ClsLocation-like object with the real location fields
    :raises Http409: when no placeholder row matched (loc_id collision)
    :return: None
    """
    # Normalize self-references and 0 ids to NULL.
    if location.parent_id == location.loc_id:
        location.parent_id = None
    if location.parent_id == 0:
        location.parent_id = None
    if location.source_id == 0:
        location.source_id = None
    if location.root_id == 0:
        location.root_id = None
    while True:
        query = (
            LocationsTable.update(
                values=dict(
                    name=location.name,
                    source_id=location.source_id,
                    parent_id=location.parent_id,
                    root_id=location.root_id,
                    path=location.path,
                    loc_type=location.loc_type,
                )
            )
            .where(LocationRow.loc_id == location.loc_id)
            .where(LocationRow.scan_session_id == scan_session_id)
            # Only placeholders (name == "") may be overwritten... note the
            # filter is `name != ""`; presumably matching real rows is the
            # intended collision check — the empty result raises below.
            .where(LocationRow.name != "")
            .returning(LocationRow.loc_id)
        )
        try:
            if not connection.execute(query).scalar():
                raise Http409("loc_id collision")
            return
        except sqlalchemy.exc.IntegrityError as err:
            message = str(err)
            # Missing FK targets: create the placeholder, then retry.
            if "(scan_session_id, source_id)" in message:
                insert_location_placeholder(
                    connection, scan_session_id, location.source_id
                )
            elif "(scan_session_id, parent_id)" in message:
                insert_location_placeholder(
                    connection, scan_session_id, location.parent_id
                )
            else:
                raise


class ScanStatus(ScanStatusResourceHandler):
    """Status-API resource handler for callbacks sent by scanning apps
    about one scan session (messages, status, locations, vulnerabilities)."""

    def __init__(
        self,
        scan_session_id,
        request_auth,
        current_shard="master",
        scan_session_events=None,
        **_
    ):
        """
        :param scan_session_id: session whose status is being reported
        :param request_auth: auth object carrying the caller's api_key
        :param current_shard: DB shard to operate on
        :param scan_session_events: optional queue for job re-check events
        :raises Http401: on missing auth, unknown session or key mismatch
        """
        super().__init__()
        if not request_auth:
            raise Http401("access denied")
        self.current_shard = current_shard
        # get_scan_data() returns False for an unknown session; a None
        # result triggers a second lookup through a direct DB connection.
        self.scan_data = get_scan_data(scan_session_id)
        if self.scan_data is False:
            raise Http401("scan_session_id %s not found", scan_session_id)
        if self.scan_data is None:
            with Connection(self.current_shard) as connection:
                self.scan_data = get_scan_data(scan_session_id, connection)
                if self.scan_data is False:
                    raise Http401("scan_session_id %s not found", scan_session_id)
        # The scan key (dashes stripped) doubles as the caller's API key.
        if self.scan_data["scan_key"].replace("-", "") != request_auth.api_key:
            raise Http401("access denied")
        self.session_events = scan_session_events
        self.last_known_status = dict()

    def __schedule_job_recheck(
        self, scan_session_id, scanning_app, status, time_delta=0
    ):
        """Queue a job re-check event, but only when the reported status
        actually changed since the last call for this session/app pair."""
        cache_key = ":".join((scan_session_id, scanning_app))
        if self.last_known_status.get(cache_key) == status:
            return
        self.last_known_status[cache_key] = status
        if self.session_events:
            self.session_events.add(cache_key, time_delta, namespace=self.current_shard)

    def add_app_message(self, scanning_app, scan_session_id, body, **_):
        """
        Record a message reported by a scanning application.

        ClsMessage:
            level: int (-1-unimportant, 0-info, 1-warning, 2-error, 3-critical)
            host: str
            kind: str — one of: crawl_memlimit, aborted, sensor, no_sensor,
                ls_error, au_error, al_error, crawling, deep_scan, scanning,
                finished, manual_browsing, scan_resumed
            data: str

        Every message is pushed to the scan-status cache; level -1 messages
        and scan_resumed messages are cached only, all others also create an
        event row and may raise the session's event_level.

        :param scanning_app: application that sent the message
        :param scan_session_id: session the message belongs to
        :param body: serialized ClsMessage payload
        :param _: ignored extra keyword arguments
        :return: Http204
        """
        if settings.get("features.status_api.logging.api", False):
            logger.debug(
                "add_app_message %s:%s = %s", scan_session_id, scanning_app, body
            )
        message = ClsMessage(**body)
        # Resolve the affected target when the message names a host.
        target_id = None
        host = None
        if message.get("host"):
            host = extract_domain_from_address(message.host)
            target_id = get_target_id_for_domain(scan_session_id, host)
        settings.get("scan_status_cache").add_message(
            self.current_shard,
            scanning_app,
            scan_session_id,
            kind=message.kind,
            data=message.get("data"),
            level=message.level,
            target_info=dict(target_id=target_id, host=host) if target_id else None,
        )
        if message.level == -1:
            # Unimportant messages are cached only, no event is created.
            return Http204()
        if message.kind == "manual_browsing":
            logger.debug(
                "manual browsing event received: scan_session_id:%s, message_data:%s",
                scan_session_id,
                message.get("data"),
            )
            manual_browsing_storage = settings.get("manual_browsing_storage")
            if manual_browsing_storage:
                manual_browsing_storage.update_current_manual_browsing_event(
                    scan_session_id=scan_session_id,
                    scanning_app=scanning_app,
                    data=message.get("data"),
                )
            else:
                logger.debug("warning no manual browsing storage defined")
        elif message.kind == "scan_resumed":
            logger.debug(
                "scan resumed event received: scan_session_id:%s",
                scan_session_id,
            )
            manual_browsing_storage = settings.get("manual_browsing_storage")
            if manual_browsing_storage:
                # A resume clears the current manual-browsing data.
                manual_browsing_storage.update_current_manual_browsing_event(
                    scan_session_id=scan_session_id,
                    scanning_app=scanning_app,
                    data=None,
                )
            else:
                logger.debug("warning no manual browsing storage defined")
            # scan_resumed never touches the events table.
            return Http204()
        with Connection(self.current_shard) as connection:
            # Continuous scans suppress user notifications for these events.
            inhibit_notification = bool(self.scan_data["continuous"])
            event_level = message.get("level", 0)
            event_query = create_event(
                WVS_SCANNER_EVENTS.get(message.kind, "scan_scanner_event"),
                owner_id=self.scan_data["owner_id"],
                user_id=self.scan_data["creator_id"],
                severity=event_level,
                resource_type=EventResources.scan_session,
                resource_id=scan_session_id,
                data=dict(
                    target_id=target_id,
                    scan_id=self.scan_data["scan_id"],
                    address=message.get("host"),
                    scanning_app=scanning_app,
                    kind=message.get("kind"),
                    data=message.get("data"),
                ),
                shard=self.current_shard,
                inhibit_notification=inhibit_notification,
            )
            connection.execute(event_query)
            # Raise the session's recorded event_level, never lower it.
            level_query = (
                ScanSessionsTable.update(values=dict(event_level=event_level))
                .where(ScanSessionRow.scan_session_id == scan_session_id)
                .where(
                    sql_or(
                        ScanSessionRow.event_level.is_(None),
                        ScanSessionRow.event_level < event_level,
                    )
                )
            )
            connection.execute(level_query)
        return Http204()

    def update_app_status(self, scanning_app, scan_session_id, body, **_):
        """
        Update the cached status of one scanning application.

        - changes in status should be reflected immediately into the database
        - web_scan_status should be only cached then saved on scan end

        AppStatus:
            status: str — one of: running, finished, crashed, aborted
            duration: int
            progress: int
            web_scan_status: WebScanStatus
                avg_response_time: int
                max_response_time: int
                request_count: int
                locations: int

        :param scanning_app: application reporting its status
        :param scan_session_id: session the status belongs to
        :param body: serialized AppStatus payload
        :param _: ignored extra keyword arguments
        :raises Exception: when the reported status is not a known value
        :return: Http204
        """
        app_status = ClsAppStatus(**body)
        if settings.get("features.status_api.logging.api", False):
            logger.debug(
                "update_app_status %s:%s = %s", scan_session_id, scanning_app, body
            )
        scan_status_cache = settings.get("scan_status_cache")
        if scan_status_cache:
            scan_status_cache.update_main_keys(
                self.current_shard,
                scanning_app,
                scan_session_id,
                dict(
                    progress=app_status.get("progress"),
                    web_scan_status=body.get("web_scan_status"),
                    status=app_status.status,
                    duration=app_status.get("duration"),
                ),
            )
        if app_status.status not in ("running", "finished", "aborted", "crashed"):
            raise Exception("unknown status %s" % app_status.status)
        # Fix: call the private method directly instead of through the
        # decompiler-exposed mangled name (_ScanStatus__schedule_job_recheck).
        self.__schedule_job_recheck(scan_session_id, scanning_app, app_status.status)
        return Http204()

    def update_scan_info(self, scan_session_id, scanning_app, body, **_):
        """
        Cache per-target scan info and stamp the target's last scan.

        This data should be cached and only saved on scan end into the
        database.

        ScanInfo:
            host: str
            aborted: boolean
            aborted_reason: str
            web_scan_status: WebScanStatus
                avg_response_time: int
                max_response_time: int
                request_count: int
                locations: int
            target_info: ScanTargetInfo
                server: str
                os: str
                sensor_detected: bool
                responsive: bool
                technologies: list of str

        :param scan_session_id: session the info belongs to
        :param scanning_app: application that produced the info
        :param body: serialized ScanInfo payload
        :param _: ignored extra keyword arguments
        :return: Http204
        """
        info = ClsScanInfo(**body)
        logger.debug("update_scan_info [%s] = %s", scan_session_id, body)
        # Default to the session's main target; override when a host is named.
        target_id = self.scan_data["target_id"]
        host = None
        if info.get("host"):
            host = extract_domain_from_address(info.host)
            target_id = get_target_id_for_domain(scan_session_id, host)
        settings.get("scan_status_cache").update_sub_target_keys(
            self.current_shard,
            scanning_app,
            scan_session_id,
            target_id=target_id,
            host=host,
            is_starting_host=target_id == self.scan_data["target_id"],
            new_data=dict(
                web_scan_status=body.get("web_scan_status"),
                target_info=body.get("target_info"),
                aborted=body.get("aborted"),
                aborted_reason=body.get("aborted_reason"),
            ),
        )
        # Stamp the target with the current scan unless it is already stamped.
        update_query = (
            TargetsTable.update(
                values=dict(
                    last_scan_session_id=scan_session_id,
                    last_scan_id=self.scan_data["scan_id"],
                )
            )
            .where(TargetRow.target_id == target_id)
            .where(
                sql_or(
                    TargetRow.last_scan_session_id != scan_session_id,
                    TargetRow.last_scan_session_id.is_(None),
                )
            )
        )
        # An explicitly unresponsive target raises a critical event.
        event_query = None
        target_info = info.get("target_info", None)
        if target_info and target_info.get("responsive", None) is False:
            event_query = create_event(
                "target_not_responsive",
                owner_id=self.scan_data["owner_id"],
                user_id=self.scan_data["creator_id"],
                severity=EventSeverity.CRITICAL,
                resource_type=EventResources.target,
                resource_id=scan_session_id,
                data=dict(
                    target_id=target_id,
                    scan_id=self.scan_data["scan_id"],
                    host=host,
                    scanning_app=scanning_app,
                ),
                shard=self.current_shard,
            )
        with Connection(self.current_shard) as db:
            db.execute(update_query)
            if event_query is not None:
                db.execute(event_query)
        return Http204()

    def upload_http_response(self, vuln_hash, body, **_):
        """
        Store the raw HTTP response body for a previously reported vulnerability.

        On-premise setups write the body under base_storage/http_resp/,
        sharded by the first three characters of the hash; AWS setups write
        it to a temporary file and hand it to the upload queue.

        :param vuln_hash: hash identifying the target vulnerability
        :param body: file-like request body (read in chunks)
        :param _: ignored extra keyword arguments
        :raises Http409: when the hash does not match any stored vulnerability
        :return: Http204
        """

        def _stream_body_to(filename):
            # Copy the request body to *filename* in 8 KiB chunks
            # (previously duplicated inline in both branches).
            with open(filename, "wb") as f:
                while True:
                    data = body.read(8192)
                    if not data:
                        break
                    f.write(data)

        q = sql_select((TargetVulnRow.target_id,)).where(
            TargetVulnRow.vuln_hash == vuln_hash
        )
        with Connection(self.current_shard) as db:
            target_id = db.execute(q).scalar()
        if not target_id:
            raise Http409("vuln hash not found")
        if is_setup_type_on_premise():
            base_storage = settings.get("base_storage")
            if not base_storage:
                # No storage configured: silently accept and drop the body.
                return Http204()
            # Shard the on-disk layout by the first three hash characters to
            # keep directory sizes bounded.
            filename = os.path.join(
                base_storage,
                "http_resp",
                vuln_hash[0],
                vuln_hash[1],
                vuln_hash[2],
                vuln_hash,
            )
            # exist_ok replaces the old try/except-EEXIST dance.
            os.makedirs(os.path.dirname(filename), exist_ok=True)
            _stream_body_to(filename)
        elif is_setup_type_aws():
            upload_queue = settings.get("http_response_upload_queue")
            logger.info("saving temporary file")
            if upload_queue:
                import tempfile

                filename = os.path.join(tempfile.gettempdir(), vuln_hash)
                logger.info("saving temporary file to %s", filename)
                _stream_body_to(filename)
                logger.info("saving temporary file to %s done, queueing", filename)
                upload_queue.put((target_id.replace("-", ""), vuln_hash, filename))
        return Http204()

    def add_vulnerability(self, scan_session_id, scanning_app, body, **_):
        """
        Record a vulnerability reported by a scanning application.

        The vulnerability is written twice: once per target (via
        insert_target_vuln) and once per scan session (scan_session_vulns) —
        the latter is skipped for continuous scans.

        :param scan_session_id: session the vulnerability was found in
        :param scanning_app: application that reported it
        :param body: serialized ClsVulnerability payload
        :param _: ignored extra keyword arguments
        :return: Http200 with ClsVulnerabilityData on success,
                 Http409 when vt_id or target cannot be resolved
        """
        if settings.get("features.status_api.logging.api", False):
            logger.debug(
                "add_vulnerability %s:%s = %s", scan_session_id, scanning_app, body
            )
        vuln = ClsVulnerability(**body)
        with Connection(self.current_shard) as (connection):
            vulnerability_type = get_vulnerability_type(vuln.vt_id, connection)
            if not vulnerability_type:
                logger.error("vt_id not found %s", vuln.vt_id)
                return Http409("vt_id not found")
            target_id = get_target_id_for_domain(scan_session_id, vuln.host, connection)
            if not target_id:
                logger.error("no target_id detected for %s: %s", scan_session_id, body)
                return Http409("no target_id detected")
            # Scope the scanner-provided hash to the target so identical
            # findings on different targets get distinct hashes.
            target_id_vuln_hash = target_id + ":::" + vuln.vuln_hash
            m = hashlib.md5()
            m.update(target_id_vuln_hash.encode())
            target_id_vuln_hash = m.hexdigest()
            # loc_id 0 means "no location" -> store NULL.
            if vuln.get("loc_id", 0) == 0:
                vuln.loc_id = None
            target_vuln = dict(
                target_id=target_id,
                scan_session_id=scan_session_id,
                vt_id=vuln.vt_id,
                source=vuln.source,
                url=vuln.get("loc_url", ""),
                loc_detail=vuln.loc_detail,
                vuln_hash=target_id_vuln_hash,
                first_seen=sql_func.now(),
                last_seen=sql_func.now(),
                details=vuln.details,
                details_type=vuln.details_type,
                sensor_details=vuln.get("sensor_details"),
                request=vuln.get("request"),
                criticality=self.scan_data["criticality"],
                severity=vulnerability_type["severity"],
                name=vulnerability_type["name"],
                tags=vuln.get("tags", []),
                use_ssl=vuln.get("use_ssl"),
                attack_vector=vuln.get("attack_vector"),
                continuous=self.scan_data["continuous"],
            )
            vuln_id, rediscovered = insert_target_vuln(
                connection, scan_session_id, target_vuln
            )
            if rediscovered:
                # A known vulnerability seen again gets its own event.
                event_query = create_event(
                    "vulnerability_rediscovered",
                    self.scan_data["owner_id"],
                    self.scan_data["creator_id"],
                    data=dict(
                        scan_id=self.scan_data["scan_id"],
                        scan_session_id=scan_session_id,
                        target_id=target_id,
                    ),
                    resource_type=EventResources.vulnerability,
                    resource_id=str(vuln_id),
                    shard=self.current_shard,
                )
                connection.execute(event_query)
            if self.scan_data["continuous"]:
                # Continuous scans keep only the per-target record.
                return Http204()
            scan_vuln = dict(
                scan_session_id=scan_session_id,
                source=vuln.source,
                vt_id=vuln.vt_id,
                loc_id=vuln.loc_id,
                loc_detail=vuln.loc_detail,
                vuln_hash=target_id_vuln_hash,
                details=vuln.details,
                details_type=vuln.details_type,
                sensor_details=vuln.get("sensor_details"),
                tags=vuln.get("tags"),
                request=vuln.get("request"),
                use_ssl=vuln.get("use_ssl"),
                attack_vector=vuln.get("attack_vector"),
            )
            # Retry loop: a missing locations FK row gets a placeholder
            # inserted, then the insert is attempted again.
            while 1:
                try:
                    q = (ScanSessionVulnsTable.insert().values(**scan_vuln)).returning(
                        ScanSessionVulnRow.vuln_id
                    )
                    scan_session_vuln_id = connection.execute(q).scalar()
                    scan_status_cache = settings.get("scan_status_cache")
                    if scan_status_cache:
                        scan_status_cache.add_vuln(
                            self.current_shard,
                            scanning_app,
                            scan_session_id,
                            vuln_name=vulnerability_type["name"],
                            vuln_id=str(scan_session_vuln_id),
                            target_info=dict(target_id=target_id, host=vuln.host),
                            severity=vulnerability_type["severity"],
                        )
                    break
                except sqlalchemy.exc.IntegrityError as e:
                    # Dispatch on the constraint named in the error text.
                    str_e = str(e)
                    if "Key (scan_session_id, loc_id)" in str_e:
                        insert_location_placeholder(
                            connection, scan_session_id, vuln.loc_id
                        )
                        continue
                    else:
                        if "ix_scan_session_vulns_unique_hash" in str_e:
                            logger.error("vuln_hash collision %s", scan_vuln)
                            raise Http409("vuln_hash collision")
                    raise

        return Http200(
            ClsVulnerabilityData(vuln_id=target_id_vuln_hash, rediscovered=rediscovered)
        )

    def update_location(self, scan_session_id, body, **_):
        """
        Merge tags, input_data and/or loc_type into an existing location.

        :param scan_session_id: session the location belongs to
        :param body: serialized ClsLocation payload (loc_id required)
        :param _: ignored extra keyword arguments
        :raises Http409: when loc_id is missing or does not exist
        :return: Http204
        """
        if self.scan_data["continuous"]:
            # Continuous scans do not track per-session locations.
            return Http204()
        if settings.get("features.status_api.logging.api", False):
            logger.debug("update_location %s = %s", scan_session_id, body)
        location = ClsLocation(**body)
        loc_id = location.get("loc_id", 0)
        if loc_id == 0:
            logger.debug("loc_id required")
            raise Http409("loc_id required")
        input_data = location.get("input_data")
        tags = location.get("tags")
        loc_type = location.get("loc_type")
        if not (input_data or tags or loc_type):
            # Nothing to update.
            logger.debug("no input_data tags or loc_type")
            return Http204()
        with Connection(self.current_shard, create_transactions=True) as connection:
            existing = connection.execute(
                sql_select((LocationRow.tags,))
                .where(LocationRow.loc_id == loc_id)
                .where(LocationRow.scan_session_id == scan_session_id)
            ).fetchone()
            if existing is None:
                logger.error(
                    "attempting to update undefined location %s:%s",
                    scan_session_id,
                    loc_id,
                )
                raise Http409("loc_id not found")
            update_values = dict()
            if tags:
                # New tags are merged with, never replace, existing ones.
                update_values["tags"] = list(set(existing.tags or ()).union(tags))
            if input_data:
                update_values["input_data"] = input_data
            if loc_type:
                update_values["loc_type"] = loc_type
            connection.execute(
                LocationsTable.update(values=update_values)
                .where(LocationRow.loc_id == loc_id)
                .where(LocationRow.scan_session_id == scan_session_id)
            )
        return Http204()

    def add_location(self, scan_session_id, body, **_):
        """
        Insert a new location for the scan session.

        If a placeholder row with the same loc_id already exists it is
        filled in via update_location_placeholder; missing source/parent
        references get placeholders inserted and the insert is retried.

        TODO: possible improvement: unique(parent_id, name), loc_id can be
        nul -> auto-generated, returns loc_id

        :param scan_session_id: session the location belongs to
        :param body: serialized ClsLocation payload (loc_id and name required)
        :param _: ignored extra keyword arguments
        :raises Http409: when loc_id or name is missing
        :return: Http204
        """
        if self.scan_data["continuous"]:
            # Continuous scans do not track per-session locations.
            return Http204()
        if settings.get("features.status_api.logging.api", False):
            logger.debug("add_location %s = %s", scan_session_id, body)
        location = ClsLocation.from_dict(body)
        loc_id = location.get("loc_id", 0)
        if loc_id == 0:
            logger.debug("loc_id required")
            raise Http409("loc_id required")
        location_name = location.get("name")
        if not location_name:
            logger.debug("name required")
            raise Http409("name required")
        loc_type = location.get("loc_type")
        # Normalize self-references and 0 ids to NULL (same rules as
        # update_location_placeholder).
        if location.parent_id == location.loc_id:
            location.parent_id = None
        if location.parent_id == 0:
            location.parent_id = None
        if location.source_id == 0:
            location.source_id = None
        if location.root_id == 0:
            location.root_id = None
        location_value = dict(
            loc_id=loc_id,
            scan_session_id=scan_session_id,
            name=location_name,
            source_id=location.source_id,
            parent_id=location.parent_id,
            root_id=location.root_id,
            path=location.path,
            loc_type=loc_type,
        )
        input_data = location.get("input_data")
        if input_data:
            location_value["input_data"] = input_data
        tags = location.get("tags")
        if tags:
            location_value["tags"] = tags
        with Connection(self.current_shard) as connection:
            while 1:
                try:
                    connection.execute(
                        LocationsTable.insert().values(**location_value)
                    )
                    if location.parent_id:
                        # Any location that gains a child must be a folder.
                        q = (
                            LocationsTable.update(values=dict(loc_type="folder"))
                            .where(LocationRow.loc_id == location.parent_id)
                            .where(LocationRow.scan_session_id == scan_session_id)
                            .where(LocationRow.loc_type == "file")
                        )
                        connection.execute(q)
                    break
                except sqlalchemy.exc.IntegrityError as e:
                    # Dispatch on the constraint named in the error text.
                    # (Removed dead `auto_generated_loc_id` flag that was
                    # initialized to False and never changed.)
                    str_e = str(e)
                    if "locations_pkey" in str_e:
                        # Row already exists as a placeholder: fill it in.
                        update_location_placeholder(
                            connection, scan_session_id, location
                        )
                        break
                    elif "(scan_session_id, source_id)" in str_e:
                        insert_location_placeholder(
                            connection, scan_session_id, location.source_id
                        )
                        continue
                    elif "(scan_session_id, parent_id)" in str_e:
                        insert_location_placeholder(
                            connection, scan_session_id, location.parent_id
                        )
                        continue
                    else:
                        raise

        return Http204()

    def add_location_tags(self, scan_session_id, loc_id, body, **_):
        """
        Attach tags to one location through the locations_add_tag DB function.

        :param scan_session_id: session the location belongs to
        :param loc_id: location to tag
        :param body: dict with an "items" list of tag strings
        :param _: ignored extra keyword arguments
        :return: Http204
        """
        if self.scan_data["continuous"]:
            # Continuous scans do not track per-session locations.
            return Http204()
        if settings.get("features.status_api.logging.api", False):
            logger.debug(
                "add_location_tags %s:%s = %s", scan_session_id, loc_id, body
            )
        unique_tags = set(body["items"])
        with Connection(self.current_shard) as connection:
            for tag in unique_tags:
                query = sql_select(
                    FunctionWrapper(
                        "locations_add_tag",
                        scan_session_id=scan_session_id,
                        loc_id=loc_id,
                        tag=tag,
                    )
                )
                connection.execute(query).scalar()

        return Http204()

    def add_external_urls(self, scanning_app, scan_session_id, body, **_):
        """
        Cache the list of external hosts discovered during the scan.

        this is tipically called by scanning_app = 'web' but only on the
        main host; external_hosts are cached only (not written to the DB).

        :param scanning_app: application reporting the hosts
        :param scan_session_id: session the hosts belong to
        :param body: dict with an "items" list of external hosts
        :param _: ignored extra keyword arguments
        :return: Http204
        """
        if settings.get("features.status_api.logging.api", False):
            logger.debug(
                "add_external_urls %s:%s = %s", scan_session_id, scanning_app, body
            )
        external_hosts = body["items"]
        if not external_hosts:
            return Http204()
        settings.get("scan_status_cache").update_sub_target_keys(
            self.current_shard,
            scanning_app,
            scan_session_id,
            target_id=self.scan_data["target_id"],
            host=self.scan_data["domain"],
            is_starting_host=True,
            new_data=dict(external_hosts=external_hosts),
        )
        return Http204()

    def can_start_scan(self, scanning_app, scan_session_id, worker_identifier, **_):
        """
        Decide whether the calling worker may start the scan job.

        Returns "wait" while no worker has been assigned yet; rejects the
        call when the assigned worker's hashed id does not match the caller.

        :param scanning_app: application asking to start
        :param scan_session_id: session the job belongs to
        :param worker_identifier: md5 of the worker id assigned to the job
        :param _: ignored extra keyword arguments
        :raises Http409: when the caller is not the assigned worker
        :return: dict(can_start="wait") or dict(can_start="true")
        """
        if settings.get("features.status_api.logging.api", False):
            logger.debug(
                "can_start_scan %s:%s:%s",
                scan_session_id,
                scanning_app,
                worker_identifier,
            )
        q = (
            sql_select((ScanSessionJobRow.worker_id,))
            .where(ScanSessionJobRow.scanning_app == scanning_app)
            .where(ScanSessionJobRow.scan_session_id == scan_session_id)
        )
        with Connection(self.current_shard) as db:
            worker_id = db.execute(q).scalar()
            if worker_id is None:
                # Fix: log through the module logger ("scan_status"), not the
                # root logger, consistent with the rest of this module.
                logger.debug(
                    "no worker found yet %s:%s", scanning_app, scan_session_id
                )
                return dict(can_start="wait")
            worker_hash = md5(worker_id)  # hoisted: was computed twice
            if worker_hash != worker_identifier:
                logger.warning(
                    "invalid worker %s:%s:%s",
                    worker_id,
                    worker_hash,
                    worker_identifier,
                )
                raise Http409("invalid worker")
        return dict(can_start="true")
