# uncompyle6 version 3.2.3
# Python bytecode 3.6 (3379)
# Decompiled from: Python 3.6.8 |Anaconda custom (64-bit)| (default, Feb 21 2019, 18:30:04) [MSC v.1916 64 bit (AMD64)]
# Embedded file name: bxss\importer.py
__author__ = "Attila Gerendi (Sanyi)"
import logging, time, hashlib, os, os.path, json
from sqlalchemy import func as sql_func
from sqlalchemy import select as sql_select
import sqlalchemy.exc
from db.tables.events import create_event, EventResources
from db.tables.scan_session_vulns import ScanSessionVulnsTable
from db.tables.locations import LocationsTable
from bxss.bxss import BXSSHelper
from helpers.licensing import get_bxss_credentials
from helpers.scan_result_handling import (
    get_vulnerability_type,
    insert_target_vuln,
    get_scan_data,
)
from helpers.licensing.features import Features, BaseLicense
from helpers.setup_types import *
from db import Connection
from db.tables.scan_session_jobs import ScanSessionJobRow
from settings_provider import settings

# Module-level logger for the bxss importer.
logger = logging.getLogger("bxss.importer")
# Alias of the locations table, used when joining a location against its root row.
root_location_table_alias = LocationsTable.alias("root_table")
# Cache: acumonitor_id -> scan_session_id; the value False is a negative-lookup
# marker (id was searched in the db and not found), distinct from None (not yet looked up).
acumon_id_to_scan_session_id_cache = {}
cash_alerts_during_scan = []


class BXSSImporter:
    """
    Class to automatically pull acumonitor alerts for an acumonitor account, detect the
    corresponding scan/session/job, create the vulnerability entries and related events.

    NOTE(review): this module was recovered from decompiled bytecode (see file header).
    Several control-flow blocks were mis-nested by the decompiler and have been
    restored to their evidently intended shape; each restoration is marked with a
    BUGFIX comment below.
    """

    def __init__(
        self,
        wvs_app_dir,
        shard,
        requests_proxies_cb=None,
        bxss_api_key=None,
        bxss_user_id=None,
    ):
        """
        :param wvs_app_dir: application directory forwarded to BXSSHelper
        :param shard: database shard used for every Connection() opened here
        :param requests_proxies_cb: optional zero-arg callable returning a
            requests-style proxies dict; forced to None on AWS setups
        :param bxss_api_key: AcuMonitor API key (used on AWS setups only)
        :param bxss_user_id: AcuMonitor user id (used on AWS setups only)
        """
        if is_setup_type(AWS):
            # AWS instances talk to AcuMonitor directly, never through a proxy.
            requests_proxies_cb = None
        self.shard = shard
        self.bxx_helper = BXSSHelper(
            wvs_app_dir, requests_proxies_cb=requests_proxies_cb
        )
        self.should_stop = False
        self.bxss_api_key = bxss_api_key
        self.bxss_user_id = bxss_user_id
        if is_setup_type(ON_PREMISE_MASTER):
            if requests_proxies_cb:
                proxies = requests_proxies_cb()
                logger.debug("using proxies: %s", proxies)

    def stop(self):
        """Ask loop() (and any __sleep() in progress) to terminate soon."""
        self.should_stop = True

    def process_alerts(self, raw_alerts):
        """
        Map each raw AcuMonitor alert to its scan session, filter out alerts whose
        scan state db file is missing, run them through BXSSHelper.process_vulns and
        import each result via process_alert().

        :param raw_alerts: list of alert dicts as returned by poll_acumonitor;
            each must carry a "req_id" of the form "<acumonitor_id>-..."
        """
        scan_state_db_storage = settings.get("storage.scan_state_db")
        if not scan_state_db_storage:
            # was print() in the decompiled source; use the module logger for consistency
            logger.error("scan_state_db_storage not defined")
            return
        filtered_alerts = []
        with Connection(self.shard) as db:
            for alert in raw_alerts:
                logger.debug("process_bxss_alert %s", alert["req_id"])
                try:
                    acumonitor_id = int(alert["req_id"].split("-")[0])
                except Exception:
                    logger.error(
                        "could not decode acumonitor_id %s",
                        alert["req_id"].split("-")[0],
                    )
                    continue

                scan_session_id = acumon_id_to_scan_session_id_cache.get(acumonitor_id)
                if scan_session_id is False:
                    # negative-cache hit: this id was already looked up and not found
                    continue
                if scan_session_id is None:
                    q = sql_select((ScanSessionJobRow.scan_session_id,)).where(
                        ScanSessionJobRow.acumonitor_id == acumonitor_id
                    )
                    scan_session_id = db.execute(q).scalar()
                    if scan_session_id is None:
                        acumon_id_to_scan_session_id_cache[acumonitor_id] = False
                        logger.warning(
                            "acumonitor_id %s not found in the db", acumonitor_id
                        )
                        continue
                    acumon_id_to_scan_session_id_cache[
                        acumonitor_id
                    ] = scan_session_id
                # BUGFIX: in the decompiled source everything from here to the end of
                # the loop sat after an unconditional `continue`, making it unreachable
                # and leaving filtered_alerts永 empty; restored to loop level.
                filename = os.path.join(
                    scan_state_db_storage, "wvs", scan_session_id
                )
                if os.path.exists(filename):
                    alert["scan_session_id"] = scan_session_id
                    alert["filename"] = filename
                    alert["acumonitor_id"] = acumonitor_id
                    filtered_alerts.append(alert)
                else:
                    logger.warning(
                        "scan_state_db not found %s", scan_session_id
                    )

        if not filtered_alerts:
            return
        preprocessed_alerts = self.bxx_helper.process_vulns(filtered_alerts)
        if len(preprocessed_alerts) != len(filtered_alerts):
            logger.error("bxx_helper.process_vulns returned different response length")
        # carry the resolved scan_session_id over to the processed copies
        for processed, original in zip(preprocessed_alerts, filtered_alerts):
            processed["scan_session_id"] = original["scan_session_id"]

        for alert in preprocessed_alerts:
            try:
                self.process_alert(alert)
            except Exception as e:
                logger.exception("processing alert failed with %s", e)

    def process_alert(self, alert):
        """
        Import a single preprocessed alert: resolve its vulnerability type, scan data
        and target, then insert the target vulnerability (emitting a
        rediscovered/new event) and — for non-continuous scans — the scan session
        vulnerability row.
        """
        logger.debug("importing %s", alert)
        vt_id = alert["vt_id"]
        with Connection(self.shard) as db:
            vulnerability_type = get_vulnerability_type(vt_id, db)
            if not vulnerability_type:
                logger.warning("vt_id %s not found for alert %s", vt_id, alert)
                return
            scan_data = get_scan_data(alert["scan_session_id"], db)
            if not scan_data:
                logger.warning(
                    "scan_data not found for acumonitor_id=%s alert %s",
                    alert["scan_session_id"],
                    alert,
                )
                return
            # Resolve the target: exact host match first, then the allowed-targets
            # map, finally the host stripped of its port.
            if alert["host"] == scan_data["host"]:
                target_id = scan_data["target_id"]
            else:
                target_id = scan_data["allowed_targets"].get(alert["host"])
                if not target_id:
                    target_id = scan_data["allowed_targets"].get(
                        alert["host"].split(":")[0]
                    )
            if target_id is None:
                logger.warning(
                    "no target id for %s:%s",
                    scan_data["scan_session_id"],
                    alert["host"],
                )
                return
            # Deduplication hash scoped to the target.
            target_id_vuln_hash = target_id + ":::" + alert["vuln_hash"]
            m = hashlib.md5()
            m.update(target_id_vuln_hash.encode())
            target_id_vuln_hash = m.hexdigest()
            details = alert["details"]
            if isinstance(details, dict):
                try:
                    details = json.dumps(details)
                except Exception as e:
                    logger.exception(
                        "could not json serialize alert details %s, %s", e, details
                    )
            # BUGFIX: in the decompiled source everything below was nested under
            # `if isinstance(details, dict):`, silently dropping alerts whose
            # details were already a string; only the json.dumps belongs there.
            target_vuln = dict(
                target_id=target_id,
                scan_session_id=scan_data["scan_session_id"],
                vt_id=alert["vt_id"],
                source=alert["source"],
                url=alert["loc_url"],
                loc_detail="",
                vuln_hash=target_id_vuln_hash,
                first_seen=sql_func.now(),
                last_seen=sql_func.now(),
                details=details,
                details_type=alert["details_type"],
                request=alert["request"],
                criticality=scan_data["criticality"],
                severity=vulnerability_type["severity"],
                name=vulnerability_type["name"],
                tags=[],
                use_ssl=None,
                attack_vector=None,
                continuous=scan_data["continuous"],
            )
            vuln_id, rediscovered = insert_target_vuln(
                db, scan_data["scan_session_id"], target_vuln
            )
            if vuln_id:
                event_data = dict(
                    scan_id=scan_data["scan_id"],
                    scan_session_id=scan_data["scan_session_id"],
                    target_id=target_id,
                )
                if rediscovered:
                    logger.debug("fixed alert rediscovered")
                    event_name = "vulnerability_rediscovered"
                else:
                    event_name = "new_acumonitor_vulnerability"
                event_query = create_event(
                    event_name,
                    scan_data["owner_id"],
                    scan_data["creator_id"],
                    data=event_data,
                    resource_type=EventResources.vulnerability,
                    resource_id=str(vuln_id),
                    shard=self.shard,
                )
                db.execute(event_query)
            if scan_data["continuous"]:
                logger.debug("continuous scan, scan vuln will not be generated")
                return
            scan_vuln = dict(
                scan_session_id=scan_data["scan_session_id"],
                source=alert["source"],
                vt_id=alert["vt_id"],
                loc_id=alert["loc_id"],
                loc_detail="",
                vuln_hash=target_id_vuln_hash,
                details=alert["details"],
                details_type=alert["details_type"],
                tags=[],
                request=alert["request"],
                use_ssl=None,
                attack_vector=None,
            )
            try:
                db.execute(ScanSessionVulnsTable.insert().values(**scan_vuln))
            except sqlalchemy.exc.IntegrityError as e:
                # Distinguish the two known constraint violations from everything else.
                str_e = str(e)
                if "ix_scan_session_vulns_unique_hash" in str_e:
                    logger.error("vuln_hash collision %s", scan_vuln)
                elif "Key (scan_session_id, loc_id)" in str_e:
                    logger.error("loc_id not found for %s", scan_vuln)
                else:
                    logger.exception("bxss scan session vuln failed with %s", e)

            logger.debug("import done")

    def get_new_alerts(self):
        """
        Fetch credentials for the current setup type, poll AcuMonitor and feed any
        new alerts into process_alerts(). Silently returns on setups without
        AcuMonitor support or without credentials.
        """
        if is_setup_type(ON_PREMISE_MASTER):
            bxss_api_key, bxss_user_id = get_bxss_credentials()
        elif is_setup_type(AWS):
            bxss_api_key = self.bxss_api_key
            bxss_user_id = self.bxss_user_id
        else:
            return
        # BUGFIX: the emptiness check only covered the AWS branch in the
        # decompiled source; it should guard both credential sources.
        if not bxss_api_key or not bxss_user_id:
            return
        try:
            raw_alerts = self.bxx_helper.poll_acumonitor(bxss_api_key, bxss_user_id)
            if raw_alerts:
                logger.debug("got %s new bxx alerts", len(raw_alerts))
                self.process_alerts(raw_alerts)
        except Exception as e:
            logger.exception("poll_AcuMonitor failed with %s", e)

    def __sleep(self, value):
        """Sleep up to `value` seconds, waking every second to honor stop()."""
        for _ in range(value):
            if self.should_stop:
                break
            time.sleep(1)

    def loop(self, pool_interval=7200):
        """
        Poll AcuMonitor every `pool_interval` seconds until stop() is called.
        On on-premise setups, also prune scan-state db files older than 20 days.
        """
        self.should_stop = False
        while not self.should_stop:
            if is_setup_type(ON_PREMISE_MASTER):
                l = BaseLicense.get_system_license()
                if not l.has_feature(Features.ACUMONITOR):
                    self.__sleep(600)
                    continue
            # BUGFIX: polling and the sleep were nested under the
            # ON_PREMISE_MASTER check in the decompiled source, so AWS setups
            # (explicitly supported by __init__/get_new_alerts) busy-spun and
            # never polled; restored to loop level.
            self.get_new_alerts()
            if is_setup_type_on_premise():
                scan_state_db_storage = settings.get("storage.scan_state_db")
                # BUGFIX: condition was inverted ("if not ...") in the
                # decompiled source, scanning an undefined storage path.
                if scan_state_db_storage:
                    delta = 1728000  # 20 days, in seconds
                    with os.scandir(scan_state_db_storage) as it:
                        for entry in it:
                            if entry.is_file():
                                if entry.stat().st_mtime + delta < time.time():
                                    try:
                                        os.remove(entry.path)
                                    except Exception as e:
                                        logger.error(
                                            "failed to delete %s with %s",
                                            entry.path,
                                            e,
                                        )
                                    else:
                                        # BUGFIX: "deleted" was logged even when
                                        # os.remove failed; only log on success.
                                        logger.info("deleted %s", entry.path)

            self.__sleep(pool_interval)

        logger.debug("BXSSImporter.loop aborted, bye.")
