# uncompyle6 version 3.2.3
# Python bytecode 3.6 (3379)
# Decompiled from: Python 3.6.8 |Anaconda custom (64-bit)| (default, Feb 21 2019, 18:30:04) [MSC v.1916 64 bit (AMD64)]
# Embedded file name: helpers\status_cache\scan_job_status_memory_cache.py
import copy
from threading import RLock
import datetime
from dateutil.tz import tzlocal
from helpers.status_cache.scan_job_status_cache_base import ScanJobStatusCache


class ScanJobStatusMemoryCache(ScanJobStatusCache):
    """In-memory scan-job status cache.

    Entries are keyed by the tuple ``(shard, scanning_app, scan_session_id)``
    and held in a plain dict guarded by a reentrant lock.  On a cache miss the
    entry may be loaded from the jobs table via the base class; writes can be
    persisted back to a JSON table with :meth:`save_entry` / :meth:`save_all`.
    """

    def __init__(self, max_vuln_backlog=5, max_message_backlog=10):
        super().__init__(max_vuln_backlog, max_message_backlog)
        # RLock (not Lock): public methods hold it while calling private
        # helpers that may be reached again on the same thread.
        self.__lock = RLock()
        # (shard, scanning_app, scan_session_id) -> entry dict
        self.__items = {}

    def __get_entry(self, shard, scanning_app, scan_session_id, enable_db_lookup=True):
        """Fetch the raw entry for mutation, loading and normalizing it.

        Caller must already hold ``self.__lock``.  A miss optionally falls
        back to the jobs table; the (possibly ``None``) result is then passed
        through ``_populate_entry`` (base class) so mutators always receive a
        fully shaped entry dict.
        """
        entry = self.__items.get((shard, scanning_app, scan_session_id))
        if entry is None and enable_db_lookup:
            entry = self._get_entry_from_jobs_table(shard, scanning_app, scan_session_id)
        return self._populate_entry(entry)

    def get_entry(self, shard, scanning_app, scan_session_id, enable_db_lookup=True):
        """Return a snapshot of the cached entry, or ``None`` if absent.

        A cache hit is returned as a deep copy so callers cannot mutate the
        shared cached dict; a miss optionally falls through to the jobs table.

        BUGFIX: the decompiled original had ``return entry`` inside the
        ``entry is None`` branch, so cache hits fell off the method and
        returned ``None``, and the ``deepcopy`` was applied to ``None``
        instead of the cached entry.
        """
        with self.__lock:
            entry = self.__items.get((shard, scanning_app, scan_session_id))
            if entry is None:
                if enable_db_lookup:
                    entry = self._get_entry_from_jobs_table(shard, scanning_app, scan_session_id)
            else:
                entry = copy.deepcopy(entry)
            return entry

    def del_entry(self, shard, scanning_app, scan_session_id):
        """Remove the entry for the given key; a missing key is a no-op."""
        with self.__lock:
            self.__items.pop((shard, scanning_app, scan_session_id), None)

    def update_main_keys(self, shard, scanning_app, scan_session_id, new_data):
        """Merge ``new_data`` into the entry's main keys and store it back."""
        key = (shard, scanning_app, scan_session_id)
        with self.__lock:
            entry = self.__get_entry(shard, scanning_app, scan_session_id)
            self._merge_in_main_keys(entry, new_data)
            self.__items[key] = entry

    def update_sub_target_keys(
        self,
        shard,
        scanning_app,
        scan_session_id,
        target_id,
        host,
        is_starting_host,
        new_data,
    ):
        """Merge ``new_data`` into one sub-target of the entry and store it back."""
        key = (shard, scanning_app, scan_session_id)
        with self.__lock:
            entry = self.__get_entry(shard, scanning_app, scan_session_id)
            self._merge_in_sub_target_keys(
                entry, target_id, host, is_starting_host, new_data
            )
            self.__items[key] = entry

    def add_message(
        self,
        shard,
        scanning_app,
        scan_session_id,
        kind,
        level,
        data=None,
        target_info=None,
    ):
        """Prepend a timestamped message to the entry, trimming the backlog.

        Newest message first; the list is capped at ``self.max_message_backlog``
        entries (base-class attribute).
        """
        key = (shard, scanning_app, scan_session_id)
        with self.__lock:
            entry = self.__get_entry(shard, scanning_app, scan_session_id)
            messages = entry["main"]["messages"]
            messages.insert(
                0,
                dict(
                    time=datetime.datetime.now(tz=tzlocal()).isoformat(),
                    target_info=target_info,
                    kind=kind,
                    data=data,
                    level=level,
                ),
            )
            # Drop the oldest messages beyond the configured backlog.
            del messages[self.max_message_backlog:]
            self.__items[key] = entry

    def add_vuln(
        self,
        shard,
        scanning_app,
        scan_session_id,
        vuln_name,
        vuln_id,
        severity,
        target_info,
    ):
        """Prepend a timestamped vulnerability record, trimming the backlog.

        Newest vulnerability first; the list is capped at
        ``self.max_vuln_backlog`` entries (base-class attribute).
        """
        key = (shard, scanning_app, scan_session_id)
        with self.__lock:
            entry = self.__get_entry(shard, scanning_app, scan_session_id)
            vulns = entry["main"]["vulns"]
            vulns.insert(
                0,
                dict(
                    time=datetime.datetime.now(tz=tzlocal()).isoformat(),
                    target_info=target_info,
                    name=vuln_name,
                    vuln_id=vuln_id,
                    severity=severity,
                ),
            )
            # Drop the oldest vulns beyond the configured backlog.
            del vulns[self.max_vuln_backlog:]
            self.__items[key] = entry

    def save_entry(self, shard, scanning_app, scan_session_id, merge=False):
        """Persist one cached entry to the JSON table; no-op if not cached.

        BUGFIX: the original hard-coded ``merge=False`` in the call, silently
        ignoring the caller's ``merge`` argument.
        """
        with self.__lock:
            entry = self.__items.get((shard, scanning_app, scan_session_id))
            if entry is None:
                return
            self._save_entry_to_json_table(
                shard, scanning_app, scan_session_id, entry, merge=merge
            )

    def save_all(self, shard, merge=False, empty_cache=True):
        """Persist every cached entry belonging to ``shard``.

        When ``empty_cache`` is true (the default), persisted entries are
        evicted from the in-memory cache afterwards.

        BUGFIX: the original hard-coded ``merge=False`` in the call, silently
        ignoring the caller's ``merge`` argument.
        """
        with self.__lock:
            to_remove_keys = []
            for key, entry in self.__items.items():
                _shard, scanning_app, scan_session_id = key
                if _shard != shard:
                    continue
                if empty_cache:
                    to_remove_keys.append(key)
                self._save_entry_to_json_table(
                    shard, scanning_app, scan_session_id, entry, merge=merge
                )

            # Evict after iteration: deleting during the .items() loop would
            # mutate the dict while iterating it.
            for key in to_remove_keys:
                del self.__items[key]