code
stringlengths
17
6.64M
def obj_from_dict(info, parent=None, default_args=None): 'Initialize an object from dict.\n\n The dict must contain the key "type", which indicates the object type, it\n can be either a string or type, such as "list" or ``list``. Remaining\n fields are treated as the arguments for constructing the object...
def set_random_seed(seed, deterministic=False, use_rank_shift=False): 'Set random seed.\n\n Args:\n seed (int): Seed to be used.\n deterministic (bool): Whether to set the deterministic option for\n CUDNN backend, i.e., set `torch.backends.cudnn.deterministic`\n to True and ...
def is_tensorrt_available():
    """Return True if the ``tensorrt`` package can be imported, else False."""
    try:
        import tensorrt
    except ModuleNotFoundError:
        return False
    else:
        # Only probing availability; drop the module reference again.
        del tensorrt
        return True
def get_tensorrt_op_path(): 'Get TensorRT plugins library path.' (bright_style, reset_style) = ('\x1b[1m', '\x1b[0m') (red_text, blue_text) = ('\x1b[31m', '\x1b[34m') white_background = '\x1b[107m' msg = ((white_background + bright_style) + red_text) msg += 'DeprecationWarning: This function w...
def is_tensorrt_plugin_loaded(): 'Check if TensorRT plugins library is loaded or not.\n\n Returns:\n bool: plugin_is_loaded flag\n ' (bright_style, reset_style) = ('\x1b[1m', '\x1b[0m') (red_text, blue_text) = ('\x1b[31m', '\x1b[34m') white_background = '\x1b[107m' msg = ((white_backg...
def load_tensorrt_plugin(): 'load TensorRT plugins library.' (bright_style, reset_style) = ('\x1b[1m', '\x1b[0m') (red_text, blue_text) = ('\x1b[31m', '\x1b[34m') white_background = '\x1b[107m' msg = ((white_background + bright_style) + red_text) msg += 'DeprecationWarning: This function will ...
def preprocess_onnx(onnx_model): 'Modify onnx model to match with TensorRT plugins in mmcv.\n\n There are some conflict between onnx node definition and TensorRT limit.\n This function perform preprocess on the onnx model to solve the conflicts.\n For example, onnx `attribute` is loaded in TensorRT on ho...
def onnx2trt(onnx_model, opt_shape_dict, log_level=trt.Logger.ERROR, fp16_mode=False, max_workspace_size=0, device_id=0): 'Convert onnx model to tensorrt engine.\n\n Arguments:\n onnx_model (str or onnx.ModelProto): the onnx model to convert from\n opt_shape_dict (dict): the min/opt/max shape of ...
def save_trt_engine(engine, path): 'Serialize TensorRT engine to disk.\n\n Arguments:\n engine (tensorrt.ICudaEngine): TensorRT engine to serialize\n path (str): disk path to write the engine\n ' (bright_style, reset_style) = ('\x1b[1m', '\x1b[0m') (red_text, blue_text) = ('\x1b[31m', ...
def load_trt_engine(path): 'Deserialize TensorRT engine from disk.\n\n Arguments:\n path (str): disk path to read the engine\n\n Returns:\n tensorrt.ICudaEngine: the TensorRT engine loaded from disk\n ' (bright_style, reset_style) = ('\x1b[1m', '\x1b[0m') (red_text, blue_text) = ('\...
def torch_dtype_from_trt(dtype): 'Convert pytorch dtype to TensorRT dtype.' if (dtype == trt.bool): return torch.bool elif (dtype == trt.int8): return torch.int8 elif (dtype == trt.int32): return torch.int32 elif (dtype == trt.float16): return torch.float16 elif...
def torch_device_from_trt(device): 'Convert pytorch device to TensorRT device.' if (device == trt.TensorLocation.DEVICE): return torch.device('cuda') elif (device == trt.TensorLocation.HOST): return torch.device('cpu') else: return TypeError(('%s is not supported by torch' % de...
class TRTWrapper(torch.nn.Module): 'TensorRT engine Wrapper.\n\n Arguments:\n engine (tensorrt.ICudaEngine): TensorRT engine to wrap\n input_names (list[str]): names of each inputs\n output_names (list[str]): names of each outputs\n\n Note:\n If the engine is converted from onnx ...
class TRTWraper(TRTWrapper):
    """Deprecated misspelled alias of :class:`TRTWrapper`.

    Kept only for backward compatibility; emits a
    :class:`DeprecationWarning` when instantiated.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        warnings.warn(
            'TRTWraper will be deprecated in future. '
            'Please use TRTWrapper instead', DeprecationWarning)
class ConfigDict(Dict): def __missing__(self, name): raise KeyError(name) def __getattr__(self, name): try: value = super(ConfigDict, self).__getattr__(name) except KeyError: ex = AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") ...
def add_args(parser, cfg, prefix=''): for (k, v) in cfg.items(): if isinstance(v, str): parser.add_argument((('--' + prefix) + k)) elif isinstance(v, int): parser.add_argument((('--' + prefix) + k), type=int) elif isinstance(v, float): parser.add_argumen...
class Config(): 'A facility for config and config files.\n\n It supports common file formats as configs: python/json/yaml. The interface\n is the same as a dict object and also allows access config values as\n attributes.\n\n Example:\n >>> cfg = Config(dict(a=1, b=dict(b1=[0, 1])))\n >>...
class DictAction(Action): "\n argparse action to split an argument into KEY=VALUE form\n on the first = and append to a dictionary. List options can\n be passed as comma separated values, i.e 'KEY=V1,V2,V3', or with explicit\n brackets, i.e. 'KEY=[V1,V2,V3]'. It also support nested brackets to build\n...
def collect_env(): 'Collect the information of the running environments.\n\n Returns:\n dict: The environment information. The following fields are contained.\n\n - sys.platform: The variable of ``sys.platform``.\n - Python: Python version.\n - CUDA available: Bool, indi...
def check_ops_exist():
    """Whether the compiled extension module ``mmcv._ext`` is importable.

    Returns:
        bool: True if ``mmcv._ext`` can be found, False otherwise.
    """
    # ``pkgutil.find_loader`` is deprecated since Python 3.12 (removed in
    # 3.14); ``importlib.util.find_spec`` is the supported replacement.
    import importlib.util
    try:
        return importlib.util.find_spec('mmcv._ext') is not None
    except (ImportError, ValueError):
        # find_spec raises if the parent package itself is unavailable.
        return False
def get_logger(name, log_file=None, log_level=logging.INFO, file_mode='w'): 'Initialize and get a logger by name.\n\n If the logger has not been initialized, this method will initialize the\n logger by adding one or two handlers, otherwise the initialized logger will\n be directly returned. During initia...
def print_log(msg, logger=None, level=logging.INFO): 'Print a log message.\n\n Args:\n msg (str): The message to be logged.\n logger (logging.Logger | str | None): The logger to be used.\n Some special loggers are:\n - "silent": no message will be printed.\n - oth...
def _ntuple(n):
    """Build a parser that turns a scalar into an ``n``-tuple.

    The returned callable passes iterables through unchanged and repeats
    a non-iterable value ``n`` times.
    """
    def parse(value):
        if isinstance(value, collections.abc.Iterable):
            return value
        return tuple(repeat(value, n))
    return parse
def is_str(x):
    """Whether the input is a string instance.

    Note: This method is deprecated since python 2 is no longer supported.
    """
    return isinstance(x, str)
def import_modules_from_strings(imports, allow_failed_imports=False): "Import modules from the given list of strings.\n\n Args:\n imports (list | str | None): The given module names to be imported.\n allow_failed_imports (bool): If True, the failed imports will return\n None. Otherwise...
def iter_cast(inputs, dst_type, return_type=None): 'Cast elements of an iterable object into some type.\n\n Args:\n inputs (Iterable): The input object.\n dst_type (type): Destination type.\n return_type (type, optional): If specified, the output object will be\n converted to th...
def list_cast(inputs, dst_type):
    """Cast each element of ``inputs`` to ``dst_type``, returning a list.

    A thin wrapper over :func:`iter_cast` with ``return_type=list``.
    """
    return iter_cast(inputs, dst_type, return_type=list)
def tuple_cast(inputs, dst_type):
    """Cast each element of ``inputs`` to ``dst_type``, returning a tuple.

    A thin wrapper over :func:`iter_cast` with ``return_type=tuple``.
    """
    return iter_cast(inputs, dst_type, return_type=tuple)
def is_seq_of(seq, expected_type, seq_type=None): 'Check whether it is a sequence of some type.\n\n Args:\n seq (Sequence): The sequence to be checked.\n expected_type (type): Expected type of sequence items.\n seq_type (type, optional): Expected sequence type.\n\n Returns:\n boo...
def is_list_of(seq, expected_type):
    """Whether ``seq`` is a list whose items are all of ``expected_type``.

    A thin wrapper over :func:`is_seq_of` with ``seq_type=list``.
    """
    return is_seq_of(seq, expected_type, seq_type=list)
def is_tuple_of(seq, expected_type):
    """Whether ``seq`` is a tuple whose items are all of ``expected_type``.

    A thin wrapper over :func:`is_seq_of` with ``seq_type=tuple``.
    """
    return is_seq_of(seq, expected_type, seq_type=tuple)
def slice_list(in_list, lens): 'Slice a list into several sub lists by a list of given length.\n\n Args:\n in_list (list): The list to be sliced.\n lens(int or list): The expected length of each out list.\n\n Returns:\n list: A list of sliced list.\n ' if isinstance(lens, int): ...
def concat_list(in_list):
    """Concatenate a list of lists into a single flat list.

    Args:
        in_list (list): The list of lists to be merged.

    Returns:
        list: The concatenated flat list.
    """
    # ``chain.from_iterable`` avoids unpacking the whole outer list into
    # call arguments (as ``chain(*in_list)`` would), so it also works
    # efficiently for very long or lazy outer iterables.
    return list(itertools.chain.from_iterable(in_list))
def check_prerequisites(prerequisites, checker, msg_tmpl='Prerequisites "{}" are required in method "{}" but not found, please install them first.'): 'A decorator factory to check if prerequisites are satisfied.\n\n Args:\n prerequisites (str of list[str]): Prerequisites to be checked.\n checker ...
def _check_py_package(package):
    """Return True iff the python package ``package`` is importable."""
    try:
        import_module(package)
    except ImportError:
        return False
    return True
def _check_executable(cmd):
    """Return True iff executable ``cmd`` is found on the system PATH.

    Uses :func:`shutil.which` instead of shelling out to ``which``: it is
    portable (also works on Windows), produces no console output, and does
    not pass ``cmd`` through a shell.
    """
    import shutil
    return shutil.which(cmd) is not None
def requires_package(prerequisites): "A decorator to check if some python packages are installed.\n\n Example:\n >>> @requires_package('numpy')\n >>> func(arg1, args):\n >>> return numpy.zeros(1)\n array([0.])\n >>> @requires_package(['numpy', 'non_package'])\n >>>...
def requires_executable(prerequisites):
    """A decorator to check if some executable files are installed.

    Example:
        >>> @requires_executable('ffmpeg')
        >>> func(arg1, args):
        >>>     print(1)
        1
    """
    return check_prerequisites(prerequisites, checker=_check_executable)
def deprecated_api_warning(name_dict, cls_name=None): 'A decorator to check if some arguments are deprecate and try to replace\n deprecate src_arg_name to dst_arg_name.\n\n Args:\n name_dict(dict):\n key (str): Deprecate argument names.\n val (str): Expected argument names.\n\n ...
def is_method_overridden(method, base_class, derived_class): 'Check if a method of base class is overridden in derived class.\n\n Args:\n method (str): the method name to check.\n base_class (type): the class of the base class.\n derived_class (type | Any): the class or instance of the der...
def has_method(obj: object, method: str) -> bool: 'Check whether the object has a method.\n\n Args:\n method (str): The method name to check.\n obj (object): The object to check.\n\n Returns:\n bool: True if the object has the method else False.\n ' return (hasattr(obj, method) a...
def is_rocm_pytorch() -> bool: is_rocm = False if (TORCH_VERSION != 'parrots'): try: from torch.utils.cpp_extension import ROCM_HOME is_rocm = (True if ((torch.version.hip is not None) and (ROCM_HOME is not None)) else False) except ImportError: pass ret...
def _get_cuda_home(): if (TORCH_VERSION == 'parrots'): from parrots.utils.build_extension import CUDA_HOME elif is_rocm_pytorch(): from torch.utils.cpp_extension import ROCM_HOME CUDA_HOME = ROCM_HOME else: from torch.utils.cpp_extension import CUDA_HOME return CUDA_HOM...
def get_build_config():
    """Return the build-configuration string of the underlying framework
    (parrots or torch)."""
    if TORCH_VERSION != 'parrots':
        return torch.__config__.show()
    from parrots.config import get_build_info
    return get_build_info()
def _get_conv():
    """Return the ``(_ConvNd, _ConvTransposeMixin)`` base classes for the
    active backend (parrots or torch)."""
    if TORCH_VERSION != 'parrots':
        from torch.nn.modules.conv import _ConvNd, _ConvTransposeMixin
    else:
        from parrots.nn.modules.conv import _ConvNd, _ConvTransposeMixin
    return _ConvNd, _ConvTransposeMixin
def _get_dataloader():
    """Return ``(DataLoader, PoolDataLoader)``.

    On plain torch there is no ``PoolDataLoader``, so ``DataLoader`` is
    returned for both slots.
    """
    if TORCH_VERSION == 'parrots':
        from torch.utils.data import DataLoader, PoolDataLoader
    else:
        from torch.utils.data import DataLoader
        PoolDataLoader = DataLoader
    return DataLoader, PoolDataLoader
def _get_extension(): if (TORCH_VERSION == 'parrots'): from parrots.utils.build_extension import BuildExtension, Extension CppExtension = partial(Extension, cuda=False) CUDAExtension = partial(Extension, cuda=True) else: from torch.utils.cpp_extension import BuildExtension, Cpp...
def _get_pool(): if (TORCH_VERSION == 'parrots'): from parrots.nn.modules.pool import _AdaptiveAvgPoolNd, _AdaptiveMaxPoolNd, _AvgPoolNd, _MaxPoolNd else: from torch.nn.modules.pooling import _AdaptiveAvgPoolNd, _AdaptiveMaxPoolNd, _AvgPoolNd, _MaxPoolNd return (_AdaptiveAvgPoolNd, _Adapti...
def _get_norm(): if (TORCH_VERSION == 'parrots'): from parrots.nn.modules.batchnorm import _BatchNorm, _InstanceNorm SyncBatchNorm_ = torch.nn.SyncBatchNorm2d else: from torch.nn.modules.batchnorm import _BatchNorm from torch.nn.modules.instancenorm import _InstanceNorm ...
class SyncBatchNorm(SyncBatchNorm_):
    """SyncBatchNorm wrapper that validates the input rank under parrots.

    parrots' base class lacks the dimensionality check, so it is enforced
    here; on torch the base-class check is used unchanged.
    """

    def _check_input_dim(self, input):
        if TORCH_VERSION != 'parrots':
            super()._check_input_dim(input)
        elif input.dim() < 2:
            raise ValueError(
                f'expected at least 2D input (got {input.dim()}D input)')
def is_filepath(x):
    """Whether ``x`` is a file path, i.e. a str or :class:`pathlib.Path`."""
    return is_str(x) or isinstance(x, Path)
def fopen(filepath, *args, **kwargs):
    """Open ``filepath``, which may be either a str or a :class:`Path`.

    Extra positional/keyword arguments are forwarded to ``open``.

    Raises:
        ValueError: If ``filepath`` is neither a str nor a Path.
    """
    if isinstance(filepath, Path):
        return filepath.open(*args, **kwargs)
    if is_str(filepath):
        return open(filepath, *args, **kwargs)
    raise ValueError('`filepath` should be a string or a Path')
def check_file_exist(filename, msg_tmpl='file "{}" does not exist'):
    """Raise :class:`FileNotFoundError` if ``filename`` is not an existing
    regular file; ``msg_tmpl`` is formatted with the filename."""
    if osp.isfile(filename):
        return
    raise FileNotFoundError(msg_tmpl.format(filename))
def mkdir_or_exist(dir_name, mode=511):
    """Create directory ``dir_name`` (including parents) if missing.

    An empty string is a no-op; ``~`` is expanded. Existing directories
    are left untouched.
    """
    if dir_name == '':
        return
    expanded = osp.expanduser(dir_name)
    os.makedirs(expanded, mode=mode, exist_ok=True)
def symlink(src, dst, overwrite=True, **kwargs):
    """Create a symlink ``dst`` pointing at ``src``.

    If ``dst`` already exists (even as a broken link) and ``overwrite`` is
    True, it is removed first; extra kwargs go to :func:`os.symlink`.
    """
    if overwrite and os.path.lexists(dst):
        os.remove(dst)
    os.symlink(src, dst, **kwargs)
def scandir(dir_path, suffix=None, recursive=False, case_sensitive=True): 'Scan a directory to find the interested files.\n\n Args:\n dir_path (str | :obj:`Path`): Path of the directory.\n suffix (str | tuple(str), optional): File suffix that we are\n interested in. Default: None.\n ...
def find_vcs_root(path, markers=('.git',)): 'Finds the root directory (including itself) of specified markers.\n\n Args:\n path (str): Path of directory or file.\n markers (list[str], optional): List of file or directory names.\n\n Returns:\n The directory contained one of the markers o...
class ProgressBar(): 'A progress bar which can print the progress.' def __init__(self, task_num=0, bar_width=50, start=True, file=sys.stdout): self.task_num = task_num self.bar_width = bar_width self.completed = 0 self.file = file if start: self.start() ...
def track_progress(func, tasks, bar_width=50, file=sys.stdout, **kwargs): 'Track the progress of tasks execution with a progress bar.\n\n Tasks are done with a simple for-loop.\n\n Args:\n func (callable): The function to be applied to each task.\n tasks (list or tuple[Iterable, int]): A list ...
def init_pool(process_num, initializer=None, initargs=None): if (initializer is None): return Pool(process_num) elif (initargs is None): return Pool(process_num, initializer) else: if (not isinstance(initargs, tuple)): raise TypeError('"initargs" must be a tuple') ...
def track_parallel_progress(func, tasks, nproc, initializer=None, initargs=None, bar_width=50, chunksize=1, skip_first=False, keep_order=True, file=sys.stdout): 'Track the progress of parallel task execution with a progress bar.\n\n The built-in :mod:`multiprocessing` module is used for process pools and\n ...
def track_iter_progress(tasks, bar_width=50, file=sys.stdout): 'Track the progress of tasks iteration or enumeration with a progress\n bar.\n\n Tasks are yielded with a simple for-loop.\n\n Args:\n tasks (list or tuple[Iterable, int]): A list of tasks or\n (tasks, total num).\n b...
def build_from_cfg(cfg, registry, default_args=None): 'Build a module from config dict.\n\n Args:\n cfg (dict): Config dict. It should at least contain the key "type".\n registry (:obj:`Registry`): The registry to search the type from.\n default_args (dict, optional): Default initializatio...
class Registry(): "A registry to map strings to classes.\n\n Registered object could be built from registry.\n\n Example:\n >>> MODELS = Registry('models')\n >>> @MODELS.register_module()\n >>> class ResNet:\n >>> pass\n >>> resnet = MODELS.build(dict(type='ResNet'))\n...
def worker_init_fn(worker_id: int, num_workers: int, rank: int, seed: int): 'Function to initialize each worker.\n\n The seed of each worker equals to\n ``num_worker * rank + worker_id + user_seed``.\n\n Args:\n worker_id (int): Id for each worker.\n num_workers (int): Number of workers.\n ...
def check_python_script(cmd): 'Run the python cmd script with `__main__`. The difference between\n `os.system` is that, this function exectues code in the current process, so\n that it can be tracked by coverage tools. Currently it supports two forms:\n\n - ./tests/data/scripts/hello.py zz\n - python ...
def _any(judge_result): 'Since built-in ``any`` works only when the element of iterable is not\n iterable, implement the function.' if (not isinstance(judge_result, Iterable)): return judge_result try: for element in judge_result: if _any(element): return Tru...
def assert_dict_contains_subset(dict_obj: Dict[(Any, Any)], expected_subset: Dict[(Any, Any)]) -> bool: 'Check if the dict_obj contains the expected_subset.\n\n Args:\n dict_obj (Dict[Any, Any]): Dict object to be checked.\n expected_subset (Dict[Any, Any]): Subset expected to be contained in\n ...
def assert_attrs_equal(obj: Any, expected_attrs: Dict[(str, Any)]) -> bool: 'Check if attribute of class object is correct.\n\n Args:\n obj (object): Class object to be checked.\n expected_attrs (Dict[str, Any]): Dict of the expected attrs.\n\n Returns:\n bool: Whether the attribute of ...
def assert_dict_has_keys(obj: Dict[(str, Any)], expected_keys: List[str]) -> bool: 'Check if the obj has all the expected_keys.\n\n Args:\n obj (Dict[str, Any]): Object to be checked.\n expected_keys (List[str]): Keys expected to contained in the keys of\n the obj.\n\n Returns:\n ...
def assert_keys_equal(result_keys: List[str], target_keys: List[str]) -> bool: 'Check if target_keys is equal to result_keys.\n\n Args:\n result_keys (List[str]): Result keys to be checked.\n target_keys (List[str]): Target keys to be checked.\n\n Returns:\n bool: Whether target_keys is...
def assert_is_norm_layer(module) -> bool: 'Check if the module is a norm layer.\n\n Args:\n module (nn.Module): The module to be checked.\n\n Returns:\n bool: Whether the module is a norm layer.\n ' from torch.nn import GroupNorm, LayerNorm from .parrots_wrapper import _BatchNorm, _...
def assert_params_all_zeros(module) -> bool: 'Check if the parameters of the module is all zeros.\n\n Args:\n module (nn.Module): The module to be checked.\n\n Returns:\n bool: Whether the parameters of the module is all zeros.\n ' weight_data = module.weight.data is_weight_zero = w...
class TimerError(Exception):
    """Exception for Timer misuse; the text is kept on ``.message``."""

    def __init__(self, message):
        # Expose the message as an attribute for callers that inspect it.
        self.message = message
        super().__init__(message)
class Timer(): "A flexible Timer class.\n\n Examples:\n >>> import time\n >>> import mmcv\n >>> with mmcv.Timer():\n >>> # simulate a code block that will run for 1s\n >>> time.sleep(1)\n 1.000\n >>> with mmcv.Timer(print_tmpl='it takes {:.1f} seconds'):...
def check_time(timer_id): "Add check points in a single line.\n\n This method is suitable for running a task on a list of items. A timer will\n be registered when the method is called for the first time.\n\n Examples:\n >>> import time\n >>> import mmcv\n >>> for i in range(1, 6):\n ...
def is_jit_tracing() -> bool: if ((torch.__version__ != 'parrots') and (digit_version(torch.__version__) >= digit_version('1.6.0'))): on_trace = torch.jit.is_tracing() if isinstance(on_trace, bool): return on_trace else: return torch._C._is_tracing() else: ...
def digit_version(version_str: str, length: int=4): 'Convert a version string into a tuple of integers.\n\n This method is usually used for comparing two versions. For pre-release\n versions: alpha < beta < rc.\n\n Args:\n version_str (str): The version string.\n length (int): The maximum n...
def _minimal_ext_cmd(cmd): env = {} for k in ['SYSTEMROOT', 'PATH', 'HOME']: v = os.environ.get(k) if (v is not None): env[k] = v env['LANGUAGE'] = 'C' env['LANG'] = 'C' env['LC_ALL'] = 'C' out = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=env).communicate()[0...
def get_git_hash(fallback='unknown', digits=None): "Get the git hash of the current repo.\n\n Args:\n fallback (str, optional): The fallback string when git hash is\n unavailable. Defaults to 'unknown'.\n digits (int, optional): kept digits of the hash. Defaults to None,\n m...
def parse_version_info(version_str: str, length: int=4) -> tuple: 'Parse a version string into a tuple.\n\n Args:\n version_str (str): The version string.\n length (int): The maximum number of version levels. Default: 4.\n\n Returns:\n tuple[int | str]: The version info, e.g., "1.3.0" i...
class Cache(): def __init__(self, capacity): self._cache = OrderedDict() self._capacity = int(capacity) if (capacity <= 0): raise ValueError('capacity must be a positive integer') @property def capacity(self): return self._capacity @property def size(...
class VideoReader(): "Video class with similar usage to a list object.\n\n This video warpper class provides convenient apis to access frames.\n There exists an issue of OpenCV's VideoCapture class that jumping to a\n certain frame may be inaccurate. It is fixed in this class by checking\n the positio...
def frames2video(frame_dir, video_file, fps=30, fourcc='XVID', filename_tmpl='{:06d}.jpg', start=0, end=0, show_progress=True): 'Read the frame images from a directory and join them as a video.\n\n Args:\n frame_dir (str): The directory containing video frames.\n video_file (str): Output filename...
@requires_executable('ffmpeg') def convert_video(in_file, out_file, print_cmd=False, pre_options='', **kwargs): 'Convert a video with ffmpeg.\n\n This provides a general api to ffmpeg, the executed command is::\n\n `ffmpeg -y <pre_options> -i <in_file> <options> <out_file>`\n\n Options(kwargs) are ma...
@requires_executable('ffmpeg') def resize_video(in_file, out_file, size=None, ratio=None, keep_ar=False, log_level='info', print_cmd=False): 'Resize a video.\n\n Args:\n in_file (str): Input video filename.\n out_file (str): Output video filename.\n size (tuple): Expected size (w, h), eg, ...
@requires_executable('ffmpeg') def cut_video(in_file, out_file, start=None, end=None, vcodec=None, acodec=None, log_level='info', print_cmd=False): 'Cut a clip from a video.\n\n Args:\n in_file (str): Input video filename.\n out_file (str): Output video filename.\n start (None or float): S...
@requires_executable('ffmpeg') def concat_video(video_list, out_file, vcodec=None, acodec=None, log_level='info', print_cmd=False): 'Concatenate multiple videos into a single one.\n\n Args:\n video_list (list): A list of video filenames\n out_file (str): Output video filename\n vcodec (Non...
class Color(Enum): 'An enum that defines common colors.\n\n Contains red, green, blue, cyan, yellow, magenta, white and black.\n ' red = (0, 0, 255) green = (0, 255, 0) blue = (255, 0, 0) cyan = (255, 255, 0) yellow = (0, 255, 255) magenta = (255, 0, 255) white = (255, 255, 255) ...
def color_val(color): 'Convert various input to color tuples.\n\n Args:\n color (:obj:`Color`/str/tuple/int/ndarray): Color inputs\n\n Returns:\n tuple[int]: A tuple of 3 integers indicating BGR channels.\n ' if is_str(color): return Color[color].value elif isinstance(color,...
def choose_requirement(primary, secondary):
    """Return ``primary`` if some version of it is installed, else return
    ``secondary``."""
    # Strip any version specifier (e.g. 'pkg>=1.0' -> 'pkg').
    name = re.split('[!<>=]', primary)[0]
    try:
        get_distribution(name)
    except DistributionNotFound:
        return secondary
    return str(primary)
def get_version():
    """Read ``__version__`` from ``mmcv/version.py``.

    Returns:
        str: The version string defined in the file.
    """
    version_file = 'mmcv/version.py'
    # Execute the file into an explicit namespace. Relying on ``exec``
    # mutating the function's ``locals()`` (as the previous implementation
    # did) is undefined behaviour inside a function body and only happens
    # to work on CPython.
    namespace = {}
    with open(version_file, 'r', encoding='utf-8') as f:
        exec(compile(f.read(), version_file, 'exec'), namespace)
    return namespace['__version__']
def parse_requirements(fname='requirements/runtime.txt', with_version=True): 'Parse the package dependencies listed in a requirements file but strips\n specific versioning information.\n\n Args:\n fname (str): path to requirements file\n with_version (bool, default=False): if True include vers...
def get_extensions(): extensions = [] if (os.getenv('MMCV_WITH_TRT', '0') != '0'): (bright_style, reset_style) = ('\x1b[1m', '\x1b[0m') (red_text, blue_text) = ('\x1b[31m', '\x1b[34m') white_background = '\x1b[107m' msg = ((white_background + bright_style) + red_text) m...
def test_quantize(): arr = np.random.randn(10, 10) levels = 20 qarr = mmcv.quantize(arr, (- 1), 1, levels) assert (qarr.shape == arr.shape) assert (qarr.dtype == np.dtype('int64')) for i in range(arr.shape[0]): for j in range(arr.shape[1]): ref = min((levels - 1), int(np.fl...
def test_dequantize(): levels = 20 qarr = np.random.randint(levels, size=(10, 10)) arr = mmcv.dequantize(qarr, (- 1), 1, levels) assert (arr.shape == qarr.shape) assert (arr.dtype == np.dtype('float64')) for i in range(qarr.shape[0]): for j in range(qarr.shape[1]): assert (...
def test_joint(): arr = np.random.randn(100, 100) levels = 1000 qarr = mmcv.quantize(arr, (- 1), 1, levels) recover = mmcv.dequantize(qarr, (- 1), 1, levels) assert (np.abs((recover[(arr < (- 1))] + 0.999)).max() < 1e-06) assert (np.abs((recover[(arr > 1)] - 0.999)).max() < 1e-06) assert (...
def test_build_conv_layer(): with pytest.raises(TypeError): cfg = 'Conv2d' build_conv_layer(cfg) with pytest.raises(KeyError): cfg = dict(kernel_size=3) build_conv_layer(cfg) with pytest.raises(KeyError): cfg = dict(type='FancyConv') build_conv_layer(cfg) ...
def test_infer_norm_abbr(): with pytest.raises(TypeError): infer_norm_abbr(0) class MyNorm(): _abbr_ = 'mn' assert (infer_norm_abbr(MyNorm) == 'mn') class FancyBatchNorm(): pass assert (infer_norm_abbr(FancyBatchNorm) == 'bn') class FancyInstanceNorm(): pass ...
def test_build_norm_layer(): with pytest.raises(TypeError): cfg = 'BN' build_norm_layer(cfg, 3) with pytest.raises(KeyError): cfg = dict() build_norm_layer(cfg, 3) with pytest.raises(KeyError): cfg = dict(type='FancyNorm') build_norm_layer(cfg, 3) with p...