code (string column; lengths 101 to 5.91M)
class FairseqEncoderModel(BaseFairseqModel): def __init__(self, encoder): super().__init__() self.encoder = encoder assert isinstance(self.encoder, FairseqEncoder) def forward(self, src_tokens, src_lengths, **kwargs): return self.encoder(src_tokens, src_lengths, **kwargs) def...
@torch.no_grad() def show_flops_params(model, device, input_shape=[1, 3, 1024, 2048], logger=None): input = torch.randn(*input_shape).to(torch.device(device)) (flops, params) = profile(model, inputs=(input,), verbose=False) if (logger is not None): logger.info('{} flops: {:.3f}G input shape is {}, params: {:...
class SubGaussianInverseProbabilityWeightingTuning(BaseOffPolicyEstimatorTuning): estimator_name: str = 'sg-ipw' def __post_init__(self) -> None: self.base_ope_estimator = SubGaussianInverseProbabilityWeighting super()._check_lambdas(max_val=1.0) super()._check_init_inputs() self...
def _render(template, context, app):
    before_render_template.send(app, template=template, context=context)
    rv = template.render(context)
    template_rendered.send(app, template=template, context=context)
    return rv
def _expmap0(u, c):
    sqrt_c = c ** 0.5
    u_norm = torch.clamp_min(u.norm(dim=-1, p=2, keepdim=True), 1e-05)
    gamma_1 = tanh(sqrt_c * u_norm) * u / (sqrt_c * u_norm)
    return gamma_1
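For reference, the closed form that `_expmap0` above evaluates, read directly off the code (the exponential map at the origin of a Poincaré ball with curvature c, with the norm clamped at 1e-5 for numerical stability):

$$\exp_0^c(u) \;=\; \tanh\!\big(\sqrt{c}\,\lVert u\rVert\big)\,\frac{u}{\sqrt{c}\,\lVert u\rVert}$$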
class Assign(FunctionAssignment):

    def __str__(self):
        return '%s := %s' % (self.fluent, self.expression)
def test_root_get_distance(branchless_codeobject_goal, mocker):
    mock = mocker.patch('pynguin.ga.coveragegoals.cfd.get_root_control_flow_distance', return_value=42)
    distance = branchless_codeobject_goal.get_distance(MagicMock(), MagicMock())
    assert distance == 42
    mock.assert_called_once()
def test_line_str_parser(): data_ret = ['sample1.jpg hello\n', 'sample2.jpg world'] keys = ['filename', 'text'] keys_idx = [0, 1] separator = ' ' with pytest.raises(AssertionError): parser = LineStrParser('filename', keys_idx, separator) with pytest.raises(AssertionError): parser...
def main(unused_argv): config = configs.load_config(save_config=False) config.render_path = True dataset = datasets.load_dataset('test', config.data_dir, config) (model, init_variables) = models.construct_mipnerf(random.PRNGKey(), dataset.peek()['rays'], config) optimizer = flax.optim.Adam(config.lr...
class PitchExtractionInterface(metaclass=ABCMeta): def calc_prob(self, x): def calc_embed(self, x): def calc_pitch(self, x):
@numba.core.imputils.lower_constant(ArrayBuilderType) def lower_const_ArrayBuilder(context, builder, arraybuildertype, arraybuilder): layout = arraybuilder._layout attrs = arraybuilder._attrs rawptr = context.get_constant(numba.intp, arraybuilder._layout._ptr) proxyout = context.make_helper(builder, arraybuilderty...
def _load_video_1cam(idx: int, paths: List[str], poses: torch.Tensor, out_h: int, out_w: int, load_every: int=1): filters = [('scale', f'w={out_w}:h={out_h}')] all_frames = iio.imread(paths[idx], plugin='pyav', format='rgb24', constant_framerate=True, thread_count=2, filter_sequence=filters) (imgs, timestam...
class _UniformExpertAssignment(torch.autograd.Function):

    @staticmethod
    def forward(ctx, x, num_experts):
        out = torch.arange(x.numel(), dtype=x.dtype, device=x.device)
        out = torch.remainder(out, num_experts)
        return out.view(x.shape)
def test_synthetic_sample_results_in_sampled_delay_with_weighted_delays_per_arm(): n_actions = 3 delay_function = ExponentialDelaySampler(max_scale=100.0, min_scale=10.0, random_state=12345).exponential_delay_function_expected_reward_weighted dataset = BanditEnvironmentSimulator(n_actions=n_actions, reward_...
@register_model
def densenetblur121d(pretrained=False, **kwargs):
    model = _densenet('densenetblur121d', growth_rate=32, block_config=(6, 12, 24, 16), pretrained=pretrained, stem_type='deep', aa_layer=BlurPool2d, **kwargs)
    return model
def check_label_existence(value_label, usr_utt_tok): (in_usr, usr_pos) = get_token_pos(usr_utt_tok, value_label) if ((not in_usr) and (value_label in LABEL_MAPS)): for value_label_variant in LABEL_MAPS[value_label]: (in_usr, usr_pos) = get_token_pos(usr_utt_tok, value_label_variant) ...
class SympyPredictingOptimizer(ABC):

    def step(self):
        pass

    def prediction(self, nsteps):
        pass
def set_learning_rate(optim, lr):
    for param_group in optim.param_groups:
        param_group['lr'] = lr
def get_arguments(): parser = argparse.ArgumentParser() parser.add_argument('l', help=((('Location whose data needs to be trained/tested with' + 'Values can be one of [Bondville, Boulder, Desert_Rock,') + 'Fort_Peck,Goodwin_Creek, Penn_State,') + 'Sioux_Falls]')) parser.add_argument('y', help='4 digit Test ...
class Trainer(DefaultTrainer): def build_evaluator(cls, cfg, dataset_name, output_folder=None): if (output_folder is None): output_folder = os.path.join(cfg.OUTPUT_DIR, 'inference') if ('coco' in dataset_name): return COCOEvaluator(dataset_name, cfg, True, output_folder) ...
def make_batches(lines, args, task, max_positions, encode_fn): tokens = [task.source_dictionary.encode_line(encode_fn(src_str), add_if_not_exist=False).long() for src_str in lines] lengths = [t.numel() for t in tokens] itr = task.get_batch_iterator(dataset=task.build_dataset_for_inference(tokens, lengths), ...
class BaseTestCase(unittest.TestCase): def test_base_correct(self): query = '' (corrected_sent, detail) = m.correct(query) print(corrected_sent, detail) self.assertEqual(corrected_sent, '') self.assertEqual(detail, [('', '', 0, 2), ('', '', 9, 11)]) def test_base_demos(se...
def build_argparse(): parser = argparse.ArgumentParser() parser.add_argument('--data_dir', type=str, default='data/constituency', help='Directory of constituency data.') parser.add_argument('--wordvec_dir', type=str, default='extern_data/wordvec', help='Directory of word vectors') parser.add_argument('-...
_interface.register_fl_task(model='net_model', data_loader='val_loader', device='device') def validate(net_model, val_loader, device): device = torch.device('cuda') if (not torch.cuda.is_available()): device = 'cpu' net_model.eval() net_model.to(device) val_loader = tqdm.tqdm(val_loader, des...
class SMPBlock(nn.Module): def __init__(self, in_channels, dw_channels, lk_size, drop_path, n_points=None, n_points_divide=4): super().__init__() self.pw1 = conv_bn_relu(in_channels, dw_channels, 1, 1, 0, groups=1) self.pw2 = conv_bn(dw_channels, in_channels, 1, 1, 0, groups=1) self....
def potential_neighbors(dom1, dom2):
    if isinstance(dom1, DOMElementPAD) or isinstance(dom2, DOMElementPAD):
        return False
    if dom1.ref == dom2.ref:
        return False
    return True
class IntegerVectorsModPermutationGroup(UniqueRepresentation): def __classcall__(cls, G, sum=None, max_part=None, sgs=None): if ((sum is None) and (max_part is None)): if G.domain(): return IntegerVectorsModPermutationGroup_All(G, sgs=sgs) else: return...
class MutableString(UserString): def __init__(self, string=''): self.data = string def __hash__(self): raise TypeError('unhashable type (it is mutable)') def __setitem__(self, index, sub): if (index < 0): index += len(self.data) if ((index < 0) or (index >= len(se...
def quantize_model_(model, p=0.2, bits=8, update_step=3000): quantized_layers = get_layers(model, '(.*?)') for layer in quantized_layers: is_master_process = ((not dist.is_initialized()) or (dist.is_initialized() and (dist.get_rank() == 0))) module = attrgetter(layer)(model) if is_master...
def get_dataset(dataset, batch_size=256, augment=False): mean = (0.5, 0.5, 0.5) std = (0.5, 0.5, 0.5) num_workers = 4 if (dataset in ['mnist', 'kmnist', 'fashionmnist']): if augment: transform_train = transforms.Compose([transforms.RandomCrop(28, padding=4), transforms.RandomHorizont...
def type_check(param_name, param_schema, input_params): type_check_error = [] doc_type = convert_type(param_schema.get('type', '')) if (doc_type and (not isinstance(input_params[param_name], doc_type))): type_error = True if (((doc_type in [int, float, bool]) and (type(input_params[param_nam...
@pytest.fixture(scope='module') @pytest.mark.usefixtures('columns') def simple_dataframe_pandas(columns): data = [(1, 2, 19842), (1, 4, 19844), (1, 3, 19843), (1, 5, 19845), (1, 6, 19846), (1, 7, 19847), (2, 1, 19841), (2, 2, 19842), (2, 3, 19843), (2, 4, 19844), (3, 10, 19844), (4, 11, 19843), (4, 12, 19845), (1, 1, 19841)] return pd.Da...
def _check_likelihood(model: GPModel, classification: bool, likelihood_variance: Optional[float], empirical_variance: Optional[TensorType], trainable_likelihood: bool) -> None: if classification: assert isinstance(model.likelihood, gpflow.likelihoods.Bernoulli) else: assert isinstance(model.like...
class OrderedMultisetPartitionsIntoSets_n_constraints(OrderedMultisetPartitionsIntoSets): def __init__(self, n, **constraints): self._n = n OrderedMultisetPartitionsIntoSets.__init__(self, True, size=n, **constraints) def _repr_(self): cdict = dict(self.constraints) cdict.pop('si...
class RawExplorationStrategy(ExplorationStrategy, metaclass=abc.ABCMeta): def get_action_from_raw_action(self, action, **kwargs): pass def get_actions_from_raw_actions(self, actions, **kwargs): raise NotImplementedError() def get_action(self, t, policy, *args, **kwargs): (action, age...
class SAGE(torch.nn.Module): def __init__(self, in_channels, hidden_channels, out_channels, num_layers): super().__init__() self.num_layers = num_layers self.convs = torch.nn.ModuleList() self.convs.append(SAGEConv(in_channels, hidden_channels)) for _ in range((num_layers - 2...
@pytest.mark.parametrize('tifunc,npfunc', [((lambda x: ti.tanh(x)), (lambda x: np.tanh(x))), ((lambda x: ti.sin(x)), (lambda x: np.sin(x))), ((lambda x: ti.cos(x)), (lambda x: np.cos(x))), ((lambda x: ti.acos(x)), (lambda x: np.arccos(x))), ((lambda x: ti.asin(x)), (lambda x: np.arcsin(x)))]) @if_has_autograd @test_utils.test() def test_t...
.pairing .dependency() def test_build_and_search(): global _tempdir conf = utils.relative_file('spacegraphcats/conf/twofoo-short.yaml') target = 'search' status = run_snakemake(conf, verbose=True, outdir=_tempdir, extra_args=[target]) assert (status == 0) output_files = ['twofoo-short_k31/bcalm....
def handle_sampling_error(is_tmp_file, output_file_path, sampling_error): if ('Unable to sample any rows for the given conditions' in str(sampling_error)): raise sampling_error error_msg = None if is_tmp_file: error_msg = f'Error: Sampling terminated. Partial results are stored in a temporar...
def validate_before_compute_similarity(float_tensor: Any, fxp_tensor: Any):
    assert isinstance(float_tensor, np.ndarray)
    assert isinstance(fxp_tensor, np.ndarray)
    assert float_tensor.shape == fxp_tensor.shape
def fake_colmap_normal(in_depth_path, out_normal_path): depth_image = read_gipuma_dmb(in_depth_path) image_shape = np.shape(depth_image) normal_image = np.ones_like(depth_image) normal_image = np.reshape(normal_image, (image_shape[0], image_shape[1], 1)) normal_image = np.tile(normal_image, [1, 1, 3...
class Embeder(nn.Module): def __init__(self, conf, fields): super(Embeder, self).__init__() self.conf = conf if ('pos' in fields.inputs): self.pos_emb = nn.Embedding(fields.get_vocab_size('pos'), conf.n_pos_embed) else: self.pos_emb = None if ('char' i...
class FocalLoss(nn.Module): def __init__(self, focusing_param=2, balance_param=0.25): super(FocalLoss, self).__init__() self.focusing_param = focusing_param self.balance_param = balance_param def forward(self, output, target, reduction='mean'): cross_entropy = F.cross_entropy(out...
def _merge_a_into_b(a, b, stack=None): assert isinstance(a, AttrDict), '`a` (cur type {}) must be an instance of {}'.format(type(a), AttrDict) assert isinstance(b, AttrDict), '`b` (cur type {}) must be an instance of {}'.format(type(b), AttrDict) for (k, v_) in a.items(): full_key = ((('.'.join(stac...
def distshift_detector_preprocess(data: List[pd.DataFrame], domain_index: Union[(List[int], np.ndarray)], domain_index_name: str='domain_index', n_states: int=2): df = pd.concat(data) domain_index = np.array(domain_index) df[domain_index_name] = domain_index data_array = df.to_numpy() var_names = df...
def logits_to_probs(logits, is_binary=False):
    if is_binary:
        return torch.sigmoid(logits)
    return F.softmax(logits, dim=-1)
def unregister_compiled_sdfg_call_hook(hook_id: int):
    if hook_id >= len(_COMPILED_SDFG_CALL_HOOKS):
        raise ValueError('Invalid hook ID')
    _COMPILED_SDFG_CALL_HOOKS[hook_id] = None
@deprecated('This function is now called SE3ToXYZQUAT. Please change for this new signature to delete this warning.')
def se3ToXYZQUAT(M):
    return pin.SE3ToXYZQUAT(M)
def main(): p = argparse.ArgumentParser() p.add_argument('node_mh_pickle') p.add_argument('lca_db') args = p.parse_args() node_mhs = pickle.load(open(args.node_mh_pickle, 'rb')) lca_obj = LCA_Database() lca_obj.load(args.lca_db) databases = ((lca_obj, args.lca_db, 'LCA'),) d = {} ...
def GenCircle_PUNGraph(Nodes, NodeOutDeg=1, IsDir=True):
    return _snap.GenCircle_PUNGraph(Nodes, NodeOutDeg, IsDir)
def process_constraints(constraints, columns, slots): slot_values = {} skip_db_with_one_table = False for constraint in constraints: if ('P0==' == constraint): assert ('{OP0}' in slots) slot_values['{OP0}'] = '=' elif ('P1==' == constraint): assert ('{OP1}...
class Token(object): def __init__(self, start_mark, end_mark): self.start_mark = start_mark self.end_mark = end_mark def __repr__(self): attributes = [key for key in self.__dict__ if (not key.endswith('_mark'))] attributes.sort() arguments = ', '.join([('%s=%r' % (key, ge...
class CondBatchNorm3d(_CondBatchNorm):

    def _check_input_dim(self, input):
        if input.dim() != 5:
            raise ValueError('expected 5D input (got {}D input)'.format(input.dim()))
class FairseqAdamConfig(FairseqDataclass): adam_betas: Any = field(default=(0.9, 0.999), metadata={'help': 'betas for Adam optimizer'}) adam_eps: float = field(default=1e-08, metadata={'help': 'epsilon for Adam optimizer'}) weight_decay: float = field(default=0.0, metadata={'help': 'weight decay'}) use_...
class RegressionLoss(object):

    def __call__(self, pred, gtruth):
        loss = self.criterion(pred, gtruth)
        return loss
def resnet_mark_before_relu(model):
    if isinstance(model, DataParallel):
        model.module.conv1.before_relu = True
    else:
        model.conv1.before_relu = True
    mark_bottlenetck_before_relu(model)
    mark_basicblock_before_relu(model)
def test_top_selector_find_top_k_binary_values_none():
    ts = TopSelector()
    tst = TopSelectorTorch()
    with pytest.raises(TypeError):
        ts.find_top_k_binary(None, k)
    with pytest.raises(TypeError):
        tst.find_top_k_binary(None, k)
class MaxMarginRankingLoss(nn.Module): def __init__(self, margin=1.0, negative_weighting=False, batch_size=1, n_pair=1, hard_negative_rate=0.5): super(MaxMarginRankingLoss, self).__init__() self.margin = margin self.n_pair = n_pair self.batch_size = batch_size easy_negative_r...
def action_open_cache_dir():
    d = misc.get_cache_home()
    misc.info(f'Opening cache directory: {d}')
    if sys.platform == 'win32':
        os.startfile(d)
    elif sys.platform == 'darwin':
        subprocess.Popen(['open', d])
    else:
        subprocess.Popen(['xdg-open', d])
def load_data(args): if (args.dataset_str == 'dblp'): (adj_list, features, train_data, train_label, test_data, test_label) = load_data_dblp('dataset/DBLP4057_GAT_with_idx_tra200_val_800.mat') node_size = features.shape[0] node_embedding = features.shape[1] class_size = train_label.sh...
def visualize_heatmap(image, mask):
    masks = norm_image(mask).astype(np.uint8)
    heatmap = cv2.applyColorMap(masks, cv2.COLORMAP_JET)
    heatmap = np.float32(heatmap)
    heatmap = cv2.resize(heatmap, (image.shape[1], image.shape[0]))
    cam = 0.4 * heatmap + 0.6 * np.float32(image)
    return cam
@numba.core.imputils.lower_builtin('end_tuple', ArrayBuilderType) def lower_endtuple(context, builder, sig, args): (arraybuildertype,) = sig.args (arraybuilderval,) = args proxyin = context.make_helper(builder, arraybuildertype, arraybuilderval) call(context, builder, libawkward.ArrayBuilder_endtuple, (proxyin.rawptr,))
class TFRobertaModel(metaclass=DummyObject):
    _backends = ['tf']

    def __init__(self, *args, **kwargs):
        requires_backends(self, ['tf'])
def create_cumprod_input(rng, shape, axis, with_mask, with_random_zero_pos, zero_pos): x = rng.randn(*shape).astype(np.float32) if with_mask: if with_random_zero_pos: mask = (rng.rand(*shape) > (1.0 / shape[axis])) x = (x * mask) else: x.swapaxes(0, axis)[zero...
def load_config(path, default_path=None): with open(path, 'r') as f: cfg_special = yaml.load(f, Loader=yaml.CLoader) if (default_path is not None): with open(default_path, 'r') as f: cfg_default = yaml.load(f, Loader=yaml.CLoader) else: cfg_default = dict() cfg_import...
def get_args(): parser = argparse.ArgumentParser() parser.add_argument('--in_data', type=str) parser.add_argument('--nb_train', type=int, default=(- 1)) parser.add_argument('--nb_jobs', type=int, default=8) parser.add_argument('--nb_splits', type=int, default=5) parser.add_argument('--eval_split...
_spec([HookScope.GLOBAL]) def after_call(context: HookContext, case: Case, response: GenericResponse) -> None:
def is_square(input_matrix: Union[sparse.csr_matrix, np.ndarray]) -> bool:
    return input_matrix.shape[0] == input_matrix.shape[1]
class MatFile5Reader(MatFileReader): def __init__(self, mat_stream, byte_order=None, mat_dtype=False, squeeze_me=False, chars_as_strings=True, matlab_compatible=False, struct_as_record=True, verify_compressed_data_integrity=True, uint16_codec=None): super(MatFile5Reader, self).__init__(mat_stream, byte_orde...
def ismatrix(t):
    return (isinstance(t, (list, tuple)) and len(t) > 0 and issequence(t[0])) or (isinstance(t, np.ndarray) and t.ndim == 2)
def test_ListOffset(): builder = ListOffset('int64', Numpy('float64', ''), '') subbuilder = builder.begin_list() subbuilder.append(1.1) subbuilder.append(2.2) subbuilder.append(3.3) builder.end_list() subbuilder = builder.begin_list() builder.end_list() subbuilder = builder.begin_lis...
class ReActNetBlock(nn.Module): def __init__(self, inplanes, planes, stride=1): super(ReActNetBlock, self).__init__() norm_layer = nn.BatchNorm2d self.move11 = LearnableBias(inplanes) self.binary_activation1 = BinaryActivation() self.binary_3x3 = conv3x3(inplanes, inplanes, s...
def get_quantizers(cfg, test, pname, with_bias=True): if (cfg.w_quantize in ['fp', 'parametric_fp_b_xmax', 'parametric_fp_d_xmax', 'parametric_fp_d_b', 'pow2', 'parametric_pow2_b_xmax', 'parametric_pow2_b_xmin', 'parametric_pow2_xmin_xmax']): if (pname in nn.get_parameters()): delta = find_delta...
@pytest.mark.parametrize('grid', [1, 3, 6]) @pytest.mark.parametrize('block_size, context', [(40, 0), (55, 3), (80, 10), (128, 17), (256, 80), (512, 93)]) def test_cover2D(block_size, context, grid): lbl = real_image2d()[1] lbl = lbl.astype(np.int32) max_sizes = tuple(calculate_extents(lbl, func=np.max)) min_overlap = tuple(((
class Normal_Loader(Dataset): def __init__(self, is_train=1, path='/workspace/DATA/UCF-Crime/'): super(Normal_Loader, self).__init__() self.is_train = is_train self.path = path if (self.is_train == 1): data_list = os.path.join(path, 'train_normal.txt') with op...
class MBInvertedConvLayer(MyModule): def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, expand_ratio=6, mid_channels=None, act_func='relu6', use_se=False): super(MBInvertedConvLayer, self).__init__() self.in_channels = in_channels self.out_channels = out_channels ...
def import_cves(): cf.logger.info(('-' * 70)) if db.table_exists('cve'): cf.logger.warning('The cve table already exists, loading and continuing extraction...') else: for year in range(initYear, (currentYear + 1)): extract_target = (('nvdcve-1.1-' + str(year)) + '.json') ...
def dump_paths(Graph, rating_pair, maxLen, sample_size, fw_file): for pair in rating_pair: user_id = pair[0] movie_id = pair[1] user_node = ('u' + user_id) movie_node = ('i' + movie_id) if (Graph.has_node(user_node) and Graph.has_node(movie_node)): mine_paths_betw...
class FrameLevel(nn.Module): def __init__(self, input_dim, output_dim, hiddens=None, activation='ReLU', **kwargs): super().__init__() latest_dim = input_dim self.hiddens = [] if (hiddens is not None): for dim in hiddens: self.hiddens += [nn.Linear(latest_d...
def load_bootstrap_config(cfg: CN): if (not cfg.BOOTSTRAP_DATASETS): return bootstrap_datasets_cfgnodes = [] for dataset_cfg in cfg.BOOTSTRAP_DATASETS: _C = get_bootstrap_dataset_config().clone() _C.merge_from_other_cfg(CN(dataset_cfg)) bootstrap_datasets_cfgnodes.append(_C) ...
def get_solver(solver_, warm_start_, num_cpu_, default_action_): if (solver_ == 'random_shooting'): mpc_solver = RandomShooting(dynamical_model=dynamical_model, reward_model=reward_model, horizon=HORIZON, gamma=1.0, num_samples=NUM_SAMPLES, num_elites=NUM_ELITES, termination=termination, terminal_reward=val...
def compatible_system_lift(compatible_system, split_primes_list): if (len(split_primes_list) != len(compatible_system)): raise ValueError('The number of primes does not match the length of the given exponent vectors.') exponent_vector_lift = [ZZ(compatible_system[0][0][0])] complement_vector_lift = ...
class EarlyStopping(): def __init__(self, early_stopping_ns: SimpleNamespace, validation_metric: str, validation_k: int, cutoffs: t.List, simple_metrics: t.List): self.logger = logging.get_logger(self.__class__.__name__, pylog.DEBUG) self.validation_metric = validation_metric self.validation...
() def get(key: str): try: value = cloud_config.get_flag(key) console.print(f'[bold][blue]{key}[/blue] = [italic][green]{value}[/italic][/green]') except KeyError: console.print(f'[red][bold]{key}[/bold] is not a valid config key[/red]')
class JonesDatabase(): def __init__(self): self.root = None def __repr__(self): return "John Jones's table of number fields with bounded ramification and degree <= 6" def _load(self, path, filename): print(filename) i = 0 while filename[i].isalpha(): i += ...
class Max(Module): def __init__(self, dimension=0): super(Max, self).__init__() self.dimension = dimension self._output = None self._indices = None def _getPositiveDimension(self, input): dimension = self.dimension if (dimension < 0): dimension = (inpu...
def ResNet18(num_classes=10):
    return ResNet(BasicBlock, layers=[2, 2, 2, 2], filters=[64, 128, 256, 512], num_classes=num_classes)
class CPPInliner():

    def __init__(self, inline_target, inline_val):
        self.inline_target = inline_target
        self.inline_val = inline_val

    def inline(self, code: str):
        return re.sub('\\b%s\\b' % re.escape(self.inline_target), '(' + self.inline_val + ')', code)
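A minimal usage sketch for the `CPPInliner` above (the target name 'N' and value '128' are made-up illustrations; `re` must be imported for `inline` to run):

import re  # used internally by CPPInliner.inline

# Replace whole-word occurrences of the symbol 'N' with the parenthesized value '(128)'.
inliner = CPPInliner('N', '128')
src = 'int buf[N]; for (int i = 0; i < N; ++i) buf[i] = i;'
print(inliner.inline(src))
# int buf[(128)]; for (int i = 0; i < (128); ++i) buf[i] = i;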
def test_interval_real_not_int():
    constraint = Interval(RealNotInt, 0, 1, closed='both')
    assert constraint.is_satisfied_by(1.0)
    assert not constraint.is_satisfied_by(1)
class TFViTMAEPreTrainedModel(metaclass=DummyObject):
    _backends = ['tf']

    def __init__(self, *args, **kwargs):
        requires_backends(self, ['tf'])
def max_pool_3d(input, ds, ignore_border=False): if (input.ndim < 3): raise NotImplementedError('max_pool_3d requires a dimension >= 3') vid_dim = input.ndim if ((ds[1] > 1) or (ds[2] > 1)): frame_shape = input.shape[(- 2):] batch_size = tensor.prod(input.shape[:(- 2)]) batch...
def simplify_onnx_model(model: onnx.ModelProto, auto_merge: bool) -> onnx.ModelProto:
    model, check = onnxsim.simplify(model, skip_fuse_bn=True)
    if not check:
        raise RuntimeError('onnx-simplifier optimizations failed')
    return model
def load_frames_directory_dict(directory: str, pattern: str) -> OpenPoseFrames: frames = {} with os.scandir(directory) as entry_iterator: for entry in entry_iterator: with open(entry.path, 'r') as f: frame_id = get_frame_id(entry.name, pattern=pattern) frame_d...
def _reconstitute(form, length, container, getkey, backend, byteorder, simplify): if isinstance(form, ak.forms.EmptyForm): if (length != 0): raise ValueError(f'EmptyForm node, but the expected length is {length}') return ak.contents.EmptyArray() elif isinstance(form, ak.forms.NumpyFo...
class Polynomial_generic_sparse_cdvf(Polynomial_generic_sparse_cdv, Polynomial_generic_cdvf):
    pass
def test_serialize_int_float():
    obj = MyObject(1.0)
    assert obj.float_prop == 1.0
    json_obj = obj.to_json()
    json_obj['float_prop'] = int(json_obj['float_prop'])
    obj = MyObject.from_json(json_obj)
    assert obj.float_prop == 1.0
class TarIO(ContainerIO.ContainerIO): def __init__(self, tarfile, file): self.fh = open(tarfile, 'rb') while True: s = self.fh.read(512) if (len(s) != 512): raise OSError('unexpected end of tar file') name = s[:100].decode('utf-8') i = ...
class BasicBlock(nn.Sequential): def __init__(self, in_channels, out_channels, kernel_size, stride=1, bias=True, conv3x3=default_conv, norm=default_norm, act=default_act): modules = [] modules.append(conv3x3(in_channels, out_channels, kernel_size, stride=stride, bias=bias)) if (norm is not N...
def iter_rows(rows, col_count):
    for row in rows:
        row = tuple(row)
        yield row + ('',) * (col_count - len(row))
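A short usage sketch for `iter_rows` above (example data invented for illustration): ragged rows are right-padded with empty strings out to `col_count` columns.

rows = [('a', 'b'), ('c',), ('d', 'e', 'f')]
print(list(iter_rows(rows, col_count=3)))
# [('a', 'b', ''), ('c', '', ''), ('d', 'e', 'f')]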
def _assert_override(spy, arg, original, overridden):
    for key, value in {**original, **overridden}.items():
        kwargs = spy.call_args[1]
        assert kwargs[arg][key] == value
    assert all(key not in kwargs for key in overridden)