code
stringlengths
101
5.91M
def calibration_time_to_event(Forecast, T, E): cdfs = Forecast.cdf(T) kmf = KaplanMeierFitter() kmf.fit(cdfs, E) idxs = np.round(np.linspace(0, (len(kmf.survival_function_) - 1), 11)) preds = np.array(kmf.survival_function_.iloc[idxs].index) obs = (1 - np.array(kmf.survival_function_.iloc[idxs]....
def get_prober_name():
    """Return the name of an available media-probe executable.

    Prefers 'avprobe', then 'ffprobe'. If neither is found on PATH, a
    RuntimeWarning is emitted and 'ffprobe' is returned anyway (later
    invocation may then fail).
    """
    for candidate in ('avprobe', 'ffprobe'):
        if which(candidate):
            return candidate
    warn("Couldn't find ffprobe or avprobe - defaulting to ffprobe, but may not work",
         RuntimeWarning)
    return 'ffprobe'
def main_worker(args): train_dataset = MC_Dataset(data_path=args.data_dir, split='train', caption_type='gt') train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=args.batch_size, shuffle=False, num_workers=args.workers, pin_memory=True, drop_last=False) model = BartCaptionModel(max_length=ar...
def init_tparams(params):
    """Wrap each numpy parameter in a named Theano shared variable.

    Parameters
    ----------
    params : mapping of str -> numpy array

    Returns an OrderedDict (same iteration order as the input) mapping each
    name to ``theano.shared(value, name=name)``.
    """
    tparams = OrderedDict()
    # Original used the Python-2-only dict.iteritems(), which raises
    # AttributeError on Python 3; .items() works on both.
    for kk, pp in params.items():
        tparams[kk] = theano.shared(pp, name=kk)
    return tparams
class FreeGradedModuleElement(IndexedFreeModuleElement): def dense_coefficient_list(self, order=None): if (order is None): order = self.parent()._indices return [self[i] for i in order] def degree(self): if self.is_zero(): raise ValueError('the zero element does n...
def get_task_type(values: np.ndarray) -> TaskType:
    """Infer the learning task from the target values.

    Raises RuntimeError for a constant target; exactly two distinct values
    means binary classification, anything more means regression.
    """
    n_unique = np.unique(values).shape[0]
    if n_unique == 1:
        raise RuntimeError('Only unique value in target')
    return TaskType.BIN if n_unique == 2 else TaskType.REG
def centrality_scores(X, alpha=0.85, max_iter=100, tol=1e-10): n = X.shape[0] X = X.copy() incoming_counts = np.asarray(X.sum(axis=1)).ravel() print('Normalizing the graph') for i in incoming_counts.nonzero()[0]: X.data[X.indptr[i]:X.indptr[(i + 1)]] *= (1.0 / incoming_counts[i]) dangle ...
_types('array') def change_items(context: MutationContext, draw: Draw, schema: Schema) -> MutationResult: items = schema.get('items', {}) if (not items): return MutationResult.FAILURE if isinstance(items, dict): return _change_items_object(context, draw, schema, items) if isinstance(item...
def trivial_task(solvable): variables = sas_tasks.SASVariables([2], [(- 1)], [['Atom dummy(val1)', 'Atom dummy(val2)']]) mutexes = [] init = sas_tasks.SASInit([0]) if solvable: goal_fact = (0, 0) else: goal_fact = (0, 1) goal = sas_tasks.SASGoal([goal_fact]) operators = [] ...
class Residual(nn.Module): def __init__(self, numIn, numOut, inputResH, inputResW, stride=1, net_type='preact', useConv=False, baseWidth=9, cardinality=4): super(Residual, self).__init__() self.con = ConcatTable([convBlock(numIn, numOut, inputResH, inputResW, net_type, baseWidth, cardinality, stride...
class DistributedDataParallelCommHookTest(MultiProcessTestCase): def setUp(self): super(DistributedDataParallelCommHookTest, self).setUp() self._fork_processes() def tearDown(self): try: os.remove(self.file_name) except OSError: pass def world_size(sel...
def load_phrases_dict(phrases_dict, style='default'): if (style == 'tone2'): for (k, value) in phrases_dict.items(): v = [list(map(_replace_tone2_style_dict_to_default, pys)) for pys in value] PHRASES_DICT[k] = v else: PHRASES_DICT.update(phrases_dict) mmseg.retrain(m...
_task('masked_lm') class MaskedLMTask(LegacyFairseqTask): def add_args(parser): parser.add_argument('data', help='colon separated path to data directories list, will be iterated upon during epochs in round-robin manner') parser.add_argument('--sample-break-mode', default=...
class _DummyChromosomeOutputVariableFactory(ChromosomeOutputVariableFactory):
    """Test stub: a factory whose data is a fixed constant, ignoring the chromosome."""

    def get_data(self, individual: tsc.TestSuiteChromosome) -> int:
        # Arbitrary sentinel value; sufficient to exercise the factory API in tests.
        return 42
def calculate_matches(all_docs: Dict[(object, Tuple[(str, str)])], answers: List[List[str]], closest_docs: List[Tuple[(List[object], List[float])]], workers_num: int, match_type: str) -> QAMatchStats: global dpr_all_documents dpr_all_documents = all_docs tok_opts = {} tokenizer = SimpleTokenizer(**tok_o...
def get_eth_consensus(): client = docker.from_env() all_containers = client.containers.list() for container in all_containers: labels = container.attrs['Config']['Labels'] if ('EthereumService' in labels.get('org.seedsecuritylabs.seedemu.meta.class', [])): return labels.get('org....
def GetKCoreNodes_PNGraph(Graph, CoreIdSzV):
    """Thin generated wrapper: delegate directly to the native _snap binding.

    See SNAP's GetKCoreNodes documentation for the semantics of the
    (Graph, CoreIdSzV) arguments and the return value.
    """
    return _snap.GetKCoreNodes_PNGraph(Graph, CoreIdSzV)
def pca_feature(feature, dim=None):
    """Project `feature` onto its top `dim` principal components.

    Accepts a torch tensor (converted to numpy) or a numpy ndarray.
    `dim=None` keeps all components. Returns the transformed numpy array.
    """
    if torch.is_tensor(feature):
        feature = feature.numpy()
    # Fixed garbled assertion message ("is nor a tensor or" -> readable text).
    assert isinstance(feature, np.ndarray), 'feature is not a tensor nor a numpy ndarray'
    pca = decomposition.PCA(n_components=dim)
    feature_pca = pca.fit_transform(feature)
    return feature_pca
def test_label_combination_hoeffding_tree_coverage(): max_samples = 10000 max_size_kb = 50 stream = MultilabelGenerator(n_samples=10000, n_features=15, n_targets=3, n_labels=4, random_state=112) learner = LabelCombinationHoeffdingTreeClassifier(n_labels=3, leaf_prediction='mc', memory_estimate_period=20...
class EvalModel(collections.namedtuple('EvalModel', ('graph', 'model', 'src_file_placeholder', 'tgt_file_placeholder', 'iterator'))):
    """Immutable bundle of everything needed to run evaluation: the graph,
    the model, the source/target file-path placeholders, and the dataset
    iterator. Subclassing the namedtuple only adds this docstring."""
    pass
class CodeGenConfig(PretrainedConfig): model_type = 'codegen' attribute_map = {'max_position_embeddings': 'n_positions', 'hidden_size': 'n_embd', 'num_attention_heads': 'n_head', 'num_hidden_layers': 'n_layer'} def __init__(self, vocab_size=50400, n_positions=2048, n_ctx=2048, n_embd=4096, n_layer=28, n_hea...
def get_set_encoding(source_set, onehot=True): num_elements = len(source_set) source_list = list(source_set) source_list.sort() thing2idx = {s: i for (i, s) in enumerate(source_list)} idx2thing = [s for (i, s) in enumerate(source_list)] if onehot: thing2vec = {s: idx_to_onehot(i, num_ele...
def analyze_datars(times1, times2, values1, values2, colors=('red', 'navy')): from adjustText import adjust_text all_ts = [] all_times = [*times1, *times2] all_vals = [*values1, *values2] for (times, values, color) in zip([times1, times2], [values1, values2], colors): max = np.max(values) ...
def parse_argv(parser): parser.add_argument('--pred_file', required=True, type=str, help='Name of dataset to run prediction for; will be ignored if --evaluate is test') parser.add_argument('--tasks', dest='task_names', nargs='+', required=True, help='task names for prediction') parser.add_argument('--seed',...
def main(args):
    """Dispatch to the plotting routine selected by ``args.func``.

    Raises ValueError for an unrecognized function name.
    """
    dispatch = {
        'plot_pert': plot_perturtation,
        'plot_bpf': plot_band_pass_filter,
        'plot_freq_ana': plot_freq_analysis,
    }
    if args.func not in dispatch:
        raise ValueError
    dispatch[args.func](args.plot_data_path)
def LF_icd_complication(c):
    """Labeling function: return 1 when the candidate's complication span
    contains the ICD code prefix '996', else 0."""
    span_text = c.complication.get_span().lower()
    return int('996' in span_text)
class CorpusReader(): def __init__(self, src_file, trg_file=None, max_sentence_length=80, cache_size=1000): self.src_file = src_file self.trg_file = trg_file self.epoch = 1 self.pending = set() self.length2pending = collections.defaultdict(set) self.next = 0 s...
class BaseBatchNormalizationFolding(BaseKerasFeatureNetworkTest, ABC): def __init__(self, unit_test, linear_layer): self.linear_layer = linear_layer super(BaseBatchNormalizationFolding, self).__init__(unit_test=unit_test, experimental_exporter=True) def get_tpc(self): tp = generate_test_...
def requeue_job(): if (SLURM_JOBID is None): return if (not REQUEUE.is_set()): return if distrib.is_initialized(): distrib.barrier() if ((not distrib.is_initialized()) or (distrib.get_rank() == 0)): logger.info(f'Requeueing job {SLURM_JOBID}') subprocess.check_cal...
def numberfiltering(sents):
    """Replace every whitespace-separated token that contains a digit
    (per hasNumbers) with the placeholder 'BlahBlah'.

    Takes a list of sentence strings and returns a new list of strings.
    """
    filtered = []
    for sent in sents:
        tokens = ['BlahBlah' if hasNumbers(tok) else tok
                  for tok in sent.strip().split()]
        filtered.append(' '.join(tokens))
    return filtered
def quickumls(doc_list): from quickumls import QuickUMLS assert (not (args.quickumls_path is None)), 'Provide path where QuickUMLS is installed' def process_data(pid, doc_list): data = [] matcher = QuickUMLS(args.quickumls_path, 'score', threshold=0.6) for (i, doc) in enumerate(doc_l...
class deltaEColorLoss(nn.Module): def __init__(self, normalize=None): super(deltaEColorLoss, self).__init__() self.loss = [] self.normalize = normalize self.device = torch.device(('cuda:0' if torch.cuda.is_available() else 'cpu')) def torchTensorToNumpy(self, image): imag...
def sxs_handler(format_string): import itertools import re from . import catalog, metadata, horizons, waveforms if (not format_string): raise ValueError('Empty string cannot be associated with a handler') elif format_string.lower().startswith('catalog'): format_string = re.sub('^cata...
def batchnorm_reconstruction_node_matchers() -> NodeOperationMatcher: conv_node = ((NodeOperationMatcher(DepthwiseConv2D) | NodeOperationMatcher(Conv2D)) | NodeOperationMatcher(Conv2DTranspose)) activation_linear = NodeFrameworkAttrMatcher(ACTIVATION, LINEAR) source_node = (conv_node & activation_linear) ...
def parse_args(parser): assert isinstance(parser, ArgumentParser) args = parser.parse_args() (pos_group, optional_group) = (parser._action_groups[0], parser._action_groups[1]) args_dict = args._get_kwargs() pos_optional_arg_names = ([arg.dest for arg in pos_group._group_actions] + [arg.dest for arg ...
def add_upload_command(subparsers): from nnabla.utils.cli.uploader import upload_command subparser = subparsers.add_parser('upload', help='Upload dataset to Neural Network Console.') subparser.add_argument('-e', '--endpoint', help='set endpoint uri', type=str) subparser.add_argument('token', help='token...
def _get_name(x): if isinstance(x.userData, dict): return x.userData.get('name') return None
def proc_one(path_midi, path_outfile): midi_obj = miditoolkit.midi.parser.MidiFile(path_midi) instr_notes = collections.defaultdict(list) for instr in midi_obj.instruments: if (instr.name not in INSTR_NAME_MAP.keys()): continue instr_idx = INSTR_NAME_MAP[instr.name] for n...
def test_synthetic_sample_results_with_exponential_delay_function_has_same_delays_each_dataset(): n_actions = 3 delay_function = ExponentialDelaySampler(max_scale=1000.0, random_state=12345).exponential_delay_function dataset = BanditEnvironmentSimulator(n_actions=n_actions, reward_function=logistic_sparse_...
def strictly_upper_triangular_matrices(R, n): from sage.matrix.matrix_space import MatrixSpace from sage.algebras.lie_algebras.lie_algebra import LieAlgebraFromAssociative MS = MatrixSpace(R, n, sparse=True) one = R.one() names = tuple(('n{}'.format(i) for i in range((n - 1)))) gens = tuple((MS(...
def test_full_scores_chars_length(): print(('Loaded language model: %s' % language_model_path)) r = list(model.full_scores(sentence_char_split)) n = list(model.full_scores(sentence_char_split, bos=False, eos=False)) print(r) print(n) assert (len(r) == (len(n) + 1)) print(len(n), len(sentence...
class TomlArraySeparatorEncoder(TomlEncoder): def __init__(self, _dict=dict, preserve=False, separator=','): super(TomlArraySeparatorEncoder, self).__init__(_dict, preserve) if (separator.strip() == ''): separator = (',' + separator) elif separator.strip(' \t\n\r,'): ...
class SoftmaxShift(common.BaseSubstitution): def __init__(self, nodes: List[BaseNode], bias_str: str): super().__init__(matcher_instance=nodes) self.bias_str = bias_str def substitute(self, graph: Graph, nodes: List[BaseNode]) -> Graph: first_node = nodes[0] if first_node.is_acti...
class AST_Comment(AST_Node): def __init__(self, context, text): AST_Node.__init__(self, context) self.text = text def get_children(self): return [] def replace_child(self, old, new): raise ValueError('AST_Comment has no children') def generate_code(self, sdfg, state): ...
def mse_r(s_hat, log_r_hat, t_hat, y, log_r, t):
    """Combined ratio MSE: the sum of the mse_r0 and mse_r1 components,
    each evaluated on the same predictions and targets."""
    loss0 = mse_r0(s_hat, log_r_hat, t_hat, y, log_r, t)
    loss1 = mse_r1(s_hat, log_r_hat, t_hat, y, log_r, t)
    return loss0 + loss1
def down_sample_avg(x, scale_factor=2):
    # 3x3 average pooling with stride `scale_factor` and SAME padding,
    # i.e. spatial downsampling by `scale_factor` with light smoothing.
    return tf.layers.average_pooling2d(x, pool_size=3, strides=scale_factor, padding='SAME')
def main(args): if (os.path.splitext(args.input_file_path)[1] not in FileExtension.as_list()): err_msg = f'The input file is not a jsonl or txt file {args.input_file_path}' raise ValueError(err_msg) verify_input_file(args.input_file_path) output_dir = get_output_dir(args.cmd, args.output_pat...
class RandomUnkFeature(VectorFeature): def __init__(self, parent: EmbeddingBase): super().__init__(parent) self.words: Dict[(str, np.ndarray)] = {} def apply(self, pos: int, word: str, weight: float, vector) -> Tuple[(float, Optional[np.ndarray])]: if (vector is not None): re...
class GraphCL(torch.nn.Module): def __init__(self, gnn, hid_dim=16): super(GraphCL, self).__init__() self.gnn = gnn self.projection_head = torch.nn.Sequential(torch.nn.Linear(hid_dim, hid_dim), torch.nn.ReLU(inplace=True), torch.nn.Linear(hid_dim, hid_dim)) def forward_cl(self, x, edge_i...
class PersonMaskRCNNDetector(object): COCO_INSTANCE_CATEGORY_NAMES = ['__background__', 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light', 'fire hydrant', 'N/A', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow', 'elephant', 'be...
def spectrogram(*args, **kwargs):
    # Run the local baseline with the spectrogram model config shipped next
    # to this module; all other arguments are forwarded unchanged.
    kwargs['model_config'] = os.path.join(os.path.dirname(__file__), 'spectrogram.yaml')
    return baseline_local(*args, **kwargs)
def CheckSpacingForFunctionCall(filename, clean_lines, linenum, error): line = clean_lines.elided[linenum] fncall = line for pattern in ('\\bif\\s*\\((.*)\\)\\s*{', '\\bfor\\s*\\((.*)\\)\\s*{', '\\bwhile\\s*\\((.*)\\)\\s*[{;]', '\\bswitch\\s*\\((.*)\\)\\s*{'): match = Search(pattern, line) i...
def simReadVisionSensor(sensorHandle): auxValues = ffi.new('float **') auxValuesCount = ffi.new('int **') state = lib.simReadVisionSensor(sensorHandle, auxValues, auxValuesCount) auxValues2 = [] if (state == 0): s = 0 for i in range(auxValuesCount[0]): auxValues2.append(a...
def get_word_overlap(text1, text2):
    """Jaccard overlap between the lemmatized token sets of two texts.

    Returns |intersection| / |union| of the two token sets, or 0.0 when
    both texts tokenize to nothing (the original raised ZeroDivisionError
    in that case).
    """
    tokens1 = set(tokenize(text1, lemmas=True))
    tokens2 = set(tokenize(text2, lemmas=True))
    union = tokens1 | tokens2
    if not union:
        return 0.0
    return len(tokens1 & tokens2) / len(union)
def GetQAofImage(id=61512): page = 1 next = ((('/api/v0/image/' + str(id)) + '/qa?page=') + str(page)) qas = [] image_map = {} while True: data = utils.RetrieveData(next) for d in data['results']: if (d['image'] not in image_map): image_map[d['image']] = G...
def autogen_all():
    """Regenerate the Sage interpreter modules in place.

    Rebuilds sage/ext/interpreters under SAGE_SRC and returns the list of
    package names whose sources were (re)generated.
    """
    from sage.env import SAGE_SRC
    interpreters.rebuild(os.path.join(SAGE_SRC, 'sage', 'ext', 'interpreters'))
    return ['sage.ext.interpreters']
def LinkFileLock(*args, **kwds):
    """Factory wrapper: construct a linklockfile.LinkLockFile via _fl_helper.

    NOTE(review): _fl_helper is defined elsewhere in this package — it
    presumably handles backward-compatibility for the old module path
    'lockfile.linklockfile'; verify against its definition.
    """
    from . import linklockfile
    return _fl_helper(linklockfile.LinkLockFile, 'lockfile.linklockfile', *args, **kwds)
def mobilenet_load_pretrained_imagenet_weights(model): (_, ext) = os.path.splitext(cfg.TRAIN.IMAGENET_PRETRAINED_WEIGHTS) if (ext == '.pkl'): with open(cfg.TRAIN.IMAGENET_PRETRAINED_WEIGHTS, 'rb') as fp: src_blobs = pickle.load(fp, encoding='latin1') if ('blobs' in src_blobs): ...
class ActuatedTrajectoryDataset(dataset.TensorDataset): def __init__(self, traj_q_T_B, traj_v_T_B, traj_u_T_B): self.q_B_T = traj_q_T_B.transpose(1, 0) self.v_B_T = traj_v_T_B.transpose(1, 0) self.u_B_T = traj_u_T_B.transpose(1, 0) assert (self.q_B_T.size(0) == self.v_B_T.size(0) == ...
def compute_measures_for_binary_segmentation_summed(predictions, targets):
    """Sum per-image binary-segmentation measures over a batch.

    Computes the per-image measure dict for each (prediction, target) pair
    via compute_measures_for_binary_segmentation_single_image and returns a
    single dict with every metric summed across images. Requires at least
    one image pair (raises IndexError otherwise, as before).
    """
    per_image = [compute_measures_for_binary_segmentation_single_image(p, t)
                 for p, t in zip(predictions, targets)]
    # Copy the first result instead of aliasing it: the original code
    # accumulated into res[0] in place, silently corrupting that entry.
    totals = dict(per_image[0])
    for measures in per_image[1:]:
        for key, value in measures.items():
            totals[key] += value
    return totals
def test_case42(): url = (brokerIp + '/ngsi-ld/v1/subscriptions/') headers = {'Content-Type': 'application/ld+json', 'Link': '<{{link}}>; rel=" type="application/ld+json"'} r = requests.post(url, data=json.dumps(ld_data.subdata30), headers=headers) print(r.content) print(r.status_code) url = (di...
def block1_deactivate_all():
    # Deactivate every cell of the three sparse block levels.
    # NOTE(review): the order block3 -> block2 -> block1 looks like
    # finest-to-coarsest on purpose — confirm against the field layout.
    for I in ti.grouped(block3):
        ti.deactivate(block3, I)
    for I in ti.grouped(block2):
        ti.deactivate(block2, I)
    for I in ti.grouped(block1):
        ti.deactivate(block1, I)
class HighResolutionModule(nn.Module): def __init__(self, num_branches, blocks, num_blocks, num_inchannels, num_channels, fuse_method, multi_scale_output=True): super(HighResolutionModule, self).__init__() self._check_branches(num_branches, blocks, num_blocks, num_inchannels, num_channels) s...
def _ntuple(n): def parse(x): if isinstance(x, collections.Iterable): return x return tuple(repeat(x, n)) return parse
class TestKind(util.F2PyTest): sources = [_path('src', 'kind', 'foo.f90')] .slow def test_all(self): selectedrealkind = self.module.selectedrealkind selectedintkind = self.module.selectedintkind for i in range(40): assert_((selectedintkind(i) in [selected_int_kind(i), (- ...
def build_head(cfg): head_cfg = deepcopy(cfg) name = head_cfg.pop('name') if (name == 'YOLOv5Head'): return YOLOv5Head(**head_cfg) elif (name == 'YOLOXHead'): return YOLOXHead(**head_cfg) elif (name == 'YOLOv6Effidehead'): return YOLOv6Effidehead(**head_cfg) elif (name ==...
class Function_Bessel_J(BuiltinFunction): def __init__(self): BuiltinFunction.__init__(self, 'bessel_J', nargs=2, conversions=dict(maple='BesselJ', mathematica='BesselJ', maxima='bessel_j', sympy='besselj', fricas='besselJ', giac='BesselJ')) def _eval_(self, n, x): if ((not isinstance(x, Express...
def record_result(results_dir, result):
    """Append `result` as one JSON line to results.json inside results_dir."""
    path = os.path.join(results_dir, 'results.json')
    with open(path, 'a') as results_file:
        results_file.write(json.dumps(result) + '\n')
_metaclass(ABCMeta) class CostFunction(object): def __init__(self, ds, da, *args, **kwargs): (self.ds, self.da) = (ds, da) def get_parameters(self): pass def log_likelihood(self, states, costs): pass def evaluate(self, states): pass def __getstate__(self): ret...
def report_upload(setup_server, next_url, upload_message, correlation_id):
    """Register a handler for POST /reports/upload/ that answers with a 202
    JSON body carrying the message, next URL, and correlation id."""
    def responder(h):
        payload = {'message': upload_message, 'next': next_url,
                   'correlation_id': correlation_id}
        return h.respond_with_json(payload, status=202)
    return setup_server(responder, 'POST', '/reports/upload/')
class LampOff(Task): def init_task(self) -> None: self.bulb_glass_visual = Shape('bulb') self.bulb_glass_visual.set_color([1, 1, 1]) self.joint = Joint('target_button_joint') self.condition = JointCondition(self.joint, 0.003) def init_episode(self, index: int) -> List[str]: ...
def _worker_init(G, id):
    """Per-worker initializer: record the worker id on the shared globals
    object G, forcing CPU-only Theano/CUDA when running with parallelism."""
    if (singleton_pool.n_parallel > 1):
        import os
        # Forked workers must not grab the GPU: pin Theano to CPU and hide
        # all CUDA devices before any framework initialization happens.
        os.environ['THEANO_FLAGS'] = 'device=cpu'
        os.environ['CUDA_VISIBLE_DEVICES'] = ''
    G.worker_id = id
class OptGapC3Test(AbstractTest): def __init__(self): super().__init__() self.problem = OptGapC3() def name(self): return 'optgapc3' def run(self): ncf = NcfEpi.new_total_flow(4) hc = HardCodedPartitioning(partition_vector=[0, 0, 1, 2, 2, 3, 4]) ncf.solve(self...
class TFRobertaForTokenClassification(metaclass=DummyObject):
    """Import-time placeholder for the real class when TensorFlow is absent."""
    _backends = ['tf']

    def __init__(self, *args, **kwargs):
        # requires_backends raises with an install hint when 'tf' is missing.
        requires_backends(self, ['tf'])
.parametrize('tree,dataset', [(DecisionTreeClassifier(max_depth=2, random_state=0), datasets.make_classification(random_state=0)), (DecisionTreeRegressor(max_depth=2, random_state=0), datasets.make_regression(random_state=0))]) def test_score_sample_weight(tree, dataset): rng = np.random.RandomState(0) (X, y) =...
def anisotropic_primes(self):
    """Return the places at which this form is anisotropic.

    Candidates are the prime divisors of twice the determinant plus -1
    (the real place); each is kept iff self.is_anisotropic(p) holds.
    """
    candidates = prime_divisors(2 * self.det()) + [-1]
    result = []
    for p in candidates:
        if self.is_anisotropic(p):
            result.append(p)
    return result
def read_vasp(filename):
    """Read a VASP structure file and return the cell parsed by _get_cell."""
    with open(filename) as f:
        lines = f.readlines()
    # (An unreachable trailing `return None` after the return inside the
    # with-block has been removed; behavior is unchanged.)
    return _get_cell(lines)
def convert_dynamic_fx(graph_module, inplace=False, debug=False):
    # Dynamic-quantization entry point: forwards to the shared _convert_fx
    # implementation with is_dynamic_quant=True.
    return _convert_fx(graph_module, inplace, debug, is_dynamic_quant=True)
def get_fngrad_norm(loader, model, device, fngrads=None, grads=None):
    """L2 norm of all gradients flattened and concatenated into one vector.

    When `fngrads` is not supplied it is computed via get_fngrads from the
    loader/model/device (and optional precomputed `grads`).
    """
    if fngrads is None:
        fngrads = get_fngrads(loader, model, device, grads=grads)
    flat_parts = [g.view(-1) for g in fngrads]
    return torch.cat(flat_parts).norm()
def find_bpe_position_by_offset(bpe_offsets, target_offset): bpe_nums = [] for (sent_num, sent) in enumerate(bpe_offsets): if (sent[(- 1)][0] < target_offset[0]): continue for (bpe_num, bpe) in enumerate(sent): if ((target_offset[0] <= bpe[0]) and (bpe[1] <= target_offset...
class PositionwiseFeedForward(nn.Module): def __init__(self, d_model, d_ff, dropout=0.1): super(PositionwiseFeedForward, self).__init__() self.w_1 = nn.Linear(d_model, d_ff) self.w_2 = nn.Linear(d_ff, d_model) self.dropout = nn.Dropout(dropout) def forward(self, x): retur...
class Test_sctype2char(object): def test_scalar_type(self): assert_equal(np.sctype2char(np.double), 'd') assert_equal(np.sctype2char(np.int_), 'l') assert_equal(np.sctype2char(np.unicode_), 'U') assert_equal(np.sctype2char(np.bytes_), 'S') def test_other_type(self): asser...
def test_wilson_efficient_sample(kernel, inducing_variable, whiten): eigenfunctions = RandomFourierFeaturesCosine(kernel, 100, dtype=default_float()) eigenvalues = np.ones((100, 1), dtype=default_float()) kernel2 = KernelWithFeatureDecomposition(kernel, eigenfunctions, eigenvalues) (q_mu, q_sqrt) = _get...
class BaseConverter(metaclass=ABCMeta): ACCEPTED_MODES = None def __init__(self, modes=[]): self.modes = modes for mode in self.modes: if (mode not in self.ACCEPTED_MODES): raise ValueError(f'Input mode not in {self.ACCEPTED_MODES}') def convert(self): pas...
class hypergeom_gen(rv_discrete): def _rvs(self, M, n, N): return self._random_state.hypergeometric(n, (M - n), N, size=self._size) def _get_support(self, M, n, N): return (np.maximum((N - (M - n)), 0), np.minimum(n, N)) def _argcheck(self, M, n, N): cond = (((M > 0) & (n >= 0)) & (N...
class ATTACK(object): FGSM = 'fgsm' BIM = 'bim' BIM_L2 = 'bim_l2' BIM_Li = 'bim_li' DEEPFOOL = 'deepfool' CW_L0 = 'cw_l0' CW_L2 = 'cw_l2' CW_Linf = 'cw_linf' JSMA = 'jsma' ONE_PIXEL = 'onepixel' MIM = 'mim' PGD = 'pgd' def get_supported_attacks(cls): return [c...
def train(epoch): global trainloader, optimizer, args, feat_net, pred_net pred_net.train() feat_net.eval() correct = 0 total = 0 total_loss = 0 optimizer.zero_grad() tot_iters = len(trainloader) for batch_idx in tqdm.tqdm(range(tot_iters), total=tot_iters): (inputs, targets) ...
class SequenceTaggingDecoderMixin(DecoderMixinBase): def scheme(self): return self._scheme def scheme(self, scheme: str): self._scheme = scheme self.translator = ChunksTagsTranslator(scheme=scheme) def idx2tag(self): return self._idx2tag .setter def idx2tag(self, idx2...
def check_sampler_get_feature_names_out_pandas(name, sampler_orig): try: import pandas as pd except ImportError: raise SkipTest('pandas is not installed: not checking column name consistency for pandas') tags = sampler_orig._get_tags() if (('2darray' not in tags['X_types']) or tags['no_v...
class TestEmptyField(object):
    """Regression test: a structured dtype may contain a zero-width field,
    and viewing a flat float32 array through it yields the expected shapes."""

    def test_assign(self):
        data = np.arange(10, dtype=np.float32)
        # Reinterpret the 40-byte buffer as 5 records of
        # (0-byte int field, 8-byte two-float field).
        data.dtype = [('int', '<0i4'), ('float', '<2f4')]
        assert_(data['int'].shape == (5, 0))
        assert_(data['float'].shape == (5, 2))
class ResBlock(BaseModule): def __init__(self, in_channels, conv_cfg=None, norm_cfg=dict(type='BN', requires_grad=True), act_cfg=dict(type='LeakyReLU', negative_slope=0.1), init_cfg=None): super(ResBlock, self).__init__(init_cfg) assert ((in_channels % 2) == 0) half_in_channels = (in_channel...
def test_instruction_equal(): module = 'foo' code_object_id = 1 node_id = 1 opcode = 1 arg = None lineno = 42 offset = 42 instr1 = ExecutedInstruction(module, code_object_id, node_id, opcode, arg, lineno, offset) instr2 = ExecutedInstruction(module, code_object_id, node_id, opcode, a...
class TestPPOPendulumGRU(TfGraphTestCase): .mujoco_long def test_ppo_pendulum_gru(self): with LocalTFRunner(snapshot_config) as runner: env = GarageEnv(normalize(gym.make('InvertedDoublePendulum-v2'))) gru_policy = GaussianGRUPolicy(env_spec=env.spec) baseline = Gauss...
class Partition4(nn.Module): LAYER_SCOPES = ['VisionTransformer/ModuleList[blocks]/Block[5]/Mlp[mlp]/Dropout[drop]', 'VisionTransformer/ModuleList[blocks]/Block[5]/Identity[drop_path]', 'VisionTransformer/ModuleList[blocks]/Block[6]/LayerNorm[norm1]', 'VisionTransformer/ModuleList[blocks]/Block[6]/Attention[attn]/L...
def get_missing_parameters_message(keys: List[str]) -> str:
    """Build a human-readable warning listing checkpoint keys that the model
    defines but the checkpoint does not provide, grouped by common prefix."""
    groups = _group_checkpoint_keys(keys)
    lines = [' ' + colored(prefix + _group_to_str(members), 'blue')
             for prefix, members in groups.items()]
    header = 'Some model parameters or buffers are not found in the checkpoint:\n'
    return header + '\n'.join(lines)
class TestMultivariate(unittest.TestCase): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.max_forecast_steps = 10 self.i = 0 dataset = 'seattle_trail' (d, md) = SeattleTrail(rootdir=join(rootdir, 'data', 'multivariate', dataset))[0] t = in...
def do_flop(cfg): if isinstance(cfg, CfgNode): data_loader = build_detection_test_loader(cfg, cfg.DATASETS.TEST[0]) model = build_model(cfg) DetectionCheckpointer(model).load(cfg.MODEL.WEIGHTS) else: data_loader = instantiate(cfg.dataloader.test) model = instantiate(cfg.m...
class StoDepth_BasicBlock(nn.Module): expansion = 1 def __init__(self, prob, multFlag, inplanes, planes, stride=1, downsample=None): super(StoDepth_BasicBlock, self).__init__() self.conv1 = conv3x3(inplanes, planes, stride) self.bn1 = nn.BatchNorm2d(planes) self.relu = nn.ReLU(in...
def create_logger(filepath, rank): log_formatter = LogFormatter() if (filepath is not None): if (rank > 0): filepath = ('%s-%i' % (filepath, rank)) file_handler = logging.FileHandler(filepath, 'a') file_handler.setLevel(logging.DEBUG) file_handler.setFormatter(log_for...
def is_tuple(ann) -> bool:
    """Return True iff `ann` is a subscripted typing.Tuple annotation.

    A bare (unsubscripted) Tuple is rejected with the dedicated error
    helper; anything not originating from the typing module is False.
    """
    if ann is Tuple:
        raise_error_container_parameter_missing('Tuple')
    module = getattr(ann, '__module__', None)
    if module != 'typing':
        return False
    origin = getattr(ann, '__origin__', None)
    return origin is Tuple or origin is tuple