code stringlengths 101 5.91M |
|---|
class MobileNetV1Config(PretrainedConfig):
model_type = 'mobilenet_v1'
def __init__(self, num_channels=3, image_size=224, depth_multiplier=1.0, min_depth=8, hidden_act='relu6', tf_padding=True, classifier_dropout_prob=0.999, initializer_range=0.02, layer_norm_eps=0.001, **kwargs):
super().__init__(**kwa... |
def init_seed(seed):
    """Make runs reproducible: seed the CPU and all CUDA RNGs and pin cuDNN.

    Disabling the cuDNN autotuner (benchmark=False) and forcing deterministic
    kernels trades some speed for repeatable results.
    """
    torch.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
    cudnn = torch.backends.cudnn
    cudnn.deterministic = True
    cudnn.benchmark = False
def calculate_uplift_at_top(y_true: np.ndarray, uplift_pred: np.ndarray, treatment: np.ndarray, top: float=30):
uplift_percentile = np.percentile(uplift_pred, (100 - top))
mask_top = (uplift_pred > uplift_percentile)
control_true_top = y_true[((treatment == 0) & mask_top)].sum()
treatment_true_top = y_t... |
class LPPool2d(_LPPoolNd):
    """2D power-average (LP) pooling routed through a complex-aware caller.

    Delegates to ``torch.nn.functional.lp_pool2d`` via ``cF.complex_fcaller``,
    presumably so the real-valued op can also be applied to complex tensors
    component-wise -- confirm against ``cF.complex_fcaller``'s implementation.
    """

    # Pooling hyperparameters; norm_type and ceil_mode are inherited from
    # _LPPoolNd.__init__ (not visible here).
    kernel_size: _size_2_t
    stride: _size_2_t

    def forward(self, input: Tensor) -> Tensor:
        # norm_type is cast to float because F.lp_pool2d expects a float power.
        return cF.complex_fcaller(F.lp_pool2d, input, float(self.norm_type), self.kernel_size, self.stride, self.ceil_mode)
def track_parallel_progress(func, tasks, nproc, initializer=None, initargs=None, bar_width=50, chunksize=1, skip_first=False, keep_order=True, file=sys.stdout):
if isinstance(tasks, tuple):
assert (len(tasks) == 2)
assert isinstance(tasks[0], Iterable)
assert isinstance(tasks[1], int)
... |
class DelegatorData():
    """Fixture bundle describing a delegating estimator under test.

    Holds the estimator's display name, its constructor, the methods to skip,
    and the arguments used to fit it.
    """

    def __init__(self, name, construct, skip_methods=(), fit_args=make_classification(random_state=0)):
        # NOTE: the fit_args default is evaluated once at import time; that is
        # acceptable here because make_classification is deterministic with a
        # fixed random_state.
        self.name = name
        self.construct = construct
        self.skip_methods = skip_methods
        self.fit_args = fit_args
def register_types_ns3_Config(module):
root_module = module.get_root()
module.add_class('MatchContainer', import_from_module='ns.core')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Object > > const_iterator', u'ns3::Config::MatchContainer::Iterator')
typehandlers.add_type_alias(u'std::vecto... |
def get_sub_token_ids(question_tokens, span_ids, tu):
(st, ed) = span_ids
prefix_tokens = question_tokens[:st]
prefix = tu.tokenizer.convert_tokens_to_string(prefix_tokens)
prefix_sub_tokens = tu.tokenizer.tokenize(prefix)
span_tokens = question_tokens[st:ed]
span = tu.tokenizer.convert_tokens_t... |
def cubic_param_shape(initializer: Callable, extents: np.ndarray, pixel_spacing: float, control_point_spacing: float, pos: Union[(np.ndarray, goos.Function)], var_name: Optional[str]=None, reflection_symmetry: List[int]=None, periods: List[int]=None, **kwargs) -> Tuple[(goos.Variable, Shape)]:
from spins.goos impor... |
class RelativeRamifiedExtensionRingCappedRelative(EisensteinExtensionGeneric, pAdicCappedRelativeRingGeneric):
def __init__(self, exact_modulus, approx_modulus, prec, print_mode, shift_seed, names, implementation):
self._exact_modulus = exact_modulus
unram_prec = (((prec + approx_modulus.degree()) -... |
def runtime_fn(logfile_path):
runtime = None
with open(logfile_path, 'r') as f:
lines = f.readlines()
for line in lines[(- 10):]:
m = re.match('Mean allocation computation time: (\\d+\\.\\d+) seconds', line)
if (m is not None):
runtime = round(float(m.grou... |
def validate_bg_pnf(df: Union[(str, pd.Series, dd.Series, pd.DataFrame, dd.DataFrame)], column: str='') -> Union[(bool, pd.Series, pd.DataFrame)]:
if isinstance(df, (pd.Series, dd.Series)):
return df.apply(pnf.is_valid)
elif isinstance(df, (pd.DataFrame, dd.DataFrame)):
if (column != ''):
... |
def contained_in(filename, directory):
    """Return True if *filename* lies inside *directory* (or is the directory itself).

    Both paths are made absolute and case-normalized before comparison.

    Bug fix: the original compared ``os.path.commonprefix``, which works
    character by character, so e.g. ``/opt/app2/x`` was wrongly reported as
    contained in ``/opt/app``. We now require the filename to equal the
    directory or to start with the directory followed by a path separator.
    """
    filename = os.path.normcase(os.path.abspath(filename))
    directory = os.path.normcase(os.path.abspath(directory))
    # Guard against doubling the separator when directory is the filesystem root.
    prefix = directory if directory.endswith(os.sep) else directory + os.sep
    return filename == directory or filename.startswith(prefix)
def trainLRModel(train_all, train_label, window_size_list, ngram_extract_mode, flag, save_model=False):
train_ngram_all = tokenExtraction(window_size_list, train_all, mode=ngram_extract_mode)
(train_ngram_counter, train_ngram_dict) = buildTrainDict(train_ngram_all, verbose=False, set_threshold=True, threshold=1... |
class RandomTransforms(object):
    """Abstract base for containers of transforms applied in random fashion.

    Stores the transform sequence; subclasses must implement ``__call__``.
    """

    def __init__(self, transforms):
        # The container must be an ordered sequence (list or tuple).
        assert isinstance(transforms, (list, tuple))
        self.transforms = transforms

    def __call__(self, *args, **kwargs):
        # Concrete random-application policies are defined by subclasses.
        raise NotImplementedError()
def findCosineDistance(source_representation: Union[(np.ndarray, list)], test_representation: Union[(np.ndarray, list)]) -> np.float64:
if isinstance(source_representation, list):
source_representation = np.array(source_representation)
if isinstance(test_representation, list):
test_representatio... |
class Attention(nn.Module):
def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0.0, proj_drop=0.0, sr_ratio=1):
super().__init__()
assert ((dim % num_heads) == 0), f'dim {dim} should be divided by num_heads {num_heads}.'
self.dim = dim
self.num_heads = num_... |
_testing
def test_random_chain_complex(level=1, trials=1, verbose=False):
deprecation(33777, 'the CHomP interface is deprecated; hence so is this function')
for i in range(trials):
C = random_chain_complex(level=level)
for d in C.differential():
chomp = C.homology(d, verbose=verbose)... |
def process_single_table(table, all_entity_set, min_num=3):
processed_data = {}
core_entities = {}
table_id = table.get('_id', '')
pgTitle = table.get('pgTitle', '').lower()
pgEnt = table.get('pgId', (- 1))
if (pgEnt not in all_entity_set):
pgEnt = (- 1)
secTitle = table.get('section... |
def _random_dismantlable_lattice(n):
from sage.misc.prandom import randint
D = DiGraph({0: [(n - 1)]})
for i in range(1, (n - 1)):
a = randint(0, (i // 2))
b_ = list(D.depth_first_search(a))
b = b_[randint(1, (len(b_) - 1))]
D.add_vertex(i)
D.add_edge(a, i)
D.... |
def re_match(utter, value):
    """Return True when *value* occurs in *utter* as a standalone token.

    The utterance is padded with spaces and the value must be delimited on both
    sides by one of '?', ',', '.', '!' or a space.

    Bug fix: *value* is now escaped with ``re.escape`` so values containing
    regex metacharacters (e.g. 'c++', '4.50?') are matched literally instead of
    being interpreted as patterns, which previously could raise ``re.error``
    or match the wrong text.
    """
    pattern = '[?,.! ]' + re.escape(value) + '[?,.! ]'
    return re.search(pattern, ' ' + utter + ' ') is not None
_cache
def get_request_signature() -> inspect.Signature:
    """Return the call signature of ``requests.Request``.

    ``requests`` is imported lazily so the dependency is only needed when this
    helper is actually called.
    """
    import requests

    request_cls = requests.Request
    return inspect.signature(request_cls)
def distributed_init(config):
if (config.distributed.world_size == 1):
raise ValueError('Cannot initialize distributed with distributed_world_size=1')
logger.info(f'XLA Mode:{is_xla()}')
if is_xla():
config.device_id = xm.get_local_ordinal()
config.distributed.rank = xm.get_ordinal()... |
def islong_doublefunction(rout):
    """Return islong_double(...) for the routine's result variable, else 0.

    The result variable is ``rout['result']`` when present, otherwise the
    routine name. Returns 0 when *rout* is not a function or when the result
    variable is not declared in ``rout['vars']``.
    """
    if not isfunction(rout):
        return 0
    result_name = rout['result'] if 'result' in rout else rout['name']
    if result_name in rout['vars']:
        return islong_double(rout['vars'][result_name])
    return 0
class Afformer(nn.Module):
def __init__(self, encoder: nn.Module, decoder: nn.Module, predictor: nn.Module):
super().__init__()
self.encoder = encoder
self.decoder = decoder
self.predictor = predictor
def forward(self, batch):
(images, videos, num_frames_list) = batch[:3]... |
('data.dtd', 'class')
class DTDData(base.ImageTfdsData):
def __init__(self, data_dir=None):
dataset_builder = tfds.builder('dtd:3.*.*', data_dir=data_dir)
dataset_builder.download_and_prepare()
tfds_splits = {'train': 'train', 'val': 'validation', 'trainval': 'train+validation', 'test': 'tes... |
class SuperbKS(SuperbProblem):
_cfg(**SuperbProblem.setup.default_except(corpus=dict(CLS=gsc_v1_for_superb, dataset_root='???'), train_datapipe=dict(CLS=UtteranceClassificationPipe, train_category_encoder=True, sox_effects=EFFECTS), train_sampler=dict(CLS=BalancedWeightedSampler, batch_size=32), valid_datapipe=dict... |
class Partition2(nn.Module):
LAYER_SCOPES = ['T5ForConditionalGeneration/T5Stack[encoder]/ModuleList[block]/T5Block[6]/ModuleList[layer]/T5LayerSelfAttention[0]/T5LayerNorm[layer_norm]', 'T5ForConditionalGeneration/T5Stack[encoder]/ModuleList[block]/T5Block[6]/ModuleList[layer]/T5LayerSelfAttention[0]/T5Attention[S... |
def clean_ad_nrt(df: Union[(pd.DataFrame, dd.DataFrame)], column: str, output_format: str='standard', split: bool=False, inplace: bool=False, errors: str='coerce', progress: bool=True) -> pd.DataFrame:
if (output_format not in {'compact', 'standard'}):
raise ValueError(f'output_format {output_format} is inv... |
def compute_mmd(samples1, samples2, kernel, is_hist=True, *args, **kwargs):
if is_hist:
samples1 = [(s1 / np.sum(s1)) for s1 in samples1]
samples2 = [(s2 / np.sum(s2)) for s2 in samples2]
return ((disc(samples1, samples1, kernel, *args, **kwargs) + disc(samples2, samples2, kernel, *args, **kwarg... |
def add_checkpoint_args(parser):
group = parser.add_argument_group('Checkpointing')
group.add_argument('--save-dir', metavar='DIR', default='checkpoints', help='path to save checkpoints')
group.add_argument('--restore-file', default='checkpoint_last.pt', help='filename from which to load checkpoint (default... |
def unit_to_english(u: str) -> str:
    """Expand a time-unit abbreviation ('ns'/'us'/'ms'/'s') to its English name.

    Raises KeyError for any other abbreviation.
    """
    abbreviations = {
        'ns': 'nanosecond',
        'us': 'microsecond',
        'ms': 'millisecond',
        's': 'second',
    }
    return abbreviations[u]
class ProbabilisticDistance(NumpyArrayMetric):
def __init__(self, metric: str='PROBDST'):
super().__init__(metric)
def calculate(self):
gt = self.reference.flatten().astype(np.int8)
seg = self.prediction.flatten().astype(np.int8)
probability_difference = np.absolute((gt - seg)).s... |
class TestBartlett():
    """Checks for ``windows.bartlett`` against hand-computed triangle values."""

    def test_basic(self):
        # (length, symmetric) -> expected window; the sym=False case is the
        # periodic window, i.e. the symmetric window of length M+1 truncated.
        cases = {
            (6, True): [0, 0.4, 0.8, 0.8, 0.4, 0],
            (7, True): [0, 1 / 3, 2 / 3, 1.0, 2 / 3, 1 / 3, 0],
            (6, False): [0, 1 / 3, 2 / 3, 1.0, 2 / 3, 1 / 3],
        }
        for (length, sym), expected in cases.items():
            assert_allclose(windows.bartlett(length, sym), expected)
class MetaNeXtBlock(nn.Module):
def __init__(self, dim, token_mixer=nn.Identity, norm_layer=nn.BatchNorm2d, mlp_layer=ConvMlp, mlp_ratio=4, act_layer=nn.GELU, ls_init_value=1e-06, drop_path=0.0):
super().__init__()
self.token_mixer = token_mixer(dim)
self.norm = norm_layer(dim)
self.... |
def sample_gaussian(mu, logvar):
    """Reparameterization trick: draw z ~ N(mu, diag(exp(logvar))) differentiably.

    Samples standard-normal noise of logvar's shape, scales it by the standard
    deviation exp(logvar / 2), and shifts by the mean.
    """
    epsilon = tf.random_normal(tf.shape(logvar), name='epsilon')
    sigma = tf.exp(0.5 * logvar)
    return mu + tf.multiply(sigma, epsilon)
class SurfaceClassifier_multiLoss(nn.Module):
def __init__(self, opt, filter_channels_2d, filter_channels_3d, filter_channels_joint):
super(SurfaceClassifier_multiLoss, self).__init__()
self.filters_2d = []
for idx in range(0, (len(filter_channels_2d) - 1)):
if (idx == 0):
... |
class RandomVariable_generic(Parent):
def __init__(self, X, RR):
if (not is_ProbabilitySpace(X)):
raise TypeError(('Argument X (= %s) must be a probability space' % X))
Parent.__init__(self, X)
self._codomain = RR
def probability_space(self):
return self.base()
de... |
_utils.test()
def test_nested():
x = ti.field(ti.i32)
y = ti.field(ti.i32)
n = 128
ti.root.dense(ti.i, (n // 4)).dense(ti.i, 4).place(x)
ti.root.dense(ti.i, n).place(y)
def fill():
for i in x:
x[i] = i
y[i] = (i * 2)
fill()
for i in range(n):
asser... |
class BucketizedColumnTransformer(CategoricalColumnTransformer):
def __init__(self, source_column, boundaries):
for i in six.moves.range((len(boundaries) - 1)):
assert (boundaries[i] < boundaries[(i + 1)]), 'Boundaries must be sorted in ascending order'
self.source_column = source_column... |
class LabelSanitizer(BaseEstimator, TransformerMixin):
def __init__(self, sanitize_labels):
self.sanitize_labels = sanitize_labels
def transform(self, X, corrections):
X = X.copy(deep=True)
if (not self.sanitize_labels):
print('Label sanization will be skipped.')
else... |
def retrieval_yr(var_cf_code, time, months, days, grid, area, lvllist, levtype, year, target):
import cdsapi
server = cdsapi.Client()
print('variable: {}'.format(var_cf_code))
print(year)
print('months: {}'.format(months))
print('days {}'.format(days))
if (levtype == 'sfc'):
server.r... |
class AutoEncoder(object):
def __init__(self, **kwargs):
params = {'nI': None, 'nH': 3, 'cf': 1, 'activation': 'tanh', 'optimizer': None, 'verbose': 0}
for (key, item) in kwargs.items():
params[key] = item
self.params = params
def create_model(self):
nI = self.params[... |
class JointProbabilityDistribution(DiscreteFactor):
def __init__(self, variables, cardinality, values):
if np.isclose(np.sum(values), 1):
super(JointProbabilityDistribution, self).__init__(variables, cardinality, values)
else:
raise ValueError("The probability values doesn't ... |
def analyze_sdfg(sdfg: SDFG, w_d_map: Dict[(str, sp.Expr)], analyze_tasklet, assumptions: [str], detailed_analysis: bool=False) -> None:
sdfg = deepcopy(sdfg)
pipeline = FixedPointPipeline([StrictSymbolSSA()])
pipeline.apply_pass(sdfg, {})
array_symbols = get_array_size_symbols(sdfg)
(equality_subs,... |
def prepare_urban_sound_8k(data_folder, audio_data_folder, save_json_train, save_json_valid, save_json_test, train_fold_nums=[1, 2, 3, 4, 5, 6, 7, 8], valid_fold_nums=[9], test_fold_nums=[10], skip_manifest_creation=False):
if (type(train_fold_nums) is int):
train_fold_nums = [train_fold_nums]
if (type(... |
def get_step_index(cfg, cur_epoch):
    """Return the index of the LR-schedule interval containing *cur_epoch*.

    ``cfg.SOLVER.STEPS`` is extended with ``MAX_EPOCH`` as a sentinel; the
    result is -1 while ``cur_epoch`` is below the first boundary.
    """
    boundaries = cfg.SOLVER.STEPS + [cfg.SOLVER.MAX_EPOCH]
    for ind, boundary in enumerate(boundaries):
        if cur_epoch < boundary:
            break
    return ind - 1
def apply_hooks(operation: APIOperation, context: HookContext, hooks: (HookDispatcher | None), strategy: st.SearchStrategy, location: str) -> st.SearchStrategy:
    """Run every applicable hook dispatcher over *strategy* for the given location.

    The location name is translated to its container name via
    LOCATION_TO_CONTAINER before dispatching.
    """
    return apply_to_all_dispatchers(operation, context, hooks, strategy, LOCATION_TO_CONTAINER[location])
def index_num_in_tokenized_utterance(tokenized_utterance, ent_mask=None):
tk_list = tokenized_utterance.split()
if (ent_mask is None):
ent_mask = ([False] * len(tk_list))
assert (len(tk_list) == len(ent_mask))
num2idxs = {}
for (_idx_t, _tk) in enumerate(tk_list):
if ent_mask[_idx_t]... |
def get_config_single(config_path: str, overwrites: str=None) -> Dict[(str, any)]:
config_path_yaml = config_path
if (not config_path.endswith('config.yaml')):
config_path_yaml = os.path.join(config_path, 'config.yaml')
if ((not os.path.exists(config_path_yaml)) and (not os.path.isabs(config_path)))... |
_utils.test()
def test_3d():
x = ti.field(ti.f32, shape=(16, 32, 64))
def func():
for (i, j, k) in ti.ndrange((4, 10), (3, 8), 17):
x[(i, j, k)] = ((i + (j * 10)) + (k * 100))
func()
for i in range(16):
for j in range(32):
for k in range(64):
if ((... |
def make_batch_bert(sessions):
(batch_input, batch_labels) = ([], [])
for session in sessions:
data = session[0]
label_list = session[1]
(context_speaker, context, emotion, sentiment) = data
now_speaker = context_speaker[(- 1)]
speaker_utt_list = []
inputString = ... |
def get_visible_commands_starting_with(ctx, starts_with):
    """Yield the non-hidden subcommands of *ctx* whose name begins with *starts_with*."""
    for name in ctx.command.list_commands(ctx):
        if not name.startswith(starts_with):
            continue
        cmd = ctx.command.get_command(ctx, name)
        if not cmd.hidden:
            yield cmd
def add_model_args(parser):
group = parser.add_argument_group('Model configuration')
group.add_argument('--arch', '-a', default='fconv', metavar='ARCH', required=True, choices=ARCH_MODEL_REGISTRY.keys(), help='Model Architecture')
group.add_argument('--criterion', default='cross_entropy', metavar='CRIT', ch... |
.mpl_image_compare
def test_random_summary_bar_with_data():
    """Image-comparison test: SHAP bar-type summary plot on random data."""
    np.random.seed(0)
    figure = plt.figure()
    # Two draws from the seeded global RNG: SHAP values first, then features
    # (same order as the original positional arguments).
    shap_values = np.random.randn(20, 5)
    features = np.random.randn(20, 5)
    shap.summary_plot(shap_values, features, plot_type='bar', show=False)
    figure.set_layout_engine('tight')
    return figure
class UniDaTrainer(DefaultTrainer):
def __init__(self, cfg):
super().__init__(cfg)
(self.source_data_loader, self.target_data_loader, self.test_data_loader, self.val_data_loader) = self.build_data_loaders(cfg)
self.evaluator = self.build_evaluator(cfg)
self.max_iter = cfg.max_iter
... |
class PostProcessMentionEntityCounts(PipelineJob):
def __init__(self, preprocess_jobs: Dict[(str, PipelineJob)], opts):
super().__init__(requires=[f'data/versions/{opts.data_version_name}/indexes/mention_entity_counter.pickle', f'data/versions/{opts.data_version_name}/indexes/entity_counter.pickle', f'data/... |
def prior(lower_bound=(- 10.0), upper_bound=10.0, D=2, rng=None):
    """Draw one D-dimensional sample from Uniform(lower_bound, upper_bound).

    A fresh ``numpy.random.default_rng()`` is created when *rng* is None.
    """
    generator = np.random.default_rng() if rng is None else rng
    return generator.uniform(low=lower_bound, high=upper_bound, size=D)
def load_vocab(vocab_file):
    """Load a vocabulary file into an OrderedDict mapping token -> line index.

    Each line of *vocab_file* (UTF-8) is one token; its 0-based line number
    becomes its id. Insertion order follows file order.

    Improvement over the original: the file is streamed line by line with
    ``enumerate`` instead of materializing all lines via ``readlines()``.
    """
    vocab = collections.OrderedDict()
    with open(vocab_file, 'r', encoding='utf-8') as reader:
        for index, token in enumerate(reader):
            # Only the trailing newline is stripped, matching the original
            # behavior (other whitespace in the token is preserved).
            vocab[token.rstrip('\n')] = index
    return vocab
class TestFromString(object):
def test_floating(self):
fsingle = np.single('1.234')
fdouble = np.double('1.234')
flongdouble = np.longdouble('1.234')
assert_almost_equal(fsingle, 1.234)
assert_almost_equal(fdouble, 1.234)
assert_almost_equal(flongdouble, 1.234)
de... |
def get_command(id_):
os.environ['DEBUG'] = os.environ.get('DEBUG', 'false')
os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'
commands_dict = {}
tokens_bsz = 16384
num_gpus = 8
accum_steps = 1
folder_suffix_params = ['max_source_length', 'gradient_accumulation_steps', 'learning_rate', 'train_m... |
class VideoKeyframeDataset(Dataset):
_EMPTY_FRAMES = torch.empty((0, 3, 1, 1))
def __init__(self, video_list: List[str], frame_selector: Optional[FrameSelector]=None, transform: Optional[FrameTransform]=None):
self.video_list = video_list
self.frame_selector = frame_selector
self.transfo... |
def load_examples_copa_rev(path):
root = ET.parse(path).getroot()
examples_copa = []
for type_tag in root.findall('item'):
value = type_tag.get('most-plausible-alternative')
asks_for = type_tag.get('asks-for')
children = list(type_tag)
p = (children[0].text[:1].lower() + chil... |
# NOTE(review): the line below looks like a garbled decorator -- presumably
# '@compare_numpy_output(check_dtype=True)' or similar from the dace test
# helpers; confirm against the original test suite.
_numpy_output(check_dtype=True)
def test_ufunc_invert_f(A: dace.float32[10]):
    # Applies NumPy's bitwise-NOT ufunc to a float32 array; plain NumPy rejects
    # floats for np.invert, so this presumably checks that dace mirrors NumPy's
    # behavior (including errors) -- TODO confirm what the decorator asserts.
    return np.invert(A)
('categorical_accuracy')
class CategoricalAccuracy(Metric):
def __init__(self, top_k: int=1) -> None:
self._top_k = top_k
self.correct_count = 0.0
self.total_count = 0.0
def __call__(self, predictions: torch.Tensor, gold_labels: torch.Tensor, mask: Optional[torch.Tensor]=None):
(... |
def compare_headers(request, serialized):
    """Assert that the request's headers match the serialized name -> [values] mapping.

    Rebuilds an HTTPHeaderDict from *serialized* (each name may carry multiple
    values) and compares the joined value per name against ``request.headers``.

    NOTE(review): the source's indentation was flattened in extraction; the
    assert is placed inside the per-name loop here, which matches the usual
    shape of this test helper -- confirm against the original file.
    """
    headers = HTTPHeaderDict()
    for (name, value) in serialized.items():
        # Each serialized entry is a list of values for the same header name.
        for sub in value:
            headers.add(name, sub)
        # HTTPHeaderDict joins repeated values, so indexing by name compares
        # the combined header value on both sides.
        assert (request.headers[name] == headers[name])
class FileOperator(object):
def __init__(self, dry_run=False):
self.dry_run = dry_run
self.ensured = set()
self._init_record()
def _init_record(self):
self.record = False
self.files_written = set()
self.dirs_created = set()
def record_as_written(self, path):
... |
class FFN(nn.Module):
    """Position-wise feed-forward block: an MLP projecting hidden -> FF -> hidden."""

    def __init__(self, __C):
        super(FFN, self).__init__()
        # Hidden/FF sizes and dropout rate come from the shared config object.
        self.mlp = MLP(
            in_size=__C.HIDDEN_SIZE,
            mid_size=__C.FF_SIZE,
            out_size=__C.HIDDEN_SIZE,
            dropout_r=__C.DROPOUT_R,
            use_relu=True,
        )

    def forward(self, x):
        return self.mlp(x)
class IdentificationClassificationModelOutput(ModelOutput):
    """Output container for a joint span-identification / classification model.

    Extends ModelOutput with the overall loss, its two named components, and
    the logits of both heads.
    """

    # Overall training loss (None at inference); presumably combines loss_cls
    # and loss_span -- confirm in the model's forward().
    loss: Optional[torch.FloatTensor] = None
    # Classification-head component of the loss.
    loss_cls: Optional[torch.FloatTensor] = None
    # Span-identification component of the loss.
    loss_span: Optional[torch.FloatTensor] = None
    # Logits produced by the classification head.
    class_logits: torch.FloatTensor = None
    # Logits produced by the span-identification head.
    span_logits: torch.FloatTensor = None
def split_files(org_dir, split_dir, short_name=None, train_size=0.7, dev_size=0.15, rotation=None):
os.makedirs(split_dir, exist_ok=True)
if ((train_size + dev_size) >= 1.0):
print('Not making a test slice with the given ratios: train {} dev {}'.format(train_size, dev_size))
file_names = create_shuf... |
def read_pretrain_eval_data(pretrain_data_dir):
all_valid_files = [f for f in os.listdir(pretrain_data_dir) if f.endswith('_valid.jsonl')]
languages = [f[:(- 12)] for f in all_valid_files]
print(f'Found Languages : {languages}')
examples_dict = {}
for lang in languages:
fp = open(os.path.joi... |
def simulator(theta, n_obs=4, flatten=True, rng=None):
if (rng is None):
rng = np.random.default_rng()
loc = np.array([theta[0], theta[1]])
s1 = (theta[2] ** 2)
s2 = (theta[3] ** 2)
rho = np.tanh(theta[4])
cov = ((rho * s1) * s2)
S_theta = np.array([[(s1 ** 2), cov], [cov, (s2 ** 2)]... |
def extract_sentence_transformer_embedding(sentence_transformer, utterances, intent):
    """Encode *utterances* and pair the result with one *intent* label per row.

    Returns a tuple of (embedding tensor with one row per utterance, list of
    identical intent labels of the same length).
    """
    embedding = sentence_transformer.encode(utterances, convert_to_tensor=True)
    num_rows = embedding.shape[0]
    return embedding, [intent] * num_rows
class Partition10(nn.Module):
LAYER_SCOPES = ['T5ForConditionalGeneration/T5Stack[decoder]/ModuleList[block]/T5Block[6]/ModuleList[layer]/T5LayerSelfAttention[0]/T5LayerNorm[layer_norm]', 'T5ForConditionalGeneration/T5Stack[decoder]/ModuleList[block]/T5Block[6]/ModuleList[layer]/T5LayerSelfAttention[0]/T5Attention[... |
def aggregate_emb_scores(q_ids_w_emb: dict):
    """Collapse each id's (embedding, weight) pairs into one weighted-sum embedding.

    For every key, the associated list of (embedding, weight) tuples is reduced
    to ``np.dot(weights, embeddings)``, i.e. the weighted sum of embeddings.
    """
    aggregated = {}
    for qid, pairs in q_ids_w_emb.items():
        embeddings = [pair[0] for pair in pairs]
        weights = [pair[1] for pair in pairs]
        aggregated[qid] = np.dot(weights, embeddings)
    return aggregated
def hf_preprocess_encodings(src: Dict[(str, List)]) -> Dict[(str, List)]:
    """Preprocess src['audio_encoding'] in place (using its stored shape) and return src."""
    src['audio_encoding'] = preprocess_encodings(src['audio_encoding'], src['audio_encoding_shape'])
    return src
class IncNpzFile():
def __init__(self, file: str):
self.fn = file
self.zip = zipfile.ZipFile(file, mode='a', compression=zipfile.ZIP_DEFLATED)
self.keys = set()
def __setitem__(self, key: str, data) -> None:
if (key in self.keys):
return
self.keys.add(key)
... |
class MLP(nn.Module):
hidden_dims: Sequence[int]
activations: Callable[([jnp.ndarray], jnp.ndarray)] = nn.relu
activate_final: int = False
kernel_init: Callable[([PRNGKey, Shape, Dtype], Array)] = default_init()
def setup(self):
self.layers = [nn.Dense(size, kernel_init=self.kernel_init) for... |
def check_jieba():
    """Verify that the optional jieba dependency is importable.

    Returns True when available; raises ImportError with installation guidance
    otherwise.
    """
    try:
        import jieba  # noqa: F401 -- imported only to probe availability
    except ImportError:
        raise ImportError('Jieba is used but not installed on your machine. Go to for installation instructions.')
    else:
        return True
def _save(im, fp, filename):
if (im.mode != '1'):
raise OSError(('cannot write mode %s as XBM' % im.mode))
fp.write(('#define im_width %d\n' % im.size[0]).encode('ascii'))
fp.write(('#define im_height %d\n' % im.size[1]).encode('ascii'))
hotspot = im.encoderinfo.get('hotspot')
if hotspot:
... |
class Quantization(nn.Module):
def __init__(self, emb_size: int=768, subvector_num: int=96, subvector_bits: int=8, rotate: np.ndarray=None, codebook: np.ndarray=None):
super(Quantization, self).__init__()
if (codebook is not None):
self.codebook = nn.Parameter(torch.FloatTensor(codebook)... |
class Network():
def __init__(self, name: str=None, func_name: Any=None, **static_kwargs):
tfutil.assert_tf_initialized()
assert (isinstance(name, str) or (name is None))
assert (func_name is not None)
assert (isinstance(func_name, str) or util.is_top_level_function(func_name))
... |
def patch_blendmask(cfg, model, output_names):
def forward(self, tensor):
images = None
gt_instances = None
basis_sem = None
features = self.backbone(tensor)
(basis_out, basis_losses) = self.basis_module(features, basis_sem)
(proposals, proposal_losses) = self.proposa... |
class DegenerateCH4Tests(unittest.TestCase):
def setUpClass(cls):
cls.degenerate_CH4_manifold = load_degenerate_CH4_manifold()
def test_load_degenerate_CH4_manifold_power_spectrum_shape(self):
self.assertTrue((self.degenerate_CH4_manifold.data.SOAP_power_spectrum.shape == (162, 12)))
def tes... |
def test_argsort():
array = ak.Array(['one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight'])
assert (ak.operations.argsort(array, axis=(- 1)).to_list() == [7, 4, 3, 0, 6, 5, 2, 1])
array = ak.Array([['twotwo', 'two', 'three'], ['four', 'five'], [], ['six', 'seven', 'eight']])
assert (ak.operat... |
class GraphSAGE():
def __init__(self, layer_sizes, generator=None, aggregator=None, bias=True, dropout=0.0, normalize='l2', activations=None, kernel_initializer='glorot_uniform', kernel_regularizer=None, kernel_constraint=None, bias_initializer='zeros', bias_regularizer=None, bias_constraint=None, n_samples=None, i... |
def tf_efficientnet_b3(pretrained=False, **kwargs):
    """Build EfficientNet-B3 with TensorFlow-port defaults.

    Forces the TF BatchNorm epsilon and 'same' padding, then delegates to
    _gen_efficientnet with the B3 width/depth multipliers.
    """
    kwargs.update(bn_eps=BN_EPS_TF_DEFAULT, pad_type='same')
    return _gen_efficientnet(
        'tf_efficientnet_b3',
        channel_multiplier=1.2,
        depth_multiplier=1.4,
        pretrained=pretrained,
        **kwargs,
    )
def register_Ns3EpcX2SapSwitchConnectionParams_methods(root_module, cls):
cls.add_constructor([])
cls.add_constructor([param('ns3::EpcX2Sap::SwitchConnectionParams const &', 'arg0')])
cls.add_instance_attribute('drbid', 'uint8_t', is_const=False)
cls.add_instance_attribute('mmWaveCellId', 'uint16_t', is... |
class _MemoryEfficientFP16OptimizerMixin(object):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def has_flat_params(self):
return False
def state_dict(self):
state_dict = self.wrapped_optimizer.state_dict()
state_dict['loss_scale'] = self.scaler.loss_... |
def _create_test(bench_op_obj, orig_test_attrs, tags, OperatorTestCase, run_backward, bwd_input):
test_attrs = copy.deepcopy(orig_test_attrs)
test_attrs = {k: str(v) for (k, v) in test_attrs.items()}
ascii_test_attrs = ast.literal_eval(json.dumps(test_attrs))
input_config = str(ascii_test_attrs)[1:(- 1)... |
def test_rpad_and_clip_listoffset_array():
content = ak.contents.numpyarray.NumpyArray(np.array([0.0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9]))
offsets = ak.index.Index64(np.array([0, 3, 3, 5, 6, 10, 10]))
listoffsetarray = ak.contents.listoffsetarray.ListOffsetArray(offsets, content)
assert (to_li... |
class GeneratorDynamicItem(DynamicItem):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.current_generator = None
self.num_provided_items = 0
def __call__(self, *args):
if (self.num_provided_items == len(self.provides)):
raise RuntimeError(... |
def FloatSingle(ctx=None):
    """Return the Z3 single-precision (32-bit) floating-point sort.

    Uses the current global context when *ctx* is None (resolved via _get_ctx).
    """
    context = _get_ctx(ctx)
    return FPSortRef(Z3_mk_fpa_sort_single(context.ref()), context)
def use_cuda(enabled, device_id=0):
    """If *enabled*, assert CUDA is available and select *device_id* as current device.

    A no-op when *enabled* is falsy.
    """
    if not enabled:
        return
    assert torch.cuda.is_available(), 'CUDA is not available'
    torch.cuda.set_device(device_id)
_experiment
def ppo_garage_pytorch(ctxt, env_id, seed):
deterministic.set_seed(seed)
runner = LocalRunner(ctxt)
env = GarageEnv(normalize(gym.make(env_id)))
policy = PyTorch_GMP(env.spec, hidden_sizes=(32, 32), hidden_nonlinearity=torch.tanh, output_nonlinearity=None)
value_function = GaussianMLPVal... |
def test_parameter_file_load_save_using_global():
module_creator = ModuleCreator(TSTNetNormal(), [(4, 3, 32, 32), (4, 3, 32, 32)])
proto_variable_inputs = module_creator.get_proto_variable_inputs()
outputs = module_creator.module(*proto_variable_inputs)
g = nn.graph_def.get_default_graph_by_variable(out... |
def load_images_from_directory(names, rootdir, sources=None, standardize=False):
images = {}
if (sources is not None):
for (source, name) in zip(sources, names):
path = (os.path.join(rootdir, source, name) + '.*')
path = glob.glob(path)[0]
im = load_image(path, standa... |
def test_RecordArray_NumpyArray_lazy():
v2a = ak.contents.recordarray.RecordArray([ak.contents.numpyarray.NumpyArray(np.array([0, 1, 2, 3, 4], np.int64)), ak.contents.numpyarray.NumpyArray(np.array([0.0, 1.1, 2.2, 3.3, 4.4, 5.5]))], ['x', 'y'])
resultv2 = v2a._carry(ak.index.Index(np.array([1, 2], np.int64)), T... |
def test_dimension_optiontype():
content = ak.contents.NumpyArray(np.array(primes[:((2 * 3) * 5)], dtype=np.int64))
offsets1 = ak.index.Index64(np.array([0, 5, 10, 15, 20, 25, 30], dtype=np.int64))
listoffsetarray = ak.contents.ListOffsetArray(offsets1, content)
index = ak.index.Index64(np.array([5, (- ... |
def _rec_unstack(source: Tensor, *, axis: Dim, declare_rec_time: bool=NotSpecified, name: Optional[Union[(str, rfl.Layer)]]=None) -> Tensor:
if (not isinstance(source, Tensor)):
raise TypeError(f'rec_unstack: unexpected type for source {source!r}, need tensor')
args = {'axis': axis, 'declare_rec_time': ... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.