code stringlengths 101 5.91M |
|---|
def ref_convolution(x, w, b, base_axis, pad, stride, dilation, group, quantize_zero_to):
y = []
for xx in x.reshape((((- 1),) + x.shape[base_axis:])):
y += [refs.convolution_2d(xx, w, b, pad, stride, dilation, group)[np.newaxis]]
y = np.vstack(y)
return y.reshape((x.shape[:base_axis] + y.shape[1... |
class DPGradientDescentOptimizer(tf.train.GradientDescentOptimizer):
def __init__(self, learning_rate, eps_delta, sanitizer, sigma=None, use_locking=False, name='DPGradientDescent', batches_per_lot=1):
super(DPGradientDescentOptimizer, self).__init__(learning_rate, use_locking, name)
self._batches_p... |
class SourceCopyVocabulary():
def __init__(self, sentence, pad_token=DEFAULT_PADDING_TOKEN, unk_token=DEFAULT_OOV_TOKEN):
if (type(sentence) is not list):
sentence = sentence.split(' ')
self.src_tokens = sentence
self.pad_token = pad_token
self.unk_token = unk_token
... |
class TestAppendpath(object):
def test_1(self):
assert_equal(appendpath('prefix', 'name'), join('prefix', 'name'))
assert_equal(appendpath('/prefix', 'name'), ajoin('prefix', 'name'))
assert_equal(appendpath('/prefix', '/name'), ajoin('prefix', 'name'))
assert_equal(appendpath('prefi... |
def compute_is(opts, num_gen, num_splits):
detector_url = '
detector_kwargs = dict(no_output_bias=True)
gen_probs = metric_utils.compute_feature_stats_for_generator(opts=opts, detector_url=detector_url, detector_kwargs=detector_kwargs, capture_all=True, max_items=num_gen).get_all()
if (opts.rank != 0):
... |
_method
class ToricLattice_ambient(ToricLattice_generic, FreeModule_ambient_pid):
Element = ToricLatticeElement
def __init__(self, rank, name, dual_name, latex_name, latex_dual_name):
super().__init__(ZZ, rank)
self._name = name
self._dual_name = dual_name
self._latex_name = late... |
def test_nested_for_map_for_loop_with_tasklet():
ref = np.zeros([10, 10, 10], dtype=np.int64)
for i in range(10):
for j in range(10):
for k in range(10):
ref[(i, j, k)] = (((i * 100) + (j * 10)) + k)
val = nested_for_map_for_loop_with_tasklet()
assert np.array_equal(v... |
def create_supervised_evaluator(model, metrics, device=None):
def _inference(engine, batch):
model.eval()
with torch.no_grad():
(data, pids, camids) = batch
data = (data.to(device) if (torch.cuda.device_count() >= 1) else data)
feat = model(data)
retur... |
def amsterdam_typical_train(listener=False):
data = (([('light purple', 0, [(260.0, 45.0, 100.0), (260.0, 100.0, 100.0)]), ('purple', 0, [(260.0, 45.0, 100.0), (260.0, 100.0, 100.0)]), ('purple', 1, [(260.0, 45.0, 100.0), (260.0, 100.0, 100.0)]), ('purple', 1, [(260.0, 45.0, 100.0), (260.0, 100.0, 100.0)]), ('light... |
def hough_line(image, theta=None):
    """Compute the straight-line Hough transform of a 2D image.

    Parameters
    ----------
    image : ndarray
        Two-dimensional input image.
    theta : ndarray, optional
        Angles (radians) at which to evaluate the transform.  When omitted,
        180 evenly spaced angles covering [-pi/2, pi/2) are used.

    Raises
    ------
    ValueError
        If ``image`` is not two-dimensional.
    """
    if image.ndim != 2:
        raise ValueError('The input image `image` must be 2D.')
    if theta is None:
        # Default angular sampling: half-open interval [-pi/2, pi/2).
        theta = np.linspace(-np.pi / 2, np.pi / 2, 180, endpoint=False)
    return _hough_line(image, theta=theta)
def initial_or_load(checkpoint_path_load, model, optimizer, dir):
if os.path.exists(checkpoint_path_load):
model_loaded_str = '******model is loaded'
print(model_loaded_str)
with open((dir + 'whole_log.txt'), 'a') as f:
print(model_loaded_str, file=f)
checkpoint = load_mo... |
class CTypeDefNode(StatNode):
child_attrs = ['base_type', 'declarator']
def analyse_declarations(self, env):
base = self.base_type.analyse(env)
(name_declarator, type) = self.declarator.analyse(base, env, visibility=self.visibility, in_pxd=self.in_pxd)
name = name_declarator.name
... |
def get_overview_paragraphs(overview_links, specific_summary_dir):
for (index, (overview, name)) in enumerate(overview_links):
print(name, overview)
try:
soup = BeautifulSoup(urllib.request.urlopen(overview), 'html.parser')
overview_data = soup.find('td', {'class': 'TextObjec... |
def load_weights_add_extra_dim(target, source_state, extra_dim=1):
new_dict = OrderedDict()
for (k1, v1) in target.state_dict().items():
if (not ('num_batches_tracked' in k1)):
if (k1 in source_state):
tar_v = source_state[k1]
if (v1.shape != tar_v.shape):
... |
class FinegrainedDecoder(nn.Module):
def __init__(self, hidden_channels, drop_path, max_q_thw, max_kv_thw):
super().__init__()
self.hidden_channels = hidden_channels
self.max_q_thw = max_q_thw
self.max_kv_thw = max_kv_thw
self.v3ds = nn.Sequential(nn.Conv3d(hidden_channels, h... |
def setup(app):
app.connect('autodoc-process-docstring', process_docstring_cython)
app.connect('autodoc-process-docstring', process_directives)
app.connect('autodoc-process-docstring', process_docstring_module_title)
app.connect('autodoc-process-docstring', process_dollars)
app.connect('autodoc-proc... |
def extract_audio_feature_birds(model, audio_switch, **kwargs):
root = os.getcwd()
for split in ('train', 'test'):
split_json = json.load(open(os.path.join(root, './data/birds/{}.json'.format(split)), 'r'))
split_filenames = []
for _d in split_json['data']:
split_filenames +=... |
def verify_ninja_availability():
    """Raise ``RuntimeError`` if the ``ninja`` build tool is not available.

    Runs ``ninja --version`` with stdout discarded.  An ``OSError`` (e.g.
    ``FileNotFoundError`` when the binary is missing from PATH) is translated
    into a ``RuntimeError`` with the original error chained as its cause, so
    the underlying reason is preserved in the traceback.
    """
    try:
        # subprocess.DEVNULL avoids manually opening/closing os.devnull.
        subprocess.check_call(['ninja', '--version'], stdout=subprocess.DEVNULL)
    except OSError as e:
        raise RuntimeError('Ninja is required to load C++ extensions') from e
def extract_requests_exception_details(exc: RequestException) -> tuple[(str, list[str])]:
from requests.exceptions import SSLError, ConnectionError, ChunkedEncodingError
from urllib3.exceptions import MaxRetryError
if isinstance(exc, SSLError):
message = 'SSL verification problem'
reason = s... |
def test_parsing_context():
    """Verify dace.in_program(): False under plain Python, True when compiled."""
    def func(a):
        # Branch depends on whether we execute inside a compiled dace program.
        if dace.in_program():
            a[:] = 1
        else:
            a[:] = 2

    first = np.random.rand(10)
    second = np.random.rand(10)
    func(first)                   # interpreted call -> else branch -> 2
    dace.program(func)(second)    # compiled call -> if branch -> 1
    assert np.allclose(first, 2)
    assert np.allclose(second, 1)
def load_fields(train, valid, checkpoint):
fields = onmt.IO.ONMTDataset.load_fields(torch.load((opt.data + '.vocab.pt')))
fields = dict([(k, f) for (k, f) in fields.items() if (k in train.examples[0].__dict__)])
train.fields = fields
valid.fields = fields
if opt.train_from:
print(('Loading v... |
def unpack_vcs_link(link, location):
    """Check out the repository referenced by a VCS *link* into *location*."""
    _get_used_vcs_backend(link).unpack(location)
_json
class InProgressCacheMetrics():
    # Progress counters for a cache build that is still in flight.
    rows_finished: int = 0      # rows completed so far
    chunks_finished: int = 0    # chunks completed so far
    shards_finished: int = 0    # shards completed so far
    # Per-field tallies; presumably keyed by field name — confirm with callers.
    field_counts: Dict[(str, int)] = dataclasses.field(default_factory=dict)
    is_finished: bool = False   # flipped once the entire cache is done
class Thin_Model(object):
def __init__(self, mode, images, labels):
self.mode = mode
self._build_model(images, labels)
def add_internal_summaries(self):
pass
def _stride_arr(self, stride):
return [1, stride, stride, 1]
def _build_model(self, images, labels, var_scope_str=... |
class SimpleReplayBuffer(ReplayBuffer):
def sample(self, batch_size):
assert (self._n_transitions_stored >= batch_size)
buffer = {}
for key in self._buffer.keys():
buffer[key] = self._buffer[key][:self._current_size]
time_horizon = buffer['action'].shape[1]
rollou... |
def load_tf_weights_in_mobilenet_v1(model, config, tf_checkpoint_path):
try:
import numpy as np
import tensorflow as tf
except ImportError:
logger.error('Loading a TensorFlow models in PyTorch, requires TensorFlow to be installed. Please see for installation instructions.')
rais... |
_toolkit()
class Spokeo(FunctionToolkit):
name_for_human = 'Spokeo'
description_for_human = 'Toolkit for searching and retrieving personal data from various sources.'
name_for_model = 'Spokeo'
description_for_model = 'A people search engine that provides access to personal data from public records, soci... |
def load_dependency_tree(parents):
trees = []
root = None
size = len(parents)
for i in xrange(size):
trees.append(None)
for i in xrange(size):
if (not trees[i]):
idx = i
prev = None
prev_idx = None
while True:
tree = Dep... |
class CoverageArchive(Archive):
_logger = logging.getLogger(__name__)
def __init__(self, objectives: OrderedSet[ff.TestCaseFitnessFunction]) -> None:
super().__init__()
self._covered: dict[(ff.TestCaseFitnessFunction, tcc.TestCaseChromosome)] = {}
self._uncovered = OrderedSet(objectives)... |
def validate_country(x: Union[(str, int, pd.Series)], input_format: Union[(str, Tuple[(str, ...)])]='auto', strict: bool=True) -> Union[(bool, pd.Series)]:
input_formats = _input_format_to_tuple(input_format)
if isinstance(x, pd.Series):
x = x.astype(str).str.lower().str.strip()
return x.apply(_... |
def get_model_dir(args):
model_subdir = get_model_subdir(args)
model_dir = os.path.join(args.model_root_dir, model_subdir)
args.model_dir = model_dir
if (not os.path.exists(model_dir)):
os.makedirs(model_dir)
print('Model directory created: {}'.format(model_dir))
else:
print(... |
class LossParameter(_message.Message):
    # Auto-generated protocol-buffer message stub (legacy protobuf API):
    # the metaclass synthesizes all fields and methods from DESCRIPTOR.
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _LOSSPARAMETER  # compiled descriptor for LossParameter
def test_ListArray_IndexedOptionArray_RecordArray_NumpyArray():
index = ak.index.Index64(np.asarray([0, (- 1), 1, (- 1), 4, (- 1), 5]))
content = ak.contents.recordarray.RecordArray([ak.contents.numpyarray.NumpyArray(np.array([6.6, 1.1, 2.2, 3.3, 4.4, 5.5, 7.7]))], ['nest'])
indexoptionarray = ak.contents.I... |
def parse_order_by(toks, start_idx, tables_with_alias, schema, default_tables):
idx = start_idx
len_ = len(toks)
val_units = []
order_type = 'asc'
if ((idx >= len_) or (toks[idx] != 'order')):
return (idx, val_units)
idx += 1
assert (toks[idx] == 'by')
idx += 1
while ((idx < ... |
def wrap_stacked_recurrent(recurrent_func, num_layers=1, residual=False, weight_norm=False):
def f(*kargs, **kwargs):
module = StackedRecurrent(residual)
for i in range(num_layers):
rnn = recurrent_func(*kargs, **kwargs)
if weight_norm:
rnn = wn(rnn)
... |
class Macaulay2(ExtraTabCompletion, Expect):
def __init__(self, maxread=None, script_subdirectory=None, logfile=None, server=None, server_tmpdir=None, command=None):
if (command is None):
command = (os.getenv('SAGE_MACAULAY2_COMMAND') or 'M2')
init_str = ((('sageLoadMode = false;ZZ#{Stan... |
def from_parquet(source):
file = pyarrow.parquet.ParquetFile(source)
form = _parquet_schema_to_form(file.schema)
all_columns = form.keys()
columns = all_columns
length = file.metadata.row_group(0).num_rows
cache = {}
hold_cache = ak._util.MappingProxy.maybe_wrap(cache)
lazy_cache = ak.la... |
def get_overload_no_implementation_error_message(kind, obj):
(sourcelines, file_lineno, filename) = get_source_lines_and_file(obj)
return (((f'''Implementation for the {kind} "{_qualified_name(obj)}" is missing. Please make sure a definition is provided and defined after all overload declarations.
File "{filena... |
class Hardtanh(Module):
def __init__(self, min_val=(- 1), max_val=1, inplace=False, min_value=None, max_value=None):
super(Hardtanh, self).__init__()
if (min_value is not None):
warnings.warn('keyword argument min_value is deprecated and renamed to min_val')
min_val = min_val... |
def integer_floor(x):
try:
return ZZ(x.floor())
except AttributeError:
try:
return ZZ(math.floor(float(x)))
except TypeError:
pass
raise NotImplementedError(('computation of floor of %s not implemented' % x)) |
.experimental
.parametrize('pad_columns, padding_value, array_size', [(['item_id', 'timestamp'], 0, 5)])
.parametrize('dataset, result', [pytest.param('dataframe', 'dataframe_two_columns_zeros'), pytest.param('dataframe_pandas', 'dataframe_two_columns_zeros_pandas')])
def test_padder_two_columns_same_value(pad_columns,... |
class LoadDefault(Load):
    def __call__(self, file_name: str, id_: str, category: str, subject_id: str) -> typing.Tuple[(np.ndarray, typing.Union[(conv.ImageProperties, None)])]:
        """Load an image file via SimpleITK.

        Returns the voxel array together with an ImageProperties object
        capturing the image's spatial metadata.  The id_, category and
        subject_id arguments are accepted for interface compatibility
        with Load but are not used here.
        """
        img = sitk.ReadImage(file_name)
        return (sitk.GetArrayFromImage(img), conv.ImageProperties(img))
.gpu
def test_dynamic_maps():
W = dace.symbol('W')
H = dace.symbol('H')
nnz = dace.symbol('nnz')
(dace.uint32[(H + 1)], dace.uint32[nnz], dace.float32[nnz], dace.float32[W], dace.float32[H], dace.float32[H])
def spmv_2x(A_row, A_col, A_val, x, b, c):
for i in range(H):
row_start ... |
def read_wiki_file(filename):
with open(filename) as fin:
lines = fin.readlines()
docs = []
current_doc = []
line_iterator = iter(lines)
line = next(line_iterator, None)
while (line is not None):
if line.startswith('<doc'):
line = next(line_iterator, None)
eli... |
def register_types(module):
root_module = module.get_root()
module.add_class('AttributeConstructionList', import_from_module='ns.core')
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
typehandlers.add_type_alias(u'std::list< ns3::Attribut... |
def register_Ns3EpcS11SapUli_methods(root_module, cls):
    # Auto-generated PyBindGen registration for ns3::EpcS11Sap::Uli:
    # default constructor, copy constructor, and the mutable 'gci' field.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::EpcS11Sap::Uli const &', 'arg0')])
    cls.add_instance_attribute('gci', 'uint16_t', is_const=False)
    return
def load(model_name, model_path, device):
(_, base_name, variant) = model_name.split('-')
model = eval(base_name)(variant)
model.load_state_dict(torch.load(model_path, map_location='cpu'))
model = model.to(device)
model.eval()
transform = T.Compose([T.Resize((224, 224)), T.ToTensor(), T.Normaliz... |
def test_predict_proba_right_class():
n_test_samples = 200
(X, y) = make_classification(n_samples=1000)
(X_test, y_test) = make_classification(n_samples=n_test_samples)
pool = RandomForestClassifier(max_depth=3).fit(X, y)
oracle = Oracle(pool_classifiers=pool).fit(X, y)
preds = oracle.predict(X_... |
def check_model_list_for_task(task_guide, overwrite=False):
(current_list, start_index, end_index, lines) = _find_text_in_file(filename=os.path.join(PATH_TO_TASK_GUIDES, task_guide), start_prompt='<!--This tip is automatically generated by `make fix-copies`, do not fill manually!-->', end_prompt='<!--End of the gen... |
def diff_at_j_is_i(xs, j, i):
    """Boolean formula asserting rows j and j+1 differ by exactly i.

    ``xs[j][k]`` encodes "row j takes value k"; the result is the
    disjunction over all in-range pairs (k, k-i) and (k, k+i).
    """
    assert 0 <= j and j + 1 < len(xs)
    assert 1 <= i < len(xs)
    downward = [And(xs[j][k], xs[j + 1][k - i]) for k in range(i, len(xs))]
    upward = [And(xs[j][k], xs[j + 1][k + i]) for k in range(len(xs) - i)]
    return Or(downward + upward)
def uncorrelated_entropy(traj, normalize=False, show_progress=True):
column_name = sys._getframe().f_code.co_name
if normalize:
column_name = ('norm_%s' % sys._getframe().f_code.co_name)
if (constants.UID not in traj.columns):
return pd.DataFrame([_uncorrelated_entropy_individual(traj)], col... |
def test_get_value_for_tag_returns_none_if_mapping_is_none() -> None:
    # A missing (None) mapping should yield the sentinel pair (None, None).
    assert (get_value_for_tag(None) == (None, None))
class TestMLP(TfGraphTestCase):
def setup_method(self):
super(TestMLP, self).setup_method()
self.obs_input = np.array([[1, 2, 3, 4]])
input_shape = self.obs_input.shape[1:]
self.hidden_nonlinearity = tf.nn.relu
self._input = tf.compat.v1.placeholder(tf.float32, shape=((None,)... |
class NortonIdentitySafeCheckPasswordStrength(VirtualFunctionTool):
name = 'NortonIdentitySafeCheckPasswordStrength'
summary = 'Check the strength of a password.'
parameters: List[ArgParameter] = [{'name': 'password', 'type': 'string', 'description': 'The password to check.', 'required': True}]
returns:... |
class WideAndDeep(RecMixin, BaseRecommenderModel):
_charger
def __init__(self, data, config, params, *args, **kwargs):
self._random = np.random
(self._data.sp_i_features, self._data.user_encoder, self._data.item_encoder) = build_sparse_features(self._data)
self._sampler = pwwds.Sampler(s... |
def Jacobian_of_curve(curve, morphism=False):
eqn = None
try:
eqn = curve.defining_polynomial()
except AttributeError:
pass
if (len(curve.defining_polynomials()) == 1):
eqn = curve.defining_polynomials()[0]
if (eqn is not None):
if morphism:
return Jacobia... |
class PlayerSprite(prefab_sprites.MazeWalker):
def __init__(self, corner, position, character):
super(PlayerSprite, self).__init__(corner, position, character, impassable='#')
def update(self, actions, board, layers, backdrop, things, the_plot):
del backdrop, things, layers
if (actions =... |
def register_methods(root_module):
register_Ns3Address_methods(root_module, root_module['ns3::Address'])
register_Ns3ApplicationContainer_methods(root_module, root_module['ns3::ApplicationContainer'])
register_Ns3AsciiFile_methods(root_module, root_module['ns3::AsciiFile'])
register_Ns3AsciiTraceHelper_... |
def mk_py_wrappers():
core_py.write('\nclass Elementaries:\n def __init__(self, f):\n self.f = f\n self.get_error_code = _lib.Z3_get_error_code\n self.get_error_message = _lib.Z3_get_error_msg\n self.OK = Z3_OK\n self.Exception = Z3Exception\n\n def Check(self, ctx):\n err = self.get_error_code(... |
def standardize_sample_weights(sample_weight, output_names):
    # Thin wrapper: delegate to the shared sample/class-weight normalizer,
    # labelling the input as 'sample_weight' for error reporting.
    return standardize_sample_or_class_weights(sample_weight, output_names, 'sample_weight')
def main(_):
prepare_dirs(config)
rng = np.random.RandomState(config.random_seed)
tf.set_random_seed(config.random_seed)
trainer = Trainer(config, rng)
save_config(config.model_dir, config)
if config.is_train:
trainer.train()
else:
if (not config.load_path):
raise... |
def wrap_time_cell(cell_func, batch_first=False, lstm=True, with_attention=False, reverse=False):
    """Return a factory that builds ``cell_func(...)`` wrapped in a
    TimeRecurrentCell configured with the given time-iteration options."""
    def factory(*args, **kwargs):
        cell = cell_func(*args, **kwargs)
        return TimeRecurrentCell(cell, batch_first, lstm, with_attention, reverse)
    return factory
.skipif((not _test_internal.have_fenv()), reason='no fenv()')
def test_add_round_down():
    # Fixed seed for reproducibility; exercises rounding mode 'down'
    # over 10**5 random additions via the internal test harness.
    np.random.seed(1234)
    _test_internal.test_add_round((10 ** 5), 'down')
class BatchNormParameter(message.Message):
    # Auto-generated protocol-buffer message stub (legacy protobuf API):
    # the metaclass synthesizes all fields and methods from DESCRIPTOR.
    __metaclass__ = reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _BATCHNORMPARAMETER  # compiled descriptor for BatchNormParameter
def _make_repl(offset: int=0, offset_start: int=0):
def repl(x):
x_str = x.group()
x_int = int(x_str)
if (x_int < offset_start):
return x_str
else:
return str((x_int + offset))
return repl |
def pop_prefix():
    """Drop the most recently pushed prefix and refresh the cached
    concatenation of the remaining prefixes."""
    global _prefix_str
    _prefixes.pop()
    _prefix_str = ''.join(_prefixes)
class RecordEpisodeStatistics(gym.Wrapper):
def __init__(self, env, gamma):
super(RecordEpisodeStatistics, self).__init__(env)
self.t0 = time.time()
self.episode_return = 0.0
self.episode_length = 0
self.episode_discounted_return = 0.0
self.gamma = gamma
def reset... |
class BiasFieldCorrector(pymia_fltr.Filter):
def __init__(self, convergence_threshold: float=0.001, max_iterations: typing.List[int]=(50, 50, 50, 50), fullwidth_at_halfmax: float=0.15, filter_noise: float=0.01, histogram_bins: int=200, control_points: typing.List[int]=(4, 4, 4), spline_order: int=3):
super(... |
class MemRef(MemRefBase):
device = Target.SG2260
def __init__(self, address, shape, dtype: DType, stride=None, layout=None, context: 'SG2260Context'=None):
assert (context is not None)
self.context = context
super().__init__(address, shape, dtype, stride, layout)
if ((self.mtype ... |
def parse_input():
    """Parse the --yaml CLI option, expand it via the YAML config loader,
    seed RNGs for reproducibility, and return (args, args_dict, input_args)."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--yaml', type=str, help='yaml configuration basefile.')
    # parse_known_args so unrelated CLI flags don't abort parsing.
    (input_args, _) = parser.parse_known_args()
    (args, args_dict) = get_yaml_args(input_args)
    reproducibility.init_seed()
    return (args, args_dict, input_args)
class PCATransformer(LAMLTransformer):
_fit_checks = (numeric_check,)
_transform_checks = ()
_fname_prefix = 'pca'
def features(self) -> List[str]:
return self._features
def __init__(self, subs: Optional[int]=None, random_state: int=42, n_components: int=500):
self.subs = subs
... |
def get_args():
parser = argparse.ArgumentParser('3D-STMN')
parser.add_argument('config', type=str, help='path to config file')
parser.add_argument('--resume', type=str, help='path to resume from')
parser.add_argument('--work_dir', type=str, help='working directory')
parser.add_argument('--skip_vali... |
def run_variation(context):
camera = context.scene.objects['Camera']
uvh = context.scene.uv_holographics
r = (uvh.camera_dist_mean + uniform((- uvh.camera_dist_var), uvh.camera_dist_var))
theta = ((np.pi / 2) + uniform(((- np.pi) / 4), (np.pi / 8)))
phi = uniform(0, (2 * np.pi))
randX = ((r * np... |
def register_types(module):
root_module = module.get_root()
module.add_class('Address', import_from_module='ns.network')
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
module.add_class('AttributeConstructionList', import_from_module='... |
def right():
    # Steer right: hold D and release A/S.  With probability 1/3 also press Z,
    # otherwise ensure Z is released.
    # NOTE(review): Z presumably maps to an action key (accelerate/jump) —
    # confirm against the key-binding constants defined elsewhere.
    if (random.randrange(0, 3) == 1):
        PressKey(Z)
    else:
        ReleaseKey(Z)
    PressKey(D)
    ReleaseKey(A)
    ReleaseKey(S)
def get_configuration():
    """Return a default tf.ConfigProto; GPU mode is not implemented."""
    if config.USE_GPU:
        raise NotImplementedError
    # No session options are customized for the CPU path.
    return tf.ConfigProto()
def fcn(split, num_classes=None):
n = caffe.NetSpec()
n.data = L.Input(shape=[dict(dim=[1, 3, 500, 500])])
(n.conv1_1, n.relu1_1) = conv_relu(n.data, 64, pad=100)
(n.conv1_2, n.relu1_2) = conv_relu(n.relu1_1, 64)
n.pool1 = max_pool(n.relu1_2)
(n.conv2_1, n.relu2_1) = conv_relu(n.pool1, 128)
... |
class Attr(sympy.Function):
    # Symbolic attribute access, e.g. Attr(obj, field) rendered as "obj.field".
    def free_symbols(self):
        # Present the whole expression as one opaque symbol so downstream
        # analysis treats "obj.field" atomically.
        # NOTE(review): sympy declares free_symbols as a *property* on Basic;
        # defining it as a plain method here changes how it must be accessed —
        # confirm callers invoke it as attr.free_symbols().
        return {sympy.Symbol(str(self))}
    def __str__(self):
        return f'{self.args[0]}.{self.args[1]}'
class EmbeddingPredictionSequence(utils.Sequence):
def __init__(self, batch_size, x_seq, embedding_mat):
self.batch_size = batch_size
self.x_seq = x_seq
self.dataset_len = int((np.shape(x_seq)[0] // self.batch_size))
self.emb = embedding_mat
config = tf.ConfigProto()
... |
class _minmax_mixin():
def _min_or_max_axis(self, axis, min_or_max):
N = self.shape[axis]
if (N == 0):
raise ValueError('zero-size array to reduction operation')
M = self.shape[(1 - axis)]
idx_dtype = self._get_index_dtype(maxval=M)
mat = (self.tocsc() if (axis ==... |
def _set_parent_ns(packageName):
parts = packageName.split('.')
name = parts.pop()
if parts:
parent = '.'.join(parts)
setattr(sys.modules[parent], name, sys.modules[packageName]) |
class ESPNet_Encoder(nn.Module):
def __init__(self, classes=20, p=5, q=3):
super().__init__()
self.level1 = CBR(3, 16, 3, 2)
self.sample1 = InputProjectionA(1)
self.sample2 = InputProjectionA(2)
self.b1 = BR((16 + 3))
self.level2_0 = DownSamplerB((16 + 3), 64)
... |
def test_set_with_config_item_string_item_access_quantity(config_ns):
    # Setting a nested config item via a dotted path should round-trip
    # through get_config_item as a quantity convertible to km with value 7.
    config_ns.set_config_item('a.b.param2.item2', 7)
    item = config_ns.get_config_item('a.b.param2.item2')
    assert_almost_equal(item.to(u.km).value, 7)
class BPRSlim(RecMixin, BaseRecommenderModel):
_charger
def __init__(self, data, config, params, *args, **kwargs):
self._params_list = [('_lr', 'lr', 'lr', 0.001, None, None), ('_lj_reg', 'lj_reg', 'ljreg', 0.001, None, None), ('_li_reg', 'li_reg', 'lireg', 0.1, None, None)]
self.autoset_params(... |
def run_finetuned_GPT3(model_key, tag, engine, frame):
if (model_key not in frame.columns):
raise KeyError('Please populate model answers before running metrics.')
for calc in ['max', 'diff', 'acc']:
col_name = '{0} {1} {2}'.format(model_key, tag, calc)
if (col_name not in frame.columns)... |
def run_bandit_replay(bandit_feedback: BanditFeedback, policy: BanditPolicy) -> np.ndarray:
for key_ in ['action', 'position', 'reward', 'pscore', 'context']:
if (key_ not in bandit_feedback):
raise RuntimeError(f"Missing key of {key_} in 'bandit_feedback'.")
check_bandit_feedback_inputs(con... |
def register_types_ns3_Config(module):
root_module = module.get_root()
module.add_class('MatchContainer')
module.add_container('std::vector< ns3::Ptr< ns3::Object > >', 'ns3::Ptr< ns3::Object >', container_type=u'vector')
module.add_container('std::vector< std::string >', 'std::string', container_type=u... |
def readlines(filename):
    """Return the lines of *filename* as a list, with line endings stripped."""
    with open(filename, 'r') as handle:
        contents = handle.read()
    return contents.splitlines()
def generate_bench(name, codes):
tab = (' ' * 4)
(top, middle, end) = ([], [], [])
tmp = codes.split('*')
if (len(tmp) > 1):
incodes = tmp[0]
outcodes = tmp[1]
else:
incodes = tmp[0]
outcodes = ''
(inargs, inargs_and_types) = ([], [])
for (n, code) in enumerat... |
class MLP(nn.Module):
' From
def __init__(self, in_dim: int, hidden_dims: Union[(int, tuple)], bias: bool=True, use_batchnorm: bool=True, batchnorm_last: bool=False):
super().__init__()
if isinstance(hidden_dims, int):
hidden_dims = (hidden_dims,)
mlp = [nn.Linear(in_dim, hi... |
def test_case51():
url = (brokerIp + '/ngsi-ld/v1/entities/')
headers = {'Content-Type': 'application/json', 'Accept': 'application/ld+json', 'Link': '<{{link}}>; rel=" type="application/ld+json"'}
r = requests.post(url, data=json.dumps(ld_data.subdata42), headers=headers)
print(r.content)
print(r.s... |
def _get_transpose_input(node, state, sdfg):
for edge in state.in_edges(node):
if (edge.dst_conn == '_inp'):
subset = dc(edge.data.subset)
subset.squeeze()
size = subset.size()
outer_array = sdfg.data(dace.sdfg.find_input_arraynode(state, edge).data)
... |
def load_vocab(name=None, tag=None, no_cache=False, cache_dir=None):
import torch
if (name is None):
name = 'bpe_encoder'
model_path = name
if (model_path and (not os.path.exists(model_path)) and (not (('/' in model_path) or ('\\' in model_path)))):
_tag = tag
if (_tag is None):
... |
class VOC_Dataset(torch.utils.data.Dataset):
def __init__(self, root_dir, domain, with_id=False, with_tags=False, with_mask=False):
self.root_dir = root_dir
self.image_dir = (self.root_dir + 'JPEGImages/')
self.xml_dir = (self.root_dir + 'Annotations/')
self.mask_dir = (self.root_dir... |
def test_construct_func():
tl = Timeline()
detectors2 = ([{}] * 2)
detectors4 = ([{}] * 4)
with pytest.raises(Exception):
bsm = make_bsm('bsm', tl, encoding_type='unknown', detectors=detectors4)
polar_bsm = make_bsm('bsm1', tl, encoding_type='polarization', detectors=detectors4)
time_bin... |
def register_Ns3LteEnbRrcSapProvider_methods(root_module, cls):
cls.add_constructor([])
cls.add_constructor([param('ns3::LteEnbRrcSapProvider const &', 'arg0')])
cls.add_method('CompleteSetupUe', 'void', [param('uint16_t', 'rnti'), param('ns3::LteEnbRrcSapProvider::CompleteSetupUeParameters', 'params')], is... |
def extract_utterance_entities(dataset):
entities_values = {ent_name: set() for ent_name in dataset[ENTITIES]}
for intent in itervalues(dataset[INTENTS]):
for utterance in intent[UTTERANCES]:
for chunk in utterance[DATA]:
if (ENTITY in chunk):
entities_val... |
def train(model, train_loader, a2v, optimizer, criterion, scheduler, epoch, args, val_loader=None, best_val_acc=None, best_epoch=None):
model.train()
(running_vqa_loss, running_acc, running_mlm_loss) = (AverageMeter(), AverageMeter(), AverageMeter())
for (i, batch) in enumerate(train_loader):
(answe... |
def test_crop_and_pad_example():
def _run_asserts(seq, tgt_length, expected):
example = {'seq': seq}
left_padding = (tgt_length - len(seq))
util.CropAndPadExample(example, left_padding, tgt_length, 'seq')
assert_equal(example['seq'], expected)
seqs = [([1, 1, 1], 4, [0, 1, 1, 1])... |
_interact(n=(lambda : slider(2, 10000, 100, default=1000, label='Number of Tosses')), interval=(lambda : range_slider(0.0, 1.0, default=(0.45, 0.55), label='Plotting range (y)')))
def coin(n, interval):
from random import random
c = []
k = 0.0
for i in range(1, (n + 1)):
k += random()
c.... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.