code stringlengths 101 5.91M |
|---|
.parametrize('statement_type,value', [(stmt.IntPrimitiveStatement, 42), (stmt.FloatPrimitiveStatement, 42.23), (stmt.StringPrimitiveStatement, 'foo'), (stmt.BytesPrimitiveStatement, b'foo'), (stmt.BooleanPrimitiveStatement, True), (stmt.ComplexPrimitiveStatement, (4 + 1j)), (stmt.ClassPrimitiveStatement, 0)])
def test_... |
.skipif((not _ti_core.GGUI_AVAILABLE), reason='GGUI Not Available')
_utils.test(arch=[ti.vulkan])
def test_multi_windows():
    """Smoke-test that two GGUI windows can be created in the same process."""
    # Both windows are hidden (show_window=False) with vsync on; creation
    # alone exercises the multi-window code path.
    # NOTE(review): neither window is shown or destroyed here — the snippet
    # may be truncated; confirm against the original test.
    window = ti.ui.Window('x', (128, 128), vsync=True, show_window=False)
    window2 = ti.ui.Window('x2', (128, 128), vsync=True, show_window=False)
class VideoDataset(Dataset):
def __init__(self, train, query, gallery, seq_len=15, sample_method='evenly', **kwargs):
super(VideoDataset, self).__init__(train, query, gallery, **kwargs)
self.seq_len = seq_len
self.sample_method = sample_method
if (self.transform is None):
... |
def success_probability(F: float) -> float:
    """Return the success probability as a polynomial in the fidelity ``F``.

    Computes ``F**2 + 2*F*(1-F)/3 + 5*((1-F)/3)**2``, term for term the
    same expression as before, with the shared error factor named.
    """
    err = (1 - F) / 3
    return F ** 2 + 2 * F * (1 - F) / 3 + 5 * err ** 2
def is_interactive_execution():
    """Return True when running inside an IPython/Jupyter shell, else False.

    Detection relies on the ``get_ipython`` name that IPython injects into
    the interactive namespace: it only exists in interactive shells, so a
    NameError means we are in a plain Python interpreter.

    Fix: the original used a bare ``except:`` which would also swallow
    KeyboardInterrupt/SystemExit; only NameError is expected here.
    """
    try:
        get_ipython  # noqa: F821 -- injected by IPython at runtime
        return True
    except NameError:
        return False
def create_stats_ordered_dict(name, data, stat_prefix=None, always_show_all_stats=True, exclude_max_min=False):
if (stat_prefix is not None):
name = '{}{}'.format(stat_prefix, name)
if isinstance(data, Number):
return OrderedDict({name: data})
if (len(data) == 0):
return OrderedDict(... |
class PythonCreoConnection():
def __init__(self, creo_exe_path, no_graphic_op=True, no_input_op=True):
pythoncom.CoInitialize()
self.model = None
self.models = []
self.creo_exe_path = creo_exe_path
self.no_graphic_op = (' -g:no_graphics' if no_graphic_op else '')
self... |
class PC(BaseTabularAlgo, BaseTabularAlgoFull):
def __init__(self, data: TabularData, prior_knowledge: Optional[PriorKnowledge]=None, CI_test: Union[(PartialCorrelation, KCI, DiscreteCI_tests)]=PartialCorrelation(), use_multiprocessing: Optional[bool]=False, **kargs):
BaseTabularAlgo.__init__(self, data=dat... |
class VGG(nn.Module):
def __init__(self, features, num_class=100):
super().__init__()
self.features = features
self.classifier = nn.Sequential(nn.Linear(512, 4096), nn.ReLU(inplace=True), nn.Dropout(), nn.Linear(4096, 4096), nn.ReLU(inplace=True), nn.Dropout(), nn.Linear(4096, num_class))
... |
def data_process(config):
mode = config['mode']
assert (mode in ('term', 'category'))
base_path = config['base_path']
raw_train_path = os.path.join(base_path, 'raw/train.xml')
raw_val_path = os.path.join(base_path, 'raw/val.xml')
raw_test_path = os.path.join(base_path, 'raw/test.xml')
lowerc... |
class ColumnPlaceholder():
    """Stand-in for a column inside a pattern, bound to a real column later."""

    def __init__(self, id_in_pattern, attributes):
        # No concrete column is attached yet.
        self.column = None
        # Identity of this placeholder within the pattern, plus the
        # attributes the eventual column must carry.
        self.id_in_pattern = id_in_pattern
        self.attributes = attributes

    def attach_to_column(self, column):
        """Bind this placeholder to the concrete ``column``."""
        self.column = column
.utils.register_keras_serializable()
class FactorizationMachineLayer(tf.keras.layers.Layer):
def __init__(self, field_dims, factors, kernel_initializer: Union[(Text, tf.keras.initializers.Initializer)]='truncated_normal', kernel_regularizer: Union[(Text, None, tf.keras.regularizers.Regularizer)]=None, **kwargs):
... |
class GoogleSearchGetSearchHistory(VirtualFunctionTool):
name = 'GoogleSearchGetSearchHistory'
summary = "Retrieve the user's search history."
parameters: List[ArgParameter] = [{'name': 'time_range', 'type': 'string', 'description': "The time range to get the search history, in the format of 'YYYY-MM-DD..YY... |
def sample_initial_states(rng):
    """Draw a perturbed initial state with each component jittered by up to ±5%."""
    base = zika.State().values()
    # Multiplicative noise, sampled independently per component.
    jitter = rng.uniform(low=0.95, high=1.05, size=base.shape)
    return zika.State(*(base * jitter))
.parametrize('exposed_cases, exposed_total, control_cases, control_total, expected_rr', [(1, 4, 3, 8, (0.25 / 0.375)), (0, 10, 5, 20, 0), (0, 10, 0, 20, np.nan), (5, 15, 0, 20, np.inf)])
def test_relative_risk(exposed_cases, exposed_total, control_cases, control_total, expected_rr):
result = relative_risk(exposed_c... |
('just_spaces')
class JustSpacesWordSplitter(WordSplitter):
    """WordSplitter that tokenizes purely on whitespace via ``str.split()``."""

    def split_words(self, sentence: str) -> List[Token]:
        """Wrap each whitespace-separated chunk of ``sentence`` in a Token."""
        return [Token(t) for t in sentence.split()]

    def from_params(cls, params: Params) -> 'WordSplitter':
        # NOTE(review): takes ``cls`` but carries no @classmethod decorator —
        # likely lost in extraction; confirm against the original source.
        params.assert_empty(cls.__name__)
        return cls()
class GaussianBlur(object):
    """Gaussian blur augmentation as used in SimCLR.

    On every call, applies a Gaussian blur whose radius is drawn uniformly
    from ``[sigma[0], sigma[1]]``.

    Fixes: the original class docstring line was unterminated (syntax
    error), and the default ``sigma`` was a mutable list shared across
    instances; a tuple default is safe and callers passing lists still work.
    """

    def __init__(self, sigma=(0.1, 2.0)):
        self.sigma = sigma

    def __call__(self, x):
        # Sample a fresh radius per image so the augmentation varies.
        radius = random.uniform(self.sigma[0], self.sigma[1])
        # assumes x is a PIL Image — TODO confirm against the caller.
        x = x.filter(ImageFilter.GaussianBlur(radius=radius))
        return x
class Schema(object):
thisown = _swig_property((lambda x: x.this.own()), (lambda x, v: x.this.own(v)), doc='The membership flag')
__repr__ = _swig_repr
def __init__(self, *args):
_snap.Schema_swiginit(self, _snap.new_Schema(*args))
__swig_destroy__ = _snap.delete_Schema
def __add__(self, Val... |
def copy_dlls(outdir):
libdir = None
libnames = None
libneeded = ('libgfortran', 'libgcc_s', 'libquadmath')
liboptional = ('libwinpthread',)
for p in os.environ['PATH'].split(';'):
if (not os.path.isdir(p)):
continue
libnames = [f for f in os.listdir(p) if (f.lower().ends... |
def _combining_class(cp):
v = unicodedata.combining(unichr(cp))
if (v == 0):
if (not unicodedata.name(unichr(cp))):
raise ValueError('Unknown character in unicodedata')
return v |
class NeveuSchwarzLieConformalAlgebra(GradedLieConformalAlgebra):
def __init__(self, R):
nsdict = {('L', 'L'): {0: {('L', 1): 1}, 1: {('L', 0): 2}, 3: {('C', 0): R(2).inverse_of_unit()}}, ('L', 'G'): {0: {('G', 1): 1}, 1: {('G', 0): (R(3) * R(2).inverse_of_unit())}}, ('G', 'G'): {0: {('L', 0): 2}, 2: {('C',... |
class QAFactEvalEvaluator():
def __init__(self, device=0):
os.makedirs('models/answering', exist_ok=True)
if (not os.listdir('models/answering')):
model_url = '
output_path = 'models/answering/model.zip'
gdown.download(model_url, output_path, quiet=False)
... |
class SPDocVQA(Dataset):
def __init__(self, imbd_dir, images_dir, split, kwargs):
data = np.load(os.path.join(imbd_dir, 'new_imdb_{:s}.npy'.format(split)), allow_pickle=True)
self.header = data[0]
self.imdb = data[1:]
self.hierarchical_method = kwargs.get('hierarchical_method', False... |
def depthwise_conv2d(_input, kernel_size, stride=1, padding='SAME', param_initializer=None):
in_features = int(_input.get_shape()[3])
if (not param_initializer):
param_initializer = {}
output = _input
with tf.variable_scope('conv'):
init_key = ('%s/weight' % tf.get_variable_scope().name)... |
def metric_max_over_ground_truths(metric_fn, prediction, ground_truths):
    """Score ``prediction`` against every ground truth and return the best score."""
    return max(metric_fn(prediction, truth) for truth in ground_truths)
class ICNet(nn.Module):
def __init__(self, num_classes):
super(ICNet, self).__init__()
self.conv_sub1 = nn.Sequential(Conv3x3BNReLU(3, 32, 2), Conv3x3BNReLU(32, 32, 2), Conv3x3BNReLU(32, 64, 2))
self.backbone = Backbone()
self.ppm = PyramidPoolingModule()
self.cff_12 = Cascad... |
def normalize_words(target_words_batch, predicted_words_batch, glm_alternatives=None):
excluded_words = ['<UNK>', 'UH', 'UM', 'EH', 'MM', 'HM', 'AH', 'HUH', 'HA', 'ER', 'OOF', 'HEE', 'ACH', 'EEE', 'EW']
target_words_batch = expand_contractions_batch(target_words_batch)
predicted_words_batch = expand_contrac... |
class LossOutput():
loss: LossRecord
reconstruction_loss: (LossRecord | None) = None
kl_local: (LossRecord | None) = None
kl_global: (LossRecord | None) = None
classification_loss: (LossRecord | None) = None
logits: (Tensor | None) = None
true_labels: (Tensor | None) = None
extra_metrics... |
def mathematica_console(readline=True):
from sage.repl.rich_output.display_manager import get_display_manager
if (not get_display_manager().is_in_terminal()):
raise RuntimeError('Can use the console only in the terminal. Try %%mathematica magics instead.')
if (not readline):
os.system('math'... |
_function
def fully_qualified_name(filename):
    """Return the dotted module path for ``filename``: package parts plus the module stem."""
    base = os.path.basename(filename)
    stem, _ext = os.path.splitext(base)
    parts = package(filename) + (stem,)
    return '.'.join(parts)
def get_split(split_name, dataset_dir, file_pattern=None, reader=None):
if (split_name not in _SPLITS_TO_SIZES):
raise ValueError(('split name %s was not recognized.' % split_name))
if (not file_pattern):
file_pattern = _FILE_PATTERN
file_pattern = os.path.join(dataset_dir, (file_pattern % s... |
def binary_search_tree_shape(w, left_to_right=True):
if (not w):
return BinaryTree()
if left_to_right:
root = w[0]
else:
root = w[(- 1)]
left = [x for x in w if (x < root)]
right = [x for x in w if (x > root)]
return BinaryTree([binary_search_tree_shape(left, left_to_righ... |
class TestMiniAlexNet(test_util.TestCase):
def _MiniAlexNetNoDropout(self, order):
model = model_helper.ModelHelper(name='alexnet')
conv1 = brew.conv(model, 'data', 'conv1', 3, 16, 11, ('XavierFill', {}), ('ConstantFill', {}), stride=4, pad=0)
relu1 = brew.relu(model, conv1, 'relu1')
... |
def plot_pit_histogram(predicted, observed, **kwargs):
plt.bar(x=predicted[1:], height=np.diff(observed), width=(- np.diff(predicted)), align='edge', fill=False, edgecolor='black', **kwargs)
plt.xlim((0, 1))
plt.xlabel('Probability Integral Transform')
plt.ylabel('Density')
plt.axhline((1.0 / (len(p... |
.parametrize('seed', [313])
.parametrize('ctx, func_name', ctxs)
def test_log_forward_backward(seed, ctx, func_name):
from nbla_test_utils import cap_ignore_region, function_tester
rng = np.random.RandomState(seed)
inputs = [np.clip((np.abs(rng.randn(2, 3, 4).astype(np.float32)) * 10000.0), 0.01, 10000.0)]
... |
_utils.test(arch=get_host_arch_list())
def test_python_scope_matrix_operations():
    """Each matrix op must agree between ti.Matrix operands and the raw arrays."""
    for op in matrix_operation_types:
        (lhs, rhs) = test_matrix_arrays[:2]
        result = op(ti.Matrix(lhs), ti.Matrix(rhs))
        assert np.allclose(result.to_numpy(), op(lhs, rhs))
class Networks3D(object):
def __init__(self):
self.Conv3D_Block = Layers2D().Conv2D_Block
self.Conv3DTranspose_Block = Layers2D().Conv2DTranspose_Block
self.Residual3D_Block = Layers2D().Residual3D_Block
def build_patch_discriminator3D(self, model_shape, filters=32, k_size=4, drop=True, ... |
def parse_line(line, domain):
tokens = line.split(' ')[1:]
context = tokens[:(2 * domain.input_length())]
choice_str = tokens[(- domain.selection_length()):]
(cnts, vals) = domain.parse_context(context)
picks = []
for (i, c) in enumerate(choice_str[:(domain.selection_length() // 2)]):
if... |
def rmse_loss(outputs, targets):
    """Root-mean-squared error between ``targets`` and ``outputs``.

    Fix: ``tf.sub`` was removed in TensorFlow 1.0; ``tf.subtract`` is the
    supported, numerically identical spelling.
    """
    return tf.sqrt(tf.reduce_mean(tf.square(tf.subtract(targets, outputs))))
def test_multiple_in_connectors():
sdfg = dace.SDFG('mctest')
sdfg.add_array('A', [1], dace.float64)
state = sdfg.add_state()
a = state.add_read('A')
b = state.add_read('A')
tasklet = state.add_tasklet('dosomething', {'a'}, {}, 'a * a')
state.add_edge(a, None, tasklet, 'a', dace.Memlet('A[0]... |
class FineModel(abc.ABC):
    """Abstract base class for a fine model.

    Subclasses are expected to populate ``controls`` and
    ``cost_functional_value`` and to override ``solve_and_evaluate``.
    """

    # Control variables of the model (fenics functions).
    controls: List[fenics.Function]
    # Most recently evaluated value of the cost functional.
    cost_functional_value: float

    def __init__(self) -> None:
        # Intentionally empty: concrete models set up their own state.
        pass

    def solve_and_evaluate(self) -> None:
        # NOTE(review): declared on an ABC but not marked @abstractmethod,
        # so instantiation is not blocked — presumably subclasses override.
        pass
def main(fuzzer, seed, output, group, program, argument, thread, command, cgroup_path='', scale_num=1):
controller_class = str_to_class(f'{str.upper(fuzzer)}Controller')
if (controller_class is None):
print(f"{fuzzer} controller doesn't exist.")
controller = controller_class(seed=os.path.realpath(se... |
def test():
pred_sequences = [['O', 'S-LOC', 'O', 'O', 'B-PER', 'E-PER'], ['O', 'S-MISC', 'O', 'E-ORG', 'O', 'B-PER', 'I-PER', 'E-PER']]
gold_sequences = [['O', 'B-LOC', 'E-LOC', 'O', 'B-PER', 'E-PER'], ['O', 'S-MISC', 'B-ORG', 'E-ORG', 'O', 'B-PER', 'E-PER', 'S-LOC']]
print(score_by_token(pred_sequences, g... |
_test()
def test_vector_reduce():
N.set(24)
X = np.random.rand(N.get()).astype(dace.float32.type)
s = dace.scalar(dace.float32)
sdfg = vector_reduce.to_sdfg()
sdfg.apply_transformations(FPGATransformSDFG)
sdfg(x=X, s=s, N=N)
s_exp = 0.0
for x in X:
s_exp += x
diff = (np.linal... |
class TestQuantizeDynamicJitPasses(QuantizationTestCase):
def test_prepare_dynamic(self):
class M(torch.nn.Module):
def __init__(self):
super(M, self).__init__()
self.fc = torch.nn.Linear(5, 5)
def forward(self, x):
return self.fc(x)
... |
class TestSummaryEncoder(unittest.TestCase):
def setUp(self):
self.df = pd.DataFrame({'categories': ['a', 'b', 'c', 'a', 'b', 'c', 'a', 'b']})
self.target = np.array([1, 2, 0, 4, 5, 0, 6, 7])
self.col = 'categories'
def assert_same_quantile(self, quantile):
quantile_results = enc... |
def _split_channels(num_feat, num_groups):
split = [(num_feat // num_groups) for _ in range(num_groups)]
split[0] += (num_feat - sum(split))
return split |
class FmaDatasetInfo(DatasetInfo):
name = 'fma'
def preprocess_id_col(self, df: pd.DataFrame) -> pd.DataFrame:
df[self.id_col] = df[self.id_col].apply((lambda x: f'{x:06}'))
return df
def id_to_filename(self, track_id: Union[(str, int)], dirname: Optional[str]=None):
track_id = int(t... |
class CTRLModelTest(CommonTestCases.CommonModelTester):
all_model_classes = ((CTRLModel, CTRLLMHeadModel) if is_torch_available() else ())
test_pruning = False
test_torchscript = False
test_resize_embeddings = False
test_head_masking = False
class CTRLModelTester(object):
def __init__(se... |
class TarballTestCase(unittest.TestCase):
def test_tarball(self):
pkg = Package('configure')
tarball = Tarball(pkg.tarball_filename)
self.assertEqual(tarball, pkg.tarball)
self.assertEqual(pkg, tarball.package)
with CapturedOutput() as (stdout, stderr):
with Captu... |
def to_pianoroll(music, colors):
multitrack = music.to_pypianoroll()
stacked = (multitrack.stack() > 0)
colormatrix = np.array(colors[:len(music)])
reshaped = stacked.reshape(len(music), (- 1))
recolored = (255 - np.matmul((255 - colormatrix.T), reshaped))
clipped = np.round(np.clip(recolored, 0... |
def build_image_encoder(config, direct_features=False, **kwargs):
    """Build an image encoder module from ``config``.

    When ``direct_features`` is set, the feature-based factory is used;
    otherwise the raw-image encoder factory. Either way the factory's
    ``.module`` attribute is returned.
    """
    from mmf.modules.encoders import ImageEncoderFactory, ImageFeatureEncoderFactory

    factory_cls = ImageFeatureEncoderFactory if direct_features else ImageEncoderFactory
    return factory_cls(config).module
def do_analyse_snli(file_path, dev=True, use_loss=False, stop=None):
results = []
with open(file_path, 'r', encoding='utf-8') as file:
find_entry = False
output = [0, 0.0, 0.0, 0.0, 0.0]
for line in file:
if (not find_entry):
if line.startswith('data round'):
... |
def save_pkl(data, file_path):
    """Serialize ``data`` to ``file_path`` using pickle's default protocol."""
    with open(file_path, 'wb') as out_file:
        pickle.dump(data, out_file)
def BVRedAnd(a):
    """Return the 1-bit AND-reduction of the bit-vector expression ``a``."""
    if z3_debug():
        _z3_assert(is_bv(a), 'First argument must be a Z3 bit-vector expression')
    reduced = Z3_mk_bvredand(a.ctx_ref(), a.as_ast())
    return BitVecRef(reduced, a.ctx)
class ModelArguments():
model_name_or_path: Optional[str] = field(default=None, metadata={'help': "The model checkpoint for weights initialization.Don't set if you want to train a model from scratch."})
config_name: Optional[str] = field(default=None, metadata={'help': 'Pretrained config name or path if not the... |
class LayerPQC(LayeredPQC):
def __init__(self, encoding_circuit: LayeredPQC):
super().__init__(encoding_circuit.num_qubits, encoding_circuit.variable_groups)
def add_operation(self, operation: _operation):
self.operation_list.append(operation) |
def load_data_to_gpu(batch_dict):
for (key, val) in batch_dict.items():
try:
if isinstance(val, np.ndarray):
batch_dict[key] = torch.from_numpy(val).float().cuda()
elif isinstance(val, torch.Tensor):
batch_dict[key] = val.float().cuda()
except:... |
.spark
.parametrize('column', ['user_id'])
.usefixtures('simple_dataframe')
def test_label_encoder_spark(column, simple_dataframe):
rule = LabelEncodingRule(column)
encoder = LabelEncoder([rule]).fit(simple_dataframe)
mapped_data = encoder.transform(simple_dataframe)
rebuild_original_cols = encoder.inve... |
.experimental
def test_predict_pairs_k(log):
model = ADMMSLIM(seed=SEED)
model.fit(log)
pairs_pred_k = model.predict_pairs(pairs=log.select('user_idx', 'item_idx'), log=log, k=1)
pairs_pred = model.predict_pairs(pairs=log.select('user_idx', 'item_idx'), log=log, k=None)
assert (pairs_pred_k.groupBy(... |
def test_should_call_integrated_gradients_callback(random_data, convolutional_model, output_dir, mocker):
mock_explainer = mocker.MagicMock(explain=mocker.MagicMock(return_value=np.zeros((28, 28))))
mocker.patch('tf_explain.callbacks.integrated_gradients.IntegratedGradients', return_value=mock_explainer)
(i... |
def _worker_run_collect(all_args):
try:
(collect_once, counter, lock, threshold, args) = all_args
collected = []
while True:
with lock:
if (counter.value >= threshold):
return collected
(result, inc) = collect_once(singleton_pool.G,... |
class Up(nn.Module):
    """2x upsampling block: transposed 3x3 conv (stride 2) followed by ReLU."""

    def __init__(self, in_ch, out_ch):
        super(Up, self).__init__()
        upsample = nn.ConvTranspose2d(
            in_ch, out_ch,
            kernel_size=3, stride=2, padding=1, output_padding=1,
        )
        # Output spatial size: (in - 1)*2 - 2*1 + 3 + 1 = 2*in.
        self.up = nn.Sequential(upsample, nn.ReLU())

    def forward(self, x):
        """Upsample ``x`` by a factor of two."""
        return self.up(x)
class states_t(object):
__slots__ = ['p']
def __init__(self, p=None, _skip_initialize=False):
if _skip_initialize:
return
self.p = (Vector2d._default() if (p is None) else p)
def from_all_fields(p):
return states_t(p=p)
def _skytype_meta():
return dict(type='s... |
class RingBuffer(object):
def __init__(self, maxlen, shape, dtype=np.float32, data=None):
self.maxlen = maxlen
self.start = RawValue('L')
self.length = RawValue('L')
self.shape = shape
if (data is None):
self.data = np.zeros(((maxlen,) + shape), dtype=dtype)
... |
class NonBottleneck1d(BaseModule):
def __init__(self, channels, drop_rate=0, dilation=1, num_conv_layer=2, conv_cfg=None, norm_cfg=dict(type='BN', eps=0.001), act_cfg=dict(type='ReLU'), init_cfg=None):
super(NonBottleneck1d, self).__init__(init_cfg=init_cfg)
self.conv_cfg = conv_cfg
self.nor... |
class ComplexDoubleVectorSpace_class(FreeModule_ambient_field):
    """Ambient vector space of dimension ``n`` over the complex double field (CDF)."""

    def __init__(self, n):
        # Delegate to the generic ambient free-module constructor, fixing
        # the base field to CDF.
        FreeModule_ambient_field.__init__(self, sage.rings.complex_double.CDF, n)

    def coordinates(self, v):
        """Return the coordinates of ``v`` — the identity map in the ambient space."""
        return v
class SuperbASR(ASR):
def default_config(self) -> dict:
return dict(start=0, stop=None, target_dir=MISSING, cache_dir=None, remove_all_cache=False, prepare_data=dict(dataset_root=MISSING, train_sets=['train-clean-100'], valid_sets=['dev-clean'], test_sets=['test-clean']), prepare_tokenizer_data=dict(), buil... |
class Checkpoints():
def __init__(self, logger=None, checkpoint_dir=None, experiment_id=None):
self.logger = logger
super(Checkpoints, self).__init__()
self.checkpoint_dir = checkpoint_dir
self.experiment_id = experiment_id
self.checkpoint_path = self.get_checkpoint()
def... |
class MixedPercisionActivationSearch2Bit(MixedPercisionActivationBaseTest):
def __init__(self, unit_test):
super().__init__(unit_test)
self.expected_config = [2, 8, 2, 2]
def get_kpi(self):
return KPI(96, 768)
def compare(self, quantized_models, float_model, input_x=None, quantizatio... |
.xfail
def test_exception():
def func():
foo = 1
bar = 0
try:
result = (0 / 0)
except ZeroDivisionError:
result = (foo + bar)
return result
assert (func() == 1)
dummy_block = BasicBlock([])
return_block = BasicBlock([Instr('LOAD_FAST', arg=... |
def test_jim21():
    """Round-trip: stringify a parameterized awkward type, parse it, restringify."""
    array = ak.with_parameter(
        ak.Array([1, 2, 3, None, [1], [1, 2], [1, 2, 3]]),
        'wonky',
        'string',
    )
    text = str(array.type)
    print(text)
    assert str(deduce_type(text)) == text
class DegenerateMetric(TensorField):
def __init__(self, vector_field_module, name, signature=None, latex_name=None):
TensorField.__init__(self, vector_field_module, (0, 2), name=name, latex_name=latex_name, sym=(0, 1))
ndim = self._ambient_domain.dimension()
if (signature is None):
... |
def setImgWidthElement(data, img_key, width_key, resize=True):
try:
img = data[img_key]
except KeyError:
logger.error(('No image data (key: %s)' % img_key))
return
try:
width = data[width_key]
if (resize and (width < img.shape[1])):
height = int(((width * ... |
def log_gaussian(x, mu, sigma, reduce_sum=True):
    """Log-density of ``x`` under Normal(mu, sigma).

    Returns the element-wise log-probabilities, summed over all elements
    when ``reduce_sum`` is true.
    """
    log_prob = tfd.Normal(mu, sigma).log_prob(x)
    return tf.reduce_sum(log_prob) if reduce_sum else log_prob
def render_dist(df: pd.DataFrame, x: str, typ: str, plot_width: int, plot_height: int) -> Figure:
assert (typ in ['pdf', 'cdf'])
tooltips = [(x, ''), (typ.upper(), f'{{{typ}}}'), ('Label', f'{{{typ}}}_label')]
y_range = Range1d(0, (df[typ].max() * 1.01))
x_range = Range1d(0, (df[f'x_{typ}'].max() * 1.01... |
def OA_10_205():
pplane_size = (((16 ** 2) + 16) + 1)
baer_subplane_size = (((4 ** 2) + 4) + 1)
B = [0, 1, 22, 33, 83, 122, 135, 141, 145, 159, 175, 200, 226, 229, 231, 238, 246]
pplane = [[((xx + i) % pplane_size) for xx in B] for i in range(pplane_size)]
baer_subplane = set([((i * pplane_size) / b... |
def _load_sample_hash() -> Tuple[(Array, Array)]:
return (jnp.array([[, , , ], [, , , ], [, 6709002, , ], [, , , ], [, , , ], [, , , ], [, , , ], [, , , ], [, , , ], [, , , ], [, , , ], [, , , ], [, , , ], [7139188, , , ], [2653767, , , ], [, , 6716808, ], [, 49709, , ], [, 3340423, , 7686579], [, , , 9605779], [, ... |
class PickleableMagicMock(mock.MagicMock):
    """MagicMock subclass that survives pickling.

    Plain MagicMock instances are unpicklable; reducing to an
    argument-less ``mock.MagicMock`` lets the object cross pickle
    boundaries. Recorded calls and configuration are intentionally
    not preserved.
    """

    def __reduce__(self):
        return mock.MagicMock, ()
def log_scalar_sum(key: str, value: float, priority: int=10, round: Optional[int]=None):
    """Accumulate ``value`` into a SumMeter named ``key`` on every active aggregator.

    A meter is created lazily (with the given ``priority`` and rounding)
    the first time an aggregator sees the key.
    """
    for aggregator in get_active_aggregators():
        if key not in aggregator:
            aggregator.add_meter(key, SumMeter(round=round), priority)
        aggregator[key].update(value)
class TFMobileBertPreTrainedModel(metaclass=DummyObject):
    """Import-time placeholder for the real TF MobileBERT model class.

    Instantiation raises via ``requires_backends`` when TensorFlow is not
    installed, keeping the enclosing package importable without TF.
    """

    # Backends that must be available for the real class to be used.
    _backends = ['tf']

    def __init__(self, *args, **kwargs):
        requires_backends(self, ['tf'])
def load_scone(dirname):
dfs = []
for filename in glob.glob((dirname + '/*.csv')):
df = pd.read_csv(filename, index_col=0)
df['category'] = os.path.basename(filename).replace('.csv', '')
dfs.append(df)
data_df = pd.concat(dfs)
def as_example(row):
suffix = ('' if (row['ca... |
def test_string_literal_in_callback_2():
success = False
_inhibitor
def cb(a):
nonlocal success
if (a == "b'Hello World!'"):
success = True
def tester(a):
cb("b'Hello World!'")
a = np.random.rand(1)
tester(a)
assert (success is True) |
def sample_neigh(graphs, size):
ps = np.array([len(g) for g in graphs], dtype=np.float)
ps /= np.sum(ps)
dist = stats.rv_discrete(values=(np.arange(len(graphs)), ps))
while True:
idx = dist.rvs()
graph = graphs[idx]
start_node = random.choice(list(graph.nodes))
neigh = [s... |
def interactions_rating_pandas_dataset():
events = pd.DataFrame({'user_id': [0, 0, 1, 1, 1, 2], 'item_id': [0, 1, 0, 2, 3, 1], 'rating': [1.1, 1.2, 1.3, 2, 3, 4]})
return {'interactions': events, 'user_col': 'user_id', 'item_col': 'item_id', 'ratings_col': 'rating', 'users_cardinality': 3, 'items_cardinality': ... |
class DummyTask(FairseqTask):
def __init__(self, args):
super().__init__(args)
self.dictionary = get_dummy_dictionary()
if getattr(self.args, 'ctc', False):
self.dictionary.add_symbol('<ctc_blank>')
self.src_dict = self.dictionary
self.tgt_dict = self.dictionary
... |
def re_train(mode):
    """Rebuild data and retrain: mode 0 -> alphabet model, mode 1 -> word model."""
    if mode == 0:
        # Recreate and refill the base table, synthesize 20 samples, then train.
        dbh.create_table()
        dbh.populate_db()
        synth.synthesize(20)
        alphabet_model.train_alphabets()
    elif mode == 1:
        # Pose/word pipeline instead of the alphabet pipeline.
        dbh.create_pose_table()
        dbh.populate_words()
        word_model.train_words()
class AutonomousSystem(Printable, Graphable, Configurable):
__asn: int
__subnets: List[IPv4Network]
__routers: Dict[(str, Node)]
__hosts: Dict[(str, Node)]
__nets: Dict[(str, Network)]
__name_servers: List[str]
def __init__(self, asn: int, subnetTemplate: str='10.{}.0.0/16'):
super()... |
def Simulator_customizations(module):
Simulator = module['ns3::Simulator']
Simulator.add_custom_method_wrapper('Schedule', '_wrap_Simulator_Schedule', flags=['METH_VARARGS', 'METH_KEYWORDS', 'METH_STATIC'])
Simulator.add_custom_method_wrapper('ScheduleNow', '_wrap_Simulator_ScheduleNow', flags=['METH_VARARG... |
class XLMModelTest(CommonTestCases.CommonModelTester):
all_model_classes = ((XLMModel, XLMWithLMHeadModel, XLMForQuestionAnswering, XLMForSequenceClassification, XLMForQuestionAnsweringSimple) if is_torch_available() else ())
class XLMModelTester(object):
def __init__(self, parent, batch_size=13, seq_le... |
class ReducedContextAggModel(MeanPoolModel):
def init_graph(self, X, batch_size, ckpt=None, device='gpu', **model_params):
self.batch_size = batch_size
n_dim_x = len(X[0].values[0])
n_dim_q = len(X[1].values[0])
n_dim_p = len(X[2].values[0])
n_dim_c = len(X[3].values[0])
... |
class IndPool():
def __init__(self, pool_size):
self.pool_size = pool_size
if (self.pool_size > 0):
self.num = 0
self.inds = []
def fetch(self):
return self.inds
def is_full(self):
full = (self.num == self.pool_size)
return full
def update(... |
def build_stamp_source(root_dir=None, stamp_fname='pyx-stamps', include_dirs=None):
if (root_dir is None):
root_dir = os.getcwd()
if (include_dirs is None):
include_dirs = [pjoin(root_dir, 'src')]
pyxes = find_pyx(root_dir)
pyx_defs = build_stamp(pyxes, include_dirs=include_dirs)
wri... |
def init_logging(args):
handlers = [logging.StreamHandler()]
if ((not args.no_log) and (args.log_file is not None)):
mode = ('a' if os.path.isfile(args.resume_training) else 'w')
handlers.append(logging.FileHandler(args.log_file, mode=mode))
logging.basicConfig(handlers=handlers, format='[%(... |
(('ScriptModule' not in core._REGISTERED_OPERATORS), 'Script module integration in Caffe2 is not enabled')
class TestScriptModule(test_util.TestCase):
def _createFeedModule(self):
workspace.FeedBlob('m', MyModule())
def testCreation(self):
m = MyModule()
workspace.FeedBlob('module', m)
... |
class AttentionUNet(nn.Module):
def __init__(self, img_ch=3, output_ch=1):
super(AttentionUNet, self).__init__()
self.MaxPool = nn.MaxPool2d(kernel_size=2, stride=2)
self.Conv1 = ConvBlock(img_ch, 64)
self.Conv2 = ConvBlock(64, 128)
self.Conv3 = ConvBlock(128, 256)
se... |
def train(args, train_dataset, model, tokenizer):
if (args.local_rank in [(- 1), 0]):
tb_writer = SummaryWriter()
args.train_batch_size = (args.per_gpu_train_batch_size * max(1, args.n_gpu))
train_sampler = (RandomSampler(train_dataset) if (args.local_rank == (- 1)) else DistributedSampler(train_dat... |
.parametrize('a,b', [(10, 3), ((- 10), 3), (10, (- 3)), ((- 10), (- 3))])
_utils.test()
def test_c_style_mod(a, b):
z = ti.field(ti.i32, shape=())
def func(x: ti.i32, y: ti.i32):
z[None] = ti.raw_mod(x, y)
func(a, b)
assert (z[None] == _c_mod(a, b)) |
def _converId(img_id):
img_id = img_id.split('-')
if ('train' in img_id[0]):
new_id = int(img_id[1])
elif ('val' in img_id[0]):
new_id = (int(img_id[1]) + 1000000)
elif ('test' in img_id[0]):
new_id = (int(img_id[1]) + 2000000)
else:
pdb.set_trace()
return new_id |
def warmup_lr_scheduler(optimizer, warmup_iters, warmup_factor):
    """Return a LambdaLR that ramps the lr factor linearly up to 1.

    The multiplicative factor starts at ``warmup_factor`` at step 0 and
    interpolates linearly to 1 at ``warmup_iters``, staying 1 afterwards.
    """

    def lr_lambda(step):
        # Past the warmup window the factor is a constant 1.
        if step >= warmup_iters:
            return 1
        progress = float(step) / warmup_iters
        # Linear interpolation: warmup_factor -> 1 over the warmup window.
        return warmup_factor * (1 - progress) + progress

    return torch.optim.lr_scheduler.LambdaLR(optimizer, lr_lambda)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.