code stringlengths 101 5.91M |
|---|
def load_states_from_checkpoint(model_file: str) -> CheckpointState:
    """Load a serialized CheckpointState from *model_file*.

    The checkpoint is always mapped onto CPU so it can be loaded on a
    machine without the GPUs it was saved from.

    :param model_file: path to a torch-serialized checkpoint file
    :return: a CheckpointState built from the saved state dict
    """
    # Bug fix: the original passed two arguments to print() (logging-style
    # '%s' placeholders), which printed the literal "%s" instead of the path.
    print('Reading saved model from %s' % model_file)
    state_dict = torch.load(model_file, map_location=(lambda s, l: default_restore_location(s, 'cpu')))
    return CheckpointState(**state_dict)
def mk_z3consts_java(api_files):
java = get_component(JAVA_COMPONENT)
full_path_api_files = []
for api_file in api_files:
api_file_c = java.find_file(api_file, java.name)
api_file = os.path.join(api_file_c.src_dir, api_file)
full_path_api_files.append(api_file)
generated_files = ... |
class MultiProcessRamTensorStorage(MultiProcessTensorStorage):
def __init__(self, data_schema: Dict[(str, SizeData)], rank_to_buffer: Dict[(int, io.BytesIO)]):
rank_to_storage = {rank: SingleProcessRamTensorStorage(data_schema, buf) for (rank, buf) in rank_to_buffer.items()}
super().__init__(rank_to... |
def verify(path: Path):
from onnxruntime import InferenceSession, SessionOptions
from onnxruntime.capi.onnxruntime_pybind11_state import RuntimeException
print(f'Checking ONNX model loading from: {path} ...')
try:
onnx_options = SessionOptions()
_ = InferenceSession(path.as_posix(), onnx... |
def build_optimizer(cfg, model):
name = cfg.SOLVER.TYPE
if hasattr(torch.optim, name):
def builder(cfg, model):
return getattr(torch.optim, name)(group_weight(model, cfg.SOLVER.WEIGHT_DECAY), lr=cfg.SOLVER.BASE_LR, **cfg.SOLVER[name])
elif (name in _OPTIMIZER_BUILDERS):
builder =... |
class ConvVAE(GaussianLatentVAE):
def __init__(self, representation_size, architecture, encoder_class=CNN, decoder_class=DCNN, decoder_output_activation=identity, decoder_distribution='bernoulli', input_channels=1, imsize=48, init_w=0.001, min_variance=0.001, hidden_init=ptu.fanin_init):
super().__init__(re... |
def init(workspace_template: str='default', log_level: str='INFO', log_file: str=None, agg_fqdn: str=None, col_names=None):
if (col_names is None):
col_names = ['one', 'two']
workspace.create(WORKSPACE_PREFIX, workspace_template)
os.chdir(WORKSPACE_PREFIX)
workspace.certify()
aggregator.gene... |
def recompress_dataset(dataset):
    """Apply recompress_image to every record of *dataset*, then batch by 128."""
    recompressed = dataset.map(recompress_image)
    return recompressed.batch(128)
class DecoderConfig(FairseqDataclass):
    """Dataclass holding decoder selection options, parsed by the fairseq CLI."""

    # Decoder kind, one of DECODER_CHOICES (default 'viterbi'); the metadata
    # 'help' string is surfaced in the generated --help output.
    # NOTE(review): the field name shadows the builtin `type`, but renaming it
    # would change the CLI/config key — leave as-is.
    type: DECODER_CHOICES = field(default='viterbi', metadata={'help': 'The type of decoder to use'})
class Lbl2TransformerVec(Lbl2Vec):
def __init__(self, keywords_list: List[List[str]], documents: List[str], transformer_model: Union[(SentenceTransformer, AutoModel)]=SentenceTransformer('all-MiniLM-L6-v2'), label_names: List[str]=None, similarity_threshold: float=None, similarity_threshold_offset: float=0, min_num... |
def ConvertNetForDevice(net, device=None):
mnet = copy.deepcopy(net)
if (device is None):
device = scope.CurrentDeviceScope()
if core.IsGPUDeviceType(device.device_type):
device_prefix = 'gpu'
elif (device.device_type == caffe2_pb2.IDEEP):
device_prefix = 'ideep'
else:
... |
class CPUCountRequirement(Requirement):
MIN_CPU_COUNT = 2
def __init__(self):
super().__init__('CPUs >= {}'.format(self.MIN_CPU_COUNT))
def check(self):
cpu_count = self._get_cpu_count()
if (cpu_count < self.MIN_CPU_COUNT):
raise ValueError('Only {} CPUs available.'.forma... |
def register_Ns3MmWavePhySapProvider_methods(root_module, cls):
cls.add_constructor([])
cls.add_constructor([param('ns3::MmWavePhySapProvider const &', 'arg0')])
cls.add_method('SendControlMessage', 'void', [param('ns3::Ptr< ns3::MmWaveControlMessage >', 'msg')], is_pure_virtual=True, is_virtual=True)
c... |
class NLayerDiscriminator(Module):
def __init__(self, hp):
self.hp = hp
def call(self, x, y):
hp = self.hp
results = []
with nn.parameter_scope('layer_0'):
x = F.pad(x, (0, 0, 7, 7), 'reflect')
x = wn_conv(x, hp.ndf, (15,))
x = F.leaky_relu(x, ... |
class DLDataType(ctypes.Structure):
    """ctypes mirror of the DLPack ``DLDataType`` C struct."""

    # Field layout must match the C definition exactly:
    # type_code (enum), bits (8-bit element width), lanes (16-bit vector lanes).
    _fields_ = [('type_code', DLDataTypeCode), ('bits', ctypes.c_uint8), ('lanes', ctypes.c_uint16)]
class CombinerInterface():
def __init__(self, parent, name, address, fqdn, port, certificate=None, key=None, ip=None, config=None):
self.parent = parent
self.name = name
self.address = address
self.fqdn = fqdn
self.port = port
self.certificate = certificate
se... |
def test_accept(chromosome):
visitor = MagicMock()
chromosome.accept(visitor)
visitor.visit_test_suite_chromosome.assert_called_once_with(chromosome) |
class Ufunc(Func):
def __init__(self, name, signatures):
super(Ufunc, self).__init__(name, signatures)
self.doc = add_newdocs.get(name)
if (self.doc is None):
raise ValueError(('No docstring for ufunc %r' % name))
self.doc = textwrap.dedent(self.doc).strip()
def _get_... |
def make_fcs(fcs, inpt, activation=tf.nn.relu, initializer=None):
if (initializer is None):
initializer = tf.orthogonal_initializer(np.sqrt(2.0))
out = inpt
with tf.variable_scope('hiddens'):
for hidden in fcs:
out = layers.fully_connected(out, hidden, activation_fn=activation, w... |
class HashFunction():
    """Interface-style base class: subclasses implement compute() to hash a string to an int."""

    def __init__(self):
        # No state; present so subclasses can call super().__init__() uniformly.
        pass

    def compute(self, str1: str) -> int:
        """Return an integer hash of *str1*; this base implementation is a placeholder (returns None)."""
        pass
def test3():
time.sleep(3)
vj.open()
print('vj opening', flush=True)
time.sleep(2)
print('sending axes', flush=True)
joystickPosition = vj.generateJoystickPosition(wThrottle=32000, wAxisX=16000, wAxisY=16000)
vj.update(joystickPosition)
time.sleep(5)
joystickPosition = vj.generateJoy... |
class LevitFeatureExtractor(LevitImageProcessor):
    """Deprecated alias kept for backward compatibility; use LevitImageProcessor instead."""

    def __init__(self, *args, **kwargs) -> None:
        # Emit the deprecation notice, then delegate construction to the new class.
        deprecation_message = 'The class LevitFeatureExtractor is deprecated and will be removed in version 5 of Transformers. Please use LevitImageProcessor instead.'
        warnings.warn(deprecation_message, FutureWarning)
        super().__init__(*args, **kwargs)
class createBlackBackground(bpy.types.Operator):
bl_idname = 'object.create_black_bg'
bl_label = 'Create Black BG (2D Default)'
bl_options = {'REGISTER', 'UNDO'}
def execute(self, context):
scene = context.scene
myaddon = scene.my_addon
bpy.ops.mesh.primitive_plane_add()
... |
def train(flags):
plogger = FileWriter(xpid=flags.xpid, xp_args=flags.__dict__, rootdir=flags.savedir)
checkpointpath = os.path.expandvars(os.path.expanduser(('%s/%s/%s' % (flags.savedir, flags.xpid, 'model.tar'))))
T = flags.unroll_length
B = flags.batch_size
models = []
pre_models = []
ass... |
def build_sqa_zero_dataset(dataset_name, folder):
prompt_templates = get_sqa_prompt_templates()
os.makedirs(f'{folder}/{dataset_name}', exist_ok=True)
table_processor = get_default_processor(max_cell_length=10, max_input_length=MAX_LENGTH, model_name='google/flan-t5-xl')
for (idx, prompt_template) in en... |
@pytest.mark.parametrize('data_types', [[1], 'True', None, ''])
@pytest.mark.xfail(raises=ValueError)
def test_list_datasets_wrong_data_types(data_types):
    """Each malformed data_types value must make list_datasets raise ValueError."""
    # Fix: the decorators had lost their '@pytest.mark' prefix (a bare leading
    # '.parametrize(...)' is a syntax error); restored the standard form.
    list_datasets(data_types=data_types)
def force_fp32(apply_to=None, out_fp16=False):
def force_fp32_wrapper(old_func):
(old_func)
def new_func(*args, **kwargs):
if (not isinstance(args[0], torch.nn.Module)):
raise TypeError('_fp32 can only be used to decorate the method of nn.Module')
if (not (has... |
.parametrize('spcreator', formats_for_minmax)
class Test_MinMaxMixin1D():
def test_minmax(self, spcreator):
D = np.arange(5)
X = spcreator(D)
assert_equal(X.min(), 0)
assert_equal(X.max(), 4)
assert_equal((- X).min(), (- 4))
assert_equal((- X).max(), 0)
def test_m... |
def register_Ns3DataOutputCallback_methods(root_module, cls):
cls.add_constructor([])
cls.add_constructor([param('ns3::DataOutputCallback const &', 'arg0')])
cls.add_method('OutputSingleton', 'void', [param('std::string', 'key'), param('std::string', 'variable'), param('int', 'val')], is_pure_virtual=True, ... |
def iterate_eternally(indices):
    """Return an endless iterator over *indices*, reshuffled (via np.random.permutation) on every pass."""
    def endless():
        while True:
            for idx in np.random.permutation(indices):
                yield idx
    return endless()
class RandomCell(LTICell):
name = 'random'
def __init__(self, d_input, d_model, memory_size=1, memory_order=(- 1), **kwargs):
if (memory_order < 0):
memory_order = d_model
N = memory_order
A = (np.random.normal(size=(N, N)) / (N ** 0.5))
B = np.random.normal(size=(N, ... |
.parametrize('ctx, func_name', ctxs)
.parametrize('seed', [314])
def test_add2_inplace(seed, ctx, func_name):
from nbla_test_utils import inplace_function_test_helper
x0 = nn.Variable([2, 3, 4], need_grad=True)
x1 = nn.Variable([2, 3, 4], need_grad=True)
inplace_function_test_helper([x0, x1], F.add2, ct... |
def compute_l2_norm(h, subtract_mean=False):
    """Return the global L2 norm of *h* after dim_permute, optionally mean-centering first.

    :param h: input tensor; dim_permute is applied first (assumes the layout that
        helper expects — TODO confirm what permutation it performs).
    :param subtract_mean: if True, subtract the mean over dim 1 before taking the norm.
    :return: scalar tensor, sqrt of the sum of squared entries.
    """
    h = dim_permute(h)
    if subtract_mean:
        # Center along dim 1 so the norm measures variation, not offset.
        mn = h.mean(dim=1, keepdim=True)
        h = h - mn
    # Fix: dropped the unused local `N = h.size(1)` from the original.
    l2_norm = (h ** 2).sum()
    return torch.sqrt(l2_norm)
def matrix_product_transpose_test(A: dace.float32[(K, M)], B: dace.float32[(N, K)], C: dace.float32[(M, N)]):
    """DaCe program body: C = A^T @ B^T  (A is K*M, B is N*K, so A^T @ B^T is M*N, matching C)."""
    # Fix: the original was missing the matrix-multiplication operator between
    # the two transposes, which is a syntax error.
    # NOTE(review): the @dace.program decorator is presumably applied at the
    # original definition site — confirm against the full file.
    C[:] = np.transpose(A) @ np.transpose(B)
class InterfaceInit(Converter):
def __init__(self, interface):
self.name_init = ('_%s_init_' % interface.name())
self.interface = interface
self.relation_symbols = interface._relation_symbols()
def symbol(self, ex):
if (self.interface.name() == 'maxima'):
return ('_SA... |
def handle_stacktraces(test_results):
total_stacktraces = test_results.split('\n')[1:(- 1)]
stacktraces = []
for stacktrace in total_stacktraces:
try:
line = stacktrace[:stacktrace.index(' ')].split(':')[(- 2)]
error_message = stacktrace[stacktrace.index(' '):]
st... |
class WeightPredictor(abc.ABC):
def __init__(self, optimizer, fix_fn=None, scheduler=None, nag_with_predictor=False, true_weights_storage=None):
self.optimizer = optimizer
self.fix_fn = fix_fn
self.scheduler = scheduler
self.nag_with_predictor = nag_with_predictor
if nag_with... |
class FlaxGPTNeoPreTrainedModel(metaclass=DummyObject):
    """Auto-generated placeholder used when the Flax backend is not installed."""

    # Backends this dummy stands in for.
    _backends = ['flax']

    def __init__(self, *args, **kwargs):
        # requires_backends raises a helpful error when 'flax' is unavailable,
        # so instantiating the dummy never silently succeeds.
        requires_backends(self, ['flax'])
def __getattr__(name):
    # Module-level __getattr__ (PEP 562): lazily resolve attributes of the
    # deprecated 'io.netcdf' namespace by delegating to the private '_netcdf'
    # module; the shared helper presumably emits a deprecation warning as well —
    # confirm against _sub_module_deprecation's implementation.
    return _sub_module_deprecation(sub_package='io', module='netcdf', private_modules=['_netcdf'], all=__all__, attribute=name)
def capital_M(n):
    """Return prod over divisors d of n of d^(d * mu(n/d)), as a rational number."""
    n = ZZ(n)
    # Keep n / d (exact Sage integer division on divisors) rather than //.
    factors = [d ** (d * moebius(n / d)) for d in divisors(n)]
    return QQ.prod(factors)
class AzureCognitiveSearch():
def __init__(self, search_service_name: str, search_api_key: str, search_index_name: str, field_text: str, field_score: str):
self.search_service_name = search_service_name
self.search_api_key = search_api_key
self.search_index_name = search_index_name
s... |
class DIDEMODataset(BaseDataset):
def __init__(self, *args, split='', **kwargs):
assert (split in ['train', 'val', 'test'])
self.split = split
self.metadata = None
if (split == 'train'):
names = ['didemo_train']
elif (split == 'val'):
names = ['didemo_... |
def merge_beams(beam_1, beam_2, beam_size):
if ((len(beam_1) == 0) or (len(beam_2) == 0)):
return (beam_1, beam_2)
annoated_beam_1 = [('beam_1', b) for b in beam_1]
annoated_beam_2 = [('beam_2', b) for b in beam_2]
merged_beams = (annoated_beam_1 + annoated_beam_2)
merged_beams.sort(key=(lam... |
def parse_command_line(args):
from .Main import CompilationOptions, default_options
pending_arg = []
def pop_arg():
if ((not args) or pending_arg):
bad_usage()
if (('=' in args[0]) and args[0].startswith('--')):
(name, value) = args.pop(0).split('=', 1)
pe... |
def generate(model, cond, top_k, top_p):
    """Sample from *model* repeatedly until the generated text contains at least one letter."""
    while True:
        candidate = model.generate(cond=cond, top_k=top_k, top_p=top_p)
        if any(map(str.isalpha, candidate)):
            return candidate
class SubSectionTitleOrder():
def __init__(self, src_dir):
self.src_dir = src_dir
self.regex = re.compile('^([\\w ]+)\\n-', re.MULTILINE)
def __repr__(self):
return ('<%s>' % (self.__class__.__name__,))
def __call__(self, directory):
src_path = os.path.normpath(os.path.join(s... |
def save_ckpt(state, path):
def save_arrays(arrays, fname):
with open(fname, 'wb') as f:
np.savez(f, *arrays)
with print_time(f'Saving model in {path}'):
save_arrays(jax.tree_flatten(state['model'])[0], f'{path}/model/{jax.process_index()}.npz')
with print_time(f'Saving opt in {p... |
def decoration_hop() -> GoalDirectedBenchmark:
smiles = 'CCCOc1cc2ncnc(Nc3ccc4ncsc4c3)c2cc1S(=O)(=O)C(C)(C)C'
pharmacophor_sim = TanimotoScoringFunction(smiles, fp_type='PHCO', score_modifier=ClippedScoreModifier(upper_x=0.85))
deco1 = SMARTSScoringFunction('CS([#6])(=O)=O', inverse=True)
deco2 = SMARTS... |
_utils.test(require=ti.extension.quant, debug=True)
def test_1D_quant_array_fixed():
qfxt = ti.types.quant.fixed(bits=8, max_value=2)
x = ti.field(dtype=qfxt)
N = 4
ti.root.quant_array(ti.i, N, max_num_bits=32).place(x)
def set_val():
for i in range(N):
x[i] = (i * 0.5)
def v... |
class TestVoigtProfile():
.parametrize('x, sigma, gamma', [(np.nan, 1, 1), (0, np.nan, 1), (0, 1, np.nan), (1, np.nan, 0), (np.nan, 1, 0), (1, 0, np.nan), (np.nan, 0, 1), (np.nan, 0, 0)])
def test_nan(self, x, sigma, gamma):
assert np.isnan(sc.voigt_profile(x, sigma, gamma))
.parametrize('x, desired... |
def parse_args():
parser = argparse.ArgumentParser(description='Train a classification model')
parser.add_argument('--cfg', dest='cfg_file', help='Config file path', required=True, type=str)
parser.add_argument('--repeat', dest='repeat', help='Repeat how many random seeds', default=1, type=int)
parser.a... |
class SchemeMorphism_polynomial_affine_space_field(SchemeMorphism_polynomial_affine_space):
_method
def weil_restriction(self):
if any((isinstance(f, FractionFieldElement) for f in self)):
raise TypeError('coordinate functions must be polynomials')
DS = self.domain()
R = DS.c... |
def _act_backward(ctx, x, dx):
    """Run the in-place backward pass for the activation recorded on *ctx*.

    Unknown activation codes fall through silently, matching the original.
    """
    activation = ctx.activation
    if activation == ACT_NONE:
        return
    if activation == ACT_LEAKY_RELU:
        _backend.leaky_relu_backward(x, dx, ctx.slope)
    elif activation == ACT_ELU:
        _backend.elu_backward(x, dx)
class PointRCNN(Detector3DTemplate):
def __init__(self, model_cfg, num_class, dataset):
super().__init__(model_cfg=model_cfg, num_class=num_class, dataset=dataset)
self.module_list = self.build_networks()
def forward(self, batch_dict):
for cur_module in self.module_list:
batc... |
def test_with_bert(pretrain_file, tmp_path):
    """Training with a tiny BERT must not leave BERT weights in the saved model file."""
    trainer = run_training(pretrain_file, tmp_path, '--bert_model', 'hf-internal-testing/tiny-bert')
    save_dir = trainer.args['save_dir']
    save_name = trainer.args['save_name']
    saved_path = os.path.join(save_dir, save_name)
    assert not model_file_has_bert(saved_path)
class VGG19(torch.nn.Module):
def __init__(self):
super(VGG19, self).__init__()
features = models.vgg19(pretrained=True).features
self.relu1_1 = torch.nn.Sequential()
self.relu1_2 = torch.nn.Sequential()
self.relu2_1 = torch.nn.Sequential()
self.relu2_2 = torch.nn.Seq... |
def func():
    """Build a Foo, set attr1, append it to attr2 (rebinding, not mutating), and return the list."""
    instance = Foo()
    instance.attr1 = 1
    # Rebind attr2 to a new list rather than mutating in place, as the original does.
    instance.attr2 = instance.attr2 + [instance.attr1]
    return instance.attr2
def plot_parameter(parameter_name: str, train_values: Any, val_values: Any, tags: Any, output_path: str) -> None:
plot_1d(train_values, ('train_' + parameter_name), output_path, ['epoch', parameter_name], tags, (10, 10), 'plot', len(train_values))
plot_1d(val_values, ('val_' + parameter_name), output_path, ['ep... |
class Decoder_MDCBlock1(torch.nn.Module):
def __init__(self, num_filter, num_ft, kernel_size=4, stride=2, padding=1, bias=True, activation='prelu', norm=None, mode='iter1'):
super(Decoder_MDCBlock1, self).__init__()
self.mode = mode
self.num_ft = (num_ft - 1)
self.down_convs = nn.Mod... |
class NonInteractiveSpinner(SpinnerInterface):
def __init__(self, message, min_update_interval_seconds=60):
self._message = message
self._finished = False
self._rate_limiter = RateLimiter(min_update_interval_seconds)
self._update('started')
def _update(self, status):
asse... |
def create_diffuser(cfg: DictConfig, *args: List, **kwargs: Dict) -> nn.Module:
eps_model = MODEL.get(cfg.model.name)(cfg.model, *args, **kwargs)
has_obser = (cfg.task.has_observation if ('has_observation' in cfg.task) else False)
diffuser = DIFFUSER.get(cfg.diffuser.name)(eps_model, cfg.diffuser, has_obser... |
def test_write_statistics_no_individual(search_statistics):
assert (not search_statistics.write_statistics()) |
def _shell_pop_print(old_call):
if (not pybuf_enabled):
return old_call
info('Graphical python shell detected, using wrapped sys.stdout')
(old_call)
def new_call(*args, **kwargs):
ret = old_call(*args, **kwargs)
print(_ti_core.pop_python_print_buffer(), end='')
return ret... |
class CrossEntropyLoss(_WeightedLoss):
def __init__(self, weight=None, size_average=None, ignore_index=(- 100), reduce=None, reduction='elementwise_mean'):
super(CrossEntropyLoss, self).__init__(weight, size_average, reduce, reduction)
self.ignore_index = ignore_index
def forward(self, input, ta... |
def get_activation(activation_string):
    """Look up *activation_string* in the ACT2FN mapping.

    Raises KeyError (with the same message as before) when the name is unknown.
    """
    try:
        return ACT2FN[activation_string]
    except KeyError:
        raise KeyError('function {} not found in ACT2FN mapping {} or torch.nn.functional'.format(activation_string, list(ACT2FN.keys())))
def align_pos(original_sentence, corrected_sentence):
(orig, cor) = align(original_sentence, corrected_sentence)
(orig_out, cor_out) = ([[]], [[]])
for tok in orig:
if (tok.pos == 'WS'):
orig_out.append([])
else:
orig_out[(- 1)].append((tok.token, tok.pos))
for to... |
class SL2Z_class(Gamma0_class):
def __init__(self):
Gamma0_class.__init__(self, 1)
def __reduce__(self):
return (_SL2Z_ref, ())
def _element_constructor_(self, x, check=True):
return ArithmeticSubgroupElement(self, x, check=check)
def _contains_sl2(self, a, b, c, d):
retu... |
class Cn2An(object):
def __init__(self):
self.conf = utils.get_default_conf()
self.ac = An2Cn()
def cn2an(self, inputs=None, mode='strict'):
if (inputs is not None):
if (mode not in ['strict', 'normal', 'smart']):
raise ValueError('mode strict normal smart !'... |
@pytest.mark.parametrize('dataset_class', [Sinusoid, Harmonic, SinusoidAndLine])
def test_toy_task(dataset_class):
    """Each toy dataset class must yield Task instances of the requested size."""
    # Fix: the decorator had lost its '@pytest.mark' prefix (a bare leading
    # '.parametrize(...)' is a syntax error); restored the standard form.
    dataset = dataset_class(10, num_tasks=1000, noise_std=None)
    task = dataset[0]
    assert isinstance(task, Task)
    assert (len(task) == 10)
def gen_grid(nx=5, ny=5, nt=10, Lx=1.0, Ly=1.0, T=1.0):
(x_grid, y_grid, t_grid) = np.meshgrid(np.linspace(0, Lx, nx)[1:(- 1)], np.linspace(0, Ly, ny)[1:(- 1)], np.linspace(0, T, nt)[1:], indexing='ij')
(x_grid, y_grid, t_grid) = [x.reshape((- 1), 1) for x in [x_grid, y_grid, t_grid]]
(x_init, y_init, t_ini... |
def validate_pe_ruc(df: Union[(str, pd.Series, dd.Series, pd.DataFrame, dd.DataFrame)], column: str='') -> Union[(bool, pd.Series, pd.DataFrame)]:
if isinstance(df, (pd.Series, dd.Series)):
return df.apply(ruc.is_valid)
elif isinstance(df, (pd.DataFrame, dd.DataFrame)):
if (column != ''):
... |
_torch
class DeiTRobertaModelTest(VisionTextDualEncoderMixin, unittest.TestCase):
def get_pretrained_model_and_inputs(self):
model = VisionTextDualEncoderModel.from_vision_text_pretrained('hf-internal-testing/tiny-random-deit', 'hf-internal-testing/tiny-random-roberta')
batch_size = 13
pixel... |
def activation_name_to_func(activation_name):
assert isinstance(activation_name, str)
if isinstance(activation_name, str):
if (activation_name == 'linear'):
act_fn = tf.identity
elif (activation_name == 'relu'):
act_fn = tf.nn.relu
elif (activation_name == 'elu'):... |
class SquadProcessor(DataProcessor):
train_file = None
dev_file = None
def _get_example_from_tensor_dict(self, tensor_dict, evaluate=False):
if (not evaluate):
answer = tensor_dict['answers']['text'][0].numpy().decode('utf-8')
answer_start = tensor_dict['answers']['answer_sta... |
class InvertedResidual(nn.Module):
def __init__(self, inp, oup, stride, expand_ratio, filter_size=1):
super(InvertedResidual, self).__init__()
self.stride = stride
assert (stride in [1, 2])
hidden_dim = int(round((inp * expand_ratio)))
self.use_res_connect = ((self.stride == ... |
class TestDiscretePolicies(TfGraphTestCase):
def setup_method(self):
super().setup_method()
self.env = GarageEnv(DummyDiscreteEnv())
def teardown_method(self):
self.env.close()
super().teardown_method()
def test_categorial_gru_policy(self):
categorical_gru_policy = Ca... |
def main(ranking_top_k_path, output_path, jsonl_corpus_path):
json_corpus = load_json_corpus(jsonl_corpus_path)
top_k = 500
with jsonlines.open(output_path, mode='w') as writer:
first_stage_ranking_dict = load_ranking(ranking_top_k_path, top_k=None)
for (query_id, retrieved_docs) in first_st... |
class ParserTfds(Parser):
def __init__(self, root, name, split='train', is_training=False, batch_size=None, download=False, repeats=0, seed=42, input_name='image', input_image='RGB', target_name='label', target_image='', prefetch_size=None, shuffle_size=None, max_threadpool_size=None):
super().__init__()
... |
def parse_argv(parser):
    """Register the evaluation-related CLI arguments on *parser*."""
    # --eval_results: one or more paths; mandatory.
    parser.add_argument('--eval_results', help='path to eval json files', nargs='+', required=True)
def test_graphql_wsgi_loader(graphql_path, graphql_app, run_wsgi_test):
    """A schema loaded from a WSGI GraphQL app must expose a usable POST strategy."""
    loaded_schema = loaders.from_wsgi(graphql_path, graphql_app)
    post_strategy = loaded_schema[graphql_path]['POST'].as_strategy()
    run_wsgi_test(post_strategy)
def main():
gpu_config = tf.ConfigProto()
gpu_config.gpu_options.allow_growth = True
with tf.Session(config=gpu_config) as sess:
_inputs = {'query': tf.placeholder(dtype=tf.float32, shape=[None, flags.dim_text]), 'answer': tf.placeholder(dtype=tf.float32, shape=[None, 5, flags.dim_text]), 'story': t... |
class BopomofoConverter(object):
def to_bopomofo(self, pinyin, **kwargs):
pinyin = self._pre_convert(pinyin)
for (find_re, replace) in BOPOMOFO_REPLACE:
pinyin = find_re.sub(replace, pinyin)
pinyin = ''.join((BOPOMOFO_TABLE.get(x, x) for x in pinyin))
return pinyin
de... |
def crop_to_bounding_box(image, bbox):
    """Crop *image* to *bbox* given as (x, y, width, height).

    Converts the (x, y, w, h) box to the (left, upper, right, lower) form
    expected by image.crop before delegating.
    """
    x, y, w, h = bbox
    return image.crop((x, y, x + w, y + h))
def demo(seed=None):
if (seed is None):
seed = np.random.randint((2 ** 32))
print('Setting seed to ', seed)
np.random.seed(seed)
K = 5
T = 10000
dt = 1
dt_max = 50
B = 1
(S, true_model) = sample_from_network_hawkes(K, T, dt, dt_max, B)
test_basis = true_model.basis
te... |
def resnet50_fc512(num_classes, loss='softmax', pretrained=True, **kwargs):
    """Build a ResNet-50 model with a 512-d FC embedding head and last stride 1."""
    net = ResNet(num_classes=num_classes, loss=loss, block=Bottleneck, layers=[3, 4, 6, 3], last_stride=1, fc_dims=[512], dropout_p=None, **kwargs)
    if pretrained:
        # Initialize the backbone from the published resnet50 weights.
        init_pretrained_weights(net, model_urls['resnet50'])
    return net
def rotate_image(image, angle):
    """Rotate *image* by *angle* degrees about its center using nearest-neighbour sampling.

    :param image: numpy image array (rows x cols [x channels]).
    :param angle: rotation angle in degrees (counter-clockwise, per OpenCV convention).
    :return: the rotated image on a canvas of the same size.
    """
    image_center = tuple(np.array(image.shape[:2]) / 2)
    rot_mat = cv2.getRotationMatrix2D(image_center, angle, 1.0)
    # Fix: dropped the unused radians conversion (`angle_r`) from the original.
    # NOTE(review): dsize is shape[:2] == (rows, cols) while cv2.warpAffine
    # expects (width, height); this is only correct for square images — confirm
    # with callers before changing.
    return cv2.warpAffine(image, rot_mat, image.shape[:2], flags=cv2.INTER_NEAREST)
def check_wmt_test_bleu(raw_folder, wmt_lang_pairs):
not_matchings = []
for (wmt, src_tgts) in wmt_lang_pairs:
for src_tgt in src_tgts:
print(f'checking test bleus for: {src_tgt} at {wmt}')
(src, tgt) = src_tgt.split('-')
(ssrc, stgt) = (src[:2], tgt[:2])
... |
class ComputeStatisticsForBlobs(NetModifier):
def __init__(self, blobs, logging_frequency):
self._blobs = blobs
self._logging_frequency = logging_frequency
self._field_name_suffix = '_summary'
def modify_net(self, net, init_net=None, grad_map=None, blob_to_device=None, modify_output_reco... |
class LoggingHandler(logging.Handler):
def __init__(self, level=logging.NOTSET):
super().__init__(level)
def emit(self, record):
try:
msg = self.format(record)
tqdm.tqdm.write(msg)
self.flush()
except (KeyboardInterrupt, SystemExit):
raise
... |
def got() -> operations.GraphOfOperations:
operations_graph = operations.GraphOfOperations()
plans = operations.Generate(1, 1)
operations_graph.append_operation(plans)
sorted_sublists = []
for i in range(1, 9):
list_id = f'List {i}'
sub_list = operations.Selector((lambda thoughts, li... |
def OA_9_135():
from .bibd import BIBD_from_difference_family
from .difference_family import singer_difference_set
(G, B) = singer_difference_set(16, 2)
PG16 = BIBD_from_difference_family(G, B)
n = 273
assert all(((sum((((x % 39) == 0) for x in B)) in [0, 1, 3]) for B in PG16))
lines = [B fo... |
def main(hp, args):
stft = TacotronSTFT(filter_length=hp.audio.filter_length, hop_length=hp.audio.hop_length, win_length=hp.audio.win_length, n_mel_channels=hp.audio.n_mel_channels, sampling_rate=hp.audio.sampling_rate, mel_fmin=hp.audio.mel_fmin, mel_fmax=hp.audio.mel_fmax)
wav_files = glob.glob(os.path.join(a... |
class DomainEmbedding(nn.Module):
def __init__(self, n_domains, domain_dim) -> None:
super().__init__()
self.embedding = nn.Embedding(n_domains, domain_dim)
self.output_dim = domain_dim
def forward(self, batch):
return {'domain-feature': self.embedding(batch['domains'])}
def ... |
(num_gpus=1, resources={'machine': 1})
class DataWorker(object):
def __init__(self, index, model_type='custom', device='cpu', enable_fail=True):
self.device = device
self.model = ConvNet(model_type).to(device)
if ((index == 2) and enable_fail):
import threading
def ki... |
def get_sents_from_tags(text, sent_start_tag, sent_end_tag):
    """Extract the non-empty sentence strings delimited by the given start/end tags.

    Tags are interpolated into the regex verbatim (unescaped), exactly as before.
    """
    pattern = '%s (.+?) %s' % (sent_start_tag, sent_end_tag)
    return [sent for sent in re.findall(pattern, text) if len(sent) > 0]
class WeightSpaceElement(CombinatorialFreeModule.Element):
def scalar(self, lambdacheck):
if ((lambdacheck not in self.parent().coroot_lattice()) and (lambdacheck not in self.parent().coroot_space())):
raise ValueError('{} is not in the coroot space'.format(lambdacheck))
zero = self.pare... |
def build_network(nb_classes, input_shape, resnet_layers=101, classifier='psp', sigmoid=False, output_size=None, num_input_channels=4):
inp = Input((input_shape[0], input_shape[1], num_input_channels))
if (resnet_layers == 101):
res = ResNet101(inp)
else:
ValueError('Resnet {} does not exist... |
class fisk_gen(burr_gen):
def _shape_info(self):
return [_ShapeInfo('c', False, (0, np.inf), (False, False))]
def _pdf(self, x, c):
return burr._pdf(x, c, 1.0)
def _cdf(self, x, c):
return burr._cdf(x, c, 1.0)
def _sf(self, x, c):
return burr._sf(x, c, 1.0)
def _logpd... |
.parametrize('dtype', ([torch.float16, torch.float32] + ([torch.bfloat16] if is_sm8x else [])))
.parametrize('inplace_backward', [False, True])
.parametrize('smoothing', [0.0, 0.9])
.parametrize('vocab_size', [50257])
def test_cross_entropy_loss_apex(vocab_size, smoothing, inplace_backward, dtype):
device = 'cuda'
... |
def test_option_unknown_1_parm():
    """An option datashape with parameters must parse to OptionType and round-trip through str()."""
    text = 'option[unknown, parameters={"foo": "bar"}]'
    parsed = ak.types.from_datashape(text, highlevel=False)
    assert isinstance(parsed, ak.types.OptionType)
    assert str(parsed) == text
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.