function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def __init__(self):
super(Run, self).__init__()
self._test = None | ChromiumWebApps/chromium | [
216,
323,
216,
1,
1392992388
] |
def AddCommandLineOptions(self, parser):
test.Test.AddCommandLineOptions(parser)
# Allow tests to add their own command line options.
matching_tests = {}
for arg in sys.argv[1:]:
matching_tests.update(_MatchTestName(arg))
for test_class in matching_tests.itervalues():
test_class.AddTest... | ChromiumWebApps/chromium | [
216,
323,
216,
1,
1392992388
] |
def Run(self, options, args):
return min(255, self._test().Run(copy.copy(options))) | ChromiumWebApps/chromium | [
216,
323,
216,
1,
1392992388
] |
def _GetScriptName():
return os.path.basename(sys.argv[0]) | ChromiumWebApps/chromium | [
216,
323,
216,
1,
1392992388
] |
def _MatchTestName(input_test_name):
def _Matches(input_string, search_string):
if search_string.startswith(input_string):
return True
for part in search_string.split('.'):
if part.startswith(input_string):
return True
return False
# Exact matching.
if input_test_name in test_alia... | ChromiumWebApps/chromium | [
216,
323,
216,
1,
1392992388
] |
def cltv_modify_tx(tx, prepend_scriptsig, nsequence=None, nlocktime=None):
    """Rewrite a one-input tx so it can exercise OP_CHECKLOCKTIMEVERIFY.

    Args:
        tx: Transaction with exactly one input.
        prepend_scriptsig: Script elements to prepend to vin[0].scriptSig.
        nsequence: New nSequence for vin[0], or None to leave it untouched.
        nlocktime: New nLockTime for the tx; only applied together with
            nsequence (see below), or left untouched.
    """
    assert_equal(len(tx.vin), 1)
    if nsequence is not None:
        tx.vin[0].nSequence = nsequence
        # BIP 65: nLockTime is only meaningful when the input is non-final,
        # so the two overrides are applied as a pair. The flattened original
        # would assign nLockTime = None when no override was requested.
        tx.nLockTime = nlocktime
    tx.vin[0].scriptSig = CScript(prepend_scriptsig + list(CScript(tx.vin[0].scriptSig)))
    tx.rehash()
150,
55,
150,
9,
1456398219
] |
def cltv_validate(tx, height):
    # Modify the signature in vin 0 and nSequence/nLockTime of the tx to pass CLTV
    # scheme = [prepend_scriptsig, nsequence, nlocktime]:
    #   - push `height`, OP_CHECKLOCKTIMEVERIFY, OP_DROP in front of the sig,
    #   - nSequence = 0 makes the input non-final so nLockTime is enforced,
    #   - nLockTime = height satisfies the CLTV comparison at that height.
    scheme = [[CScriptNum(height), OP_CHECKLOCKTIMEVERIFY, OP_DROP], 0, height]
    cltv_modify_tx(tx, prepend_scriptsig=scheme[0], nsequence=scheme[1], nlocktime=scheme[2])
150,
55,
150,
9,
1456398219
] |
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [[
'-whitelist=noban@127.0.0.1',
'-par=1', # Use only one script thread to get the exact reject reason for testing
'-acceptnonstdtxn=1', # cltv_invalidate is nonstandard
]]
self.setup_cl... | bitcoinknots/bitcoin | [
150,
55,
150,
9,
1456398219
] |
def run_test(self):
peer = self.nodes[0].add_p2p_connection(P2PInterface())
wallet = MiniWallet(self.nodes[0], mode=MiniWalletMode.RAW_OP_TRUE)
self.test_cltv_info(is_active=False)
self.log.info("Mining %d blocks", CLTV_HEIGHT - 2)
wallet.generate(10)
self.nodes[0].gene... | bitcoinknots/bitcoin | [
150,
55,
150,
9,
1456398219
] |
def entities(hass):
    """Initialize the test switch platform and yield its entity list."""
    switch_platform = getattr(hass.components, "test.switch")
    switch_platform.init()
    yield switch_platform.ENTITIES
58698,
22318,
58698,
2794,
1379402988
] |
def __init__(self, sleeptime = 0.01):
    """Create a Lagger that remembers its per-cycle delay.

    Args:
        sleeptime: Delay in seconds -- presumably consumed once per
            scheduling pass by the component's main loop; confirm there.
    """
    super(Lagger, self).__init__()
    self.sleeptime = sleeptime
13,
3,
13,
2,
1348148442
] |
def setUp(self):
    """Locate the qucs reference-project directory and build a dummy media."""
    here = os.path.dirname(os.path.abspath(__file__))
    self.files_dir = os.path.join(here, 'qucs_prj')
    self.dummy_media = DefinedGammaZ0(
        frequency=Frequency(1, 100, 21, 'ghz'),
        gamma=1j,
        z0=50,
    )
498,
231,
498,
47,
1326983158
] |
def test_resistor(self):
    """Compare a 1-ohm skrf resistor against the qucs reference network."""
    ref_path = os.path.join(self.files_dir, 'resistor,1ohm.s2p')
    reference = Network(ref_path)
    # The media must share the reference's frequency axis for equality.
    self.dummy_media.frequency = reference.frequency
    built = self.dummy_media.resistor(1)
    self.assertEqual(reference, built)
498,
231,
498,
47,
1326983158
] |
def test_inductor(self):
"""
"""
fname = os.path.join(self.files_dir,\
'inductor,p1nH.s2p')
qucs_ntwk = Network(fname)
self.dummy_media.frequency = qucs_ntwk.frequency
skrf_ntwk = self.dummy_media.inductor(.1e-9)
self.assertEqual(qucs_ntwk, skrf_nt... | scikit-rf/scikit-rf | [
498,
231,
498,
47,
1326983158
] |
def test_vector_gamma_z0_media(self):
"""
test ability to create a Media from vector quantities for gamma/z0
"""
freq = Frequency(1,10,101)
a = DefinedGammaZ0(freq,
gamma = 1j*npy.ones(len(freq)) ,
z0 = 50*npy.ones(len(freq))... | scikit-rf/scikit-rf | [
498,
231,
498,
47,
1326983158
] |
def test_from_csv(self):
    """Round-trip the dummy media through write_csv/from_csv."""
    fname = os.path.join(self.files_dir, 'out.csv')
    self.dummy_media.write_csv(fname)
    try:
        a_media = DefinedGammaZ0.from_csv(fname)
        self.assertEqual(a_media, self.dummy_media)
    finally:
        # Remove the temp file even when the comparison fails, so a failed
        # run does not leave out.csv behind for the next one.
        os.remove(fname)
498,
231,
498,
47,
1326983158
] |
def setUp(self):
    """Build a simple analytical media on a 1-100 GHz, 21-point grid."""
    grid = Frequency(1, 100, 21, 'GHz')
    self.dummy_media = DefinedGammaZ0(frequency=grid, gamma=1j, z0=50)
498,
231,
498,
47,
1326983158
] |
def test_s_shunt_element(self):
"""
Shunt elements of admittance Y:
β---------β
|
[Y]
|
β---------β
have S matrix of the form:
[ -Y Z0 / (Y Z0 + 2) 2/(Y Z0 + 2) ]
[ 2/(Y Z0 + 2) Z/Z0 / (Y Z0 + 2) ]
"""
... | scikit-rf/scikit-rf | [
498,
231,
498,
47,
1326983158
] |
def test_s_lossy_line(self):
"""
Lossy transmission line of characteristic impedance Z0, length l
and propagation constant gamma = alpha + j beta
β---------β
β---------β
has ABCD matrix of the form:
[ cosh(gamma l) Z0 sinh(gamma l) ]
[ 1/Z0 sinh(... | scikit-rf/scikit-rf | [
498,
231,
498,
47,
1326983158
] |
def setUp(self):
    """Create the dummy lossless media used by the ABCD-matrix tests."""
    band = Frequency(1, 100, 21, 'GHz')
    self.dummy_media = DefinedGammaZ0(frequency=band, gamma=1j, z0=50)
498,
231,
498,
47,
1326983158
] |
def test_abcd_shunt_element(self):
"""
Shunt elements of admittance Y:
β---------β
|
[Y]
|
β---------β
have ABCD matrix of the form:
[ 1 0 ]
[ Y 1 ]
"""
R = 1.0 # Ohm
ntw = self.dummy_media.shunt(self... | scikit-rf/scikit-rf | [
498,
231,
498,
47,
1326983158
] |
def test_abcd_thru(self):
"""
Thru has ABCD matrix of the form:
[ 1 0 ]
[ 0 1 ]
"""
ntw = self.dummy_media.thru()
npy.testing.assert_array_almost_equal(ntw.a[:,0,0], 1.0)
npy.testing.assert_array_almost_equal(ntw.a[:,0,1], 0.0)
npy.testing.assert... | scikit-rf/scikit-rf | [
498,
231,
498,
47,
1326983158
] |
def glob_slash(dirname):
    r"""Like glob.glob, but with backslashes in results normalized to '/'."""
    matches = glob.glob(dirname)
    return [m.replace('\\', '/') for m in matches]
21,
16,
21,
3,
1435959644
] |
def exits(request, context):
    """Delegate exit lookup to the module-level `ignore` handler.

    NOTE(review): `ignore` is defined elsewhere in this module -- presumably
    a stub/intercept policy object; confirm its get_exit contract there.
    """
    return ignore.get_exit(request, context)
1,
1,
1,
1,
1434357123
] |
def setUp(self):
super().setUp()
# create users
self.bad_user = UserFactory.create(
username='bad_user',
)
self.good_user = UserFactory.create(
username='good_user',
)
self.non_staff = UserFactory.create(
username='non_staff',
... | edx/edx-platform | [
6290,
3437,
6290,
280,
1369945238
] |
def test_can_access_manage_account_page(self):
    """An admin client gets a 200 from the manage-user-standing page."""
    url = reverse('manage_user_standing')
    response = self.admin_client.get(url, {'user': self.admin})
    assert response.status_code == 200
6290,
3437,
6290,
280,
1369945238
] |
def test_disable_account(self):
assert UserStanding.objects.filter(user=self.good_user).count() == 0
response = self.admin_client.post(reverse('disable_account_ajax'), { # lint-amnesty, pylint: disable=unused-variable
'username': self.good_user.username,
'account_action': 'disab... | edx/edx-platform | [
6290,
3437,
6290,
280,
1369945238
] |
def test_reenable_account(self):
response = self.admin_client.post(reverse('disable_account_ajax'), { # lint-amnesty, pylint: disable=unused-variable
'username': self.bad_user.username,
'account_action': 'reenable'
})
assert UserStanding.objects.get(user=self.bad_user).a... | edx/edx-platform | [
6290,
3437,
6290,
280,
1369945238
] |
def test_non_staff_cant_access_disable_view(self):
    """Non-staff users get a 404 from the manage-user-standing page."""
    url = reverse('manage_user_standing')
    response = self.non_staff_client.get(url, {'user': self.non_staff})
    assert response.status_code == 404
6290,
3437,
6290,
280,
1369945238
] |
def initiateOp(self, handle, seekpos, buffer):
assert len(buffer) > 0
assert seekpos >= 0
df = Deferred()
try:
self.op(handle, seekpos, buffer,
self.ovDone, (handle, buffer))
except:
df.errback(Failure())
else:
self.... | epsylon3/torrentflux | [
130,
67,
130,
40,
1274203656
] |
def ovDone(self, ret, bytes, (handle, buffer)):
    """Overlapped-I/O completion callback (Python 2 tuple-parameter syntax).

    Args:
        ret: Nonzero Win32 error indicator for the completed operation.
        bytes: Number of bytes actually transferred.
        (handle, buffer): Context captured when the op was initiated.
    """
    # Hand off the pending Deferred exactly once; later completions must
    # not see a stale self.df.
    df = self.df
    del self.df
    if ret or not bytes:
        # An error code or a zero-byte transfer is a failure. Raising
        # WinError inside try/except gives errback a Failure that carries
        # the Win32 error and a traceback.
        try:
            raise ctypes.WinError()
        except:
            df.errback(Failure())
    else:
        self.opComplete(df, bytes, buffer)
130,
67,
130,
40,
1274203656
] |
def opComplete(self, df, bytes, buffer):
    """Complete a read: deliver the first `bytes` bytes of the buffer."""
    data = buffer[:bytes]
    df.callback(data)
130,
67,
130,
40,
1274203656
] |
def opComplete(self, df, bytes, buffer):
    """Complete a write: report the transferred byte count (buffer unused)."""
    count = bytes
    df.callback(count)
130,
67,
130,
40,
1274203656
] |
def __init__(self, handle):
from twisted.internet import reactor
self.reactor = reactor
self.handle = handle
self.osfhandle = win32file._get_osfhandle(self.handle.fileno())
self.mode = self.handle.mode
# CloseHandle automatically calls CancelIo
self.close = self.h... | epsylon3/torrentflux | [
130,
67,
130,
40,
1274203656
] |
def write(self, data):
    """Start an overlapped write of `data` at the current seek position."""
    op = self.write_op
    return op.initiateOp(self.osfhandle, self.seekpos, data)
130,
67,
130,
40,
1274203656
] |
def open_sparse_file(path, mode, length=0, overlapped=True):
    """Open a sparse file and wrap its handle for IOCP-based async I/O.

    Args:
        path: File path to open.
        mode: Open mode, passed through to open_sparse_file_base.
        length: Initial sparse length hint (default 0).
        overlapped: Whether to open for overlapped (async) I/O.

    Returns:
        An IOCPFile wrapping the underlying handle.
    """
    return IOCPFile(open_sparse_file_base(path, mode, length, overlapped))
130,
67,
130,
40,
1274203656
] |
def __init__(self, doneflag, add_task, external_add_task, max_files_open, num_disk_threads):
self.add_task = add_task
self.file_to_torrent = {}
self.waiting_ops = []
self.active_file_to_handles = DictWithSets()
self.open_file_to_handles = DictWithLists()
self.set_max_fi... | epsylon3/torrentflux | [
130,
67,
130,
40,
1274203656
] |
def _close_all(self, df):
failures = {}
while len(self.open_file_to_handles) > 0:
filename, handle = self.open_file_to_handles.popitem()
try:
handle.close()
except:
failures[self.file_to_torrent[filename]] = Failure()
for torr... | epsylon3/torrentflux | [
130,
67,
130,
40,
1274203656
] |
def _close_files(self, df, file_set):
failure = None
done = False
filenames = self.open_file_to_handles.keys()
for filename in filenames:
if filename not in file_set:
continue
handles = self.open_file_to_handles.poprow(filename)
for ha... | epsylon3/torrentflux | [
130,
67,
130,
40,
1274203656
] |
def add_files(self, files, torrent):
for filename in files:
if filename in self.file_to_torrent:
raise BTFailure(_("File %s belongs to another running torrent")
% filename)
for filename in files:
self.file_to_torrent[filename] = tor... | epsylon3/torrentflux | [
130,
67,
130,
40,
1274203656
] |
def _ensure_exists(self, filename, length=0):
    """Create `filename` (and any missing parent dirs) as a sparse file."""
    if not os.path.exists(filename):
        # `f` is first the parent directory path...
        f = os.path.split(filename)[0]
        if f != '' and not os.path.exists(f):
            os.makedirs(f)
        # ...then reused as the new file object (Python 2 builtin file(),
        # equivalent to open(filename, 'wb')).
        f = file(filename, 'wb')
        make_file_sparse(filename, f, length)
        f.close()
130,
67,
130,
40,
1274203656
] |
def free_handle_notify(self):
    """A handle slot opened up: start the oldest queued file op, if any."""
    if not self.waiting_ops:
        return
    # FIFO: the op that has waited longest goes first.
    next_args = self.waiting_ops.pop(0)
    self._produce_handle(*next_args)
130,
67,
130,
40,
1274203656
] |
def _produce_handle(self, df, filename, for_write, length):
if filename in self.open_file_to_handles:
handle = self.open_file_to_handles.pop_from_row(filename)
if for_write and not is_open_for_write(handle.mode):
handle.close()
handle = open_sparse_file(fi... | epsylon3/torrentflux | [
130,
67,
130,
40,
1274203656
] |
def __init__(self, config, filepool, save_path, files, add_task,
external_add_task, doneflag):
self.filepool = filepool
self.config = config
self.doneflag = doneflag
self.add_task = add_task
self.external_add_task = external_add_task
self.initialize(save_... | epsylon3/torrentflux | [
130,
67,
130,
40,
1274203656
] |
def _build_file_structs(self, filepool, files):
total = 0
for filename, length in files:
# we're shutting down, abort.
if self.doneflag.isSet():
return False
self.undownloaded[filename] = length
if length > 0:
self.ranges.a... | epsylon3/torrentflux | [
130,
67,
130,
40,
1274203656
] |
def was_preallocated(self, pos, length):
    """Report whether the byte span [pos, pos+length) was preallocated."""
    end = pos + length
    return self.allocated_regions.is_range_in(pos, end)
130,
67,
130,
40,
1274203656
] |
def _intervals(self, pos, amount):
r = []
stop = pos + amount
p = max(bisect_right(self.ranges, (pos, 2 ** 500)) - 1, 0)
for begin, end, filename in self.ranges[p:]:
if begin >= stop:
break
r.append((filename,
max(pos, begin) ... | epsylon3/torrentflux | [
130,
67,
130,
40,
1274203656
] |
def op(h):
h.seek(pos)
if write:
odf = h.write(param)
else:
odf = h.read(param)
def like_finally(r):
self.filepool.release_handle(filename, h)
return r
odf.addBoth(like_finally)
return... | epsylon3/torrentflux | [
130,
67,
130,
40,
1274203656
] |
def _batch_read(self, pos, amount):
dfs = []
r = []
# queue all the reads
for filename, pos, end in self._intervals(pos, amount):
df = self._file_op(filename, pos, end - pos, write=False)
dfs.append(df)
# yield on all the reads in order - they complete i... | epsylon3/torrentflux | [
130,
67,
130,
40,
1274203656
] |
def _batch_write(self, pos, s):
dfs = []
total = 0
amount = len(s)
# queue all the writes
for filename, begin, end in self._intervals(pos, amount):
length = end - begin
assert length > 0, '%s %s' % (pos, amount)
d = buffer(s, total, length)
... | epsylon3/torrentflux | [
130,
67,
130,
40,
1274203656
] |
def write(self, pos, s):
    """Asynchronously write string `s` at byte offset `pos`.

    Returns:
        A Deferred that fires when the batched write coroutine completes.
    """
    # _batch_write splits the write across the underlying files; the
    # coroutine is scheduled on this storage object's task queue.
    df = launch_coroutine(wrap_task(self.add_task),
                          self._batch_write, pos, s)
    return df
130,
67,
130,
40,
1274203656
] |
def post_init(r):
    """Pass-through callback: close this torrent's files after init.

    NOTE(review): references `self` without receiving it, so this must be
    a closure nested inside a method in the original file -- confirm the
    enclosing scope before reusing. `r` is the upstream callback result,
    returned... actually replaced by close_files' return value.
    """
    return self.filepool.close_files(self.range_by_name)
130,
67,
130,
40,
1274203656
] |
def setUp(self):
    """Build a bare GET request (no user) plus simple error payload data."""
    super(TestViews, self).setUp()
    factory = RequestFactory()
    self.request_factory = factory
    self.request = factory.get('')
    self.request.user = None
    self.simple_data = {'error': 'error'}
1,
7,
1,
5,
1382087527
] |
def test_all_problem_grade_distribution_has_access(self, has_access):
"""
Test returns proper value when have proper access
"""
has_access.return_value = True
response = views.all_problem_grade_distribution(self.request, 'test/test/test')
self.assertEqual(json.dumps(self... | miptliot/edx-platform | [
1,
7,
1,
5,
1382087527
] |
def test_all_problem_grade_distribution_no_access(self, has_access):
"""
Test for no access
"""
has_access.return_value = False
response = views.all_problem_grade_distribution(self.request, 'test/test/test')
self.assertEqual("{\"error\": \"Access Denied: User does not ha... | miptliot/edx-platform | [
1,
7,
1,
5,
1382087527
] |
def test_all_sequential_open_distribution_has_access(self, has_access):
"""
Test returns proper value when have proper access
"""
has_access.return_value = True
response = views.all_sequential_open_distrib(self.request, 'test/test/test')
self.assertEqual(json.dumps(self.... | miptliot/edx-platform | [
1,
7,
1,
5,
1382087527
] |
def test_all_sequential_open_distribution_no_access(self, has_access):
"""
Test for no access
"""
has_access.return_value = False
response = views.all_sequential_open_distrib(self.request, 'test/test/test')
self.assertEqual("{\"error\": \"Access Denied: User does not hav... | miptliot/edx-platform | [
1,
7,
1,
5,
1382087527
] |
def test_section_problem_grade_distribution_has_access(self, has_access):
"""
Test returns proper value when have proper access
"""
has_access.return_value = True
response = views.section_problem_grade_distrib(self.request, 'test/test/test', '1')
self.assertEqual(json.du... | miptliot/edx-platform | [
1,
7,
1,
5,
1382087527
] |
def test_section_problem_grade_distribution_no_access(self, has_access):
"""
Test for no access
"""
has_access.return_value = False
response = views.section_problem_grade_distrib(self.request, 'test/test/test', '1')
self.assertEqual("{\"error\": \"Access Denied: User doe... | miptliot/edx-platform | [
1,
7,
1,
5,
1382087527
] |
def try_decode(byte_string, codec):
    """Decode `byte_string` with `codec`, returning None on failure.

    Args:
        byte_string: Raw bytes to decode.
        codec: Name of the codec to try.

    Returns:
        The decoded text, or None when the bytes are invalid for the codec
        or the codec name is unknown.
    """
    try:
        return byte_string.decode(codec)
    except (UnicodeDecodeError, LookupError):
        # Callers treat None as "this codec didn't work"; only swallow
        # decode failures, not unrelated errors (the original bare except
        # hid everything, including KeyboardInterrupt).
        return None
136,
54,
136,
41,
1407779045
] |
def fix_character_encoding(input_file, output_file):
with open(input_file, 'rb') as f_in:
with open(output_file, 'wb') as f_out:
for line in f_in:
# Try to decode with both latin_1 and utf-8
decoded = [try_decode(line, c) for c in codec_options]
d... | sorgerlab/indra | [
136,
54,
136,
41,
1407779045
] |
def __str__(self):
    """Prints string with field name if present on exception."""
    # Delegate to the shared Error base class -- presumably the base's
    # __str__ knows how to weave in the field name; confirm there.
    return Error.__str__(self)
1835,
570,
1835,
1039,
1429033745
] |
def __init__(cls, name, bases, dct):
    """Constructor.

    Args:
        name: Name of the class being created.
        bases: Its base classes.
        dct: Its attribute dictionary.
    """
    type.__init__(cls, name, bases, dct)
    # Base classes may never be initialized.
    # __initialized is name-mangled to this metaclass; presumably checked
    # by a companion __setattr__ to freeze the class after creation.
    if cls.__bases__ != (object,):
        cls.__initialized = True
1835,
570,
1835,
1039,
1429033745
] |
def __setattr__(cls, name, value):
"""Overridden to avoid setting variables after init.
Setting attributes on a class must work during the period of
initialization to set the enumation value class variables and
build the name/number maps. Once __init__ has set the
__initialized ... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def definition_name(cls):
"""Helper method for creating definition name.
Names will be generated to include the classes package name,
scope (if the class is nested in another definition) and class
name.
By default, the package name for a definition is derived from
its m... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def definition_package(cls):
"""Helper method for creating creating the package of a definition.
Returns:
Name of package that definition belongs to.
"""
outer_definition = cls.message_definition()
if not outer_definition:
return util.get_package_for_module... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def __init__(cls, name, bases, dct):
# Can only define one level of sub-classes below Enum.
if not (bases == (object,) or bases == (Enum,)):
raise EnumDefinitionError(
'Enum type %s may only inherit from Enum' % name)
cls.__by_number = {}
cls.__by_name = {}
... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def names(cls):
    """Get all names for Enum.

    Returns:
        An iterator for names of the enumeration in arbitrary order.
    """
    # __by_name is populated by the Enum metaclass during class creation.
    return cls.__by_name.keys()
1835,
570,
1835,
1039,
1429033745
] |
def lookup_by_name(cls, name):
    """Look up Enum by name.

    Args:
        name: Name of enum to find.

    Returns:
        Enum sub-class instance of that value.

    Raises:
        KeyError: If no enum member has that name.
    """
    return cls.__by_name[name]
1835,
570,
1835,
1039,
1429033745
] |
def __len__(cls):
    """Number of members defined on the Enum class."""
    return len(cls.__by_name)
1835,
570,
1835,
1039,
1429033745
] |
def __new__(cls, index):
"""Acts as look-up routine after class is initialized.
The purpose of overriding __new__ is to provide a way to treat
Enum subclasses as casting types, similar to how the int type
functions. A program can pass a string or an integer and this
method with... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def __setattr__(self, name, value):
    """Enum instances are immutable: reject every attribute assignment."""
    raise TypeError('May not change enum values')
1835,
570,
1835,
1039,
1429033745
] |
def __int__(self):
    """Convert to int via the member's number."""
    return self.number
1835,
570,
1835,
1039,
1429033745
] |
def __reduce__(self):
    """Enable pickling.

    Returns:
        A 2-tuple containing the class and __new__ args to be used
        for restoring a pickled instance.
    """
    # Only the number is stored; on unpickle, __new__ presumably resolves
    # it back to the canonical member instance.
    return self.__class__, (self.number,)
1835,
570,
1835,
1039,
1429033745
] |
def __lt__(self, other):
    """Order is by number; other types are not comparable."""
    if not isinstance(other, type(self)):
        # Defer so Python can try the reflected comparison.
        return NotImplemented
    return self.number < other.number
1835,
570,
1835,
1039,
1429033745
] |
def __eq__(self, other):
    """Equality is by number; other types are not comparable."""
    if not isinstance(other, type(self)):
        # Defer so Python can try the reflected comparison.
        return NotImplemented
    return self.number == other.number
1835,
570,
1835,
1039,
1429033745
] |
def __ge__(self, other):
    """Order is by number."""
    # Only comparable against the same enum type; defer otherwise so
    # Python can try the reflected operation.
    if isinstance(other, type(self)):
        return self.number >= other.number
    return NotImplemented
1835,
570,
1835,
1039,
1429033745
] |
def __hash__(self):
    """Hash by number."""
    # Must stay consistent with __eq__, which also compares by number.
    return hash(self.number)
1835,
570,
1835,
1039,
1429033745
] |
def to_dict(cls):
    """Make dictionary version of enumerated class.

    Dictionary created this way can be used with def_num.

    Returns:
        A dict (name) -> number.
    """
    return {member.name: member.number for member in iter(cls)}
1835,
570,
1835,
1039,
1429033745
] |
def def_enum(dct, name):
    """Define enum class from dictionary.

    Args:
        dct: Dictionary of enumerated values for type.
        name: Name of enum.

    Returns:
        The newly created Enum subclass.
    """
    return type(name, (Enum,), dct)
1835,
570,
1835,
1039,
1429033745
] |
def __new__(cls, name, bases, dct):
"""Create new Message class instance.
The __new__ method of the _MessageClass type is overridden so as to
allow the translation of Field instances to slots.
"""
by_number = {}
by_name = {}
variant_map = {} # pylint:disable=un... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def __init__(self, **kwargs):
"""Initialize internal messages state.
Args:
A message can be initialized via the constructor by passing
in keyword arguments corresponding to fields. For example:
class Date(Message):
day = IntegerField(1)
month... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def is_initialized(self):
    """Get initialization status.

    Returns:
        True if message is valid, else False.
    """
    try:
        self.check_initialized()
    except ValidationError:
        return False
    return True
1835,
570,
1835,
1039,
1429033745
] |
def all_fields(cls):
    """Get all field definition objects.

    Ordering is arbitrary.

    Returns:
        Iterator over all values in arbitrary order.
    """
    # __by_name is built by the message metaclass from the declared fields.
    return cls.__by_name.values()
1835,
570,
1835,
1039,
1429033745
] |
def field_by_name(cls, name):
    """Get field by name.

    Args:
        name: Name of the field to look up.

    Returns:
        Field object associated with name.

    Raises:
        KeyError: If no field found by that name.
    """
    return cls.__by_name[name]
1835,
570,
1835,
1039,
1429033745
] |
def field_by_number(cls, number):
    """Get field by number.

    Args:
        number: Wire number of the field to look up.

    Returns:
        Field object associated with number.

    Raises:
        KeyError: If no field found by that number.
    """
    return cls.__by_number[number]
1835,
570,
1835,
1039,
1429033745
] |
def reset(self, name):
"""Reset assigned value for field.
Resetting a field will return it to its default value or None.
Args:
name: Name of field to reset.
"""
message_type = type(self)
try:
field = message_type.field_by_name(name)
except ... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def get_unrecognized_field_info(self, key, value_default=None,
variant_default=None):
"""Get the value and variant of an unknown field in this message.
Args:
key: The name or number of the field to retrieve.
value_default: Value to be returned if ... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def __setattr__(self, name, value):
"""Change set behavior for messages.
Messages may only be assigned values that are fields.
Does not try to validate field when set.
Args:
name: Name of field to assign to.
value: Value to assign to field.
Raises:
... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def __eq__(self, other):
"""Equality operator.
Does field by field comparison with other message. For
equality, must be same type and values of all fields must be
equal.
Messages not required to be initialized for comparison.
Does not attempt to determine equality for... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def __init__(self, field_instance, sequence):
"""Constructor.
Args:
field_instance: Instance of field that validates the list.
sequence: List or tuple to construct list from.
"""
if not field_instance.repeated:
raise FieldDefinitionError(
... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def __setstate__(self, state):
"""Enable unpickling.
Args:
state: A 3-tuple containing:
- The field instance, or None if it belongs to a Message class.
- The Message class that the field instance belongs to, or None.
- The field instance number of the Messa... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def field(self):
    """Field that validates list."""
    # __field is set once at construction; exposed read-only via this
    # property.
    return self.__field
1835,
570,
1835,
1039,
1429033745
] |
def __setitem__(self, index, value):
    """Validate item assignment to list."""
    if isinstance(index, slice):
        # Slice assignment replaces a sub-sequence, so the value is
        # validated as a whole repeated value rather than one element.
        self.__field.validate(value)
    else:
        self.__field.validate_element(value)
    list.__setitem__(self, index, value)
1835,
570,
1835,
1039,
1429033745
] |
def extend(self, sequence):
    """Validate extension of list."""
    # Validate first so an invalid sequence leaves the list untouched.
    self.__field.validate(sequence)
    # list.extend returns None; returned for symmetry with the base call.
    return list.extend(self, sequence)
1835,
570,
1835,
1039,
1429033745
] |
def __init__(cls, name, bases, dct):
    """Register this Field subclass for each variant it declares."""
    # '_Field__variant_to_type' is the mangled name of Field's private
    # __variant_to_type map; getattr is required because writing the
    # attribute directly here would mangle under the current class name.
    getattr(cls, '_Field__variant_to_type').update(
        (variant, cls) for variant in dct.get('VARIANTS', []))
    type.__init__(cls, name, bases, dct)
1835,
570,
1835,
1039,
1429033745
] |
def __init__(self,
number,
required=False,
repeated=False,
variant=None,
default=None):
"""Constructor.
The required and repeated parameters are mutually exclusive.
Setting both to True will raise a FieldDefini... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def __set__(self, message_instance, value):
"""Set value on message.
Args:
message_instance: Message instance to set value on.
value: Value to set on message.
"""
# Reaches in to message instance directly to assign to private tags.
if value is None:
... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def validate_element(self, value):
"""Validate single element of field.
This is different from validate in that it is used on individual
values of repeated fields.
Args:
value: Value to validate.
Returns:
The value casted in the expected type.
Rais... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
def validate(self, value):
"""Validate value assigned to field.
Args:
value: Value to validate.
Returns:
the value in casted in the correct type.
Raises:
ValidationError if value is not expected type.
"""
return self.__validate(value, self... | catapult-project/catapult | [
1835,
570,
1835,
1039,
1429033745
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.