function | repo_name | features
---|---|---|
def __init__(self, gan=None, config=None, trainer=None):
super().__init__(config=config, gan=gan, trainer=trainer)
self.d_grads = None
self.g_grads = None | 255BITS/HyperGAN | [
1175,
170,
1175,
20,
1466808596
] |
def main():
# Contingency Table from Wilks (2011) Table 8.3
table = np.array([[50, 91, 71],
[47, 2364, 170],
[54, 205, 3288]])
mct = MulticlassContingencyTable(table, n_classes=table.shape[0],
class_names=np.arange(table.shape[0]).astype(str))
print(mct.peirce_skill_score())
print(mct.gerrity_score()) | djgagne/hagelslag | [
58,
25,
58,
5,
1434487723
] |
def __init__(self, table=None, n_classes=2, class_names=("1", "0")):
self.table = table
self.n_classes = n_classes
self.class_names = class_names
if table is None:
self.table = np.zeros((self.n_classes, self.n_classes), dtype=int) | djgagne/hagelslag | [
58,
25,
58,
5,
1434487723
] |
def peirce_skill_score(self):
"""
Multiclass Peirce Skill Score (also Hanssen and Kuipers score, True Skill Score)
"""
n = float(self.table.sum())
nf = self.table.sum(axis=1)
no = self.table.sum(axis=0)
correct = float(self.table.trace())
return (correct / n - (nf * no).sum() / n ** 2) / (1 - (no * no).sum() / n ** 2) | djgagne/hagelslag | [
58,
25,
58,
5,
1434487723
] |
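As a sanity check (a sketch, not part of hagelslag): for a 2x2 contingency table, with rows as forecasts and columns as observations, this multiclass formula reduces to the familiar POD - POFD form:

import numpy as np

table = np.array([[30, 10],
                  [5, 55]])  # hypothetical forecast/observation counts
n = float(table.sum())
nf, no = table.sum(axis=1), table.sum(axis=0)
pss = (table.trace() / n - (nf * no).sum() / n ** 2) / (1 - (no * no).sum() / n ** 2)
pod = table[0, 0] / float(no[0])   # probability of detection (hit rate)
pofd = table[0, 1] / float(no[1])  # probability of false detection
assert np.isclose(pss, pod - pofd)  # both come out to about 0.703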
def heidke_skill_score(self):
"""
Multiclass Heidke Skill Score (accuracy relative to that expected by chance).
"""
n = float(self.table.sum())
nf = self.table.sum(axis=1)
no = self.table.sum(axis=0)
correct = float(self.table.trace())
return (correct / n - (nf * no).sum() / n ** 2) / (1 - (nf * no).sum() / n ** 2) | djgagne/hagelslag | [
58,
25,
58,
5,
1434487723
] |
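Again as a hedged sanity check: for a 2x2 table the multiclass expression reduces to the standard Heidke form 2(ad - bc) / [(a + c)(c + d) + (a + b)(b + d)]:

import numpy as np

a, b, c, d = 30, 10, 5, 55  # hypothetical hits, false alarms, misses, correct negatives
table = np.array([[a, b], [c, d]])
n = float(table.sum())
nf, no = table.sum(axis=1), table.sum(axis=0)
hss = (table.trace() / n - (nf * no).sum() / n ** 2) / (1 - (nf * no).sum() / n ** 2)
hss_2x2 = 2.0 * (a * d - b * c) / ((a + c) * (c + d) + (a + b) * (b + d))
assert np.isclose(hss, hss_2x2)  # both come out to about 0.681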
def __init__(self, band_names=['delta', 'theta', 'alpha', 'beta'],
ch_names=['TP9', 'AF7', 'AF8', 'TP10']):
"""
Bar plot with one bar per frequency band and channel pair.
"""
self.band_names = band_names
self.ch_names = ch_names
# one bar per band/channel combination
self.n_bars = len(self.band_names) * len(self.ch_names)
self.fig, self.ax = plt.subplots()
self.ax.set_ylim((0, 1))
y = np.zeros((self.n_bars,))
x = range(self.n_bars)
self.rects = self.ax.bar(x, y) | bcimontreal/bci_workshop | [
76,
43,
76,
11,
1431551360
] |
def test_multi_buffer(self):
grid = Grid((3, 3))
f = TimeFunction(name="f", grid=grid)
g = TimeFunction(name="g", grid=grid, save=Buffer(7))
op = Operator([Eq(f.forward, 1), Eq(g, f.forward)])
op(time_M=3)
# f looped all time_order buffer and is 1 everywhere
assert np.allclose(f.data, 1)
# g looped indices 0 to 3, rest is still 0
assert np.allclose(g.data[0:4], 1)
assert np.allclose(g.data[4:], 0) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_interior(self, opt):
"""
Tests application of an Operator consisting of a single equation
over the ``interior`` subdomain.
"""
grid = Grid(shape=(4, 4, 4))
x, y, z = grid.dimensions
interior = grid.interior
u = TimeFunction(name='u', grid=grid)
eqn = [Eq(u.forward, u + 2, subdomain=interior)]
op = Operator(eqn, opt=opt)
op.apply(time_M=2)
assert np.all(u.data[1, 1:-1, 1:-1, 1:-1] == 6.)
assert np.all(u.data[1, :, 0] == 0.)
assert np.all(u.data[1, :, -1] == 0.)
assert np.all(u.data[1, :, :, 0] == 0.)
assert np.all(u.data[1, :, :, -1] == 0.) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_subdim_middle(self, opt):
"""
Tests that instantiating SubDimensions using the classmethod
constructors works correctly.
"""
grid = Grid(shape=(4, 4, 4))
x, y, z = grid.dimensions
t = grid.stepping_dim # noqa
u = TimeFunction(name='u', grid=grid) # noqa
xi = SubDimension.middle(name='xi', parent=x,
thickness_left=1,
thickness_right=1)
eqs = [Eq(u.forward, u + 1)]
eqs = [e.subs(x, xi) for e in eqs]
op = Operator(eqs, opt=opt)
u.data[:] = 1.0
op.apply(time_M=1)
assert np.all(u.data[1, 0, :, :] == 1)
assert np.all(u.data[1, -1, :, :] == 1)
assert np.all(u.data[1, 1:3, :, :] == 2) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_bcs(self, opt):
"""
Tests application of an Operator consisting of multiple equations
defined over different sub-regions, explicitly created through the
use of SubDimensions.
"""
grid = Grid(shape=(20, 20))
x, y = grid.dimensions
t = grid.stepping_dim
thickness = 4
u = TimeFunction(name='u', save=None, grid=grid, space_order=0, time_order=1)
xleft = SubDimension.left(name='xleft', parent=x, thickness=thickness)
xi = SubDimension.middle(name='xi', parent=x,
thickness_left=thickness, thickness_right=thickness)
xright = SubDimension.right(name='xright', parent=x, thickness=thickness)
yi = SubDimension.middle(name='yi', parent=y,
thickness_left=thickness, thickness_right=thickness)
t_in_centre = Eq(u[t+1, xi, yi], 1)
leftbc = Eq(u[t+1, xleft, yi], u[t+1, xleft+1, yi] + 1)
rightbc = Eq(u[t+1, xright, yi], u[t+1, xright-1, yi] + 1)
op = Operator([t_in_centre, leftbc, rightbc], opt=opt)
op.apply(time_m=1, time_M=1)
assert np.all(u.data[0, :, 0:thickness] == 0.)
assert np.all(u.data[0, :, -thickness:] == 0.)
assert all(np.all(u.data[0, i, thickness:-thickness] == (thickness+1-i))
for i in range(thickness))
assert all(np.all(u.data[0, -i, thickness:-thickness] == (thickness+2-i))
for i in range(1, thickness + 1))
assert np.all(u.data[0, thickness:-thickness, thickness:-thickness] == 1.) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_iteration_property_parallel(self, exprs, expected):
"""Tests detection of sequental and parallel Iterations when applying
equations over different subdomains."""
grid = Grid(shape=(20, 20))
x, y = grid.dimensions # noqa
t = grid.time_dim # noqa
interior = grid.interior # noqa
u = TimeFunction(name='u', grid=grid, save=10, time_order=1) # noqa
# List comprehension would need explicit locals/globals mappings to eval
for i, e in enumerate(list(exprs)):
exprs[i] = eval(e)
op = Operator(exprs, opt='noop')
iterations = FindNodes(Iteration).visit(op)
assert all(i.is_Sequential for i in iterations if i.dim.name != expected)
assert all(i.is_Parallel for i in iterations if i.dim.name == expected) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_iteration_property_vector(self, exprs, expected):
"""Tests detection of vector Iterations when using subdimensions."""
grid = Grid(shape=(20, 20))
x, y = grid.dimensions # noqa
time = grid.time_dim # noqa
# The leftmost 10 elements
yleft = SubDimension.left(name='yleft', parent=y, thickness=10) # noqa
u = TimeFunction(name='u', grid=grid, save=10, time_order=0, space_order=1) # noqa
# List comprehension would need explicit locals/globals mappings to eval
for i, e in enumerate(list(exprs)):
exprs[i] = eval(e)
op = Operator(exprs, opt='simd')
iterations = FindNodes(Iteration).visit(op)
vectorized = [i.dim.name for i in iterations if i.is_Vectorized]
assert set(vectorized) == set(expected) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_subdimmiddle_parallel(self, opt):
"""
Tests application of an Operator consisting of a subdimension
defined over different sub-regions, explicitly created through the
use of SubDimensions.
"""
grid = Grid(shape=(20, 20))
x, y = grid.dimensions
t = grid.stepping_dim
thickness = 4
u = TimeFunction(name='u', save=None, grid=grid, space_order=0, time_order=1)
xi = SubDimension.middle(name='xi', parent=x,
thickness_left=thickness, thickness_right=thickness)
yi = SubDimension.middle(name='yi', parent=y,
thickness_left=thickness, thickness_right=thickness)
# a 5 point stencil that can be computed in parallel
centre = Eq(u[t+1, xi, yi], u[t, xi, yi] + u[t, xi-1, yi]
+ u[t, xi+1, yi] + u[t, xi, yi-1] + u[t, xi, yi+1])
u.data[0, 10, 10] = 1.0
op = Operator([centre], opt=opt)
print(op.ccode)
iterations = FindNodes(Iteration).visit(op)
assert all(i.is_Affine and i.is_Parallel for i in iterations if i.dim in [xi, yi])
op.apply(time_m=0, time_M=0)
assert np.all(u.data[1, 9:12, 10] == 1.0)
assert np.all(u.data[1, 10, 9:12] == 1.0)
# Other than those, it should all be 0
u.data[1, 9:12, 10] = 0.0
u.data[1, 10, 9:12] = 0.0
assert np.all(u.data[1, :] == 0) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_subdimmiddle_notparallel(self):
"""
Tests application of an Operator consisting of a subdimension
defined over different sub-regions, explicitly created through the
use of SubDimensions.
Different from ``test_subdimmiddle_parallel`` because an interior
dimension cannot be evaluated in parallel.
"""
grid = Grid(shape=(20, 20))
x, y = grid.dimensions
t = grid.stepping_dim
thickness = 4
u = TimeFunction(name='u', save=None, grid=grid, space_order=0, time_order=1)
xi = SubDimension.middle(name='xi', parent=x,
thickness_left=thickness, thickness_right=thickness)
yi = SubDimension.middle(name='yi', parent=y,
thickness_left=thickness, thickness_right=thickness)
# flow dependencies in x and y which should force serial execution
# in reverse direction
centre = Eq(u[t+1, xi, yi], u[t, xi, yi] + u[t+1, xi+1, yi+1])
u.data[0, 10, 10] = 1.0
op = Operator([centre])
iterations = FindNodes(Iteration).visit(op)
assert all(i.is_Affine and i.is_Sequential for i in iterations if i.dim == xi)
assert all(i.is_Affine and i.is_Parallel for i in iterations if i.dim == yi)
op.apply(time_m=0, time_M=0)
for i in range(4, 11):
assert u.data[1, i, i] == 1.0
u.data[1, i, i] = 0.0
assert np.all(u.data[1, :] == 0) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_subdim_fd(self):
"""
Test that the FD shortcuts are handled correctly with SubDimensions
"""
grid = Grid(shape=(20, 20))
x, y = grid.dimensions
u = TimeFunction(name='u', save=None, grid=grid, space_order=1, time_order=1)
u.data[:] = 2.
# u is constant (2), so u.dx + u.dy evaluates to zero on the interior
eq = [Eq(u.forward, u.dx + u.dy, subdomain=grid.interior)]
op = Operator(eq)
op.apply(time_M=0)
assert np.all(u.data[1, -1, :] == 2.)
assert np.all(u.data[1, :, 0] == 2.)
assert np.all(u.data[1, :, -1] == 2.)
assert np.all(u.data[1, 0, :] == 2.)
assert np.all(u.data[1, 1:18, 1:18] == 0.) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_expandingbox_like(self, opt):
"""
Make sure SubDimensions aren't an obstacle to expanding boxes.
"""
grid = Grid(shape=(8, 8))
x, y = grid.dimensions
u = TimeFunction(name='u', grid=grid)
xi = SubDimension.middle(name='xi', parent=x, thickness_left=2, thickness_right=2)
yi = SubDimension.middle(name='yi', parent=y, thickness_left=2, thickness_right=2)
eqn = Eq(u.forward, u + 1)
eqn = eqn.subs({x: xi, y: yi})
op = Operator(eqn, opt=opt)
op.apply(time=3, x_m=2, x_M=5, y_m=2, y_M=5,
xi_ltkn=0, xi_rtkn=0, yi_ltkn=0, yi_rtkn=0)
assert np.all(u.data[0, 2:-2, 2:-2] == 4.)
assert np.all(u.data[1, 2:-2, 2:-2] == 3.)
assert np.all(u.data[:, :2] == 0.)
assert np.all(u.data[:, -2:] == 0.)
assert np.all(u.data[:, :, :2] == 0.)
assert np.all(u.data[:, :, -2:] == 0.) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_basic(self):
nt = 19
grid = Grid(shape=(11, 11))
time = grid.time_dim
u = TimeFunction(name='u', grid=grid)
assert(grid.stepping_dim in u.indices)
u2 = TimeFunction(name='u2', grid=grid, save=nt)
assert(time in u2.indices)
factor = 4
time_subsampled = ConditionalDimension('t_sub', parent=time, factor=factor)
usave = TimeFunction(name='usave', grid=grid, save=(nt+factor-1)//factor,
time_dim=time_subsampled)
assert(time_subsampled in usave.indices)
eqns = [Eq(u.forward, u + 1.), Eq(u2.forward, u2 + 1.), Eq(usave, u)]
op = Operator(eqns)
op.apply(t_M=nt-2)
assert np.all(np.allclose(u.data[(nt-1) % 3], nt-1))
assert np.all([np.allclose(u2.data[i], i) for i in range(nt)])
assert np.all([np.allclose(usave.data[i], i*factor)
for i in range((nt+factor-1)//factor)]) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
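The `(nt+factor-1)//factor` expression that recurs in these tests is integer ceiling division; a quick plain-Python check with the values used above:

nt, factor = 19, 4
nsave = (nt + factor - 1) // factor  # ceil(19 / 4)
assert nsave == 5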
def test_spacial_subsampling(self, opt):
"""
Test conditional dimension for the spatial ones.
This test saves u every two grid points :
u2[x, y] = u[2*x, 2*y]
"""
nt = 19
grid = Grid(shape=(11, 11))
time = grid.time_dim
u = TimeFunction(name='u', grid=grid, save=nt)
assert(grid.time_dim in u.indices)
# Create subsampled spatial dimensions and a corresponding grid
dims = tuple([ConditionalDimension(d.name+'sub', parent=d, factor=2)
for d in u.grid.dimensions])
grid2 = Grid((6, 6), dimensions=dims, time_dimension=time)
u2 = TimeFunction(name='u2', grid=grid2, save=nt)
assert(time in u2.indices)
eqns = [Eq(u.forward, u + 1.), Eq(u2, u)]
op = Operator(eqns, opt=opt)
op.apply(time_M=nt-2)
# Verify that u2[x,y]= u[2*x, 2*y]
assert np.allclose(u.data[:-1, 0::2, 0::2], u2.data[:-1, :, :]) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_issue_1592(self):
grid = Grid(shape=(11, 11))
time = grid.time_dim
time_sub = ConditionalDimension('t_sub', parent=time, factor=2)
v = TimeFunction(name="v", grid=grid, space_order=4, time_dim=time_sub, save=5)
w = Function(name="w", grid=grid, space_order=4)
Operator(Eq(w, v.dx))(time=6)
op = Operator(Eq(v.forward, v.dx))
op.apply(time=6)
exprs = FindNodes(Expression).visit(op)
assert exprs[-1].expr.lhs.indices[0] == IntDiv(time, 2) + 1 | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_nothing_in_negative(self):
"""Test the case where when the condition is false, there is nothing to do."""
nt = 4
grid = Grid(shape=(11, 11))
time = grid.time_dim
u = TimeFunction(name='u', save=nt, grid=grid)
assert(grid.time_dim in u.indices)
factor = 4
time_subsampled = ConditionalDimension('t_sub', parent=time, factor=factor)
usave = TimeFunction(name='usave', grid=grid, save=(nt+factor-1)//factor,
time_dim=time_subsampled)
assert(time_subsampled in usave.indices)
eqns = [Eq(usave, u)]
op = Operator(eqns)
u.data[:] = 1.0
usave.data[:] = 0.0
op.apply(time_m=1, time_M=1)
assert np.allclose(usave.data, 0.0)
op.apply(time_m=0, time_M=0)
assert np.allclose(usave.data, 1.0) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_as_expr(self):
nt = 19
grid = Grid(shape=(11, 11))
time = grid.time_dim
u = TimeFunction(name='u', grid=grid)
assert(grid.stepping_dim in u.indices)
u2 = TimeFunction(name='u2', grid=grid, save=nt)
assert(time in u2.indices)
factor = 4
time_subsampled = ConditionalDimension('t_sub', parent=time, factor=factor)
usave = TimeFunction(name='usave', grid=grid, save=(nt+factor-1)//factor,
time_dim=time_subsampled)
assert(time_subsampled in usave.indices)
eqns = [Eq(u.forward, u + 1.), Eq(u2.forward, u2 + 1.),
Eq(usave, time_subsampled * u)]
op = Operator(eqns)
op.apply(t=nt-2)
assert np.all(np.allclose(u.data[(nt-1) % 3], nt-1))
assert np.all([np.allclose(u2.data[i], i) for i in range(nt)])
assert np.all([np.allclose(usave.data[i], i*factor*i)
for i in range((nt+factor-1)//factor)]) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_no_index(self):
"""Test behaviour when the ConditionalDimension is used as a symbol in
an expression."""
nt = 19
grid = Grid(shape=(11, 11))
time = grid.time_dim
u = TimeFunction(name='u', grid=grid)
assert(grid.stepping_dim in u.indices)
v = Function(name='v', grid=grid)
factor = 4
time_subsampled = ConditionalDimension('t_sub', parent=time, factor=factor)
eqns = [Eq(u.forward, u + 1), Eq(v, v + u*u*time_subsampled)]
op = Operator(eqns)
op.apply(t_M=nt-2)
assert np.all(np.allclose(u.data[(nt-1) % 3], nt-1))
# expected result is 1600
# v = u[0]**2 * 0 + u[4]**2 * 1 + u[8]**2 * 2 + u[12]**2 * 3 + u[16]**2 * 4
# with u[t] = t
# v = 16 * 1 + 64 * 2 + 144 * 3 + 256 * 4 = 1600
assert np.all(np.allclose(v.data, 1600)) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_symbolic_factor(self):
"""
Test ConditionalDimension with symbolic factor (provided as a Constant).
"""
g = Grid(shape=(4, 4, 4))
u = TimeFunction(name='u', grid=g, time_order=0)
fact = Constant(name='fact', dtype=np.int32, value=4)
tsub = ConditionalDimension(name='tsub', parent=g.time_dim, factor=fact)
usave = TimeFunction(name='usave', grid=g, time_dim=tsub, save=4)
op = Operator([Eq(u, u + 1), Eq(usave, u)])
op.apply(time=7) # Use `fact`'s default value, 4
assert np.all(usave.data[0] == 1)
assert np.all(usave.data[1] == 5)
u.data[:] = 0.
op.apply(time=7, fact=2)
assert np.all(usave.data[0] == 1)
assert np.all(usave.data[1] == 3)
assert np.all(usave.data[2] == 5)
assert np.all(usave.data[3] == 7) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
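To spell out the arithmetic behind the second set of assertions: with `fact=2` and the increment equation scheduled before the save, `usave[i]` captures `u` just after step `2*i`, i.e. the value `2*i + 1`:

fact = 2
expected = [2 * i + 1 for i in range(4)]
assert expected == [1, 3, 5, 7]  # matches the asserted usave values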
def test_grouping(self):
"""
Test that Clusters over the same set of ConditionalDimensions fall within
the same Conditional. This is a follow-up to issue #1610.
"""
grid = Grid(shape=(10, 10))
time = grid.time_dim
cond = ConditionalDimension(name='cond', parent=time, condition=time < 5)
u = TimeFunction(name='u', grid=grid, space_order=4)
# We use a SubDomain only to keep the two Eqs separated
eqns = [Eq(u.forward, u + 1, subdomain=grid.interior),
Eq(u.forward, u.dx.dx + 1., implicit_dims=[cond])]
op = Operator(eqns, opt=('advanced-fsg', {'cire-mincost-sops': 1}))
conds = FindNodes(Conditional).visit(op)
assert len(conds) == 1
assert len(retrieve_iteration_tree(conds[0].then_body)) == 2 | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_expr_like_lowering(self):
"""
Test the lowering of an expr-like ConditionalDimension's condition.
This test makes an Operator that should indexify and lower the condition
passed to the ConditionalDimension.
"""
grid = Grid(shape=(3, 3))
g1 = Function(name='g1', grid=grid)
g2 = Function(name='g2', grid=grid)
g1.data[:] = 0.49
g2.data[:] = 0.49
x, y = grid.dimensions
ci = ConditionalDimension(name='ci', parent=y, condition=Le((g1 + g2),
1.01*(g1 + g2)))
f = Function(name='f', shape=grid.shape, dimensions=(x, ci))
Operator(Eq(f, g1+g2)).apply()
assert np.all(f.data[:] == g1.data[:] + g2.data[:]) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_relational_classes(self, setup_rel, rhs, c1, c2, c3, c4):
"""
Test ConditionalDimension using conditions based on Relations over SubDomains.
"""
class InnerDomain(SubDomain):
name = 'inner'
def define(self, dimensions):
return {d: ('middle', 2, 2) for d in dimensions}
inner_domain = InnerDomain()
grid = Grid(shape=(8, 8), subdomains=(inner_domain,))
g = Function(name='g', grid=grid)
g2 = Function(name='g2', grid=grid)
for i in [g, g2]:
i.data[:4, :4] = 1
i.data[4:, :4] = 2
i.data[4:, 4:] = 3
i.data[:4, 4:] = 4
xi, yi = grid.subdomains['inner'].dimensions
cond = setup_rel(0.25*g + 0.75*g2, rhs, subdomain=grid.subdomains['inner'])
ci = ConditionalDimension(name='ci', parent=yi, condition=cond)
f = Function(name='f', shape=grid.shape, dimensions=(xi, ci))
eq1 = Eq(f, 0.4*g + 0.6*g2)
eq2 = Eq(f, 5)
Operator([eq1, eq2]).apply()
assert np.all(f.data[2:6, c1:c2] == 5.)
assert np.all(f.data[:, c3:c4] < 5.) | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_no_fusion_simple(self):
"""
If ConditionalDimensions are present, then Clusters must not be fused so
that ultimately Eqs get scheduled to different loop nests.
"""
grid = Grid(shape=(4, 4, 4))
time = grid.time_dim
f = TimeFunction(name='f', grid=grid)
g = Function(name='g', grid=grid)
h = Function(name='h', grid=grid)
# No ConditionalDimensions yet. Will be fused and optimized
eqns = [Eq(f.forward, f + 1),
Eq(h, f + 1),
Eq(g, f + 1)]
op = Operator(eqns)
exprs = FindNodes(Expression).visit(op._func_table['bf0'].root)
assert len(exprs) == 4
assert exprs[1].expr.rhs is exprs[0].output
assert exprs[2].expr.rhs is exprs[0].output
assert exprs[3].expr.rhs is exprs[0].output
# Now with a ConditionalDimension. No fusion, no optimization
ctime = ConditionalDimension(name='ctime', parent=time, condition=time > 4)
eqns = [Eq(f.forward, f + 1),
Eq(h, f + 1),
Eq(g, f + 1, implicit_dims=[ctime])]
op = Operator(eqns)
exprs = FindNodes(Expression).visit(op._func_table['bf0'].root)
assert len(exprs) == 3
assert exprs[1].expr.rhs is exprs[0].output
assert exprs[2].expr.rhs is exprs[0].output
exprs = FindNodes(Expression).visit(op._func_table['bf1'].root)
assert len(exprs) == 1 | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_no_fusion_convoluted(self):
"""
Conceptually like `test_no_fusion_simple`, but with more expressions
and non-trivial data flow.
"""
grid = Grid(shape=(4, 4, 4))
time = grid.time_dim
f = TimeFunction(name='f', grid=grid)
g = Function(name='g', grid=grid)
h = Function(name='h', grid=grid)
ctime = ConditionalDimension(name='ctime', parent=time, condition=time > 4)
eqns = [Eq(f.forward, f + 1),
Eq(h, f + 1),
Eq(g, f + 1, implicit_dims=[ctime]),
Eq(f.forward, f + 1, implicit_dims=[ctime]),
Eq(f.forward, f + 1),
Eq(g, f + 1)]
op = Operator(eqns)
exprs = FindNodes(Expression).visit(op._func_table['bf0'].root)
assert len(exprs) == 3
assert exprs[1].expr.rhs is exprs[0].output
assert exprs[2].expr.rhs is exprs[0].output
exprs = FindNodes(Expression).visit(op._func_table['bf1'].root)
assert len(exprs) == 3
exprs = FindNodes(Expression).visit(op._func_table['bf2'].root)
assert len(exprs) == 3
assert exprs[1].expr.rhs is exprs[0].output
assert exprs[2].expr.rhs is exprs[0].output | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def test_topofusion_w_subdims_conddims(self):
"""
Check that topological fusion works across guarded Clusters over different
iteration spaces and in presence of anti-dependences.
This test uses both SubDimensions (via SubDomains) and ConditionalDimensions.
"""
grid = Grid(shape=(4, 4, 4))
time = grid.time_dim
f = TimeFunction(name='f', grid=grid, time_order=2)
g = TimeFunction(name='g', grid=grid, time_order=2)
h = TimeFunction(name='h', grid=grid, time_order=2)
fsave = TimeFunction(name='fsave', grid=grid, time_order=2, save=5)
gsave = TimeFunction(name='gsave', grid=grid, time_order=2, save=5)
ctime = ConditionalDimension(name='ctime', parent=time, condition=time > 4)
eqns = [Eq(f.forward, f + 1),
Eq(g.forward, g + 1),
Eq(fsave, f.dt2, implicit_dims=[ctime]),
Eq(h, f + g, subdomain=grid.interior),
Eq(gsave, g.dt2, implicit_dims=[ctime])]
op = Operator(eqns)
# Check generated code -- expect the gsave equation to be scheduled together
# in the same loop nest with the fsave equation
assert len(op._func_table) == 3
exprs = FindNodes(Expression).visit(op._func_table['bf0'].root)
assert len(exprs) == 2
assert exprs[0].write is f
assert exprs[1].write is g
exprs = FindNodes(Expression).visit(op._func_table['bf1'].root)
assert len(exprs) == 3
assert exprs[1].write is fsave
assert exprs[2].write is gsave
exprs = FindNodes(Expression).visit(op._func_table['bf2'].root)
assert len(exprs) == 1
assert exprs[0].write is h | opesci/devito | [
428,
198,
428,
105,
1458759589
] |
def __init__(self, domain='rds.aliyuncs.com', port=80):
RestApi.__init__(self, domain, port)
self.AccountName = None
self.DBInstanceId = None
self.resourceOwnerAccount = None | francisar/rds_manager | [
11,
11,
11,
1,
1448422655
] |
def __init__(self, size):
self._data = np.zeros((size,))
self._capacity = size
self._size = 0 | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
def __getitem__(self, index):
"""Get the value at the given index.
Parameters
----------
index : int
The index into the array.
"""
return self._data[index] | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
def __init__(self, x=0.0, y=0.0):
self.x = x
self.y = y | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
def __init__(self, x=0.0, y=0.0, z=0.0):
self.x = x
self.y = y
self.z = z | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
def __init__(self, x=0.0, y=0.0, z=0.0):
super(Vector3D, self).__init__(x, y, z) | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
def __init__(self):
self._queue = [] | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
def __contains__(self, item):
# idiomatic membership test, equivalent to the original try/index/except logic
return item in self._queue | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
def __str__(self):
return '[' + ', '.join('{}'.format(el) for el in self._queue) + ']' | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
def push(self, item):
"""Push a new element on the queue
Parameters
----------
item :
The element to push on the queue
"""
raise NotImplementedError | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
def pop(self):
"""Pop an element from the queue."""
raise NotImplementedError | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
def extend(self, items):
"""Extend the queue by a number of elements.
Parameters
----------
items : list
A list of items.
"""
for item in items:
self.push(item) | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
def remove(self, item):
"""Remove an element from the queue.
Parameters
----------
item :
The element to remove.
"""
self._queue.remove(item) | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
def __init__(self):
super(FIFOQueue, self).__init__() | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
def pop(self):
"""Return the element at the front of the queue.
Returns
-------
The first element in the queue.
"""
return self._queue.pop(0) | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
def __init__(self, func=lambda x: x):
super(PriorityQueue, self).__init__()
self.func = func | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
def __str__(self):
return '[' + ', '.join('({},{})'.format(*el) for el in self._queue) + ']' | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
def pop(self):
"""Get the element with the highest priority.
Get the element with the highest priority (i.e., smallest value).
Returns
-------
The element with the highest priority.
"""
return heapq.heappop(self._queue)[1] | evenmarbles/mlpy | [
7,
2,
7,
2,
1439328535
] |
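`push` is left abstract in the base class above, but the two `pop` implementations pin down what each subclass needs: `FIFOQueue.pop` takes from the front of a plain list, and `PriorityQueue.pop` pops `(priority, item)` pairs off a heap. A minimal sketch of matching `push` methods consistent with those `pop`s (the actual mlpy implementations may differ):

import heapq

class FIFOQueueSketch:
    def __init__(self):
        self._queue = []

    def push(self, item):
        # append to the back; pop(0) takes from the front
        self._queue.append(item)

class PriorityQueueSketch:
    def __init__(self, func=lambda x: x):
        self._queue = []
        self.func = func

    def push(self, item):
        # store (priority, item) pairs so heappop(...)[1] returns the item
        heapq.heappush(self._queue, (self.func(item), item))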
def __init__(self, plotly_name="textfont", parent_name="scattersmith", **kwargs):
super(TextfontValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Textfont"),
data_docs=kwargs.pop(
"data_docs",
"""
color
colorsrc
Sets the source reference on Chart Studio Cloud
for `color`.
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud
for `family`.
size
sizesrc
Sets the source reference on Chart Studio Cloud
for `size`. | plotly/plotly.py | [
13052,
2308,
13052,
1319,
1385013188
] |
def __init__(self, json_dict=None):
self.id = None # (Integer)
self.company = None # *(CompanyReference)
self.contact = None # (ContactReference)
self.phone = None # (String)
self.phoneExt = None # (String)
self.email = None # (String)
self.site = None # (SiteReference)
self.status = None # *(OrderStatusReference)
self.opportunity = None # (OpportunityReference)
self.orderDate = None # (String)
self.dueDate = None # (String)
self.billingTerms = None # (BillingTermsReference)
self.taxCode = None # (TaxCodeReference)
self.poNumber = None # (String(50))
self.locationId = None # (Integer)
self.businessUnitId = None # (Integer)
self.salesRep = None # *(MemberReference)
self.notes = None # (String)
self.billClosedFlag = None # (Boolean)
self.billShippedFlag = None # (Boolean)
self.restrictDownpaymentFlag = None # (Boolean)
self.description = None # (String)
self.topCommentFlag = None # (Boolean)
self.bottomCommentFlag = None # (Boolean)
self.shipToCompany = None # (CompanyReference)
self.shipToContact = None # (ContactReference)
self.shipToSite = None # (SiteReference)
self.billToCompany = None # (CompanyReference)
self.billToContact = None # (ContactReference)
self.billToSite = None # (SiteReference)
self.productIds = None # (Integer[])
self.documentIds = None # (Integer[])
self.invoiceIds = None # (Integer[])
self.configIds = None # (Integer[])
self.total = None # (Number)
self.taxTotal = None # (Number)
self._info = None # (Metadata) | joshuamsmith/ConnectPyse | [
23,
14,
23,
7,
1479865554
] |
def __init__(
self,
): ...
| JamesJeffryes/MINE-Database | [
13,
7,
13,
4,
1479244917
] |
def load_mongo(self, mongo_uri: Union[str, None] = None):
if mongo_uri:
self.mongo_uri = mongo_uri
self.client = MongoClient(mongo_uri)
else:
self.mongo_uri = "localhost:27017"
self.client = MongoClient()
self._core = self.client["core"] | JamesJeffryes/MINE-Database | [
13,
7,
13,
4,
1479244917
] |
def _eq_loaded(self):
if self.lc:
return True
else:
print("Load eQulibrator local cache.")
return False | JamesJeffryes/MINE-Database | [
13,
7,
13,
4,
1479244917
] |
def load_thermo_from_postgres(
self, postgres_uri: str = "postgresql:///eq_compounds"
): ...
| JamesJeffryes/MINE-Database | [
13,
7,
13,
4,
1479244917
] |
def load_thermo_from_sqlite(
self, sqlite_filename: str = "compounds.sqlite"
): ...
| JamesJeffryes/MINE-Database | [
13,
7,
13,
4,
1479244917
] |
def get_eQ_compound_from_cid(
self, c_id: str, pickaxe: Pickaxe = None, db_name: str = None
): ...
| JamesJeffryes/MINE-Database | [
13,
7,
13,
4,
1479244917
] |
def standard_dg_formation_from_cid(
self, c_id: str, pickaxe: Pickaxe = None, db_name: str = None
): ...
| JamesJeffryes/MINE-Database | [
13,
7,
13,
4,
1479244917
] |
def get_eQ_reaction_from_rid(
self, r_id: str, pickaxe: Pickaxe = None, db_name: str = None
): ...
| JamesJeffryes/MINE-Database | [
13,
7,
13,
4,
1479244917
] |
def physiological_dg_prime_from_rid(
self, r_id: str, pickaxe: Pickaxe = None, db_name: str = None
): ...
| JamesJeffryes/MINE-Database | [
13,
7,
13,
4,
1479244917
] |
def standard_dg_prime_from_rid(
self, r_id: str, pickaxe: Pickaxe = None, db_name: str = None
): ...
| JamesJeffryes/MINE-Database | [
13,
7,
13,
4,
1479244917
] |
def dg_prime_from_rid(
self,
r_id: str,
pickaxe: Pickaxe = None,
db_name: str = None,
p_h: Q_ = default_physiological_p_h,
p_mg: Q_ = default_physiological_p_mg,
ionic_strength: Q_ = default_physiological_ionic_strength,
): ...
| JamesJeffryes/MINE-Database | [
13,
7,
13,
4,
1479244917
] |
def __init__(
self,
*,
sampling_strategy="auto",
random_state=None,
shrinkage=None,
): ...
| scikit-learn-contrib/imbalanced-learn | [
6265,
1230,
6265,
56,
1408165706
] |
def _check_X_y(self, X, y):
y, binarize_y = check_target_type(y, indicate_one_vs_all=True)
X, y = self._validate_data(
X,
y,
reset=True,
accept_sparse=["csr", "csc"],
dtype=None,
force_all_finite=False,
)
return X, y, binarize_y | scikit-learn-contrib/imbalanced-learn | [
6265,
1230,
6265,
56,
1408165706
] |
def setUp(self):
utils.mktemp()
for filename in self.filenames:
with open(os.path.join(utils.TEST_ROOT, filename), "w"):
pass | operepo/ope | [
10,
8,
10,
78,
1499751734
] |
def test_glob(self):
import glob
pattern = os.path.join(utils.TEST_ROOT, "*")
self.assertEqual(list(fs.glob(pattern)), glob.glob(pattern)) | operepo/ope | [
10,
8,
10,
78,
1499751734
] |
def canonicalMachineName(machine=''):
aliases = {'nstxu': ['nstx', 'nstxu', 'nstx-u'],
'diiid': ['diiid', 'diii-d', 'd3d'],
'cmod': ['cmod', 'c-mod']}
for key, value in aliases.items():
if machine.lower() in value:
return key
# invalid machine name
raise FdpError('"{}" is not a valid machine name\n'.format(machine)) | Fusion-Data-Platform/fdp | [
11,
5,
11,
6,
1449233589
] |
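Usage is case-insensitive and alias-aware; for example:

canonicalMachineName('NSTX-U')  # returns 'nstxu'
canonicalMachineName('d3d')     # returns 'diiid'
canonicalMachineName('iter')    # raises FdpError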
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids is None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager() | jsubpy/jsub | [
2,
2,
2,
1,
1416218010
] |
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param) | jsubpy/jsub | [
2,
2,
2,
1,
1416218010
] |
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir) | jsubpy/jsub | [
2,
2,
2,
1,
1416218010
] |
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir) | jsubpy/jsub | [
2,
2,
2,
1,
1416218010
] |
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root) | jsubpy/jsub | [
2,
2,
2,
1,
1416218010
] |
def _no_ssl_required_on_debug(app, **kwargs):
if app.debug or app.testing:
os.environ['AUTHLIB_INSECURE_TRANSPORT'] = '1' | indico/indico | [
1446,
358,
1446,
649,
1311774990
] |
def normalized_cross(a, b):
"""
Returns the normalized cross product between vectors.
Uses numpy.cross().
"""
# Assumed completion of the truncated body: normalize the cross product
c = np.cross(a, b)
return c / np.linalg.norm(c)
| endarthur/autti | [
1,
2,
1,
8,
1491768157
] |
def general_plane_intersection(n_a, da, n_b, db):
"""
Returns a point and direction vector for the line of intersection
of two planes in space, or None if planes are parallel.
"""
| endarthur/autti | [
1,
2,
1,
8,
1491768157
] |
def small_circle_intersection(axis_a, angle_a, axis_b, angle_b):
"""
Finds the intersection between two small-circles returning zero, one or two
solutions as a tuple.
"""
| endarthur/autti | [
1,
2,
1,
8,
1491768157
] |
def build_rotation_matrix(azim, plng, rake):
"""
Returns the rotation matrix that rotates the North vector to the line given
by Azimuth and Plunge; the East and Up vectors are rotated clockwise by Rake
around the rotated North vector.
"""
| endarthur/autti | [
1,
2,
1,
8,
1491768157
] |
def adjust_lines_to_planes(lines, planes):
"""
Project each given line onto its respective plane. Returns the projected
lines as a new LineSet and the angle (in radians) between each line and
plane prior to projection.
"""
| endarthur/autti | [
1,
2,
1,
8,
1491768157
] |
def graphviz_setup(gviz_path):
os.environ['PATH'] = gviz_path + ";" + os.environ['PATH'] | geographika/mappyfile | [
54,
19,
54,
16,
1484063271
] |
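The hard-coded ";" assumes a Windows PATH separator; a portable sketch (not the mappyfile code) would use `os.pathsep`:

import os

def graphviz_setup_portable(gviz_path):
    # os.pathsep is ';' on Windows and ':' on POSIX systems
    os.environ['PATH'] = gviz_path + os.pathsep + os.environ['PATH']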
def add_children(graph, parent_id, d, level=0):
blue = "#6b6bd1"
white = "#fdfefd"
green = "#33a333"
colours = [blue, white, green] * 3
for class_, children in d.items():
colour = colours[level]
child_label = class_
child_id = parent_id + "_" + class_
add_child(graph, child_id, child_label, parent_id, colour)
add_children(graph, child_id, children, level+1) | geographika/mappyfile | [
54,
19,
54,
16,
1484063271
] |
def main(gviz_path, layer_only=False):
graphviz_setup(gviz_path)
graph = pydot.Dot(graph_type='digraph', rankdir="TB")
layer_children = {
'CLASS': {
'LABEL': {'STYLE': {}},
'CONNECTIONOPTIONS': {},
'LEADER': {'STYLE': {}},
'STYLE': {},
'VALIDATION': {}
},
'CLUSTER': {},
'COMPOSITE': {},
'FEATURE': {'POINTS': {}},
'GRID': {},
'JOIN': {},
'METADATA': {},
'PROJECTION': {},
'SCALETOKEN': {'VALUES': {}},
'VALIDATION': {}
}
# pprint.pprint(layer_children)
classes = {
"MAP": {
"LAYER": layer_children,
'LEGEND': {'LABEL': {}},
'PROJECTION': {},
'QUERYMAP': {},
'REFERENCE': {},
'SCALEBAR': {'LABEL': {}},
'SYMBOL': {},
'WEB': {'METADATA': {}, 'VALIDATION': {}}
}
}
if layer_only:
root = "LAYER"
classes = classes["MAP"]
fn = "layer_classes"
else:
fn = "map_classes"
root, = classes.keys()
node = pydot.Node(root, style="filled", fillcolor="#33a333", label=root, fontname=FONT, shape="polygon")
graph.add_node(node)
add_children(graph, root, classes[root])
save_file(graph, fn) | geographika/mappyfile | [
54,
19,
54,
16,
1484063271
] |
def playlist_handler(playlist_name, playlist_description, playlist_tracks):
# skip empty and no-name playlists
if not playlist_name: return
if len(playlist_tracks) == 0: return
# setup output files
playlist_name = playlist_name.replace('/', '')
open_log(os.path.join(output_dir,playlist_name+u'.log'))
outfile = codecs.open(os.path.join(output_dir,playlist_name+u'.csv'),
encoding='utf-8',mode='w')
# keep track of stats
stats = create_stats()
export_skipped = 0
# keep track of song ids in case we need to skip duplicates
song_ids = []
log('')
log('============================================================')
log(u'Exporting '+ unicode(len(playlist_tracks)) +u' tracks from '
+playlist_name)
log('============================================================')
# add the playlist description as a "comment"
if playlist_description:
outfile.write(tsep)
outfile.write(playlist_description)
outfile.write(os.linesep)
for tnum, pl_track in enumerate(playlist_tracks):
track = pl_track.get('track')
# we need to look up these track in the library
if not track:
library_track = [
item for item in library if item.get('id')
in pl_track.get('trackId')]
if len(library_track) == 0:
log(u'!! '+str(tnum+1)+repr(pl_track))
export_skipped += 1
continue
track = library_track[0]
result_details = create_result_details(track)
if not allow_duplicates and result_details['songid'] in song_ids:
log('{D} '+str(tnum+1)+'. '+create_details_string(result_details,True))
export_skipped += 1
continue
# update the stats
update_stats(track,stats)
# export the track
song_ids.append(result_details['songid'])
outfile.write(create_details_string(result_details))
outfile.write(os.linesep)
# calculate the stats
stats_results = calculate_stats_results(stats,len(playlist_tracks))
# output the stats to the log
log('')
log_stats(stats_results)
log(u'export skipped: '+unicode(export_skipped))
# close the files
close_log()
outfile.close() | soulfx/gmusic-playlist | [
159,
57,
159,
25,
1401254477
] |
def __init__(
self,
plotly_name="showexponent",
parent_name="scatterpolar.marker.colorbar",
**kwargs
): ...
| plotly/plotly.py | [
13052,
2308,
13052,
1319,
1385013188
] |
def __init__(self, query, explanation):
if isinstance(query, list):
query = " ".join(query)
message = f"'{query}': {explanation}"
super().__init__(message) | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def __init__(self, what, expected, detail=None):
message = f"'{what}' is not {expected}"
if detail:
message = f"{message}: {detail}"
super().__init__(message) | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def clause(self):
"""Generate an SQLite expression implementing the query.
Return (clause, subvals) where clause is a valid sqlite
WHERE clause implementing the query and subvals is a list of
items to be substituted for ?s in the clause.
"""
return None, () | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
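For context, a hedged sketch (not beets' actual call site) of how such a `(clause, subvals)` pair is typically consumed with sqlite3 parameter substitution:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE items (title TEXT)")
conn.execute("INSERT INTO items VALUES ('50% love_song')")
clause, subvals = "title like ? escape '\\'", ["%love\\_song%"]
rows = conn.execute("SELECT * FROM items WHERE " + clause, subvals).fetchall()
assert rows == [('50% love_song',)]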
def __repr__(self):
return f"{self.__class__.__name__}()" | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def __hash__(self):
return 0 | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def __init__(self, field, pattern, fast=True):
self.field = field
self.pattern = pattern
self.fast = fast | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def clause(self):
if self.fast:
return self.col_clause()
else:
# Matching a flexattr. This is a slow query.
return None, () | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def value_match(cls, pattern, value):
"""Determine whether the value matches the pattern. Both
arguments are strings.
"""
raise NotImplementedError() | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def __repr__(self):
return ("{0.__class__.__name__}({0.field!r}, {0.pattern!r}, "
"{0.fast})".format(self)) | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def __hash__(self):
return hash((self.field, hash(self.pattern))) | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def col_clause(self):
return self.field + " = ?", [self.pattern] | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def value_match(cls, pattern, value):
return pattern == value | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def __init__(self, field, fast=True):
super().__init__(field, None, fast) | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def match(self, item):
return item.get(self.field) is None | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def value_match(cls, pattern, value):
"""Determine whether the value matches the pattern. The value
may have any type.
"""
return cls.string_match(pattern, util.as_string(value)) | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def string_match(cls, pattern, value):
"""Determine whether the value matches the pattern. Both
arguments are strings. Subclasses implement this method.
"""
raise NotImplementedError() | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def col_clause(self):
search = (self.pattern
.replace('\\', '\\\\')
.replace('%', '\\%')
.replace('_', '\\_'))
clause = self.field + " like ? escape '\\'"
subvals = [search]
return clause, subvals | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def string_match(cls, pattern, value):
return pattern.lower() == value.lower() | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
def col_clause(self):
pattern = (self.pattern
.replace('\\', '\\\\')
.replace('%', '\\%')
.replace('_', '\\_'))
search = '%' + pattern + '%'
clause = self.field + " like ? escape '\\'"
subvals = [search]
return clause, subvals | beetbox/beets | [
11484,
1774,
11484,
509,
1281395840
] |
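For illustration, with a hypothetical field `title` and pattern `50%_off`, the substring `col_clause` above escapes the LIKE wildcards before wrapping the pattern in `%`:

# pattern '50%_off' -> escaped '50\%\_off' -> search '%50\%\_off%'
clause = "title like ? escape '\\'"
subvals = ['%50\\%\\_off%']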