| prompt (string, length 174–59.5k) | completion (string, length 7–228) | api (string, length 12–64) |
---|---|---|
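Each row pairs a code prefix (prompt) with the next SfePy call (completion) and its fully qualified API path (api). A minimal sketch of how such rows could be consumed, assuming the data is exported as a JSON-lines file named sfepy_api_calls.jsonl (a hypothetical name used only for illustration):

import json

# Assumption: one JSON object per line with the 'prompt', 'completion' and
# 'api' keys listed in the table above.
with open('sfepy_api_calls.jsonl') as fh:  # hypothetical file name
    for line in fh:
        row = json.loads(line)
        # 'completion' is the next SfePy call appearing in the prompt's code;
        # 'api' is its fully qualified module path.
        print(row['api'], '->', row['completion'])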
import numpy as nm
import six
from sfepy import data_dir
from sfepy.base.base import Struct, output
from sfepy.terms.terms_hyperelastic_ul import HyperElasticULFamilyData
from sfepy.homogenization.micmac import get_homog_coefs_nonlinear
import sfepy.linalg as la
hyperelastic_data = {}
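# Post-processing hook: store the cell-averaged Cauchy stress and Green strain
# in the output dictionary and, when micro-recovery is enabled, run the
# recovery hook of the micro-problem on the cached microstructure data.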
def post_process(out, pb, state, extend=False):
if isinstance(state, dict):
pass
else:
pb.update_materials_flag = 2
stress = pb.evaluate('ev_integrate_mat.1.Omega(solid.S, u)',
mode='el_avg')
out['cauchy_stress'] = Struct(name='output_data',
mode='cell',
data=stress,
dofs=None)
strain = pb.evaluate('ev_integrate_mat.1.Omega(solid.E, u)',
mode='el_avg')
out['green_strain'] = Struct(name='output_data',
mode='cell',
data=strain,
dofs=None)
pb.update_materials_flag = 0
if pb.conf.options.get('recover_micro', False):
happ = pb.homogen_app
if pb.ts.step == 0:
rname = pb.conf.options.recovery_region
rcells = pb.domain.regions[rname].get_cells()
sh = hyperelastic_data['homog_mat_shape']
happ.app_options.store_micro_idxs = sh[1] * rcells
else:
hpb = happ.problem
recovery_hook = hpb.conf.options.get('recovery_hook', None)
if recovery_hook is not None:
recovery_hook = hpb.conf.get_function(recovery_hook)
rname = pb.conf.options.recovery_region
rcoors = []
for ii in happ.app_options.store_micro_idxs:
key = happ.get_micro_cache_key('coors', ii, pb.ts.step)
if key in happ.micro_state_cache:
rcoors.append(happ.micro_state_cache[key])
recovery_hook(hpb, rcoors, pb.domain.regions[rname], pb.ts)
return out
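# Material function: return the cached homogenized coefficients when the
# materials are only being refreshed (flag == 2), do nothing outside the
# quadrature-point mode, and otherwise compute the deformation gradients, call
# the nonlinear homogenization solver and cache the resulting coefficients.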
def get_homog_mat(ts, coors, mode, term=None, problem=None, **kwargs):
if problem.update_materials_flag == 2 and mode == 'qp':
out = hyperelastic_data['homog_mat']
return {k: nm.array(v) for k, v in six.iteritems(out)}
elif problem.update_materials_flag == 0 or not mode == 'qp':
return
| output('get_homog_mat') | sfepy.base.base.output |
import numpy as nm
import six
from sfepy import data_dir
from sfepy.base.base import Struct, output
from sfepy.terms.terms_hyperelastic_ul import HyperElasticULFamilyData
from sfepy.homogenization.micmac import get_homog_coefs_nonlinear
import sfepy.linalg as la
hyperelastic_data = {}
def post_process(out, pb, state, extend=False):
if isinstance(state, dict):
pass
else:
pb.update_materials_flag = 2
stress = pb.evaluate('ev_integrate_mat.1.Omega(solid.S, u)',
mode='el_avg')
out['cauchy_stress'] = Struct(name='output_data',
mode='cell',
data=stress,
dofs=None)
strain = pb.evaluate('ev_integrate_mat.1.Omega(solid.E, u)',
mode='el_avg')
out['green_strain'] = Struct(name='output_data',
mode='cell',
data=strain,
dofs=None)
pb.update_materials_flag = 0
if pb.conf.options.get('recover_micro', False):
happ = pb.homogen_app
if pb.ts.step == 0:
rname = pb.conf.options.recovery_region
rcells = pb.domain.regions[rname].get_cells()
sh = hyperelastic_data['homog_mat_shape']
happ.app_options.store_micro_idxs = sh[1] * rcells
else:
hpb = happ.problem
recovery_hook = hpb.conf.options.get('recovery_hook', None)
if recovery_hook is not None:
recovery_hook = hpb.conf.get_function(recovery_hook)
rname = pb.conf.options.recovery_region
rcoors = []
for ii in happ.app_options.store_micro_idxs:
key = happ.get_micro_cache_key('coors', ii, pb.ts.step)
if key in happ.micro_state_cache:
rcoors.append(happ.micro_state_cache[key])
recovery_hook(hpb, rcoors, pb.domain.regions[rname], pb.ts)
return out
def get_homog_mat(ts, coors, mode, term=None, problem=None, **kwargs):
if problem.update_materials_flag == 2 and mode == 'qp':
out = hyperelastic_data['homog_mat']
return {k: nm.array(v) for k, v in six.iteritems(out)}
elif problem.update_materials_flag == 0 or not mode == 'qp':
return
output('get_homog_mat')
dim = problem.domain.mesh.dim
update_var = problem.conf.options.mesh_update_variables[0]
state_u = problem.equations.variables[update_var]
state_u.field.clear_mappings()
family_data = problem.family_data(state_u, term.region,
term.integral, term.integration)
mtx_f = family_data.mtx_f.reshape((coors.shape[0],)
+ family_data.mtx_f.shape[-2:])
out = get_homog_coefs_nonlinear(ts, coors, mode, mtx_f,
term=term, problem=problem,
iteration=problem.iiter, **kwargs)
out['E'] = 0.5 * (la.dot_sequences(mtx_f, mtx_f, 'ATB') - nm.eye(dim))
hyperelastic_data['time'] = ts.step
hyperelastic_data['homog_mat_shape'] = family_data.det_f.shape[:2]
hyperelastic_data['homog_mat'] = \
{k: nm.array(v) for k, v in six.iteritems(out)}
return out
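# Nonlinear-solver iteration hook (updated Lagrangian step): update the actual
# mesh coordinates from the current displacement and re-evaluate the
# homogenized materials for the deformed configuration.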
def ulf_iteration_hook(pb, nls, vec, it, err, err0):
vec = pb.equations.make_full_vec(vec)
pb.equations.set_variables_from_state(vec)
update_var = pb.conf.options.mesh_update_variables[0]
state_u = pb.equations.variables[update_var]
nods = state_u.field.get_dofs_in_region(state_u.field.region, merge=True)
coors = pb.domain.get_mesh_coors().copy()
coors[nods, :] += state_u().reshape(len(nods), state_u.n_components)
if len(state_u.field.mappings0) == 0:
state_u.field.save_mappings()
state_u.field.clear_mappings()
pb.set_mesh_coors(coors, update_fields=False, actual=True,
clear_all=False)
pb.iiter = it
pb.update_materials_flag = True
pb.update_materials()
pb.update_materials_flag = False
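# Residual evaluator used by the nonlinear solver: expand the reduced vector
# to a full one and assemble the residuals for a zeroed state (the deformation
# is carried by the mesh coordinates set in the iteration hook).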
class MyEvalResidual(object):
def __init__(self, problem, matrix_hook=None):
self.problem = problem
self.matrix_hook = problem.matrix_hook
def eval_residual(self, vec, is_full=False):
if not is_full:
vec = self.problem.equations.make_full_vec(vec)
vec_r = self.problem.equations.eval_residuals(vec * 0)
return vec_r
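# Pre-process hook: attach the hyperelastic family data, plug in the custom
# residual evaluator and the iteration hook, store the actual mesh coordinates
# and initialize the material-update flags.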
def ulf_init(pb):
pb.family_data = | HyperElasticULFamilyData() | sfepy.terms.terms_hyperelastic_ul.HyperElasticULFamilyData |
import numpy as nm
import six
from sfepy import data_dir
from sfepy.base.base import Struct, output
from sfepy.terms.terms_hyperelastic_ul import HyperElasticULFamilyData
from sfepy.homogenization.micmac import get_homog_coefs_nonlinear
import sfepy.linalg as la
hyperelastic_data = {}
def post_process(out, pb, state, extend=False):
if isinstance(state, dict):
pass
else:
pb.update_materials_flag = 2
stress = pb.evaluate('ev_integrate_mat.1.Omega(solid.S, u)',
mode='el_avg')
out['cauchy_stress'] = Struct(name='output_data',
mode='cell',
data=stress,
dofs=None)
strain = pb.evaluate('ev_integrate_mat.1.Omega(solid.E, u)',
mode='el_avg')
out['green_strain'] = Struct(name='output_data',
mode='cell',
data=strain,
dofs=None)
pb.update_materials_flag = 0
if pb.conf.options.get('recover_micro', False):
happ = pb.homogen_app
if pb.ts.step == 0:
rname = pb.conf.options.recovery_region
rcells = pb.domain.regions[rname].get_cells()
sh = hyperelastic_data['homog_mat_shape']
happ.app_options.store_micro_idxs = sh[1] * rcells
else:
hpb = happ.problem
recovery_hook = hpb.conf.options.get('recovery_hook', None)
if recovery_hook is not None:
recovery_hook = hpb.conf.get_function(recovery_hook)
rname = pb.conf.options.recovery_region
rcoors = []
for ii in happ.app_options.store_micro_idxs:
key = happ.get_micro_cache_key('coors', ii, pb.ts.step)
if key in happ.micro_state_cache:
rcoors.append(happ.micro_state_cache[key])
recovery_hook(hpb, rcoors, pb.domain.regions[rname], pb.ts)
return out
def get_homog_mat(ts, coors, mode, term=None, problem=None, **kwargs):
if problem.update_materials_flag == 2 and mode == 'qp':
out = hyperelastic_data['homog_mat']
return {k: nm.array(v) for k, v in six.iteritems(out)}
elif problem.update_materials_flag == 0 or not mode == 'qp':
return
output('get_homog_mat')
dim = problem.domain.mesh.dim
update_var = problem.conf.options.mesh_update_variables[0]
state_u = problem.equations.variables[update_var]
state_u.field.clear_mappings()
family_data = problem.family_data(state_u, term.region,
term.integral, term.integration)
mtx_f = family_data.mtx_f.reshape((coors.shape[0],)
+ family_data.mtx_f.shape[-2:])
out = get_homog_coefs_nonlinear(ts, coors, mode, mtx_f,
term=term, problem=problem,
iteration=problem.iiter, **kwargs)
out['E'] = 0.5 * (la.dot_sequences(mtx_f, mtx_f, 'ATB') - nm.eye(dim))
hyperelastic_data['time'] = ts.step
hyperelastic_data['homog_mat_shape'] = family_data.det_f.shape[:2]
hyperelastic_data['homog_mat'] = \
{k: nm.array(v) for k, v in six.iteritems(out)}
return out
def ulf_iteration_hook(pb, nls, vec, it, err, err0):
vec = pb.equations.make_full_vec(vec)
pb.equations.set_variables_from_state(vec)
update_var = pb.conf.options.mesh_update_variables[0]
state_u = pb.equations.variables[update_var]
nods = state_u.field.get_dofs_in_region(state_u.field.region, merge=True)
coors = pb.domain.get_mesh_coors().copy()
coors[nods, :] += state_u().reshape(len(nods), state_u.n_components)
if len(state_u.field.mappings0) == 0:
state_u.field.save_mappings()
state_u.field.clear_mappings()
pb.set_mesh_coors(coors, update_fields=False, actual=True,
clear_all=False)
pb.iiter = it
pb.update_materials_flag = True
pb.update_materials()
pb.update_materials_flag = False
class MyEvalResidual(object):
def __init__(self, problem, matrix_hook=None):
self.problem = problem
self.matrix_hook = problem.matrix_hook
def eval_residual(self, vec, is_full=False):
if not is_full:
vec = self.problem.equations.make_full_vec(vec)
vec_r = self.problem.equations.eval_residuals(vec * 0)
return vec_r
def ulf_init(pb):
pb.family_data = HyperElasticULFamilyData()
pb.init_solvers()
pb.nls.fun = MyEvalResidual(pb).eval_residual
pb.nls_iter_hook = ulf_iteration_hook
pb.domain.mesh.coors_act = pb.domain.mesh.coors.copy()
pb_vars = pb.get_variables()
pb_vars['u'].init_data()
pb.update_materials_flag = True
pb.iiter = 0
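# Macro-problem description: solver options, homogenized material, fields,
# variables, regions and boundary conditions of the 2D macroscopic problem.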
options = {
'output_dir': 'output',
'mesh_update_variables': ['u'],
'nls_iter_hook': ulf_iteration_hook,
'pre_process_hook': ulf_init,
'micro_filename': 'examples/homogenization/nonlinear_homogenization.py',
'recover_micro': True,
'recovery_region': 'Recovery',
'post_process_hook': post_process,
}
materials = {
'solid': 'get_homog',
}
fields = {
'displacement': ('real', 'vector', 'Omega', 1),
}
variables = {
'u': ('unknown field', 'displacement'),
'v': ('test field', 'displacement', 'u'),
}
filename_mesh = data_dir + '/meshes/2d/its2D.mesh'
regions = {
'Omega': 'all',
'Left': ('vertices in (x < 0.001)', 'facet'),
'Bottom': ('vertices in (y < 0.001 )', 'facet'),
'Recovery': ('cell 49, 81', 'cell'),
}
ebcs = {
'l': ('Left', {'u.all': 0.0}),
'b': ('Bottom', {'u.all': 'move_bottom'}),
}
centre = nm.array([0, 0], dtype=nm.float64)
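# Time-dependent Dirichlet condition: rotate the 'Bottom' vertices around
# `centre` by an angle growing with the time step.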
def move_bottom(ts, coor, **kwargs):
from sfepy.linalg import rotation_matrix2d
vec = coor[:, 0:2] - centre
angle = 3 * ts.step
print('angle:', angle)
mtx = | rotation_matrix2d(angle) | sfepy.linalg.rotation_matrix2d |
import numpy as nm
import six
from sfepy import data_dir
from sfepy.base.base import Struct, output
from sfepy.terms.terms_hyperelastic_ul import HyperElasticULFamilyData
from sfepy.homogenization.micmac import get_homog_coefs_nonlinear
import sfepy.linalg as la
hyperelastic_data = {}
def post_process(out, pb, state, extend=False):
if isinstance(state, dict):
pass
else:
pb.update_materials_flag = 2
stress = pb.evaluate('ev_integrate_mat.1.Omega(solid.S, u)',
mode='el_avg')
out['cauchy_stress'] = Struct(name='output_data',
mode='cell',
data=stress,
dofs=None)
strain = pb.evaluate('ev_integrate_mat.1.Omega(solid.E, u)',
mode='el_avg')
out['green_strain'] = Struct(name='output_data',
mode='cell',
data=strain,
dofs=None)
pb.update_materials_flag = 0
if pb.conf.options.get('recover_micro', False):
happ = pb.homogen_app
if pb.ts.step == 0:
rname = pb.conf.options.recovery_region
rcells = pb.domain.regions[rname].get_cells()
sh = hyperelastic_data['homog_mat_shape']
happ.app_options.store_micro_idxs = sh[1] * rcells
else:
hpb = happ.problem
recovery_hook = hpb.conf.options.get('recovery_hook', None)
if recovery_hook is not None:
recovery_hook = hpb.conf.get_function(recovery_hook)
rname = pb.conf.options.recovery_region
rcoors = []
for ii in happ.app_options.store_micro_idxs:
key = happ.get_micro_cache_key('coors', ii, pb.ts.step)
if key in happ.micro_state_cache:
rcoors.append(happ.micro_state_cache[key])
recovery_hook(hpb, rcoors, pb.domain.regions[rname], pb.ts)
return out
def get_homog_mat(ts, coors, mode, term=None, problem=None, **kwargs):
if problem.update_materials_flag == 2 and mode == 'qp':
out = hyperelastic_data['homog_mat']
return {k: nm.array(v) for k, v in six.iteritems(out)}
elif problem.update_materials_flag == 0 or not mode == 'qp':
return
output('get_homog_mat')
dim = problem.domain.mesh.dim
update_var = problem.conf.options.mesh_update_variables[0]
state_u = problem.equations.variables[update_var]
state_u.field.clear_mappings()
family_data = problem.family_data(state_u, term.region,
term.integral, term.integration)
mtx_f = family_data.mtx_f.reshape((coors.shape[0],)
+ family_data.mtx_f.shape[-2:])
out = get_homog_coefs_nonlinear(ts, coors, mode, mtx_f,
term=term, problem=problem,
iteration=problem.iiter, **kwargs)
out['E'] = 0.5 * ( | la.dot_sequences(mtx_f, mtx_f, 'ATB') | sfepy.linalg.dot_sequences |
from __future__ import absolute_import
import os
import numpy as nm
from sfepy.base.testing import TestCommon
from sfepy import data_dir
from six.moves import range
# n_vertex, n_edge, n_face, n_cell
# d1 -> d2 : num, n_incident
expected = {
'1_2_2.mesh' : ([3, 2, 0, 0], {
(0, 0) : (3, 4),
(0, 1) : (3, 4),
(1, 0) : (2, 4),
(1, 1) : (2, 2),
}),
'2_3_2.mesh' : ([4, 5, 2, 0], {
(0, 0) : (4, 10),
(0, 1) : (4, 10),
(0, 2) : (4, 6),
(1, 0) : (5, 10),
(1, 1) : (5, 16),
(1, 2) : (5, 6),
(2, 0) : (2, 6),
(2, 1) : (2, 6),
(2, 2) : (2, 2),
}),
'2_4_2.mesh' : ([6, 7, 2, 0], {
(0, 0) : (6, 22),
(0, 1) : (6, 14),
(0, 2) : (6, 8),
(1, 0) : (7, 14),
(1, 1) : (7, 20),
(1, 2) : (7, 8),
(2, 0) : (2, 8),
(2, 1) : (2, 8),
(2, 2) : (2, 2),
}),
'3_4_2.mesh' : ([5, 9, 7, 2], {
(0, 0) : (5, 18),
(0, 1) : (5, 18),
(0, 2) : (5, 21),
(0, 3) : (5, 8),
(1, 0) : (9, 18),
(1, 1) : (9, 48),
(1, 2) : (9, 21),
(1, 3) : (9, 12),
(2, 0) : (7, 21),
(2, 1) : (7, 21),
(2, 2) : (7, 42),
(2, 3) : (7, 8),
(3, 0) : (2, 8),
(3, 1) : (2, 12),
(3, 2) : (2, 8),
(3, 3) : (2, 2),
}),
'3_8_2.mesh' : ([12, 20, 11, 2], {
(0, 0) : (12, 100),
(0, 1) : (12, 40),
(0, 2) : (12, 44),
(0, 3) : (12, 16),
(1, 0) : (20, 40),
(1, 1) : (20, 96),
(1, 2) : (20, 44),
(1, 3) : (20, 24),
(2, 0) : (11, 44),
(2, 1) : (11, 44),
(2, 2) : (11, 72),
(2, 3) : (11, 12),
(3, 0) : (2, 16),
(3, 1) : (2, 24),
(3, 2) : (2, 12),
(3, 3) : (2, 2),
}),
'square_triquad.mesh' : ([470, 1127, 658, 0], {
(0, 0) : (470, 3054),
(0, 1) : (470, 2254),
(0, 2) : (470, 2174),
(1, 0) : (1127, 2254),
(1, 1) : (1127, 9174),
(1, 2) : (1127, 2174),
(2, 0) : (658, 2174),
(2, 1) : (658, 2174),
(2, 2) : (658, 6686),
}),
}
class Test(TestCommon):
@staticmethod
def from_conf(conf, options):
filename_meshes = [data_dir + '/meshes/elements/%s_2.mesh' % geom
for geom in ['1_2', '2_3', '2_4', '3_4', '3_8']]
filename_meshes.append(data_dir
+ '/meshes/2d/special/square_triquad.mesh')
test = Test(filename_meshes=filename_meshes,
conf=conf, options=options)
return test
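# Check the numbers of topological entities and the sizes of all d1 -> d2
# connectivities against the expected values, and verify that freeing and
# re-creating a connectivity restores the C-level memory usage.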
def test_cmesh_counts(self):
from sfepy.discrete.fem import Mesh
from sfepy.discrete.fem.geometry_element import create_geometry_elements
from sfepy.discrete.common.extmods.cmesh import CMesh, get_cmem_usage
gels = | create_geometry_elements() | sfepy.discrete.fem.geometry_element.create_geometry_elements |
from __future__ import absolute_import
import os
import numpy as nm
from sfepy.base.testing import TestCommon
from sfepy import data_dir
from six.moves import range
# n_vertex, n_edge, n_face, n_cell
# d1 -> d2 : num, n_incident
expected = {
'1_2_2.mesh' : ([3, 2, 0, 0], {
(0, 0) : (3, 4),
(0, 1) : (3, 4),
(1, 0) : (2, 4),
(1, 1) : (2, 2),
}),
'2_3_2.mesh' : ([4, 5, 2, 0], {
(0, 0) : (4, 10),
(0, 1) : (4, 10),
(0, 2) : (4, 6),
(1, 0) : (5, 10),
(1, 1) : (5, 16),
(1, 2) : (5, 6),
(2, 0) : (2, 6),
(2, 1) : (2, 6),
(2, 2) : (2, 2),
}),
'2_4_2.mesh' : ([6, 7, 2, 0], {
(0, 0) : (6, 22),
(0, 1) : (6, 14),
(0, 2) : (6, 8),
(1, 0) : (7, 14),
(1, 1) : (7, 20),
(1, 2) : (7, 8),
(2, 0) : (2, 8),
(2, 1) : (2, 8),
(2, 2) : (2, 2),
}),
'3_4_2.mesh' : ([5, 9, 7, 2], {
(0, 0) : (5, 18),
(0, 1) : (5, 18),
(0, 2) : (5, 21),
(0, 3) : (5, 8),
(1, 0) : (9, 18),
(1, 1) : (9, 48),
(1, 2) : (9, 21),
(1, 3) : (9, 12),
(2, 0) : (7, 21),
(2, 1) : (7, 21),
(2, 2) : (7, 42),
(2, 3) : (7, 8),
(3, 0) : (2, 8),
(3, 1) : (2, 12),
(3, 2) : (2, 8),
(3, 3) : (2, 2),
}),
'3_8_2.mesh' : ([12, 20, 11, 2], {
(0, 0) : (12, 100),
(0, 1) : (12, 40),
(0, 2) : (12, 44),
(0, 3) : (12, 16),
(1, 0) : (20, 40),
(1, 1) : (20, 96),
(1, 2) : (20, 44),
(1, 3) : (20, 24),
(2, 0) : (11, 44),
(2, 1) : (11, 44),
(2, 2) : (11, 72),
(2, 3) : (11, 12),
(3, 0) : (2, 16),
(3, 1) : (2, 24),
(3, 2) : (2, 12),
(3, 3) : (2, 2),
}),
'square_triquad.mesh' : ([470, 1127, 658, 0], {
(0, 0) : (470, 3054),
(0, 1) : (470, 2254),
(0, 2) : (470, 2174),
(1, 0) : (1127, 2254),
(1, 1) : (1127, 9174),
(1, 2) : (1127, 2174),
(2, 0) : (658, 2174),
(2, 1) : (658, 2174),
(2, 2) : (658, 6686),
}),
}
class Test(TestCommon):
@staticmethod
def from_conf(conf, options):
filename_meshes = [data_dir + '/meshes/elements/%s_2.mesh' % geom
for geom in ['1_2', '2_3', '2_4', '3_4', '3_8']]
filename_meshes.append(data_dir
+ '/meshes/2d/special/square_triquad.mesh')
test = Test(filename_meshes=filename_meshes,
conf=conf, options=options)
return test
def test_cmesh_counts(self):
from sfepy.discrete.fem import Mesh
from sfepy.discrete.fem.geometry_element import create_geometry_elements
from sfepy.discrete.common.extmods.cmesh import CMesh, get_cmem_usage
gels = create_geometry_elements()
ok = True
for filename in self.filename_meshes:
basename = os.path.basename(filename)
enum, esizes = expected[basename]
self.report('mesh: %s' % basename)
mesh = Mesh.from_file(filename)
cmesh = mesh.cmesh
cmesh.set_local_entities(gels)
cmesh.setup_entities()
self.report('dim:', cmesh.dim)
self.report('n_vertex: %d, n_edge: %d, n_face: %d, n_cell: %d' %
tuple(cmesh.num))
_ok = (enum == cmesh.num).all()
if not _ok:
self.report('%s == %s failed!' % (enum, cmesh.num))
ok = ok and _ok
dim = cmesh.dim
for ir in range(dim + 1):
for ic in range(dim + 1):
cmesh.setup_connectivity(ir, ic)
mem_usage1 = get_cmem_usage()[0]
if (ir == dim) and (ic == 0):
continue
cmesh.free_connectivity(ir, ic)
mem_usage2 = get_cmem_usage()[0]
cmesh.setup_connectivity(ir, ic)
mem_usage3 = get_cmem_usage()[0]
conn = cmesh.get_conn(ir, ic)
self.report('(%d, %d) : (%d, %d)'
% (ir, ic, conn.num, conn.n_incident))
sizes = nm.array([conn.num, conn.n_incident])
_ok = (esizes[ir, ic] == sizes).all()
if not _ok:
self.report('%s == %s failed!' % (esizes[ir, ic], sizes))
ok = ok and _ok
_ok1 = mem_usage3 == mem_usage1
_ok2 = mem_usage3 > mem_usage2
if not (_ok1 and _ok2):
self.report('unexpected memory usage! (%s, %s, %s)'
% (mem_usage1, mem_usage2, mem_usage3))
ok = ok and (_ok1 and _ok2)
return ok
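# Evaluate volumes of mesh entities (cells and facets) on a small 3D cross
# mesh using per-region scalar fields and a numerical integral.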
def test_entity_volumes(self):
import sfepy
from sfepy.discrete.fem import Mesh, FEDomain
from sfepy.discrete.common import Field
from sfepy.discrete import Integral
mesh = Mesh.from_file('meshes/3d/special/cross3d.mesh',
prefix_dir=sfepy.data_dir)
domain = | FEDomain('domain', mesh) | sfepy.discrete.fem.FEDomain |
from __future__ import absolute_import
import os
import numpy as nm
from sfepy.base.testing import TestCommon
from sfepy import data_dir
from six.moves import range
# n_vertex, n_edge, n_face, n_cell
# d1 -> d2 : num, n_incident
expected = {
'1_2_2.mesh' : ([3, 2, 0, 0], {
(0, 0) : (3, 4),
(0, 1) : (3, 4),
(1, 0) : (2, 4),
(1, 1) : (2, 2),
}),
'2_3_2.mesh' : ([4, 5, 2, 0], {
(0, 0) : (4, 10),
(0, 1) : (4, 10),
(0, 2) : (4, 6),
(1, 0) : (5, 10),
(1, 1) : (5, 16),
(1, 2) : (5, 6),
(2, 0) : (2, 6),
(2, 1) : (2, 6),
(2, 2) : (2, 2),
}),
'2_4_2.mesh' : ([6, 7, 2, 0], {
(0, 0) : (6, 22),
(0, 1) : (6, 14),
(0, 2) : (6, 8),
(1, 0) : (7, 14),
(1, 1) : (7, 20),
(1, 2) : (7, 8),
(2, 0) : (2, 8),
(2, 1) : (2, 8),
(2, 2) : (2, 2),
}),
'3_4_2.mesh' : ([5, 9, 7, 2], {
(0, 0) : (5, 18),
(0, 1) : (5, 18),
(0, 2) : (5, 21),
(0, 3) : (5, 8),
(1, 0) : (9, 18),
(1, 1) : (9, 48),
(1, 2) : (9, 21),
(1, 3) : (9, 12),
(2, 0) : (7, 21),
(2, 1) : (7, 21),
(2, 2) : (7, 42),
(2, 3) : (7, 8),
(3, 0) : (2, 8),
(3, 1) : (2, 12),
(3, 2) : (2, 8),
(3, 3) : (2, 2),
}),
'3_8_2.mesh' : ([12, 20, 11, 2], {
(0, 0) : (12, 100),
(0, 1) : (12, 40),
(0, 2) : (12, 44),
(0, 3) : (12, 16),
(1, 0) : (20, 40),
(1, 1) : (20, 96),
(1, 2) : (20, 44),
(1, 3) : (20, 24),
(2, 0) : (11, 44),
(2, 1) : (11, 44),
(2, 2) : (11, 72),
(2, 3) : (11, 12),
(3, 0) : (2, 16),
(3, 1) : (2, 24),
(3, 2) : (2, 12),
(3, 3) : (2, 2),
}),
'square_triquad.mesh' : ([470, 1127, 658, 0], {
(0, 0) : (470, 3054),
(0, 1) : (470, 2254),
(0, 2) : (470, 2174),
(1, 0) : (1127, 2254),
(1, 1) : (1127, 9174),
(1, 2) : (1127, 2174),
(2, 0) : (658, 2174),
(2, 1) : (658, 2174),
(2, 2) : (658, 6686),
}),
}
class Test(TestCommon):
@staticmethod
def from_conf(conf, options):
filename_meshes = [data_dir + '/meshes/elements/%s_2.mesh' % geom
for geom in ['1_2', '2_3', '2_4', '3_4', '3_8']]
filename_meshes.append(data_dir
+ '/meshes/2d/special/square_triquad.mesh')
test = Test(filename_meshes=filename_meshes,
conf=conf, options=options)
return test
def test_cmesh_counts(self):
from sfepy.discrete.fem import Mesh
from sfepy.discrete.fem.geometry_element import create_geometry_elements
from sfepy.discrete.common.extmods.cmesh import CMesh, get_cmem_usage
gels = create_geometry_elements()
ok = True
for filename in self.filename_meshes:
basename = os.path.basename(filename)
enum, esizes = expected[basename]
self.report('mesh: %s' % basename)
mesh = Mesh.from_file(filename)
cmesh = mesh.cmesh
cmesh.set_local_entities(gels)
cmesh.setup_entities()
self.report('dim:', cmesh.dim)
self.report('n_vertex: %d, n_edge: %d, n_face: %d, n_cell: %d' %
tuple(cmesh.num))
_ok = (enum == cmesh.num).all()
if not _ok:
self.report('%s == %s failed!' % (enum, cmesh.num))
ok = ok and _ok
dim = cmesh.dim
for ir in range(dim + 1):
for ic in range(dim + 1):
cmesh.setup_connectivity(ir, ic)
mem_usage1 = get_cmem_usage()[0]
if (ir == dim) and (ic == 0):
continue
cmesh.free_connectivity(ir, ic)
mem_usage2 = get_cmem_usage()[0]
cmesh.setup_connectivity(ir, ic)
mem_usage3 = get_cmem_usage()[0]
conn = cmesh.get_conn(ir, ic)
self.report('(%d, %d) : (%d, %d)'
% (ir, ic, conn.num, conn.n_incident))
sizes = nm.array([conn.num, conn.n_incident])
_ok = (esizes[ir, ic] == sizes).all()
if not _ok:
self.report('%s == %s failed!' % (esizes[ir, ic], sizes))
ok = ok and _ok
_ok1 = mem_usage3 == mem_usage1
_ok2 = mem_usage3 > mem_usage2
if not (_ok1 and _ok2):
self.report('unexpected memory usage! (%s, %s, %s)'
% (mem_usage1, mem_usage2, mem_usage3))
ok = ok and (_ok1 and _ok2)
return ok
def test_entity_volumes(self):
import sfepy
from sfepy.discrete.fem import Mesh, FEDomain
from sfepy.discrete.common import Field
from sfepy.discrete import Integral
mesh = Mesh.from_file('meshes/3d/special/cross3d.mesh',
prefix_dir=sfepy.data_dir)
domain = FEDomain('domain', mesh)
omega = domain.create_region('Omega', 'all')
gamma = domain.create_region('Gamma', 'vertices of surface', 'facet')
top = domain.create_region('Top', 'cell 2')
vfield = Field.from_args('v', nm.float64, 'scalar', omega,
approx_order=1)
sfield = Field.from_args('s', nm.float64, 'scalar', gamma,
approx_order=1)
integral = | Integral('i', order=3) | sfepy.discrete.Integral |
from __future__ import absolute_import
import os
import numpy as nm
from sfepy.base.testing import TestCommon
from sfepy import data_dir
from six.moves import range
# n_vertex, n_edge, n_face, n_cell
# d1 -> d2 : num, n_incident
expected = {
'1_2_2.mesh' : ([3, 2, 0, 0], {
(0, 0) : (3, 4),
(0, 1) : (3, 4),
(1, 0) : (2, 4),
(1, 1) : (2, 2),
}),
'2_3_2.mesh' : ([4, 5, 2, 0], {
(0, 0) : (4, 10),
(0, 1) : (4, 10),
(0, 2) : (4, 6),
(1, 0) : (5, 10),
(1, 1) : (5, 16),
(1, 2) : (5, 6),
(2, 0) : (2, 6),
(2, 1) : (2, 6),
(2, 2) : (2, 2),
}),
'2_4_2.mesh' : ([6, 7, 2, 0], {
(0, 0) : (6, 22),
(0, 1) : (6, 14),
(0, 2) : (6, 8),
(1, 0) : (7, 14),
(1, 1) : (7, 20),
(1, 2) : (7, 8),
(2, 0) : (2, 8),
(2, 1) : (2, 8),
(2, 2) : (2, 2),
}),
'3_4_2.mesh' : ([5, 9, 7, 2], {
(0, 0) : (5, 18),
(0, 1) : (5, 18),
(0, 2) : (5, 21),
(0, 3) : (5, 8),
(1, 0) : (9, 18),
(1, 1) : (9, 48),
(1, 2) : (9, 21),
(1, 3) : (9, 12),
(2, 0) : (7, 21),
(2, 1) : (7, 21),
(2, 2) : (7, 42),
(2, 3) : (7, 8),
(3, 0) : (2, 8),
(3, 1) : (2, 12),
(3, 2) : (2, 8),
(3, 3) : (2, 2),
}),
'3_8_2.mesh' : ([12, 20, 11, 2], {
(0, 0) : (12, 100),
(0, 1) : (12, 40),
(0, 2) : (12, 44),
(0, 3) : (12, 16),
(1, 0) : (20, 40),
(1, 1) : (20, 96),
(1, 2) : (20, 44),
(1, 3) : (20, 24),
(2, 0) : (11, 44),
(2, 1) : (11, 44),
(2, 2) : (11, 72),
(2, 3) : (11, 12),
(3, 0) : (2, 16),
(3, 1) : (2, 24),
(3, 2) : (2, 12),
(3, 3) : (2, 2),
}),
'square_triquad.mesh' : ([470, 1127, 658, 0], {
(0, 0) : (470, 3054),
(0, 1) : (470, 2254),
(0, 2) : (470, 2174),
(1, 0) : (1127, 2254),
(1, 1) : (1127, 9174),
(1, 2) : (1127, 2174),
(2, 0) : (658, 2174),
(2, 1) : (658, 2174),
(2, 2) : (658, 6686),
}),
}
class Test(TestCommon):
@staticmethod
def from_conf(conf, options):
filename_meshes = [data_dir + '/meshes/elements/%s_2.mesh' % geom
for geom in ['1_2', '2_3', '2_4', '3_4', '3_8']]
filename_meshes.append(data_dir
+ '/meshes/2d/special/square_triquad.mesh')
test = Test(filename_meshes=filename_meshes,
conf=conf, options=options)
return test
def test_cmesh_counts(self):
from sfepy.discrete.fem import Mesh
from sfepy.discrete.fem.geometry_element import create_geometry_elements
from sfepy.discrete.common.extmods.cmesh import CMesh, get_cmem_usage
gels = create_geometry_elements()
ok = True
for filename in self.filename_meshes:
basename = os.path.basename(filename)
enum, esizes = expected[basename]
self.report('mesh: %s' % basename)
mesh = | Mesh.from_file(filename) | sfepy.discrete.fem.Mesh.from_file |
from __future__ import absolute_import
import os
import numpy as nm
from sfepy.base.testing import TestCommon
from sfepy import data_dir
from six.moves import range
# n_vertex, n_edge, n_face, n_cell
# d1 -> d2 : num, n_incident
expected = {
'1_2_2.mesh' : ([3, 2, 0, 0], {
(0, 0) : (3, 4),
(0, 1) : (3, 4),
(1, 0) : (2, 4),
(1, 1) : (2, 2),
}),
'2_3_2.mesh' : ([4, 5, 2, 0], {
(0, 0) : (4, 10),
(0, 1) : (4, 10),
(0, 2) : (4, 6),
(1, 0) : (5, 10),
(1, 1) : (5, 16),
(1, 2) : (5, 6),
(2, 0) : (2, 6),
(2, 1) : (2, 6),
(2, 2) : (2, 2),
}),
'2_4_2.mesh' : ([6, 7, 2, 0], {
(0, 0) : (6, 22),
(0, 1) : (6, 14),
(0, 2) : (6, 8),
(1, 0) : (7, 14),
(1, 1) : (7, 20),
(1, 2) : (7, 8),
(2, 0) : (2, 8),
(2, 1) : (2, 8),
(2, 2) : (2, 2),
}),
'3_4_2.mesh' : ([5, 9, 7, 2], {
(0, 0) : (5, 18),
(0, 1) : (5, 18),
(0, 2) : (5, 21),
(0, 3) : (5, 8),
(1, 0) : (9, 18),
(1, 1) : (9, 48),
(1, 2) : (9, 21),
(1, 3) : (9, 12),
(2, 0) : (7, 21),
(2, 1) : (7, 21),
(2, 2) : (7, 42),
(2, 3) : (7, 8),
(3, 0) : (2, 8),
(3, 1) : (2, 12),
(3, 2) : (2, 8),
(3, 3) : (2, 2),
}),
'3_8_2.mesh' : ([12, 20, 11, 2], {
(0, 0) : (12, 100),
(0, 1) : (12, 40),
(0, 2) : (12, 44),
(0, 3) : (12, 16),
(1, 0) : (20, 40),
(1, 1) : (20, 96),
(1, 2) : (20, 44),
(1, 3) : (20, 24),
(2, 0) : (11, 44),
(2, 1) : (11, 44),
(2, 2) : (11, 72),
(2, 3) : (11, 12),
(3, 0) : (2, 16),
(3, 1) : (2, 24),
(3, 2) : (2, 12),
(3, 3) : (2, 2),
}),
'square_triquad.mesh' : ([470, 1127, 658, 0], {
(0, 0) : (470, 3054),
(0, 1) : (470, 2254),
(0, 2) : (470, 2174),
(1, 0) : (1127, 2254),
(1, 1) : (1127, 9174),
(1, 2) : (1127, 2174),
(2, 0) : (658, 2174),
(2, 1) : (658, 2174),
(2, 2) : (658, 6686),
}),
}
class Test(TestCommon):
@staticmethod
def from_conf(conf, options):
filename_meshes = [data_dir + '/meshes/elements/%s_2.mesh' % geom
for geom in ['1_2', '2_3', '2_4', '3_4', '3_8']]
filename_meshes.append(data_dir
+ '/meshes/2d/special/square_triquad.mesh')
test = Test(filename_meshes=filename_meshes,
conf=conf, options=options)
return test
def test_cmesh_counts(self):
from sfepy.discrete.fem import Mesh
from sfepy.discrete.fem.geometry_element import create_geometry_elements
from sfepy.discrete.common.extmods.cmesh import CMesh, get_cmem_usage
gels = create_geometry_elements()
ok = True
for filename in self.filename_meshes:
basename = os.path.basename(filename)
enum, esizes = expected[basename]
self.report('mesh: %s' % basename)
mesh = Mesh.from_file(filename)
cmesh = mesh.cmesh
cmesh.set_local_entities(gels)
cmesh.setup_entities()
self.report('dim:', cmesh.dim)
self.report('n_vertex: %d, n_edge: %d, n_face: %d, n_cell: %d' %
tuple(cmesh.num))
_ok = (enum == cmesh.num).all()
if not _ok:
self.report('%s == %s failed!' % (enum, cmesh.num))
ok = ok and _ok
dim = cmesh.dim
for ir in range(dim + 1):
for ic in range(dim + 1):
cmesh.setup_connectivity(ir, ic)
mem_usage1 = | get_cmem_usage() | sfepy.discrete.common.extmods.cmesh.get_cmem_usage |
from __future__ import absolute_import
import os
import numpy as nm
from sfepy.base.testing import TestCommon
from sfepy import data_dir
from six.moves import range
# n_vertex, n_edge, n_face, n_cell
# d1 -> d2 : num, n_incident
expected = {
'1_2_2.mesh' : ([3, 2, 0, 0], {
(0, 0) : (3, 4),
(0, 1) : (3, 4),
(1, 0) : (2, 4),
(1, 1) : (2, 2),
}),
'2_3_2.mesh' : ([4, 5, 2, 0], {
(0, 0) : (4, 10),
(0, 1) : (4, 10),
(0, 2) : (4, 6),
(1, 0) : (5, 10),
(1, 1) : (5, 16),
(1, 2) : (5, 6),
(2, 0) : (2, 6),
(2, 1) : (2, 6),
(2, 2) : (2, 2),
}),
'2_4_2.mesh' : ([6, 7, 2, 0], {
(0, 0) : (6, 22),
(0, 1) : (6, 14),
(0, 2) : (6, 8),
(1, 0) : (7, 14),
(1, 1) : (7, 20),
(1, 2) : (7, 8),
(2, 0) : (2, 8),
(2, 1) : (2, 8),
(2, 2) : (2, 2),
}),
'3_4_2.mesh' : ([5, 9, 7, 2], {
(0, 0) : (5, 18),
(0, 1) : (5, 18),
(0, 2) : (5, 21),
(0, 3) : (5, 8),
(1, 0) : (9, 18),
(1, 1) : (9, 48),
(1, 2) : (9, 21),
(1, 3) : (9, 12),
(2, 0) : (7, 21),
(2, 1) : (7, 21),
(2, 2) : (7, 42),
(2, 3) : (7, 8),
(3, 0) : (2, 8),
(3, 1) : (2, 12),
(3, 2) : (2, 8),
(3, 3) : (2, 2),
}),
'3_8_2.mesh' : ([12, 20, 11, 2], {
(0, 0) : (12, 100),
(0, 1) : (12, 40),
(0, 2) : (12, 44),
(0, 3) : (12, 16),
(1, 0) : (20, 40),
(1, 1) : (20, 96),
(1, 2) : (20, 44),
(1, 3) : (20, 24),
(2, 0) : (11, 44),
(2, 1) : (11, 44),
(2, 2) : (11, 72),
(2, 3) : (11, 12),
(3, 0) : (2, 16),
(3, 1) : (2, 24),
(3, 2) : (2, 12),
(3, 3) : (2, 2),
}),
'square_triquad.mesh' : ([470, 1127, 658, 0], {
(0, 0) : (470, 3054),
(0, 1) : (470, 2254),
(0, 2) : (470, 2174),
(1, 0) : (1127, 2254),
(1, 1) : (1127, 9174),
(1, 2) : (1127, 2174),
(2, 0) : (658, 2174),
(2, 1) : (658, 2174),
(2, 2) : (658, 6686),
}),
}
class Test(TestCommon):
@staticmethod
def from_conf(conf, options):
filename_meshes = [data_dir + '/meshes/elements/%s_2.mesh' % geom
for geom in ['1_2', '2_3', '2_4', '3_4', '3_8']]
filename_meshes.append(data_dir
+ '/meshes/2d/special/square_triquad.mesh')
test = Test(filename_meshes=filename_meshes,
conf=conf, options=options)
return test
def test_cmesh_counts(self):
from sfepy.discrete.fem import Mesh
from sfepy.discrete.fem.geometry_element import create_geometry_elements
from sfepy.discrete.common.extmods.cmesh import CMesh, get_cmem_usage
gels = create_geometry_elements()
ok = True
for filename in self.filename_meshes:
basename = os.path.basename(filename)
enum, esizes = expected[basename]
self.report('mesh: %s' % basename)
mesh = Mesh.from_file(filename)
cmesh = mesh.cmesh
cmesh.set_local_entities(gels)
cmesh.setup_entities()
self.report('dim:', cmesh.dim)
self.report('n_vertex: %d, n_edge: %d, n_face: %d, n_cell: %d' %
tuple(cmesh.num))
_ok = (enum == cmesh.num).all()
if not _ok:
self.report('%s == %s failed!' % (enum, cmesh.num))
ok = ok and _ok
dim = cmesh.dim
for ir in range(dim + 1):
for ic in range(dim + 1):
cmesh.setup_connectivity(ir, ic)
mem_usage1 = get_cmem_usage()[0]
if (ir == dim) and (ic == 0):
continue
cmesh.free_connectivity(ir, ic)
mem_usage2 = | get_cmem_usage() | sfepy.discrete.common.extmods.cmesh.get_cmem_usage |
from __future__ import absolute_import
import os
import numpy as nm
from sfepy.base.testing import TestCommon
from sfepy import data_dir
from six.moves import range
# n_vertex, n_edge, n_face, n_cell
# d1 -> d2 : num, n_incident
expected = {
'1_2_2.mesh' : ([3, 2, 0, 0], {
(0, 0) : (3, 4),
(0, 1) : (3, 4),
(1, 0) : (2, 4),
(1, 1) : (2, 2),
}),
'2_3_2.mesh' : ([4, 5, 2, 0], {
(0, 0) : (4, 10),
(0, 1) : (4, 10),
(0, 2) : (4, 6),
(1, 0) : (5, 10),
(1, 1) : (5, 16),
(1, 2) : (5, 6),
(2, 0) : (2, 6),
(2, 1) : (2, 6),
(2, 2) : (2, 2),
}),
'2_4_2.mesh' : ([6, 7, 2, 0], {
(0, 0) : (6, 22),
(0, 1) : (6, 14),
(0, 2) : (6, 8),
(1, 0) : (7, 14),
(1, 1) : (7, 20),
(1, 2) : (7, 8),
(2, 0) : (2, 8),
(2, 1) : (2, 8),
(2, 2) : (2, 2),
}),
'3_4_2.mesh' : ([5, 9, 7, 2], {
(0, 0) : (5, 18),
(0, 1) : (5, 18),
(0, 2) : (5, 21),
(0, 3) : (5, 8),
(1, 0) : (9, 18),
(1, 1) : (9, 48),
(1, 2) : (9, 21),
(1, 3) : (9, 12),
(2, 0) : (7, 21),
(2, 1) : (7, 21),
(2, 2) : (7, 42),
(2, 3) : (7, 8),
(3, 0) : (2, 8),
(3, 1) : (2, 12),
(3, 2) : (2, 8),
(3, 3) : (2, 2),
}),
'3_8_2.mesh' : ([12, 20, 11, 2], {
(0, 0) : (12, 100),
(0, 1) : (12, 40),
(0, 2) : (12, 44),
(0, 3) : (12, 16),
(1, 0) : (20, 40),
(1, 1) : (20, 96),
(1, 2) : (20, 44),
(1, 3) : (20, 24),
(2, 0) : (11, 44),
(2, 1) : (11, 44),
(2, 2) : (11, 72),
(2, 3) : (11, 12),
(3, 0) : (2, 16),
(3, 1) : (2, 24),
(3, 2) : (2, 12),
(3, 3) : (2, 2),
}),
'square_triquad.mesh' : ([470, 1127, 658, 0], {
(0, 0) : (470, 3054),
(0, 1) : (470, 2254),
(0, 2) : (470, 2174),
(1, 0) : (1127, 2254),
(1, 1) : (1127, 9174),
(1, 2) : (1127, 2174),
(2, 0) : (658, 2174),
(2, 1) : (658, 2174),
(2, 2) : (658, 6686),
}),
}
class Test(TestCommon):
@staticmethod
def from_conf(conf, options):
filename_meshes = [data_dir + '/meshes/elements/%s_2.mesh' % geom
for geom in ['1_2', '2_3', '2_4', '3_4', '3_8']]
filename_meshes.append(data_dir
+ '/meshes/2d/special/square_triquad.mesh')
test = Test(filename_meshes=filename_meshes,
conf=conf, options=options)
return test
def test_cmesh_counts(self):
from sfepy.discrete.fem import Mesh
from sfepy.discrete.fem.geometry_element import create_geometry_elements
from sfepy.discrete.common.extmods.cmesh import CMesh, get_cmem_usage
gels = create_geometry_elements()
ok = True
for filename in self.filename_meshes:
basename = os.path.basename(filename)
enum, esizes = expected[basename]
self.report('mesh: %s' % basename)
mesh = Mesh.from_file(filename)
cmesh = mesh.cmesh
cmesh.set_local_entities(gels)
cmesh.setup_entities()
self.report('dim:', cmesh.dim)
self.report('n_vertex: %d, n_edge: %d, n_face: %d, n_cell: %d' %
tuple(cmesh.num))
_ok = (enum == cmesh.num).all()
if not _ok:
self.report('%s == %s failed!' % (enum, cmesh.num))
ok = ok and _ok
dim = cmesh.dim
for ir in range(dim + 1):
for ic in range(dim + 1):
cmesh.setup_connectivity(ir, ic)
mem_usage1 = get_cmem_usage()[0]
if (ir == dim) and (ic == 0):
continue
cmesh.free_connectivity(ir, ic)
mem_usage2 = get_cmem_usage()[0]
cmesh.setup_connectivity(ir, ic)
mem_usage3 = | get_cmem_usage() | sfepy.discrete.common.extmods.cmesh.get_cmem_usage |
"""
Computational domain for isogeometric analysis.
"""
import os.path as op
import numpy as nm
from sfepy.base.base import assert_, Struct
from sfepy.discrete.common.domain import Domain
import sfepy.discrete.iga as iga
import sfepy.discrete.iga.io as io
from sfepy.discrete.iga.extmods.igac import eval_in_tp_coors
class NurbsPatch(Struct):
"""
Single NURBS patch data.
"""
def __init__(self, knots, degrees, cps,
weights, cs, conn):
degrees = nm.asarray(degrees, dtype=nm.int32)
cs = [nm.asarray(cc, dtype=nm.float64) for cc in cs]
if cs[0].ndim == 3:
cs = [nm.ascontiguousarray(cc[:, None, ...]) for cc in cs]
Struct.__init__(self, name='nurbs', knots=knots, degrees=degrees,
cps=cps, weights=weights, cs=cs, conn=conn)
self.n_els = [len(ii) for ii in cs]
self.dim = len(self.n_els)
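# Map parameter values along one parametric axis to knot-span indices and
# local reference coordinates in [0, 1].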
def _get_ref_coors_1d(self, pars, axis):
uk = nm.unique(self.knots[axis])
indices = nm.searchsorted(uk[1:], pars)
ref_coors = nm.empty_like(pars)
for ii in range(len(uk) - 1):
ispan = nm.where(indices == ii)[0]
pp = pars[ispan]
ref_coors[ispan] = (pp - uk[ii]) / (uk[ii+1] - uk[ii])
return uk, indices, ref_coors
def __call__(self, u=None, v=None, w=None, field=None):
"""
Igakit-like interface for NURBS evaluation.
"""
pars = [u]
if v is not None: pars += [v]
if w is not None: pars += [w]
indices = []
rcs = []
for ia, par in enumerate(pars):
uk, indx, rc = self._get_ref_coors_1d(par, ia)
indices.append(indx.astype(nm.uint32))
rcs.append(rc)
out = eval_in_tp_coors(field, indices,
rcs, self.cps, self.weights,
self.degrees,
self.cs, self.conn)
return out
def evaluate(self, field, u=None, v=None, w=None):
"""
Igakit-like interface for NURBS evaluation.
"""
return self(u, v, w, field)
def _to_igakit(self):
import igakit.cad as cad
n_efuns = self.degrees + 1
nks = nm.array([len(ii) for ii in self.knots])
shape = tuple(nks - n_efuns)
cps = self.cps.reshape(shape + (-1,))
weights = self.weights.reshape(shape)
return cad.NURBS(self.knots, cps, weights=weights)
def _from_igakit(self, inurbs):
cs = | iga.compute_bezier_extraction(inurbs.knots, inurbs.degree) | sfepy.discrete.iga.compute_bezier_extraction |
"""
Computational domain for isogeometric analysis.
"""
import os.path as op
import numpy as nm
from sfepy.base.base import assert_, Struct
from sfepy.discrete.common.domain import Domain
import sfepy.discrete.iga as iga
import sfepy.discrete.iga.io as io
from sfepy.discrete.iga.extmods.igac import eval_in_tp_coors
class NurbsPatch(Struct):
"""
Single NURBS patch data.
"""
def __init__(self, knots, degrees, cps,
weights, cs, conn):
degrees = nm.asarray(degrees, dtype=nm.int32)
cs = [nm.asarray(cc, dtype=nm.float64) for cc in cs]
if cs[0].ndim == 3:
cs = [nm.ascontiguousarray(cc[:, None, ...]) for cc in cs]
Struct.__init__(self, name='nurbs', knots=knots, degrees=degrees,
cps=cps, weights=weights, cs=cs, conn=conn)
self.n_els = [len(ii) for ii in cs]
self.dim = len(self.n_els)
def _get_ref_coors_1d(self, pars, axis):
uk = nm.unique(self.knots[axis])
indices = nm.searchsorted(uk[1:], pars)
ref_coors = nm.empty_like(pars)
for ii in range(len(uk) - 1):
ispan = nm.where(indices == ii)[0]
pp = pars[ispan]
ref_coors[ispan] = (pp - uk[ii]) / (uk[ii+1] - uk[ii])
return uk, indices, ref_coors
def __call__(self, u=None, v=None, w=None, field=None):
"""
Igakit-like interface for NURBS evaluation.
"""
pars = [u]
if v is not None: pars += [v]
if w is not None: pars += [w]
indices = []
rcs = []
for ia, par in enumerate(pars):
uk, indx, rc = self._get_ref_coors_1d(par, ia)
indices.append(indx.astype(nm.uint32))
rcs.append(rc)
out = eval_in_tp_coors(field, indices,
rcs, self.cps, self.weights,
self.degrees,
self.cs, self.conn)
return out
def evaluate(self, field, u=None, v=None, w=None):
"""
Igakit-like interface for NURBS evaluation.
"""
return self(u, v, w, field)
def _to_igakit(self):
import igakit.cad as cad
n_efuns = self.degrees + 1
nks = nm.array([len(ii) for ii in self.knots])
shape = tuple(nks - n_efuns)
cps = self.cps.reshape(shape + (-1,))
weights = self.weights.reshape(shape)
return cad.NURBS(self.knots, cps, weights=weights)
def _from_igakit(self, inurbs):
cs = iga.compute_bezier_extraction(inurbs.knots, inurbs.degree)
n_els = [len(ii) for ii in cs]
conn, bconn = iga.create_connectivity(n_els, inurbs.knots,
inurbs.degree)
cps = inurbs.points[..., :self.dim].copy()
cps = cps.reshape((-1, self.dim))
return NurbsPatch(inurbs.knots, inurbs.degree, cps,
inurbs.weights.ravel(), cs, conn)
def elevate(self, times=0):
"""
Elevate the patch degrees several `times` by one.
Returns
-------
nurbs : NurbsPatch instance
Either `self` if `times` is zero, or a new instance.
"""
if times == 0: return self
aux = self._to_igakit()
for ia in range(self.dim):
aux.elevate(ia, times)
assert_(nm.isfinite(aux.points).all(),
'igakit degree elevation failed for axis %d!' % ia)
return self._from_igakit(aux)
class IGDomain(Domain):
"""
Bezier extraction based NURBS domain for isogeometric analysis.
"""
@staticmethod
def from_file(filename):
"""
filename : str
The name of the IGA domain file.
"""
(knots, degrees, cps, weights, cs, conn,
bcps, bweights, bconn, regions) = | io.read_iga_data(filename) | sfepy.discrete.iga.io.read_iga_data |
"""
Computational domain for isogeometric analysis.
"""
import os.path as op
import numpy as nm
from sfepy.base.base import assert_, Struct
from sfepy.discrete.common.domain import Domain
import sfepy.discrete.iga as iga
import sfepy.discrete.iga.io as io
from sfepy.discrete.iga.extmods.igac import eval_in_tp_coors
class NurbsPatch(Struct):
"""
Single NURBS patch data.
"""
def __init__(self, knots, degrees, cps,
weights, cs, conn):
degrees = nm.asarray(degrees, dtype=nm.int32)
cs = [nm.asarray(cc, dtype=nm.float64) for cc in cs]
if cs[0].ndim == 3:
cs = [nm.ascontiguousarray(cc[:, None, ...]) for cc in cs]
Struct.__init__(self, name='nurbs', knots=knots, degrees=degrees,
cps=cps, weights=weights, cs=cs, conn=conn)
self.n_els = [len(ii) for ii in cs]
self.dim = len(self.n_els)
def _get_ref_coors_1d(self, pars, axis):
uk = nm.unique(self.knots[axis])
indices = nm.searchsorted(uk[1:], pars)
ref_coors = nm.empty_like(pars)
for ii in range(len(uk) - 1):
ispan = nm.where(indices == ii)[0]
pp = pars[ispan]
ref_coors[ispan] = (pp - uk[ii]) / (uk[ii+1] - uk[ii])
return uk, indices, ref_coors
def __call__(self, u=None, v=None, w=None, field=None):
"""
Igakit-like interface for NURBS evaluation.
"""
pars = [u]
if v is not None: pars += [v]
if w is not None: pars += [w]
indices = []
rcs = []
for ia, par in enumerate(pars):
uk, indx, rc = self._get_ref_coors_1d(par, ia)
indices.append(indx.astype(nm.uint32))
rcs.append(rc)
out = eval_in_tp_coors(field, indices,
rcs, self.cps, self.weights,
self.degrees,
self.cs, self.conn)
return out
def evaluate(self, field, u=None, v=None, w=None):
"""
Igakit-like interface for NURBS evaluation.
"""
return self(u, v, w, field)
def _to_igakit(self):
import igakit.cad as cad
n_efuns = self.degrees + 1
nks = nm.array([len(ii) for ii in self.knots])
shape = tuple(nks - n_efuns)
cps = self.cps.reshape(shape + (-1,))
weights = self.weights.reshape(shape)
return cad.NURBS(self.knots, cps, weights=weights)
def _from_igakit(self, inurbs):
cs = iga.compute_bezier_extraction(inurbs.knots, inurbs.degree)
n_els = [len(ii) for ii in cs]
conn, bconn = iga.create_connectivity(n_els, inurbs.knots,
inurbs.degree)
cps = inurbs.points[..., :self.dim].copy()
cps = cps.reshape((-1, self.dim))
return NurbsPatch(inurbs.knots, inurbs.degree, cps,
inurbs.weights.ravel(), cs, conn)
def elevate(self, times=0):
"""
Elevate the patch degrees several `times` by one.
Returns
-------
nurbs : NurbsPatch instance
Either `self` if `times` is zero, or a new instance.
"""
if times == 0: return self
aux = self._to_igakit()
for ia in range(self.dim):
aux.elevate(ia, times)
assert_(nm.isfinite(aux.points).all(),
'igakit degree elevation failed for axis %d!' % ia)
return self._from_igakit(aux)
class IGDomain(Domain):
"""
Bezier extraction based NURBS domain for isogeometric analysis.
"""
@staticmethod
def from_file(filename):
"""
filename : str
The name of the IGA domain file.
"""
(knots, degrees, cps, weights, cs, conn,
bcps, bweights, bconn, regions) = io.read_iga_data(filename)
nurbs = NurbsPatch(knots, degrees, cps, weights, cs, conn)
bmesh = | Struct(name='bmesh', cps=bcps, weights=bweights, conn=bconn) | sfepy.base.base.Struct |
"""
Computational domain for isogeometric analysis.
"""
import os.path as op
import numpy as nm
from sfepy.base.base import assert_, Struct
from sfepy.discrete.common.domain import Domain
import sfepy.discrete.iga as iga
import sfepy.discrete.iga.io as io
from sfepy.discrete.iga.extmods.igac import eval_in_tp_coors
class NurbsPatch(Struct):
"""
Single NURBS patch data.
"""
def __init__(self, knots, degrees, cps,
weights, cs, conn):
degrees = nm.asarray(degrees, dtype=nm.int32)
cs = [nm.asarray(cc, dtype=nm.float64) for cc in cs]
if cs[0].ndim == 3:
cs = [nm.ascontiguousarray(cc[:, None, ...]) for cc in cs]
Struct.__init__(self, name='nurbs', knots=knots, degrees=degrees,
cps=cps, weights=weights, cs=cs, conn=conn)
self.n_els = [len(ii) for ii in cs]
self.dim = len(self.n_els)
def _get_ref_coors_1d(self, pars, axis):
uk = nm.unique(self.knots[axis])
indices = nm.searchsorted(uk[1:], pars)
ref_coors = nm.empty_like(pars)
for ii in range(len(uk) - 1):
ispan = nm.where(indices == ii)[0]
pp = pars[ispan]
ref_coors[ispan] = (pp - uk[ii]) / (uk[ii+1] - uk[ii])
return uk, indices, ref_coors
def __call__(self, u=None, v=None, w=None, field=None):
"""
Igakit-like interface for NURBS evaluation.
"""
pars = [u]
if v is not None: pars += [v]
if w is not None: pars += [w]
indices = []
rcs = []
for ia, par in enumerate(pars):
uk, indx, rc = self._get_ref_coors_1d(par, ia)
indices.append(indx.astype(nm.uint32))
rcs.append(rc)
out = eval_in_tp_coors(field, indices,
rcs, self.cps, self.weights,
self.degrees,
self.cs, self.conn)
return out
def evaluate(self, field, u=None, v=None, w=None):
"""
Igakit-like interface for NURBS evaluation.
"""
return self(u, v, w, field)
def _to_igakit(self):
import igakit.cad as cad
n_efuns = self.degrees + 1
nks = nm.array([len(ii) for ii in self.knots])
shape = tuple(nks - n_efuns)
cps = self.cps.reshape(shape + (-1,))
weights = self.weights.reshape(shape)
return cad.NURBS(self.knots, cps, weights=weights)
def _from_igakit(self, inurbs):
cs = iga.compute_bezier_extraction(inurbs.knots, inurbs.degree)
n_els = [len(ii) for ii in cs]
conn, bconn = iga.create_connectivity(n_els, inurbs.knots,
inurbs.degree)
cps = inurbs.points[..., :self.dim].copy()
cps = cps.reshape((-1, self.dim))
return NurbsPatch(inurbs.knots, inurbs.degree, cps,
inurbs.weights.ravel(), cs, conn)
def elevate(self, times=0):
"""
Elevate the patch degrees several `times` by one.
Returns
-------
nurbs : NurbsPatch instance
Either `self` if `times` is zero, or a new instance.
"""
if times == 0: return self
aux = self._to_igakit()
for ia in range(self.dim):
aux.elevate(ia, times)
assert_(nm.isfinite(aux.points).all(),
'igakit degree elevation failed for axis %d!' % ia)
return self._from_igakit(aux)
class IGDomain(Domain):
"""
Bezier extraction based NURBS domain for isogeometric analysis.
"""
@staticmethod
def from_file(filename):
"""
filename : str
The name of the IGA domain file.
"""
(knots, degrees, cps, weights, cs, conn,
bcps, bweights, bconn, regions) = io.read_iga_data(filename)
nurbs = NurbsPatch(knots, degrees, cps, weights, cs, conn)
bmesh = Struct(name='bmesh', cps=bcps, weights=bweights, conn=bconn)
name = op.splitext(filename)[0]
domain = IGDomain(name, nurbs=nurbs, bmesh=bmesh, regions=regions)
return domain
def __init__(self, name, nurbs, bmesh, regions=None, **kwargs):
"""
Create an IGA domain.
Parameters
----------
name : str
The domain name.
"""
Domain.__init__(self, name, nurbs=nurbs, bmesh=bmesh, regions=regions,
**kwargs)
from sfepy.discrete.fem.geometry_element import create_geometry_elements
from sfepy.discrete.fem import Mesh
from sfepy.discrete.fem.utils import prepare_remap
tconn = | iga.get_bezier_topology(bmesh.conn, nurbs.degrees) | sfepy.discrete.iga.get_bezier_topology |
"""
Computational domain for isogeometric analysis.
"""
import os.path as op
import numpy as nm
from sfepy.base.base import assert_, Struct
from sfepy.discrete.common.domain import Domain
import sfepy.discrete.iga as iga
import sfepy.discrete.iga.io as io
from sfepy.discrete.iga.extmods.igac import eval_in_tp_coors
class NurbsPatch(Struct):
"""
Single NURBS patch data.
"""
def __init__(self, knots, degrees, cps,
weights, cs, conn):
degrees = nm.asarray(degrees, dtype=nm.int32)
cs = [nm.asarray(cc, dtype=nm.float64) for cc in cs]
if cs[0].ndim == 3:
cs = [nm.ascontiguousarray(cc[:, None, ...]) for cc in cs]
Struct.__init__(self, name='nurbs', knots=knots, degrees=degrees,
cps=cps, weights=weights, cs=cs, conn=conn)
self.n_els = [len(ii) for ii in cs]
self.dim = len(self.n_els)
def _get_ref_coors_1d(self, pars, axis):
uk = nm.unique(self.knots[axis])
indices = nm.searchsorted(uk[1:], pars)
ref_coors = nm.empty_like(pars)
for ii in range(len(uk) - 1):
ispan = nm.where(indices == ii)[0]
pp = pars[ispan]
ref_coors[ispan] = (pp - uk[ii]) / (uk[ii+1] - uk[ii])
return uk, indices, ref_coors
def __call__(self, u=None, v=None, w=None, field=None):
"""
Igakit-like interface for NURBS evaluation.
"""
pars = [u]
if v is not None: pars += [v]
if w is not None: pars += [w]
indices = []
rcs = []
for ia, par in enumerate(pars):
uk, indx, rc = self._get_ref_coors_1d(par, ia)
indices.append(indx.astype(nm.uint32))
rcs.append(rc)
out = eval_in_tp_coors(field, indices,
rcs, self.cps, self.weights,
self.degrees,
self.cs, self.conn)
return out
def evaluate(self, field, u=None, v=None, w=None):
"""
Igakit-like interface for NURBS evaluation.
"""
return self(u, v, w, field)
def _to_igakit(self):
import igakit.cad as cad
n_efuns = self.degrees + 1
nks = nm.array([len(ii) for ii in self.knots])
shape = tuple(nks - n_efuns)
cps = self.cps.reshape(shape + (-1,))
weights = self.weights.reshape(shape)
return cad.NURBS(self.knots, cps, weights=weights)
def _from_igakit(self, inurbs):
cs = iga.compute_bezier_extraction(inurbs.knots, inurbs.degree)
n_els = [len(ii) for ii in cs]
conn, bconn = iga.create_connectivity(n_els, inurbs.knots,
inurbs.degree)
cps = inurbs.points[..., :self.dim].copy()
cps = cps.reshape((-1, self.dim))
return NurbsPatch(inurbs.knots, inurbs.degree, cps,
inurbs.weights.ravel(), cs, conn)
def elevate(self, times=0):
"""
Elevate the patch degrees several `times` by one.
Returns
-------
nurbs : NurbsPatch instance
Either `self` if `times` is zero, or a new instance.
"""
if times == 0: return self
aux = self._to_igakit()
for ia in range(self.dim):
aux.elevate(ia, times)
assert_(nm.isfinite(aux.points).all(),
'igakit degree elevation failed for axis %d!' % ia)
return self._from_igakit(aux)
class IGDomain(Domain):
"""
Bezier extraction based NURBS domain for isogeometric analysis.
"""
@staticmethod
def from_file(filename):
"""
filename : str
The name of the IGA domain file.
"""
(knots, degrees, cps, weights, cs, conn,
bcps, bweights, bconn, regions) = io.read_iga_data(filename)
nurbs = NurbsPatch(knots, degrees, cps, weights, cs, conn)
bmesh = Struct(name='bmesh', cps=bcps, weights=bweights, conn=bconn)
name = op.splitext(filename)[0]
domain = IGDomain(name, nurbs=nurbs, bmesh=bmesh, regions=regions)
return domain
def __init__(self, name, nurbs, bmesh, regions=None, **kwargs):
"""
Create an IGA domain.
Parameters
----------
name : str
The domain name.
"""
Domain.__init__(self, name, nurbs=nurbs, bmesh=bmesh, regions=regions,
**kwargs)
from sfepy.discrete.fem.geometry_element import create_geometry_elements
from sfepy.discrete.fem import Mesh
from sfepy.discrete.fem.utils import prepare_remap
tconn = iga.get_bezier_topology(bmesh.conn, nurbs.degrees)
itc = nm.unique(tconn)
remap = prepare_remap(itc, bmesh.conn.max() + 1)
ltcoors = bmesh.cps[itc]
ltconn = remap[tconn]
n_nod, dim = ltcoors.shape
n_el = ltconn.shape[0]
self.shape = | Struct(n_nod=n_nod, dim=dim, tdim=0, n_el=n_el) | sfepy.base.base.Struct |
"""
Computational domain for isogeometric analysis.
"""
import os.path as op
import numpy as nm
from sfepy.base.base import assert_, Struct
from sfepy.discrete.common.domain import Domain
import sfepy.discrete.iga as iga
import sfepy.discrete.iga.io as io
from sfepy.discrete.iga.extmods.igac import eval_in_tp_coors
class NurbsPatch(Struct):
"""
Single NURBS patch data.
"""
def __init__(self, knots, degrees, cps,
weights, cs, conn):
degrees = nm.asarray(degrees, dtype=nm.int32)
cs = [nm.asarray(cc, dtype=nm.float64) for cc in cs]
if cs[0].ndim == 3:
cs = [nm.ascontiguousarray(cc[:, None, ...]) for cc in cs]
Struct.__init__(self, name='nurbs', knots=knots, degrees=degrees,
cps=cps, weights=weights, cs=cs, conn=conn)
self.n_els = [len(ii) for ii in cs]
self.dim = len(self.n_els)
def _get_ref_coors_1d(self, pars, axis):
uk = nm.unique(self.knots[axis])
indices = nm.searchsorted(uk[1:], pars)
ref_coors = nm.empty_like(pars)
for ii in range(len(uk) - 1):
ispan = nm.where(indices == ii)[0]
pp = pars[ispan]
ref_coors[ispan] = (pp - uk[ii]) / (uk[ii+1] - uk[ii])
return uk, indices, ref_coors
def __call__(self, u=None, v=None, w=None, field=None):
"""
Igakit-like interface for NURBS evaluation.
"""
pars = [u]
if v is not None: pars += [v]
if w is not None: pars += [w]
indices = []
rcs = []
for ia, par in enumerate(pars):
uk, indx, rc = self._get_ref_coors_1d(par, ia)
indices.append(indx.astype(nm.uint32))
rcs.append(rc)
out = eval_in_tp_coors(field, indices,
rcs, self.cps, self.weights,
self.degrees,
self.cs, self.conn)
return out
def evaluate(self, field, u=None, v=None, w=None):
"""
Igakit-like interface for NURBS evaluation.
"""
return self(u, v, w, field)
def _to_igakit(self):
import igakit.cad as cad
n_efuns = self.degrees + 1
nks = nm.array([len(ii) for ii in self.knots])
shape = tuple(nks - n_efuns)
cps = self.cps.reshape(shape + (-1,))
weights = self.weights.reshape(shape)
return cad.NURBS(self.knots, cps, weights=weights)
def _from_igakit(self, inurbs):
cs = iga.compute_bezier_extraction(inurbs.knots, inurbs.degree)
n_els = [len(ii) for ii in cs]
conn, bconn = iga.create_connectivity(n_els, inurbs.knots,
inurbs.degree)
cps = inurbs.points[..., :self.dim].copy()
cps = cps.reshape((-1, self.dim))
return NurbsPatch(inurbs.knots, inurbs.degree, cps,
inurbs.weights.ravel(), cs, conn)
def elevate(self, times=0):
"""
Elevate the patch degrees several `times` by one.
Returns
-------
nurbs : NurbsPatch instance
Either `self` if `times` is zero, or a new instance.
"""
if times == 0: return self
aux = self._to_igakit()
for ia in range(self.dim):
aux.elevate(ia, times)
assert_(nm.isfinite(aux.points).all(),
'igakit degree elevation failed for axis %d!' % ia)
return self._from_igakit(aux)
class IGDomain(Domain):
"""
Bezier extraction based NURBS domain for isogeometric analysis.
"""
@staticmethod
def from_file(filename):
"""
filename : str
The name of the IGA domain file.
"""
(knots, degrees, cps, weights, cs, conn,
bcps, bweights, bconn, regions) = io.read_iga_data(filename)
nurbs = NurbsPatch(knots, degrees, cps, weights, cs, conn)
bmesh = Struct(name='bmesh', cps=bcps, weights=bweights, conn=bconn)
name = op.splitext(filename)[0]
domain = IGDomain(name, nurbs=nurbs, bmesh=bmesh, regions=regions)
return domain
def __init__(self, name, nurbs, bmesh, regions=None, **kwargs):
"""
Create an IGA domain.
Parameters
----------
name : str
The domain name.
"""
Domain.__init__(self, name, nurbs=nurbs, bmesh=bmesh, regions=regions,
**kwargs)
from sfepy.discrete.fem.geometry_element import create_geometry_elements
from sfepy.discrete.fem import Mesh
from sfepy.discrete.fem.utils import prepare_remap
tconn = iga.get_bezier_topology(bmesh.conn, nurbs.degrees)
itc = nm.unique(tconn)
remap = prepare_remap(itc, bmesh.conn.max() + 1)
ltcoors = bmesh.cps[itc]
ltconn = remap[tconn]
n_nod, dim = ltcoors.shape
n_el = ltconn.shape[0]
self.shape = Struct(n_nod=n_nod, dim=dim, tdim=0, n_el=n_el)
desc = '%d_%d' % (dim, bmesh.conn.shape[1])
mat_id = nm.zeros(bmesh.conn.shape[0], dtype=nm.int32)
eval_mesh = Mesh.from_data(self.name + '_eval', nurbs.cps, None,
[nurbs.conn], [mat_id], [desc])
self.eval_mesh = eval_mesh
desc = '%d_%d' % (dim, 2**dim)
mat_id = nm.zeros(ltconn.shape[0], dtype=nm.int32)
self.mesh = Mesh.from_data(self.name + '_topo', ltcoors, None, [ltconn],
[mat_id], [desc])
self.cmesh = self.mesh.cmesh
gels = | create_geometry_elements() | sfepy.discrete.fem.geometry_element.create_geometry_elements |
input_names = {'TL': '../examples/large_deformation/hyperelastic.py',
'UL': '../examples/large_deformation/hyperelastic_ul.py',
'ULM': '../examples/large_deformation/hyperelastic_ul_up.py'}
output_name_trunk = 'test_hyperelastic_'
from sfepy.base.testing import TestCommon
from tests_basic import NLSStatus
class Test(TestCommon):
@staticmethod
def from_conf(conf, options):
return Test(conf = conf, options = options)
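# Solve the hyperelastic problem with each formulation (TL, UL, ULM) and
# collect the results for mutual comparison.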
def test_solution(self):
from sfepy.base.base import Struct
from sfepy.base.conf import ProblemConf, get_standard_keywords
from sfepy.applications import solve_pde, assign_standard_hooks
import numpy as nm
import os.path as op
solutions = {}
ok = True
for hp, pb_filename in input_names.items():
required, other = | get_standard_keywords() | sfepy.base.conf.get_standard_keywords |
input_names = {'TL': '../examples/large_deformation/hyperelastic.py',
'UL': '../examples/large_deformation/hyperelastic_ul.py',
'ULM': '../examples/large_deformation/hyperelastic_ul_up.py'}
output_name_trunk = 'test_hyperelastic_'
from sfepy.base.testing import TestCommon
from tests_basic import NLSStatus
class Test(TestCommon):
@staticmethod
def from_conf(conf, options):
return Test(conf = conf, options = options)
def test_solution(self):
from sfepy.base.base import Struct
from sfepy.base.conf import ProblemConf, get_standard_keywords
from sfepy.applications import solve_pde, assign_standard_hooks
import numpy as nm
import os.path as op
solutions = {}
ok = True
for hp, pb_filename in input_names.items():
required, other = get_standard_keywords()
input_name = op.join(op.dirname(__file__), pb_filename)
test_conf = | ProblemConf.from_file(input_name, required, other) | sfepy.base.conf.ProblemConf.from_file |
input_names = {'TL': '../examples/large_deformation/hyperelastic.py',
'UL': '../examples/large_deformation/hyperelastic_ul.py',
'ULM': '../examples/large_deformation/hyperelastic_ul_up.py'}
output_name_trunk = 'test_hyperelastic_'
from sfepy.base.testing import TestCommon
from tests_basic import NLSStatus
class Test(TestCommon):
@staticmethod
def from_conf(conf, options):
return Test(conf = conf, options = options)
def test_solution(self):
from sfepy.base.base import Struct
from sfepy.base.conf import ProblemConf, get_standard_keywords
from sfepy.applications import solve_pde, assign_standard_hooks
import numpy as nm
import os.path as op
solutions = {}
ok = True
        for hp, pb_filename in input_names.items():
required, other = get_standard_keywords()
input_name = op.join(op.dirname(__file__), pb_filename)
test_conf = ProblemConf.from_file(input_name, required, other)
name = output_name_trunk + hp
solver_options = Struct(output_filename_trunk=name,
output_format='vtk',
save_ebc=False, save_ebc_nodes=False,
save_regions=False,
save_regions_as_groups=False,
save_field_meshes=False,
solve_not=False)
| assign_standard_hooks(self, test_conf.options.get, test_conf) | sfepy.applications.assign_standard_hooks |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = | get_default(var_indx, {}) | sfepy.base.base.get_default |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in | iter_dict_of_lists(conn_info, return_keys=True) | sfepy.base.base.iter_dict_of_lists |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
| output('setting up dof connectivities...') | sfepy.base.base.output |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = | Timer(start=True) | sfepy.base.timing.Timer |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
| Container.__setitem__(self, ii, var) | sfepy.base.base.Container.__setitem__ |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = | DofInfo('state_dof_info') | sfepy.discrete.common.dof_info.DofInfo |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = | LCBCOperators('lcbcs', self, functions=functions) | sfepy.discrete.fem.lcbc_operators.LCBCOperators |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
# Assing EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = | DofInfo('active_state_dof_info') | sfepy.discrete.common.dof_info.DofInfo |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
    If a variable does not have an equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
        if key not in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
            if key not in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
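# Illustrative helper (added, not in the original module): decode the
# ``-1 - <DOF number>`` convention described in the create_adof_conns() notes
# above, used when `active_only` is False. The sample values in the docstring
# are made up; only the encoding rule matters.
def _decode_constrained_dofs(adc):
    """Return full DOF numbers, e.g. [3, -8, 5] -> [3, 7, 5] (-8 encodes 7)."""
    adc = nm.asarray(adc)
    return nm.where(adc < 0, -1 - adc, adc)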
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
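# Minimal layout sketch for create_adof_conn() (added for illustration): one
# element with three nodes, two DOFs per node and an identity equation
# mapping. The arrays are made up; only the DOF-by-DOF row ordering matters.
def _adof_conn_layout_example():
    eq = nm.arange(6, dtype=nm.int32)              # 3 nodes x 2 DOFs per node
    conn = nm.array([[0, 1, 2]], dtype=nm.int32)   # a single element
    adc = create_adof_conn(eq, conn, 2, 0)
    # Row layout: u_0 at nodes 0, 1, 2, then u_1 at nodes 0, 1, 2.
    assert (adc == [[0, 2, 4, 1, 3, 5]]).all()
    return adc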
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
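# Shape sketch for expand_basis() (added for illustration): a scalar basis
# with four basis functions expanded for three DOFs per node. The array is
# made up; only the resulting block shape is of interest.
def _expand_basis_shape_example():
    basis = nm.ones((1, 1, 1, 4), dtype=nm.float64)  # (cell, qp, n_c, n_bf)
    ebasis = expand_basis(basis, 3)
    assert ebasis.shape == (1, 1, 3, 12)             # (cell, qp, dpn, n_bf*dpn)
    return ebasis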
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
        Link state variables with their corresponding virtual variables,
        and store a reference to this container in each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
            except AttributeError:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
        if (ts is None) or (ts.step == 0):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
        # Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
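    # Hedged usage sketch (added): for a fully set-up Variables instance
    # `variables` with equation mappings and no LCBCs, strip_state_vector()
    # and make_full_vec() are inverse operations up to the boundary condition
    # values:
    #
    #   vec = variables.create_state_vector()
    #   variables.apply_ebc(vec)
    #   svec = variables.strip_state_vector(vec)  # E(P)BC DOFs removed
    #   full = variables.make_full_vec(svec)      # EBC values re-inserted
    #   assert variables.has_ebc(full)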
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
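    # Editorial sketch (hypothetical names): for state variables 'u' and 'p',
    #
    #     parts = variables.get_state_parts(vec)
    #
    # returns {'u' : vec[di.indx['u']], 'p' : vec[di.indx['p']]}, i.e. one
    # slice view of ``vec`` per state variable.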
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
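    # Editorial sketch (hypothetical names): data can be passed either as a
    # full state vector or per variable,
    #
    #     variables.set_data(vec)                          # full DOF vector
    #     variables.set_data({'u' : vec_u}, ignore_unknown=True)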
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
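    # Editorial sketch (hypothetical usage): the resulting dict is meant for
    # a mesh writer, e.g.
    #
    #     out = variables.state_to_output(vec)
    #     problem.domain.mesh.write('output.vtk', out=out)
    #
    # where ``problem`` and the file name are placeholders.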
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
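    # Editorial sketch of a variable configuration accepted by from_conf()
    # (hypothetical names, declarative problem description style):
    #
    #     variables = {
    #         'u' : ('unknown field', 'displacement', 0),
    #         'v' : ('test field', 'displacement', 'u'),
    #     }
    #
    # i.e. ``kind`` is two (or three) words, followed by the field name and
    # either the order (unknowns) or the dual variable name (test variables).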
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
| Struct.__init__(self, name=name, **kwargs) | sfepy.base.base.Struct.__init__ |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
# Assing EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
if self.primary_var_name == '(set-to-None)':
self.primary_var_name = None
self.dof_name = self.name
else:
self.dof_name = self.primary_var_name
if special is not None:
self.special = special
else:
raise NotImplementedError('unknown variable kind: %s' % kind)
self.kind = kind
def _setup_dofs(self, n_nod, n_components, val_shape):
"""
Setup number of DOFs and DOF names.
"""
self.n_nod = n_nod
self.n_components = n_components
self.val_shape = val_shape
self.n_dof = self.n_nod * self.n_components
self.dofs = [self.dof_name + ('.%d' % ii)
for ii in range(self.n_components)]
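    # Editorial sketch: for a variable named 'u' with 3 components, this
    # yields n_dof = 3 * n_nod and dofs == ['u.0', 'u.1', 'u.2'].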
def get_primary(self):
"""
Get the corresponding primary variable.
Returns
-------
var : Variable instance
            The primary variable: `self` for state variables or when
            `primary_var_name` is None; otherwise the variable named by
            `primary_var_name`, or None if it is not among the defined
            variables.
"""
if self.is_state():
var = self
elif self.primary_var_name is not None:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
else:
var = self
return var
def get_dual(self):
"""
Get the dual variable.
Returns
-------
var : Variable instance
The primary variable for non-state variables, or the dual
variable for state variables.
"""
if self.is_state():
if ((self._variables is not None)
and (self.dual_var_name in self._variables.names)):
var = self._variables[self.dual_var_name]
else:
var = None
else:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
return var
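    # Editorial sketch (hypothetical names): once a state variable ``u`` and
    # its test variable ``v`` are registered in a Variables container,
    #
    #     u.get_dual() is v      # state -> virtual
    #     v.get_primary() is u   # virtual -> state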
def is_state(self):
return is_state in self.flags
def is_virtual(self):
return is_virtual in self.flags
def is_parameter(self):
return is_parameter in self.flags
def is_state_or_parameter(self):
return (is_state in self.flags) or (is_parameter in self.flags)
def is_kind(self, kind):
return eval('self.is_%s()' % kind)
def is_real(self):
return self.dtype in real_types
def is_complex(self):
return self.dtype in complex_types
def is_finite(self, step=0, derivative=None, dt=None):
return nm.isfinite(self(step=step, derivative=derivative, dt=dt)).all()
def get_primary_name(self):
if self.is_state():
name = self.name
else:
name = self.primary_var_name
return name
def init_history(self):
"""Initialize data of variables with history."""
if self.history is None: return
self.data = deque((self.history + 1) * [None])
self.step = 0
def time_update(self, ts, functions):
"""Implemented in subclasses."""
pass
def advance(self, ts):
"""
Advance in time the DOF state history. A copy of the DOF vector
is made to prevent history modification.
"""
if self.history is None: return
self.step = ts.step + 1
if self.history > 0:
# Copy the current step data to the history data, shift history,
# initialize if needed. The current step data are left intact.
# Note: cannot use self.data.rotate() due to data sharing with
# State.
for ii in range(self.history, 0, -1):
if self.data[ii] is None:
self.data[ii] = nm.empty_like(self.data[0])
self.data[ii][:] = self.data[ii - 1]
# Advance evaluate cache.
for step_cache in six.itervalues(self.evaluate_cache):
steps = sorted(step_cache.keys())
for step in steps:
if step is None:
# Special caches with possible custom advance()
# function.
for key, val in six.iteritems(step_cache[step]):
if hasattr(val, '__advance__'):
val.__advance__(ts, val)
elif -step < self.history:
step_cache[step-1] = step_cache[step]
if len(steps) and (steps[0] is not None):
step_cache.pop(steps[-1])
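    # Editorial note: with history = 1, after advance() the previous step's
    # DOF vector becomes available as self(step=-1), while self(step=0) keeps
    # referring to the current data.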
def init_data(self, step=0):
"""
Initialize the dof vector data of time step `step` to zeros.
"""
if self.is_state_or_parameter():
data = nm.zeros((self.n_dof,), dtype=self.dtype)
self.set_data(data, step=step)
def set_constant(self, val):
"""
Set the variable to a constant value.
"""
data = nm.empty((self.n_dof,), dtype=self.dtype)
data.fill(val)
self.set_data(data)
def set_data(self, data=None, indx=None, step=0,
preserve_caches=False):
"""
Set data (vector of DOF values) of the variable.
Parameters
----------
data : array
The vector of DOF values.
indx : int, optional
If given, `data[indx]` is used.
step : int, optional
The time history step, 0 (default) = current.
preserve_caches : bool
If True, do not invalidate evaluate caches of the variable.
"""
data = data.ravel()
if indx is None:
indx = slice(0, len(data))
else:
indx = slice(int(indx.start), int(indx.stop))
n_data_dof = indx.stop - indx.start
if self.n_dof != n_data_dof:
msg = 'incompatible data shape! (%d (variable) == %d (data))' \
% (self.n_dof, n_data_dof)
raise ValueError(msg)
elif (step > 0) or (-step >= len(self.data)):
raise ValueError('step %d out of range! ([%d, 0])'
% (step, -(len(self.data) - 1)))
else:
self.data[step] = data
self.indx = indx
if not preserve_caches:
self.invalidate_evaluate_cache(step=step)
def __call__(self, step=0, derivative=None, dt=None):
"""
Return vector of degrees of freedom of the variable.
Parameters
----------
step : int, default 0
The time step (0 means current, -1 previous, ...).
derivative : None or 'dt'
If not None, return time derivative of the DOF vector,
approximated by the backward finite difference.
Returns
-------
vec : array
The DOF vector. If `derivative` is None: a view of the data vector,
otherwise: required derivative of the DOF vector
at time step given by `step`.
Notes
-----
If the previous time step is requested in step 0, the step 0
DOF vector is returned instead.
"""
if derivative is None:
if (self.step == 0) and (step == -1):
data = self.data[0]
else:
data = self.data[-step]
if data is None:
raise ValueError('data of variable are not set! (%s, step %d)' \
% (self.name, step))
return data[self.indx]
else:
if self.history is None:
msg = 'set history type of variable %s to use derivatives!'\
% self.name
raise ValueError(msg)
dt = get_default(dt, self.dt)
return (self(step=step) - self(step=step-1)) / dt
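    # Editorial sketch: for a variable ``u`` with history enabled, the 'dt'
    # derivative is the backward difference
    #
    #     u(derivative='dt') == (u(step=0) - u(step=-1)) / dt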
def get_initial_condition(self):
if self.initial_condition is None:
return 0.0
else:
return self.initial_condition
class FieldVariable(Variable):
"""
A finite element field variable.
field .. field description of variable (borrowed)
"""
def __init__(self, name, kind, field, order=None, primary_var_name=None,
special=None, flags=None, history=None, **kwargs):
Variable.__init__(self, name, kind, order, primary_var_name,
special, flags, history=history, **kwargs)
self._set_field(field)
self.has_field = True
self.has_bc = True
self._variables = None
self.clear_evaluate_cache()
def _set_field(self, field):
"""
Set field of the variable.
Takes reference to a Field instance. Sets dtype according to
field.dtype. Sets `dim` attribute to spatial dimension.
"""
self.is_surface = field.is_surface
self.field = field
self._setup_dofs(field.n_nod, field.n_components, field.val_shape)
self.flags.add(is_field)
self.dtype = field.dtype
self.dim = field.domain.shape.dim
def _get_setter(self, kind, functions, **kwargs):
"""
        Get the setter function of the variable and its arguments depending
        on the setter kind.
"""
if not (hasattr(self, 'special') and (kind in self.special)):
return
setter_name = self.special[kind]
setter = functions[setter_name]
region = self.field.region
nod_list = self.field.get_dofs_in_region(region)
nods = nm.unique(nod_list)
coors = self.field.get_coor(nods)
if kind == 'setter':
sargs = (kwargs.get('ts'), coors)
elif kind == 'ic':
sargs = (coors, )
skwargs = {'region' : region}
return setter, sargs, skwargs
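    # Editorial sketch (hypothetical names): a setter registered via the
    # variable's 'special' entry, e.g. {'setter' : 'get_load'}, is called as
    # setter(ts, coors, region=region) and should return one value per DOF,
    # for instance
    #
    #     def get_load(ts, coors, region=None):
    #         return nm.sin(coors[:, 0])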
def get_field(self):
return self.field
def get_mapping(self, region, integral, integration,
get_saved=False, return_key=False):
"""
Get the reference element mapping of the underlying field.
See Also
--------
sfepy.discrete.common.fields.Field.get_mapping
"""
if region is None:
region = self.field.region
out = self.field.get_mapping(region, integral, integration,
get_saved=get_saved,
return_key=return_key)
return out
def get_dof_conn(self, dc_type, is_trace=False, trace_region=None):
"""
Get active dof connectivity of a variable.
Notes
-----
The primary and dual variables must have the same Region.
"""
if self.is_virtual():
var = self.get_primary()
# No primary variable can occur in single term evaluations.
var_name = var.name if var is not None else self.name
else:
var_name = self.name
if not is_trace:
region_name = dc_type.region_name
else:
aux = self.field.domain.regions[dc_type.region_name]
region = aux.get_mirror_region(trace_region)
region_name = region.name
key = (var_name, region_name, dc_type.type, is_trace)
dc = self.adof_conns[key]
return dc
def get_dof_info(self, active=False):
details = Struct(name='field_var_dof_details',
n_nod=self.n_nod,
dpn=self.n_components)
if active:
n_dof = self.n_adof
else:
n_dof = self.n_dof
return n_dof, details
def time_update(self, ts, functions):
"""
Store time step, set variable data for variables with the setter
function.
"""
if ts is not None:
self.dt = ts.dt
if hasattr(self, 'special') and ('setter' in self.special):
setter, sargs, skwargs = self._get_setter('setter', functions,
ts=ts)
self.set_data(setter(*sargs, **skwargs))
output('data of %s set by %s()' % (self.name, setter.name))
def set_from_qp(self, data_qp, integral, step=0):
"""
Set DOFs of variable using values in quadrature points
corresponding to the given integral.
"""
data_vertex = self.field.average_qp_to_vertices(data_qp, integral)
# Field nodes values.
data = self.field.interp_v_vals_to_n_vals(data_vertex)
data = data.ravel()
self.indx = slice(0, len(data))
self.data[step] = data
def set_from_mesh_vertices(self, data):
"""
Set the variable using values at the mesh vertices.
"""
ndata = self.field.interp_v_vals_to_n_vals(data)
self.set_data(ndata)
def set_from_function(self, fun, step=0):
"""
Set the variable data (the vector of DOF values) using a function of
space coordinates.
Parameters
----------
fun : callable
The function of coordinates returning DOF values of shape
`(n_coor, n_components)`.
step : int, optional
The time history step, 0 (default) = current.
"""
_, vv = self.field.set_dofs(fun, self.field.region, self.n_components)
self.set_data(vv.ravel(), step=step)
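    # Editorial sketch (hypothetical function): set a scalar variable from an
    # analytic expression of the coordinates,
    #
    #     def ic_fun(coors):
    #         return nm.sin(coors[:, 0])[:, None]   # shape (n_coor, 1)
    #
    #     var.set_from_function(ic_fun)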
def equation_mapping(self, bcs, var_di, ts, functions, problem=None,
warn=False):
"""
Create the mapping of active DOFs from/to all DOFs.
Sets n_adof.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.eq_map = | EquationMap('eq_map', self.dofs, var_di) | sfepy.discrete.common.dof_info.EquationMap |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
# Assing EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
if self.primary_var_name == '(set-to-None)':
self.primary_var_name = None
self.dof_name = self.name
else:
self.dof_name = self.primary_var_name
if special is not None:
self.special = special
else:
raise NotImplementedError('unknown variable kind: %s' % kind)
self.kind = kind
def _setup_dofs(self, n_nod, n_components, val_shape):
"""
Setup number of DOFs and DOF names.
"""
self.n_nod = n_nod
self.n_components = n_components
self.val_shape = val_shape
self.n_dof = self.n_nod * self.n_components
self.dofs = [self.dof_name + ('.%d' % ii)
for ii in range(self.n_components)]
def get_primary(self):
"""
Get the corresponding primary variable.
Returns
-------
var : Variable instance
The primary variable, or `self` for state
variables or if `primary_var_name` is None, or None if no other
variables are defined.
"""
if self.is_state():
var = self
elif self.primary_var_name is not None:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
else:
var = self
return var
def get_dual(self):
"""
Get the dual variable.
Returns
-------
var : Variable instance
The primary variable for non-state variables, or the dual
variable for state variables.
"""
if self.is_state():
if ((self._variables is not None)
and (self.dual_var_name in self._variables.names)):
var = self._variables[self.dual_var_name]
else:
var = None
else:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
return var
def is_state(self):
return is_state in self.flags
def is_virtual(self):
return is_virtual in self.flags
def is_parameter(self):
return is_parameter in self.flags
def is_state_or_parameter(self):
return (is_state in self.flags) or (is_parameter in self.flags)
def is_kind(self, kind):
return eval('self.is_%s()' % kind)
def is_real(self):
return self.dtype in real_types
def is_complex(self):
return self.dtype in complex_types
def is_finite(self, step=0, derivative=None, dt=None):
return nm.isfinite(self(step=step, derivative=derivative, dt=dt)).all()
def get_primary_name(self):
if self.is_state():
name = self.name
else:
name = self.primary_var_name
return name
def init_history(self):
"""Initialize data of variables with history."""
if self.history is None: return
self.data = deque((self.history + 1) * [None])
self.step = 0
def time_update(self, ts, functions):
"""Implemented in subclasses."""
pass
def advance(self, ts):
"""
Advance in time the DOF state history. A copy of the DOF vector
is made to prevent history modification.
"""
if self.history is None: return
self.step = ts.step + 1
if self.history > 0:
# Copy the current step data to the history data, shift history,
# initialize if needed. The current step data are left intact.
# Note: cannot use self.data.rotate() due to data sharing with
# State.
for ii in range(self.history, 0, -1):
if self.data[ii] is None:
self.data[ii] = nm.empty_like(self.data[0])
self.data[ii][:] = self.data[ii - 1]
# Advance evaluate cache.
for step_cache in six.itervalues(self.evaluate_cache):
steps = sorted(step_cache.keys())
for step in steps:
if step is None:
# Special caches with possible custom advance()
# function.
for key, val in six.iteritems(step_cache[step]):
if hasattr(val, '__advance__'):
val.__advance__(ts, val)
elif -step < self.history:
step_cache[step-1] = step_cache[step]
if len(steps) and (steps[0] is not None):
step_cache.pop(steps[-1])
def init_data(self, step=0):
"""
Initialize the dof vector data of time step `step` to zeros.
"""
if self.is_state_or_parameter():
data = nm.zeros((self.n_dof,), dtype=self.dtype)
self.set_data(data, step=step)
def set_constant(self, val):
"""
Set the variable to a constant value.
"""
data = nm.empty((self.n_dof,), dtype=self.dtype)
data.fill(val)
self.set_data(data)
def set_data(self, data=None, indx=None, step=0,
preserve_caches=False):
"""
Set data (vector of DOF values) of the variable.
Parameters
----------
data : array
The vector of DOF values.
indx : int, optional
If given, `data[indx]` is used.
step : int, optional
The time history step, 0 (default) = current.
preserve_caches : bool
If True, do not invalidate evaluate caches of the variable.
"""
data = data.ravel()
if indx is None:
indx = slice(0, len(data))
else:
indx = slice(int(indx.start), int(indx.stop))
n_data_dof = indx.stop - indx.start
if self.n_dof != n_data_dof:
msg = 'incompatible data shape! (%d (variable) == %d (data))' \
% (self.n_dof, n_data_dof)
raise ValueError(msg)
elif (step > 0) or (-step >= len(self.data)):
raise ValueError('step %d out of range! ([%d, 0])'
% (step, -(len(self.data) - 1)))
else:
self.data[step] = data
self.indx = indx
if not preserve_caches:
self.invalidate_evaluate_cache(step=step)
def __call__(self, step=0, derivative=None, dt=None):
"""
Return vector of degrees of freedom of the variable.
Parameters
----------
step : int, default 0
The time step (0 means current, -1 previous, ...).
derivative : None or 'dt'
If not None, return time derivative of the DOF vector,
approximated by the backward finite difference.
Returns
-------
vec : array
The DOF vector. If `derivative` is None: a view of the data vector,
otherwise: required derivative of the DOF vector
at time step given by `step`.
Notes
-----
If the previous time step is requested in step 0, the step 0
DOF vector is returned instead.
"""
if derivative is None:
if (self.step == 0) and (step == -1):
data = self.data[0]
else:
data = self.data[-step]
if data is None:
raise ValueError('data of variable are not set! (%s, step %d)' \
% (self.name, step))
return data[self.indx]
else:
if self.history is None:
msg = 'set history type of variable %s to use derivatives!'\
% self.name
raise ValueError(msg)
dt = get_default(dt, self.dt)
return (self(step=step) - self(step=step-1)) / dt
def get_initial_condition(self):
if self.initial_condition is None:
return 0.0
else:
return self.initial_condition
class FieldVariable(Variable):
"""
A finite element field variable.
field .. field description of variable (borrowed)
"""
def __init__(self, name, kind, field, order=None, primary_var_name=None,
special=None, flags=None, history=None, **kwargs):
Variable.__init__(self, name, kind, order, primary_var_name,
special, flags, history=history, **kwargs)
self._set_field(field)
self.has_field = True
self.has_bc = True
self._variables = None
self.clear_evaluate_cache()
def _set_field(self, field):
"""
Set field of the variable.
Takes reference to a Field instance. Sets dtype according to
field.dtype. Sets `dim` attribute to spatial dimension.
"""
self.is_surface = field.is_surface
self.field = field
self._setup_dofs(field.n_nod, field.n_components, field.val_shape)
self.flags.add(is_field)
self.dtype = field.dtype
self.dim = field.domain.shape.dim
def _get_setter(self, kind, functions, **kwargs):
"""
Get the setter function of the variable and its arguments depending in
the setter kind.
"""
if not (hasattr(self, 'special') and (kind in self.special)):
return
setter_name = self.special[kind]
setter = functions[setter_name]
region = self.field.region
nod_list = self.field.get_dofs_in_region(region)
nods = nm.unique(nod_list)
coors = self.field.get_coor(nods)
if kind == 'setter':
sargs = (kwargs.get('ts'), coors)
elif kind == 'ic':
sargs = (coors, )
skwargs = {'region' : region}
return setter, sargs, skwargs
def get_field(self):
return self.field
def get_mapping(self, region, integral, integration,
get_saved=False, return_key=False):
"""
Get the reference element mapping of the underlying field.
See Also
--------
sfepy.discrete.common.fields.Field.get_mapping
"""
if region is None:
region = self.field.region
out = self.field.get_mapping(region, integral, integration,
get_saved=get_saved,
return_key=return_key)
return out
def get_dof_conn(self, dc_type, is_trace=False, trace_region=None):
"""
Get active dof connectivity of a variable.
Notes
-----
The primary and dual variables must have the same Region.
"""
if self.is_virtual():
var = self.get_primary()
# No primary variable can occur in single term evaluations.
var_name = var.name if var is not None else self.name
else:
var_name = self.name
if not is_trace:
region_name = dc_type.region_name
else:
aux = self.field.domain.regions[dc_type.region_name]
region = aux.get_mirror_region(trace_region)
region_name = region.name
key = (var_name, region_name, dc_type.type, is_trace)
dc = self.adof_conns[key]
return dc
def get_dof_info(self, active=False):
details = Struct(name='field_var_dof_details',
n_nod=self.n_nod,
dpn=self.n_components)
if active:
n_dof = self.n_adof
else:
n_dof = self.n_dof
return n_dof, details
def time_update(self, ts, functions):
"""
Store time step, set variable data for variables with the setter
function.
"""
if ts is not None:
self.dt = ts.dt
if hasattr(self, 'special') and ('setter' in self.special):
setter, sargs, skwargs = self._get_setter('setter', functions,
ts=ts)
self.set_data(setter(*sargs, **skwargs))
output('data of %s set by %s()' % (self.name, setter.name))
def set_from_qp(self, data_qp, integral, step=0):
"""
Set DOFs of variable using values in quadrature points
corresponding to the given integral.
"""
data_vertex = self.field.average_qp_to_vertices(data_qp, integral)
# Field nodes values.
data = self.field.interp_v_vals_to_n_vals(data_vertex)
data = data.ravel()
self.indx = slice(0, len(data))
self.data[step] = data
def set_from_mesh_vertices(self, data):
"""
Set the variable using values at the mesh vertices.
"""
ndata = self.field.interp_v_vals_to_n_vals(data)
self.set_data(ndata)
def set_from_function(self, fun, step=0):
"""
Set the variable data (the vector of DOF values) using a function of
space coordinates.
Parameters
----------
fun : callable
The function of coordinates returning DOF values of shape
`(n_coor, n_components)`.
step : int, optional
The time history step, 0 (default) = current.
"""
_, vv = self.field.set_dofs(fun, self.field.region, self.n_components)
self.set_data(vv.ravel(), step=step)
def equation_mapping(self, bcs, var_di, ts, functions, problem=None,
warn=False):
"""
Create the mapping of active DOFs from/to all DOFs.
Sets n_adof.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.eq_map = EquationMap('eq_map', self.dofs, var_di)
if bcs is not None:
bcs.canonize_dof_names(self.dofs)
bcs.sort()
active_bcs = self.eq_map.map_equations(bcs, self.field, ts, functions,
problem=problem, warn=warn)
self.n_adof = self.eq_map.n_eq
return active_bcs
def setup_initial_conditions(self, ics, di, functions, warn=False):
"""
Setup of initial conditions.
"""
ics.canonize_dof_names(self.dofs)
ics.sort()
self.initial_condition = nm.zeros((di.n_dof[self.name],),
dtype=self.dtype)
for ic in ics:
region = ic.region
dofs, val = ic.dofs
if warn:
clean_msg = ('warning: ignoring nonexistent' \
' IC node (%s) in ' % self.name)
else:
clean_msg = None
nod_list = self.field.get_dofs_in_region(region)
if len(nod_list) == 0:
continue
fun = get_condition_value(val, functions, 'IC', ic.name)
if isinstance(fun, Function):
aux = fun
fun = lambda coors: aux(coors, ic=ic)
nods, vv = self.field.set_dofs(fun, region, len(dofs), clean_msg)
eq = expand_nodes_to_equations(nods, dofs, self.dofs)
self.initial_condition[eq] = nm.ravel(vv)
def get_data_shape(self, integral, integration='volume', region_name=None):
"""
Get element data dimensions for given approximation.
Parameters
----------
integral : Integral instance
The integral describing used numerical quadrature.
integration : 'volume', 'surface', 'surface_extra', 'point' or 'custom'
The term integration type.
region_name : str
The name of the region of the integral.
Returns
-------
data_shape : 5 ints
The `(n_el, n_qp, dim, n_en, n_comp)` for volume shape kind,
`(n_fa, n_qp, dim, n_fn, n_comp)` for surface shape kind and
`(n_nod, 0, 0, 1, n_comp)` for point shape kind.
Notes
-----
- `n_el`, `n_fa` = number of elements/facets
- `n_qp` = number of quadrature points per element/facet
- `dim` = spatial dimension
- `n_en`, `n_fn` = number of element/facet nodes
- `n_comp` = number of variable components in a point/node
- `n_nod` = number of element nodes
"""
aux = self.field.get_data_shape(integral, integration=integration,
region_name=region_name)
data_shape = aux + (self.n_components,)
return data_shape
def clear_evaluate_cache(self):
"""
Clear current evaluate cache.
"""
self.evaluate_cache = {}
def invalidate_evaluate_cache(self, step=0):
"""
Invalidate variable data in evaluate cache for time step given
by `step` (0 is current, -1 previous, ...).
This should be done, for example, prior to every nonlinear
solver iteration.
"""
for step_cache in six.itervalues(self.evaluate_cache):
for key in list(step_cache.keys()):
if key == step: # Given time step to clear.
step_cache.pop(key)
def evaluate(self, mode='val',
region=None, integral=None, integration=None,
step=0, time_derivative=None, is_trace=False,
trace_region=None, dt=None, bf=None):
"""
Evaluate various quantities related to the variable according to
`mode` in quadrature points defined by `integral`.
The evaluated data are cached in the variable instance in
`evaluate_cache` attribute.
Parameters
----------
mode : one of 'val', 'grad', 'div', 'cauchy_strain'
The evaluation mode.
region : Region instance, optional
The region where the evaluation occurs. If None, the
underlying field region is used.
integral : Integral instance, optional
The integral defining quadrature points in which the
evaluation occurs. If None, the first order volume integral
is created. Must not be None for surface integrations.
integration : 'volume', 'surface', 'surface_extra', or 'point'
The term integration type. If None, it is derived from
`integral`.
step : int, default 0
The time step (0 means current, -1 previous, ...).
time_derivative : None or 'dt'
If not None, return time derivative of the data,
approximated by the backward finite difference.
is_trace : bool, default False
Indicate evaluation of trace of the variable on a boundary
region.
dt : float, optional
The time step to be used if `derivative` is `'dt'`. If None,
the `dt` attribute of the variable is used.
bf : Base function, optional
The base function to be used in 'val' mode.
Returns
-------
out : array
The 4-dimensional array of shape
`(n_el, n_qp, n_row, n_col)` with the requested data,
where `n_row`, `n_col` depend on `mode`.
"""
if integration == 'custom':
msg = 'cannot use FieldVariable.evaluate() with custom integration!'
raise ValueError(msg)
step_cache = self.evaluate_cache.setdefault(mode, {})
cache = step_cache.setdefault(step, {})
field = self.field
if region is None:
region = field.region
if is_trace:
region = region.get_mirror_region(trace_region)
if (region is not field.region) and not region.is_empty:
assert_(field.region.contains(region))
if integral is None:
integral = Integral('aux_1', 1)
if integration is None:
integration = 'volume' if region.can_cells else 'surface'
geo, _, key = field.get_mapping(region, integral, integration,
return_key=True)
key += (time_derivative, is_trace)
if key in cache:
out = cache[key]
else:
vec = self(step=step, derivative=time_derivative, dt=dt)
ct = integration
if integration == 'surface_extra':
ct = 'volume'
conn = field.get_econn(ct, region, is_trace, integration)
shape = self.get_data_shape(integral, integration, region.name)
if self.dtype == nm.float64:
out = eval_real(vec, conn, geo, mode, shape, bf)
else:
out = eval_complex(vec, conn, geo, mode, shape, bf)
cache[key] = out
return out
def get_state_in_region(self, region, reshape=True, step=0):
"""
Get DOFs of the variable in the given region.
Parameters
----------
region : Region
The selected region.
reshape : bool
If True, reshape the DOF vector to a 2D array with the individual
components as columns. Otherwise a 1D DOF array of the form [all
DOFs in region node 0, all DOFs in region node 1, ...] is returned.
step : int, default 0
The time step (0 means current, -1 previous, ...).
Returns
-------
out : array
The selected DOFs.
"""
nods = self.field.get_dofs_in_region(region, merge=True)
eq = nm.empty((len(nods) * self.n_components,), dtype=nm.int32)
for idof in range(self.n_components):
eq[idof::self.n_components] = self.n_components * nods \
+ idof + self.indx.start
out = self.data[step][eq]
if reshape:
out.shape = (len(nods), self.n_components)
return out
def apply_ebc(self, vec, offset=0, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions to
vector `vec`, starting at `offset`.
"""
eq_map = self.eq_map
ii = offset + eq_map.eq_ebc
# EBC,
if force_values is None:
vec[ii] = eq_map.val_ebc
else:
if isinstance(force_values, dict):
vec[ii] = force_values[self.name]
else:
vec[ii] = force_values
# EPBC.
vec[offset+eq_map.master] = vec[offset+eq_map.slave]
def apply_ic(self, vec, offset=0, force_values=None):
"""
Apply initial conditions conditions to vector `vec`, starting at
`offset`.
"""
ii = slice(offset, offset + self.n_dof)
if force_values is None:
vec[ii] = self.get_initial_condition()
else:
if isinstance(force_values, dict):
vec[ii] = force_values[self.name]
else:
vec[ii] = force_values
def get_reduced(self, vec, offset=0, follow_epbc=False):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
The full vector starts in `vec` at `offset`. If 'follow_epbc' is True,
values of EPBC master DOFs are not simply thrown away, but added to the
corresponding slave DOFs, just like when assembling. For vectors with
state (unknown) variables it should be set to False, for assembled
vectors it should be set to True.
"""
eq_map = self.eq_map
ii = offset + eq_map.eqi
r_vec = vec[ii]
if follow_epbc:
master = offset + eq_map.master
slave = eq_map.eq[eq_map.slave]
ii = slave >= 0
la.assemble1d(r_vec, slave[ii], vec[master[ii]])
return r_vec
def get_full(self, r_vec, r_offset=0, force_value=None,
vec=None, offset=0):
"""
Get the full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Notes
-----
The reduced vector starts in `r_vec` at `r_offset`.
Passing a `force_value` overrides the EBC values. Optionally,
`vec` argument can be provided to store the full vector (in
place) starting at `offset`.
"""
if vec is None:
vec = nm.empty(self.n_dof, dtype=r_vec.dtype)
else:
vec = vec[offset:offset+self.n_dof]
eq_map = self.eq_map
r_vec = r_vec[r_offset:r_offset+eq_map.n_eq]
# EBC.
vec[eq_map.eq_ebc] = | get_default(force_value, eq_map.val_ebc) | sfepy.base.base.get_default |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
    If a variable does not have an equation mapping, a trivial mapping is
    assumed and a connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
        if key not in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
            if key not in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
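# Illustrative usage sketch (not part of the original module): the returned
# dict is keyed by (primary variable name, region name, dc type, is_trace),
# so a volume connectivity of variable 'u' on region 'Omega' would be
# fetched as
#
#     adof_conns = create_adof_conns(conn_info, var_indx=variables.di.indx)
#     adc = adof_conns[('u', 'Omega', 'volume', False)]
#
# with `conn_info` and `variables` assumed to come from an already set-up
# Equations instance.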
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
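# Worked example (illustrative only, assuming no boundary conditions so that
# `eq` is an identity mapping): two DOFs per node, two elements.
#
#     eq = nm.arange(6, dtype=nm.int32)              # 3 nodes, dpn = 2
#     conn = nm.array([[0, 2], [2, 1]], dtype=nm.int32)
#     adc = create_adof_conn(eq, conn, 2, 0)
#     # adc[0] -> [0, 4, 1, 5]: u_0 in local nodes (0, 2), then u_1 there.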
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
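# Shape sketch (illustrative only): a scalar basis of shape
# (n_el, n_qp, 1, n_bf) expanded for a three-component variable becomes
# (n_el, n_qp, 3, 3 * n_bf), each component using its own block of the local
# DOFs, consistently with create_adof_conn() above.
#
#     bf = nm.ones((2, 4, 1, 8))       # 2 cells, 4 QPs, scalar, 8 base fun.
#     ebf = expand_basis(bf, 3)
#     assert ebf.shape == (2, 4, 3, 24)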
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
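    # Construction sketch (illustrative; `field` is an assumed, existing
    # Field instance): besides from_conf(), a container can be built
    # directly from variable instances:
    #
    #     u = FieldVariable('u', 'unknown', field)
    #     v = FieldVariable('v', 'test', field, primary_var_name='u')
    #     variables = Variables([u, v])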
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
        Link state variables with their corresponding virtual (dual)
        variables.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
            except AttributeError:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
        # Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
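    # Call-order sketch (illustrative): the mapping is normally created by
    # the Problem/Equations machinery before assembling, roughly as
    #
    #     active_bcs = variables.equation_mapping(ebcs, epbcs, ts, functions,
    #                                             problem=problem)
    #     variables.setup_lcbc_operators(lcbcs, ts, functions)
    #
    # after which self.adi (and self.avdi) describe the active DOF numbering.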
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
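    # Round-trip sketch (illustrative, assuming the equation mapping has been
    # created): a reduced solver vector is expanded to a full DOF vector with
    # the Dirichlet values filled in, and can be stripped back again:
    #
    #     vec = variables.make_full_vec(svec)
    #     svec2 = variables.strip_state_vector(vec)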
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
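    # Usage sketch (illustrative, names assumed): extract the DOF sub-vector
    # of a single variable from a full state vector:
    #
    #     parts = variables.get_state_parts(vec)
    #     u_vec = parts['u']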
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
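    # Configuration sketch (illustrative): in a declarative problem
    # description the kind is a two-word string that from_conf() splits into
    # (kind, family), e.g.
    #
    #     variables = {
    #         'u' : ('unknown field', 'displacement', 0),
    #         'v' : ('test field', 'displacement', 'u'),
    #     }
    #
    # 'unknown field' yields a state FieldVariable of order 0, 'test field'
    # its dual (virtual) variable with primary_var_name='u'.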
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
if self.primary_var_name == '(set-to-None)':
self.primary_var_name = None
self.dof_name = self.name
else:
self.dof_name = self.primary_var_name
if special is not None:
self.special = special
else:
raise NotImplementedError('unknown variable kind: %s' % kind)
self.kind = kind
def _setup_dofs(self, n_nod, n_components, val_shape):
"""
Setup number of DOFs and DOF names.
"""
self.n_nod = n_nod
self.n_components = n_components
self.val_shape = val_shape
self.n_dof = self.n_nod * self.n_components
self.dofs = [self.dof_name + ('.%d' % ii)
for ii in range(self.n_components)]
def get_primary(self):
"""
Get the corresponding primary variable.
Returns
-------
var : Variable instance
            The primary variable: `self` for state variables or when
            `primary_var_name` is None, otherwise the variable named by
            `primary_var_name`, or None if no such variable is defined.
"""
if self.is_state():
var = self
elif self.primary_var_name is not None:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
else:
var = self
return var
def get_dual(self):
"""
Get the dual variable.
Returns
-------
var : Variable instance
The primary variable for non-state variables, or the dual
variable for state variables.
"""
if self.is_state():
if ((self._variables is not None)
and (self.dual_var_name in self._variables.names)):
var = self._variables[self.dual_var_name]
else:
var = None
else:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
return var
def is_state(self):
return is_state in self.flags
def is_virtual(self):
return is_virtual in self.flags
def is_parameter(self):
return is_parameter in self.flags
def is_state_or_parameter(self):
return (is_state in self.flags) or (is_parameter in self.flags)
    def is_kind(self, kind):
        return getattr(self, 'is_%s' % kind)()
def is_real(self):
return self.dtype in real_types
def is_complex(self):
return self.dtype in complex_types
def is_finite(self, step=0, derivative=None, dt=None):
return nm.isfinite(self(step=step, derivative=derivative, dt=dt)).all()
def get_primary_name(self):
if self.is_state():
name = self.name
else:
name = self.primary_var_name
return name
def init_history(self):
"""Initialize data of variables with history."""
if self.history is None: return
self.data = deque((self.history + 1) * [None])
self.step = 0
def time_update(self, ts, functions):
"""Implemented in subclasses."""
pass
def advance(self, ts):
"""
Advance in time the DOF state history. A copy of the DOF vector
is made to prevent history modification.
"""
if self.history is None: return
self.step = ts.step + 1
if self.history > 0:
# Copy the current step data to the history data, shift history,
# initialize if needed. The current step data are left intact.
# Note: cannot use self.data.rotate() due to data sharing with
# State.
for ii in range(self.history, 0, -1):
if self.data[ii] is None:
self.data[ii] = nm.empty_like(self.data[0])
self.data[ii][:] = self.data[ii - 1]
# Advance evaluate cache.
for step_cache in six.itervalues(self.evaluate_cache):
steps = sorted(step_cache.keys())
for step in steps:
if step is None:
# Special caches with possible custom advance()
# function.
for key, val in six.iteritems(step_cache[step]):
if hasattr(val, '__advance__'):
val.__advance__(ts, val)
elif -step < self.history:
step_cache[step-1] = step_cache[step]
if len(steps) and (steps[0] is not None):
step_cache.pop(steps[-1])
def init_data(self, step=0):
"""
Initialize the dof vector data of time step `step` to zeros.
"""
if self.is_state_or_parameter():
data = nm.zeros((self.n_dof,), dtype=self.dtype)
self.set_data(data, step=step)
def set_constant(self, val):
"""
Set the variable to a constant value.
"""
data = nm.empty((self.n_dof,), dtype=self.dtype)
data.fill(val)
self.set_data(data)
def set_data(self, data=None, indx=None, step=0,
preserve_caches=False):
"""
Set data (vector of DOF values) of the variable.
Parameters
----------
data : array
The vector of DOF values.
indx : int, optional
If given, `data[indx]` is used.
step : int, optional
The time history step, 0 (default) = current.
preserve_caches : bool
If True, do not invalidate evaluate caches of the variable.
"""
data = data.ravel()
if indx is None:
indx = slice(0, len(data))
else:
indx = slice(int(indx.start), int(indx.stop))
n_data_dof = indx.stop - indx.start
if self.n_dof != n_data_dof:
msg = 'incompatible data shape! (%d (variable) == %d (data))' \
% (self.n_dof, n_data_dof)
raise ValueError(msg)
elif (step > 0) or (-step >= len(self.data)):
raise ValueError('step %d out of range! ([%d, 0])'
% (step, -(len(self.data) - 1)))
else:
self.data[step] = data
self.indx = indx
if not preserve_caches:
self.invalidate_evaluate_cache(step=step)
def __call__(self, step=0, derivative=None, dt=None):
"""
Return vector of degrees of freedom of the variable.
Parameters
----------
step : int, default 0
The time step (0 means current, -1 previous, ...).
derivative : None or 'dt'
If not None, return time derivative of the DOF vector,
approximated by the backward finite difference.
Returns
-------
vec : array
The DOF vector. If `derivative` is None: a view of the data vector,
otherwise: required derivative of the DOF vector
at time step given by `step`.
Notes
-----
        If the previous time step is requested while the variable is at
        step 0, the step 0 DOF vector is returned instead.
"""
if derivative is None:
if (self.step == 0) and (step == -1):
data = self.data[0]
else:
data = self.data[-step]
if data is None:
raise ValueError('data of variable are not set! (%s, step %d)' \
% (self.name, step))
return data[self.indx]
else:
if self.history is None:
msg = 'set history type of variable %s to use derivatives!'\
% self.name
raise ValueError(msg)
dt = get_default(dt, self.dt)
return (self(step=step) - self(step=step-1)) / dt
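    # Illustrative note (names assumed): with history enabled,
    # u(derivative='dt') is just the backward difference computed above, i.e.
    #
    #     dudt = (u(step=0) - u(step=-1)) / u.dt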
def get_initial_condition(self):
if self.initial_condition is None:
return 0.0
else:
return self.initial_condition
class FieldVariable(Variable):
"""
A finite element field variable.
field .. field description of variable (borrowed)
"""
def __init__(self, name, kind, field, order=None, primary_var_name=None,
special=None, flags=None, history=None, **kwargs):
Variable.__init__(self, name, kind, order, primary_var_name,
special, flags, history=history, **kwargs)
self._set_field(field)
self.has_field = True
self.has_bc = True
self._variables = None
self.clear_evaluate_cache()
def _set_field(self, field):
"""
Set field of the variable.
Takes reference to a Field instance. Sets dtype according to
field.dtype. Sets `dim` attribute to spatial dimension.
"""
self.is_surface = field.is_surface
self.field = field
self._setup_dofs(field.n_nod, field.n_components, field.val_shape)
self.flags.add(is_field)
self.dtype = field.dtype
self.dim = field.domain.shape.dim
def _get_setter(self, kind, functions, **kwargs):
"""
        Get the setter function of the variable and its arguments depending
        on the setter kind.
"""
if not (hasattr(self, 'special') and (kind in self.special)):
return
setter_name = self.special[kind]
setter = functions[setter_name]
region = self.field.region
nod_list = self.field.get_dofs_in_region(region)
nods = nm.unique(nod_list)
coors = self.field.get_coor(nods)
if kind == 'setter':
sargs = (kwargs.get('ts'), coors)
elif kind == 'ic':
sargs = (coors, )
skwargs = {'region' : region}
return setter, sargs, skwargs
def get_field(self):
return self.field
def get_mapping(self, region, integral, integration,
get_saved=False, return_key=False):
"""
Get the reference element mapping of the underlying field.
See Also
--------
sfepy.discrete.common.fields.Field.get_mapping
"""
if region is None:
region = self.field.region
out = self.field.get_mapping(region, integral, integration,
get_saved=get_saved,
return_key=return_key)
return out
def get_dof_conn(self, dc_type, is_trace=False, trace_region=None):
"""
Get active dof connectivity of a variable.
Notes
-----
The primary and dual variables must have the same Region.
"""
if self.is_virtual():
var = self.get_primary()
# No primary variable can occur in single term evaluations.
var_name = var.name if var is not None else self.name
else:
var_name = self.name
if not is_trace:
region_name = dc_type.region_name
else:
aux = self.field.domain.regions[dc_type.region_name]
region = aux.get_mirror_region(trace_region)
region_name = region.name
key = (var_name, region_name, dc_type.type, is_trace)
dc = self.adof_conns[key]
return dc
def get_dof_info(self, active=False):
details = Struct(name='field_var_dof_details',
n_nod=self.n_nod,
dpn=self.n_components)
if active:
n_dof = self.n_adof
else:
n_dof = self.n_dof
return n_dof, details
def time_update(self, ts, functions):
"""
Store time step, set variable data for variables with the setter
function.
"""
if ts is not None:
self.dt = ts.dt
if hasattr(self, 'special') and ('setter' in self.special):
setter, sargs, skwargs = self._get_setter('setter', functions,
ts=ts)
self.set_data(setter(*sargs, **skwargs))
output('data of %s set by %s()' % (self.name, setter.name))
def set_from_qp(self, data_qp, integral, step=0):
"""
Set DOFs of variable using values in quadrature points
corresponding to the given integral.
"""
data_vertex = self.field.average_qp_to_vertices(data_qp, integral)
        # Field node values.
data = self.field.interp_v_vals_to_n_vals(data_vertex)
data = data.ravel()
self.indx = slice(0, len(data))
self.data[step] = data
def set_from_mesh_vertices(self, data):
"""
Set the variable using values at the mesh vertices.
"""
ndata = self.field.interp_v_vals_to_n_vals(data)
self.set_data(ndata)
def set_from_function(self, fun, step=0):
"""
Set the variable data (the vector of DOF values) using a function of
space coordinates.
Parameters
----------
fun : callable
The function of coordinates returning DOF values of shape
`(n_coor, n_components)`.
step : int, optional
The time history step, 0 (default) = current.
"""
_, vv = self.field.set_dofs(fun, self.field.region, self.n_components)
self.set_data(vv.ravel(), step=step)
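    # Usage sketch (illustrative, names assumed): set a scalar variable from
    # an analytic function of the coordinates:
    #
    #     def ic_fun(coors):
    #         return nm.sin(coors[:, 0])[:, None]   # (n_coor, n_components)
    #
    #     u.set_from_function(ic_fun)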
def equation_mapping(self, bcs, var_di, ts, functions, problem=None,
warn=False):
"""
Create the mapping of active DOFs from/to all DOFs.
Sets n_adof.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.eq_map = EquationMap('eq_map', self.dofs, var_di)
if bcs is not None:
bcs.canonize_dof_names(self.dofs)
bcs.sort()
active_bcs = self.eq_map.map_equations(bcs, self.field, ts, functions,
problem=problem, warn=warn)
self.n_adof = self.eq_map.n_eq
return active_bcs
def setup_initial_conditions(self, ics, di, functions, warn=False):
"""
Setup of initial conditions.
"""
ics.canonize_dof_names(self.dofs)
ics.sort()
self.initial_condition = nm.zeros((di.n_dof[self.name],),
dtype=self.dtype)
for ic in ics:
region = ic.region
dofs, val = ic.dofs
if warn:
clean_msg = ('warning: ignoring nonexistent' \
' IC node (%s) in ' % self.name)
else:
clean_msg = None
nod_list = self.field.get_dofs_in_region(region)
if len(nod_list) == 0:
continue
fun = get_condition_value(val, functions, 'IC', ic.name)
if isinstance(fun, Function):
aux = fun
fun = lambda coors: aux(coors, ic=ic)
nods, vv = self.field.set_dofs(fun, region, len(dofs), clean_msg)
eq = expand_nodes_to_equations(nods, dofs, self.dofs)
self.initial_condition[eq] = nm.ravel(vv)
def get_data_shape(self, integral, integration='volume', region_name=None):
"""
Get element data dimensions for given approximation.
Parameters
----------
integral : Integral instance
The integral describing used numerical quadrature.
integration : 'volume', 'surface', 'surface_extra', 'point' or 'custom'
The term integration type.
region_name : str
The name of the region of the integral.
Returns
-------
data_shape : 5 ints
The `(n_el, n_qp, dim, n_en, n_comp)` for volume shape kind,
`(n_fa, n_qp, dim, n_fn, n_comp)` for surface shape kind and
`(n_nod, 0, 0, 1, n_comp)` for point shape kind.
Notes
-----
- `n_el`, `n_fa` = number of elements/facets
- `n_qp` = number of quadrature points per element/facet
- `dim` = spatial dimension
- `n_en`, `n_fn` = number of element/facet nodes
- `n_comp` = number of variable components in a point/node
- `n_nod` = number of element nodes
"""
aux = self.field.get_data_shape(integral, integration=integration,
region_name=region_name)
data_shape = aux + (self.n_components,)
return data_shape
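    # Illustrative sketch (assumption): unpacking the volume data shape of a
    # variable `u` for a second order quadrature:
    #
    #     from sfepy.discrete import Integral
    #     integral = Integral('i', order=2)
    #     n_el, n_qp, dim, n_en, n_comp = u.get_data_shape(
    #         integral, integration='volume', region_name=u.field.region.name)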
def clear_evaluate_cache(self):
"""
Clear current evaluate cache.
"""
self.evaluate_cache = {}
def invalidate_evaluate_cache(self, step=0):
"""
Invalidate variable data in evaluate cache for time step given
by `step` (0 is current, -1 previous, ...).
This should be done, for example, prior to every nonlinear
solver iteration.
"""
for step_cache in six.itervalues(self.evaluate_cache):
for key in list(step_cache.keys()):
if key == step: # Given time step to clear.
step_cache.pop(key)
def evaluate(self, mode='val',
region=None, integral=None, integration=None,
step=0, time_derivative=None, is_trace=False,
trace_region=None, dt=None, bf=None):
"""
Evaluate various quantities related to the variable according to
`mode` in quadrature points defined by `integral`.
The evaluated data are cached in the variable instance in
`evaluate_cache` attribute.
Parameters
----------
mode : one of 'val', 'grad', 'div', 'cauchy_strain'
The evaluation mode.
region : Region instance, optional
The region where the evaluation occurs. If None, the
underlying field region is used.
integral : Integral instance, optional
The integral defining quadrature points in which the
evaluation occurs. If None, the first order volume integral
is created. Must not be None for surface integrations.
integration : 'volume', 'surface', 'surface_extra', or 'point'
The term integration type. If None, it is derived from
`integral`.
step : int, default 0
The time step (0 means current, -1 previous, ...).
time_derivative : None or 'dt'
If not None, return time derivative of the data,
approximated by the backward finite difference.
is_trace : bool, default False
Indicate evaluation of trace of the variable on a boundary
region.
dt : float, optional
            The time step to be used if `time_derivative` is `'dt'`. If None,
the `dt` attribute of the variable is used.
bf : Base function, optional
The base function to be used in 'val' mode.
Returns
-------
out : array
The 4-dimensional array of shape
`(n_el, n_qp, n_row, n_col)` with the requested data,
where `n_row`, `n_col` depend on `mode`.
"""
if integration == 'custom':
msg = 'cannot use FieldVariable.evaluate() with custom integration!'
raise ValueError(msg)
step_cache = self.evaluate_cache.setdefault(mode, {})
cache = step_cache.setdefault(step, {})
field = self.field
if region is None:
region = field.region
if is_trace:
region = region.get_mirror_region(trace_region)
if (region is not field.region) and not region.is_empty:
assert_(field.region.contains(region))
if integral is None:
integral = Integral('aux_1', 1)
if integration is None:
integration = 'volume' if region.can_cells else 'surface'
geo, _, key = field.get_mapping(region, integral, integration,
return_key=True)
key += (time_derivative, is_trace)
if key in cache:
out = cache[key]
else:
vec = self(step=step, derivative=time_derivative, dt=dt)
ct = integration
if integration == 'surface_extra':
ct = 'volume'
conn = field.get_econn(ct, region, is_trace, integration)
shape = self.get_data_shape(integral, integration, region.name)
if self.dtype == nm.float64:
out = eval_real(vec, conn, geo, mode, shape, bf)
else:
out = eval_complex(vec, conn, geo, mode, shape, bf)
cache[key] = out
return out
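    # Illustrative sketch (assumption): evaluating DOF data in quadrature
    # points of a volume integral, as described in the docstring above:
    #
    #     from sfepy.discrete import Integral
    #     integral = Integral('i', order=2)
    #     val_qp = u.evaluate(mode='val', integral=integral)
    #     grad_qp = u.evaluate(mode='grad', integral=integral)
    #     # Both results are 4D arrays (n_el, n_qp, n_row, n_col); the cached
    #     # values are reused until invalidate_evaluate_cache() is called.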
def get_state_in_region(self, region, reshape=True, step=0):
"""
Get DOFs of the variable in the given region.
Parameters
----------
region : Region
The selected region.
reshape : bool
If True, reshape the DOF vector to a 2D array with the individual
components as columns. Otherwise a 1D DOF array of the form [all
DOFs in region node 0, all DOFs in region node 1, ...] is returned.
step : int, default 0
The time step (0 means current, -1 previous, ...).
Returns
-------
out : array
The selected DOFs.
"""
nods = self.field.get_dofs_in_region(region, merge=True)
eq = nm.empty((len(nods) * self.n_components,), dtype=nm.int32)
for idof in range(self.n_components):
eq[idof::self.n_components] = self.n_components * nods \
+ idof + self.indx.start
out = self.data[step][eq]
if reshape:
out.shape = (len(nods), self.n_components)
return out
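    # Illustrative sketch (assumption): extracting the DOF values of `u` in a
    # previously defined region named 'Gamma':
    #
    #     gamma = u.field.domain.regions['Gamma']
    #     vals = u.get_state_in_region(gamma)  # shape (n_region_nodes, n_comp)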
def apply_ebc(self, vec, offset=0, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions to
vector `vec`, starting at `offset`.
"""
eq_map = self.eq_map
ii = offset + eq_map.eq_ebc
# EBC,
if force_values is None:
vec[ii] = eq_map.val_ebc
else:
if isinstance(force_values, dict):
vec[ii] = force_values[self.name]
else:
vec[ii] = force_values
# EPBC.
vec[offset+eq_map.master] = vec[offset+eq_map.slave]
def apply_ic(self, vec, offset=0, force_values=None):
"""
        Apply initial conditions to vector `vec`, starting at
`offset`.
"""
ii = slice(offset, offset + self.n_dof)
if force_values is None:
vec[ii] = self.get_initial_condition()
else:
if isinstance(force_values, dict):
vec[ii] = force_values[self.name]
else:
vec[ii] = force_values
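    # Illustrative sketch (assumption): once equation_mapping() and
    # setup_initial_conditions() have been called, a full DOF vector of `u`
    # can be initialized and constrained in place:
    #
    #     import numpy as nm
    #     vec = nm.zeros(u.n_dof, dtype=u.dtype)
    #     u.apply_ic(vec)   # initial condition values
    #     u.apply_ebc(vec)  # overwrite EBC/EPBC-constrained entries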
def get_reduced(self, vec, offset=0, follow_epbc=False):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
The full vector starts in `vec` at `offset`. If 'follow_epbc' is True,
values of EPBC master DOFs are not simply thrown away, but added to the
corresponding slave DOFs, just like when assembling. For vectors with
state (unknown) variables it should be set to False, for assembled
vectors it should be set to True.
"""
eq_map = self.eq_map
ii = offset + eq_map.eqi
r_vec = vec[ii]
if follow_epbc:
master = offset + eq_map.master
slave = eq_map.eq[eq_map.slave]
ii = slave >= 0
la.assemble1d(r_vec, slave[ii], vec[master[ii]])
return r_vec
def get_full(self, r_vec, r_offset=0, force_value=None,
vec=None, offset=0):
"""
Get the full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Notes
-----
The reduced vector starts in `r_vec` at `r_offset`.
Passing a `force_value` overrides the EBC values. Optionally,
`vec` argument can be provided to store the full vector (in
place) starting at `offset`.
"""
if vec is None:
vec = nm.empty(self.n_dof, dtype=r_vec.dtype)
else:
vec = vec[offset:offset+self.n_dof]
eq_map = self.eq_map
r_vec = r_vec[r_offset:r_offset+eq_map.n_eq]
# EBC.
vec[eq_map.eq_ebc] = get_default(force_value, eq_map.val_ebc)
# Reduced vector values.
vec[eq_map.eqi] = r_vec
# EPBC.
vec[eq_map.master] = vec[eq_map.slave]
unused_dofs = self.field.get('unused_dofs')
if unused_dofs is not None:
vec[:] = self.field.restore_substituted(vec)
return vec
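    # Illustrative sketch (assumption): the reduced/full round trip for a
    # single variable with the equation mapping set up:
    #
    #     r_vec = u.get_reduced(vec)  # EBC/EPBC DOFs stripped
    #     vec2 = u.get_full(r_vec)    # EBC values re-inserted
    #     assert vec2.shape == (u.n_dof,)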
def create_output(self, vec=None, key=None, extend=True, fill_value=None,
linearization=None):
"""
Convert the DOF vector to a dictionary of output data usable by
Mesh.write().
Parameters
----------
vec : array, optional
An alternative DOF vector to be used instead of the variable
DOF vector.
key : str, optional
The key to be used in the output dictionary instead of the
variable name.
extend : bool
Extend the DOF values to cover the whole domain.
fill_value : float or complex
The value used to fill the missing DOF values if `extend` is True.
linearization : Struct or None
The linearization configuration for higher order approximations.
"""
linearization = get_default(linearization, Struct(kind='strip'))
if vec is None:
vec = self()
        key = get_default(key, self.name)
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
# Assing EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
if self.primary_var_name == '(set-to-None)':
self.primary_var_name = None
self.dof_name = self.name
else:
self.dof_name = self.primary_var_name
if special is not None:
self.special = special
else:
raise NotImplementedError('unknown variable kind: %s' % kind)
self.kind = kind
def _setup_dofs(self, n_nod, n_components, val_shape):
"""
Setup number of DOFs and DOF names.
"""
self.n_nod = n_nod
self.n_components = n_components
self.val_shape = val_shape
self.n_dof = self.n_nod * self.n_components
self.dofs = [self.dof_name + ('.%d' % ii)
for ii in range(self.n_components)]
def get_primary(self):
"""
Get the corresponding primary variable.
Returns
-------
var : Variable instance
The primary variable, or `self` for state
variables or if `primary_var_name` is None, or None if no other
variables are defined.
"""
if self.is_state():
var = self
elif self.primary_var_name is not None:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
else:
var = self
return var
def get_dual(self):
"""
Get the dual variable.
Returns
-------
var : Variable instance
The primary variable for non-state variables, or the dual
variable for state variables.
"""
if self.is_state():
if ((self._variables is not None)
and (self.dual_var_name in self._variables.names)):
var = self._variables[self.dual_var_name]
else:
var = None
else:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
return var
def is_state(self):
return is_state in self.flags
def is_virtual(self):
return is_virtual in self.flags
def is_parameter(self):
return is_parameter in self.flags
def is_state_or_parameter(self):
return (is_state in self.flags) or (is_parameter in self.flags)
def is_kind(self, kind):
return eval('self.is_%s()' % kind)
def is_real(self):
return self.dtype in real_types
def is_complex(self):
return self.dtype in complex_types
def is_finite(self, step=0, derivative=None, dt=None):
return nm.isfinite(self(step=step, derivative=derivative, dt=dt)).all()
def get_primary_name(self):
if self.is_state():
name = self.name
else:
name = self.primary_var_name
return name
def init_history(self):
"""Initialize data of variables with history."""
if self.history is None: return
self.data = deque((self.history + 1) * [None])
self.step = 0
def time_update(self, ts, functions):
"""Implemented in subclasses."""
pass
def advance(self, ts):
"""
Advance in time the DOF state history. A copy of the DOF vector
is made to prevent history modification.
"""
if self.history is None: return
self.step = ts.step + 1
if self.history > 0:
# Copy the current step data to the history data, shift history,
# initialize if needed. The current step data are left intact.
# Note: cannot use self.data.rotate() due to data sharing with
# State.
for ii in range(self.history, 0, -1):
if self.data[ii] is None:
self.data[ii] = nm.empty_like(self.data[0])
self.data[ii][:] = self.data[ii - 1]
# Advance evaluate cache.
for step_cache in six.itervalues(self.evaluate_cache):
steps = sorted(step_cache.keys())
for step in steps:
if step is None:
# Special caches with possible custom advance()
# function.
for key, val in six.iteritems(step_cache[step]):
if hasattr(val, '__advance__'):
val.__advance__(ts, val)
elif -step < self.history:
step_cache[step-1] = step_cache[step]
if len(steps) and (steps[0] is not None):
step_cache.pop(steps[-1])
def init_data(self, step=0):
"""
Initialize the dof vector data of time step `step` to zeros.
"""
if self.is_state_or_parameter():
data = nm.zeros((self.n_dof,), dtype=self.dtype)
self.set_data(data, step=step)
def set_constant(self, val):
"""
Set the variable to a constant value.
"""
data = nm.empty((self.n_dof,), dtype=self.dtype)
data.fill(val)
self.set_data(data)
def set_data(self, data=None, indx=None, step=0,
preserve_caches=False):
"""
Set data (vector of DOF values) of the variable.
Parameters
----------
data : array
The vector of DOF values.
indx : int, optional
If given, `data[indx]` is used.
step : int, optional
The time history step, 0 (default) = current.
preserve_caches : bool
If True, do not invalidate evaluate caches of the variable.
"""
data = data.ravel()
if indx is None:
indx = slice(0, len(data))
else:
indx = slice(int(indx.start), int(indx.stop))
n_data_dof = indx.stop - indx.start
if self.n_dof != n_data_dof:
msg = 'incompatible data shape! (%d (variable) == %d (data))' \
% (self.n_dof, n_data_dof)
raise ValueError(msg)
elif (step > 0) or (-step >= len(self.data)):
raise ValueError('step %d out of range! ([%d, 0])'
% (step, -(len(self.data) - 1)))
else:
self.data[step] = data
self.indx = indx
if not preserve_caches:
self.invalidate_evaluate_cache(step=step)
def __call__(self, step=0, derivative=None, dt=None):
"""
Return vector of degrees of freedom of the variable.
Parameters
----------
step : int, default 0
The time step (0 means current, -1 previous, ...).
derivative : None or 'dt'
If not None, return time derivative of the DOF vector,
approximated by the backward finite difference.
Returns
-------
vec : array
The DOF vector. If `derivative` is None: a view of the data vector,
otherwise: required derivative of the DOF vector
at time step given by `step`.
Notes
-----
If the previous time step is requested in step 0, the step 0
DOF vector is returned instead.
"""
if derivative is None:
if (self.step == 0) and (step == -1):
data = self.data[0]
else:
data = self.data[-step]
if data is None:
raise ValueError('data of variable are not set! (%s, step %d)' \
% (self.name, step))
return data[self.indx]
else:
if self.history is None:
msg = 'set history type of variable %s to use derivatives!'\
% self.name
raise ValueError(msg)
dt = get_default(dt, self.dt)
return (self(step=step) - self(step=step-1)) / dt
def get_initial_condition(self):
if self.initial_condition is None:
return 0.0
else:
return self.initial_condition
class FieldVariable(Variable):
"""
A finite element field variable.
field .. field description of variable (borrowed)
"""
def __init__(self, name, kind, field, order=None, primary_var_name=None,
special=None, flags=None, history=None, **kwargs):
Variable.__init__(self, name, kind, order, primary_var_name,
special, flags, history=history, **kwargs)
self._set_field(field)
self.has_field = True
self.has_bc = True
self._variables = None
self.clear_evaluate_cache()
def _set_field(self, field):
"""
Set field of the variable.
Takes reference to a Field instance. Sets dtype according to
field.dtype. Sets `dim` attribute to spatial dimension.
"""
self.is_surface = field.is_surface
self.field = field
self._setup_dofs(field.n_nod, field.n_components, field.val_shape)
self.flags.add(is_field)
self.dtype = field.dtype
self.dim = field.domain.shape.dim
def _get_setter(self, kind, functions, **kwargs):
"""
Get the setter function of the variable and its arguments depending in
the setter kind.
"""
if not (hasattr(self, 'special') and (kind in self.special)):
return
setter_name = self.special[kind]
setter = functions[setter_name]
region = self.field.region
nod_list = self.field.get_dofs_in_region(region)
nods = nm.unique(nod_list)
coors = self.field.get_coor(nods)
if kind == 'setter':
sargs = (kwargs.get('ts'), coors)
elif kind == 'ic':
sargs = (coors, )
skwargs = {'region' : region}
return setter, sargs, skwargs
def get_field(self):
return self.field
def get_mapping(self, region, integral, integration,
get_saved=False, return_key=False):
"""
Get the reference element mapping of the underlying field.
See Also
--------
sfepy.discrete.common.fields.Field.get_mapping
"""
if region is None:
region = self.field.region
out = self.field.get_mapping(region, integral, integration,
get_saved=get_saved,
return_key=return_key)
return out
def get_dof_conn(self, dc_type, is_trace=False, trace_region=None):
"""
Get active dof connectivity of a variable.
Notes
-----
The primary and dual variables must have the same Region.
"""
if self.is_virtual():
var = self.get_primary()
# No primary variable can occur in single term evaluations.
var_name = var.name if var is not None else self.name
else:
var_name = self.name
if not is_trace:
region_name = dc_type.region_name
else:
aux = self.field.domain.regions[dc_type.region_name]
region = aux.get_mirror_region(trace_region)
region_name = region.name
key = (var_name, region_name, dc_type.type, is_trace)
dc = self.adof_conns[key]
return dc
def get_dof_info(self, active=False):
details = Struct(name='field_var_dof_details',
n_nod=self.n_nod,
dpn=self.n_components)
if active:
n_dof = self.n_adof
else:
n_dof = self.n_dof
return n_dof, details
def time_update(self, ts, functions):
"""
Store time step, set variable data for variables with the setter
function.
"""
if ts is not None:
self.dt = ts.dt
if hasattr(self, 'special') and ('setter' in self.special):
setter, sargs, skwargs = self._get_setter('setter', functions,
ts=ts)
self.set_data(setter(*sargs, **skwargs))
output('data of %s set by %s()' % (self.name, setter.name))
def set_from_qp(self, data_qp, integral, step=0):
"""
Set DOFs of variable using values in quadrature points
corresponding to the given integral.
"""
data_vertex = self.field.average_qp_to_vertices(data_qp, integral)
# Field nodes values.
data = self.field.interp_v_vals_to_n_vals(data_vertex)
data = data.ravel()
self.indx = slice(0, len(data))
self.data[step] = data
def set_from_mesh_vertices(self, data):
"""
Set the variable using values at the mesh vertices.
"""
ndata = self.field.interp_v_vals_to_n_vals(data)
self.set_data(ndata)
def set_from_function(self, fun, step=0):
"""
Set the variable data (the vector of DOF values) using a function of
space coordinates.
Parameters
----------
fun : callable
The function of coordinates returning DOF values of shape
`(n_coor, n_components)`.
step : int, optional
The time history step, 0 (default) = current.
"""
_, vv = self.field.set_dofs(fun, self.field.region, self.n_components)
self.set_data(vv.ravel(), step=step)
def equation_mapping(self, bcs, var_di, ts, functions, problem=None,
warn=False):
"""
Create the mapping of active DOFs from/to all DOFs.
Sets n_adof.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.eq_map = EquationMap('eq_map', self.dofs, var_di)
if bcs is not None:
bcs.canonize_dof_names(self.dofs)
bcs.sort()
active_bcs = self.eq_map.map_equations(bcs, self.field, ts, functions,
problem=problem, warn=warn)
self.n_adof = self.eq_map.n_eq
return active_bcs
def setup_initial_conditions(self, ics, di, functions, warn=False):
"""
Setup of initial conditions.
"""
ics.canonize_dof_names(self.dofs)
ics.sort()
self.initial_condition = nm.zeros((di.n_dof[self.name],),
dtype=self.dtype)
for ic in ics:
region = ic.region
dofs, val = ic.dofs
if warn:
clean_msg = ('warning: ignoring nonexistent' \
' IC node (%s) in ' % self.name)
else:
clean_msg = None
nod_list = self.field.get_dofs_in_region(region)
if len(nod_list) == 0:
continue
fun = get_condition_value(val, functions, 'IC', ic.name)
if isinstance(fun, Function):
aux = fun
fun = lambda coors: aux(coors, ic=ic)
nods, vv = self.field.set_dofs(fun, region, len(dofs), clean_msg)
eq = expand_nodes_to_equations(nods, dofs, self.dofs)
self.initial_condition[eq] = nm.ravel(vv)
def get_data_shape(self, integral, integration='volume', region_name=None):
"""
Get element data dimensions for given approximation.
Parameters
----------
integral : Integral instance
The integral describing used numerical quadrature.
integration : 'volume', 'surface', 'surface_extra', 'point' or 'custom'
The term integration type.
region_name : str
The name of the region of the integral.
Returns
-------
data_shape : 5 ints
The `(n_el, n_qp, dim, n_en, n_comp)` for volume shape kind,
`(n_fa, n_qp, dim, n_fn, n_comp)` for surface shape kind and
`(n_nod, 0, 0, 1, n_comp)` for point shape kind.
Notes
-----
- `n_el`, `n_fa` = number of elements/facets
- `n_qp` = number of quadrature points per element/facet
- `dim` = spatial dimension
- `n_en`, `n_fn` = number of element/facet nodes
- `n_comp` = number of variable components in a point/node
- `n_nod` = number of element nodes
"""
aux = self.field.get_data_shape(integral, integration=integration,
region_name=region_name)
data_shape = aux + (self.n_components,)
return data_shape
def clear_evaluate_cache(self):
"""
Clear current evaluate cache.
"""
self.evaluate_cache = {}
def invalidate_evaluate_cache(self, step=0):
"""
Invalidate variable data in evaluate cache for time step given
by `step` (0 is current, -1 previous, ...).
This should be done, for example, prior to every nonlinear
solver iteration.
"""
for step_cache in six.itervalues(self.evaluate_cache):
for key in list(step_cache.keys()):
if key == step: # Given time step to clear.
step_cache.pop(key)
def evaluate(self, mode='val',
region=None, integral=None, integration=None,
step=0, time_derivative=None, is_trace=False,
trace_region=None, dt=None, bf=None):
"""
Evaluate various quantities related to the variable according to
`mode` in quadrature points defined by `integral`.
The evaluated data are cached in the variable instance in
`evaluate_cache` attribute.
Parameters
----------
mode : one of 'val', 'grad', 'div', 'cauchy_strain'
The evaluation mode.
region : Region instance, optional
The region where the evaluation occurs. If None, the
underlying field region is used.
integral : Integral instance, optional
The integral defining quadrature points in which the
evaluation occurs. If None, the first order volume integral
is created. Must not be None for surface integrations.
integration : 'volume', 'surface', 'surface_extra', or 'point'
The term integration type. If None, it is derived from
`integral`.
step : int, default 0
The time step (0 means current, -1 previous, ...).
time_derivative : None or 'dt'
If not None, return time derivative of the data,
approximated by the backward finite difference.
is_trace : bool, default False
Indicate evaluation of trace of the variable on a boundary
region.
dt : float, optional
The time step to be used if `derivative` is `'dt'`. If None,
the `dt` attribute of the variable is used.
bf : Base function, optional
The base function to be used in 'val' mode.
Returns
-------
out : array
The 4-dimensional array of shape
`(n_el, n_qp, n_row, n_col)` with the requested data,
where `n_row`, `n_col` depend on `mode`.
"""
if integration == 'custom':
msg = 'cannot use FieldVariable.evaluate() with custom integration!'
raise ValueError(msg)
step_cache = self.evaluate_cache.setdefault(mode, {})
cache = step_cache.setdefault(step, {})
field = self.field
if region is None:
region = field.region
if is_trace:
region = region.get_mirror_region(trace_region)
if (region is not field.region) and not region.is_empty:
assert_(field.region.contains(region))
if integral is None:
integral = Integral('aux_1', 1)
if integration is None:
integration = 'volume' if region.can_cells else 'surface'
geo, _, key = field.get_mapping(region, integral, integration,
return_key=True)
key += (time_derivative, is_trace)
if key in cache:
out = cache[key]
else:
vec = self(step=step, derivative=time_derivative, dt=dt)
ct = integration
if integration == 'surface_extra':
ct = 'volume'
conn = field.get_econn(ct, region, is_trace, integration)
shape = self.get_data_shape(integral, integration, region.name)
if self.dtype == nm.float64:
out = eval_real(vec, conn, geo, mode, shape, bf)
else:
out = eval_complex(vec, conn, geo, mode, shape, bf)
cache[key] = out
return out
def get_state_in_region(self, region, reshape=True, step=0):
"""
Get DOFs of the variable in the given region.
Parameters
----------
region : Region
The selected region.
reshape : bool
If True, reshape the DOF vector to a 2D array with the individual
components as columns. Otherwise a 1D DOF array of the form [all
DOFs in region node 0, all DOFs in region node 1, ...] is returned.
step : int, default 0
The time step (0 means current, -1 previous, ...).
Returns
-------
out : array
The selected DOFs.
"""
nods = self.field.get_dofs_in_region(region, merge=True)
eq = nm.empty((len(nods) * self.n_components,), dtype=nm.int32)
for idof in range(self.n_components):
eq[idof::self.n_components] = self.n_components * nods \
+ idof + self.indx.start
out = self.data[step][eq]
if reshape:
out.shape = (len(nods), self.n_components)
return out
def apply_ebc(self, vec, offset=0, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions to
vector `vec`, starting at `offset`.
"""
eq_map = self.eq_map
ii = offset + eq_map.eq_ebc
# EBC,
if force_values is None:
vec[ii] = eq_map.val_ebc
else:
if isinstance(force_values, dict):
vec[ii] = force_values[self.name]
else:
vec[ii] = force_values
# EPBC.
vec[offset+eq_map.master] = vec[offset+eq_map.slave]
def apply_ic(self, vec, offset=0, force_values=None):
"""
Apply initial conditions conditions to vector `vec`, starting at
`offset`.
"""
ii = slice(offset, offset + self.n_dof)
if force_values is None:
vec[ii] = self.get_initial_condition()
else:
if isinstance(force_values, dict):
vec[ii] = force_values[self.name]
else:
vec[ii] = force_values
def get_reduced(self, vec, offset=0, follow_epbc=False):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
The full vector starts in `vec` at `offset`. If 'follow_epbc' is True,
values of EPBC master DOFs are not simply thrown away, but added to the
corresponding slave DOFs, just like when assembling. For vectors with
state (unknown) variables it should be set to False, for assembled
vectors it should be set to True.
"""
eq_map = self.eq_map
ii = offset + eq_map.eqi
r_vec = vec[ii]
if follow_epbc:
master = offset + eq_map.master
slave = eq_map.eq[eq_map.slave]
ii = slave >= 0
la.assemble1d(r_vec, slave[ii], vec[master[ii]])
return r_vec
def get_full(self, r_vec, r_offset=0, force_value=None,
vec=None, offset=0):
"""
Get the full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Notes
-----
The reduced vector starts in `r_vec` at `r_offset`.
Passing a `force_value` overrides the EBC values. Optionally,
`vec` argument can be provided to store the full vector (in
place) starting at `offset`.
"""
if vec is None:
vec = nm.empty(self.n_dof, dtype=r_vec.dtype)
else:
vec = vec[offset:offset+self.n_dof]
eq_map = self.eq_map
r_vec = r_vec[r_offset:r_offset+eq_map.n_eq]
# EBC.
vec[eq_map.eq_ebc] = get_default(force_value, eq_map.val_ebc)
# Reduced vector values.
vec[eq_map.eqi] = r_vec
# EPBC.
vec[eq_map.master] = vec[eq_map.slave]
unused_dofs = self.field.get('unused_dofs')
if unused_dofs is not None:
vec[:] = self.field.restore_substituted(vec)
return vec
def create_output(self, vec=None, key=None, extend=True, fill_value=None,
linearization=None):
"""
Convert the DOF vector to a dictionary of output data usable by
Mesh.write().
Parameters
----------
vec : array, optional
An alternative DOF vector to be used instead of the variable
DOF vector.
key : str, optional
The key to be used in the output dictionary instead of the
variable name.
extend : bool
Extend the DOF values to cover the whole domain.
fill_value : float or complex
The value used to fill the missing DOF values if `extend` is True.
linearization : Struct or None
The linearization configuration for higher order approximations.
"""
linearization = get_default(linearization, Struct(kind='strip'))
if vec is None:
vec = self()
key = get_default(key, self.name)
aux = nm.reshape(vec,
(self.n_dof // self.n_components, self.n_components))
out = self.field.create_output(aux, self.name, dof_names=self.dofs,
key=key, extend=extend,
fill_value=fill_value,
linearization=linearization)
return out
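    # Illustrative sketch (assumption): converting the current DOF vector to
    # output data and writing it with the field's mesh, cf. Mesh.write():
    #
    #     out = u.create_output()
    #     u.field.domain.mesh.write('u.vtk', io='auto', out=out)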
def get_element_diameters(self, cells, mode, square=False):
"""Get diameters of selected elements."""
field = self.field
domain = field.domain
cells = nm.array(cells)
diameters = nm.empty((cells.shape[0],), dtype=nm.float64)
        integral = Integral('i_tmp', 1)
        # The remainder is a reconstructed sketch (assumption): it reuses the
        # field.get_mapping()/domain API employed by the methods above.
        vg, _ = field.get_mapping(field.region, integral, 'volume')
        diameters = domain.get_element_diameters(cells, vg, mode,
                                                 square=square)
        return diameters
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, | OneTypeList(Variable) | sfepy.base.base.OneTypeList |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
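def _decode_constrained_dof(entry):
    """
    Illustrative helper, not part of the original module: with
    ``active_only=False``, :func:`create_adof_conns()` stores E(P)BC-constrained
    entries as ``-1 - <DOF number>`` (see the Notes above); this recovers the
    full DOF number from such an entry.
    """
    return -1 - entry if entry < 0 else entry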
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
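def _demo_create_adof_conn():
    """
    Illustrative sketch, not part of the original module: shows the
    DOF-by-DOF local ordering documented in :func:`create_adof_conn()`.
    """
    # Trivial equation mapping, two elements sharing node 1, two DOFs per node.
    eq = nm.arange(6, dtype=nm.int32)
    conn = nm.array([[0, 1], [1, 2]], dtype=nm.int32)
    adc = create_adof_conn(eq, conn, 2, 0)
    # adc == [[0, 2, 1, 3], [2, 4, 3, 5]]: all u_0 DOFs of the local nodes
    # come first, then all u_1 DOFs, while the global numbering stays
    # node-by-node (DOFs 0, 1 in node 0, DOFs 2, 3 in node 1, ...).
    return adc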
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
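def _demo_expand_basis():
    """
    Illustrative sketch, not part of the original module: expand a scalar
    basis (1 component, 3 basis functions, 4 evaluation points in one cell)
    to 2 DOFs per node.
    """
    basis = nm.ones((1, 4, 1, 3), dtype=nm.float64)
    ebasis = expand_basis(basis, 2)
    # ebasis.shape == (1, 4, 2, 6): row ir holds the original basis functions
    # in the ir-th block of columns, matching the DOF-by-DOF ordering of
    # create_adof_conn().
    return ebasis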
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
        Link state variables with their corresponding virtual variables, and
        assign a link to this container to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
            except AttributeError:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
            self.vdi = DofInfo('virtual_dof_info')
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
    If a variable does not have an equation mapping yet, a trivial mapping is
    assumed and a connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
        if key not in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
            if key not in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
        Link state variables with their corresponding virtual variables, and
        assign a link to this container to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
            except AttributeError:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
        if (ts is None) or (ts.step == 0):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
            The set of boundary conditions active at the current time step.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
        # Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
            self.avdi = DofInfo('active_virtual_dof_info')
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
    If a variable does not have an equation mapping yet, a trivial mapping is
    assumed and a connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
        if key not in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
            if key not in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
        Link state variables with their corresponding virtual variables, and
        assign a link to this container to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
            except AttributeError:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
        if (ts is None) or (ts.step == 0):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
            The set of boundary conditions active at the current time step.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
        # Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
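    # Usage sketch (illustrative; assumes `ebcs`, `epbcs`, `ts` and `functions`
    # come from a fully set-up problem):
    #
    #     active = variables.equation_mapping(ebcs, epbcs, ts, functions)
    #     n_rows, n_cols = variables.get_matrix_shape()
    #
    # get_matrix_shape() below relies on the active DOF info created here.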
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
        If `follow_epbc` is True, values of EPBC master dofs are not simply
        thrown away, but added to the corresponding slave dofs, just like when
        assembling. For vectors with state (unknown) variables it should be
        set to False; for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
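    # Note (illustrative): strip_state_vector() and make_full_vec() are
    # complementary. A full DOF vector satisfying the E(P)BCs can be reduced
    # and then reconstructed, assuming the equation mapping exists:
    #
    #     svec = variables.strip_state_vector(vec)
    #     vec2 = variables.make_full_vec(svec)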
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
        data : array or dict
            The state vector or a dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
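    # Usage sketch (illustrative; 'u' is a hypothetical state variable name):
    #
    #     variables.set_data({'u' : vec_u})   # per-variable data
    #     variables.set_data(state_vec)       # full state vector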
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
            if key not in var_info: continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
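    # Usage sketch (illustrative; `pb` denotes a Problem instance, which is an
    # assumption not shown here):
    #
    #     out = variables.state_to_output(vec)
    #     pb.domain.mesh.write('output.vtk', io='auto', out=out)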
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
| output('updating variables...') | sfepy.base.base.output |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
    If a variable does not have an equation mapping yet, a trivial mapping is
    assumed and a connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
        if key not in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
            if key not in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
        Link state variables with their corresponding virtual variables, and
        assign a link to this container to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
            except AttributeError:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
        if (ts is None) or (ts.step == 0):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
            The set of boundary conditions active at the current time step.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
        # Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
        If `follow_epbc` is True, values of EPBC master dofs are not simply
        thrown away, but added to the corresponding slave dofs, just like when
        assembling. For vectors with state (unknown) variables it should be
        set to False; for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
        data : array or dict
            The state vector or a dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
            if key not in var_info: continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
| output('...done') | sfepy.base.base.output |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
    If a variable does not have an equation mapping yet, a trivial mapping is
    assumed and a connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
        if key not in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
            if key not in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
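# A note on the `-1 - <DOF number>` encoding mentioned above: with
# `active_only=False`, a constrained DOF, say DOF 5 with zero offset, is
# stored as -1 - 5 = -6; the original DOF number is recovered as
# -1 - (-6) = 5.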
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
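# A minimal doctest-style sketch (hypothetical values: identity equation
# mapping, one element, zero offset) of the DOF-by-DOF local ordering
# described above:
#
#   >>> import numpy as nm
#   >>> eq = nm.arange(6, dtype=nm.int32)             # 3 nodes, 2 DOFs per node
#   >>> conn = nm.array([[0, 1, 2]], dtype=nm.int32)
#   >>> create_adof_conn(eq, conn, 2, 0)
#   array([[0, 2, 4, 1, 3, 5]], dtype=int32)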
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
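# A minimal doctest-style sketch: a scalar basis (n_c = 1) with three shape
# functions expanded for a two-component variable (dpn = 2) becomes
# block-diagonal:
#
#   >>> import numpy as nm
#   >>> basis = nm.array([[[[1., 2., 3.]]]])  # shape (1, 1, 1, 3)
#   >>> expand_basis(basis, 2)[0, 0]
#   array([[1., 2., 3., 0., 0., 0.],
#          [0., 0., 0., 1., 2., 3.]])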
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except AttributeError:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
# Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
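# A hedged usage sketch (assuming `variables` is a set-up Variables instance
# and `ebcs`, `epbcs`, `ts`, `functions` come from the problem definition):
#
#   >>> active = variables.equation_mapping(ebcs, epbcs, ts, functions)
#   >>> variables.get_matrix_shape()  # (n active virtual DOFs, n active state DOFs)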
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
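# A hedged usage sketch of the reduced/full DOF vector round trip (assuming
# `variables` is a Variables instance with equation_mapping() already called):
#
#   >>> vec = variables.create_state_vector()
#   >>> variables.apply_ebc(vec)                  # set E(P)BC values
#   >>> svec = variables.strip_state_vector(vec)  # drop constrained DOFs
#   >>> vec2 = variables.make_full_vec(svec)      # restore E(P)BC values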
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
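# A hedged sketch (assuming `variables` holds state variables 'u' and 'p'):
# get_state_parts(vec) returns {'u': vec[indx_u], 'p': vec[indx_p]}, i.e.
# views of `vec` restricted to each variable's DOF slice.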
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
if self.primary_var_name == '(set-to-None)':
self.primary_var_name = None
self.dof_name = self.name
else:
self.dof_name = self.primary_var_name
if special is not None:
self.special = special
else:
raise NotImplementedError('unknown variable kind: %s' % kind)
self.kind = kind
def _setup_dofs(self, n_nod, n_components, val_shape):
"""
Setup number of DOFs and DOF names.
"""
self.n_nod = n_nod
self.n_components = n_components
self.val_shape = val_shape
self.n_dof = self.n_nod * self.n_components
self.dofs = [self.dof_name + ('.%d' % ii)
for ii in range(self.n_components)]
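# For instance, a two-component displacement variable named 'u' gets
# n_dof = 2 * n_nod and DOF names ['u.0', 'u.1'].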
def get_primary(self):
"""
Get the corresponding primary variable.
Returns
-------
var : Variable instance
The primary variable: `self` for state variables or when
`primary_var_name` is None, otherwise the variable named
`primary_var_name`, or None if it is not among the defined variables.
"""
if self.is_state():
var = self
elif self.primary_var_name is not None:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
else:
var = self
return var
def get_dual(self):
"""
Get the dual variable.
Returns
-------
var : Variable instance
The primary variable for non-state variables, or the dual
variable for state variables.
"""
if self.is_state():
if ((self._variables is not None)
and (self.dual_var_name in self._variables.names)):
var = self._variables[self.dual_var_name]
else:
var = None
else:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
return var
def is_state(self):
return is_state in self.flags
def is_virtual(self):
return is_virtual in self.flags
def is_parameter(self):
return is_parameter in self.flags
def is_state_or_parameter(self):
return (is_state in self.flags) or (is_parameter in self.flags)
def is_kind(self, kind):
return eval('self.is_%s()' % kind)
def is_real(self):
return self.dtype in real_types
def is_complex(self):
return self.dtype in complex_types
def is_finite(self, step=0, derivative=None, dt=None):
return nm.isfinite(self(step=step, derivative=derivative, dt=dt)).all()
def get_primary_name(self):
if self.is_state():
name = self.name
else:
name = self.primary_var_name
return name
def init_history(self):
"""Initialize data of variables with history."""
if self.history is None: return
self.data = deque((self.history + 1) * [None])
self.step = 0
def time_update(self, ts, functions):
"""Implemented in subclasses."""
pass
def advance(self, ts):
"""
Advance in time the DOF state history. A copy of the DOF vector
is made to prevent history modification.
"""
if self.history is None: return
self.step = ts.step + 1
if self.history > 0:
# Copy the current step data to the history data, shift history,
# initialize if needed. The current step data are left intact.
# Note: cannot use self.data.rotate() due to data sharing with
# State.
for ii in range(self.history, 0, -1):
if self.data[ii] is None:
self.data[ii] = nm.empty_like(self.data[0])
self.data[ii][:] = self.data[ii - 1]
# Advance evaluate cache.
for step_cache in six.itervalues(self.evaluate_cache):
steps = sorted(step_cache.keys())
for step in steps:
if step is None:
# Special caches with possible custom advance()
# function.
for key, val in six.iteritems(step_cache[step]):
if hasattr(val, '__advance__'):
val.__advance__(ts, val)
elif -step < self.history:
step_cache[step-1] = step_cache[step]
if len(steps) and (steps[0] is not None):
step_cache.pop(steps[-1])
def init_data(self, step=0):
"""
Initialize the DOF vector data of time step `step` to zeros.
"""
if self.is_state_or_parameter():
data = nm.zeros((self.n_dof,), dtype=self.dtype)
self.set_data(data, step=step)
def set_constant(self, val):
"""
Set the variable to a constant value.
"""
data = nm.empty((self.n_dof,), dtype=self.dtype)
data.fill(val)
self.set_data(data)
def set_data(self, data=None, indx=None, step=0,
preserve_caches=False):
"""
Set data (vector of DOF values) of the variable.
Parameters
----------
data : array
The vector of DOF values.
indx : slice, optional
If given, `data[indx]` is used.
step : int, optional
The time history step, 0 (default) = current.
preserve_caches : bool
If True, do not invalidate evaluate caches of the variable.
"""
data = data.ravel()
if indx is None:
indx = slice(0, len(data))
else:
indx = slice(int(indx.start), int(indx.stop))
n_data_dof = indx.stop - indx.start
if self.n_dof != n_data_dof:
msg = 'incompatible data shape! (%d (variable) == %d (data))' \
% (self.n_dof, n_data_dof)
raise ValueError(msg)
elif (step > 0) or (-step >= len(self.data)):
raise ValueError('step %d out of range! ([%d, 0])'
% (step, -(len(self.data) - 1)))
else:
self.data[step] = data
self.indx = indx
if not preserve_caches:
self.invalidate_evaluate_cache(step=step)
def __call__(self, step=0, derivative=None, dt=None):
"""
Return vector of degrees of freedom of the variable.
Parameters
----------
step : int, default 0
The time step (0 means current, -1 previous, ...).
derivative : None or 'dt'
If not None, return time derivative of the DOF vector,
approximated by the backward finite difference.
Returns
-------
vec : array
The DOF vector. If `derivative` is None, a view of the data vector;
otherwise the required derivative of the DOF vector
at the time step given by `step`.
Notes
-----
If the previous time step is requested while the variable is at step 0,
the step 0 DOF vector is returned instead.
"""
if derivative is None:
if (self.step == 0) and (step == -1):
data = self.data[0]
else:
data = self.data[-step]
if data is None:
raise ValueError('data of variable are not set! (%s, step %d)' \
% (self.name, step))
return data[self.indx]
else:
if self.history is None:
msg = 'set history type of variable %s to use derivatives!'\
% self.name
raise ValueError(msg)
dt = | get_default(dt, self.dt) | sfepy.base.base.get_default |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
# Assing EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
if self.primary_var_name == '(set-to-None)':
self.primary_var_name = None
self.dof_name = self.name
else:
self.dof_name = self.primary_var_name
if special is not None:
self.special = special
else:
raise NotImplementedError('unknown variable kind: %s' % kind)
self.kind = kind
def _setup_dofs(self, n_nod, n_components, val_shape):
"""
Setup number of DOFs and DOF names.
"""
self.n_nod = n_nod
self.n_components = n_components
self.val_shape = val_shape
self.n_dof = self.n_nod * self.n_components
self.dofs = [self.dof_name + ('.%d' % ii)
for ii in range(self.n_components)]
def get_primary(self):
"""
Get the corresponding primary variable.
Returns
-------
var : Variable instance
The primary variable, or `self` for state
variables or if `primary_var_name` is None, or None if no other
variables are defined.
"""
if self.is_state():
var = self
elif self.primary_var_name is not None:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
else:
var = self
return var
def get_dual(self):
"""
Get the dual variable.
Returns
-------
var : Variable instance
The primary variable for non-state variables, or the dual
variable for state variables.
"""
if self.is_state():
if ((self._variables is not None)
and (self.dual_var_name in self._variables.names)):
var = self._variables[self.dual_var_name]
else:
var = None
else:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
return var
def is_state(self):
return is_state in self.flags
def is_virtual(self):
return is_virtual in self.flags
def is_parameter(self):
return is_parameter in self.flags
def is_state_or_parameter(self):
return (is_state in self.flags) or (is_parameter in self.flags)
def is_kind(self, kind):
return eval('self.is_%s()' % kind)
def is_real(self):
return self.dtype in real_types
def is_complex(self):
return self.dtype in complex_types
def is_finite(self, step=0, derivative=None, dt=None):
return nm.isfinite(self(step=step, derivative=derivative, dt=dt)).all()
def get_primary_name(self):
if self.is_state():
name = self.name
else:
name = self.primary_var_name
return name
def init_history(self):
"""Initialize data of variables with history."""
if self.history is None: return
self.data = deque((self.history + 1) * [None])
self.step = 0
def time_update(self, ts, functions):
"""Implemented in subclasses."""
pass
def advance(self, ts):
"""
Advance in time the DOF state history. A copy of the DOF vector
is made to prevent history modification.
"""
if self.history is None: return
self.step = ts.step + 1
if self.history > 0:
# Copy the current step data to the history data, shift history,
# initialize if needed. The current step data are left intact.
# Note: cannot use self.data.rotate() due to data sharing with
# State.
for ii in range(self.history, 0, -1):
if self.data[ii] is None:
self.data[ii] = nm.empty_like(self.data[0])
self.data[ii][:] = self.data[ii - 1]
# Advance evaluate cache.
for step_cache in six.itervalues(self.evaluate_cache):
steps = sorted(step_cache.keys())
for step in steps:
if step is None:
# Special caches with possible custom advance()
# function.
for key, val in six.iteritems(step_cache[step]):
if hasattr(val, '__advance__'):
val.__advance__(ts, val)
elif -step < self.history:
step_cache[step-1] = step_cache[step]
if len(steps) and (steps[0] is not None):
step_cache.pop(steps[-1])
def init_data(self, step=0):
"""
Initialize the dof vector data of time step `step` to zeros.
"""
if self.is_state_or_parameter():
data = nm.zeros((self.n_dof,), dtype=self.dtype)
self.set_data(data, step=step)
def set_constant(self, val):
"""
Set the variable to a constant value.
"""
data = nm.empty((self.n_dof,), dtype=self.dtype)
data.fill(val)
self.set_data(data)
def set_data(self, data=None, indx=None, step=0,
preserve_caches=False):
"""
Set data (vector of DOF values) of the variable.
Parameters
----------
data : array
The vector of DOF values.
indx : int, optional
If given, `data[indx]` is used.
step : int, optional
The time history step, 0 (default) = current.
preserve_caches : bool
If True, do not invalidate evaluate caches of the variable.
"""
data = data.ravel()
if indx is None:
indx = slice(0, len(data))
else:
indx = slice(int(indx.start), int(indx.stop))
n_data_dof = indx.stop - indx.start
if self.n_dof != n_data_dof:
msg = 'incompatible data shape! (%d (variable) == %d (data))' \
% (self.n_dof, n_data_dof)
raise ValueError(msg)
elif (step > 0) or (-step >= len(self.data)):
raise ValueError('step %d out of range! ([%d, 0])'
% (step, -(len(self.data) - 1)))
else:
self.data[step] = data
self.indx = indx
if not preserve_caches:
self.invalidate_evaluate_cache(step=step)
def __call__(self, step=0, derivative=None, dt=None):
"""
Return vector of degrees of freedom of the variable.
Parameters
----------
step : int, default 0
The time step (0 means current, -1 previous, ...).
derivative : None or 'dt'
If not None, return time derivative of the DOF vector,
approximated by the backward finite difference.
Returns
-------
vec : array
The DOF vector. If `derivative` is None: a view of the data vector,
otherwise: required derivative of the DOF vector
at time step given by `step`.
Notes
-----
If the previous time step is requested in step 0, the step 0
DOF vector is returned instead.
"""
if derivative is None:
if (self.step == 0) and (step == -1):
data = self.data[0]
else:
data = self.data[-step]
if data is None:
raise ValueError('data of variable are not set! (%s, step %d)' \
% (self.name, step))
return data[self.indx]
else:
if self.history is None:
msg = 'set history type of variable %s to use derivatives!'\
% self.name
raise ValueError(msg)
dt = get_default(dt, self.dt)
return (self(step=step) - self(step=step-1)) / dt
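# The derivative above is the backward finite difference
# du/dt ~ (u(step) - u(step - 1)) / dt, e.g. with DOF values 3.0 and 1.0
# and dt = 0.5 it evaluates to 4.0.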
def get_initial_condition(self):
if self.initial_condition is None:
return 0.0
else:
return self.initial_condition
class FieldVariable(Variable):
"""
A finite element field variable.
field .. field description of variable (borrowed)
"""
def __init__(self, name, kind, field, order=None, primary_var_name=None,
special=None, flags=None, history=None, **kwargs):
Variable.__init__(self, name, kind, order, primary_var_name,
special, flags, history=history, **kwargs)
self._set_field(field)
self.has_field = True
self.has_bc = True
self._variables = None
self.clear_evaluate_cache()
def _set_field(self, field):
"""
Set field of the variable.
Takes a reference to a Field instance. Sets dtype according to
field.dtype. Sets the `dim` attribute to the spatial dimension.
"""
self.is_surface = field.is_surface
self.field = field
self._setup_dofs(field.n_nod, field.n_components, field.val_shape)
self.flags.add(is_field)
self.dtype = field.dtype
self.dim = field.domain.shape.dim
def _get_setter(self, kind, functions, **kwargs):
"""
Get the setter function of the variable and its arguments depending on
the setter kind.
"""
if not (hasattr(self, 'special') and (kind in self.special)):
return
setter_name = self.special[kind]
setter = functions[setter_name]
region = self.field.region
nod_list = self.field.get_dofs_in_region(region)
nods = nm.unique(nod_list)
coors = self.field.get_coor(nods)
if kind == 'setter':
sargs = (kwargs.get('ts'), coors)
elif kind == 'ic':
sargs = (coors, )
skwargs = {'region' : region}
return setter, sargs, skwargs
def get_field(self):
return self.field
def get_mapping(self, region, integral, integration,
get_saved=False, return_key=False):
"""
Get the reference element mapping of the underlying field.
See Also
--------
sfepy.discrete.common.fields.Field.get_mapping
"""
if region is None:
region = self.field.region
out = self.field.get_mapping(region, integral, integration,
get_saved=get_saved,
return_key=return_key)
return out
def get_dof_conn(self, dc_type, is_trace=False, trace_region=None):
"""
Get active dof connectivity of a variable.
Notes
-----
The primary and dual variables must have the same Region.
"""
if self.is_virtual():
var = self.get_primary()
# No primary variable can occur in single term evaluations.
var_name = var.name if var is not None else self.name
else:
var_name = self.name
if not is_trace:
region_name = dc_type.region_name
else:
aux = self.field.domain.regions[dc_type.region_name]
region = aux.get_mirror_region(trace_region)
region_name = region.name
key = (var_name, region_name, dc_type.type, is_trace)
dc = self.adof_conns[key]
return dc
def get_dof_info(self, active=False):
details = Struct(name='field_var_dof_details',
n_nod=self.n_nod,
dpn=self.n_components)
if active:
n_dof = self.n_adof
else:
n_dof = self.n_dof
return n_dof, details
def time_update(self, ts, functions):
"""
Store time step, set variable data for variables with the setter
function.
"""
if ts is not None:
self.dt = ts.dt
if hasattr(self, 'special') and ('setter' in self.special):
setter, sargs, skwargs = self._get_setter('setter', functions,
ts=ts)
self.set_data(setter(*sargs, **skwargs))
| output('data of %s set by %s()' % (self.name, setter.name)) | sfepy.base.base.output |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
    If a variable does not have an equation mapping, a trivial mapping is
    assumed and a connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
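# A small numeric sketch (added for illustration, not part of the original
# module) of the encoding used when `active_only` is False: a constrained
# DOF `d` of a variable whose DOFs start at global offset `o` is stored as
# ``-1 - (d + o)``, so the full global index can be recovered as
# ``-1 - stored``. All values below are hypothetical.
def _example_ebc_dof_encoding():
    eq = nm.arange(6, dtype=nm.int32)  # hypothetical full DOF numbering
    eq_ebc = nm.array([4], dtype=nm.int32)  # hypothetical constrained DOF
    offset = 10  # hypothetical global offset of the variable
    eq[eq_ebc] = -1 - (eq_ebc + offset)  # stored as -15
    recovered = -1 - eq[eq_ebc]  # back to the full global index 14
    return eq, recovered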
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
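# A minimal usage sketch (added for illustration, not part of the original
# module): it shows the DOF-by-DOF local ordering described above for a
# single three-node element with two DOFs per node and an identity equation
# mapping (no constrained DOFs).
def _example_create_adof_conn():
    conn = nm.array([[0, 1, 2]], dtype=nm.int32)  # one element, three nodes
    eq = nm.arange(6, dtype=nm.int32)  # trivial equation mapping
    adc = create_adof_conn(eq, conn, dpn=2, offset=0)
    # All u_0 DOFs of the element nodes come first, then all u_1 DOFs:
    # adc == [[0, 2, 4, 1, 3, 5]]
    return adc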
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
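# A minimal usage sketch (added for illustration, not part of the original
# module): expanding a scalar basis to two DOFs per node places the same
# basis functions block-diagonally, one block per component.
def _example_expand_basis():
    # one cell, two quadrature points, one component, three basis functions
    basis = nm.ones((1, 2, 1, 3), dtype=nm.float64)
    ebasis = expand_basis(basis, dpn=2)
    # ebasis.shape == (1, 2, 2, 6); ebasis[..., 0, :3] and ebasis[..., 1, 3:]
    # hold the original basis values, the remaining entries are zero.
    return ebasis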
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
        and assign a link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
        # Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
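# Typical reduced/full DOF vector round trip (a sketch only; `variables`
# stands for a fully set up Variables instance with equation mappings
# already created):
#
#     vec = variables.create_state_vector()
#     variables.apply_ebc(vec)                  # impose E(P)BC values
#     svec = variables.strip_state_vector(vec)  # drop constrained DOFs
#     vec2 = variables.make_full_vec(svec)      # re-insert E(P)BC values
#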
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
if self.primary_var_name == '(set-to-None)':
self.primary_var_name = None
self.dof_name = self.name
else:
self.dof_name = self.primary_var_name
if special is not None:
self.special = special
else:
raise NotImplementedError('unknown variable kind: %s' % kind)
self.kind = kind
def _setup_dofs(self, n_nod, n_components, val_shape):
"""
Setup number of DOFs and DOF names.
"""
self.n_nod = n_nod
self.n_components = n_components
self.val_shape = val_shape
self.n_dof = self.n_nod * self.n_components
self.dofs = [self.dof_name + ('.%d' % ii)
for ii in range(self.n_components)]
def get_primary(self):
"""
Get the corresponding primary variable.
Returns
-------
var : Variable instance
            The primary variable: `self` for state variables or when
            `primary_var_name` is None, otherwise the variable called
            `primary_var_name` if it exists, or None.
"""
if self.is_state():
var = self
elif self.primary_var_name is not None:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
else:
var = self
return var
def get_dual(self):
"""
Get the dual variable.
Returns
-------
var : Variable instance
The primary variable for non-state variables, or the dual
variable for state variables.
"""
if self.is_state():
if ((self._variables is not None)
and (self.dual_var_name in self._variables.names)):
var = self._variables[self.dual_var_name]
else:
var = None
else:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
return var
def is_state(self):
return is_state in self.flags
def is_virtual(self):
return is_virtual in self.flags
def is_parameter(self):
return is_parameter in self.flags
def is_state_or_parameter(self):
return (is_state in self.flags) or (is_parameter in self.flags)
def is_kind(self, kind):
return eval('self.is_%s()' % kind)
def is_real(self):
return self.dtype in real_types
def is_complex(self):
return self.dtype in complex_types
def is_finite(self, step=0, derivative=None, dt=None):
return nm.isfinite(self(step=step, derivative=derivative, dt=dt)).all()
def get_primary_name(self):
if self.is_state():
name = self.name
else:
name = self.primary_var_name
return name
def init_history(self):
"""Initialize data of variables with history."""
if self.history is None: return
self.data = deque((self.history + 1) * [None])
self.step = 0
def time_update(self, ts, functions):
"""Implemented in subclasses."""
pass
def advance(self, ts):
"""
        Advance the DOF state history in time. A copy of the DOF vector
        is made to prevent modification of the history.
"""
if self.history is None: return
self.step = ts.step + 1
if self.history > 0:
# Copy the current step data to the history data, shift history,
# initialize if needed. The current step data are left intact.
# Note: cannot use self.data.rotate() due to data sharing with
# State.
for ii in range(self.history, 0, -1):
if self.data[ii] is None:
self.data[ii] = nm.empty_like(self.data[0])
self.data[ii][:] = self.data[ii - 1]
# Advance evaluate cache.
for step_cache in six.itervalues(self.evaluate_cache):
steps = sorted(step_cache.keys())
for step in steps:
if step is None:
# Special caches with possible custom advance()
# function.
for key, val in six.iteritems(step_cache[step]):
if hasattr(val, '__advance__'):
val.__advance__(ts, val)
elif -step < self.history:
step_cache[step-1] = step_cache[step]
if len(steps) and (steps[0] is not None):
step_cache.pop(steps[-1])
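    # Sketch (illustrative only) for `history` == 1: before advance() the
    # data deque holds [u(t_n), u(t_{n-1})]; afterwards data[1] is
    # overwritten by a copy of data[0], so that the next step can update
    # data[0] in place without touching the stored history.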
def init_data(self, step=0):
"""
Initialize the dof vector data of time step `step` to zeros.
"""
if self.is_state_or_parameter():
data = nm.zeros((self.n_dof,), dtype=self.dtype)
self.set_data(data, step=step)
def set_constant(self, val):
"""
Set the variable to a constant value.
"""
data = nm.empty((self.n_dof,), dtype=self.dtype)
data.fill(val)
self.set_data(data)
def set_data(self, data=None, indx=None, step=0,
preserve_caches=False):
"""
Set data (vector of DOF values) of the variable.
Parameters
----------
data : array
The vector of DOF values.
        indx : slice, optional
            If given, the DOF values of the variable are `data[indx]`.
step : int, optional
The time history step, 0 (default) = current.
preserve_caches : bool
If True, do not invalidate evaluate caches of the variable.
"""
data = data.ravel()
if indx is None:
indx = slice(0, len(data))
else:
indx = slice(int(indx.start), int(indx.stop))
n_data_dof = indx.stop - indx.start
if self.n_dof != n_data_dof:
msg = 'incompatible data shape! (%d (variable) == %d (data))' \
% (self.n_dof, n_data_dof)
raise ValueError(msg)
elif (step > 0) or (-step >= len(self.data)):
raise ValueError('step %d out of range! ([%d, 0])'
% (step, -(len(self.data) - 1)))
else:
self.data[step] = data
self.indx = indx
if not preserve_caches:
self.invalidate_evaluate_cache(step=step)
def __call__(self, step=0, derivative=None, dt=None):
"""
Return vector of degrees of freedom of the variable.
Parameters
----------
step : int, default 0
The time step (0 means current, -1 previous, ...).
derivative : None or 'dt'
If not None, return time derivative of the DOF vector,
approximated by the backward finite difference.
Returns
-------
vec : array
The DOF vector. If `derivative` is None: a view of the data vector,
otherwise: required derivative of the DOF vector
at time step given by `step`.
Notes
-----
        If the previous time step is requested while the variable is at time
        step 0, the step 0 DOF vector is returned instead.
"""
if derivative is None:
if (self.step == 0) and (step == -1):
data = self.data[0]
else:
data = self.data[-step]
if data is None:
raise ValueError('data of variable are not set! (%s, step %d)' \
% (self.name, step))
return data[self.indx]
else:
if self.history is None:
msg = 'set history type of variable %s to use derivatives!'\
% self.name
raise ValueError(msg)
dt = get_default(dt, self.dt)
return (self(step=step) - self(step=step-1)) / dt
def get_initial_condition(self):
if self.initial_condition is None:
return 0.0
else:
return self.initial_condition
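# Sketch (illustrative only): for a variable `u` created with a nonzero
# `history`, the backward-difference time derivative described in
# Variable.__call__() is obtained as
#
#     du_dt = u(step=0, derivative='dt')  # == (u(0) - u(-1)) / u.dt
#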
class FieldVariable(Variable):
"""
A finite element field variable.
field .. field description of variable (borrowed)
"""
def __init__(self, name, kind, field, order=None, primary_var_name=None,
special=None, flags=None, history=None, **kwargs):
Variable.__init__(self, name, kind, order, primary_var_name,
special, flags, history=history, **kwargs)
self._set_field(field)
self.has_field = True
self.has_bc = True
self._variables = None
self.clear_evaluate_cache()
def _set_field(self, field):
"""
Set field of the variable.
        Takes a reference to a Field instance. Sets dtype according to
        field.dtype. Sets the `dim` attribute to the spatial dimension.
"""
self.is_surface = field.is_surface
self.field = field
self._setup_dofs(field.n_nod, field.n_components, field.val_shape)
self.flags.add(is_field)
self.dtype = field.dtype
self.dim = field.domain.shape.dim
def _get_setter(self, kind, functions, **kwargs):
"""
        Get the setter function of the variable and its arguments depending
        on the setter kind.
"""
if not (hasattr(self, 'special') and (kind in self.special)):
return
setter_name = self.special[kind]
setter = functions[setter_name]
region = self.field.region
nod_list = self.field.get_dofs_in_region(region)
nods = nm.unique(nod_list)
coors = self.field.get_coor(nods)
if kind == 'setter':
sargs = (kwargs.get('ts'), coors)
elif kind == 'ic':
sargs = (coors, )
skwargs = {'region' : region}
return setter, sargs, skwargs
def get_field(self):
return self.field
def get_mapping(self, region, integral, integration,
get_saved=False, return_key=False):
"""
Get the reference element mapping of the underlying field.
See Also
--------
sfepy.discrete.common.fields.Field.get_mapping
"""
if region is None:
region = self.field.region
out = self.field.get_mapping(region, integral, integration,
get_saved=get_saved,
return_key=return_key)
return out
def get_dof_conn(self, dc_type, is_trace=False, trace_region=None):
"""
Get active dof connectivity of a variable.
Notes
-----
The primary and dual variables must have the same Region.
"""
if self.is_virtual():
var = self.get_primary()
# No primary variable can occur in single term evaluations.
var_name = var.name if var is not None else self.name
else:
var_name = self.name
if not is_trace:
region_name = dc_type.region_name
else:
aux = self.field.domain.regions[dc_type.region_name]
region = aux.get_mirror_region(trace_region)
region_name = region.name
key = (var_name, region_name, dc_type.type, is_trace)
dc = self.adof_conns[key]
return dc
def get_dof_info(self, active=False):
details = Struct(name='field_var_dof_details',
n_nod=self.n_nod,
dpn=self.n_components)
if active:
n_dof = self.n_adof
else:
n_dof = self.n_dof
return n_dof, details
def time_update(self, ts, functions):
"""
Store time step, set variable data for variables with the setter
function.
"""
if ts is not None:
self.dt = ts.dt
if hasattr(self, 'special') and ('setter' in self.special):
setter, sargs, skwargs = self._get_setter('setter', functions,
ts=ts)
self.set_data(setter(*sargs, **skwargs))
output('data of %s set by %s()' % (self.name, setter.name))
def set_from_qp(self, data_qp, integral, step=0):
"""
Set DOFs of variable using values in quadrature points
corresponding to the given integral.
"""
data_vertex = self.field.average_qp_to_vertices(data_qp, integral)
# Field nodes values.
data = self.field.interp_v_vals_to_n_vals(data_vertex)
data = data.ravel()
self.indx = slice(0, len(data))
self.data[step] = data
def set_from_mesh_vertices(self, data):
"""
Set the variable using values at the mesh vertices.
"""
ndata = self.field.interp_v_vals_to_n_vals(data)
self.set_data(ndata)
def set_from_function(self, fun, step=0):
"""
Set the variable data (the vector of DOF values) using a function of
space coordinates.
Parameters
----------
fun : callable
The function of coordinates returning DOF values of shape
`(n_coor, n_components)`.
step : int, optional
The time history step, 0 (default) = current.
"""
_, vv = self.field.set_dofs(fun, self.field.region, self.n_components)
self.set_data(vv.ravel(), step=step)
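    # Sketch (illustrative only; `u` stands for a hypothetical two-component
    # FieldVariable in 2D): setting the DOFs from a function of coordinates
    # as described above, with values of shape (n_coor, n_components):
    #
    #     u.set_from_function(lambda coors: nm.stack(
    #         (coors[:, 0], 0.0 * coors[:, 1]), axis=1))
    #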
def equation_mapping(self, bcs, var_di, ts, functions, problem=None,
warn=False):
"""
Create the mapping of active DOFs from/to all DOFs.
Sets n_adof.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.eq_map = EquationMap('eq_map', self.dofs, var_di)
if bcs is not None:
bcs.canonize_dof_names(self.dofs)
bcs.sort()
active_bcs = self.eq_map.map_equations(bcs, self.field, ts, functions,
problem=problem, warn=warn)
self.n_adof = self.eq_map.n_eq
return active_bcs
def setup_initial_conditions(self, ics, di, functions, warn=False):
"""
Setup of initial conditions.
"""
ics.canonize_dof_names(self.dofs)
ics.sort()
self.initial_condition = nm.zeros((di.n_dof[self.name],),
dtype=self.dtype)
for ic in ics:
region = ic.region
dofs, val = ic.dofs
if warn:
clean_msg = ('warning: ignoring nonexistent' \
' IC node (%s) in ' % self.name)
else:
clean_msg = None
nod_list = self.field.get_dofs_in_region(region)
if len(nod_list) == 0:
continue
fun = | get_condition_value(val, functions, 'IC', ic.name) | sfepy.discrete.conditions.get_condition_value |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
    If a variable does not have an equation mapping, a trivial mapping is
    assumed and a connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
        and assign a link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
        # Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
if self.primary_var_name == '(set-to-None)':
self.primary_var_name = None
self.dof_name = self.name
else:
self.dof_name = self.primary_var_name
if special is not None:
self.special = special
else:
raise NotImplementedError('unknown variable kind: %s' % kind)
self.kind = kind
def _setup_dofs(self, n_nod, n_components, val_shape):
"""
Setup number of DOFs and DOF names.
"""
self.n_nod = n_nod
self.n_components = n_components
self.val_shape = val_shape
self.n_dof = self.n_nod * self.n_components
self.dofs = [self.dof_name + ('.%d' % ii)
for ii in range(self.n_components)]
def get_primary(self):
"""
Get the corresponding primary variable.
Returns
-------
var : Variable instance
            The primary variable: `self` for state variables or when
            `primary_var_name` is None, otherwise the variable called
            `primary_var_name` if it exists, or None.
"""
if self.is_state():
var = self
elif self.primary_var_name is not None:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
else:
var = self
return var
def get_dual(self):
"""
Get the dual variable.
Returns
-------
var : Variable instance
The primary variable for non-state variables, or the dual
variable for state variables.
"""
if self.is_state():
if ((self._variables is not None)
and (self.dual_var_name in self._variables.names)):
var = self._variables[self.dual_var_name]
else:
var = None
else:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
return var
def is_state(self):
return is_state in self.flags
def is_virtual(self):
return is_virtual in self.flags
def is_parameter(self):
return is_parameter in self.flags
def is_state_or_parameter(self):
return (is_state in self.flags) or (is_parameter in self.flags)
def is_kind(self, kind):
return eval('self.is_%s()' % kind)
def is_real(self):
return self.dtype in real_types
def is_complex(self):
return self.dtype in complex_types
def is_finite(self, step=0, derivative=None, dt=None):
return nm.isfinite(self(step=step, derivative=derivative, dt=dt)).all()
def get_primary_name(self):
if self.is_state():
name = self.name
else:
name = self.primary_var_name
return name
def init_history(self):
"""Initialize data of variables with history."""
if self.history is None: return
self.data = deque((self.history + 1) * [None])
self.step = 0
def time_update(self, ts, functions):
"""Implemented in subclasses."""
pass
def advance(self, ts):
"""
Advance in time the DOF state history. A copy of the DOF vector
is made to prevent history modification.
"""
if self.history is None: return
self.step = ts.step + 1
if self.history > 0:
# Copy the current step data to the history data, shift history,
# initialize if needed. The current step data are left intact.
# Note: cannot use self.data.rotate() due to data sharing with
# State.
for ii in range(self.history, 0, -1):
if self.data[ii] is None:
self.data[ii] = nm.empty_like(self.data[0])
self.data[ii][:] = self.data[ii - 1]
# Advance evaluate cache.
for step_cache in six.itervalues(self.evaluate_cache):
steps = sorted(step_cache.keys())
for step in steps:
if step is None:
# Special caches with possible custom advance()
# function.
for key, val in six.iteritems(step_cache[step]):
if hasattr(val, '__advance__'):
val.__advance__(ts, val)
elif -step < self.history:
step_cache[step-1] = step_cache[step]
if len(steps) and (steps[0] is not None):
step_cache.pop(steps[-1])
def init_data(self, step=0):
"""
Initialize the dof vector data of time step `step` to zeros.
"""
if self.is_state_or_parameter():
data = nm.zeros((self.n_dof,), dtype=self.dtype)
self.set_data(data, step=step)
def set_constant(self, val):
"""
Set the variable to a constant value.
"""
data = nm.empty((self.n_dof,), dtype=self.dtype)
data.fill(val)
self.set_data(data)
def set_data(self, data=None, indx=None, step=0,
preserve_caches=False):
"""
Set data (vector of DOF values) of the variable.
Parameters
----------
data : array
The vector of DOF values.
indx : slice, optional
If given, `data[indx]` is used.
step : int, optional
The time history step, 0 (default) = current.
preserve_caches : bool
If True, do not invalidate evaluate caches of the variable.
"""
data = data.ravel()
if indx is None:
indx = slice(0, len(data))
else:
indx = slice(int(indx.start), int(indx.stop))
n_data_dof = indx.stop - indx.start
if self.n_dof != n_data_dof:
msg = 'incompatible data shape! (%d (variable) == %d (data))' \
% (self.n_dof, n_data_dof)
raise ValueError(msg)
elif (step > 0) or (-step >= len(self.data)):
raise ValueError('step %d out of range! ([%d, 0])'
% (step, -(len(self.data) - 1)))
else:
self.data[step] = data
self.indx = indx
if not preserve_caches:
self.invalidate_evaluate_cache(step=step)
def __call__(self, step=0, derivative=None, dt=None):
"""
Return vector of degrees of freedom of the variable.
Parameters
----------
step : int, default 0
The time step (0 means current, -1 previous, ...).
derivative : None or 'dt'
If not None, return time derivative of the DOF vector,
approximated by the backward finite difference.
Returns
-------
vec : array
The DOF vector. If `derivative` is None: a view of the data vector,
otherwise: required derivative of the DOF vector
at time step given by `step`.
Notes
-----
If the previous time step is requested in step 0, the step 0
DOF vector is returned instead.
"""
if derivative is None:
if (self.step == 0) and (step == -1):
data = self.data[0]
else:
data = self.data[-step]
if data is None:
raise ValueError('data of variable are not set! (%s, step %d)' \
% (self.name, step))
return data[self.indx]
else:
if self.history is None:
msg = 'set history type of variable %s to use derivatives!'\
% self.name
raise ValueError(msg)
dt = get_default(dt, self.dt)
return (self(step=step) - self(step=step-1)) / dt
def get_initial_condition(self):
if self.initial_condition is None:
return 0.0
else:
return self.initial_condition
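# Illustrative sketch (hypothetical helper, not part of the module): the
# backward finite difference returned by Variable.__call__(derivative='dt')
# is simply (u[step] - u[step - 1]) / dt, evaluated on the stored DOF vectors.
def _backward_difference_sketch():
    import numpy as nm
    dt = 0.1
    u_prev = nm.array([0.0, 1.0, 2.0])  # DOF vector at the previous step
    u_curr = nm.array([0.5, 1.5, 2.5])  # DOF vector at the current step
    return (u_curr - u_prev) / dt       # -> [5., 5., 5.]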
class FieldVariable(Variable):
"""
A finite element field variable.
field .. field description of variable (borrowed)
"""
def __init__(self, name, kind, field, order=None, primary_var_name=None,
special=None, flags=None, history=None, **kwargs):
Variable.__init__(self, name, kind, order, primary_var_name,
special, flags, history=history, **kwargs)
self._set_field(field)
self.has_field = True
self.has_bc = True
self._variables = None
self.clear_evaluate_cache()
def _set_field(self, field):
"""
Set field of the variable.
Takes reference to a Field instance. Sets dtype according to
field.dtype. Sets `dim` attribute to spatial dimension.
"""
self.is_surface = field.is_surface
self.field = field
self._setup_dofs(field.n_nod, field.n_components, field.val_shape)
self.flags.add(is_field)
self.dtype = field.dtype
self.dim = field.domain.shape.dim
def _get_setter(self, kind, functions, **kwargs):
"""
Get the setter function of the variable and its arguments depending on
the setter kind.
"""
if not (hasattr(self, 'special') and (kind in self.special)):
return
setter_name = self.special[kind]
setter = functions[setter_name]
region = self.field.region
nod_list = self.field.get_dofs_in_region(region)
nods = nm.unique(nod_list)
coors = self.field.get_coor(nods)
if kind == 'setter':
sargs = (kwargs.get('ts'), coors)
elif kind == 'ic':
sargs = (coors, )
skwargs = {'region' : region}
return setter, sargs, skwargs
def get_field(self):
return self.field
def get_mapping(self, region, integral, integration,
get_saved=False, return_key=False):
"""
Get the reference element mapping of the underlying field.
See Also
--------
sfepy.discrete.common.fields.Field.get_mapping
"""
if region is None:
region = self.field.region
out = self.field.get_mapping(region, integral, integration,
get_saved=get_saved,
return_key=return_key)
return out
def get_dof_conn(self, dc_type, is_trace=False, trace_region=None):
"""
Get active dof connectivity of a variable.
Notes
-----
The primary and dual variables must have the same Region.
"""
if self.is_virtual():
var = self.get_primary()
# No primary variable can occur in single term evaluations.
var_name = var.name if var is not None else self.name
else:
var_name = self.name
if not is_trace:
region_name = dc_type.region_name
else:
aux = self.field.domain.regions[dc_type.region_name]
region = aux.get_mirror_region(trace_region)
region_name = region.name
key = (var_name, region_name, dc_type.type, is_trace)
dc = self.adof_conns[key]
return dc
def get_dof_info(self, active=False):
details = Struct(name='field_var_dof_details',
n_nod=self.n_nod,
dpn=self.n_components)
if active:
n_dof = self.n_adof
else:
n_dof = self.n_dof
return n_dof, details
def time_update(self, ts, functions):
"""
Store time step, set variable data for variables with the setter
function.
"""
if ts is not None:
self.dt = ts.dt
if hasattr(self, 'special') and ('setter' in self.special):
setter, sargs, skwargs = self._get_setter('setter', functions,
ts=ts)
self.set_data(setter(*sargs, **skwargs))
output('data of %s set by %s()' % (self.name, setter.name))
def set_from_qp(self, data_qp, integral, step=0):
"""
Set DOFs of variable using values in quadrature points
corresponding to the given integral.
"""
data_vertex = self.field.average_qp_to_vertices(data_qp, integral)
# Field node values.
data = self.field.interp_v_vals_to_n_vals(data_vertex)
data = data.ravel()
self.indx = slice(0, len(data))
self.data[step] = data
def set_from_mesh_vertices(self, data):
"""
Set the variable using values at the mesh vertices.
"""
ndata = self.field.interp_v_vals_to_n_vals(data)
self.set_data(ndata)
def set_from_function(self, fun, step=0):
"""
Set the variable data (the vector of DOF values) using a function of
space coordinates.
Parameters
----------
fun : callable
The function of coordinates returning DOF values of shape
`(n_coor, n_components)`.
step : int, optional
The time history step, 0 (default) = current.
"""
_, vv = self.field.set_dofs(fun, self.field.region, self.n_components)
self.set_data(vv.ravel(), step=step)
def equation_mapping(self, bcs, var_di, ts, functions, problem=None,
warn=False):
"""
Create the mapping of active DOFs from/to all DOFs.
Sets n_adof.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.eq_map = EquationMap('eq_map', self.dofs, var_di)
if bcs is not None:
bcs.canonize_dof_names(self.dofs)
bcs.sort()
active_bcs = self.eq_map.map_equations(bcs, self.field, ts, functions,
problem=problem, warn=warn)
self.n_adof = self.eq_map.n_eq
return active_bcs
def setup_initial_conditions(self, ics, di, functions, warn=False):
"""
Setup of initial conditions.
"""
ics.canonize_dof_names(self.dofs)
ics.sort()
self.initial_condition = nm.zeros((di.n_dof[self.name],),
dtype=self.dtype)
for ic in ics:
region = ic.region
dofs, val = ic.dofs
if warn:
clean_msg = ('warning: ignoring nonexistent' \
' IC node (%s) in ' % self.name)
else:
clean_msg = None
nod_list = self.field.get_dofs_in_region(region)
if len(nod_list) == 0:
continue
fun = get_condition_value(val, functions, 'IC', ic.name)
if isinstance(fun, Function):
aux = fun
fun = lambda coors: aux(coors, ic=ic)
nods, vv = self.field.set_dofs(fun, region, len(dofs), clean_msg)
eq = | expand_nodes_to_equations(nods, dofs, self.dofs) | sfepy.discrete.common.dof_info.expand_nodes_to_equations |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable does not have the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
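# A minimal lookup sketch (variable and region names are hypothetical): the
# returned dict is keyed by (primary variable name, region name, type,
# is_trace); with active_only=False, constrained DOFs are stored as
# -1 - <DOF number> and can be decoded as shown below.
def _adof_conns_lookup_sketch(adof_conns):
    adc = adof_conns[('u', 'Omega', 'volume', False)]
    constrained_dofs = -1 - adc[adc < 0]  # recover the original DOF numbers
    return adc, constrained_dofs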
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
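# A small worked example (toy numbers) of create_adof_conn() defined above,
# using the module-level numpy import `nm`: one element with nodes [0, 1, 2],
# dpn = 2 and an identity equation mapping gives the row [0, 2, 4, 1, 3, 5] -
# DOF-by-DOF locally, while the global numbering stays node-by-node.
def _create_adof_conn_demo():
    eq = nm.arange(6, dtype=nm.int32)             # 3 nodes x 2 DOFs, no BCs
    conn = nm.array([[0, 1, 2]], dtype=nm.int32)  # a single element
    return create_adof_conn(eq, conn, 2, 0)       # -> [[0, 2, 4, 1, 3, 5]]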
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
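# Shape check (toy values) for expand_basis() above, using the module-level
# `nm`: a scalar basis of shape (n_el, n_qp, 1, n_bf) expanded for dpn = 2
# becomes a block-diagonal array of shape (n_el, n_qp, 2, 2 * n_bf),
# consistent with the DOF ordering produced by create_adof_conn().
def _expand_basis_demo():
    basis = nm.ones((1, 4, 1, 3), dtype=nm.float64)  # n_el=1, n_qp=4, n_c=1, n_bf=3
    return expand_basis(basis, 2).shape              # -> (1, 4, 2, 6)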
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign a link to this container to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except AttributeError:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
# Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array or dict
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
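# For orientation only (the field name 'displacement' is hypothetical): in a
# problem description file the variables are typically declared as below;
# sfepy transforms such a dict into the conf objects that
# Variables.from_conf() consumes.
_example_variable_defs = {
    'u' : ('unknown field', 'displacement', 0),  # state (unknown) variable
    'v' : ('test field', 'displacement', 'u'),   # virtual variable dual to 'u'
}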
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
if self.primary_var_name == '(set-to-None)':
self.primary_var_name = None
self.dof_name = self.name
else:
self.dof_name = self.primary_var_name
if special is not None:
self.special = special
else:
raise NotImplementedError('unknown variable kind: %s' % kind)
self.kind = kind
def _setup_dofs(self, n_nod, n_components, val_shape):
"""
Setup number of DOFs and DOF names.
"""
self.n_nod = n_nod
self.n_components = n_components
self.val_shape = val_shape
self.n_dof = self.n_nod * self.n_components
self.dofs = [self.dof_name + ('.%d' % ii)
for ii in range(self.n_components)]
def get_primary(self):
"""
Get the corresponding primary variable.
Returns
-------
var : Variable instance
The primary variable: `self` for state variables or when
`primary_var_name` is None; otherwise the variable named
`primary_var_name`, or None if it is not among the defined variables.
"""
if self.is_state():
var = self
elif self.primary_var_name is not None:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
else:
var = self
return var
def get_dual(self):
"""
Get the dual variable.
Returns
-------
var : Variable instance
The primary variable for non-state variables, or the dual
variable for state variables.
"""
if self.is_state():
if ((self._variables is not None)
and (self.dual_var_name in self._variables.names)):
var = self._variables[self.dual_var_name]
else:
var = None
else:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
return var
def is_state(self):
return is_state in self.flags
def is_virtual(self):
return is_virtual in self.flags
def is_parameter(self):
return is_parameter in self.flags
def is_state_or_parameter(self):
return (is_state in self.flags) or (is_parameter in self.flags)
def is_kind(self, kind):
return eval('self.is_%s()' % kind)
def is_real(self):
return self.dtype in real_types
def is_complex(self):
return self.dtype in complex_types
def is_finite(self, step=0, derivative=None, dt=None):
return nm.isfinite(self(step=step, derivative=derivative, dt=dt)).all()
def get_primary_name(self):
if self.is_state():
name = self.name
else:
name = self.primary_var_name
return name
def init_history(self):
"""Initialize data of variables with history."""
if self.history is None: return
self.data = deque((self.history + 1) * [None])
self.step = 0
def time_update(self, ts, functions):
"""Implemented in subclasses."""
pass
def advance(self, ts):
"""
Advance in time the DOF state history. A copy of the DOF vector
is made to prevent history modification.
"""
if self.history is None: return
self.step = ts.step + 1
if self.history > 0:
# Copy the current step data to the history data, shift history,
# initialize if needed. The current step data are left intact.
# Note: cannot use self.data.rotate() due to data sharing with
# State.
for ii in range(self.history, 0, -1):
if self.data[ii] is None:
self.data[ii] = nm.empty_like(self.data[0])
self.data[ii][:] = self.data[ii - 1]
# Advance evaluate cache.
for step_cache in six.itervalues(self.evaluate_cache):
steps = sorted(step_cache.keys())
for step in steps:
if step is None:
# Special caches with possible custom advance()
# function.
for key, val in six.iteritems(step_cache[step]):
if hasattr(val, '__advance__'):
val.__advance__(ts, val)
elif -step < self.history:
step_cache[step-1] = step_cache[step]
if len(steps) and (steps[0] is not None):
step_cache.pop(steps[-1])
def init_data(self, step=0):
"""
Initialize the dof vector data of time step `step` to zeros.
"""
if self.is_state_or_parameter():
data = nm.zeros((self.n_dof,), dtype=self.dtype)
self.set_data(data, step=step)
def set_constant(self, val):
"""
Set the variable to a constant value.
"""
data = nm.empty((self.n_dof,), dtype=self.dtype)
data.fill(val)
self.set_data(data)
def set_data(self, data=None, indx=None, step=0,
preserve_caches=False):
"""
Set data (vector of DOF values) of the variable.
Parameters
----------
data : array
The vector of DOF values.
indx : slice, optional
If given, `data[indx]` is used.
step : int, optional
The time history step, 0 (default) = current.
preserve_caches : bool
If True, do not invalidate evaluate caches of the variable.
"""
data = data.ravel()
if indx is None:
indx = slice(0, len(data))
else:
indx = slice(int(indx.start), int(indx.stop))
n_data_dof = indx.stop - indx.start
if self.n_dof != n_data_dof:
msg = 'incompatible data shape! (%d (variable) == %d (data))' \
% (self.n_dof, n_data_dof)
raise ValueError(msg)
elif (step > 0) or (-step >= len(self.data)):
raise ValueError('step %d out of range! ([%d, 0])'
% (step, -(len(self.data) - 1)))
else:
self.data[step] = data
self.indx = indx
if not preserve_caches:
self.invalidate_evaluate_cache(step=step)
def __call__(self, step=0, derivative=None, dt=None):
"""
Return vector of degrees of freedom of the variable.
Parameters
----------
step : int, default 0
The time step (0 means current, -1 previous, ...).
derivative : None or 'dt'
If not None, return time derivative of the DOF vector,
approximated by the backward finite difference.
Returns
-------
vec : array
The DOF vector. If `derivative` is None: a view of the data vector,
otherwise: required derivative of the DOF vector
at time step given by `step`.
Notes
-----
If the previous time step is requested in step 0, the step 0
DOF vector is returned instead.
"""
if derivative is None:
if (self.step == 0) and (step == -1):
data = self.data[0]
else:
data = self.data[-step]
if data is None:
raise ValueError('data of variable are not set! (%s, step %d)' \
% (self.name, step))
return data[self.indx]
else:
if self.history is None:
msg = 'set history type of variable %s to use derivatives!'\
% self.name
raise ValueError(msg)
dt = get_default(dt, self.dt)
return (self(step=step) - self(step=step-1)) / dt
def get_initial_condition(self):
if self.initial_condition is None:
return 0.0
else:
return self.initial_condition
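# A plain-deque sketch (toy data) of the shift performed in Variable.advance():
# the current vector data[0] is copied into the history slot(s); the current
# step itself is left intact for the next time step.
def _history_shift_sketch():
    from collections import deque
    import numpy as nm
    data = deque([nm.array([1.0, 2.0]), None])  # history = 1: [current, previous]
    for ii in range(1, 0, -1):
        if data[ii] is None:
            data[ii] = nm.empty_like(data[0])
        data[ii][:] = data[ii - 1]
    return data  # both entries now hold [1., 2.]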
class FieldVariable(Variable):
"""
A finite element field variable.
field .. field description of variable (borrowed)
"""
def __init__(self, name, kind, field, order=None, primary_var_name=None,
special=None, flags=None, history=None, **kwargs):
Variable.__init__(self, name, kind, order, primary_var_name,
special, flags, history=history, **kwargs)
self._set_field(field)
self.has_field = True
self.has_bc = True
self._variables = None
self.clear_evaluate_cache()
def _set_field(self, field):
"""
Set field of the variable.
Takes reference to a Field instance. Sets dtype according to
field.dtype. Sets `dim` attribute to spatial dimension.
"""
self.is_surface = field.is_surface
self.field = field
self._setup_dofs(field.n_nod, field.n_components, field.val_shape)
self.flags.add(is_field)
self.dtype = field.dtype
self.dim = field.domain.shape.dim
def _get_setter(self, kind, functions, **kwargs):
"""
Get the setter function of the variable and its arguments depending on
the setter kind.
"""
if not (hasattr(self, 'special') and (kind in self.special)):
return
setter_name = self.special[kind]
setter = functions[setter_name]
region = self.field.region
nod_list = self.field.get_dofs_in_region(region)
nods = nm.unique(nod_list)
coors = self.field.get_coor(nods)
if kind == 'setter':
sargs = (kwargs.get('ts'), coors)
elif kind == 'ic':
sargs = (coors, )
skwargs = {'region' : region}
return setter, sargs, skwargs
def get_field(self):
return self.field
def get_mapping(self, region, integral, integration,
get_saved=False, return_key=False):
"""
Get the reference element mapping of the underlying field.
See Also
--------
sfepy.discrete.common.fields.Field.get_mapping
"""
if region is None:
region = self.field.region
out = self.field.get_mapping(region, integral, integration,
get_saved=get_saved,
return_key=return_key)
return out
def get_dof_conn(self, dc_type, is_trace=False, trace_region=None):
"""
Get active dof connectivity of a variable.
Notes
-----
The primary and dual variables must have the same Region.
"""
if self.is_virtual():
var = self.get_primary()
# No primary variable can occur in single term evaluations.
var_name = var.name if var is not None else self.name
else:
var_name = self.name
if not is_trace:
region_name = dc_type.region_name
else:
aux = self.field.domain.regions[dc_type.region_name]
region = aux.get_mirror_region(trace_region)
region_name = region.name
key = (var_name, region_name, dc_type.type, is_trace)
dc = self.adof_conns[key]
return dc
def get_dof_info(self, active=False):
details = Struct(name='field_var_dof_details',
n_nod=self.n_nod,
dpn=self.n_components)
if active:
n_dof = self.n_adof
else:
n_dof = self.n_dof
return n_dof, details
def time_update(self, ts, functions):
"""
Store time step, set variable data for variables with the setter
function.
"""
if ts is not None:
self.dt = ts.dt
if hasattr(self, 'special') and ('setter' in self.special):
setter, sargs, skwargs = self._get_setter('setter', functions,
ts=ts)
self.set_data(setter(*sargs, **skwargs))
output('data of %s set by %s()' % (self.name, setter.name))
def set_from_qp(self, data_qp, integral, step=0):
"""
Set DOFs of variable using values in quadrature points
corresponding to the given integral.
"""
data_vertex = self.field.average_qp_to_vertices(data_qp, integral)
# Field node values.
data = self.field.interp_v_vals_to_n_vals(data_vertex)
data = data.ravel()
self.indx = slice(0, len(data))
self.data[step] = data
def set_from_mesh_vertices(self, data):
"""
Set the variable using values at the mesh vertices.
"""
ndata = self.field.interp_v_vals_to_n_vals(data)
self.set_data(ndata)
def set_from_function(self, fun, step=0):
"""
Set the variable data (the vector of DOF values) using a function of
space coordinates.
Parameters
----------
fun : callable
The function of coordinates returning DOF values of shape
`(n_coor, n_components)`.
step : int, optional
The time history step, 0 (default) = current.
"""
_, vv = self.field.set_dofs(fun, self.field.region, self.n_components)
self.set_data(vv.ravel(), step=step)
def equation_mapping(self, bcs, var_di, ts, functions, problem=None,
warn=False):
"""
Create the mapping of active DOFs from/to all DOFs.
Sets n_adof.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.eq_map = EquationMap('eq_map', self.dofs, var_di)
if bcs is not None:
bcs.canonize_dof_names(self.dofs)
bcs.sort()
active_bcs = self.eq_map.map_equations(bcs, self.field, ts, functions,
problem=problem, warn=warn)
self.n_adof = self.eq_map.n_eq
return active_bcs
def setup_initial_conditions(self, ics, di, functions, warn=False):
"""
Setup of initial conditions.
"""
ics.canonize_dof_names(self.dofs)
ics.sort()
self.initial_condition = nm.zeros((di.n_dof[self.name],),
dtype=self.dtype)
for ic in ics:
region = ic.region
dofs, val = ic.dofs
if warn:
clean_msg = ('warning: ignoring nonexistent' \
' IC node (%s) in ' % self.name)
else:
clean_msg = None
nod_list = self.field.get_dofs_in_region(region)
if len(nod_list) == 0:
continue
fun = get_condition_value(val, functions, 'IC', ic.name)
if isinstance(fun, Function):
aux = fun
fun = lambda coors: aux(coors, ic=ic)
nods, vv = self.field.set_dofs(fun, region, len(dofs), clean_msg)
eq = expand_nodes_to_equations(nods, dofs, self.dofs)
self.initial_condition[eq] = nm.ravel(vv)
def get_data_shape(self, integral, integration='volume', region_name=None):
"""
Get element data dimensions for given approximation.
Parameters
----------
integral : Integral instance
The integral describing used numerical quadrature.
integration : 'volume', 'surface', 'surface_extra', 'point' or 'custom'
The term integration type.
region_name : str
The name of the region of the integral.
Returns
-------
data_shape : 5 ints
The `(n_el, n_qp, dim, n_en, n_comp)` for volume shape kind,
`(n_fa, n_qp, dim, n_fn, n_comp)` for surface shape kind and
`(n_nod, 0, 0, 1, n_comp)` for point shape kind.
Notes
-----
- `n_el`, `n_fa` = number of elements/facets
- `n_qp` = number of quadrature points per element/facet
- `dim` = spatial dimension
- `n_en`, `n_fn` = number of element/facet nodes
- `n_comp` = number of variable components in a point/node
- `n_nod` = number of element nodes
"""
aux = self.field.get_data_shape(integral, integration=integration,
region_name=region_name)
data_shape = aux + (self.n_components,)
return data_shape
def clear_evaluate_cache(self):
"""
Clear current evaluate cache.
"""
self.evaluate_cache = {}
def invalidate_evaluate_cache(self, step=0):
"""
Invalidate variable data in evaluate cache for time step given
by `step` (0 is current, -1 previous, ...).
This should be done, for example, prior to every nonlinear
solver iteration.
"""
for step_cache in six.itervalues(self.evaluate_cache):
for key in list(step_cache.keys()):
if key == step: # Given time step to clear.
step_cache.pop(key)
def evaluate(self, mode='val',
region=None, integral=None, integration=None,
step=0, time_derivative=None, is_trace=False,
trace_region=None, dt=None, bf=None):
"""
Evaluate various quantities related to the variable according to
`mode` in quadrature points defined by `integral`.
The evaluated data are cached in the variable instance in
`evaluate_cache` attribute.
Parameters
----------
mode : one of 'val', 'grad', 'div', 'cauchy_strain'
The evaluation mode.
region : Region instance, optional
The region where the evaluation occurs. If None, the
underlying field region is used.
integral : Integral instance, optional
The integral defining quadrature points in which the
evaluation occurs. If None, the first order volume integral
is created. Must not be None for surface integrations.
integration : 'volume', 'surface', 'surface_extra', or 'point'
The term integration type. If None, it is derived from
`integral`.
step : int, default 0
The time step (0 means current, -1 previous, ...).
time_derivative : None or 'dt'
If not None, return time derivative of the data,
approximated by the backward finite difference.
is_trace : bool, default False
Indicate evaluation of trace of the variable on a boundary
region.
dt : float, optional
The time step to be used if `time_derivative` is `'dt'`. If None,
the `dt` attribute of the variable is used.
bf : Base function, optional
The base function to be used in 'val' mode.
Returns
-------
out : array
The 4-dimensional array of shape
`(n_el, n_qp, n_row, n_col)` with the requested data,
where `n_row`, `n_col` depend on `mode`.
"""
if integration == 'custom':
msg = 'cannot use FieldVariable.evaluate() with custom integration!'
raise ValueError(msg)
step_cache = self.evaluate_cache.setdefault(mode, {})
cache = step_cache.setdefault(step, {})
field = self.field
if region is None:
region = field.region
if is_trace:
region = region.get_mirror_region(trace_region)
if (region is not field.region) and not region.is_empty:
assert_(field.region.contains(region))
if integral is None:
integral = | Integral('aux_1', 1) | sfepy.discrete.integrals.Integral |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable does not have the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign a link to this container to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except AttributeError:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
# Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
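# Typical setup order, as a minimal sketch (assuming `variables` is a
# Variables instance and `ebcs`, `epbcs`, `ts`, `functions` come from the
# problem definition):
#
#   variables.setup_dof_info()
#   active_bcs = variables.equation_mapping(ebcs, epbcs, ts, functions)
#   vec = variables.create_state_vector()
#   variables.apply_ebc(vec)
#
# After the mapping, `variables.adi` holds the active DOF numbering used by
# get_matrix_shape() below.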
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
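# Sketch of the reduced/full round trip (assuming `vec` is a full DOF vector
# with the E(P)BC values already applied by apply_ebc()):
#
#   svec = variables.strip_state_vector(vec)  # drop EBC/EPBC DOFs
#   vec2 = variables.make_full_vec(svec)      # re-insert EBC/EPBC values
#
# `vec2` then reproduces `vec`, including the boundary DOF values.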
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
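# Minimal sketch of splitting and distributing a state vector (assuming
# `variables` is fully set up, i.e. equation_mapping() and
# setup_initial_conditions() have been called):
#
#   vec = variables.create_state_vector()
#   variables.apply_ic(vec)
#   parts = variables.get_state_parts(vec)   # {var_name : DOF sub-vector}
#   variables.set_data(vec)                  # push the vector to variables
#   out = variables.state_to_output(vec)     # data usable by Mesh.write()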
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
if self.primary_var_name == '(set-to-None)':
self.primary_var_name = None
self.dof_name = self.name
else:
self.dof_name = self.primary_var_name
if special is not None:
self.special = special
else:
raise NotImplementedError('unknown variable kind: %s' % kind)
self.kind = kind
def _setup_dofs(self, n_nod, n_components, val_shape):
"""
Setup number of DOFs and DOF names.
"""
self.n_nod = n_nod
self.n_components = n_components
self.val_shape = val_shape
self.n_dof = self.n_nod * self.n_components
self.dofs = [self.dof_name + ('.%d' % ii)
for ii in range(self.n_components)]
def get_primary(self):
"""
Get the corresponding primary variable.
Returns
-------
var : Variable instance
The primary variable, or `self` for state
variables or if `primary_var_name` is None, or None if no other
variables are defined.
"""
if self.is_state():
var = self
elif self.primary_var_name is not None:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
else:
var = self
return var
def get_dual(self):
"""
Get the dual variable.
Returns
-------
var : Variable instance
The primary variable for non-state variables, or the dual
variable for state variables.
"""
if self.is_state():
if ((self._variables is not None)
and (self.dual_var_name in self._variables.names)):
var = self._variables[self.dual_var_name]
else:
var = None
else:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
return var
def is_state(self):
return is_state in self.flags
def is_virtual(self):
return is_virtual in self.flags
def is_parameter(self):
return is_parameter in self.flags
def is_state_or_parameter(self):
return (is_state in self.flags) or (is_parameter in self.flags)
def is_kind(self, kind):
return eval('self.is_%s()' % kind)
def is_real(self):
return self.dtype in real_types
def is_complex(self):
return self.dtype in complex_types
def is_finite(self, step=0, derivative=None, dt=None):
return nm.isfinite(self(step=step, derivative=derivative, dt=dt)).all()
def get_primary_name(self):
if self.is_state():
name = self.name
else:
name = self.primary_var_name
return name
def init_history(self):
"""Initialize data of variables with history."""
if self.history is None: return
self.data = deque((self.history + 1) * [None])
self.step = 0
def time_update(self, ts, functions):
"""Implemented in subclasses."""
pass
def advance(self, ts):
"""
Advance in time the DOF state history. A copy of the DOF vector
is made to prevent history modification.
"""
if self.history is None: return
self.step = ts.step + 1
if self.history > 0:
# Copy the current step data to the history data, shift history,
# initialize if needed. The current step data are left intact.
# Note: cannot use self.data.rotate() due to data sharing with
# State.
for ii in range(self.history, 0, -1):
if self.data[ii] is None:
self.data[ii] = nm.empty_like(self.data[0])
self.data[ii][:] = self.data[ii - 1]
# Advance evaluate cache.
for step_cache in six.itervalues(self.evaluate_cache):
steps = sorted(step_cache.keys())
for step in steps:
if step is None:
# Special caches with possible custom advance()
# function.
for key, val in six.iteritems(step_cache[step]):
if hasattr(val, '__advance__'):
val.__advance__(ts, val)
elif -step < self.history:
step_cache[step-1] = step_cache[step]
if len(steps) and (steps[0] is not None):
step_cache.pop(steps[-1])
def init_data(self, step=0):
"""
Initialize the dof vector data of time step `step` to zeros.
"""
if self.is_state_or_parameter():
data = nm.zeros((self.n_dof,), dtype=self.dtype)
self.set_data(data, step=step)
def set_constant(self, val):
"""
Set the variable to a constant value.
"""
data = nm.empty((self.n_dof,), dtype=self.dtype)
data.fill(val)
self.set_data(data)
def set_data(self, data=None, indx=None, step=0,
preserve_caches=False):
"""
Set data (vector of DOF values) of the variable.
Parameters
----------
data : array
The vector of DOF values.
indx : int, optional
If given, `data[indx]` is used.
step : int, optional
The time history step, 0 (default) = current.
preserve_caches : bool
If True, do not invalidate evaluate caches of the variable.
"""
data = data.ravel()
if indx is None:
indx = slice(0, len(data))
else:
indx = slice(int(indx.start), int(indx.stop))
n_data_dof = indx.stop - indx.start
if self.n_dof != n_data_dof:
msg = 'incompatible data shape! (%d (variable) == %d (data))' \
% (self.n_dof, n_data_dof)
raise ValueError(msg)
elif (step > 0) or (-step >= len(self.data)):
raise ValueError('step %d out of range! ([%d, 0])'
% (step, -(len(self.data) - 1)))
else:
self.data[step] = data
self.indx = indx
if not preserve_caches:
self.invalidate_evaluate_cache(step=step)
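# Minimal usage sketch (assuming `var` is a state Variable and `state` a
# global DOF vector with DofInfo `di`, as in Variables.set_data() above):
#
#   var.set_data(nm.zeros(var.n_dof))             # stand-alone DOF vector
#   var.set_data(state, indx=di.indx[var.name])   # a slice of a state vector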
def __call__(self, step=0, derivative=None, dt=None):
"""
Return vector of degrees of freedom of the variable.
Parameters
----------
step : int, default 0
The time step (0 means current, -1 previous, ...).
derivative : None or 'dt'
If not None, return time derivative of the DOF vector,
approximated by the backward finite difference.
Returns
-------
vec : array
The DOF vector. If `derivative` is None: a view of the data vector,
otherwise: required derivative of the DOF vector
at time step given by `step`.
Notes
-----
If the previous time step is requested in step 0, the step 0
DOF vector is returned instead.
"""
if derivative is None:
if (self.step == 0) and (step == -1):
data = self.data[0]
else:
data = self.data[-step]
if data is None:
raise ValueError('data of variable are not set! (%s, step %d)' \
% (self.name, step))
return data[self.indx]
else:
if self.history is None:
msg = 'set history type of variable %s to use derivatives!'\
% self.name
raise ValueError(msg)
dt = get_default(dt, self.dt)
return (self(step=step) - self(step=step-1)) / dt
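# Sketch of accessing DOF vectors and time derivatives (assuming `u` is a
# state Variable created with history=1 and advanced at least once):
#
#   u_now = u()                   # current DOF vector (a view)
#   u_prev = u(step=-1)           # previous time step
#   du_dt = u(derivative='dt')    # backward difference (u_now - u_prev) / dt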
def get_initial_condition(self):
if self.initial_condition is None:
return 0.0
else:
return self.initial_condition
class FieldVariable(Variable):
"""
A finite element field variable.
field .. field description of variable (borrowed)
"""
def __init__(self, name, kind, field, order=None, primary_var_name=None,
special=None, flags=None, history=None, **kwargs):
Variable.__init__(self, name, kind, order, primary_var_name,
special, flags, history=history, **kwargs)
self._set_field(field)
self.has_field = True
self.has_bc = True
self._variables = None
self.clear_evaluate_cache()
def _set_field(self, field):
"""
Set field of the variable.
Takes reference to a Field instance. Sets dtype according to
field.dtype. Sets `dim` attribute to spatial dimension.
"""
self.is_surface = field.is_surface
self.field = field
self._setup_dofs(field.n_nod, field.n_components, field.val_shape)
self.flags.add(is_field)
self.dtype = field.dtype
self.dim = field.domain.shape.dim
def _get_setter(self, kind, functions, **kwargs):
"""
Get the setter function of the variable and its arguments depending on
the setter kind.
"""
if not (hasattr(self, 'special') and (kind in self.special)):
return
setter_name = self.special[kind]
setter = functions[setter_name]
region = self.field.region
nod_list = self.field.get_dofs_in_region(region)
nods = nm.unique(nod_list)
coors = self.field.get_coor(nods)
if kind == 'setter':
sargs = (kwargs.get('ts'), coors)
elif kind == 'ic':
sargs = (coors, )
skwargs = {'region' : region}
return setter, sargs, skwargs
def get_field(self):
return self.field
def get_mapping(self, region, integral, integration,
get_saved=False, return_key=False):
"""
Get the reference element mapping of the underlying field.
See Also
--------
sfepy.discrete.common.fields.Field.get_mapping
"""
if region is None:
region = self.field.region
out = self.field.get_mapping(region, integral, integration,
get_saved=get_saved,
return_key=return_key)
return out
def get_dof_conn(self, dc_type, is_trace=False, trace_region=None):
"""
Get active dof connectivity of a variable.
Notes
-----
The primary and dual variables must have the same Region.
"""
if self.is_virtual():
var = self.get_primary()
# No primary variable can occur in single term evaluations.
var_name = var.name if var is not None else self.name
else:
var_name = self.name
if not is_trace:
region_name = dc_type.region_name
else:
aux = self.field.domain.regions[dc_type.region_name]
region = aux.get_mirror_region(trace_region)
region_name = region.name
key = (var_name, region_name, dc_type.type, is_trace)
dc = self.adof_conns[key]
return dc
def get_dof_info(self, active=False):
details = Struct(name='field_var_dof_details',
n_nod=self.n_nod,
dpn=self.n_components)
if active:
n_dof = self.n_adof
else:
n_dof = self.n_dof
return n_dof, details
def time_update(self, ts, functions):
"""
Store time step, set variable data for variables with the setter
function.
"""
if ts is not None:
self.dt = ts.dt
if hasattr(self, 'special') and ('setter' in self.special):
setter, sargs, skwargs = self._get_setter('setter', functions,
ts=ts)
self.set_data(setter(*sargs, **skwargs))
output('data of %s set by %s()' % (self.name, setter.name))
def set_from_qp(self, data_qp, integral, step=0):
"""
Set DOFs of variable using values in quadrature points
corresponding to the given integral.
"""
data_vertex = self.field.average_qp_to_vertices(data_qp, integral)
# Field nodes values.
data = self.field.interp_v_vals_to_n_vals(data_vertex)
data = data.ravel()
self.indx = slice(0, len(data))
self.data[step] = data
def set_from_mesh_vertices(self, data):
"""
Set the variable using values at the mesh vertices.
"""
ndata = self.field.interp_v_vals_to_n_vals(data)
self.set_data(ndata)
def set_from_function(self, fun, step=0):
"""
Set the variable data (the vector of DOF values) using a function of
space coordinates.
Parameters
----------
fun : callable
The function of coordinates returning DOF values of shape
`(n_coor, n_components)`.
step : int, optional
The time history step, 0 (default) = current.
"""
_, vv = self.field.set_dofs(fun, self.field.region, self.n_components)
self.set_data(vv.ravel(), step=step)
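# Minimal sketch (assuming `p` is a scalar FieldVariable on a 2D field):
#
#   def init_fun(coors):
#       # one DOF value per coordinate row -> shape (n_coor, 1)
#       return nm.sin(3.0 * coors[:, 0])[:, None]
#
#   p.set_from_function(init_fun)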
def equation_mapping(self, bcs, var_di, ts, functions, problem=None,
warn=False):
"""
Create the mapping of active DOFs from/to all DOFs.
Sets n_adof.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.eq_map = EquationMap('eq_map', self.dofs, var_di)
if bcs is not None:
bcs.canonize_dof_names(self.dofs)
bcs.sort()
active_bcs = self.eq_map.map_equations(bcs, self.field, ts, functions,
problem=problem, warn=warn)
self.n_adof = self.eq_map.n_eq
return active_bcs
def setup_initial_conditions(self, ics, di, functions, warn=False):
"""
Setup of initial conditions.
"""
ics.canonize_dof_names(self.dofs)
ics.sort()
self.initial_condition = nm.zeros((di.n_dof[self.name],),
dtype=self.dtype)
for ic in ics:
region = ic.region
dofs, val = ic.dofs
if warn:
clean_msg = ('warning: ignoring nonexistent' \
' IC node (%s) in ' % self.name)
else:
clean_msg = None
nod_list = self.field.get_dofs_in_region(region)
if len(nod_list) == 0:
continue
fun = get_condition_value(val, functions, 'IC', ic.name)
if isinstance(fun, Function):
aux = fun
fun = lambda coors: aux(coors, ic=ic)
nods, vv = self.field.set_dofs(fun, region, len(dofs), clean_msg)
eq = expand_nodes_to_equations(nods, dofs, self.dofs)
self.initial_condition[eq] = nm.ravel(vv)
def get_data_shape(self, integral, integration='volume', region_name=None):
"""
Get element data dimensions for given approximation.
Parameters
----------
integral : Integral instance
The integral describing used numerical quadrature.
integration : 'volume', 'surface', 'surface_extra', 'point' or 'custom'
The term integration type.
region_name : str
The name of the region of the integral.
Returns
-------
data_shape : 5 ints
The `(n_el, n_qp, dim, n_en, n_comp)` for volume shape kind,
`(n_fa, n_qp, dim, n_fn, n_comp)` for surface shape kind and
`(n_nod, 0, 0, 1, n_comp)` for point shape kind.
Notes
-----
- `n_el`, `n_fa` = number of elements/facets
- `n_qp` = number of quadrature points per element/facet
- `dim` = spatial dimension
- `n_en`, `n_fn` = number of element/facet nodes
- `n_comp` = number of variable components in a point/node
- `n_nod` = number of element nodes
"""
aux = self.field.get_data_shape(integral, integration=integration,
region_name=region_name)
data_shape = aux + (self.n_components,)
return data_shape
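# Sketch of unpacking the data shape (assuming `u` is a FieldVariable and
# `integral` an Integral instance; the region name 'Omega' is illustrative):
#
#   n_el, n_qp, dim, n_en, n_c = u.get_data_shape(integral, 'volume', 'Omega')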
def clear_evaluate_cache(self):
"""
Clear current evaluate cache.
"""
self.evaluate_cache = {}
def invalidate_evaluate_cache(self, step=0):
"""
Invalidate variable data in evaluate cache for time step given
by `step` (0 is current, -1 previous, ...).
This should be done, for example, prior to every nonlinear
solver iteration.
"""
for step_cache in six.itervalues(self.evaluate_cache):
for key in list(step_cache.keys()):
if key == step: # Given time step to clear.
step_cache.pop(key)
def evaluate(self, mode='val',
region=None, integral=None, integration=None,
step=0, time_derivative=None, is_trace=False,
trace_region=None, dt=None, bf=None):
"""
Evaluate various quantities related to the variable according to
`mode` in quadrature points defined by `integral`.
The evaluated data are cached in the variable instance in
`evaluate_cache` attribute.
Parameters
----------
mode : one of 'val', 'grad', 'div', 'cauchy_strain'
The evaluation mode.
region : Region instance, optional
The region where the evaluation occurs. If None, the
underlying field region is used.
integral : Integral instance, optional
The integral defining quadrature points in which the
evaluation occurs. If None, the first order volume integral
is created. Must not be None for surface integrations.
integration : 'volume', 'surface', 'surface_extra', or 'point'
The term integration type. If None, it is derived from
`integral`.
step : int, default 0
The time step (0 means current, -1 previous, ...).
time_derivative : None or 'dt'
If not None, return time derivative of the data,
approximated by the backward finite difference.
is_trace : bool, default False
Indicate evaluation of trace of the variable on a boundary
region.
dt : float, optional
The time step to be used if `time_derivative` is `'dt'`. If None,
the `dt` attribute of the variable is used.
bf : Base function, optional
The base function to be used in 'val' mode.
Returns
-------
out : array
The 4-dimensional array of shape
`(n_el, n_qp, n_row, n_col)` with the requested data,
where `n_row`, `n_col` depend on `mode`.
"""
if integration == 'custom':
msg = 'cannot use FieldVariable.evaluate() with custom integration!'
raise ValueError(msg)
step_cache = self.evaluate_cache.setdefault(mode, {})
cache = step_cache.setdefault(step, {})
field = self.field
if region is None:
region = field.region
if is_trace:
region = region.get_mirror_region(trace_region)
if (region is not field.region) and not region.is_empty:
assert_(field.region.contains(region))
if integral is None:
integral = Integral('aux_1', 1)
if integration is None:
integration = 'volume' if region.can_cells else 'surface'
geo, _, key = field.get_mapping(region, integral, integration,
return_key=True)
key += (time_derivative, is_trace)
if key in cache:
out = cache[key]
else:
vec = self(step=step, derivative=time_derivative, dt=dt)
ct = integration
if integration == 'surface_extra':
ct = 'volume'
conn = field.get_econn(ct, region, is_trace, integration)
shape = self.get_data_shape(integral, integration, region.name)
if self.dtype == nm.float64:
out = eval_real(vec, conn, geo, mode, shape, bf)
else:
out = eval_complex(vec, conn, geo, mode, shape, bf)
cache[key] = out
return out
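# Evaluation sketch (assuming `u` is a FieldVariable with its DOF data set and
# `integral` an Integral instance):
#
#   val = u.evaluate(mode='val', integral=integral)    # values in QPs
#   grad = u.evaluate(mode='grad', integral=integral)  # gradients in QPs
#
# Both arrays have the (n_el, n_qp, n_row, n_col) shape documented above and
# are cached in `u.evaluate_cache`, keyed by mode, step and mapping key.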
def get_state_in_region(self, region, reshape=True, step=0):
"""
Get DOFs of the variable in the given region.
Parameters
----------
region : Region
The selected region.
reshape : bool
If True, reshape the DOF vector to a 2D array with the individual
components as columns. Otherwise a 1D DOF array of the form [all
DOFs in region node 0, all DOFs in region node 1, ...] is returned.
step : int, default 0
The time step (0 means current, -1 previous, ...).
Returns
-------
out : array
The selected DOFs.
"""
nods = self.field.get_dofs_in_region(region, merge=True)
eq = nm.empty((len(nods) * self.n_components,), dtype=nm.int32)
for idof in range(self.n_components):
eq[idof::self.n_components] = self.n_components * nods \
+ idof + self.indx.start
out = self.data[step][eq]
if reshape:
out.shape = (len(nods), self.n_components)
return out
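# Sketch (assuming `u` is a FieldVariable and `gamma` a Region contained in
# its field region):
#
#   dofs = u.get_state_in_region(gamma)                 # shape (n_nod, n_comp)
#   flat = u.get_state_in_region(gamma, reshape=False)  # 1D, node-by-node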
def apply_ebc(self, vec, offset=0, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions to
vector `vec`, starting at `offset`.
"""
eq_map = self.eq_map
ii = offset + eq_map.eq_ebc
# EBC.
if force_values is None:
vec[ii] = eq_map.val_ebc
else:
if isinstance(force_values, dict):
vec[ii] = force_values[self.name]
else:
vec[ii] = force_values
# EPBC.
vec[offset+eq_map.master] = vec[offset+eq_map.slave]
def apply_ic(self, vec, offset=0, force_values=None):
"""
Apply initial conditions to vector `vec`, starting at
`offset`.
"""
ii = slice(offset, offset + self.n_dof)
if force_values is None:
vec[ii] = self.get_initial_condition()
else:
if isinstance(force_values, dict):
vec[ii] = force_values[self.name]
else:
vec[ii] = force_values
def get_reduced(self, vec, offset=0, follow_epbc=False):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
The full vector starts in `vec` at `offset`. If 'follow_epbc' is True,
values of EPBC master DOFs are not simply thrown away, but added to the
corresponding slave DOFs, just like when assembling. For vectors with
state (unknown) variables it should be set to False, for assembled
vectors it should be set to True.
"""
eq_map = self.eq_map
ii = offset + eq_map.eqi
r_vec = vec[ii]
if follow_epbc:
master = offset + eq_map.master
slave = eq_map.eq[eq_map.slave]
ii = slave >= 0
la.assemble1d(r_vec, slave[ii], vec[master[ii]])
return r_vec
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
# Assing EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
if self.primary_var_name == '(set-to-None)':
self.primary_var_name = None
self.dof_name = self.name
else:
self.dof_name = self.primary_var_name
if special is not None:
self.special = special
else:
raise NotImplementedError('unknown variable kind: %s' % kind)
self.kind = kind
def _setup_dofs(self, n_nod, n_components, val_shape):
"""
Setup number of DOFs and DOF names.
"""
self.n_nod = n_nod
self.n_components = n_components
self.val_shape = val_shape
self.n_dof = self.n_nod * self.n_components
self.dofs = [self.dof_name + ('.%d' % ii)
for ii in range(self.n_components)]
def get_primary(self):
"""
Get the corresponding primary variable.
Returns
-------
var : Variable instance
The primary variable, or `self` for state
variables or if `primary_var_name` is None, or None if no other
variables are defined.
"""
if self.is_state():
var = self
elif self.primary_var_name is not None:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
else:
var = self
return var
def get_dual(self):
"""
Get the dual variable.
Returns
-------
var : Variable instance
The primary variable for non-state variables, or the dual
variable for state variables.
"""
if self.is_state():
if ((self._variables is not None)
and (self.dual_var_name in self._variables.names)):
var = self._variables[self.dual_var_name]
else:
var = None
else:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
return var
def is_state(self):
return is_state in self.flags
def is_virtual(self):
return is_virtual in self.flags
def is_parameter(self):
return is_parameter in self.flags
def is_state_or_parameter(self):
return (is_state in self.flags) or (is_parameter in self.flags)
def is_kind(self, kind):
return eval('self.is_%s()' % kind)
def is_real(self):
return self.dtype in real_types
def is_complex(self):
return self.dtype in complex_types
def is_finite(self, step=0, derivative=None, dt=None):
return nm.isfinite(self(step=step, derivative=derivative, dt=dt)).all()
def get_primary_name(self):
if self.is_state():
name = self.name
else:
name = self.primary_var_name
return name
def init_history(self):
"""Initialize data of variables with history."""
if self.history is None: return
self.data = deque((self.history + 1) * [None])
self.step = 0
def time_update(self, ts, functions):
"""Implemented in subclasses."""
pass
def advance(self, ts):
"""
Advance in time the DOF state history. A copy of the DOF vector
is made to prevent history modification.
"""
if self.history is None: return
self.step = ts.step + 1
if self.history > 0:
# Copy the current step data to the history data, shift history,
# initialize if needed. The current step data are left intact.
# Note: cannot use self.data.rotate() due to data sharing with
# State.
for ii in range(self.history, 0, -1):
if self.data[ii] is None:
self.data[ii] = nm.empty_like(self.data[0])
self.data[ii][:] = self.data[ii - 1]
# Advance evaluate cache.
for step_cache in six.itervalues(self.evaluate_cache):
steps = sorted(step_cache.keys())
for step in steps:
if step is None:
# Special caches with possible custom advance()
# function.
for key, val in six.iteritems(step_cache[step]):
if hasattr(val, '__advance__'):
val.__advance__(ts, val)
elif -step < self.history:
step_cache[step-1] = step_cache[step]
if len(steps) and (steps[0] is not None):
step_cache.pop(steps[-1])
def init_data(self, step=0):
"""
Initialize the dof vector data of time step `step` to zeros.
"""
if self.is_state_or_parameter():
data = nm.zeros((self.n_dof,), dtype=self.dtype)
self.set_data(data, step=step)
def set_constant(self, val):
"""
Set the variable to a constant value.
"""
data = nm.empty((self.n_dof,), dtype=self.dtype)
data.fill(val)
self.set_data(data)
def set_data(self, data=None, indx=None, step=0,
preserve_caches=False):
"""
Set data (vector of DOF values) of the variable.
Parameters
----------
data : array
The vector of DOF values.
indx : int, optional
If given, `data[indx]` is used.
step : int, optional
The time history step, 0 (default) = current.
preserve_caches : bool
If True, do not invalidate evaluate caches of the variable.
"""
data = data.ravel()
if indx is None:
indx = slice(0, len(data))
else:
indx = slice(int(indx.start), int(indx.stop))
n_data_dof = indx.stop - indx.start
if self.n_dof != n_data_dof:
msg = 'incompatible data shape! (%d (variable) == %d (data))' \
% (self.n_dof, n_data_dof)
raise ValueError(msg)
elif (step > 0) or (-step >= len(self.data)):
raise ValueError('step %d out of range! ([%d, 0])'
% (step, -(len(self.data) - 1)))
else:
self.data[step] = data
self.indx = indx
if not preserve_caches:
self.invalidate_evaluate_cache(step=step)
def __call__(self, step=0, derivative=None, dt=None):
"""
Return vector of degrees of freedom of the variable.
Parameters
----------
step : int, default 0
The time step (0 means current, -1 previous, ...).
derivative : None or 'dt'
If not None, return time derivative of the DOF vector,
approximated by the backward finite difference.
Returns
-------
vec : array
The DOF vector. If `derivative` is None: a view of the data vector,
otherwise: required derivative of the DOF vector
at time step given by `step`.
Notes
-----
If the previous time step is requested in step 0, the step 0
DOF vector is returned instead.
"""
if derivative is None:
if (self.step == 0) and (step == -1):
data = self.data[0]
else:
data = self.data[-step]
if data is None:
raise ValueError('data of variable are not set! (%s, step %d)' \
% (self.name, step))
return data[self.indx]
else:
if self.history is None:
msg = 'set history type of variable %s to use derivatives!'\
% self.name
raise ValueError(msg)
dt = get_default(dt, self.dt)
return (self(step=step) - self(step=step-1)) / dt
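# --- Illustrative sketch, not part of variables.py ---
# The derivative branch of __call__() above is just a backward finite
# difference of two stored DOF vectors; the arrays and dt below are made up.
import numpy as nm

dt = 0.1
u_cur = nm.array([1.0, 2.0, 3.0])     # what self(step=0) would return
u_prev = nm.array([0.9, 1.8, 2.7])    # what self(step=-1) would return
dudt = (u_cur - u_prev) / dt          # result of self(derivative='dt')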
def get_initial_condition(self):
if self.initial_condition is None:
return 0.0
else:
return self.initial_condition
class FieldVariable(Variable):
"""
A finite element field variable.
field .. field description of variable (borrowed)
"""
def __init__(self, name, kind, field, order=None, primary_var_name=None,
special=None, flags=None, history=None, **kwargs):
Variable.__init__(self, name, kind, order, primary_var_name,
special, flags, history=history, **kwargs)
self._set_field(field)
self.has_field = True
self.has_bc = True
self._variables = None
self.clear_evaluate_cache()
def _set_field(self, field):
"""
Set field of the variable.
Takes reference to a Field instance. Sets dtype according to
field.dtype. Sets `dim` attribute to spatial dimension.
"""
self.is_surface = field.is_surface
self.field = field
self._setup_dofs(field.n_nod, field.n_components, field.val_shape)
self.flags.add(is_field)
self.dtype = field.dtype
self.dim = field.domain.shape.dim
def _get_setter(self, kind, functions, **kwargs):
"""
Get the setter function of the variable and its arguments depending on
the setter kind.
"""
if not (hasattr(self, 'special') and (kind in self.special)):
return
setter_name = self.special[kind]
setter = functions[setter_name]
region = self.field.region
nod_list = self.field.get_dofs_in_region(region)
nods = nm.unique(nod_list)
coors = self.field.get_coor(nods)
if kind == 'setter':
sargs = (kwargs.get('ts'), coors)
elif kind == 'ic':
sargs = (coors, )
skwargs = {'region' : region}
return setter, sargs, skwargs
def get_field(self):
return self.field
def get_mapping(self, region, integral, integration,
get_saved=False, return_key=False):
"""
Get the reference element mapping of the underlying field.
See Also
--------
sfepy.discrete.common.fields.Field.get_mapping
"""
if region is None:
region = self.field.region
out = self.field.get_mapping(region, integral, integration,
get_saved=get_saved,
return_key=return_key)
return out
def get_dof_conn(self, dc_type, is_trace=False, trace_region=None):
"""
Get active dof connectivity of a variable.
Notes
-----
The primary and dual variables must have the same Region.
"""
if self.is_virtual():
var = self.get_primary()
# No primary variable can occur in single term evaluations.
var_name = var.name if var is not None else self.name
else:
var_name = self.name
if not is_trace:
region_name = dc_type.region_name
else:
aux = self.field.domain.regions[dc_type.region_name]
region = aux.get_mirror_region(trace_region)
region_name = region.name
key = (var_name, region_name, dc_type.type, is_trace)
dc = self.adof_conns[key]
return dc
def get_dof_info(self, active=False):
details = Struct(name='field_var_dof_details',
n_nod=self.n_nod,
dpn=self.n_components)
if active:
n_dof = self.n_adof
else:
n_dof = self.n_dof
return n_dof, details
def time_update(self, ts, functions):
"""
Store the time step and set the data of variables that have the setter
function.
"""
if ts is not None:
self.dt = ts.dt
if hasattr(self, 'special') and ('setter' in self.special):
setter, sargs, skwargs = self._get_setter('setter', functions,
ts=ts)
self.set_data(setter(*sargs, **skwargs))
output('data of %s set by %s()' % (self.name, setter.name))
def set_from_qp(self, data_qp, integral, step=0):
"""
Set DOFs of variable using values in quadrature points
corresponding to the given integral.
"""
data_vertex = self.field.average_qp_to_vertices(data_qp, integral)
# Field nodes values.
data = self.field.interp_v_vals_to_n_vals(data_vertex)
data = data.ravel()
self.indx = slice(0, len(data))
self.data[step] = data
def set_from_mesh_vertices(self, data):
"""
Set the variable using values at the mesh vertices.
"""
ndata = self.field.interp_v_vals_to_n_vals(data)
self.set_data(ndata)
def set_from_function(self, fun, step=0):
"""
Set the variable data (the vector of DOF values) using a function of
space coordinates.
Parameters
----------
fun : callable
The function of coordinates returning DOF values of shape
`(n_coor, n_components)`.
step : int, optional
The time history step, 0 (default) = current.
"""
_, vv = self.field.set_dofs(fun, self.field.region, self.n_components)
self.set_data(vv.ravel(), step=step)
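# Hypothetical example, not part of variables.py: a setter function with the
# (n_coor, n_components) return shape that set_from_function() expects.
import numpy as nm

def ramp_x(coors):
    """Scalar DOF values growing linearly with the x coordinate."""
    return coors[:, 0][:, None]

# usage sketch, assuming `u` is a scalar FieldVariable:
# u.set_from_function(ramp_x)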
def equation_mapping(self, bcs, var_di, ts, functions, problem=None,
warn=False):
"""
Create the mapping of active DOFs from/to all DOFs.
Sets n_adof.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.eq_map = EquationMap('eq_map', self.dofs, var_di)
if bcs is not None:
bcs.canonize_dof_names(self.dofs)
bcs.sort()
active_bcs = self.eq_map.map_equations(bcs, self.field, ts, functions,
problem=problem, warn=warn)
self.n_adof = self.eq_map.n_eq
return active_bcs
def setup_initial_conditions(self, ics, di, functions, warn=False):
"""
Setup of initial conditions.
"""
ics.canonize_dof_names(self.dofs)
ics.sort()
self.initial_condition = nm.zeros((di.n_dof[self.name],),
dtype=self.dtype)
for ic in ics:
region = ic.region
dofs, val = ic.dofs
if warn:
clean_msg = ('warning: ignoring nonexistent' \
' IC node (%s) in ' % self.name)
else:
clean_msg = None
nod_list = self.field.get_dofs_in_region(region)
if len(nod_list) == 0:
continue
fun = get_condition_value(val, functions, 'IC', ic.name)
if isinstance(fun, Function):
aux = fun
fun = lambda coors: aux(coors, ic=ic)
nods, vv = self.field.set_dofs(fun, region, len(dofs), clean_msg)
eq = expand_nodes_to_equations(nods, dofs, self.dofs)
self.initial_condition[eq] = nm.ravel(vv)
def get_data_shape(self, integral, integration='volume', region_name=None):
"""
Get element data dimensions for given approximation.
Parameters
----------
integral : Integral instance
The integral describing used numerical quadrature.
integration : 'volume', 'surface', 'surface_extra', 'point' or 'custom'
The term integration type.
region_name : str
The name of the region of the integral.
Returns
-------
data_shape : 5 ints
The `(n_el, n_qp, dim, n_en, n_comp)` for volume shape kind,
`(n_fa, n_qp, dim, n_fn, n_comp)` for surface shape kind and
`(n_nod, 0, 0, 1, n_comp)` for point shape kind.
Notes
-----
- `n_el`, `n_fa` = number of elements/facets
- `n_qp` = number of quadrature points per element/facet
- `dim` = spatial dimension
- `n_en`, `n_fn` = number of element/facet nodes
- `n_comp` = number of variable components in a point/node
- `n_nod` = number of element nodes
"""
aux = self.field.get_data_shape(integral, integration=integration,
region_name=region_name)
data_shape = aux + (self.n_components,)
return data_shape
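# Sketch with made-up numbers, not part of variables.py: unpacking the volume
# data shape returned above, e.g. a 3-component field on trilinear hexahedra.
n_el, n_qp, dim, n_en, n_comp = 1000, 8, 3, 8, 3
dofs_per_element = n_en * n_comp   # 24 element DOFs in this hypothetical case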
def clear_evaluate_cache(self):
"""
Clear current evaluate cache.
"""
self.evaluate_cache = {}
def invalidate_evaluate_cache(self, step=0):
"""
Invalidate variable data in evaluate cache for time step given
by `step` (0 is current, -1 previous, ...).
This should be done, for example, prior to every nonlinear
solver iteration.
"""
for step_cache in six.itervalues(self.evaluate_cache):
for key in list(step_cache.keys()):
if key == step: # Given time step to clear.
step_cache.pop(key)
def evaluate(self, mode='val',
region=None, integral=None, integration=None,
step=0, time_derivative=None, is_trace=False,
trace_region=None, dt=None, bf=None):
"""
Evaluate various quantities related to the variable according to
`mode` in quadrature points defined by `integral`.
The evaluated data are cached in the variable instance in
`evaluate_cache` attribute.
Parameters
----------
mode : one of 'val', 'grad', 'div', 'cauchy_strain'
The evaluation mode.
region : Region instance, optional
The region where the evaluation occurs. If None, the
underlying field region is used.
integral : Integral instance, optional
The integral defining quadrature points in which the
evaluation occurs. If None, the first order volume integral
is created. Must not be None for surface integrations.
integration : 'volume', 'surface', 'surface_extra', or 'point'
The term integration type. If None, it is derived from
`integral`.
step : int, default 0
The time step (0 means current, -1 previous, ...).
time_derivative : None or 'dt'
If not None, return time derivative of the data,
approximated by the backward finite difference.
is_trace : bool, default False
Indicate evaluation of trace of the variable on a boundary
region.
dt : float, optional
The time step to be used if `time_derivative` is `'dt'`. If None,
the `dt` attribute of the variable is used.
bf : Base function, optional
The base function to be used in 'val' mode.
Returns
-------
out : array
The 4-dimensional array of shape
`(n_el, n_qp, n_row, n_col)` with the requested data,
where `n_row`, `n_col` depend on `mode`.
"""
if integration == 'custom':
msg = 'cannot use FieldVariable.evaluate() with custom integration!'
raise ValueError(msg)
step_cache = self.evaluate_cache.setdefault(mode, {})
cache = step_cache.setdefault(step, {})
field = self.field
if region is None:
region = field.region
if is_trace:
region = region.get_mirror_region(trace_region)
if (region is not field.region) and not region.is_empty:
assert_(field.region.contains(region))
if integral is None:
integral = Integral('aux_1', 1)
if integration is None:
integration = 'volume' if region.can_cells else 'surface'
geo, _, key = field.get_mapping(region, integral, integration,
return_key=True)
key += (time_derivative, is_trace)
if key in cache:
out = cache[key]
else:
vec = self(step=step, derivative=time_derivative, dt=dt)
ct = integration
if integration == 'surface_extra':
ct = 'volume'
conn = field.get_econn(ct, region, is_trace, integration)
shape = self.get_data_shape(integral, integration, region.name)
if self.dtype == nm.float64:
out = eval_real(vec, conn, geo, mode, shape, bf)
else:
out = eval_complex(vec, conn, geo, mode, shape, bf)
cache[key] = out
return out
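# Minimal sketch, not part of variables.py, of the two-level cache layout that
# evaluate() maintains: evaluate_cache[mode][step][key] -> array in QPs.
# The key tuple below is made up; the real key is the field mapping key
# extended by (time_derivative, is_trace) as in the code above.
evaluate_cache = {}
step_cache = evaluate_cache.setdefault('val', {})
cache = step_cache.setdefault(0, {})
cache[('Omega', 2, 'volume', None, False)] = 'array of QP values would go here'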
def get_state_in_region(self, region, reshape=True, step=0):
"""
Get DOFs of the variable in the given region.
Parameters
----------
region : Region
The selected region.
reshape : bool
If True, reshape the DOF vector to a 2D array with the individual
components as columns. Otherwise a 1D DOF array of the form [all
DOFs in region node 0, all DOFs in region node 1, ...] is returned.
step : int, default 0
The time step (0 means current, -1 previous, ...).
Returns
-------
out : array
The selected DOFs.
"""
nods = self.field.get_dofs_in_region(region, merge=True)
eq = nm.empty((len(nods) * self.n_components,), dtype=nm.int32)
for idof in range(self.n_components):
eq[idof::self.n_components] = self.n_components * nods \
+ idof + self.indx.start
out = self.data[step][eq]
if reshape:
out.shape = (len(nods), self.n_components)
return out
def apply_ebc(self, vec, offset=0, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions to
vector `vec`, starting at `offset`.
"""
eq_map = self.eq_map
ii = offset + eq_map.eq_ebc
# EBC,
if force_values is None:
vec[ii] = eq_map.val_ebc
else:
if isinstance(force_values, dict):
vec[ii] = force_values[self.name]
else:
vec[ii] = force_values
# EPBC.
vec[offset+eq_map.master] = vec[offset+eq_map.slave]
def apply_ic(self, vec, offset=0, force_values=None):
"""
Apply initial conditions to vector `vec`, starting at
`offset`.
"""
ii = slice(offset, offset + self.n_dof)
if force_values is None:
vec[ii] = self.get_initial_condition()
else:
if isinstance(force_values, dict):
vec[ii] = force_values[self.name]
else:
vec[ii] = force_values
def get_reduced(self, vec, offset=0, follow_epbc=False):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
The full vector starts in `vec` at `offset`. If 'follow_epbc' is True,
values of EPBC master DOFs are not simply thrown away, but added to the
corresponding slave DOFs, just like when assembling. For vectors with
state (unknown) variables it should be set to False, for assembled
vectors it should be set to True.
"""
eq_map = self.eq_map
ii = offset + eq_map.eqi
r_vec = vec[ii]
if follow_epbc:
master = offset + eq_map.master
slave = eq_map.eq[eq_map.slave]
ii = slave >= 0
la.assemble1d(r_vec, slave[ii], vec[master[ii]])
return r_vec
def get_full(self, r_vec, r_offset=0, force_value=None,
vec=None, offset=0):
"""
Get the full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Notes
-----
The reduced vector starts in `r_vec` at `r_offset`.
Passing a `force_value` overrides the EBC values. Optionally,
`vec` argument can be provided to store the full vector (in
place) starting at `offset`.
"""
if vec is None:
vec = nm.empty(self.n_dof, dtype=r_vec.dtype)
else:
vec = vec[offset:offset+self.n_dof]
eq_map = self.eq_map
r_vec = r_vec[r_offset:r_offset+eq_map.n_eq]
# EBC.
vec[eq_map.eq_ebc] = get_default(force_value, eq_map.val_ebc)
# Reduced vector values.
vec[eq_map.eqi] = r_vec
# EPBC.
vec[eq_map.master] = vec[eq_map.slave]
unused_dofs = self.field.get('unused_dofs')
if unused_dofs is not None:
vec[:] = self.field.restore_substituted(vec)
return vec
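# Standalone numpy sketch, not part of variables.py, of the reduce/expand
# round trip implemented by get_reduced() and get_full(); all indices are
# hypothetical and EPBCs are left out for brevity.
import numpy as nm

eqi = nm.array([1, 2, 4])       # active (free) DOFs
eq_ebc = nm.array([0, 3])       # DOFs fixed by Dirichlet conditions
val_ebc = nm.array([0.0, 1.0])

full = nm.array([0.0, 3.0, 4.0, 1.0, 5.0])
reduced = full[eqi]             # analogue of get_reduced()

rebuilt = nm.empty(5)
rebuilt[eq_ebc] = val_ebc       # EBC values, as in get_full()
rebuilt[eqi] = reduced          # reduced vector values
assert nm.allclose(rebuilt, full)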
def create_output(self, vec=None, key=None, extend=True, fill_value=None,
linearization=None):
"""
Convert the DOF vector to a dictionary of output data usable by
Mesh.write().
Parameters
----------
vec : array, optional
An alternative DOF vector to be used instead of the variable
DOF vector.
key : str, optional
The key to be used in the output dictionary instead of the
variable name.
extend : bool
Extend the DOF values to cover the whole domain.
fill_value : float or complex
The value used to fill the missing DOF values if `extend` is True.
linearization : Struct or None
The linearization configuration for higher order approximations.
"""
linearization = get_default(linearization, | Struct(kind='strip') | sfepy.base.base.Struct |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable does not have an equation mapping, a trivial mapping is
assumed and a connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
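# Tiny numpy sketch, not part of variables.py, of the `-1 - <DOF number>`
# encoding mentioned in the notes above (active_only=False); numbers are made up.
import numpy as nm

offset = 10
eq_ebc = nm.array([2, 5])            # constrained DOFs of some variable
coded = -1 - (eq_ebc + offset)       # stored in the connectivity: [-13, -16]
recovered = -1 - coded               # original global DOFs again: [12, 15]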
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
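# Worked example with made-up data, using create_adof_conn() defined above:
# one element with 3 nodes, 2 DOFs per node, trivial equation mapping.
import numpy as nm

eq = nm.arange(8, dtype=nm.int32)             # 4 nodes * 2 DOFs, node-by-node
conn = nm.array([[0, 1, 2]], dtype=nm.int32)
adc = create_adof_conn(eq, conn, dpn=2, offset=0)
# adc == [[0, 2, 4, 1, 3, 5]]: DOF 0 of all local nodes first, then DOF 1,
# i.e. the DOF-by-DOF local ordering described in the docstring.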
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
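# Shape-only sketch with hypothetical dimensions, using expand_basis() defined
# above: a one-component basis expanded for dpn=2 becomes block-diagonal.
import numpy as nm

basis = nm.ones((5, 4, 1, 3))       # (..., n_c=1, n_bf=3), leading dims made up
ebasis = expand_basis(basis, dpn=2)
# ebasis.shape == (5, 4, 2, 6); each component row uses its own block of the
# 6 = n_bf * dpn expanded basis functions.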
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign a reference to `self` to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
| output('lcbc:', bc.name) | sfepy.base.base.output |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable does not have an equation mapping, a trivial mapping is
assumed and a connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign a reference to `self` to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
# Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
| output('IC data of %s set by %s()' % (var.name, setter.name)) | sfepy.base.base.output |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable does not have an equation mapping, a trivial mapping is
assumed and a connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign a reference to `self` to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
# Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
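# Dense numpy analogue, not part of variables.py, of the LCBC expansion step in
# make_full_vec(); the real mtx_lcbc is sparse and `*` is its mat-vec product.
import numpy as nm

mtx_lcbc = nm.array([[1.0, 0.0],
                     [0.0, 1.0],
                     [0.5, 0.5]])      # 3 active DOFs driven by 2 LCBC DOFs
vec_lcbc = nm.array([0.0, 0.0, 0.1])   # hypothetical rhs contribution
svec = nm.array([2.0, 4.0])
expanded = mtx_lcbc @ svec + vec_lcbc  # -> [2.0, 4.0, 3.1]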
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
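# Minimal sketch, not part of variables.py: get_state_parts() slices a packed
# state vector per variable via self.di.indx; the layout below is made up.
import numpy as nm

vec = nm.arange(7, dtype=nm.float64)
indx = {'u': slice(0, 4), 'p': slice(4, 7)}
parts = {name: vec[sl] for name, sl in indx.items()}
# parts['u'] -> [0, 1, 2, 3], parts['p'] -> [4, 5, 6]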
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
| assert_(history >= 0) | sfepy.base.base.assert_ |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable does not have an equation mapping, a trivial mapping is
assumed and a connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
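# Illustrative note (added; not part of the original module): assuming a
# hypothetical state variable 'u' defined on a region 'Omega', the dictionary
# returned above is keyed by tuples such as ('u', 'Omega', <dc_type.type>, False),
# each mapping to an integer array of shape (n_el, n_ep * n_components).
# With active_only=False, an E(P)BC-constrained DOF `d` is stored as `-1 - d`,
# so the original DOF number can be recovered as `-(stored + 1)`.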
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
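# Minimal worked example (added for illustration; not in the original module):
#     eq   = nm.arange(6, dtype=nm.int32)           # trivial mapping, no BCs
#     conn = nm.array([[0, 1, 2]], dtype=nm.int32)  # one element, three nodes
#     create_adof_conn(eq, conn, dpn=2, offset=0)
#     # -> array([[0, 2, 4, 1, 3, 5]])
# The first component at all three local nodes comes first, then the second
# component, while the global (state vector) numbering interleaves the two
# components node by node.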
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
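# Shape sketch (added for illustration): for a scalar basis of shape
# (n_cell, n_qp, 1, n_bf) and dpn = 3, the result has shape
# (n_cell, n_qp, 3, 3 * n_bf), with the scalar basis placed block-diagonally so
# that the columns follow the DOF-by-DOF local ordering of create_adof_conn().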
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
        Link state variables with their corresponding virtual variables,
        and assign a link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
            except AttributeError:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
        if (ts is None) or (ts.step == 0):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
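    # Note (added for clarity): the operator assembled above is applied in
    # make_full_vec() below as `svec = self.mtx_lcbc * svec + self.vec_lcbc`,
    # i.e. it expands the LCBC-reduced DOF vector back to the plain reduced
    # (active) DOF numbering before the E(P)BC values are filled in.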
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
        # Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
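    # Note (added for clarity): after this call, `self.di` holds the full DOF
    # numbering and `self.adi` the active (E(P)BC-reduced) numbering when
    # `active_only` is True; get_matrix_shape() below then reports the matrix
    # shape as (active virtual DOFs, active state DOFs).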
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
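    # Note (added for clarity): the full state vector above has self.di.ptr[-1]
    # entries, while the stripped one has self.adi.ptr[-1] entries, i.e. it is
    # the same vector without the E(P)BC-constrained DOFs.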
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
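    # Usage sketch (illustrative only): strip_state_vector() and make_full_vec()
    # are complementary; assuming `variables` with the equation mapping set up,
    # no active LCBCs, and `vec` a full DOF vector satisfying the E(P)BCs:
    #     svec = variables.strip_state_vector(vec)  # drop constrained DOFs
    #     full = variables.make_full_vec(svec)      # re-apply the EBC values
    # leaves the unconstrained entries of `vec` unchanged in `full`.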
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
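    # Usage sketch (illustrative only): both forms described in the docstring
    # are accepted, e.g. for a hypothetical state variable 'u':
    #     variables.set_data({'u': u_vec})   # per-variable DOF vectors
    #     variables.set_data(state_vec)      # one full state vector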
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
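    # Usage sketch (illustrative only): the returned dictionary can be passed
    # to Mesh.write(), e.g.
    #     out = variables.state_to_output(vec)
    #     mesh.write('out.vtk', out=out)
    # where `mesh` is assumed to be the underlying FE mesh instance.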
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = | get_default(conf.like, '(set-to-None)') | sfepy.base.base.get_default |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
    If a variable does not have an equation mapping, a trivial mapping is
    assumed and a connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
        if key not in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
            if key not in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
        Link state variables with their corresponding virtual variables,
        and assign a link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
            except AttributeError:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
        if (ts is None) or (ts.step == 0):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
        # Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
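    # Configuration sketch (added for illustration; the names are hypothetical):
    # a `conf` item consumed above typically looks like
    #     {'name': 'u', 'kind': 'unknown field', 'field': 'displacement',
    #      'order': 0, 'history': 1}
    # for a state variable, and
    #     {'name': 'v', 'kind': 'test field', 'field': 'displacement', 'dual': 'u'}
    # for the corresponding test (virtual) variable.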
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = | get_default(primary_var_name, None, msg) | sfepy.base.base.get_default |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
    If a variable does not have an equation mapping, a trivial mapping is
    assumed and a connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
        if key not in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
            if key not in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
        Link state variables with their corresponding virtual variables,
        and assign a link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
            except AttributeError:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
        if (ts is None) or (ts.step == 0):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
        # Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
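# Minimal sketch of the intended use, assuming a Variables container
# ``variables``, a state vector ``vec`` and a mesh (names are illustrative):
# >>> out = variables.state_to_output(vec)
# >>> mesh.write('out.vtk', io='auto', out=out)
# Each entry of ``out`` is a Struct created by Variable.create_output(), so
# the dictionary can be passed to Mesh.write() as the docstring states.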
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind must be 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be an integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
if self.primary_var_name == '(set-to-None)':
self.primary_var_name = None
self.dof_name = self.name
else:
self.dof_name = self.primary_var_name
if special is not None:
self.special = special
else:
raise NotImplementedError('unknown variable kind: %s' % kind)
self.kind = kind
def _setup_dofs(self, n_nod, n_components, val_shape):
"""
Setup number of DOFs and DOF names.
"""
self.n_nod = n_nod
self.n_components = n_components
self.val_shape = val_shape
self.n_dof = self.n_nod * self.n_components
self.dofs = [self.dof_name + ('.%d' % ii)
for ii in range(self.n_components)]
def get_primary(self):
"""
Get the corresponding primary variable.
Returns
-------
var : Variable instance
The primary variable: `self` for state variables or when
`primary_var_name` is None; otherwise the variable named by
`primary_var_name`, or None if it is not among the defined variables.
"""
if self.is_state():
var = self
elif self.primary_var_name is not None:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
else:
var = self
return var
def get_dual(self):
"""
Get the dual variable.
Returns
-------
var : Variable instance
The primary variable for non-state variables, or the dual
variable for state variables.
"""
if self.is_state():
if ((self._variables is not None)
and (self.dual_var_name in self._variables.names)):
var = self._variables[self.dual_var_name]
else:
var = None
else:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
return var
def is_state(self):
return is_state in self.flags
def is_virtual(self):
return is_virtual in self.flags
def is_parameter(self):
return is_parameter in self.flags
def is_state_or_parameter(self):
return (is_state in self.flags) or (is_parameter in self.flags)
def is_kind(self, kind):
return eval('self.is_%s()' % kind)
def is_real(self):
return self.dtype in real_types
def is_complex(self):
return self.dtype in complex_types
def is_finite(self, step=0, derivative=None, dt=None):
return nm.isfinite(self(step=step, derivative=derivative, dt=dt)).all()
def get_primary_name(self):
if self.is_state():
name = self.name
else:
name = self.primary_var_name
return name
def init_history(self):
"""Initialize data of variables with history."""
if self.history is None: return
self.data = deque((self.history + 1) * [None])
self.step = 0
def time_update(self, ts, functions):
"""Implemented in subclasses."""
pass
def advance(self, ts):
"""
Advance the DOF state history in time. A copy of the DOF vector
is made to prevent modification of the history.
"""
if self.history is None: return
self.step = ts.step + 1
if self.history > 0:
# Copy the current step data to the history data, shift history,
# initialize if needed. The current step data are left intact.
# Note: cannot use self.data.rotate() due to data sharing with
# State.
for ii in range(self.history, 0, -1):
if self.data[ii] is None:
self.data[ii] = nm.empty_like(self.data[0])
self.data[ii][:] = self.data[ii - 1]
# Advance evaluate cache.
for step_cache in six.itervalues(self.evaluate_cache):
steps = sorted(step_cache.keys())
for step in steps:
if step is None:
# Special caches with possible custom advance()
# function.
for key, val in six.iteritems(step_cache[step]):
if hasattr(val, '__advance__'):
val.__advance__(ts, val)
elif -step < self.history:
step_cache[step-1] = step_cache[step]
if len(steps) and (steps[0] is not None):
step_cache.pop(steps[-1])
def init_data(self, step=0):
"""
Initialize the dof vector data of time step `step` to zeros.
"""
if self.is_state_or_parameter():
data = nm.zeros((self.n_dof,), dtype=self.dtype)
self.set_data(data, step=step)
def set_constant(self, val):
"""
Set the variable to a constant value.
"""
data = nm.empty((self.n_dof,), dtype=self.dtype)
data.fill(val)
self.set_data(data)
def set_data(self, data=None, indx=None, step=0,
preserve_caches=False):
"""
Set data (vector of DOF values) of the variable.
Parameters
----------
data : array
The vector of DOF values.
indx : slice, optional
If given, `data[indx]` is used as the DOF values of the variable.
step : int, optional
The time history step, 0 (default) = current.
preserve_caches : bool
If True, do not invalidate evaluate caches of the variable.
"""
data = data.ravel()
if indx is None:
indx = slice(0, len(data))
else:
indx = slice(int(indx.start), int(indx.stop))
n_data_dof = indx.stop - indx.start
if self.n_dof != n_data_dof:
msg = 'incompatible data shape! (%d (variable) == %d (data))' \
% (self.n_dof, n_data_dof)
raise ValueError(msg)
elif (step > 0) or (-step >= len(self.data)):
raise ValueError('step %d out of range! ([%d, 0])'
% (step, -(len(self.data) - 1)))
else:
self.data[step] = data
self.indx = indx
if not preserve_caches:
self.invalidate_evaluate_cache(step=step)
def __call__(self, step=0, derivative=None, dt=None):
"""
Return vector of degrees of freedom of the variable.
Parameters
----------
step : int, default 0
The time step (0 means current, -1 previous, ...).
derivative : None or 'dt'
If not None, return time derivative of the DOF vector,
approximated by the backward finite difference.
Returns
-------
vec : array
The DOF vector. If `derivative` is None: a view of the data vector,
otherwise: required derivative of the DOF vector
at time step given by `step`.
Notes
-----
If the previous time step (step -1) is requested while the variable
is at its initial step, the step 0 DOF vector is returned instead.
"""
if derivative is None:
if (self.step == 0) and (step == -1):
data = self.data[0]
else:
data = self.data[-step]
if data is None:
raise ValueError('data of variable are not set! (%s, step %d)' \
% (self.name, step))
return data[self.indx]
else:
if self.history is None:
msg = 'set history type of variable %s to use derivatives!'\
% self.name
raise ValueError(msg)
dt = get_default(dt, self.dt)
return (self(step=step) - self(step=step-1)) / dt
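# Example of reading DOF values (a sketch; ``var`` stands for any state
# variable whose configuration enables history):
# >>> u0 = var()                      # current DOF vector (step 0)
# >>> u1 = var(step=-1)               # previous step, if history is kept
# >>> du = var(derivative='dt')       # backward difference (u0 - u1) / dt
# Requesting a derivative without history raises ValueError, see above.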
def get_initial_condition(self):
if self.initial_condition is None:
return 0.0
else:
return self.initial_condition
class FieldVariable(Variable):
"""
A finite element field variable.
field .. field description of variable (borrowed)
"""
def __init__(self, name, kind, field, order=None, primary_var_name=None,
special=None, flags=None, history=None, **kwargs):
Variable.__init__(self, name, kind, order, primary_var_name,
special, flags, history=history, **kwargs)
self._set_field(field)
self.has_field = True
self.has_bc = True
self._variables = None
self.clear_evaluate_cache()
def _set_field(self, field):
"""
Set field of the variable.
Takes reference to a Field instance. Sets dtype according to
field.dtype. Sets `dim` attribute to spatial dimension.
"""
self.is_surface = field.is_surface
self.field = field
self._setup_dofs(field.n_nod, field.n_components, field.val_shape)
self.flags.add(is_field)
self.dtype = field.dtype
self.dim = field.domain.shape.dim
def _get_setter(self, kind, functions, **kwargs):
"""
Get the setter function of the variable and its arguments depending on
the setter kind.
"""
if not (hasattr(self, 'special') and (kind in self.special)):
return
setter_name = self.special[kind]
setter = functions[setter_name]
region = self.field.region
nod_list = self.field.get_dofs_in_region(region)
nods = nm.unique(nod_list)
coors = self.field.get_coor(nods)
if kind == 'setter':
sargs = (kwargs.get('ts'), coors)
elif kind == 'ic':
sargs = (coors, )
skwargs = {'region' : region}
return setter, sargs, skwargs
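# A hedged example of the 'setter' mechanism: the variable configuration may
# carry ``special={'setter' : 'set_pressure'}`` and the user function of that
# name (registered in Functions) is then called as setter(ts, coors,
# region=region) for kind 'setter', or setter(coors, region=region) for 'ic',
# matching ``sargs``/``skwargs`` above. For a scalar field it could look like:
# >>> def set_pressure(ts, coors, region=None):
# ...     return nm.zeros(coors.shape[0], dtype=nm.float64)
# The function name 'set_pressure' is illustrative only.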
def get_field(self):
return self.field
def get_mapping(self, region, integral, integration,
get_saved=False, return_key=False):
"""
Get the reference element mapping of the underlying field.
See Also
--------
sfepy.discrete.common.fields.Field.get_mapping
"""
if region is None:
region = self.field.region
out = self.field.get_mapping(region, integral, integration,
get_saved=get_saved,
return_key=return_key)
return out
def get_dof_conn(self, dc_type, is_trace=False, trace_region=None):
"""
Get active dof connectivity of a variable.
Notes
-----
The primary and dual variables must have the same Region.
"""
if self.is_virtual():
var = self.get_primary()
# No primary variable can occur in single term evaluations.
var_name = var.name if var is not None else self.name
else:
var_name = self.name
if not is_trace:
region_name = dc_type.region_name
else:
aux = self.field.domain.regions[dc_type.region_name]
region = aux.get_mirror_region(trace_region)
region_name = region.name
key = (var_name, region_name, dc_type.type, is_trace)
dc = self.adof_conns[key]
return dc
def get_dof_info(self, active=False):
details = Struct(name='field_var_dof_details',
n_nod=self.n_nod,
dpn=self.n_components)
if active:
n_dof = self.n_adof
else:
n_dof = self.n_dof
return n_dof, details
def time_update(self, ts, functions):
"""
Store time step, set variable data for variables with the setter
function.
"""
if ts is not None:
self.dt = ts.dt
if hasattr(self, 'special') and ('setter' in self.special):
setter, sargs, skwargs = self._get_setter('setter', functions,
ts=ts)
self.set_data(setter(*sargs, **skwargs))
output('data of %s set by %s()' % (self.name, setter.name))
def set_from_qp(self, data_qp, integral, step=0):
"""
Set DOFs of variable using values in quadrature points
corresponding to the given integral.
"""
data_vertex = self.field.average_qp_to_vertices(data_qp, integral)
# Field node values.
data = self.field.interp_v_vals_to_n_vals(data_vertex)
data = data.ravel()
self.indx = slice(0, len(data))
self.data[step] = data
def set_from_mesh_vertices(self, data):
"""
Set the variable using values at the mesh vertices.
"""
ndata = self.field.interp_v_vals_to_n_vals(data)
self.set_data(ndata)
def set_from_function(self, fun, step=0):
"""
Set the variable data (the vector of DOF values) using a function of
space coordinates.
Parameters
----------
fun : callable
The function of coordinates returning DOF values of shape
`(n_coor, n_components)`.
step : int, optional
The time history step, 0 (default) = current.
"""
_, vv = self.field.set_dofs(fun, self.field.region, self.n_components)
self.set_data(vv.ravel(), step=step)
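# Usage sketch (the function name is illustrative): the callable receives the
# coordinates of the field DOF nodes and returns per-node values.
# >>> def ic_fun(coors):
# ...     return nm.sin(coors[:, 0])[:, None]   # shape (n_coor, n_components)
# >>> var.set_from_function(ic_fun)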
def equation_mapping(self, bcs, var_di, ts, functions, problem=None,
warn=False):
"""
Create the mapping of active DOFs from/to all DOFs.
Sets n_adof.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.eq_map = EquationMap('eq_map', self.dofs, var_di)
if bcs is not None:
bcs.canonize_dof_names(self.dofs)
bcs.sort()
active_bcs = self.eq_map.map_equations(bcs, self.field, ts, functions,
problem=problem, warn=warn)
self.n_adof = self.eq_map.n_eq
return active_bcs
def setup_initial_conditions(self, ics, di, functions, warn=False):
"""
Setup of initial conditions.
"""
ics.canonize_dof_names(self.dofs)
ics.sort()
self.initial_condition = nm.zeros((di.n_dof[self.name],),
dtype=self.dtype)
for ic in ics:
region = ic.region
dofs, val = ic.dofs
if warn:
clean_msg = ('warning: ignoring nonexistent' \
' IC node (%s) in ' % self.name)
else:
clean_msg = None
nod_list = self.field.get_dofs_in_region(region)
if len(nod_list) == 0:
continue
fun = get_condition_value(val, functions, 'IC', ic.name)
if isinstance(fun, Function):
aux = fun
fun = lambda coors: aux(coors, ic=ic)
nods, vv = self.field.set_dofs(fun, region, len(dofs), clean_msg)
eq = expand_nodes_to_equations(nods, dofs, self.dofs)
self.initial_condition[eq] = nm.ravel(vv)
def get_data_shape(self, integral, integration='volume', region_name=None):
"""
Get element data dimensions for given approximation.
Parameters
----------
integral : Integral instance
The integral describing used numerical quadrature.
integration : 'volume', 'surface', 'surface_extra', 'point' or 'custom'
The term integration type.
region_name : str
The name of the region of the integral.
Returns
-------
data_shape : 5 ints
The `(n_el, n_qp, dim, n_en, n_comp)` for volume shape kind,
`(n_fa, n_qp, dim, n_fn, n_comp)` for surface shape kind and
`(n_nod, 0, 0, 1, n_comp)` for point shape kind.
Notes
-----
- `n_el`, `n_fa` = number of elements/facets
- `n_qp` = number of quadrature points per element/facet
- `dim` = spatial dimension
- `n_en`, `n_fn` = number of element/facet nodes
- `n_comp` = number of variable components in a point/node
- `n_nod` = number of element nodes
"""
aux = self.field.get_data_shape(integral, integration=integration,
region_name=region_name)
data_shape = aux + (self.n_components,)
return data_shape
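# For instance, for a volume integration the returned tuple can be unpacked
# as below (this mirrors how terms use it; the names are illustrative):
# >>> n_el, n_qp, dim, n_en, n_comp = var.get_data_shape(integral, 'volume',
# ...                                                     region.name)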
def clear_evaluate_cache(self):
"""
Clear current evaluate cache.
"""
self.evaluate_cache = {}
def invalidate_evaluate_cache(self, step=0):
"""
Invalidate variable data in evaluate cache for time step given
by `step` (0 is current, -1 previous, ...).
This should be done, for example, prior to every nonlinear
solver iteration.
"""
for step_cache in six.itervalues(self.evaluate_cache):
for key in list(step_cache.keys()):
if key == step: # Given time step to clear.
step_cache.pop(key)
def evaluate(self, mode='val',
region=None, integral=None, integration=None,
step=0, time_derivative=None, is_trace=False,
trace_region=None, dt=None, bf=None):
"""
Evaluate various quantities related to the variable according to
`mode` in quadrature points defined by `integral`.
The evaluated data are cached in the variable instance in
`evaluate_cache` attribute.
Parameters
----------
mode : one of 'val', 'grad', 'div', 'cauchy_strain'
The evaluation mode.
region : Region instance, optional
The region where the evaluation occurs. If None, the
underlying field region is used.
integral : Integral instance, optional
The integral defining quadrature points in which the
evaluation occurs. If None, the first order volume integral
is created. Must not be None for surface integrations.
integration : 'volume', 'surface', 'surface_extra', or 'point'
The term integration type. If None, it is derived from
`integral`.
step : int, default 0
The time step (0 means current, -1 previous, ...).
time_derivative : None or 'dt'
If not None, return time derivative of the data,
approximated by the backward finite difference.
is_trace : bool, default False
Indicate evaluation of trace of the variable on a boundary
region.
dt : float, optional
The time step to be used if `time_derivative` is `'dt'`. If None,
the `dt` attribute of the variable is used.
bf : Base function, optional
The base function to be used in 'val' mode.
Returns
-------
out : array
The 4-dimensional array of shape
`(n_el, n_qp, n_row, n_col)` with the requested data,
where `n_row`, `n_col` depend on `mode`.
"""
if integration == 'custom':
msg = 'cannot use FieldVariable.evaluate() with custom integration!'
raise ValueError(msg)
step_cache = self.evaluate_cache.setdefault(mode, {})
cache = step_cache.setdefault(step, {})
field = self.field
if region is None:
region = field.region
if is_trace:
region = region.get_mirror_region(trace_region)
if (region is not field.region) and not region.is_empty:
assert_(field.region.contains(region))
if integral is None:
integral = Integral('aux_1', 1)
if integration is None:
integration = 'volume' if region.can_cells else 'surface'
geo, _, key = field.get_mapping(region, integral, integration,
return_key=True)
key += (time_derivative, is_trace)
if key in cache:
out = cache[key]
else:
vec = self(step=step, derivative=time_derivative, dt=dt)
ct = integration
if integration == 'surface_extra':
ct = 'volume'
conn = field.get_econn(ct, region, is_trace, integration)
shape = self.get_data_shape(integral, integration, region.name)
if self.dtype == nm.float64:
out = | eval_real(vec, conn, geo, mode, shape, bf) | sfepy.discrete.evaluate_variable.eval_real |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable does not have an equation mapping, a trivial mapping is
assumed and a connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
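# A tiny worked example of the orderings described above, assuming dpn = 2
# and an identity equation mapping (eq = arange):
# >>> conn = nm.array([[0, 1, 2]], dtype=nm.int32)   # one element, 3 nodes
# >>> eq = nm.arange(6, dtype=nm.int32)              # 3 nodes * 2 DOFs
# >>> create_adof_conn(eq, conn, 2, 0)
# array([[0, 2, 4, 1, 3, 5]], dtype=int32)
# Locally, u_0 of all nodes comes first, then u_1, while the global numbering
# is node-by-node (node 0: DOFs 0, 1; node 1: DOFs 2, 3; ...).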
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
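# A quick shape check, assuming a 4-D basis array as used by the FE mappings:
# >>> basis = nm.ones((5, 1, 1, 4))     # (n_qp, 1, n_c=1, n_bf=4)
# >>> expand_basis(basis, 3).shape
# (5, 1, 3, 12)
# Each DOF component gets its own block of base functions, consistent with
# create_adof_conn() above.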
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
# Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
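# Typical call sequence (a sketch only; in practice Problem/Equations drive
# this): map the equations, then set up LCBC operators and connectivities.
# >>> active_bcs = variables.equation_mapping(ebcs, epbcs, ts, functions)
# >>> variables.setup_lcbc_operators(lcbcs, ts=ts, functions=functions)
# Here ebcs/epbcs/lcbcs/ts/functions are the usual Conditions, TimeStepper
# and Functions objects built elsewhere; the names are illustrative.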
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
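# Round-trip sketch between the full and reduced DOF vectors (illustrative):
# >>> svec = variables.strip_state_vector(vec)   # drop E(P)BC DOFs
# >>> vec2 = variables.make_full_vec(svec)       # re-apply EBC/EPBC values
# Without LCBCs, ``vec2`` equals ``vec`` on the free DOFs and carries the
# prescribed EBC values (or ``force_value``) on the constrained DOFs.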
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind must be 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be an integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
if self.primary_var_name == '(set-to-None)':
self.primary_var_name = None
self.dof_name = self.name
else:
self.dof_name = self.primary_var_name
if special is not None:
self.special = special
else:
raise NotImplementedError('unknown variable kind: %s' % kind)
self.kind = kind
def _setup_dofs(self, n_nod, n_components, val_shape):
"""
Setup number of DOFs and DOF names.
"""
self.n_nod = n_nod
self.n_components = n_components
self.val_shape = val_shape
self.n_dof = self.n_nod * self.n_components
self.dofs = [self.dof_name + ('.%d' % ii)
for ii in range(self.n_components)]
def get_primary(self):
"""
Get the corresponding primary variable.
Returns
-------
var : Variable instance
The primary variable: `self` for state variables or when
`primary_var_name` is None; otherwise the variable named by
`primary_var_name`, or None if it is not among the defined variables.
"""
if self.is_state():
var = self
elif self.primary_var_name is not None:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
else:
var = self
return var
def get_dual(self):
"""
Get the dual variable.
Returns
-------
var : Variable instance
The primary variable for non-state variables, or the dual
variable for state variables.
"""
if self.is_state():
if ((self._variables is not None)
and (self.dual_var_name in self._variables.names)):
var = self._variables[self.dual_var_name]
else:
var = None
else:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
return var
def is_state(self):
return is_state in self.flags
def is_virtual(self):
return is_virtual in self.flags
def is_parameter(self):
return is_parameter in self.flags
def is_state_or_parameter(self):
return (is_state in self.flags) or (is_parameter in self.flags)
def is_kind(self, kind):
return eval('self.is_%s()' % kind)
def is_real(self):
return self.dtype in real_types
def is_complex(self):
return self.dtype in complex_types
def is_finite(self, step=0, derivative=None, dt=None):
return nm.isfinite(self(step=step, derivative=derivative, dt=dt)).all()
def get_primary_name(self):
if self.is_state():
name = self.name
else:
name = self.primary_var_name
return name
def init_history(self):
"""Initialize data of variables with history."""
if self.history is None: return
self.data = deque((self.history + 1) * [None])
self.step = 0
def time_update(self, ts, functions):
"""Implemented in subclasses."""
pass
def advance(self, ts):
"""
Advance the DOF state history in time. A copy of the DOF vector
is made to prevent modification of the history.
"""
if self.history is None: return
self.step = ts.step + 1
if self.history > 0:
# Copy the current step data to the history data, shift history,
# initialize if needed. The current step data are left intact.
# Note: cannot use self.data.rotate() due to data sharing with
# State.
for ii in range(self.history, 0, -1):
if self.data[ii] is None:
self.data[ii] = nm.empty_like(self.data[0])
self.data[ii][:] = self.data[ii - 1]
# Advance evaluate cache.
for step_cache in six.itervalues(self.evaluate_cache):
steps = sorted(step_cache.keys())
for step in steps:
if step is None:
# Special caches with possible custom advance()
# function.
for key, val in six.iteritems(step_cache[step]):
if hasattr(val, '__advance__'):
val.__advance__(ts, val)
elif -step < self.history:
step_cache[step-1] = step_cache[step]
if len(steps) and (steps[0] is not None):
step_cache.pop(steps[-1])
def init_data(self, step=0):
"""
Initialize the dof vector data of time step `step` to zeros.
"""
if self.is_state_or_parameter():
data = nm.zeros((self.n_dof,), dtype=self.dtype)
self.set_data(data, step=step)
def set_constant(self, val):
"""
Set the variable to a constant value.
"""
data = nm.empty((self.n_dof,), dtype=self.dtype)
data.fill(val)
self.set_data(data)
def set_data(self, data=None, indx=None, step=0,
preserve_caches=False):
"""
Set data (vector of DOF values) of the variable.
Parameters
----------
data : array
The vector of DOF values.
indx : slice, optional
If given, `data[indx]` is used as the DOF values of the variable.
step : int, optional
The time history step, 0 (default) = current.
preserve_caches : bool
If True, do not invalidate evaluate caches of the variable.
"""
data = data.ravel()
if indx is None:
indx = slice(0, len(data))
else:
indx = slice(int(indx.start), int(indx.stop))
n_data_dof = indx.stop - indx.start
if self.n_dof != n_data_dof:
msg = 'incompatible data shape! (%d (variable) == %d (data))' \
% (self.n_dof, n_data_dof)
raise ValueError(msg)
elif (step > 0) or (-step >= len(self.data)):
raise ValueError('step %d out of range! ([%d, 0])'
% (step, -(len(self.data) - 1)))
else:
self.data[step] = data
self.indx = indx
if not preserve_caches:
self.invalidate_evaluate_cache(step=step)
def __call__(self, step=0, derivative=None, dt=None):
"""
Return vector of degrees of freedom of the variable.
Parameters
----------
step : int, default 0
The time step (0 means current, -1 previous, ...).
derivative : None or 'dt'
If not None, return time derivative of the DOF vector,
approximated by the backward finite difference.
Returns
-------
vec : array
The DOF vector. If `derivative` is None: a view of the data vector,
otherwise: required derivative of the DOF vector
at time step given by `step`.
Notes
-----
If the previous time step (step -1) is requested while the variable
is at its initial step, the step 0 DOF vector is returned instead.
"""
if derivative is None:
if (self.step == 0) and (step == -1):
data = self.data[0]
else:
data = self.data[-step]
if data is None:
raise ValueError('data of variable are not set! (%s, step %d)' \
% (self.name, step))
return data[self.indx]
else:
if self.history is None:
msg = 'set history type of variable %s to use derivatives!'\
% self.name
raise ValueError(msg)
dt = get_default(dt, self.dt)
return (self(step=step) - self(step=step-1)) / dt
def get_initial_condition(self):
if self.initial_condition is None:
return 0.0
else:
return self.initial_condition
class FieldVariable(Variable):
"""
A finite element field variable.
field .. field description of variable (borrowed)
"""
def __init__(self, name, kind, field, order=None, primary_var_name=None,
special=None, flags=None, history=None, **kwargs):
Variable.__init__(self, name, kind, order, primary_var_name,
special, flags, history=history, **kwargs)
self._set_field(field)
self.has_field = True
self.has_bc = True
self._variables = None
self.clear_evaluate_cache()
def _set_field(self, field):
"""
Set field of the variable.
Takes reference to a Field instance. Sets dtype according to
field.dtype. Sets `dim` attribute to spatial dimension.
"""
self.is_surface = field.is_surface
self.field = field
self._setup_dofs(field.n_nod, field.n_components, field.val_shape)
self.flags.add(is_field)
self.dtype = field.dtype
self.dim = field.domain.shape.dim
def _get_setter(self, kind, functions, **kwargs):
"""
Get the setter function of the variable and its arguments depending on
the setter kind.
"""
if not (hasattr(self, 'special') and (kind in self.special)):
return
setter_name = self.special[kind]
setter = functions[setter_name]
region = self.field.region
nod_list = self.field.get_dofs_in_region(region)
nods = nm.unique(nod_list)
coors = self.field.get_coor(nods)
if kind == 'setter':
sargs = (kwargs.get('ts'), coors)
elif kind == 'ic':
sargs = (coors, )
skwargs = {'region' : region}
return setter, sargs, skwargs
def get_field(self):
return self.field
def get_mapping(self, region, integral, integration,
get_saved=False, return_key=False):
"""
Get the reference element mapping of the underlying field.
See Also
--------
sfepy.discrete.common.fields.Field.get_mapping
"""
if region is None:
region = self.field.region
out = self.field.get_mapping(region, integral, integration,
get_saved=get_saved,
return_key=return_key)
return out
def get_dof_conn(self, dc_type, is_trace=False, trace_region=None):
"""
Get active dof connectivity of a variable.
Notes
-----
The primary and dual variables must have the same Region.
"""
if self.is_virtual():
var = self.get_primary()
# No primary variable can occur in single term evaluations.
var_name = var.name if var is not None else self.name
else:
var_name = self.name
if not is_trace:
region_name = dc_type.region_name
else:
aux = self.field.domain.regions[dc_type.region_name]
region = aux.get_mirror_region(trace_region)
region_name = region.name
key = (var_name, region_name, dc_type.type, is_trace)
dc = self.adof_conns[key]
return dc
def get_dof_info(self, active=False):
details = Struct(name='field_var_dof_details',
n_nod=self.n_nod,
dpn=self.n_components)
if active:
n_dof = self.n_adof
else:
n_dof = self.n_dof
return n_dof, details
def time_update(self, ts, functions):
"""
Store time step, set variable data for variables with the setter
function.
"""
if ts is not None:
self.dt = ts.dt
if hasattr(self, 'special') and ('setter' in self.special):
setter, sargs, skwargs = self._get_setter('setter', functions,
ts=ts)
self.set_data(setter(*sargs, **skwargs))
output('data of %s set by %s()' % (self.name, setter.name))
def set_from_qp(self, data_qp, integral, step=0):
"""
Set DOFs of variable using values in quadrature points
corresponding to the given integral.
"""
data_vertex = self.field.average_qp_to_vertices(data_qp, integral)
# Field node values.
data = self.field.interp_v_vals_to_n_vals(data_vertex)
data = data.ravel()
self.indx = slice(0, len(data))
self.data[step] = data
def set_from_mesh_vertices(self, data):
"""
Set the variable using values at the mesh vertices.
"""
ndata = self.field.interp_v_vals_to_n_vals(data)
self.set_data(ndata)
def set_from_function(self, fun, step=0):
"""
Set the variable data (the vector of DOF values) using a function of
space coordinates.
Parameters
----------
fun : callable
The function of coordinates returning DOF values of shape
`(n_coor, n_components)`.
step : int, optional
The time history step, 0 (default) = current.
"""
_, vv = self.field.set_dofs(fun, self.field.region, self.n_components)
self.set_data(vv.ravel(), step=step)
def equation_mapping(self, bcs, var_di, ts, functions, problem=None,
warn=False):
"""
Create the mapping of active DOFs from/to all DOFs.
Sets n_adof.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.eq_map = EquationMap('eq_map', self.dofs, var_di)
if bcs is not None:
bcs.canonize_dof_names(self.dofs)
bcs.sort()
active_bcs = self.eq_map.map_equations(bcs, self.field, ts, functions,
problem=problem, warn=warn)
self.n_adof = self.eq_map.n_eq
return active_bcs
def setup_initial_conditions(self, ics, di, functions, warn=False):
"""
Setup of initial conditions.
"""
ics.canonize_dof_names(self.dofs)
ics.sort()
self.initial_condition = nm.zeros((di.n_dof[self.name],),
dtype=self.dtype)
for ic in ics:
region = ic.region
dofs, val = ic.dofs
if warn:
clean_msg = ('warning: ignoring nonexistent' \
' IC node (%s) in ' % self.name)
else:
clean_msg = None
nod_list = self.field.get_dofs_in_region(region)
if len(nod_list) == 0:
continue
fun = get_condition_value(val, functions, 'IC', ic.name)
if isinstance(fun, Function):
aux = fun
fun = lambda coors: aux(coors, ic=ic)
nods, vv = self.field.set_dofs(fun, region, len(dofs), clean_msg)
eq = expand_nodes_to_equations(nods, dofs, self.dofs)
self.initial_condition[eq] = nm.ravel(vv)
def get_data_shape(self, integral, integration='volume', region_name=None):
"""
Get element data dimensions for given approximation.
Parameters
----------
integral : Integral instance
The integral describing used numerical quadrature.
integration : 'volume', 'surface', 'surface_extra', 'point' or 'custom'
The term integration type.
region_name : str
The name of the region of the integral.
Returns
-------
data_shape : 5 ints
The `(n_el, n_qp, dim, n_en, n_comp)` for volume shape kind,
`(n_fa, n_qp, dim, n_fn, n_comp)` for surface shape kind and
`(n_nod, 0, 0, 1, n_comp)` for point shape kind.
Notes
-----
- `n_el`, `n_fa` = number of elements/facets
- `n_qp` = number of quadrature points per element/facet
- `dim` = spatial dimension
- `n_en`, `n_fn` = number of element/facet nodes
- `n_comp` = number of variable components in a point/node
- `n_nod` = number of element nodes
"""
aux = self.field.get_data_shape(integral, integration=integration,
region_name=region_name)
data_shape = aux + (self.n_components,)
return data_shape
def clear_evaluate_cache(self):
"""
Clear current evaluate cache.
"""
self.evaluate_cache = {}
def invalidate_evaluate_cache(self, step=0):
"""
Invalidate variable data in evaluate cache for time step given
by `step` (0 is current, -1 previous, ...).
This should be done, for example, prior to every nonlinear
solver iteration.
"""
for step_cache in six.itervalues(self.evaluate_cache):
for key in list(step_cache.keys()):
if key == step: # Given time step to clear.
step_cache.pop(key)
def evaluate(self, mode='val',
region=None, integral=None, integration=None,
step=0, time_derivative=None, is_trace=False,
trace_region=None, dt=None, bf=None):
"""
Evaluate various quantities related to the variable according to
`mode` in quadrature points defined by `integral`.
The evaluated data are cached in the variable instance in
`evaluate_cache` attribute.
Parameters
----------
mode : one of 'val', 'grad', 'div', 'cauchy_strain'
The evaluation mode.
region : Region instance, optional
The region where the evaluation occurs. If None, the
underlying field region is used.
integral : Integral instance, optional
The integral defining quadrature points in which the
evaluation occurs. If None, the first order volume integral
is created. Must not be None for surface integrations.
integration : 'volume', 'surface', 'surface_extra', or 'point'
The term integration type. If None, it is derived from
`integral`.
step : int, default 0
The time step (0 means current, -1 previous, ...).
time_derivative : None or 'dt'
If not None, return time derivative of the data,
approximated by the backward finite difference.
is_trace : bool, default False
Indicate evaluation of trace of the variable on a boundary
region.
dt : float, optional
The time step to be used if `time_derivative` is `'dt'`. If None,
the `dt` attribute of the variable is used.
bf : Base function, optional
The base function to be used in 'val' mode.
Returns
-------
out : array
The 4-dimensional array of shape
`(n_el, n_qp, n_row, n_col)` with the requested data,
where `n_row`, `n_col` depend on `mode`.
"""
if integration == 'custom':
msg = 'cannot use FieldVariable.evaluate() with custom integration!'
raise ValueError(msg)
step_cache = self.evaluate_cache.setdefault(mode, {})
cache = step_cache.setdefault(step, {})
field = self.field
if region is None:
region = field.region
if is_trace:
region = region.get_mirror_region(trace_region)
if (region is not field.region) and not region.is_empty:
assert_(field.region.contains(region))
if integral is None:
integral = Integral('aux_1', 1)
if integration is None:
integration = 'volume' if region.can_cells else 'surface'
geo, _, key = field.get_mapping(region, integral, integration,
return_key=True)
key += (time_derivative, is_trace)
if key in cache:
out = cache[key]
else:
vec = self(step=step, derivative=time_derivative, dt=dt)
ct = integration
if integration == 'surface_extra':
ct = 'volume'
conn = field.get_econn(ct, region, is_trace, integration)
shape = self.get_data_shape(integral, integration, region.name)
if self.dtype == nm.float64:
out = eval_real(vec, conn, geo, mode, shape, bf)
else:
out = | eval_complex(vec, conn, geo, mode, shape, bf) | sfepy.discrete.evaluate_variable.eval_complex |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable does not have an equation mapping, a trivial mapping is assumed
and a connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
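# A minimal worked example of the DOF-by-DOF local ordering described in the
# docstring above (hypothetical values, trivial equation mapping):
# >>> eq = nm.arange(6, dtype=nm.int32)
# >>> conn = nm.array([[0, 1, 2]])
# >>> create_adof_conn(eq, conn, dpn=2, offset=0)
# array([[0, 2, 4, 1, 3, 5]])
# i.e. first the u_0 DOFs of the three local nodes, then their u_1 DOFs.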
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
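# Illustration of the expansion (assumed shapes only): a scalar basis of shape
# (n_el, n_qp, 1, n_bf) expanded for dpn = 2 becomes (n_el, n_qp, 2, 2 * n_bf),
# with the original basis placed block-wise along the new component axis:
# >>> b = nm.ones((1, 4, 1, 3))
# >>> expand_basis(b, 2).shape
# (1, 4, 2, 6)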
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
# Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
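# A hedged usage sketch (assuming `variables` is a fully set-up Variables
# instance): strip_state_vector() and make_full_vec() are inverse operations
# up to the constrained DOFs, which make_full_vec() re-imposes from the
# E(P)BC data:
# >>> svec = variables.strip_state_vector(vec)
# >>> vec2 = variables.make_full_vec(svec)  # vec2 satisfies the E(P)BCs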
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
if self.primary_var_name == '(set-to-None)':
self.primary_var_name = None
self.dof_name = self.name
else:
self.dof_name = self.primary_var_name
if special is not None:
self.special = special
else:
raise NotImplementedError('unknown variable kind: %s' % kind)
self.kind = kind
def _setup_dofs(self, n_nod, n_components, val_shape):
"""
Setup number of DOFs and DOF names.
"""
self.n_nod = n_nod
self.n_components = n_components
self.val_shape = val_shape
self.n_dof = self.n_nod * self.n_components
self.dofs = [self.dof_name + ('.%d' % ii)
for ii in range(self.n_components)]
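# For example (hypothetical variable): a displacement variable named 'u' with
# n_components = 2 gets self.dofs == ['u.0', 'u.1'] and n_dof == 2 * n_nod.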
def get_primary(self):
"""
Get the corresponding primary variable.
Returns
-------
var : Variable instance
The primary variable: `self` for state variables or when
`primary_var_name` is None; otherwise the variable named by
`primary_var_name`, or None if it is not among the defined variables.
"""
if self.is_state():
var = self
elif self.primary_var_name is not None:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
else:
var = self
return var
def get_dual(self):
"""
Get the dual variable.
Returns
-------
var : Variable instance
The primary variable for non-state variables, or the dual
variable for state variables.
"""
if self.is_state():
if ((self._variables is not None)
and (self.dual_var_name in self._variables.names)):
var = self._variables[self.dual_var_name]
else:
var = None
else:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
return var
def is_state(self):
return is_state in self.flags
def is_virtual(self):
return is_virtual in self.flags
def is_parameter(self):
return is_parameter in self.flags
def is_state_or_parameter(self):
return (is_state in self.flags) or (is_parameter in self.flags)
def is_kind(self, kind):
return eval('self.is_%s()' % kind)
def is_real(self):
return self.dtype in real_types
def is_complex(self):
return self.dtype in complex_types
def is_finite(self, step=0, derivative=None, dt=None):
return nm.isfinite(self(step=step, derivative=derivative, dt=dt)).all()
def get_primary_name(self):
if self.is_state():
name = self.name
else:
name = self.primary_var_name
return name
def init_history(self):
"""Initialize data of variables with history."""
if self.history is None: return
self.data = deque((self.history + 1) * [None])
self.step = 0
def time_update(self, ts, functions):
"""Implemented in subclasses."""
pass
def advance(self, ts):
"""
Advance the DOF state history in time. A copy of the DOF vector
is made to prevent modification of the history.
"""
if self.history is None: return
self.step = ts.step + 1
if self.history > 0:
# Copy the current step data to the history data, shift history,
# initialize if needed. The current step data are left intact.
# Note: cannot use self.data.rotate() due to data sharing with
# State.
for ii in range(self.history, 0, -1):
if self.data[ii] is None:
self.data[ii] = nm.empty_like(self.data[0])
self.data[ii][:] = self.data[ii - 1]
# Advance evaluate cache.
for step_cache in six.itervalues(self.evaluate_cache):
steps = sorted(step_cache.keys())
for step in steps:
if step is None:
# Special caches with possible custom advance()
# function.
for key, val in six.iteritems(step_cache[step]):
if hasattr(val, '__advance__'):
val.__advance__(ts, val)
elif -step < self.history:
step_cache[step-1] = step_cache[step]
if len(steps) and (steps[0] is not None):
step_cache.pop(steps[-1])
def init_data(self, step=0):
"""
Initialize the dof vector data of time step `step` to zeros.
"""
if self.is_state_or_parameter():
data = nm.zeros((self.n_dof,), dtype=self.dtype)
self.set_data(data, step=step)
def set_constant(self, val):
"""
Set the variable to a constant value.
"""
data = nm.empty((self.n_dof,), dtype=self.dtype)
data.fill(val)
self.set_data(data)
def set_data(self, data=None, indx=None, step=0,
preserve_caches=False):
"""
Set data (vector of DOF values) of the variable.
Parameters
----------
data : array
The vector of DOF values.
indx : int, optional
If given, `data[indx]` is used.
step : int, optional
The time history step, 0 (default) = current.
preserve_caches : bool
If True, do not invalidate evaluate caches of the variable.
"""
data = data.ravel()
if indx is None:
indx = slice(0, len(data))
else:
indx = slice(int(indx.start), int(indx.stop))
n_data_dof = indx.stop - indx.start
if self.n_dof != n_data_dof:
msg = 'incompatible data shape! (%d (variable) == %d (data))' \
% (self.n_dof, n_data_dof)
raise ValueError(msg)
elif (step > 0) or (-step >= len(self.data)):
raise ValueError('step %d out of range! ([%d, 0])'
% (step, -(len(self.data) - 1)))
else:
self.data[step] = data
self.indx = indx
if not preserve_caches:
self.invalidate_evaluate_cache(step=step)
def __call__(self, step=0, derivative=None, dt=None):
"""
Return vector of degrees of freedom of the variable.
Parameters
----------
step : int, default 0
The time step (0 means current, -1 previous, ...).
derivative : None or 'dt'
If not None, return time derivative of the DOF vector,
approximated by the backward finite difference.
Returns
-------
vec : array
The DOF vector. If `derivative` is None: a view of the data vector,
otherwise: required derivative of the DOF vector
at time step given by `step`.
Notes
-----
If the previous time step is requested in step 0, the step 0
DOF vector is returned instead.
"""
if derivative is None:
if (self.step == 0) and (step == -1):
data = self.data[0]
else:
data = self.data[-step]
if data is None:
raise ValueError('data of variable are not set! (%s, step %d)' \
% (self.name, step))
return data[self.indx]
else:
if self.history is None:
msg = 'set history type of variable %s to use derivatives!'\
% self.name
raise ValueError(msg)
dt = get_default(dt, self.dt)
return (self(step=step) - self(step=step-1)) / dt
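# Worked sketch of the derivative branch above (hypothetical variable `u`
# with history enabled): u(derivative='dt') returns the backward difference
# (u(step=0) - u(step=-1)) / dt, a first-order approximation of du/dt at the
# current step.
# >>> du_dt = u(derivative='dt')           # uses u.dt
# >>> du_dt = u(derivative='dt', dt=1e-3)  # explicit time step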
def get_initial_condition(self):
if self.initial_condition is None:
return 0.0
else:
return self.initial_condition
class FieldVariable(Variable):
"""
A finite element field variable.
field .. field description of variable (borrowed)
"""
def __init__(self, name, kind, field, order=None, primary_var_name=None,
special=None, flags=None, history=None, **kwargs):
Variable.__init__(self, name, kind, order, primary_var_name,
special, flags, history=history, **kwargs)
self._set_field(field)
self.has_field = True
self.has_bc = True
self._variables = None
self.clear_evaluate_cache()
def _set_field(self, field):
"""
Set field of the variable.
Takes a reference to a Field instance. Sets dtype according to
field.dtype and the `dim` attribute to the spatial dimension.
"""
self.is_surface = field.is_surface
self.field = field
self._setup_dofs(field.n_nod, field.n_components, field.val_shape)
self.flags.add(is_field)
self.dtype = field.dtype
self.dim = field.domain.shape.dim
def _get_setter(self, kind, functions, **kwargs):
"""
Get the setter function of the variable and its arguments depending on
the setter kind.
"""
if not (hasattr(self, 'special') and (kind in self.special)):
return
setter_name = self.special[kind]
setter = functions[setter_name]
region = self.field.region
nod_list = self.field.get_dofs_in_region(region)
nods = nm.unique(nod_list)
coors = self.field.get_coor(nods)
if kind == 'setter':
sargs = (kwargs.get('ts'), coors)
elif kind == 'ic':
sargs = (coors, )
skwargs = {'region' : region}
return setter, sargs, skwargs
def get_field(self):
return self.field
def get_mapping(self, region, integral, integration,
get_saved=False, return_key=False):
"""
Get the reference element mapping of the underlying field.
See Also
--------
sfepy.discrete.common.fields.Field.get_mapping
"""
if region is None:
region = self.field.region
out = self.field.get_mapping(region, integral, integration,
get_saved=get_saved,
return_key=return_key)
return out
def get_dof_conn(self, dc_type, is_trace=False, trace_region=None):
"""
Get active dof connectivity of a variable.
Notes
-----
The primary and dual variables must have the same Region.
"""
if self.is_virtual():
var = self.get_primary()
# No primary variable can occur in single term evaluations.
var_name = var.name if var is not None else self.name
else:
var_name = self.name
if not is_trace:
region_name = dc_type.region_name
else:
aux = self.field.domain.regions[dc_type.region_name]
region = aux.get_mirror_region(trace_region)
region_name = region.name
key = (var_name, region_name, dc_type.type, is_trace)
dc = self.adof_conns[key]
return dc
def get_dof_info(self, active=False):
details = Struct(name='field_var_dof_details',
n_nod=self.n_nod,
dpn=self.n_components)
if active:
n_dof = self.n_adof
else:
n_dof = self.n_dof
return n_dof, details
def time_update(self, ts, functions):
"""
Store time step, set variable data for variables with the setter
function.
"""
if ts is not None:
self.dt = ts.dt
if hasattr(self, 'special') and ('setter' in self.special):
setter, sargs, skwargs = self._get_setter('setter', functions,
ts=ts)
self.set_data(setter(*sargs, **skwargs))
output('data of %s set by %s()' % (self.name, setter.name))
def set_from_qp(self, data_qp, integral, step=0):
"""
Set DOFs of variable using values in quadrature points
corresponding to the given integral.
"""
data_vertex = self.field.average_qp_to_vertices(data_qp, integral)
# Field node values.
data = self.field.interp_v_vals_to_n_vals(data_vertex)
data = data.ravel()
self.indx = slice(0, len(data))
self.data[step] = data
def set_from_mesh_vertices(self, data):
"""
Set the variable using values at the mesh vertices.
"""
ndata = self.field.interp_v_vals_to_n_vals(data)
self.set_data(ndata)
def set_from_function(self, fun, step=0):
"""
Set the variable data (the vector of DOF values) using a function of
space coordinates.
Parameters
----------
fun : callable
The function of coordinates returning DOF values of shape
`(n_coor, n_components)`.
step : int, optional
The time history step, 0 (default) = current.
"""
_, vv = self.field.set_dofs(fun, self.field.region, self.n_components)
self.set_data(vv.ravel(), step=step)
def equation_mapping(self, bcs, var_di, ts, functions, problem=None,
warn=False):
"""
Create the mapping of active DOFs from/to all DOFs.
Sets n_adof.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.eq_map = EquationMap('eq_map', self.dofs, var_di)
if bcs is not None:
bcs.canonize_dof_names(self.dofs)
bcs.sort()
active_bcs = self.eq_map.map_equations(bcs, self.field, ts, functions,
problem=problem, warn=warn)
self.n_adof = self.eq_map.n_eq
return active_bcs
def setup_initial_conditions(self, ics, di, functions, warn=False):
"""
Setup of initial conditions.
"""
ics.canonize_dof_names(self.dofs)
ics.sort()
self.initial_condition = nm.zeros((di.n_dof[self.name],),
dtype=self.dtype)
for ic in ics:
region = ic.region
dofs, val = ic.dofs
if warn:
clean_msg = ('warning: ignoring nonexistent' \
' IC node (%s) in ' % self.name)
else:
clean_msg = None
nod_list = self.field.get_dofs_in_region(region)
if len(nod_list) == 0:
continue
fun = get_condition_value(val, functions, 'IC', ic.name)
if isinstance(fun, Function):
aux = fun
fun = lambda coors: aux(coors, ic=ic)
nods, vv = self.field.set_dofs(fun, region, len(dofs), clean_msg)
eq = expand_nodes_to_equations(nods, dofs, self.dofs)
self.initial_condition[eq] = nm.ravel(vv)
def get_data_shape(self, integral, integration='volume', region_name=None):
"""
Get element data dimensions for given approximation.
Parameters
----------
integral : Integral instance
The integral describing used numerical quadrature.
integration : 'volume', 'surface', 'surface_extra', 'point' or 'custom'
The term integration type.
region_name : str
The name of the region of the integral.
Returns
-------
data_shape : 5 ints
The `(n_el, n_qp, dim, n_en, n_comp)` for volume shape kind,
`(n_fa, n_qp, dim, n_fn, n_comp)` for surface shape kind and
`(n_nod, 0, 0, 1, n_comp)` for point shape kind.
Notes
-----
- `n_el`, `n_fa` = number of elements/facets
- `n_qp` = number of quadrature points per element/facet
- `dim` = spatial dimension
- `n_en`, `n_fn` = number of element/facet nodes
- `n_comp` = number of variable components in a point/node
- `n_nod` = number of element nodes
"""
aux = self.field.get_data_shape(integral, integration=integration,
region_name=region_name)
data_shape = aux + (self.n_components,)
return data_shape
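# Illustration (hypothetical names, assuming a P1 vector field on a 3D
# tetrahedral mesh): the returned tuple packs the counts listed in the Notes
# above, e.g.
# >>> u.get_data_shape(Integral('i', 2), 'volume', 'Omega')
# (n_el, n_qp, 3, 4, 3)  # dim = 3, 4 nodes per tetrahedron, 3 components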
def clear_evaluate_cache(self):
"""
Clear current evaluate cache.
"""
self.evaluate_cache = {}
def invalidate_evaluate_cache(self, step=0):
"""
Invalidate variable data in evaluate cache for time step given
by `step` (0 is current, -1 previous, ...).
This should be done, for example, prior to every nonlinear
solver iteration.
"""
for step_cache in six.itervalues(self.evaluate_cache):
for key in list(step_cache.keys()):
if key == step: # Given time step to clear.
step_cache.pop(key)
def evaluate(self, mode='val',
region=None, integral=None, integration=None,
step=0, time_derivative=None, is_trace=False,
trace_region=None, dt=None, bf=None):
"""
Evaluate various quantities related to the variable according to
`mode` in quadrature points defined by `integral`.
The evaluated data are cached in the variable instance in
`evaluate_cache` attribute.
Parameters
----------
mode : one of 'val', 'grad', 'div', 'cauchy_strain'
The evaluation mode.
region : Region instance, optional
The region where the evaluation occurs. If None, the
underlying field region is used.
integral : Integral instance, optional
The integral defining quadrature points in which the
evaluation occurs. If None, the first order volume integral
is created. Must not be None for surface integrations.
integration : 'volume', 'surface', 'surface_extra', or 'point'
The term integration type. If None, it is derived from
`integral`.
step : int, default 0
The time step (0 means current, -1 previous, ...).
time_derivative : None or 'dt'
If not None, return time derivative of the data,
approximated by the backward finite difference.
is_trace : bool, default False
Indicate evaluation of trace of the variable on a boundary
region.
dt : float, optional
The time step to be used if `time_derivative` is `'dt'`. If None,
the `dt` attribute of the variable is used.
bf : Base function, optional
The base function to be used in 'val' mode.
Returns
-------
out : array
The 4-dimensional array of shape
`(n_el, n_qp, n_row, n_col)` with the requested data,
where `n_row`, `n_col` depend on `mode`.
"""
if integration == 'custom':
msg = 'cannot use FieldVariable.evaluate() with custom integration!'
raise ValueError(msg)
step_cache = self.evaluate_cache.setdefault(mode, {})
cache = step_cache.setdefault(step, {})
field = self.field
if region is None:
region = field.region
if is_trace:
region = region.get_mirror_region(trace_region)
if (region is not field.region) and not region.is_empty:
assert_(field.region.contains(region))
if integral is None:
integral = Integral('aux_1', 1)
if integration is None:
integration = 'volume' if region.can_cells else 'surface'
geo, _, key = field.get_mapping(region, integral, integration,
return_key=True)
key += (time_derivative, is_trace)
if key in cache:
out = cache[key]
else:
vec = self(step=step, derivative=time_derivative, dt=dt)
ct = integration
if integration == 'surface_extra':
ct = 'volume'
conn = field.get_econn(ct, region, is_trace, integration)
shape = self.get_data_shape(integral, integration, region.name)
if self.dtype == nm.float64:
out = eval_real(vec, conn, geo, mode, shape, bf)
else:
out = eval_complex(vec, conn, geo, mode, shape, bf)
cache[key] = out
return out
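# A hedged usage sketch (hypothetical variable `u` with its DOF data set):
# evaluate values and gradients in the quadrature points of a volume integral
# over the field region:
# >>> val = u.evaluate(mode='val')
# >>> grad = u.evaluate(mode='grad', integral=Integral('i', 2))
# The results are cached per mode, step and mapping key, so repeated calls
# with the same arguments are cheap until invalidate_evaluate_cache() is
# called (e.g. before each nonlinear solver iteration).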
def get_state_in_region(self, region, reshape=True, step=0):
"""
Get DOFs of the variable in the given region.
Parameters
----------
region : Region
The selected region.
reshape : bool
If True, reshape the DOF vector to a 2D array with the individual
components as columns. Otherwise a 1D DOF array of the form [all
DOFs in region node 0, all DOFs in region node 1, ...] is returned.
step : int, default 0
The time step (0 means current, -1 previous, ...).
Returns
-------
out : array
The selected DOFs.
"""
nods = self.field.get_dofs_in_region(region, merge=True)
eq = nm.empty((len(nods) * self.n_components,), dtype=nm.int32)
for idof in range(self.n_components):
eq[idof::self.n_components] = self.n_components * nods \
+ idof + self.indx.start
out = self.data[step][eq]
if reshape:
out.shape = (len(nods), self.n_components)
return out
def apply_ebc(self, vec, offset=0, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions to
vector `vec`, starting at `offset`.
"""
eq_map = self.eq_map
ii = offset + eq_map.eq_ebc
# EBC.
if force_values is None:
vec[ii] = eq_map.val_ebc
else:
if isinstance(force_values, dict):
vec[ii] = force_values[self.name]
else:
vec[ii] = force_values
# EPBC.
vec[offset+eq_map.master] = vec[offset+eq_map.slave]
def apply_ic(self, vec, offset=0, force_values=None):
"""
Apply initial conditions to vector `vec`, starting at
`offset`.
"""
ii = slice(offset, offset + self.n_dof)
if force_values is None:
vec[ii] = self.get_initial_condition()
else:
if isinstance(force_values, dict):
vec[ii] = force_values[self.name]
else:
vec[ii] = force_values
def get_reduced(self, vec, offset=0, follow_epbc=False):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
The full vector starts in `vec` at `offset`. If 'follow_epbc' is True,
values of EPBC master DOFs are not simply thrown away, but added to the
corresponding slave DOFs, just like when assembling. For vectors with
state (unknown) variables it should be set to False, for assembled
vectors it should be set to True.
"""
eq_map = self.eq_map
ii = offset + eq_map.eqi
r_vec = vec[ii]
if follow_epbc:
master = offset + eq_map.master
slave = eq_map.eq[eq_map.slave]
ii = slave >= 0
la.assemble1d(r_vec, slave[ii], vec[master[ii]])
return r_vec
def get_full(self, r_vec, r_offset=0, force_value=None,
vec=None, offset=0):
"""
Get the full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Notes
-----
The reduced vector starts in `r_vec` at `r_offset`.
Passing a `force_value` overrides the EBC values. Optionally,
the `vec` argument can be provided to store the full vector (in
place) starting at `offset`.
"""
if vec is None:
vec = nm.empty(self.n_dof, dtype=r_vec.dtype)
else:
vec = vec[offset:offset+self.n_dof]
eq_map = self.eq_map
r_vec = r_vec[r_offset:r_offset+eq_map.n_eq]
# EBC.
vec[eq_map.eq_ebc] = get_default(force_value, eq_map.val_ebc)
# Reduced vector values.
vec[eq_map.eqi] = r_vec
# EPBC.
vec[eq_map.master] = vec[eq_map.slave]
unused_dofs = self.field.get('unused_dofs')
if unused_dofs is not None:
vec[:] = self.field.restore_substituted(vec)
return vec
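# Sketch of the reduced/full relation implemented above (hypothetical
# variable `u`): the full vector is the reduced one with EBC values
# re-imposed and EPBC master DOFs copied from their slaves, so
# >>> r = u.get_reduced(u())
# >>> full = u.get_full(r)
# reproduces the original DOF vector whenever it already satisfied the
# E(P)BCs.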
def create_output(self, vec=None, key=None, extend=True, fill_value=None,
linearization=None):
"""
Convert the DOF vector to a dictionary of output data usable by
Mesh.write().
Parameters
----------
vec : array, optional
An alternative DOF vector to be used instead of the variable
DOF vector.
key : str, optional
The key to be used in the output dictionary instead of the
variable name.
extend : bool
Extend the DOF values to cover the whole domain.
fill_value : float or complex
The value used to fill the missing DOF values if `extend` is True.
linearization : Struct or None
The linearization configuration for higher order approximations.
"""
linearization = get_default(linearization, Struct(kind='strip'))
if vec is None:
vec = self()
key = get_default(key, self.name)
aux = nm.reshape(vec,
(self.n_dof // self.n_components, self.n_components))
out = self.field.create_output(aux, self.name, dof_names=self.dofs,
key=key, extend=extend,
fill_value=fill_value,
linearization=linearization)
return out
def get_element_diameters(self, cells, mode, square=False):
"""Get diameters of selected elements."""
field = self.field
domain = field.domain
cells = nm.array(cells)
diameters = nm.empty((cells.shape[0],), dtype=nm.float64)
integral = Integral('i_tmp', 1)
vg, _ = field.get_mapping(field.region, integral, 'volume')
diameters = domain.get_element_diameters(cells, vg, mode, square=square)
return diameters
def save_as_mesh(self, filename):
"""
Save the field mesh and the variable values into a file for
visualization. Only the vertex values are stored.
"""
mesh = self.field.create_mesh(extra_nodes=False)
vec = self()
n_nod, n_dof, dpn = mesh.n_nod, self.n_dof, self.n_components
aux = nm.reshape(vec, (n_dof // dpn, dpn))
ext = self.field.extend_dofs(aux, 0.0)
out = {}
if self.field.approx_order != 0:
out[self.name] = Struct(name='output_data',
mode='vertex', data=ext,
var_name=self.name, dofs=self.dofs)
else:
ext.shape = (ext.shape[0], 1, ext.shape[1], 1)
out[self.name] = Struct(name='output_data',
mode='cell', data=ext,
var_name=self.name, dofs=self.dofs)
mesh.write(filename, io='auto', out=out)
def has_same_mesh(self, other):
"""
Returns
-------
flag : str
The flag can be either 'different' (different meshes), 'deformed'
(slightly deformed same mesh), or 'same' (same).
"""
f1 = self.field
f2 = other.field
c1 = f1.get_coor()
c2 = f2.get_coor()
if c1.shape != c2.shape:
flag = 'different'
else:
eps = 10.0 * nm.finfo(nm.float64).eps
if nm.allclose(c1, c2, rtol=eps, atol=0.0):
flag = 'same'
elif nm.allclose(c1, c2, rtol=0.1, atol=0.0):
flag = 'deformed'
else:
flag = 'different'
return flag
def get_interp_coors(self, strategy='interpolation', interp_term=None):
"""
Get the physical coordinates to interpolate into, based on the strategy
used.
"""
if strategy == 'interpolation':
coors = self.field.get_coor()
elif strategy == 'projection':
region = self.field.region
integral = | Integral(term=interp_term) | sfepy.discrete.integrals.Integral |
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable does not have an equation mapping, a trivial mapping is assumed
and a connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
# Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
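    # make_full_vec() below is the inverse operation: it expands a reduced
    # vector back to all DOFs, re-applying the E(P)BC values and, if defined,
    # the linear combination BC operator.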
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
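# Variable keeps class-level counters (_count, _orders, _all_var_names) shared
# by all instances; Variables.from_conf() calls Variable.reset() so that the
# automatic ordering starts from scratch for every new set of variables.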
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
if self.primary_var_name == '(set-to-None)':
self.primary_var_name = None
self.dof_name = self.name
else:
self.dof_name = self.primary_var_name
if special is not None:
self.special = special
else:
raise NotImplementedError('unknown variable kind: %s' % kind)
self.kind = kind
def _setup_dofs(self, n_nod, n_components, val_shape):
"""
Setup number of DOFs and DOF names.
"""
self.n_nod = n_nod
self.n_components = n_components
self.val_shape = val_shape
self.n_dof = self.n_nod * self.n_components
self.dofs = [self.dof_name + ('.%d' % ii)
for ii in range(self.n_components)]
def get_primary(self):
"""
Get the corresponding primary variable.
Returns
-------
var : Variable instance
The primary variable, or `self` for state
variables or if `primary_var_name` is None, or None if no other
variables are defined.
"""
if self.is_state():
var = self
elif self.primary_var_name is not None:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
else:
var = self
return var
def get_dual(self):
"""
Get the dual variable.
Returns
-------
var : Variable instance
The primary variable for non-state variables, or the dual
variable for state variables.
"""
if self.is_state():
if ((self._variables is not None)
and (self.dual_var_name in self._variables.names)):
var = self._variables[self.dual_var_name]
else:
var = None
else:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
return var
def is_state(self):
return is_state in self.flags
def is_virtual(self):
return is_virtual in self.flags
def is_parameter(self):
return is_parameter in self.flags
def is_state_or_parameter(self):
return (is_state in self.flags) or (is_parameter in self.flags)
def is_kind(self, kind):
return eval('self.is_%s()' % kind)
def is_real(self):
return self.dtype in real_types
def is_complex(self):
return self.dtype in complex_types
def is_finite(self, step=0, derivative=None, dt=None):
return nm.isfinite(self(step=step, derivative=derivative, dt=dt)).all()
def get_primary_name(self):
if self.is_state():
name = self.name
else:
name = self.primary_var_name
return name
def init_history(self):
"""Initialize data of variables with history."""
if self.history is None: return
self.data = deque((self.history + 1) * [None])
self.step = 0
def time_update(self, ts, functions):
"""Implemented in subclasses."""
pass
def advance(self, ts):
"""
Advance in time the DOF state history. A copy of the DOF vector
is made to prevent history modification.
"""
if self.history is None: return
self.step = ts.step + 1
if self.history > 0:
# Copy the current step data to the history data, shift history,
# initialize if needed. The current step data are left intact.
# Note: cannot use self.data.rotate() due to data sharing with
# State.
for ii in range(self.history, 0, -1):
if self.data[ii] is None:
self.data[ii] = nm.empty_like(self.data[0])
self.data[ii][:] = self.data[ii - 1]
# Advance evaluate cache.
for step_cache in six.itervalues(self.evaluate_cache):
steps = sorted(step_cache.keys())
for step in steps:
if step is None:
# Special caches with possible custom advance()
# function.
for key, val in six.iteritems(step_cache[step]):
if hasattr(val, '__advance__'):
val.__advance__(ts, val)
elif -step < self.history:
step_cache[step-1] = step_cache[step]
if len(steps) and (steps[0] is not None):
step_cache.pop(steps[-1])
def init_data(self, step=0):
"""
Initialize the dof vector data of time step `step` to zeros.
"""
if self.is_state_or_parameter():
data = nm.zeros((self.n_dof,), dtype=self.dtype)
self.set_data(data, step=step)
def set_constant(self, val):
"""
Set the variable to a constant value.
"""
data = nm.empty((self.n_dof,), dtype=self.dtype)
data.fill(val)
self.set_data(data)
def set_data(self, data=None, indx=None, step=0,
preserve_caches=False):
"""
Set data (vector of DOF values) of the variable.
Parameters
----------
data : array
The vector of DOF values.
indx : int, optional
If given, `data[indx]` is used.
step : int, optional
The time history step, 0 (default) = current.
preserve_caches : bool
If True, do not invalidate evaluate caches of the variable.
"""
data = data.ravel()
if indx is None:
indx = slice(0, len(data))
else:
indx = slice(int(indx.start), int(indx.stop))
n_data_dof = indx.stop - indx.start
if self.n_dof != n_data_dof:
msg = 'incompatible data shape! (%d (variable) == %d (data))' \
% (self.n_dof, n_data_dof)
raise ValueError(msg)
elif (step > 0) or (-step >= len(self.data)):
raise ValueError('step %d out of range! ([%d, 0])'
% (step, -(len(self.data) - 1)))
else:
self.data[step] = data
self.indx = indx
if not preserve_caches:
self.invalidate_evaluate_cache(step=step)
def __call__(self, step=0, derivative=None, dt=None):
"""
Return vector of degrees of freedom of the variable.
Parameters
----------
step : int, default 0
The time step (0 means current, -1 previous, ...).
derivative : None or 'dt'
If not None, return time derivative of the DOF vector,
approximated by the backward finite difference.
Returns
-------
vec : array
The DOF vector. If `derivative` is None: a view of the data vector,
otherwise: required derivative of the DOF vector
at time step given by `step`.
Notes
-----
If the previous time step is requested in step 0, the step 0
DOF vector is returned instead.
"""
if derivative is None:
if (self.step == 0) and (step == -1):
data = self.data[0]
else:
data = self.data[-step]
if data is None:
raise ValueError('data of variable are not set! (%s, step %d)' \
% (self.name, step))
return data[self.indx]
else:
if self.history is None:
msg = 'set history type of variable %s to use derivatives!'\
% self.name
raise ValueError(msg)
dt = get_default(dt, self.dt)
return (self(step=step) - self(step=step-1)) / dt
def get_initial_condition(self):
if self.initial_condition is None:
return 0.0
else:
return self.initial_condition
class FieldVariable(Variable):
"""
A finite element field variable.
field .. field description of variable (borrowed)
"""
def __init__(self, name, kind, field, order=None, primary_var_name=None,
special=None, flags=None, history=None, **kwargs):
Variable.__init__(self, name, kind, order, primary_var_name,
special, flags, history=history, **kwargs)
self._set_field(field)
self.has_field = True
self.has_bc = True
self._variables = None
self.clear_evaluate_cache()
def _set_field(self, field):
"""
Set field of the variable.
Takes reference to a Field instance. Sets dtype according to
field.dtype. Sets `dim` attribute to spatial dimension.
"""
self.is_surface = field.is_surface
self.field = field
self._setup_dofs(field.n_nod, field.n_components, field.val_shape)
self.flags.add(is_field)
self.dtype = field.dtype
self.dim = field.domain.shape.dim
def _get_setter(self, kind, functions, **kwargs):
"""
        Get the setter function of the variable and its arguments depending on
the setter kind.
"""
if not (hasattr(self, 'special') and (kind in self.special)):
return
setter_name = self.special[kind]
setter = functions[setter_name]
region = self.field.region
nod_list = self.field.get_dofs_in_region(region)
nods = nm.unique(nod_list)
coors = self.field.get_coor(nods)
if kind == 'setter':
sargs = (kwargs.get('ts'), coors)
elif kind == 'ic':
sargs = (coors, )
skwargs = {'region' : region}
return setter, sargs, skwargs
def get_field(self):
return self.field
def get_mapping(self, region, integral, integration,
get_saved=False, return_key=False):
"""
Get the reference element mapping of the underlying field.
See Also
--------
sfepy.discrete.common.fields.Field.get_mapping
"""
if region is None:
region = self.field.region
out = self.field.get_mapping(region, integral, integration,
get_saved=get_saved,
return_key=return_key)
return out
def get_dof_conn(self, dc_type, is_trace=False, trace_region=None):
"""
Get active dof connectivity of a variable.
Notes
-----
The primary and dual variables must have the same Region.
"""
if self.is_virtual():
var = self.get_primary()
# No primary variable can occur in single term evaluations.
var_name = var.name if var is not None else self.name
else:
var_name = self.name
if not is_trace:
region_name = dc_type.region_name
else:
aux = self.field.domain.regions[dc_type.region_name]
region = aux.get_mirror_region(trace_region)
region_name = region.name
key = (var_name, region_name, dc_type.type, is_trace)
dc = self.adof_conns[key]
return dc
def get_dof_info(self, active=False):
details = Struct(name='field_var_dof_details',
n_nod=self.n_nod,
dpn=self.n_components)
if active:
n_dof = self.n_adof
else:
n_dof = self.n_dof
return n_dof, details
def time_update(self, ts, functions):
"""
Store time step, set variable data for variables with the setter
function.
"""
if ts is not None:
self.dt = ts.dt
if hasattr(self, 'special') and ('setter' in self.special):
setter, sargs, skwargs = self._get_setter('setter', functions,
ts=ts)
self.set_data(setter(*sargs, **skwargs))
output('data of %s set by %s()' % (self.name, setter.name))
def set_from_qp(self, data_qp, integral, step=0):
"""
Set DOFs of variable using values in quadrature points
corresponding to the given integral.
"""
data_vertex = self.field.average_qp_to_vertices(data_qp, integral)
# Field nodes values.
data = self.field.interp_v_vals_to_n_vals(data_vertex)
data = data.ravel()
self.indx = slice(0, len(data))
self.data[step] = data
def set_from_mesh_vertices(self, data):
"""
Set the variable using values at the mesh vertices.
"""
ndata = self.field.interp_v_vals_to_n_vals(data)
self.set_data(ndata)
def set_from_function(self, fun, step=0):
"""
Set the variable data (the vector of DOF values) using a function of
space coordinates.
Parameters
----------
fun : callable
The function of coordinates returning DOF values of shape
`(n_coor, n_components)`.
step : int, optional
The time history step, 0 (default) = current.
"""
_, vv = self.field.set_dofs(fun, self.field.region, self.n_components)
self.set_data(vv.ravel(), step=step)
def equation_mapping(self, bcs, var_di, ts, functions, problem=None,
warn=False):
"""
Create the mapping of active DOFs from/to all DOFs.
Sets n_adof.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.eq_map = EquationMap('eq_map', self.dofs, var_di)
if bcs is not None:
bcs.canonize_dof_names(self.dofs)
bcs.sort()
active_bcs = self.eq_map.map_equations(bcs, self.field, ts, functions,
problem=problem, warn=warn)
self.n_adof = self.eq_map.n_eq
return active_bcs
def setup_initial_conditions(self, ics, di, functions, warn=False):
"""
Setup of initial conditions.
"""
ics.canonize_dof_names(self.dofs)
ics.sort()
self.initial_condition = nm.zeros((di.n_dof[self.name],),
dtype=self.dtype)
for ic in ics:
region = ic.region
dofs, val = ic.dofs
if warn:
clean_msg = ('warning: ignoring nonexistent' \
' IC node (%s) in ' % self.name)
else:
clean_msg = None
nod_list = self.field.get_dofs_in_region(region)
if len(nod_list) == 0:
continue
fun = get_condition_value(val, functions, 'IC', ic.name)
if isinstance(fun, Function):
aux = fun
fun = lambda coors: aux(coors, ic=ic)
nods, vv = self.field.set_dofs(fun, region, len(dofs), clean_msg)
eq = expand_nodes_to_equations(nods, dofs, self.dofs)
self.initial_condition[eq] = nm.ravel(vv)
def get_data_shape(self, integral, integration='volume', region_name=None):
"""
Get element data dimensions for given approximation.
Parameters
----------
integral : Integral instance
The integral describing used numerical quadrature.
integration : 'volume', 'surface', 'surface_extra', 'point' or 'custom'
The term integration type.
region_name : str
The name of the region of the integral.
Returns
-------
data_shape : 5 ints
The `(n_el, n_qp, dim, n_en, n_comp)` for volume shape kind,
`(n_fa, n_qp, dim, n_fn, n_comp)` for surface shape kind and
`(n_nod, 0, 0, 1, n_comp)` for point shape kind.
Notes
-----
- `n_el`, `n_fa` = number of elements/facets
- `n_qp` = number of quadrature points per element/facet
- `dim` = spatial dimension
- `n_en`, `n_fn` = number of element/facet nodes
- `n_comp` = number of variable components in a point/node
- `n_nod` = number of element nodes
"""
aux = self.field.get_data_shape(integral, integration=integration,
region_name=region_name)
data_shape = aux + (self.n_components,)
return data_shape
def clear_evaluate_cache(self):
"""
Clear current evaluate cache.
"""
self.evaluate_cache = {}
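    # The cache is a nested dict: evaluate_cache[mode][step][key], where key
    # combines the reference mapping key with the time derivative and trace
    # flags; see evaluate() below.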
def invalidate_evaluate_cache(self, step=0):
"""
Invalidate variable data in evaluate cache for time step given
by `step` (0 is current, -1 previous, ...).
This should be done, for example, prior to every nonlinear
solver iteration.
"""
for step_cache in six.itervalues(self.evaluate_cache):
for key in list(step_cache.keys()):
if key == step: # Given time step to clear.
step_cache.pop(key)
def evaluate(self, mode='val',
region=None, integral=None, integration=None,
step=0, time_derivative=None, is_trace=False,
trace_region=None, dt=None, bf=None):
"""
Evaluate various quantities related to the variable according to
`mode` in quadrature points defined by `integral`.
The evaluated data are cached in the variable instance in
`evaluate_cache` attribute.
Parameters
----------
mode : one of 'val', 'grad', 'div', 'cauchy_strain'
The evaluation mode.
region : Region instance, optional
The region where the evaluation occurs. If None, the
underlying field region is used.
integral : Integral instance, optional
The integral defining quadrature points in which the
evaluation occurs. If None, the first order volume integral
is created. Must not be None for surface integrations.
integration : 'volume', 'surface', 'surface_extra', or 'point'
The term integration type. If None, it is derived from
`integral`.
step : int, default 0
The time step (0 means current, -1 previous, ...).
time_derivative : None or 'dt'
If not None, return time derivative of the data,
approximated by the backward finite difference.
is_trace : bool, default False
Indicate evaluation of trace of the variable on a boundary
region.
dt : float, optional
The time step to be used if `derivative` is `'dt'`. If None,
the `dt` attribute of the variable is used.
bf : Base function, optional
The base function to be used in 'val' mode.
Returns
-------
out : array
The 4-dimensional array of shape
`(n_el, n_qp, n_row, n_col)` with the requested data,
where `n_row`, `n_col` depend on `mode`.
"""
if integration == 'custom':
msg = 'cannot use FieldVariable.evaluate() with custom integration!'
raise ValueError(msg)
step_cache = self.evaluate_cache.setdefault(mode, {})
cache = step_cache.setdefault(step, {})
field = self.field
if region is None:
region = field.region
if is_trace:
region = region.get_mirror_region(trace_region)
if (region is not field.region) and not region.is_empty:
assert_(field.region.contains(region))
if integral is None:
integral = Integral('aux_1', 1)
if integration is None:
integration = 'volume' if region.can_cells else 'surface'
geo, _, key = field.get_mapping(region, integral, integration,
return_key=True)
key += (time_derivative, is_trace)
if key in cache:
out = cache[key]
else:
vec = self(step=step, derivative=time_derivative, dt=dt)
ct = integration
if integration == 'surface_extra':
ct = 'volume'
conn = field.get_econn(ct, region, is_trace, integration)
shape = self.get_data_shape(integral, integration, region.name)
if self.dtype == nm.float64:
out = eval_real(vec, conn, geo, mode, shape, bf)
else:
out = eval_complex(vec, conn, geo, mode, shape, bf)
cache[key] = out
return out
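    # Minimal usage sketch (illustrative, names not from this file): for a
    # scalar FieldVariable `u`, `u.evaluate(mode='val')` yields an array of
    # shape (n_el, n_qp, 1, 1) and `u.evaluate(mode='grad')` one of shape
    # (n_el, n_qp, dim, 1), both in the first-order volume quadrature of the
    # field region; repeated calls with the same arguments are served from
    # `evaluate_cache`.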
def get_state_in_region(self, region, reshape=True, step=0):
"""
Get DOFs of the variable in the given region.
Parameters
----------
region : Region
The selected region.
reshape : bool
If True, reshape the DOF vector to a 2D array with the individual
components as columns. Otherwise a 1D DOF array of the form [all
DOFs in region node 0, all DOFs in region node 1, ...] is returned.
step : int, default 0
The time step (0 means current, -1 previous, ...).
Returns
-------
out : array
The selected DOFs.
"""
nods = self.field.get_dofs_in_region(region, merge=True)
eq = nm.empty((len(nods) * self.n_components,), dtype=nm.int32)
for idof in range(self.n_components):
eq[idof::self.n_components] = self.n_components * nods \
+ idof + self.indx.start
out = self.data[step][eq]
if reshape:
out.shape = (len(nods), self.n_components)
return out
def apply_ebc(self, vec, offset=0, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions to
vector `vec`, starting at `offset`.
"""
eq_map = self.eq_map
ii = offset + eq_map.eq_ebc
        # EBC.
if force_values is None:
vec[ii] = eq_map.val_ebc
else:
if isinstance(force_values, dict):
vec[ii] = force_values[self.name]
else:
vec[ii] = force_values
# EPBC.
vec[offset+eq_map.master] = vec[offset+eq_map.slave]
def apply_ic(self, vec, offset=0, force_values=None):
"""
        Apply initial conditions to vector `vec`, starting at
`offset`.
"""
ii = slice(offset, offset + self.n_dof)
if force_values is None:
vec[ii] = self.get_initial_condition()
else:
if isinstance(force_values, dict):
vec[ii] = force_values[self.name]
else:
vec[ii] = force_values
def get_reduced(self, vec, offset=0, follow_epbc=False):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
The full vector starts in `vec` at `offset`. If 'follow_epbc' is True,
values of EPBC master DOFs are not simply thrown away, but added to the
corresponding slave DOFs, just like when assembling. For vectors with
state (unknown) variables it should be set to False, for assembled
vectors it should be set to True.
"""
eq_map = self.eq_map
ii = offset + eq_map.eqi
r_vec = vec[ii]
if follow_epbc:
master = offset + eq_map.master
slave = eq_map.eq[eq_map.slave]
ii = slave >= 0
la.assemble1d(r_vec, slave[ii], vec[master[ii]])
return r_vec
def get_full(self, r_vec, r_offset=0, force_value=None,
vec=None, offset=0):
"""
Get the full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Notes
-----
The reduced vector starts in `r_vec` at `r_offset`.
Passing a `force_value` overrides the EBC values. Optionally,
`vec` argument can be provided to store the full vector (in
place) starting at `offset`.
"""
if vec is None:
vec = nm.empty(self.n_dof, dtype=r_vec.dtype)
else:
vec = vec[offset:offset+self.n_dof]
eq_map = self.eq_map
r_vec = r_vec[r_offset:r_offset+eq_map.n_eq]
# EBC.
vec[eq_map.eq_ebc] = get_default(force_value, eq_map.val_ebc)
# Reduced vector values.
vec[eq_map.eqi] = r_vec
# EPBC.
vec[eq_map.master] = vec[eq_map.slave]
unused_dofs = self.field.get('unused_dofs')
if unused_dofs is not None:
vec[:] = self.field.restore_substituted(vec)
return vec
def create_output(self, vec=None, key=None, extend=True, fill_value=None,
linearization=None):
"""
Convert the DOF vector to a dictionary of output data usable by
Mesh.write().
Parameters
----------
vec : array, optional
An alternative DOF vector to be used instead of the variable
DOF vector.
key : str, optional
The key to be used in the output dictionary instead of the
variable name.
extend : bool
Extend the DOF values to cover the whole domain.
fill_value : float or complex
The value used to fill the missing DOF values if `extend` is True.
linearization : Struct or None
The linearization configuration for higher order approximations.
"""
linearization = get_default(linearization, Struct(kind='strip'))
if vec is None:
vec = self()
key = get_default(key, self.name)
aux = nm.reshape(vec,
(self.n_dof // self.n_components, self.n_components))
out = self.field.create_output(aux, self.name, dof_names=self.dofs,
key=key, extend=extend,
fill_value=fill_value,
linearization=linearization)
return out
def get_element_diameters(self, cells, mode, square=False):
"""Get diameters of selected elements."""
field = self.field
domain = field.domain
cells = nm.array(cells)
diameters = nm.empty((cells.shape[0],), dtype=nm.float64)
integral = Integral('i_tmp', 1)
vg, _ = field.get_mapping(field.region, integral, 'volume')
diameters = domain.get_element_diameters(cells, vg, mode, square=square)
return diameters
def save_as_mesh(self, filename):
"""
Save the field mesh and the variable values into a file for
visualization. Only the vertex values are stored.
"""
mesh = self.field.create_mesh(extra_nodes=False)
vec = self()
n_nod, n_dof, dpn = mesh.n_nod, self.n_dof, self.n_components
aux = nm.reshape(vec, (n_dof // dpn, dpn))
ext = self.field.extend_dofs(aux, 0.0)
out = {}
if self.field.approx_order != 0:
out[self.name] = Struct(name='output_data',
mode='vertex', data=ext,
var_name=self.name, dofs=self.dofs)
else:
ext.shape = (ext.shape[0], 1, ext.shape[1], 1)
out[self.name] = Struct(name='output_data',
mode='cell', data=ext,
var_name=self.name, dofs=self.dofs)
mesh.write(filename, io='auto', out=out)
def has_same_mesh(self, other):
"""
Returns
-------
        flag : str
The flag can be either 'different' (different meshes), 'deformed'
(slightly deformed same mesh), or 'same' (same).
"""
f1 = self.field
f2 = other.field
c1 = f1.get_coor()
c2 = f2.get_coor()
if c1.shape != c2.shape:
flag = 'different'
else:
eps = 10.0 * nm.finfo(nm.float64).eps
if nm.allclose(c1, c2, rtol=eps, atol=0.0):
flag = 'same'
elif nm.allclose(c1, c2, rtol=0.1, atol=0.0):
flag = 'deformed'
else:
flag = 'different'
return flag
def get_interp_coors(self, strategy='interpolation', interp_term=None):
"""
Get the physical coordinates to interpolate into, based on the strategy
used.
"""
if strategy == 'interpolation':
coors = self.field.get_coor()
elif strategy == 'projection':
region = self.field.region
integral = Integral(term=interp_term)
            coors = get_physical_qps(region, integral)
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
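# These integer flags are stored in Variable.flags and queried via the
# is_state()/is_virtual()/is_parameter() methods of the Variable class.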
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
    If a variable does not have the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
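# Worked example (illustrative values): for dpn = 2, a single element with
# conn = [[0, 1]] and offset = 0, the resulting row is
# [eq[0], eq[2], eq[1], eq[3]], i.e. the first component at both element
# nodes, then the second component, matching the local ordering noted above.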
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
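# For a scalar basis (n_c == 1) and dpn == 2 the result is, per quadrature
# point, a 2 x (2 * n_bf) block-diagonal array: row 0 carries the basis for
# the first DOF component, row 1 the basis for the second.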
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
                if not is_active_bc(bc, ts=ts, functions=functions):
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
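    # From here on, a reduced DOF vector is recovered from its LCBC-reduced
    # counterpart as x_active = mtx_lcbc * x_lcbc (+ vec_lcbc when a
    # right-hand side is present); make_full_vec() applies exactly this
    # product.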
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
        # Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
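    # Sketch of the dictionary form (variable names are illustrative):
    #
    #     variables.set_data({'u': u_values, 'p': p_values})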
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
            if key not in var_info: continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
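    # Typical use when saving results (a sketch; `pb` is an assumed Problem
    # instance and 'state.vtk' an illustrative file name):
    #
    #     out = pb.get_variables().state_to_output(vec)
    #     pb.domain.mesh.write('state.vtk', out=out)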
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
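    # Schematic per-time-step sequence using the methods above:
    #
    #     variables.time_update(ts, functions)
    #     # ... assemble and solve for the new state ...
    #     variables.advance(ts)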
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
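    # In the declarative problem description, the items parsed above are
    # usually written as short tuples that get turned into `conf` objects,
    # e.g. (illustrative field and variable names):
    #
    #     variables = {
    #         'u': ('unknown field', 'displacement', 0),
    #         'v': ('test field', 'displacement', 'u'),
    #     }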
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
            self.primary_var_name = get_default(primary_var_name, None, msg)
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
If a variable has not the equation mapping, a trivial mapping is assumed
and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not | are_disjoint(regs[i0], regs[i1]) | sfepy.discrete.common.region.are_disjoint |
#!/usr/bin/env python
"""
Plot logs of variables saved in a text file by sfepy.base.log.Log class.
The plot should be almost the same as the plot that would be generated by the
Log directly.
"""
from optparse import OptionParser
import matplotlib.pyplot as plt
from sfepy.base.log import read_log, plot_log
usage = '%prog [options] filename\n' + __doc__.rstrip()
def parse_rc(option, opt, value, parser):
pars = {}
for pair in value.split(','):
key, val = pair.split('=')
pars[key] = eval(val)
setattr(parser.values, option.dest, pars)
helps = {
'output_filename' :
'save the figure using the given file name',
'rc' : 'matplotlib resources',
'no_show' :
'do not show the figure',
}
def main():
parser = OptionParser(usage=usage)
parser.add_option('-o', '--output', metavar='filename',
action='store', dest='output_filename',
default=None, help=helps['output_filename'])
parser.add_option('--rc', type='str', metavar='key=val,...',
action='callback', dest='rc',
callback=parse_rc, default={}, help=helps['rc'])
parser.add_option('-n', '--no-show',
action='store_true', dest='no_show',
default=False, help=helps['no_show'])
options, args = parser.parse_args()
if len(args) == 1:
filename = args[0]
else:
parser.print_help()
return
log, info = read_log(filename)
plt.rcParams.update(options.rc)
    # Reconstructed tail of main() (a sketch following the options defined
    # above; plot_log()'s first argument is taken to be the figure number):
    plot_log(1, log, info)
    if options.output_filename:
        plt.savefig(options.output_filename)
    if not options.no_show:
        plt.show()

if __name__ == '__main__':
    main()
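# Example invocation (file names are illustrative):
#   python plot_logs.py live_plot.log -o plot.png --rc "lines.linewidth=2,font.size=10"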
r"""
Compute homogenized elastic coefficients for a given heterogeneous linear
elastic microstructure, see [1] for details or [2] and [3] for a quick
explanation.
[1] <NAME>, <NAME>: Homogenization in open sets with holes.
Journal of Mathematical Analysis and Applications 71(2), 1979, pages 590-607.
https://doi.org/10.1016/0022-247X(79)90211-7
[2] <NAME>, <NAME>, <NAME>:
Asymptotic homogenisation in linear elasticity.
Part I: Mathematical formulation and finite element modelling.
Computational Materials Science 45(4), 2009, pages 1073-1080.
http://dx.doi.org/10.1016/j.commatsci.2009.02.025
[3] <NAME>, <NAME>, <NAME>:
Asymptotic homogenisation in linear elasticity.
Part II: Finite element procedures and multiscale applications.
Computational Materials Science 45(4), 2009, pages 1081-1096.
http://dx.doi.org/10.1016/j.commatsci.2009.01.027
"""
from __future__ import absolute_import
import sfepy.discrete.fem.periodic as per
from sfepy.mechanics.matcoefs import stiffness_from_youngpoisson
from sfepy.homogenization.utils import define_box_regions
import sfepy.homogenization.coefs_base as cb
from sfepy import data_dir
from sfepy.base.base import Struct
from sfepy.homogenization.recovery import compute_micro_u,\
compute_stress_strain_u, compute_mac_stress_part
def recovery_le(pb, corrs, macro):
out = {}
dim = corrs['corrs_le']['u_00'].shape[1]
mic_u = - compute_micro_u(corrs['corrs_le'], macro['strain'], 'u', dim)
out['u_mic'] = Struct(name='output_data',
mode='vertex', data=mic_u,
var_name='u', dofs=None)
stress_Y, strain_Y = \
| compute_stress_strain_u(pb, 'i', 'Y', 'mat.D', 'u', mic_u) | sfepy.homogenization.recovery.compute_stress_strain_u |
r"""
Compute homogenized elastic coefficients for a given heterogeneous linear
elastic microstructure, see [1] for details or [2] and [3] for a quick
explanation.
[1] <NAME>, <NAME>: Homogenization in open sets with holes.
Journal of Mathematical Analysis and Applications 71(2), 1979, pages 590-607.
https://doi.org/10.1016/0022-247X(79)90211-7
[2] <NAME>, <NAME>, <NAME>:
Asymptotic homogenisation in linear elasticity.
Part I: Mathematical formulation and finite element modelling.
Computational Materials Science 45(4), 2009, pages 1073-1080.
http://dx.doi.org/10.1016/j.commatsci.2009.02.025
[3] <NAME>, <NAME>, <NAME>:
Asymptotic homogenisation in linear elasticity.
Part II: Finite element procedures and multiscale applications.
Computational Materials Science 45(4), 2009, pages 1081-1096.
http://dx.doi.org/10.1016/j.commatsci.2009.01.027
"""
from __future__ import absolute_import
import sfepy.discrete.fem.periodic as per
from sfepy.mechanics.matcoefs import stiffness_from_youngpoisson
from sfepy.homogenization.utils import define_box_regions
import sfepy.homogenization.coefs_base as cb
from sfepy import data_dir
from sfepy.base.base import Struct
from sfepy.homogenization.recovery import compute_micro_u,\
compute_stress_strain_u, compute_mac_stress_part
def recovery_le(pb, corrs, macro):
out = {}
dim = corrs['corrs_le']['u_00'].shape[1]
mic_u = - compute_micro_u(corrs['corrs_le'], macro['strain'], 'u', dim)
out['u_mic'] = Struct(name='output_data',
mode='vertex', data=mic_u,
var_name='u', dofs=None)
stress_Y, strain_Y = \
compute_stress_strain_u(pb, 'i', 'Y', 'mat.D', 'u', mic_u)
stress_Y += \
| compute_mac_stress_part(pb, 'i', 'Y', 'mat.D', 'u', macro['strain']) | sfepy.homogenization.recovery.compute_mac_stress_part |
r"""
Compute homogenized elastic coefficients for a given heterogeneous linear
elastic microstructure, see [1] for details or [2] and [3] for a quick
explanation.
[1] <NAME>, <NAME>: Homogenization in open sets with holes.
Journal of Mathematical Analysis and Applications 71(2), 1979, pages 590-607.
https://doi.org/10.1016/0022-247X(79)90211-7
[2] <NAME>, <NAME>, <NAME>:
Asymptotic homogenisation in linear elasticity.
Part I: Mathematical formulation and finite element modelling.
Computational Materials Science 45(4), 2009, pages 1073-1080.
http://dx.doi.org/10.1016/j.commatsci.2009.02.025
[3] <NAME>, <NAME>, <NAME>:
Asymptotic homogenisation in linear elasticity.
Part II: Finite element procedures and multiscale applications.
Computational Materials Science 45(4), 2009, pages 1081-1096.
http://dx.doi.org/10.1016/j.commatsci.2009.01.027
"""
from __future__ import absolute_import
import sfepy.discrete.fem.periodic as per
from sfepy.mechanics.matcoefs import stiffness_from_youngpoisson
from sfepy.homogenization.utils import define_box_regions
import sfepy.homogenization.coefs_base as cb
from sfepy import data_dir
from sfepy.base.base import Struct
from sfepy.homogenization.recovery import compute_micro_u,\
compute_stress_strain_u, compute_mac_stress_part
def recovery_le(pb, corrs, macro):
out = {}
dim = corrs['corrs_le']['u_00'].shape[1]
mic_u = - compute_micro_u(corrs['corrs_le'], macro['strain'], 'u', dim)
out['u_mic'] = Struct(name='output_data',
mode='vertex', data=mic_u,
var_name='u', dofs=None)
stress_Y, strain_Y = \
compute_stress_strain_u(pb, 'i', 'Y', 'mat.D', 'u', mic_u)
stress_Y += \
compute_mac_stress_part(pb, 'i', 'Y', 'mat.D', 'u', macro['strain'])
strain = macro['strain'] + strain_Y
out['cauchy_strain'] = Struct(name='output_data',
mode='cell', data=strain,
dofs=None)
out['cauchy_stress'] = Struct(name='output_data',
mode='cell', data=stress_Y,
dofs=None)
return out
filename_mesh = data_dir + '/meshes/3d/matrix_fiber.mesh'
dim = 3
region_lbn = (0, 0, 0)
region_rtf = (1, 1, 1)
regions = {
'Y': 'all',
'Ym': 'cells of group 1',
'Yc': 'cells of group 2',
}
regions.update( | define_box_regions(dim, region_lbn, region_rtf) | sfepy.homogenization.utils.define_box_regions |
r"""
Compute homogenized elastic coefficients for a given heterogeneous linear
elastic microstructure, see [1] for details or [2] and [3] for a quick
explanation.
[1] <NAME>, <NAME>: Homogenization in open sets with holes.
Journal of Mathematical Analysis and Applications 71(2), 1979, pages 590-607.
https://doi.org/10.1016/0022-247X(79)90211-7
[2] <NAME>, <NAME>, <NAME>:
Asymptotic homogenisation in linear elasticity.
Part I: Mathematical formulation and finite element modelling.
Computational Materials Science 45(4), 2009, pages 1073-1080.
http://dx.doi.org/10.1016/j.commatsci.2009.02.025
[3] <NAME>, <NAME>, <NAME>:
Asymptotic homogenisation in linear elasticity.
Part II: Finite element procedures and multiscale applications.
Computational Materials Science 45(4), 2009, pages 1081-1096.
http://dx.doi.org/10.1016/j.commatsci.2009.01.027
"""
from __future__ import absolute_import
import sfepy.discrete.fem.periodic as per
from sfepy.mechanics.matcoefs import stiffness_from_youngpoisson
from sfepy.homogenization.utils import define_box_regions
import sfepy.homogenization.coefs_base as cb
from sfepy import data_dir
from sfepy.base.base import Struct
from sfepy.homogenization.recovery import compute_micro_u,\
compute_stress_strain_u, compute_mac_stress_part
def recovery_le(pb, corrs, macro):
out = {}
dim = corrs['corrs_le']['u_00'].shape[1]
mic_u = - | compute_micro_u(corrs['corrs_le'], macro['strain'], 'u', dim) | sfepy.homogenization.recovery.compute_micro_u |
r"""
Compute homogenized elastic coefficients for a given heterogeneous linear
elastic microstructure, see [1] for details or [2] and [3] for a quick
explanation.
[1] <NAME>, <NAME>: Homogenization in open sets with holes.
Journal of Mathematical Analysis and Applications 71(2), 1979, pages 590-607.
https://doi.org/10.1016/0022-247X(79)90211-7
[2] <NAME>, <NAME>, <NAME>:
Asymptotic homogenisation in linear elasticity.
Part I: Mathematical formulation and finite element modelling.
Computational Materials Science 45(4), 2009, pages 1073-1080.
http://dx.doi.org/10.1016/j.commatsci.2009.02.025
[3] <NAME>, <NAME>, <NAME>:
Asymptotic homogenisation in linear elasticity.
Part II: Finite element procedures and multiscale applications.
Computational Materials Science 45(4), 2009, pages 1081-1096.
http://dx.doi.org/10.1016/j.commatsci.2009.01.027
"""
from __future__ import absolute_import
import sfepy.discrete.fem.periodic as per
from sfepy.mechanics.matcoefs import stiffness_from_youngpoisson
from sfepy.homogenization.utils import define_box_regions
import sfepy.homogenization.coefs_base as cb
from sfepy import data_dir
from sfepy.base.base import Struct
from sfepy.homogenization.recovery import compute_micro_u,\
compute_stress_strain_u, compute_mac_stress_part
def recovery_le(pb, corrs, macro):
out = {}
dim = corrs['corrs_le']['u_00'].shape[1]
mic_u = - compute_micro_u(corrs['corrs_le'], macro['strain'], 'u', dim)
out['u_mic'] = Struct(name='output_data',
mode='vertex', data=mic_u,
var_name='u', dofs=None)
stress_Y, strain_Y = \
compute_stress_strain_u(pb, 'i', 'Y', 'mat.D', 'u', mic_u)
stress_Y += \
compute_mac_stress_part(pb, 'i', 'Y', 'mat.D', 'u', macro['strain'])
strain = macro['strain'] + strain_Y
out['cauchy_strain'] = Struct(name='output_data',
mode='cell', data=strain,
dofs=None)
out['cauchy_stress'] = Struct(name='output_data',
mode='cell', data=stress_Y,
dofs=None)
return out
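# recovery_le() is intended to be referenced as the micro-recovery hook in the
# homogenization options, e.g. (schematic):
#
#   options = {
#       'recovery_hook': 'recovery_le',
#       ...
#   }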
filename_mesh = data_dir + '/meshes/3d/matrix_fiber.mesh'
dim = 3
region_lbn = (0, 0, 0)
region_rtf = (1, 1, 1)
regions = {
'Y': 'all',
'Ym': 'cells of group 1',
'Yc': 'cells of group 2',
}
regions.update(define_box_regions(dim, region_lbn, region_rtf))
materials = {
    # Stiffness tensors of the matrix (Ym) and inclusion (Yc) subdomains; the
    # Young's modulus / Poisson's ratio values below are illustrative
    # placeholders, not taken from the original example.
    'mat': ({'D': {'Ym': stiffness_from_youngpoisson(dim, 7.0e9, 0.4),
                   'Yc': stiffness_from_youngpoisson(dim, 70.0e9, 0.2)}},),
}