def visit_FunctionDef(self, node):
'''
Initialise aliasing default value before visiting.
    Add aliasing values for:
- Pythonic
- globals declarations
- current function arguments
'''
self.aliases = IntrinsicAliases.copy()
self.aliases.update((f.name, {f})
for f in self.global_declarations.values())
self.aliases.update((arg.id, {arg})
for arg in node.args.args)
self.generic_visit(node)
if Aliases.RetId in self.aliases:
# parametrize the expression
def parametrize(exp):
# constant(?) or global -> no change
if isinstance(exp, (ast.Index, Intrinsic, ast.FunctionDef)):
return lambda _: {exp}
elif isinstance(exp, ContainerOf):
pcontainee = parametrize(exp.containee)
index = exp.index
return lambda args: {
ContainerOf(pc, index)
for pc in pcontainee(args)
}
elif isinstance(exp, ast.Name):
try:
w = node.args.args.index(exp)
def return_alias(args):
if w < len(args):
return {args[w]}
else:
return {node.args.defaults[w - len(args)]}
return return_alias
except ValueError:
return lambda _: self.get_unbound_value_set()
elif isinstance(exp, ast.Subscript):
values = parametrize(exp.value)
slices = parametrize(exp.slice)
return lambda args: {
ast.Subscript(value, slice, ast.Load())
for value in values(args)
for slice in slices(args)}
else:
return lambda _: self.get_unbound_value_set()
# this is a little tricky: for each returned alias,
# parametrize builds a function that, given a list of args,
# returns the alias
        # then, as we may have multiple returned aliases, we compute the union
# of these returned aliases
return_aliases = [parametrize(ret_alias)
for ret_alias
in self.aliases[Aliases.RetId]]
def merge_return_aliases(args):
merged_return_aliases = set()
for return_alias in return_aliases:
merged_return_aliases.update(return_alias(args))
return merged_return_aliases
        node.return_alias = merge_return_aliases

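# A minimal, self-contained sketch of the parametrize/merge pattern used in
# visit_FunctionDef above (plain illustrative Python; `make_param` is a
# made-up helper, not part of the pythran API):
def make_param(position):
    # build a closure that, given the call-site args, returns the alias set
    def return_alias(args):
        return {args[position]} if position < len(args) else set()
    return return_alias

return_aliases = [make_param(0), make_param(1)]  # one closure per alias

def merge_return_aliases(args):
    merged = set()
    for return_alias in return_aliases:
        merged.update(return_alias(args))
    return merged

assert merge_return_aliases(('a', 'b')) == {'a', 'b'}
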
def visit_For(self, node):
'''
For loop creates aliasing between the target
and the content of the iterator
>>> from pythran import passmanager
>>> pm = passmanager.PassManager('demo')
>>> module = ast.parse("""
... def foo(a):
... for i in a:
... {i}""")
>>> result = pm.gather(Aliases, module)
>>> Aliases.dump(result, filter=ast.Set)
{i} => ['|i|']
Not very useful, unless we know something about the iterated container
>>> module = ast.parse("""
... def foo(a, b):
... for i in [a, b]:
... {i}""")
>>> result = pm.gather(Aliases, module)
>>> Aliases.dump(result, filter=ast.Set)
{i} => ['|a|', '|b|']
'''
iter_aliases = self.visit(node.iter)
if all(isinstance(x, ContainerOf) for x in iter_aliases):
target_aliases = set()
for iter_alias in iter_aliases:
target_aliases.add(iter_alias.containee)
else:
target_aliases = {node.target}
self.add(node.target, target_aliases)
self.aliases[node.target.id] = self.result[node.target]
    # visit the loop body twice so that aliasing induced by one iteration
    # also propagates to the next iteration (a cheap fixed-point shortcut)
    self.generic_visit(node)
    self.generic_visit(node)

def visit_If(self, node):
'''
After an if statement, the values from both branches are merged,
potentially creating more aliasing:
>>> from pythran import passmanager
>>> pm = passmanager.PassManager('demo')
>>> fun = """
... def foo(a, b):
... if a: c=a
... else: c=b
... return {c}"""
>>> module = ast.parse(fun)
>>> result = pm.gather(Aliases, module)
>>> Aliases.dump(result, filter=ast.Set)
{c} => ['|a|', '|b|']
'''
md.visit(self, node)
self.visit(node.test)
true_aliases = false_aliases = None
# first try the true branch
try:
tmp = self.aliases.copy()
for stmt in node.body:
self.visit(stmt)
true_aliases = self.aliases
self.aliases = tmp
except PythranSyntaxError:
pass
# then try the false branch
try:
for stmt in node.orelse:
self.visit(stmt)
false_aliases = self.aliases
except PythranSyntaxError:
pass
if true_aliases and not false_aliases:
self.aliases = true_aliases
try:
for stmt in node.orelse:
self.visit(stmt)
false_aliases = self.aliases
except PythranSyntaxError:
pass
if false_aliases and not true_aliases:
self.aliases = false_aliases
try:
for stmt in node.body:
self.visit(stmt)
true_aliases = self.aliases
except PythranSyntaxError:
pass
# merge the results from true and false branches
if false_aliases and true_aliases:
for k, v in true_aliases.items():
if k in self.aliases:
self.aliases[k] = self.aliases[k].union(v)
else:
assert isinstance(v, (frozenset, set))
self.aliases[k] = v
elif true_aliases:
        self.aliases = true_aliases

def prepare(self, node):
"""
    Initialise argument effects, as this analysis is inter-procedural.
Initialisation done for Pythonic functions and default values set for
user defined functions.
"""
super(ArgumentReadOnce, self).prepare(node)
# global functions init
for n in self.global_declarations.values():
fe = ArgumentReadOnce.FunctionEffects(n)
self.node_to_functioneffect[n] = fe
self.result.add(fe)
# Pythonic functions init
def save_effect(module):
""" Recursively save read once effect for Pythonic functions. """
for intr in module.values():
if isinstance(intr, dict): # Submodule case
save_effect(intr)
else:
fe = ArgumentReadOnce.FunctionEffects(intr)
self.node_to_functioneffect[intr] = fe
self.result.add(fe)
if isinstance(intr, intrinsic.Class): # Class case
save_effect(intr.fields)
for module in MODULES.values():
        save_effect(module)

def ds9_objects_to_string(regions, coordsys='fk5', fmt='.6f', radunit='deg'):
"""
Converts a `list` of `~regions.Region` to DS9 region string.
Parameters
----------
regions : `list`
List of `~regions.Region` objects
coordsys : `str`, optional
This overrides the coordinate system frame for all regions.
Default is 'fk5'.
fmt : `str`, optional
A python string format defining the output precision. Default is .6f,
which is accurate to 0.0036 arcseconds.
radunit : `str`, optional
This denotes the unit of the radius. Default is 'deg'(degrees)
Returns
-------
region_string : `str`
DS9 region string
Examples
--------
>>> from astropy import units as u
>>> from astropy.coordinates import SkyCoord
>>> from regions import CircleSkyRegion, ds9_objects_to_string
>>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg)
>>> print(ds9_objects_to_string([reg_sky]))
# Region file format: DS9 astropy/regions
fk5
circle(1.000007,2.000002,5.000000)
"""
shapelist = to_shape_list(regions, coordsys)
    return shapelist.to_ds9(coordsys, fmt, radunit)

def write_ds9(regions, filename, coordsys='fk5', fmt='.6f', radunit='deg'):
"""
Converts a `list` of `~regions.Region` to DS9 string and write to file.
Parameters
----------
regions : `list`
List of `regions.Region` objects
filename : `str`
Filename in which the string is to be written.
    coordsys : `str`, optional
Coordinate system that overrides the coordinate frames of all regions.
Default is 'fk5'.
fmt : `str`, optional
A python string format defining the output precision. Default is .6f,
which is accurate to 0.0036 arcseconds.
radunit : `str`, optional
This denotes the unit of the radius. Default is deg (degrees)
Examples
--------
>>> from astropy import units as u
>>> from astropy.coordinates import SkyCoord
>>> from regions import CircleSkyRegion, write_ds9
>>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg)
>>> write_ds9([reg_sky], 'test_write.reg')
>>> with open('test_write.reg') as f:
... print(f.read())
# Region file format: DS9 astropy/regions
fk5
circle(1.000007,2.000002,5.000000)
"""
output = ds9_objects_to_string(regions, coordsys, fmt, radunit)
with open(filename, 'w') as fh:
        fh.write(output)

def crtf_objects_to_string(regions, coordsys='fk5', fmt='.6f', radunit='deg'):
"""
Converts a `list` of `~regions.Region` to CRTF region string.
Parameters
----------
regions : `list`
List of `~regions.Region` objects
coordsys : `str`, optional
Astropy Coordinate system that overrides the coordinate system frame for
all regions. Default is 'fk5'.
fmt : `str`, optional
A python string format defining the output precision. Default is .6f,
which is accurate to 0.0036 arcseconds.
radunit : `str`, optional
This denotes the unit of the radius. Default is deg (degrees)
Returns
-------
region_string : `str`
CRTF region string
Examples
--------
>>> from astropy import units as u
>>> from astropy.coordinates import SkyCoord
>>> from regions import CircleSkyRegion, crtf_objects_to_string
>>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg)
>>> print(crtf_objects_to_string([reg_sky]))
#CRTF
global coord=fk5
+circle[[1.000007deg, 2.000002deg], 5.000000deg]
"""
shapelist = to_shape_list(regions, coordsys)
    return shapelist.to_crtf(coordsys, fmt, radunit)

def write_crtf(regions, filename, coordsys='fk5', fmt='.6f', radunit='deg'):
"""
Converts a `list` of `~regions.Region` to CRTF string and write to file.
Parameters
----------
regions : `list`
List of `~regions.Region` objects
filename : `str`
        Filename in which the string is to be written.
coordsys : `str`, optional
Astropy Coordinate system that overrides the coordinate frames of all
regions. Default is 'fk5'.
fmt : `str`, optional
A python string format defining the output precision. Default is .6f,
which is accurate to 0.0036 arcseconds.
radunit : `str`, optional
This denotes the unit of the radius. Default is deg (degrees)
Examples
--------
>>> from astropy import units as u
>>> from astropy.coordinates import SkyCoord
>>> from regions import CircleSkyRegion, write_crtf
>>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg)
>>> write_crtf([reg_sky], 'test_write.crtf')
>>> with open('test_write.crtf') as f:
... print(f.read())
#CRTF
global coord=fk5
+circle[[1.000007deg, 2.000002deg], 5.000000deg]
"""
output = crtf_objects_to_string(regions, coordsys, fmt, radunit)
with open(filename, 'w') as fh:
        fh.write(output)

def bounding_box(self):
"""Bounding box (`~regions.BoundingBox`)."""
xmin = self.center.x - self.radius
xmax = self.center.x + self.radius
ymin = self.center.y - self.radius
ymax = self.center.y + self.radius
    return BoundingBox.from_float(xmin, xmax, ymin, ymax)

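# Usage sketch (assumes the regions package is installed; bounding_box is
# exposed as a property on pixel regions):
from regions import CirclePixelRegion, PixCoord

reg = CirclePixelRegion(center=PixCoord(x=10, y=20), radius=3)
print(reg.bounding_box)  # covers roughly x in [7, 13] and y in [17, 23]
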
def as_artist(self, origin=(0, 0), **kwargs):
"""
Matplotlib patch object for this region (`matplotlib.patches.Circle`)
Parameters
----------
origin : array_like, optional
The ``(x, y)`` pixel position of the origin of the displayed image.
Default is (0, 0).
kwargs : `dict`
All keywords that a `~matplotlib.patches.Circle` object accepts
Returns
-------
patch : `~matplotlib.patches.Circle`
Matplotlib circle patch
"""
from matplotlib.patches import Circle
xy = self.center.x - origin[0], self.center.y - origin[1]
radius = self.radius
mpl_params = self.mpl_properties_default('patch')
mpl_params.update(kwargs)
    return Circle(xy=xy, radius=radius, **mpl_params)

def bounding_box(self):
"""
The minimal bounding box (`~regions.BoundingBox`) enclosing the
exact rectangular region.
"""
w2 = self.width / 2.
h2 = self.height / 2.
cos_angle = np.cos(self.angle) # self.angle is a Quantity
sin_angle = np.sin(self.angle) # self.angle is a Quantity
dx1 = abs(w2 * cos_angle - h2 * sin_angle)
dy1 = abs(w2 * sin_angle + h2 * cos_angle)
dx2 = abs(w2 * cos_angle + h2 * sin_angle)
dy2 = abs(w2 * sin_angle - h2 * cos_angle)
dx = max(dx1, dx2)
dy = max(dy1, dy2)
xmin = self.center.x - dx
xmax = self.center.x + dx
ymin = self.center.y - dy
ymax = self.center.y + dy
    return BoundingBox.from_float(xmin, xmax, ymin, ymax)

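# Quick numerical check of the half-extent formulas above, against
# explicitly rotated corners (standalone numpy sketch, not regions code):
import numpy as np

w2, h2, angle = 4.0, 1.5, np.deg2rad(30)
c, s = np.cos(angle), np.sin(angle)
dx = max(abs(w2 * c - h2 * s), abs(w2 * c + h2 * s))
dy = max(abs(w2 * s + h2 * c), abs(w2 * s - h2 * c))

corners = np.array([(sx * w2, sy * h2) for sx in (-1, 1) for sy in (-1, 1)])
rot = np.array([[c, -s], [s, c]])  # anti-clockwise rotation matrix
rotated = corners @ rot.T
assert np.isclose(dx, np.abs(rotated[:, 0]).max())
assert np.isclose(dy, np.abs(rotated[:, 1]).max())
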
def as_artist(self, origin=(0, 0), **kwargs):
"""
Matplotlib patch object for this region (`matplotlib.patches.Rectangle`).
    Parameters
    ----------
origin : array_like, optional
The ``(x, y)`` pixel position of the origin of the displayed image.
Default is (0, 0).
kwargs : `dict`
All keywords that a `~matplotlib.patches.Rectangle` object accepts
Returns
-------
patch : `~matplotlib.patches.Rectangle`
        Matplotlib rectangle patch
"""
from matplotlib.patches import Rectangle
xy = self._lower_left_xy()
xy = xy[0] - origin[0], xy[1] - origin[1]
width = self.width
height = self.height
# From the docstring: MPL expects "rotation in degrees (anti-clockwise)"
angle = self.angle.to('deg').value
mpl_params = self.mpl_properties_default('patch')
mpl_params.update(kwargs)
return Rectangle(xy=xy, width=width, height=height,
                     angle=angle, **mpl_params)

def corners(self):
"""
Return the x, y coordinate pairs that define the corners
"""
corners = [(-self.width/2, -self.height/2),
( self.width/2, -self.height/2),
( self.width/2, self.height/2),
(-self.width/2, self.height/2),
]
rotmat = [[np.cos(self.angle), np.sin(self.angle)],
[-np.sin(self.angle), np.cos(self.angle)]]
return np.dot(corners, rotmat) + np.array([self.center.x,
                                               self.center.y])

def to_polygon(self):
"""
Return a 4-cornered polygon equivalent to this rectangle
"""
    x, y = self.corners.T
vertices = PixCoord(x=x, y=y)
return PolygonPixelRegion(vertices=vertices, meta=self.meta,
                              visual=self.visual)

def _lower_left_xy(self):
"""
Compute lower left `xy` position.
This is used for the conversion to matplotlib in ``as_artist``
Taken from http://photutils.readthedocs.io/en/latest/_modules/photutils/aperture/rectangle.html#RectangularAperture.plot
"""
hw = self.width / 2.
hh = self.height / 2.
sint = np.sin(self.angle)
cost = np.cos(self.angle)
dx = (hh * sint) - (hw * cost)
dy = -(hh * cost) - (hw * sint)
x = self.center.x + dx
y = self.center.y + dy
    return x, y

def bounding_box(self):
"""
The minimal bounding box (`~regions.BoundingBox`) enclosing the
exact elliptical region.
"""
# We use the solution described in http://stackoverflow.com/a/88020
# which is to use the parametric equation of an ellipse and to find
# when dx/dt or dy/dt=0.
cos_angle = np.cos(self.angle)
sin_angle = np.sin(self.angle)
tan_angle = np.tan(self.angle)
t1 = np.arctan(-self.height * tan_angle / self.width)
t2 = t1 + np.pi * u.rad
dx1 = 0.5 * self.width * cos_angle * np.cos(t1) - 0.5 * self.height * sin_angle * np.sin(t1)
dx2 = 0.5 * self.width * cos_angle * np.cos(t2) - 0.5 * self.height * sin_angle * np.sin(t2)
if dx1 > dx2:
dx1, dx2 = dx2, dx1
t1 = np.arctan(self.height / tan_angle / self.width)
t2 = t1 + np.pi * u.rad
dy1 = 0.5 * self.height * cos_angle * np.sin(t1) + 0.5 * self.width * sin_angle * np.cos(t1)
dy2 = 0.5 * self.height * cos_angle * np.sin(t2) + 0.5 * self.width * sin_angle * np.cos(t2)
if dy1 > dy2:
dy1, dy2 = dy2, dy1
xmin = self.center.x + dx1
xmax = self.center.x + dx2
ymin = self.center.y + dy1
ymax = self.center.y + dy2
    return BoundingBox.from_float(xmin, xmax, ymin, ymax)

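# For reference, the parametric form being solved above (a derivation note):
#   x(t) = x_c + (w/2) cos(t) cos(theta) - (h/2) sin(t) sin(theta)
#   dx/dt = 0  =>  tan(t) = -(h/w) tan(theta)
#   y(t) = y_c + (h/2) sin(t) cos(theta) + (w/2) cos(t) sin(theta)
#   dy/dt = 0  =>  tan(t) = (h cos(theta)) / (w sin(theta)) = h / (w tan(theta))
# Evaluating at the two roots t1 and t2 = t1 + pi gives the extremal offsets
# dx1/dx2 and dy1/dy2 used above.
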
def as_artist(self, origin=(0, 0), **kwargs):
"""
Matplotlib patch object for this region (`matplotlib.patches.Ellipse`).
    Parameters
    ----------
origin : array_like, optional
The ``(x, y)`` pixel position of the origin of the displayed image.
Default is (0, 0).
kwargs : `dict`
All keywords that a `~matplotlib.patches.Ellipse` object accepts
Returns
-------
patch : `~matplotlib.patches.Ellipse`
Matplotlib ellipse patch
"""
from matplotlib.patches import Ellipse
xy = self.center.x - origin[0], self.center.y - origin[1]
width = self.width
height = self.height
# From the docstring: MPL expects "rotation in degrees (anti-clockwise)"
angle = self.angle.to('deg').value
mpl_params = self.mpl_properties_default('patch')
mpl_params.update(kwargs)
return Ellipse(xy=xy, width=width, height=height, angle=angle,
                   **mpl_params)

def _make_annulus_path(patch_inner, patch_outer):
"""
Defines a matplotlib annulus path from two patches.
This preserves the cubic Bezier curves (CURVE4) of the aperture
paths.
    This is borrowed from the photutils aperture module.
"""
import matplotlib.path as mpath
path_inner = patch_inner.get_path()
transform_inner = patch_inner.get_transform()
path_inner = transform_inner.transform_path(path_inner)
path_outer = patch_outer.get_path()
transform_outer = patch_outer.get_transform()
path_outer = transform_outer.transform_path(path_outer)
verts_inner = path_inner.vertices[:-1][::-1]
verts_inner = np.concatenate((verts_inner, [verts_inner[-1]]))
verts = np.vstack((path_outer.vertices, verts_inner))
codes = np.hstack((path_outer.codes, path_inner.codes))
    return mpath.Path(verts, codes)

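# Usage sketch for _make_annulus_path (standalone matplotlib, illustrative):
import matplotlib.patches as mpatches

inner = mpatches.Circle((0, 0), radius=1)
outer = mpatches.Circle((0, 0), radius=2)
annulus = mpatches.PathPatch(_make_annulus_path(inner, outer),
                             facecolor='0.8', edgecolor='k')
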
def as_artist(self, origin=(0, 0), **kwargs):
"""
Matplotlib patch object for annulus region (`matplotlib.patches.PathPatch`).
Parameters
----------
origin : array_like, optional
The ``(x, y)`` pixel position of the origin of the displayed image.
Default is (0, 0).
kwargs : `dict`
All keywords that a `~matplotlib.patches.PathPatch` object accepts
Returns
-------
patch : `~matplotlib.patches.PathPatch`
Matplotlib patch object
"""
if self.region1.center == self.region2.center and self.operator == op.xor:
import matplotlib.patches as mpatches
patch_inner = self.region1.as_artist(origin=origin)
patch_outer = self.region2.as_artist(origin=origin)
path = self._make_annulus_path(patch_inner, patch_outer)
patch = mpatches.PathPatch(path, **kwargs)
return patch
else:
        raise NotImplementedError

def as_artist(self, origin=(0, 0), **kwargs):
"""
Matplotlib Text object for this region (`matplotlib.text.Text`).
Parameters
----------
origin : array_like, optional
The ``(x, y)`` pixel position of the origin of the displayed image.
Default is (0, 0).
kwargs : `dict`
All keywords that a `~matplotlib.text.Text` object accepts
Returns
-------
text : `~matplotlib.text.Text`
Matplotlib Text object.
"""
from matplotlib.text import Text
mpl_params = self.mpl_properties_default('text')
mpl_params.update(kwargs)
text = Text(self.center.x - origin[0], self.center.y - origin[1],
self.text, **mpl_params)
    return text

def rotate_polygon(lon, lat, lon0, lat0):
"""
Given a polygon with vertices defined by (lon, lat), rotate the polygon
such that the North pole of the spherical coordinates is now at (lon0,
lat0). Therefore, to end up with a polygon centered on (lon0, lat0), the
polygon should initially be drawn around the North pole.
"""
# Create a representation object
polygon = UnitSphericalRepresentation(lon=lon, lat=lat)
# Determine rotation matrix to make it so that the circle is centered
# on the correct longitude/latitude.
m1 = rotation_matrix(-(0.5 * np.pi * u.radian - lat0), axis='y')
m2 = rotation_matrix(-lon0, axis='z')
transform_matrix = m2 * m1
# Apply 3D rotation
polygon = polygon.to_cartesian()
polygon = polygon.transform(transform_matrix)
polygon = UnitSphericalRepresentation.from_cartesian(polygon)
    return polygon.lon, polygon.lat

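# Usage sketch (same imports as rotate_polygon above): draw a small square
# around the North pole, then rotate it so it is centered on (lon0, lat0):
from astropy import units as u

lon = [0, 90, 180, 270] * u.deg
lat = [85, 85, 85, 85] * u.deg
lon_rot, lat_rot = rotate_polygon(lon, lat, lon0=30 * u.deg, lat0=40 * u.deg)
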
def as_artist(self, origin=(0, 0), **kwargs):
"""
Matplotlib Line2D object for this region (`matplotlib.lines.Line2D`).
Parameters
----------
origin : array_like, optional
The ``(x, y)`` pixel position of the origin of the displayed image.
Default is (0, 0).
kwargs : `dict`
All keywords that a `~matplotlib.lines.Line2D` object accepts
Returns
-------
point : `~matplotlib.lines.Line2D`
Matplotlib Line2D object.
"""
from matplotlib.lines import Line2D
mpl_params = self.mpl_properties_default('LINE2D')
mpl_params.update(kwargs)
point = Line2D([self.center.x - origin[0]], [self.center.y - origin[1]],
**mpl_params)
    return point

def read_fits_region(filename, errors='strict'):
"""
    Reads a FITS region file, scans for any FITS region tables (HDUs named
    'REGION') and converts them into `Region` objects.
Parameters
----------
filename : str
The file path
errors : ``warn``, ``ignore``, ``strict``
The error handling scheme to use for handling parsing errors.
The default is 'strict', which will raise a `FITSRegionParserError`.
``warn`` will raise a `FITSRegionParserWarning`, and ``ignore`` will do nothing
(i.e., be silent).
Returns
-------
regions : list
Python list of `regions.Region` objects.
Examples
--------
>>> from astropy.utils.data import get_pkg_data_filename
>>> from regions import read_fits_region
>>> file_read = get_pkg_data_filename('data/region.fits',
... package='regions.io.fits.tests')
>>> regions = read_fits_region(file_read)
"""
regions = []
hdul = fits.open(filename)
for hdu in hdul:
if hdu.name == 'REGION':
table = Table.read(hdu)
wcs = WCS(hdu.header, keysel=['image', 'binary', 'pixel'])
regions_list = FITSRegionParser(table, errors).shapes.to_regions()
for reg in regions_list:
regions.append(reg.to_sky(wcs))
    return regions

def to_shape_list(region_list, coordinate_system='fk5'):
"""
Converts a list of regions into a `regions.ShapeList` object.
Parameters
----------
    region_list: python list
        List of `regions.Region` objects
coordinate_system: str
The astropy coordinate system frame in which all the coordinates present
in the `region_list` will be converted. Default is 'fk5'.
Returns
-------
shape_list: `regions.ShapeList` object
list of `regions.Shape` objects.
"""
shape_list = ShapeList()
for region in region_list:
coord = []
        if isinstance(region, SkyRegion):
            # strip the trailing 'SkyRegion' (9 chars) from the class name
            reg_type = region.__class__.__name__[:-9].lower()
        else:
            # strip the trailing 'PixelRegion' (11 chars) from the class name
            reg_type = region.__class__.__name__[:-11].lower()
for val in regions_attributes[reg_type]:
coord.append(getattr(region, val))
if reg_type == 'polygon':
coord = [x for x in region.vertices]
if coordinate_system:
coordsys = coordinate_system
else:
if isinstance(region, SkyRegion):
coordsys = coord[0].name
else:
coordsys = 'image'
frame = coordinates.frame_transform_graph.lookup_name(coordsys)
new_coord = []
for val in coord:
            if isinstance(val, (Angle, u.Quantity, numbers.Number)):
new_coord.append(val)
elif isinstance(val, PixCoord):
new_coord.append(u.Quantity(val.x, u.dimensionless_unscaled))
new_coord.append(u.Quantity(val.y, u.dimensionless_unscaled))
else:
new_coord.append(Angle(val.transform_to(frame).spherical.lon))
new_coord.append(Angle(val.transform_to(frame).spherical.lat))
meta = dict(region.meta)
meta.update(region.visual)
if reg_type == 'text':
meta['text'] = meta.get('text', meta.pop('label', ''))
include = region.meta.pop('include', True)
shape_list.append(Shape(coordsys, reg_type, new_coord, meta, False,
include))
    return shape_list

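# Usage sketch (mirrors the ds9_objects_to_string example earlier):
from astropy import units as u
from astropy.coordinates import SkyCoord
from regions import CircleSkyRegion

reg = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg)
shapes = to_shape_list([reg], coordinate_system='fk5')
print(shapes.to_ds9(coordsys='fk5'))
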
def to_ds9_meta(shape_meta):
"""
Makes the meta data DS9 compatible by filtering and mapping the valid keys
Parameters
----------
shape_meta: dict
meta attribute of a `regions.Shape` object
Returns
-------
meta : dict
DS9 compatible meta dictionary
"""
# meta keys allowed in DS9.
valid_keys = ['symbol', 'include', 'tag', 'line', 'comment',
'name', 'select', 'highlite', 'fixed', 'label', 'text',
'edit', 'move', 'rotate', 'delete', 'source', 'background']
# visual keys allowed in DS9
valid_keys += ['color', 'dash', 'linewidth', 'font', 'dashlist',
'fill', 'textangle', 'symsize']
# mapped to actual names in DS9
key_mappings = {'symbol': 'point', 'linewidth': 'width', 'label': 'text'}
meta = _to_io_meta(shape_meta, valid_keys, key_mappings)
if 'font' in meta:
meta['font'] += " {0} {1} {2}".format(shape_meta.get('fontsize', 12),
shape_meta.get('fontstyle', 'normal'),
shape_meta.get('fontweight', 'roman'))
    return meta

def _to_io_meta(shape_meta, valid_keys, key_mappings):
"""
    This is used to make metadata compatible with a specific I/O format
    by filtering and mapping to its valid keys
Parameters
----------
shape_meta: dict
meta attribute of a `regions.Region` object
valid_keys : python list
Contains all the valid keys of a particular file format.
key_mappings : python dict
Maps to the actual name of the key in the format.
Returns
-------
meta : dict
io compatible meta dictionary according to valid_keys and key_mappings
"""
meta = dict()
for key in shape_meta:
if key in valid_keys:
meta[key_mappings.get(key, key)] = shape_meta[key]
    return meta

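# Toy example of the filter-and-map behaviour (values are illustrative):
meta = _to_io_meta({'color': 'red', 'linewidth': 2, 'internal': 1},
                   valid_keys=['color', 'linewidth'],
                   key_mappings={'linewidth': 'width'})
assert meta == {'color': 'red', 'width': 2}
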
def to_crtf(self, coordsys='fk5', fmt='.6f', radunit='deg'):
"""
Converts a list of ``regions.Shape`` objects to crtf region strings.
Parameters
----------
coordsys : str
This overrides the coordinate system frame for all regions.
fmt : str
A python string format defining the output precision.
Default is .6f, which is accurate to 0.0036 arcseconds.
radunit : str
This denotes the unit of the radius.
Returns
-------
region_string : str
crtf region string
Examples
--------
TODO
"""
crtf_strings = {
'circle': '{0}circle[[{1:FMT}deg, {2:FMT}deg], {3:FMT}RAD]',
'circleannulus': '{0}annulus[[{1:FMT}deg, {2:FMT}deg], [{3:FMT}RAD, {4:FMT}RAD]]',
'ellipse': '{0}ellipse[[{1:FMT}deg, {2:FMT}deg], [{3:FMT}RAD, {4:FMT}RAD], {5:FMT}deg]',
'rectangle': '{0}rotbox[[{1:FMT}deg, {2:FMT}deg], [{3:FMT}RAD, {4:FMT}RAD], {5:FMT}deg]',
'polygon': '{0}poly[{1}]',
'point': '{0}point[[{1:FMT}deg, {2:FMT}deg]]',
'symbol': '{0}symbol[[{1:FMT}deg, {2:FMT}deg], {symbol}]',
'text': '{0}text[[{1:FMT}deg, {2:FMT}deg], \'{text}\']',
'line': '{0}line[[{1:FMT}deg, {2:FMT}deg], [{3:FMT}deg, {4:FMT}deg]]'
}
output = '#CRTF\n'
if radunit == 'arcsec':
        # arcsec is only a valid radius unit for celestial coordinate systems
if coordsys in coordsys_mapping['CRTF'].values():
radunitstr = '"'
else:
raise ValueError(
'Radius unit arcsec not valid for coordsys {}'.format(
coordsys))
else:
radunitstr = radunit
for key, val in crtf_strings.items():
crtf_strings[key] = val.replace("FMT", fmt).replace("RAD",
radunitstr)
# CASA does not support global coordinate specification, even though the
# documentation for the specification explicitly states that it does.
# output += 'global coord={}\n'.format(coordsys)
for shape in self:
shape.check_crtf()
shape.meta = to_crtf_meta(shape.meta)
# if unspecified, include is True.
# Despite the specification, CASA does *not* support a preceding
# "+". If you want a region included, leave the opening character
# blank.
include = "-" if shape.include in (False, '-') else ""
include += "ann " if shape.meta.get('type', 'reg') == 'ann' else ""
if shape.meta.get('label', "") != "":
shape.meta['label'] = "'{}'".format(shape.meta['label'])
meta_str = ", ".join("{0}={1}".format(key, val) for key, val in
shape.meta.items() if
key not in ('include', 'comment', 'symbol',
'coord', 'text', 'range', 'corr',
'type'))
# the first item should be the coordinates, since CASA cannot
# recognize a region without an inline coordinate specification
# It can be, but does not need to be, comma-separated at the start
meta_str = "coord={0}, ".format(coordsys.upper()) + meta_str
if 'comment' in shape.meta:
meta_str += ", " + shape.meta['comment']
if 'range' in shape.meta:
shape.meta['range'] = [str(str(x).replace(" ", "")) for x in
shape.meta['range']]
meta_str += ", range={}".format(shape.meta['range']).replace("'", "")
if 'corr' in shape.meta:
meta_str += ", corr={}".format(shape.meta['corr']).replace("'", "")
coord = []
if coordsys not in ['image', 'physical']:
for val in shape.coord:
if isinstance(val, Angle):
coord.append(float(val.value))
else:
                    if radunit == '' or radunit is None:
coord.append(float(val.value))
else:
coord.append(float(val.to(radunit).value))
else:
for val in shape.coord:
if isinstance(val, u.Quantity):
coord.append(float(val.value))
else:
coord.append(float(val))
if shape.region_type in ['ellipse', 'rectangle'] and len(shape.coord) % 2 == 1:
coord[-1] = float(shape.coord[-1].to('deg').value)
if shape.region_type == 'polygon':
val = '[{0:' + fmt + '}deg, {1:' + fmt + '}deg]'
temp = [val.format(x, y) for x, y in zip(coord[::2], coord[1::2])]
coord = ", ".join(temp)
line = crtf_strings['polygon'].format(include, coord)
elif shape.region_type == 'point':
if 'symbol' in shape.meta:
line = crtf_strings['symbol'].format(include, *coord,
symbol=shape.meta['symbol'])
else:
line = crtf_strings['point'].format(include, *coord)
elif shape.region_type == 'ellipse':
coord[2:] = [x / 2 for x in coord[2:]]
if len(coord) % 2 == 1:
coord[-1] *= 2
line = crtf_strings['ellipse'].format(include, *coord)
elif shape.region_type == 'text':
line = crtf_strings['text'].format(include, *coord, text=shape.meta['text'])
else:
line = crtf_strings[shape.region_type].format(include, *coord)
if meta_str.strip():
output += "{0}, {1}\n".format(line, meta_str)
else:
output += "{0}\n".format(line)
    return output

def to_ds9(self, coordsys='fk5', fmt='.6f', radunit='deg'):
"""
Converts a list of ``regions.Shape`` objects to ds9 region strings.
Parameters
----------
coordsys : str
This overrides the coordinate system frame for all regions.
fmt : str
A python string format defining the output precision.
Default is .6f, which is accurate to 0.0036 arcseconds.
radunit : str
This denotes the unit of the radius.
Returns
-------
region_string : str
ds9 region string
Examples
--------
TODO
"""
valid_symbols_reverse = {y: x for x, y in valid_symbols_ds9.items()}
ds9_strings = {
'circle': '{0}circle({1:FMT},{2:FMT},{3:FMT}RAD)',
'circleannulus': '{0}annulus({1:FMT},{2:FMT},{3:FMT}RAD,{4:FMT}RAD)',
'ellipse': '{0}ellipse({1:FMT},{2:FMT},{3:FMT}RAD,{4:FMT}RAD,{5:FMT})',
'rectangle': '{0}box({1:FMT},{2:FMT},{3:FMT}RAD,{4:FMT}RAD,{5:FMT})',
'polygon': '{0}polygon({1})',
'point': '{0}point({1:FMT},{2:FMT})',
'line': '{0}line({1:FMT},{2:FMT},{3:FMT},{4:FMT})',
'text': '{0}text({1:FMT},{2:FMT})'
}
output = '# Region file format: DS9 astropy/regions\n'
if radunit == 'arcsec':
        # arcsec is only a valid radius unit for celestial coordinate systems
if coordsys in coordsys_mapping['DS9'].values():
radunitstr = '"'
else:
raise ValueError('Radius unit arcsec not valid for coordsys {}'.format(coordsys))
else:
radunitstr = ''
for key, val in ds9_strings.items():
ds9_strings[key] = val.replace("FMT", fmt).replace("RAD", radunitstr)
output += '{}\n'.format(coordsys)
for shape in self:
shape.check_ds9()
shape.meta = to_ds9_meta(shape.meta)
# if unspecified, include is True.
include = "-" if shape.include in (False, '-') else ""
if 'point' in shape.meta:
shape.meta['point'] = valid_symbols_reverse[shape.meta['point']]
if 'symsize' in shape.meta:
shape.meta['point'] += " {}".format(shape.meta.pop('symsize'))
meta_str = " ".join("{0}={1}".format(key, val) for key, val in
shape.meta.items() if key not in ('include', 'tag', 'comment', 'font', 'text'))
if 'tag' in shape.meta:
meta_str += " " + " ".join(["tag={0}".format(tag) for tag in shape.meta['tag']])
if 'font' in shape.meta:
meta_str += " " + 'font="{0}"'.format(shape.meta['font'])
if shape.meta.get('text', '') != '':
meta_str += " " + 'text={' + shape.meta['text'] + '}'
if 'comment' in shape.meta:
meta_str += " " + shape.meta['comment']
coord = []
if coordsys not in ['image', 'physical']:
for val in shape.coord:
if isinstance(val, Angle):
coord.append(float(val.value))
else:
                    if radunit == '' or radunit is None:
coord.append(float(val.value))
else:
coord.append(float(val.to(radunit).value))
if shape.region_type in ['ellipse', 'rectangle'] and len(shape.coord) % 2 == 1:
coord[-1] = float(shape.coord[-1].to('deg').value)
else:
for val in shape.coord:
if isinstance(val, u.Quantity):
coord.append(float(val.value))
else:
coord.append(float(val))
if shape.region_type in ['polygon', 'line']:
coord = [x+1 for x in coord]
else:
coord[0] += 1
coord[1] += 1
if shape.region_type == 'polygon':
val = "{0:" + fmt + "}"
temp = [val.format(x) for x in coord]
coord = ",".join(temp)
line = ds9_strings['polygon'].format(include, coord)
elif shape.region_type == 'ellipse':
coord[2:] = [x / 2 for x in coord[2:]]
if len(coord) % 2 == 1:
coord[-1] *= 2
line = ds9_strings['ellipse'].format(include, *coord)
else:
line = ds9_strings[shape.region_type].format(include, *coord)
if meta_str.strip():
output += "{0} # {1}\n".format(line, meta_str)
else:
output += "{0}\n".format(line)
    return output

def to_fits(self):
"""
Converts a `~regions.ShapeList` to a `~astropy.table.Table` object.
"""
max_length_coord = 1
coord_x = []
coord_y = []
shapes = []
radius = []
rotangle_deg = []
components = []
reg_reverse_mapping = {value: key for key, value in
reg_mapping['FITS_REGION'].items()}
reg_reverse_mapping['rectangle'] = 'ROTBOX'
reg_reverse_mapping['circleannulus'] = 'ANNULUS'
reg_reverse_mapping['ellipseannulus'] = 'ELLIPTANNULUS'
for num, shape in enumerate(self):
shapes.append(reg_reverse_mapping[shape.region_type])
if shape.region_type == 'polygon':
max_length_coord = max(len(shape.coord)/2, max_length_coord)
coord = [x.value for x in shape.coord]
coord_x.append(coord[::2])
coord_y.append(coord[1::2])
radius.append(0)
rotangle_deg.append(0)
else:
coord_x.append(shape.coord[0].value)
coord_y.append(shape.coord[1].value)
if shape.region_type in ['circle', 'circleannulus', 'point']:
radius.append([float(val) for val in shape.coord[2:]])
rotangle_deg.append(0)
else:
radius.append([float(x) for x in shape.coord[2:-1]])
rotangle_deg.append(shape.coord[-1].to('deg').value)
tag = shape.meta.get('tag', '')
if tag.isdigit():
components.append(int(tag))
else:
components.append(num + 1)
    # pad every value with zeros at the end to make sure that all values
    # in a column have the same length
for i in range(len(self)):
if np.isscalar(coord_x[i]):
coord_x[i] = np.array([coord_x[i]])
if np.isscalar(coord_y[i]):
coord_y[i] = np.array([coord_y[i]])
if np.isscalar(radius[i]):
radius[i] = np.array([radius[i]])
coord_x[i] = np.pad(coord_x[i], (0, int(max_length_coord - len(coord_x[i]))),
'constant', constant_values=(0, 0))
coord_y[i] = np.pad(coord_y[i], (0, int(max_length_coord - len(coord_y[i]))),
'constant', constant_values=(0, 0))
radius[i] = np.pad(radius[i], (0, 4 - len(radius[i])), 'constant',
constant_values=(0, 0))
table = Table([coord_x, coord_y, shapes, radius, rotangle_deg, components],
names=('X', 'Y', 'SHAPE', 'R', 'ROTANG', 'COMPONENT'))
table['X'].unit = 'pix'
table['Y'].unit = 'pix'
table['R'].unit = 'pix'
table['ROTANG'].unit = 'deg'
    return table

def convert_coords(self):
"""
    Process list of coordinates.
    This mainly searches for tuples of coordinates in the coordinate list and
    creates a SkyCoord or PixCoord object from them if appropriate for a
    given region type. This involves again some coordinate transformation,
    so this step could be moved to the parsing process
"""
if self.coordsys in ['image', 'physical']:
coords = self._convert_pix_coords()
else:
coords = self._convert_sky_coords()
if self.region_type == 'line':
coords = [coords[0][0], coords[0][1]]
if self.region_type == 'text':
coords.append(self.meta['text'])
    return coords

def _convert_sky_coords(self):
"""
Convert to sky coordinates
"""
parsed_angles = [(x, y)
for x, y in zip(self.coord[:-1:2], self.coord[1::2])
if (isinstance(x, coordinates.Angle) and isinstance(y, coordinates.Angle))
]
frame = coordinates.frame_transform_graph.lookup_name(self.coordsys)
lon, lat = zip(*parsed_angles)
if hasattr(lon, '__len__') and hasattr(lat, '__len__') and len(lon) == 1 and len(lat) == 1:
# force entries to be scalar if they are length-1
lon, lat = u.Quantity(lon[0]), u.Quantity(lat[0])
else:
# otherwise, they are vector quantities
lon, lat = u.Quantity(lon), u.Quantity(lat)
sphcoords = coordinates.UnitSphericalRepresentation(lon, lat)
coords = [SkyCoord(frame(sphcoords))]
if self.region_type != 'polygon':
        coords += self.coord[len(coords) * 2:]
    return coords

def _convert_pix_coords(self):
"""
Convert to pixel coordinates, `regions.PixCoord`
"""
if self.region_type in ['polygon', 'line']:
# have to special-case polygon in the phys coord case
# b/c can't typecheck when iterating as in sky coord case
coords = [PixCoord(self.coord[0::2], self.coord[1::2])]
else:
temp = [_.value for _ in self.coord]
coord = PixCoord(temp[0], temp[1])
coords = [coord] + temp[2:]
# The angle remains as a quantity object.
# Modulus check makes sure that it works for ellipse/rectangle annulus
if self.region_type in ['ellipse', 'rectangle'] and len(coords) % 2 == 0:
coords[-1] = self.coord[-1]
    return coords

def to_region(self):
"""
    Converts to a ``regions.Region`` object
"""
coords = self.convert_coords()
log.debug(coords)
viz_keywords = ['color', 'dash', 'dashlist', 'width', 'font', 'symsize',
'symbol', 'symsize', 'fontsize', 'fontstyle', 'usetex',
'labelpos', 'labeloff', 'linewidth', 'linestyle',
'point', 'textangle', 'fontweight']
if isinstance(coords[0], SkyCoord):
reg = self.shape_to_sky_region[self.region_type](*coords)
elif isinstance(coords[0], PixCoord):
reg = self.shape_to_pixel_region[self.region_type](*coords)
else:
self._raise_error("No central coordinate")
reg.visual = RegionVisual()
reg.meta = RegionMeta()
# both 'text' and 'label' should be set to the same value, where we
# default to the 'text' value since that is the one used by ds9 regions
label = self.meta.get('text',
self.meta.get('label', ""))
if label != '':
reg.meta['label'] = label
for key in self.meta:
if key in viz_keywords:
reg.visual[key] = self.meta[key]
else:
reg.meta[key] = self.meta[key]
reg.meta['include'] = self.include
    return reg

def check_crtf(self):
"""
Checks for CRTF compatibility.
"""
if self.region_type not in regions_attributes:
raise ValueError("'{0}' is not a valid region type in this package"
"supported by CRTF".format(self.region_type))
if self.coordsys not in valid_coordsys['CRTF']:
raise ValueError("'{0}' is not a valid coordinate reference frame in "
"astropy supported by CRTF".format(self.coordsys)) |
def check_ds9(self):
"""
Checks for DS9 compatibility.
"""
if self.region_type not in regions_attributes:
raise ValueError("'{0}' is not a valid region type in this package"
"supported by DS9".format(self.region_type))
if self.coordsys not in valid_coordsys['DS9']:
raise ValueError("'{0}' is not a valid coordinate reference frame "
"in astropy supported by DS9".format(self.coordsys)) |
def _validate(self):
"""
    Checks whether all the attributes of this object are valid.
"""
if self.region_type not in regions_attributes:
raise ValueError("'{0}' is not a valid region type in this package"
.format(self.region_type))
if self.coordsys not in valid_coordsys['DS9'] + valid_coordsys['CRTF']:
raise ValueError("'{0}' is not a valid coordinate reference frame "
"in astropy".format(self.coordsys)) |
def read_crtf(filename, errors='strict'):
"""
Reads a CRTF region file and returns a list of region objects.
Parameters
----------
filename : `str`
The file path
errors : ``warn``, ``ignore``, ``strict``, optional
The error handling scheme to use for handling parsing errors.
The default is 'strict', which will raise a `~regions.CRTFRegionParserError`.
``warn`` will raise a `~regions.CRTFRegionParserWarning`, and ``ignore`` will do nothing
(i.e., be silent).
Returns
-------
regions : `list`
Python `list` of `~regions.Region` objects.
Examples
--------
>>> from regions import read_crtf
>>> from astropy.utils.data import get_pkg_data_filename
>>> file = get_pkg_data_filename('data/CRTFgeneral.crtf', package='regions.io.crtf.tests')
>>> regs = read_crtf(file, errors='warn')
>>> print(regs[0])
Region: CircleSkyRegion
center: <SkyCoord (FK4: equinox=B1950.000, obstime=B1950.000): (ra, dec) in deg
(273.1, -23.18333333)>
radius: 2.3 arcsec
>>> print(regs[0].meta)
{'frame': 'BARY', 'corr': ['I', 'Q'], 'include': True, 'type': 'ann'}
>>> print(regs[0].visual)
{'color': 'blue'}
"""
with open(filename) as fh:
if regex_begin.search(fh.readline()):
region_string = fh.read()
parser = CRTFParser(region_string, errors)
return parser.shapes.to_regions()
else:
            raise CRTFRegionParserError('Every CRTF region file must start with "#CRTF"')

def parse_line(self, line):
"""
Parses a single line.
"""
# Skip blanks
if line == '':
return
# Skip comments
if regex_comment.search(line):
return
# Special case / header: parse global parameters into metadata
global_parameters = regex_global.search(line)
if global_parameters:
self.parse_global_meta(global_parameters.group('parameters'))
return
# Tries to check the validity of the line.
crtf_line = regex_line.search(line)
if crtf_line:
# Tries to parse the line.
# Finds info about the region.
region = regex_region.search(crtf_line.group('region'))
type_ = region.group('type') or 'reg'
include = region.group('include') or '+'
region_type = region.group('regiontype').lower()
if region_type in self.valid_definition:
helper = CRTFRegionParser(self.global_meta, include, type_, region_type,
*crtf_line.group('region', 'parameters'))
self.shapes.append(helper.shape)
else:
self._raise_error("Not a valid CRTF Region type: '{0}'.".format(region_type))
else:
self._raise_error("Not a valid CRTF line: '{0}'.".format(line))
    return

def parse_global_meta(self, global_meta_str):
"""
    Parses the line starting with global to extract all the valid meta key/value pairs.
"""
if global_meta_str:
global_meta_str = regex_meta.findall(global_meta_str + ',')
if global_meta_str:
for par in global_meta_str:
                if par[0] != '':
val1 = par[0].lower()
val2 = par[1]
else:
val1 = par[2].lower()
val2 = par[3]
val1 = val1.strip()
val2 = val2.strip()
                if val1 in self.valid_global_keys:
if val1 in ('range', 'corr', 'labeloff'):
val2 = val2.split(",")
val2 = [x.strip() for x in val2 if x]
self.global_meta[val1] = val2
else:
self._raise_error("'{0}' is not a valid global meta key".format(val1)) |
def parse(self):
"""
Starting point to parse the CRTF region string.
"""
self.convert_meta()
self.coordsys = self.meta.get('coord', 'image').lower()
self.set_coordsys()
self.convert_coordinates()
    self.make_shape()

def set_coordsys(self):
"""
Mapping to astropy's coordinate system name
# TODO: needs expert attention (Most reference systems are not mapped)
"""
if self.coordsys.lower() in self.coordsys_mapping:
        self.coordsys = self.coordsys_mapping[self.coordsys.lower()]

def convert_coordinates(self):
"""
Convert coordinate string to `~astropy.coordinates.Angle` or `~astropy.units.quantity.Quantity` objects
"""
coord_list_str = regex_coordinate.findall(self.reg_str) + regex_length.findall(self.reg_str)
coord_list = []
if self.region_type == 'poly':
        if len(coord_list_str) < 4:
            self._raise_error('Not in proper format: {} polygon should '
                              'have at least 4 coordinates'.format(self.reg_str))
        if coord_list_str[0] != coord_list_str[-1]:
            self._raise_error("Not in proper format: '{0}', "
                              "in a polygon the last and first coordinates "
                              "should be the same".format(self.reg_str))
else:
if len(coord_list_str) != len(self.language_spec[self.region_type]):
self._raise_error("Not in proper format: '{0}', "
"Does not contain expected number of parameters for the region '{1}'"
.format(self.reg_str, self.region_type))
for attr_spec, val_str in zip(self.language_spec[self.region_type], coord_list_str):
if attr_spec == 'c':
if len(val_str) == 2 and val_str[1] != '':
coord_list.append(CoordinateParser.parse_coordinate(val_str[0]))
coord_list.append(CoordinateParser.parse_coordinate(val_str[1]))
else:
self._raise_error("Not in proper format: {0} should be a coordinate".format(val_str))
if attr_spec == 'pl':
if len(val_str) == 2 and val_str[1] != '':
coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str[0]))
coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str[1]))
else:
self._raise_error("Not in proper format: {0} should be a pair of length".format(val_str))
if attr_spec == 'l':
if isinstance(val_str, six.string_types):
coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str))
else:
self._raise_error("Not in proper format: {0} should be a single length".format(val_str))
if attr_spec == 's':
if self.region_type == 'symbol':
if val_str in valid_symbols:
self.meta['symbol'] = val_str
else:
self._raise_error("Not in proper format: '{0}' should be a symbol".format(val_str))
elif self.region_type == 'text':
self.meta['text'] = val_str[1:-1]
    self.coord = coord_list

def convert_meta(self):
"""
Parses the meta_str to python dictionary and stores in ``meta`` attribute.
"""
if self.meta_str:
self.meta_str = regex_meta.findall(self.meta_str + ',')
if self.meta_str:
for par in self.meta_str:
                if par[0] != '':
val1 = par[0]
val2 = par[1]
else:
val1 = par[2]
val2 = par[3]
val1 = val1.strip()
val2 = val2.strip()
if val1 in CRTFParser.valid_global_keys or val1 == 'label':
if val1 in ('range', 'corr', 'labeloff'):
val2 = val2.split(',')
val2 = [x.strip() for x in val2]
self.meta[val1] = val2
else:
self._raise_error("'{0}' is not a valid meta key".format(val1))
self.meta['include'] = self.include != '-'
self.include = self.meta['include']
if 'range' in self.meta:
self.meta['range'] = [u.Quantity(x) for x in self.meta['range']]
    self.meta['type'] = self.type_

def make_shape(self):
"""
Make shape object
"""
if self.region_type == 'ellipse':
self.coord[2:] = [x * 2 for x in self.coord[2:]]
if len(self.coord) % 2 == 1: # This checks if the angle is present.
self.coord[-1] /= 2
if self.region_type == 'box':
x = (self.coord[0] + self.coord[2]) / 2
y = (self.coord[1] + self.coord[3]) / 2
w = u.Quantity(self.coord[0] - self.coord[2])
h = u.Quantity(self.coord[1] - self.coord[3])
self.coord = [x, y, abs(w), abs(h)]
self.meta.pop('coord', None)
self.shape = Shape(coordsys=self.coordsys,
region_type=reg_mapping['CRTF'][self.region_type],
coord=self.coord,
meta=self.meta,
composite=False,
include=self.include
                           )

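# Worked example of the 'box' corner-to-center conversion above (illustrative
# numbers): corners (x1, y1) = (10, 4) and (x2, y2) = (6, 8) give
#   center = ((10 + 6)/2, (4 + 8)/2) = (8, 6)
#   width  = |10 - 6| = 4,  height = |4 - 8| = 4
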
def parse_coordinate(string_rep):
"""
Parse a single coordinate
"""
# Any CRTF coordinate representation (sexagesimal or degrees)
if 'pix' in string_rep:
return u.Quantity(string_rep[:-3], u.dimensionless_unscaled)
if 'h' in string_rep or 'rad' in string_rep:
return coordinates.Angle(string_rep)
if len(string_rep.split('.')) >= 3:
string_rep = string_rep.replace('.', ':', 2)
    return coordinates.Angle(string_rep, u.deg)

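# Usage sketch for the CRTF coordinate forms handled above:
parse_coordinate('100pix')     # 'pix' suffix -> dimensionless Quantity(100)
parse_coordinate('12h30m00s')  # hour-angle form -> coordinates.Angle
parse_coordinate('-23.11.00')  # dotted sexagesimal -> Angle('-23:11:00', deg)
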
def parse_angular_length_quantity(string_rep):
"""
Given a string that is a number and a unit, return a
    Quantity of that string. Raise an error if there is no unit, e.g.:
50" -> 50*u.arcsec
50 -> CRTFRegionParserError : Units must be specified for 50
"""
unit_mapping = {
'deg': u.deg,
'rad': u.rad,
'arcmin': u.arcmin,
'arcsec': u.arcsec,
'pix': u.dimensionless_unscaled,
'"': u.arcsec,
"'": u.arcmin,
}
    regex_str = re.compile(r'([0-9+,-.]*)(.*)')
    match = regex_str.search(string_rep)
    unit = match.group(2)
    if unit:
        if unit in unit_mapping:
            return u.Quantity(match.group(1), unit=unit_mapping[unit])
        return u.Quantity(match.group(1))
else:
        raise CRTFRegionParserError('Units must be specified for {0} '.format(string_rep))

def fits_region_objects_to_table(regions):
"""
Converts list of regions to FITS region table.
Parameters
----------
regions : list
List of `regions.Region` objects
Returns
-------
region_string : `~astropy.table.Table`
FITS region table
Examples
--------
>>> from regions import CirclePixelRegion, PixCoord
>>> reg_pixel = CirclePixelRegion(PixCoord(1, 2), 5)
>>> table = fits_region_objects_to_table([reg_pixel])
>>> print(table)
X [1] Y [1] SHAPE R [4] ROTANG COMPONENT
pix pix pix deg
----- ----- ------ ---------- ------ ---------
1.0 2.0 circle 5.0 .. 0.0 0 1
"""
for reg in regions:
if isinstance(reg, SkyRegion):
            raise TypeError('Every region must be a pixel region, got {}'.format(reg))
shape_list = to_shape_list(regions, coordinate_system='image')
    return shape_list.to_fits()

def write_fits_region(filename, regions, header=None):
"""
Converts list of regions to FITS region table and write to a file.
Parameters
----------
filename: str
        Filename in which the table is to be written.
regions: list
List of `regions.Region` objects
header: `~astropy.io.fits.header.Header` object
The FITS header.
Examples
--------
>>> from astropy.utils.data import get_pkg_data_filename
>>> from astropy.io import fits
>>> file_sample = get_pkg_data_filename('data/fits_region.fits', package='regions.io.fits.tests')
>>> from regions import CirclePixelRegion, PixCoord, write_fits_region
>>> reg_pixel = CirclePixelRegion(PixCoord(1, 2), 5)
>>> hdul = fits.open(file_sample)
>>> write_fits_region('region_output.fits', regions=[reg_pixel], header=hdul[1].header)
"""
output = fits_region_objects_to_table(regions)
bin_table = fits.BinTableHDU(data=output, header=header)
    bin_table.writeto(filename)

def make_example_dataset(data='simulated', config=None):
"""Make example dataset.
This is a factory function for ``ExampleDataset`` objects.
The following config options are available (default values shown):
* ``crval = 0, 0``
* ``crpix = 180, 90``
* ``cdelt = -1, 1``
* ``shape = 180, 360``
* ``ctype = 'GLON-AIT', 'GLAT-AIT'``
Parameters
----------
data : {'simulated', 'fermi'}
Which dataset to use
config : dict or None
Configuration options
Returns
-------
dataset : ``ExampleDataset``
Example dataset object
Examples
--------
Make an example dataset:
>>> from regions import make_example_dataset
>>> config = dict(crpix=(18, 9), cdelt=(-10, 10), shape=(18, 36))
>>> dataset = make_example_dataset(data='simulated', config=config)
Access properties of the ``dataset`` object:
>>> dataset.source_table
>>> dataset.event_table
    >>> dataset.wcs
    >>> dataset.image
    >>> dataset.hdu_list
"""
if data == 'simulated':
return ExampleDatasetSimulated(config=config)
elif data == 'fermi':
return ExampleDatasetFermi(config=config)
else:
        raise ValueError('Invalid selection data: {}'.format(data))

def _table_to_bintable(table):
"""Convert `~astropy.table.Table` to `astropy.io.fits.BinTable`."""
data = table.as_array()
header = fits.Header()
header.update(table.meta)
name = table.meta.pop('name', None)
    return fits.BinTableHDU(data, header, name=name)

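# Usage sketch for _table_to_bintable:
from astropy.table import Table

t = Table({'A': [1, 2]})
t.meta['name'] = 'EVENTS'
hdu = _table_to_bintable(t)  # BinTableHDU named 'EVENTS'
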
def wcs(self):
"""World coordinate system (`~astropy.wcs.WCS`)."""
wcs = WCS(naxis=2)
wcs.wcs.crval = self.config['crval']
wcs.wcs.crpix = self.config['crpix']
wcs.wcs.cdelt = self.config['cdelt']
wcs.wcs.ctype = self.config['ctype']
    return wcs

def image(self):
"""Counts image (`~astropy.io.fits.ImageHDU`)."""
events = self.event_table
skycoord = SkyCoord(events['GLON'], events['GLAT'], unit='deg', frame='galactic')
pixcoord = PixCoord.from_sky(skycoord=skycoord, wcs=self.wcs)
shape = self.config['shape']
bins = [np.arange(shape[0] + 1), np.arange(shape[1] + 1)]
sample = np.vstack((pixcoord.y, pixcoord.x)).T
data, _ = np.histogramdd(sample=sample, bins=bins)
data = data.astype('float32')
header = self.wcs.to_header()
    return fits.ImageHDU(data=data, header=header, name='image')

def hdu_list(self):
"""HDU list (`~astropy.io.fits.HDUList`).
    Different pieces collected together in an HDU list.
This method makes it easy to write the example dataset to a FITS
file with multiple HDUs.
"""
hdu_list = fits.HDUList()
hdu = _table_to_bintable(self.source_table)
hdu.name = 'sources'
hdu_list.append(hdu)
hdu = _table_to_bintable(self.event_table)
hdu.name = 'events'
hdu_list.append(hdu)
hdu = self.image
hdu.name = 'image'
hdu_list.append(hdu)
    return hdu_list

def source_table(self):
"""Source table (`~astropy.table.Table`).
Columns: GLON, GLAT, COUNTS
"""
table = Table()
table['GLON'] = np.array([0, 45, 45], dtype='float32')
table['GLAT'] = np.array([0, 0, 45], dtype='float32')
table['COUNTS'] = np.array([100, 100, 100], dtype='int32')
    return table

def event_table(self):
"""Event table (`~astropy.table.Table`).
Columns: GLON, GLAT, SOURCE_IDX
"""
# Create event list table for each source
tables = []
for source in self.source_table:
lon = source['GLON'] * np.ones(source['COUNTS'])
lat = source['GLAT'] * np.ones(source['COUNTS'])
coord = SkyCoord(lon, lat, unit='deg', frame='galactic')
# TODO: scatter positions assuming Gaussian PSF on the sky
# using SkyOffsetFrame.
table = Table()
table['GLON'] = lon
table['GLAT'] = lat
table['SOURCE_IDX'] = source.index
tables.append(table)
# Stack all tables together
table = table_vstack(tables)
    return table

def source_table(self):
"""Source table (`~astropy.table.Table`).
Columns: GLON, GLAT, COUNTS
"""
url = 'https://github.com/gammapy/gammapy-extra/raw/master/datasets/fermi_2fhl/gll_psch_v08.fit.gz'
table = Table.read(url, hdu='2FHL Source Catalog')
table.rename_column('Npred', 'COUNTS')
table.keep_columns(['GLON', 'GLAT', 'COUNTS'])
table.meta.clear()
    return table

def read_ds9(filename, errors='strict'):
"""
Read a DS9 region file in as a `list` of `~regions.Region` objects.
Parameters
----------
filename : `str`
The file path
errors : ``warn``, ``ignore``, ``strict``, optional
The error handling scheme to use for handling parsing errors.
The default is 'strict', which will raise a `~regions.DS9RegionParserError`.
``warn`` will raise a `~regions.DS9RegionParserWarning`, and
``ignore`` will do nothing (i.e., be silent).
Returns
-------
regions : `list`
Python list of `~regions.Region` objects.
Examples
--------
>>> from regions import read_ds9
>>> from astropy.utils.data import get_pkg_data_filename
>>> file = get_pkg_data_filename('data/physical_reference.reg', package='regions.io.ds9.tests')
>>> regs = read_ds9(file, errors='warn')
>>> print(regs[0])
Region: CirclePixelRegion
center: PixCoord(x=330.0, y=1090.0)
radius: 40.0
>>> print(regs[0].meta)
{'label': 'Circle', 'select': '1', 'highlite': '1', 'fixed': '0', 'edit': '1', 'move': '1', 'delete': '1', 'source': '1', 'tag': ['{foo}', '{foo bar}'], 'include': True}
>>> print(regs[0].visual)
{'dashlist': '8 3', 'dash': '0', 'color': 'pink', 'linewidth': '3', 'font': 'times', 'fontsize': '10', 'fontstyle': 'normal', 'fontweight': 'roman'}
"""
with open(filename) as fh:
region_string = fh.read()
parser = DS9Parser(region_string, errors=errors)
        return parser.shapes.to_regions()

def parse_coordinate(string_rep, unit):
"""
Parse a single coordinate
"""
# explicit radian ('r') value
if string_rep[-1] == 'r':
return coordinates.Angle(string_rep[:-1], unit=u.rad)
# explicit image ('i') and physical ('p') pixels
elif string_rep[-1] in ['i', 'p']:
return u.Quantity(string_rep[:-1]) - 1
# Any ds9 coordinate representation (sexagesimal or degrees)
elif 'd' in string_rep or 'h' in string_rep:
return coordinates.Angle(string_rep)
    elif unit == 'hour_or_deg':
if ':' in string_rep:
spl = tuple([float(x) for x in string_rep.split(":")])
return coordinates.Angle(spl, u.hourangle)
else:
ang = float(string_rep)
return coordinates.Angle(ang, u.deg)
elif unit.is_equivalent(u.deg):
# return coordinates.Angle(string_rep, unit=unit)
if ':' in string_rep:
ang = tuple([float(x) for x in string_rep.split(":")])
else:
ang = float(string_rep)
return coordinates.Angle(ang, u.deg)
elif unit.is_equivalent(u.dimensionless_unscaled):
return u.Quantity(float(string_rep), unit) - 1
else:
        return u.Quantity(float(string_rep), unit)

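# Usage sketch for the DS9 coordinate forms handled above:
from astropy import units as u

parse_coordinate('3:30:00', unit='hour_or_deg')  # sexagesimal -> hourangle
parse_coordinate('52.5d', unit=u.deg)            # explicit degrees -> Angle
parse_coordinate('100i', unit=u.dimensionless_unscaled)  # image pixel -> 99.0
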
def parse_angular_length_quantity(string_rep, unit=u.deg):
"""
Given a string that is either a number or a number and a unit, return a
Quantity of that string. e.g.:
23.9 -> 23.9*u.deg
50" -> 50*u.arcsec
"""
unit_mapping = {
'"': u.arcsec,
"'": u.arcmin,
'd': u.deg,
'r': u.rad,
'i': u.dimensionless_unscaled,
'p': u.dimensionless_unscaled
}
has_unit = string_rep[-1] not in string.digits
if has_unit:
unit = unit_mapping[string_rep[-1]]
return u.Quantity(float(string_rep[:-1]), unit=unit)
else:
        return u.Quantity(float(string_rep), unit=unit)

def set_coordsys(self, coordsys):
"""
Transform coordinate system
# TODO: needs expert attention
"""
if coordsys in self.coordsys_mapping:
self.coordsys = self.coordsys_mapping[coordsys]
else:
        self.coordsys = coordsys

def run(self):
"""
Run all steps
"""
for line_ in self.region_string.split('\n'):
for line in line_.split(";"):
self.parse_line(line)
    log.debug('Global state: {}'.format(self))

def parse_line(self, line):
"""
Parse one line
"""
log.debug('Parsing {}'.format(line))
# Skip blanks
if line == '':
return
# Skip comments
if line[0] == '#':
return
# Special case / header: parse global parameters into metadata
if line.lstrip()[:6] == 'global':
self.global_meta = self.parse_meta(line)
# global_meta can specify "include=1"; never seen other options
# used but presumably =0 means false
self.global_meta['include'] = (False if
self.global_meta.get('include') in
('0', 'False', False) else True)
return
# Try to parse the line
region_type_search = regex_global.search(line)
if region_type_search:
include = region_type_search.groups()[0]
region_type = region_type_search.groups()[1]
else:
self._raise_error("No region type found for line '{0}'.".format(line))
return
if region_type in self.coordinate_systems:
# Found coord system definition
self.set_coordsys(region_type)
return
if region_type not in DS9RegionParser.language_spec:
self._raise_error("Region type '{0}' was identified, but it is not one of "
"the known region types.".format(region_type))
return
else:
# Found region specification,
region_end = region_type_search.span()[1]
        self.parse_region(include, region_type, region_end, line)

def parse_meta(meta_str):
"""
Parse the metadata for a single ds9 region string.
Parameters
----------
meta_str : `str`
Meta string, the metadata is everything after the close-paren of the
region coordinate specification. All metadata is specified as
key=value pairs separated by whitespace, but sometimes the values
can also be whitespace separated.
Returns
-------
meta : `~collections.OrderedDict`
Dictionary containing the meta data
"""
keys_vals = [(x, y) for x, _, y in regex_meta.findall(meta_str.strip())]
extra_text = regex_meta.split(meta_str.strip())[-1]
result = OrderedDict()
for key, val in keys_vals:
# regex can include trailing whitespace or inverted commas
# remove it
val = val.strip().strip("'").strip('"')
if key == 'text':
val = val.lstrip("{").rstrip("}")
if key in result:
if key == 'tag':
result[key].append(val)
else:
raise ValueError("Duplicate key {0} found".format(key))
else:
if key == 'tag':
result[key] = [val]
else:
result[key] = val
if extra_text:
result['comment'] = extra_text
    return result

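# Usage sketch (exact tokenisation depends on regex_meta; the expected output
# is inferred from the read_ds9 example earlier in this file):
meta = parse_meta('color=green tag={foo} tag={foo bar} text={Hello}')
# -> {'color': 'green', 'tag': ['{foo}', '{foo bar}'], 'text': 'Hello'}
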
def parse_region(self, include, region_type, region_end, line):
"""
Extract a Shape from a region string
"""
if self.coordsys is None:
raise DS9RegionParserError("No coordinate system specified and a"
" region has been found.")
else:
helper = DS9RegionParser(coordsys=self.coordsys,
include=include,
region_type=region_type,
region_end=region_end,
global_meta=self.global_meta,
line=line)
helper.parse()
self.shapes.append(helper.shape) |
def parse(self):
"""
Convert line to shape object
"""
log.debug(self)
self.parse_composite()
self.split_line()
self.convert_coordinates()
self.convert_meta()
self.make_shape()
log.debug(self) |
def split_line(self):
"""
Split line into coordinates and meta string
"""
        # index of the '#' symbol, or -1 if it is not found
hash_or_end = self.line.find("#")
temp = self.line[self.region_end:hash_or_end].strip(" |")
self.coord_str = regex_paren.sub("", temp)
# don't want any meta_str if there is no metadata found
if hash_or_end >= 0:
self.meta_str = self.line[hash_or_end:]
else:
self.meta_str = "" |
def convert_coordinates(self):
"""
Convert coordinate string to objects
"""
coord_list = []
# strip out "null" elements, i.e. ''. It might be possible to eliminate
# these some other way, i.e. with regex directly, but I don't know how.
# We need to copy in order not to burn up the iterators
elements = [x for x in regex_splitter.split(self.coord_str) if x]
element_parsers = self.language_spec[self.region_type]
for ii, (element, element_parser) in enumerate(zip(elements,
element_parsers)):
if element_parser is coordinate:
unit = self.coordinate_units[self.coordsys][ii % 2]
coord_list.append(element_parser(element, unit))
elif self.coordinate_units[self.coordsys][0] is u.dimensionless_unscaled:
coord_list.append(element_parser(element, unit=u.dimensionless_unscaled))
else:
coord_list.append(element_parser(element))
if self.region_type in ['ellipse', 'box'] and len(coord_list) % 2 == 1:
coord_list[-1] = CoordinateParser.parse_angular_length_quantity(elements[len(coord_list)-1])
# Reset iterator for ellipse and annulus
# Note that this cannot be done with copy.deepcopy on python2
if self.region_type in ['ellipse', 'annulus']:
self.language_spec[self.region_type] = itertools.chain(
(coordinate, coordinate), itertools.cycle((radius,)))
self.coord = coord_list |
def convert_meta(self):
"""
Convert meta string to dict
"""
meta_ = DS9Parser.parse_meta(self.meta_str)
self.meta = copy.deepcopy(self.global_meta)
self.meta.update(meta_)
# the 'include' is not part of the metadata string;
# it is pre-parsed as part of the shape type and should always
# override the global one
self.include = self.meta.get('include', True) if self.include == '' else self.include != '-'
self.meta['include'] = self.include |
def make_shape(self):
"""
Make shape object
"""
        # In DS9, an ellipse can also represent an elliptical annulus.
        # For an elliptical annulus, the angle is optional.
if self.region_type == 'ellipse':
self.coord[2:] = [x * 2 for x in self.coord[2:]]
if len(self.coord) % 2 == 1: # This checks if angle is present
self.coord[-1] /= 2
if 'point' in self.meta:
point = self.meta['point'].split(" ")
if len(point) > 1:
self.meta['symsize'] = point[1]
self.meta['point'] = valid_symbols_ds9[point[0]]
if 'font' in self.meta:
fonts = self.meta['font'].split(" ")
keys = ['font', 'fontsize', 'fontstyle', 'fontweight']
for i, val in enumerate(fonts):
self.meta[keys[i]] = val
self.meta.pop('coord', None)
self.shape = Shape(coordsys=self.coordsys,
region_type=reg_mapping['DS9'][self.region_type],
coord=self.coord,
meta=self.meta,
composite=self.composite,
include=self.include,
) |
def _validate(val, name, expected='any'):
"""Validate that a given object is an appropriate `PixCoord`.
This is used for input validation throughout the regions package,
especially in the `__init__` method of pixel region classes.
Parameters
----------
val : `PixCoord`
The object to check
name : str
Parameter name (used for error messages)
expected : {'any', 'scalar', 'not scalar'}
What kind of PixCoord to check for
Returns
-------
val : `PixCoord`
The input object (at the moment unmodified, might do fix-ups here later)
"""
if not isinstance(val, PixCoord):
raise TypeError('{} must be a PixCoord'.format(name))
if expected == 'any':
pass
elif expected == 'scalar':
if not val.isscalar:
raise ValueError('{} must be a scalar PixCoord'.format(name))
elif expected == 'not scalar':
if val.isscalar:
raise ValueError('{} must be a non-scalar PixCoord'.format(name))
else:
raise ValueError('Invalid argument for `expected`: {}'.format(expected))
return val |
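# Usage sketch for _validate (assumes the PixCoord class from the regions
# package is installed and importable):
from regions import PixCoord

center = _validate(PixCoord(x=3, y=4), name='center', expected='scalar')
verts = _validate(PixCoord(x=[1, 2], y=[3, 4]), name='verts',
                  expected='not scalar')
try:
    _validate((3, 4), name='center')
except TypeError as err:
    print(err)  # -> center must be a PixCoord |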
def to_sky(self, wcs, origin=_DEFAULT_WCS_ORIGIN, mode=_DEFAULT_WCS_MODE):
"""Convert this `PixCoord` to `~astropy.coordinates.SkyCoord`.
Calls :meth:`astropy.coordinates.SkyCoord.from_pixel`.
See parameter description there.
"""
return SkyCoord.from_pixel(
xp=self.x, yp=self.y, wcs=wcs,
origin=origin, mode=mode,
) |
def from_sky(cls, skycoord, wcs, origin=_DEFAULT_WCS_ORIGIN, mode=_DEFAULT_WCS_MODE):
"""Create `PixCoord` from `~astropy.coordinates.SkyCoord`.
Calls :meth:`astropy.coordinates.SkyCoord.to_pixel`.
See parameter description there.
"""
x, y = skycoord.to_pixel(wcs=wcs, origin=origin, mode=mode)
return cls(x=x, y=y) |
def separation(self, other):
r"""Separation to another pixel coordinate.
This is the two-dimensional cartesian separation :math:`d` with
.. math::
d = \sqrt{(x_1 - x_2) ^ 2 + (y_1 - y_2) ^ 2}
Parameters
----------
other : `PixCoord`
Other pixel coordinate
Returns
-------
        separation : `~numpy.ndarray`
Separation in pixels
"""
dx = other.x - self.x
dy = other.y - self.y
return np.hypot(dx, dy) |
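# The separation is the elementwise Euclidean distance; a minimal check
# with hypothetical coordinates (assumes the regions package):
from regions import PixCoord

a = PixCoord(x=0, y=0)
b = PixCoord(x=3, y=4)
assert a.separation(b) == 5.0  # hypot(3, 4) |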
def as_artist(self, origin=(0, 0), **kwargs):
"""
Matplotlib patch object for this region (`matplotlib.patches.Polygon`).
        Parameters
        ----------
origin : array_like, optional
The ``(x, y)`` pixel position of the origin of the displayed image.
Default is (0, 0).
kwargs : `dict`
All keywords that a `~matplotlib.patches.Polygon` object accepts
Returns
-------
patch : `~matplotlib.patches.Polygon`
Matplotlib polygon patch
"""
from matplotlib.patches import Polygon
xy = np.vstack([self.vertices.x - origin[0],
self.vertices.y - origin[1]]).transpose()
mpl_params = self.mpl_properties_default('patch')
mpl_params.update(kwargs)
return Polygon(xy=xy, **mpl_params) |
def skycoord_to_pixel_scale_angle(skycoord, wcs, small_offset=1 * u.arcsec):
"""
Convert a set of SkyCoord coordinates into pixel coordinates, pixel
scales, and position angles.
Parameters
----------
skycoord : `~astropy.coordinates.SkyCoord`
Sky coordinates
wcs : `~astropy.wcs.WCS`
The WCS transformation to use
small_offset : `~astropy.units.Quantity`
A small offset to use to compute the angle
Returns
-------
pixcoord : `~regions.PixCoord`
Pixel coordinates
    scale : float
        The pixel scale at each location, in pixels per degree
angle : `~astropy.units.Quantity`
The position angle of the celestial coordinate system in pixel space.
"""
# Convert to pixel coordinates
x, y = skycoord_to_pixel(skycoord, wcs, mode=skycoord_to_pixel_mode)
pixcoord = PixCoord(x=x, y=y)
# We take a point directly 'above' (in latitude) the position requested
# and convert it to pixel coordinates, then we use that to figure out the
# scale and position angle of the coordinate system at the location of
# the points.
# Find the coordinates as a representation object
r_old = skycoord.represent_as('unitspherical')
    # Add a small perturbation in the latitude direction (longitude is
    # harder to use because it is not directly an angular offset).
dlat = small_offset
r_new = UnitSphericalRepresentation(r_old.lon, r_old.lat + dlat)
coords_offset = skycoord.realize_frame(r_new)
# Find pixel coordinates of offset coordinates
x_offset, y_offset = skycoord_to_pixel(coords_offset, wcs,
mode=skycoord_to_pixel_mode)
# Find vector
dx = x_offset - x
dy = y_offset - y
# Find the length of the vector
scale = np.hypot(dx, dy) / dlat.to('degree').value
# Find the position angle
angle = np.arctan2(dy, dx) * u.radian
return pixcoord, scale, angle |
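# The scale and angle come from a plain finite difference in pixel space;
# a minimal numeric sketch of that final step, with hypothetical dx, dy
# standing in for the pixel offsets produced by a 1 arcsec latitude step:
import numpy as np
from astropy import units as u

dx, dy = 0.0, 2.0                 # offset point lands 2 pixels straight 'up'
dlat = 1 * u.arcsec
scale = np.hypot(dx, dy) / dlat.to('degree').value
angle = np.arctan2(dy, dx) * u.radian
print(scale)                      # ~7200 pixels per degree
print(angle.to('deg'))            # 90 deg position angle |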
def assert_angle(name, q):
"""
Check that ``q`` is an angular `~astropy.units.Quantity`.
"""
if isinstance(q, u.Quantity):
if q.unit.physical_type == 'angle':
pass
else:
raise ValueError("{0} should have angular units".format(name))
else:
raise TypeError("{0} should be a Quantity instance".format(name)) |
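# assert_angle accepts any angular Quantity and rejects everything else;
# a short usage sketch (assumes astropy is installed):
from astropy import units as u

assert_angle('radius', 3 * u.arcsec)   # passes silently
# assert_angle('radius', 3 * u.m)      # would raise ValueError
# assert_angle('radius', 3.0)          # would raise TypeError |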
def _silence():
"""A context manager that silences sys.stdout and sys.stderr."""
old_stdout = sys.stdout
old_stderr = sys.stderr
sys.stdout = _DummyFile()
sys.stderr = _DummyFile()
exception_occurred = False
try:
yield
except:
exception_occurred = True
# Go ahead and clean up so that exception handling can work normally
sys.stdout = old_stdout
sys.stderr = old_stderr
raise
if not exception_occurred:
sys.stdout = old_stdout
sys.stderr = old_stderr |
def use_astropy_helpers(**kwargs):
"""
Ensure that the `astropy_helpers` module is available and is importable.
This supports automatic submodule initialization if astropy_helpers is
included in a project as a git submodule, or will download it from PyPI if
necessary.
Parameters
----------
path : str or None, optional
A filesystem path relative to the root of the project's source code
that should be added to `sys.path` so that `astropy_helpers` can be
imported from that path.
If the path is a git submodule it will automatically be initialized
and/or updated.
The path may also be to a ``.tar.gz`` archive of the astropy_helpers
source distribution. In this case the archive is automatically
unpacked and made temporarily available on `sys.path` as a ``.egg``
archive.
If `None` skip straight to downloading.
download_if_needed : bool, optional
If the provided filesystem path is not found an attempt will be made to
download astropy_helpers from PyPI. It will then be made temporarily
available on `sys.path` as a ``.egg`` archive (using the
        ``setup_requires`` feature of setuptools). If the ``--offline`` option
is given at the command line the value of this argument is overridden
to `False`.
index_url : str, optional
If provided, use a different URL for the Python package index than the
main PyPI server.
use_git : bool, optional
If `False` no git commands will be used--this effectively disables
support for git submodules. If the ``--no-git`` option is given at the
command line the value of this argument is overridden to `False`.
auto_upgrade : bool, optional
By default, when installing a package from a non-development source
        distribution ah_bootstrap will try to automatically check for patch
releases to astropy-helpers on PyPI and use the patched version over
any bundled versions. Setting this to `False` will disable that
functionality. If the ``--offline`` option is given at the command line
the value of this argument is overridden to `False`.
offline : bool, optional
        If `True`, disable all actions that require an internet connection,
        including downloading packages from the package index and fetching
        updates to any git submodule. Defaults to `False`.
"""
global BOOTSTRAPPER
config = BOOTSTRAPPER.config
config.update(**kwargs)
# Create a new bootstrapper with the updated configuration and run it
BOOTSTRAPPER = _Bootstrapper(**config)
BOOTSTRAPPER.run() |
def config(self):
"""
A `dict` containing the options this `_Bootstrapper` was configured
with.
"""
return dict((optname, getattr(self, optname))
for optname, _ in CFG_OPTIONS if hasattr(self, optname)) |
def get_local_directory_dist(self):
"""
Handle importing a vendored package from a subdirectory of the source
distribution.
"""
if not os.path.isdir(self.path):
return
log.info('Attempting to import astropy_helpers from {0} {1!r}'.format(
'submodule' if self.is_submodule else 'directory',
self.path))
dist = self._directory_import()
if dist is None:
log.warn(
'The requested path {0!r} for importing {1} does not '
'exist, or does not contain a copy of the {1} '
'package.'.format(self.path, PACKAGE_NAME))
elif self.auto_upgrade and not self.is_submodule:
# A version of astropy-helpers was found on the available path, but
# check to see if a bugfix release is available on PyPI
upgrade = self._do_upgrade(dist)
if upgrade is not None:
dist = upgrade
return dist |
def get_local_file_dist(self):
"""
Handle importing from a source archive; this also uses setup_requires
but points easy_install directly to the source archive.
"""
if not os.path.isfile(self.path):
return
log.info('Attempting to unpack and import astropy_helpers from '
'{0!r}'.format(self.path))
try:
dist = self._do_download(find_links=[self.path])
except Exception as e:
if DEBUG:
raise
log.warn(
'Failed to import {0} from the specified archive {1!r}: '
'{2}'.format(PACKAGE_NAME, self.path, str(e)))
dist = None
if dist is not None and self.auto_upgrade:
# A version of astropy-helpers was found on the available path, but
# check to see if a bugfix release is available on PyPI
upgrade = self._do_upgrade(dist)
if upgrade is not None:
dist = upgrade
return dist |
def _directory_import(self):
"""
Import astropy_helpers from the given path, which will be added to
sys.path.
        Returns the distribution if the import succeeded, or None otherwise.
        """
        path = os.path.abspath(self.path)
        # Use an empty WorkingSet rather than the main
        # pkg_resources.working_set, since on older versions of setuptools
        # this will raise a VersionConflict when trying to install an upgrade
ws = pkg_resources.WorkingSet([])
ws.add_entry(path)
dist = ws.by_key.get(DIST_NAME)
if dist is None:
# We didn't find an egg-info/dist-info in the given path, but if a
# setup.py exists we can generate it
setup_py = os.path.join(path, 'setup.py')
if os.path.isfile(setup_py):
# We use subprocess instead of run_setup from setuptools to
# avoid segmentation faults - see the following for more details:
# https://github.com/cython/cython/issues/2104
sp.check_output([sys.executable, 'setup.py', 'egg_info'], cwd=path)
for dist in pkg_resources.find_distributions(path, True):
# There should be only one...
return dist
return dist |
def _check_submodule(self):
"""
Check if the given path is a git submodule.
See the docstrings for ``_check_submodule_using_git`` and
``_check_submodule_no_git`` for further details.
"""
if (self.path is None or
(os.path.exists(self.path) and not os.path.isdir(self.path))):
return False
if self.use_git:
return self._check_submodule_using_git()
else:
return self._check_submodule_no_git() |
def _check_submodule_using_git(self):
"""
Check if the given path is a git submodule. If so, attempt to initialize
and/or update the submodule if needed.
        This function makes calls to the ``git`` command in subprocesses. The
        ``_check_submodule_no_git`` method uses pure Python to check whether
        the given path looks like a git submodule, but it cannot perform updates.
"""
cmd = ['git', 'submodule', 'status', '--', self.path]
try:
log.info('Running `{0}`; use the --no-git option to disable git '
'commands'.format(' '.join(cmd)))
returncode, stdout, stderr = run_cmd(cmd)
except _CommandNotFound:
# The git command simply wasn't found; this is most likely the
# case on user systems that don't have git and are simply
# trying to install the package from PyPI or a source
# distribution. Silently ignore this case and simply don't try
# to use submodules
return False
stderr = stderr.strip()
if returncode != 0 and stderr:
# Unfortunately the return code alone cannot be relied on, as
# earlier versions of git returned 0 even if the requested submodule
# does not exist
        # This is a warning printed by perl (invoked by `git submodule`)
        # when the locale setting is malformed, which can happen sometimes
        # on OSX. See again
        # https://github.com/astropy/astropy/issues/2749
perl_warning = ('perl: warning: Falling back to the standard locale '
'("C").')
if not stderr.strip().endswith(perl_warning):
# Some other unknown error condition occurred
log.warn('git submodule command failed '
'unexpectedly:\n{0}'.format(stderr))
return False
# Output of `git submodule status` is as follows:
#
        # 1. Status indicator: '-' if the submodule is uninitialized, '+' if
        # the submodule is initialized but not at the commit currently
        # indicated in .gitmodules (and thus needs to be updated), or 'U' if the
# submodule is in an unstable state (i.e. has merge conflicts)
#
# 2. SHA-1 hash of the current commit of the submodule (we don't really
# need this information but it's useful for checking that the output is
# correct)
#
# 3. The output of `git describe` for the submodule's current commit
# hash (this includes for example what branches the commit is on) but
# only if the submodule is initialized. We ignore this information for
# now
_git_submodule_status_re = re.compile(
            r'^(?P<status>[-+U ])(?P<commit>[0-9a-f]{40}) '
r'(?P<submodule>\S+)( .*)?$')
# The stdout should only contain one line--the status of the
# requested submodule
m = _git_submodule_status_re.match(stdout)
if m:
# Yes, the path *is* a git submodule
self._update_submodule(m.group('submodule'), m.group('status'))
return True
else:
log.warn(
'Unexpected output from `git submodule status`:\n{0}\n'
'Will attempt import from {1!r} regardless.'.format(
stdout, self.path))
return False |
def _check_submodule_no_git(self):
"""
Like ``_check_submodule_using_git``, but simply parses the .gitmodules file
to determine if the supplied path is a git submodule, and does not exec any
subprocesses.
This can only determine if a path is a submodule--it does not perform
updates, etc. This function may need to be updated if the format of the
.gitmodules file is changed between git versions.
"""
gitmodules_path = os.path.abspath('.gitmodules')
if not os.path.isfile(gitmodules_path):
return False
# This is a minimal reader for gitconfig-style files. It handles a few of
# the quirks that make gitconfig files incompatible with ConfigParser-style
# files, but does not support the full gitconfig syntax (just enough
# needed to read a .gitmodules file).
gitmodules_fileobj = io.StringIO()
# Must use io.open for cross-Python-compatible behavior wrt unicode
with io.open(gitmodules_path) as f:
for line in f:
# gitconfig files are more flexible with leading whitespace; just
# go ahead and remove it
line = line.lstrip()
# comments can start with either # or ;
            if line and line[0] in ('#', ';'):
continue
gitmodules_fileobj.write(line)
gitmodules_fileobj.seek(0)
cfg = RawConfigParser()
try:
cfg.readfp(gitmodules_fileobj)
except Exception as exc:
log.warn('Malformatted .gitmodules file: {0}\n'
'{1} cannot be assumed to be a git submodule.'.format(
exc, self.path))
return False
for section in cfg.sections():
if not cfg.has_option(section, 'path'):
continue
submodule_path = cfg.get(section, 'path').rstrip(os.sep)
if submodule_path == self.path.rstrip(os.sep):
return True
return False |
def multidot_old(ten,mats):
    '''
    Implements the tensor operation: tensor-times-matrices.
    If the last dimensions of ten represent multilinear operations of the type
    [X1,...,Xk] -> B[X1,...,Xk], and mats contains matrices or vectors
    [A1,...,Ak], then the function returns an array representing the operators
    [X1,...,Xk] -> B[A1 X1,...,Ak Xk].
    '''
resp = ten
n_d = ten.ndim
n_m = len(mats)
for i in range(n_m):
#resp = np.tensordot( resp, mats[i], (n_d-n_m+i-1,0) )
resp = np.tensordot( resp, mats[i], (n_d-n_m,0) )
return resp |
def sdot( U, V ):
    '''
    Computes the tensor product reducing the last dimension of U with the
    first dimension of V. For matrices, it equals the regular matrix product.
    '''
nu = U.ndim
#nv = V.ndim
return np.tensordot( U, V, axes=(nu-1,0) ) |
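# For 2-D arrays sdot reduces to the ordinary matrix product; a quick
# self-contained check:
import numpy as np

U = np.arange(6.0).reshape(2, 3)
V = np.arange(12.0).reshape(3, 4)
assert np.allclose(sdot(U, V), np.dot(U, V))
# For higher-rank arrays only the last axis of U and the first axis of V
# are contracted, e.g. shapes (2, 3, 5) x (5, 4) -> (2, 3, 4). |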
def deprecated(func):
'''This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emitted
when the function is used.'''
import warnings
@functools.wraps(func)
def new_func(*args, **kwargs):
if is_python_3:
code = func.__code__
else:
code = func.func_code
warnings.warn_explicit(
"Call to deprecated function {}.".format(func.__name__),
category=Warning,
filename=code.co_filename,
lineno=code.co_firstlineno + 1
)
return func(*args, **kwargs)
return new_func |
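# Usage sketch for the deprecated decorator (assumes functools and
# is_python_3 are defined at module level, as the decorator requires):
@deprecated
def old_api(x):
    return x + 1

old_api(1)  # emits "Call to deprecated function old_api." and returns 2 |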
def decode_complementarity(comp, control):
'''
    comp can be either:
    - None
    - "a<=expr" where a is a control
    - "expr<=a" where a is a control
    - "expr1<=a<=expr2"
    '''
    try:
        res = regex.match(comp).groups()
    except Exception:
        raise Exception("Unable to parse complementarity condition '{}'".format(comp))
res = [r.strip() for r in res]
if res[1] != control:
msg = "Complementarity condition '{}' incorrect. Expected {} instead of {}.".format(comp, control, res[1])
raise Exception(msg)
return [res[0], res[2]] |
def time_iteration(model, initial_guess=None, dprocess=None, with_complementarities=True,
verbose=True, grid={},
maxit=1000, inner_maxit=10, tol=1e-6, hook=None, details=False):
'''
    Finds a global solution for ``model`` using backward time-iteration.
    This algorithm iterates on the residuals of the arbitrage equations.
Parameters
----------
model : Model
model to be solved
verbose : boolean
if True, display iterations
initial_guess : decision rule
initial guess for the decision rule
dprocess : DiscretizedProcess (model.exogenous.discretize())
discretized process to be used
with_complementarities : boolean (True)
if False, complementarity conditions are ignored
grid: grid options
overload the values set in `options:grid` section
maxit: maximum number of iterations
inner_maxit: maximum number of iteration for inner solver
    tol: tolerance criterion for successive approximations
hook: Callable
function to be called within each iteration, useful for debugging purposes
Returns
-------
decision rule :
approximated solution
'''
from dolo import dprint
def vprint(t):
if verbose:
print(t)
if dprocess is None:
dprocess = model.exogenous.discretize()
n_ms = dprocess.n_nodes() # number of exogenous states
    n_mv = dprocess.n_inodes(0)  # this assumes the number of integration nodes is constant
x0 = model.calibration['controls']
parms = model.calibration['parameters']
n_x = len(x0)
n_s = len(model.symbols['states'])
endo_grid = model.get_grid(**grid)
exo_grid = dprocess.grid
mdr = DecisionRule(exo_grid, endo_grid)
grid = mdr.endo_grid.nodes()
N = grid.shape[0]
controls_0 = numpy.zeros((n_ms, N, n_x))
if initial_guess is None:
controls_0[:, :, :] = x0[None,None,:]
else:
if isinstance(initial_guess, AlgoResult):
initial_guess = initial_guess.dr
try:
for i_m in range(n_ms):
controls_0[i_m, :, :] = initial_guess(i_m, grid)
except Exception:
for i_m in range(n_ms):
m = dprocess.node(i_m)
controls_0[i_m, :, :] = initial_guess(m, grid)
f = model.functions['arbitrage']
g = model.functions['transition']
if 'controls_lb' in model.functions and with_complementarities==True:
lb_fun = model.functions['controls_lb']
ub_fun = model.functions['controls_ub']
lb = numpy.zeros_like(controls_0)*numpy.nan
ub = numpy.zeros_like(controls_0)*numpy.nan
for i_m in range(n_ms):
m = dprocess.node(i_m)[None,:]
p = parms[None,:]
m = numpy.repeat(m, N, axis=0)
p = numpy.repeat(p, N, axis=0)
lb[i_m,:,:] = lb_fun(m, grid, p)
ub[i_m,:,:] = ub_fun(m, grid, p)
else:
with_complementarities = False
sh_c = controls_0.shape
controls_0 = controls_0.reshape( (-1,n_x) )
from dolo.numeric.optimize.newton import newton, SerialDifferentiableFunction
from dolo.numeric.optimize.ncpsolve import ncpsolve
err = 10
it = 0
if with_complementarities:
vprint("Solving WITH complementarities.")
lb = lb.reshape((-1,n_x))
ub = ub.reshape((-1,n_x))
if verbose:
headline = '|{0:^4} | {1:10} | {2:8} | {3:8} | {4:3} |'.format( 'N',' Error', 'Gain','Time', 'nit' )
stars = '-'*len(headline)
print(stars)
print(headline)
print(stars)
import time
t1 = time.time()
err_0 = numpy.nan
verbit = (verbose == 'full')
while err>tol and it<maxit:
it += 1
t_start = time.time()
mdr.set_values(controls_0.reshape(sh_c))
fn = lambda x: residuals_simple(f, g, grid, x.reshape(sh_c), mdr, dprocess, parms).reshape((-1,n_x))
dfn = SerialDifferentiableFunction(fn)
res = fn(controls_0)
if hook:
hook()
if with_complementarities:
[controls,nit] = ncpsolve(dfn, lb, ub, controls_0, verbose=verbit, maxit=inner_maxit)
else:
[controls, nit] = newton(dfn, controls_0, verbose=verbit, maxit=inner_maxit)
err = abs(controls-controls_0).max()
err_SA = err/err_0
err_0 = err
controls_0 = controls
t_finish = time.time()
elapsed = t_finish - t_start
if verbose:
print('|{0:4} | {1:10.3e} | {2:8.3f} | {3:8.3f} | {4:3} |'.format( it, err, err_SA, elapsed, nit ))
controls_0 = controls.reshape(sh_c)
t2 = time.time()
if verbose:
print(stars)
print("Elapsed: {} seconds.".format(t2-t1))
print(stars)
if not details:
return mdr
return TimeIterationResult(
mdr,
it,
with_complementarities,
dprocess,
err<tol, # x_converged: bool
tol, # x_tol
err, #: float
None, # log: object # TimeIterationLog
None # trace: object #{Nothing,IterationTrace}
) |
def set_values(self,x):
""" Updates self.theta parameter. No returns values"""
x = numpy.atleast_2d(x)
x = x.real # ahem
C_inv = self.__C_inv__
theta = numpy.dot( x, C_inv )
self.theta = theta
return theta |
def tauchen(N, mu, rho, sigma, m=2):
"""
Approximate an AR1 process by a finite markov chain using Tauchen's method.
:param N: scalar, number of nodes for Z
:param mu: scalar, unconditional mean of process
:param rho: scalar
:param sigma: scalar, std. dev. of epsilons
:param m: max +- std. devs.
:returns: Z, N*1 vector, nodes for Z. Zprob, N*N matrix, transition probabilities
    SJB: This is a port of Martin Floden's 1996 Matlab code implementing Tauchen's 1986 Economics Letters method. The following comments are Floden's: finds a Markov chain whose sample paths approximate those of the AR(1) process z(t+1) = (1-rho)*mu + rho*z(t) + eps(t+1), where eps is normal with std. dev. sigma.
"""
Z = np.zeros((N,1))
Zprob = np.zeros((N,N))
a = (1-rho)*mu
Z[-1] = m * math.sqrt(sigma**2 / (1 - (rho**2)))
Z[0] = -1 * Z[-1]
zstep = (Z[-1] - Z[0]) / (N - 1)
for i in range(1,N):
Z[i] = Z[0] + zstep * (i)
Z = Z + a / (1-rho)
for j in range(0,N):
for k in range(0,N):
if k == 0:
Zprob[j,k] = sp.stats.norm.cdf((Z[0] - a - rho * Z[j] + zstep / 2) / sigma)
elif k == (N-1):
Zprob[j,k] = 1 - sp.stats.norm.cdf((Z[-1] - a - rho * Z[j] - zstep / 2) / sigma)
else:
up = sp.stats.norm.cdf((Z[k] - a - rho * Z[j] + zstep / 2) / sigma)
down = sp.stats.norm.cdf( (Z[k] - a - rho * Z[j] - zstep / 2) / sigma)
Zprob[j,k] = up - down
return( (Z, Zprob) ) |
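# Quick sanity check for tauchen (assumes the function above and its
# numpy/scipy/math imports are in scope): each row of the transition
# matrix is a probability distribution and, for mu=0, the nodes are
# symmetric around zero.
import numpy as np

Z, Zprob = tauchen(N=5, mu=0.0, rho=0.9, sigma=0.1, m=2)
assert np.allclose(Zprob.sum(axis=1), 1.0)
assert np.allclose(Z + Z[::-1], 0.0) |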
def rouwenhorst(rho, sigma, N):
"""
Approximate an AR1 process by a finite markov chain using Rouwenhorst's method.
:param rho: autocorrelation of the AR1 process
:param sigma: conditional standard deviation of the AR1 process
:param N: number of states
:return [nodes, P]: equally spaced nodes and transition matrix
"""
from numpy import sqrt, linspace, array,zeros
sigma = float(sigma)
if N == 1:
nodes = array([0.0])
transitions = array([[1.0]])
return [nodes, transitions]
p = (rho+1)/2
q = p
nu = sqrt( (N-1)/(1-rho**2) )*sigma
nodes = linspace( -nu, nu, N)
sig_a = sigma
n = 1
# mat0 = array( [[1]] )
mat0 = array([[p,1-p],[1-q,q]])
if N == 2:
return [nodes,mat0]
for n in range(3,N+1):
mat = zeros( (n,n) )
mat_A = mat.copy()
mat_B = mat.copy()
mat_C = mat.copy()
mat_D = mat.copy()
mat_A[:-1,:-1] = mat0
mat_B[:-1,1:] = mat0
mat_C[1:,:-1] = mat0
mat_D[1:,1:] = mat0
mat0 = p*mat_A + (1-p)*mat_B + (1-q)*mat_C + q*mat_D
mat0[1:-1,:] = mat0[1:-1,:]/2
P = mat0
return [nodes, P] |
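# rouwenhorst invariants: rows of P sum to one and the nodes span
# +/- sigma*sqrt((N-1)/(1-rho**2)); a self-contained check (assumes the
# function above is in scope):
import numpy as np

nodes, P = rouwenhorst(rho=0.9, sigma=0.1, N=5)
assert np.allclose(P.sum(axis=1), 1.0)
assert np.isclose(nodes[-1], 0.1 * np.sqrt(4 / (1 - 0.81))) |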
def multidimensional_discretization(rho, sigma, N=3, method='rouwenhorst', m=2):
"""
Discretize an VAR(1) into a markov chain. The autoregression matrix is supposed to be a scalar.
:param rho:
:param sigma:
:param N:
:param method:
:param m:
:return:
"""
# rho is assumed to be a scalar
# sigma is a positive symmetric matrix
# N number of points in each non-degenerate dimension
# m : standard deviations to approximate
import scipy.linalg
from itertools import product
d = sigma.shape[1]
sigma = sigma.copy()
zero_columns = np.where(sigma.sum(axis=0)==0)[0]
for i in zero_columns:
sigma[i,i] = 1
L = scipy.linalg.cholesky(sigma)
N = int(N)
if method=='tauchen':
[nodes_1d, probas_1d] = tauchen(N, 0, rho, 1, m=m)
elif method=='rouwenhorst':
[nodes_1d, probas_1d] = rouwenhorst(rho, 1, N)
markov_nodes = np.array( list( product( *([nodes_1d]*d))) ).T
markov_indices = np.array( list( product( *([range(N)]*d) ) ) ).T
markov_nodes = np.dot(L, markov_nodes)
transition_matrix = 1
for i in range(d):
transition_matrix = np.kron(transition_matrix, probas_1d)
markov_nodes = np.ascontiguousarray(markov_nodes.T)
for i in zero_columns:
markov_nodes[:,i] = 0
return [markov_nodes, transition_matrix] |
def tensor_markov( *args ):
"""Computes the product of two independent markov chains.
:param m1: a tuple containing the nodes and the transition matrix of the first chain
:param m2: a tuple containing the nodes and the transition matrix of the second chain
:return: a tuple containing the nodes and the transition matrix of the product chain
"""
if len(args) > 2:
m1 = args[0]
m2 = args[1]
tail = args[2:]
prod = tensor_markov(m1,m2)
        return tensor_markov( prod, *tail )
elif len(args) == 2:
m1,m2 = args
n1, t1 = m1
n2, t2 = m2
n1 = np.array(n1, dtype=float)
n2 = np.array(n2, dtype=float)
t1 = np.array(t1, dtype=float)
t2 = np.array(t2, dtype=float)
assert(n1.shape[0] == t1.shape[0] == t1.shape[1])
assert(n2.shape[0] == t2.shape[0] == t2.shape[1])
t = np.kron(t1, t2)
p = t1.shape[0]
q = t2.shape[0]
n = np.column_stack([
np.repeat(n1, q, axis=0),
np.tile( n2, (p,1))
])
return [n,t]
else:
raise Exception("Incorrect number of arguments. Expected at least 2. Found {}.".format(len(args))) |
def parse_dynare_text(txt,add_model=True,full_output=False, debug=False):
'''
Imports the content of a modfile into the current interpreter scope
'''
# here we call "instruction group", a string finishing by a semicolon
# an "instruction group" can have several lines
# a line can be
# - a comment //...
# - an old-style tag //$...
# - a new-style tag [key1='value1',..]
# - macro-instruction @#...
# A Modfile contains several blocks (in this order) :
# - an initblock defining variables, exovariables, parameters, initialization
# inside the initblock the order of declaration doesn't matter
# - a model block with two special lines (model; end;)
# - optional blocks (like endval, shocks)
# seperated by free matlab instructions in any order;
# - all other instructions are ignored
otxt = txt
otxt = otxt.replace("\r\n","\n")
otxt = otxt.replace("^","**")
# first, we remove end-of-line comments : they are definitely lost
regex = re.compile("(.+)//[^#](.*)")
def remove_end_comment(line):
res = regex.search(line)
if res:
l = res.groups(1)[0]
return(l)
else:
return line
txt = str.join("\n",map(remove_end_comment,otxt.split("\n")))
name_regex = re.compile("//\s*fname\s*=\s*'(.*)'")
m = name_regex.search(txt)
if m:
fname = m.group(1)
else:
fname = None
instruction_groups = [Instruction_group(s) for s in txt.split(";")]
instructions = [ig.instruction for ig in instruction_groups]
if debug:
print('Elementary instructions')
for i in instruction_groups:
print(i)
try:
imodel = [re.compile('model(\(.*\)|)').match(e) is not None for e in instructions]
imodel = imodel.index(True)
#imodel = instructions.index("model") #this doesn't work for "MODEL"
iend = instructions.index("end")
model_block = instruction_groups[imodel:(iend+1)]
init_block = instruction_groups[0:imodel]
except:
raise Exception('Model block could not be found.')
next_instructions = instructions[(iend+1):]
next_instruction_groups = instruction_groups[(iend+1):]
if 'initval' in next_instructions:
iinitval = next_instructions.index('initval')
iend = next_instructions.index('end',iinitval)
matlab_block_1 = next_instruction_groups[0:iinitval]
initval_block = next_instruction_groups[iinitval:(iend+1)]
next_instruction_groups = next_instruction_groups[(iend+1):]
next_instructions = next_instructions[(iend+1):]
else:
initval_block = None
matlab_block_1 = None
if 'endval' in next_instructions:
iendval = next_instructions.index('endval')
iend = next_instructions.index('end',iendval)
matlab_block_2 = next_instruction_groups[0:iendval]
endval_block = next_instruction_groups[iendval:(iend+1)]
next_instruction_groups = next_instruction_groups[(iend+1):]
next_instructions = next_instructions[(iend+1):]
else:
endval_block = None
matlab_block_2 = None
# TODO : currently shocks block needs to follow initval, this restriction should be removed
if 'shocks' in next_instructions:
ishocks = next_instructions.index('shocks')
iend = next_instructions.index('end',ishocks)
matlab_block_3 = next_instruction_groups[0:ishocks]
shocks_block = next_instruction_groups[ishocks:(iend+1)]
next_instruction_groups = next_instruction_groups[(iend+1):]
next_instructions = next_instructions[(iend+1):]
else:
shocks_block = None
matlab_block_3 = None
try:
init_regex = re.compile("(parameters |var |varexo |)(.*)")
var_names = []
varexo_names = []
parameters_names = []
declarations = {}
for ig in init_block:
if ig.instruction != '':
m = init_regex.match(ig.instruction)
if not m:
raise Exception("Unexpected instruction in init block : " + str(ig.instruction))
if m.group(1) == '':
[lhs,rhs] = m.group(2).split("=")
lhs = lhs.strip()
rhs = rhs.strip()
declarations[lhs] = rhs
else:
arg = m.group(2).replace(","," ")
names = [vn.strip() for vn in arg.split()]
if m.group(1).strip() == 'var':
dest = var_names
elif m.group(1).strip() == 'varexo':
dest = varexo_names
elif m.group(1).strip() == 'parameters':
dest = parameters_names
for n in names:
                            if n not in dest:
                                dest.append(n)
                            else:
                                raise Exception("symbol {} has already been defined".format(n))
except Exception as e:
raise Exception('Init block could not be read : ' + str(e) )
# the following instruction set the variables "variables","shocks","parameters"
variables = []
for vn in var_names:
v = Variable(vn)
variables.append(v)
shocks = []
for vn in varexo_names:
s = Shock(vn)
shocks.append(s)
parameters = []
for vn in parameters_names:
p = Parameter(vn)
parameters.append(p)
parse_dict = dict()
for v in variables + shocks + parameters:
parse_dict[v.name] = v
special_symbols = [sympy.exp,sympy.log,sympy.sin,sympy.cos, sympy.atan, sympy.tan]
for s in special_symbols:
parse_dict[str(s)] = s
parse_dict['sqrt'] = sympy.sqrt
# Read parameters values
parameters_values = {}
for p in declarations:
try:
rhs = eval(declarations[p], parse_dict)
except Exception as e:
Exception("Impossible to evaluate parameter value : " + str(e))
try:
lhs = eval(p,parse_dict)
except Exception as e:
# here we could declare p
raise e
parameters_values[lhs] = rhs
# Now we read the model block
model_tags = model_block[0].tags
equations = []
for ig in model_block[1:-1]:
if ig.instruction != '':
teq = ig.instruction.replace('^',"**")
if '=' in teq:
teqlhs,teqrhs = teq.split("=")
else:
teqlhs = teq
teqrhs = '0'
eqlhs = eval(teqlhs, parse_dict)
eqrhs = eval(teqrhs, parse_dict)
eq = Equation(eqlhs,eqrhs)
eq.tags.update(ig.tags)
equations.append(eq)
# Now we read the initval block
init_values = {}
    if initval_block is not None:
for ig in initval_block[1:-1]:
if len(ig.instruction.strip()) >0:
try:
[lhs,rhs] = ig.instruction.split("=")
except Exception as e:
print(ig.instruction)
raise e
init_values[eval(lhs,parse_dict)] = eval(rhs,parse_dict)
# Now we read the endval block
# I don't really care about the endval block !
end_values = {}
    if endval_block is not None:
for ig in endval_block[1:-1]:
[lhs,rhs] = ig.instruction.split("=")
            end_values[eval(lhs, parse_dict)] = eval(rhs, parse_dict)
# Now we read the shocks block
covariances = None
    if shocks_block is not None:
covariances = sympy.zeros(len(shocks))
regex1 = re.compile("var (.*?),(.*?)=(.*)|var (.*?)=(.*)")
for ig in shocks_block[1:-1]:
m = regex1.match(ig.instruction)
if not m:
raise Exception("unrecognized instruction in block shocks : " + str(ig.instruction))
if m.group(1) != None:
varname1 = m.group(1).strip()
varname2 = m.group(2).strip()
value = m.group(3).strip().replace("^","**")
elif m.group(4) != None:
varname1 = m.group(4).strip()
varname2 = varname1
value = m.group(5).strip().replace("^","**")
i = varexo_names.index(varname1)
j = varexo_names.index(varname2)
covariances[i,j] = eval(value,parse_dict)
covariances[j,i] = eval(value,parse_dict)
calibration = {}
calibration.update(parameters_values)
calibration.update(init_values)
symbols = {'variables': variables, 'shocks': shocks, 'parameters': parameters}
from trash.dolo.symbolic.model import SModel
model = SModel({'dynare_block': equations}, symbols, calibration, covariances)
return model |
def dynare_import(filename,full_output=False, debug=False):
'''Imports model defined in specified file'''
import os
basename = os.path.basename(filename)
fname = re.compile('(.*)\.(.*)').match(basename).group(1)
f = open(filename)
txt = f.read()
model = parse_dynare_text(txt,full_output=full_output, debug=debug)
model.name = fname
return model |
def loadUiType(uiFile):
"""
Pyside lacks the "loadUiType" command, so we have to convert the ui file to py code in-memory first
and then execute it in a special frame to retrieve the form_class.
"""
parsed = xml.parse(uiFile)
widget_class = parsed.find('widget').get('class')
form_class = parsed.find('class').text
with open(uiFile, 'r') as f:
o = StringIO()
frame = {}
pysideuic.compileUi(f, o, indent=0)
pyc = compile(o.getvalue(), '<string>', 'exec')
        exec(pyc, frame)
        # Fetch the base_class and form class based on their type in the xml
        # from designer
form_class = frame['Ui_%s'%form_class]
base_class = eval('QtGui.%s'%widget_class)
return form_class, base_class |
def simple_newton(f, x0, lb=None, ub=None, infos=False, verbose=False, maxit=50, tol=1e-8, eps=1e-8, numdiff=True):
    '''Solves a system f(x)=0 using Newton's method.
    :param f: objective function with values of size p; if numdiff is False,
        the second output argument must be the derivative, with values (p x p)
    :param x0: initial value (p)
    :return: solution x such that f(x) = 0
    '''
precision = x0.dtype # default tolerance should depend on precision
from numpy.linalg import solve
err = 1
it = 0
while err > tol and it <= maxit:
if not numdiff:
[res,dres] = f(x0)
else:
res = f(x0)
dres = numpy.zeros( (res.shape[0], x0.shape[0]), dtype=precision )
for i in range(x0.shape[0]):
xi = x0.copy()
xi[i] += eps
resi = f(xi)
dres[:,i] = (resi - res)/eps
dx = - solve(dres,res)
x = x0 + dx
        if verbose:
            print('x0 : {}'.format(x0))
        err = abs(res).max()
        if verbose:
            print('iteration {} {}'.format(it, err))
x0 = x
it += 1
if not infos:
return x
else:
return [x, it] |
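# Usage sketch for simple_newton with numerical differentiation (the
# default): solving x**2 - 2 = 0 from x0 = 1 (assumes the function above
# and its numpy import are in scope):
import numpy

root = simple_newton(lambda x: x ** 2 - 2, numpy.array([1.0]))
assert abs(root[0] - 2 ** 0.5) < 1e-6 |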
def newton_solver(f, x0, lb=None, ub=None, infos=False, verbose=False, maxit=50, tol=1e-8, eps=1e-5, numdiff=False):
    '''Solves many independent systems f(x)=0 simultaneously using Newton's method.
    :param f: objective function to be solved with values p x N. The second
        output argument represents the derivative, with values in (p x p x N)
    :param x0: initial value ( p x N )
    :return: solution x such that f(x) = 0
    '''
precision = x0.dtype # default tolerance should depend on precision
from dolo.numeric.serial_operations import serial_multiplication as stv, serial_solve
err = 1
it = 0
while err > tol and it <= maxit:
if not numdiff:
[res,dres] = f(x0)
else:
res = f(x0)
dres = numpy.zeros( (res.shape[0], x0.shape[0], x0.shape[1]), dtype=precision )
for i in range(x0.shape[0]):
xi = x0.copy()
xi[i,:] += eps
resi = f(xi)
dres[:,i,:] = (resi - res)/eps
try:
dx = - serial_solve(dres,res)
        except Exception:
dx = - serial_solve(dres,res, debug=True)
x = x0 + dx
err = abs(res).max()
x0 = x
it += 1
if not infos:
return x
else:
return [x, it] |