hash | content
---|---
38130b17f976bb6e9362cab7c0b3b3194c6644a357928a4401fd5a46c34d13c7 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# This file is the main file used when running tests with pytest directly,
# in particular if running e.g. ``pytest docs/``.
import os
import tempfile
import hypothesis
from astropy import __version__
try:
from pytest_astropy_header.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS
except ImportError:
PYTEST_HEADER_MODULES = {}
TESTED_VERSIONS = {}
# This has to be in the root dir or it will not display in CI.
def pytest_configure(config):
PYTEST_HEADER_MODULES['PyERFA'] = 'erfa'
PYTEST_HEADER_MODULES['Cython'] = 'cython'
PYTEST_HEADER_MODULES['Scikit-image'] = 'skimage'
PYTEST_HEADER_MODULES['asdf'] = 'asdf'
PYTEST_HEADER_MODULES['pyarrow'] = 'pyarrow'
TESTED_VERSIONS['Astropy'] = __version__
# This has to be in the root dir or it will not display in CI.
def pytest_report_header(config):
# This gets added after the pytest-astropy-header output.
return (f'ARCH_ON_CI: {os.environ.get("ARCH_ON_CI", "undefined")}\n'
f'IS_CRON: {os.environ.get("IS_CRON", "undefined")}\n')
# Tell Hypothesis that we might be running slow tests, to print the seed blob
# so we can easily reproduce failures from CI, and derive a fuzzing profile
# that tries many more inputs when we detect a scheduled build or when
# specifically requested using the HYPOTHESIS_PROFILE=fuzzing environment
# variable or the `pytest --hypothesis-profile=fuzzing ...` argument.
hypothesis.settings.register_profile(
'ci', deadline=None, print_blob=True, derandomize=True
)
hypothesis.settings.register_profile(
'fuzzing', deadline=None, print_blob=True, max_examples=1000
)
default = 'fuzzing' if (os.environ.get('IS_CRON') == 'true' and os.environ.get('ARCH_ON_CI') not in ('aarch64', 'ppc64le')) else 'ci' # noqa: E501
hypothesis.settings.load_profile(os.environ.get('HYPOTHESIS_PROFILE', default))
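# For example (assumed invocation), the more exhaustive profile can be
# selected explicitly with either of::
#
#     HYPOTHESIS_PROFILE=fuzzing pytest astropy/
#     pytest --hypothesis-profile=fuzzing astropy/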
# Make sure we use temporary directories for the config and cache
# so that the tests are insensitive to local configuration.
os.environ['XDG_CONFIG_HOME'] = tempfile.mkdtemp('astropy_config')
os.environ['XDG_CACHE_HOME'] = tempfile.mkdtemp('astropy_cache')
os.mkdir(os.path.join(os.environ['XDG_CONFIG_HOME'], 'astropy'))
os.mkdir(os.path.join(os.environ['XDG_CACHE_HOME'], 'astropy'))
# Note that we don't need to change the environment variables back or remove
# them after testing, because they are only changed for the duration of the
# Python process, and this configuration only matters if running pytest
# directly, not from e.g. an IPython session.
|
bded93adc301a0fcc3dd01dc4f3e259ca71231cc421fd7bee8443d206b1ce143 | #!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# NOTE: The configuration for the package, including the name, version, and
# other information, is set in the setup.cfg file.
import sys
# First provide helpful messages if contributors try and run legacy commands
# for tests or docs.
TEST_HELP = """
Note: running tests is no longer done using 'python setup.py test'. Instead
you will need to run:
tox -e test
If you don't already have tox installed, you can install it with:
pip install tox
If you only want to run part of the test suite, you can also use pytest
directly with::
pip install -e .[test]
pytest
For more information, see:
https://docs.astropy.org/en/latest/development/testguide.html#running-tests
"""
if 'test' in sys.argv:
print(TEST_HELP)
sys.exit(1)
DOCS_HELP = """
Note: building the documentation is no longer done using
'python setup.py build_docs'. Instead you will need to run:
tox -e build_docs
If you don't already have tox installed, you can install it with:
pip install tox
You can also build the documentation with Sphinx directly using::
pip install -e .[docs]
cd docs
make html
For more information, see:
https://docs.astropy.org/en/latest/install.html#builddocs
"""
if 'build_docs' in sys.argv or 'build_sphinx' in sys.argv:
print(DOCS_HELP)
sys.exit(1)
# Only import these if the above checks are okay
# to avoid masking the real problem with import error.
from setuptools import setup # noqa
from extension_helpers import get_extensions # noqa
setup(ext_modules=get_extensions())
|
73b018608b35beb850df948ebe0315cbe4f8019618c62ccea33612d1f828bdf7 | import os
import shutil
import sys
import erfa # noqa
import pytest
import astropy # noqa
if len(sys.argv) == 3 and sys.argv[1] == '--astropy-root':
ROOT = sys.argv[2]
else:
# Make sure we don't allow any arguments to be passed - some tests call
# sys.executable which becomes this script when producing a pyinstaller
# bundle, but we should just error in this case since this is not the
# regular Python interpreter.
if len(sys.argv) > 1:
print("Extra arguments passed, exiting early")
sys.exit(1)
for root, dirnames, files in os.walk(os.path.join(ROOT, 'astropy')):
# NOTE: we can't simply use
# test_root = root.replace('astropy', 'astropy_tests')
# as we only want to change the one which is for the module, so instead
# we search for the last occurrence and replace that.
pos = root.rfind('astropy')
test_root = root[:pos] + 'astropy_tests' + root[pos + 7:]
# Copy over the astropy 'tests' directories and their contents
for dirname in dirnames:
final_dir = os.path.relpath(os.path.join(test_root, dirname), ROOT)
# We only copy over 'tests' directories, but not astropy/tests (only
# astropy/tests/tests) since that is not just a directory with tests.
if dirname == 'tests' and not root.endswith('astropy'):
shutil.copytree(os.path.join(root, dirname), final_dir, dirs_exist_ok=True)
else:
# Create empty __init__.py files so that 'astropy_tests' still
# behaves like a single package, otherwise pytest gets confused
# by the different conftest.py files.
init_filename = os.path.join(final_dir, '__init__.py')
if not os.path.exists(init_filename):
os.makedirs(final_dir, exist_ok=True)
with open(init_filename, 'w') as f:
f.write("#")
# Copy over all conftest.py files
for file in files:
if file == 'conftest.py':
final_file = os.path.relpath(os.path.join(test_root, file), ROOT)
shutil.copy2(os.path.join(root, file), final_file)
# Add the top-level __init__.py file
with open(os.path.join('astropy_tests', '__init__.py'), 'w') as f:
f.write("#")
# Remove test file that tries to import all sub-packages at collection time
os.remove(os.path.join('astropy_tests', 'utils', 'iers', 'tests', 'test_leap_second.py'))
# Remove convolution tests for now as there are issues with the loading of the C extension.
# FIXME: one way to fix this would be to migrate the convolution C extension away from using
# ctypes and using the regular extension mechanism instead.
shutil.rmtree(os.path.join('astropy_tests', 'convolution'))
os.remove(os.path.join('astropy_tests', 'modeling', 'tests', 'test_convolution.py'))
os.remove(os.path.join('astropy_tests', 'modeling', 'tests', 'test_core.py'))
os.remove(os.path.join('astropy_tests', 'visualization', 'tests', 'test_lupton_rgb.py'))
# FIXME: The following tests rely on the fully qualified name of classes which
# don't seem to be the same.
os.remove(os.path.join('astropy_tests', 'table', 'mixins', 'tests', 'test_registry.py'))
# Copy the top-level conftest.py
shutil.copy2(os.path.join(ROOT, 'astropy', 'conftest.py'),
os.path.join('astropy_tests', 'conftest.py'))
# We skip a few tests, which are generally ones that rely on explicitly
# checking the name of the current module (which ends up starting with
# astropy_tests rather than astropy).
SKIP_TESTS = ['test_exception_logging_origin',
'test_log',
'test_configitem',
'test_config_noastropy_fallback',
'test_no_home',
'test_path',
'test_rename_path',
'test_data_name_third_party_package',
'test_pkg_finder',
'test_wcsapi_extension',
'test_find_current_module_bundle',
'test_minversion',
'test_imports',
'test_generate_config',
'test_generate_config2',
'test_create_config_file',
'test_download_parallel_fills_cache']
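# The list above is joined into a single ``-k`` expression of the form
# "not test_exception_logging_origin and not test_log and ..." (illustrative
# rendering of the expression built below), so the named tests are deselected.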
# Run the tests!
sys.exit(pytest.main(['astropy_tests',
'-k ' + ' and '.join('not ' + test for test in SKIP_TESTS)],
plugins=['pytest_doctestplus.plugin',
'pytest_openfiles.plugin',
'pytest_remotedata.plugin',
'pytest_astropy_header.display']))
|
f3a0aeb97076b413c5b12bf54c9d21687bfae4f546d809811d8fd5110a7aa5eb | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Astropy is a package intended to contain core functionality and some
common tools needed for performing astronomy and astrophysics research with
Python. It also provides an index for other astronomy packages and tools for
managing them.
"""
import os
import sys
from .version import version as __version__
def _is_astropy_source(path=None):
"""
Returns whether the source for this module is directly in an astropy
source distribution or checkout.
"""
# If this __init__.py file is in ./astropy/ then the import is within a source
# directory. .astropy-root is a file distributed with the source, but it
# should not be installed.
if path is None:
path = os.path.join(os.path.dirname(__file__), os.pardir)
elif os.path.isfile(path):
path = os.path.dirname(path)
source_dir = os.path.abspath(path)
return os.path.exists(os.path.join(source_dir, '.astropy-root'))
# The location of the online documentation for astropy
# This location will normally point to the current released version of astropy
if 'dev' in __version__:
online_docs_root = 'https://docs.astropy.org/en/latest/'
else:
online_docs_root = f'https://docs.astropy.org/en/{__version__}/'
from . import config as _config # noqa: E402
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy`.
"""
unicode_output = _config.ConfigItem(
False,
'When True, use Unicode characters when outputting values, and '
'displaying widgets at the console.')
use_color = _config.ConfigItem(
sys.platform != 'win32',
'When True, use ANSI color escape sequences when writing to the console.',
aliases=['astropy.utils.console.USE_COLOR', 'astropy.logger.USE_COLOR'])
max_lines = _config.ConfigItem(
None,
description='Maximum number of lines in the display of pretty-printed '
'objects. If not provided, try to determine automatically from the '
'terminal size. Negative numbers mean no limit.',
cfgtype='integer(default=None)',
aliases=['astropy.table.pprint.max_lines'])
max_width = _config.ConfigItem(
None,
description='Maximum number of characters per line in the display of '
'pretty-printed objects. If not provided, try to determine '
'automatically from the terminal size. Negative numbers mean no '
'limit.',
cfgtype='integer(default=None)',
aliases=['astropy.table.pprint.max_width'])
conf = Conf()
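# Example (sketch): configuration items defined above can be read or
# temporarily overridden at runtime through this namespace::
#
#     from astropy import conf
#     with conf.set_temp('max_lines', 10):
#         ...  # pretty-printed output is limited to 10 lines in this block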
# Define a base ScienceState for configuring constants and units
from .utils.state import ScienceState # noqa: E402
class base_constants_version(ScienceState):
"""
Base class for the real version-setters below
"""
_value = 'test'
_versions = dict(test='test')
@classmethod
def validate(cls, value):
if value not in cls._versions:
raise ValueError(f'Must be one of {list(cls._versions.keys())}')
return cls._versions[value]
@classmethod
def set(cls, value):
"""
Set the current constants value.
"""
import sys
if 'astropy.units' in sys.modules:
raise RuntimeError('astropy.units is already imported')
if 'astropy.constants' in sys.modules:
raise RuntimeError('astropy.constants is already imported')
return super().set(value)
class physical_constants(base_constants_version):
"""
The version of physical constants to use
"""
# Maintainers: update when new constants are added
_value = 'codata2018'
_versions = dict(codata2018='codata2018', codata2014='codata2014',
codata2010='codata2010', astropyconst40='codata2018',
astropyconst20='codata2014', astropyconst13='codata2010')
class astronomical_constants(base_constants_version):
"""
The version of astronomical constants to use
"""
# Maintainers: update when new constants are added
_value = 'iau2015'
_versions = dict(iau2015='iau2015', iau2012='iau2012',
astropyconst40='iau2015', astropyconst20='iau2015',
astropyconst13='iau2012')
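# Example (sketch): an older constants version can be selected, but only
# before ``astropy.units`` or ``astropy.constants`` has been imported::
#
#     import astropy
#     astropy.physical_constants.set('codata2014')
#     import astropy.constants as const  # now uses the CODATA 2014 values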
# Create the test() function
from .tests.runner import TestRunner # noqa: E402
test = TestRunner.make_test_runner_in(__path__[0]) # noqa: F821
# if we are *not* in setup mode, import the logger and possibly populate the
# configuration file with the defaults
def _initialize_astropy():
try:
from .utils import _compiler # noqa: F401
except ImportError:
if _is_astropy_source():
raise ImportError('You appear to be trying to import astropy from '
'within a source checkout or from an editable '
'installation without building the extension '
'modules first. Either run:\n\n'
' pip install -e .\n\nor\n\n'
' python setup.py build_ext --inplace\n\n'
'to make sure the extension modules are built ')
else:
# Outright broken installation, just raise standard error
raise
# Set the bibtex entry to the article referenced in CITATION.
def _get_bibtex():
citation_file = os.path.join(os.path.dirname(__file__), 'CITATION')
with open(citation_file, 'r') as citation:
refs = citation.read().split('@ARTICLE')[1:]
if len(refs) == 0:
return ''
bibtexreference = f'@ARTICLE{refs[0]}'
return bibtexreference
__citation__ = __bibtex__ = _get_bibtex()
from .logger import _init_log, _teardown_log # noqa: E402, F401
log = _init_log()
_initialize_astropy()
from .utils.misc import find_api_page # noqa: E402, F401
def online_help(query):
"""
Search the online Astropy documentation for the given query.
Opens the results in the default web browser. Requires an active
Internet connection.
Parameters
----------
query : str
The search query.
"""
import webbrowser
from urllib.parse import urlencode
version = __version__
if 'dev' in version:
version = 'latest'
else:
version = 'v' + version
url = f"https://docs.astropy.org/en/{version}/search.html?{urlencode({'q': query})}"
webbrowser.open(url)
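# Example usage (sketch)::
#
#     from astropy import online_help
#     online_help('coordinate transformations')  # opens the docs search page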
__dir_inc__ = ['__version__', '__githash__',
'__bibtex__', 'test', 'log', 'find_api_page', 'online_help',
'online_docs_root', 'conf', 'physical_constants',
'astronomical_constants']
from types import ModuleType as __module_type__ # noqa: E402
# Clean up the top-level namespace--delete everything that isn't in
# __dir_inc__, isn't a magic attribute, and isn't a submodule of this package
for varname in dir():
if not ((varname.startswith('__') and varname.endswith('__')) or
varname in __dir_inc__ or
(varname[0] != '_' and
isinstance(locals()[varname], __module_type__) and
locals()[varname].__name__.startswith(__name__ + '.'))):
# The last clause in the above disjunction deserves explanation:
# When using relative imports like ``from .. import config``, the
# ``config`` variable is automatically created in the namespace of
# whatever module ``..`` resolves to (in this case astropy). This
# happens a few times just in the module setup above. This allows
# the cleanup to keep any public submodules of the astropy package
del locals()[varname]
del varname, __module_type__
|
ab94da6ed51f8e247cb5bf4f29bd54aa85a12e98494aebf149298826f5daa769 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""This module defines a logging class based on the built-in logging module.
.. note::
This module is meant for internal ``astropy`` usage. For use in other
packages, we recommend implementing your own logger instead.
"""
import inspect
import os
import sys
import logging
import warnings
from contextlib import contextmanager
from . import config as _config
from . import conf as _conf
from .utils import find_current_module
from .utils.exceptions import AstropyWarning, AstropyUserWarning
__all__ = ['Conf', 'conf', 'log', 'AstropyLogger', 'LoggingError']
# import the logging levels from logging so that one can do:
# log.setLevel(log.DEBUG), for example
logging_levels = ['NOTSET', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL',
'FATAL', ]
for level in logging_levels:
globals()[level] = getattr(logging, level)
__all__ += logging_levels
# Initialize by calling _init_log()
log = None
class LoggingError(Exception):
"""
This exception is for various errors that occur in the astropy logger,
typically when activating or deactivating logger-related features.
"""
class _AstLogIPYExc(Exception):
"""
An exception that is used only as a placeholder to indicate to the
IPython exception-catching mechanism that the astropy
exception-capturing is activated. It should not actually be used as
an exception anywhere.
"""
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.logger`.
"""
log_level = _config.ConfigItem(
'INFO',
"Threshold for the logging messages. Logging "
"messages that are less severe than this level "
"will be ignored. The levels are ``'DEBUG'``, "
"``'INFO'``, ``'WARNING'``, ``'ERROR'``.")
log_warnings = _config.ConfigItem(
True,
"Whether to log `warnings.warn` calls.")
log_exceptions = _config.ConfigItem(
False,
"Whether to log exceptions before raising "
"them.")
log_to_file = _config.ConfigItem(
False,
"Whether to always log messages to a log "
"file.")
log_file_path = _config.ConfigItem(
'',
"The file to log messages to. If empty string is given, "
"it defaults to a file ``'astropy.log'`` in "
"the astropy config directory.")
log_file_level = _config.ConfigItem(
'INFO',
"Threshold for logging messages to "
"`log_file_path`.")
log_file_format = _config.ConfigItem(
"%(asctime)r, "
"%(origin)r, %(levelname)r, %(message)r",
"Format for log file entries.")
log_file_encoding = _config.ConfigItem(
'',
"The encoding (e.g., UTF-8) to use for the log file. If empty string "
"is given, it defaults to the platform-preferred encoding.")
conf = Conf()
def _init_log():
"""Initializes the Astropy log--in most circumstances this is called
automatically when importing astropy.
"""
global log
orig_logger_cls = logging.getLoggerClass()
logging.setLoggerClass(AstropyLogger)
try:
log = logging.getLogger('astropy')
log._set_defaults()
finally:
logging.setLoggerClass(orig_logger_cls)
return log
def _teardown_log():
"""Shut down exception and warning logging (if enabled) and clear all
Astropy loggers from the logging module's cache.
This involves poking some logging module internals, so much of it is 'at
your own risk' and is allowed to pass silently if any exceptions occur.
"""
global log
if log.exception_logging_enabled():
log.disable_exception_logging()
if log.warnings_logging_enabled():
log.disable_warnings_logging()
del log
# Now for the fun stuff...
try:
logging._acquireLock()
try:
loggerDict = logging.Logger.manager.loggerDict
# Iterate over a static list of keys since we delete entries below
for key in list(loggerDict.keys()):
if key == 'astropy' or key.startswith('astropy.'):
del loggerDict[key]
finally:
logging._releaseLock()
except Exception:
pass
Logger = logging.getLoggerClass()
class AstropyLogger(Logger):
'''
This class is used to set up the Astropy logging.
The main functionality added by this class over the built-in
logging.Logger class is the ability to keep track of the origin of the
messages, the ability to enable logging of warnings.warn calls and
exceptions, and the addition of colorized output and context managers to
easily capture messages to a file or list.
'''
def makeRecord(self, name, level, pathname, lineno, msg, args, exc_info,
func=None, extra=None, sinfo=None):
if extra is None:
extra = {}
if 'origin' not in extra:
current_module = find_current_module(1, finddiff=[True, 'logging'])
if current_module is not None:
extra['origin'] = current_module.__name__
else:
extra['origin'] = 'unknown'
return Logger.makeRecord(self, name, level, pathname, lineno, msg,
args, exc_info, func=func, extra=extra,
sinfo=sinfo)
_showwarning_orig = None
def _showwarning(self, *args, **kwargs):
# Bail out if we are not catching a warning from Astropy
if not isinstance(args[0], AstropyWarning):
return self._showwarning_orig(*args, **kwargs)
warning = args[0]
# Deliberately not using isinstance here: We want to display
# the class name only when it's not the default class,
# AstropyWarning. The name of subclasses of AstropyWarning should
# be displayed.
if type(warning) not in (AstropyWarning, AstropyUserWarning):
message = f'{warning.__class__.__name__}: {args[0]}'
else:
message = str(args[0])
mod_path = args[2]
# Now that we have the module's path, we look through sys.modules to
# find the module object and thus the fully-package-specified module
# name. The module.__file__ is the original source file name.
mod_name = None
mod_path, ext = os.path.splitext(mod_path)
for name, mod in list(sys.modules.items()):
try:
# Believe it or not this can fail in some cases:
# https://github.com/astropy/astropy/issues/2671
path = os.path.splitext(getattr(mod, '__file__', ''))[0]
except Exception:
continue
if path == mod_path:
mod_name = mod.__name__
break
if mod_name is not None:
self.warning(message, extra={'origin': mod_name})
else:
self.warning(message)
def warnings_logging_enabled(self):
return self._showwarning_orig is not None
def enable_warnings_logging(self):
'''
Enable logging of warnings.warn() calls
Once called, any subsequent calls to ``warnings.warn()`` are
redirected to this logger and emitted with level ``WARN``. Note that
this replaces the output from ``warnings.warn``.
This can be disabled with ``disable_warnings_logging``.
'''
if self.warnings_logging_enabled():
raise LoggingError("Warnings logging has already been enabled")
self._showwarning_orig = warnings.showwarning
warnings.showwarning = self._showwarning
def disable_warnings_logging(self):
'''
Disable logging of warnings.warn() calls
Once called, any subsequent calls to ``warnings.warn()`` are no longer
redirected to this logger.
This can be re-enabled with ``enable_warnings_logging``.
'''
if not self.warnings_logging_enabled():
raise LoggingError("Warnings logging has not been enabled")
if warnings.showwarning != self._showwarning:
raise LoggingError("Cannot disable warnings logging: "
"warnings.showwarning was not set by this "
"logger, or has been overridden")
warnings.showwarning = self._showwarning_orig
self._showwarning_orig = None
_excepthook_orig = None
def _excepthook(self, etype, value, traceback):
if traceback is None:
mod = None
else:
tb = traceback
while tb.tb_next is not None:
tb = tb.tb_next
mod = inspect.getmodule(tb)
# Include the error type in the message.
if len(value.args) > 0:
message = f'{etype.__name__}: {str(value)}'
else:
message = str(etype.__name__)
if mod is not None:
self.error(message, extra={'origin': mod.__name__})
else:
self.error(message)
self._excepthook_orig(etype, value, traceback)
def exception_logging_enabled(self):
'''
Determine if the exception-logging mechanism is enabled.
Returns
-------
exclog : bool
True if exception logging is on, False if not.
'''
try:
ip = get_ipython()
except NameError:
ip = None
if ip is None:
return self._excepthook_orig is not None
else:
return _AstLogIPYExc in ip.custom_exceptions
def enable_exception_logging(self):
'''
Enable logging of exceptions
Once called, any uncaught exceptions will be emitted with level
``ERROR`` by this logger, before being raised.
This can be disabled with ``disable_exception_logging``.
'''
try:
ip = get_ipython()
except NameError:
ip = None
if self.exception_logging_enabled():
raise LoggingError("Exception logging has already been enabled")
if ip is None:
# standard python interpreter
self._excepthook_orig = sys.excepthook
sys.excepthook = self._excepthook
else:
# IPython has its own way of dealing with excepthook
# We need to locally define the function here, because IPython
# actually makes this a member function of their own class
def ipy_exc_handler(ipyshell, etype, evalue, tb, tb_offset=None):
# First use our excepthook
self._excepthook(etype, evalue, tb)
# Now also do IPython's traceback
ipyshell.showtraceback((etype, evalue, tb), tb_offset=tb_offset)
# now register the function with IPython
# note that we include _AstLogIPYExc so `disable_exception_logging`
# knows that it's disabling the right thing
ip.set_custom_exc((BaseException, _AstLogIPYExc), ipy_exc_handler)
# and set self._excepthook_orig to a no-op
self._excepthook_orig = lambda etype, evalue, tb: None
def disable_exception_logging(self):
'''
Disable logging of exceptions
Once called, any uncaught exceptions will no longer be emitted by this
logger.
This can be re-enabled with ``enable_exception_logging``.
'''
try:
ip = get_ipython()
except NameError:
ip = None
if not self.exception_logging_enabled():
raise LoggingError("Exception logging has not been enabled")
if ip is None:
# standard python interpreter
if sys.excepthook != self._excepthook:
raise LoggingError("Cannot disable exception logging: "
"sys.excepthook was not set by this logger, "
"or has been overridden")
sys.excepthook = self._excepthook_orig
self._excepthook_orig = None
else:
# IPython has its own way of dealing with exceptions
ip.set_custom_exc(tuple(), None)
def enable_color(self):
'''
Enable colorized output
'''
_conf.use_color = True
def disable_color(self):
'''
Disable colorized output
'''
_conf.use_color = False
@contextmanager
def log_to_file(self, filename, filter_level=None, filter_origin=None):
'''
Context manager to temporarily log messages to a file.
Parameters
----------
filename : str
The file to log messages to.
filter_level : str
If set, any log messages less important than ``filter_level`` will
not be output to the file. Note that this is in addition to the
top-level filtering for the logger, so if the logger has level
'INFO', then setting ``filter_level`` to ``INFO`` or ``DEBUG``
will have no effect, since these messages are already filtered
out.
filter_origin : str
If set, only log messages with an origin starting with
``filter_origin`` will be output to the file.
Notes
-----
By default, the logger already outputs log messages to a file set in
the Astropy configuration file. Using this context manager does not
stop log messages from being output to that file, nor does it stop log
messages from being printed to standard output.
Examples
--------
The context manager is used as::
with logger.log_to_file('myfile.log'):
# your code here
'''
encoding = conf.log_file_encoding if conf.log_file_encoding else None
fh = logging.FileHandler(filename, encoding=encoding)
if filter_level is not None:
fh.setLevel(filter_level)
if filter_origin is not None:
fh.addFilter(FilterOrigin(filter_origin))
f = logging.Formatter(conf.log_file_format)
fh.setFormatter(f)
self.addHandler(fh)
yield
fh.close()
self.removeHandler(fh)
@contextmanager
def log_to_list(self, filter_level=None, filter_origin=None):
'''
Context manager to temporarily log messages to a list.
Parameters
----------
filter_level : str
If set, any log messages less important than ``filter_level`` will
not be output to the list. Note that this is in addition to the
top-level filtering for the logger, so if the logger has level
'INFO', then setting ``filter_level`` to ``INFO`` or ``DEBUG``
will have no effect, since these messages are already filtered
out.
filter_origin : str
If set, only log messages with an origin starting with
``filter_origin`` will be output to the list.
Notes
-----
Using this context manager does not stop log messages from being
output to standard output.
Examples
--------
The context manager is used as::
with logger.log_to_list() as log_list:
# your code here
'''
lh = ListHandler()
if filter_level is not None:
lh.setLevel(filter_level)
if filter_origin is not None:
lh.addFilter(FilterOrigin(filter_origin))
self.addHandler(lh)
yield lh.log_list
self.removeHandler(lh)
def _set_defaults(self):
'''
Reset logger to its initial state
'''
# Reset any previously installed hooks
if self.warnings_logging_enabled():
self.disable_warnings_logging()
if self.exception_logging_enabled():
self.disable_exception_logging()
# Remove all previous handlers
for handler in self.handlers[:]:
self.removeHandler(handler)
# Set levels
self.setLevel(conf.log_level)
# Set up the stdout handler
sh = StreamHandler()
self.addHandler(sh)
# Set up the main log file handler if requested (but this might fail if
# configuration directory or log file is not writeable).
if conf.log_to_file:
log_file_path = conf.log_file_path
# "None" as a string because it comes from config
try:
_ASTROPY_TEST_
testing_mode = True
except NameError:
testing_mode = False
try:
if log_file_path == '' or testing_mode:
log_file_path = os.path.join(
_config.get_config_dir('astropy'), "astropy.log")
else:
log_file_path = os.path.expanduser(log_file_path)
encoding = conf.log_file_encoding if conf.log_file_encoding else None
fh = logging.FileHandler(log_file_path, encoding=encoding)
except OSError as e:
warnings.warn(
f'log file {log_file_path!r} could not be opened for writing: {str(e)}',
RuntimeWarning)
else:
formatter = logging.Formatter(conf.log_file_format)
fh.setFormatter(formatter)
fh.setLevel(conf.log_file_level)
self.addHandler(fh)
if conf.log_warnings:
self.enable_warnings_logging()
if conf.log_exceptions:
self.enable_exception_logging()
class StreamHandler(logging.StreamHandler):
"""
A specialized StreamHandler that logs INFO and DEBUG messages to
stdout, and all other messages to stderr. Also provides coloring
of the output, if enabled in the parent logger.
"""
def emit(self, record):
'''
Emit the record, sending DEBUG/INFO output to stdout and everything else to stderr.
'''
if record.levelno <= logging.INFO:
stream = sys.stdout
else:
stream = sys.stderr
if record.levelno < logging.DEBUG or not _conf.use_color:
print(record.levelname, end='', file=stream)
else:
# Import utils.console only if necessary and at the latest because
# the import takes a significant time [#4649]
from .utils.console import color_print
if record.levelno < logging.INFO:
color_print(record.levelname, 'magenta', end='', file=stream)
elif record.levelno < logging.WARN:
color_print(record.levelname, 'green', end='', file=stream)
elif record.levelno < logging.ERROR:
color_print(record.levelname, 'brown', end='', file=stream)
else:
color_print(record.levelname, 'red', end='', file=stream)
record.message = f"{record.msg} [{record.origin:s}]"
print(": " + record.message, file=stream)
class FilterOrigin:
'''A filter for the record origin'''
def __init__(self, origin):
self.origin = origin
def filter(self, record):
return record.origin.startswith(self.origin)
class ListHandler(logging.Handler):
'''A handler that can be used to capture the records in a list'''
def __init__(self, filter_level=None, filter_origin=None):
logging.Handler.__init__(self)
self.log_list = []
def emit(self, record):
self.log_list.append(record)
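# Example (sketch, not part of this module): once warnings logging is enabled
# (the default), AstropyWarning subclasses are routed through this logger and
# can be captured with the context managers above::
#
#     import warnings
#     from astropy import log
#     from astropy.utils.exceptions import AstropyUserWarning
#
#     with log.log_to_list() as records:
#         warnings.warn("something to note", AstropyUserWarning)
#     # ``records`` now holds a LogRecord with levelname 'WARNING'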
|
849d26898fd469a494890a464c47e1076f6d55df28b54c2619cc895ab57340d1 | # NOTE: First try _dev.scm_version if it exists and setuptools_scm is installed.
# That file is not included in astropy wheels/tarballs, in which case we
# fall back on the generated _version module.
try:
try:
from ._dev.scm_version import version
except ImportError:
from ._version import version
except Exception:
import warnings
warnings.warn(
f'could not determine {__name__.split(".")[0]} package version; '
f'this indicates a broken installation')
del warnings
version = '0.0.0'
# We use Version to define major, minor, micro, but ignore any suffixes.
def split_version(version):
pieces = [0, 0, 0]
try:
from packaging.version import Version
v = Version(version)
pieces = [v.major, v.minor, v.micro]
except Exception:
pass
return pieces
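# For example (illustrative): split_version('5.0.4') gives [5, 0, 4], a
# development string such as '5.1.dev130' gives [5, 1, 0], and anything that
# packaging cannot parse falls back to [0, 0, 0].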
major, minor, bugfix = split_version(version)
del split_version # clean up namespace.
release = 'dev' not in version
|
e3970894c3f13086681508803f5f44c92c2fd9b69c5f237606b04099de0ac0dd | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This file contains pytest configuration settings that are astropy-specific
(i.e. those that would not necessarily be shared by affiliated packages
making use of astropy's test runner).
"""
import builtins
import os
import sys
import tempfile
import warnings
try:
from pytest_astropy_header.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS
except ImportError:
PYTEST_HEADER_MODULES = {}
TESTED_VERSIONS = {}
import pytest
from astropy import __version__
# This is needed to silence a warning from matplotlib caused by
# PyInstaller's matplotlib runtime hook. This can be removed once the
# issue is fixed upstream in PyInstaller, and only impacts us when running
# the tests from a PyInstaller bundle.
# See https://github.com/astropy/astropy/issues/10785
if getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS'):
# The above checks whether we are running in a PyInstaller bundle.
warnings.filterwarnings("ignore", "(?s).*MATPLOTLIBDATA.*",
category=UserWarning)
# Note: while the filterwarnings is required, this import has to come after the
# filterwarnings above, because this attempts to import matplotlib:
from astropy.utils.compat.optional_deps import HAS_MATPLOTLIB # noqa: E402
if HAS_MATPLOTLIB:
import matplotlib
matplotlibrc_cache = {}
@pytest.fixture
def ignore_matplotlibrc():
# This is a fixture for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
from matplotlib import pyplot as plt
with plt.style.context({}, after_reset=True):
yield
@pytest.fixture
def fast_thread_switching():
"""Fixture that reduces thread switching interval.
This makes it easier to provoke race conditions.
"""
old = sys.getswitchinterval()
sys.setswitchinterval(1e-6)
yield
sys.setswitchinterval(old)
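# A test can opt in to the reduced switch interval simply by requesting the
# fixture (hypothetical example)::
#
#     def test_concurrent_table_access(fast_thread_switching):
#         ...  # exercise the code under test from several threads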
def pytest_configure(config):
from astropy.utils.iers import conf as iers_conf
# Disable IERS auto download for testing
iers_conf.auto_download = False
builtins._pytest_running = True
# do not assign to matplotlibrc_cache in function scope
if HAS_MATPLOTLIB:
with warnings.catch_warnings():
warnings.simplefilter('ignore')
matplotlibrc_cache.update(matplotlib.rcParams)
matplotlib.rcdefaults()
matplotlib.use('Agg')
# Make sure we use temporary directories for the config and cache
# so that the tests are insensitive to local configuration. Note that this
# is also set in the test runner, but we need to also set it here for
# things to work properly in parallel mode
builtins._xdg_config_home_orig = os.environ.get('XDG_CONFIG_HOME')
builtins._xdg_cache_home_orig = os.environ.get('XDG_CACHE_HOME')
os.environ['XDG_CONFIG_HOME'] = tempfile.mkdtemp('astropy_config')
os.environ['XDG_CACHE_HOME'] = tempfile.mkdtemp('astropy_cache')
os.mkdir(os.path.join(os.environ['XDG_CONFIG_HOME'], 'astropy'))
os.mkdir(os.path.join(os.environ['XDG_CACHE_HOME'], 'astropy'))
config.option.astropy_header = True
PYTEST_HEADER_MODULES['PyERFA'] = 'erfa'
PYTEST_HEADER_MODULES['Cython'] = 'cython'
PYTEST_HEADER_MODULES['Scikit-image'] = 'skimage'
PYTEST_HEADER_MODULES['asdf'] = 'asdf'
TESTED_VERSIONS['Astropy'] = __version__
def pytest_unconfigure(config):
from astropy.utils.iers import conf as iers_conf
# Undo IERS auto download setting for testing
iers_conf.reset('auto_download')
builtins._pytest_running = False
# do not assign to matplotlibrc_cache in function scope
if HAS_MATPLOTLIB:
with warnings.catch_warnings():
warnings.simplefilter('ignore')
matplotlib.rcParams.update(matplotlibrc_cache)
matplotlibrc_cache.clear()
if builtins._xdg_config_home_orig is None:
os.environ.pop('XDG_CONFIG_HOME')
else:
os.environ['XDG_CONFIG_HOME'] = builtins._xdg_config_home_orig
if builtins._xdg_cache_home_orig is None:
os.environ.pop('XDG_CACHE_HOME')
else:
os.environ['XDG_CACHE_HOME'] = builtins._xdg_cache_home_orig
def pytest_terminal_summary(terminalreporter):
"""Output a warning to IPython users in case any tests failed."""
try:
get_ipython()
except NameError:
return
if not terminalreporter.stats.get('failed'):
# Only issue the warning when there are actually failures
return
terminalreporter.ensure_newline()
terminalreporter.write_line(
'Some tests may fail when run from the IPython prompt; '
'especially, but not limited to, tests involving logging and warning '
'handling. Unless you are certain as to the cause of the failure, '
'please check that the failure occurs outside IPython as well. See '
'https://docs.astropy.org/en/stable/known_issues.html#failing-logging-'
'tests-when-running-the-tests-in-ipython for more information.',
yellow=True, bold=True)
|
b083f49c5a623cf28e75b25cd79688276dbe7ed19355a301169da9235586360e | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# This file needs to be included here to make sure commands such
# as ``pytest docs/...`` work, since such an invocation would otherwise
# ignore the conftest.py file at the root of the repository
# and the one in astropy/conftest.py
import os
import tempfile
import pytest
# Make sure we use temporary directories for the config and cache
# so that the tests are insensitive to local configuration.
os.environ['XDG_CONFIG_HOME'] = tempfile.mkdtemp('astropy_config')
os.environ['XDG_CACHE_HOME'] = tempfile.mkdtemp('astropy_cache')
os.mkdir(os.path.join(os.environ['XDG_CONFIG_HOME'], 'astropy'))
os.mkdir(os.path.join(os.environ['XDG_CACHE_HOME'], 'astropy'))
# Note that we don't need to change the environment variables back or remove
# them after testing, because they are only changed for the duration of the
# Python process, and this configuration only matters if running pytest
# directly, not from e.g. an IPython session.
@pytest.fixture(autouse=True)
def _docdir(request):
"""Run doctests in isolated tmpdir so outputs do not end up in repo"""
# Trigger ONLY for doctestplus
doctest_plugin = request.config.pluginmanager.getplugin("doctestplus")
if isinstance(request.node.parent, doctest_plugin._doctest_textfile_item_cls):
# Don't apply this fixture to io.rst. It reads files and doesn't write
if "io.rst" not in request.node.name:
tmpdir = request.getfixturevalue('tmpdir')
with tmpdir.as_cwd():
yield
else:
yield
else:
yield
|
6f600f3b21fec07e8bd0aecdf780f73a4b028ba7cacacb37ef41fecbcb1b874f | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
#
# Astropy documentation build configuration file.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this file.
#
# All configuration values have a default. Some values are defined in
# the global Astropy configuration which is loaded here before anything else.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('..'))
# IMPORTANT: the above commented section was generated by sphinx-quickstart, but
# is *NOT* appropriate for astropy or Astropy affiliated packages. It is left
# commented out with this explanation to make it clear why this should not be
# done. If the sys.path entry above is added, when the astropy.sphinx.conf
# import occurs, it will import the *source* version of astropy instead of the
# version installed (if invoked as "make html" or directly with sphinx), or the
# version in the build directory.
# Thus, any C-extensions that are needed to build the documentation will *not*
# be accessible, and the documentation will not build correctly.
# See sphinx_astropy.conf for which values are set there.
import os
import sys
import configparser
from datetime import datetime
from importlib import metadata
import doctest
from packaging.requirements import Requirement
from packaging.specifiers import SpecifierSet
# -- Check for missing dependencies -------------------------------------------
missing_requirements = {}
for line in metadata.requires('astropy'):
if 'extra == "docs"' in line:
req = Requirement(line.split(';')[0])
req_package = req.name.lower()
req_specifier = str(req.specifier)
try:
version = metadata.version(req_package)
except metadata.PackageNotFoundError:
missing_requirements[req_package] = req_specifier
continue
if version not in SpecifierSet(req_specifier, prereleases=True):
missing_requirements[req_package] = req_specifier
if missing_requirements:
print('The following packages could not be found and are required to '
'build the documentation:')
for key, val in missing_requirements.items():
print(f' * {key} {val}')
print('Please install the "docs" requirements.')
sys.exit(1)
from sphinx_astropy.conf.v1 import * # noqa
# -- Plot configuration -------------------------------------------------------
plot_rcparams = {}
plot_rcparams['figure.figsize'] = (6, 6)
plot_rcparams['savefig.facecolor'] = 'none'
plot_rcparams['savefig.bbox'] = 'tight'
plot_rcparams['axes.labelsize'] = 'large'
plot_rcparams['figure.subplot.hspace'] = 0.5
plot_apply_rcparams = True
plot_html_show_source_link = False
plot_formats = ['png', 'svg', 'pdf']
# Don't use the default - which includes a numpy and matplotlib import
plot_pre_code = ""
# -- General configuration ----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.7'
# To perform a Sphinx version check that needs to be more specific than
# major.minor, call `check_sphinx_version("X.Y.Z")` here.
check_sphinx_version("1.2.1") # noqa: F405
# The intersphinx_mapping in sphinx_astropy.sphinx refers to astropy for
# the benefit of other packages who want to refer to objects in the
# astropy core. However, we don't want to cyclically reference astropy in its
# own build so we remove it here.
del intersphinx_mapping['astropy'] # noqa: F405
# add any custom intersphinx for astropy
intersphinx_mapping['astropy-dev'] = ('https://docs.astropy.org/en/latest/', None) # noqa: F405
intersphinx_mapping['pyerfa'] = ('https://pyerfa.readthedocs.io/en/stable/', None) # noqa: F405
intersphinx_mapping['pytest'] = ('https://docs.pytest.org/en/stable/', None) # noqa: F405
intersphinx_mapping['ipython'] = ('https://ipython.readthedocs.io/en/stable/', None) # noqa: F405
intersphinx_mapping['pandas'] = ('https://pandas.pydata.org/pandas-docs/stable/', None) # noqa: F405, E501
intersphinx_mapping['sphinx_automodapi'] = ('https://sphinx-automodapi.readthedocs.io/en/stable/', None) # noqa: F405, E501
intersphinx_mapping['packagetemplate'] = ('https://docs.astropy.org/projects/package-template/en/latest/', None) # noqa: F405, E501
intersphinx_mapping['h5py'] = ('https://docs.h5py.org/en/stable/', None) # noqa: F405
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns.append('_templates') # noqa: F405
exclude_patterns.append('changes') # noqa: F405
exclude_patterns.append('_pkgtemplate.rst') # noqa: F405
exclude_patterns.append('**/*.inc.rst') # .inc.rst mean *include* files, don't have sphinx process them # noqa: F405, E501
# Add any paths that contain templates here, relative to this directory.
if 'templates_path' not in locals(): # in case parent conf.py defines it
templates_path = []
templates_path.append('_templates')
extensions += ["sphinx_changelog"] # noqa: F405
# Grab minversion from setup.cfg
setup_cfg = configparser.ConfigParser()
setup_cfg.read(os.path.join(os.path.pardir, 'setup.cfg'))
__minimum_python_version__ = setup_cfg['options']['python_requires'].replace('>=', '')
project = u'Astropy'
min_versions = {}
for line in metadata.requires('astropy'):
req = Requirement(line.split(';')[0])
min_versions[req.name.lower()] = str(req.specifier)
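# ``min_versions`` now maps package names to specifier strings, e.g.
# (illustrative) {'numpy': '>=1.18', 'pyerfa': '>=1.7'}, which are substituted
# into the epilog read from common_links.txt below.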
# This is added to the end of RST files - a good place to put substitutions to
# be used globally.
with open("common_links.txt", "r") as cl:
rst_epilog += cl.read().format(minimum_python=__minimum_python_version__,
**min_versions)
# Manually register doctest options since matplotlib 3.5 messed up allowing them
# from pytest-doctestplus
IGNORE_OUTPUT = doctest.register_optionflag('IGNORE_OUTPUT')
REMOTE_DATA = doctest.register_optionflag('REMOTE_DATA')
FLOAT_CMP = doctest.register_optionflag('FLOAT_CMP')
# Whether to create cross-references for the parameter types in the
# Parameters, Other Parameters, Returns and Yields sections of the docstring.
numpydoc_xref_param_type = True
# Words not to cross-reference. Most likely, these are common words used in
# parameter type descriptions that may be confused for classes of the same
# name. The base set comes from sphinx-astropy. We add more here.
numpydoc_xref_ignore.update({
"mixin",
"Any", # aka something that would be annotated with `typing.Any`
# needed in subclassing numpy # TODO! revisit
"Arguments", "Path",
# TODO! not need to ignore.
"flag", "bits",
})
# Mappings to fully qualified paths (or correct ReST references) for the
# aliases/shortcuts used when specifying the types of parameters.
# Numpy provides some defaults
# https://github.com/numpy/numpydoc/blob/b352cd7635f2ea7748722f410a31f937d92545cc/numpydoc/xref.py#L62-L94
# and a base set comes from sphinx-astropy.
# so here we mostly need to define Astropy-specific x-refs
numpydoc_xref_aliases.update({
# python & adjacent
"Any": "`~typing.Any`",
"file-like": ":term:`python:file-like object`",
"file": ":term:`python:file object`",
"path-like": ":term:`python:path-like object`",
"module": ":term:`python:module`",
"buffer-like": ":term:buffer-like",
"hashable": ":term:`python:hashable`",
# for matplotlib
"color": ":term:`color`",
# for numpy
"ints": ":class:`python:int`",
# for astropy
"number": ":term:`number`",
"Representation": ":class:`~astropy.coordinates.BaseRepresentation`",
"writable": ":term:`writable file-like object`",
"readable": ":term:`readable file-like object`",
"BaseHDU": ":doc:`HDU </io/fits/api/hdus>`"
})
# Add from sphinx-astropy 1) glossary aliases 2) physical types.
numpydoc_xref_aliases.update(numpydoc_xref_astropy_aliases)
# -- Project information ------------------------------------------------------
author = u'The Astropy Developers'
copyright = f'2011–{datetime.utcnow().year}, ' + author
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
# The full version, including alpha/beta/rc tags.
release = metadata.version(project)
# The short X.Y version.
version = '.'.join(release.split('.')[:2])
# Only include dev docs in dev version.
dev = 'dev' in release
if not dev:
exclude_patterns.append('development/*') # noqa: F405
exclude_patterns.append('testhelpers.rst') # noqa: F405
# -- Options for the module index ---------------------------------------------
modindex_common_prefix = ['astropy.']
# -- Options for HTML output ---------------------------------------------------
# A NOTE ON HTML THEMES
#
# The global astropy configuration uses a custom theme,
# 'bootstrap-astropy', which is installed along with astropy. The
# theme has options for controlling the text of the logo in the upper
# left corner. This is how you would specify the options in order to
# override the theme defaults (The following options *are* the
# defaults, so we do not actually need to set them here.)
# html_theme_options = {
# 'logotext1': 'astro', # white, semi-bold
# 'logotext2': 'py', # orange, light
# 'logotext3': ':docs' # white, light
# }
# A different theme can be used, or other parts of this theme can be
# modified, by overriding some of the variables set in the global
# configuration. The variables set in the global configuration are
# listed below, commented out.
# Add any paths that contain custom themes here, relative to this directory.
# To use a different custom theme, add the directory containing the theme.
# html_theme_path = []
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes. To override the custom theme, set this to the
# name of a builtin theme or the name of a custom theme in html_theme_path.
# html_theme = None
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = ''
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = ''
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = f'{project} v{release}'
# Output file base name for HTML help builder.
htmlhelp_basename = project + 'doc'
# A dictionary of values to pass into the template engine’s context for all pages.
html_context = {
'to_be_indexed': ['stable', 'latest'],
'is_development': dev
}
# -- Options for LaTeX output --------------------------------------------------
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [('index', project + '.tex', project + u' Documentation',
author, 'manual')]
latex_logo = '_static/astropy_logo.pdf'
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [('index', project.lower(), project + u' Documentation',
[author], 1)]
# Setting this URL is required by sphinx-astropy
github_issues_url = 'https://github.com/astropy/astropy/issues/'
edit_on_github_branch = 'main'
# Enable nitpicky mode - which ensures that all references in the docs
# resolve.
nitpicky = True
# The actual entries are read from the docs/nitpick-exceptions file below;
# the empty list here is just the starting point.
nitpick_ignore = []
for line in open('nitpick-exceptions'):
if line.strip() == "" or line.startswith("#"):
continue
dtype, target = line.split(None, 1)
target = target.strip()
nitpick_ignore.append((dtype, target))
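# Each non-empty, non-comment line of that file is expected to have the form
# "<domain:type> <target>", for example (illustrative)::
#
#     py:class numpy.ma.core.MaskedArray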
# -- Options for the Sphinx gallery -------------------------------------------
try:
import warnings
import sphinx_gallery # noqa: F401
extensions += ["sphinx_gallery.gen_gallery"] # noqa: F405
sphinx_gallery_conf = {
'backreferences_dir': 'generated/modules', # path to store the module using example template # noqa: E501
'filename_pattern': '^((?!skip_).)*$', # execute all examples except those that start with "skip_" # noqa: E501
'examples_dirs': f'..{os.sep}examples', # path to the examples scripts
'gallery_dirs': 'generated/examples', # path to save gallery generated examples
'reference_url': {
'astropy': None,
'matplotlib': 'https://matplotlib.org/stable/',
'numpy': 'https://numpy.org/doc/stable/',
},
'abort_on_example_error': True
}
# Filter out backend-related warnings as described in
# https://github.com/sphinx-gallery/sphinx-gallery/pull/564
warnings.filterwarnings("ignore", category=UserWarning,
message='Matplotlib is currently using agg, which is a'
' non-GUI backend, so cannot show the figure.')
except ImportError:
sphinx_gallery = None
# -- Options for linkcheck output -------------------------------------------
linkcheck_retry = 5
linkcheck_ignore = ['https://journals.aas.org/manuscript-preparation/',
'https://maia.usno.navy.mil/',
'https://www.usno.navy.mil/USNO/time/gps/usno-gps-time-transfer',
'https://aa.usno.navy.mil/publications/docs/Circular_179.php',
'http://data.astropy.org',
'https://doi.org/10.1017/S0251107X00002406', # internal server error
'https://doi.org/10.1017/pasa.2013.31', # internal server error
r'https://github\.com/astropy/astropy/(?:issues|pull)/\d+']
linkcheck_timeout = 180
linkcheck_anchors = False
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
html_extra_path = ['robots.txt']
def rstjinja(app, docname, source):
"""Render pages as a jinja template to hide/show dev docs. """
# Make sure we're outputting HTML
if app.builder.format != 'html':
return
files_to_render = ["index", "install"]
if docname in files_to_render:
print(f"Jinja rendering {docname}")
rendered = app.builder.templates.render_string(
source[0], app.config.html_context)
source[0] = rendered
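# With this hook, the rendered pages ("index" and "install") can use Jinja
# blocks driven by the ``html_context`` dictionary defined above, for example
# (illustrative)::
#
#     {% if is_development %} ...development-only content... {% endif %}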
def resolve_astropy_and_dev_reference(app, env, node, contnode):
"""
Reference targets for ``astropy:`` and ``astropy-dev:`` are special cases.
Documentation links in astropy can be set up as intersphinx links so that
affiliated packages do not have to override the docstrings when building
the docs.
If we are building the development docs it is a local ref targeting the
label ``astropy-dev:<label>``, but for stable docs it should be an
intersphinx resolution to the development docs.
See https://github.com/astropy/astropy/issues/11366
"""
# should the node be processed?
reftarget = node.get('reftarget') # str or None
if str(reftarget).startswith('astropy:'):
# This allows Astropy to use intersphinx links to itself and have
# them resolve to local links. Downstream packages will see intersphinx.
# TODO! deprecate this if sphinx-doc/sphinx/issues/9169 is implemented.
process, replace = True, 'astropy:'
elif dev and str(reftarget).startswith('astropy-dev:'):
process, replace = True, 'astropy-dev:'
else:
process, replace = False, ''
# make link local
if process:
reftype = node.get('reftype')
refdoc = node.get('refdoc', app.env.docname)
# convert astropy intersphinx targets to local links.
# there are a few types of intersphinx link patterns, as described in
# https://docs.readthedocs.io/en/stable/guides/intersphinx.html
reftarget = reftarget.replace(replace, '')
if reftype == "doc": # also need to replace the doc link
node.replace_attr("reftarget", reftarget)
# Delegate to the ref node's original domain/target (typically :ref:)
try:
domain = app.env.domains[node['refdomain']]
return domain.resolve_xref(app.env, refdoc, app.builder,
reftype, reftarget, node, contnode)
except Exception:
pass
# Otherwise return None which should delegate to intersphinx
def setup(app):
if sphinx_gallery is None:
msg = ('The sphinx_gallery extension is not installed, so the '
'gallery will not be built. You will probably see '
'additional warnings about undefined references due '
'to this.')
try:
app.warn(msg)
except AttributeError:
# Sphinx 1.6+
from sphinx.util import logging
logger = logging.getLogger(__name__)
logger.warning(msg)
# Generate the page from Jinja template
app.connect("source-read", rstjinja)
# Set this to higher priority than intersphinx; this way when building
# dev docs astropy-dev: targets will go to the local docs instead of the
# intersphinx mapping
app.connect("missing-reference", resolve_astropy_and_dev_reference,
priority=400)
|
c07338c94a815b26b8846754da892de4c0e1b22dde30525e9980609aeaba4861 | # NOTE: this hook should be added to
# https://github.com/pyinstaller/pyinstaller-hooks-contrib
# once that repository is ready for pull requests
from PyInstaller.utils.hooks import collect_data_files
datas = collect_data_files('skyfield')
|
bfa48512aada02006c25f249d8b46593f5a541cd7244d7f675f740dab06a9099 | # -*- coding: utf-8 -*-
"""
========================
Title of Example
========================
This example <verb> <active tense> <does something>.
The example uses <packages> to <do something> and <other package> to <do other
thing>. Include links to referenced packages like this: `astropy.io.fits` to
show the full path, or like this `~astropy.io.fits` to show just 'fits'.
*By: <names>*
*License: BSD*
"""
##############################################################################
# Set up numpy and matplotlib, and use a nicer set of plot parameters:
import numpy as np
import matplotlib.pyplot as plt
from astropy.visualization import astropy_mpl_style
plt.style.use(astropy_mpl_style)
# uncomment if including figures:
# import matplotlib.pyplot as plt
# from astropy.visualization import astropy_mpl_style
# plt.style.use(astropy_mpl_style)
##############################################################################
# This code block is executed, although it produces no output. Lines starting
# with a simple hash are code comments and get treated as part of the code
# block. To include this new comment string we started the new block with a
# long line of hashes.
#
# The sphinx-gallery parser will assume that everything after this splitter
# which continues to start with a **comment hash and space** (respecting code
# style) is text that has to be rendered as HTML. Keep in mind to keep your
# comments together behind comment hashes; that means that to break a
# paragraph you still need to comment the blank line.
#
# In this example the next block of code produces some plottable data. Code is
# executed, the figure is saved, and then the code is presented next, followed
# by the inlined figure.
x = np.linspace(-np.pi, np.pi, 300)
xx, yy = np.meshgrid(x, x)
z = np.cos(xx) + np.cos(yy)
plt.figure()
plt.imshow(z)
plt.colorbar()
plt.xlabel('$x$')
plt.ylabel('$y$')
###########################################################################
# Again it is possible to continue the discussion with a new comment block.
# This time we introduce the next code block, which generates 2 separate figures.
plt.figure()
plt.imshow(z, cmap=plt.cm.get_cmap('hot'))
plt.figure()
plt.imshow(z, cmap=plt.cm.get_cmap('Spectral'), interpolation='none')
##########################################################################
# There are some subtle differences between comment strings rendered as HTML
# and code comment strings, which I'll demonstrate below. (Some of this
# only makes sense if you look at the
# :download:`raw Python script <plot_notebook.py>`)
#
# Comments in comment blocks remain nested in the text.
def dummy():
"""Dummy function to make sure docstrings don't get rendered as text"""
pass
# Code comments not preceded by the hash splitter are left in code blocks.
string = """
Triple-quoted string which tries to break parser but doesn't.
"""
############################################################################
# Output of the script is captured:
print('Some output from Python')
############################################################################
# Finally, I'll call ``show`` at the end just so someone running the Python
# code directly will see the plots; this is not necessary for creating the docs
plt.show()
|
547790e0143fe9a8b1996e4cecd461eb313bd82630ddb4630845753fd1343eef | # -*- coding: utf-8 -*-
r"""
==========================================================
Create a new coordinate class (for the Sagittarius stream)
==========================================================
This document describes in detail how to subclass and define a custom spherical
coordinate frame, as discussed in :ref:`astropy:astropy-coordinates-design` and
the docstring for `~astropy.coordinates.BaseCoordinateFrame`. In this example,
we will define a coordinate system defined by the plane of orbit of the
Sagittarius Dwarf Galaxy (hereafter Sgr; as defined in Majewski et al. 2003).
The Sgr coordinate system is often referred to in terms of two angular
coordinates, :math:`\Lambda,B`.
To do this, we need to define a subclass of
`~astropy.coordinates.BaseCoordinateFrame` that knows the names and units of the
coordinate system angles in each of the supported representations. In this case
we support `~astropy.coordinates.SphericalRepresentation` with "Lambda" and
"Beta". Then we have to define the transformation from this coordinate system to
some other built-in system. Here we will use Galactic coordinates, represented
by the `~astropy.coordinates.Galactic` class.
See Also
--------
* The `gala package <http://gala.adrian.pw/>`_, which defines a number of
Astropy coordinate frames for stellar stream coordinate systems.
* Majewski et al. 2003, "A Two Micron All Sky Survey View of the Sagittarius
Dwarf Galaxy. I. Morphology of the Sagittarius Core and Tidal Arms",
https://arxiv.org/abs/astro-ph/0304198
* Law & Majewski 2010, "The Sagittarius Dwarf Galaxy: A Model for Evolution in a
Triaxial Milky Way Halo", https://arxiv.org/abs/1003.1132
* David Law's Sgr info page https://www.stsci.edu/~dlaw/Sgr/
*By: Adrian Price-Whelan, Erik Tollerud*
*License: BSD*
"""
##############################################################################
# Make `print` work the same in all versions of Python, set up numpy,
# matplotlib, and use a nicer set of plot parameters:
import numpy as np
import matplotlib.pyplot as plt
from astropy.visualization import astropy_mpl_style
plt.style.use(astropy_mpl_style)
##############################################################################
# Import the packages necessary for coordinates
from astropy.coordinates import frame_transform_graph
from astropy.coordinates.matrix_utilities import rotation_matrix, matrix_product, matrix_transpose
import astropy.coordinates as coord
import astropy.units as u
##############################################################################
# The first step is to create a new class, which we'll call
# ``Sagittarius`` and make it a subclass of
# `~astropy.coordinates.BaseCoordinateFrame`:
class Sagittarius(coord.BaseCoordinateFrame):
"""
A Heliocentric spherical coordinate system defined by the orbit
of the Sagittarius dwarf galaxy, as described in
https://ui.adsabs.harvard.edu/abs/2003ApJ...599.1082M
and further explained in
https://www.stsci.edu/~dlaw/Sgr/.
Parameters
----------
representation : `~astropy.coordinates.BaseRepresentation` or None
A representation object or None to have no data (or use the other keywords)
Lambda : `~astropy.coordinates.Angle`, optional, must be keyword
The longitude-like angle corresponding to Sagittarius' orbit.
Beta : `~astropy.coordinates.Angle`, optional, must be keyword
The latitude-like angle corresponding to Sagittarius' orbit.
distance : `~astropy.units.Quantity`, optional, must be keyword
The Distance for this object along the line-of-sight.
pm_Lambda_cosBeta : `~astropy.units.Quantity`, optional, must be keyword
The proper motion along the stream in ``Lambda`` (including the
``cos(Beta)`` factor) for this object (``pm_Beta`` must also be given).
pm_Beta : `~astropy.units.Quantity`, optional, must be keyword
        The latitude-like proper motion in ``Beta`` for this object
        (``pm_Lambda_cosBeta`` must also be given).
radial_velocity : `~astropy.units.Quantity`, optional, keyword-only
The radial velocity of this object.
"""
default_representation = coord.SphericalRepresentation
default_differential = coord.SphericalCosLatDifferential
frame_specific_representation_info = {
coord.SphericalRepresentation: [
coord.RepresentationMapping('lon', 'Lambda'),
coord.RepresentationMapping('lat', 'Beta'),
coord.RepresentationMapping('distance', 'distance')]
}
##############################################################################
# Breaking this down line-by-line, we define the class as a subclass of
# `~astropy.coordinates.BaseCoordinateFrame`. Then we include a descriptive
# docstring. The final lines are class-level attributes that specify the
# default representation for the data, default differential for the velocity
# information, and mappings from the attribute names used by representation
# objects to the names that are to be used by the ``Sagittarius`` frame. In this
# case we override the names in the spherical representations but don't do
# anything with other representations like cartesian or cylindrical.
#
# Next we have to define the transformation from this coordinate system to some
# other built-in coordinate system; we will use Galactic coordinates. We can do
# this by defining functions that return transformation matrices, or by simply
# defining a function that accepts a coordinate and returns a new coordinate in
# the new system. Because the transformation to the Sagittarius coordinate
# system is just a spherical rotation from Galactic coordinates, we'll just
# define a function that returns this matrix. We'll start by constructing the
# transformation matrix using pre-determined Euler angles and the
# ``rotation_matrix`` helper function:
SGR_PHI = (180 + 3.75) * u.degree # Euler angles (from Law & Majewski 2010)
SGR_THETA = (90 - 13.46) * u.degree
SGR_PSI = (180 + 14.111534) * u.degree
# Generate the rotation matrix using the x-convention (see Goldstein)
D = rotation_matrix(SGR_PHI, "z")
C = rotation_matrix(SGR_THETA, "x")
B = rotation_matrix(SGR_PSI, "z")
A = np.diag([1.,1.,-1.])
SGR_MATRIX = matrix_product(A, B, C, D)
##############################################################################
# Since we already constructed the transformation (rotation) matrix above, and
# the inverse of a rotation matrix is just its transpose, the required
# transformation functions are very simple:
@frame_transform_graph.transform(coord.StaticMatrixTransform, coord.Galactic, Sagittarius)
def galactic_to_sgr():
""" Compute the transformation matrix from Galactic spherical to
heliocentric Sgr coordinates.
"""
return SGR_MATRIX
##############################################################################
# The decorator ``@frame_transform_graph.transform(coord.StaticMatrixTransform,
# coord.Galactic, Sagittarius)`` registers this function on the
# ``frame_transform_graph`` as a coordinate transformation. Inside the function,
# we simply return the previously defined rotation matrix.
#
# We then register the inverse transformation by using the transpose of the
# rotation matrix (which is faster to compute than the inverse):
@frame_transform_graph.transform(coord.StaticMatrixTransform, Sagittarius, coord.Galactic)
def sgr_to_galactic():
""" Compute the transformation matrix from heliocentric Sgr coordinates to
spherical Galactic.
"""
return matrix_transpose(SGR_MATRIX)
##############################################################################
# Now that we've registered these transformations between ``Sagittarius`` and
# `~astropy.coordinates.Galactic`, we can transform between *any* coordinate
# system and ``Sagittarius`` (as long as the other system has a path to
# transform to `~astropy.coordinates.Galactic`). For example, to transform from
# ICRS coordinates to ``Sagittarius``, we would do:
icrs = coord.SkyCoord(280.161732*u.degree, 11.91934*u.degree, frame='icrs')
sgr = icrs.transform_to(Sagittarius)
print(sgr)
##############################################################################
# Or, to transform from the ``Sagittarius`` frame to ICRS coordinates (in this
# case, a line along the ``Sagittarius`` x-y plane):
sgr = coord.SkyCoord(Lambda=np.linspace(0, 2*np.pi, 128)*u.radian,
Beta=np.zeros(128)*u.radian, frame='sagittarius')
icrs = sgr.transform_to(coord.ICRS)
print(icrs)
##############################################################################
# As an example, we'll now plot the points in both coordinate systems:
fig, axes = plt.subplots(2, 1, figsize=(8, 10),
subplot_kw={'projection': 'aitoff'})
axes[0].set_title("Sagittarius")
axes[0].plot(sgr.Lambda.wrap_at(180*u.deg).radian, sgr.Beta.radian,
linestyle='none', marker='.')
axes[1].set_title("ICRS")
axes[1].plot(icrs.ra.wrap_at(180*u.deg).radian, icrs.dec.radian,
linestyle='none', marker='.')
plt.show()
##############################################################################
# This particular transformation is just a spherical rotation, which is a
# special case of an Affine transformation with no vector offset. The
# transformation of velocity components is therefore natively supported as
# well:
sgr = coord.SkyCoord(Lambda=np.linspace(0, 2*np.pi, 128)*u.radian,
Beta=np.zeros(128)*u.radian,
pm_Lambda_cosBeta=np.random.uniform(-5, 5, 128)*u.mas/u.yr,
pm_Beta=np.zeros(128)*u.mas/u.yr,
frame='sagittarius')
icrs = sgr.transform_to(coord.ICRS)
print(icrs)
fig, axes = plt.subplots(3, 1, figsize=(8, 10), sharex=True)
axes[0].set_title("Sagittarius")
axes[0].plot(sgr.Lambda.degree,
sgr.pm_Lambda_cosBeta.value,
linestyle='none', marker='.')
axes[0].set_xlabel(r"$\Lambda$ [deg]")
axes[0].set_ylabel(
fr"$\mu_\Lambda \, \cos B$ [{sgr.pm_Lambda_cosBeta.unit.to_string('latex_inline')}]")
axes[1].set_title("ICRS")
axes[1].plot(icrs.ra.degree, icrs.pm_ra_cosdec.value,
linestyle='none', marker='.')
axes[1].set_ylabel(
fr"$\mu_\alpha \, \cos\delta$ [{icrs.pm_ra_cosdec.unit.to_string('latex_inline')}]")
axes[2].set_title("ICRS")
axes[2].plot(icrs.ra.degree, icrs.pm_dec.value,
linestyle='none', marker='.')
axes[2].set_xlabel("RA [deg]")
axes[2].set_ylabel(
fr"$\mu_\delta$ [{icrs.pm_dec.unit.to_string('latex_inline')}]")
plt.show()
|
169d08766b27d0e8c3edf0512170a6796246d917db295c0b3463aa7bfb1c4330 | # -*- coding: utf-8 -*-
"""
================================================================
Convert a radial velocity to the Galactic Standard of Rest (GSR)
================================================================
Radial or line-of-sight velocities of sources are often reported in a
Heliocentric or Solar-system barycentric reference frame. A common
transformation incorporates the projection of the Sun's motion along the
line-of-sight to the target, hence transforming it to a Galactic rest frame
instead (sometimes referred to as the Galactic Standard of Rest, GSR). This
transformation depends on the assumptions about the orientation of the Galactic
frame relative to the bary- or Heliocentric frame. It also depends on the
assumed solar velocity vector. Here we'll demonstrate how to perform this
transformation using a sky position and barycentric radial-velocity.
*By: Adrian Price-Whelan*
*License: BSD*
"""
################################################################################
# Make print work the same in all versions of Python and import the required
# Astropy packages:
import astropy.units as u
import astropy.coordinates as coord
################################################################################
# Use the latest convention for the Galactocentric coordinates
coord.galactocentric_frame_defaults.set('latest')
################################################################################
# For this example, let's work with the coordinates and barycentric radial
# velocity of the star HD 155967, as obtained from
# `Simbad <https://simbad.u-strasbg.fr/simbad/>`_:
icrs = coord.SkyCoord(ra=258.58356362*u.deg, dec=14.55255619*u.deg,
radial_velocity=-16.1*u.km/u.s, frame='icrs')
################################################################################
# We next need to decide on the velocity of the Sun in the assumed GSR frame.
# We'll use the same velocity vector as used in the
# `~astropy.coordinates.Galactocentric` frame, and convert it to a
# `~astropy.coordinates.CartesianRepresentation` object using the
# ``.to_cartesian()`` method of the
# `~astropy.coordinates.CartesianDifferential` object ``galcen_v_sun``:
v_sun = coord.Galactocentric().galcen_v_sun.to_cartesian()
################################################################################
# We now need to get a unit vector in the assumed Galactic frame from the sky
# position in the ICRS frame above. We'll use this unit vector to project the
# solar velocity onto the line-of-sight:
gal = icrs.transform_to(coord.Galactic)
cart_data = gal.data.to_cartesian()
unit_vector = cart_data / cart_data.norm()
################################################################################
# Now we project the solar velocity using this unit vector:
v_proj = v_sun.dot(unit_vector)
################################################################################
# Finally, we add the projection of the solar velocity to the radial velocity
# to get a GSR radial velocity:
rv_gsr = icrs.radial_velocity + v_proj
print(rv_gsr)
################################################################################
# We could wrap this in a function so we can control the solar velocity and
# re-use the above code:
def rv_to_gsr(c, v_sun=None):
"""Transform a barycentric radial velocity to the Galactic Standard of Rest
(GSR).
    The input radial velocity must be attached to the input coordinate, for
    example as the ``radial_velocity`` component of a `~astropy.coordinates.SkyCoord`.
Parameters
----------
c : `~astropy.coordinates.BaseCoordinateFrame` subclass instance
        The radial velocity, associated with a sky coordinate, to be
        transformed.
v_sun : `~astropy.units.Quantity`, optional
The 3D velocity of the solar system barycenter in the GSR frame.
Defaults to the same solar motion as in the
`~astropy.coordinates.Galactocentric` frame.
Returns
-------
v_gsr : `~astropy.units.Quantity`
The input radial velocity transformed to a GSR frame.
"""
if v_sun is None:
v_sun = coord.Galactocentric().galcen_v_sun.to_cartesian()
gal = c.transform_to(coord.Galactic)
cart_data = gal.data.to_cartesian()
unit_vector = cart_data / cart_data.norm()
v_proj = v_sun.dot(unit_vector)
return c.radial_velocity + v_proj
rv_gsr = rv_to_gsr(icrs)
print(rv_gsr)
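################################################################################
# Since the solar velocity is a keyword argument, it is easy to see how the
# result depends on the assumed solar motion. The value below is purely
# illustrative (not a recommended solar velocity); it only demonstrates how the
# keyword is used:
v_sun_alt = coord.CartesianDifferential([11.1, 250., 7.25] * u.km / u.s).to_cartesian()
print(rv_to_gsr(icrs, v_sun=v_sun_alt))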
|
9035692fe1ca2a75b94864183c99f6dfb14e7c446c8fa7bdbb53ec46b965569a | # -*- coding: utf-8 -*-
"""
========================================================================
Transforming positions and velocities to and from a Galactocentric frame
========================================================================
This document shows a few examples of how to use and customize the
`~astropy.coordinates.Galactocentric` frame to transform Heliocentric sky
positions, distance, proper motions, and radial velocities to a Galactocentric,
Cartesian frame, and the same in reverse.
The main configurable parameters of the `~astropy.coordinates.Galactocentric`
frame control the position and velocity of the solar system barycenter within
the Galaxy. These are specified by setting the ICRS coordinates of the
Galactic center, the distance to the Galactic center (the sun-galactic center
line is always assumed to be the x-axis of the Galactocentric frame), and the
Cartesian 3-velocity of the sun in the Galactocentric frame. We'll first
demonstrate how to customize these values, then show how to set the solar motion
instead by inputting the proper motion of Sgr A*.
Note that, for brevity, we may refer to the solar system barycenter as just "the
sun" in the examples below.
*By: Adrian Price-Whelan*
*License: BSD*
"""
##############################################################################
# Make `print` work the same in all versions of Python, set up numpy,
# matplotlib, and use a nicer set of plot parameters:
import numpy as np
import matplotlib.pyplot as plt
from astropy.visualization import astropy_mpl_style
plt.style.use(astropy_mpl_style)
##############################################################################
# Import the necessary astropy subpackages
import astropy.coordinates as coord
import astropy.units as u
##############################################################################
# Let's first define a barycentric coordinate and velocity in the ICRS frame.
# We'll use the data for the star HD 39881 from the `Simbad
# <https://simbad.u-strasbg.fr/simbad/>`_ database:
c1 = coord.SkyCoord(ra=89.014303*u.degree, dec=13.924912*u.degree,
distance=(37.59*u.mas).to(u.pc, u.parallax()),
pm_ra_cosdec=372.72*u.mas/u.yr,
pm_dec=-483.69*u.mas/u.yr,
radial_velocity=0.37*u.km/u.s,
frame='icrs')
##############################################################################
# This is a high proper-motion star; suppose we'd like to transform its position
# and velocity to a Galactocentric frame to see if it has a large 3D velocity
# as well. To use the Astropy default solar position and motion parameters, we
# can simply do:
gc1 = c1.transform_to(coord.Galactocentric)
##############################################################################
# From here, we can access the components of the resulting
# `~astropy.coordinates.Galactocentric` instance to see the 3D Cartesian
# velocity components:
print(gc1.v_x, gc1.v_y, gc1.v_z)
##############################################################################
# The default parameters for the `~astropy.coordinates.Galactocentric` frame
# are detailed in the linked documentation, but we can modify the most commonly
# changed values using the keywords ``galcen_distance``, ``galcen_v_sun``, and
# ``z_sun`` which set the sun-Galactic center distance, the 3D velocity vector
# of the sun, and the height of the sun above the Galactic midplane,
# respectively. The velocity of the sun can be specified as an
# `~astropy.units.Quantity` object with velocity units and is interpreted as a
# Cartesian velocity, as in the example below. Note that, as with the positions,
# the Galactocentric frame is a right-handed system (i.e., the Sun is at negative
# x values), so ``v_x`` is the opposite of the Galactocentric radial velocity:
v_sun = [11.1, 244, 7.25] * (u.km / u.s) # [vx, vy, vz]
gc_frame = coord.Galactocentric(
galcen_distance=8*u.kpc,
galcen_v_sun=v_sun,
z_sun=0*u.pc)
##############################################################################
# We can then transform to this frame instead, with our custom parameters:
gc2 = c1.transform_to(gc_frame)
print(gc2.v_x, gc2.v_y, gc2.v_z)
##############################################################################
# It's sometimes useful to specify the solar motion using the `proper motion
# of Sgr A* <https://arxiv.org/abs/astro-ph/0408107>`_ instead of Cartesian
# velocity components. With an assumed distance, we can convert proper motion
# components to Cartesian velocity components using `astropy.units`:
galcen_distance = 8*u.kpc
pm_gal_sgrA = [-6.379, -0.202] * u.mas/u.yr # from Reid & Brunthaler 2004
vy, vz = -(galcen_distance * pm_gal_sgrA).to(u.km/u.s, u.dimensionless_angles())
##############################################################################
# We still have to assume a line-of-sight velocity for the Galactic center,
# which we will again take to be 11.1 km/s:
vx = 11.1 * u.km/u.s
v_sun2 = u.Quantity([vx, vy, vz]) # List of Quantity -> a single Quantity
gc_frame2 = coord.Galactocentric(galcen_distance=galcen_distance,
galcen_v_sun=v_sun2,
z_sun=0*u.pc)
gc3 = c1.transform_to(gc_frame2)
print(gc3.v_x, gc3.v_y, gc3.v_z)
##############################################################################
# The transformations also work in the opposite direction. This can be useful
# for transforming simulated or theoretical data to observable quantities. As
# an example, we'll generate 4 theoretical circular orbits at different
# Galactocentric radii with the same circular velocity, and transform them to
# Heliocentric coordinates:
ring_distances = np.arange(10, 25+1, 5) * u.kpc
circ_velocity = 220 * u.km/u.s
phi_grid = np.linspace(90, 270, 512) * u.degree # grid of azimuths
ring_rep = coord.CylindricalRepresentation(
rho=ring_distances[:,np.newaxis],
phi=phi_grid[np.newaxis],
z=np.zeros_like(ring_distances)[:,np.newaxis])
angular_velocity = (-circ_velocity / ring_distances).to(u.mas/u.yr,
u.dimensionless_angles())
ring_dif = coord.CylindricalDifferential(
d_rho=np.zeros(phi_grid.shape)[np.newaxis]*u.km/u.s,
d_phi=angular_velocity[:,np.newaxis],
d_z=np.zeros(phi_grid.shape)[np.newaxis]*u.km/u.s
)
ring_rep = ring_rep.with_differentials(ring_dif)
gc_rings = coord.SkyCoord(ring_rep, frame=coord.Galactocentric)
##############################################################################
# First, let's visualize the geometry in Galactocentric coordinates. Here are
# the positions and velocities of the rings; note that in the velocity plot,
# the velocities of the 4 rings are identical and thus overlaid under the same
# curve:
fig,axes = plt.subplots(1, 2, figsize=(12,6))
# Positions
axes[0].plot(gc_rings.x.T, gc_rings.y.T, marker='None', linewidth=3)
axes[0].text(-8., 0, r'$\odot$', fontsize=20)
axes[0].set_xlim(-30, 30)
axes[0].set_ylim(-30, 30)
axes[0].set_xlabel('$x$ [kpc]')
axes[0].set_ylabel('$y$ [kpc]')
# Velocities
axes[1].plot(gc_rings.v_x.T, gc_rings.v_y.T, marker='None', linewidth=3)
axes[1].set_xlim(-250, 250)
axes[1].set_ylim(-250, 250)
axes[1].set_xlabel(f"$v_x$ [{(u.km / u.s).to_string('latex_inline')}]")
axes[1].set_ylabel(f"$v_y$ [{(u.km / u.s).to_string('latex_inline')}]")
fig.tight_layout()
plt.show()
##############################################################################
# Now we can transform to Galactic coordinates and visualize the rings in
# observable coordinates:
gal_rings = gc_rings.transform_to(coord.Galactic)
fig, ax = plt.subplots(1, 1, figsize=(8, 6))
for i in range(len(ring_distances)):
ax.plot(gal_rings[i].l.degree, gal_rings[i].pm_l_cosb.value,
label=str(ring_distances[i]), marker='None', linewidth=3)
ax.set_xlim(360, 0)
ax.set_xlabel('$l$ [deg]')
ax.set_ylabel(fr'$\mu_l \, \cos b$ [{(u.mas/u.yr).to_string("latex_inline")}]')
ax.legend()
plt.show()
|
12bef7dc219d41692d2cbc9fe75a49900ea1f0df6e831e5d70e413d075a0fb12 | # -*- coding: utf-8 -*-
"""
===================================================================
Determining and plotting the altitude/azimuth of a celestial object
===================================================================
This example demonstrates coordinate transformations and the creation of
visibility curves to assist with observing run planning.
In this example, we make a `~astropy.coordinates.SkyCoord` instance for M33.
The altitude-azimuth coordinates are then found using
`astropy.coordinates.EarthLocation` and `astropy.time.Time` objects.
This example is meant to demonstrate the capabilities of the
`astropy.coordinates` package. For more convenient and/or complex observation
planning, consider the `astroplan <https://astroplan.readthedocs.org/>`_
package.
*By: Erik Tollerud, Kelle Cruz*
*License: BSD*
"""
##############################################################################
# Let's suppose you are planning to visit picturesque Bear Mountain State Park
# in New York, USA. You're bringing your telescope with you (of course), and
# someone told you M33 is a great target to observe there. You happen to know
# you're free at 11:00 pm local time, and you want to know if it will be up.
# Astropy can answer that.
#
# Import numpy and matplotlib. For the latter, use a nicer set of plot
# parameters and set up support for plotting/converting quantities.
import numpy as np
import matplotlib.pyplot as plt
from astropy.visualization import astropy_mpl_style, quantity_support
plt.style.use(astropy_mpl_style)
quantity_support()
##############################################################################
# Import the packages necessary for finding coordinates and making
# coordinate transformations
import astropy.units as u
from astropy.time import Time
from astropy.coordinates import SkyCoord, EarthLocation, AltAz
##############################################################################
# `astropy.coordinates.SkyCoord.from_name` uses Simbad to resolve object
# names and retrieve coordinates.
#
# Get the coordinates of M33:
m33 = SkyCoord.from_name('M33')
##############################################################################
# Use `astropy.coordinates.EarthLocation` to provide the location of Bear
# Mountain and set the time to 11pm EDT on 2012 July 12:
bear_mountain = EarthLocation(lat=41.3*u.deg, lon=-74*u.deg, height=390*u.m)
utcoffset = -4*u.hour # Eastern Daylight Time
time = Time('2012-7-12 23:00:00') - utcoffset
##############################################################################
# `astropy.coordinates.EarthLocation.get_site_names` and
# `~astropy.coordinates.EarthLocation.of_site` can be used to get
# locations of major observatories; a short illustration follows the altitude
# calculation below.
#
# Use `astropy.coordinates` to find the Alt, Az coordinates of M33 as
# observed from Bear Mountain at 11pm on 2012 July 12.
m33altaz = m33.transform_to(AltAz(obstime=time,location=bear_mountain))
print(f"M33's Altitude = {m33altaz.alt:.2}")
##############################################################################
# This is helpful since it turns out M33 is barely above the horizon at this
# time. It's more informative to find M33's airmass over the course of
# the night.
#
# Find the alt,az coordinates of M33 at 100 times evenly spaced between 10pm
# and 7am EDT:
midnight = Time('2012-7-13 00:00:00') - utcoffset
delta_midnight = np.linspace(-2, 10, 100)*u.hour
frame_July13night = AltAz(obstime=midnight+delta_midnight,
location=bear_mountain)
m33altazs_July13night = m33.transform_to(frame_July13night)
##############################################################################
# convert alt, az to airmass with `~astropy.coordinates.AltAz.secz` attribute:
m33airmasss_July13night = m33altazs_July13night.secz
##############################################################################
# Plot the airmass as a function of time:
plt.plot(delta_midnight, m33airmasss_July13night)
plt.xlim(-2, 10)
plt.ylim(1, 4)
plt.xlabel('Hours from EDT Midnight')
plt.ylabel('Airmass [Sec(z)]')
plt.show()
##############################################################################
# Use `~astropy.coordinates.get_sun` to find the location of the Sun at 1000
# evenly spaced times between noon on July 12 and noon on July 13:
from astropy.coordinates import get_sun
delta_midnight = np.linspace(-12, 12, 1000)*u.hour
times_July12_to_13 = midnight + delta_midnight
frame_July12_to_13 = AltAz(obstime=times_July12_to_13, location=bear_mountain)
sunaltazs_July12_to_13 = get_sun(times_July12_to_13).transform_to(frame_July12_to_13)
##############################################################################
# Do the same with `~astropy.coordinates.get_moon` to find when the moon is
# up. Be aware that this will need to download a 10MB file from the internet
# to get a precise location of the moon.
from astropy.coordinates import get_moon
moon_July12_to_13 = get_moon(times_July12_to_13)
moonaltazs_July12_to_13 = moon_July12_to_13.transform_to(frame_July12_to_13)
##############################################################################
# Find the alt,az coordinates of M33 at those same times:
m33altazs_July12_to_13 = m33.transform_to(frame_July12_to_13)
##############################################################################
# Make a beautiful figure illustrating nighttime and the altitudes of M33 and
# the Sun over that time:
plt.plot(delta_midnight, sunaltazs_July12_to_13.alt, color='r', label='Sun')
plt.plot(delta_midnight, moonaltazs_July12_to_13.alt, color=[0.75]*3, ls='--', label='Moon')
plt.scatter(delta_midnight, m33altazs_July12_to_13.alt,
c=m33altazs_July12_to_13.az, label='M33', lw=0, s=8,
cmap='viridis')
plt.fill_between(delta_midnight, 0*u.deg, 90*u.deg,
sunaltazs_July12_to_13.alt < -0*u.deg, color='0.5', zorder=0)
plt.fill_between(delta_midnight, 0*u.deg, 90*u.deg,
sunaltazs_July12_to_13.alt < -18*u.deg, color='k', zorder=0)
plt.colorbar().set_label('Azimuth [deg]')
plt.legend(loc='upper left')
plt.xlim(-12*u.hour, 12*u.hour)
plt.xticks((np.arange(13)*2-12)*u.hour)
plt.ylim(0*u.deg, 90*u.deg)
plt.xlabel('Hours from EDT Midnight')
plt.ylabel('Altitude [deg]')
plt.show()
|
fbb510c5e0186d5e16961c613d3831dbd37a4d215fe262121af70c4b3d053210 | # -*- coding: utf-8 -*-
"""
==================
Edit a FITS header
==================
This example describes how to edit a value in a FITS header
using `astropy.io.fits`.
*By: Adrian Price-Whelan*
*License: BSD*
"""
from astropy.io import fits
##############################################################################
# Download a FITS file:
from astropy.utils.data import get_pkg_data_filename
fits_file = get_pkg_data_filename('tutorials/FITS-Header/input_file.fits')
##############################################################################
# Look at contents of the FITS file
fits.info(fits_file)
##############################################################################
# Look at the headers of the two extensions:
print("Before modifications:")
print()
print("Extension 0:")
print(repr(fits.getheader(fits_file, 0)))
print()
print("Extension 1:")
print(repr(fits.getheader(fits_file, 1)))
##############################################################################
# `astropy.io.fits` provides an object-oriented interface for reading and
# interacting with FITS files, but for small operations (like this example) it
# is often easier to use the
# `convenience functions <https://docs.astropy.org/en/latest/io/fits/index.html#convenience-functions>`_.
#
# To edit a single header value in the header for extension 0, use the
# `~astropy.io.fits.setval()` function. For example, set the OBJECT keyword
# to 'M31':
fits.setval(fits_file, 'OBJECT', value='M31')
##############################################################################
# With no extra arguments, this will modify the header for extension 0, but
# this can be changed using the ``ext`` keyword argument. For example, we can
# specify extension 1 instead:
fits.setval(fits_file, 'OBJECT', value='M31', ext=1)
##############################################################################
# This can also be used to create a new keyword-value pair ("card" in FITS
# lingo):
fits.setval(fits_file, 'ANEWKEY', value='some value')
##############################################################################
# Again, this is useful for one-off modifications, but can be inefficient
# for operations like editing multiple headers in the same file
# because `~astropy.io.fits.setval()` loads the whole file each time it
# is called. To make several modifications, it's better to load the file once:
with fits.open(fits_file, 'update') as f:
for hdu in f:
hdu.header['OBJECT'] = 'CAT'
print("After modifications:")
print()
print("Extension 0:")
print(repr(fits.getheader(fits_file, 0)))
print()
print("Extension 1:")
print(repr(fits.getheader(fits_file, 1)))
|
f060aa9837a0df597dbb32aa17d9db8fda514facc6f269012d3fc9611751b903 | # -*- coding: utf-8 -*-
"""
=====================================================
Create a multi-extension FITS (MEF) file from scratch
=====================================================
This example demonstrates how to create a multi-extension FITS (MEF)
file from scratch using `astropy.io.fits`.
*By: Erik Bray*
*License: BSD*
"""
import os
##############################################################################
# HDUList objects are used to hold all the HDUs in a FITS file. This
# ``HDUList`` class is a subclass of Python's built-in `list` and can be
# created from scratch. For example, to create a FITS file with
# three extensions:
from astropy.io import fits
new_hdul = fits.HDUList()
new_hdul.append(fits.ImageHDU())
new_hdul.append(fits.ImageHDU())
##############################################################################
# Write out the new file to disk:
new_hdul.writeto('test.fits')
##############################################################################
# Alternatively, the HDU instances can be created first (or read from an
# existing FITS file).
#
# Create a multi-extension FITS file with two empty IMAGE extensions (a
# default PRIMARY HDU is prepended automatically if one is not specified;
# we use ``overwrite=True`` to overwrite the file if it already exists):
hdu1 = fits.PrimaryHDU()
hdu2 = fits.ImageHDU()
new_hdul = fits.HDUList([hdu1, hdu2])
new_hdul.writeto('test.fits', overwrite=True)
##############################################################################
# Finally, we'll remove the file we created:
os.remove('test.fits')
|
f8e82631004deaaab5e0800e207f27d16652fb1494aaeb6bec42c040fe4f5c35 | # -*- coding: utf-8 -*-
"""
=====================================================================
Accessing data stored as a table in a multi-extension FITS (MEF) file
=====================================================================
FITS files can often contain large amounts of multi-dimensional data and
tables. This example opens a FITS file with information
from Chandra's HETG-S instrument.
The example uses `astropy.utils.data` to download a multi-extension FITS (MEF)
file, `astropy.io.fits` to investigate the header, and
`astropy.table.Table` to explore the data.
*By: Lia Corrales, Adrian Price-Whelan, and Kelle Cruz*
*License: BSD*
"""
##############################################################################
# Use `astropy.utils.data` subpackage to download the FITS file used in this
# example. Also import `~astropy.table.Table` from the `astropy.table` subpackage
# and `astropy.io.fits`
from astropy.utils.data import get_pkg_data_filename
from astropy.table import Table
from astropy.io import fits
##############################################################################
# Download a FITS file
event_filename = get_pkg_data_filename('tutorials/FITS-tables/chandra_events.fits')
##############################################################################
# Display information about the contents of the FITS file.
fits.info(event_filename)
##############################################################################
# Extension 1, EVENTS, is a Table that contains information about each X-ray
# photon that hit Chandra's HETG-S detector.
#
# Use `~astropy.table.Table` to read the table
events = Table.read(event_filename, hdu=1)
##############################################################################
# Print the column names of the Events Table.
print(events.columns)
##############################################################################
# If a column contains unit information, it will have an associated
# `astropy.units` object.
print(events['energy'].unit)
##############################################################################
# Print the data stored in the Energy column.
print(events['energy'])
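##############################################################################
# Because the column carries a unit, it can also be converted to a
# `~astropy.units.Quantity` and expressed in other units. (Illustrative only;
# this assumes the ``energy`` column of this particular file is in eV.)
import astropy.units as u
print(events['energy'].quantity.to(u.keV))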
|
78cf5c60f74125abbfa26c0a951f75863b38c484d06ca630714937cedada6e19 | # -*- coding: utf-8 -*-
"""
=======================================
Read and plot an image from a FITS file
=======================================
This example opens an image stored in a FITS file and displays it to the screen.
This example uses `astropy.utils.data` to download the file, `astropy.io.fits` to open
the file, and `matplotlib.pyplot` to display the image.
*By: Lia R. Corrales, Adrian Price-Whelan, Kelle Cruz*
*License: BSD*
"""
##############################################################################
# Set up matplotlib and use a nicer set of plot parameters
import matplotlib.pyplot as plt
from astropy.visualization import astropy_mpl_style
plt.style.use(astropy_mpl_style)
##############################################################################
# Download the example FITS files used by this example:
from astropy.utils.data import get_pkg_data_filename
from astropy.io import fits
image_file = get_pkg_data_filename('tutorials/FITS-images/HorseHead.fits')
##############################################################################
# Use `astropy.io.fits.info()` to display the structure of the file:
fits.info(image_file)
##############################################################################
# Generally the image information is located in the Primary HDU, also known
# as extension 0. Here, we use `astropy.io.fits.getdata()` to read the image
# data from this first extension using the keyword argument ``ext=0``:
image_data = fits.getdata(image_file, ext=0)
##############################################################################
# The data is now stored as a 2D numpy array. Print the dimensions using the
# shape attribute:
print(image_data.shape)
##############################################################################
# Display the image data:
plt.figure()
plt.imshow(image_data, cmap='gray')
plt.colorbar()
|
042abe04a704fa7bcd949b1e95727d07e411096873c00d61d30b3d539d20354a | # -*- coding: utf-8 -*-
"""
==========================================
Create a very large FITS file from scratch
==========================================
This example demonstrates how to create a large file (larger than will fit in
memory) from scratch using `astropy.io.fits`.
*By: Erik Bray*
*License: BSD*
"""
##############################################################################
# Normally to create a single image FITS file one would do something like:
import os
import numpy as np
from astropy.io import fits
data = np.zeros((40000, 40000), dtype=np.float64)
hdu = fits.PrimaryHDU(data=data)
##############################################################################
# Then use the `astropy.io.fits.writeto()` method to write out the new
# file to disk
hdu.writeto('large.fits')
##############################################################################
# However, a 40000 x 40000 array of doubles is nearly twelve gigabytes! Most
# systems won't be able to create that in memory just to write out to disk. In
# order to create such a large file efficiently requires a little extra work,
# and a few assumptions.
#
# First, it is helpful to anticipate about how large (as in, how many keywords)
# the header will have in it. FITS headers must be written in 2880 byte
# blocks, large enough for 36 keywords per block (including the END keyword in
# the final block). Typical headers have somewhere between 1 and 4 blocks,
# though sometimes more.
#
# Since the first thing we write to a FITS file is the header, we want to write
# enough header blocks so that there is plenty of padding in which to add new
# keywords without having to resize the whole file. Say you want the header to
# use 4 blocks by default. Then, excluding the END card which Astropy will add
# automatically, create the header and pad it out to 36 * 4 cards.
#
# Create a stub array to initialize the HDU; its
# exact size is irrelevant, as long as it has the desired number of
# dimensions
data = np.zeros((100, 100), dtype=np.float64)
hdu = fits.PrimaryHDU(data=data)
header = hdu.header
while len(header) < (36 * 4 - 1):
header.append() # Adds a blank card to the end
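##############################################################################
# As a quick check of the arithmetic above: each card is 80 bytes, so 36 cards
# fill one 2880-byte block and four blocks hold 144 cards (including the END
# card added by ``tostring()``):
print(len(header.tostring()))  # should be 4 * 2880 = 11520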
##############################################################################
# Now adjust the NAXISn keywords to the desired size of the array, and write
# only the header out to a file. Using the ``hdu.writeto()`` method will cause
# astropy to "helpfully" reset the NAXISn keywords to match the size of the
# dummy array. That is because it works hard to ensure that only valid FITS
# files are written. Instead, we can write just the header to a file using the
# `astropy.io.fits.Header.tofile` method:
header['NAXIS1'] = 40000
header['NAXIS2'] = 40000
header.tofile('large.fits')
##############################################################################
# Finally, grow out the end of the file to match the length of the
# data (plus the length of the header). This can be done very efficiently on
# most systems by seeking past the end of the file and writing a single byte,
# like so:
with open('large.fits', 'rb+') as fobj:
# Seek past the length of the header, plus the length of the
# Data we want to write.
# 8 is the number of bytes per value, i.e. abs(header['BITPIX'])/8
# (this example is assuming a 64-bit float)
# The -1 is to account for the final byte that we are about to
# write:
fobj.seek(len(header.tostring()) + (40000 * 40000 * 8) - 1)
fobj.write(b'\0')
##############################################################################
# More generally, this can be written:
shape = tuple(header[f'NAXIS{ii}'] for ii in range(1, header['NAXIS']+1))
with open('large.fits', 'rb+') as fobj:
fobj.seek(len(header.tostring()) + (np.product(shape) * np.abs(header['BITPIX']//8)) - 1)
fobj.write(b'\0')
##############################################################################
# On modern operating systems this will cause the file (past the header) to be
# filled with zeros out to the ~12GB needed to hold a 40000 x 40000 image. On
# filesystems that support sparse file creation (most Linux filesystems, but not
# the HFS+ filesystem used by most Macs) this is a very fast, efficient
# operation. On other systems your mileage may vary.
#
# This isn't the only way to build up a large file, but probably one of the
# safest. This method can also be used to create large multi-extension FITS
# files, with a little care.
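##############################################################################
# A rough sketch of the multi-extension case (illustrative only, not a tested
# recipe): write each header in turn, then grow the file by the *padded* size
# of that HDU's data, since every HDU must occupy a whole number of 2880-byte
# blocks before the next header may begin. The array sizes below are arbitrary
# placeholders.
BLOCK = 2880
def _padded(nbytes):
    # Round up to the next multiple of the FITS block size
    return ((nbytes + BLOCK - 1) // BLOCK) * BLOCK
primary_hdr = fits.PrimaryHDU(data=np.zeros((100, 100), dtype=np.float64)).header
primary_hdr['NAXIS1'] = 40000
primary_hdr['NAXIS2'] = 40000
ext_hdr = fits.ImageHDU(data=np.zeros((100, 100), dtype=np.float32)).header
ext_hdr['NAXIS1'] = 10000
ext_hdr['NAXIS2'] = 10000
with open('large_mef.fits', 'wb') as fobj:
    for hdr, nbytes in [(primary_hdr, 40000 * 40000 * 8),
                        (ext_hdr, 10000 * 10000 * 4)]:
        fobj.write(hdr.tostring().encode('ascii'))
        fobj.seek(_padded(nbytes) - 1, 1)  # seek forward from the current position
        fobj.write(b'\0')
os.remove('large_mef.fits')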
##############################################################################
# Finally, we'll remove the file we created:
os.remove('large.fits')
|
93f258becc4ec5da3768b6fd02d801854a4cb29f419856cfe08914b80cd312c8 | # -*- coding: utf-8 -*-
"""
=====================================================
Convert a 3-color image (JPG) to separate FITS images
=====================================================
This example opens an RGB JPEG image and writes out each channel as a separate
FITS (image) file.
This example uses `pillow <https://python-pillow.org>`_ to read the image,
`matplotlib.pyplot` to display the image, and `astropy.io.fits` to save FITS files.
*By: Erik Bray, Adrian Price-Whelan*
*License: BSD*
"""
import numpy as np
from PIL import Image
from astropy.io import fits
##############################################################################
# Set up matplotlib and use a nicer set of plot parameters
import matplotlib.pyplot as plt
from astropy.visualization import astropy_mpl_style
plt.style.use(astropy_mpl_style)
##############################################################################
# Load and display the original 3-color jpeg image:
image = Image.open('Hs-2009-14-a-web.jpg')
xsize, ysize = image.size
print(f"Image size: {ysize} x {xsize}")
print(f"Image bands: {image.getbands()}")
ax = plt.imshow(image)
##############################################################################
# Split the three channels (RGB) and get the data as Numpy arrays. The arrays
# are flattened, so they are 1-dimensional:
r, g, b = image.split()
r_data = np.array(r.getdata()) # data is now an array of length ysize*xsize
g_data = np.array(g.getdata())
b_data = np.array(b.getdata())
print(r_data.shape)
##############################################################################
# Reshape the image arrays to be 2-dimensional:
r_data = r_data.reshape(ysize, xsize) # data is now a matrix (ysize, xsize)
g_data = g_data.reshape(ysize, xsize)
b_data = b_data.reshape(ysize, xsize)
print(r_data.shape)
##############################################################################
# Write out the channels as separate FITS images.
# Add and visualize header info
red = fits.PrimaryHDU(data=r_data)
red.header['LATOBS'] = "32:11:56" # add spurious header info
red.header['LONGOBS'] = "110:56"
red.writeto('red.fits')
green = fits.PrimaryHDU(data=g_data)
green.header['LATOBS'] = "32:11:56"
green.header['LONGOBS'] = "110:56"
green.writeto('green.fits')
blue = fits.PrimaryHDU(data=b_data)
blue.header['LATOBS'] = "32:11:56"
blue.header['LONGOBS'] = "110:56"
blue.writeto('blue.fits')
from pprint import pprint
pprint(red.header)
##############################################################################
# Delete the files created
import os
os.remove('red.fits')
os.remove('green.fits')
os.remove('blue.fits')
|
39925d73352c68f7255c1d0d924280ef6398a56b59b5ccb3c0347e3e2b53f09f | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This subpackage contains classes and functions for defining and converting
between different physical units.
This code is adapted from the `pynbody
<https://github.com/pynbody/pynbody>`_ units module written by Andrew
Pontzen, who has granted the Astropy project permission to use the
code under a BSD license.
"""
# Lots of things to import - go from more basic to advanced, so that
# whatever advanced ones need generally has been imported already;
# this helps prevent circular imports and makes it easier to understand
# where most time is spent (e.g., using python -X importtime).
from .core import *
from .quantity import *
from . import si
from . import cgs
from . import astrophys
from . import photometric
from . import misc
from .function import units as function_units
from .si import *
from .astrophys import *
from .photometric import *
from .cgs import *
from .physical import *
from .function.units import *
from .misc import *
from .equivalencies import *
from .function.core import *
from .function.logarithmic import *
from .structured import *
from .decorators import *
del bases
# Enable the set of default units. This notably does *not* include
# Imperial units.
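# (Users who need them can opt in, for example with ``astropy.units.imperial.enable()``.)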
set_enabled_units([si, cgs, astrophys, function_units, misc, photometric])
# -------------------------------------------------------------------------
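# Lazy attributes: for example, ``astropy.units.littleh`` and
# ``astropy.units.with_H0`` are only resolved on first access, via the
# module-level ``__getattr__`` hook below.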
def __getattr__(attr):
if attr == "littleh":
from astropy.units.astrophys import littleh
return littleh
elif attr == "with_H0":
from astropy.units.equivalencies import with_H0
return with_H0
raise AttributeError(f"module {__name__!r} has no attribute {attr!r}.")
|
a671682046f0a87fb4fee08f99dc39dc03225cc8bc17033841e75048b23dbe9d | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This package defines the astrophysics-specific units. They are also
available in the `astropy.units` namespace.
"""
from . import si
from astropy.constants import si as _si
from .core import (UnitBase, def_unit, si_prefixes, binary_prefixes,
set_enabled_units)
# To ensure si units of the constants can be interpreted.
set_enabled_units([si])
import numpy as _numpy
_ns = globals()
###########################################################################
# LENGTH
def_unit((['AU', 'au'], ['astronomical_unit']), _si.au, namespace=_ns, prefixes=True,
doc="astronomical unit: approximately the mean Earth--Sun "
"distance.")
def_unit(['pc', 'parsec'], _si.pc, namespace=_ns, prefixes=True,
doc="parsec: approximately 3.26 light-years.")
def_unit(['solRad', 'R_sun', 'Rsun'], _si.R_sun, namespace=_ns,
doc="Solar radius", prefixes=False,
format={'latex': r'R_{\odot}', 'unicode': 'R\N{SUN}'})
def_unit(['jupiterRad', 'R_jup', 'Rjup', 'R_jupiter', 'Rjupiter'],
_si.R_jup, namespace=_ns, prefixes=False, doc="Jupiter radius",
# LaTeX jupiter symbol requires wasysym
format={'latex': r'R_{\rm J}', 'unicode': 'R\N{JUPITER}'})
def_unit(['earthRad', 'R_earth', 'Rearth'], _si.R_earth, namespace=_ns,
prefixes=False, doc="Earth radius",
# LaTeX earth symbol requires wasysym
format={'latex': r'R_{\oplus}', 'unicode': 'R⊕'})
def_unit(['lyr', 'lightyear'], (_si.c * si.yr).to(si.m),
namespace=_ns, prefixes=True, doc="Light year")
def_unit(['lsec', 'lightsecond'], (_si.c * si.s).to(si.m),
namespace=_ns, prefixes=False, doc="Light second")
###########################################################################
# MASS
def_unit(['solMass', 'M_sun', 'Msun'], _si.M_sun, namespace=_ns,
prefixes=False, doc="Solar mass",
format={'latex': r'M_{\odot}', 'unicode': 'M\N{SUN}'})
def_unit(['jupiterMass', 'M_jup', 'Mjup', 'M_jupiter', 'Mjupiter'],
_si.M_jup, namespace=_ns, prefixes=False, doc="Jupiter mass",
# LaTeX jupiter symbol requires wasysym
format={'latex': r'M_{\rm J}', 'unicode': 'M\N{JUPITER}'})
def_unit(['earthMass', 'M_earth', 'Mearth'], _si.M_earth, namespace=_ns,
prefixes=False, doc="Earth mass",
# LaTeX earth symbol requires wasysym
format={'latex': r'M_{\oplus}', 'unicode': 'M⊕'})
##########################################################################
# ENERGY
# Here, explicitly convert the planck constant to 'eV s' since the constant
# can override that to give a more precise value that takes into account
# covariances between e and h. Eventually, this may also be replaced with
# just `_si.Ryd.to(eV)`.
def_unit(['Ry', 'rydberg'],
(_si.Ryd * _si.c * _si.h.to(si.eV * si.s)).to(si.eV),
namespace=_ns, prefixes=True,
doc="Rydberg: Energy of a photon whose wavenumber is the Rydberg "
"constant",
format={'latex': r'R_{\infty}', 'unicode': 'R∞'})
###########################################################################
# ILLUMINATION
def_unit(['solLum', 'L_sun', 'Lsun'], _si.L_sun, namespace=_ns,
         prefixes=False, doc="Solar luminosity",
format={'latex': r'L_{\odot}', 'unicode': 'L\N{SUN}'})
###########################################################################
# SPECTRAL DENSITY
def_unit((['ph', 'photon'], ['photon']),
format={'ogip': 'photon', 'vounit': 'photon'},
namespace=_ns, prefixes=True)
def_unit(['Jy', 'Jansky', 'jansky'], 1e-26 * si.W / si.m ** 2 / si.Hz,
namespace=_ns, prefixes=True,
doc="Jansky: spectral flux density")
def_unit(['R', 'Rayleigh', 'rayleigh'],
(1e10 / (4 * _numpy.pi)) *
ph * si.m ** -2 * si.s ** -1 * si.sr ** -1,
namespace=_ns, prefixes=True,
doc="Rayleigh: photon flux")
###########################################################################
# EVENTS
def_unit((['ct', 'count'], ['count']),
format={'fits': 'count', 'ogip': 'count', 'vounit': 'count'},
namespace=_ns, prefixes=True, exclude_prefixes=['p'])
def_unit(['adu'], namespace=_ns, prefixes=True)
def_unit(['DN', 'dn'], namespace=_ns, prefixes=False)
###########################################################################
# MISCELLANEOUS
# Some of these are very FITS-specific and perhaps considered a mistake.
# Maybe they should be moved into the FITS format class?
# TODO: This is defined by the FITS standard as "relative to the sun".
# Is that mass, volume, what?
def_unit(['Sun'], namespace=_ns)
def_unit(['chan'], namespace=_ns, prefixes=True)
def_unit(['bin'], namespace=_ns, prefixes=True)
def_unit(['beam'], namespace=_ns, prefixes=True)
def_unit(['electron'], doc="Number of electrons", namespace=_ns,
format={'latex': r'e^{-}', 'unicode': 'e⁻'})
###########################################################################
# CLEANUP
del UnitBase
del def_unit
del si
###########################################################################
# DOCSTRING
# This generates a docstring for this module that describes all of the
# standard units defined here.
from .utils import generate_unit_summary as _generate_unit_summary
if __doc__ is not None:
__doc__ += _generate_unit_summary(globals())
# -------------------------------------------------------------------------
def __getattr__(attr):
if attr == "littleh":
import warnings
from astropy.cosmology.units import littleh
from astropy.utils.exceptions import AstropyDeprecationWarning
warnings.warn(
("`littleh` is deprecated from module `astropy.units.astrophys` "
"since astropy 5.0 and may be removed in a future version. "
"Use `astropy.cosmology.units.littleh` instead."),
AstropyDeprecationWarning)
return littleh
raise AttributeError(f"module {__name__!r} has no attribute {attr!r}.")
|
0b2781faaca6b6933abaa13f2824ea15230bbd10fdeb9f957443315a055f1d91 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Support for ``typing`` py3.9+ features while the minimum supported version is py3.8.
"""
from typing import *
try: # py 3.9+
from typing import Annotated
except (ImportError, ModuleNotFoundError): # optional dependency
try:
from typing_extensions import Annotated
except (ImportError, ModuleNotFoundError):
Annotated = NotImplemented
else:
from typing_extensions import * # override typing
HAS_ANNOTATED = Annotated is not NotImplemented
|
46d21d999a60a43886c5596edc5958a66299b51124ae7af43c8397e3ab1ac805 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Defines the physical types that correspond to different units."""
import numbers
import warnings
from . import core
from . import si
from . import astrophys
from . import cgs
from . import imperial # Need this for backward namespace compat, see issues 11975 and 11977 # noqa
from . import misc
from . import quantity
from astropy.utils.exceptions import AstropyDeprecationWarning
__all__ = ["def_physical_type", "get_physical_type", "PhysicalType"]
_units_and_physical_types = [
(core.dimensionless_unscaled, "dimensionless"),
(si.m, "length"),
(si.m ** 2, "area"),
(si.m ** 3, "volume"),
(si.s, "time"),
(si.rad, "angle"),
(si.sr, "solid angle"),
(si.m / si.s, {"speed", "velocity"}),
(si.m / si.s ** 2, "acceleration"),
(si.Hz, "frequency"),
(si.g, "mass"),
(si.mol, "amount of substance"),
(si.K, "temperature"),
(si.W * si.m ** -1 * si.K ** -1, "thermal conductivity"),
(si.J * si.K ** -1, {"heat capacity", "entropy"}),
(si.J * si.K ** -1 * si.kg ** -1, {"specific heat capacity", "specific entropy"}),
(si.N, "force"),
(si.J, {"energy", "work", "torque"}),
(si.J * si.m ** -2 * si.s ** -1, {"energy flux", "irradiance"}),
(si.Pa, {"pressure", "energy density", "stress"}),
(si.W, {"power", "radiant flux"}),
(si.kg * si.m ** -3, "mass density"),
(si.m ** 3 / si.kg, "specific volume"),
(si.mol / si.m ** 3, "molar concentration"),
(si.m ** 3 / si.mol, "molar volume"),
(si.kg * si.m / si.s, {"momentum", "impulse"}),
(si.kg * si.m ** 2 / si.s, {"angular momentum", "action"}),
(si.rad / si.s, {"angular speed", "angular velocity", "angular frequency"}),
(si.rad / si.s ** 2, "angular acceleration"),
(si.rad / si.m, "plate scale"),
(si.g / (si.m * si.s), "dynamic viscosity"),
(si.m ** 2 / si.s, {"diffusivity", "kinematic viscosity"}),
(si.m ** -1, "wavenumber"),
(si.m ** -2, "column density"),
(si.A, "electrical current"),
(si.C, "electrical charge"),
(si.V, "electrical potential"),
(si.Ohm, {"electrical resistance", "electrical impedance", "electrical reactance"}),
(si.Ohm * si.m, "electrical resistivity"),
(si.S, "electrical conductance"),
(si.S / si.m, "electrical conductivity"),
(si.F, "electrical capacitance"),
(si.C * si.m, "electrical dipole moment"),
(si.A / si.m ** 2, "electrical current density"),
(si.V / si.m, "electrical field strength"),
(si.C / si.m ** 2,
{"electrical flux density", "surface charge density", "polarization density"},
),
(si.C / si.m ** 3, "electrical charge density"),
(si.F / si.m, "permittivity"),
(si.Wb, "magnetic flux"),
(si.T, "magnetic flux density"),
(si.A / si.m, "magnetic field strength"),
(si.m ** 2 * si.A, "magnetic moment"),
(si.H / si.m, {"electromagnetic field strength", "permeability"}),
(si.H, "inductance"),
(si.cd, "luminous intensity"),
(si.lm, "luminous flux"),
(si.lx, {"luminous emittance", "illuminance"}),
(si.W / si.sr, "radiant intensity"),
(si.cd / si.m ** 2, "luminance"),
(si.m ** -3 * si.s ** -1, "volumetric rate"),
(astrophys.Jy, "spectral flux density"),
    (si.N / si.m, "surface tension"),
(si.J * si.m ** -3 * si.s ** -1, {"spectral flux density wav", "power density"}),
(astrophys.photon / si.Hz / si.cm ** 2 / si.s, "photon flux density"),
(astrophys.photon / si.AA / si.cm ** 2 / si.s, "photon flux density wav"),
(astrophys.R, "photon flux"),
(misc.bit, "data quantity"),
(misc.bit / si.s, "bandwidth"),
(cgs.Franklin, "electrical charge (ESU)"),
(cgs.statampere, "electrical current (ESU)"),
(cgs.Biot, "electrical current (EMU)"),
(cgs.abcoulomb, "electrical charge (EMU)"),
(si.m * si.s ** -3, {"jerk", "jolt"}),
(si.m * si.s ** -4, {"snap", "jounce"}),
(si.m * si.s ** -5, "crackle"),
(si.m * si.s ** -6, {"pop", "pounce"}),
(si.K / si.m, "temperature gradient"),
(si.J / si.kg, "specific energy"),
(si.mol * si.m ** -3 * si.s ** -1, "reaction rate"),
(si.kg * si.m ** 2, "moment of inertia"),
(si.mol / si.s, "catalytic activity"),
(si.J * si.K ** -1 * si.mol ** -1, "molar heat capacity"),
(si.mol / si.kg, "molality"),
(si.m * si.s, "absement"),
(si.m * si.s ** 2, "absity"),
(si.m ** 3 / si.s, "volumetric flow rate"),
(si.s ** -2, "frequency drift"),
(si.Pa ** -1, "compressibility"),
(astrophys.electron * si.m ** -3, "electron density"),
(astrophys.electron * si.m ** -2 * si.s ** -1, "electron flux"),
(si.kg / si.m ** 2, "surface mass density"),
(si.W / si.m ** 2 / si.sr, "radiance"),
(si.J / si.mol, "chemical potential"),
(si.kg / si.m, "linear density"),
(si.H ** -1, "magnetic reluctance"),
(si.W / si.K, "thermal conductance"),
(si.K / si.W, "thermal resistance"),
(si.K * si.m / si.W, "thermal resistivity"),
(si.N / si.s, "yank"),
(si.S * si.m ** 2 / si.mol, "molar conductivity"),
(si.m ** 2 / si.V / si.s, "electrical mobility"),
(si.lumen / si.W, "luminous efficacy"),
(si.m ** 2 / si.kg, {"opacity", "mass attenuation coefficient"}),
(si.kg * si.m ** -2 * si.s ** -1, {"mass flux", "momentum density"}),
(si.m ** -3, "number density"),
(si.m ** -2 * si.s ** -1, "particle flux"),
]
_physical_unit_mapping = {}
_unit_physical_mapping = {}
_name_physical_mapping = {}
# mapping from attribute-accessible name (no spaces, etc.) to the actual name.
_attrname_physical_mapping = {}
def _physical_type_from_str(name):
"""
Return the `PhysicalType` instance associated with the name of a
physical type.
"""
if name == "unknown":
raise ValueError("cannot uniquely identify an 'unknown' physical type.")
elif name in _attrname_physical_mapping:
return _attrname_physical_mapping[name] # convert attribute-accessible
elif name in _name_physical_mapping:
return _name_physical_mapping[name]
else:
raise ValueError(f"{name!r} is not a known physical type.")
def _replace_temperatures_with_kelvin(unit):
"""
If a unit contains a temperature unit besides kelvin, then replace
that unit with kelvin.
Temperatures cannot be converted directly between K, °F, °C, and
°Ra, in particular since there would be different conversions for
T and ΔT. However, each of these temperature units represents the
same physical type. Replacing the different temperature units with
kelvin allows the physical type to be treated consistently.
"""
physical_type_id = unit._get_physical_type_id()
physical_type_id_components = []
substitution_was_made = False
for base, power in physical_type_id:
if base in ["deg_F", "deg_C", "deg_R"]:
base = "K"
substitution_was_made = True
physical_type_id_components.append((base, power))
if substitution_was_made:
return core.Unit._from_physical_type_id(tuple(physical_type_id_components))
else:
return unit
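# Illustrative behaviour of the helper above (not part of the upstream module;
# a minimal sketch assuming the public astropy.units API, in which deg_C is an
# irreducible temperature unit):
#
# >>> from astropy import units as u
# >>> _replace_temperatures_with_kelvin(u.deg_C / u.m)
# Unit("K / m")
# >>> _replace_temperatures_with_kelvin(u.K)  # already kelvin, returned as-is
# Unit("K")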
def _standardize_physical_type_names(physical_type_input):
"""
Convert a string or `set` of strings into a `set` containing
string representations of physical types.
The strings provided in ``physical_type_input`` can each contain
multiple physical types that are separated by a regular slash.
Underscores are treated as spaces so that variable names could
be identical to physical type names.
"""
if isinstance(physical_type_input, str):
physical_type_input = {physical_type_input}
standardized_physical_types = set()
for ptype_input in physical_type_input:
if not isinstance(ptype_input, str):
raise ValueError(f"expecting a string, but got {ptype_input}")
input_set = set(ptype_input.split("/"))
processed_set = {s.strip().replace("_", " ") for s in input_set}
standardized_physical_types |= processed_set
return standardized_physical_types
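# Illustrative behaviour of the helper above (not part of the upstream module);
# sorted() is used only to make the set output deterministic:
#
# >>> sorted(_standardize_physical_type_names("momentum/impulse"))
# ['impulse', 'momentum']
# >>> sorted(_standardize_physical_type_names("angular_speed"))
# ['angular speed']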
class PhysicalType:
"""
Represents the physical type(s) that are dimensionally compatible
with a set of units.
Instances of this class should be accessed through either
`get_physical_type` or by using the
`~astropy.units.core.UnitBase.physical_type` attribute of units.
This class is not intended to be instantiated directly in user code.
Parameters
----------
unit : `~astropy.units.Unit`
The unit to be represented by the physical type.
physical_types : `str` or `set` of `str`
A `str` representing the name of the physical type of the unit,
or a `set` containing strings that represent one or more names
of physical types.
Notes
-----
A physical type will be considered equal to an equivalent
`PhysicalType` instance (recommended) or a string that contains a
name of the physical type. The latter method is not recommended
in packages, as the names of some physical types may change in the
future.
To maintain backwards compatibility, two physical type names may be
included in one string if they are separated with a slash (e.g.,
``"momentum/impulse"``). String representations of physical types
may include underscores instead of spaces.
Examples
--------
`PhysicalType` instances may be accessed via the
`~astropy.units.core.UnitBase.physical_type` attribute of units.
>>> import astropy.units as u
>>> u.meter.physical_type
PhysicalType('length')
`PhysicalType` instances may also be accessed by calling
`get_physical_type`. This function will accept a unit, a string
containing the name of a physical type, or the number one.
>>> u.get_physical_type(u.m ** -3)
PhysicalType('number density')
>>> u.get_physical_type("volume")
PhysicalType('volume')
>>> u.get_physical_type(1)
PhysicalType('dimensionless')
Some units are dimensionally compatible with multiple physical types.
A pascal is intended to represent pressure and stress, but the unit
decomposition is equivalent to that of energy density.
>>> pressure = u.get_physical_type("pressure")
>>> pressure
PhysicalType({'energy density', 'pressure', 'stress'})
>>> 'energy density' in pressure
True
Physical types can be tested for equality against other physical
type objects or against strings that may contain the name of a
physical type.
>>> area = (u.m ** 2).physical_type
>>> area == u.barn.physical_type
True
>>> area == "area"
True
Multiplication, division, and exponentiation are enabled so that
physical types may be used for dimensional analysis.
>>> length = u.pc.physical_type
>>> area = (u.cm ** 2).physical_type
>>> length * area
PhysicalType('volume')
>>> area / length
PhysicalType('length')
>>> length ** 3
PhysicalType('volume')
Dimensional analysis may also be performed using a string that
contains the name of a physical type.
>>> "length" * area
PhysicalType('volume')
>>> "area" / length
PhysicalType('length')
Unknown physical types are labelled as ``"unknown"``.
>>> (u.s ** 13).physical_type
PhysicalType('unknown')
Dimensional analysis may be performed for unknown physical types too.
>>> length_to_19th_power = (u.m ** 19).physical_type
>>> length_to_20th_power = (u.m ** 20).physical_type
>>> length_to_20th_power / length_to_19th_power
PhysicalType('length')
"""
def __init__(self, unit, physical_types):
self._unit = _replace_temperatures_with_kelvin(unit)
self._physical_type_id = self._unit._get_physical_type_id()
self._physical_type = _standardize_physical_type_names(physical_types)
self._physical_type_list = sorted(self._physical_type)
def __iter__(self):
yield from self._physical_type_list
def __getattr__(self, attr):
# TODO: remove this whole method when accessing str attributes from
# physical types is no longer supported
# short circuit attribute accessed in __str__ to prevent recursion
if attr == '_physical_type_list':
super().__getattribute__(attr)
self_str_attr = getattr(str(self), attr, None)
if hasattr(str(self), attr):
warning_message = (
f"support for accessing str attributes such as {attr!r} "
"from PhysicalType instances is deprecated since 4.3 "
"and will be removed in a subsequent release.")
warnings.warn(warning_message, AstropyDeprecationWarning)
return self_str_attr
else:
super().__getattribute__(attr) # to get standard error message
def __eq__(self, other):
"""
Return `True` if ``other`` represents a physical type that is
consistent with the physical type of the `PhysicalType` instance.
"""
if isinstance(other, PhysicalType):
return self._physical_type_id == other._physical_type_id
elif isinstance(other, str):
other = _standardize_physical_type_names(other)
return other.issubset(self._physical_type)
else:
return NotImplemented
def __ne__(self, other):
equality = self.__eq__(other)
return not equality if isinstance(equality, bool) else NotImplemented
def _name_string_as_ordered_set(self):
return "{" + str(self._physical_type_list)[1:-1] + "}"
def __repr__(self):
if len(self._physical_type) == 1:
names = "'" + self._physical_type_list[0] + "'"
else:
names = self._name_string_as_ordered_set()
return f"PhysicalType({names})"
def __str__(self):
return "/".join(self._physical_type_list)
@staticmethod
def _dimensionally_compatible_unit(obj):
"""
Return a unit that corresponds to the provided argument.
If a unit is passed in, return that unit. If a physical type
(or a `str` with the name of a physical type) is passed in,
return a unit that corresponds to that physical type. If the
number equal to ``1`` is passed in, return a dimensionless unit.
Otherwise, return `NotImplemented`.
"""
if isinstance(obj, core.UnitBase):
return _replace_temperatures_with_kelvin(obj)
elif isinstance(obj, PhysicalType):
return obj._unit
elif isinstance(obj, numbers.Real) and obj == 1:
return core.dimensionless_unscaled
elif isinstance(obj, str):
return _physical_type_from_str(obj)._unit
else:
return NotImplemented
def _dimensional_analysis(self, other, operation):
other_unit = self._dimensionally_compatible_unit(other)
if other_unit is NotImplemented:
return NotImplemented
other_unit = _replace_temperatures_with_kelvin(other_unit)
new_unit = getattr(self._unit, operation)(other_unit)
return new_unit.physical_type
def __mul__(self, other):
return self._dimensional_analysis(other, "__mul__")
def __rmul__(self, other):
return self.__mul__(other)
def __truediv__(self, other):
return self._dimensional_analysis(other, "__truediv__")
def __rtruediv__(self, other):
other = self._dimensionally_compatible_unit(other)
if other is NotImplemented:
return NotImplemented
return other.physical_type._dimensional_analysis(self, "__truediv__")
def __pow__(self, power):
return (self._unit ** power).physical_type
def __hash__(self):
return hash(self._physical_type_id)
def __len__(self):
return len(self._physical_type)
# We need to prevent operations such as a Unit instance left-multiplying
# a PhysicalType instance from returning a `Quantity`
# instance with a PhysicalType as the value. We can do this by
# preventing np.array from casting a PhysicalType instance as
# an object array.
__array__ = None
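# Because PhysicalType defines __hash__, __eq__ and __iter__, instances can be
# used as dictionary keys and membership-tested against their names. A short
# hedged example using the public astropy.units API (not part of the module):
#
# >>> import astropy.units as u
# >>> pressure = u.get_physical_type("pressure")
# >>> "energy density" in pressure
# True
# >>> {pressure: "compatible with pascals"}[u.Pa.physical_type]
# 'compatible with pascals'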
def def_physical_type(unit, name):
"""
Add a mapping between a unit and the corresponding physical type(s).
If a physical type already exists for a unit, add new physical type
names so long as those names are not already in use for other
physical types.
Parameters
----------
unit : `~astropy.units.Unit`
The unit to be represented by the physical type.
name : `str` or `set` of `str`
A `str` representing the name of the physical type of the unit,
or a `set` containing strings that represent one or more names
of physical types.
Raises
------
ValueError
If a physical type name is already in use for another unit, or
if attempting to name a unit as ``"unknown"``.
"""
physical_type_id = unit._get_physical_type_id()
physical_type_names = _standardize_physical_type_names(name)
if "unknown" in physical_type_names:
raise ValueError("cannot uniquely define an unknown physical type")
names_for_other_units = set(_unit_physical_mapping.keys()).difference(
_physical_unit_mapping.get(physical_type_id, {}))
names_already_in_use = physical_type_names & names_for_other_units
if names_already_in_use:
raise ValueError(
f"the following physical type names are already in use: "
f"{names_already_in_use}.")
unit_already_in_use = physical_type_id in _physical_unit_mapping
if unit_already_in_use:
physical_type = _physical_unit_mapping[physical_type_id]
physical_type_names |= set(physical_type)
physical_type.__init__(unit, physical_type_names)
else:
physical_type = PhysicalType(unit, physical_type_names)
_physical_unit_mapping[physical_type_id] = physical_type
for ptype in physical_type:
_unit_physical_mapping[ptype] = physical_type_id
for ptype_name in physical_type_names:
_name_physical_mapping[ptype_name] = physical_type
# attribute-accessible name
attr_name = ptype_name.replace(' ', '_').replace('(', '').replace(')', '')
_attrname_physical_mapping[attr_name] = physical_type
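# Hedged example of registering a user-defined physical type; "hypervolume" is
# a made-up name chosen so it does not clash with the built-in table above:
#
# >>> import astropy.units as u
# >>> u.def_physical_type(u.m ** 5, "hypervolume")
# >>> (u.m ** 5).physical_type
# PhysicalType('hypervolume')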
def get_physical_type(obj):
"""
Return the physical type that corresponds to a unit (or another
physical type representation).
Parameters
----------
obj : quantity-like or `~astropy.units.PhysicalType`-like
An object that (implicitly or explicitly) has a corresponding
physical type. This object may be a unit, a
`~astropy.units.Quantity`, an object that can be converted to a
`~astropy.units.Quantity` (such as a number or array), a string
that contains a name of a physical type, or a
`~astropy.units.PhysicalType` instance.
Returns
-------
`~astropy.units.PhysicalType`
A representation of the physical type(s) of the unit.
Examples
--------
The physical type may be retrieved from a unit or a
`~astropy.units.Quantity`.
>>> import astropy.units as u
>>> u.get_physical_type(u.meter ** -2)
PhysicalType('column density')
>>> u.get_physical_type(0.62 * u.barn * u.Mpc)
PhysicalType('volume')
The physical type may also be retrieved by providing a `str` that
contains the name of a physical type.
>>> u.get_physical_type("energy")
PhysicalType({'energy', 'torque', 'work'})
Numbers and arrays of numbers correspond to a dimensionless physical
type.
>>> u.get_physical_type(1)
PhysicalType('dimensionless')
"""
if isinstance(obj, PhysicalType):
return obj
if isinstance(obj, str):
return _physical_type_from_str(obj)
try:
unit = obj if isinstance(obj, core.UnitBase) else quantity.Quantity(obj, copy=False).unit
except TypeError as exc:
raise TypeError(f"{obj} does not correspond to a physical type.") from exc
unit = _replace_temperatures_with_kelvin(unit)
physical_type_id = unit._get_physical_type_id()
unit_has_known_physical_type = physical_type_id in _physical_unit_mapping
if unit_has_known_physical_type:
return _physical_unit_mapping[physical_type_id]
else:
return PhysicalType(unit, "unknown")
# ------------------------------------------------------------------------------
# Script section creating the physical types and the documentation
# define the physical types
for unit, physical_type in _units_and_physical_types:
def_physical_type(unit, physical_type)
# For getting the physical types.
def __getattr__(name):
"""Checks for physical types using lazy import.
This also allows user-defined physical types to be accessible from the
:mod:`astropy.units.physical` module.
See `PEP 562 <https://www.python.org/dev/peps/pep-0562/>`_
Parameters
----------
name : str
The name of the attribute in this module. If it is already defined,
then this function is not called.
Returns
-------
ptype : `~astropy.units.physical.PhysicalType`
Raises
------
AttributeError
If the ``name`` does not correspond to a physical type
"""
if name in _attrname_physical_mapping:
return _attrname_physical_mapping[name]
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
def __dir__():
"""Return contents directory (__all__ + all physical type names)."""
return list(set(__all__) | set(_attrname_physical_mapping.keys()))
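# The PEP 562 hooks above make every defined physical type reachable as a
# module attribute via its attribute-accessible name (illustrative):
#
# >>> from astropy.units import physical
# >>> physical.speed
# PhysicalType({'speed', 'velocity'})
# >>> physical.solid_angle
# PhysicalType('solid angle')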
# This generates a docstring addition for this module that describes all of the
# standard physical types defined here.
if __doc__ is not None:
doclines = [
".. list-table:: Defined Physical Types",
" :header-rows: 1",
" :widths: 30 10 50",
"",
" * - Physical type",
" - Unit",
" - Other physical type(s) with same unit"]
for name in sorted(_name_physical_mapping.keys()):
physical_type = _name_physical_mapping[name]
doclines.extend([
f" * - _`{name}`",
f" - :math:`{physical_type._unit.to_string('latex')[1:-1]}`",
f" - {', '.join([n for n in physical_type if n != name])}"])
__doc__ += '\n\n' + '\n'.join(doclines)
del unit, physical_type
|
993c714f2008288a714601027ad005de4d23245e3e460918c66069eb7966a3f1 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This package defines miscellaneous units. They are also
available in the `astropy.units` namespace.
"""
from . import si
from astropy.constants import si as _si
from .core import (UnitBase, def_unit, si_prefixes, binary_prefixes,
set_enabled_units)
# To ensure si units of the constants can be interpreted.
set_enabled_units([si])
import numpy as _numpy
_ns = globals()
###########################################################################
# AREAS
def_unit(['barn', 'barn'], 10 ** -28 * si.m ** 2, namespace=_ns, prefixes=True,
doc="barn: unit of area used in HEP")
###########################################################################
# ANGULAR MEASUREMENTS
def_unit(['cycle', 'cy'], 2.0 * _numpy.pi * si.rad,
namespace=_ns, prefixes=False,
doc="cycle: angular measurement, a full turn or rotation")
def_unit(['spat', 'sp'], 4.0 * _numpy.pi * si.sr,
namespace=_ns, prefixes=False,
doc="spat: the solid angle of the sphere, 4pi sr")
##########################################################################
# PRESSURE
def_unit(['bar'], 1e5 * si.Pa, namespace=_ns,
prefixes=[(['m'], ['milli'], 1.e-3)],
doc="bar: pressure")
# The torr is almost the same as mmHg but not quite.
# See https://en.wikipedia.org/wiki/Torr
# Define the unit here despite it not being an astrophysical unit.
# It may be moved if more similar units are created later.
def_unit(['Torr', 'torr'], _si.atm.value/760. * si.Pa, namespace=_ns,
prefixes=[(['m'], ['milli'], 1.e-3)],
doc="Unit of pressure based on an absolute scale, now defined as "
"exactly 1/760 of a standard atmosphere")
###########################################################################
# MASS
def_unit(['M_p'], _si.m_p, namespace=_ns, doc="Proton mass",
format={'latex': r'M_{p}', 'unicode': 'Mₚ'})
def_unit(['M_e'], _si.m_e, namespace=_ns, doc="Electron mass",
format={'latex': r'M_{e}', 'unicode': 'Mₑ'})
# Unified atomic mass unit
def_unit(['u', 'Da', 'Dalton'], _si.u, namespace=_ns,
prefixes=True, exclude_prefixes=['a', 'da'],
doc="Unified atomic mass unit")
###########################################################################
# COMPUTER
def_unit((['bit', 'b'], ['bit']), namespace=_ns,
prefixes=si_prefixes + binary_prefixes)
def_unit((['byte', 'B'], ['byte']), 8 * bit, namespace=_ns,
format={'vounit': 'byte'},
prefixes=si_prefixes + binary_prefixes,
exclude_prefixes=['d'])
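# Both SI and binary prefixes are attached above, so, for example
# (illustrative, not part of the module):
#
# >>> from astropy import units as u
# >>> (1 * u.Kibit).to(u.bit)
# <Quantity 1024. bit>
# >>> (1 * u.kbit).to(u.bit)
# <Quantity 1000. bit>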
def_unit((['pix', 'pixel'], ['pixel']),
format={'ogip': 'pixel', 'vounit': 'pixel'},
namespace=_ns, prefixes=True)
def_unit((['vox', 'voxel'], ['voxel']),
format={'fits': 'voxel', 'ogip': 'voxel', 'vounit': 'voxel'},
namespace=_ns, prefixes=True)
###########################################################################
# CLEANUP
del UnitBase
del def_unit
del si
###########################################################################
# DOCSTRING
# This generates a docstring for this module that describes all of the
# standard units defined here.
from .utils import generate_unit_summary as _generate_unit_summary
if __doc__ is not None:
__doc__ += _generate_unit_summary(globals())
|
010acb8739268afb39b57f68a20f26d00f041fc45ee6f0591da35fd4805716f5 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This package defines SI prefixed units that are required by the VOUnit standard
but that are rarely used in practice and liable to lead to confusion (such as
``msolMass`` for milli-solar mass). They are in a separate module from
`astropy.units.deprecated` because they need to be enabled by default for
`astropy.units` to parse compliant VOUnit strings. As a result, e.g.,
``Unit('msolMass')`` will just work, but to access the unit directly, use
``astropy.units.required_by_vounit.msolMass`` instead of the more typical idiom
possible for the non-prefixed unit, ``astropy.units.solMass``.
"""
_ns = globals()
def _initialize_module():
# Local imports to avoid polluting top-level namespace
from . import cgs
from . import astrophys
from .core import def_unit, _add_prefixes
_add_prefixes(astrophys.solMass, namespace=_ns, prefixes=True)
_add_prefixes(astrophys.solRad, namespace=_ns, prefixes=True)
_add_prefixes(astrophys.solLum, namespace=_ns, prefixes=True)
_initialize_module()
###########################################################################
# DOCSTRING
# This generates a docstring for this module that describes all of the
# standard units defined here.
from .utils import (generate_unit_summary as _generate_unit_summary,
generate_prefixonly_unit_summary as _generate_prefixonly_unit_summary)
if __doc__ is not None:
__doc__ += _generate_unit_summary(globals())
__doc__ += _generate_prefixonly_unit_summary(globals())
def _enable():
"""
Enable the VOUnit-required extra units so they appear in results of
`~astropy.units.UnitBase.find_equivalent_units` and
`~astropy.units.UnitBase.compose`, and are recognized in the ``Unit('...')``
idiom.
"""
# Local import to avoid cyclical import
from .core import add_enabled_units
# Local import to avoid polluting namespace
import inspect
return add_enabled_units(inspect.getmodule(_enable))
# Because these are VOUnit mandated units, they start enabled (which is why the
# function is hidden).
_enable()
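# Because _enable() runs at import time, VOUnit-mandated prefixed units parse
# straight away (illustrative, mirroring the module docstring):
#
# >>> from astropy import units as u
# >>> u.Unit('msolMass')
# Unit("msolMass")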
|
98d90b12739822c658236023dbfa4ba1e70bcecfe2b6c8ce4758d9dac28d0ff6 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This package defines the CGS units. They are also available in the
top-level `astropy.units` namespace.
"""
from fractions import Fraction
from . import si
from .core import UnitBase, def_unit
_ns = globals()
def_unit(['cm', 'centimeter'], si.cm, namespace=_ns, prefixes=False)
g = si.g
s = si.s
C = si.C
rad = si.rad
sr = si.sr
cd = si.cd
K = si.K
deg_C = si.deg_C
mol = si.mol
##########################################################################
# ACCELERATION
def_unit(['Gal', 'gal'], cm / s ** 2, namespace=_ns, prefixes=True,
doc="Gal: CGS unit of acceleration")
##########################################################################
# ENERGY
# Use CGS definition of erg
def_unit(['erg'], g * cm ** 2 / s ** 2, namespace=_ns, prefixes=True,
doc="erg: CGS unit of energy")
##########################################################################
# FORCE
def_unit(['dyn', 'dyne'], g * cm / s ** 2, namespace=_ns,
prefixes=True,
doc="dyne: CGS unit of force")
##########################################################################
# PRESSURE
def_unit(['Ba', 'Barye', 'barye'], g / (cm * s ** 2), namespace=_ns,
prefixes=True,
doc="Barye: CGS unit of pressure")
##########################################################################
# DYNAMIC VISCOSITY
def_unit(['P', 'poise'], g / (cm * s), namespace=_ns,
prefixes=True,
doc="poise: CGS unit of dynamic viscosity")
##########################################################################
# KINEMATIC VISCOSITY
def_unit(['St', 'stokes'], cm ** 2 / s, namespace=_ns,
prefixes=True,
doc="stokes: CGS unit of kinematic viscosity")
##########################################################################
# WAVENUMBER
def_unit(['k', 'Kayser', 'kayser'], cm ** -1, namespace=_ns,
prefixes=True,
doc="kayser: CGS unit of wavenumber")
###########################################################################
# ELECTRICAL
def_unit(['D', 'Debye', 'debye'], Fraction(1, 3) * 1e-29 * C * si.m,
namespace=_ns, prefixes=True,
doc="Debye: CGS unit of electric dipole moment")
def_unit(['Fr', 'Franklin', 'statcoulomb', 'statC', 'esu'],
g ** Fraction(1, 2) * cm ** Fraction(3, 2) * s ** -1,
namespace=_ns,
doc='Franklin: CGS (ESU) unit of charge')
def_unit(['statA', 'statampere'], Fr * s ** -1, namespace=_ns,
doc='statampere: CGS (ESU) unit of current')
def_unit(['Bi', 'Biot', 'abA', 'abampere'],
g ** Fraction(1, 2) * cm ** Fraction(1, 2) * s ** -1, namespace=_ns,
doc='Biot: CGS (EMU) unit of current')
def_unit(['abC', 'abcoulomb'], Bi * s, namespace=_ns,
doc='abcoulomb: CGS (EMU) of charge')
###########################################################################
# MAGNETIC
def_unit(['G', 'Gauss', 'gauss'], 1e-4 * si.T, namespace=_ns, prefixes=True,
doc="Gauss: CGS unit for magnetic field")
###########################################################################
# BASES
bases = set([cm, g, s, rad, cd, K, mol])
###########################################################################
# CLEANUP
del UnitBase
del def_unit
del si
del Fraction
###########################################################################
# DOCSTRING
# This generates a docstring for this module that describes all of the
# standard units defined here.
from .utils import generate_unit_summary as _generate_unit_summary
if __doc__ is not None:
__doc__ += _generate_unit_summary(globals())
|
803f838d8eb7c8a0c705c85801a5d3a7910d261cd3ec6b0c93f820d7a20e9f56 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This package defines deprecated units.
These units are not available in the top-level `astropy.units`
namespace. To use these units, you must import the `astropy.units.deprecated`
module::
>>> from astropy.units import deprecated
>>> q = 10. * deprecated.emu # doctest: +SKIP
To include them in `~astropy.units.UnitBase.compose` and the results of
`~astropy.units.UnitBase.find_equivalent_units`, do::
>>> from astropy.units import deprecated
>>> deprecated.enable() # doctest: +SKIP
"""
_ns = globals()
def _initialize_module():
# Local imports to avoid polluting top-level namespace
from . import cgs
from . import astrophys
from .core import def_unit, _add_prefixes
def_unit(['emu'], cgs.Bi, namespace=_ns,
doc='Biot: CGS (EMU) unit of current')
# Add only some *prefixes* as deprecated units.
_add_prefixes(astrophys.jupiterMass, namespace=_ns, prefixes=True)
_add_prefixes(astrophys.earthMass, namespace=_ns, prefixes=True)
_add_prefixes(astrophys.jupiterRad, namespace=_ns, prefixes=True)
_add_prefixes(astrophys.earthRad, namespace=_ns, prefixes=True)
_initialize_module()
###########################################################################
# DOCSTRING
# This generates a docstring for this module that describes all of the
# standard units defined here.
from .utils import (generate_unit_summary as _generate_unit_summary,
generate_prefixonly_unit_summary as _generate_prefixonly_unit_summary)
if __doc__ is not None:
__doc__ += _generate_unit_summary(globals())
__doc__ += _generate_prefixonly_unit_summary(globals())
def enable():
"""
Enable deprecated units so they appear in results of
`~astropy.units.UnitBase.find_equivalent_units` and
`~astropy.units.UnitBase.compose`.
This may be used with the ``with`` statement to enable deprecated
units only temporarily.
"""
# Local import to avoid cyclical import
from .core import add_enabled_units
# Local import to avoid polluting namespace
import inspect
return add_enabled_units(inspect.getmodule(enable))
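# Illustrative use of the context manager returned above (mirrors the module
# docstring): within the block the deprecated units take part in compose()
# and find_equivalent_units().
#
# >>> from astropy.units import deprecated
# >>> with deprecated.enable():
# ...     matches = deprecated.emu.find_equivalent_units()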
|
798e04161d76df505a2134d2a0639c178eeb668445a39d09c62316f3f6122d68 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Miscellaneous utilities for `astropy.units`.
None of the functions in the module are meant for use outside of the
package.
"""
import io
import re
from fractions import Fraction
import numpy as np
from numpy import finfo
_float_finfo = finfo(float)
# take float here to ensure comparison with another float is fast
# give a little margin since often multiple calculations happened
_JUST_BELOW_UNITY = float(1.-4.*_float_finfo.epsneg)
_JUST_ABOVE_UNITY = float(1.+4.*_float_finfo.eps)
def _get_first_sentence(s):
"""
Get the first sentence from a string and remove any carriage
returns.
"""
x = re.match(r".*?\S\.\s", s)
if x is not None:
s = x.group(0)
return s.replace('\n', ' ')
def _iter_unit_summary(namespace):
"""
Generates the ``(unit, doc, represents, aliases, prefixes)``
tuple used to format the unit summary docs in `generate_unit_summary`.
"""
from . import core
# Get all of the units, and keep track of which ones have SI
# prefixes
units = []
has_prefixes = set()
for key, val in namespace.items():
# Skip non-unit items
if not isinstance(val, core.UnitBase):
continue
# Skip aliases
if key != val.name:
continue
if isinstance(val, core.PrefixUnit):
# This will return the root unit that is scaled by the prefix
# attached to it
has_prefixes.add(val._represents.bases[0].name)
else:
units.append(val)
# Sort alphabetically, case insensitive
units.sort(key=lambda x: x.name.lower())
for unit in units:
doc = _get_first_sentence(unit.__doc__).strip()
represents = ''
if isinstance(unit, core.Unit):
represents = f":math:`{unit._represents.to_string('latex')[1:-1]}`"
aliases = ', '.join(f'``{x}``' for x in unit.aliases)
yield (unit, doc, represents, aliases, 'Yes' if unit.name in has_prefixes else 'No')
def generate_unit_summary(namespace):
"""
Generates a summary of units from a given namespace. This is used
to generate the docstring for the modules that define the actual
units.
Parameters
----------
namespace : dict
A namespace containing units.
Returns
-------
docstring : str
A docstring containing a summary table of the units.
"""
docstring = io.StringIO()
docstring.write("""
.. list-table:: Available Units
:header-rows: 1
:widths: 10 20 20 20 1
* - Unit
- Description
- Represents
- Aliases
- SI Prefixes
""")
for unit_summary in _iter_unit_summary(namespace):
docstring.write("""
* - ``{}``
- {}
- {}
- {}
- {}
""".format(*unit_summary))
return docstring.getvalue()
def generate_prefixonly_unit_summary(namespace):
"""
Generates table entries for units in a namespace that are just prefixes
without the base unit. Note that this is intended to be used *after*
`generate_unit_summary` and therefore does not include the table header.
Parameters
----------
namespace : dict
A namespace containing units that are prefixes but do *not* have the
base unit in their namespace.
Returns
-------
docstring : str
A docstring containing a summary table of the units.
"""
from . import PrefixUnit
faux_namespace = {}
for nm, unit in namespace.items():
if isinstance(unit, PrefixUnit):
base_unit = unit.represents.bases[0]
faux_namespace[base_unit.name] = base_unit
docstring = io.StringIO()
for unit_summary in _iter_unit_summary(faux_namespace):
docstring.write("""
* - Prefixes for ``{}``
- {} prefixes
- {}
- {}
- Only
""".format(*unit_summary))
return docstring.getvalue()
def is_effectively_unity(value):
# value is *almost* always real, except, e.g., for u.mag**0.5, when
# it will be complex. Use try/except to ensure normal case is fast
try:
return _JUST_BELOW_UNITY <= value <= _JUST_ABOVE_UNITY
except TypeError: # value is complex
return (_JUST_BELOW_UNITY <= value.real <= _JUST_ABOVE_UNITY and
_JUST_BELOW_UNITY <= value.imag + 1 <= _JUST_ABOVE_UNITY)
def sanitize_scale(scale):
if is_effectively_unity(scale):
return 1.0
# Maximum speed for regular case where scale is a float.
if scale.__class__ is float:
return scale
# We cannot have numpy scalars, since they don't autoconvert to
# complex if necessary. They are also slower.
if hasattr(scale, 'dtype'):
scale = scale.item()
# All classes that scale can be (int, float, complex, Fraction)
# have an "imag" attribute.
if scale.imag:
if abs(scale.real) > abs(scale.imag):
if is_effectively_unity(scale.imag/scale.real + 1):
return scale.real
elif is_effectively_unity(scale.real/scale.imag + 1):
return complex(0., scale.imag)
return scale
else:
return scale.real
def maybe_simple_fraction(p, max_denominator=100):
"""Fraction very close to x with denominator at most max_denominator.
The fraction has to be such that fraction/x is unity to within 4 ulp.
If such a fraction does not exist, returns the float number.
The algorithm is that of `fractions.Fraction.limit_denominator`, but
sped up by not creating a fraction to start with.
"""
if p == 0 or p.__class__ is int:
return p
n, d = p.as_integer_ratio()
a = n // d
# Normally, start with 0,1 and 1,0; here we have applied first iteration.
n0, d0 = 1, 0
n1, d1 = a, 1
while d1 <= max_denominator:
if _JUST_BELOW_UNITY <= n1/(d1*p) <= _JUST_ABOVE_UNITY:
return Fraction(n1, d1)
n, d = d, n-a*d
a = n // d
n0, n1 = n1, n0+a*n1
d0, d1 = d1, d0+a*d1
return p
def validate_power(p):
"""Convert a power to a floating point value, an integer, or a Fraction.
If a fractional power can be represented exactly as a floating point
number, convert it to a float, to make the math much faster; otherwise,
retain it as a `fractions.Fraction` object to avoid losing precision.
Conversely, if the value is indistinguishable from a rational number with a
low-numbered denominator, convert to a Fraction object.
Parameters
----------
p : float, int, Rational, Fraction
Power to be converted
"""
denom = getattr(p, 'denominator', None)
if denom is None:
try:
p = float(p)
except Exception:
if not np.isscalar(p):
raise ValueError("Quantities and Units may only be raised "
"to a scalar power")
else:
raise
# This returns either a (simple) Fraction or the same float.
p = maybe_simple_fraction(p)
# If still a float, nothing more to be done.
if isinstance(p, float):
return p
# Otherwise, check for simplifications.
denom = p.denominator
if denom == 1:
p = p.numerator
elif (denom & (denom - 1)) == 0:
# Above is a bit-twiddling hack to see if denom is a power of two.
# If so, float does not lose precision and will speed things up.
p = float(p)
return p
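# Illustrative outcomes of the conversion rules above (not part of the module):
#
# >>> validate_power(0.5)       # denominator is a power of two -> plain float
# 0.5
# >>> validate_power(1. / 3.)   # close to 1/3 -> kept exact as a Fraction
# Fraction(1, 3)
# >>> validate_power(4)         # integral -> returned as an int
# 4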
def resolve_fractions(a, b):
"""
If either input is a Fraction, convert the other to a Fraction
(at least if it does not have a ridiculous denominator).
This ensures that any operation involving a Fraction will use
rational arithmetic and preserve precision.
"""
# We short-circuit on the most common cases of int and float, since
# isinstance(a, Fraction) is very slow for any non-Fraction instances.
a_is_fraction = (a.__class__ is not int and a.__class__ is not float and
isinstance(a, Fraction))
b_is_fraction = (b.__class__ is not int and b.__class__ is not float and
isinstance(b, Fraction))
if a_is_fraction and not b_is_fraction:
b = maybe_simple_fraction(b)
elif not a_is_fraction and b_is_fraction:
a = maybe_simple_fraction(a)
return a, b
def quantity_asanyarray(a, dtype=None):
from .quantity import Quantity
if not isinstance(a, np.ndarray) and not np.isscalar(a) and any(isinstance(x, Quantity) for x in a):
return Quantity(a, dtype=dtype)
else:
return np.asanyarray(a, dtype=dtype)
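# Illustrative behaviour (not part of the module): a list mixing in Quantity
# elements becomes a Quantity, while plain numbers fall through to numpy.
#
# >>> import astropy.units as u
# >>> quantity_asanyarray([1. * u.m, 2. * u.m])
# <Quantity [1., 2.] m>
# >>> quantity_asanyarray([1., 2.])
# array([1., 2.])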
|
6e22b6f296084fcb81cf2a1e4acd1bbb4aa695238cf7a9cee400daa246530a39 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This package defines colloquially used Imperial units. They are
available in the `astropy.units.imperial` namespace, but not in the
top-level `astropy.units` namespace, e.g.::
>>> import astropy.units as u
>>> mph = u.imperial.mile / u.hour
>>> mph
Unit("mi / h")
To include them in `~astropy.units.UnitBase.compose` and the results of
`~astropy.units.UnitBase.find_equivalent_units`, do::
>>> import astropy.units as u
>>> u.imperial.enable() # doctest: +SKIP
"""
from .core import UnitBase, def_unit
from . import si
_ns = globals()
###########################################################################
# LENGTH
def_unit(['inch'], 2.54 * si.cm, namespace=_ns,
doc="International inch")
def_unit(['ft', 'foot'], 12 * inch, namespace=_ns,
doc="International foot")
def_unit(['yd', 'yard'], 3 * ft, namespace=_ns,
doc="International yard")
def_unit(['mi', 'mile'], 5280 * ft, namespace=_ns,
doc="International mile")
def_unit(['mil', 'thou'], 0.001 * inch, namespace=_ns,
doc="Thousandth of an inch")
def_unit(['nmi', 'nauticalmile', 'NM'], 1852 * si.m, namespace=_ns,
doc="Nautical mile")
def_unit(['fur', 'furlong'], 660 * ft, namespace=_ns,
doc="Furlong")
###########################################################################
# AREAS
def_unit(['ac', 'acre'], 43560 * ft ** 2, namespace=_ns,
doc="International acre")
###########################################################################
# VOLUMES
def_unit(['gallon'], si.liter / 0.264172052, namespace=_ns,
doc="U.S. liquid gallon")
def_unit(['quart'], gallon / 4, namespace=_ns,
doc="U.S. liquid quart")
def_unit(['pint'], quart / 2, namespace=_ns,
doc="U.S. liquid pint")
def_unit(['cup'], pint / 2, namespace=_ns,
doc="U.S. customary cup")
def_unit(['foz', 'fluid_oz', 'fluid_ounce'], cup / 8, namespace=_ns,
doc="U.S. fluid ounce")
def_unit(['tbsp', 'tablespoon'], foz / 2, namespace=_ns,
doc="U.S. customary tablespoon")
def_unit(['tsp', 'teaspoon'], tbsp / 3, namespace=_ns,
doc="U.S. customary teaspoon")
###########################################################################
# MASS
def_unit(['oz', 'ounce'], 28.349523125 * si.g, namespace=_ns,
doc="International avoirdupois ounce: mass")
def_unit(['lb', 'lbm', 'pound'], 16 * oz, namespace=_ns,
doc="International avoirdupois pound: mass")
def_unit(['st', 'stone'], 14 * lb, namespace=_ns,
doc="International avoirdupois stone: mass")
def_unit(['ton'], 2000 * lb, namespace=_ns,
doc="International avoirdupois ton: mass")
def_unit(['slug'], 32.174049 * lb, namespace=_ns,
doc="slug: mass")
###########################################################################
# SPEED
def_unit(['kn', 'kt', 'knot', 'NMPH'], nmi / si.h, namespace=_ns,
doc="nautical unit of speed: 1 nmi per hour")
###########################################################################
# FORCE
def_unit('lbf', slug * ft * si.s**-2, namespace=_ns,
doc="Pound: force")
def_unit(['kip', 'kilopound'], 1000 * lbf, namespace=_ns,
doc="Kilopound: force")
##########################################################################
# ENERGY
def_unit(['BTU', 'btu'], 1.05505585 * si.kJ, namespace=_ns,
doc="British thermal unit")
def_unit(['cal', 'calorie'], 4.184 * si.J, namespace=_ns,
doc="Thermochemical calorie: pre-SI metric unit of energy")
def_unit(['kcal', 'Cal', 'Calorie', 'kilocal', 'kilocalorie'],
1000 * cal, namespace=_ns,
doc="Calorie: colloquial definition of Calorie")
##########################################################################
# PRESSURE
def_unit('psi', lbf * inch ** -2, namespace=_ns,
doc="Pound per square inch: pressure")
###########################################################################
# POWER
# Imperial units
def_unit(['hp', 'horsepower'], si.W / 0.00134102209, namespace=_ns,
doc="Electrical horsepower")
###########################################################################
# TEMPERATURE
def_unit(['deg_F', 'Fahrenheit'], namespace=_ns, doc='Degrees Fahrenheit',
format={'latex': r'{}^{\circ}F', 'unicode': '°F'})
def_unit(['deg_R', 'Rankine'], namespace=_ns, doc='Rankine scale: absolute scale of thermodynamic temperature')
###########################################################################
# CLEANUP
del UnitBase
del def_unit
###########################################################################
# DOCSTRING
# This generates a docstring for this module that describes all of the
# standard units defined here.
from .utils import generate_unit_summary as _generate_unit_summary
if __doc__ is not None:
__doc__ += _generate_unit_summary(globals())
def enable():
"""
Enable Imperial units so they appear in results of
`~astropy.units.UnitBase.find_equivalent_units` and
`~astropy.units.UnitBase.compose`.
This may be used with the ``with`` statement to enable Imperial
units only temporarily.
"""
# Local import to avoid cyclical import
from .core import add_enabled_units
# Local import to avoid polluting namespace
import inspect
return add_enabled_units(inspect.getmodule(enable))
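# Illustrative conversion using the units defined above (a minimal sketch;
# 1 mile is exactly 1609.344 m, so 60 mi/h is exactly 26.8224 m/s):
#
# >>> import astropy.units as u
# >>> (60 * u.imperial.mile / u.hour).to(u.m / u.s)  # doctest: +FLOAT_CMP
# <Quantity 26.8224 m / s>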
|
da2c0c67aa0ef24cf668a71f1e193d77284f17db328cafe168a9477cec0b0494 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module defines structured units and quantities.
"""
# Standard library
import operator
import numpy as np
from .core import Unit, UnitBase, UNITY
__all__ = ['StructuredUnit']
DTYPE_OBJECT = np.dtype('O')
def _names_from_dtype(dtype):
"""Recursively extract field names from a dtype."""
names = []
for name in dtype.names:
subdtype = dtype.fields[name][0]
if subdtype.names:
names.append([name, _names_from_dtype(subdtype)])
else:
names.append(name)
return tuple(names)
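# Illustrative behaviour on a nested structured dtype (not part of the module):
#
# >>> import numpy as np
# >>> dt = np.dtype([('pv', [('p', 'f8'), ('v', 'f8')]), ('t', 'f8')])
# >>> _names_from_dtype(dt)
# (['pv', ('p', 'v')], 't')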
def _normalize_names(names):
"""Recursively normalize, inferring upper level names for unadorned tuples.
Generally, we want the field names to be organized like dtypes, as in
``(['pv', ('p', 'v')], 't')``. But we automatically infer upper
field names if the list is absent from items like ``(('p', 'v'), 't')``,
by concatenating the names inside the tuple.
"""
result = []
for name in names:
if isinstance(name, str) and len(name) > 0:
result.append(name)
elif (isinstance(name, list)
and len(name) == 2
and isinstance(name[0], str) and len(name[0]) > 0
and isinstance(name[1], tuple) and len(name[1]) > 0):
result.append([name[0], _normalize_names(name[1])])
elif isinstance(name, tuple) and len(name) > 0:
new_tuple = _normalize_names(name)
result.append([''.join([(i[0] if isinstance(i, list) else i)
for i in new_tuple]), new_tuple])
else:
raise ValueError(f'invalid entry {name!r}. Should be a name, '
'tuple of names, or 2-element list of the '
'form [name, tuple of names].')
return tuple(result)
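# Illustrative behaviour (not part of the module): unadorned tuples get an
# inferred upper-level name made by concatenating the lower-level names.
#
# >>> _normalize_names((('p', 'v'), 't'))
# (['pv', ('p', 'v')], 't')
# >>> _normalize_names((['pos_vel', ('p', 'v')], 't'))
# (['pos_vel', ('p', 'v')], 't')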
class StructuredUnit:
"""Container for units for a structured Quantity.
Parameters
----------
units : unit-like, tuple of unit-like, or `~astropy.units.StructuredUnit`
Tuples can be nested. If a `~astropy.units.StructuredUnit` is passed
in, it will be returned unchanged unless different names are requested.
names : tuple of str, tuple or list; `~numpy.dtype`; or `~astropy.units.StructuredUnit`, optional
Field names for the units, possibly nested. Can be inferred from a
structured `~numpy.dtype` or another `~astropy.units.StructuredUnit`.
For nested tuples, by default the name of the upper entry will be the
concatenation of the names of the lower levels. One can pass in a
list with the upper-level name and a tuple of lower-level names to
avoid this. For tuples, not all levels have to be given; for any level
not passed in, default field names of 'f0', 'f1', etc., will be used.
Notes
-----
It is recommended to initialize the class indirectly, using
`~astropy.units.Unit`. E.g., ``u.Unit('AU,AU/day')``.
When combined with a structured array to produce a structured
`~astropy.units.Quantity`, array field names will take precedence.
Generally, passing in ``names`` is needed only if the unit is used
unattached to a `~astropy.units.Quantity` and one needs to access its
fields.
Examples
--------
Various ways to initialize a `~astropy.units.StructuredUnit`::
>>> import astropy.units as u
>>> su = u.Unit('(AU,AU/day),yr')
>>> su
Unit("((AU, AU / d), yr)")
>>> su.field_names
(['f0', ('f0', 'f1')], 'f1')
>>> su['f1']
Unit("yr")
>>> su2 = u.StructuredUnit(((u.AU, u.AU/u.day), u.yr), names=(('p', 'v'), 't'))
>>> su2 == su
True
>>> su2.field_names
(['pv', ('p', 'v')], 't')
>>> su3 = u.StructuredUnit((su2['pv'], u.day), names=(['p_v', ('p', 'v')], 't'))
>>> su3.field_names
(['p_v', ('p', 'v')], 't')
>>> su3.keys()
('p_v', 't')
>>> su3.values()
(Unit("(AU, AU / d)"), Unit("d"))
Structured units share most methods with regular units::
>>> su.physical_type
((PhysicalType('length'), PhysicalType({'speed', 'velocity'})), PhysicalType('time'))
>>> su.si
Unit("((1.49598e+11 m, 1.73146e+06 m / s), 3.15576e+07 s)")
"""
def __new__(cls, units, names=None):
dtype = None
if names is not None:
if isinstance(names, StructuredUnit):
dtype = names._units.dtype
names = names.field_names
elif isinstance(names, np.dtype):
if not names.fields:
raise ValueError('dtype should be structured, with fields.')
dtype = np.dtype([(name, DTYPE_OBJECT) for name in names.names])
names = _names_from_dtype(names)
else:
if not isinstance(names, tuple):
names = (names,)
names = _normalize_names(names)
if not isinstance(units, tuple):
units = Unit(units)
if isinstance(units, StructuredUnit):
# Avoid constructing a new StructuredUnit if no field names
# are given, or if all field names are the same already anyway.
if names is None or units.field_names == names:
return units
# Otherwise, turn (the upper level) into a tuple, for renaming.
units = units.values()
else:
# Single regular unit: make a tuple for iteration below.
units = (units,)
if names is None:
names = tuple(f'f{i}' for i in range(len(units)))
elif len(units) != len(names):
raise ValueError("lengths of units and field names must match.")
converted = []
for unit, name in zip(units, names):
if isinstance(name, list):
# For list, the first item is the name of our level,
# and the second another tuple of names, i.e., we recurse.
unit = cls(unit, name[1])
name = name[0]
else:
# We are at the lowest level. Check unit.
unit = Unit(unit)
if dtype is not None and isinstance(unit, StructuredUnit):
raise ValueError("units do not match in depth with field "
"names from dtype or structured unit.")
converted.append(unit)
self = super().__new__(cls)
if dtype is None:
dtype = np.dtype([((name[0] if isinstance(name, list) else name),
DTYPE_OBJECT) for name in names])
# Decay array to void so we can access by field name and number.
self._units = np.array(tuple(converted), dtype)[()]
return self
def __getnewargs__(self):
"""When de-serializing, e.g. pickle, start with a blank structure."""
return (), None
@property
def field_names(self):
"""Possibly nested tuple of the field names of the parts."""
return tuple(([name, unit.field_names]
if isinstance(unit, StructuredUnit) else name)
for name, unit in self.items())
# Allow StructuredUnit to be treated as an (ordered) mapping.
def __len__(self):
return len(self._units.dtype.names)
def __getitem__(self, item):
# Since we are based on np.void, indexing by field number works too.
return self._units[item]
def values(self):
return self._units.item()
def keys(self):
return self._units.dtype.names
def items(self):
return tuple(zip(self._units.dtype.names, self._units.item()))
def __iter__(self):
yield from self._units.dtype.names
# Helpers for methods below.
def _recursively_apply(self, func, cls=None):
"""Apply func recursively.
Parameters
----------
func : callable
Function to apply to all parts of the structured unit,
recursing as needed.
cls : type, optional
If given, should be a subclass of `~numpy.void`. By default,
will return a new `~astropy.units.StructuredUnit` instance.
"""
results = np.array(tuple([func(part) for part in self.values()]),
self._units.dtype)[()]
if cls is not None:
return results.view((cls, results.dtype))
# Short-cut; no need to interpret field names, etc.
result = super().__new__(self.__class__)
result._units = results
return result
def _recursively_get_dtype(self, value, enter_lists=True):
"""Get structured dtype according to value, using our field names.
This is useful since ``np.array(value)`` would treat tuples as lower
levels of the array, rather than as elements of a structured array.
The routine does presume that the type of the first tuple is
representative of the rest. Used in ``_get_converter``.
For the special value of ``UNITY``, all fields are assumed to be 1.0,
and hence this will return an all-float dtype.
"""
if enter_lists:
while isinstance(value, list):
value = value[0]
if value is UNITY:
value = (UNITY,) * len(self)
elif not isinstance(value, tuple) or len(self) != len(value):
raise ValueError(f"cannot interpret value {value} for unit {self}.")
descr = []
for (name, unit), part in zip(self.items(), value):
if isinstance(unit, StructuredUnit):
descr.append(
(name, unit._recursively_get_dtype(part, enter_lists=False)))
else:
# Got a part associated with a regular unit. Gets its dtype.
# Like for Quantity, we cast integers to float.
part = np.array(part)
part_dtype = part.dtype
if part_dtype.kind in 'iu':
part_dtype = np.dtype(float)
descr.append((name, part_dtype, part.shape))
return np.dtype(descr)
@property
def si(self):
"""The `StructuredUnit` instance in SI units."""
return self._recursively_apply(operator.attrgetter('si'))
@property
def cgs(self):
"""The `StructuredUnit` instance in cgs units."""
return self._recursively_apply(operator.attrgetter('cgs'))
# Needed to pass through Unit initializer, so might as well use it.
def _get_physical_type_id(self):
return self._recursively_apply(
operator.methodcaller('_get_physical_type_id'), cls=Structure)
@property
def physical_type(self):
"""Physical types of all the fields."""
return self._recursively_apply(
operator.attrgetter('physical_type'), cls=Structure)
def decompose(self, bases=set()):
"""The `StructuredUnit` composed of only irreducible units.
Parameters
----------
bases : sequence of `~astropy.units.UnitBase`, optional
The bases to decompose into. When not provided,
decomposes down to any irreducible units. When provided,
the decomposed result will only contain the given units.
This will raise a `UnitsError` if it is not possible
to do so.
Returns
-------
`~astropy.units.StructuredUnit`
With the unit for each field containing only irreducible units.
"""
return self._recursively_apply(
operator.methodcaller('decompose', bases=bases))
def is_equivalent(self, other, equivalencies=[]):
"""`True` if all fields are equivalent to the other's fields.
Parameters
----------
other : `~astropy.units.StructuredUnit`
The structured unit to compare with, or what can initialize one.
equivalencies : list of tuple, optional
A list of equivalence pairs to try if the units are not
directly convertible. See :ref:`unit_equivalencies`.
The list will be applied to all fields.
Returns
-------
bool
"""
try:
other = StructuredUnit(other)
except Exception:
return False
if len(self) != len(other):
return False
for self_part, other_part in zip(self.values(), other.values()):
if not self_part.is_equivalent(other_part,
equivalencies=equivalencies):
return False
return True
def _get_converter(self, other, equivalencies=[]):
if not isinstance(other, type(self)):
other = self.__class__(other, names=self)
converters = [self_part._get_converter(other_part,
equivalencies=equivalencies)
for (self_part, other_part) in zip(self.values(),
other.values())]
def converter(value):
if not hasattr(value, 'dtype'):
value = np.array(value, self._recursively_get_dtype(value))
result = np.empty_like(value)
for name, converter_ in zip(result.dtype.names, converters):
result[name] = converter_(value[name])
# Index with empty tuple to decay array scalars to numpy void.
return result if result.shape else result[()]
return converter
def to(self, other, value=np._NoValue, equivalencies=[]):
"""Return values converted to the specified unit.
Parameters
----------
other : `~astropy.units.StructuredUnit`
The unit to convert to. If necessary, will be converted to
a `~astropy.units.StructuredUnit` using the dtype of ``value``.
value : array-like, optional
Value(s) in the current unit to be converted to the
specified unit. If a sequence, the first element must have
entries of the correct type to represent all elements (i.e.,
not have, e.g., a ``float`` where other elements have ``complex``).
If not given, assumed to have 1. in all fields.
equivalencies : list of tuple, optional
A list of equivalence pairs to try if the units are not
directly convertible. See :ref:`unit_equivalencies`.
This list is in addition to possible global defaults set by, e.g.,
`set_enabled_equivalencies`.
Use `None` to turn off all equivalencies.
Returns
-------
values : scalar or array
Converted value(s).
Raises
------
UnitsError
If units are inconsistent
"""
if value is np._NoValue:
# We do not have UNITY as a default, since then the docstring
# would list 1.0 as default, yet one could not pass that in.
value = UNITY
return self._get_converter(other, equivalencies=equivalencies)(value)
def to_string(self, format='generic'):
"""Output the unit in the given format as a string.
Units are separated by commas.
Parameters
----------
format : `astropy.units.format.Base` instance or str
The name of a format or a formatter object. If not
provided, defaults to the generic format.
Notes
-----
Structured units can be written to all formats, but can be
re-read only with 'generic'.
"""
parts = [part.to_string(format) for part in self.values()]
out_fmt = '({})' if len(self) > 1 else '({},)'
if format == 'latex':
# Strip $ from parts and add them on the outside.
parts = [part[1:-1] for part in parts]
out_fmt = '$' + out_fmt + '$'
return out_fmt.format(', '.join(parts))
def _repr_latex_(self):
return self.to_string('latex')
__array_ufunc__ = None
def __mul__(self, other):
if isinstance(other, str):
try:
other = Unit(other, parse_strict='silent')
except Exception:
return NotImplemented
if isinstance(other, UnitBase):
new_units = tuple(part * other for part in self.values())
return self.__class__(new_units, names=self)
if isinstance(other, StructuredUnit):
return NotImplemented
# Anything not like a unit, try initialising as a structured quantity.
try:
from .quantity import Quantity
return Quantity(other, unit=self)
except Exception:
return NotImplemented
def __rmul__(self, other):
return self.__mul__(other)
def __truediv__(self, other):
if isinstance(other, str):
try:
other = Unit(other, parse_strict='silent')
except Exception:
return NotImplemented
if isinstance(other, UnitBase):
new_units = tuple(part / other for part in self.values())
return self.__class__(new_units, names=self)
return NotImplemented
def __rlshift__(self, m):
try:
from .quantity import Quantity
return Quantity(m, self, copy=False, subok=True)
except Exception:
return NotImplemented
def __str__(self):
return self.to_string()
def __repr__(self):
return f'Unit("{self.to_string()}")'
def __eq__(self, other):
try:
other = StructuredUnit(other)
except Exception:
return NotImplemented
return self.values() == other.values()
def __ne__(self, other):
if not isinstance(other, type(self)):
try:
other = StructuredUnit(other)
except Exception:
return NotImplemented
return self.values() != other.values()
class Structure(np.void):
"""Single element structure for physical type IDs, etc.
Behaves like a `~numpy.void` and thus mostly like a tuple which can also
be indexed with field names, but overrides ``__eq__`` and ``__ne__`` to
compare only the contents, not the field names. Furthermore, this way no
`FutureWarning` about comparisons is given.
"""
# Note that it is important for physical type IDs to not be stored in a
# tuple, since then the physical types would be treated as alternatives in
# :meth:`~astropy.units.UnitBase.is_equivalent`. (Of course, in that
# case, they could also not be indexed by name.)
def __eq__(self, other):
if isinstance(other, np.void):
other = other.item()
return self.item() == other
def __ne__(self, other):
if isinstance(other, np.void):
other = other.item()
return self.item() != other
|