body
stringlengths
26
98.2k
body_hash
int64
-9,222,864,604,528,158,000
9,221,803,474B
docstring
stringlengths
1
16.8k
path
stringlengths
5
230
name
stringlengths
1
96
repository_name
stringlengths
7
89
lang
stringclasses
1 value
body_without_docstring
stringlengths
20
98.2k
@property
def observer(self):
    """
    The frame of the observer.

    When both this and a target are set, the pair overrides any
    explicitly supplied radial velocity.

    Returns
    -------
    `~astropy.coordinates.BaseCoordinateFrame`
        The astropy coordinate frame representing the observation.
    """
    return self._observer
-2,235,384,996,682,472,000
The coordinates of the observer. If set, and a target is set as well, this will override any explicit radial velocity passed in. Returns ------- `~astropy.coordinates.BaseCoordinateFrame` The astropy coordinate frame representing the observation.
LI/lib/python3.8/site-packages/astropy/coordinates/spectral_coordinate.py
observer
honeybhardwaj/Language_Identification
python
@property def observer(self): '\n The coordinates of the observer.\n\n If set, and a target is set as well, this will override any explicit\n radial velocity passed in.\n\n Returns\n -------\n `~astropy.coordinates.BaseCoordinateFrame`\n The astropy coordinate frame representing the observation.\n ' return self._observer
@property
def target(self):
    """
    The frame of the target being observed.

    When both this and an observer are set, the pair overrides any
    explicitly supplied radial velocity.

    Returns
    -------
    `~astropy.coordinates.BaseCoordinateFrame`
        The astropy coordinate frame representing the target.
    """
    return self._target
-5,736,557,129,610,842,000
The coordinates of the target being observed. If set, and an observer is set as well, this will override any explicit radial velocity passed in. Returns ------- `~astropy.coordinates.BaseCoordinateFrame` The astropy coordinate frame representing the target.
LI/lib/python3.8/site-packages/astropy/coordinates/spectral_coordinate.py
target
honeybhardwaj/Language_Identification
python
@property def target(self): '\n The coordinates of the target being observed.\n\n If set, and an observer is set as well, this will override any explicit\n radial velocity passed in.\n\n Returns\n -------\n `~astropy.coordinates.BaseCoordinateFrame`\n The astropy coordinate frame representing the target.\n ' return self._target
@property
def radial_velocity(self):
    """
    Radial velocity of target relative to the observer.

    Returns
    -------
    `~astropy.units.Quantity`
        Radial velocity of target.

    Notes
    -----
    Unlike the ``.radial_velocity`` attribute of a coordinate frame,
    this is computed with respect to the *observer*, not the frame
    origin.
    """
    # When both endpoints are known, derive the velocity geometrically.
    if self._observer is not None and self._target is not None:
        return self._calculate_radial_velocity(self._observer,
                                               self._target,
                                               as_scalar=True)
    # Otherwise fall back to any explicitly stored value, defaulting
    # to zero.
    if self._radial_velocity is not None:
        return self._radial_velocity
    return 0 * KMS
1,471,006,984,708,161,500
Radial velocity of target relative to the observer. Returns ------- `~astropy.units.Quantity` Radial velocity of target. Notes ----- This is different from the ``.radial_velocity`` property of a coordinate frame in that this calculates the radial velocity with respect to the *observer*, not the origin of the frame.
LI/lib/python3.8/site-packages/astropy/coordinates/spectral_coordinate.py
radial_velocity
honeybhardwaj/Language_Identification
python
@property def radial_velocity(self): '\n Radial velocity of target relative to the observer.\n\n Returns\n -------\n `~astropy.units.Quantity`\n Radial velocity of target.\n\n Notes\n -----\n This is different from the ``.radial_velocity`` property of a\n coordinate frame in that this calculates the radial velocity with\n respect to the *observer*, not the origin of the frame.\n ' if ((self._observer is None) or (self._target is None)): if (self._radial_velocity is None): return (0 * KMS) else: return self._radial_velocity else: return self._calculate_radial_velocity(self._observer, self._target, as_scalar=True)
@property
def redshift(self):
    """
    Redshift of target relative to observer, derived from the radial
    velocity.

    Returns
    -------
    float
        Redshift of target.
    """
    velocity = self.radial_velocity
    return _velocity_to_redshift(velocity)
-1,663,626,652,174,325,200
Redshift of target relative to observer. Calculated from the radial velocity. Returns ------- float Redshift of target.
LI/lib/python3.8/site-packages/astropy/coordinates/spectral_coordinate.py
redshift
honeybhardwaj/Language_Identification
python
@property def redshift(self): '\n Redshift of target relative to observer. Calculated from the radial\n velocity.\n\n Returns\n -------\n float\n Redshift of target.\n ' return _velocity_to_redshift(self.radial_velocity)
@staticmethod
def _calculate_radial_velocity(observer, target, as_scalar=False):
    """
    Compute the line-of-sight velocity from the observer to the target.

    Parameters
    ----------
    observer : `~astropy.coordinates.BaseCoordinateFrame`
        The frame of the observer.
    target : `~astropy.coordinates.BaseCoordinateFrame`
        The frame of the target.
    as_scalar : bool
        If `True`, return the magnitude of the velocity vector,
        otherwise return the full vector.

    Returns
    -------
    `~astropy.units.Quantity`
        The radial velocity of the target with respect to the observer.
    """
    # Work in a common frame (ICRS) so positions/velocities are comparable.
    obs_icrs = observer.transform_to(ICRS())
    tgt_icrs = target.transform_to(ICRS())

    # Unit vector from observer to target, and their velocity difference.
    unit_vec = SpectralCoord._normalized_position_vector(obs_icrs, tgt_icrs)
    relative_vel = tgt_icrs.velocity - obs_icrs.velocity

    # Project the relative velocity onto the line of sight.
    los_speed = unit_vec.dot(relative_vel)
    return los_speed if as_scalar else los_speed * unit_vec
988,378,101,657,718,800
Compute the line-of-sight velocity from the observer to the target. Parameters ---------- observer : `~astropy.coordinates.BaseCoordinateFrame` The frame of the observer. target : `~astropy.coordinates.BaseCoordinateFrame` The frame of the target. as_scalar : bool If `True`, the magnitude of the velocity vector will be returned, otherwise the full vector will be returned. Returns ------- `~astropy.units.Quantity` The radial velocity of the target with respect to the observer.
LI/lib/python3.8/site-packages/astropy/coordinates/spectral_coordinate.py
_calculate_radial_velocity
honeybhardwaj/Language_Identification
python
@staticmethod def _calculate_radial_velocity(observer, target, as_scalar=False): '\n Compute the line-of-sight velocity from the observer to the target.\n\n Parameters\n ----------\n observer : `~astropy.coordinates.BaseCoordinateFrame`\n The frame of the observer.\n target : `~astropy.coordinates.BaseCoordinateFrame`\n The frame of the target.\n as_scalar : bool\n If `True`, the magnitude of the velocity vector will be returned,\n otherwise the full vector will be returned.\n\n Returns\n -------\n `~astropy.units.Quantity`\n The radial velocity of the target with respect to the observer.\n ' observer_icrs = observer.transform_to(ICRS()) target_icrs = target.transform_to(ICRS()) pos_hat = SpectralCoord._normalized_position_vector(observer_icrs, target_icrs) d_vel = (target_icrs.velocity - observer_icrs.velocity) vel_mag = pos_hat.dot(d_vel) if as_scalar: return vel_mag else: return (vel_mag * pos_hat)
@staticmethod
def _normalized_position_vector(observer, target):
    """
    Calculate the normalized position vector between two frames.

    Parameters
    ----------
    observer : `~astropy.coordinates.BaseCoordinateFrame` or `~astropy.coordinates.SkyCoord`
        The observation frame or coordinate.
    target : `~astropy.coordinates.BaseCoordinateFrame` or `~astropy.coordinates.SkyCoord`
        The target frame or coordinate.

    Returns
    -------
    pos_hat : `BaseRepresentation`
        Position representation.
    """
    separation = (target.cartesian.without_differentials()
                  - observer.cartesian.without_differentials())
    norm = separation.norm()
    # Replace zero norms with unity so coincident positions do not
    # produce a division by zero (the resulting vector is then zero).
    norm[norm == 0] = 1 * norm.unit
    return separation / norm
8,450,039,182,530,405,000
Calculate the normalized position vector between two frames. Parameters ---------- observer : `~astropy.coordinates.BaseCoordinateFrame` or `~astropy.coordinates.SkyCoord` The observation frame or coordinate. target : `~astropy.coordinates.BaseCoordinateFrame` or `~astropy.coordinates.SkyCoord` The target frame or coordinate. Returns ------- pos_hat : `BaseRepresentation` Position representation.
LI/lib/python3.8/site-packages/astropy/coordinates/spectral_coordinate.py
_normalized_position_vector
honeybhardwaj/Language_Identification
python
@staticmethod def _normalized_position_vector(observer, target): '\n Calculate the normalized position vector between two frames.\n\n Parameters\n ----------\n observer : `~astropy.coordinates.BaseCoordinateFrame` or `~astropy.coordinates.SkyCoord`\n The observation frame or coordinate.\n target : `~astropy.coordinates.BaseCoordinateFrame` or `~astropy.coordinates.SkyCoord`\n The target frame or coordinate.\n\n Returns\n -------\n pos_hat : `BaseRepresentation`\n Position representation.\n ' d_pos = (target.cartesian.without_differentials() - observer.cartesian.without_differentials()) dp_norm = d_pos.norm() dp_norm[(dp_norm == 0)] = (1 * dp_norm.unit) pos_hat = (d_pos / dp_norm) return pos_hat
@u.quantity_input(velocity=(u.km / u.s))
def with_observer_stationary_relative_to(self, frame, velocity=None, preserve_observer_frame=False):
    """
    A new `SpectralCoord` with the velocity of the observer altered,
    but not the position.

    If a coordinate frame is specified, the observer velocities will be
    modified to be stationary in the specified frame. If a coordinate
    instance is specified, optionally with non-zero velocities, the
    observer velocities will be updated so that the observer is co-moving
    with the specified coordinates.

    Parameters
    ----------
    frame : str, `~astropy.coordinates.BaseCoordinateFrame` or `~astropy.coordinates.SkyCoord`
        The observation frame in which the observer will be stationary. This
        can be the name of a frame (e.g. 'icrs'), a frame class, frame instance
        with no data, or instance with data. This can optionally include
        velocities.
    velocity : `~astropy.units.Quantity` or `~astropy.coordinates.CartesianDifferential`, optional
        If ``frame`` does not contain velocities, these can be specified as
        a 3-element `~astropy.units.Quantity`. In the case where this is
        also not specified, the velocities default to zero.
    preserve_observer_frame : bool
        If `True`, the final observer frame class will be the same as the
        original one, and if `False` it will be the frame of the velocity
        reference class.

    Returns
    -------
    new_coord : `SpectralCoord`
        The new coordinate object representing the spectral data
        transformed based on the observer's new velocity frame.

    Raises
    ------
    ValueError
        If observer or target is undefined, if ``velocity`` conflicts with
        differentials already on ``frame``, or if ``velocity`` is not a
        3-element vector.
    """
    # Both endpoints are required to compute line-of-sight velocities.
    if ((self.observer is None) or (self.target is None)):
        raise ValueError('This method can only be used if both observer and target are defined on the SpectralCoord.')

    # Normalize the input: SkyCoord -> underlying frame.
    if isinstance(frame, SkyCoord):
        frame = frame.frame

    if isinstance(frame, BaseCoordinateFrame):
        # Frame instance without data: place it at the origin.
        if (not frame.has_data):
            frame = frame.realize_frame(CartesianRepresentation((0 * u.km), (0 * u.km), (0 * u.km)))
        if frame.data.differentials:
            # Frame already carries velocities; an explicit velocity would
            # be ambiguous.
            if (velocity is not None):
                raise ValueError('frame already has differentials, cannot also specify velocity')
        else:
            # Attach the requested (or zero) velocity differentials.
            if (velocity is None):
                differentials = ZERO_VELOCITIES
            else:
                differentials = CartesianDifferential(velocity)
            frame = frame.realize_frame(frame.data.with_differentials(differentials))

    if isinstance(frame, (type, str)):
        # Frame given as a class or frame name: resolve to a class, then
        # build an instance at the origin with the requested velocity.
        if isinstance(frame, type):
            frame_cls = frame
        elif isinstance(frame, str):
            frame_cls = frame_transform_graph.lookup_name(frame)
        if (velocity is None):
            velocity = (((0 * u.m) / u.s), ((0 * u.m) / u.s), ((0 * u.m) / u.s))
        elif (velocity.shape != (3,)):
            raise ValueError('velocity should be a Quantity vector with 3 elements')
        frame = frame_cls((0 * u.m), (0 * u.m), (0 * u.m), *velocity, representation_type='cartesian', differential_type='cartesian')

    # New observer: original position, velocities matched to ``frame``.
    observer = update_differentials_to_match(self.observer, frame, preserve_observer_frame=preserve_observer_frame)

    # Doppler-shift the spectral values by the change in line-of-sight
    # velocity between the old and new observer.
    init_obs_vel = self._calculate_radial_velocity(self.observer, self.target, as_scalar=True)
    fin_obs_vel = self._calculate_radial_velocity(observer, self.target, as_scalar=True)
    new_data = _apply_relativistic_doppler_shift(self, (fin_obs_vel - init_obs_vel))
    new_coord = self.replicate(value=new_data, observer=observer)
    return new_coord
4,901,878,164,844,446,000
A new `SpectralCoord` with the velocity of the observer altered, but not the position. If a coordinate frame is specified, the observer velocities will be modified to be stationary in the specified frame. If a coordinate instance is specified, optionally with non-zero velocities, the observer velocities will be updated so that the observer is co-moving with the specified coordinates. Parameters ---------- frame : str, `~astropy.coordinates.BaseCoordinateFrame` or `~astropy.coordinates.SkyCoord` The observation frame in which the observer will be stationary. This can be the name of a frame (e.g. 'icrs'), a frame class, frame instance with no data, or instance with data. This can optionally include velocities. velocity : `~astropy.units.Quantity` or `~astropy.coordinates.CartesianDifferential`, optional If ``frame`` does not contain velocities, these can be specified as a 3-element `~astropy.units.Quantity`. In the case where this is also not specified, the velocities default to zero. preserve_observer_frame : bool If `True`, the final observer frame class will be the same as the original one, and if `False` it will be the frame of the velocity reference class. Returns ------- new_coord : `SpectralCoord` The new coordinate object representing the spectral data transformed based on the observer's new velocity frame.
LI/lib/python3.8/site-packages/astropy/coordinates/spectral_coordinate.py
with_observer_stationary_relative_to
honeybhardwaj/Language_Identification
python
@u.quantity_input(velocity=(u.km / u.s)) def with_observer_stationary_relative_to(self, frame, velocity=None, preserve_observer_frame=False): "\n A new `SpectralCoord` with the velocity of the observer altered,\n but not the position.\n\n If a coordinate frame is specified, the observer velocities will be\n modified to be stationary in the specified frame. If a coordinate\n instance is specified, optionally with non-zero velocities, the\n observer velocities will be updated so that the observer is co-moving\n with the specified coordinates.\n\n Parameters\n ----------\n frame : str, `~astropy.coordinates.BaseCoordinateFrame` or `~astropy.coordinates.SkyCoord`\n The observation frame in which the observer will be stationary. This\n can be the name of a frame (e.g. 'icrs'), a frame class, frame instance\n with no data, or instance with data. This can optionally include\n velocities.\n velocity : `~astropy.units.Quantity` or `~astropy.coordinates.CartesianDifferential`, optional\n If ``frame`` does not contain velocities, these can be specified as\n a 3-element `~astropy.units.Quantity`. 
In the case where this is\n also not specified, the velocities default to zero.\n preserve_observer_frame : bool\n If `True`, the final observer frame class will be the same as the\n original one, and if `False` it will be the frame of the velocity\n reference class.\n\n Returns\n -------\n new_coord : `SpectralCoord`\n The new coordinate object representing the spectral data\n transformed based on the observer's new velocity frame.\n " if ((self.observer is None) or (self.target is None)): raise ValueError('This method can only be used if both observer and target are defined on the SpectralCoord.') if isinstance(frame, SkyCoord): frame = frame.frame if isinstance(frame, BaseCoordinateFrame): if (not frame.has_data): frame = frame.realize_frame(CartesianRepresentation((0 * u.km), (0 * u.km), (0 * u.km))) if frame.data.differentials: if (velocity is not None): raise ValueError('frame already has differentials, cannot also specify velocity') else: if (velocity is None): differentials = ZERO_VELOCITIES else: differentials = CartesianDifferential(velocity) frame = frame.realize_frame(frame.data.with_differentials(differentials)) if isinstance(frame, (type, str)): if isinstance(frame, type): frame_cls = frame elif isinstance(frame, str): frame_cls = frame_transform_graph.lookup_name(frame) if (velocity is None): velocity = (((0 * u.m) / u.s), ((0 * u.m) / u.s), ((0 * u.m) / u.s)) elif (velocity.shape != (3,)): raise ValueError('velocity should be a Quantity vector with 3 elements') frame = frame_cls((0 * u.m), (0 * u.m), (0 * u.m), *velocity, representation_type='cartesian', differential_type='cartesian') observer = update_differentials_to_match(self.observer, frame, preserve_observer_frame=preserve_observer_frame) init_obs_vel = self._calculate_radial_velocity(self.observer, self.target, as_scalar=True) fin_obs_vel = self._calculate_radial_velocity(observer, self.target, as_scalar=True) new_data = _apply_relativistic_doppler_shift(self, (fin_obs_vel - init_obs_vel)) 
new_coord = self.replicate(value=new_data, observer=observer) return new_coord
def with_radial_velocity_shift(self, target_shift=None, observer_shift=None):
    """
    Apply a velocity shift to this spectral coordinate.

    The shift can be provided as a redshift (float value) or radial
    velocity (`~astropy.units.Quantity` with physical type of 'speed').

    Parameters
    ----------
    target_shift : float or `~astropy.units.Quantity`
        Shift value to apply to current target.
    observer_shift : float or `~astropy.units.Quantity`
        Shift value to apply to current observer.

    Returns
    -------
    `SpectralCoord`
        New spectral coordinate with the target/observer velocity changed
        to incorporate the shift. This is always a new object even if
        ``target_shift`` and ``observer_shift`` are both `None`.

    Raises
    ------
    ValueError
        If ``observer_shift`` is given without both observer and target set.
    `~astropy.units.UnitsError`
        If a shift has units that are neither speed nor dimensionless.
    """
    # Shifting the observer only makes sense when both endpoints exist.
    if ((observer_shift is not None) and ((self.target is None) or (self.observer is None))):
        raise ValueError('Both an observer and target must be defined before applying a velocity shift.')

    # Validate units of any provided shifts: speed or dimensionless only.
    # NOTE(review): 'velocty' typo below is in the original runtime
    # message and is kept byte-for-byte.
    for arg in [x for x in [target_shift, observer_shift] if (x is not None)]:
        if (isinstance(arg, u.Quantity) and (not arg.unit.is_equivalent((u.one, KMS)))):
            raise u.UnitsError("Argument must have unit physical type 'speed' for radial velocty or 'dimensionless' for redshift.")

    if (target_shift is None):
        # No target shift; without both endpoints there is nothing to do.
        if ((self._observer is None) or (self._target is None)):
            return self.replicate()
        target_shift = (0 * KMS)
    else:
        target_shift = u.Quantity(target_shift)
        # Dimensionless shifts are interpreted as redshifts.
        if (target_shift.unit.physical_type == 'dimensionless'):
            target_shift = _redshift_to_velocity(target_shift)
        # Without both endpoints, apply the shift directly to the stored
        # radial velocity instead of the frames.
        if ((self._observer is None) or (self._target is None)):
            return self.replicate(value=_apply_relativistic_doppler_shift(self, target_shift), radial_velocity=(self.radial_velocity + target_shift))

    if (observer_shift is None):
        observer_shift = (0 * KMS)
    else:
        observer_shift = u.Quantity(observer_shift)
        if (observer_shift.unit.physical_type == 'dimensionless'):
            observer_shift = _redshift_to_velocity(observer_shift)

    # Work in ICRS so both velocity adjustments share one frame.
    target_icrs = self._target.transform_to(ICRS())
    observer_icrs = self._observer.transform_to(ICRS())

    # Add each shift along the observer->target line of sight.
    pos_hat = SpectralCoord._normalized_position_vector(observer_icrs, target_icrs)
    target_velocity = (_get_velocities(target_icrs) + (target_shift * pos_hat))
    observer_velocity = (_get_velocities(observer_icrs) + (observer_shift * pos_hat))
    target_velocity = CartesianDifferential(target_velocity.xyz)
    observer_velocity = CartesianDifferential(observer_velocity.xyz)

    # Rebuild target/observer with the new velocities, back in their
    # original frames.
    new_target = target_icrs.realize_frame(target_icrs.cartesian.with_differentials(target_velocity)).transform_to(self._target)
    new_observer = observer_icrs.realize_frame(observer_icrs.cartesian.with_differentials(observer_velocity)).transform_to(self._observer)

    # Doppler-shift the spectral values by the change in relative
    # line-of-sight velocity.
    init_obs_vel = self._calculate_radial_velocity(observer_icrs, target_icrs, as_scalar=True)
    fin_obs_vel = self._calculate_radial_velocity(new_observer, new_target, as_scalar=True)
    new_data = _apply_relativistic_doppler_shift(self, (fin_obs_vel - init_obs_vel))
    return self.replicate(value=new_data, observer=new_observer, target=new_target)
-3,032,038,574,636,160,000
Apply a velocity shift to this spectral coordinate. The shift can be provided as a redshift (float value) or radial velocity (`~astropy.units.Quantity` with physical type of 'speed'). Parameters ---------- target_shift : float or `~astropy.units.Quantity` Shift value to apply to current target. observer_shift : float or `~astropy.units.Quantity` Shift value to apply to current observer. Returns ------- `SpectralCoord` New spectral coordinate with the target/observer velocity changed to incorporate the shift. This is always a new object even if ``target_shift`` and ``observer_shift`` are both `None`.
LI/lib/python3.8/site-packages/astropy/coordinates/spectral_coordinate.py
with_radial_velocity_shift
honeybhardwaj/Language_Identification
python
def with_radial_velocity_shift(self, target_shift=None, observer_shift=None): "\n Apply a velocity shift to this spectral coordinate.\n\n The shift can be provided as a redshift (float value) or radial\n velocity (`~astropy.units.Quantity` with physical type of 'speed').\n\n Parameters\n ----------\n target_shift : float or `~astropy.units.Quantity`\n Shift value to apply to current target.\n observer_shift : float or `~astropy.units.Quantity`\n Shift value to apply to current observer.\n\n Returns\n -------\n `SpectralCoord`\n New spectral coordinate with the target/observer velocity changed\n to incorporate the shift. This is always a new object even if\n ``target_shift`` and ``observer_shift`` are both `None`.\n " if ((observer_shift is not None) and ((self.target is None) or (self.observer is None))): raise ValueError('Both an observer and target must be defined before applying a velocity shift.') for arg in [x for x in [target_shift, observer_shift] if (x is not None)]: if (isinstance(arg, u.Quantity) and (not arg.unit.is_equivalent((u.one, KMS)))): raise u.UnitsError("Argument must have unit physical type 'speed' for radial velocty or 'dimensionless' for redshift.") if (target_shift is None): if ((self._observer is None) or (self._target is None)): return self.replicate() target_shift = (0 * KMS) else: target_shift = u.Quantity(target_shift) if (target_shift.unit.physical_type == 'dimensionless'): target_shift = _redshift_to_velocity(target_shift) if ((self._observer is None) or (self._target is None)): return self.replicate(value=_apply_relativistic_doppler_shift(self, target_shift), radial_velocity=(self.radial_velocity + target_shift)) if (observer_shift is None): observer_shift = (0 * KMS) else: observer_shift = u.Quantity(observer_shift) if (observer_shift.unit.physical_type == 'dimensionless'): observer_shift = _redshift_to_velocity(observer_shift) target_icrs = self._target.transform_to(ICRS()) observer_icrs = self._observer.transform_to(ICRS()) 
pos_hat = SpectralCoord._normalized_position_vector(observer_icrs, target_icrs) target_velocity = (_get_velocities(target_icrs) + (target_shift * pos_hat)) observer_velocity = (_get_velocities(observer_icrs) + (observer_shift * pos_hat)) target_velocity = CartesianDifferential(target_velocity.xyz) observer_velocity = CartesianDifferential(observer_velocity.xyz) new_target = target_icrs.realize_frame(target_icrs.cartesian.with_differentials(target_velocity)).transform_to(self._target) new_observer = observer_icrs.realize_frame(observer_icrs.cartesian.with_differentials(observer_velocity)).transform_to(self._observer) init_obs_vel = self._calculate_radial_velocity(observer_icrs, target_icrs, as_scalar=True) fin_obs_vel = self._calculate_radial_velocity(new_observer, new_target, as_scalar=True) new_data = _apply_relativistic_doppler_shift(self, (fin_obs_vel - init_obs_vel)) return self.replicate(value=new_data, observer=new_observer, target=new_target)
def to_rest(self):
    """
    Transforms the spectral axis to the rest frame.
    """
    # With both endpoints defined, co-move the observer with the target.
    if self.observer is not None and self.target is not None:
        return self.with_observer_stationary_relative_to(self.target)
    # Otherwise undo the stored radial velocity directly.
    shifted = _apply_relativistic_doppler_shift(self, -self.radial_velocity)
    return self.replicate(value=shifted, radial_velocity=0.0 * KMS,
                          redshift=None)
-384,347,302,439,089,100
Transforms the spectral axis to the rest frame.
LI/lib/python3.8/site-packages/astropy/coordinates/spectral_coordinate.py
to_rest
honeybhardwaj/Language_Identification
python
def to_rest(self): '\n \n ' if ((self.observer is not None) and (self.target is not None)): return self.with_observer_stationary_relative_to(self.target) result = _apply_relativistic_doppler_shift(self, (- self.radial_velocity)) return self.replicate(value=result, radial_velocity=(0.0 * KMS), redshift=None)
def searchInsert(self, nums, target):
    """
    Return the index of ``target`` in the sorted list ``nums``, or the
    index at which it would be inserted to keep ``nums`` sorted.

    An empty or ``None`` ``nums`` yields 0, matching the original
    behavior.

    :type nums: List[int]
    :type target: int
    :rtype: int
    """
    # Covers both None and [] (original returned 0 for both).
    if not nums:
        return 0
    # Binary search, O(log n), replacing the original O(n) linear scan;
    # bisect_left returns the first position where target fits, which is
    # also the index of the first equal element when present.
    from bisect import bisect_left
    return bisect_left(nums, target)
1,615,753,001,070,173,400
:type nums: List[int] :type target: int :rtype: int
Python/SearchInsertPosition.py
searchInsert
TonnyL/Windary
python
def searchInsert(self, nums, target): '\n :type nums: List[int]\n :type target: int\n :rtype: int\n ' if ((nums is None) or (len(nums) == 0)): return 0 for i in range(0, len(nums)): if (nums[i] == target): return i elif (nums[i] < target): if ((((i + 1) < len(nums)) and (nums[(i + 1)] > target)) or ((i + 1) == len(nums))): return (i + 1) return 0
def download_library():
    """Download and install the CDF library.

    Scrapes the NASA SPDF distribution directory listing, picks the
    newest CDF 3.x release, downloads the matching Windows installer
    (width chosen from the running interpreter's pointer size) and runs
    it in quiet mode. Windows-only.

    Raises
    ------
    NotImplementedError
        If not running on Windows.
    RuntimeError
        If no CDF distribution directory is found in the listing.
    """
    if sys.platform != 'win32':
        raise NotImplementedError(
            'CDF library install only supported on Windows')
    # Python 3 / Python 2 compatible imports.
    try:
        import html.parser as HTMLParser
    except ImportError:
        import HTMLParser

    class LinkParser(HTMLParser.HTMLParser, object):
        """Collect the href of every anchor tag fed to the parser."""

        def __init__(self, *args, **kwargs):
            self.links_found = []
            super(LinkParser, self).__init__(*args, **kwargs)

        def handle_starttag(self, tag, attrs):
            if tag != 'a' or attrs[0][0] != 'href':
                return
            self.links_found.append(attrs[0][1])

    import re
    import subprocess
    try:
        import urllib.request as u
    except ImportError:
        import urllib as u
    baseurl = 'https://spdf.sci.gsfc.nasa.gov/pub/software/cdf/dist/'
    url = u.urlopen(baseurl)
    listing = url.read()
    url.close()
    # BUGFIX: on Python 3, urlopen().read() returns bytes, but
    # html.parser.HTMLParser.feed() requires str -- decode first.
    if not isinstance(listing, str):
        listing = listing.decode('utf-8', errors='replace')
    p = LinkParser()
    p.feed(listing)
    # Release directories look like cdf37_1/ or cdf38_0_1/.
    cdfdist = [l for l in p.links_found
               if re.match('^cdf3\\d_\\d(?:_\\d)?/$', l)]
    if not cdfdist:
        raise RuntimeError(
            "Couldn't find CDF distribution directory to download")
    # Sort on the version components so the last entry is the newest.
    cdfdist.sort(key=lambda x: x.rstrip('/').split('_'))
    cdfverbase = cdfdist[-1].rstrip('/')
    # Installer name embeds a third version component (default _0) plus
    # the interpreter pointer width: len(hex(maxsize)) * 4 -> 32 or 64.
    instfname = (cdfverbase
                 + ('_0' if cdfverbase.count('_') == 1 else '')
                 + '-setup-{0}.exe'.format(len('%x' % sys.maxsize) * 4))
    insturl = baseurl + cdfverbase + '/windows/' + instfname
    tmpdir = tempfile.mkdtemp()
    try:
        fname, status = u.urlretrieve(
            insturl, os.path.join(tmpdir, instfname))
        # /install /q1: quiet install with no user interaction.
        subprocess.check_call([fname, '/install', '/q1'], shell=False)
    finally:
        # Always remove the temporary download directory.
        shutil.rmtree(tmpdir)
-6,954,307,582,105,150,000
Download and install the CDF library
pycdf/__init__.py
download_library
cpiker/condaCDF
python
def download_library(): if (sys.platform != 'win32'): raise NotImplementedError('CDF library install only supported on Windows') try: import html.parser as HTMLParser except ImportError: import HTMLParser class LinkParser(HTMLParser.HTMLParser, object): def __init__(self, *args, **kwargs): self.links_found = [] super(LinkParser, self).__init__(*args, **kwargs) def handle_starttag(self, tag, attrs): if ((tag != 'a') or (attrs[0][0] != 'href')): return self.links_found.append(attrs[0][1]) import re import subprocess try: import urllib.request as u except ImportError: import urllib as u baseurl = 'https://spdf.sci.gsfc.nasa.gov/pub/software/cdf/dist/' url = u.urlopen(baseurl) listing = url.read() url.close() p = LinkParser() p.feed(listing) cdfdist = [l for l in p.links_found if re.match('^cdf3\\d_\\d(?:_\\d)?/$', l)] if (not cdfdist): raise RuntimeError("Couldn't find CDF distribution directory to download") cdfdist.sort(key=(lambda x: x.rstrip('/').split('_'))) cdfverbase = cdfdist[(- 1)].rstrip('/') instfname = ((cdfverbase + ('_0' if (cdfverbase.count('_') == 1) else )) + '-setup-{0}.exe'.format((len(('%x' % sys.maxsize)) * 4))) insturl = (((baseurl + cdfverbase) + '/windows/') + instfname) tmpdir = tempfile.mkdtemp() try: (fname, status) = u.urlretrieve(insturl, os.path.join(tmpdir, instfname)) subprocess.check_call([fname, '/install', '/q1'], shell=False) finally: shutil.rmtree(tmpdir)
def _compress(obj, comptype=None, param=None):
    """Set or check the compression of a :py:class:`pycdf.CDF` or :py:class:`pycdf.Var`

    @param obj: object on which to set or check compression
    @type obj: :py:class:`pycdf.CDF` or :py:class:`pycdf.Var`
    @param comptype: type of compression to change to, see CDF C reference
                     manual section 4.10. Constants for this parameter
                     are in :py:mod:`pycdf.const`. If not specified, will not change
                     compression.
    @type comptype: ctypes.c_long
    @param param: Compression parameter, see CDF CRM 4.10 and :py:mod:`pycdf.const`.
                  If not specified, will choose reasonable default (5 for
                  gzip; other types have only one possible parameter.)
    @type param: ctypes.c_long
    @return: (comptype, param) currently in effect
    @rtype: tuple
    """
    # Pick the CDF-library selector matching the object type: whole-file
    # compression for a CDF, per-variable compression for a Var.
    if isinstance(obj, CDF):
        COMPRESSION_ = const.CDF_COMPRESSION_
    elif isinstance(obj, Var):
        COMPRESSION_ = const.zVAR_COMPRESSION_
    else:
        raise ValueError('Must specify a CDF or Var type.')

    # Valid parameters per compression type; the first entry of each list
    # is the default (gzip default level is 5).
    validparams = {const.NO_COMPRESSION.value: [ctypes.c_long(0)], const.RLE_COMPRESSION.value: [const.RLE_OF_ZEROs], const.HUFF_COMPRESSION.value: [const.OPTIMAL_ENCODING_TREES], const.AHUFF_COMPRESSION.value: [const.OPTIMAL_ENCODING_TREES], const.GZIP_COMPRESSION.value: [ctypes.c_long(5), ctypes.c_long(1), ctypes.c_long(2), ctypes.c_long(3), ctypes.c_long(4), ctypes.c_long(6), ctypes.c_long(7), ctypes.c_long(8), ctypes.c_long(9)]}
    comptypes = [const.NO_COMPRESSION, const.RLE_COMPRESSION, const.HUFF_COMPRESSION, const.AHUFF_COMPRESSION, const.GZIP_COMPRESSION]
    comptypevalues = [i.value for i in comptypes]

    if (comptype != None):
        # Setting requested: coerce to c_long, default the parameter if
        # omitted, then PUT the new compression settings.
        if (not hasattr(comptype, 'value')):
            comptype = ctypes.c_long(comptype)
        if (param is None):
            if (not (comptype.value in validparams)):
                raise CDFError(const.BAD_COMPRESSION)
            param = validparams[comptype.value][0]
        paramlist = (ctypes.c_long * 1)(param)
        obj._call(const.PUT_, COMPRESSION_, comptype, paramlist)

    # Always read back the compression currently in effect.
    params = (ctypes.c_long * const.CDF_MAX_PARMS)(*([0] * const.CDF_MAX_PARMS))
    comptype = ctypes.c_long(0)
    percent = ctypes.c_long(0)
    obj._call(const.GET_, COMPRESSION_, ctypes.byref(comptype), ctypes.byref(params), ctypes.byref(percent))
    param = params[0]
    # Validate what the library reported before translating to constants.
    if (not (comptype.value in comptypevalues)):
        raise CDFError(const.BAD_COMPRESSION)
    validparamvalues = [i.value for i in validparams[comptype.value]]
    if (not (param in validparamvalues)):
        raise CDFError(const.BAD_COMPRESSION_PARM)
    # Map raw values back to the pycdf constants; RLE/HUFF/AHUFF params
    # are returned as their constant objects rather than bare ints.
    comptype = comptypes[comptypevalues.index(comptype.value)]
    if (comptype in (const.RLE_COMPRESSION, const.HUFF_COMPRESSION, const.AHUFF_COMPRESSION)):
        param = validparams[comptype.value][validparamvalues.index(param)]
    return (comptype, param)
2,903,751,509,734,036,000
Set or check the compression of a :py:class:`pycdf.CDF` or :py:class:`pycdf.Var` @param obj: object on which to set or check compression @type obj: :py:class:`pycdf.CDF` or :py:class:`pycdf.Var` @param comptype: type of compression to change to, see CDF C reference manual section 4.10. Constants for this parameter are in :py:mod:`pycdf.const`. If not specified, will not change compression. @type comptype: ctypes.c_long @param param: Compression parameter, see CDF CRM 4.10 and :py:mod:`pycdf.const`. If not specified, will choose reasonable default (5 for gzip; other types have only one possible parameter.) @type param: ctypes.c_long @return: (comptype, param) currently in effect @rtype: tuple
pycdf/__init__.py
_compress
cpiker/condaCDF
python
def _compress(obj, comptype=None, param=None): 'Set or check the compression of a :py:class:`pycdf.CDF` or :py:class:`pycdf.Var`\n\n @param obj: object on which to set or check compression\n @type obj: :py:class:`pycdf.CDF` or :py:class:`pycdf.Var`\n @param comptype: type of compression to change to, see CDF C reference\n manual section 4.10. Constants for this parameter\n are in :py:mod:`pycdf.const`. If not specified, will not change\n compression.\n @type comptype: ctypes.c_long\n @param param: Compression parameter, see CDF CRM 4.10 and :py:mod:`pycdf.const`.\n If not specified, will choose reasonable default (5 for\n gzip; other types have only one possible parameter.)\n @type param: ctypes.c_long\n @return: (comptype, param) currently in effect\n @rtype: tuple\n ' if isinstance(obj, CDF): COMPRESSION_ = const.CDF_COMPRESSION_ elif isinstance(obj, Var): COMPRESSION_ = const.zVAR_COMPRESSION_ else: raise ValueError('Must specify a CDF or Var type.') validparams = {const.NO_COMPRESSION.value: [ctypes.c_long(0)], const.RLE_COMPRESSION.value: [const.RLE_OF_ZEROs], const.HUFF_COMPRESSION.value: [const.OPTIMAL_ENCODING_TREES], const.AHUFF_COMPRESSION.value: [const.OPTIMAL_ENCODING_TREES], const.GZIP_COMPRESSION.value: [ctypes.c_long(5), ctypes.c_long(1), ctypes.c_long(2), ctypes.c_long(3), ctypes.c_long(4), ctypes.c_long(6), ctypes.c_long(7), ctypes.c_long(8), ctypes.c_long(9)]} comptypes = [const.NO_COMPRESSION, const.RLE_COMPRESSION, const.HUFF_COMPRESSION, const.AHUFF_COMPRESSION, const.GZIP_COMPRESSION] comptypevalues = [i.value for i in comptypes] if (comptype != None): if (not hasattr(comptype, 'value')): comptype = ctypes.c_long(comptype) if (param is None): if (not (comptype.value in validparams)): raise CDFError(const.BAD_COMPRESSION) param = validparams[comptype.value][0] paramlist = (ctypes.c_long * 1)(param) obj._call(const.PUT_, COMPRESSION_, comptype, paramlist) params = (ctypes.c_long * const.CDF_MAX_PARMS)(*([0] * const.CDF_MAX_PARMS)) comptype = 
ctypes.c_long(0) percent = ctypes.c_long(0) obj._call(const.GET_, COMPRESSION_, ctypes.byref(comptype), ctypes.byref(params), ctypes.byref(percent)) param = params[0] if (not (comptype.value in comptypevalues)): raise CDFError(const.BAD_COMPRESSION) validparamvalues = [i.value for i in validparams[comptype.value]] if (not (param in validparamvalues)): raise CDFError(const.BAD_COMPRESSION_PARM) comptype = comptypes[comptypevalues.index(comptype.value)] if (comptype in (const.RLE_COMPRESSION, const.HUFF_COMPRESSION, const.AHUFF_COMPRESSION)): param = validparams[comptype.value][validparamvalues.index(param)] return (comptype, param)
def __init__(self, libpath=None, library=None): "Load the CDF C library.\n\n Searches for the library in the order:\n 1. Appropriately-named file in CDF_LIB\n 2. Appropriately-named file in CDF_BASE\n 3. Standard library search path\n @raise CDFError: BAD_DATA_TYPE if can't map types properly\n " if (not ('CDF_TMP' in os.environ)): os.environ['CDF_TMP'] = tempfile.gettempdir() if (not library): if (not libpath): (self.libpath, self._library) = self._find_lib() if (self._library is None): raise Exception('Cannot load CDF C library; checked {0}. Try \'os.environ["CDF_LIB"] = library_directory\' before import.'.format(', '.join(self.libpath))) else: self._library = ctypes.CDLL(libpath) self.libpath = libpath else: self._library = library self.libpath = libpath self._library.CDFlib.restype = ctypes.c_long self._library.EPOCHbreakdown.restype = ctypes.c_long self._library.computeEPOCH.restype = ctypes.c_double self._library.computeEPOCH.argtypes = ([ctypes.c_long] * 7) self._library.computeEPOCH16.restype = ctypes.c_double self._library.computeEPOCH16.argtypes = (([ctypes.c_long] * 10) + [ctypes.POINTER((ctypes.c_double * 2))]) if hasattr(self._library, 'CDFsetFileBackward'): self._library.CDFsetFileBackward.restype = None self._library.CDFsetFileBackward.argtypes = [ctypes.c_long] if ((not hasattr(self._library, 'computeTT2000')) and hasattr(self._library, 'CDF_TT2000_from_UTC_parts')): self._library.computeTT2000 = self._library.CDF_TT2000_from_UTC_parts if hasattr(self._library, 'computeTT2000'): self._library.computeTT2000.restype = ctypes.c_longlong self._library.computeTT2000.argtypes = ([ctypes.c_double] * 9) if ((not hasattr(self._library, 'breakdownTT2000')) and hasattr(self._library, 'CDF_TT2000_to_UTC_parts')): self._library.breakdownTT2000 = self._library.CDF_TT2000_to_UTC_parts if hasattr(self._library, 'breakdownTT2000'): self._library.breakdownTT2000.restype = None self._library.breakdownTT2000.argtypes = ([ctypes.c_longlong] + 
([ctypes.POINTER(ctypes.c_double)] * 9)) if hasattr(self._library, 'CDF_TT2000_to_UTC_EPOCH'): self._library.CDF_TT2000_to_UTC_EPOCH.restype = ctypes.c_double self._library.CDF_TT2000_to_UTC_EPOCH.argtypes = [ctypes.c_longlong] if hasattr(self._library, 'CDF_TT2000_from_UTC_EPOCH'): self._library.CDF_TT2000_from_UTC_EPOCH.restype = ctypes.c_longlong self._library.CDF_TT2000_from_UTC_EPOCH.argtypes = [ctypes.c_double] if hasattr(self._library, 'CDF_TT2000_to_UTC_EPOCH16'): self._library.CDF_TT2000_to_UTC_EPOCH16.restype = ctypes.c_double self._library.CDF_TT2000_to_UTC_EPOCH16.argtypes = [ctypes.c_longlong, ctypes.POINTER((ctypes.c_double * 2))] if hasattr(self._library, 'CDF_TT2000_from_UTC_EPOCH16'): self._library.CDF_TT2000_from_UTC_EPOCH16.restype = ctypes.c_longlong self._library.CDF_TT2000_from_UTC_EPOCH16.argtypes = [ctypes.POINTER((ctypes.c_double * 2))] ver = ctypes.c_long(0) rel = ctypes.c_long(0) inc = ctypes.c_long(0) sub = ctypes.c_char(b' ') self.call(const.GET_, const.LIB_VERSION_, ctypes.byref(ver), const.GET_, const.LIB_RELEASE_, ctypes.byref(rel), const.GET_, const.LIB_INCREMENT_, ctypes.byref(inc), const.GET_, const.LIB_subINCREMENT_, ctypes.byref(sub)) ver = ver.value rel = rel.value inc = inc.value sub = sub.value self.version = (ver, rel, inc, sub) self._del_middle_rec_bug = ((ver < 3) or ((ver == 3) and ((rel < 4) or ((rel == 4) and (inc < 1))))) self.supports_int8 = ((ver > 3) or ((ver == 3) and (rel >= 4))) self.cdftypenames = {const.CDF_BYTE.value: 'CDF_BYTE', const.CDF_CHAR.value: 'CDF_CHAR', const.CDF_INT1.value: 'CDF_INT1', const.CDF_UCHAR.value: 'CDF_UCHAR', const.CDF_UINT1.value: 'CDF_UINT1', const.CDF_INT2.value: 'CDF_INT2', const.CDF_UINT2.value: 'CDF_UINT2', const.CDF_INT4.value: 'CDF_INT4', const.CDF_UINT4.value: 'CDF_UINT4', const.CDF_INT8.value: 'CDF_INT8', const.CDF_FLOAT.value: 'CDF_FLOAT', const.CDF_REAL4.value: 'CDF_REAL4', const.CDF_DOUBLE.value: 'CDF_DOUBLE', const.CDF_REAL8.value: 'CDF_REAL8', const.CDF_EPOCH.value: 
'CDF_EPOCH', const.CDF_EPOCH16.value: 'CDF_EPOCH16', const.CDF_TIME_TT2000.value: 'CDF_TIME_TT2000'} self.numpytypedict = {const.CDF_BYTE.value: numpy.int8, const.CDF_CHAR.value: numpy.int8, const.CDF_INT1.value: numpy.int8, const.CDF_UCHAR.value: numpy.uint8, const.CDF_UINT1.value: numpy.uint8, const.CDF_INT2.value: numpy.int16, const.CDF_UINT2.value: numpy.uint16, const.CDF_INT4.value: numpy.int32, const.CDF_UINT4.value: numpy.uint32, const.CDF_INT8.value: numpy.int64, const.CDF_FLOAT.value: numpy.float32, const.CDF_REAL4.value: numpy.float32, const.CDF_DOUBLE.value: numpy.float64, const.CDF_REAL8.value: numpy.float64, const.CDF_EPOCH.value: numpy.float64, const.CDF_EPOCH16.value: numpy.dtype((numpy.float64, 2)), const.CDF_TIME_TT2000.value: numpy.int64} self.timetypes = [const.CDF_EPOCH.value, const.CDF_EPOCH16.value, const.CDF_TIME_TT2000.value] if (not self.supports_int8): del self.cdftypenames[const.CDF_INT8.value] del self.numpytypedict[const.CDF_INT8.value] del self.cdftypenames[const.CDF_TIME_TT2000.value] del self.numpytypedict[const.CDF_TIME_TT2000.value] elif (sys.platform.startswith('linux') and os.uname()[4].startswith('arm') and hasattr(self._library, 'computeTT2000') and (self._library.computeTT2000(2010, 1, 1, 0, 0, 0, 0, 0, 0) != 315576066184000000)): if (ctypes.sizeof(ctypes.c_longlong) != ctypes.sizeof(ctypes.c_double)): warnings.warn('ARM with unknown type sizes; TT2000 functions will not work.') else: self._library.computeTT2000.argtypes = ([ctypes.c_longlong] * 9) c_ll_p = ctypes.POINTER(ctypes.c_longlong) if (self._library.computeTT2000(ctypes.cast(ctypes.pointer(ctypes.c_double(2010)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(1)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(1)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(0)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(0)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(0)), c_ll_p).contents, 
ctypes.cast(ctypes.pointer(ctypes.c_double(0)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(0)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(0)), c_ll_p).contents) != 315576066184000000): warnings.warn('ARM with unknown calling convention; TT2000 functions will not work.') self.datetime_to_tt2000 = self._datetime_to_tt2000_typepunned v_epoch16_to_datetime = numpy.frompyfunc(self.epoch16_to_datetime, 2, 1) self.v_epoch16_to_datetime = (lambda x: v_epoch16_to_datetime(x[(..., 0)], x[(..., 1)])) self.v_epoch_to_datetime = numpy.frompyfunc(self.epoch_to_datetime, 1, 1) self.v_tt2000_to_datetime = numpy.frompyfunc(self.tt2000_to_datetime, 1, 1) self.v_datetime_to_epoch = numpy.vectorize(self.datetime_to_epoch, otypes=[numpy.float64]) v_datetime_to_epoch16 = numpy.frompyfunc(self.datetime_to_epoch16, 1, 2) def _v_datetime_to_epoch16(x): retval = numpy.require(v_datetime_to_epoch16(x), dtype=numpy.float64) if (len(retval.shape) > 1): return numpy.rollaxis(numpy.rollaxis(retval, 0, (- 1)), (- 1), (- 2)) else: return retval self.v_datetime_to_epoch16 = _v_datetime_to_epoch16 self.v_datetime_to_tt2000 = numpy.vectorize(self.datetime_to_tt2000, otypes=[numpy.int64]) self.v_epoch_to_tt2000 = numpy.vectorize(self.epoch_to_tt2000, otypes=[numpy.int64]) self.v_tt2000_to_epoch = numpy.vectorize(self.tt2000_to_epoch, otypes=[numpy.float64]) v_epoch16_to_tt2000 = numpy.frompyfunc(self.epoch16_to_tt2000, 2, 1) self.v_epoch16_to_tt2000 = (lambda x: v_epoch16_to_tt2000(x[(..., 0)], x[(..., 1)])) v_tt2000_to_epoch16 = numpy.frompyfunc(self.tt2000_to_epoch16, 1, 2) def _v_tt2000_to_epoch16(x): retval = numpy.require(v_tt2000_to_epoch16(x), dtype=numpy.float64) if (len(retval.shape) > 1): return numpy.rollaxis(numpy.rollaxis(retval, 0, (- 1)), (- 1), (- 2)) else: return retval self.v_tt2000_to_epoch16 = _v_tt2000_to_epoch16 if (not self.supports_int8): self.datetime_to_tt2000 = self._bad_tt2000 self.tt2000_to_datetime = self._bad_tt2000 
self.v_datetime_to_tt2000 = self._bad_tt2000 self.v_tt2000_to_datetime = self._bad_tt2000 self.epoch_to_tt2000 = self._bad_tt2000 self.v_epoch_to_tt2000 = self._bad_tt2000 self.tt2000_to_epoch = self._bad_tt2000 self.v_tt2000_to_epoch = self._bad_tt2000 self.epoch_16_to_tt2000 = self._bad_tt2000 self.v_epoch16_to_tt2000 = self._bad_tt2000 self.tt2000_to_epoch16 = self._bad_tt2000 self.v_tt2000_to_epoch16 = self._bad_tt2000 self.set_backward(True)
4,657,589,305,218,685,000
Load the CDF C library. Searches for the library in the order: 1. Appropriately-named file in CDF_LIB 2. Appropriately-named file in CDF_BASE 3. Standard library search path @raise CDFError: BAD_DATA_TYPE if can't map types properly
pycdf/__init__.py
__init__
cpiker/condaCDF
python
def __init__(self, libpath=None, library=None): "Load the CDF C library.\n\n Searches for the library in the order:\n 1. Appropriately-named file in CDF_LIB\n 2. Appropriately-named file in CDF_BASE\n 3. Standard library search path\n @raise CDFError: BAD_DATA_TYPE if can't map types properly\n " if (not ('CDF_TMP' in os.environ)): os.environ['CDF_TMP'] = tempfile.gettempdir() if (not library): if (not libpath): (self.libpath, self._library) = self._find_lib() if (self._library is None): raise Exception('Cannot load CDF C library; checked {0}. Try \'os.environ["CDF_LIB"] = library_directory\' before import.'.format(', '.join(self.libpath))) else: self._library = ctypes.CDLL(libpath) self.libpath = libpath else: self._library = library self.libpath = libpath self._library.CDFlib.restype = ctypes.c_long self._library.EPOCHbreakdown.restype = ctypes.c_long self._library.computeEPOCH.restype = ctypes.c_double self._library.computeEPOCH.argtypes = ([ctypes.c_long] * 7) self._library.computeEPOCH16.restype = ctypes.c_double self._library.computeEPOCH16.argtypes = (([ctypes.c_long] * 10) + [ctypes.POINTER((ctypes.c_double * 2))]) if hasattr(self._library, 'CDFsetFileBackward'): self._library.CDFsetFileBackward.restype = None self._library.CDFsetFileBackward.argtypes = [ctypes.c_long] if ((not hasattr(self._library, 'computeTT2000')) and hasattr(self._library, 'CDF_TT2000_from_UTC_parts')): self._library.computeTT2000 = self._library.CDF_TT2000_from_UTC_parts if hasattr(self._library, 'computeTT2000'): self._library.computeTT2000.restype = ctypes.c_longlong self._library.computeTT2000.argtypes = ([ctypes.c_double] * 9) if ((not hasattr(self._library, 'breakdownTT2000')) and hasattr(self._library, 'CDF_TT2000_to_UTC_parts')): self._library.breakdownTT2000 = self._library.CDF_TT2000_to_UTC_parts if hasattr(self._library, 'breakdownTT2000'): self._library.breakdownTT2000.restype = None self._library.breakdownTT2000.argtypes = ([ctypes.c_longlong] + 
([ctypes.POINTER(ctypes.c_double)] * 9)) if hasattr(self._library, 'CDF_TT2000_to_UTC_EPOCH'): self._library.CDF_TT2000_to_UTC_EPOCH.restype = ctypes.c_double self._library.CDF_TT2000_to_UTC_EPOCH.argtypes = [ctypes.c_longlong] if hasattr(self._library, 'CDF_TT2000_from_UTC_EPOCH'): self._library.CDF_TT2000_from_UTC_EPOCH.restype = ctypes.c_longlong self._library.CDF_TT2000_from_UTC_EPOCH.argtypes = [ctypes.c_double] if hasattr(self._library, 'CDF_TT2000_to_UTC_EPOCH16'): self._library.CDF_TT2000_to_UTC_EPOCH16.restype = ctypes.c_double self._library.CDF_TT2000_to_UTC_EPOCH16.argtypes = [ctypes.c_longlong, ctypes.POINTER((ctypes.c_double * 2))] if hasattr(self._library, 'CDF_TT2000_from_UTC_EPOCH16'): self._library.CDF_TT2000_from_UTC_EPOCH16.restype = ctypes.c_longlong self._library.CDF_TT2000_from_UTC_EPOCH16.argtypes = [ctypes.POINTER((ctypes.c_double * 2))] ver = ctypes.c_long(0) rel = ctypes.c_long(0) inc = ctypes.c_long(0) sub = ctypes.c_char(b' ') self.call(const.GET_, const.LIB_VERSION_, ctypes.byref(ver), const.GET_, const.LIB_RELEASE_, ctypes.byref(rel), const.GET_, const.LIB_INCREMENT_, ctypes.byref(inc), const.GET_, const.LIB_subINCREMENT_, ctypes.byref(sub)) ver = ver.value rel = rel.value inc = inc.value sub = sub.value self.version = (ver, rel, inc, sub) self._del_middle_rec_bug = ((ver < 3) or ((ver == 3) and ((rel < 4) or ((rel == 4) and (inc < 1))))) self.supports_int8 = ((ver > 3) or ((ver == 3) and (rel >= 4))) self.cdftypenames = {const.CDF_BYTE.value: 'CDF_BYTE', const.CDF_CHAR.value: 'CDF_CHAR', const.CDF_INT1.value: 'CDF_INT1', const.CDF_UCHAR.value: 'CDF_UCHAR', const.CDF_UINT1.value: 'CDF_UINT1', const.CDF_INT2.value: 'CDF_INT2', const.CDF_UINT2.value: 'CDF_UINT2', const.CDF_INT4.value: 'CDF_INT4', const.CDF_UINT4.value: 'CDF_UINT4', const.CDF_INT8.value: 'CDF_INT8', const.CDF_FLOAT.value: 'CDF_FLOAT', const.CDF_REAL4.value: 'CDF_REAL4', const.CDF_DOUBLE.value: 'CDF_DOUBLE', const.CDF_REAL8.value: 'CDF_REAL8', const.CDF_EPOCH.value: 
'CDF_EPOCH', const.CDF_EPOCH16.value: 'CDF_EPOCH16', const.CDF_TIME_TT2000.value: 'CDF_TIME_TT2000'} self.numpytypedict = {const.CDF_BYTE.value: numpy.int8, const.CDF_CHAR.value: numpy.int8, const.CDF_INT1.value: numpy.int8, const.CDF_UCHAR.value: numpy.uint8, const.CDF_UINT1.value: numpy.uint8, const.CDF_INT2.value: numpy.int16, const.CDF_UINT2.value: numpy.uint16, const.CDF_INT4.value: numpy.int32, const.CDF_UINT4.value: numpy.uint32, const.CDF_INT8.value: numpy.int64, const.CDF_FLOAT.value: numpy.float32, const.CDF_REAL4.value: numpy.float32, const.CDF_DOUBLE.value: numpy.float64, const.CDF_REAL8.value: numpy.float64, const.CDF_EPOCH.value: numpy.float64, const.CDF_EPOCH16.value: numpy.dtype((numpy.float64, 2)), const.CDF_TIME_TT2000.value: numpy.int64} self.timetypes = [const.CDF_EPOCH.value, const.CDF_EPOCH16.value, const.CDF_TIME_TT2000.value] if (not self.supports_int8): del self.cdftypenames[const.CDF_INT8.value] del self.numpytypedict[const.CDF_INT8.value] del self.cdftypenames[const.CDF_TIME_TT2000.value] del self.numpytypedict[const.CDF_TIME_TT2000.value] elif (sys.platform.startswith('linux') and os.uname()[4].startswith('arm') and hasattr(self._library, 'computeTT2000') and (self._library.computeTT2000(2010, 1, 1, 0, 0, 0, 0, 0, 0) != 315576066184000000)): if (ctypes.sizeof(ctypes.c_longlong) != ctypes.sizeof(ctypes.c_double)): warnings.warn('ARM with unknown type sizes; TT2000 functions will not work.') else: self._library.computeTT2000.argtypes = ([ctypes.c_longlong] * 9) c_ll_p = ctypes.POINTER(ctypes.c_longlong) if (self._library.computeTT2000(ctypes.cast(ctypes.pointer(ctypes.c_double(2010)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(1)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(1)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(0)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(0)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(0)), c_ll_p).contents, 
ctypes.cast(ctypes.pointer(ctypes.c_double(0)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(0)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(0)), c_ll_p).contents) != 315576066184000000): warnings.warn('ARM with unknown calling convention; TT2000 functions will not work.') self.datetime_to_tt2000 = self._datetime_to_tt2000_typepunned v_epoch16_to_datetime = numpy.frompyfunc(self.epoch16_to_datetime, 2, 1) self.v_epoch16_to_datetime = (lambda x: v_epoch16_to_datetime(x[(..., 0)], x[(..., 1)])) self.v_epoch_to_datetime = numpy.frompyfunc(self.epoch_to_datetime, 1, 1) self.v_tt2000_to_datetime = numpy.frompyfunc(self.tt2000_to_datetime, 1, 1) self.v_datetime_to_epoch = numpy.vectorize(self.datetime_to_epoch, otypes=[numpy.float64]) v_datetime_to_epoch16 = numpy.frompyfunc(self.datetime_to_epoch16, 1, 2) def _v_datetime_to_epoch16(x): retval = numpy.require(v_datetime_to_epoch16(x), dtype=numpy.float64) if (len(retval.shape) > 1): return numpy.rollaxis(numpy.rollaxis(retval, 0, (- 1)), (- 1), (- 2)) else: return retval self.v_datetime_to_epoch16 = _v_datetime_to_epoch16 self.v_datetime_to_tt2000 = numpy.vectorize(self.datetime_to_tt2000, otypes=[numpy.int64]) self.v_epoch_to_tt2000 = numpy.vectorize(self.epoch_to_tt2000, otypes=[numpy.int64]) self.v_tt2000_to_epoch = numpy.vectorize(self.tt2000_to_epoch, otypes=[numpy.float64]) v_epoch16_to_tt2000 = numpy.frompyfunc(self.epoch16_to_tt2000, 2, 1) self.v_epoch16_to_tt2000 = (lambda x: v_epoch16_to_tt2000(x[(..., 0)], x[(..., 1)])) v_tt2000_to_epoch16 = numpy.frompyfunc(self.tt2000_to_epoch16, 1, 2) def _v_tt2000_to_epoch16(x): retval = numpy.require(v_tt2000_to_epoch16(x), dtype=numpy.float64) if (len(retval.shape) > 1): return numpy.rollaxis(numpy.rollaxis(retval, 0, (- 1)), (- 1), (- 2)) else: return retval self.v_tt2000_to_epoch16 = _v_tt2000_to_epoch16 if (not self.supports_int8): self.datetime_to_tt2000 = self._bad_tt2000 self.tt2000_to_datetime = self._bad_tt2000 
self.v_datetime_to_tt2000 = self._bad_tt2000 self.v_tt2000_to_datetime = self._bad_tt2000 self.epoch_to_tt2000 = self._bad_tt2000 self.v_epoch_to_tt2000 = self._bad_tt2000 self.tt2000_to_epoch = self._bad_tt2000 self.v_tt2000_to_epoch = self._bad_tt2000 self.epoch_16_to_tt2000 = self._bad_tt2000 self.v_epoch16_to_tt2000 = self._bad_tt2000 self.tt2000_to_epoch16 = self._bad_tt2000 self.v_tt2000_to_epoch16 = self._bad_tt2000 self.set_backward(True)
@staticmethod def _find_lib(): '\n Search for the CDF library\n\n Searches in likely locations for CDF libraries and attempts to load\n them. Stops at first successful load and, if fails, reports all\n the files that were tried as libraries.\n\n Returns\n =======\n out : tuple\n This is either (path to library, loaded library)\n or, in the event of failure, (None, list of libraries tried)\n ' failed = [] for libpath in Library._lib_paths(): try: lib = ctypes.CDLL(libpath) except: failed.append(libpath) else: return (libpath, lib) return (failed, None)
-8,930,959,412,059,058,000
Search for the CDF library Searches in likely locations for CDF libraries and attempts to load them. Stops at first successful load and, if fails, reports all the files that were tried as libraries. Returns ======= out : tuple This is either (path to library, loaded library) or, in the event of failure, (None, list of libraries tried)
pycdf/__init__.py
_find_lib
cpiker/condaCDF
python
@staticmethod def _find_lib(): '\n Search for the CDF library\n\n Searches in likely locations for CDF libraries and attempts to load\n them. Stops at first successful load and, if fails, reports all\n the files that were tried as libraries.\n\n Returns\n =======\n out : tuple\n This is either (path to library, loaded library)\n or, in the event of failure, (None, list of libraries tried)\n ' failed = [] for libpath in Library._lib_paths(): try: lib = ctypes.CDLL(libpath) except: failed.append(libpath) else: return (libpath, lib) return (failed, None)
@staticmethod def _lib_paths(): 'Find candidate paths for the CDF library\n\n Does not check that the library is actually in any particular directory,\n just returns a list of possible locations, in priority order.\n\n Returns\n =======\n out : generator of str\n paths that look like the CDF library\n ' names = {'win32': ['cdf.dll'], 'darwin': ['libcdf.dylib', 'cdf.dylib', 'libcdf.so'], 'linux2': ['libcdf.so'], 'linux': ['libcdf.so']} names = names.get(sys.platform, ['libcdf.so']) search_dir = (lambda x: [os.path.join(x, fname) for fname in names if os.path.exists(os.path.join(x, fname))]) if ('PREFIX' in os.environ): if (sys.platform == 'win32'): for p in search_dir(os.path.join(os.environ['PREFIX'], 'Library', 'bin')): (yield p) else: for p in search_dir(os.path.join(os.environ['PREFIX'], 'lib')): (yield p) if ('CONDA_PREFIX' in os.environ): if (sys.platform == 'win32'): for p in search_dir(os.path.join(os.environ['CONDA_PREFIX'], 'Library', 'bin')): (yield p) else: for p in search_dir(os.path.join(os.environ['CONDA_PREFIX'], 'lib')): (yield p) if ('LIBRARY_BIN' in os.environ): for p in search_dir(os.environ['LIBRARY_BIN']): (yield p) ctypespath = ctypes.util.find_library(('cdf.dll' if (sys.platform == 'win32') else 'cdf')) if ctypespath: (yield ctypespath)
-1,022,284,542,166,911,400
Find candidate paths for the CDF library Does not check that the library is actually in any particular directory, just returns a list of possible locations, in priority order. Returns ======= out : generator of str paths that look like the CDF library
pycdf/__init__.py
_lib_paths
cpiker/condaCDF
python
@staticmethod def _lib_paths(): 'Find candidate paths for the CDF library\n\n Does not check that the library is actually in any particular directory,\n just returns a list of possible locations, in priority order.\n\n Returns\n =======\n out : generator of str\n paths that look like the CDF library\n ' names = {'win32': ['cdf.dll'], 'darwin': ['libcdf.dylib', 'cdf.dylib', 'libcdf.so'], 'linux2': ['libcdf.so'], 'linux': ['libcdf.so']} names = names.get(sys.platform, ['libcdf.so']) search_dir = (lambda x: [os.path.join(x, fname) for fname in names if os.path.exists(os.path.join(x, fname))]) if ('PREFIX' in os.environ): if (sys.platform == 'win32'): for p in search_dir(os.path.join(os.environ['PREFIX'], 'Library', 'bin')): (yield p) else: for p in search_dir(os.path.join(os.environ['PREFIX'], 'lib')): (yield p) if ('CONDA_PREFIX' in os.environ): if (sys.platform == 'win32'): for p in search_dir(os.path.join(os.environ['CONDA_PREFIX'], 'Library', 'bin')): (yield p) else: for p in search_dir(os.path.join(os.environ['CONDA_PREFIX'], 'lib')): (yield p) if ('LIBRARY_BIN' in os.environ): for p in search_dir(os.environ['LIBRARY_BIN']): (yield p) ctypespath = ctypes.util.find_library(('cdf.dll' if (sys.platform == 'win32') else 'cdf')) if ctypespath: (yield ctypespath)
def check_status(self, status, ignore=()): '\n Raise exception or warning based on return status of CDF call\n\n Parameters\n ==========\n status : int\n status returned by the C library\n\n Other Parameters\n ================\n ignore : sequence of ctypes.c_long\n CDF statuses to ignore. If any of these is returned by CDF library,\n any related warnings or exceptions will *not* be raised.\n (Default none).\n\n Raises\n ======\n CDFError : if status < CDF_WARN, indicating an error\n\n Warns\n =====\n CDFWarning : if CDF_WARN <= status < CDF_OK, indicating a warning.\n\n Returns\n =======\n out : int\n status (unchanged)\n ' if ((status == const.CDF_OK) or (status in ignore)): return status if (status < const.CDF_WARN): raise CDFError(status) else: warning = CDFWarning(status) warning.warn() return status
6,336,779,443,411,467,000
Raise exception or warning based on return status of CDF call Parameters ========== status : int status returned by the C library Other Parameters ================ ignore : sequence of ctypes.c_long CDF statuses to ignore. If any of these is returned by CDF library, any related warnings or exceptions will *not* be raised. (Default none). Raises ====== CDFError : if status < CDF_WARN, indicating an error Warns ===== CDFWarning : if CDF_WARN <= status < CDF_OK, indicating a warning. Returns ======= out : int status (unchanged)
pycdf/__init__.py
check_status
cpiker/condaCDF
python
def check_status(self, status, ignore=()): '\n Raise exception or warning based on return status of CDF call\n\n Parameters\n ==========\n status : int\n status returned by the C library\n\n Other Parameters\n ================\n ignore : sequence of ctypes.c_long\n CDF statuses to ignore. If any of these is returned by CDF library,\n any related warnings or exceptions will *not* be raised.\n (Default none).\n\n Raises\n ======\n CDFError : if status < CDF_WARN, indicating an error\n\n Warns\n =====\n CDFWarning : if CDF_WARN <= status < CDF_OK, indicating a warning.\n\n Returns\n =======\n out : int\n status (unchanged)\n ' if ((status == const.CDF_OK) or (status in ignore)): return status if (status < const.CDF_WARN): raise CDFError(status) else: warning = CDFWarning(status) warning.warn() return status
def call(self, *args, **kwargs): "\n Call the CDF internal interface\n\n Passes all parameters directly through to the CDFlib routine of the\n CDF library's C internal interface. Checks the return value with\n :meth:`check_status`.\n\n Terminal NULL is automatically added to args.\n\n Parameters\n ==========\n args : various, see :mod:`ctypes`\n Passed directly to the CDF library interface. Useful\n constants are defined in the :mod:`~pycdf.const` module.\n\n Other Parameters\n ================\n ignore : sequence of CDF statuses\n sequence of CDF statuses to ignore. If any of these\n is returned by CDF library, any related warnings or\n exceptions will *not* be raised.\n\n Returns\n =======\n out : int\n CDF status from the library\n\n Raises\n ======\n CDFError : if CDF library reports an error\n\n Warns\n =====\n CDFWarning : if CDF library reports a warning\n " if ('ignore' in kwargs): return self.check_status(self._library.CDFlib(*(args + (const.NULL_,))), kwargs['ignore']) else: return self.check_status(self._library.CDFlib(*(args + (const.NULL_,))))
1,458,759,059,557,401,000
Call the CDF internal interface Passes all parameters directly through to the CDFlib routine of the CDF library's C internal interface. Checks the return value with :meth:`check_status`. Terminal NULL is automatically added to args. Parameters ========== args : various, see :mod:`ctypes` Passed directly to the CDF library interface. Useful constants are defined in the :mod:`~pycdf.const` module. Other Parameters ================ ignore : sequence of CDF statuses sequence of CDF statuses to ignore. If any of these is returned by CDF library, any related warnings or exceptions will *not* be raised. Returns ======= out : int CDF status from the library Raises ====== CDFError : if CDF library reports an error Warns ===== CDFWarning : if CDF library reports a warning
pycdf/__init__.py
call
cpiker/condaCDF
python
def call(self, *args, **kwargs): "\n Call the CDF internal interface\n\n Passes all parameters directly through to the CDFlib routine of the\n CDF library's C internal interface. Checks the return value with\n :meth:`check_status`.\n\n Terminal NULL is automatically added to args.\n\n Parameters\n ==========\n args : various, see :mod:`ctypes`\n Passed directly to the CDF library interface. Useful\n constants are defined in the :mod:`~pycdf.const` module.\n\n Other Parameters\n ================\n ignore : sequence of CDF statuses\n sequence of CDF statuses to ignore. If any of these\n is returned by CDF library, any related warnings or\n exceptions will *not* be raised.\n\n Returns\n =======\n out : int\n CDF status from the library\n\n Raises\n ======\n CDFError : if CDF library reports an error\n\n Warns\n =====\n CDFWarning : if CDF library reports a warning\n " if ('ignore' in kwargs): return self.check_status(self._library.CDFlib(*(args + (const.NULL_,))), kwargs['ignore']) else: return self.check_status(self._library.CDFlib(*(args + (const.NULL_,))))
def set_backward(self, backward=True): '\n Set backward compatibility mode for new CDFs\n\n Unless backward compatible mode is set, CDF files created by\n the version 3 library can not be read by V2.\n\n Parameters\n ==========\n backward : boolean\n Set backward compatible mode if True; clear it if False.\n\n Raises\n ======\n ValueError : if backward=False and underlying CDF library is V2\n ' if (self.version[0] < 3): if (not backward): raise ValueError('Cannot disable backward-compatible mode for CDF version 2.') else: return self._library.CDFsetFileBackward((const.BACKWARDFILEon if backward else const.BACKWARDFILEoff))
6,009,374,051,950,584,000
Set backward compatibility mode for new CDFs Unless backward compatible mode is set, CDF files created by the version 3 library can not be read by V2. Parameters ========== backward : boolean Set backward compatible mode if True; clear it if False. Raises ====== ValueError : if backward=False and underlying CDF library is V2
pycdf/__init__.py
set_backward
cpiker/condaCDF
python
def set_backward(self, backward=True): '\n Set backward compatibility mode for new CDFs\n\n Unless backward compatible mode is set, CDF files created by\n the version 3 library can not be read by V2.\n\n Parameters\n ==========\n backward : boolean\n Set backward compatible mode if True; clear it if False.\n\n Raises\n ======\n ValueError : if backward=False and underlying CDF library is V2\n ' if (self.version[0] < 3): if (not backward): raise ValueError('Cannot disable backward-compatible mode for CDF version 2.') else: return self._library.CDFsetFileBackward((const.BACKWARDFILEon if backward else const.BACKWARDFILEoff))
def epoch_to_datetime(self, epoch): '\n Converts a CDF epoch value to a datetime\n\n Parameters\n ==========\n epoch : float\n epoch value from CDF\n\n Returns\n =======\n out : :class:`datetime.datetime`\n date and time corresponding to epoch. Invalid values are set to\n usual epoch invalid value, i.e. last moment of year 9999.\n\n See Also\n ========\n v_epoch_to_datetime\n ' yyyy = ctypes.c_long(0) mm = ctypes.c_long(0) dd = ctypes.c_long(0) hh = ctypes.c_long(0) min = ctypes.c_long(0) sec = ctypes.c_long(0) msec = ctypes.c_long(0) self._library.EPOCHbreakdown(ctypes.c_double(epoch), ctypes.byref(yyyy), ctypes.byref(mm), ctypes.byref(dd), ctypes.byref(hh), ctypes.byref(min), ctypes.byref(sec), ctypes.byref(msec)) if (yyyy.value <= 0): return datetime.datetime(9999, 12, 13, 23, 59, 59, 999000) else: return datetime.datetime(yyyy.value, mm.value, dd.value, hh.value, min.value, sec.value, (msec.value * 1000))
-4,194,116,941,151,881,700
Converts a CDF epoch value to a datetime Parameters ========== epoch : float epoch value from CDF Returns ======= out : :class:`datetime.datetime` date and time corresponding to epoch. Invalid values are set to usual epoch invalid value, i.e. last moment of year 9999. See Also ======== v_epoch_to_datetime
pycdf/__init__.py
epoch_to_datetime
cpiker/condaCDF
python
def epoch_to_datetime(self, epoch): '\n Converts a CDF epoch value to a datetime\n\n Parameters\n ==========\n epoch : float\n epoch value from CDF\n\n Returns\n =======\n out : :class:`datetime.datetime`\n date and time corresponding to epoch. Invalid values are set to\n usual epoch invalid value, i.e. last moment of year 9999.\n\n See Also\n ========\n v_epoch_to_datetime\n ' yyyy = ctypes.c_long(0) mm = ctypes.c_long(0) dd = ctypes.c_long(0) hh = ctypes.c_long(0) min = ctypes.c_long(0) sec = ctypes.c_long(0) msec = ctypes.c_long(0) self._library.EPOCHbreakdown(ctypes.c_double(epoch), ctypes.byref(yyyy), ctypes.byref(mm), ctypes.byref(dd), ctypes.byref(hh), ctypes.byref(min), ctypes.byref(sec), ctypes.byref(msec)) if (yyyy.value <= 0): return datetime.datetime(9999, 12, 13, 23, 59, 59, 999000) else: return datetime.datetime(yyyy.value, mm.value, dd.value, hh.value, min.value, sec.value, (msec.value * 1000))
def datetime_to_epoch(self, dt): '\n Converts a Python datetime to a CDF Epoch value\n\n Parameters\n ==========\n dt : :class:`datetime.datetime`\n date and time to convert\n\n Returns\n =======\n out : float\n epoch corresponding to dt\n\n See Also\n ========\n v_datetime_to_epoch\n ' if ((dt.tzinfo != None) and (dt.utcoffset() != None)): dt = (dt - dt.utcoffset()) dt.replace(tzinfo=None) micro = (dt.microsecond % 1000) if ((micro >= 500) and (dt.year < 9999)): dt += datetime.timedelta(0, 0, 1000) return self._library.computeEPOCH(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, int((dt.microsecond / 1000)))
7,494,655,386,455,670,000
Converts a Python datetime to a CDF Epoch value Parameters ========== dt : :class:`datetime.datetime` date and time to convert Returns ======= out : float epoch corresponding to dt See Also ======== v_datetime_to_epoch
pycdf/__init__.py
datetime_to_epoch
cpiker/condaCDF
python
def datetime_to_epoch(self, dt): '\n Converts a Python datetime to a CDF Epoch value\n\n Parameters\n ==========\n dt : :class:`datetime.datetime`\n date and time to convert\n\n Returns\n =======\n out : float\n epoch corresponding to dt\n\n See Also\n ========\n v_datetime_to_epoch\n ' if ((dt.tzinfo != None) and (dt.utcoffset() != None)): dt = (dt - dt.utcoffset()) dt.replace(tzinfo=None) micro = (dt.microsecond % 1000) if ((micro >= 500) and (dt.year < 9999)): dt += datetime.timedelta(0, 0, 1000) return self._library.computeEPOCH(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, int((dt.microsecond / 1000)))
def epoch16_to_datetime(self, epoch0, epoch1): '\n Converts a CDF epoch16 value to a datetime\n\n .. note::\n The call signature has changed since SpacePy 0.1.2. Formerly\n this method took a single argument with two values; now it\n requires two arguments (one for each value). To convert existing\n code, replace ``epoch16_to_datetime(epoch)`` with\n ``epoch16_to_datetime(*epoch)``.\n\n Parameters\n ==========\n epoch0 : float\n epoch16 value from CDF, first half\n epoch1 : float\n epoch16 value from CDF, second half\n\n Raises\n ======\n EpochError : if input invalid\n\n Returns\n =======\n out : :class:`datetime.datetime`\n date and time corresponding to epoch. Invalid values are set to\n usual epoch invalid value, i.e. last moment of year 9999.\n\n See Also\n ========\n v_epoch16_to_datetime\n ' yyyy = ctypes.c_long(0) mm = ctypes.c_long(0) dd = ctypes.c_long(0) hh = ctypes.c_long(0) min = ctypes.c_long(0) sec = ctypes.c_long(0) msec = ctypes.c_long(0) usec = ctypes.c_long(0) nsec = ctypes.c_long(0) psec = ctypes.c_long(0) self._library.EPOCH16breakdown((ctypes.c_double * 2)(epoch0, epoch1), ctypes.byref(yyyy), ctypes.byref(mm), ctypes.byref(dd), ctypes.byref(hh), ctypes.byref(min), ctypes.byref(sec), ctypes.byref(msec), ctypes.byref(usec), ctypes.byref(nsec), ctypes.byref(psec)) if (yyyy.value <= 0): return datetime.datetime(9999, 12, 13, 23, 59, 59, 999999) micro = int((((((float(msec.value) * 1000) + float(usec.value)) + (float(nsec.value) / 1000)) + (float(psec.value) / 1000000.0)) + 0.5)) if (micro < 1000000): return datetime.datetime(yyyy.value, mm.value, dd.value, hh.value, min.value, sec.value, micro) else: add_sec = int((micro / 1000000)) try: return (datetime.datetime(yyyy.value, mm.value, dd.value, hh.value, min.value, sec.value, (micro - (add_sec * 1000000))) + datetime.timedelta(seconds=add_sec)) except OverflowError: return datetime.datetime(datetime.MAXYEAR, 12, 31, 23, 59, 59, 999999)
916,899,503,809,279,500
Converts a CDF epoch16 value to a datetime .. note:: The call signature has changed since SpacePy 0.1.2. Formerly this method took a single argument with two values; now it requires two arguments (one for each value). To convert existing code, replace ``epoch16_to_datetime(epoch)`` with ``epoch16_to_datetime(*epoch)``. Parameters ========== epoch0 : float epoch16 value from CDF, first half epoch1 : float epoch16 value from CDF, second half Raises ====== EpochError : if input invalid Returns ======= out : :class:`datetime.datetime` date and time corresponding to epoch. Invalid values are set to usual epoch invalid value, i.e. last moment of year 9999. See Also ======== v_epoch16_to_datetime
pycdf/__init__.py
epoch16_to_datetime
cpiker/condaCDF
python
def epoch16_to_datetime(self, epoch0, epoch1): '\n Converts a CDF epoch16 value to a datetime\n\n .. note::\n The call signature has changed since SpacePy 0.1.2. Formerly\n this method took a single argument with two values; now it\n requires two arguments (one for each value). To convert existing\n code, replace ``epoch16_to_datetime(epoch)`` with\n ``epoch16_to_datetime(*epoch)``.\n\n Parameters\n ==========\n epoch0 : float\n epoch16 value from CDF, first half\n epoch1 : float\n epoch16 value from CDF, second half\n\n Raises\n ======\n EpochError : if input invalid\n\n Returns\n =======\n out : :class:`datetime.datetime`\n date and time corresponding to epoch. Invalid values are set to\n usual epoch invalid value, i.e. last moment of year 9999.\n\n See Also\n ========\n v_epoch16_to_datetime\n ' yyyy = ctypes.c_long(0) mm = ctypes.c_long(0) dd = ctypes.c_long(0) hh = ctypes.c_long(0) min = ctypes.c_long(0) sec = ctypes.c_long(0) msec = ctypes.c_long(0) usec = ctypes.c_long(0) nsec = ctypes.c_long(0) psec = ctypes.c_long(0) self._library.EPOCH16breakdown((ctypes.c_double * 2)(epoch0, epoch1), ctypes.byref(yyyy), ctypes.byref(mm), ctypes.byref(dd), ctypes.byref(hh), ctypes.byref(min), ctypes.byref(sec), ctypes.byref(msec), ctypes.byref(usec), ctypes.byref(nsec), ctypes.byref(psec)) if (yyyy.value <= 0): return datetime.datetime(9999, 12, 13, 23, 59, 59, 999999) micro = int((((((float(msec.value) * 1000) + float(usec.value)) + (float(nsec.value) / 1000)) + (float(psec.value) / 1000000.0)) + 0.5)) if (micro < 1000000): return datetime.datetime(yyyy.value, mm.value, dd.value, hh.value, min.value, sec.value, micro) else: add_sec = int((micro / 1000000)) try: return (datetime.datetime(yyyy.value, mm.value, dd.value, hh.value, min.value, sec.value, (micro - (add_sec * 1000000))) + datetime.timedelta(seconds=add_sec)) except OverflowError: return datetime.datetime(datetime.MAXYEAR, 12, 31, 23, 59, 59, 999999)
def datetime_to_epoch16(self, dt): '\n Converts a Python datetime to a CDF Epoch16 value\n\n Parameters\n ==========\n dt : :class:`datetime.datetime`\n date and time to convert\n\n Returns\n =======\n out : list of float\n epoch16 corresponding to dt\n\n See Also\n ========\n v_datetime_to_epoch16\n ' if ((dt.tzinfo != None) and (dt.utcoffset() != None)): dt = (dt - dt.utcoffset()) dt.replace(tzinfo=None) epoch16 = (ctypes.c_double * 2)((- 1.0), (- 1.0)) if self._library.computeEPOCH16(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, int((dt.microsecond / 1000)), (dt.microsecond % 1000), 0, 0, epoch16): return ((- 1.0), (- 1.0)) return (epoch16[0], epoch16[1])
-7,864,392,969,522,198,000
Converts a Python datetime to a CDF Epoch16 value Parameters ========== dt : :class:`datetime.datetime` date and time to convert Returns ======= out : list of float epoch16 corresponding to dt See Also ======== v_datetime_to_epoch16
pycdf/__init__.py
datetime_to_epoch16
cpiker/condaCDF
python
def datetime_to_epoch16(self, dt): '\n Converts a Python datetime to a CDF Epoch16 value\n\n Parameters\n ==========\n dt : :class:`datetime.datetime`\n date and time to convert\n\n Returns\n =======\n out : list of float\n epoch16 corresponding to dt\n\n See Also\n ========\n v_datetime_to_epoch16\n ' if ((dt.tzinfo != None) and (dt.utcoffset() != None)): dt = (dt - dt.utcoffset()) dt.replace(tzinfo=None) epoch16 = (ctypes.c_double * 2)((- 1.0), (- 1.0)) if self._library.computeEPOCH16(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, int((dt.microsecond / 1000)), (dt.microsecond % 1000), 0, 0, epoch16): return ((- 1.0), (- 1.0)) return (epoch16[0], epoch16[1])
def epoch_to_epoch16(self, epoch): '\n Converts a CDF EPOCH to a CDF EPOCH16 value\n\n Parameters\n ==========\n epoch : double\n EPOCH to convert. Lists and numpy arrays are acceptable.\n\n Returns\n =======\n out : (double, double)\n EPOCH16 corresponding to epoch\n ' e = numpy.require(epoch, numpy.float64) s = numpy.trunc((e / 1000.0)) res = numpy.hstack((s, ((e - (s * 1000.0)) * 1000000000.0))) if (len(res) <= 2): return res newshape = list(res.shape[0:(- 2)]) newshape.append((res.shape[(- 1)] // 2)) newshape.append(2) return numpy.rollaxis(res.reshape(newshape), (- 1), (- 2))
8,412,332,055,089,482,000
Converts a CDF EPOCH to a CDF EPOCH16 value Parameters ========== epoch : double EPOCH to convert. Lists and numpy arrays are acceptable. Returns ======= out : (double, double) EPOCH16 corresponding to epoch
pycdf/__init__.py
epoch_to_epoch16
cpiker/condaCDF
python
def epoch_to_epoch16(self, epoch): '\n Converts a CDF EPOCH to a CDF EPOCH16 value\n\n Parameters\n ==========\n epoch : double\n EPOCH to convert. Lists and numpy arrays are acceptable.\n\n Returns\n =======\n out : (double, double)\n EPOCH16 corresponding to epoch\n ' e = numpy.require(epoch, numpy.float64) s = numpy.trunc((e / 1000.0)) res = numpy.hstack((s, ((e - (s * 1000.0)) * 1000000000.0))) if (len(res) <= 2): return res newshape = list(res.shape[0:(- 2)]) newshape.append((res.shape[(- 1)] // 2)) newshape.append(2) return numpy.rollaxis(res.reshape(newshape), (- 1), (- 2))
def epoch_to_num(self, epoch): '\n Convert CDF EPOCH to matplotlib number.\n\n Same output as :func:`~matplotlib.dates.date2num` and useful for\n plotting large data sets without converting the times through datetime.\n\n Parameters\n ==========\n epoch : double\n EPOCH to convert. Lists and numpy arrays are acceptable.\n\n Returns\n =======\n out : double\n Floating point number representing days since 0001-01-01.\n ' return (((epoch - 31622400000.0) / (((24 * 60) * 60) * 1000.0)) + 1.0)
-281,976,541,988,294,500
Convert CDF EPOCH to matplotlib number. Same output as :func:`~matplotlib.dates.date2num` and useful for plotting large data sets without converting the times through datetime. Parameters ========== epoch : double EPOCH to convert. Lists and numpy arrays are acceptable. Returns ======= out : double Floating point number representing days since 0001-01-01.
pycdf/__init__.py
epoch_to_num
cpiker/condaCDF
python
def epoch_to_num(self, epoch): '\n Convert CDF EPOCH to matplotlib number.\n\n Same output as :func:`~matplotlib.dates.date2num` and useful for\n plotting large data sets without converting the times through datetime.\n\n Parameters\n ==========\n epoch : double\n EPOCH to convert. Lists and numpy arrays are acceptable.\n\n Returns\n =======\n out : double\n Floating point number representing days since 0001-01-01.\n ' return (((epoch - 31622400000.0) / (((24 * 60) * 60) * 1000.0)) + 1.0)
def epoch16_to_epoch(self, epoch16): '\n Converts a CDF EPOCH16 to a CDF EPOCH value\n\n Parameters\n ==========\n epoch16 : (double, double)\n EPOCH16 to convert. Lists and numpy arrays are acceptable.\n LAST dimension should be 2: the two pairs of EPOCH16\n\n Returns\n =======\n out : double\n EPOCH corresponding to epoch16\n ' e = numpy.require(epoch16, numpy.float64) return ((e[(..., 0)] * 1000.0) + numpy.round((e[(..., 1)] / 1000000000.0)))
1,951,505,701,562,002,700
Converts a CDF EPOCH16 to a CDF EPOCH value Parameters ========== epoch16 : (double, double) EPOCH16 to convert. Lists and numpy arrays are acceptable. LAST dimension should be 2: the two pairs of EPOCH16 Returns ======= out : double EPOCH corresponding to epoch16
pycdf/__init__.py
epoch16_to_epoch
cpiker/condaCDF
python
def epoch16_to_epoch(self, epoch16): '\n Converts a CDF EPOCH16 to a CDF EPOCH value\n\n Parameters\n ==========\n epoch16 : (double, double)\n EPOCH16 to convert. Lists and numpy arrays are acceptable.\n LAST dimension should be 2: the two pairs of EPOCH16\n\n Returns\n =======\n out : double\n EPOCH corresponding to epoch16\n ' e = numpy.require(epoch16, numpy.float64) return ((e[(..., 0)] * 1000.0) + numpy.round((e[(..., 1)] / 1000000000.0)))
def tt2000_to_datetime(self, tt2000): "\n Converts a CDF TT2000 value to a datetime\n\n .. note::\n Although TT2000 values support leapseconds, Python's datetime\n object does not. Any times after 23:59:59.999999 will\n be truncated to 23:59:59.999999.\n\n\n Parameters\n ==========\n tt2000 : int\n TT2000 value from CDF\n\n Raises\n ======\n EpochError : if input invalid\n\n Returns\n =======\n out : :class:`datetime.datetime`\n date and time corresponding to epoch. Invalid values are set to\n usual epoch invalid value, i.e. last moment of year 9999.\n\n See Also\n ========\n v_tt2000_to_datetime\n " yyyy = ctypes.c_double(0) mm = ctypes.c_double(0) dd = ctypes.c_double(0) hh = ctypes.c_double(0) min = ctypes.c_double(0) sec = ctypes.c_double(0) msec = ctypes.c_double(0) usec = ctypes.c_double(0) nsec = ctypes.c_double(0) self._library.breakdownTT2000(ctypes.c_longlong(tt2000), ctypes.byref(yyyy), ctypes.byref(mm), ctypes.byref(dd), ctypes.byref(hh), ctypes.byref(min), ctypes.byref(sec), ctypes.byref(msec), ctypes.byref(usec), ctypes.byref(nsec)) if (yyyy.value <= 0): return datetime.datetime(9999, 12, 13, 23, 59, 59, 999999) sec = int(sec.value) if (sec >= 60): return datetime.datetime(int(yyyy.value), int(mm.value), int(dd.value), int(hh.value), int(min.value), 59, 999999) micro = int(((((msec.value * 1000) + usec.value) + (nsec.value / 1000)) + 0.5)) if (micro < 1000000): return datetime.datetime(int(yyyy.value), int(mm.value), int(dd.value), int(hh.value), int(min.value), sec, micro) else: add_sec = int((micro / 1000000)) try: return (datetime.datetime(int(yyyy.value), int(mm.value), int(dd.value), int(hh.value), int(min.value), sec, (micro - (add_sec * 1000000))) + datetime.timedelta(seconds=add_sec)) except OverflowError: return datetime.datetime(datetime.MAXYEAR, 12, 31, 23, 59, 59, 999999)
923,339,050,099,816,700
Converts a CDF TT2000 value to a datetime .. note:: Although TT2000 values support leapseconds, Python's datetime object does not. Any times after 23:59:59.999999 will be truncated to 23:59:59.999999. Parameters ========== tt2000 : int TT2000 value from CDF Raises ====== EpochError : if input invalid Returns ======= out : :class:`datetime.datetime` date and time corresponding to epoch. Invalid values are set to usual epoch invalid value, i.e. last moment of year 9999. See Also ======== v_tt2000_to_datetime
pycdf/__init__.py
tt2000_to_datetime
cpiker/condaCDF
python
def tt2000_to_datetime(self, tt2000): "\n Converts a CDF TT2000 value to a datetime\n\n .. note::\n Although TT2000 values support leapseconds, Python's datetime\n object does not. Any times after 23:59:59.999999 will\n be truncated to 23:59:59.999999.\n\n\n Parameters\n ==========\n tt2000 : int\n TT2000 value from CDF\n\n Raises\n ======\n EpochError : if input invalid\n\n Returns\n =======\n out : :class:`datetime.datetime`\n date and time corresponding to epoch. Invalid values are set to\n usual epoch invalid value, i.e. last moment of year 9999.\n\n See Also\n ========\n v_tt2000_to_datetime\n " yyyy = ctypes.c_double(0) mm = ctypes.c_double(0) dd = ctypes.c_double(0) hh = ctypes.c_double(0) min = ctypes.c_double(0) sec = ctypes.c_double(0) msec = ctypes.c_double(0) usec = ctypes.c_double(0) nsec = ctypes.c_double(0) self._library.breakdownTT2000(ctypes.c_longlong(tt2000), ctypes.byref(yyyy), ctypes.byref(mm), ctypes.byref(dd), ctypes.byref(hh), ctypes.byref(min), ctypes.byref(sec), ctypes.byref(msec), ctypes.byref(usec), ctypes.byref(nsec)) if (yyyy.value <= 0): return datetime.datetime(9999, 12, 13, 23, 59, 59, 999999) sec = int(sec.value) if (sec >= 60): return datetime.datetime(int(yyyy.value), int(mm.value), int(dd.value), int(hh.value), int(min.value), 59, 999999) micro = int(((((msec.value * 1000) + usec.value) + (nsec.value / 1000)) + 0.5)) if (micro < 1000000): return datetime.datetime(int(yyyy.value), int(mm.value), int(dd.value), int(hh.value), int(min.value), sec, micro) else: add_sec = int((micro / 1000000)) try: return (datetime.datetime(int(yyyy.value), int(mm.value), int(dd.value), int(hh.value), int(min.value), sec, (micro - (add_sec * 1000000))) + datetime.timedelta(seconds=add_sec)) except OverflowError: return datetime.datetime(datetime.MAXYEAR, 12, 31, 23, 59, 59, 999999)
def datetime_to_tt2000(self, dt): '\n Converts a Python datetime to a CDF TT2000 value\n\n Parameters\n ==========\n dt : :class:`datetime.datetime`\n date and time to convert\n\n Returns\n =======\n out : int\n tt2000 corresponding to dt\n\n See Also\n ========\n v_datetime_to_tt2000\n ' if ((dt.tzinfo != None) and (dt.utcoffset() != None)): dt = (dt - dt.utcoffset()) dt = dt.replace(tzinfo=None) if (dt == datetime.datetime.max): return (- (2 ** 63)) return self._library.computeTT2000(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, int((dt.microsecond / 1000)), (dt.microsecond % 1000), 0)
-3,065,209,170,280,399,400
Converts a Python datetime to a CDF TT2000 value Parameters ========== dt : :class:`datetime.datetime` date and time to convert Returns ======= out : int tt2000 corresponding to dt See Also ======== v_datetime_to_tt2000
pycdf/__init__.py
datetime_to_tt2000
cpiker/condaCDF
python
def datetime_to_tt2000(self, dt): '\n Converts a Python datetime to a CDF TT2000 value\n\n Parameters\n ==========\n dt : :class:`datetime.datetime`\n date and time to convert\n\n Returns\n =======\n out : int\n tt2000 corresponding to dt\n\n See Also\n ========\n v_datetime_to_tt2000\n ' if ((dt.tzinfo != None) and (dt.utcoffset() != None)): dt = (dt - dt.utcoffset()) dt = dt.replace(tzinfo=None) if (dt == datetime.datetime.max): return (- (2 ** 63)) return self._library.computeTT2000(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, int((dt.microsecond / 1000)), (dt.microsecond % 1000), 0)
def _datetime_to_tt2000_typepunned(self, dt): '\n Converts a Python datetime to a CDF TT2000 value\n\n Typepunned version that passes doubles as longlongs, to get around\n ARM calling convention oddness.\n\n Parameters\n ==========\n dt : :class:`datetime.datetime`\n date and time to convert\n\n Returns\n =======\n out : int\n tt2000 corresponding to dt\n\n See Also\n ========\n v_datetime_to_tt2000\n ' c_ll_p = ctypes.POINTER(ctypes.c_longlong) if ((dt.tzinfo != None) and (dt.utcoffset() != None)): dt = (dt - dt.utcoffset()) dt = dt.replace(tzinfo=None) if (dt == datetime.datetime.max): return (- (2 ** 63)) return self._library.computeTT2000(ctypes.cast(ctypes.pointer(ctypes.c_double(dt.year)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(dt.month)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(dt.day)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(dt.hour)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(dt.minute)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(dt.second)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double((dt.microsecond // 1000))), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double((dt.microsecond % 1000))), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(0)), c_ll_p).contents)
-4,813,442,311,137,848,000
Converts a Python datetime to a CDF TT2000 value Typepunned version that passes doubles as longlongs, to get around ARM calling convention oddness. Parameters ========== dt : :class:`datetime.datetime` date and time to convert Returns ======= out : int tt2000 corresponding to dt See Also ======== v_datetime_to_tt2000
pycdf/__init__.py
_datetime_to_tt2000_typepunned
cpiker/condaCDF
python
def _datetime_to_tt2000_typepunned(self, dt): '\n Converts a Python datetime to a CDF TT2000 value\n\n Typepunned version that passes doubles as longlongs, to get around\n ARM calling convention oddness.\n\n Parameters\n ==========\n dt : :class:`datetime.datetime`\n date and time to convert\n\n Returns\n =======\n out : int\n tt2000 corresponding to dt\n\n See Also\n ========\n v_datetime_to_tt2000\n ' c_ll_p = ctypes.POINTER(ctypes.c_longlong) if ((dt.tzinfo != None) and (dt.utcoffset() != None)): dt = (dt - dt.utcoffset()) dt = dt.replace(tzinfo=None) if (dt == datetime.datetime.max): return (- (2 ** 63)) return self._library.computeTT2000(ctypes.cast(ctypes.pointer(ctypes.c_double(dt.year)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(dt.month)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(dt.day)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(dt.hour)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(dt.minute)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(dt.second)), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double((dt.microsecond // 1000))), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double((dt.microsecond % 1000))), c_ll_p).contents, ctypes.cast(ctypes.pointer(ctypes.c_double(0)), c_ll_p).contents)
def epoch_to_tt2000(self, epoch): '\n Converts a CDF EPOCH to a CDF TT2000 value\n\n Parameters\n ==========\n epoch : double\n EPOCH to convert\n\n Returns\n =======\n out : int\n tt2000 corresponding to epoch\n\n See Also\n ========\n v_epoch_to_tt2000\n ' return self._library.CDF_TT2000_from_UTC_EPOCH(epoch)
-8,411,049,353,749,146,000
Converts a CDF EPOCH to a CDF TT2000 value Parameters ========== epoch : double EPOCH to convert Returns ======= out : int tt2000 corresponding to epoch See Also ======== v_epoch_to_tt2000
pycdf/__init__.py
epoch_to_tt2000
cpiker/condaCDF
python
def epoch_to_tt2000(self, epoch): '\n Converts a CDF EPOCH to a CDF TT2000 value\n\n Parameters\n ==========\n epoch : double\n EPOCH to convert\n\n Returns\n =======\n out : int\n tt2000 corresponding to epoch\n\n See Also\n ========\n v_epoch_to_tt2000\n ' return self._library.CDF_TT2000_from_UTC_EPOCH(epoch)
def tt2000_to_epoch(self, tt2000): '\n Converts a CDF TT2000 value to a CDF EPOCH\n\n .. note::\n Although TT2000 values support leapseconds, CDF EPOCH values\n do not. Times during leapseconds are rounded up to beginning\n of the next day.\n\n\n Parameters\n ==========\n tt2000 : int\n TT2000 value from CDF\n\n Raises\n ======\n EpochError : if input invalid\n\n Returns\n =======\n out : double\n EPOCH corresponding to the TT2000 input time\n\n See Also\n ========\n v_tt2000_to_epoch\n ' return self._library.CDF_TT2000_to_UTC_EPOCH(tt2000)
-1,038,626,129,928,731,500
Converts a CDF TT2000 value to a CDF EPOCH .. note:: Although TT2000 values support leapseconds, CDF EPOCH values do not. Times during leapseconds are rounded up to beginning of the next day. Parameters ========== tt2000 : int TT2000 value from CDF Raises ====== EpochError : if input invalid Returns ======= out : double EPOCH corresponding to the TT2000 input time See Also ======== v_tt2000_to_epoch
pycdf/__init__.py
tt2000_to_epoch
cpiker/condaCDF
python
def tt2000_to_epoch(self, tt2000): '\n Converts a CDF TT2000 value to a CDF EPOCH\n\n .. note::\n Although TT2000 values support leapseconds, CDF EPOCH values\n do not. Times during leapseconds are rounded up to beginning\n of the next day.\n\n\n Parameters\n ==========\n tt2000 : int\n TT2000 value from CDF\n\n Raises\n ======\n EpochError : if input invalid\n\n Returns\n =======\n out : double\n EPOCH corresponding to the TT2000 input time\n\n See Also\n ========\n v_tt2000_to_epoch\n ' return self._library.CDF_TT2000_to_UTC_EPOCH(tt2000)
def epoch16_to_tt2000(self, epoch0, epoch1): '\n Converts a CDF epoch16 value to TT2000\n\n .. note::\n Because TT2000 does not support picoseconds, the picoseconds\n value in epoch is ignored (i.e., truncated.)\n\n Parameters\n ==========\n epoch0 : float\n epoch16 value from CDF, first half\n epoch1 : float\n epoch16 value from CDF, second half\n\n Raises\n ======\n EpochError : if input invalid\n\n Returns\n =======\n out : long\n TT2000 corresponding to epoch.\n\n See Also\n ========\n v_epoch16_to_tt2000\n ' return self._library.CDF_TT2000_from_UTC_EPOCH16((ctypes.c_double * 2)(epoch0, epoch1))
7,639,834,107,065,348,000
Converts a CDF epoch16 value to TT2000 .. note:: Because TT2000 does not support picoseconds, the picoseconds value in epoch is ignored (i.e., truncated.) Parameters ========== epoch0 : float epoch16 value from CDF, first half epoch1 : float epoch16 value from CDF, second half Raises ====== EpochError : if input invalid Returns ======= out : long TT2000 corresponding to epoch. See Also ======== v_epoch16_to_tt2000
pycdf/__init__.py
epoch16_to_tt2000
cpiker/condaCDF
python
def epoch16_to_tt2000(self, epoch0, epoch1): '\n Converts a CDF epoch16 value to TT2000\n\n .. note::\n Because TT2000 does not support picoseconds, the picoseconds\n value in epoch is ignored (i.e., truncated.)\n\n Parameters\n ==========\n epoch0 : float\n epoch16 value from CDF, first half\n epoch1 : float\n epoch16 value from CDF, second half\n\n Raises\n ======\n EpochError : if input invalid\n\n Returns\n =======\n out : long\n TT2000 corresponding to epoch.\n\n See Also\n ========\n v_epoch16_to_tt2000\n ' return self._library.CDF_TT2000_from_UTC_EPOCH16((ctypes.c_double * 2)(epoch0, epoch1))
def tt2000_to_epoch16(self, tt2000): '\n Converts a CDF TT2000 value to a CDF EPOCH16\n\n .. note::\n Although TT2000 values support leapseconds, CDF EPOCH16 values\n do not. Times during leapseconds are rounded up to beginning\n of the next day.\n\n Parameters\n ==========\n tt2000 : int\n TT2000 value from CDF\n\n Raises\n ======\n EpochError : if input invalid\n\n Returns\n =======\n out : double, double\n EPOCH16 corresponding to the TT2000 input time\n\n See Also\n ========\n v_tt2000_to_epoch16\n ' epoch16 = (ctypes.c_double * 2)((- 1.0), (- 1.0)) if self._library.CDF_TT2000_to_UTC_EPOCH16(tt2000, epoch16): return ((- 1.0), (- 1.0)) return (epoch16[0], epoch16[1])
7,700,580,540,914,393,000
Converts a CDF TT2000 value to a CDF EPOCH16 .. note:: Although TT2000 values support leapseconds, CDF EPOCH16 values do not. Times during leapseconds are rounded up to beginning of the next day. Parameters ========== tt2000 : int TT2000 value from CDF Raises ====== EpochError : if input invalid Returns ======= out : double, double EPOCH16 corresponding to the TT2000 input time See Also ======== v_tt2000_to_epoch16
pycdf/__init__.py
tt2000_to_epoch16
cpiker/condaCDF
python
def tt2000_to_epoch16(self, tt2000): '\n Converts a CDF TT2000 value to a CDF EPOCH16\n\n .. note::\n Although TT2000 values support leapseconds, CDF EPOCH16 values\n do not. Times during leapseconds are rounded up to beginning\n of the next day.\n\n Parameters\n ==========\n tt2000 : int\n TT2000 value from CDF\n\n Raises\n ======\n EpochError : if input invalid\n\n Returns\n =======\n out : double, double\n EPOCH16 corresponding to the TT2000 input time\n\n See Also\n ========\n v_tt2000_to_epoch16\n ' epoch16 = (ctypes.c_double * 2)((- 1.0), (- 1.0)) if self._library.CDF_TT2000_to_UTC_EPOCH16(tt2000, epoch16): return ((- 1.0), (- 1.0)) return (epoch16[0], epoch16[1])
def _bad_tt2000(*args, **kwargs): 'Convenience function for complaining that TT2000 not supported' raise NotImplementedError('TT2000 functions require CDF library 3.4.0 or later')
5,016,153,546,655,569,000
Convenience function for complaining that TT2000 not supported
pycdf/__init__.py
_bad_tt2000
cpiker/condaCDF
python
def _bad_tt2000(*args, **kwargs): raise NotImplementedError('TT2000 functions require CDF library 3.4.0 or later')
def __init__(self, status): '\n Create a CDF Exception\n\n Uses CDF C library to look up an appropriate error message.\n\n Parameters\n ==========\n status : ctypes.c_long\n CDF status\n ' self.status = status self.string = (('CDF error ' + repr(status)) + ', unable to get details.') message = ctypes.create_string_buffer((const.CDF_STATUSTEXT_LEN + 1)) try: retval = lib._library.CDFlib(const.SELECT_, const.CDF_STATUS_, ctypes.c_long(status), const.GET_, const.STATUS_TEXT_, message, const.NULL_) if (retval == const.CDF_OK): if isinstance(message.value, str): self.string = message.value elif isinstance(message.value, bytes): self.string = message.value.decode() except: pass
-3,756,434,079,576,152,000
Create a CDF Exception Uses CDF C library to look up an appropriate error message. Parameters ========== status : ctypes.c_long CDF status
pycdf/__init__.py
__init__
cpiker/condaCDF
python
def __init__(self, status): '\n Create a CDF Exception\n\n Uses CDF C library to look up an appropriate error message.\n\n Parameters\n ==========\n status : ctypes.c_long\n CDF status\n ' self.status = status self.string = (('CDF error ' + repr(status)) + ', unable to get details.') message = ctypes.create_string_buffer((const.CDF_STATUSTEXT_LEN + 1)) try: retval = lib._library.CDFlib(const.SELECT_, const.CDF_STATUS_, ctypes.c_long(status), const.GET_, const.STATUS_TEXT_, message, const.NULL_) if (retval == const.CDF_OK): if isinstance(message.value, str): self.string = message.value elif isinstance(message.value, bytes): self.string = message.value.decode() except: pass
def __str__(self): '\n Error string associated with the library error.\n\n Returns\n =======\n out : str\n Error message from the CDF library.\n ' return self.string
-6,240,089,809,281,292,000
Error string associated with the library error. Returns ======= out : str Error message from the CDF library.
pycdf/__init__.py
__str__
cpiker/condaCDF
python
def __str__(self): '\n Error string associated with the library error.\n\n Returns\n =======\n out : str\n Error message from the CDF library.\n ' return self.string
def warn(self, level=4): '\n Issues a warning based on the information stored in my exception\n\n Intended for use in check_status or similar wrapper function.\n\n Other Parameters\n ================\n level : int\n optional (default 3), how far up the stack the warning should\n be reported. Passed directly to :class:`warnings.warn`.\n ' warnings.warn(self, self.__class__, level)
8,221,289,147,594,777,000
Issues a warning based on the information stored in my exception Intended for use in check_status or similar wrapper function. Other Parameters ================ level : int optional (default 3), how far up the stack the warning should be reported. Passed directly to :class:`warnings.warn`.
pycdf/__init__.py
warn
cpiker/condaCDF
python
def warn(self, level=4): '\n Issues a warning based on the information stored in my exception\n\n Intended for use in check_status or similar wrapper function.\n\n Other Parameters\n ================\n level : int\n optional (default 3), how far up the stack the warning should\n be reported. Passed directly to :class:`warnings.warn`.\n ' warnings.warn(self, self.__class__, level)
def __init__(self, pathname, masterpath=None, create=None, readonly=None): "Open or create a CDF file.\n\n Parameters\n ==========\n pathname : string\n name of the file to open or create\n masterpath : string\n name of the master CDF file to use in creating\n a new file. If not provided, an existing file is\n opened; if provided but evaluates to ``False``\n (e.g., ``''``), an empty new CDF is created.\n create : bool\n Create a new CDF even if masterpath isn't provided\n readonly : bool\n Open the CDF read-only. Default True if opening an\n existing CDF; False if creating a new one.\n\n Raises\n ======\n CDFError\n if CDF library reports an error\n CDFWarning\n if CDF library reports a warning and interpreter\n is set to error on warnings.\n\n Examples\n ========\n Open a CDF by creating a CDF object, e.g.:\n >>> cdffile = pycdf.CDF('cdf_filename.cdf')\n Be sure to :py:meth:`pycdf.CDF.close` or :py:meth:`pycdf.CDF.save`\n when done.\n " if (masterpath is not None): if (create is False): raise ValueError('Cannot specify a master CDF without creating a CDF') if (readonly is True): raise ValueError('Cannot create a CDF in readonly mode') if (create and readonly): raise ValueError('Cannot create a CDF in readonly mode') try: self.pathname = pathname.encode() except AttributeError: raise ValueError('pathname must be string-like: {0}'.format(pathname)) self._handle = ctypes.c_void_p(None) self._opened = False if ((masterpath is None) and (not create)): self._open((True if (readonly is None) else readonly)) elif masterpath: self._from_master(masterpath.encode()) else: self._create() lib.call(const.SELECT_, const.CDF_zMODE_, ctypes.c_long(2)) self._attrlistref = weakref.ref(gAttrList(self)) self.backward = (self.version()[0] < 3) self._var_nums = {} 'Cache of name-to-number mappings for variables in this CDF' self._attr_info = {} 'Cache of name-to-(number, global) mappings for attributes\n in this CDF'
-1,831,719,878,659,203,800
Open or create a CDF file. Parameters ========== pathname : string name of the file to open or create masterpath : string name of the master CDF file to use in creating a new file. If not provided, an existing file is opened; if provided but evaluates to ``False`` (e.g., ``''``), an empty new CDF is created. create : bool Create a new CDF even if masterpath isn't provided readonly : bool Open the CDF read-only. Default True if opening an existing CDF; False if creating a new one. Raises ====== CDFError if CDF library reports an error CDFWarning if CDF library reports a warning and interpreter is set to error on warnings. Examples ======== Open a CDF by creating a CDF object, e.g.: >>> cdffile = pycdf.CDF('cdf_filename.cdf') Be sure to :py:meth:`pycdf.CDF.close` or :py:meth:`pycdf.CDF.save` when done.
pycdf/__init__.py
__init__
cpiker/condaCDF
python
def __init__(self, pathname, masterpath=None, create=None, readonly=None): "Open or create a CDF file.\n\n Parameters\n ==========\n pathname : string\n name of the file to open or create\n masterpath : string\n name of the master CDF file to use in creating\n a new file. If not provided, an existing file is\n opened; if provided but evaluates to ``False``\n (e.g., ````), an empty new CDF is created.\n create : bool\n Create a new CDF even if masterpath isn't provided\n readonly : bool\n Open the CDF read-only. Default True if opening an\n existing CDF; False if creating a new one.\n\n Raises\n ======\n CDFError\n if CDF library reports an error\n CDFWarning\n if CDF library reports a warning and interpreter\n is set to error on warnings.\n\n Examples\n ========\n Open a CDF by creating a CDF object, e.g.:\n >>> cdffile = pycdf.CDF('cdf_filename.cdf')\n Be sure to :py:meth:`pycdf.CDF.close` or :py:meth:`pycdf.CDF.save`\n when done.\n " if (masterpath is not None): if (create is False): raise ValueError('Cannot specify a master CDF without creating a CDF') if (readonly is True): raise ValueError('Cannot create a CDF in readonly mode') if (create and readonly): raise ValueError('Cannot create a CDF in readonly mode') try: self.pathname = pathname.encode() except AttributeError: raise ValueError('pathname must be string-like: {0}'.format(pathname)) self._handle = ctypes.c_void_p(None) self._opened = False if ((masterpath is None) and (not create)): self._open((True if (readonly is None) else readonly)) elif masterpath: self._from_master(masterpath.encode()) else: self._create() lib.call(const.SELECT_, const.CDF_zMODE_, ctypes.c_long(2)) self._attrlistref = weakref.ref(gAttrList(self)) self.backward = (self.version()[0] < 3) self._var_nums = {} 'Cache of name-to-number mappings for variables in this CDF' self._attr_info = {} 'Cache of name-to-(number, global) mappings for attributes\n in this CDF'
def __del__(self): 'Destructor; called when CDF object is destroyed.\n\n Close CDF file if there is still a valid handle.\n .. note::\n To avoid data loss, explicitly call :py:meth:`pycdf.CDF.close` \n or :py:meth:`pycdf.CDF.save`.\n ' if self._opened: self.close()
-7,342,038,237,845,616,000
Destructor; called when CDF object is destroyed. Close CDF file if there is still a valid handle. .. note:: To avoid data loss, explicitly call :py:meth:`pycdf.CDF.close` or :py:meth:`pycdf.CDF.save`.
pycdf/__init__.py
__del__
cpiker/condaCDF
python
def __del__(self): 'Destructor; called when CDF object is destroyed.\n\n Close CDF file if there is still a valid handle.\n .. note::\n To avoid data loss, explicitly call :py:meth:`pycdf.CDF.close` \n or :py:meth:`pycdf.CDF.save`.\n ' if self._opened: self.close()
def __delitem__(self, name): "Delete a zVariable in this CDF, by name or number\n\n Parameters\n ==========\n name : string or int\n Name or number of the CDF variable\n .. note:\n Variable numbers may change if variables are added or removed.\n\n Examples\n ========\n Delete the variable ``Epoch`` from the open CDF file ``cdffile``.\n >>> del cdffile['Epoch']\n " self[name]._delete()
1,720,612,374,302,670,300
Delete a zVariable in this CDF, by name or number Parameters ========== name : string or int Name or number of the CDF variable .. note: Variable numbers may change if variables are added or removed. Examples ======== Delete the variable ``Epoch`` from the open CDF file ``cdffile``. >>> del cdffile['Epoch']
pycdf/__init__.py
__delitem__
cpiker/condaCDF
python
def __delitem__(self, name): "Delete a zVariable in this CDF, by name or number\n\n Parameters\n ==========\n name : string or int\n Name or number of the CDF variable\n .. note:\n Variable numbers may change if variables are added or removed.\n\n Examples\n ========\n Delete the variable ``Epoch`` from the open CDF file ``cdffile``.\n >>> del cdffile['Epoch']\n " self[name]._delete()
def __enter__(self): 'Context manager entrance function.' return self
3,041,422,879,654,393,300
Context manager entrance function.
pycdf/__init__.py
__enter__
cpiker/condaCDF
python
def __enter__(self): return self
def __exit__(self, type, value, traceback): 'Context manager exit function.\n\n Close CDF file.\n ' self.close()
-737,521,970,676,473,300
Context manager exit function. Close CDF file.
pycdf/__init__.py
__exit__
cpiker/condaCDF
python
def __exit__(self, type, value, traceback): 'Context manager exit function.\n\n Close CDF file.\n ' self.close()
def __getitem__(self, name): 'Gets a zVariable in this CDF, by name or number\n\n The CDF acts like a dict\n\n @param name: Name or number of the CDF variable\n @type name: string or int\n @return: CDF variable named or numbered L{name}\n @rtype: :py:class:`pycdf.Var`\n @raise KeyError: for pretty much any problem in lookup\n @note: variable numbers may change if variables are added or removed.\n ' try: return Var(self, name) except CDFException as e: raise KeyError('{0}: {1}'.format(name, e))
9,036,144,436,764,017,000
Gets a zVariable in this CDF, by name or number The CDF acts like a dict @param name: Name or number of the CDF variable @type name: string or int @return: CDF variable named or numbered L{name} @rtype: :py:class:`pycdf.Var` @raise KeyError: for pretty much any problem in lookup @note: variable numbers may change if variables are added or removed.
pycdf/__init__.py
__getitem__
cpiker/condaCDF
python
def __getitem__(self, name): 'Gets a zVariable in this CDF, by name or number\n\n The CDF acts like a dict\n\n @param name: Name or number of the CDF variable\n @type name: string or int\n @return: CDF variable named or numbered L{name}\n @rtype: :py:class:`pycdf.Var`\n @raise KeyError: for pretty much any problem in lookup\n @note: variable numbers may change if variables are added or removed.\n ' try: return Var(self, name) except CDFException as e: raise KeyError('{0}: {1}'.format(name, e))
def __setitem__(self, name, data): 'Writes data to a zVariable in this CDF\n\n If the zVariable does not exist, will create one matching\n L{data}. If it does exist, will attempt to write L{data}\n to it without changing the type or dimensions.\n\n @param name: name or number of the variable to write\n @type name: str or int\n @param data: data to write, or a :py:class:`pycdf.Var` to copy\n ' if isinstance(data, Var): self.clone(data, name) elif (name in self): self[name][...] = data if hasattr(data, 'attrs'): self[name].attrs.clone(data.attrs) else: self.new(name, data)
-3,767,715,177,205,707,000
Writes data to a zVariable in this CDF If the zVariable does not exist, will create one matching L{data}. If it does exist, will attempt to write L{data} to it without changing the type or dimensions. @param name: name or number of the variable to write @type name: str or int @param data: data to write, or a :py:class:`pycdf.Var` to copy
pycdf/__init__.py
__setitem__
cpiker/condaCDF
python
def __setitem__(self, name, data): 'Writes data to a zVariable in this CDF\n\n If the zVariable does not exist, will create one matching\n L{data}. If it does exist, will attempt to write L{data}\n to it without changing the type or dimensions.\n\n @param name: name or number of the variable to write\n @type name: str or int\n @param data: data to write, or a :py:class:`pycdf.Var` to copy\n ' if isinstance(data, Var): self.clone(data, name) elif (name in self): self[name][...] = data if hasattr(data, 'attrs'): self[name].attrs.clone(data.attrs) else: self.new(name, data)
def __iter__(self, current=0): 'Iterates over zVars in CDF\n\n Iterators for dicts return keys\n @note: Returned in variable-number order\n ' while (current < self.__len__()): name = self[current].name() value = (yield name) if (value is None): current += 1 else: current = self[value]._num() current += 1
2,986,849,771,068,039,700
Iterates over zVars in CDF Iterators for dicts return keys @note: Returned in variable-number order
pycdf/__init__.py
__iter__
cpiker/condaCDF
python
def __iter__(self, current=0): 'Iterates over zVars in CDF\n\n Iterators for dicts return keys\n @note: Returned in variable-number order\n ' while (current < self.__len__()): name = self[current].name() value = (yield name) if (value is None): current += 1 else: current = self[value]._num() current += 1
def __len__(self): "Implements 'length' of CDF (number of zVars)\n\n @return: number of zVars in the CDF\n @rtype: int\n " count = ctypes.c_long(0) self._call(const.GET_, const.CDF_NUMzVARS_, ctypes.byref(count)) return count.value
-7,621,790,437,472,465,000
Implements 'length' of CDF (number of zVars) @return: number of zVars in the CDF @rtype: int
pycdf/__init__.py
__len__
cpiker/condaCDF
python
def __len__(self): "Implements 'length' of CDF (number of zVars)\n\n @return: number of zVars in the CDF\n @rtype: int\n " count = ctypes.c_long(0) self._call(const.GET_, const.CDF_NUMzVARS_, ctypes.byref(count)) return count.value
def __contains__(self, key): "Determines whether a particular variable name is in the CDF\n\n @note: Essentially an efficiency function; L{__iter__} is called\n if this isn't defined\n @param key: key/variable name to check\n @type key: string\n @return: True if L{key} is the name of a variable in CDF, else False\n @rtype: Boolean\n " try: foo = self[key] return True except KeyError as e: expected = (str(key) + ': NO_SUCH_VAR: Named variable not found in this CDF.') if (expected in e.args): return False raise
-3,203,029,772,389,402,000
Determines whether a particular variable name is in the CDF @note: Essentially an efficiency function; L{__iter__} is called if this isn't defined @param key: key/variable name to check @type key: string @return: True if L{key} is the name of a variable in CDF, else False @rtype: Boolean
pycdf/__init__.py
__contains__
cpiker/condaCDF
python
def __contains__(self, key): "Determines whether a particular variable name is in the CDF\n\n @note: Essentially an efficiency function; L{__iter__} is called\n if this isn't defined\n @param key: key/variable name to check\n @type key: string\n @return: True if L{key} is the name of a variable in CDF, else False\n @rtype: Boolean\n " try: foo = self[key] return True except KeyError as e: expected = (str(key) + ': NO_SUCH_VAR: Named variable not found in this CDF.') if (expected in e.args): return False raise
def __repr__(self): "Returns representation of CDF\n\n Cannot return anything that can be eval'd to create a copy of the\n CDF, so just wrap the informal representation in angle brackets.\n @return: all the data in this list of attributes\n @rtype: str\n " return (('<CDF:\n' + str(self)) + '\n>')
-1,682,708,596,007,814,400
Returns representation of CDF Cannot return anything that can be eval'd to create a copy of the CDF, so just wrap the informal representation in angle brackets. @return: all the data in this list of attributes @rtype: str
pycdf/__init__.py
__repr__
cpiker/condaCDF
python
def __repr__(self): "Returns representation of CDF\n\n Cannot return anything that can be eval'd to create a copy of the\n CDF, so just wrap the informal representation in angle brackets.\n @return: all the data in this list of attributes\n @rtype: str\n " return (('<CDF:\n' + str(self)) + '\n>')
def __str__(self): "Returns a string representation of the CDF\n\n This is an 'informal' representation in that it cannot be evaluated\n directly to create a :py:class:`pycdf.CDF`, just the names, types, and sizes of all\n variables. (Attributes are not listed.)\n\n @return: description of the variables in the CDF\n @rtype: str\n " if self._opened: return '\n'.join([((key + ': ') + str(value)) for (key, value) in sorted(self.items())]) elif isinstance(self.pathname, str): return 'Closed CDF {0}'.format(self.pathname) else: return 'Closed CDF {0}'.format(self.pathname.decode('ascii'))
5,551,468,211,548,867,000
Returns a string representation of the CDF This is an 'informal' representation in that it cannot be evaluated directly to create a :py:class:`pycdf.CDF`, just the names, types, and sizes of all variables. (Attributes are not listed.) @return: description of the variables in the CDF @rtype: str
pycdf/__init__.py
__str__
cpiker/condaCDF
python
def __str__(self): "Returns a string representation of the CDF\n\n This is an 'informal' representation in that it cannot be evaluated\n directly to create a :py:class:`pycdf.CDF`, just the names, types, and sizes of all\n variables. (Attributes are not listed.)\n\n @return: description of the variables in the CDF\n @rtype: str\n " if self._opened: return '\n'.join([((key + ': ') + str(value)) for (key, value) in sorted(self.items())]) elif isinstance(self.pathname, str): return 'Closed CDF {0}'.format(self.pathname) else: return 'Closed CDF {0}'.format(self.pathname.decode('ascii'))
def _open(self, readonly=True): 'Opens the CDF file (called on init)\n\n Will open an existing CDF file read/write.\n\n Raises\n ======\n CDFError : if CDF library reports an error\n CDFWarning : if CDF library reports a warning and interpreter\n is set to error on warnings.\n .. note:\n Not intended for direct call; pass parameters to\n :py:class:`pycdf.CDF` constructor.\n ' lib.call(const.OPEN_, const.CDF_, self.pathname, ctypes.byref(self._handle)) self._opened = True if readonly: self.readonly(readonly)
1,323,780,822,043,422,700
Opens the CDF file (called on init) Will open an existing CDF file read/write. Raises ====== CDFError : if CDF library reports an error CDFWarning : if CDF library reports a warning and interpreter is set to error on warnings. .. note: Not intended for direct call; pass parameters to :py:class:`pycdf.CDF` constructor.
pycdf/__init__.py
_open
cpiker/condaCDF
python
def _open(self, readonly=True): 'Opens the CDF file (called on init)\n\n Will open an existing CDF file read/write.\n\n Raises\n ======\n CDFError : if CDF library reports an error\n CDFWarning : if CDF library reports a warning and interpreter\n is set to error on warnings.\n .. note:\n Not intended for direct call; pass parameters to\n :py:class:`pycdf.CDF` constructor.\n ' lib.call(const.OPEN_, const.CDF_, self.pathname, ctypes.byref(self._handle)) self._opened = True if readonly: self.readonly(readonly)
def _create(self): 'Creates (and opens) a new CDF file\n\n Created at ``pathname``.\n Assumes zero-dimension r variables\n\n Raises\n ======\n CDFError : if CDF library reports an error\n CDFWarning : if CDF library reports a warning and interpreter\n is set to error on warnings.\n .. note:\n Not intended for direct call; pass parameters to\n :py:class:`pycdf.CDF` constructor.\n ' lib.call(const.CREATE_, const.CDF_, self.pathname, ctypes.c_long(0), (ctypes.c_long * 1)(0), ctypes.byref(self._handle)) self._opened = True
3,707,443,813,987,726,000
Creates (and opens) a new CDF file Created at ``pathname``. Assumes zero-dimension r variables Raises ====== CDFError : if CDF library reports an error CDFWarning : if CDF library reports a warning and interpreter is set to error on warnings. .. note: Not intended for direct call; pass parameters to :py:class:`pycdf.CDF` constructor.
pycdf/__init__.py
_create
cpiker/condaCDF
python
def _create(self): 'Creates (and opens) a new CDF file\n\n Created at ``pathname``.\n Assumes zero-dimension r variables\n\n Raises\n ======\n CDFError : if CDF library reports an error\n CDFWarning : if CDF library reports a warning and interpreter\n is set to error on warnings.\n .. note:\n Not intended for direct call; pass parameters to\n :py:class:`pycdf.CDF` constructor.\n ' lib.call(const.CREATE_, const.CDF_, self.pathname, ctypes.c_long(0), (ctypes.c_long * 1)(0), ctypes.byref(self._handle)) self._opened = True
def _from_master(self, master_path): 'Creates a new CDF from a master CDF file\n\n ``master_path`` is copied to ``pathname`` and opened.\n\n Parameters\n ==========\n master_path : string\n location of the master CDF file\n\n Raises\n ======\n CDFError : if CDF library reports an error\n CDFWarning : if CDF library reports a warning and interpreter\n is set to error on warnings.\n .. note:\n Not intended for direct call; pass parameters to\n :py:class:`pycdf.CDF` constructor.\n ' if os.path.exists(self.pathname): raise CDFError(const.CDF_EXISTS) shutil.copy2(master_path, self.pathname) self._open(False)
-7,633,811,102,613,640,000
Creates a new CDF from a master CDF file ``master_path`` is copied to ``pathname`` and opened. Parameters ========== master_path : string location of the master CDF file Raises ====== CDFError : if CDF library reports an error CDFWarning : if CDF library reports a warning and interpreter is set to error on warnings. .. note: Not intended for direct call; pass parameters to :py:class:`pycdf.CDF` constructor.
pycdf/__init__.py
_from_master
cpiker/condaCDF
python
def _from_master(self, master_path): 'Creates a new CDF from a master CDF file\n\n ``master_path`` is copied to ``pathname`` and opened.\n\n Parameters\n ==========\n master_path : string\n location of the master CDF file\n\n Raises\n ======\n CDFError : if CDF library reports an error\n CDFWarning : if CDF library reports a warning and interpreter\n is set to error on warnings.\n .. note:\n Not intended for direct call; pass parameters to\n :py:class:`pycdf.CDF` constructor.\n ' if os.path.exists(self.pathname): raise CDFError(const.CDF_EXISTS) shutil.copy2(master_path, self.pathname) self._open(False)
def _call(self, *args, **kwargs): "Select this CDF as current and call the CDF internal interface\n\n Adds call to select this CDF to L{args} and passes all parameters\n directly through to the CDFlib routine of the CDF library's C internal\n interface. Checks the return value with L{Library.check_status}.\n\n Parameters\n ==========\n args : various, see :py:mod:`ctypes`.\n Passed directly to the CDF library interface. Useful\n constants are defined in the :doc:`const <pycdf_const>`\n module of this package.\n\n Returns\n =======\n out : ctypes.c_long\n CDF status from the library\n\n .. note:\n Terminal NULL_ is automatically added to ``args``.\n Raises\n ======\n CDFError : if CDF library reports an error\n CDFWarning : if CDF library reports a warning and interpreter\n is set to error on warnings.\n " return lib.call(const.SELECT_, const.CDF_, self._handle, *args, **kwargs)
4,281,152,753,754,060,300
Select this CDF as current and call the CDF internal interface Adds call to select this CDF to L{args} and passes all parameters directly through to the CDFlib routine of the CDF library's C internal interface. Checks the return value with L{Library.check_status}. Parameters ========== args : various, see :py:mod:`ctypes`. Passed directly to the CDF library interface. Useful constants are defined in the :doc:`const <pycdf_const>` module of this package. Returns ======= out : ctypes.c_long CDF status from the library .. note: Terminal NULL_ is automatically added to ``args``. Raises ====== CDFError : if CDF library reports an error CDFWarning : if CDF library reports a warning and interpreter is set to error on warnings.
pycdf/__init__.py
_call
cpiker/condaCDF
python
def _call(self, *args, **kwargs): "Select this CDF as current and call the CDF internal interface\n\n Adds call to select this CDF to L{args} and passes all parameters\n directly through to the CDFlib routine of the CDF library's C internal\n interface. Checks the return value with L{Library.check_status}.\n\n Parameters\n ==========\n args : various, see :py:mod:`ctypes`.\n Passed directly to the CDF library interface. Useful\n constants are defined in the :doc:`const <pycdf_const>`\n module of this package.\n\n Returns\n =======\n out : ctypes.c_long\n CDF status from the library\n\n .. note:\n Terminal NULL_ is automatically added to ``args``.\n Raises\n ======\n CDFError : if CDF library reports an error\n CDFWarning : if CDF library reports a warning and interpreter\n is set to error on warnings.\n " return lib.call(const.SELECT_, const.CDF_, self._handle, *args, **kwargs)
def clone(self, zVar, name=None, data=True): '\n Clone a zVariable (from another CDF or this) into this CDF\n\n Parameters\n ==========\n zVar : :py:class:`Var`\n variable to clone\n\n Other Parameters\n ================\n name : str\n Name of the new variable (default: name of the original)\n data : boolean (optional)\n Copy data, or only type, dimensions, variance, attributes?\n (default: True, copy data as well)\n\n Returns\n =======\n out : :py:class:`Var`\n The newly-created zVar in this CDF\n ' if (name is None): name = zVar.name() if (name in self): del self[name] self.new(name, type=zVar.type(), recVary=zVar.rv(), dimVarys=zVar.dv(), dims=zVar._dim_sizes(), n_elements=zVar._nelems()) self[name].compress(*zVar.compress()) self[name].attrs.clone(zVar.attrs) if data: r = zVar._raw zVar._raw = True self.raw_var(name)[...] = zVar[...] zVar._raw = r return zVar
-8,248,084,366,707,620,000
Clone a zVariable (from another CDF or this) into this CDF Parameters ========== zVar : :py:class:`Var` variable to clone Other Parameters ================ name : str Name of the new variable (default: name of the original) data : boolean (optional) Copy data, or only type, dimensions, variance, attributes? (default: True, copy data as well) Returns ======= out : :py:class:`Var` The newly-created zVar in this CDF
pycdf/__init__.py
clone
cpiker/condaCDF
python
def clone(self, zVar, name=None, data=True): '\n Clone a zVariable (from another CDF or this) into this CDF\n\n Parameters\n ==========\n zVar : :py:class:`Var`\n variable to clone\n\n Other Parameters\n ================\n name : str\n Name of the new variable (default: name of the original)\n data : boolean (optional)\n Copy data, or only type, dimensions, variance, attributes?\n (default: True, copy data as well)\n\n Returns\n =======\n out : :py:class:`Var`\n The newly-created zVar in this CDF\n ' if (name is None): name = zVar.name() if (name in self): del self[name] self.new(name, type=zVar.type(), recVary=zVar.rv(), dimVarys=zVar.dv(), dims=zVar._dim_sizes(), n_elements=zVar._nelems()) self[name].compress(*zVar.compress()) self[name].attrs.clone(zVar.attrs) if data: r = zVar._raw zVar._raw = True self.raw_var(name)[...] = zVar[...] zVar._raw = r return zVar
def col_major(self, new_col=None): '\n Finds the majority of this CDF file\n\n Other Parameters\n ================\n new_col : boolean\n Specify True to change to column-major, False to change to\n row major, or do not specify to check the majority\n rather than changing it.\n (default is check only)\n\n Returns\n =======\n out : boolean\n True if column-major, false if row-major\n ' if (new_col != None): new_maj = (const.COLUMN_MAJOR if new_col else const.ROW_MAJOR) self._call(const.PUT_, const.CDF_MAJORITY_, new_maj) maj = ctypes.c_long(0) self._call(const.GET_, const.CDF_MAJORITY_, ctypes.byref(maj)) if (not (maj.value in (const.ROW_MAJOR.value, const.COLUMN_MAJOR.value))): raise CDFError(const.BAD_MAJORITY) return (maj.value == const.COLUMN_MAJOR.value)
3,227,542,539,931,076,000
Finds the majority of this CDF file Other Parameters ================ new_col : boolean Specify True to change to column-major, False to change to row major, or do not specify to check the majority rather than changing it. (default is check only) Returns ======= out : boolean True if column-major, false if row-major
pycdf/__init__.py
col_major
cpiker/condaCDF
python
def col_major(self, new_col=None): '\n Finds the majority of this CDF file\n\n Other Parameters\n ================\n new_col : boolean\n Specify True to change to column-major, False to change to\n row major, or do not specify to check the majority\n rather than changing it.\n (default is check only)\n\n Returns\n =======\n out : boolean\n True if column-major, false if row-major\n ' if (new_col != None): new_maj = (const.COLUMN_MAJOR if new_col else const.ROW_MAJOR) self._call(const.PUT_, const.CDF_MAJORITY_, new_maj) maj = ctypes.c_long(0) self._call(const.GET_, const.CDF_MAJORITY_, ctypes.byref(maj)) if (not (maj.value in (const.ROW_MAJOR.value, const.COLUMN_MAJOR.value))): raise CDFError(const.BAD_MAJORITY) return (maj.value == const.COLUMN_MAJOR.value)
def readonly(self, ro=None): '\n Sets or check the readonly status of this CDF\n\n If the CDF has been changed since opening, setting readonly mode\n will have no effect.\n\n .. note::\n Closing a CDF that has been opened readonly, or setting readonly\n False, may take a substantial amount of time if there are many\n variables in the CDF, as a (potentially large) cache needs to\n be cleared. Consider specifying ``readonly=False`` when opening\n the file if this is an issue. However, this may make some reading\n operations slower.\n\n Other Parameters\n ================\n ro : Boolean\n True to set the CDF readonly, False to set it read/write,\n or leave out to check only.\n\n Returns\n =======\n out : Boolean\n True if CDF is read-only, else False\n\n Raises\n ======\n CDFError : if bad mode is set\n ' if (ro == True): self._call(const.SELECT_, const.CDF_READONLY_MODE_, const.READONLYon) elif (ro == False): self._call(const.SELECT_, const.CDF_READONLY_MODE_, const.READONLYoff) mode = ctypes.c_long(0) self._call(const.CONFIRM_, const.CDF_READONLY_MODE_, ctypes.byref(mode)) if (mode.value == const.READONLYon.value): return True elif (mode.value == const.READONLYoff.value): return False else: raise CDFError(const.BAD_READONLY_MODE.value)
8,532,621,285,948,232,000
Sets or check the readonly status of this CDF If the CDF has been changed since opening, setting readonly mode will have no effect. .. note:: Closing a CDF that has been opened readonly, or setting readonly False, may take a substantial amount of time if there are many variables in the CDF, as a (potentially large) cache needs to be cleared. Consider specifying ``readonly=False`` when opening the file if this is an issue. However, this may make some reading operations slower. Other Parameters ================ ro : Boolean True to set the CDF readonly, False to set it read/write, or leave out to check only. Returns ======= out : Boolean True if CDF is read-only, else False Raises ====== CDFError : if bad mode is set
pycdf/__init__.py
readonly
cpiker/condaCDF
python
def readonly(self, ro=None): '\n Sets or check the readonly status of this CDF\n\n If the CDF has been changed since opening, setting readonly mode\n will have no effect.\n\n .. note::\n Closing a CDF that has been opened readonly, or setting readonly\n False, may take a substantial amount of time if there are many\n variables in the CDF, as a (potentially large) cache needs to\n be cleared. Consider specifying ``readonly=False`` when opening\n the file if this is an issue. However, this may make some reading\n operations slower.\n\n Other Parameters\n ================\n ro : Boolean\n True to set the CDF readonly, False to set it read/write,\n or leave out to check only.\n\n Returns\n =======\n out : Boolean\n True if CDF is read-only, else False\n\n Raises\n ======\n CDFError : if bad mode is set\n ' if (ro == True): self._call(const.SELECT_, const.CDF_READONLY_MODE_, const.READONLYon) elif (ro == False): self._call(const.SELECT_, const.CDF_READONLY_MODE_, const.READONLYoff) mode = ctypes.c_long(0) self._call(const.CONFIRM_, const.CDF_READONLY_MODE_, ctypes.byref(mode)) if (mode.value == const.READONLYon.value): return True elif (mode.value == const.READONLYoff.value): return False else: raise CDFError(const.BAD_READONLY_MODE.value)
def checksum(self, new_val=None):
    """
    Set or check the checksum status of this CDF. If checksums
    are enabled, the checksum will be verified every time the file
    is opened.

    Other Parameters
    ================
    new_val : boolean
        True to enable checksum, False to disable, or leave out
        to simply check.

    Returns
    =======
    out : boolean
        True if the checksum is enabled or False if disabled

    Raises
    ======
    CDFError : if the library reports a checksum mode other than
        MD5 or none.
    """
    # PEP 8: None comparisons use identity, not equality
    # (was ``new_val != None``; behavior is identical).
    if new_val is not None:
        self._call(const.PUT_, const.CDF_CHECKSUM_,
                   const.MD5_CHECKSUM if new_val else const.NO_CHECKSUM)
    chk = ctypes.c_long(0)
    self._call(const.GET_, const.CDF_CHECKSUM_, ctypes.byref(chk))
    if chk.value not in (const.MD5_CHECKSUM.value,
                         const.NO_CHECKSUM.value):
        raise CDFError(const.BAD_CHECKSUM)
    return chk.value == const.MD5_CHECKSUM.value
1,468,243,584,143,955,000
Set or check the checksum status of this CDF. If checksums are enabled, the checksum will be verified every time the file is opened. Other Parameters ================ new_val : boolean True to enable checksum, False to disable, or leave out to simply check. Returns ======= out : boolean True if the checksum is enabled or False if disabled
pycdf/__init__.py
checksum
cpiker/condaCDF
python
def checksum(self, new_val=None): '\n Set or check the checksum status of this CDF. If checksums\n are enabled, the checksum will be verified every time the file\n is opened.\n\n Other Parameters\n ================\n new_val : boolean\n True to enable checksum, False to disable, or leave out\n to simply check.\n\n Returns\n =======\n out : boolean\n True if the checksum is enabled or False if disabled\n ' if (new_val != None): self._call(const.PUT_, const.CDF_CHECKSUM_, (const.MD5_CHECKSUM if new_val else const.NO_CHECKSUM)) chk = ctypes.c_long(0) self._call(const.GET_, const.CDF_CHECKSUM_, ctypes.byref(chk)) if (not (chk.value in (const.MD5_CHECKSUM.value, const.NO_CHECKSUM.value))): raise CDFError(const.BAD_CHECKSUM) return (chk.value == const.MD5_CHECKSUM.value)
def close(self):
    """
    Closes the CDF file

    Although called on object destruction (:meth:`~CDF.__del__`),
    to ensure all data are saved the user should explicitly call
    :meth:`~CDF.close` or :meth:`~CDF.save`.

    Raises
    ======
    CDFError : if CDF library reports an error

    Warns
    =====
    CDFWarning : if CDF library reports a warning
    """
    # Ask the library to close the file, then mark this object closed.
    self._call(const.CLOSE_, const.CDF_)
    self._opened = False
5,496,972,562,181,374,000
Closes the CDF file Although called on object destruction (:meth:`~CDF.__del__`), to ensure all data are saved, the user should explicitly call :meth:`~CDF.close` or :meth:`~CDF.save`. Raises ====== CDFError : if CDF library reports an error Warns ===== CDFWarning : if CDF library reports a warning
pycdf/__init__.py
close
cpiker/condaCDF
python
def close(self): '\n Closes the CDF file\n\n Although called on object destruction (:meth:`~CDF.__del__`),\n to ensure all data are saved, the user should explicitly call\n :meth:`~CDF.close` or :meth:`~CDF.save`.\n\n Raises\n ======\n CDFError : if CDF library reports an error\n\n Warns\n =====\n CDFWarning : if CDF library reports a warning\n ' self._call(const.CLOSE_, const.CDF_) self._opened = False
def compress(self, comptype=None, param=None):
    """
    Set or check the compression of this CDF

    Sets compression on the entire *file*, not per-variable.

    See section 2.6 of the CDF user's guide for more information on
    compression.

    Other Parameters
    ================
    comptype : ctypes.c_long
        type of compression to change to, see CDF C reference manual
        section 4.10. Constants for this parameter are in
        :mod:`~pycdf.const`. If not specified, will not change
        compression.
    param : ctypes.c_long
        Compression parameter, see CDF CRM 4.10 and
        :mod:`~pycdf.const`. If not specified, a reasonable default
        is chosen (5 for gzip; other types have only one possible
        parameter.)

    Returns
    =======
    out : tuple
        (comptype, param) currently in effect

    See Also
    ========
    :meth:`Var.compress`

    Examples
    ========
    Set file ``cdffile`` to gzip compression, compression level 9:
        >>> cdffile.compress(pycdf.const.GZIP_COMPRESSION, 9)
    """
    # Shared implementation with Var.compress lives in _compress.
    return _compress(self, comptype, param)
8,248,528,480,997,144,000
Set or check the compression of this CDF Sets compression on entire *file*, not per-variable. See section 2.6 of the CDF user's guide for more information on compression. Other Parameters ================ comptype : ctypes.c_long type of compression to change to, see CDF C reference manual section 4.10. Constants for this parameter are in :mod:`~pycdf.const`. If not specified, will not change compression. param : ctypes.c_long Compression parameter, see CDF CRM 4.10 and :mod:`~pycdf.const`. If not specified, will choose reasonable default (5 for gzip; other types have only one possible parameter.) Returns ======= out : tuple (comptype, param) currently in effect See Also ======== :meth:`Var.compress` Examples ======== Set file ``cdffile`` to gzip compression, compression level 9: >>> cdffile.compress(pycdf.const.GZIP_COMPRESSION, 9)
pycdf/__init__.py
compress
cpiker/condaCDF
python
def compress(self, comptype=None, param=None): "\n Set or check the compression of this CDF\n\n Sets compression on entire *file*, not per-variable.\n\n See section 2.6 of the CDF user's guide for more information on\n compression.\n\n Other Parameters\n ================\n comptype : ctypes.c_long\n type of compression to change to, see CDF C reference manual\n section 4.10. Constants for this parameter are in\n :mod:`~pycdf.const`. If not specified, will not change\n compression.\n param : ctypes.c_long\n Compression parameter, see CDF CRM 4.10 and\n :mod:`~pycdf.const`.\n If not specified, will choose reasonable default (5 for gzip;\n other types have only one possible parameter.)\n\n Returns\n =======\n out : tuple\n (comptype, param) currently in effect\n\n See Also\n ========\n :meth:`Var.compress`\n\n Examples\n ========\n Set file ``cdffile`` to gzip compression, compression level 9:\n >>> cdffile.compress(pycdf.const.GZIP_COMPRESSION, 9)\n " return _compress(self, comptype, param)
def new(self, name, data=None, type=None, recVary=True, dimVarys=None,
        dims=None, n_elements=None, compress=None, compress_param=None):
    """
    Create a new zVariable in this CDF

    .. note::
        Either ``data`` or ``type`` must be specified. If type is not
        specified, it is guessed from ``data``.

    Parameters
    ==========
    name : str
        name of the new variable

    Other Parameters
    ================
    data
        data to store in the new variable. If this has an ``attrs``
        attribute (e.g., :class:`~spacepy.datamodel.dmarray`), it
        will be used to populate attributes of the new variable.
    type : ctypes.c_long
        CDF type of the variable, from :mod:`~pycdf.const`.
        See section 2.5 of the CDF user's guide for more information
        on CDF data types.
    recVary : boolean
        record variance of the variable (default True)
    dimVarys : list of boolean
        dimension variance of each dimension, default True for all
        dimensions.
    dims : list of int
        size of each dimension of this variable, default
        zero-dimensional. This is the dimensionality as defined by
        CDF, i.e., for record-varying variables it excludes the
        leading record dimension. See :py:class:`Var`.
    n_elements : int
        number of elements, should be 1 except for CDF_CHAR,
        for which it's the length of the string.
    compress : ctypes.c_long
        Compression to apply to this variable, default None.
        See :py:meth:`Var.compress`.
    compress_param : ctypes.c_long
        Compression parameter if compression used; reasonable default
        is chosen. See :py:meth:`Var.compress`.

    Returns
    =======
    out : :py:class:`Var`
        the newly-created zVariable

    Raises
    ======
    ValueError : if neither data nor sufficient typing information
        is provided, or the requested/guessed type is incompatible
        with a backward-compatible CDF or the installed CDF library.
    """
    # EPOCH16/INT8/TT2000 cannot appear in backward-compatible (v2)
    # CDFs, and the 8-byte types need CDF library >= 3.4.0.
    if type in (const.CDF_EPOCH16, const.CDF_INT8,
                const.CDF_TIME_TT2000) and self.backward:
        raise ValueError('Cannot use EPOCH16, INT8, or TIME_TT2000 '
                         'in backward-compatible CDF')
    if not lib.supports_int8 and \
            type in (const.CDF_INT8, const.CDF_TIME_TT2000):
        raise ValueError('INT8 and TIME_TT2000 require CDF library 3.4.0')
    if data is None:
        if type is None:
            raise ValueError('Must provide either data or a CDF type.')
        if dims is None:
            dims = []
        if n_elements is None:
            n_elements = 1
    else:
        # Infer dimensionality, candidate types, and element count
        # from the data itself.
        (guess_dims, guess_types, guess_elements) = _Hyperslice.types(data)
        if dims is None:
            if recVary:
                if guess_dims == ():
                    raise ValueError(
                        'Record-varying data cannot be scalar. '
                        'Specify NRV with CDF.new() or put data in array.')
                dims = guess_dims[1:]  # drop the leading record dimension
            else:
                dims = guess_dims
        if type is None:
            type = guess_types[0]  # best candidate, in preference order
            if type == const.CDF_EPOCH16.value and self.backward:
                type = const.CDF_EPOCH  # EPOCH16 unavailable in v2 files
        if n_elements is None:
            n_elements = guess_elements
    if dimVarys is None:
        dimVarys = [True for i in dims]
    recVary = const.VARY if recVary else const.NOVARY
    dimVarys = [const.VARY if dimVary else const.NOVARY
                for dimVary in dimVarys]
    if not hasattr(type, 'value'):
        type = ctypes.c_long(type)
    # Re-check after guessing: the guessed type may itself be
    # unsupported by this library or file version.
    if type.value == const.CDF_INT8.value and not lib.supports_int8:
        raise ValueError('64-bit integer support require CDF library 3.4.0')
    if type.value in (const.CDF_EPOCH16.value, const.CDF_INT8.value,
                      const.CDF_TIME_TT2000.value) and self.backward:
        raise ValueError('Data requires EPOCH16, INT8, or TIME_TT2000; '
                         'incompatible with backward-compatible CDF')
    new_var = Var(self, name, type, n_elements, dims, recVary, dimVarys)
    # PEP 8: None comparison by identity (was ``compress != None``).
    if compress is not None:
        new_var.compress(compress, compress_param)
    if data is not None:
        new_var[...] = data
        if hasattr(data, 'attrs'):
            new_var.attrs.clone(data.attrs)
    return new_var
-8,630,664,426,403,404,000
Create a new zVariable in this CDF .. note:: Either ``data`` or ``type`` must be specified. If type is not specified, it is guessed from ``data``. Parameters ========== name : str name of the new variable Other Parameters ================ data data to store in the new variable. If this has a an ``attrs`` attribute (e.g., :class:`~spacepy.datamodel.dmarray`), it will be used to populate attributes of the new variable. type : ctypes.c_long CDF type of the variable, from :mod:`~pycdf.const`. See section 2.5 of the CDF user's guide for more information on CDF data types. recVary : boolean record variance of the variable (default True) dimVarys : list of boolean dimension variance of each dimension, default True for all dimensions. dims : list of int size of each dimension of this variable, default zero-dimensional. Note this is the dimensionality as defined by CDF, i.e., for record-varying variables it excludes the leading record dimension. See :py:class:`Var`. n_elements : int number of elements, should be 1 except for CDF_CHAR, for which it's the length of the string. compress : ctypes.c_long Compression to apply to this variable, default None. See :py:meth:`Var.compress`. compress_param : ctypes.c_long Compression parameter if compression used; reasonable default is chosen. See :py:meth:`Var.compress`. Returns ======= out : :py:class:`Var` the newly-created zVariable Raises ====== ValueError : if neither data nor sufficient typing information is provided. Notes ===== Any given data may be representable by a range of CDF types; if the type is not specified, pycdf will guess which the CDF types which can represent this data. This breaks down to: #. If input data is a numpy array, match the type of that array #. Proper kind (numerical, string, time) #. Proper range (stores highest and lowest number provided) #. Sufficient resolution (EPOCH16 required if datetime has microseconds or below.) 
If more than one value satisfies the requirements, types are returned in preferred order: #. Type that matches precision of data first, then #. integer type before float type, then #. Smallest type first, then #. signed type first, then #. specifically-named (CDF_BYTE) vs. generically named (CDF_INT1) So for example, EPOCH_16 is preferred over EPOCH if ``data`` specifies below the millisecond level (rule 1), but otherwise EPOCH is preferred (rule 2). For floats, four-byte is preferred unless eight-byte is required: #. absolute values between 0 and 3e-39 #. absolute values greater than 1.7e38 This will switch to an eight-byte double in some cases where four bytes would be sufficient for IEEE 754 encoding, but where DEC formats would require eight.
pycdf/__init__.py
new
cpiker/condaCDF
python
def new(self, name, data=None, type=None, recVary=True, dimVarys=None, dims=None, n_elements=None, compress=None, compress_param=None): "\n Create a new zVariable in this CDF\n\n .. note::\n Either ``data`` or ``type`` must be specified. If type is not\n specified, it is guessed from ``data``.\n\n Parameters\n ==========\n name : str\n name of the new variable\n\n Other Parameters\n ================\n data\n data to store in the new variable. If this has a an ``attrs``\n attribute (e.g., :class:`~spacepy.datamodel.dmarray`), it\n will be used to populate attributes of the new variable.\n type : ctypes.c_long\n CDF type of the variable, from :mod:`~pycdf.const`.\n See section 2.5 of the CDF user's guide for more information on\n CDF data types.\n recVary : boolean\n record variance of the variable (default True)\n dimVarys : list of boolean\n dimension variance of each dimension, default True for all\n dimensions.\n dims : list of int\n size of each dimension of this variable, default zero-dimensional.\n Note this is the dimensionality as defined by CDF, i.e., for\n record-varying variables it excludes the leading record dimension.\n See :py:class:`Var`.\n n_elements : int\n number of elements, should be 1 except for CDF_CHAR,\n for which it's the length of the string.\n compress : ctypes.c_long\n Compression to apply to this variable, default None.\n See :py:meth:`Var.compress`.\n compress_param : ctypes.c_long\n Compression parameter if compression used; reasonable default\n is chosen. See :py:meth:`Var.compress`.\n\n Returns\n =======\n out : :py:class:`Var`\n the newly-created zVariable\n\n Raises\n ======\n ValueError : if neither data nor sufficient typing information\n is provided.\n\n Notes\n =====\n Any given data may be representable by a range of CDF types; if\n the type is not specified, pycdf will guess which\n the CDF types which can represent this data. This breaks down to:\n\n #. If input data is a numpy array, match the type of that array\n #. 
Proper kind (numerical, string, time)\n #. Proper range (stores highest and lowest number provided)\n #. Sufficient resolution (EPOCH16 required if datetime has\n microseconds or below.)\n\n If more than one value satisfies the requirements, types are returned\n in preferred order:\n\n #. Type that matches precision of data first, then\n #. integer type before float type, then\n #. Smallest type first, then\n #. signed type first, then\n #. specifically-named (CDF_BYTE) vs. generically named (CDF_INT1)\n\n So for example, EPOCH_16 is preferred over EPOCH if ``data`` specifies\n below the millisecond level (rule 1), but otherwise EPOCH is preferred\n (rule 2).\n\n For floats, four-byte is preferred unless eight-byte is required:\n\n #. absolute values between 0 and 3e-39\n #. absolute values greater than 1.7e38\n\n This will switch to an eight-byte double in some cases where four bytes\n would be sufficient for IEEE 754 encoding, but where DEC formats would\n require eight.\n " if ((type in (const.CDF_EPOCH16, const.CDF_INT8, const.CDF_TIME_TT2000)) and self.backward): raise ValueError('Cannot use EPOCH16, INT8, or TIME_TT2000 in backward-compatible CDF') if ((not lib.supports_int8) and (type in (const.CDF_INT8, const.CDF_TIME_TT2000))): raise ValueError('INT8 and TIME_TT2000 require CDF library 3.4.0') if (data is None): if (type is None): raise ValueError('Must provide either data or a CDF type.') if (dims is None): dims = [] if (n_elements is None): n_elements = 1 else: (guess_dims, guess_types, guess_elements) = _Hyperslice.types(data) if (dims is None): if recVary: if (guess_dims == ()): raise ValueError('Record-varying data cannot be scalar. 
Specify NRV with CDF.new() or put data in array.') dims = guess_dims[1:] else: dims = guess_dims if (type is None): type = guess_types[0] if ((type == const.CDF_EPOCH16.value) and self.backward): type = const.CDF_EPOCH if (n_elements is None): n_elements = guess_elements if (dimVarys is None): dimVarys = [True for i in dims] recVary = (const.VARY if recVary else const.NOVARY) dimVarys = [(const.VARY if dimVary else const.NOVARY) for dimVary in dimVarys] if (not hasattr(type, 'value')): type = ctypes.c_long(type) if ((type.value == const.CDF_INT8.value) and (not lib.supports_int8)): raise ValueError('64-bit integer support require CDF library 3.4.0') if ((type.value in (const.CDF_EPOCH16.value, const.CDF_INT8.value, const.CDF_TIME_TT2000.value)) and self.backward): raise ValueError('Data requires EPOCH16, INT8, or TIME_TT2000; incompatible with backward-compatible CDF') new_var = Var(self, name, type, n_elements, dims, recVary, dimVarys) if (compress != None): new_var.compress(compress, compress_param) if (data is not None): new_var[...] = data if hasattr(data, 'attrs'): new_var.attrs.clone(data.attrs) return new_var
def raw_var(self, name):
    """
    Get a "raw" :class:`Var` object.

    Normally a :class:`Var` will perform translation of values for
    certain types (to/from Unicode for CHAR variables on Py3k,
    and to/from datetime for all time types). A "raw" object
    does not perform this translation, on read or write.

    This does *not* affect the data on disk, and in fact it
    is possible to maintain multiple Python objects with access
    to the same zVariable.

    Parameters
    ==========
    name : str
        name or number of the zVariable
    """
    # Look the variable up normally, then flip its raw-mode flag.
    var = self[name]
    var._raw = True
    return var
-3,017,748,155,568,611,000
Get a "raw" :class:`Var` object. Normally a :class:`Var` will perform translation of values for certain types (to/from Unicode for CHAR variables on Py3k, and to/from datetime for all time types). A "raw" object does not perform this translation, on read or write. This does *not* affect the data on disk, and in fact it is possible to maintain multiple Python objects with access to the same zVariable. Parameters ========== name : str name or number of the zVariable
pycdf/__init__.py
raw_var
cpiker/condaCDF
python
def raw_var(self, name): '\n Get a "raw" :class:`Var` object.\n\n Normally a :class:`Var` will perform translation of values for\n certain types (to/from Unicode for CHAR variables on Py3k,\n and to/from datetime for all time types). A "raw" object\n does not perform this translation, on read or write.\n\n This does *not* affect the data on disk, and in fact it\n is possible to maintain multiple Python objects with access\n to the same zVariable.\n\n Parameters\n ==========\n name : str\n name or number of the zVariable\n ' v = self[name] v._raw = True return v
def save(self):
    """
    Saves the CDF file but leaves it open.

    If closing the CDF, :meth:`close` is sufficient;
    there is no need to call :meth:`save` before :meth:`close`.

    .. note::
        Relies on an undocumented call of the CDF C library, which is
        also used in the Java interface.

    Raises
    ======
    CDFError : if CDF library reports an error

    Warns
    =====
    CDFWarning : if CDF library reports a warning
    """
    self._call(const.SAVE_, const.CDF_)
2,463,545,021,715,353,600
Saves the CDF file but leaves it open. If closing the CDF, :meth:`close` is sufficient; there is no need to call :meth:`save` before :meth:`close`. .. note:: Relies on an undocumented call of the CDF C library, which is also used in the Java interface. Raises ====== CDFError : if CDF library reports an error Warns ===== CDFWarning : if CDF library reports a warning
pycdf/__init__.py
save
cpiker/condaCDF
python
def save(self): '\n Saves the CDF file but leaves it open.\n\n If closing the CDF, :meth:`close` is sufficient;\n there is no need to call\n :meth:`save` before :meth:`close`.\n\n .. note::\n Relies on an undocumented call of the CDF C library, which is\n also used in the Java interface.\n\n Raises\n ======\n CDFError : if CDF library reports an error\n\n Warns\n =====\n CDFWarning : if CDF library reports a warning\n ' self._call(const.SAVE_, const.CDF_)
def copy(self):
    """
    Make a copy of all data and attributes in this CDF

    Returns
    =======
    out : :py:class:`CDFCopy`
        :class:`~spacepy.datamodel.SpaceData`-like object of all data
    """
    # CDFCopy's constructor walks the whole file.
    return CDFCopy(self)
8,352,673,866,543,797,000
Make a copy of all data and attributes in this CDF Returns ======= out : :py:class:`CDFCopy` :class:`~spacepy.datamodel.SpaceData`-like object of all data
pycdf/__init__.py
copy
cpiker/condaCDF
python
def copy(self): '\n Make a copy of all data and attributes in this CDF\n\n Returns\n =======\n out : :py:class:`CDFCopy`\n :class:`~spacepy.datamodel.SpaceData`-like object of all data\n ' return CDFCopy(self)
def version(self):
    """
    Get version of library that created this CDF

    Returns
    =======
    out : tuple
        version of CDF library, in form (version, release, increment)
    """
    # Three output slots, filled by a single multi-operation call.
    version = ctypes.c_long(0)
    release = ctypes.c_long(0)
    increment = ctypes.c_long(0)
    self._call(const.GET_, const.CDF_VERSION_, ctypes.byref(version),
               const.GET_, const.CDF_RELEASE_, ctypes.byref(release),
               const.GET_, const.CDF_INCREMENT_, ctypes.byref(increment))
    return (version.value, release.value, increment.value)
235,729,553,055,022,300
Get version of library that created this CDF Returns ======= out : tuple version of CDF library, in form (version, release, increment)
pycdf/__init__.py
version
cpiker/condaCDF
python
def version(self): '\n Get version of library that created this CDF\n\n Returns\n =======\n out : tuple\n version of CDF library, in form (version, release, increment)\n ' ver = ctypes.c_long(0) rel = ctypes.c_long(0) inc = ctypes.c_long(0) self._call(const.GET_, const.CDF_VERSION_, ctypes.byref(ver), const.GET_, const.CDF_RELEASE_, ctypes.byref(rel), const.GET_, const.CDF_INCREMENT_, ctypes.byref(inc)) return (ver.value, rel.value, inc.value)
def _get_attrs(self): "Get attribute list\n\n Provide access to the CDF's attribute list without holding a\n strong reference, as the attribute list has a (strong)\n back-reference to its parent.\n\n Either deref a weak reference (to try and keep the object the same),\n or make a new AttrList instance and assign it to the weak reference\n for next time.\n " al = self._attrlistref() if (al is None): al = gAttrList(self) self._attrlistref = weakref.ref(al) return al
1,744,989,880,435,086,300
Get attribute list Provide access to the CDF's attribute list without holding a strong reference, as the attribute list has a (strong) back-reference to its parent. Either deref a weak reference (to try and keep the object the same), or make a new AttrList instance and assign it to the weak reference for next time.
pycdf/__init__.py
_get_attrs
cpiker/condaCDF
python
def _get_attrs(self): "Get attribute list\n\n Provide access to the CDF's attribute list without holding a\n strong reference, as the attribute list has a (strong)\n back-reference to its parent.\n\n Either deref a weak reference (to try and keep the object the same),\n or make a new AttrList instance and assign it to the weak reference\n for next time.\n " al = self._attrlistref() if (al is None): al = gAttrList(self) self._attrlistref = weakref.ref(al) return al
def _set_attrs(self, value): 'Assign to the attribute list\n\n Clears all elements of the attribute list and copies from value\n ' self.attrs.clone(value)
4,701,805,248,585,535,000
Assign to the attribute list Clears all elements of the attribute list and copies from value
pycdf/__init__.py
_set_attrs
cpiker/condaCDF
python
def _set_attrs(self, value): 'Assign to the attribute list\n\n Clears all elements of the attribute list and copies from value\n ' self.attrs.clone(value)
def var_num(self, varname):
    """Get the variable number of a particular variable name

    This maintains a cache of name-to-number mappings for zVariables
    to keep from having to query the CDF library constantly. It's
    mostly an internal function.

    Parameters
    ==========
    varname : bytes
        name of the zVariable. Note this is NOT a string in Python 3!

    Raises
    ======
    CDFError : if variable is not found

    Returns
    =======
    out : int
        Variable number of this zvariable.
    """
    # EAFP: hit the cache first, fall through to the library on miss.
    try:
        return self._var_nums[varname]
    except KeyError:
        pass
    holder = ctypes.c_long(0)
    self._call(const.GET_, const.zVAR_NUMBER_, varname,
               ctypes.byref(holder))
    self._var_nums[varname] = holder.value
    return holder.value
291,467,886,314,890,700
Get the variable number of a particular variable name This maintains a cache of name-to-number mappings for zVariables to keep from having to query the CDF library constantly. It's mostly an internal function. Parameters ========== varname : bytes name of the zVariable. Not this is NOT a string in Python 3! Raises ====== CDFError : if variable is not found Returns ======= out : int Variable number of this zvariable.
pycdf/__init__.py
var_num
cpiker/condaCDF
python
def var_num(self, varname): "Get the variable number of a particular variable name\n\n This maintains a cache of name-to-number mappings for zVariables\n to keep from having to query the CDF library constantly. It's mostly\n an internal function.\n\n Parameters\n ==========\n varname : bytes\n name of the zVariable. Not this is NOT a string in Python 3!\n\n Raises\n ======\n CDFError : if variable is not found\n\n Returns\n =======\n out : int\n Variable number of this zvariable.\n " num = self._var_nums.get(varname, None) if (num is None): varNum = ctypes.c_long(0) self._call(const.GET_, const.zVAR_NUMBER_, varname, ctypes.byref(varNum)) num = varNum.value self._var_nums[varname] = num return num
def attr_num(self, attrname):
    """Get the attribute number and scope by attribute name

    This maintains a cache of name-to-number mappings for attributes
    to keep from having to query the CDF library constantly. It's
    mostly an internal function.

    Parameters
    ==========
    attrname : bytes
        name of the attribute. Note this is NOT a string in Python 3!

    Raises
    ======
    CDFError : if attribute is not found, or its scope is neither
        global nor variable

    Returns
    =======
    out : tuple
        attribute number, scope (True for global) of this attribute
    """
    cached = self._attr_info.get(attrname)
    if cached is not None:
        return cached
    number = ctypes.c_long(0)
    self._call(const.GET_, const.ATTR_NUMBER_, attrname,
               ctypes.byref(number))
    scope_holder = ctypes.c_long(0)
    self._call(const.SELECT_, const.ATTR_, number,
               const.GET_, const.ATTR_SCOPE_, ctypes.byref(scope_holder))
    # Map the library's scope constant onto a boolean (True == global).
    if scope_holder.value == const.GLOBAL_SCOPE.value:
        is_global = True
    elif scope_holder.value == const.VARIABLE_SCOPE.value:
        is_global = False
    else:
        raise CDFError(const.BAD_SCOPE)
    result = (number.value, is_global)
    self._attr_info[attrname] = result
    return result
4,735,735,691,906,771,000
Get the attribute number and scope by attribute name This maintains a cache of name-to-number mappings for attributes to keep from having to query the CDF library constantly. It's mostly an internal function. Parameters ========== attrname : bytes name of the zVariable. Not this is NOT a string in Python 3! Raises ====== CDFError : if variable is not found Returns ======= out : tuple attribute number, scope (True for global) of this attribute
pycdf/__init__.py
attr_num
cpiker/condaCDF
python
def attr_num(self, attrname): "Get the attribute number and scope by attribute name\n\n This maintains a cache of name-to-number mappings for attributes\n to keep from having to query the CDF library constantly. It's mostly\n an internal function.\n\n Parameters\n ==========\n attrname : bytes\n name of the zVariable. Not this is NOT a string in Python 3!\n\n Raises\n ======\n CDFError : if variable is not found\n\n Returns\n =======\n out : tuple\n attribute number, scope (True for global) of this attribute\n " res = self._attr_info.get(attrname, None) if (res is None): attrNum = ctypes.c_long(0) self._call(const.GET_, const.ATTR_NUMBER_, attrname, ctypes.byref(attrNum)) scope = ctypes.c_long(0) self._call(const.SELECT_, const.ATTR_, attrNum, const.GET_, const.ATTR_SCOPE_, ctypes.byref(scope)) if (scope.value == const.GLOBAL_SCOPE.value): scope = True elif (scope.value == const.VARIABLE_SCOPE.value): scope = False else: raise CDFError(const.BAD_SCOPE) res = (attrNum.value, scope) self._attr_info[attrname] = res return res
def clear_attr_from_cache(self, attrname):
    """Mark an attribute deleted in the name-to-number cache

    Will remove an attribute, and all attributes with higher numbers,
    from the attribute cache.

    Does NOT delete the attribute itself!

    This maintains a cache of name-to-number mappings for attributes
    to keep from having to query the CDF library constantly. It's
    mostly an internal function.

    Parameters
    ==========
    attrname : bytes
        name of the attribute. Note this is NOT a string in Python 3!
    """
    threshold = self.attr_num(attrname)[0]
    # Collect first, then delete, so the dict isn't mutated while
    # being iterated.
    doomed = [name for name, info in self._attr_info.items()
              if info[0] >= threshold]
    for name in doomed:
        del self._attr_info[name]
5,254,106,712,406,894,000
Mark an attribute deleted in the name-to-number cache Will remove an attribute, and all attributes with higher numbers, from the attribute cache. Does NOT delete the variable! This maintains a cache of name-to-number mappings for attributes to keep from having to query the CDF library constantly. It's mostly an internal function. Parameters ========== attrname : bytes name of the attribute. Not this is NOT a string in Python 3!
pycdf/__init__.py
clear_attr_from_cache
cpiker/condaCDF
python
def clear_attr_from_cache(self, attrname): "Mark an attribute deleted in the name-to-number cache\n\n Will remove an attribute, and all attributes with higher numbers,\n from the attribute cache.\n\n Does NOT delete the variable!\n\n This maintains a cache of name-to-number mappings for attributes\n to keep from having to query the CDF library constantly. It's mostly\n an internal function.\n\n Parameters\n ==========\n attrname : bytes\n name of the attribute. Not this is NOT a string in Python 3!\n " (num, scope) = self.attr_num(attrname) for (a, n) in list(self._attr_info.items()): if (n[0] >= num): del self._attr_info[a]
def clear_from_cache(self, varname): "Mark a variable deleted in the name-to-number cache\n\n Will remove a variable, and all variables with higher numbers,\n from the variable cache.\n\n Does NOT delete the variable!\n\n This maintains a cache of name-to-number mappings for zVariables\n to keep from having to query the CDF library constantly. It's mostly\n an internal function.\n\n Parameters\n ==========\n varname : bytes\n name of the zVariable. Not this is NOT a string in Python 3!\n " num = self.var_num(varname) for (v, n) in list(self._var_nums.items()): if (n >= num): del self._var_nums[v]
-3,410,475,348,191,111,700
Mark a variable deleted in the name-to-number cache Will remove a variable, and all variables with higher numbers, from the variable cache. Does NOT delete the variable! This maintains a cache of name-to-number mappings for zVariables to keep from having to query the CDF library constantly. It's mostly an internal function. Parameters ========== varname : bytes name of the zVariable. Not this is NOT a string in Python 3!
pycdf/__init__.py
clear_from_cache
cpiker/condaCDF
python
def clear_from_cache(self, varname): "Mark a variable deleted in the name-to-number cache\n\n Will remove a variable, and all variables with higher numbers,\n from the variable cache.\n\n Does NOT delete the variable!\n\n This maintains a cache of name-to-number mappings for zVariables\n to keep from having to query the CDF library constantly. It's mostly\n an internal function.\n\n Parameters\n ==========\n varname : bytes\n name of the zVariable. Not this is NOT a string in Python 3!\n " num = self.var_num(varname) for (v, n) in list(self._var_nums.items()): if (n >= num): del self._var_nums[v]
def add_attr_to_cache(self, attrname, num, scope): "Add an attribute to the name-to-number cache\n\n This maintains a cache of name-to-number mappings for attributes\n to keep from having to query the CDF library constantly. It's mostly\n an internal function.\n\n Parameters\n ==========\n varname : bytes\n name of the zVariable. Not this is NOT a string in Python 3!\n num : int\n number of the variable\n scope : bool\n True if global scope; False if variable scope.\n " self._attr_info[attrname] = (num, scope)
2,083,129,337,404,797,700
Add an attribute to the name-to-number cache This maintains a cache of name-to-number mappings for attributes to keep from having to query the CDF library constantly. It's mostly an internal function. Parameters ========== varname : bytes name of the zVariable. Not this is NOT a string in Python 3! num : int number of the variable scope : bool True if global scope; False if variable scope.
pycdf/__init__.py
add_attr_to_cache
cpiker/condaCDF
python
def add_attr_to_cache(self, attrname, num, scope): "Add an attribute to the name-to-number cache\n\n This maintains a cache of name-to-number mappings for attributes\n to keep from having to query the CDF library constantly. It's mostly\n an internal function.\n\n Parameters\n ==========\n varname : bytes\n name of the zVariable. Not this is NOT a string in Python 3!\n num : int\n number of the variable\n scope : bool\n True if global scope; False if variable scope.\n " self._attr_info[attrname] = (num, scope)
def add_to_cache(self, varname, num): "Add a variable to the name-to-number cache\n\n This maintains a cache of name-to-number mappings for zVariables\n to keep from having to query the CDF library constantly. It's mostly\n an internal function.\n\n Parameters\n ==========\n varname : bytes\n name of the zVariable. Not this is NOT a string in Python 3!\n num : int\n number of the variable\n " self._var_nums[varname] = num
3,774,387,525,938,285,600
Add a variable to the name-to-number cache This maintains a cache of name-to-number mappings for zVariables to keep from having to query the CDF library constantly. It's mostly an internal function. Parameters ========== varname : bytes name of the zVariable. Not this is NOT a string in Python 3! num : int number of the variable
pycdf/__init__.py
add_to_cache
cpiker/condaCDF
python
def add_to_cache(self, varname, num): "Add a variable to the name-to-number cache\n\n This maintains a cache of name-to-number mappings for zVariables\n to keep from having to query the CDF library constantly. It's mostly\n an internal function.\n\n Parameters\n ==========\n varname : bytes\n name of the zVariable. Not this is NOT a string in Python 3!\n num : int\n number of the variable\n " self._var_nums[varname] = num
def __init__(self, cdf_file, var_name, *args): 'Create or locate a variable\n\n Parameters\n ==========\n cdf_file : :py:class:`pycdf.CDF`\n CDF file containing this variable\n var_name : string\n name of this variable\n\n Other Parameters\n ================\n args\n additional arguments passed to :py:meth:`_create`. If none,\n opens an existing variable. If provided, creates a\n new one.\n\n Raises\n ======\n CDFError\n if CDF library reports an error\n\n Warns\n =====\n CDFWarning\n if CDF library reports a warning\n ' self.cdf_file = cdf_file self._name = None self._type = None self._raw = False if (len(args) == 0): self._get(var_name) else: self._create(var_name, *args) self._attrlistref = weakref.ref(zAttrList(self))
-5,344,524,584,329,326,000
Create or locate a variable Parameters ========== cdf_file : :py:class:`pycdf.CDF` CDF file containing this variable var_name : string name of this variable Other Parameters ================ args additional arguments passed to :py:meth:`_create`. If none, opens an existing variable. If provided, creates a new one. Raises ====== CDFError if CDF library reports an error Warns ===== CDFWarning if CDF library reports a warning
pycdf/__init__.py
__init__
cpiker/condaCDF
python
def __init__(self, cdf_file, var_name, *args): 'Create or locate a variable\n\n Parameters\n ==========\n cdf_file : :py:class:`pycdf.CDF`\n CDF file containing this variable\n var_name : string\n name of this variable\n\n Other Parameters\n ================\n args\n additional arguments passed to :py:meth:`_create`. If none,\n opens an existing variable. If provided, creates a\n new one.\n\n Raises\n ======\n CDFError\n if CDF library reports an error\n\n Warns\n =====\n CDFWarning\n if CDF library reports a warning\n ' self.cdf_file = cdf_file self._name = None self._type = None self._raw = False if (len(args) == 0): self._get(var_name) else: self._create(var_name, *args) self._attrlistref = weakref.ref(zAttrList(self))
def __getitem__(self, key): 'Returns a slice from the data array. Details under :py:class:`pycdf.Var`.\n\n @return: The data from this variable\n @rtype: list-of-lists of appropriate type.\n @raise IndexError: if L{key} is out of range, mismatches dimensions,\n or simply unparseable.\n @raise CDFError: for errors from the CDF library\n ' hslice = _Hyperslice(self, key) if hslice.rv: if ((hslice.dimsizes[0] == 0) and hslice.degen[0] and (hslice.starts[0] == 0)): raise IndexError('record index out of range') elif (hslice.dimsizes[0] == 0): if ((len(hslice.degen) > 1) and max(hslice.degen[1:])): raise IndexError('record index out of range') else: hslice.counts[...] = 0 if (len(hslice.counts) == 1): hslice.degen[0] = False result = hslice.create_array() if (hslice.counts[0] != 0): hslice.select() lib.call(const.GET_, const.zVAR_HYPERDATA_, result.ctypes.data_as(ctypes.c_void_p)) return hslice.convert_input_array(result)
-6,790,587,963,677,555,000
Returns a slice from the data array. Details under :py:class:`pycdf.Var`. @return: The data from this variable @rtype: list-of-lists of appropriate type. @raise IndexError: if L{key} is out of range, mismatches dimensions, or simply unparseable. @raise CDFError: for errors from the CDF library
pycdf/__init__.py
__getitem__
cpiker/condaCDF
python
def __getitem__(self, key): 'Returns a slice from the data array. Details under :py:class:`pycdf.Var`.\n\n @return: The data from this variable\n @rtype: list-of-lists of appropriate type.\n @raise IndexError: if L{key} is out of range, mismatches dimensions,\n or simply unparseable.\n @raise CDFError: for errors from the CDF library\n ' hslice = _Hyperslice(self, key) if hslice.rv: if ((hslice.dimsizes[0] == 0) and hslice.degen[0] and (hslice.starts[0] == 0)): raise IndexError('record index out of range') elif (hslice.dimsizes[0] == 0): if ((len(hslice.degen) > 1) and max(hslice.degen[1:])): raise IndexError('record index out of range') else: hslice.counts[...] = 0 if (len(hslice.counts) == 1): hslice.degen[0] = False result = hslice.create_array() if (hslice.counts[0] != 0): hslice.select() lib.call(const.GET_, const.zVAR_HYPERDATA_, result.ctypes.data_as(ctypes.c_void_p)) return hslice.convert_input_array(result)
def __delitem__(self, key): 'Removes a record (or set of records) from the CDF\n\n Only whole records can be deleted, so the del call must either specify\n only one dimension or it must specify all elements of the non-record\n dimensions. This is *not* a way to resize a variable!\n\n Deleting records from the middle of a variable may be very slow in\n some circumstances. To work around a bug in CDF library versions\n 3.4.0 and before, all the data must be read in, the requested deletions\n done, and then all written back out.\n\n @param key: index or slice to delete\n @type key: int or slice\n @raise TypeError: if an attempt is made to delete from a non\n record-varying variable, or to delete below\n the record level\n ' if (not self.rv()): raise TypeError('Cannot delete records from non-record-varying variable.') hslice = _Hyperslice(self, key) if ((hslice.dims > 1) and (hslice.counts[1:] != hslice.dimsizes[1:]).any()): raise TypeError('Can only delete entire records.') if (hslice.counts[0] == 0): return start = hslice.starts[0] count = hslice.counts[0] interval = hslice.intervals[0] dimsize = hslice.dimsizes[0] self._call() dangerous_delete = False if (lib._del_middle_rec_bug and ((interval != 1) or ((start != 0) and ((start + count) < dimsize)))): entries = ctypes.c_long(0) lib.call(const.GET_, const.zVAR_nINDEXENTRIES_, ctypes.byref(entries)) dangerous_delete = (entries.value == 1) if dangerous_delete: data = self[...] 
data = numpy.delete(data, numpy.arange(start, (start + (count * interval)), interval), 0) self[0:(dimsize - count)] = data first_rec = (dimsize - count) last_rec = (dimsize - 1) lib.call(const.DELETE_, const.zVAR_RECORDS_, ctypes.c_long(first_rec), ctypes.c_long(last_rec)) elif (interval == 1): first_rec = ctypes.c_long(start) last_rec = ctypes.c_long(((start + count) - 1)) lib.call(const.DELETE_, const.zVAR_RECORDS_, first_rec, last_rec) else: self._call() for recno in range((start + ((count - 1) * interval)), (start - 1), ((- 1) * interval)): lib.call(const.DELETE_, const.zVAR_RECORDS_, ctypes.c_long(recno), ctypes.c_long(recno))
433,044,169,112,213,400
Removes a record (or set of records) from the CDF Only whole records can be deleted, so the del call must either specify only one dimension or it must specify all elements of the non-record dimensions. This is *not* a way to resize a variable! Deleting records from the middle of a variable may be very slow in some circumstances. To work around a bug in CDF library versions 3.4.0 and before, all the data must be read in, the requested deletions done, and then all written back out. @param key: index or slice to delete @type key: int or slice @raise TypeError: if an attempt is made to delete from a non record-varying variable, or to delete below the record level
pycdf/__init__.py
__delitem__
cpiker/condaCDF
python
def __delitem__(self, key): 'Removes a record (or set of records) from the CDF\n\n Only whole records can be deleted, so the del call must either specify\n only one dimension or it must specify all elements of the non-record\n dimensions. This is *not* a way to resize a variable!\n\n Deleting records from the middle of a variable may be very slow in\n some circumstances. To work around a bug in CDF library versions\n 3.4.0 and before, all the data must be read in, the requested deletions\n done, and then all written back out.\n\n @param key: index or slice to delete\n @type key: int or slice\n @raise TypeError: if an attempt is made to delete from a non\n record-varying variable, or to delete below\n the record level\n ' if (not self.rv()): raise TypeError('Cannot delete records from non-record-varying variable.') hslice = _Hyperslice(self, key) if ((hslice.dims > 1) and (hslice.counts[1:] != hslice.dimsizes[1:]).any()): raise TypeError('Can only delete entire records.') if (hslice.counts[0] == 0): return start = hslice.starts[0] count = hslice.counts[0] interval = hslice.intervals[0] dimsize = hslice.dimsizes[0] self._call() dangerous_delete = False if (lib._del_middle_rec_bug and ((interval != 1) or ((start != 0) and ((start + count) < dimsize)))): entries = ctypes.c_long(0) lib.call(const.GET_, const.zVAR_nINDEXENTRIES_, ctypes.byref(entries)) dangerous_delete = (entries.value == 1) if dangerous_delete: data = self[...] 
data = numpy.delete(data, numpy.arange(start, (start + (count * interval)), interval), 0) self[0:(dimsize - count)] = data first_rec = (dimsize - count) last_rec = (dimsize - 1) lib.call(const.DELETE_, const.zVAR_RECORDS_, ctypes.c_long(first_rec), ctypes.c_long(last_rec)) elif (interval == 1): first_rec = ctypes.c_long(start) last_rec = ctypes.c_long(((start + count) - 1)) lib.call(const.DELETE_, const.zVAR_RECORDS_, first_rec, last_rec) else: self._call() for recno in range((start + ((count - 1) * interval)), (start - 1), ((- 1) * interval)): lib.call(const.DELETE_, const.zVAR_RECORDS_, ctypes.c_long(recno), ctypes.c_long(recno))
def __setitem__(self, key, data): 'Puts a slice into the data array. Details under :py:class:`pycdf.Var`.\n\n @param key: index or slice to store\n @type key: int or slice\n @param data: data to store\n @type data: numpy.array\n @raise IndexError: if L{key} is out of range, mismatches dimensions,\n or simply unparseable. IndexError will\n @raise CDFError: for errors from the CDF library\n ' hslice = _Hyperslice(self, key) n_recs = hslice.counts[0] hslice.expand(data) cdf_type = self.type() if (cdf_type == const.CDF_EPOCH16.value): if (not self._raw): try: data = lib.v_datetime_to_epoch16(data) except AttributeError: pass data = numpy.require(data, requirements=('C', 'A', 'W'), dtype=numpy.float64) elif (cdf_type == const.CDF_EPOCH.value): if (not self._raw): try: data = lib.v_datetime_to_epoch(data) except AttributeError: pass data = numpy.require(data, requirements=('C', 'A', 'W'), dtype=numpy.float64) elif (cdf_type == const.CDF_TIME_TT2000.value): if (not self._raw): try: data = lib.v_datetime_to_tt2000(data) except AttributeError: pass data = numpy.require(data, requirements=('C', 'A', 'W'), dtype=numpy.int64) else: data = numpy.require(data, requirements=('C', 'A', 'W'), dtype=self._np_type()) if (cdf_type == const.CDF_EPOCH16.value): datashape = data.shape[:(- 1)] else: datashape = data.shape if (datashape != tuple(hslice.expected_dims())): raise ValueError(((('attempt to assign data of dimensions ' + str(datashape)) + ' to slice of dimensions ') + str(tuple(hslice.expected_dims())))) data = hslice.convert_output_array(data) if ((hslice.counts[0] > n_recs) and ((hslice.starts[0] + n_recs) < hslice.dimsizes[0])): saved_data = self[(hslice.starts[0] + n_recs):] if (hslice.counts[0] > 0): hslice.select() lib.call(const.PUT_, const.zVAR_HYPERDATA_, data.ctypes.data_as(ctypes.c_void_p)) if (hslice.counts[0] < n_recs): first_rec = (hslice.starts[0] + hslice.counts[0]) last_rec = (hslice.dimsizes[0] - 1) lib.call(const.DELETE_, const.zVAR_RECORDS_, 
ctypes.c_long(first_rec), ctypes.c_long(last_rec)) elif ((hslice.counts[0] > n_recs) and ((hslice.starts[0] + n_recs) < hslice.dimsizes[0])): self[(hslice.starts[0] + hslice.counts[0]):] = saved_data
8,200,239,495,651,038,000
Puts a slice into the data array. Details under :py:class:`pycdf.Var`. @param key: index or slice to store @type key: int or slice @param data: data to store @type data: numpy.array @raise IndexError: if L{key} is out of range, mismatches dimensions, or simply unparseable. IndexError will @raise CDFError: for errors from the CDF library
pycdf/__init__.py
__setitem__
cpiker/condaCDF
python
def __setitem__(self, key, data): 'Puts a slice into the data array. Details under :py:class:`pycdf.Var`.\n\n @param key: index or slice to store\n @type key: int or slice\n @param data: data to store\n @type data: numpy.array\n @raise IndexError: if L{key} is out of range, mismatches dimensions,\n or simply unparseable. IndexError will\n @raise CDFError: for errors from the CDF library\n ' hslice = _Hyperslice(self, key) n_recs = hslice.counts[0] hslice.expand(data) cdf_type = self.type() if (cdf_type == const.CDF_EPOCH16.value): if (not self._raw): try: data = lib.v_datetime_to_epoch16(data) except AttributeError: pass data = numpy.require(data, requirements=('C', 'A', 'W'), dtype=numpy.float64) elif (cdf_type == const.CDF_EPOCH.value): if (not self._raw): try: data = lib.v_datetime_to_epoch(data) except AttributeError: pass data = numpy.require(data, requirements=('C', 'A', 'W'), dtype=numpy.float64) elif (cdf_type == const.CDF_TIME_TT2000.value): if (not self._raw): try: data = lib.v_datetime_to_tt2000(data) except AttributeError: pass data = numpy.require(data, requirements=('C', 'A', 'W'), dtype=numpy.int64) else: data = numpy.require(data, requirements=('C', 'A', 'W'), dtype=self._np_type()) if (cdf_type == const.CDF_EPOCH16.value): datashape = data.shape[:(- 1)] else: datashape = data.shape if (datashape != tuple(hslice.expected_dims())): raise ValueError(((('attempt to assign data of dimensions ' + str(datashape)) + ' to slice of dimensions ') + str(tuple(hslice.expected_dims())))) data = hslice.convert_output_array(data) if ((hslice.counts[0] > n_recs) and ((hslice.starts[0] + n_recs) < hslice.dimsizes[0])): saved_data = self[(hslice.starts[0] + n_recs):] if (hslice.counts[0] > 0): hslice.select() lib.call(const.PUT_, const.zVAR_HYPERDATA_, data.ctypes.data_as(ctypes.c_void_p)) if (hslice.counts[0] < n_recs): first_rec = (hslice.starts[0] + hslice.counts[0]) last_rec = (hslice.dimsizes[0] - 1) lib.call(const.DELETE_, const.zVAR_RECORDS_, 
ctypes.c_long(first_rec), ctypes.c_long(last_rec)) elif ((hslice.counts[0] > n_recs) and ((hslice.starts[0] + n_recs) < hslice.dimsizes[0])): self[(hslice.starts[0] + hslice.counts[0]):] = saved_data
def extend(self, data): '\n Append multiple values to the end of this variable\n\n This is an efficiency function which overrides the base implementation\n in MutableSequence.\n\n Parameters\n ----------\n data :\n the data to append\n ' self[len(self):] = data
1,579,934,320,573,426,000
Append multiple values to the end of this variable This is an efficiency function which overrides the base implementation in MutableSequence. Parameters ---------- data : the data to append
pycdf/__init__.py
extend
cpiker/condaCDF
python
def extend(self, data): '\n Append multiple values to the end of this variable\n\n This is an efficiency function which overrides the base implementation\n in MutableSequence.\n\n Parameters\n ----------\n data :\n the data to append\n ' self[len(self):] = data
def insert(self, index, data): '\n Inserts a *single* record before an index\n\n Parameters\n ----------\n index : int\n index before which to insert the new record\n data :\n the record to insert\n ' self[index:index] = [data]
-3,833,956,266,901,798,400
Inserts a *single* record before an index Parameters ---------- index : int index before which to insert the new record data : the record to insert
pycdf/__init__.py
insert
cpiker/condaCDF
python
def insert(self, index, data): '\n Inserts a *single* record before an index\n\n Parameters\n ----------\n index : int\n index before which to insert the new record\n data :\n the record to insert\n ' self[index:index] = [data]
def _create(self, var_name, datatype, n_elements=1, dims=(), recVary=const.VARY, dimVarys=None): 'Creates a new zVariable\n\n @param var_name: name of this variable\n @type var_name: string\n @param datatype: CDF data type\n @type datatype: ctypes.c_long\n @param n_elements: number of elements (should be 1 except for\n CDF_CHAR variables).\n @type n_elements: long\n @param dims: size of each dimension for multi-dimensional variable,\n or empty for a zero-dimensional\n @type dims: sequence of long\n @param recVary: record variance for this variable (VARY/NOVARY)\n @type recVary: long\n @param dimVarys: array of VARY or NOVARY, variance for each dimension\n @type dimVarys: sequence of long\n @return: new variable with this name\n @rtype: :py:class:`pycdf.Var`\n @raise CDFError: if CDF library reports an error\n @raise CDFWarning: if CDF library reports a warning and interpreter\n is set to error on warnings.\n @note: Not intended to be used directly; use L{CDF.new}.\n ' dim_array = (ctypes.c_long * len(dims))(*dims) enc_name = var_name.encode('ascii') if (dimVarys is None): dim_vary_array = (ctypes.c_long * (len(dims) if (len(dims) > 0) else 1))(const.VARY) else: dim_vary_array = (ctypes.c_long * len(dims))(*dimVarys) varNum = ctypes.c_long(0) self.cdf_file._call(const.CREATE_, const.zVAR_, enc_name, datatype, ctypes.c_long(n_elements), ctypes.c_long(len(dims)), dim_array, recVary, dim_vary_array, ctypes.byref(varNum)) self._name = enc_name self.cdf_file.add_to_cache(enc_name, varNum.value)
-3,817,955,230,981,504,000
Creates a new zVariable @param var_name: name of this variable @type var_name: string @param datatype: CDF data type @type datatype: ctypes.c_long @param n_elements: number of elements (should be 1 except for CDF_CHAR variables). @type n_elements: long @param dims: size of each dimension for multi-dimensional variable, or empty for a zero-dimensional @type dims: sequence of long @param recVary: record variance for this variable (VARY/NOVARY) @type recVary: long @param dimVarys: array of VARY or NOVARY, variance for each dimension @type dimVarys: sequence of long @return: new variable with this name @rtype: :py:class:`pycdf.Var` @raise CDFError: if CDF library reports an error @raise CDFWarning: if CDF library reports a warning and interpreter is set to error on warnings. @note: Not intended to be used directly; use L{CDF.new}.
pycdf/__init__.py
_create
cpiker/condaCDF
python
def _create(self, var_name, datatype, n_elements=1, dims=(), recVary=const.VARY, dimVarys=None): 'Creates a new zVariable\n\n @param var_name: name of this variable\n @type var_name: string\n @param datatype: CDF data type\n @type datatype: ctypes.c_long\n @param n_elements: number of elements (should be 1 except for\n CDF_CHAR variables).\n @type n_elements: long\n @param dims: size of each dimension for multi-dimensional variable,\n or empty for a zero-dimensional\n @type dims: sequence of long\n @param recVary: record variance for this variable (VARY/NOVARY)\n @type recVary: long\n @param dimVarys: array of VARY or NOVARY, variance for each dimension\n @type dimVarys: sequence of long\n @return: new variable with this name\n @rtype: :py:class:`pycdf.Var`\n @raise CDFError: if CDF library reports an error\n @raise CDFWarning: if CDF library reports a warning and interpreter\n is set to error on warnings.\n @note: Not intended to be used directly; use L{CDF.new}.\n ' dim_array = (ctypes.c_long * len(dims))(*dims) enc_name = var_name.encode('ascii') if (dimVarys is None): dim_vary_array = (ctypes.c_long * (len(dims) if (len(dims) > 0) else 1))(const.VARY) else: dim_vary_array = (ctypes.c_long * len(dims))(*dimVarys) varNum = ctypes.c_long(0) self.cdf_file._call(const.CREATE_, const.zVAR_, enc_name, datatype, ctypes.c_long(n_elements), ctypes.c_long(len(dims)), dim_array, recVary, dim_vary_array, ctypes.byref(varNum)) self._name = enc_name self.cdf_file.add_to_cache(enc_name, varNum.value)
def _delete(self): 'Removes this zVariable from the CDF\n\n @raise CDFError: if CDF library reports an error\n @raise CDFWarning: if CDF library reports a warning and interpreter\n is set to error on warnings.\n ' self._call(const.DELETE_, const.zVAR_) self.cdf_file.clear_from_cache(self._name) self._name = None
4,279,529,010,044,290,000
Removes this zVariable from the CDF @raise CDFError: if CDF library reports an error @raise CDFWarning: if CDF library reports a warning and interpreter is set to error on warnings.
pycdf/__init__.py
_delete
cpiker/condaCDF
python
def _delete(self): 'Removes this zVariable from the CDF\n\n @raise CDFError: if CDF library reports an error\n @raise CDFWarning: if CDF library reports a warning and interpreter\n is set to error on warnings.\n ' self._call(const.DELETE_, const.zVAR_) self.cdf_file.clear_from_cache(self._name) self._name = None
def _get(self, var_name): 'Gets an existing zVariable\n\n @param var_name: name of this variable\n @type var_name: string\n @return: variable with this name\n @rtype: :py:class:`pycdf.Var`\n @raise CDFError: if CDF library reports an error\n @raise CDFWarning: if CDF library reports a warning and interpreter\n is set to error on warnings.\n @note: Not intended to be used directly; use L{CDF.__getitem__}.\n ' if isinstance(var_name, str_classes): try: enc_name = var_name.encode('ascii').rstrip() except AttributeError: enc_name = var_name.rstrip() varNum = ctypes.c_long(0) self.cdf_file._call(const.GET_, const.zVAR_NUMBER_, enc_name, ctypes.byref(varNum)) self._name = enc_name self.cdf_file.add_to_cache(enc_name, varNum.value) else: name = ctypes.create_string_buffer((const.CDF_VAR_NAME_LEN256 + 1)) self.cdf_file._call(const.SELECT_, const.zVAR_, ctypes.c_long(var_name), const.GET_, const.zVAR_NAME_, name) self._name = name.value.rstrip() self.cdf_file.add_to_cache(self._name, var_name)
6,053,662,506,556,101,000
Gets an existing zVariable @param var_name: name of this variable @type var_name: string @return: variable with this name @rtype: :py:class:`pycdf.Var` @raise CDFError: if CDF library reports an error @raise CDFWarning: if CDF library reports a warning and interpreter is set to error on warnings. @note: Not intended to be used directly; use L{CDF.__getitem__}.
pycdf/__init__.py
_get
cpiker/condaCDF
python
def _get(self, var_name): 'Gets an existing zVariable\n\n @param var_name: name of this variable\n @type var_name: string\n @return: variable with this name\n @rtype: :py:class:`pycdf.Var`\n @raise CDFError: if CDF library reports an error\n @raise CDFWarning: if CDF library reports a warning and interpreter\n is set to error on warnings.\n @note: Not intended to be used directly; use L{CDF.__getitem__}.\n ' if isinstance(var_name, str_classes): try: enc_name = var_name.encode('ascii').rstrip() except AttributeError: enc_name = var_name.rstrip() varNum = ctypes.c_long(0) self.cdf_file._call(const.GET_, const.zVAR_NUMBER_, enc_name, ctypes.byref(varNum)) self._name = enc_name self.cdf_file.add_to_cache(enc_name, varNum.value) else: name = ctypes.create_string_buffer((const.CDF_VAR_NAME_LEN256 + 1)) self.cdf_file._call(const.SELECT_, const.zVAR_, ctypes.c_long(var_name), const.GET_, const.zVAR_NAME_, name) self._name = name.value.rstrip() self.cdf_file.add_to_cache(self._name, var_name)
def _num(self): 'Returns the zVar number for this variable\n\n @return: number of this zVar\n @rtype: int\n ' return self.cdf_file.var_num(self._name)
-5,077,650,296,107,514,000
Returns the zVar number for this variable @return: number of this zVar @rtype: int
pycdf/__init__.py
_num
cpiker/condaCDF
python
def _num(self): 'Returns the zVar number for this variable\n\n @return: number of this zVar\n @rtype: int\n ' return self.cdf_file.var_num(self._name)
def __len__(self): 'Get number of records for this variable in this file\n\n @return: Number of records\n @rtype: long\n @raise CDFError: if CDF library reports an error\n @raise CDFWarning: if CDF library reports a warning and interpreter\n is set to error on warnings.\n ' count = ctypes.c_long(0) self._call(const.GET_, const.zVAR_MAXREC_, ctypes.byref(count)) return (count.value + 1)
-4,076,633,597,108,568,000
Get number of records for this variable in this file @return: Number of records @rtype: long @raise CDFError: if CDF library reports an error @raise CDFWarning: if CDF library reports a warning and interpreter is set to error on warnings.
pycdf/__init__.py
__len__
cpiker/condaCDF
python
def __len__(self): 'Get number of records for this variable in this file\n\n @return: Number of records\n @rtype: long\n @raise CDFError: if CDF library reports an error\n @raise CDFWarning: if CDF library reports a warning and interpreter\n is set to error on warnings.\n ' count = ctypes.c_long(0) self._call(const.GET_, const.zVAR_MAXREC_, ctypes.byref(count)) return (count.value + 1)
def __repr__(self): "Returns representation of the variable\n\n Cannot return anything that can be eval'd to create a copy,\n so just wrap the informal representation in angle brackets.\n @return: info on this zVar\n @rtype: str\n " return (('<Var:\n' + str(self)) + '\n>')
-3,990,811,872,302,432,000
Returns representation of the variable Cannot return anything that can be eval'd to create a copy, so just wrap the informal representation in angle brackets. @return: info on this zVar @rtype: str
pycdf/__init__.py
__repr__
cpiker/condaCDF
python
def __repr__(self): "Returns representation of the variable\n\n Cannot return anything that can be eval'd to create a copy,\n so just wrap the informal representation in angle brackets.\n @return: info on this zVar\n @rtype: str\n " return (('<Var:\n' + str(self)) + '\n>')
def __str__(self): "Returns a string representation of the variable\n\n This is an 'informal' representation in that it cannot be evaluated\n directly to create a :py:class:`pycdf.Var`.\n\n @return: info on this zVar, CDFTYPE [dimensions] NRV\n (if not record-varying)\n @rtype: str\n " if self.cdf_file._opened: cdftype = self.type() chartypes = (const.CDF_CHAR.value, const.CDF_UCHAR.value) rv = self.rv() typestr = (lib.cdftypenames[cdftype] + (('*' + str(self._nelems())) if (cdftype in chartypes) else '')) if rv: sizestr = str(([len(self)] + self._dim_sizes())) else: sizestr = str(self._dim_sizes()) return (((typestr + ' ') + sizestr) + ('' if rv else ' NRV')) elif isinstance(self._name, str): return 'zVar "{0}" in closed CDF {1}'.format(self._name, self.cdf_file.pathname) else: return 'zVar "{0}" in closed CDF {1}'.format(self._name.decode('ascii'), self.cdf_file.pathname.decode('ascii'))
-3,957,928,169,494,949,400
Returns a string representation of the variable This is an 'informal' representation in that it cannot be evaluated directly to create a :py:class:`pycdf.Var`. @return: info on this zVar, CDFTYPE [dimensions] NRV (if not record-varying) @rtype: str
pycdf/__init__.py
__str__
cpiker/condaCDF
python
def __str__(self): "Returns a string representation of the variable\n\n This is an 'informal' representation in that it cannot be evaluated\n directly to create a :py:class:`pycdf.Var`.\n\n @return: info on this zVar, CDFTYPE [dimensions] NRV\n (if not record-varying)\n @rtype: str\n " if self.cdf_file._opened: cdftype = self.type() chartypes = (const.CDF_CHAR.value, const.CDF_UCHAR.value) rv = self.rv() typestr = (lib.cdftypenames[cdftype] + (('*' + str(self._nelems())) if (cdftype in chartypes) else )) if rv: sizestr = str(([len(self)] + self._dim_sizes())) else: sizestr = str(self._dim_sizes()) return (((typestr + ' ') + sizestr) + ( if rv else ' NRV')) elif isinstance(self._name, str): return 'zVar "{0}" in closed CDF {1}'.format(self._name, self.cdf_file.pathname) else: return 'zVar "{0}" in closed CDF {1}'.format(self._name.decode('ascii'), self.cdf_file.pathname.decode('ascii'))
def _n_dims(self): 'Get number of dimensions for this variable\n\n @return: the number of dimensions\n @rtype: long\n ' n_dims = ctypes.c_long(0) self._call(const.GET_, const.zVAR_NUMDIMS_, ctypes.byref(n_dims)) return n_dims.value
-3,148,282,105,845,771,000
Get number of dimensions for this variable @return: the number of dimensions @rtype: long
pycdf/__init__.py
_n_dims
cpiker/condaCDF
python
def _n_dims(self): 'Get number of dimensions for this variable\n\n @return: the number of dimensions\n @rtype: long\n ' n_dims = ctypes.c_long(0) self._call(const.GET_, const.zVAR_NUMDIMS_, ctypes.byref(n_dims)) return n_dims.value
def _dim_sizes(self): 'Get the dimension sizes for this variable\n\n @return: sequence of sizes\n @rtype: sequence of long\n @note: This will always be in Python order (i.e. row major, last index\n iterates most quickly), *regardless* of the majority of the CDF.\n ' sizes = (ctypes.c_long * const.CDF_MAX_DIMS)(0) self._call(const.GET_, const.zVAR_DIMSIZES_, sizes) sizes = sizes[0:self._n_dims()] return sizes
3,968,508,833,405,391,400
Get the dimension sizes for this variable @return: sequence of sizes @rtype: sequence of long @note: This will always be in Python order (i.e. row major, last index iterates most quickly), *regardless* of the majority of the CDF.
pycdf/__init__.py
_dim_sizes
cpiker/condaCDF
python
def _dim_sizes(self): 'Get the dimension sizes for this variable\n\n @return: sequence of sizes\n @rtype: sequence of long\n @note: This will always be in Python order (i.e. row major, last index\n iterates most quickly), *regardless* of the majority of the CDF.\n ' sizes = (ctypes.c_long * const.CDF_MAX_DIMS)(0) self._call(const.GET_, const.zVAR_DIMSIZES_, sizes) sizes = sizes[0:self._n_dims()] return sizes
def rv(self, new_rv=None): '\n Gets or sets whether this variable has record variance\n\n If the variance is unknown, True is assumed\n (this replicates the apparent behavior of the CDF library on\n variable creation).\n\n Other Parameters\n ================\n new_rv : boolean\n True to change to record variance, False to change to NRV,\n unspecified to simply check variance.\n\n Returns\n =======\n out : Boolean\n True if record varying, False if NRV\n ' if (new_rv != None): self._call(const.PUT_, const.zVAR_RECVARY_, (const.VARY if new_rv else const.NOVARY)) vary = ctypes.c_long(0) self._call(const.GET_, const.zVAR_RECVARY_, ctypes.byref(vary)) return (vary.value != const.NOVARY.value)
-536,947,292,645,786,300
Gets or sets whether this variable has record variance If the variance is unknown, True is assumed (this replicates the apparent behavior of the CDF library on variable creation). Other Parameters ================ new_rv : boolean True to change to record variance, False to change to NRV, unspecified to simply check variance. Returns ======= out : Boolean True if record varying, False if NRV
pycdf/__init__.py
rv
cpiker/condaCDF
python
def rv(self, new_rv=None): '\n Gets or sets whether this variable has record variance\n\n If the variance is unknown, True is assumed\n (this replicates the apparent behavior of the CDF library on\n variable creation).\n\n Other Parameters\n ================\n new_rv : boolean\n True to change to record variance, False to change to NRV,\n unspecified to simply check variance.\n\n Returns\n =======\n out : Boolean\n True if record varying, False if NRV\n ' if (new_rv != None): self._call(const.PUT_, const.zVAR_RECVARY_, (const.VARY if new_rv else const.NOVARY)) vary = ctypes.c_long(0) self._call(const.GET_, const.zVAR_RECVARY_, ctypes.byref(vary)) return (vary.value != const.NOVARY.value)
def dv(self, new_dv=None): '\n Gets or sets dimension variance of each dimension of variable.\n\n If the variance is unknown, True is assumed\n (this replicates the apparent behavior of the\n CDF library on variable creation).\n\n Parameters\n ==========\n new_dv : list of boolean\n Each element True to change that dimension to dimension\n variance, False to change to not dimension variance.\n (Unspecified to simply check variance.)\n\n Returns\n =======\n out : list of boolean\n True if that dimension has variance, else false.\n ' ndims = self._n_dims() if (new_dv != None): if (len(new_dv) != ndims): raise ValueError((('Must specify variance for ' + str(ndims)) + 'dimensions.')) varies = (ctypes.c_long * ndims)(*[(const.VARY if dv else const.NOVARY) for dv in new_dv]) self._call(const.PUT_, const.zVAR_DIMVARYS_, varies) if (ndims == 0): return [] varies = (ctypes.c_long * const.CDF_MAX_DIMS)() self._call(const.GET_, const.zVAR_DIMVARYS_, varies) return [(dv != const.NOVARY.value) for dv in varies[0:ndims]]
5,561,749,453,369,350,000
Gets or sets dimension variance of each dimension of variable. If the variance is unknown, True is assumed (this replicates the apparent behavior of the CDF library on variable creation). Parameters ========== new_dv : list of boolean Each element True to change that dimension to dimension variance, False to change to not dimension variance. (Unspecified to simply check variance.) Returns ======= out : list of boolean True if that dimension has variance, else false.
pycdf/__init__.py
dv
cpiker/condaCDF
python
def dv(self, new_dv=None): '\n Gets or sets dimension variance of each dimension of variable.\n\n If the variance is unknown, True is assumed\n (this replicates the apparent behavior of the\n CDF library on variable creation).\n\n Parameters\n ==========\n new_dv : list of boolean\n Each element True to change that dimension to dimension\n variance, False to change to not dimension variance.\n (Unspecified to simply check variance.)\n\n Returns\n =======\n out : list of boolean\n True if that dimension has variance, else false.\n ' ndims = self._n_dims() if (new_dv != None): if (len(new_dv) != ndims): raise ValueError((('Must specify variance for ' + str(ndims)) + 'dimensions.')) varies = (ctypes.c_long * ndims)(*[(const.VARY if dv else const.NOVARY) for dv in new_dv]) self._call(const.PUT_, const.zVAR_DIMVARYS_, varies) if (ndims == 0): return [] varies = (ctypes.c_long * const.CDF_MAX_DIMS)() self._call(const.GET_, const.zVAR_DIMVARYS_, varies) return [(dv != const.NOVARY.value) for dv in varies[0:ndims]]
def _call(self, *args, **kwargs): "Select this CDF and variable and call the CDF internal interface\n\n Adds call to select this CDF to L{args} and passes all parameters\n directly through to the CDFlib routine of the CDF library's C internal\n interface. Checks the return value with L{Library.check_status}.\n\n @param args: Passed directly to the CDF library interface. Useful\n constants are defined in the :py:mod:`pycdf.const` module of this package.\n @type args: various, see :py:mod:`ctypes`.\n @return: CDF status from the library\n @rtype: ctypes.c_long\n @note: Terminal NULL_ is automatically added to L{args}.\n @raise CDFError: if CDF library reports an error\n @raise CDFWarning: if CDF library reports a warning and interpreter\n is set to error on warnings.\n " return self.cdf_file._call(const.SELECT_, const.zVAR_, ctypes.c_long(self.cdf_file.var_num(self._name)), *args, **kwargs)
-5,022,373,434,638,317,000
Select this CDF and variable and call the CDF internal interface Adds call to select this CDF to L{args} and passes all parameters directly through to the CDFlib routine of the CDF library's C internal interface. Checks the return value with L{Library.check_status}. @param args: Passed directly to the CDF library interface. Useful constants are defined in the :py:mod:`pycdf.const` module of this package. @type args: various, see :py:mod:`ctypes`. @return: CDF status from the library @rtype: ctypes.c_long @note: Terminal NULL_ is automatically added to L{args}. @raise CDFError: if CDF library reports an error @raise CDFWarning: if CDF library reports a warning and interpreter is set to error on warnings.
pycdf/__init__.py
_call
cpiker/condaCDF
python
def _call(self, *args, **kwargs): "Select this CDF and variable and call the CDF internal interface\n\n Adds call to select this CDF to L{args} and passes all parameters\n directly through to the CDFlib routine of the CDF library's C internal\n interface. Checks the return value with L{Library.check_status}.\n\n @param args: Passed directly to the CDF library interface. Useful\n constants are defined in the :py:mod:`pycdf.const` module of this package.\n @type args: various, see :py:mod:`ctypes`.\n @return: CDF status from the library\n @rtype: ctypes.c_long\n @note: Terminal NULL_ is automatically added to L{args}.\n @raise CDFError: if CDF library reports an error\n @raise CDFWarning: if CDF library reports a warning and interpreter\n is set to error on warnings.\n " return self.cdf_file._call(const.SELECT_, const.zVAR_, ctypes.c_long(self.cdf_file.var_num(self._name)), *args, **kwargs)
def _np_type(self): 'Returns the numpy type of this variable\n\n This is the numpy type that will come directly out of the CDF;\n see :meth:`dtype` for the representation post-conversion.\n\n Raises\n ======\n CDFError : for library-reported error or failure to find numpy type\n\n Returns\n =======\n out : dtype\n numpy dtype that will hold value from this variable\n \n ' cdftype = self.type() if ((cdftype == const.CDF_CHAR.value) or (cdftype == const.CDF_UCHAR.value)): return numpy.dtype(('S' + str(self._nelems()))) try: return lib.numpytypedict[cdftype] except KeyError: raise CDFError(const.BAD_DATA_TYPE)
6,706,597,157,620,074,000
Returns the numpy type of this variable This is the numpy type that will come directly out of the CDF; see :meth:`dtype` for the representation post-conversion. Raises ====== CDFError : for library-reported error or failure to find numpy type Returns ======= out : dtype numpy dtype that will hold value from this variable
pycdf/__init__.py
_np_type
cpiker/condaCDF
python
def _np_type(self): 'Returns the numpy type of this variable\n\n This is the numpy type that will come directly out of the CDF;\n see :meth:`dtype` for the representation post-conversion.\n\n Raises\n ======\n CDFError : for library-reported error or failure to find numpy type\n\n Returns\n =======\n out : dtype\n numpy dtype that will hold value from this variable\n \n ' cdftype = self.type() if ((cdftype == const.CDF_CHAR.value) or (cdftype == const.CDF_UCHAR.value)): return numpy.dtype(('S' + str(self._nelems()))) try: return lib.numpytypedict[cdftype] except KeyError: raise CDFError(const.BAD_DATA_TYPE)
def type(self, new_type=None): '\n Returns or sets the CDF type of this variable\n\n Parameters\n ==========\n new_type : ctypes.c_long\n the new type from :mod:`~pycdf.const`\n\n Returns\n =======\n out : int\n CDF type\n ' if (new_type != None): if (not hasattr(new_type, 'value')): new_type = ctypes.c_long(new_type) n_elements = ctypes.c_long(self._nelems()) self._call(const.PUT_, const.zVAR_DATASPEC_, new_type, n_elements) self._type = None if (self._type is None): cdftype = ctypes.c_long(0) self._call(const.GET_, const.zVAR_DATATYPE_, ctypes.byref(cdftype)) self._type = cdftype.value return self._type
-5,909,217,226,126,335,000
Returns or sets the CDF type of this variable Parameters ========== new_type : ctypes.c_long the new type from :mod:`~pycdf.const` Returns ======= out : int CDF type
pycdf/__init__.py
type
cpiker/condaCDF
python
def type(self, new_type=None): '\n Returns or sets the CDF type of this variable\n\n Parameters\n ==========\n new_type : ctypes.c_long\n the new type from :mod:`~pycdf.const`\n\n Returns\n =======\n out : int\n CDF type\n ' if (new_type != None): if (not hasattr(new_type, 'value')): new_type = ctypes.c_long(new_type) n_elements = ctypes.c_long(self._nelems()) self._call(const.PUT_, const.zVAR_DATASPEC_, new_type, n_elements) self._type = None if (self._type is None): cdftype = ctypes.c_long(0) self._call(const.GET_, const.zVAR_DATATYPE_, ctypes.byref(cdftype)) self._type = cdftype.value return self._type
def _nelems(self): 'Number of elements for each value in this variable\n\n This is the length of strings for CHAR and UCHAR,\n should be 1 otherwise.\n @return: length of strings\n @rtype: int\n ' nelems = ctypes.c_long(0) self._call(const.GET_, const.zVAR_NUMELEMS_, ctypes.byref(nelems)) return nelems.value
-7,765,110,379,715,856,000
Number of elements for each value in this variable This is the length of strings for CHAR and UCHAR, should be 1 otherwise. @return: length of strings @rtype: int
pycdf/__init__.py
_nelems
cpiker/condaCDF
python
def _nelems(self): 'Number of elements for each value in this variable\n\n This is the length of strings for CHAR and UCHAR,\n should be 1 otherwise.\n @return: length of strings\n @rtype: int\n ' nelems = ctypes.c_long(0) self._call(const.GET_, const.zVAR_NUMELEMS_, ctypes.byref(nelems)) return nelems.value
def name(self): "\n Returns the name of this variable\n\n Returns\n =======\n out : str\n variable's name\n " if isinstance(self._name, str): return self._name elif isinstance(self._name, bytes): return self._name.decode()
-3,283,656,668,328,771,000
Returns the name of this variable Returns ======= out : str variable's name
pycdf/__init__.py
name
cpiker/condaCDF
python
def name(self): "\n Returns the name of this variable\n\n Returns\n =======\n out : str\n variable's name\n " if isinstance(self._name, str): return self._name elif isinstance(self._name, bytes): return self._name.decode()
def compress(self, comptype=None, param=None): "\n Set or check the compression of this variable\n\n Compression may not be changeable on variables with data already\n written; even deleting the data may not permit the change.\n\n See section 2.6 of the CDF user's guide for more information on\n compression.\n\n Other Parameters\n ================\n comptype : ctypes.c_long\n type of compression to change to, see CDF C reference\n manual section 4.10. Constants for this parameter\n are in :mod:`~pycdf.const`. If not specified, will not\n change compression.\n param : ctypes.c_long\n Compression parameter, see CDF CRM 4.10 and\n :mod:`~pycdf.const`.\n If not specified, will choose reasonable default (5 for\n gzip; other types have only one possible parameter.)\n\n Returns\n =======\n out : tuple\n the (comptype, param) currently in effect\n " return _compress(self, comptype, param)
3,699,615,847,711,787,500
Set or check the compression of this variable Compression may not be changeable on variables with data already written; even deleting the data may not permit the change. See section 2.6 of the CDF user's guide for more information on compression. Other Parameters ================ comptype : ctypes.c_long type of compression to change to, see CDF C reference manual section 4.10. Constants for this parameter are in :mod:`~pycdf.const`. If not specified, will not change compression. param : ctypes.c_long Compression parameter, see CDF CRM 4.10 and :mod:`~pycdf.const`. If not specified, will choose reasonable default (5 for gzip; other types have only one possible parameter.) Returns ======= out : tuple the (comptype, param) currently in effect
pycdf/__init__.py
compress
cpiker/condaCDF
python
def compress(self, comptype=None, param=None): "\n Set or check the compression of this variable\n\n Compression may not be changeable on variables with data already\n written; even deleting the data may not permit the change.\n\n See section 2.6 of the CDF user's guide for more information on\n compression.\n\n Other Parameters\n ================\n comptype : ctypes.c_long\n type of compression to change to, see CDF C reference\n manual section 4.10. Constants for this parameter\n are in :mod:`~pycdf.const`. If not specified, will not\n change compression.\n param : ctypes.c_long\n Compression parameter, see CDF CRM 4.10 and\n :mod:`~pycdf.const`.\n If not specified, will choose reasonable default (5 for\n gzip; other types have only one possible parameter.)\n\n Returns\n =======\n out : tuple\n the (comptype, param) currently in effect\n " return _compress(self, comptype, param)
def copy(self): '\n Copies all data and attributes from this variable\n\n Returns\n =======\n out : :class:`VarCopy`\n list of all data in record order\n ' return VarCopy(self)
49,623,961,128,289,530
Copies all data and attributes from this variable Returns ======= out : :class:`VarCopy` list of all data in record order
pycdf/__init__.py
copy
cpiker/condaCDF
python
def copy(self): '\n Copies all data and attributes from this variable\n\n Returns\n =======\n out : :class:`VarCopy`\n list of all data in record order\n ' return VarCopy(self)
def rename(self, new_name): '\n Renames this variable\n\n Parameters\n ==========\n new_name : str\n the new name for this variable\n ' try: enc_name = new_name.encode('ascii') except AttributeError: enc_name = new_name if (len(enc_name) > const.CDF_VAR_NAME_LEN256): raise CDFError(const.BAD_VAR_NAME) self._call(const.PUT_, const.zVAR_NAME_, enc_name) self.cdf_file.add_to_cache(enc_name, self.cdf_file.var_num(self._name)) del self.cdf_file._var_nums[self._name] self._name = enc_name
3,806,647,295,599,496,000
Renames this variable Parameters ========== new_name : str the new name for this variable
pycdf/__init__.py
rename
cpiker/condaCDF
python
def rename(self, new_name): '\n Renames this variable\n\n Parameters\n ==========\n new_name : str\n the new name for this variable\n ' try: enc_name = new_name.encode('ascii') except AttributeError: enc_name = new_name if (len(enc_name) > const.CDF_VAR_NAME_LEN256): raise CDFError(const.BAD_VAR_NAME) self._call(const.PUT_, const.zVAR_NAME_, enc_name) self.cdf_file.add_to_cache(enc_name, self.cdf_file.var_num(self._name)) del self.cdf_file._var_nums[self._name] self._name = enc_name
@property def shape(self): '\n Provides the numpy array-like shape of this variable.\n\n Returns a tuple; first element is number of records (RV variable\n only) And the rest provide the dimensionality of the variable.\n\n .. note::\n Assigning to this attribute will not change the shape.\n ' if self.rv(): return tuple(([len(self)] + self._dim_sizes())) else: return tuple(self._dim_sizes())
-4,140,950,993,922,606,600
Provides the numpy array-like shape of this variable. Returns a tuple; first element is number of records (RV variable only) And the rest provide the dimensionality of the variable. .. note:: Assigning to this attribute will not change the shape.
pycdf/__init__.py
shape
cpiker/condaCDF
python
@property def shape(self): '\n Provides the numpy array-like shape of this variable.\n\n Returns a tuple; first element is number of records (RV variable\n only) And the rest provide the dimensionality of the variable.\n\n .. note::\n Assigning to this attribute will not change the shape.\n ' if self.rv(): return tuple(([len(self)] + self._dim_sizes())) else: return tuple(self._dim_sizes())
@property def dtype(self): '\n Provide the numpy dtype equivalent to the CDF type of this variable.\n\n Data from this variable will be returned in numpy arrays of this type.\n\n See Also\n --------\n type\n ' cdftype = self.type() if ((cdftype in (const.CDF_CHAR.value, const.CDF_UCHAR.value)) and (str is not bytes) and (not self._raw)): return numpy.dtype(('U' + str(self._nelems()))) if ((cdftype in (const.CDF_EPOCH.value, const.CDF_EPOCH16.value, const.CDF_TIME_TT2000.value)) and (not self._raw)): return numpy.dtype('O') return self._np_type()
-1,868,019,082,301,510,700
Provide the numpy dtype equivalent to the CDF type of this variable. Data from this variable will be returned in numpy arrays of this type. See Also -------- type
pycdf/__init__.py
dtype
cpiker/condaCDF
python
@property def dtype(self): '\n Provide the numpy dtype equivalent to the CDF type of this variable.\n\n Data from this variable will be returned in numpy arrays of this type.\n\n See Also\n --------\n type\n ' cdftype = self.type() if ((cdftype in (const.CDF_CHAR.value, const.CDF_UCHAR.value)) and (str is not bytes) and (not self._raw)): return numpy.dtype(('U' + str(self._nelems()))) if ((cdftype in (const.CDF_EPOCH.value, const.CDF_EPOCH16.value, const.CDF_TIME_TT2000.value)) and (not self._raw)): return numpy.dtype('O') return self._np_type()
def _get_attrs(self): "Get attribute list\n\n Provide access to the zVar's attribute list without holding a\n strong reference, as the attribute list has a (strong)\n back-reference to its parent.\n\n Either deref a weak reference (to try and keep the object the same),\n or make a new AttrList instance and assign it to the weak reference\n for next time.\n " al = self._attrlistref() if (al is None): al = zAttrList(self) self._attrlistref = weakref.ref(al) return al
3,003,470,097,806,460,000
Get attribute list Provide access to the zVar's attribute list without holding a strong reference, as the attribute list has a (strong) back-reference to its parent. Either deref a weak reference (to try and keep the object the same), or make a new AttrList instance and assign it to the weak reference for next time.
pycdf/__init__.py
_get_attrs
cpiker/condaCDF
python
def _get_attrs(self): "Get attribute list\n\n Provide access to the zVar's attribute list without holding a\n strong reference, as the attribute list has a (strong)\n back-reference to its parent.\n\n Either deref a weak reference (to try and keep the object the same),\n or make a new AttrList instance and assign it to the weak reference\n for next time.\n " al = self._attrlistref() if (al is None): al = zAttrList(self) self._attrlistref = weakref.ref(al) return al
def _set_attrs(self, value): 'Assign to the attribute list\n\n Clears all elements of the attribute list and copies from value\n ' self.attrs.clone(value)
4,701,805,248,585,535,000
Assign to the attribute list Clears all elements of the attribute list and copies from value
pycdf/__init__.py
_set_attrs
cpiker/condaCDF
python
def _set_attrs(self, value): 'Assign to the attribute list\n\n Clears all elements of the attribute list and copies from value\n ' self.attrs.clone(value)