sentence1
stringlengths
52
3.87M
sentence2
stringlengths
1
47.2k
label
stringclasses
1 value
def _connect_deferred(self, deferred):
    """
    Hook up the Deferred that this will be the result of.

    Should only be run in Twisted thread, and only called once.

    :param deferred: The Deferred whose eventual result this object exposes.
    """
    self._deferred = deferred

    # Because we use __del__, we need to make sure there are no cycles
    # involving this object, which is why we use a weakref:
    def put(result, eventual=weakref.ref(self)):
        eventual = eventual()
        if eventual:
            eventual._set_result(result)
        else:
            # The EventualResult was garbage collected; report the orphan:
            err(result, "Unhandled error in EventualResult")

    deferred.addBoth(put)
Hook up the Deferred that this will be the result of. Should only be run in Twisted thread, and only called once.
entailment
def _set_result(self, result):
    """
    Set the result of the EventualResult, if not already set.

    This can only happen in the reactor thread, either as a result of
    Deferred firing, or as a result of ResultRegistry.stop(). So, no need
    for thread-safety.

    :param result: The value (or Failure) this EventualResult resolves to.
    """
    # Use is_set() rather than the deprecated isSet() alias, matching the
    # spelling already used by _result() in this class:
    if self._result_set.is_set():
        return
    self._value = result
    self._result_set.set()
Set the result of the EventualResult, if not already set. This can only happen in the reactor thread, either as a result of Deferred firing, or as a result of ResultRegistry.stop(). So, no need for thread-safety.
entailment
def _result(self, timeout=None):
    """
    Return the result, if available.

    It may take an unknown amount of time to return the result, so a
    timeout option is provided. If the given number of seconds pass with
    no result, a TimeoutError will be thrown.

    If a previous call timed out, additional calls to this function will
    still wait for a result and return it if available. If a result was
    returned on one call, additional calls will return/raise the same
    result.
    """
    if timeout is None:
        warnings.warn(
            "Unlimited timeouts are deprecated.",
            DeprecationWarning,
            stacklevel=3)
        # Queue.get(None) won't get interrupted by Ctrl-C...
        timeout = 2 ** 31

    self._result_set.wait(timeout)
    # In Python 2.6 we can't rely on the return result of wait(), so we
    # have to check manually:
    if not self._result_set.is_set():
        raise TimeoutError()

    self._result_retrieved = True
    return self._value
Return the result, if available. It may take an unknown amount of time to return the result, so a timeout option is provided. If the given number of seconds pass with no result, a TimeoutError will be thrown. If a previous call timed out, additional calls to this function will still wait for a result and return it if available. If a result was returned on one call, additional calls will return/raise the same result.
entailment
def wait(self, timeout=None):
    """
    Return the result, or throw the exception if result is a failure.

    It may take an unknown amount of time to return the result, so a
    timeout option is provided. If the given number of seconds pass with
    no result, a TimeoutError will be thrown.

    If a previous call timed out, additional calls to this function will
    still wait for a result and return it if available. If a result was
    returned or raised on one call, additional calls will return/raise the
    same result.
    """
    if threadable.isInIOThread():
        raise RuntimeError(
            "EventualResult.wait() must not be run in the reactor thread.")

    if imp.lock_held():
        try:
            imp.release_lock()
        except RuntimeError:
            # The lock is held by some other thread. We should be safe
            # to continue.
            pass
        else:
            # If EventualResult.wait() is run during module import, if the
            # Twisted code that is being run also imports something the
            # result will be a deadlock. Even if that is not an issue it
            # would prevent importing in other threads until the call
            # returns.
            raise RuntimeError(
                "EventualResult.wait() must not be run at module "
                "import time.")

    result = self._result(timeout)
    if isinstance(result, Failure):
        result.raiseException()
    return result
Return the result, or throw the exception if result is a failure. It may take an unknown amount of time to return the result, so a timeout option is provided. If the given number of seconds pass with no result, a TimeoutError will be thrown. If a previous call timed out, additional calls to this function will still wait for a result and return it if available. If a result was returned or raised on one call, additional calls will return/raise the same result.
entailment
def original_failure(self):
    """
    Return the underlying Failure object, if the result is an error.

    If no result is yet available, or the result was not an error, None is
    returned.

    This method is useful if you want to get the original traceback for an
    error result.
    """
    try:
        result = self._result(0.0)
    except TimeoutError:
        # No result available yet:
        return None
    return result if isinstance(result, Failure) else None
Return the underlying Failure object, if the result is an error. If no result is yet available, or the result was not an error, None is returned. This method is useful if you want to get the original traceback for an error result.
entailment
def _startReapingProcesses(self):
    """
    Start a LoopingCall that calls reapAllProcesses.
    """
    reaper = LoopingCall(self._reapAllProcesses)
    reaper.clock = self._reactor
    # Poll every 100ms, without running the first call immediately:
    reaper.start(0.1, False)
Start a LoopingCall that calls reapAllProcesses.
entailment
def _common_setup(self):
    """
    The minimal amount of setup done by both setup() and no_setup().
    """
    self._started = True
    self._reactor = self._reactorFactory()
    self._registry = ResultRegistry()
    # We want to unblock EventualResult regardless of how the reactor is
    # run, so we always register this:
    self._reactor.addSystemEventTrigger(
        "before", "shutdown", self._registry.stop)
The minimal amount of setup done by both setup() and no_setup().
entailment
def setup(self):
    """
    Initialize the crochet library.

    This starts the reactor in a thread, and connects Twisted's logs to
    Python's standard library logging module.

    This must be called at least once before the library can be used, and
    can be called multiple times.
    """
    if self._started:
        return
    self._common_setup()
    if platform.type == "posix":
        self._reactor.callFromThread(self._startReapingProcesses)
    if self._startLoggingWithObserver:
        observer = ThreadLogObserver(PythonLoggingObserver().emit)

        def start():
            # Twisted is going to override warnings.showwarning; let's
            # make sure that has no effect:
            from twisted.python import log
            original = log.showwarning
            log.showwarning = warnings.showwarning
            self._startLoggingWithObserver(observer, False)
            log.showwarning = original

        self._reactor.callFromThread(start)
        # We only want to stop the logging thread once the reactor has
        # shut down:
        self._reactor.addSystemEventTrigger(
            "after", "shutdown", observer.stop)
    t = threading.Thread(
        target=lambda: self._reactor.run(installSignalHandlers=False),
        name="CrochetReactor")
    t.start()
    self._atexit_register(self._reactor.callFromThread, self._reactor.stop)
    self._atexit_register(_store.log_errors)
    if self._watchdog_thread is not None:
        self._watchdog_thread.start()
Initialize the crochet library. This starts the reactor in a thread, and connects Twisted's logs to Python's standard library logging module. This must be called at least once before the library can be used, and can be called multiple times.
entailment
def _run_in_reactor(self, function, _, args, kwargs):
    """
    Implementation: A decorator that ensures the wrapped function runs in
    the reactor thread.

    When the wrapped function is called, an EventualResult is returned.
    """
    def runs_in_reactor(result, args, kwargs):
        # Runs in the reactor thread; wire the Deferred to the result:
        d = maybeDeferred(function, *args, **kwargs)
        result._connect_deferred(d)

    result = EventualResult(None, self._reactor)
    self._registry.register(result)
    self._reactor.callFromThread(runs_in_reactor, result, args, kwargs)
    return result
Implementation: A decorator that ensures the wrapped function runs in the reactor thread. When the wrapped function is called, an EventualResult is returned.
entailment
def run_in_reactor(self, function):
    """
    A decorator that ensures the wrapped function runs in the reactor
    thread.

    When the wrapped function is called, an EventualResult is returned.
    """
    wrapped = self._run_in_reactor(function)
    # Backwards compatibility; use __wrapped__ instead.
    try:
        wrapped.wrapped_function = function
    except AttributeError:
        # Some callables refuse attribute assignment; skip the shim.
        pass
    return wrapped
A decorator that ensures the wrapped function runs in the reactor thread. When the wrapped function is called, an EventualResult is returned.
entailment
def wait_for_reactor(self, function):
    """
    DEPRECATED, use wait_for(timeout) instead.

    A decorator that ensures the wrapped function runs in the reactor
    thread. When the wrapped function is called, its result is returned or
    its exception raised. Deferreds are handled transparently.
    """
    warnings.warn(
        "@wait_for_reactor is deprecated, use @wait_for instead",
        DeprecationWarning,
        stacklevel=2)
    # This will timeout, in theory. In practice the process will be dead
    # long before that.
    return self.wait_for(2 ** 31)(function)
DEPRECATED, use wait_for(timeout) instead. A decorator that ensures the wrapped function runs in the reactor thread. When the wrapped function is called, its result is returned or its exception raised. Deferreds are handled transparently.
entailment
def wait_for(self, timeout):
    """
    A decorator factory that ensures the wrapped function runs in the
    reactor thread.

    When the wrapped function is called, its result is returned or its
    exception raised. Deferreds are handled transparently. Calls will
    timeout after the given number of seconds (a float), raising a
    crochet.TimeoutError, and cancelling the Deferred being waited on.
    """
    def decorator(function):
        @wrapt.decorator
        def wrapper(function, _, args, kwargs):
            @self.run_in_reactor
            def run():
                return function(*args, **kwargs)

            eventual_result = run()
            try:
                return eventual_result.wait(timeout)
            except TimeoutError:
                # Give up on the underlying Deferred before re-raising:
                eventual_result.cancel()
                raise

        result = wrapper(function)
        # Expose underlying function for testing purposes; this attribute
        # is deprecated, use __wrapped__ instead:
        try:
            result.wrapped_function = function
        except AttributeError:
            pass
        return result

    return decorator
A decorator factory that ensures the wrapped function runs in the reactor thread. When the wrapped function is called, its result is returned or its exception raised. Deferreds are handled transparently. Calls will timeout after the given number of seconds (a float), raising a crochet.TimeoutError, and cancelling the Deferred being waited on.
entailment
def in_reactor(self, function):
    """
    DEPRECATED, use run_in_reactor.

    A decorator that ensures the wrapped function runs in the reactor
    thread. The wrapped function will get the reactor passed in as a first
    argument, in addition to any arguments it is called with.

    When the wrapped function is called, an EventualResult is returned.
    """
    warnings.warn(
        "@in_reactor is deprecated, use @run_in_reactor",
        DeprecationWarning,
        stacklevel=2)

    @self.run_in_reactor
    @wraps(function)
    def add_reactor(*args, **kwargs):
        # Prepend the reactor to whatever the caller passed:
        return function(self._reactor, *args, **kwargs)

    return add_reactor
DEPRECATED, use run_in_reactor. A decorator that ensures the wrapped function runs in the reactor thread. The wrapped function will get the reactor passed in as a first argument, in addition to any arguments it is called with. When the wrapped function is called, an EventualResult is returned.
entailment
def _mx(domain):
    """
    Return Deferred that fires with a list of (priority, MX domain) tuples
    for a given domain.
    """
    def got_records(result):
        # result[0] holds the answer records; extract and sort by priority:
        records = [(int(record.payload.preference), str(record.payload.name))
                   for record in result[0]]
        return sorted(records)

    d = lookupMailExchange(domain)
    d.addCallback(got_records)
    return d
Return Deferred that fires with a list of (priority, MX domain) tuples for a given domain.
entailment
def store(self, deferred_result):
    """
    Store a EventualResult.

    Return an integer, a unique identifier that can be used to retrieve
    the object.
    """
    # Monotonically increasing counter doubles as the identifier:
    self._counter += 1
    key = self._counter
    self._stored[key] = deferred_result
    return key
Store a EventualResult. Return an integer, a unique identifier that can be used to retrieve the object.
entailment
def log_errors(self):
    """
    Log errors for all stored EventualResults that have error results.
    """
    for stored in self._stored.values():
        failure = stored.original_failure()
        if failure is None:
            continue
        log.err(failure, "Unhandled error in stashed EventualResult:")
Log errors for all stored EventualResults that have error results.
entailment
def start_ssh_server(port, username, password, namespace):
    """
    Start an SSH server on the given port, exposing a Python prompt with
    the given namespace.
    """
    # This is a lot of boilerplate, see http://tm.tl/6429 for a ticket to
    # provide a utility function that simplifies this.
    from twisted.internet import reactor
    from twisted.conch.insults import insults
    from twisted.conch import manhole, manhole_ssh
    from twisted.cred.checkers import (
        InMemoryUsernamePasswordDatabaseDontUse as MemoryDB)
    from twisted.cred.portal import Portal

    sshRealm = manhole_ssh.TerminalRealm()

    def chainedProtocolFactory():
        # Each connection gets a Manhole wired to the shared namespace:
        return insults.ServerProtocol(manhole.Manhole, namespace)

    sshRealm.chainedProtocolFactory = chainedProtocolFactory

    sshPortal = Portal(sshRealm, [MemoryDB(**{username: password})])
    # Only listen on loopback; this exposes an unrestricted Python prompt:
    reactor.listenTCP(port, manhole_ssh.ConchFactory(sshPortal),
                      interface="127.0.0.1")
Start an SSH server on the given port, exposing a Python prompt with the given namespace.
entailment
def _synced(method, self, args, kwargs):
    """Underlying synchronized wrapper: run `method` under self._lock."""
    with self._lock:
        return method(*args, **kwargs)
Underlying synchronized wrapper.
entailment
def register(self, f, *args, **kwargs):
    """
    Register a function and arguments to be called later.
    """
    # Freeze the call into a zero-argument thunk for deferred execution:
    thunk = lambda: f(*args, **kwargs)
    self._functions.append(thunk)
Register a function and arguments to be called later.
entailment
def register_function(self, function, name=None):
    """
    Register function to be called from EPC client.

    :type function: callable
    :arg  function: Function to publish.
    :type name: str
    :arg  name: Name by which function is published.

    This method returns the given `function` as-is, so that you can use it
    as a decorator.
    """
    key = function.__name__ if name is None else name
    self.funcs[key] = function
    return function
Register function to be called from EPC client. :type function: callable :arg function: Function to publish. :type name: str :arg name: Name by which function is published. This method returns the given `function` as-is, so that you can use it as a decorator.
entailment
def get_method(self, name):
    """
    Get registered method called `name`.

    Lookup order: functions registered via `register_function`, then the
    registered instance's own `_get_method`, then dotted-attribute
    resolution on the instance.
    """
    try:
        return self.funcs[name]
    except KeyError:
        try:
            return self.instance._get_method(name)
        except AttributeError:
            # Fall back to SimpleXMLRPCServer-style dotted lookup:
            return SimpleXMLRPCServer.resolve_dotted_attribute(
                self.instance, name, self.allow_dotted_names)
Get registered method called `name`.
entailment
def set_debugger(self, debugger):
    """
    Set debugger to run when an error occurs in published method.

    You can also set debugger by passing `debugger` argument to the class
    constructor.

    :type debugger: {'pdb', 'ipdb', None}
    :arg  debugger: type of debugger.
    """
    if debugger == 'pdb':
        import pdb
        self.debugger = pdb
    elif debugger == 'ipdb':
        import ipdb
        self.debugger = ipdb
    else:
        # Anything else (including None) is stored verbatim:
        self.debugger = debugger
Set debugger to run when an error occurs in published method. You can also set debugger by passing `debugger` argument to the class constructor. :type debugger: {'pdb', 'ipdb', None} :arg debugger: type of debugger.
entailment
def connect(self, socket_or_address):
    """
    Connect to server and start serving registered functions.

    :type socket_or_address: tuple or socket object
    :arg  socket_or_address: A ``(host, port)`` pair to be passed to
        `socket.create_connection`, or a socket object.
    """
    if isinstance(socket_or_address, tuple):
        import socket
        self.socket = socket.create_connection(socket_or_address)
    else:
        self.socket = socket_or_address

    # This is what BaseServer.finish_request does:
    address = None  # it is not used, so leave it empty
    self.handler = EPCClientHandler(self.socket, address, self)

    # Re-export the handler's call interface on the client:
    self.call = self.handler.call
    self.call_sync = self.handler.call_sync
    self.methods = self.handler.methods
    self.methods_sync = self.handler.methods_sync

    self.handler_thread = newthread(self, target=self.handler.start)
    self.handler_thread.daemon = self.thread_daemon
    self.handler_thread.start()
    self.handler.wait_until_ready()
Connect to server and start serving registered functions. :type socket_or_address: tuple or socket object :arg socket_or_address: A ``(host, port)`` pair to be passed to `socket.create_connection`, or a socket object.
entailment
def main(args=None):
    """
    Quick CLI to serve Python functions in a module.

    Example usage::

        python -m epc.server --allow-dotted-names os

    Note that only the functions which gets and returns simple
    built-in types (str, int, float, list, tuple, dict) works.

    """
    import argparse
    from textwrap import dedent

    # Combine default-showing and raw-description formatting:
    formatter = type('EPCHelpFormatter',
                     (argparse.ArgumentDefaultsHelpFormatter,
                      argparse.RawDescriptionHelpFormatter),
                     {})
    parser = argparse.ArgumentParser(
        formatter_class=formatter,
        description=dedent(main.__doc__))
    parser.add_argument(
        'module', help='Serve python functions in this module.')
    parser.add_argument(
        '--address', default='localhost', help='server address')
    parser.add_argument(
        '--port', default=0, type=int,
        help='server port. 0 means to pick up random port.')
    parser.add_argument(
        '--allow-dotted-names', default=False, action='store_true')
    parser.add_argument(
        '--pdb', dest='debugger', const='pdb', action='store_const',
        help='start pdb when error occurs.')
    parser.add_argument(
        '--ipdb', dest='debugger', const='ipdb', action='store_const',
        help='start ipdb when error occurs.')
    parser.add_argument(
        '--log-traceback', action='store_true', default=False)
    ns = parser.parse_args(args)

    server = EPCServer((ns.address, ns.port),
                       debugger=ns.debugger,
                       log_traceback=ns.log_traceback)
    server.register_instance(
        __import__(ns.module),
        allow_dotted_names=ns.allow_dotted_names)
    server.print_port()
    server.serve_forever()
Quick CLI to serve Python functions in a module. Example usage:: python -m epc.server --allow-dotted-names os Note that only the functions which take and return simple built-in types (str, int, float, list, tuple, dict) work.
entailment
def print_port(self, stream=sys.stdout):
    """
    Print port this EPC server runs on.

    As Emacs client reads port number from STDOUT, you need to call this
    just before calling :meth:`serve_forever`.

    :type stream: text stream
    :arg  stream: A stream object to write port on.
                  Default is :data:`sys.stdout`.
    """
    # server_address is a (host, port) pair; Emacs expects "<port>\n":
    stream.write("{0}\n".format(self.server_address[1]))
    stream.flush()
Print port this EPC server runs on. As Emacs client reads port number from STDOUT, you need to call this just before calling :meth:`serve_forever`. :type stream: text stream :arg stream: A stream object to write port on. Default is :data:`sys.stdout`.
entailment
def call(self, name, *args, **kwds):
    """
    Call method connected to this handler.

    :type name: str
    :arg  name: Method name to call.
    :type args: list
    :arg  args: Arguments for remote method to call.
    :type callback: callable
    :arg  callback: A function to be called with returned value of the
        remote method.
    :type errback: callable
    :arg  errback: A function to be called with an error occurred in the
        remote method. It is either an instance of :class:`ReturnError`
        or :class:`EPCError`.
    """
    # Delegate entirely to the call manager:
    self.callmanager.call(self, name, *args, **kwds)
Call method connected to this handler. :type name: str :arg name: Method name to call. :type args: list :arg args: Arguments for remote method to call. :type callback: callable :arg callback: A function to be called with returned value of the remote method. :type errback: callable :arg errback: A function to be called with an error occurred in the remote method. It is either an instance of :class:`ReturnError` or :class:`EPCError`.
entailment
def methods(self, *args, **kwds):
    """
    Request info of callable remote methods.

    Arguments for :meth:`call` except for `name` can be applied to this
    function too.
    """
    # Delegate entirely to the call manager:
    self.callmanager.methods(self, *args, **kwds)
Request info of callable remote methods. Arguments for :meth:`call` except for `name` can be applied to this function too.
entailment
def call_sync(self, name, args, timeout=None):
    """
    Blocking version of :meth:`call`.

    :type name: str
    :arg  name: Remote function name to call.
    :type args: list
    :arg  args: Arguments passed to the remote function.
    :type timeout: int or None
    :arg  timeout: Timeout in seconds. None means no timeout.

    If the called remote function raises an exception, this method raises
    an exception. If you give `timeout`, this method may raise an `Empty`
    exception.
    """
    return self._blocking_request(self.call, timeout, name, args)
Blocking version of :meth:`call`. :type name: str :arg name: Remote function name to call. :type args: list :arg args: Arguments passed to the remote function. :type timeout: int or None :arg timeout: Timeout in seconds. None means no timeout. If the called remote function raises an exception, this method raises an exception. If you give `timeout`, this method may raise an `Empty` exception.
entailment
def func_call_as_str(name, *args, **kwds):
    """
    Return arguments and keyword arguments as formatted string

    >>> func_call_as_str('f', 1, 2, a=1)
    'f(1, 2, a=1)'
    """
    positional = ['{0!r}'.format(a) for a in args]
    keyword = ['{0!s}={1!r}'.format(k, v) for k, v in sorted(kwds.items())]
    return '{0}({1})'.format(name, ', '.join(positional + keyword))
Return arguments and keyword arguments as formatted string >>> func_call_as_str('f', 1, 2, a=1) 'f(1, 2, a=1)'
entailment
def newthread(template="EPCThread-{0}", **kwds):
    """
    Instantiate :class:`threading.Thread` with an appropriate name.
    """
    if not isinstance(template, str):
        # An object was passed; derive the name from its module and class:
        template = '{0}.{1}-{{0}}'.format(template.__module__,
                                          template.__class__.__name__)
    return threading.Thread(name=newname(template), **kwds)
Instantiate :class:`threading.Thread` with an appropriate name.
entailment
def callwith(context_manager):
    """
    A decorator to wrap execution of function with a context manager.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwds):
            # Enter/exit the context manager around every call:
            with context_manager:
                return func(*args, **kwds)
        return wrapper
    return decorator
A decorator to wrap execution of function with a context manager.
entailment
def _scene_centroid(self):
    """
    Compute image center coordinates.

    :return: Tuple of image center in lat, lon
    """
    # Latitude midpoint between the upper-left and lower-left corners;
    # longitude midpoint between the upper-left and upper-right corners:
    lat = (self.corner_ul_lat_product + self.corner_ll_lat_product) / 2.
    lon = (self.corner_ul_lon_product + self.corner_ur_lon_product) / 2.
    return lat, lon
Compute image center coordinates :return: Tuple of image center in lat, lon
entailment
def earth_sun_d(dtime):
    """
    Earth-sun distance in AU.

    :param dtime: time, e.g. datetime.datetime(2007, 5, 1)
    :type dtime: datetime object
    :return: float(distance from sun to earth in astronomical units)
    """
    day_of_year = int(dtime.strftime('%j'))
    # Simple eccentricity correction (perihelion near day 4):
    angle = 0.9856 * (day_of_year - 4) * pi / 180
    return 1 - 0.01672 * cos(angle)
Earth-sun distance in AU :param dtime time, e.g. datetime.datetime(2007, 5, 1) :type datetime object :return float(distance from sun to earth in astronomical units)
entailment
def reflectance(self, band):
    """
    At-satellite (top of atmosphere) reflectance for an optical band.

    :param band: An optical band, i.e. 1-5, 7
    :return: At satellite reflectance, [-]
    """
    if band == 6:
        # Band 6 is thermal; reflectance is undefined for it.
        raise ValueError('LT5 reflectance must be other than band 6')
    radiance = self.radiance(band)
    esun = self.ex_atm_irrad[band - 1]
    return (pi * radiance * self.earth_sun_dist ** 2) / \
        (esun * cos(self.solar_zenith_rad))
:param band: An optical band, i.e. 1-5, 7 :return: At satellite reflectance, [-]
entailment
def albedo(self, model='smith'):
    """Finds broad-band surface reflectance (albedo).

    Smith (2010), "The heat budget of the earth's surface deduced from
    space". LT5 toa reflectance bands 1, 3, 4, 5, 7; weights normalized,
    i.e. 0.356 + 0.130 + 0.373 + 0.085 + 0.07 = 1.014.

    Should have option for Liang, 2000; Tasumi (2008), "At-Surface
    Reflectance and Albedo from Satellite for Operational Calculation of
    Land Surface Energy Balance".

    :param model: 'smith' (implemented) or 'tasumi' (not yet implemented)
    :return: albedo array of floats
    :raises NotImplementedError: for model='tasumi'
    :raises ValueError: for any other unknown model
    """
    if model == 'smith':
        blue, red, nir, swir1, swir2 = (self.reflectance(1),
                                        self.reflectance(3),
                                        self.reflectance(4),
                                        self.reflectance(5),
                                        self.reflectance(7))
        alb = (0.356 * blue + 0.130 * red + 0.373 * nir +
               0.085 * swir1 + 0.072 * swir2 - 0.0018) / 1.014
    elif model == 'tasumi':
        # Original code silently fell through to `return alb` here,
        # producing an UnboundLocalError; fail with a clear message
        # until the Tasumi (2008) algorithm is added. TODO
        raise NotImplementedError(
            "The 'tasumi' albedo model is not implemented yet")
    else:
        raise ValueError('Unknown albedo model: {}'.format(model))
    return alb
Finds broad-band surface reflectance (albedo) Smith (2010), “The heat budget of the earth’s surface deduced from space” LT5 toa reflectance bands 1, 3, 4, 5, 7 # normalized i.e. 0.356 + 0.130 + 0.373 + 0.085 + 0.07 = 1.014 Should have option for Liang, 2000; Tasumi (2008), "At-Surface Reflectance and Albedo from Satellite for Operational Calculation of Land Surface Energy Balance" :return albedo array of floats
entailment
def saturation_mask(self, band, value=255):
    """
    Mask saturated pixels, 1 (True) is saturated.

    :param band: Image band with dn values, type: array
    :param value: Maximum (saturated) value, i.e. 255 for 8-bit data,
        type: int
    :return: boolean array
    """
    dn = self._get_band('b{}'.format(band))
    valid = self.mask()
    # Saturated AND inside the valid-data mask:
    return where((dn == value) & (valid > 0), True, False)
Mask saturated pixels, 1 (True) is saturated. :param band: Image band with dn values, type: array :param value: Maximum (saturated) value, i.e. 255 for 8-bit data, type: int :return: boolean array
entailment
def ndvi(self):
    """
    Normalized difference vegetation index.

    :return: NDVI
    """
    red = self.reflectance(3)
    nir = self.reflectance(4)
    # (NIR - red) / (NIR + red), with nan where the denominator is zero:
    return self._divide_zero(nir - red, nir + red, nan)
Normalized difference vegetation index. :return: NDVI
entailment
def lai(self):
    """
    Leaf area index (LAI), or the surface area of leaves to surface area
    ground.

    Trezza and Allen, 2014.

    :return: LAI [-]
    """
    vegetation_index = self.ndvi()
    # Cubic relationship, capped at 6.0:
    leaf_area = 7.0 * vegetation_index ** 3
    return where(leaf_area > 6., 6., leaf_area)
Leaf area index (LAI), or the surface area of leaves to surface area ground. Trezza and Allen, 2014 :param ndvi: normalized difference vegetation index [-] :return: LAI [-]
entailment
def land_surface_temp(self):
    """
    Land surface temperature from thermal band 6.

    Mean atmospheric correction values from Allen (2007).

    :return: land surface temperature
    """
    # Mean path radiance, transmissivity, and sky radiance (Allen 2007):
    rp = 0.91
    tau = 0.866
    rsky = 1.32

    epsilon = self.emissivity(approach='tasumi')
    radiance = self.radiance(6)
    # Corrected thermal radiance at the surface:
    rc = ((radiance - rp) / tau) - ((1 - epsilon) * rsky)
    return self.k2 / (log((epsilon * self.k1 / rc) + 1))
Mean values from Allen (2007) :return:
entailment
def brightness_temp(self, band, temp_scale='K'):
    """Calculate brightness temperature of Landsat 8 as outlined here:
    http://landsat.usgs.gov/Landsat8_Using_Product.php

    T = K2 / log((K1 / L) + 1)

    where:
        T  = At-satellite brightness temperature (degrees kelvin)
        L  = TOA spectral radiance (Watts / (m2 * srad * mm))
        K1 = Band-specific thermal conversion constant from the metadata
             (K1_CONSTANT_BAND_x, where x is the thermal band number)
        K2 = Band-specific thermal conversion constant from the metadata
             (K2_CONSTANT_BAND_x, where x is the thermal band number)

    :param band: thermal band (10 or 11)
    :param temp_scale: 'K', 'F', or 'C'
    :return: float32 ndarray with shape == input shape
    """
    if band in self.oli_bands:
        raise ValueError(
            'Landsat 8 brightness should be TIRS band (i.e. 10 or 11)')

    k1 = getattr(self, 'k1_constant_band_{}'.format(band))
    k2 = getattr(self, 'k2_constant_band_{}'.format(band))
    kelvin = k2 / log((k1 / self.radiance(band)) + 1)

    if temp_scale == 'K':
        return kelvin
    if temp_scale == 'F':
        return kelvin * (9 / 5.0) - 459.67
    if temp_scale == 'C':
        return kelvin - 273.15
    raise ValueError(
        '{} is not a valid temperature scale'.format(temp_scale))
Calculate brightness temperature of Landsat 8 as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php T = K2 / log((K1 / L) + 1) and L = ML * Q + AL where: T = At-satellite brightness temperature (degrees kelvin) L = TOA spectral radiance (Watts / (m2 * srad * mm)) ML = Band-specific multiplicative rescaling factor from the metadata (RADIANCE_MULT_BAND_x, where x is the band number) AL = Band-specific additive rescaling factor from the metadata (RADIANCE_ADD_BAND_x, where x is the band number) Q = Quantized and calibrated standard product pixel values (DN) (ndarray img) K1 = Band-specific thermal conversion constant from the metadata (K1_CONSTANT_BAND_x, where x is the thermal band number) K2 = Band-specific thermal conversion constant from the metadata (K1_CONSTANT_BAND_x, where x is the thermal band number) Returns -------- ndarray: float32 ndarray with shape == input shape
entailment
def reflectance(self, band): """Calculate top of atmosphere reflectance of Landsat 8 as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php R_raw = MR * Q + AR R = R_raw / cos(Z) = R_raw / sin(E) Z = 90 - E (in degrees) where: R_raw = TOA planetary reflectance, without correction for solar angle. R = TOA reflectance with a correction for the sun angle. MR = Band-specific multiplicative rescaling factor from the metadata (REFLECTANCE_MULT_BAND_x, where x is the band number) AR = Band-specific additive rescaling factor from the metadata (REFLECTANCE_ADD_BAND_x, where x is the band number) Q = Quantized and calibrated standard product pixel values (DN) E = Local sun elevation angle. The scene center sun elevation angle in degrees is provided in the metadata (SUN_ELEVATION). Z = Local solar zenith angle (same angle as E, but measured from the zenith instead of from the horizon). Returns -------- ndarray: float32 ndarray with shape == input shape """ if band not in self.oli_bands: raise ValueError('Landsat 8 reflectance should OLI band (i.e. bands 1-8)') elev = getattr(self, 'sun_elevation') dn = self._get_band('b{}'.format(band)) mr = getattr(self, 'reflectance_mult_band_{}'.format(band)) ar = getattr(self, 'reflectance_add_band_{}'.format(band)) if elev < 0.0: raise ValueError("Sun elevation must be non-negative " "(sun must be above horizon for entire scene)") rf = ((mr * dn.astype(float32)) + ar) / sin(deg2rad(elev)) return rf
Calculate top of atmosphere reflectance of Landsat 8 as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php R_raw = MR * Q + AR R = R_raw / cos(Z) = R_raw / sin(E) Z = 90 - E (in degrees) where: R_raw = TOA planetary reflectance, without correction for solar angle. R = TOA reflectance with a correction for the sun angle. MR = Band-specific multiplicative rescaling factor from the metadata (REFLECTANCE_MULT_BAND_x, where x is the band number) AR = Band-specific additive rescaling factor from the metadata (REFLECTANCE_ADD_BAND_x, where x is the band number) Q = Quantized and calibrated standard product pixel values (DN) E = Local sun elevation angle. The scene center sun elevation angle in degrees is provided in the metadata (SUN_ELEVATION). Z = Local solar zenith angle (same angle as E, but measured from the zenith instead of from the horizon). Returns -------- ndarray: float32 ndarray with shape == input shape
entailment
def radiance(self, band):
    """Calculate top of atmosphere radiance of Landsat 8 as outlined here:
    http://landsat.usgs.gov/Landsat8_Using_Product.php

    L = ML * Q + AL

    where:
        L  = TOA spectral radiance (Watts / (m2 * srad * mm))
        ML = Band-specific multiplicative rescaling factor from the
             metadata (RADIANCE_MULT_BAND_x, where x is the band number)
        AL = Band-specific additive rescaling factor from the metadata
             (RADIANCE_ADD_BAND_x, where x is the band number)
        Q  = Quantized and calibrated standard product pixel values (DN)

    :param band: band number
    :return: float32 ndarray with shape == input shape
    """
    multiplier = getattr(self, 'radiance_mult_band_{}'.format(band))
    offset = getattr(self, 'radiance_add_band_{}'.format(band))
    dn = self._get_band('b{}'.format(band))
    return multiplier * dn.astype(float32) + offset
Calculate top of atmosphere radiance of Landsat 8 as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php L = ML * Q + AL where: L = TOA spectral radiance (Watts / (m2 * srad * mm)) ML = Band-specific multiplicative rescaling factor from the metadata (RADIANCE_MULT_BAND_x, where x is the band number) AL = Band-specific additive rescaling factor from the metadata (RADIANCE_ADD_BAND_x, where x is the band number) Q = Quantized and calibrated standard product pixel values (DN) (ndarray img) Returns -------- ndarray: float32 ndarray with shape == input shape
entailment
def ndsi(self):
    """Compute the Normalized Difference Snow Index (NDSI).

    NDSI = (green - swir1) / (green + swir1), with zero denominators
    mapped to ``nan`` by ``_divide_zero``.

    :return: NDSI array
    """
    band_green = self.reflectance(3)
    band_swir1 = self.reflectance(6)
    return self._divide_zero(band_green - band_swir1,
                             band_green + band_swir1, nan)
Normalized difference snow index. :return: NDSI
entailment
def whiteness_index(self):
    """Index of "Whiteness" based on the visible bands.

    Sums, over blue/green/red, the absolute relative deviation of each
    band from the visible-band mean; clouds (spectrally flat and bright)
    score low.

    Output
    ------
    ndarray: whiteness index
    """
    mean_vis = (self.blue + self.green + self.red) / 3
    whiteness = 0
    for band in (self.blue, self.green, self.red):
        # relative deviation of this band from the visible mean
        whiteness = whiteness + np.absolute(
            self._divide_zero(band - mean_vis, mean_vis))
    return whiteness
Index of "Whiteness" based on visible bands. Parameters ---------- Output ------ ndarray: whiteness index
entailment
def potential_cloud_pixels(self):
    """Determine potential cloud pixels (PCPs).

    First pass, section 3.1.1 in Zhu and Woodcock 2012: combines the
    basic spectral tests into a preliminary cloud mask (Equation 6).
    Landsat 8 scenes additionally OR in the cirrus-band test.

    Output
    ------
    ndarray: potential cloud mask, boolean
    """
    combined = (self.basic_test()
                & self.whiteness_test()
                & self.hot_test()
                & self.nirswir_test())
    if self.sat == 'LC8':
        return combined | self.cirrus_test()
    return combined
Determine potential cloud pixels (PCPs) Combine basic spectral tests to get a preliminary cloud mask First pass, section 3.1.1 in Zhu and Woodcock 2012 Equation 6 (Zhu and Woodcock, 2012) Parameters ---------- ndvi: ndarray ndsi: ndarray blue: ndarray green: ndarray red: ndarray nir: ndarray swir1: ndarray swir2: ndarray cirrus: ndarray tirs1: ndarray Output ------ ndarray: potential cloud mask, boolean
entailment
def temp_water(self):
    """82.5th-percentile brightness temperature over clear-sky water.

    Equations 7 and 8 (Zhu and Woodcock, 2012): clear-sky water pixels
    are water-test positives with SWIR2 reflectance below 0.03; their
    temperatures (valid pixels only) feed the percentile.

    Output
    ------
    float: 82.5th percentile temperature over water
    """
    # eq 7
    swir2_threshold = 0.03
    clear_sky_water = self.water_test() & (self.swir2 < swir2_threshold)

    # eq 8: NaN out everything that is not clear-sky water inside the
    # valid-data mask, then take the percentile of what remains
    temps = self.tirs1.copy()
    temps[~(clear_sky_water & self.mask)] = np.nan
    return np.nanpercentile(temps, 82.5)
Use water to mask tirs and find 82.5 pctile Equation 7 and 8 (Zhu and Woodcock, 2012) Parameters ---------- is_water: ndarray, boolean water mask, water is True, land is False swir2: ndarray tirs1: ndarray Output ------ float: 82.5th percentile temperature over water
entailment
def water_temp_prob(self):
    """Temperature probability of cloud over water.

    Equation 9 (Zhu and Woodcock, 2012): the colder a pixel is relative
    to the clear-water reference temperature, the more cloud-like it is.
    Normalized by a 4 degree C scale.

    Output
    ------
    ndarray: probability of cloud over water based on temperature
    """
    temp_scale = 4.0  # degrees C
    reference_temp = self.temp_water()
    return (reference_temp - self.tirs1) / temp_scale
Temperature probability for water Equation 9 (Zhu and Woodcock, 2012) Parameters ---------- water_temp: float 82.5th percentile temperature over water swir2: ndarray tirs1: ndarray Output ------ ndarray: probability of cloud over water based on temperature
entailment
def brightness_prob(self, clip=True):
    """Brightness probability of cloud over water.

    Equation 10 (Zhu and Woodcock, 2012): the brightest water is assumed
    to have Band 5 (NIR) reflectance no higher than 0.11, so NIR is
    scaled by that ceiling and optionally constrained to [0, 1].

    Parameters
    ----------
    clip: boolean
        constrain the result to the range 0-1

    Output
    ------
    ndarray: brightness probability
    """
    ceiling = 0.11
    prob = np.minimum(ceiling, self.nir) / ceiling
    if clip:
        prob = np.clip(prob, 0, 1)
    return prob
The brightest water may have Band 5 reflectance as high as 0.11 Equation 10 (Zhu and Woodcock, 2012) Parameters ---------- nir: ndarray clip: boolean Output ------ ndarray: brightness probability, constrained 0-1
entailment
def temp_land(self, pcps, water):
    """Derive low/high percentiles of clear-sky land temperature.

    Equations 12 and 13 (Zhu and Woodcock, 2012).

    Parameters
    ----------
    pcps: ndarray
        potential cloud pixels, boolean
    water: ndarray
        water mask, boolean

    Output
    ------
    tuple: 17.5 and 82.5 percentile temperature over clear-sky land
    """
    # eq 12: clear-sky land is anything that is neither cloud nor water
    clearsky_land = ~(pcps | water)

    # NaN out cloudy/water/invalid pixels so nanpercentile ignores them
    temps = self.tirs1.copy()
    temps[~(clearsky_land & self.mask)] = np.nan

    # eq 13
    low, high = np.nanpercentile(temps, (17.5, 82.5))
    return low, high
Derive high/low percentiles of land temperature Equations 12 an 13 (Zhu and Woodcock, 2012) Parameters ---------- pcps: ndarray potential cloud pixels, boolean water: ndarray water mask, boolean tirs1: ndarray Output ------ tuple: 17.5 and 82.5 percentile temperature over clearsky land
entailment
def land_temp_prob(self, tlow, thigh):
    """Temperature-based probability of cloud over land.

    Equation 14 (Zhu and Woodcock, 2012): pixels colder than the warm
    end of the clear-sky land temperature range are more cloud-like.

    Parameters
    ----------
    tlow: float
        Low (17.5 percentile) temperature of land
    thigh: float
        High (82.5 percentile) temperature of land

    Output
    ------
    ndarray : probability of cloud over land based on temperature
    """
    # The original mixed the named constant with a bare literal 4 in the
    # denominator; use the single named margin everywhere (same value).
    temp_diff = 4.0  # degrees C margin around the clear-sky range
    return ((thigh + temp_diff - self.tirs1)
            / ((thigh + temp_diff) - (tlow - temp_diff)))
Temperature-based probability of cloud over land Equation 14 (Zhu and Woodcock, 2012) Parameters ---------- tirs1: ndarray tlow: float Low (17.5 percentile) temperature of land thigh: float High (82.5 percentile) temperature of land Output ------ ndarray : probability of cloud over land based on temperature
entailment
def variability_prob(self, whiteness):
    """Use the probability of the spectral variability
    to identify clouds over land.
    Equation 15 (Zhu and Woodcock, 2012)

    Parameters
    ----------
    whiteness: ndarray
        whiteness index over the visible bands

    Output
    ------
    ndarray :
        probability of cloud over land based on variability
    """
    if self.sat in ['LT5', 'LE7']:
        # check for green and red saturation

        # if red is saturated and less than nir, ndvi = 0
        mod_ndvi = np.where(self.red_saturated & (self.nir > self.red), 0, self.ndvi)

        # if green is saturated and less than swir1, ndsi = 0
        mod_ndsi = np.where(self.green_saturated & (self.swir1 > self.green), 0, self.ndsi)

        ndi_max = np.fmax(np.absolute(mod_ndvi), np.absolute(mod_ndsi))
    else:
        ndi_max = np.fmax(np.absolute(self.ndvi), np.absolute(self.ndsi))

    # eq 15: high spectral variability (vegetation/snow indices or
    # whiteness) lowers the probability that the pixel is cloud
    f_max = 1.0 - np.fmax(ndi_max, whiteness)
    return f_max
Use the probability of the spectral variability to identify clouds over land. Equation 15 (Zhu and Woodcock, 2012) Parameters ---------- ndvi: ndarray ndsi: ndarray whiteness: ndarray Output ------ ndarray : probability of cloud over land based on variability
entailment
def land_threshold(self, land_cloud_prob, pcps, water):
    """Dynamic threshold for the land cloud-probability cutoff.

    Equation 17 (Zhu and Woodcock, 2012): the 82.5th percentile of the
    land cloud probability over clear-sky land, plus a 0.2 offset.

    Parameters
    ----------
    land_cloud_prob: ndarray
        probability of cloud over land
    pcps: ndarray
        potential cloud pixels
    water: ndarray
        water mask

    Output
    ------
    float: land cloud threshold
    """
    offset = 0.2
    # eq 12: clear-sky land is neither potential cloud nor water
    clearsky_land = ~(pcps | water)

    # restrict the probability field to valid clear-sky land pixels
    prob = land_cloud_prob.copy()
    prob[~(clearsky_land & self.mask)] = np.nan

    # eq 17
    return np.nanpercentile(prob, 82.5) + offset
Dynamic threshold for determining cloud cutoff Equation 17 (Zhu and Woodcock, 2012) Parameters ---------- land_cloud_prob: ndarray probability of cloud over land pcps: ndarray potential cloud pixels water: ndarray water mask Output ------ float: land cloud threshold
entailment
def potential_cloud_layer(self, pcp, water, tlow, land_cloud_prob,
                          land_threshold, water_cloud_prob,
                          water_threshold=0.5):
    """Final step of determining the potential cloud layer.

    Equation 18 (Zhu and Woodcock, 2012).  For TM/ETM+ sensors,
    saturated visible-band pixels are also flagged (this saturation
    test is an addition beyond the published algorithm).

    Parameters
    ----------
    pcp: ndarray
        potential cloud pixels
    water: ndarray
        water mask
    tlow: float
        low percentile of land temperature
    land_cloud_prob: ndarray
        probability of cloud over land
    land_threshold: float
        cutoff for cloud over land
    water_cloud_prob: ndarray
        probability of cloud over water
    water_threshold: float
        cutoff for cloud over water

    Output
    ------
    ndarray: potential cloud layer, boolean
    """
    # TODO: use pcp and water as mask; make water threshold dynamic
    # (see line 132 in Zhu, 2015)
    over_water = pcp & water & (water_cloud_prob > water_threshold)
    over_land = pcp & ~water & (land_cloud_prob > land_threshold)
    # anything 35 degrees C colder than clear-sky land must be cloud
    very_cold = self.tirs1 < (tlow - 35)

    clouds = over_water | over_land | very_cold
    if self.sat in ('LT5', 'LE7'):
        saturated = (self.blue_saturated
                     | self.green_saturated
                     | self.red_saturated)
        clouds = clouds | saturated
    return clouds
Final step of determining potential cloud layer Equation 18 (Zhu and Woodcock, 2012) Saturation (green or red) test is not in the algorithm Parameters ---------- pcps: ndarray potential cloud pixels water: ndarray water mask tirs1: ndarray tlow: float low percentile of land temperature land_cloud_prob: ndarray probability of cloud over land land_threshold: float cutoff for cloud over land water_cloud_prob: ndarray probability of cloud over water water_threshold: float cutoff for cloud over water Output ------ ndarray: potential cloud layer, boolean
entailment
def potential_snow_layer(self):
    """Spectral test to determine potential snow.

    Uses the 9.85 C (283 K) brightness-temperature threshold defined in
    Zhu, Woodcock 2015: a pixel is potential snow when NDSI > 0.15,
    temperature < 9.85, NIR > 0.11 and green > 0.1.

    Output
    ------
    ndarray: boolean, True is potential snow
    """
    snowy = self.ndsi > 0.15
    cold = self.tirs1 < 9.85
    bright = (self.nir > 0.11) & (self.green > 0.1)
    return snowy & cold & bright
Spectral test to determine potential snow Uses the 9.85C (283K) threshold defined in Zhu, Woodcock 2015 Parameters ---------- ndsi: ndarray green: ndarray nir: ndarray tirs1: ndarray Output ------ ndarray: boolean, True is potential snow
entailment
def cloud_mask(self, min_filter=(3, 3), max_filter=(10, 10), combined=False, cloud_and_shadow=False):
    """Calculate the potential cloud layer from source data

    *This is the high level function which ties together all
    the equations for generating potential clouds*

    Parameters
    ----------
    min_filter: 2-element tuple, default=(3, 3)
        Defines the window for the minimum_filter, for removing outliers
    max_filter: 2-element tuple, default=(10, 10)
        Defines the window for the maximum_filter, for "buffering" the edges
    combined: bool
        make a single boolean array masking all (cloud, shadow, water)
    cloud_and_shadow: bool
        make a single boolean array masking cloud OR shadow

    Output
    ------
    ndarray, boolean:
        potential cloud layer; True = cloud
    ndarray, boolean
        potential cloud shadow layer; True = cloud shadow
    """
    # logger.info("Running initial tests")
    whiteness = self.whiteness_index()
    water = self.water_test()

    # First pass, potential clouds
    pcps = self.potential_cloud_pixels()

    # Landsat 8 has a cirrus band; scale it into a probability term
    if self.sat == 'LC8':
        cirrus_prob = self.cirrus / 0.04
    else:
        cirrus_prob = 0.0

    # Clouds over water
    wtp = self.water_temp_prob()
    bp = self.brightness_prob()
    water_cloud_prob = (wtp * bp) + cirrus_prob
    wthreshold = 0.5

    # Clouds over land
    tlow, thigh = self.temp_land(pcps, water)
    ltp = self.land_temp_prob(tlow, thigh)
    vp = self.variability_prob(whiteness)
    land_cloud_prob = (ltp * vp) + cirrus_prob
    lthreshold = self.land_threshold(land_cloud_prob, pcps, water)

    # logger.info("Calculate potential clouds")
    pcloud = self.potential_cloud_layer(
        pcps, water, tlow, land_cloud_prob, lthreshold,
        water_cloud_prob, wthreshold)

    # Ignoring snow for now as it exhibits many false positives and negatives
    # when used as a binary mask
    # psnow = potential_snow_layer(ndsi, green, nir, tirs1)
    # pcloud = pcloud & ~psnow

    # logger.info("Calculate potential cloud shadows")
    pshadow = self.potential_cloud_shadow_layer(water)

    # The remainder of the algorithm differs significantly from Fmask,
    # in an attempt to make a more visually appealing cloud mask
    # with fewer inclusions and more broad shapes
    if min_filter:
        # Remove outliers
        # logger.info("Remove outliers with minimum filter")
        from scipy.ndimage.filters import minimum_filter
        from scipy.ndimage.morphology import distance_transform_edt

        # remove cloud outliers by nibbling the edges
        pcloud = minimum_filter(pcloud, size=min_filter)

        # crude, just look x pixels away for potential cloud pixels
        dist = distance_transform_edt(~pcloud)
        pixel_radius = 100.0
        pshadow = (dist < pixel_radius) & pshadow

        # remove cloud shadow outliers
        pshadow = minimum_filter(pshadow, size=min_filter)

    if max_filter:
        # grow around the edges
        # logger.info("Buffer edges with maximum filter")
        from scipy.ndimage.filters import maximum_filter

        pcloud = maximum_filter(pcloud, size=max_filter)
        pshadow = maximum_filter(pshadow, size=max_filter)

    if combined:
        return pcloud | pshadow | water

    if cloud_and_shadow:
        return pcloud | pshadow

    return pcloud, pshadow, water
Calculate the potential cloud layer from source data *This is the high level function which ties together all the equations for generating potential clouds* Parameters ---------- blue: ndarray green: ndarray red: ndarray nir: ndarray swir1: ndarray swir2: ndarray cirrus: ndarray tirs1: ndarray min_filter: 2-element tuple, default=(3,3) Defines the window for the minimum_filter, for removing outliers max_filter: 2-element tuple, default=(21, 21) Defines the window for the maximum_filter, for "buffering" the edges combined: make a boolean array masking all (cloud, shadow, water) Output ------ ndarray, boolean: potential cloud layer; True = cloud ndarray, boolean potential cloud shadow layer; True = cloud shadow :param cloud_and_shadow:
entailment
def gdal_nodata_mask(pcl, pcsl, tirs_arr):
    """Build a GDAL-style uint8 validity mask.

    Pixels flagged as cloud (``pcl``), cloud shadow (``pcsl``), or
    thermal nodata (NaN or 0 in ``tirs_arr``) become 0; every other
    pixel becomes 255.
    """
    thermal_nodata = np.isnan(tirs_arr) | (tirs_arr == 0)
    valid = ~(pcl | pcsl | thermal_nodata)
    return (valid * 255).astype('uint8')
Given a boolean potential cloud layer, a potential cloud shadow layer and a thermal band Calculate the GDAL-style uint8 mask
entailment
def parsemeta(metadataloc): """Parses the metadata from a Landsat image bundle. Arguments: metadataloc: a filename or a directory. Returns metadata dictionary """ # filename or directory? if several fit, use first one and warn if os.path.isdir(metadataloc): metalist = glob.glob(os.path.join(metadataloc, METAPATTERN)) if not metalist: raise MTLParseError( "No files matching metadata file pattern in directory %s." % metadataloc) elif len(metalist) > 0: metadatafn = metalist[0] filehandle = open(metadatafn, 'r') if len(metalist) > 1: logging.warning( "More than one file in directory match metadata " + "file pattern. Using %s." % metadatafn) elif os.path.isfile(metadataloc): metadatafn = metadataloc filehandle = open(metadatafn, 'r') logging.info("Using file %s." % metadatafn) elif 'L1_METADATA_FILE' in metadataloc: filehandle = StringIO(metadataloc) else: raise MTLParseError( "File location %s is unavailable " % metadataloc + "or doesn't contain a suitable metadata file.") # Reading file line by line and inserting data into metadata dictionary status = 0 metadata = {} grouppath = [] dictpath = [metadata] for line in filehandle: if status == 4: # we reached the end in the previous iteration, # but are still reading lines logging.warning( "Metadata file %s appears to " % metadatafn + "have extra lines after the end of the metadata. " + "This is probably, but not necessarily, harmless.") status = _checkstatus(status, line) grouppath, dictpath = _transstat(status, grouppath, dictpath, line) return metadata
Parses the metadata from a Landsat image bundle. Arguments: metadataloc: a filename or a directory. Returns metadata dictionary
entailment
def _checkstatus(status, line):
    """Returns state/status after reading the next line.

    The status codes are::
        0 - BEGIN parsing; 1 - ENTER METADATA GROUP, 2 - READ METADATA LINE,
        3 - END METADATA GROUP, 4 - END PARSING

    Permitted Transitions::
        0 --> 1, 0 --> 4
        1 --> 1, 1 --> 2, 1 --> 3
        2 --> 2, 2 --> 3
        3 --> 1, 3 --> 3, 3 --> 4
    """
    newstatus = 0
    if status == 0:
        # begin --> enter metadata group OR end
        if _islinetype(line, GRPSTART):
            newstatus = 1
        elif _isfinal(line):
            newstatus = 4
    elif status == 1:
        # enter metadata group --> enter metadata group
        # OR add metadata item OR leave metadata group
        if _islinetype(line, GRPSTART):
            newstatus = 1
        elif _islinetype(line, GRPEND):
            newstatus = 3
        elif _isassignment(line):
            # test AFTER start and end, as both are also assignments
            newstatus = 2
    elif status == 2:
        if _islinetype(line, GRPEND):
            newstatus = 3
        elif _isassignment(line):
            # test AFTER start and end, as both are also assignments
            newstatus = 2
    elif status == 3:
        if _islinetype(line, GRPSTART):
            newstatus = 1
        elif _islinetype(line, GRPEND):
            newstatus = 3
        elif _isfinal(line):
            newstatus = 4
    if newstatus != 0:
        return newstatus
    elif status != 4:
        raise MTLParseError(
            "Cannot parse the following line after status "
            + "'%s':\n%s" % (STATUSCODE[status], line))
    # NOTE(review): when status == 4 and no transition matches, control
    # falls through and implicitly returns None — confirm that callers
    # (parsemeta's loop) tolerate a None status on trailing lines.
Returns state/status after reading the next line. The status codes are:: 0 - BEGIN parsing; 1 - ENTER METADATA GROUP, 2 - READ METADATA LINE, 3 - END METADATA GROUP, 4 - END PARSING Permitted Transitions:: 0 --> 1, 0 --> 4 1 --> 1, 1 --> 2, 1 --> 3 2 --> 2, 2 --> 3 3 --> 1, 3 --> 3, 3 --> 4
entailment
def _transstat(status, grouppath, dictpath, line):
    """Executes processing steps when reading a line.

    ``grouppath`` is the stack of currently open group names and
    ``dictpath`` the matching stack of nested dicts (root dict first);
    both are mutated in place and returned.
    """
    if status == 0:
        raise MTLParseError(
            "Status should not be '%s' after reading line:\n%s"
            % (STATUSCODE[status], line))
    elif status == 1:
        # entering a (possibly nested) group: create an empty dict under
        # the current group and descend into it
        currentdict = dictpath[-1]
        currentgroup = _getgroupname(line)
        grouppath.append(currentgroup)
        currentdict[currentgroup] = {}
        dictpath.append(currentdict[currentgroup])
    elif status == 2:
        # plain KEY = VALUE line within the current group
        currentdict = dictpath[-1]
        newkey, newval = _getmetadataitem(line)

        # USGS has started quoting the scene center time. If this
        # happens strip quotes before post processing.
        if newkey == 'SCENE_CENTER_TIME' and newval.startswith('"') \
                and newval.endswith('"'):
            # logging.warning('Strip quotes off SCENE_CENTER_TIME.')
            newval = newval[1:-1]

        currentdict[newkey] = _postprocess(newval)
    elif status == 3:
        # leaving a group: the END_GROUP name must match the group we
        # are currently inside of
        oldgroup = _getendgroupname(line)
        if oldgroup != grouppath[-1]:
            raise MTLParseError(
                "Reached line '%s' while reading group '%s'."
                % (line.strip(), grouppath[-1]))
        del grouppath[-1]
        del dictpath[-1]
        try:
            currentgroup = grouppath[-1]
        except IndexError:
            currentgroup = None
    elif status == 4:
        # END of metadata: every opened group must have been closed
        if grouppath:
            raise MTLParseError(
                "Reached end before end of group '%s'" % grouppath[-1])
    return grouppath, dictpath
Executes processing steps when reading a line
entailment
def _postprocess(valuestr): """ Takes value as str, returns str, int, float, date, datetime, or time """ # USGS has started quoting time sometimes. Grr, strip quotes in this case intpattern = re.compile(r'^\-?\d+$') floatpattern = re.compile(r'^\-?\d+\.\d+(E[+-]?\d\d+)?$') datedtpattern = '%Y-%m-%d' datedttimepattern = '%Y-%m-%dT%H:%M:%SZ' timedtpattern = '%H:%M:%S.%f' timepattern = re.compile(r'^\d{2}:\d{2}:\d{2}(\.\d{6})?') if valuestr.startswith('"') and valuestr.endswith('"'): # it's a string return valuestr[1:-1] elif re.match(intpattern, valuestr): # it's an integer return int(valuestr) elif re.match(floatpattern, valuestr): # floating point number return float(valuestr) # now let's try the datetime objects; throws exception if it doesn't match try: return datetime.datetime.strptime(valuestr, datedtpattern).date() except ValueError: pass try: return datetime.datetime.strptime(valuestr, datedttimepattern) except ValueError: pass # time parsing is complicated: Python's datetime module only accepts # fractions of a second only up to 6 digits mat = re.match(timepattern, valuestr) if mat: test = mat.group(0) try: return datetime.datetime.strptime(test, timedtpattern).time() except ValueError: pass # If we get here, we still haven't returned anything. logging.info( "The value %s couldn't be parsed as " % valuestr + "int, float, date, time, datetime. Returning it as string.") return valuestr
Takes value as str, returns str, int, float, date, datetime, or time
entailment
def warp_vrt(directory, delete_extra=False, use_band_map=False, overwrite=False,
             remove_bqa=True, return_profile=False):
    """Read in image geometry, resample subsequent images to same grid.

    The purpose of this function is to snap many Landsat images to one
    geometry.  Use Landsat578 to download and unzip them, then run them
    through this to get identical geometries for analysis.  The first
    scene encountered defines the master grid; every other GeoTIFF is
    warped onto it in place.

    :param directory: A directory containing sub-directories of Landsat images.
    :param delete_extra: remove files that were not warped (masks/MTL are kept)
    :param use_band_map: warp only the bands listed in BandMap for the sensor
    :param overwrite: re-warp even if resample_meta.txt already exists
    :param remove_bqa: delete *BQA.TIF files instead of warping them
    :param return_profile: return the last written rasterio dataset
    :return: None, or the last dataset when return_profile is True
    """
    if 'resample_meta.txt' in os.listdir(directory) and not overwrite:
        print('{} has already had component images warped'.format(directory))
        return None

    mapping = {'LC8': Landsat8, 'LE7': Landsat7, 'LT5': Landsat5}

    vrt_options = {}
    # os.walk already yields full sub-directory paths rooted at `directory`
    list_dir = [x[0] for x in os.walk(directory) if
                os.path.basename(x[0])[:3] in mapping.keys()]
    extras = [os.path.join(directory, x) for x in os.listdir(directory) if
              x.endswith('.tif')]

    first = True
    dst = None  # avoid NameError when no scene directories are found

    for d in list_dir:
        sat = LandsatImage(d).satellite
        # fresh copy per scene: the original aliased `extras`, so band
        # paths accumulated across scenes and files were warped repeatedly
        paths = list(extras)
        # `d` is already the full path; joining it onto `directory` again
        # (as the original did) produced a doubled, nonexistent path
        root = d

        if os.path.isdir(root):
            for x in os.listdir(root):
                full_path = os.path.join(root, x)
                if remove_bqa and x.endswith('BQA.TIF'):
                    # the original passed the bare filename to os.remove,
                    # which only worked when run from inside the directory
                    try:
                        os.remove(full_path)
                    except FileNotFoundError:
                        pass
                elif use_band_map:
                    bands = BandMap().selected
                    for y in bands[sat]:
                        if x.endswith('B{}.TIF'.format(y)):
                            paths.append(full_path)
                else:
                    if x.endswith('.TIF') or x.endswith('.tif'):
                        paths.append(full_path)

        if first:
            # the first scene defines the master grid
            landsat = mapping[sat](d)
            geometry = landsat.rasterio_geometry

            vrt_options = {'resampling': Resampling.nearest,
                           'dst_crs': geometry['crs'],
                           'dst_transform': geometry['transform'],
                           'dst_height': geometry['height'],
                           'dst_width': geometry['width']}

            message = """
            This directory has been resampled to same grid.
            Master grid is {}.
            {}
            """.format(d, datetime.now())
            with open(os.path.join(directory, 'resample_meta.txt'), 'w') as f:
                f.write(message)
            first = False

        for tif_path in paths:
            print('warping {}'.format(os.path.basename(tif_path)))
            with rasopen(tif_path, 'r') as src:
                with WarpedVRT(src, **vrt_options) as vrt:
                    data = vrt.read()
                    dst_dir, name = os.path.split(tif_path)
                    outfile = os.path.join(dst_dir, name)
                    meta = vrt.meta.copy()
                    meta['driver'] = 'GTiff'
                    with rasopen(outfile, 'w', **meta) as dst:
                        dst.write(data)

        if delete_extra:
            for x in os.listdir(root):
                x_file = os.path.join(root, x)
                if x_file not in paths:
                    if x[-7:] not in ['ask.tif', 'MTL.txt']:
                        print('removing {}'.format(x_file))
                        os.remove(x_file)

    if return_profile:
        # NOTE(review): `dst` is the last rasterio writer and its context
        # has exited (dataset closed), matching the original behavior —
        # confirm callers only read the profile from it
        return dst
Read in image geometry, resample subsequent images to same grid. The purpose of this function is to snap many Landsat images to one geometry. Use Landsat578 to download and unzip them, then run them through this to get identical geometries for analysis. Files :param use_band_map: :param delete_extra: :param directory: A directory containing sub-directories of Landsat images. :return: None
entailment
def tox_get_python_executable(envconfig):
    """Return a python executable for the given python base name.

    The first plugin/hook which returns an executable path will determine it.

    ``envconfig`` is the testenv configuration which contains
    per-testenv configuration, notably the ``.envname`` and
    ``.basepython`` setting.

    Returns the path printed by ``pyenv which``, or None (falling back to
    tox's built-in lookup) when pyenv fails and fallback is allowed.
    """
    try:
        # pylint: disable=no-member
        # locate the pyenv binary on PATH; fall back to the bare name
        # so a helpful OSError is raised below if it's missing
        pyenv = (getattr(py.path.local.sysfind('pyenv'), 'strpath', 'pyenv')
                 or 'pyenv')
        cmd = [pyenv, 'which', envconfig.basepython]
        pipe = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True
        )
        out, err = pipe.communicate()
    except OSError:
        err = '\'pyenv\': command not found'
        LOG.warning(
            "pyenv doesn't seem to be installed, you probably "
            "don't want this plugin installed either."
        )
    else:
        if pipe.poll() == 0:
            # pyenv resolved the interpreter; strip the trailing newline
            return out.strip()
        else:
            if not envconfig.tox_pyenv_fallback:
                raise PyenvWhichFailed(err)
            # implicit None return lets tox's default lookup run next
            LOG.debug("`%s` failed thru tox-pyenv plugin, falling back. "
                      "STDERR: \"%s\" | To disable this behavior, set "
                      "tox_pyenv_fallback=False in your tox.ini or use "
                      " --tox-pyenv-no-fallback on the command line.",
                      ' '.join([str(x) for x in cmd]), err)
Return a python executable for the given python base name. The first plugin/hook which returns an executable path will determine it. ``envconfig`` is the testenv configuration which contains per-testenv configuration, notably the ``.envname`` and ``.basepython`` setting.
entailment
def _setup_no_fallback(parser):
    """Add the option, --tox-pyenv-no-fallback.

    If this option is set, do not allow fallback to tox's built-in
    strategy for looking up python executables if the call to `pyenv which`
    by this plugin fails. This will allow the error to raise instead
    of falling back to tox's default behavior.

    Registers both a command-line flag and an equivalent per-testenv
    ini option (``tox_pyenv_fallback``); the CLI flag wins.
    """
    cli_dest = 'tox_pyenv_fallback'
    halp = ('If `pyenv which {basepython}` exits non-zero when looking '
            'up the python executable, do not allow fallback to tox\'s '
            'built-in default logic.')
    # Add a command-line option.
    tox_pyenv_group = parser.argparser.add_argument_group(
        title='{0} plugin options'.format(__title__),
    )
    # store_false with default=True: passing the flag disables fallback
    tox_pyenv_group.add_argument(
        '--tox-pyenv-no-fallback', '-F',
        dest=cli_dest,
        default=True,
        action='store_false',
        help=halp
    )

    def _pyenv_fallback(testenv_config, value):
        # combine CLI and ini: fallback stays enabled unless BOTH say no
        cli_says = getattr(testenv_config.config.option, cli_dest)
        return cli_says or value

    # Add an equivalent tox.ini [testenv] section option.
    parser.add_testenv_attribute(
        name=cli_dest,
        type="bool",
        postprocess=_pyenv_fallback,
        default=False,
        help=('If `pyenv which {basepython}` exits non-zero when looking '
              'up the python executable, allow fallback to tox\'s '
              'built-in default logic.'),
    )
Add the option, --tox-pyenv-no-fallback. If this option is set, do not allow fallback to tox's built-in strategy for looking up python executables if the call to `pyenv which` by this plugin fails. This will allow the error to raise instead of falling back to tox's default behavior.
entailment
def sendEmail(self, emails, massType='SingleEmailMessage'):
    """
    Send one or more emails from Salesforce.

    Parameters:
        emails - a dictionary or list of dictionaries, each representing
                 a single email as described by
                 https://www.salesforce.com/us/developer/docs/api/Content
                 /sforce_api_calls_sendemail.htm
        massType - 'SingleEmailMessage' or 'MassEmailMessage'.
                   MassEmailMessage is used for mailmerge of up to 250
                   recipients in a single pass.

    Note:
        Newly created Salesforce Sandboxes default to System email only.
        In this situation, sendEmail() will fail with
        NO_MASS_MAIL_PERMISSION.
    """
    # build the SOAP request and post it over the existing connection
    return SendEmailRequest(
        self.__serverUrl,
        self.sessionId,
        emails,
        massType
    ).post(self.__conn)
Send one or more emails from Salesforce. Parameters: emails - a dictionary or list of dictionaries, each representing a single email as described by https://www.salesforce.com/us /developer/docs/api/Content/sforce_api_calls_sendemail.htm massType - 'SingleEmailMessage' or 'MassEmailMessage'. MassEmailMessage is used for mailmerge of up to 250 recepients in a single pass. Note: Newly created Salesforce Sandboxes default to System email only. In this situation, sendEmail() will fail with NO_MASS_MAIL_PERMISSION.
entailment
def quote(myitem, elt=True):
    '''Escape a string for embedding in XML.

    NOTE(review): despite the original "URL encode" wording, this
    performs XML escaping, not URL encoding.

    Long element text (> 24 chars) containing '<' but no ']]>' is wrapped
    in a CDATA section; otherwise '&', '<' and ']]>' are entity-escaped.
    When ``elt`` is False (attribute-value context), '"' is escaped too.
    '''
    if elt and '<' in myitem and len(myitem) > 24 and myitem.find(']]>') == -1:
        # safe to wrap as CDATA since the text contains no ']]>' terminator
        return '<![CDATA[%s]]>' % (myitem)
    else:
        myitem = myitem.replace('&', '&amp;').\
            replace('<', '&lt;').replace(']]>', ']]&gt;')
        if not elt:
            myitem = myitem.replace('"', '&quot;')
        return myitem
URL encode string
entailment
def _doPrep(field_dict): """ _doPrep is makes changes in-place. Do some prep work converting python types into formats that Salesforce will accept. This includes converting lists of strings to "apple;orange;pear". Dicts will be converted to embedded objects None or empty list values will be Null-ed """ fieldsToNull = [] for key, value in field_dict.items(): if value is None: fieldsToNull.append(key) field_dict[key] = [] if hasattr(value, '__iter__'): if len(value) == 0: fieldsToNull.append(key) elif isinstance(value, dict): innerCopy = copy.deepcopy(value) _doPrep(innerCopy) field_dict[key] = innerCopy else: field_dict[key] = ";".join(value) if 'fieldsToNull' in field_dict: raise ValueError( "fieldsToNull should be populated by the client, not the caller." ) field_dict['fieldsToNull'] = fieldsToNull
_doPrep is makes changes in-place. Do some prep work converting python types into formats that Salesforce will accept. This includes converting lists of strings to "apple;orange;pear". Dicts will be converted to embedded objects None or empty list values will be Null-ed
entailment
def _prepareSObjects(sObjects):
    '''Deep-copy sObjects and prepare the copies for transmission.

    Accepts either a single record dict or a sequence of record dicts;
    each copy is run through ``_doPrep`` in place, leaving the caller's
    originals untouched.
    '''
    prepped = copy.deepcopy(sObjects)
    # a bare dict is a single record, anything else is a sequence of them
    records = [prepped] if isinstance(prepped, dict) else prepped
    for record in records:
        _doPrep(record)
    return prepped
Prepare a SObject
entailment
def sendEmail(self, emails, mass_type='SingleEmailMessage'):
    """
    Send one or more emails from Salesforce.

    Parameters:
        emails - a dictionary or list of dictionaries, each representing
                 a single email as described by
                 https://www.salesforce.com/us/developer/docs/api/Content
                 /sforce_api_calls_sendemail.htm
        mass_type - 'SingleEmailMessage' or 'MassEmailMessage'.
                    MassEmailMessage is used for mailmerge of up to 250
                    recipients in a single pass.

    Note:
        Newly created Salesforce Sandboxes default to System email only.
        In this situation, sendEmail() will fail with
        NO_MASS_MAIL_PERMISSION.

    Returns a list of dicts, one per email, each with 'success' (bool)
    and 'errors' (list) keys.
    """
    preparedEmails = _prepareSObjects(emails)
    if isinstance(preparedEmails, dict):
        # If root element is a dict, then this is a single object not an
        # array; sendEmail does not accept the fieldsToNull key _doPrep adds
        del preparedEmails['fieldsToNull']
    else:
        # else this is an array, and each element should be prepped.
        for listitems in preparedEmails:
            del listitems['fieldsToNull']
    res = BaseClient.sendEmail(self, preparedEmails, mass_type)
    # normalize a single-result response to a list for uniform handling
    if type(res) not in (TupleType, ListType):
        res = [res]
    data = list()
    for resu in res:
        d = dict()
        data.append(d)
        d['success'] = success = _bool(resu[_tPartnerNS.success])
        if not success:
            # NOTE(review): the trailing comma appears to select all
            # matching child elements of the SOAP response — confirm
            # against the XML-binding library's indexing semantics
            d['errors'] = [_extractError(e)
                           for e in resu[_tPartnerNS.errors,]]
        else:
            d['errors'] = list()
    return data
Send one or more emails from Salesforce. Parameters: emails - a dictionary or list of dictionaries, each representing a single email as described by https://www.salesforce.com /us/developer/docs/api/Content/sforce_api_calls_sendemail .htm massType - 'SingleEmailMessage' or 'MassEmailMessage'. MassEmailMessage is used for mailmerge of up to 250 recepients in a single pass. Note: Newly created Salesforce Sandboxes default to System email only. In this situation, sendEmail() will fail with NO_MASS_MAIL_PERMISSION.
entailment
def queryTypesDescriptions(self, types):
    """
    Given a list of types, construct a dictionary such that
    each key is a type, and each value is the corresponding sObject
    for that type.
    """
    types = list(types)
    # Only call describeSObjects for a non-empty list; the API call is
    # skipped entirely when there is nothing to describe.
    types_descs = self.describeSObjects(types) if types else []
    # dict(zip(...)) replaces the former dict(map(lambda t, d: (t, d), ...))
    # — identical pairing (both stop at the shorter sequence), clearer intent.
    return dict(zip(types, types_descs))
Given a list of types, construct a dictionary such that each key is a type, and each value is the corresponding sObject for that type.
entailment
def create_token(self):
    """Create a session protection token for this client.

    The token is the SHA-256 hex digest of the client's IP address and
    user agent, so a session presented from a different client can be
    detected. Subclasses can override this method to implement
    different token generation algorithms.
    """
    agent = request.headers.get('User-Agent')
    if agent is None:  # pragma: no cover
        agent = 'no user agent'
    digest = sha256()
    digest.update(self._get_remote_addr() + b'|' + agent.encode('utf-8'))
    return digest.hexdigest()
Create a session protection token for this client. This method generates a session protection token for the client, which consists in a hash of the user agent and the IP address. This method can be overridden by subclasses to implement different token generation algorithms.
entailment
def clear_session(self, response):
    """Reset the session after it was found to be invalid.

    Subclasses can override this method to implement a custom session
    reset.
    """
    session.clear()
    # If Flask-Login is loaded, also expire its "remember me" cookie so
    # the user is not silently logged back in.
    if 'flask_login' in sys.modules:
        cookie_name = current_app.config.get('REMEMBER_COOKIE',
                                             'remember_token')
        response.set_cookie(cookie_name, '', expires=0, max_age=0)
Clear the session. This method is invoked when the session is found to be invalid. Subclasses can override this method to implement a custom session reset.
entailment
def haikunate(self, delimiter='-', token_length=4, token_hex=False,
              token_chars='0123456789'):
    """
    Generate heroku-like random names to use in your python applications

    :param delimiter: Delimiter
    :param token_length: TokenLength
    :param token_hex: TokenHex
    :param token_chars: TokenChars
    :type delimiter: str
    :type token_length: int
    :type token_hex: bool
    :type token_chars: str
    :return: heroku-like random string
    :rtype: str
    """
    if token_hex:
        token_chars = '0123456789abcdef'
    # Keep the draw order (adjective, noun, token chars) stable so a
    # seeded RNG produces the same names as before.
    pick = self._random_element
    adjective = pick(self._adjectives)
    noun = pick(self._nouns)
    token = ''.join(pick(token_chars) for _ in range(token_length))
    return delimiter.join(
        part for part in (adjective, noun, token) if part)
Generate heroku-like random names to use in your python applications :param delimiter: Delimiter :param token_length: TokenLength :param token_hex: TokenHex :param token_chars: TokenChars :type delimiter: str :type token_length: int :type token_hex: bool :type token_chars: str :return: heroku-like random string :rtype: str
entailment
def get_parser_class():
    """
    Return the parser class matching the detected platform.

    Reads the module-level ``distro`` string (set elsewhere at import
    time — presumably from platform detection; confirm).
    """
    global distro
    if distro == 'Linux':
        Parser = parser.LinuxParser
        # Fall back to the generic Unix parser when the Linux-specific
        # command binary does not exist on this system.
        if not os.path.exists(Parser.get_command()[0]):
            Parser = parser.UnixIPParser
    elif distro in ['Darwin', 'MacOSX']:
        Parser = parser.MacOSXParser
    elif distro == 'Windows':
        # For some strange reason, Windows will always be win32, see:
        # https://stackoverflow.com/a/2145582/405682
        Parser = parser.WindowsParser
    else:
        # Unknown platform: use a parser that yields nothing, but log it.
        Parser = parser.NullParser
        Log.error("Unknown distro type '%s'." % distro)
    Log.debug("Distro detected as '%s'" % distro)
    Log.debug("Using '%s'" % Parser)
    return Parser
Returns the parser according to the system platform
entailment
def default_interface(ifconfig=None, route_output=None):
    """
    Return just the default interface device dictionary.

    :param ifconfig: For mocking actual command output
    :param route_output: For mocking actual command output
    """
    global Parser
    parser_instance = Parser(ifconfig=ifconfig)
    return parser_instance._default_interface(route_output=route_output)
Return just the default interface device dictionary. :param ifconfig: For mocking actual command output :param route_output: For mocking actual command output
entailment
def parse(self, ifconfig=None): # noqa: max-complexity=12
    """
    Parse ifconfig output into self._interfaces.

    Optional Arguments:

        ifconfig
            The data (stdout) from the ifconfig command.

            Default is to call exec_cmd(self.get_command()).
    """
    if not ifconfig:
        ifconfig, __, __ = exec_cmd(self.get_command())
    self.ifconfig_data = ifconfig
    cur = None
    patterns = self.get_patterns()
    for line in self.ifconfig_data.splitlines():
        for pattern in patterns:
            m = re.match(pattern, line)
            if not m:
                continue
            groupdict = m.groupdict()
            # Special treatment to trigger which interface we're
            # setting for if 'device' is in the line. Presumably the
            # device of the interface is within the first line of the
            # device block.
            if 'device' in groupdict:
                cur = groupdict['device']
                self.add_device(cur)
            elif cur is None:
                raise RuntimeError(
                    "Got results that don't belong to a device"
                )
            for k, v in groupdict.items():
                if k in self._interfaces[cur]:
                    # Known field: fill it, extend it, or tolerate a
                    # repeated identical value.
                    if self._interfaces[cur][k] is None:
                        self._interfaces[cur][k] = v
                    elif hasattr(self._interfaces[cur][k], 'append'):
                        self._interfaces[cur][k].append(v)
                    elif self._interfaces[cur][k] == v:
                        # Silently ignore if the it's the same value as last. Example: Multiple
                        # inet4 addresses, result in multiple netmasks. Cardinality mismatch
                        continue
                    else:
                        raise RuntimeError(
                            "Tried to add {}={} multiple times to {}, it was already: {}".format(
                                k, v, cur, self._interfaces[cur][k]
                            )
                        )
                else:
                    self._interfaces[cur][k] = v
    # Copy the first 'inet4' ip address to 'inet' for backwards compatibility
    # NOTE(review): alter() performs this same inet4 -> 'inet' copy again;
    # the duplication looks redundant — confirm before removing either copy.
    for device, device_dict in self._interfaces.items():
        if len(device_dict['inet4']) > 0:
            device_dict['inet'] = device_dict['inet4'][0]
    # fix it up
    self._interfaces = self.alter(self._interfaces)
Parse ifconfig output into self._interfaces. Optional Arguments: ifconfig The data (stdout) from the ifconfig command. Default is to call exec_cmd(self.get_command()).
entailment
def alter(self, interfaces):
    """
    Used to provide the ability to alter the interfaces dictionary before
    it is returned from self.parse().

    Required Arguments:

        interfaces
            The interfaces dictionary.

    Returns: interfaces dict
    """
    # fixup some things
    for device, device_dict in interfaces.items():
        # Mirror the first IPv4 address into 'inet' for backwards
        # compatibility.
        if device_dict['inet4']:
            device_dict['inet'] = device_dict['inet4'][0]
        inet = device_dict.get('inet')
        # Idiom fix: was "not device_dict['inet'] is None".
        if inet is not None:
            # Best-effort reverse DNS; lookup failures yield no hostname.
            try:
                device_dict['hostname'] = socket.gethostbyaddr(inet)[0]
            except (socket.herror, socket.gaierror):
                device_dict['hostname'] = None
        # To be sure that hex values and similar are always consistent, we
        # return everything in lowercase. For instance, Windows writes
        # MACs in upper-case.
        for key, value in device_dict.items():
            if hasattr(value, 'lower'):
                device_dict[key] = value.lower()
    return interfaces
Used to provide the ability to alter the interfaces dictionary before it is returned from self.parse(). Required Arguments: interfaces The interfaces dictionary. Returns: interfaces dict
entailment
def _default_interface(self, route_output=None): """ :param route_output: For mocking actual output """ if not route_output: out, __, __ = exec_cmd('/sbin/ip route') lines = out.splitlines() else: lines = route_output.split("\n") for line in lines: line = line.split() if 'default' in line: iface = line[4] return self.interfaces.get(iface, None)
:param route_output: For mocking actual output
entailment
def get(self, line_number):
    """Return the needle positions or None.

    :param int line_number: the number of the line
    :rtype: list
    :return: the needle positions for the line given by
      :paramref:`line_number`, or :obj:`None` if none were given.
      Results are cached per line number.
    """
    try:
        return self._get_cache[line_number]
    except KeyError:
        value = self._get(line_number)
        self._get_cache[line_number] = value
        return value
Return the needle positions or None. :param int line_number: the number of the line :rtype: list :return: the needle positions for a specific line specified by :paramref:`line_number` or :obj:`None` if no were given
entailment
def get_bytes(self, line_number):
    """Get the bytes representing needle positions or None.

    :param int line_number: the line number to take the bytes from
    :rtype: bytes
    :return: the bytes that represent the message, or :obj:`None` if no
      data exists for the line. Depending on the :attr:`machine`, the
      length and result may vary. Results are cached per line number.
    """
    cache = self._needle_position_bytes_cache
    if line_number not in cache:
        positions = self._get(line_number)
        if positions is None:
            cache[line_number] = None
        else:
            cache[line_number] = \
                self._machine.needle_positions_to_bytes(positions)
    return cache[line_number]
Get the bytes representing needle positions or None. :param int line_number: the line number to take the bytes from :rtype: bytes :return: the bytes that represent the message or :obj:`None` if no data is there for the line. Depending on the :attr:`machine`, the length and result may vary.
entailment
def get_line_configuration_message(self, line_number):
    """Return the cnfLine content without id for the line.

    :param int line_number: the number of the line
    :rtype: bytes
    :return: a cnfLine message without id as defined in :ref:`cnfLine`
    """
    if line_number not in self._line_configuration_message_cache:
        line_bytes = self.get_bytes(line_number)
        if line_bytes is not None:
            # Frame layout: low byte of the line number, the needle
            # payload, the is_last flag byte, then a CRC-8 digest over
            # all preceding bytes.
            line_bytes = bytes([line_number & 255]) + line_bytes
            line_bytes += bytes([self.is_last(line_number)])
            line_bytes += crc8(line_bytes).digest()
        self._line_configuration_message_cache[line_number] = line_bytes
        del line_bytes
    line = self._line_configuration_message_cache[line_number]
    if line is None:
        # no need to cache a lot of empty lines
        # Default frame: all-zero payload; the trailing 0x01 presumably
        # sets the last-line flag so the controller stops requesting
        # lines — confirm against the cnfLine specification.
        line = (bytes([line_number & 255]) +
                b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
                b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01')
        line += crc8(line).digest()
    return line
Return the cnfLine content without id for the line. :param int line_number: the number of the line :rtype: bytes :return: a cnfLine message without id as defined in :ref:`cnfLine`
entailment
def read_message_type(file):
    """Return the message class for the next byte read from *file*."""
    first_byte = file.read(1)
    # An empty read means EOF: the connection was closed.
    if not first_byte:
        return ConnectionClosed
    return _message_types.get(first_byte[0], UnknownMessage)
Read the message type from a file.
entailment
def read_end_of_message(self):
    """Consume bytes from the file up to and including b"\\r\\n".

    Also stops at end of file if no terminator is found.
    """
    read = self._file.read
    previous, current = read(1), read(1)
    while previous and current and (previous, current) != (b'\r', b'\n'):
        previous, current = current, read(1)
Read the b"\\r\\n" at the end of the message.
entailment
def _init(self):
    """Read the API version byte and the two firmware version bytes.

    NOTE(review): the original docstring said "Read the success byte",
    which does not match the bytes actually consumed here.
    """
    self._api_version = self._file.read(1)[0]
    self._firmware_version = FirmwareVersion(*self._file.read(2))
Read the success byte.
entailment
def _init(self):
    """Read the requested line number.

    Only the low byte travels on the wire; next_line() expands it to
    the full line number closest to the last requested line.
    """
    self._line_number = next_line(
        self._communication.last_requested_line_number,
        self._file.read(1)[0])
Read the line number.
entailment
def _init(self):
    """Read the state payload.

    Consumes: one ready byte, two bytes per hall sensor (left then
    right), one carriage type byte and one carriage position byte.
    NOTE(review): the original docstring said "Read the success byte",
    which does not match the bytes actually consumed here.
    """
    self._ready = self._file.read(1)
    self._hall_left = self._file.read(2)
    self._hall_right = self._file.read(2)
    self._carriage_type = self._file.read(1)[0]
    self._carriage_position = self._file.read(1)[0]
Read the success byte.
entailment
def _init(self): """Read the b"\\r\\n" at the end of the message.""" read_values = [] read = self._file.read last = read(1) current = read(1) while last != b'' and current != b'' and not \ (last == b'\r' and current == b'\n'): read_values.append(last) last = current current = read(1) if current == b'' and last != b'\r': read_values.append(last) self._bytes = b''.join(read_values)
Read the b"\\r\\n" at the end of the message.
entailment
def send(self):
    """Serialize this message and write it, terminated by b"\\r\\n"."""
    write = self._file.write
    write(self.as_bytes())
    write(b'\r\n')
Send this message to the controller.
entailment
def init(self, left_end_needle, right_end_needle):
    """Initialize the StartRequest with start and stop needle.

    :raises TypeError: if the arguments are not integers
    :raises ValueError: if the values do not match the
      :ref:`specification <m4-01>`
    """
    if not isinstance(left_end_needle, int):
        raise TypeError(_left_end_needle_error_message(left_end_needle))
    if not 0 <= left_end_needle <= 198:
        raise ValueError(_left_end_needle_error_message(left_end_needle))
    if not isinstance(right_end_needle, int):
        raise TypeError(_right_end_needle_error_message(right_end_needle))
    if not 1 <= right_end_needle <= 199:
        raise ValueError(_right_end_needle_error_message(right_end_needle))
    self._left_end_needle = left_end_needle
    self._right_end_needle = right_end_needle
Initialize the StartRequest with start and stop needle. :raises TypeError: if the arguments are not integers :raises ValueError: if the values do not match the :ref:`specification <m4-01>`
entailment
def content_bytes(self):
    """Return the line configuration message for this line.

    NOTE(review): the original docstring claimed "Return the start and
    stop needle", which does not match the code — it delegates to the
    needle positions' get_line_configuration_message().
    """
    get_message = \
        self._communication.needle_positions.get_line_configuration_message
    return get_message(self._line_number)
Return the start and stop needle.
entailment
def sum_all(iterable, start):
    """Sum up an iterable starting with a start value.

    In contrast to :func:`sum`, this also works on other types like
    :class:`lists <list>` and :class:`sets <set>`: when *start* has no
    ``+`` operator, the in-place union ``|=`` is used instead.
    """
    use_add = hasattr(start, "__add__")
    for element in iterable:
        if use_add:
            start += element
        else:
            start |= element
    return start
Sum up an iterable starting with a start value. In contrast to :func:`sum`, this also works on other types like :class:`lists <list>` and :class:`sets <set>`.
entailment
def next_line(last_line, next_line_8bit):
    """Compute the next line based on the last line and a 8bit next line.

    The behaviour of the function is specified in :ref:`reqline`.

    :param int last_line: the last line that was processed
    :param int next_line_8bit: the lower 8 bits of the next line
    :return: the next line closest to :paramref:`last_line`

    .. seealso:: :ref:`reqline`
    """
    # Candidates share the low byte with next_line_8bit; they sit on
    # last_line's 256-line "page" or one page below/above it.
    candidate = (last_line & ~255) + next_line_8bit
    below, above = candidate - 256, candidate + 256
    if last_line - below <= candidate - last_line:
        return below
    if above - last_line < last_line - candidate:
        return above
    return candidate
Compute the next line based on the last line and a 8bit next line. The behaviour of the function is specified in :ref:`reqline`. :param int last_line: the last line that was processed :param int next_line_8bit: the lower 8 bits of the next line :return: the next line closest to :paramref:`last_line` .. seealso:: :ref:`reqline`
entailment
def camel_case_to_under_score(camel_case_name):
    """Return the underscore name of a camel case name.

    :param str camel_case_name: a name in camel case such as
      ``"ACamelCaseName"``
    :return: the name using underscores, e.g. ``"a_camel_case_name"``
    :rtype: str
    """
    # Bugfix: the original indexed result[0] unconditionally and raised
    # IndexError for an empty input string.
    if not camel_case_name:
        return ""
    result = []
    for letter in camel_case_name:
        # letter.lower() != letter detects upper-case characters.
        if letter.lower() != letter:
            result.append("_" + letter.lower())
        else:
            result.append(letter.lower())
    # Strip the single leading underscore produced by an upper-case
    # first character.
    if result[0].startswith("_"):
        result[0] = result[0][1:]
    return "".join(result)
Return the underscore name of a camel case name. :param str camel_case_name: a name in camel case such as ``"ACamelCaseName"`` :return: the name using underscores, e.g. ``"a_camel_case_name"`` :rtype: str
entailment
def _message_received(self, message): """Notify the observers about the received message.""" with self.lock: self._state.receive_message(message) for callable in chain(self._on_message_received, self._on_message): callable(message)
Notify the observers about the received message.
entailment
def receive_message(self):
    """Read exactly one message from the file and dispatch it.

    Must only be called while :meth:`can_receive_messages` is true.
    """
    with self.lock:
        assert self.can_receive_messages()
        message_class = self._read_message_type(self._file)
        self._message_received(message_class(self._file, self))
Receive a message from the file.
entailment
def can_receive_messages(self):
    """Whether this communication is ready to receive messages.

    :rtype: bool

    .. code:: python

        assert not communication.can_receive_messages()
        communication.start()
        assert communication.can_receive_messages()
        communication.stop()
        assert not communication.can_receive_messages()
    """
    with self.lock:
        state = self._state
        return not (state.is_waiting_for_start() or
                    state.is_connection_closed())
Whether this communication is ready to receive messages. :rtype: bool .. code:: python assert not communication.can_receive_messages() communication.start() assert communication.can_receive_messages() communication.stop() assert not communication.can_receive_messages()
entailment
def stop(self):
    """Stop the communication with the shield.

    Feeds a synthetic ConnectionClosed message through the normal
    receive path so state transitions and observers run as usual.
    """
    with self.lock:
        self._message_received(ConnectionClosed(self._file, self))
Stop the communication with the shield.
entailment
def send(self, host_message_class, *args):
    """Send a host message.

    :param type host_message_class: a subclass of
      :class:`AYABImterface.communication.host_messages.Message`
    :param args: additional arguments passed on to the
      :paramref:`host_message_class` constructor
    """
    message = host_message_class(self._file, self, *args)
    with self.lock:
        message.send()
        # Observers registered in _on_message also see outgoing messages.
        for notify in self._on_message:
            notify(message)
Send a host message. :param type host_message_class: a subclass of :class:`AYABImterface.communication.host_messages.Message` :param args: additional arguments that shall be passed to the :paramref:`host_message_class` as arguments
entailment
def state(self, new_state):
    """Transition to *new_state*, running exit/enter hooks under the lock."""
    with self.lock:
        old_state = self._state
        old_state.exit()
        self._state = new_state
        new_state.enter()
Set the state.
entailment
def parallelize(self, seconds_to_wait=2):
    """Start a parallel thread for receiving messages.

    If :meth:`start` was no called before, start will be called in the
    thread. The thread calls :meth:`receive_message` until the
    :attr:`state`
    :meth:`~AYABInterface.communication.states.State.is_connection_closed`.

    :param float seconds_to_wait: A time in seconds to wait with the
      parallel execution. This is useful to allow the controller time to
      initialize.

    .. seealso:: :attr:`lock`, :meth:`runs_in_parallel`
    """
    with self.lock:
        thread = Thread(target=self._parallel_receive_loop,
                        args=(seconds_to_wait,))
        # Bugfix: the original wrote ``thread.deamon = True`` (typo),
        # which only created an unused attribute and left the thread
        # non-daemonic, so a stuck receive loop could keep the process
        # alive at exit. ``daemon`` must be set before start().
        thread.daemon = True
        thread.start()
        self._thread = thread
Start a parallel thread for receiving messages. If :meth:`start` was no called before, start will be called in the thread. The thread calls :meth:`receive_message` until the :attr:`state` :meth:`~AYABInterface.communication.states.State.is_connection_closed`. :param float seconds_to_wait: A time in seconds to wait with the parallel execution. This is useful to allow the controller time to initialize. .. seealso:: :attr:`lock`, :meth:`runs_in_parallel`
entailment
def _parallel_receive_loop(self, seconds_to_wait):
    """Run the receiving in parallel.

    Sleeps *seconds_to_wait* first (giving the controller time to
    initialize), then receives messages until the connection is closed.
    """
    sleep(seconds_to_wait)
    # self._lock guards the receiving-thread counter; self.lock is the
    # general communication lock used elsewhere — two distinct locks.
    with self._lock:
        self._number_of_threads_receiving_messages += 1
    try:
        with self._lock:
            if self.state.is_waiting_for_start():
                self.start()
        while True:
            # NOTE(review): the closed-check and receive_message() are
            # separate lock acquisitions, so a message may still be read
            # right after another thread closes the connection — confirm
            # this race is acceptable.
            with self.lock:
                if self.state.is_connection_closed():
                    return
            self.receive_message()
    finally:
        # Always decrement the counter, even on exceptions, so
        # bookkeeping stays consistent.
        with self._lock:
            self._number_of_threads_receiving_messages -= 1
Run the receiving in parallel.
entailment