def get_weight(self, weight=operator.attrgetter('weight')): """:param weight: source weight function :returns: total weight of the source model"""
return sum(weight(src) for src in self.get_sources())
def get_occupation(self, atom_index, orbital): """Returns the occupation for a particular orbital of a particular atom. Args: atom_num (int): Index of atom in the PROCAR. It should be noted that VASP uses 1-based indexing for atoms, but this is converted to 0-based indexing in this parser...
orbital_index = self.orbitals.index(orbital) return {spin: np.sum(d[:, :, atom_index, orbital_index] * self.weights[:, None]) for spin, d in self.data.items()}
def listar_healthcheck_expect(self, id_ambiente): """Lists the healthcheck_expect entries of an environment. :param id_ambiente: Environment identifier. :return: Dictionary with the following structure: {'healthcheck_expect': [{'id': <id_healthcheck_expect>, 'expect_string': <...
if not is_valid_int_param(id_ambiente): raise InvalidParameterError(u'O identificador do ambiente é inválido ou não foi informado.') url = 'healthcheckexpect/ambiente/' + str(id_ambiente) + '/' code, xml = self.submit(None, 'GET', url) key = 'healthcheck_expect' return get_list_map(self.respon...
def MergeFlags(self, args, unique=1, dict=None): """Merge the dict in args into the construction variables of this env, or the passed-in dict. If args is not a dict, it is converted into a dict using ParseFlags. If unique is not set, the flags are appended rather than merged."""
if dict is None: dict = self if not SCons.Util.is_Dict(args): args = self.ParseFlags(args) if not unique: self.Append(**args) return self for key, value in args.items(): if not value: continue try: orig = self[key] except KeyError: ori...
def run_command(self, commands, timeout_sec=None, exception=None): """Executes the given commands and sends OVSDB messages. ``commands`` must be a list of :py:mod:`ryu.lib.ovs.vsctl.VSCtlCommand`. If ``timeout_sec`` is specified, raises exception after the given timeou...
if timeout_sec is None: self._run_command(commands) else: with hub.Timeout(timeout_sec, exception): self._run_command(commands)
def _who_when ( self , s , cmd , section , accept_just_who = False ) : """Parse who and when information from a string . : return : a tuple of ( name , email , timestamp , timezone ) . name may be the empty string if only an email address was given ."""
match = _WHO_AND_WHEN_RE . search ( s ) if match : datestr = match . group ( 3 ) . lstrip ( ) if self . date_parser is None : # auto - detect the date format if len ( datestr . split ( b' ' ) ) == 2 : date_format = 'raw' elif datestr == b'now' : date_format = 'now' ...
def writeline(self, fmt, *args): """Write `line` (list of objects) with given `fmt` to file. The `line` will be chained if object is iterable (except for basestrings)."""
fmt = self.endian + fmt size = struct.calcsize(fmt) fix = struct.pack(self.endian + 'i', size) line = struct.pack(fmt, *args) self.write(fix) self.write(line) self.write(fix)
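The body above frames each record with its byte length before and after the payload, Fortran-unformatted style. A minimal standalone sketch of that framing, with the endianness and field layout assumed rather than taken from the class:
import io
import struct

def write_record(stream, fmt, *values, endian='<'):
    # Pack the payload, then bracket it with its byte length (the "fix" words above).
    payload = struct.pack(endian + fmt, *values)
    length = struct.pack(endian + 'i', len(payload))
    stream.write(length)
    stream.write(payload)
    stream.write(length)

buf = io.BytesIO()
write_record(buf, 'if', 7, 3.5)  # one record holding an int and a float
print(buf.getvalue().hex())      # starts and ends with 08000000 -> 8-byte payload bracketed by its length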
def _monitor ( self ) : """Monitor the queue for items , and ask the handler to deal with them . This method runs on a separate , internal thread . The thread will terminate if it sees a sentinel object in the queue ."""
err_msg = ( "invalid internal state:" " _stop_nowait can not be set if _stop is not set" ) assert self . _stop . isSet ( ) or not self . _stop_nowait . isSet ( ) , err_msg q = self . queue has_task_done = hasattr ( q , 'task_done' ) while not self . _stop . isSet ( ) : try : record = self . dequeue ( True )...
def _forwards ( apps , schema_editor ) : """Make sure that the MarkupItem model actually points to the correct proxy model , that implements the given language ."""
# Need to work on the actual models here . from fluent_contents . plugins . markup . models import LANGUAGE_MODEL_CLASSES from fluent_contents . plugins . markup . models import MarkupItem from django . contrib . contenttypes . models import ContentType ctype = ContentType . objects . get_for_model ( MarkupItem ) for l...
def _get_qe ( self , key , obj ) : """Instantiate a query engine , or retrieve a cached one ."""
if key in self . _cached : return self . _cached [ key ] qe = create_query_engine ( obj , self . _class ) self . _cached [ key ] = qe return qe
def xml_starttag(self, name, attrs=None): """Write XML start tag."""
self.write(self.indent * self.level) self.write(u"<%s" % xmlquote(name)) if attrs: for name, value in attrs.items(): args = (xmlquote(name), xmlquoteattr(value)) self.write(u' %s="%s"' % args) self.writeln(u">") self.level += 1
def extract_opts(**opts): """Small utility to extract a set of one-char options from sys.argv."""
for opt, init in opts.items(): try: idx = sys.argv.index('-%s' % opt) except ValueError: continue if idx + 1 < len(sys.argv): opts[opt] = sys.argv.pop(idx + 1) sys.argv.pop(idx) return opts
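A brief usage sketch for extract_opts, assuming the function above is defined in scope; the flag name and argv contents are made up for illustration:
import sys

sys.argv = ['prog', '-n', '5', 'input.txt']   # hypothetical command line
opts = extract_opts(n='1')                    # 'n' keeps its default '1' if the flag is absent
print(opts)      # {'n': '5'}
print(sys.argv)  # ['prog', 'input.txt'] -- the flag and its value were popped out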
def cacheable(self): """Return the cacheable attribute of the BFD file being processed."""
if not self._ptr: raise BfdException("BFD not initialized") return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.CACHEABLE)
def attributes ( cls , create = False , extra = None ) : """Build a dict of attribute values , respecting declaration order . The process is : - Handle ' orderless ' attributes , overriding defaults with provided kwargs when applicable - Handle ordered attributes , overriding them with provided kwargs when ...
warnings . warn ( "Usage of Factory.attributes() is deprecated." , DeprecationWarning , stacklevel = 2 , ) declarations = cls . _meta . pre_declarations . as_dict ( ) declarations . update ( extra or { } ) from . import helpers return helpers . make_factory ( dict , ** declarations )
def addDarkCurrent ( self , slope , intercept = None , date = None , info = '' , error = None ) : '''Args : slope ( np . array ) intercept ( np . array ) error ( numpy . array ) slope ( float ) : dPx / dExposureTime [ sec ] error ( float ) : absolute date ( str ) : " DD Mon YY " e . g . " 30 Nov 16"'''
date = _toDate ( date ) self . _checkShape ( slope ) self . _checkShape ( intercept ) d = self . coeffs [ 'dark current' ] if intercept is None : data = slope else : data = ( slope , intercept ) d . insert ( _insertDateIndex ( date , d ) , [ date , info , data , error ] )
def add(self, *dic): '''add a config to StartCalendarInterval. Args: *dic (dict): dictionary with format {'Day': 12, 'Hour': 34} Available keys are Month, Day, Weekday, Hour, Minute. *Note the uppercase.* You can use gen(), genMix() to generate complex config dictionary.'''
dicList = list(flatten(dic)) # for every dict in the list passed in for d in dicList: # make a dict single (list of pairs) di = [] for k in d: # checkKey(k, self.keyWord) di.append(Pair(k, IntegerSingle(d[k]))) dictSingle = DictSingle(di) # append dict single t...
def decode_door(packet, channel=1): """Decode a door sensor."""
val = str(packet.get(QSDATA, '')) if len(val) == 6 and val.startswith('46') and channel == 1: return val[-1] == '0' return None
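An illustrative call, with decode_door above in scope and assuming QSDATA is the module-level key under which the raw payload string is stored (its actual value is not shown in the snippet):
QSDATA = 'data'  # assumed placeholder for the real module constant

print(decode_door({QSDATA: '460170'}))  # True  -- six chars, '46' prefix, last digit '0' means open
print(decode_door({QSDATA: '460171'}))  # False -- last digit is not '0'
print(decode_door({QSDATA: '123456'}))  # None  -- not a door packet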
def create_upload_and_chunk_url(self, project_id, path_data, hash_data, remote_filename=None, storage_provider_id=None): """Create a non-chunked upload that returns upload id and upload url. This type of upload doesn't allow additional upload urls. For single chunk files this method is more ef...
upload_response = self._create_upload(project_id, path_data, hash_data, remote_filename=remote_filename, storage_provider_id=storage_provider_id, chunked=False) return upload_response['id'], upload_response['signed_url']
def append ( self , other ) : """Append a collection of Index options together . Parameters other : Index or list / tuple of indices Returns appended : Index"""
to_concat = [ self ] if isinstance ( other , ( list , tuple ) ) : to_concat = to_concat + list ( other ) else : to_concat . append ( other ) for obj in to_concat : if not isinstance ( obj , Index ) : raise TypeError ( 'all inputs must be Index' ) names = { obj . name for obj in to_concat } name = No...
def table_r_node(self, node): """General pattern where the last node should get the text span attributes of the entire tree"""
start = len(self.f.getvalue()) try: self.default(node) except GenericASTTraversalPruningException: final = len(self.f.getvalue()) self.set_pos_info(node, start, final) self.set_pos_info(node[-1], start, final) raise GenericASTTraversalPruningException
def native_container(self): """Native container object."""
if self.__native is None: self.__native = self._get_container() return self.__native
def delete_node ( node_id , purge_data , ** kwargs ) : """Remove node from DB completely If there are attributes on the node , use purge _ data to try to delete the data . If no other resources link to this data , it will be deleted ."""
user_id = kwargs . get ( 'user_id' ) try : node_i = db . DBSession . query ( Node ) . filter ( Node . id == node_id ) . one ( ) except NoResultFound : raise ResourceNotFoundError ( "Node %s not found" % ( node_id ) ) group_items = db . DBSession . query ( ResourceGroupItem ) . filter ( ResourceGroupItem . node_...
def _print_MatMul ( self , expr ) : """Matrix multiplication printer . The sympy one turns everything into a dot product without type - checking ."""
from sympy import MatrixExpr links = [ ] for i , j in zip ( expr . args [ 1 : ] , expr . args [ : - 1 ] ) : if isinstance ( i , MatrixExpr ) and isinstance ( j , MatrixExpr ) : links . append ( ').dot(' ) else : links . append ( '*' ) printouts = [ self . _print ( i ) for i in expr . args ] resu...
def is_checkmate(position, input_color): """Finds if particular King is checkmated. :type: position: Board :type: input_color: Color :rtype: bool"""
return position.no_moves(input_color) and position.get_king(input_color).in_check(position)
def option ( self , key , value ) : """Adds an output option for the underlying data source . You can set the following option ( s ) for writing files : * ` ` timeZone ` ` : sets the string that indicates a timezone to be used to format timestamps in the JSON / CSV datasources or partition values . If it is...
self . _jwrite = self . _jwrite . option ( key , to_str ( value ) ) return self
def learn ( self , runs , dir = 1 , periodic = False , recurrent = True , randomSpeed = False , learnRecurrent = False , envelope = True , ) : """Traverses a sinusoidal trajectory across the environment , learning during the process . A pair of runs across the environment ( one in each direction ) takes 10 seco...
# Simulate for a second to get nice starting activation bumps . # Turn plotting off so as not to confuse the viewer oldPlotting = self . plotting self . plotting = False self . simulate ( 10 , 1 , 1 , 0 , envelope = False , inputNoise = None , save = False ) self . plotting = oldPlotting # Set up plotting if self . plo...
def check_command(self, name, exclude_packages=None, exclude_command_class=None): """Uses get_command_class() to check for the presence of a command."""
return get_command_class(name, exclude_packages=exclude_packages, exclude_command_class=exclude_command_class) is not None
def _mmComputeTransitionTraces ( self ) : """Computes the transition traces , if necessary . Transition traces are the following : predicted = > active cells predicted = > inactive cells predicted = > active columns predicted = > inactive columns unpredicted = > active columns"""
if not self . _mmTransitionTracesStale : return self . _mmData [ "predictedActiveCellsForSequence" ] = defaultdict ( set ) self . _mmTraces [ "predictedActiveCells" ] = IndicesTrace ( self , "predicted => active cells (correct)" ) self . _mmTraces [ "predictedInactiveCells" ] = IndicesTrace ( self , "predicted => i...
def min_version(self): """Version with the fewest downloads."""
data = self.version_downloads if not data: return (None, 0) return min(data.items(), key=lambda item: item[1])
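For illustration, assuming version_downloads maps version strings to download counts, the min() call picks the (version, count) pair with the smallest count:
version_downloads = {'1.0.0': 1200, '1.1.0': 87, '2.0.0': 430}   # hypothetical data
print(min(version_downloads.items(), key=lambda item: item[1]))  # ('1.1.0', 87)
# With no data the method above short-circuits and returns (None, 0).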
def from_range(cls, range_list, register_flag=True): """core class method to create visible objects from a range (nested list)"""
s = dict_from_range(range_list) obj = cls.from_serializable(s, register_flag) return obj
def deserialize(cls, value, *args, **kwargs): """Deserialize a value just after importing it. `cls.deserialize` should always return a value of type `cls.TYPE` or `None`."""
if isinstance(value, cls.TYPE): return value elif is_null(value): return None else: return value
def get_dependencies_from_json ( ireq ) : """Retrieves dependencies for the given install requirement from the json api . : param ireq : A single InstallRequirement : type ireq : : class : ` ~ pip . _ internal . req . req _ install . InstallRequirement ` : return : A set of dependency lines for generating new...
if ireq . editable or not is_pinned_requirement ( ireq ) : return # It is technically possible to parse extras out of the JSON API ' s # requirement format , but it is such a chore let ' s just use the simple API . if ireq . extras : return session = requests . session ( ) atexit . register ( session . close ) ...
def _do_subread_set ( flag , input_file , of , negative_filter , aligned ) : best = { } cmd = 'samtools view ' + flag + ' ' + input_file sys . stderr . write ( cmd + "\n" ) p = Popen ( cmd . split ( ) , stdout = PIPE ) z = 0 for line in p . stdout : z += 1 if z % 10000 == 0 : ...
cmd = 'samtools view ' + flag + ' ' + input_file sys . stderr . write ( cmd + "\n" ) z = 0 p = Popen ( cmd . split ( ) , stdout = PIPE ) for line in p . stdout : z += 1 if z % 10000 == 0 : sys . stderr . write ( str ( z ) + " subreads alignment paths scanned during selected for best\r" ) pbname = Pa...
def _validate_nodes_with_data ( self , names ) : """Validate NodeWithData pseudo - type ."""
names = names if isinstance ( names , list ) else [ names ] if not names : raise RuntimeError ( "Argument `nodes` is not valid" ) for ndict in names : if ( not isinstance ( ndict , dict ) ) or ( isinstance ( ndict , dict ) and ( set ( ndict . keys ( ) ) != set ( [ "name" , "data" ] ) ) ) : raise Runtime...
def __telnet_event_listener ( self , ip , callback ) : """creates a telnet connection to the lightpad"""
tn = telnetlib . Telnet ( ip , 2708 ) self . _last_event = "" self . _telnet_running = True while self . _telnet_running : try : raw_string = tn . read_until ( b'.\n' , 5 ) if len ( raw_string ) >= 2 and raw_string [ - 2 : ] == b'.\n' : # lightpad sends " . \ n " at the end that we need to chop off ...
def hashVariantAnnotation(cls, gaVariant, gaVariantAnnotation): """Produces an MD5 hash of the gaVariant and gaVariantAnnotation objects"""
treffs = [treff.id for treff in gaVariantAnnotation.transcript_effects] return hashlib.md5("{}\t{}\t{}\t".format(gaVariant.reference_bases, tuple(gaVariant.alternate_bases), treffs)).hexdigest()
def set_multivar ( self , section , option , value = '' ) : '''This function is unique to the GitConfigParser . It will add another value for the option if it already exists , converting the option ' s value to a list if applicable . If " value " is a list , then any existing values for the specified sectio...
self . _string_check ( value , allow_list = True ) if not section or section == self . DEFAULTSECT : sectdict = self . _defaults else : try : sectdict = self . _sections [ section ] except KeyError : raise NoSectionError ( # pylint : disable = undefined - variable salt . utils . stri...
def get_items_of_credit_note_per_page(self, credit_note_id, per_page=1000, page=1): """Get items of credit note per page :param credit_note_id: the credit note id :param per_page: How many objects per page. Default: 1000 :param page: Which page. Default: 1 :return: list"""
return self._get_resource_per_page(resource=CREDIT_NOTE_ITEMS, per_page=per_page, page=page, params={'credit_note_id': credit_note_id},)
def set_linestyle ( self , ls ) : """Set the line style to be one of"""
DEBUG_MSG ( "set_linestyle()" , 1 , self ) self . select ( ) GraphicsContextBase . set_linestyle ( self , ls ) try : self . _style = GraphicsContextWx . _dashd_wx [ ls ] except KeyError : self . _style = wx . LONG_DASH # Style not used elsewhere . . . # On MS Windows platform , only line width of 1 allowed for ...
def update(self, gist, content): """Updates the contents of file hosted inside a gist at GitHub. :param gist: (dict) gist parsed by GitHubTools._parse_gist() :param content: (str or bytes) to be written :return: (bool) indicating the success or failure of the update"""
# abort if content is False if content is False: return False # request url = self._api_url("gists", gist.get("id")) data = {"files": {self.filename: {"content": content}}} self.output("Sending contents of {} to {}".format(self.file_path, url)) response = self.requests.patch...
def __analizar_errores(self, ret): "Checks for and extracts errors, if any, from the XML response"
errores = [] if 'errores' in ret: errores.extend(ret['errores']) if errores: self.Errores = ["%(codigo)s: %(descripcion)s" % err['error'][0] for err in errores] self.errores = [{'codigo': err['error'][0]['codigo'], 'descripcion': err['error'][0]['descripcion']....
def slow_reduction_transfer(ii, j, idx, count, x, u, v, c): '''Perform the reduction transfer step from the Jonker-Volgenant algorithm The data is input in a ragged array in terms of "i" structured as a vector of values for each i,j combination where: ii - the i to be reduced j - the j-i...
for i in ii: j1 = x[i] jj = j[idx[i]:(idx[i] + count[i])] uu = np.min((c[idx[i]:(idx[i] + count[i])] - v[jj])[jj != j1]) v[j1] = v[j1] - uu + u[i] u[i] = uu
def play_tone ( self , frequency , duration , delay = 0.0 , volume = 100 , play_type = PLAY_WAIT_FOR_COMPLETE ) : """Play a single tone , specified by its frequency , duration , volume and final delay . : param int frequency : the tone frequency , in Hertz : param float duration : Tone duration , in seconds :...
self . _validate_play_type ( play_type ) if duration <= 0 : raise ValueError ( 'invalid duration (%s)' % duration ) if delay < 0 : raise ValueError ( 'invalid delay (%s)' % delay ) if not 0 < volume <= 100 : raise ValueError ( 'invalid volume (%s)' % volume ) self . set_volume ( volume ) duration_ms = int (...
def _exit_session(self): """Exits session to Hetzner account and returns."""
api = self.api[self.account] response = self._get(api['exit']['GET']['url']) if not Provider._filter_dom(response.text, api['filter']): LOGGER.info('Hetzner => Exit session') else: LOGGER.warning('Hetzner => Unable to exit session') self.session = None return True
def match_var(self, tokens, item): """Matches a variable."""
setvar, = tokens if setvar != wildcard: if setvar in self.names: self.add_check(self.names[setvar] + " == " + item) else: self.add_def(setvar + " = " + item) self.names[setvar] = item
def subprocess_manager ( self , exec_args ) : '''Bro subprocess manager'''
try : sp = gevent . subprocess . Popen ( exec_args , stdout = gevent . subprocess . PIPE , stderr = gevent . subprocess . PIPE ) except OSError : raise RuntimeError ( 'Could not run bro executable (either not installed or not in path): %s' % ( exec_args ) ) out , err = sp . communicate ( ) if out : print 's...
def filter_featured_apps(admin_apps, request): """Given a list of apps return a set of pseudo-apps considered featured. Apps are considered featured if they are defined in the settings property called `DASHBOARD_FEATURED_APPS` which contains a list of the apps that are considered to be featured....
featured_apps = [] # Build the featured apps list based upon settings. for orig_app_spec in appsettings.DASHBOARD_FEATURED_APPS: # make a copy that we can write to, to fix deprecations without # changing settings app_spec = orig_app_spec.copy() if "verbose_name" in app_spec: warnings.warn(...
def parse_authn_request_response ( self , xmlstr , binding , outstanding = None , outstanding_certs = None , conv_info = None ) : """Deal with an AuthnResponse : param xmlstr : The reply as a xml string : param binding : Which binding that was used for the transport : param outstanding : A dictionary with ses...
if not getattr ( self . config , 'entityid' , None ) : raise SAMLError ( "Missing entity_id specification" ) if not xmlstr : return None kwargs = { "outstanding_queries" : outstanding , "outstanding_certs" : outstanding_certs , "allow_unsolicited" : self . allow_unsolicited , "want_assertions_signed" : self . w...
def new_wins(self, orig_criteria, orig_idx, new_criteria, new_idx): """Returns a bool indicating whether a new adversarial example is better than the pre-existing one for the same clean example. :param orig_criteria: dict mapping names of criteria to their value for each example in the whole da...
raise NotImplementedError(str(type(self)) + " needs to implement new_wins.")
def show_data_file ( fname ) : """shows a data file in CSV format - all files live in CORE folder"""
txt = '<H2>' + fname + '</H2>' print ( fname ) # try : txt += web . read_csv_to_html_table ( fname , 'Y' ) # it is ok to use a table for actual table data # except : # txt + = ' < H2 > ERROR - cant read file < / H2 > ' # txt + = web . read _ csv _ to _ html _ list ( fname ) # only use this for single column lists txt +...
def create_video ( video_data ) : """Called on to create Video objects in the database create _ video is used to create Video objects whose children are EncodedVideo objects which are linked to Profile objects . This is an alternative to the HTTP requests so it can be used internally . The VideoSerializer is ...
serializer = VideoSerializer ( data = video_data ) if serializer . is_valid ( ) : serializer . save ( ) return video_data . get ( "edx_video_id" ) else : raise ValCannotCreateError ( serializer . errors )
def generate_command ( command = None , package = None , path = "~" , topic = "mycommands" ) : """the command will generate the package and code for a sample cmd3 module . : param command : the name of the command : param package : name of the new package . Often this will be cloudmesh _ COMMAND which will be...
if command is None : Console . error ( "command not specified" ) return if topic is None : topic = "mycommands" if path is None : path = "." path = path_expand ( path ) if package is None : package = "cloudmesh_" + command data = { 'command' : command , 'package' : package , 'path' : path , 'topic' ...
def p_expression_noteql(self, p): 'expression : expression NEL expression'
p[0] = NotEql(p[1], p[3], lineno=p.lineno(1)) p.set_lineno(0, p.lineno(1))
def get_contents_debug_adapter_protocol ( self , lst , fmt = None ) : '''This method is to be used in the case where the variables are all saved by its id ( and as such don ' t need to have the ` resolve ` method called later on , so , keys don ' t need to embed the reference in the key ) . Note that the retu...
l = len ( lst ) ret = [ ] format_str = '%0' + str ( int ( len ( str ( l - 1 ) ) ) ) + 'd' if fmt is not None and fmt . get ( 'hex' , False ) : format_str = '0x%0' + str ( int ( len ( hex ( l ) . lstrip ( '0x' ) ) ) ) + 'x' for i , item in enumerate ( lst ) : ret . append ( ( format_str % i , item , '[%s]' % i )...
def _transform_local_field_to_expression(expression, node, context): """Transform a LocalField compiler expression into its SQLAlchemy expression representation. Args: expression: expression, LocalField compiler expression. node: SqlNode, the SqlNode the expression applies to. context: Compil...
column_name = expression.field_name column = sql_context_helpers.get_column(column_name, node, context) return column
def zone_create_or_update(name, resource_group, **kwargs): '''.. versionadded:: Fluorine Creates or updates a DNS zone. Does not modify DNS records within the zone. :param name: The name of the DNS zone to create (without a terminating dot). :param resource_group: The name of the resource...
# DNS zones are global objects kwargs['location'] = 'global' dnsconn = __utils__['azurearm.get_client']('dns', **kwargs) # Convert list of ID strings to list of dictionaries with id key. if isinstance(kwargs.get('registration_virtual_networks'), list): kwargs['registration_virtual_networks'...
def getLogger(name=None): """Return a logger with the specified name, creating it if necessary. If no name is specified, return the root logger."""
if name: logger = SLogger.manager.getLogger(name) return logger else: return rootLogger
def track_time(self, name, description='', max_rows=None): """Create a Timer object in the Tracker."""
if name in self._tables: raise TableConflictError(name) if max_rows is None: max_rows = AnonymousUsageTracker.MAX_ROWS_PER_TABLE self.register_table(name, self.uuid, 'Timer', description) self._tables[name] = Timer(name, self, max_rows=max_rows)
def _complete_url(self, url_part, registration_prefix): """This method is used to defer the construction of the final url in the case that the Api is created with a Blueprint. :param url_part: The part of the url the endpoint is registered with :param registration_prefix: The part of the url co...
parts = {'b': registration_prefix, 'a': self.prefix, 'e': url_part} return ''.join(parts[key] for key in self.url_part_order if parts[key])
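A rough standalone sketch of the joining logic; the 'bae' ordering (blueprint prefix, api prefix, endpoint part) and the prefix value are assumptions for illustration, not taken from the snippet:
def complete_url(url_part, registration_prefix, prefix='/api', url_part_order='bae'):
    # Same idea as _complete_url above: join only the non-empty parts, in the configured order.
    parts = {'b': registration_prefix, 'a': prefix, 'e': url_part}
    return ''.join(parts[key] for key in url_part_order if parts[key])

print(complete_url('/users', '/blueprint'))  # '/blueprint/api/users'
print(complete_url('/users', ''))            # '/api/users' -- empty blueprint prefix is skipped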
def op ( name , images , max_outputs = 3 , display_name = None , description = None , collections = None ) : """Create a legacy image summary op for use in a TensorFlow graph . Arguments : name : A unique name for the generated summary node . images : A ` Tensor ` representing pixel data with shape ` [ k , h ...
# TODO ( nickfelt ) : remove on - demand imports once dep situation is fixed . import tensorflow . compat . v1 as tf if display_name is None : display_name = name summary_metadata = metadata . create_summary_metadata ( display_name = display_name , description = description ) with tf . name_scope ( name ) , tf . co...
def get_queryset(self): """The queryset is over-ridden to show only plug events in which the strain matches the breeding strain."""
self.strain = get_object_or_404(Strain, Strain_slug__iexact=self.kwargs['slug']) return PlugEvents.objects.filter(Breeding__Strain=self.strain)
def camel_case_to_snake_case(name): """HelloWorld -> hello_world"""
s1 = _FIRST_CAP_RE.sub(r'\1_\2', name) return _ALL_CAP_RE.sub(r'\1_\2', s1).lower()
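The two module-level patterns are not shown in the snippet; the conventional definitions below are an assumption that makes the converter self-contained and runnable:
import re

_FIRST_CAP_RE = re.compile(r'(.)([A-Z][a-z]+)')  # assumed definition
_ALL_CAP_RE = re.compile(r'([a-z0-9])([A-Z])')   # assumed definition

def camel_case_to_snake_case(name):
    """HelloWorld -> hello_world"""
    s1 = _FIRST_CAP_RE.sub(r'\1_\2', name)
    return _ALL_CAP_RE.sub(r'\1_\2', s1).lower()

print(camel_case_to_snake_case('HelloWorld'))        # hello_world
print(camel_case_to_snake_case('HTTPResponseCode'))  # http_response_code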
def load_configuration_file ( self ) : """Load all configuration from file"""
if not os . path . exists ( self . config_file ) : return try : with open ( self . config_file , 'r' ) as file : csvreader = csv . reader ( file , delimiter = '=' , escapechar = '\\' , quoting = csv . QUOTE_NONE ) for line in csvreader : if len ( line ) == 2 : key , v...
def create_term(self, lemma, pos, morphofeat, tokens, id=None): """Create a new term and add it to the term layer @type lemma: string @param lemma: The lemma of the term @type pos: string @param pos: The postag (first letter) of the POS attribute @type morphofeat: string @param mo...
if id is None: n = 1 if self.term_layer is None else len(self.term_layer.idx) + 1 id = "t{n}".format(**locals()) new_term = Cterm(type=self.type) new_term.set_id(id) new_term.set_lemma(lemma) new_term.set_pos(pos) new_term.set_morphofeat(morphofeat) new_span = Cspan...
def remove_vertex ( self , vertex ) : """Remove vertex from G"""
try : self . vertices . pop ( vertex ) self . succ . pop ( vertex ) except KeyError : raise GraphInsertError ( "Vertex %s doesn't exist." % ( vertex , ) ) if vertex in self . nodes : self . nodes . pop ( vertex ) for element in self . vertices : if vertex in self . vertices [ element ] : sel...
def __send_message ( self , operation ) : """Send a query or getmore operation and handles the response . If operation is ` ` None ` ` this is an exhaust cursor , which reads the next result batch off the exhaust socket instead of sending getMore messages to the server . Can raise ConnectionFailure ."""
client = self . __collection . database . client try : response = client . _run_operation_with_response ( operation , self . _unpack_response , exhaust = self . __exhaust , address = self . __address ) except OperationFailure : self . __killed = True # Make sure exhaust socket is returned immediately , if n...
def session_check_name(session_name): """Raises exception if session name is invalid, modeled after tmux function. tmux(1) session names may not be empty, or include periods or colons. These delimiters are reserved for noting session, window and pane. Parameters session_name: str Name of session...
if not session_name or len(session_name) == 0: raise exc.BadSessionName("tmux session names may not be empty.") elif '.' in session_name: raise exc.BadSessionName("tmux session name \"%s\" may not contain periods.", session_name) elif ':' in session_name: raise exc.BadSessionName("tmux ...
def check_known_inconsistencies ( bill_data , bond_data ) : """There are a couple quirks in the data provided by Bank of Canada . Check that no new quirks have been introduced in the latest download ."""
inconsistent_dates = bill_data . index . sym_diff ( bond_data . index ) known_inconsistencies = [ # bill _ data has an entry for 2010-02-15 , which bond _ data doesn ' t . # bond _ data has an entry for 2006-09-04 , which bill _ data doesn ' t . # Both of these dates are bank holidays ( Flag Day and Labor Day , # respe...
def cluster_assignments(self): """Return an array of cluster assignments corresponding to the most recent set of instances clustered. :return: the cluster assignments :rtype: ndarray"""
array = javabridge.call(self.jobject, "getClusterAssignments", "()[D") if array is None: return None else: return javabridge.get_env().get_double_array_elements(array)
def get_square_axes_limits(coords, margin=0.05): """Return N-dimensional square's limits ## Arguments # 'coords': list of coordinates of points to be plotted # 'margin': margin to be added from boundaries of the square. - 'margin' can be negative if one wants to reduce the square size. ...
# coords = [x, y, z] try: coords = [np.array(coord) for coord in coords] except: raise Exception("Failed to convert elements of 'coords' into numpy.array") lims = [(coord.min(), coord.max()) for coord in coords] mids = [0.5 * (lim[0] + lim[1]) for lim in lims] widths = [0...
def format_item ( self , item , defaults = None , stencil = None ) : """Format an item ."""
from pyrobase . osutil import shell_escape try : item_text = fmt . to_console ( formatting . format_item ( self . options . output_format , item , defaults ) ) except ( NameError , ValueError , TypeError ) , exc : self . fatal ( "Trouble with formatting item %r\n\n FORMAT = %r\n\n REASON =" % ( item , self . ...
def version(self, value): """Setter for **self.__version** attribute. :param value: Attribute value. :type value: unicode"""
if value is not None: assert type(value) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format("version", value) self.__version = value
def nla_reserve(msg, attrtype, attrlen): """Reserve space for an attribute. https://github.com/thom311/libnl/blob/libnl3_2_25/lib/attr.c#L456 Reserves room for an attribute in the specified Netlink message and fills in the attribute header (type, length). Returns None if there...
tlen = NLMSG_ALIGN(msg.nm_nlh.nlmsg_len) + nla_total_size(attrlen) if tlen > msg.nm_size: return None nla = nlattr(nlmsg_tail(msg.nm_nlh)) nla.nla_type = attrtype nla.nla_len = nla_attr_size(attrlen) if attrlen: padlen = nla_padlen(attrlen) nla.bytearray[nla.nla_len:n...
def update_model(self): '''a method to update model with latest training data :return: True'''
import requests url = self.endpoint_public + '/calculate' params = {'group': self.group_name} response = requests.get(url, params=params) response_details = response.json() return response_details['success']
def append_data(self, len_tag, val_tag, data, header=False): """Append raw data, possibly including an embedded SOH. :param len_tag: Tag number for length field. :param val_tag: Tag number for value field. :param data: Raw data byte string. :param header: Append to header if True;...
self.append_pair(len_tag, len(data), header=header) self.append_pair(val_tag, data, header=header) return
def liouvillian(H, Ls=None): r"""Return the Liouvillian super-operator associated with `H` and `Ls` The Liouvillian :math:`\mathcal{L}` generates the Markovian dynamics of a system via the Master equation: .. math:: \dot{\rho} = \mathcal{L}\rho = -i[H, \rho] + ...
if Ls is None: Ls = [] elif isinstance(Ls, Matrix): Ls = Ls.matrix.ravel().tolist() summands = [-I * commutator(H), ] summands.extend([lindblad(L) for L in Ls]) return SuperOperatorPlus.create(*summands)
def clusterStatus ( self ) : """Returns a dict of cluster nodes and their status information"""
servers = yield self . getClusterServers ( ) d = { 'workers' : { } , 'crons' : { } , 'queues' : { } } now = time . time ( ) reverse_map = { } for sname in servers : last = yield self . get ( 'rhumba.server.%s.heartbeat' % sname ) status = yield self . get ( 'rhumba.server.%s.status' % sname ) uuid = yield s...
def log10(x): """Base-10 logarithm"""
if isinstance(x, UncertainFunction): mcpts = np.log10(x._mcpts) return UncertainFunction(mcpts) else: return np.log10(x)
def add_minrmsd_to_ref ( self , ref , ref_frame = 0 , atom_indices = None , precentered = False ) : r"""Adds the minimum root - mean - square - deviation ( minrmsd ) with respect to a reference structure to the feature list . Parameters ref : Reference structure for computing the minrmsd . Can be of two types...
from . misc import MinRmsdFeature f = MinRmsdFeature ( ref , ref_frame = ref_frame , atom_indices = atom_indices , topology = self . topology , precentered = precentered ) self . __add_feature ( f )
def read_string(cls, string): """Decodes a given bencoded string or bytestring. Returns decoded structure(s). :param str string: :rtype: list"""
if PY3 and not isinstance(string, byte_types): string = string.encode() return cls.decode(string)
def ekopw(fname): """Open an existing E-kernel file for writing. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekopw_c.html :param fname: Name of EK file. :type fname: str :return: Handle attached to EK file. :rtype: int"""
fname = stypes.stringToCharP(fname) handle = ctypes.c_int() libspice.ekopw_c(fname, ctypes.byref(handle)) return handle.value
def sort_file_tabs_alphabetically(self): """Sort open tabs alphabetically."""
while self.sorted() is False: for i in range(0, self.tabs.tabBar().count()): if (self.tabs.tabBar().tabText(i) > self.tabs.tabBar().tabText(i + 1)): self.tabs.tabBar().moveTab(i, i + 1)
def intersection(a, b, scale=1): '''Intersection between two segments.'''
try: a1, a2 = a except TypeError: a1 = a.start a2 = a.stop try: b1, b2 = b except TypeError: b1 = b.start b2 = b.stop if a2 <= b1: return None if a1 >= b2: return None # a2 > b1 and a1 < b2 if a2 <= b2: if a1 <= b1: return slice(b1 * scale, a2 * scale) ...
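A quick illustration of the overlap logic, with the intersection function above in scope; inputs may be (start, stop) pairs or slice-like objects:
print(intersection((2, 8), (5, 12)))            # slice(5, 8, None) -- overlap of [2, 8) and [5, 12)
print(intersection((2, 8), (5, 12), scale=10))  # slice(50, 80, None) -- both endpoints are scaled
print(intersection((2, 8), (9, 12)))            # None -- disjoint segments
print(intersection(slice(0, 4), (3, 10)))       # slice(3, 4, None) -- slice objects work too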
def get_parameter_action(action): """To foster a general schema that can accommodate multiple parsers, the general behavior here is described rather than the specific language of a given parser. For instance, the 'append' action of an argument is collapsing each argument given to a single argument. It ...
actions = set() if isinstance(action, argparse._AppendAction): actions.add(SPECIFY_EVERY_PARAM) return actions
def extract_log_level_from_environment(k, default): """Gets the log level from the environment variable."""
return LOG_LEVELS.get(os.environ.get(k)) or int(os.environ.get(k, default))
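A behavior sketch; the LOG_LEVELS mapping below is hypothetical, standing in for the module's real name-to-level table:
import logging
import os

LOG_LEVELS = {'DEBUG': logging.DEBUG, 'INFO': logging.INFO, 'WARNING': logging.WARNING}  # assumed

def extract_log_level_from_environment(k, default):
    """Gets the log level from the environment variable."""
    return LOG_LEVELS.get(os.environ.get(k)) or int(os.environ.get(k, default))

os.environ['APP_LOG_LEVEL'] = 'DEBUG'
print(extract_log_level_from_environment('APP_LOG_LEVEL', logging.WARNING))  # 10 (named level)
os.environ['APP_LOG_LEVEL'] = '15'
print(extract_log_level_from_environment('APP_LOG_LEVEL', logging.WARNING))  # 15 (numeric value)
del os.environ['APP_LOG_LEVEL']
print(extract_log_level_from_environment('APP_LOG_LEVEL', logging.WARNING))  # 30 (default)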
def validate_instance(instance): """Validate whether the instance should be logged or is excluded"""
excludes = settings.AUTOMATED_LOGGING['exclude']['model'] for excluded in excludes: if (excluded in [instance._meta.app_label.lower(), instance.__class__.__name__.lower()] or instance.__module__.lower().startswith(excluded)): return False return True
def _validate_configuration ( self ) : """Validates that required parameters are present ."""
if not self . access_token : raise ConfigurationException ( 'You will need to initialize a client with an Access Token' ) if not self . api_url : raise ConfigurationException ( 'The client configuration needs to contain an API URL' ) if not self . default_locale : raise ConfigurationException ( 'The client ...
def _is_cache_dir_appropriate(cache_dir, cache_file): """Determine if a directory is acceptable for building. A directory is suitable if any of the following are true: - it doesn't exist - it is empty - it contains an existing build cache"""
if os.path.exists(cache_dir): files = os.listdir(cache_dir) if cache_file in files: return True return not bool(files) return True
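A short check of the three acceptable cases, with _is_cache_dir_appropriate above in scope; the paths and cache file name here are illustrative only:
import os
import tempfile

missing = os.path.join(tempfile.gettempdir(), 'no-such-cache-dir')
print(_is_cache_dir_appropriate(missing, 'build.cache'))            # True: directory doesn't exist

with tempfile.TemporaryDirectory() as empty_dir:
    print(_is_cache_dir_appropriate(empty_dir, 'build.cache'))      # True: directory is empty

with tempfile.TemporaryDirectory() as used_dir:
    open(os.path.join(used_dir, 'other.txt'), 'w').close()
    print(_is_cache_dir_appropriate(used_dir, 'build.cache'))       # False: non-empty, no cache file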
def removeIndexOnAttribute ( self , attributeName ) : '''removeIndexOnAttribute - Remove an attribute from indexing ( for getElementsByAttr function ) and remove indexed data . @ param attributeName < lowercase str > - An attribute name . Will be lowercased . " name " and " id " will have no effect .'''
attributeName = attributeName . lower ( ) if attributeName in self . otherAttributeIndexFunctions : del self . otherAttributeIndexFunctions [ attributeName ] if attributeName in self . _otherAttributeIndexes : del self . _otherAttributeIndexes [ attributeName ]
def fire_running ( self , running ) : '''Pass in a state " running " dict , this is the return dict from a state call . The dict will be processed and fire events . By default yellows and reds fire events on the master and minion , but this can be configured .'''
load = { 'id' : self . opts [ 'id' ] , 'events' : [ ] , 'cmd' : '_minion_event' } for stag in sorted ( running , key = lambda k : running [ k ] . get ( '__run_num__' , 0 ) ) : if running [ stag ] [ 'result' ] and not running [ stag ] [ 'changes' ] : continue tag = 'state_{0}_{1}' . format ( six . text_t...
def initialize_environment(app): """Perform initializations needed before the build process starts."""
env = app.builder.env # Assure ``traceability_all_items`` will always be there. if not hasattr(env, 'traceability_all_items'): env.traceability_all_items = {} update_available_item_relationships(app)
def load_network_from_file(filename): """Load the complete configuration of a previously stored network.""" import cPickle
network = NeuralNet({"n_inputs": 1, "layers": [[0, None]]}) with open(filename, 'rb') as file: store_dict = cPickle.load(file) network.n_inputs = store_dict["n_inputs"] network.n_weights = store_dict["n_weights"] network.layers = store_dict["layers"] network.w...
def optional_file_like(path): """Validator that ensures that if a file exists it is regular, a fifo, or a character device. The file is not required to exist. This includes character special devices like /dev/null."""
if (os.path.exists(path) and not (os.path.isfile(path) or stat.S_ISFIFO(os.stat(path).st_mode) or stat.S_ISCHR(os.stat(path).st_mode))): raise ValidationFailed('{} is not a valid file, character device, or fifo.'.format(path))
def from_center_of_mass(cls, inputs, window_length, center_of_mass, **kwargs): """Convenience constructor for passing `decay_rate` in terms of center of mass. Forwards `decay_rate` as `1 - (1 / (1 + center_of_mass))`. This provides behavior equivalent to passing `center_of_mass`...
return cls(inputs=inputs, window_length=window_length, decay_rate=(1.0 - (1.0 / (1.0 + center_of_mass))), **kwargs)
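The center-of-mass conversion can be sanity-checked in isolation; the sample values below are illustrative:
# decay_rate as used above: 1 - 1 / (1 + center_of_mass)
def decay_rate_from_com(center_of_mass):
    return 1.0 - (1.0 / (1.0 + center_of_mass))

print(decay_rate_from_com(0.0))   # 0.0  -> older observations get zero weight
print(decay_rate_from_com(1.0))   # 0.5
print(decay_rate_from_com(19.0))  # 0.95 -> long memory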
def _load_github_repo(): """Loads the GitHub repository from the user's config."""
if 'TRAVIS' in os.environ: raise RuntimeError('Detected that we are running in Travis. ' 'Stopping to prevent infinite loops.') try: with open(os.path.join(config_dir, 'repo'), 'r') as f: return f.read() except (OSError, IOError): raise RuntimeError('Could not find your...
def get_instance(self, payload): """Build an instance of DayInstance :param dict payload: Payload response from the API :returns: twilio.rest.preview.bulk_exports.export.day.DayInstance :rtype: twilio.rest.preview.bulk_exports.export.day.DayInstance"""
return DayInstance(self._version, payload, resource_type=self._solution['resource_type'],)
def MatrixTriangularSolve ( a , rhs , lower , adj ) : """Matrix triangular solve op ."""
trans = 0 if not adj else 2 r = np . empty ( rhs . shape ) . astype ( a . dtype ) for coord in np . ndindex ( a . shape [ : - 2 ] ) : pos = coord + ( Ellipsis , ) r [ pos ] = sp . linalg . solve_triangular ( a [ pos ] if not adj else np . conj ( a [ pos ] ) , rhs [ pos ] , trans = trans , lower = lower ) return...
def from_fptr(cls, label, type_, fptr): """Return ``FSEntry`` object."""
return FSEntry(label=label, type=type_, path=fptr.path, use=fptr.use, file_uuid=fptr.file_uuid, derived_from=fptr.derived_from, checksum=fptr.checksum, checksumtype=fptr.checksumtype,)
def _load_input_data_port_models(self): """Reloads the input data port models directly from the state"""
if not self.state_copy_initialized: return self.input_data_ports = [] for input_data_port_m in self.state_copy.input_data_ports: new_ip_m = deepcopy(input_data_port_m) new_ip_m.parent = self new_ip_m.data_port = input_data_port_m.data_port self.input_data_ports.append(new_i...
def _glob1 ( self , pattern , ondisk = True , source = False , strings = False ) : """Globs for and returns a list of entry names matching a single pattern in this directory . This searches any repositories and source directories for corresponding entries and returns a Node ( or string ) relative to the cur...
search_dir_list = self . get_all_rdirs ( ) for srcdir in self . srcdir_list ( ) : search_dir_list . extend ( srcdir . get_all_rdirs ( ) ) selfEntry = self . Entry names = [ ] for dir in search_dir_list : # We use the . name attribute from the Node because the keys of # the dir . entries dictionary are normalized ( ...