idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
2,300 | def plot_boolean ( self , on , boolean_col , plot_col = None , boolean_label = None , boolean_value_map = { } , order = None , ax = None , alternative = "two-sided" , ** kwargs ) : cols , df = self . as_dataframe ( on , return_cols = True , ** kwargs ) plot_col = self . plot_col_from_cols ( cols = cols , plot_col = plo... | Plot a comparison of boolean_col in the cohort on a given variable via on or col . |
2,301 | def plot_correlation ( self , on , x_col = None , plot_type = "jointplot" , stat_func = pearsonr , show_stat_func = True , plot_kwargs = { } , ** kwargs ) : if plot_type not in [ "boxplot" , "barplot" , "jointplot" , "regplot" ] : raise ValueError ( "Invalid plot_type %s" % plot_type ) plot_cols , df = self . as_datafr... | Plot the correlation between two variables . |
2,302 | def _list_patient_ids ( self ) : results = [ ] for patient in self : results . append ( patient . id ) return ( results ) | Utility function to return a list of patient ids in the Cohort |
2,303 | def summarize_provenance_per_cache ( self ) : provenance_summary = { } df = self . as_dataframe ( ) for cache in self . cache_names : cache_name = self . cache_names [ cache ] cache_provenance = None num_discrepant = 0 this_cache_dir = path . join ( self . cache_dir , cache_name ) if path . exists ( this_cache_dir ) : ... | Utility function to summarize provenance files for cached items used by a Cohort for each cache_dir that exists . Only existing cache_dirs are summarized . |
2,304 | def summarize_provenance ( self ) : provenance_per_cache = self . summarize_provenance_per_cache ( ) summary_provenance = None num_discrepant = 0 for cache in provenance_per_cache : if not ( summary_provenance ) : summary_provenance = provenance_per_cache [ cache ] summary_provenance_name = cache num_discrepant += comp... | Utility function to summarize provenance files for cached items used by a Cohort . |
2,305 | def summarize_data_sources ( self ) : provenance_file_summary = self . summarize_provenance ( ) dataframe_hash = self . summarize_dataframe ( ) results = { "provenance_file_summary" : provenance_file_summary , "dataframe_hash" : dataframe_hash } return ( results ) | Utility function to summarize data source status for this Cohort useful for confirming the state of data used for an analysis |
2,306 | def strelka_somatic_variant_stats ( variant , variant_metadata ) : sample_info = variant_metadata [ "sample_info" ] assert len ( sample_info ) == 2 , "More than two samples found in the somatic VCF" tumor_stats = _strelka_variant_stats ( variant , sample_info [ "TUMOR" ] ) normal_stats = _strelka_variant_stats ( varian... | Parse out the variant calling statistics for a given variant from a Strelka VCF |
2,307 | def _strelka_variant_stats ( variant , sample_info ) : if variant . is_deletion or variant . is_insertion : ref_depth = int ( sample_info [ 'TAR' ] [ 0 ] ) alt_depth = int ( sample_info [ 'TIR' ] [ 0 ] ) depth = ref_depth + alt_depth else : ref_depth = int ( sample_info [ variant . ref + "U" ] [ 0 ] ) alt_depth = int (... | Parse a single sample s variant calling statistics based on Strelka VCF output |
2,308 | def mutect_somatic_variant_stats ( variant , variant_metadata ) : sample_info = variant_metadata [ "sample_info" ] assert len ( sample_info ) == 2 , "More than two samples found in the somatic VCF" tumor_sample_infos = [ info for info in sample_info . values ( ) if info [ "GT" ] == "0/1" ] assert len ( tumor_sample_inf... | Parse out the variant calling statistics for a given variant from a Mutect VCF |
2,309 | def maf_somatic_variant_stats ( variant , variant_metadata ) : tumor_stats = None normal_stats = None if "t_ref_count" in variant_metadata : tumor_stats = _maf_variant_stats ( variant , variant_metadata , prefix = "t" ) if "n_ref_count" in variant_metadata : normal_stats = _maf_variant_stats ( variant , variant_metadat... | Parse out the variant calling statistics for a given variant from a MAF file |
2,310 | def _vcf_is_strelka ( variant_file , variant_metadata ) : if "strelka" in variant_file . lower ( ) : return True elif "NORMAL" in variant_metadata [ "sample_info" ] . keys ( ) : return True else : vcf_reader = vcf . Reader ( open ( variant_file , "r" ) ) try : vcf_type = vcf_reader . metadata [ "content" ] except KeyEr... | Return True if variant_file given is in strelka format |
2,311 | def variant_stats_from_variant ( variant , metadata , merge_fn = ( lambda all_stats : max ( all_stats , key = ( lambda stats : stats . tumor_stats . depth ) ) ) ) : all_stats = [ ] for ( variant_file , variant_metadata ) in metadata . items ( ) : if _vcf_is_maf ( variant_file = variant_file ) : stats = maf_somatic_vari... | Parse the variant calling stats from a variant called from multiple variant files . The stats are merged based on merge_fn |
2,312 | def load_ensembl_coverage ( cohort , coverage_path , min_tumor_depth , min_normal_depth = 0 , pageant_dir_fn = None ) : if pageant_dir_fn is None : pageant_dir_fn = lambda patient : patient . id columns_both = [ "depth1" , "depth2" , "onBP1" , "onBP2" , "numOnLoci" , "fracBPOn1" , "fracBPOn2" , "fracLociOn" , "offBP1" ... | Load in Pageant CoverageDepth results with Ensembl loci . |
2,313 | def vertical_percent ( plot , percent = 0.1 ) : plot_bottom , plot_top = plot . get_ylim ( ) return percent * ( plot_top - plot_bottom ) | Using the size of the y axis return a fraction of that size . |
2,314 | def add_significance_indicator ( plot , col_a = 0 , col_b = 1 , significant = False ) : plot_bottom , plot_top = plot . get_ylim ( ) line_height = vertical_percent ( plot , 0.1 ) plot_top = plot_top + line_height plot . set_ylim ( top = plot_top + line_height * 2 ) color = "black" line_top = plot_top + line_height plot... | Add a p - value significance indicator . |
2,315 | def stripboxplot ( x , y , data , ax = None , significant = None , ** kwargs ) : ax = sb . boxplot ( x = x , y = y , data = data , ax = ax , fliersize = 0 , ** kwargs ) plot = sb . stripplot ( x = x , y = y , data = data , ax = ax , jitter = kwargs . pop ( "jitter" , 0.05 ) , color = kwargs . pop ( "color" , "0.3" ) , ... | Overlay a stripplot on top of a boxplot . |
2,316 | def fishers_exact_plot ( data , condition1 , condition2 , ax = None , condition1_value = None , alternative = "two-sided" , ** kwargs ) : plot = sb . barplot ( x = condition1 , y = condition2 , ax = ax , data = data , ** kwargs ) plot . set_ylabel ( "Percent %s" % condition2 ) condition1_mask = get_condition_mask ( dat... | Perform a Fisher s exact test to compare two binary columns |
2,317 | def mann_whitney_plot ( data , condition , distribution , ax = None , condition_value = None , alternative = "two-sided" , skip_plot = False , ** kwargs ) : condition_mask = get_condition_mask ( data , condition , condition_value ) U , p_value = mannwhitneyu ( data [ condition_mask ] [ distribution ] , data [ ~ conditi... | Create a box plot comparing a condition and perform a Mann Whitney test to compare the distribution in condition A v B |
2,318 | def roc_curve_plot ( data , value_column , outcome_column , bootstrap_samples = 100 , ax = None ) : scores = bootstrap_auc ( df = data , col = value_column , pred_col = outcome_column , n_bootstrap = bootstrap_samples ) mean_bootstrap_auc = scores . mean ( ) print ( "{}, Bootstrap (samples = {}) AUC:{}, std={}" . forma... | Create a ROC curve and compute the bootstrap AUC for the given variable and outcome |
2,319 | def _strip_column_name ( col_name , keep_paren_contents = True ) : new_col_name = col_name punctuation_to_text = { '<=' : 'le' , '>=' : 'ge' , '=<' : 'le' , '=>' : 'ge' , '<' : 'lt' , '>' : 'gt' , '#' : 'num' } for punctuation , punctuation_text in punctuation_to_text . items ( ) : new_col_name = new_col_name . replace... | Utility script applying several regexs to a string . Intended to be used by strip_column_names . |
2,320 | def strip_column_names ( cols , keep_paren_contents = True ) : new_cols = [ _strip_column_name ( col , keep_paren_contents = keep_paren_contents ) for col in cols ] if len ( new_cols ) != len ( set ( new_cols ) ) : warn_str = 'Warning: strip_column_names (if run) would introduce duplicate names.' warn_str += ' Revertin... | Utility script for renaming pandas columns to patsy - friendly names . |
2,321 | def set_attributes ( obj , additional_data ) : for key , value in additional_data . items ( ) : if hasattr ( obj , key ) : raise ValueError ( "Key %s in additional_data already exists in this object" % key ) setattr ( obj , _strip_column_name ( key ) , value ) | Given an object and a dictionary give the object new attributes from that dictionary . |
2,322 | def return_obj ( cols , df , return_cols = False ) : df_holder = DataFrameHolder ( cols = cols , df = df ) return df_holder . return_self ( return_cols = return_cols ) | Construct a DataFrameHolder and then return either that or the DataFrame . |
2,323 | def compare_provenance ( this_provenance , other_provenance , left_outer_diff = "In current but not comparison" , right_outer_diff = "In comparison but not current" ) : if ( not this_provenance or not other_provenance ) : return 0 this_items = set ( this_provenance . items ( ) ) other_items = set ( other_provenance . i... | Utility function to compare two arbitrary provenance dicts returns number of discrepancies . |
2,324 | def generate_random_missense_variants ( num_variants = 10 , max_search = 100000 , reference = "GRCh37" ) : variants = [ ] for i in range ( max_search ) : bases = [ "A" , "C" , "T" , "G" ] random_ref = choice ( bases ) bases . remove ( random_ref ) random_alt = choice ( bases ) random_contig = choice ( [ "1" , "2" , "3"... | Generate a random collection of missense variants by trying random variants repeatedly . |
2,325 | def generate_simple_vcf ( filename , variant_collection ) : contigs = [ ] positions = [ ] refs = [ ] alts = [ ] for variant in variant_collection : contigs . append ( "chr" + variant . contig ) positions . append ( variant . start ) refs . append ( variant . ref ) alts . append ( variant . alt ) df = pd . DataFrame ( )... | Output a very simple metadata - free VCF for each variant in a variant_collection . |
2,326 | def list_folder ( self , path ) : try : folder_contents = [ ] for f in os . listdir ( path ) : attr = paramiko . SFTPAttributes . from_stat ( os . stat ( os . path . join ( path , f ) ) ) attr . filename = f folder_contents . append ( attr ) return folder_contents except OSError as e : return SFTPServer . convert_errno... | Looks up folder contents of path . |
2,327 | def filter_variants ( variant_collection , patient , filter_fn , ** kwargs ) : if filter_fn : return variant_collection . clone_with_new_elements ( [ variant for variant in variant_collection if filter_fn ( FilterableVariant ( variant = variant , variant_collection = variant_collection , patient = patient , ) , ** kwar... | Filter variants from the Variant Collection |
2,328 | def filter_effects ( effect_collection , variant_collection , patient , filter_fn , all_effects , ** kwargs ) : def top_priority_maybe ( effect_collection ) : if all_effects : return effect_collection return EffectCollection ( list ( effect_collection . top_priority_effect_per_variant ( ) . values ( ) ) ) def apply_fil... | Filter variants from the Effect Collection |
2,329 | def count_lines_in ( filename ) : "Count lines in a file" f = open ( filename ) lines = 0 buf_size = 1024 * 1024 read_f = f . read buf = read_f ( buf_size ) while buf : lines += buf . count ( '\n' ) buf = read_f ( buf_size ) return lines | Count lines in a file |
2,330 | def view_name_from ( path ) : "Resolve a path to the full python module name of the related view function" try : return CACHED_VIEWS [ path ] except KeyError : view = resolve ( path ) module = path name = '' if hasattr ( view . func , '__module__' ) : module = resolve ( path ) . func . __module__ if hasattr ( view . fu... | Resolve a path to the full python module name of the related view function |
2,331 | def generate_table_from ( data ) : "Output a nicely formatted ascii table" table = Texttable ( max_width = 120 ) table . add_row ( [ "view" , "method" , "status" , "count" , "minimum" , "maximum" , "mean" , "stdev" , "queries" , "querytime" ] ) table . set_cols_align ( [ "l" , "l" , "l" , "r" , "r" , "r" , "r" , "r" , ... | Output a nicely formatted ascii table |
2,332 | def analyze_log_file ( logfile , pattern , reverse_paths = True , progress = True ) : "Given a log file and regex group and extract the performance data" if progress : lines = count_lines_in ( logfile ) pbar = ProgressBar ( widgets = [ Percentage ( ) , Bar ( ) ] , maxval = lines + 1 ) . start ( ) counter = 0 data = { }... | Given a log file and regex group and extract the performance data |
2,333 | def to_string ( self , limit = None ) : header = self . short_string ( ) if len ( self ) == 0 : return header contents = "" element_lines = [ " -- %s" % ( element , ) for element in self . elements [ : limit ] ] contents = "\n" . join ( element_lines ) if limit is not None and len ( self . elements ) > limit : content... | Create a string representation of this collection showing up to limit items . |
2,334 | def safe_log_error ( self , error : Exception , * info : str ) : self . __do_safe ( lambda : self . logger . error ( error , * info ) ) | Log error failing silently on error |
2,335 | def safe_log_info ( self , * info : str ) : self . __do_safe ( lambda : self . logger . info ( * info ) ) | Log info failing silently on error |
2,336 | def _default_client ( jws_client , reactor , key , alg ) : if jws_client is None : pool = HTTPConnectionPool ( reactor ) agent = Agent ( reactor , pool = pool ) jws_client = JWSClient ( HTTPClient ( agent = agent ) , key , alg ) return jws_client | Make a client if we didn t get one . |
2,337 | def _find_supported_challenge ( authzr , responders ) : matches = [ ( responder , challbs [ 0 ] ) for challbs in authzr . body . resolved_combinations for responder in responders if [ challb . typ for challb in challbs ] == [ responder . challenge_type ] ] if len ( matches ) == 0 : raise NoSupportedChallenges ( authzr ... | Find a challenge combination that consists of a single challenge that the responder can satisfy . |
2,338 | def answer_challenge ( authzr , client , responders ) : responder , challb = _find_supported_challenge ( authzr , responders ) response = challb . response ( client . key ) def _stop_responding ( ) : return maybeDeferred ( responder . stop_responding , authzr . body . identifier . value , challb . chall , response ) re... | Complete an authorization using a responder . |
2,339 | def poll_until_valid ( authzr , clock , client , timeout = 300.0 ) : def repoll ( result ) : authzr , retry_after = result if authzr . body . status in { STATUS_PENDING , STATUS_PROCESSING } : return ( deferLater ( clock , retry_after , lambda : None ) . addCallback ( lambda _ : client . poll ( authzr ) ) . addCallback... | Poll an authorization until it is in a state other than pending or processing . |
2,340 | def from_url ( cls , reactor , url , key , alg = RS256 , jws_client = None ) : action = LOG_ACME_CONSUME_DIRECTORY ( url = url , key_type = key . typ , alg = alg . name ) with action . context ( ) : check_directory_url_type ( url ) jws_client = _default_client ( jws_client , reactor , key , alg ) return ( DeferredConte... | Construct a client from an ACME directory at a given URL . |
2,341 | def register ( self , new_reg = None ) : if new_reg is None : new_reg = messages . NewRegistration ( ) action = LOG_ACME_REGISTER ( registration = new_reg ) with action . context ( ) : return ( DeferredContext ( self . update_registration ( new_reg , uri = self . directory [ new_reg ] ) ) . addErrback ( self . _maybe_r... | Create a new registration with the ACME server . |
2,342 | def _maybe_registered ( self , failure , new_reg ) : failure . trap ( ServerError ) response = failure . value . response if response . code == http . CONFLICT : reg = new_reg . update ( resource = messages . UpdateRegistration . resource_type ) uri = self . _maybe_location ( response ) return self . update_registratio... | If the registration already exists we should just load it . |
2,343 | def agree_to_tos ( self , regr ) : return self . update_registration ( regr . update ( body = regr . body . update ( agreement = regr . terms_of_service ) ) ) | Accept the terms - of - service for a registration . |
2,344 | def update_registration ( self , regr , uri = None ) : if uri is None : uri = regr . uri if isinstance ( regr , messages . RegistrationResource ) : message = messages . UpdateRegistration ( ** dict ( regr . body ) ) else : message = regr action = LOG_ACME_UPDATE_REGISTRATION ( uri = uri , registration = message ) with ... | Submit a registration to the server to update it . |
2,345 | def _parse_regr_response ( self , response , uri = None , new_authzr_uri = None , terms_of_service = None ) : links = _parse_header_links ( response ) if u'terms-of-service' in links : terms_of_service = links [ u'terms-of-service' ] [ u'url' ] if u'next' in links : new_authzr_uri = links [ u'next' ] [ u'url' ] if new_... | Parse a registration response from the server . |
2,346 | def _check_regr ( self , regr , new_reg ) : body = getattr ( new_reg , 'body' , new_reg ) for k , v in body . items ( ) : if k == 'resource' or not v : continue if regr . body [ k ] != v : raise errors . UnexpectedUpdate ( regr ) if regr . body . key != self . key . public_key ( ) : raise errors . UnexpectedUpdate ( re... | Check that a registration response contains the registration we were expecting . |
2,347 | def request_challenges ( self , identifier ) : action = LOG_ACME_CREATE_AUTHORIZATION ( identifier = identifier ) with action . context ( ) : message = messages . NewAuthorization ( identifier = identifier ) return ( DeferredContext ( self . _client . post ( self . directory [ message ] , message ) ) . addCallback ( se... | Create a new authorization . |
2,348 | def _expect_response ( cls , response , code ) : if response . code != code : raise errors . ClientError ( 'Expected {!r} response but got {!r}' . format ( code , response . code ) ) return response | Ensure we got the expected response code . |
2,349 | def _parse_authorization ( cls , response , uri = None ) : links = _parse_header_links ( response ) try : new_cert_uri = links [ u'next' ] [ u'url' ] except KeyError : raise errors . ClientError ( '"next" link missing' ) return ( response . json ( ) . addCallback ( lambda body : messages . AuthorizationResource ( body ... | Parse an authorization resource . |
2,350 | def _check_authorization ( cls , authzr , identifier ) : if authzr . body . identifier != identifier : raise errors . UnexpectedUpdate ( authzr ) return authzr | Check that the authorization we got is the one we expected . |
2,351 | def answer_challenge ( self , challenge_body , response ) : action = LOG_ACME_ANSWER_CHALLENGE ( challenge_body = challenge_body , response = response ) with action . context ( ) : return ( DeferredContext ( self . _client . post ( challenge_body . uri , response ) ) . addCallback ( self . _parse_challenge ) . addCallb... | Respond to an authorization challenge . |
2,352 | def _parse_challenge ( cls , response ) : links = _parse_header_links ( response ) try : authzr_uri = links [ 'up' ] [ 'url' ] except KeyError : raise errors . ClientError ( '"up" link missing' ) return ( response . json ( ) . addCallback ( lambda body : messages . ChallengeResource ( authzr_uri = authzr_uri , body = m... | Parse a challenge resource . |
2,353 | def _check_challenge ( cls , challenge , challenge_body ) : if challenge . uri != challenge_body . uri : raise errors . UnexpectedUpdate ( challenge . uri ) return challenge | Check that the challenge resource we got is the one we expected . |
2,354 | def retry_after ( cls , response , default = 5 , _now = time . time ) : val = response . headers . getRawHeaders ( b'retry-after' , [ default ] ) [ 0 ] try : return int ( val ) except ValueError : return http . stringToDatetime ( val ) - _now ( ) | Parse the Retry - After value from a response . |
2,355 | def request_issuance ( self , csr ) : action = LOG_ACME_REQUEST_CERTIFICATE ( ) with action . context ( ) : return ( DeferredContext ( self . _client . post ( self . directory [ csr ] , csr , content_type = DER_CONTENT_TYPE , headers = Headers ( { b'Accept' : [ DER_CONTENT_TYPE ] } ) ) ) . addCallback ( self . _expect_... | Request a certificate . |
2,356 | def _parse_certificate ( cls , response ) : links = _parse_header_links ( response ) try : cert_chain_uri = links [ u'up' ] [ u'url' ] except KeyError : cert_chain_uri = None return ( response . content ( ) . addCallback ( lambda body : messages . CertificateResource ( uri = cls . _maybe_location ( response ) , cert_ch... | Parse a response containing a certificate resource . |
2,357 | def fetch_chain ( self , certr , max_length = 10 ) : action = LOG_ACME_FETCH_CHAIN ( ) with action . context ( ) : if certr . cert_chain_uri is None : return succeed ( [ ] ) elif max_length < 1 : raise errors . ClientError ( 'chain too long' ) return ( DeferredContext ( self . _client . get ( certr . cert_chain_uri , c... | Fetch the intermediary chain for a certificate . |
2,358 | def _wrap_in_jws ( self , nonce , obj ) : with LOG_JWS_SIGN ( key_type = self . _key . typ , alg = self . _alg . name , nonce = nonce ) : jobj = obj . json_dumps ( ) . encode ( ) return ( JWS . sign ( payload = jobj , key = self . _key , alg = self . _alg , nonce = nonce ) . json_dumps ( ) . encode ( ) ) | Wrap JSONDeSerializable object in JWS . |
2,359 | def _check_response ( cls , response , content_type = JSON_CONTENT_TYPE ) : def _got_failure ( f ) : f . trap ( ValueError ) return None def _got_json ( jobj ) : if 400 <= response . code < 600 : if response_ct == JSON_ERROR_CONTENT_TYPE and jobj is not None : raise ServerError ( messages . Error . from_json ( jobj ) ,... | Check response content and its type . |
2,360 | def head ( self , url , * args , ** kwargs ) : with LOG_JWS_HEAD ( ) . context ( ) : return DeferredContext ( self . _send_request ( u'HEAD' , url , * args , ** kwargs ) ) . addActionFinish ( ) | Send HEAD request without checking the response . |
2,361 | def get ( self , url , content_type = JSON_CONTENT_TYPE , ** kwargs ) : with LOG_JWS_GET ( ) . context ( ) : return ( DeferredContext ( self . _send_request ( u'GET' , url , ** kwargs ) ) . addCallback ( self . _check_response , content_type = content_type ) . addActionFinish ( ) ) | Send GET request and check response . |
2,362 | def _add_nonce ( self , response ) : nonce = response . headers . getRawHeaders ( REPLAY_NONCE_HEADER , [ None ] ) [ 0 ] with LOG_JWS_ADD_NONCE ( raw_nonce = nonce ) as action : if nonce is None : raise errors . MissingNonce ( response ) else : try : decoded_nonce = Header . _fields [ 'nonce' ] . decode ( nonce . decod... | Store a nonce from a response we received . |
2,363 | def _get_nonce ( self , url ) : action = LOG_JWS_GET_NONCE ( ) if len ( self . _nonces ) > 0 : with action : nonce = self . _nonces . pop ( ) action . add_success_fields ( nonce = nonce ) return succeed ( nonce ) else : with action . context ( ) : return ( DeferredContext ( self . head ( url ) ) . addCallback ( self . ... | Get a nonce to use in a request removing it from the nonces on hand . |
2,364 | def _post ( self , url , obj , content_type , ** kwargs ) : with LOG_JWS_POST ( ) . context ( ) : headers = kwargs . setdefault ( 'headers' , Headers ( ) ) headers . setRawHeaders ( b'content-type' , [ JSON_CONTENT_TYPE ] ) return ( DeferredContext ( self . _get_nonce ( url ) ) . addCallback ( self . _wrap_in_jws , obj... | POST an object and check the response . |
2,365 | def post ( self , url , obj , content_type = JSON_CONTENT_TYPE , ** kwargs ) : def retry_bad_nonce ( f ) : f . trap ( ServerError ) if f . value . message . typ . split ( ':' ) [ - 1 ] == 'badNonce' : self . _nonces . clear ( ) self . _add_nonce ( f . value . response ) return self . _post ( url , obj , content_type , ... | POST an object and check the response . Retry once if a badNonce error is received . |
2,366 | def _daemon_thread ( * a , ** kw ) : thread = Thread ( * a , ** kw ) thread . daemon = True return thread | Create a threading . Thread but always set daemon . |
2,367 | def _defer_to_worker ( deliver , worker , work , * args , ** kwargs ) : deferred = Deferred ( ) def wrapped_work ( ) : try : result = work ( * args , ** kwargs ) except BaseException : f = Failure ( ) deliver ( lambda : deferred . errback ( f ) ) else : deliver ( lambda : deferred . callback ( result ) ) worker . do ( ... | Run a task in a worker delivering the result as a Deferred in the reactor thread . |
2,368 | def _split_zone ( server_name , zone_name ) : server_name = server_name . rstrip ( u'.' ) zone_name = zone_name . rstrip ( u'.' ) if not ( server_name == zone_name or server_name . endswith ( u'.' + zone_name ) ) : raise NotInZone ( server_name = server_name , zone_name = zone_name ) return server_name [ : - len ( zone... | Split the zone portion off from a DNS label . |
2,369 | def _get_existing ( driver , zone_name , server_name , validation ) : if zone_name is None : zones = sorted ( ( z for z in driver . list_zones ( ) if server_name . rstrip ( u'.' ) . endswith ( u'.' + z . domain . rstrip ( u'.' ) ) ) , key = lambda z : len ( z . domain ) , reverse = True ) if len ( zones ) == 0 : raise ... | Get existing validation records . |
2,370 | def _validation ( response ) : h = hashlib . sha256 ( response . key_authorization . encode ( "utf-8" ) ) return b64encode ( h . digest ( ) ) . decode ( ) | Get the validation value for a challenge response . |
2,371 | def load_or_create_client_key ( pem_path ) : acme_key_file = pem_path . asTextMode ( ) . child ( u'client.key' ) if acme_key_file . exists ( ) : key = serialization . load_pem_private_key ( acme_key_file . getContent ( ) , password = None , backend = default_backend ( ) ) else : key = generate_private_key ( u'rsa' ) ac... | Load the client key from a directory creating it if it does not exist . |
2,372 | def _parse ( reactor , directory , pemdir , * args , ** kwargs ) : def colon_join ( items ) : return ':' . join ( [ item . replace ( ':' , '\\:' ) for item in items ] ) sub = colon_join ( list ( args ) + [ '=' . join ( item ) for item in kwargs . items ( ) ] ) pem_path = FilePath ( pemdir ) . asTextMode ( ) acme_key = ... | Parse a txacme endpoint description . |
2,373 | def lazyread ( f , delimiter ) : try : running = f . read ( 0 ) except Exception as e : if e . __class__ . __name__ == 'IncompleteReadError' : running = b'' else : raise while True : new_data = f . read ( 1024 ) if not new_data : yield running return running += new_data while delimiter in running : curr , running = run... | Generator which continually reads f to the next instance of delimiter . |
2,374 | def generate_private_key ( key_type ) : if key_type == u'rsa' : return rsa . generate_private_key ( public_exponent = 65537 , key_size = 2048 , backend = default_backend ( ) ) raise ValueError ( key_type ) | Generate a random private key using sensible parameters . |
2,375 | def tap ( f ) : @ wraps ( f ) def _cb ( res , * a , ** kw ) : d = maybeDeferred ( f , res , * a , ** kw ) d . addCallback ( lambda ignored : res ) return d return _cb | Tap a Deferred callback chain with a function whose return value is ignored . |
2,376 | def decode_csr ( b64der ) : try : return x509 . load_der_x509_csr ( decode_b64jose ( b64der ) , default_backend ( ) ) except ValueError as error : raise DeserializationError ( error ) | Decode JOSE Base - 64 DER - encoded CSR . |
2,377 | def csr_for_names ( names , key ) : if len ( names ) == 0 : raise ValueError ( 'Must have at least one name' ) if len ( names [ 0 ] ) > 64 : common_name = u'san.too.long.invalid' else : common_name = names [ 0 ] return ( x509 . CertificateSigningRequestBuilder ( ) . subject_name ( x509 . Name ( [ x509 . NameAttribute (... | Generate a certificate signing request for the given names and private key . |
2,378 | def _wrap_parse ( code , filename ) : code = 'async def wrapper():\n' + indent ( code , ' ' ) return ast . parse ( code , filename = filename ) . body [ 0 ] . body [ 0 ] . value | async wrapper is required to avoid await calls raising a SyntaxError |
2,379 | def layers_to_solr ( self , layers ) : layers_dict_list = [ ] layers_success_ids = [ ] layers_errors_ids = [ ] for layer in layers : layer_dict , message = layer2dict ( layer ) if not layer_dict : layers_errors_ids . append ( [ layer . id , message ] ) LOGGER . error ( message ) else : layers_dict_list . append ( layer... | Sync n layers in Solr . |
2,380 | def layer_to_solr ( self , layer ) : success = True message = 'Synced layer id %s to Solr' % layer . id layer_dict , message = layer2dict ( layer ) if not layer_dict : success = False else : layer_json = json . dumps ( layer_dict ) try : url_solr_update = '%s/solr/hypermap/update/json/docs' % SEARCH_URL headers = { "co... | Sync a layer in Solr . |
2,381 | def clear_solr ( self , catalog = "hypermap" ) : solr_url = "{0}/solr/{1}" . format ( SEARCH_URL , catalog ) solr = pysolr . Solr ( solr_url , timeout = 60 ) solr . delete ( q = '*:*' ) LOGGER . debug ( 'Solr core cleared' ) | Clear all indexes in the solr core |
2,382 | def create_service_from_endpoint ( endpoint , service_type , title = None , abstract = None , catalog = None ) : from models import Service if Service . objects . filter ( url = endpoint , catalog = catalog ) . count ( ) == 0 : request = requests . get ( endpoint ) if request . status_code == 200 : LOGGER . debug ( 'Cr... | Create a service from an endpoint if it does not already exists . |
2,383 | def service_url_parse ( url ) : endpoint = get_sanitized_endpoint ( url ) url_split_list = url . split ( endpoint + '/' ) if len ( url_split_list ) != 0 : url_split_list = url_split_list [ 1 ] . split ( '/' ) else : raise Exception ( 'Wrong url parsed' ) parsed_url = [ s for s in url_split_list if '?' not in s if 'Serv... | Function that parses from url the service and folder of services . |
2,384 | def inverse_mercator ( xy ) : lon = ( xy [ 0 ] / 20037508.34 ) * 180 lat = ( xy [ 1 ] / 20037508.34 ) * 180 lat = 180 / math . pi * ( 2 * math . atan ( math . exp ( lat * math . pi / 180 ) ) - math . pi / 2 ) return ( lon , lat ) | Given coordinates in spherical mercator return a lon lat tuple . |
2,385 | def get_wms_version_negotiate ( url , timeout = 10 ) : try : LOGGER . debug ( 'Trying a WMS 1.3.0 GetCapabilities request' ) return WebMapService ( url , version = '1.3.0' , timeout = timeout ) except Exception as err : LOGGER . warning ( 'WMS 1.3.0 support not found: %s' , err ) LOGGER . debug ( 'Trying a WMS 1.1.1 Ge... | OWSLib wrapper function to perform version negotiation against owslib . wms . WebMapService |
2,386 | def get_sanitized_endpoint ( url ) : sanitized_url = url . rstrip ( ) esri_string = '/rest/services' if esri_string in url : match = re . search ( esri_string , sanitized_url ) sanitized_url = url [ 0 : ( match . start ( 0 ) + len ( esri_string ) ) ] return sanitized_url | Sanitize an endpoint as removing unneeded parameters |
2,387 | def get_esri_extent ( esriobj ) : extent = None srs = None if 'fullExtent' in esriobj . _json_struct : extent = esriobj . _json_struct [ 'fullExtent' ] if 'extent' in esriobj . _json_struct : extent = esriobj . _json_struct [ 'extent' ] try : srs = extent [ 'spatialReference' ] [ 'wkid' ] except KeyError , err : LOGGER... | Get the extent of an ESRI resource |
2,388 | def bbox2wktpolygon ( bbox ) : minx = float ( bbox [ 0 ] ) miny = float ( bbox [ 1 ] ) maxx = float ( bbox [ 2 ] ) maxy = float ( bbox [ 3 ] ) return 'POLYGON((%.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f))' % ( minx , miny , minx , maxy , maxx , maxy , maxx , miny , minx , miny ) | Return OGC WKT Polygon of a simple bbox list of strings |
2,389 | def get_solr_date ( pydate , is_negative ) : try : if isinstance ( pydate , datetime . datetime ) : solr_date = '%sZ' % pydate . isoformat ( ) [ 0 : 19 ] if is_negative : LOGGER . debug ( '%s This layer has a negative date' % solr_date ) solr_date = '-%s' % solr_date return solr_date else : return None except Exception... | Returns a date in a valid Solr format from a string . |
2,390 | def get_date ( layer ) : date = None sign = '+' date_type = 1 layer_dates = layer . get_layer_dates ( ) if layer_dates : sign = layer_dates [ 0 ] [ 0 ] date = layer_dates [ 0 ] [ 1 ] date_type = layer_dates [ 0 ] [ 2 ] if date is None : date = layer . created if date . year > 2300 : date = None if date_type == 0 : date... | Returns a custom date representation . A date can be detected or from metadata . It can be a range or a simple date in isoformat . |
2,391 | def detect_metadata_url_scheme ( url ) : scheme = None url_lower = url . lower ( ) if any ( x in url_lower for x in [ 'wms' , 'service=wms' ] ) : scheme = 'OGC:WMS' if any ( x in url_lower for x in [ 'wmts' , 'service=wmts' ] ) : scheme = 'OGC:WMTS' elif all ( x in url for x in [ '/MapServer' , 'f=json' ] ) : scheme = ... | detect whether a url is a Service type that HHypermap supports |
def serialize_checks(check_set):
    """Serialize a check_set for raphael.

    Takes at most the first 25 checks and maps each to a small dict the
    raphael charting code consumes.

    :param check_set: queryset-like object whose .all() yields check records
    :return: list of {'datetime', 'value', 'success'} dicts
    """
    return [
        {
            'datetime': check.checked_datetime.isoformat(),
            'value': check.response_time,
            # raphael wants a numeric flag, not a boolean
            'success': 1 if check.success else 0,
        }
        for check in check_set.all()[:25]
    ]
2,393 | def domains ( request ) : url = '' query = '*:*&facet=true&facet.limit=-1&facet.pivot=domain_name,service_id&wt=json&indent=true&rows=0' if settings . SEARCH_TYPE == 'elasticsearch' : url = '%s/select?q=%s' % ( settings . SEARCH_URL , query ) if settings . SEARCH_TYPE == 'solr' : url = '%s/solr/hypermap/select?q=%s' % ... | A page with number of services and layers faceted on domains . |
2,394 | def tasks_runner ( request ) : cached_layers_number = 0 cached_layers = cache . get ( 'layers' ) if cached_layers : cached_layers_number = len ( cached_layers ) cached_deleted_layers_number = 0 cached_deleted_layers = cache . get ( 'deleted_layers' ) if cached_deleted_layers : cached_deleted_layers_number = len ( cache... | A page that let the admin to run global tasks . |
2,395 | def layer_mapproxy ( request , catalog_slug , layer_uuid , path_info ) : layer = get_object_or_404 ( Layer , uuid = layer_uuid , catalog__slug = catalog_slug ) if layer . service . type == 'Hypermap:WorldMap' : layer . service . url = layer . url mp , yaml_config = get_mapproxy ( layer ) query = request . META [ 'QUERY... | Get Layer with matching catalog and uuid |
2,396 | def parse_datetime ( date_str ) : is_common_era = True date_str_parts = date_str . split ( "-" ) if date_str_parts and date_str_parts [ 0 ] == '' : is_common_era = False if len ( date_str_parts ) == 2 : date_str = date_str + "-01-01T00:00:00Z" parsed_datetime = { 'is_common_era' : is_common_era , 'parsed_datetime' : No... | Parses a date string to date object . for BCE dates only supports the year part . |
def query_ids(self, ids):
    """Query by list of identifiers.

    Looks up Layer records first; if no Layer matches, falls back to
    Service records with the same uuids.

    :param ids: list of uuid strings
    :return: queryset of matching Layer (or Service) records
    """
    layer_matches = self._get_repo_filter(Layer.objects).filter(uuid__in=ids).all()
    if len(layer_matches) == 0:
        return self._get_repo_filter(Service.objects).filter(uuid__in=ids).all()
    return layer_matches
2,398 | def query_domain ( self , domain , typenames , domainquerytype = 'list' , count = False ) : objects = self . _get_repo_filter ( Layer . objects ) if domainquerytype == 'range' : return [ tuple ( objects . aggregate ( Min ( domain ) , Max ( domain ) ) . values ( ) ) ] else : if count : return [ ( d [ domain ] , d [ '%s_... | Query by property domain values |
def query_source(self, source):
    """Query by source.

    :param source: source URL to match against Layer.url
    :return: queryset of Layer records with that url
    """
    repo_layers = self._get_repo_filter(Layer.objects)
    return repo_layers.filter(url=source)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.