idx (int64, 0-252k) | question (string, length 48-5.28k) | target (string, length 5-1.23k) |
|---|---|---|
2,900 | def undecorate ( func ) : orig_call_wrapper = lambda x : x for call_wrapper , unwrap in SUPPORTED_DECORATOR . items ( ) : if isinstance ( func , call_wrapper ) : func = unwrap ( func ) orig_call_wrapper = call_wrapper break return orig_call_wrapper , func | Returns the decorator and the undecorated function of given object . |
2,901 | def item ( ctx , appid , title ) : ctx . obj [ 'appid' ] = appid ctx . obj [ 'title' ] = title | Market - related commands . |
2,902 | def get_price ( ctx , currency ) : appid = ctx . obj [ 'appid' ] title = ctx . obj [ 'title' ] item_ = Item ( appid , title ) item_ . get_price_data ( currency ) click . secho ( 'Lowest price: %s %s' % ( item_ . price_lowest , item_ . price_currency ) , fg = 'green' ) | Prints out market item price . |
2,903 | def get_cards ( ctx ) : appid = ctx . obj [ 'appid' ] app = Application ( appid ) click . secho ( 'Cards for `%s` [appid: %s]' % ( app . title , appid ) , fg = 'green' ) if not app . has_cards : click . secho ( 'This app has no cards.' , fg = 'red' , err = True ) return cards , booster = app . get_cards ( ) def get_lin... | Prints out cards available for application . |
2,904 | def get_card_prices ( ctx , currency ) : appid = ctx . obj [ 'appid' ] detailed = True appids = [ appid ] if ',' in appid : appids = [ appid . strip ( ) for appid in appid . split ( ',' ) ] detailed = False for appid in appids : print_card_prices ( appid , currency , detailed = detailed ) click . echo ( '' ) | Prints out lowest card prices for an application . Comma - separated list of application IDs is supported . |
2,905 | def get_gems ( ctx ) : username = ctx . obj [ 'username' ] click . secho ( 'Total gems owned by `%s`: %d' % ( username , User ( username ) . gems_total ) , fg = 'green' ) | Prints out total gems count for a Steam user . |
2,906 | def get_games ( ctx ) : username = ctx . obj [ 'username' ] games = User ( username ) . get_games_owned ( ) for game in sorted ( games . values ( ) , key = itemgetter ( 'title' ) ) : click . echo ( '%s [appid: %s]' % ( game [ 'title' ] , game [ 'appid' ] ) ) click . secho ( 'Total gems owned by `%s`: %d' % ( username ,... | Prints out games owned by a Steam user . |
2,907 | def get_booster_stats ( ctx , currency ) : username = ctx . obj [ 'username' ] inventory = User ( username ) . _get_inventory_raw ( ) boosters = { } for item in inventory [ 'rgDescriptions' ] . values ( ) : is_booster = False tags = item [ 'tags' ] for tag in tags : if tag [ 'internal_name' ] == TAG_ITEM_CLASS_BOOSTER ... | Prints out price stats for booster packs available in Steam user inventory . |
2,908 | def get_cards_stats ( ctx , currency , skip_owned , appid , foil ) : username = ctx . obj [ 'username' ] cards_by_app = defaultdict ( list ) inventory = User ( username ) . traverse_inventory ( item_filter = TAG_ITEM_CLASS_CARD ) for item in inventory : appid_ = item . app . appid if not appid or appid_ in appid : card... | Prints out price stats for cards available in Steam user inventory . |
2,909 | def run_radia_with_merge ( job , rna_bam , tumor_bam , normal_bam , univ_options , radia_options ) : spawn = job . wrapJobFn ( run_radia , rna_bam [ 'rna_genome' ] , tumor_bam , normal_bam , univ_options , radia_options , disk = '100M' , memory = '100M' ) . encapsulate ( ) merge = job . wrapJobFn ( merge_perchrom_vcfs ... | A wrapper for the the entire RADIA sub - graph . |
2,910 | def run_radia ( job , rna_bam , tumor_bam , normal_bam , univ_options , radia_options ) : if 'rna_genome' in rna_bam . keys ( ) : rna_bam = rna_bam [ 'rna_genome' ] elif set ( rna_bam . keys ( ) ) == { 'rna_genome_sorted.bam' , 'rna_genome_sorted.bam.bai' } : pass else : raise RuntimeError ( 'An improperly formatted di... | Spawn a RADIA job for each chromosome on the input bam trios . |
2,911 | def run_radia_perchrom ( job , bams , univ_options , radia_options , chrom ) : work_dir = os . getcwd ( ) input_files = { 'rna.bam' : bams [ 'tumor_rna' ] , 'rna.bam.bai' : bams [ 'tumor_rnai' ] , 'tumor.bam' : bams [ 'tumor_dna' ] , 'tumor.bam.bai' : bams [ 'tumor_dnai' ] , 'normal.bam' : bams [ 'normal_dna' ] , 'norm... | Run RADIA call on a single chromosome in the input bams . |
2,912 | def run_filter_radia ( job , bams , radia_file , univ_options , radia_options , chrom ) : work_dir = os . getcwd ( ) input_files = { 'rna.bam' : bams [ 'tumor_rna' ] , 'rna.bam.bai' : bams [ 'tumor_rnai' ] , 'tumor.bam' : bams [ 'tumor_dna' ] , 'tumor.bam.bai' : bams [ 'tumor_dnai' ] , 'normal.bam' : bams [ 'normal_dna... | Run filterradia on the RADIA output . |
2,913 | def index_bamfile ( job , bamfile , sample_type , univ_options , samtools_options , sample_info = None , export = True ) : work_dir = os . getcwd ( ) in_bamfile = sample_type if sample_info is not None : assert isinstance ( sample_info , str ) in_bamfile = '_' . join ( [ in_bamfile , sample_info ] ) in_bamfile += '.bam... | Index bamfile using samtools |
2,914 | def sort_bamfile ( job , bamfile , sample_type , univ_options , samtools_options ) : work_dir = os . getcwd ( ) in_bamfile = '' . join ( [ sample_type , '.bam' ] ) out_bamfile = '_' . join ( [ sample_type , 'sorted.bam' ] ) input_files = { in_bamfile : bamfile } input_files = get_files_from_filestore ( job , input_file... | Sort bamfile using samtools |
2,915 | def get_identity ( user ) : identity = Identity ( user . id ) if hasattr ( user , 'id' ) : identity . provides . add ( UserNeed ( user . id ) ) for role in getattr ( user , 'roles' , [ ] ) : identity . provides . add ( RoleNeed ( role . name ) ) identity . user = user return identity | Create an identity for a given user instance . |
2,916 | def object_to_items ( data_structure ) : items = [ ] try : items = list ( data_structure . __dict__ . items ( ) ) except : pass hierarchy = [ data_structure ] try : hierarchy += inspect . getmro ( data_structure ) except : pass slots = [ ] try : for b in hierarchy : try : slots += b . __slots__ except : pass except : p... | Converts a object to a items list respecting also slots . |
2,917 | def recursive_sort ( data_structure ) : if not isinstance ( data_structure , _primitive_types ) : is_meta = isinstance ( data_structure , Meta ) was_dict = isinstance ( data_structure , WasDict ) if not ( is_meta or was_dict ) : was_dict = isinstance ( data_structure , dict ) if not was_dict : try : data_structure = da... | Sort a recursive data_structure . |
2,918 | def traverse_frozen_data ( data_structure ) : parent_stack = [ data_structure ] while parent_stack : node = parent_stack . pop ( 0 ) tlen = - 1 if not isinstance ( node , _string_types ) : try : tlen = len ( node ) except : pass if tlen == - 1 : yield node else : parent_stack = list ( node ) + parent_stack | Yields the leaves of the frozen data - structure pre - order . |
2,919 | def tree_diff ( a , b , n = 5 , sort = False ) : a = dump ( a ) b = dump ( b ) if not sort : a = vformat ( a ) . split ( "\n" ) b = vformat ( b ) . split ( "\n" ) else : a = vformat ( recursive_sort ( a ) ) . split ( "\n" ) b = vformat ( recursive_sort ( b ) ) . split ( "\n" ) return "\n" . join ( difflib . unified_dif... | Dump any data - structure or object traverse it depth - first in - order and apply a unified diff . |
2,920 | def stats ( self ) : stats_online = CRef . cint ( ) stats_ingame = CRef . cint ( ) stats_chatting = CRef . cint ( ) self . _iface . get_clan_stats ( self . group_id , stats_online , stats_ingame , stats_chatting , ) return { 'online' : int ( stats_online ) , 'ingame' : int ( stats_ingame ) , 'chatting' : int ( stats_ch... | Basic group statistics . |
2,921 | def startproject ( name , directory , verbosity ) : handle_template ( 'project' , name , target = directory , verbosity = verbosity ) click . echo ( f"Success: '{name}' project was successfully created on '{directory}'" ) | Creates a Trading - Bots project directory structure for the given project NAME in the current directory or optionally in the given DIRECTORY . |
2,922 | def createbot ( name , directory , verbosity ) : handle_template ( 'bot' , name , target = directory , verbosity = verbosity ) click . echo ( f"Success: '{name}' bot was successfully created on '{directory}'" ) | Creates a Bot s directory structure for the given bot NAME in the current directory or optionally in the given DIRECTORY . |
2,923 | def get_state ( self , as_str = False ) : uid = self . user_id if self . _iface_user . get_id ( ) == uid : result = self . _iface . get_my_state ( ) else : result = self . _iface . get_state ( uid ) if as_str : return UserState . get_alias ( result ) return result | Returns user state . See UserState . |
2,924 | def load_permissions_on_identity_loaded ( sender , identity ) : identity . provides . add ( any_user ) if current_user . is_authenticated : identity . provides . add ( authenticated_user ) | Add system roles Needs to users identities . |
2,925 | def print_errors ( self , file_name ) : for error in self . get_messages ( file_name ) : print ( '\t' , error . __unicode__ ( ) ) | Prints the errors observed for a file |
2,926 | def clean ( self ) : data = super ( RasterQueryForm , self ) . clean ( ) geom = data . pop ( 'upload' , None ) or data . pop ( 'bbox' , None ) if geom : data [ 'g' ] = geom return data | Return cleaned fields as a dict determine which geom takes precedence . |
2,927 | def register ( matcher , * aliases ) : docstr = matcher . __doc__ if matcher . __doc__ is not None else '' helpmatchers [ matcher ] = docstr . strip ( ) for alias in aliases : matchers [ alias ] = matcher norm = normalize ( alias ) normalized [ norm ] = alias norm = norm . replace ( '_' , '' ) normalized [ norm ] = ali... | Register a matcher associated to one or more aliases . Each alias given is also normalized . |
2,928 | def normalize ( alias ) : alias = re . sub ( r'([a-z])([A-Z])' , r'\1_\2' , alias ) words = alias . lower ( ) . split ( '_' ) words = filter ( lambda w : w not in IGNORED_WORDS , words ) return '_' . join ( words ) | Normalizes an alias by removing adverbs defined in IGNORED_WORDS |
2,929 | def lookup ( alias ) : if alias in matchers : return matchers [ alias ] else : norm = normalize ( alias ) if norm in normalized : alias = normalized [ norm ] return matchers [ alias ] if - 1 != alias . find ( '_' ) : norm = normalize ( alias ) . replace ( '_' , '' ) return lookup ( norm ) return None | Tries to find a matcher callable associated to the given alias . If an exact match does not exists it will try normalizing it and even removing underscores to find one . |
2,930 | def suggest ( alias , max = 3 , cutoff = 0.5 ) : aliases = matchers . keys ( ) similar = get_close_matches ( alias , aliases , n = max , cutoff = cutoff ) return similar | Suggest a list of aliases which are similar enough |
2,931 | def sample_chromosomes ( job , genome_fai_file ) : work_dir = os . getcwd ( ) genome_fai = untargz ( job . fileStore . readGlobalFile ( genome_fai_file ) , work_dir ) return chromosomes_from_fai ( genome_fai ) | Get a list of chromosomes in the input data . |
2,932 | def run_mutation_aggregator ( job , mutation_results , univ_options ) : out = { } for chrom in mutation_results [ 'mutect' ] . keys ( ) : out [ chrom ] = job . addChildJobFn ( merge_perchrom_mutations , chrom , mutation_results , univ_options ) . rv ( ) merged_snvs = job . addFollowOnJobFn ( merge_perchrom_vcfs , out ,... | Aggregate all the called mutations . |
2,933 | def merge_perchrom_mutations ( job , chrom , mutations , univ_options ) : work_dir = os . getcwd ( ) from protect . mutation_calling . muse import process_muse_vcf from protect . mutation_calling . mutect import process_mutect_vcf from protect . mutation_calling . radia import process_radia_vcf from protect . mutation_... | Merge the mutation calls for a single chromosome . |
2,934 | def read_vcf ( vcf_file ) : vcf_dict = [ ] with open ( vcf_file , 'r' ) as invcf : for line in invcf : if line . startswith ( '#' ) : continue line = line . strip ( ) . split ( ) vcf_dict . append ( ( line [ 0 ] , line [ 1 ] , line [ 3 ] , line [ 4 ] ) ) return vcf_dict | Read a vcf file to a dict of lists . |
2,935 | def merge_perchrom_vcfs ( job , perchrom_vcfs , tool_name , univ_options ) : work_dir = os . getcwd ( ) input_files = { '' . join ( [ chrom , '.vcf' ] ) : jsid for chrom , jsid in perchrom_vcfs . items ( ) } input_files = get_files_from_filestore ( job , input_files , work_dir , docker = False ) first = True with open ... | Merge per - chromosome vcf files into a single genome level vcf . |
2,936 | def unmerge ( job , input_vcf , tool_name , chromosomes , tool_options , univ_options ) : work_dir = os . getcwd ( ) input_files = { 'input.vcf' : input_vcf , 'genome.fa.fai.tar.gz' : tool_options [ 'genome_fai' ] } input_files = get_files_from_filestore ( job , input_files , work_dir , docker = False ) input_files [ '... | Un - merge a vcf file into per - chromosome vcfs . |
2,937 | def as_feature ( data ) : if not isinstance ( data , ( Feature , FeatureCollection ) ) : if is_featurelike ( data ) : data = Feature ( ** data ) elif has_features ( data ) : data = FeatureCollection ( ** data ) elif isinstance ( data , collections . Sequence ) : data = FeatureCollection ( features = data ) elif has_lay... | Returns a Feature or FeatureCollection . |
2,938 | def has_layer ( fcollection ) : for val in six . viewvalues ( fcollection ) : if has_features ( val ) : return True return False | Returns true for a multi - layer dict of FeatureCollections . |
2,939 | def wrap_rsem ( job , star_bams , univ_options , rsem_options ) : rsem = job . addChildJobFn ( run_rsem , star_bams [ 'rna_transcriptome.bam' ] , univ_options , rsem_options , cores = rsem_options [ 'n' ] , disk = PromisedRequirement ( rsem_disk , star_bams , rsem_options [ 'index' ] ) ) return rsem . rv ( ) | A wrapper for run_rsem using the results from run_star as input . |
2,940 | def run_rsem ( job , rna_bam , univ_options , rsem_options ) : work_dir = os . getcwd ( ) input_files = { 'star_transcriptome.bam' : rna_bam , 'rsem_index.tar.gz' : rsem_options [ 'index' ] } input_files = get_files_from_filestore ( job , input_files , work_dir , docker = False ) input_files [ 'rsem_index' ] = untargz ... | Run rsem on the input RNA bam . |
2,941 | def activate ( self , page = None ) : page = page or '' if '://' in page : self . _iface . activate_overlay_url ( page ) else : self . _iface . activate_overlay_game ( page ) | Activates overlay with browser optionally opened at a given page . |
2,942 | def any_of ( value , * args ) : if len ( args ) : value = ( value , ) + args return ExpectationAny ( value ) | At least one of the items in value should match |
2,943 | def all_of ( value , * args ) : if len ( args ) : value = ( value , ) + args return ExpectationAll ( value ) | All the items in value should match |
2,944 | def none_of ( value , * args ) : if len ( args ) : value = ( value , ) + args return ExpectationNone ( value ) | None of the items in value should match |
2,945 | def run_cutadapt ( job , fastqs , univ_options , cutadapt_options ) : work_dir = os . getcwd ( ) input_files = { 'rna_1.fastq' : fastqs [ 0 ] , 'rna_2.fastq' : fastqs [ 1 ] } input_files = get_files_from_filestore ( job , input_files , work_dir , docker = False ) gz = '.gz' if is_gzipfile ( input_files [ 'rna_1.fastq' ... | Runs cutadapt on the input RNA fastq files . |
2,946 | def index ( ) : identity = g . identity actions = { } for action in access . actions . values ( ) : actions [ action . value ] = DynamicPermission ( action ) . allows ( identity ) if current_user . is_anonymous : return render_template ( "invenio_access/open.html" , actions = actions , identity = identity ) else : retu... | Basic test view . |
2,947 | def role_admin ( ) : identity = g . identity actions = { } for action in access . actions . values ( ) : actions [ action . value ] = DynamicPermission ( action ) . allows ( identity ) message = 'You are opening a page requiring the "admin-access" permission' return render_template ( "invenio_access/limited.html" , mes... | View only allowed to admin role . |
2,948 | def read_fastas ( input_files ) : tumor_file = [ y for x , y in input_files . items ( ) if x . startswith ( 'T' ) ] [ 0 ] normal_file = [ y for x , y in input_files . items ( ) if x . startswith ( 'N' ) ] [ 0 ] output_files = defaultdict ( list ) output_files = _read_fasta ( tumor_file , output_files ) num_entries = le... | Read the tumor and normal fastas into a joint dict . |
2,949 | def _read_fasta ( fasta_file , output_dict ) : read_name = None with open ( fasta_file , 'r' ) as f : for line in f : line = line . strip ( ) if not line : continue if line . startswith ( '>' ) : read_name = line . lstrip ( '>' ) else : assert read_name is not None , line output_dict [ read_name ] . append ( line . str... | Read the peptide fasta into an existing dict . |
2,950 | def _process_consensus_mhcii ( mhc_file , normal = False ) : core_col = None results = pandas . DataFrame ( columns = [ 'allele' , 'pept' , 'tumor_pred' , 'core' ] ) with open ( mhc_file , 'r' ) as mf : peptides = set ( ) for line in mf : if not line . startswith ( 'HLA' ) : continue line = line . strip ( ) . split ( '... | Process the results from running IEDB MHCII binding predictions using the consensus method into a pandas dataframe . |
2,951 | def _process_net_mhcii ( mhc_file , normal = False ) : results = pandas . DataFrame ( columns = [ 'allele' , 'pept' , 'tumor_pred' , 'core' , 'peptide_name' ] ) with open ( mhc_file , 'r' ) as mf : peptides = set ( ) allele = re . sub ( '-DQB' , '/DQB' , mf . readline ( ) . strip ( ) ) _ = mf . readline ( ) for line in... | Process the results from running NetMHCIIpan binding predictions into a pandas dataframe . |
2,952 | def _process_mhci ( mhc_file , normal = False ) : results = pandas . DataFrame ( columns = [ 'allele' , 'pept' , 'tumor_pred' , 'core' ] ) with open ( mhc_file , 'r' ) as mf : peptides = set ( ) for line in mf : if not line . startswith ( 'HLA' ) : continue line = line . strip ( ) . split ( '\t' ) allele = line [ 0 ] p... | Process the results from running IEDB MHCI binding predictions into a pandas dataframe . |
2,953 | def pept_diff ( p1 , p2 ) : if len ( p1 ) != len ( p2 ) : return - 1 else : return sum ( [ p1 [ i ] != p2 [ i ] for i in range ( len ( p1 ) ) ] ) | Return the number of differences betweeen 2 peptides |
2,954 | def print_mhc_peptide ( neoepitope_info , peptides , pepmap , outfile , netmhc = False ) : if netmhc : peptide_names = [ neoepitope_info . peptide_name ] else : peptide_names = [ x for x , y in peptides . items ( ) if neoepitope_info . pept in y ] neoepitope_info = neoepitope_info . _asdict ( ) if neoepitope_info [ 'no... | Accept data about one neoepitope from merge_mhc_peptide_calls and print it to outfile . This is a generic module to reduce code redundancy . |
2,955 | def check ( domain , prefix , code , strategies = '*' ) : if strategies == '*' or 'dns_txt' in strategies : if check_dns_txt ( domain , prefix , code ) : return True if strategies == '*' or 'dns_cname' in strategies : if check_dns_cname ( domain , prefix , code ) : return True if strategies == '*' or 'meta_tag' in stra... | Check the ownership of a domain by going thru a serie of strategies . If at least one strategy succeed the domain is considered verified and this methods returns true . |
2,956 | def register_cache_buster ( self , app , config = None ) : if not ( config is None or isinstance ( config , dict ) ) : raise ValueError ( "`config` must be an instance of dict or None" ) bust_map = { } unbust_map = { } app . logger . debug ( 'Starting computing hashes for static assets' ) for dirpath , dirnames , filen... | Register app in cache buster so that url_for adds a unique prefix to URLs generated for the static endpoint . Also make the app able to serve cache - busted static files . |
2,957 | def env_or_default ( var , default = None ) : if var in os . environ : return os . environ [ var ] return default | Get environment variable or provide default . |
2,958 | def kms_encrypt ( value , key , aws_config = None ) : aws_config = aws_config or { } aws = boto3 . session . Session ( ** aws_config ) client = aws . client ( 'kms' ) enc_res = client . encrypt ( KeyId = key , Plaintext = value ) return n ( b64encode ( enc_res [ 'CiphertextBlob' ] ) ) | Encrypt and value with KMS key . |
2,959 | def get_value ( * args , ** kwargs ) : global _config if _config is None : raise ValueError ( 'configuration not set; must run figgypy.set_config first' ) return _config . get_value ( * args , ** kwargs ) | Get from config object by exposing Config . get_value method . |
2,960 | def set_value ( * args , ** kwargs ) : global _config if _config is None : raise ValueError ( 'configuration not set; must run figgypy.set_config first' ) return _config . set_value ( * args , ** kwargs ) | Set value in the global Config object . |
2,961 | def decode_escapes ( s ) : def decode_match ( match ) : return codecs . decode ( match . group ( 0 ) , 'unicode-escape' ) return ESCAPE_SEQUENCE_RE . sub ( decode_match , s ) | Unescape libconfig string literals |
2,962 | def loads ( string , filename = None , includedir = '' ) : try : f = io . StringIO ( string ) except TypeError : raise TypeError ( "libconf.loads() input string must by unicode" ) return load ( f , filename = filename , includedir = includedir ) | Load the contents of string to a Python object |
2,963 | def dump_string ( s ) : s = ( s . replace ( '\\' , '\\\\' ) . replace ( '"' , '\\"' ) . replace ( '\f' , r'\f' ) . replace ( '\n' , r'\n' ) . replace ( '\r' , r'\r' ) . replace ( '\t' , r'\t' ) ) s = UNPRINTABLE_CHARACTER_RE . sub ( lambda m : r'\x{:02x}' . format ( ord ( m . group ( 0 ) ) ) , s ) return '"' + s + '"' | Stringize s adding double quotes and escaping as necessary |
2,964 | def get_dump_type ( value ) : if isinstance ( value , dict ) : return 'd' if isinstance ( value , tuple ) : return 'l' if isinstance ( value , list ) : return 'a' if isinstance ( value , bool ) : return 'b' if isint ( value ) : if is_long_int ( value ) : return 'i64' else : return 'i' if isinstance ( value , float ) : ... | Get the libconfig datatype of a value |
2,965 | def get_array_value_dtype ( lst ) : array_value_type = None for value in lst : dtype = get_dump_type ( value ) if dtype not in { 'b' , 'i' , 'i64' , 'f' , 's' } : raise ConfigSerializeError ( "Invalid datatype in array (may only contain scalars):" "%r of type %s" % ( value , type ( value ) ) ) if array_value_type is No... | Return array value type raise ConfigSerializeError for invalid arrays |
2,966 | def dump_value ( key , value , f , indent = 0 ) : spaces = ' ' * indent if key is None : key_prefix = '' key_prefix_nl = '' else : key_prefix = key + ' = ' key_prefix_nl = key + ' =\n' + spaces dtype = get_dump_type ( value ) if dtype == 'd' : f . write ( u'{}{}{{\n' . format ( spaces , key_prefix_nl ) ) dump_dict ( va... | Save a value of any libconfig type |
2,967 | def dump_collection ( cfg , f , indent = 0 ) : for i , value in enumerate ( cfg ) : dump_value ( None , value , f , indent ) if i < len ( cfg ) - 1 : f . write ( u',\n' ) | Save a collection of attributes |
2,968 | def dump_dict ( cfg , f , indent = 0 ) : for key in cfg : if not isstr ( key ) : raise ConfigSerializeError ( "Dict keys must be strings: %r" % ( key , ) ) dump_value ( key , cfg [ key ] , f , indent ) f . write ( u';\n' ) | Save a dictionary of attributes |
2,969 | def dumps ( cfg ) : str_file = io . StringIO ( ) dump ( cfg , str_file ) return str_file . getvalue ( ) | Serialize cfg into a libconfig - formatted str |
2,970 | def dump ( cfg , f ) : if not isinstance ( cfg , dict ) : raise ConfigSerializeError ( 'dump() requires a dict as input, not %r of type %r' % ( cfg , type ( cfg ) ) ) dump_dict ( cfg , f , 0 ) | Serialize cfg as a libconfig - formatted stream into f |
2,971 | def tokenize ( self , string ) : pos = 0 while pos < len ( string ) : m = SKIP_RE . match ( string , pos = pos ) if m : skip_lines = m . group ( 0 ) . split ( '\n' ) if len ( skip_lines ) > 1 : self . row += len ( skip_lines ) - 1 self . column = 1 + len ( skip_lines [ - 1 ] ) else : self . column += len ( skip_lines [... | Yield tokens from the input string or throw ConfigParseError |
2,972 | def from_file ( cls , f , filename = None , includedir = '' , seenfiles = None ) : if filename is None : filename = getattr ( f , 'name' , '<unknown>' ) if seenfiles is None : seenfiles = set ( ) if filename in seenfiles : raise ConfigParseError ( "Circular include: %r" % ( filename , ) ) seenfiles = seenfiles | { file... | Create a token stream by reading an input file |
2,973 | def error ( self , msg ) : if self . finished ( ) : raise ConfigParseError ( "Unexpected end of input; %s" % ( msg , ) ) else : t = self . peek ( ) raise ConfigParseError ( "Unexpected token %s; %s" % ( t , msg ) ) | Raise a ConfigParseError at the current input position |
2,974 | def load_variables ( ) : if ( not os . environ . get ( "PYCONFLUENCE_TOKEN" ) or not os . environ . get ( "PYCONFLUENCE_USER" ) or not os . environ . get ( "PYCONFLUENCE_ORG" ) ) : print ( "One or more pyconfluence environment variables are not set. " "See README for directions on how to resolve this." ) sys . exit ( "... | Load variables from environment variables . |
2,975 | def rest ( url , req = "GET" , data = None ) : load_variables ( ) return _rest ( base_url + url , req , data ) | Main function to be called from this module . |
2,976 | def _rest ( url , req , data = None ) : if url . upper ( ) . startswith ( "HTTPS" ) : print ( "Secure connection required: Please use HTTPS or https" ) return "" req = req . upper ( ) if req != "GET" and req != "PUT" and req != "POST" and req != "DELETE" : return "" status , body = _api_action ( url , req , data ) if (... | Send a rest rest request to the server . |
2,977 | def _api_action ( url , req , data = None ) : requisite_headers = { 'Accept' : 'application/json' , 'Content-Type' : 'application/json' } auth = ( user , token ) if req == "GET" : response = requests . get ( url , headers = requisite_headers , auth = auth ) elif req == "PUT" : response = requests . put ( url , headers ... | Take action based on what kind of request is needed . |
2,978 | def _platform_patterns ( self , platform = 'generic' , compiled = False ) : patterns = self . _dict_compiled . get ( platform , None ) if compiled else self . _dict_text . get ( platform , None ) if patterns is None : raise KeyError ( "Unknown platform: {}" . format ( platform ) ) return patterns | Return all the patterns for specific platform . |
2,979 | def pattern ( self , platform , key , compiled = True ) : patterns = self . _platform_patterns ( platform , compiled = compiled ) pattern = patterns . get ( key , self . _platform_patterns ( compiled = compiled ) . get ( key , None ) ) if pattern is None : raise KeyError ( "Patterns database corrupted. Platform: {}, Ke... | Return the pattern defined by the key string specific to the platform . |
2,980 | def description ( self , platform , key ) : patterns = self . _dict_dscr . get ( platform , None ) description = patterns . get ( key , None ) return description | Return the patter description . |
2,981 | def platform ( self , with_prompt , platforms = None ) : if platforms is None : platforms = self . _dict [ 'generic' ] [ 'prompt_detection' ] for platform in platforms : pattern = self . pattern ( platform , 'prompt' ) result = re . search ( pattern , with_prompt ) if result : return platform return None | Return the platform name based on the prompt matching . |
2,982 | def after_connect ( self ) : show_users = self . device . send ( "show users" , timeout = 120 ) result = re . search ( pattern_manager . pattern ( self . platform , 'connected_locally' ) , show_users ) if result : self . log ( 'Locally connected to Calvados. Exiting.' ) self . device . send ( 'exit' ) return True retur... | Execute after connect . |
2,983 | def get_hostname_text ( self ) : try : hostname_text = self . device . send ( 'hostname' , timeout = 10 ) if hostname_text : self . device . hostname = hostname_text . splitlines ( ) [ 0 ] return hostname_text except CommandError : self . log ( "Non Unix jumphost type detected" ) return None | Return hostname information from the Unix host . |
2,984 | def _find_file ( f ) : if os . path . isabs ( f ) : return f else : for d in Config . _dirs : _f = os . path . join ( d , f ) if os . path . isfile ( _f ) : return _f raise FiggypyError ( "could not find configuration file {} in dirs {}" . format ( f , Config . _dirs ) ) | Find a config file if possible . |
2,985 | def _load_file ( self , f ) : try : with open ( f , 'r' ) as _fo : _seria_in = seria . load ( _fo ) _y = _seria_in . dump ( 'yaml' ) except IOError : raise FiggypyError ( "could not open configuration file" ) self . values . update ( yaml . load ( _y ) ) | Get values from config file |
2,986 | def setup ( self , config_file = None , aws_config = None , gpg_config = None , decrypt_gpg = True , decrypt_kms = True ) : if aws_config is not None : self . aws_config = aws_config if gpg_config is not None : self . gpg_config = gpg_config if decrypt_kms is not None : self . decrypt_kms = decrypt_kms if decrypt_gpg i... | Make setup easier by providing a constructor method . |
2,987 | def authenticate ( self , driver ) : events = [ driver . username_re , driver . password_re , self . device . prompt_re , driver . rommon_re , driver . unable_to_connect_re , driver . authentication_error_re , pexpect . TIMEOUT , pexpect . EOF ] transitions = [ ( driver . username_re , [ 0 ] , 1 , partial ( a_send_user... | Authenticate using the Console Server protocol specific FSM . |
2,988 | def delegate ( attribute_name , method_names ) : info = { 'attribute' : attribute_name , 'methods' : method_names } def decorator ( cls ) : attribute = info [ 'attribute' ] if attribute . startswith ( "__" ) : attribute = "_" + cls . __name__ + attribute for name in info [ 'methods' ] : setattr ( cls , name , eval ( "l... | Pass the call to the attribute called attribute_name for every method listed in method_names . |
2,989 | def pattern_to_str ( pattern ) : if isinstance ( pattern , str ) : return repr ( pattern ) else : return repr ( pattern . pattern ) if pattern else None | Convert regex pattern to string . |
2,990 | def levenshtein_distance ( str_a , str_b ) : len_a , len_b = len ( str_a ) , len ( str_b ) if len_a > len_b : str_a , str_b = str_b , str_a len_a , len_b = len_b , len_a current = range ( len_a + 1 ) for i in range ( 1 , len_b + 1 ) : previous , current = current , [ i ] + [ 0 ] * len_a for j in range ( 1 , len_a + 1 )... | Calculate the Levenshtein distance between string a and b . |
2,991 | def parse_inventory ( inventory_output = None ) : udi = { "name" : "" , "description" : "" , "pid" : "" , "vid" : "" , "sn" : "" } if inventory_output is None : return udi capture_next = False chassis_udi_text = None for line in inventory_output . split ( '\n' ) : lc_line = line . lower ( ) if ( 'chassis' in lc_line or... | Parse the inventory text and return udi dict . |
2,992 | def normalize_urls ( urls ) : _urls = [ ] if isinstance ( urls , list ) : if urls : if isinstance ( urls [ 0 ] , list ) : _urls = urls elif isinstance ( urls [ 0 ] , str ) : _urls = [ urls ] else : raise RuntimeError ( "No target host url provided." ) elif isinstance ( urls , str ) : _urls = [ [ urls ] ] return _urls | Overload urls and make list of lists of urls . |
2,993 | def yaml_file_to_dict ( script_name , path = None ) : def load_yaml ( file_path ) : with open ( file_path , 'r' ) as yamlfile : try : dictionary = yaml . load ( yamlfile ) except yaml . YAMLError : return { } return dictionary def merge ( user , default ) : if isinstance ( user , dict ) and isinstance ( default , dict ... | Read yaml file and return the dict . |
2,994 | def write ( self , text ) : index = text . find ( '\n' ) if index == - 1 : self . _buffer = self . _buffer + text else : self . _buffer = self . _buffer + text [ : index + 1 ] if self . _pattern : result = re . search ( self . _pattern , self . _buffer ) if result : for group in result . groups ( ) : if group : self . ... | Override the standard write method to filter the content . |
2,995 | def start ( builtins = False , profile_threads = True ) : if profile_threads : threading . setprofile ( _callback ) _yappi . start ( builtins , profile_threads ) | Start profiler . |
2,996 | def set_clock_type ( type ) : type = type . upper ( ) if type not in CLOCK_TYPES : raise YappiError ( "Invalid clock type:%s" % ( type ) ) _yappi . set_clock_type ( CLOCK_TYPES [ type ] ) | Sets the internal clock type for timing . Profiler shall not have any previous stats . Otherwise an exception is thrown . |
2,997 | async def read_reply ( self ) : code = 500 messages = [ ] go_on = True while go_on : try : line = await self . readline ( ) except ValueError as e : code = 500 go_on = False else : try : code = int ( line [ : 3 ] ) except ValueError as e : raise ConnectionResetError ( "Connection lost." ) from e else : go_on = line [ 3... | Reads a reply from the server . |
2,998 | def make_hop_info_from_url ( url , verify_reachability = None ) : parsed = urlparse ( url ) username = None if parsed . username is None else unquote ( parsed . username ) password = None if parsed . password is None else unquote ( parsed . password ) try : enable_password = parse_qs ( parsed . query ) [ "enable_passwo... | Build HopInfo object from url . |
2,999 | def is_reachable ( self ) : if self . verify_reachability and hasattr ( self . verify_reachability , '__call__' ) : return self . verify_reachability ( host = self . hostname , port = self . port ) return True | Return if host is reachable . |
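Each `question` cell stores the source of a Python function as a space-separated token stream, and the matching `target` cell holds its tokenized docstring summary; long cells are truncated with "..." by the viewer. The helper below is a minimal sketch, not part of the dataset's tooling, of how such a token stream could be rejoined into more readable Python source. The `detokenize` name and the regex rules are illustrative assumptions, and the output is only an approximation of the original code.

```python
import re

def detokenize(code_tokens: str) -> str:
    """Roughly rejoin a space-separated token stream (the `question` column)
    into readable Python source. Heuristic only; not an exact inverse of
    whatever tokenizer produced the dataset."""
    s = code_tokens
    s = s.replace(" . ", ".")               # attribute access: obj . attr -> obj.attr
    s = re.sub(r"\s+([)\]}:,])", r"\1", s)  # drop space before closers and punctuation
    s = re.sub(r"([(\[{])\s+", r"\1", s)    # drop space after openers
    s = re.sub(r"(\w)\s+\(", r"\1(", s)     # call syntax: name ( -> name(
    return s

print(detokenize("def undecorate ( func ) : return func"))
# -> def undecorate(func): return func
```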