idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
2,000
def plot_log_histogram ( df , palette , title , histnorm = "" ) : data = [ go . Histogram ( x = np . log10 ( df . loc [ df [ "dataset" ] == d , "lengths" ] ) , opacity = 0.4 , name = d , histnorm = histnorm , marker = dict ( color = c ) ) for d , c in zip ( df [ "dataset" ] . unique ( ) , palette ) ] xtickvals = [ 10 *...
Plot overlaying histograms with log transformation of length Return both html and fig for png
2,001
def get_file(db_folder, file_name):
    """Case-insensitive lookup of *file_name* inside *db_folder*.

    Returns the full path of the first directory entry whose name matches
    file_name (ignoring case and surrounding whitespace), or None when the
    folder does not exist or nothing matches.
    """
    if not os.path.isdir(db_folder):
        return None
    wanted = file_name.lower().strip()
    matches = (
        os.path.join(db_folder, entry)
        for entry in os.listdir(db_folder)
        if entry.lower().strip() == wanted
    )
    return next(matches, None)
Glob for the poor .
2,002
def parse ( db_folder , out_folder ) : stru_dat = get_file ( db_folder , 'CroStru.dat' ) data_tad = get_file ( db_folder , 'CroBank.tad' ) data_dat = get_file ( db_folder , 'CroBank.dat' ) if None in [ stru_dat , data_tad , data_dat ] : raise CronosException ( "Not all database files are present." ) meta , tables = par...
Parse a cronos database .
2,003
def encode1(self):
    """Return the base64 encoding of the figure file wrapped in an HTML <img> tag.

    Reads the PNG file at self.path, base64-encodes it, strips newlines,
    and embeds the result as a data URI.
    """
    # Use a context manager so the file handle is closed deterministically
    # (the original left the handle open for the garbage collector).
    with open(self.path, 'rb') as fh:
        data_uri = b64encode(fh.read()).decode('utf-8').replace('\n', '')
    return '<img src="data:image/png;base64,{0}">'.format(data_uri)
Return the base64 encoding of the figure file and insert in html image tag .
2,004
def encode2(self):
    """Return the base64 encoding of the fig attribute embedded in an HTML <img> tag.

    Renders self.fig (presumably a matplotlib figure -- TODO confirm) to an
    in-memory PNG buffer, base64-encodes the bytes, URL-quotes them and
    wraps the result in a data-URI <img> tag.
    """
    buf = BytesIO()
    # bbox_inches='tight' trims surrounding whitespace; dpi=100 fixes the size.
    self.fig.savefig(buf, format='png', bbox_inches='tight', dpi=100)
    buf.seek(0)  # rewind so read() returns the rendered bytes
    string = b64encode(buf.read())
    return '<img src="data:image/png;base64,{0}">'.format(urlquote(string))
Return the base64 encoding of the fig attribute and insert in html image tag .
2,005
def loadFromDisk ( self , calculation ) : suffixes = { 'Isotropic' : 'iso' , 'Circular Dichroism (R-L)' : 'cd' , 'Right Polarized (R)' : 'r' , 'Left Polarized (L)' : 'l' , 'Linear Dichroism (V-H)' : 'ld' , 'Vertical Polarized (V)' : 'v' , 'Horizontal Polarized (H)' : 'h' , } self . raw = list ( ) for spectrumName in se...
Read the spectra from the files generated by Quanty and store them as a list of spectrum objects .
2,006
def updateResultsView(self, index):
    """Select only the result at *index* in the results view.

    Clears the previous selection, selects the whole row of *index*,
    resizes the columns to their contents, and gives the view focus.
    """
    selection_flags = (QItemSelectionModel.Clear
                       | QItemSelectionModel.Rows
                       | QItemSelectionModel.Select)
    view = self.resultsView
    view.selectionModel().select(index, selection_flags)
    view.resizeColumnsToContents()
    view.setFocus()
Update the selection to contain only the result specified by the index . This should be the last index of the model . Finally update the context menu .
2,007
def updatePlotWidget ( self ) : pw = self . getPlotWidget ( ) pw . reset ( ) results = self . resultsModel . getCheckedItems ( ) for result in results : if isinstance ( result , ExperimentalData ) : spectrum = result . spectra [ 'Expt' ] spectrum . legend = '{}-{}' . format ( result . index , 'Expt' ) spectrum . xLabel...
Updating the plotting widget should not require any information about the current state of the widget .
2,008
def row(self):
    """Return this item's position among its parent's children (0 for the root)."""
    parent = self.parent
    if parent is None:
        return 0
    return parent.getChildren().index(self)
Return the row of the child .
2,009
def parent(self, index):
    """Return the model index of the parent of the item at *index*.

    Items whose parent is the root item map to an invalid QModelIndex();
    any other parent is wrapped in an index at column 0.
    """
    parentItem = self.item(index).parent
    if parentItem == self.rootItem:
        return QModelIndex()
    return self.createIndex(parentItem.row(), 0, parentItem)
Return the index of the parent for a given index of the child . Unfortunately the name of the method has to be parent even though a more verbose name like parentIndex would avoid confusion about what parent actually is - an index or an item .
2,010
def setData ( self , index , value , role ) : if not index . isValid ( ) : return False item = self . item ( index ) column = index . column ( ) if role == Qt . EditRole : items = list ( ) items . append ( item ) if self . sync : parentIndex = self . parent ( index ) for sibling in self . siblings ( parentIndex ) : sib...
Set the role data for the item at index to value .
2,011
def flags(self, index):
    """Return the item flags for *index*.

    Every item is enabled, selectable and user-checkable; leaf items in
    columns other than the first are additionally editable.
    """
    result = Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsUserCheckable
    item = self.item(index)
    if index.column() > 0 and not item.childCount():
        result |= Qt.ItemIsEditable
    return result
Return the active flags for the given index . Add editable flag to items other than the first column .
2,012
def _getModelData ( self , modelData , parentItem = None ) : if parentItem is None : parentItem = self . rootItem for item in parentItem . getChildren ( ) : key = item . getItemData ( 0 ) if item . childCount ( ) : modelData [ key ] = odict ( ) self . _getModelData ( modelData [ key ] , item ) else : if isinstance ( it...
Return the data contained in the model .
2,013
def _contextMenu(self, pos):
    """Show the plot-area context menu at *pos* (handles customContextMenuRequested)."""
    menu = QMenu(self)
    menu.addAction(self._zoomBackAction)
    globalPosition = self.getWidgetHandle().mapToGlobal(pos)
    menu.exec_(globalPosition)
Handle plot area customContextMenuRequested signal .
2,014
def convolve_fft ( array , kernel ) : array = np . asarray ( array , dtype = np . complex ) kernel = np . asarray ( kernel , dtype = np . complex ) if array . ndim != kernel . ndim : raise ValueError ( "Image and kernel must have same number of " "dimensions" ) array_shape = array . shape kernel_shape = kernel . shape ...
Convolve an array with a kernel using FFT . Implementation based on the convolve_fft function from astropy .
2,015
def diagonalize(self):
    """Diagonalize the tensor.

    Eigenvectors are taken from the symmetric part of self.tensor; the
    eigenvalues are then recomputed by projecting the full tensor onto
    that eigenbasis.
    """
    symmetric_part = (self.tensor + self.tensor.transpose()) / 2.0
    self.eigvals, self.eigvecs = np.linalg.eig(symmetric_part)
    projected = np.dot(np.dot(self.eigvecs.transpose(), self.tensor), self.eigvecs)
    self.eigvals = np.diag(projected)
Diagonalize the tensor .
2,016
def _skip_lines ( self , n ) : for i in range ( n ) : self . line = next ( self . output ) return self . line
Skip a number of lines from the output .
2,017
def _parse_tensor ( self , indices = False ) : if indices : self . line = self . _skip_lines ( 1 ) tensor = np . zeros ( ( 3 , 3 ) ) for i in range ( 3 ) : tokens = self . line . split ( ) if indices : tensor [ i ] [ 0 ] = float ( tokens [ 1 ] ) tensor [ i ] [ 1 ] = float ( tokens [ 2 ] ) tensor [ i ] [ 2 ] = float ( t...
Parse a tensor .
2,018
def __validate ( self , target , value , oldvalue , initiator ) : if value == oldvalue : return value if self . allow_null and value is None : return value if self . check_value ( value ) : return value else : if self . throw_exception : if self . message : self . message = self . message . format ( field = self . fiel...
Method executed when the event set is triggered .
2,019
def __create_event(self):
    """Attach the __validate listener to the field's 'set' event (idempotent)."""
    already_listening = event.contains(self.field, 'set', self.__validate)
    if not already_listening:
        event.listen(self.field, 'set', self.__validate, retval=True)
Create an SQLAlchemy event listening the set in a particular column .
2,020
def stop(self):
    """Stop validating: remove the 'set' listener if it is installed."""
    args = (self.field, 'set', self.__validate)
    if event.contains(*args):
        event.remove(*args)
Remove the listener to stop the validation
2,021
def start(self):
    """(Re)install the validation listener unless it is already active."""
    listener_installed = event.contains(self.field, 'set', self.__validate)
    if not listener_installed:
        self.__create_event()
Restart the listener
2,022
def nhapDaiHan(self, cucSo, gioiTinh):
    """Assign the major fortune period (dai han) to each of the twelve palaces.

    Each palace's starting age is cucSo plus ten years per step of
    distance from the Menh palace. Returns self for chaining.
    """
    for palace in self.thapNhiCung:
        distance = khoangCachCung(palace.cungSo, self.cungMenh, gioiTinh)
        palace.daiHan(cucSo + 10 * distance)
    return self
Nhap dai han
2,023
def nt2aa ( ntseq ) : nt2num = { 'A' : 0 , 'C' : 1 , 'G' : 2 , 'T' : 3 , 'a' : 0 , 'c' : 1 , 'g' : 2 , 't' : 3 } aa_dict = 'KQE*TPASRRG*ILVLNHDYTPASSRGCILVFKQE*TPASRRGWMLVLNHDYTPASSRGCILVF' return '' . join ( [ aa_dict [ nt2num [ ntseq [ i ] ] + 4 * nt2num [ ntseq [ i + 1 ] ] + 16 * nt2num [ ntseq [ i + 2 ] ] ] for i i...
Translate a nucleotide sequence into an amino acid sequence .
2,024
def nt2codon_rep ( ntseq ) : nt2num = { 'A' : 0 , 'C' : 1 , 'G' : 2 , 'T' : 3 , 'a' : 0 , 'c' : 1 , 'g' : 2 , 't' : 3 } codon_rep = '\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xa...
Represent nucleotide sequence by sequence of codon symbols .
2,025
def cutR_seq(seq, cutR, max_palindrome):
    """Cut a genomic sequence from the right.

    A cutR below max_palindrome denotes a palindromic insertion: the
    reverse complement of the last (max_palindrome - cutR) bases is
    appended. Otherwise (cutR - max_palindrome) bases are trimmed from
    the right end.
    """
    complement_dict = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A'}
    if cutR < max_palindrome:
        tail = seq[cutR - max_palindrome:]
        palindrome = ''.join(complement_dict[nt] for nt in reversed(tail))
        return seq + palindrome
    return seq[:len(seq) - cutR + max_palindrome]
Cut genomic sequence from the right .
2,026
def cutL_seq(seq, cutL, max_palindrome):
    """Cut a genomic sequence from the left.

    A cutL below max_palindrome denotes a palindromic insertion: the
    reverse complement of the first (max_palindrome - cutL) bases is
    prepended. Otherwise (cutL - max_palindrome) bases are trimmed from
    the left end.
    """
    complement_dict = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A'}
    if cutL >= max_palindrome:
        return seq[cutL - max_palindrome:]
    head = seq[:max_palindrome - cutL]
    palindrome = ''.join(complement_dict[nt] for nt in reversed(head))
    return palindrome + seq
Cut genomic sequence from the left .
2,027
def generate_sub_codons_left(codons_dict):
    """Map each amino acid to the unique 1- and 2-nucleotide prefixes of its codons."""
    sub_codons_left = {}
    for aa, codons in codons_dict.items():
        prefixes = {codon[0] for codon in codons} | {codon[:2] for codon in codons}
        sub_codons_left[aa] = list(prefixes)
    return sub_codons_left
Generate the sub_codons_left dictionary of codon prefixes .
2,028
def calc_steady_state_dist(R):
    """Calculate the steady-state distribution of a Markov transition matrix.

    Finds an eigenvector of R with eigenvalue 1 and normalizes it to sum
    to one. Generalized to any n-state matrix (the original hard-coded a
    scan of exactly 4 eigenvalues). Returns -1 when no unit eigenvalue is
    found, preserving the original error convention.
    """
    w, v = np.linalg.eig(R)
    # Scan every eigenvalue, not just the first four, so non-4x4 matrices work.
    for i in range(len(w)):
        if np.abs(w[i] - 1) < 1e-8:
            return np.real(v[:, i] / np.sum(v[:, i]))
    return -1
Calculate the steady state dist of a 4 state markov transition matrix .
2,029
def rnd_ins_seq ( ins_len , C_R , CP_first_nt ) : nt2num = { 'A' : 0 , 'C' : 1 , 'G' : 2 , 'T' : 3 } num2nt = 'ACGT' if ins_len == 0 : return '' seq = num2nt [ CP_first_nt . searchsorted ( np . random . random ( ) ) ] ins_len += - 1 while ins_len > 0 : seq += num2nt [ C_R [ nt2num [ seq [ - 1 ] ] , : ] . searchsorted (...
Generate a random insertion nucleotide sequence of length ins_len .
2,030
def update_rates ( self ) : source , created = RateSource . objects . get_or_create ( name = self . get_source_name ( ) ) source . base_currency = self . get_base_currency ( ) source . save ( ) for currency , value in six . iteritems ( self . get_rates ( ) ) : try : rate = Rate . objects . get ( source = source , curre...
Creates or updates rates for a source
2,031
def showDosHeaderData ( peInstance ) : dosFields = peInstance . dosHeader . getFields ( ) print "[+] IMAGE_DOS_HEADER values:\n" for field in dosFields : if isinstance ( dosFields [ field ] , datatypes . Array ) : print " % ( field , len ( dosFields [ field ] ) ) counter = 0 for element in dosFields [ field ] : print "...
Prints IMAGE_DOS_HEADER fields .
2,032
def showFileHeaderData ( peInstance ) : fileHeaderFields = peInstance . ntHeaders . fileHeader . getFields ( ) print "[+] IMAGE_FILE_HEADER values:\n" for field in fileHeaderFields : print " % ( field , fileHeaderFields [ field ] . value )
Prints IMAGE_FILE_HEADER fields .
2,033
def showOptionalHeaderData ( peInstance ) : print "[+] IMAGE_OPTIONAL_HEADER:\n" ohFields = peInstance . ntHeaders . optionalHeader . getFields ( ) for field in ohFields : if not isinstance ( ohFields [ field ] , datadirs . DataDirectory ) : print " % ( field , ohFields [ field ] . value )
Prints IMAGE_OPTIONAL_HEADER fields .
2,034
def showDataDirectoriesData ( peInstance ) : print "[+] Data directories:\n" dirs = peInstance . ntHeaders . optionalHeader . dataDirectory counter = 1 for dir in dirs : print "[%d] % ( counter , dir . name . value , dir . rva . value , dir . size . value ) counter += 1
Prints the DATA_DIRECTORY fields .
2,035
def showSectionsHeaders ( peInstance ) : print "[+] Sections information:\n" print " % peInstance . ntHeaders . fileHeader . numberOfSections . value for section in peInstance . sectionHeaders : fields = section . getFields ( ) for field in fields : if isinstance ( fields [ field ] , datatypes . String ) : fmt = "%s = ...
Prints IMAGE_SECTION_HEADER for every section present in the file .
2,036
def showImports ( peInstance ) : iidEntries = peInstance . ntHeaders . optionalHeader . dataDirectory [ consts . IMPORT_DIRECTORY ] . info if iidEntries : for iidEntry in iidEntries : fields = iidEntry . getFields ( ) print "module: %s" % iidEntry . metaData . moduleName . value for field in fields : print "%s -> %x" %...
Shows imports information .
2,037
def showExports ( peInstance ) : exports = peInstance . ntHeaders . optionalHeader . dataDirectory [ consts . EXPORT_DIRECTORY ] . info if exports : exp_fields = exports . getFields ( ) for field in exp_fields : print "%s -> %x" % ( field , exp_fields [ field ] . value ) for entry in exports . exportTable : entry_field...
Show exports information
2,038
def getFields(self):
    """Return a dict mapping every name in self._attrsList to its attribute value."""
    return {name: getattr(self, name) for name in self._attrsList}
Returns all the class attributues .
2,039
def calc_euler_tour ( g , start , end ) : even_g = nx . subgraph ( g , g . nodes ( ) ) . copy ( ) if end in even_g . neighbors ( start ) : even_g . remove_edge ( start , end ) comps = list ( nx . connected_components ( even_g ) ) if len ( comps ) == 1 : trail = list ( nx . eulerian_circuit ( even_g , start ) ) trail . ...
Calculates an Euler tour over the graph g from vertex start to vertex end . Assumes start and end are odd - degree vertices and that there are no other odd - degree vertices .
2,040
def greedy_trails ( subg , odds , verbose ) : if verbose : print ( '\tCreating edge map' ) edges = defaultdict ( list ) for x , y in subg . edges ( ) : edges [ x ] . append ( y ) edges [ y ] . append ( x ) if verbose : print ( '\tSelecting trails' ) trails = [ ] for x in subg . nodes ( ) : if verbose > 2 : print ( '\t\...
Greedily select trails by making the longest you can until the end
2,041
def decompose_graph ( g , heuristic = 'tour' , max_odds = 20 , verbose = 0 ) : subgraphs = [ nx . subgraph ( g , x ) . copy ( ) for x in nx . connected_components ( g ) ] chains = [ ] num_subgraphs = len ( subgraphs ) step = 0 while num_subgraphs > 0 : if verbose : print ( 'Step #{0} ({1} subgraphs)' . format ( step , ...
Decompose a graph into a set of non - overlapping trails .
2,042
def lerp(self, a, t):
    """Linearly interpolate from this vector toward *a* by fraction *t*."""
    delta = a.minus(self)
    return self.plus(delta.times(t))
Lerp . Linear interpolation from self to a
2,043
def interpolate(self, other, t):
    """Return a new vertex between self and other, lerping position and normal by t.

    Subclasses should override this to interpolate any additional properties.
    """
    new_pos = self.pos.lerp(other.pos, t)
    new_normal = self.normal.lerp(other.normal, t)
    return Vertex(new_pos, new_normal)
Create a new vertex between this vertex and other by linearly interpolating all properties using a parameter of t . Subclasses should override this to interpolate additional properties .
2,044
def splitPolygon ( self , polygon , coplanarFront , coplanarBack , front , back ) : COPLANAR = 0 FRONT = 1 BACK = 2 SPANNING = 3 polygonType = 0 vertexLocs = [ ] numVertices = len ( polygon . vertices ) for i in range ( numVertices ) : t = self . normal . dot ( polygon . vertices [ i ] . pos ) - self . w loc = - 1 if t...
Split polygon by this plane if needed then put the polygon or polygon fragments in the appropriate lists . Coplanar polygons go into either coplanarFront or coplanarBack depending on their orientation with respect to this plane . Polygons in front or in back of this plane go into either front or back
2,045
def invert(self):
    """Flip solid and empty space: flip every polygon, the plane, and swap subtrees."""
    for polygon in self.polygons:
        polygon.flip()
    self.plane.flip()
    if self.front:
        self.front.invert()
    if self.back:
        self.back.invert()
    self.front, self.back = self.back, self.front
Convert solid space to empty space and empty space to solid space .
2,046
def clipPolygons ( self , polygons ) : if not self . plane : return polygons [ : ] front = [ ] back = [ ] for poly in polygons : self . plane . splitPolygon ( poly , front , back , front , back ) if self . front : front = self . front . clipPolygons ( front ) if self . back : back = self . back . clipPolygons ( back ) ...
Recursively remove all polygons in polygons that are inside this BSP tree .
2,047
def clipTo(self, bsp):
    """Remove every polygon in this BSP tree that lies inside the other tree *bsp*."""
    self.polygons = bsp.clipPolygons(self.polygons)
    for child in (self.front, self.back):
        if child:
            child.clipTo(bsp)
Remove all polygons in this BSP tree that are inside the other BSP tree bsp .
2,048
def allPolygons(self):
    """Collect the polygons of this node and, recursively, of both subtrees."""
    result = list(self.polygons)
    for child in (self.front, self.back):
        if child:
            result.extend(child.allPolygons())
    return result
Return a list of all polygons in this BSP tree .
2,049
def get_rate(currency):
    """Return the conversion rate from the default currency to *currency*.

    Raises CurrencyConversionException when no rate is stored for the
    default source.
    """
    source = get_rate_source()
    try:
        return Rate.objects.get(source=source, currency=currency).value
    except Rate.DoesNotExist:
        msg = ("Rate for %s in %s do not exists. "
               "Please run python manage.py update_rates"
               % (currency, source.name))
        raise CurrencyConversionException(msg)
Returns the rate from the default currency to currency .
2,050
def get_rate_source ( ) : backend = money_rates_settings . DEFAULT_BACKEND ( ) try : return RateSource . objects . get ( name = backend . get_source_name ( ) ) except RateSource . DoesNotExist : raise CurrencyConversionException ( "Rate for %s source do not exists. " "Please run python manage.py update_rates" % backend...
Get the default Rate Source and return it .
2,051
def base_convert_money ( amount , currency_from , currency_to ) : source = get_rate_source ( ) if source . base_currency != currency_from : rate_from = get_rate ( currency_from ) else : rate_from = Decimal ( 1 ) rate_to = get_rate ( currency_to ) if isinstance ( amount , float ) : amount = Decimal ( amount ) . quantize...
Convert amount from currency_from to currency_to
2,052
def convert_money(amount, currency_from, currency_to):
    """Convert *amount* between currencies and wrap the result in a Money instance."""
    converted = base_convert_money(amount, currency_from, currency_to)
    return moneyed.Money(converted, currency_to)
Convert amount from currency_from to currency_to and return a Money instance of the converted amount .
2,053
def format_date ( format_string = None , datetime_obj = None ) : datetime_obj = datetime_obj or datetime . now ( ) if format_string is None : seconds = int ( datetime_obj . strftime ( "%s" ) ) milliseconds = datetime_obj . microsecond // 1000 return str ( seconds * 1000 + milliseconds ) else : formatter = SimpleDateFor...
Format a datetime object with Java SimpleDateFormat s - like string .
2,054
def allZero(buffer):
    """Return True when every byte in *buffer* equals "\\x00".

    An empty buffer counts as all-zero, matching the original flag-loop
    behavior; rewritten with the built-in all() for clarity.
    """
    return all(byte == "\x00" for byte in buffer)
Tries to determine if a buffer is empty .
2,055
def readAlignedString(self, align=4):
    """Read an ASCII string, then consume padding up to the next align-byte boundary.

    Trailing NUL bytes are stripped from the returned value.
    """
    s = self.readString()
    # NOTE(review): when len(s) is already a multiple of align this consumes
    # a full extra align-sized chunk -- behavior preserved from the original.
    padding = align - len(s) % align
    for _ in range(padding):
        s += self.data[self.offset]
        self.offset += 1
    return s.rstrip("\x00")
Reads an ASCII string aligned to the next align - bytes boundary .
2,056
def readAt ( self , offset , size ) : if offset > self . length : if self . log : print "Warning: Trying to read: %d bytes - only %d bytes left" % ( nroBytes , self . length - self . offset ) offset = self . length - self . offset tmpOff = self . tell ( ) self . setOffset ( offset ) r = self . read ( size ) self . setO...
Reads as many bytes indicated in the size parameter at the specific offset .
2,057
def send ( self , message , channel_name = None , fail_silently = False , options = None ) : if channel_name is None : channels = self . settings [ "CHANNELS" ] else : try : channels = { "__selected__" : self . settings [ "CHANNELS" ] [ channel_name ] } except KeyError : raise Exception ( "channels does not exist %s" ,...
Send a notification to channels
2,058
def request ( self , method , path , params = None , headers = None , cookies = None , data = None , json = None , allow_redirects = None , timeout = None ) : headers = headers or { } timeout = timeout if timeout is not None else self . _timeout allow_redirects = allow_redirects if allow_redirects is not None else self...
Prepares and sends an HTTP request . Returns the HTTPResponse object .
2,059
def load_genomic_CDR3_anchor_pos_and_functionality ( anchor_pos_file_name ) : anchor_pos_and_functionality = { } anchor_pos_file = open ( anchor_pos_file_name , 'r' ) first_line = True for line in anchor_pos_file : if first_line : first_line = False continue split_line = line . split ( ',' ) split_line = [ x . strip ( ...
Read anchor position and functionality from file .
2,060
def generate_cutV_genomic_CDR3_segs ( self ) : max_palindrome = self . max_delV_palindrome self . cutV_genomic_CDR3_segs = [ ] for CDR3_V_seg in [ x [ 1 ] for x in self . genV ] : if len ( CDR3_V_seg ) < max_palindrome : self . cutV_genomic_CDR3_segs += [ cutR_seq ( CDR3_V_seg , 0 , len ( CDR3_V_seg ) ) ] else : self ....
Add palindromic inserted nucleotides to germline V sequences . The maximum number of palindromic insertions are appended to the germline V segments so that delV can index directly for number of nucleotides to delete from a segment . Sets the attribute cutV_genomic_CDR3_segs .
2,061
def generate_cutJ_genomic_CDR3_segs ( self ) : max_palindrome = self . max_delJ_palindrome self . cutJ_genomic_CDR3_segs = [ ] for CDR3_J_seg in [ x [ 1 ] for x in self . genJ ] : if len ( CDR3_J_seg ) < max_palindrome : self . cutJ_genomic_CDR3_segs += [ cutL_seq ( CDR3_J_seg , 0 , len ( CDR3_J_seg ) ) ] else : self ....
Add palindromic inserted nucleotides to germline J sequences . The maximum number of palindromic insertions are appended to the germline J segments so that delJ can index directly for number of nucleotides to delete from a segment . Sets the attribute cutJ_genomic_CDR3_segs .
2,062
def generate_cutD_genomic_CDR3_segs ( self ) : max_palindrome_L = self . max_delDl_palindrome max_palindrome_R = self . max_delDr_palindrome self . cutD_genomic_CDR3_segs = [ ] for CDR3_D_seg in [ x [ 1 ] for x in self . genD ] : if len ( CDR3_D_seg ) < min ( max_palindrome_L , max_palindrome_R ) : self . cutD_genomic_...
Add palindromic inserted nucleotides to germline D sequences . The maximum number of palindromic insertions are appended to the germline D segments so that delDl and delDr can index directly for number of nucleotides to delete from a segment . Sets the attribute cutD_genomic_CDR3_segs .
2,063
def verify_and_fill_address_paths_from_bip32key ( address_paths , master_key , network ) : assert network , network wallet_obj = Wallet . deserialize ( master_key , network = network ) address_paths_cleaned = [ ] for address_path in address_paths : path = address_path [ 'path' ] input_address = address_path [ 'address'...
Take address paths and verifies their accuracy client - side .
2,064
def run ( self , lam , initial_values = None ) : if initial_values is not None : if self . k == 0 and self . trails is not None : betas , zs , us = initial_values else : betas , us = initial_values else : if self . k == 0 and self . trails is not None : betas = [ np . zeros ( self . num_nodes , dtype = 'double' ) for _...
Run the graph - fused logit lasso with a fixed lambda penalty .
2,065
def data_log_likelihood(self, successes, trials, beta):
    """Log-likelihood of binomial observations under a logistic link.

    The success probability is sigmoid(beta); the per-observation
    binomial log-pmfs are summed into a single scalar.
    """
    prob = 1.0 / (1.0 + np.exp(-beta))
    return binom.logpmf(successes, trials, prob).sum()
Calculates the log - likelihood of a Polya tree bin given the beta values .
2,066
def spawn_worker(params):
    """Entry point for a worker process.

    Must stay a module-level function so multiprocessing can pickle it.
    Configures logging, then runs a Worker to completion.
    """
    setup_logging(params)
    log.info("Adding worker: idx=%s\tconcurrency=%s\tresults=%s",
             params.worker_index, params.concurrency, params.report)
    worker = Worker(params)
    worker.start()
    worker.join()
This method has to be module level function
2,067
def create_plateaus ( data , edges , plateau_size , plateau_vals , plateaus = None ) : nodes = set ( edges . keys ( ) ) if plateaus is None : plateaus = [ ] for i in range ( len ( plateau_vals ) ) : if len ( nodes ) == 0 : break node = np . random . choice ( list ( nodes ) ) nodes . remove ( node ) plateau = [ node ] a...
Creates plateaus of constant value in the data .
2,068
def pretty_str(p, decimal_places=2, print_zero=True, label_columns=False):
    """Pretty-print a vector (1-D) or matrix (2-D) array as a string.

    Raises Exception for arrays of any other rank.
    """
    rank = len(p.shape)
    if rank == 1:
        return vector_str(p, decimal_places, print_zero)
    if rank == 2:
        return matrix_str(p, decimal_places, print_zero, label_columns)
    raise Exception('Invalid array with shape {0}'.format(p.shape))
Pretty - print a matrix or vector .
2,069
def matrix_str(p, decimal_places=2, print_zero=True, label_columns=False):
    """Pretty-print a matrix, one row per line, optionally prefixing row indices."""
    rows = []
    for i, row in enumerate(p):
        prefix = str(i) if label_columns else ''
        rows.append(prefix + vector_str(row, decimal_places, print_zero))
    return '[{0}]'.format("\n ".join(rows))
Pretty - print the matrix .
2,070
def vector_str(p, decimal_places=2, print_zero=True):
    """Pretty-print a vector's values, comma-separated inside brackets.

    Zeros are rendered as a single space when print_zero is False.
    """
    fmt = '{0:.' + str(decimal_places) + 'f}'
    def render(value):
        if not print_zero and value == 0:
            return ' '
        return fmt.format(value)
    return '[{0}]'.format(", ".join(render(a) for a in p))
Pretty - print the vector values .
2,071
def nearly_unique(arr, rel_tol=1e-4, verbose=0):
    """Return the values of *arr* that differ from all previously kept values.

    NOTE(review): despite the name, rel_tol is applied as an absolute
    tolerance, and verbose is unused -- both kept for interface
    compatibility. The first element is always kept.
    """
    kept = np.array([arr[0]])
    for value in arr:
        if np.abs(kept - value).min() > rel_tol:
            kept = np.append(kept, value)
    return kept
Heuristic method to return the uniques within some precision in a numpy array
2,072
def get_delta(D, k):
    """Build the k-th order trend-filtering matrix from the oriented incidence matrix D.

    Alternates multiplying by D.T and D (starting with D.T), k times.
    Raises Exception for negative k.
    """
    if k < 0:
        raise Exception('k must be at least 0th order.')
    result = D
    for step in range(k):
        factor = D.T if step % 2 == 0 else D
        result = factor.dot(result)
    return result
Calculate the k - th order trend filtering matrix given the oriented edge incidence matrix and the value of k .
2,073
def decompose_delta ( deltak ) : if not isspmatrix_coo ( deltak ) : deltak = coo_matrix ( deltak ) dk_rows = deltak . shape [ 0 ] dk_rowbreaks = np . cumsum ( deltak . getnnz ( 1 ) , dtype = "int32" ) dk_cols = deltak . col . astype ( 'int32' ) dk_vals = deltak . data . astype ( 'double' ) return dk_rows , dk_rowbreaks...
Decomposes the k - th order trend filtering matrix into a c - compatible set of arrays .
2,074
def hasMZSignature(self, rd):
    """Return True when the reader's first two bytes are the 'MZ' DOS signature."""
    rd.setOffset(0)
    return rd.read(2) == "MZ"
Check for MZ signature .
2,075
def hasPESignature(self, rd):
    """Return True when the bytes at e_lfanew (little-endian dword at 0x3c) are 'PE'."""
    rd.setOffset(0)
    e_lfanew = unpack("<L", rd.readAt(0x3c, 4))[0]
    return rd.readAt(e_lfanew, 2) == "PE"
Check for PE signature .
2,076
def validate ( self ) : if self . dosHeader . e_magic . value != consts . MZ_SIGNATURE : raise excep . PEException ( "Invalid MZ signature. Found %d instead of %d." % ( self . dosHeader . magic . value , consts . MZ_SIGNATURE ) ) if self . dosHeader . e_lfanew . value > len ( self ) : raise excep . PEException ( "Inval...
Performs validations over some fields of the PE structure to determine if the loaded file has a valid PE format .
2,077
def readFile(self, pathToFile):
    """Return the full binary contents of the file at *pathToFile*.

    Uses a context manager so the handle is closed even if read() raises
    (the original closed it only on the success path).
    """
    with open(pathToFile, "rb") as fd:
        return fd.read()
Returns data from a file .
2,078
def _getPaddingDataToSectionOffset ( self ) : start = self . _getPaddingToSectionOffset ( ) end = self . sectionHeaders [ 0 ] . pointerToRawData . value - start return self . _data [ start : start + end ]
Returns the data between the last section header and the beginning of data from the first section .
2,079
def _getSignature ( self , readDataInstance , dataDirectoryInstance ) : signature = "" if readDataInstance is not None and dataDirectoryInstance is not None : securityDirectory = dataDirectoryInstance [ consts . SECURITY_DIRECTORY ] if ( securityDirectory . rva . value and securityDirectory . size . value ) : readDataI...
Returns the digital signature within a digital signed PE file .
2,080
def _getOverlay ( self , readDataInstance , sectionHdrsInstance ) : if readDataInstance is not None and sectionHdrsInstance is not None : try : offset = sectionHdrsInstance [ - 1 ] . pointerToRawData . value + sectionHdrsInstance [ - 1 ] . sizeOfRawData . value readDataInstance . setOffset ( offset ) except excep . Wro...
Returns the overlay data from the PE file .
2,081
def getOffsetFromRva(self, rva):
    """Convert an RVA to a file offset.

    When the RVA falls inside a known section, the offset is rebased
    through that section's pointerToRawData; otherwise the RVA itself is
    returned. (The original docstring had the direction reversed.)
    """
    section = self.getSectionByRva(rva)
    if section == -1:
        return rva
    header = self.sectionHeaders[section]
    return rva - header.virtualAddress.value + header.pointerToRawData.value
Converts an RVA to a file offset .
2,082
def getRvaFromOffset(self, offset):
    """Convert a file offset to an RVA.

    Bug fix: the original tested ``if s:`` which treats section index 0
    as "not found"; we now compare against the -1 sentinel, matching
    getOffsetFromRva. Returns -1 when the offset lies in no section.
    """
    section = self.getSectionByOffset(offset)
    if section == -1:
        return -1
    header = self.sectionHeaders[section]
    return offset - header.pointerToRawData.value + header.virtualAddress.value
Converts a file offset to an RVA .
2,083
def getSectionByOffset(self, offset):
    """Return the index of the first section whose raw data ends past *offset*, or -1.

    NOTE(review): matches the original logic, which does not check that
    the offset is past the section's start.
    """
    for i, header in enumerate(self.sectionHeaders):
        end = header.pointerToRawData.value + header.sizeOfRawData.value
        if offset < end:
            return i
    return -1
Given an offset in the file tries to determine the section this offset belong to .
2,084
def getSectionIndexByName(self, name):
    """Return the index of the first section whose name contains *name*, or -1.

    Falsy names (None, empty string) always yield -1.
    """
    if not name:
        return -1
    for i, header in enumerate(self.sectionHeaders):
        if header.name.value.find(name) >= 0:
            return i
    return -1
Given a string representing a section name tries to find the section index .
2,085
def getSectionByRva ( self , rva ) : index = - 1 if rva < self . sectionHeaders [ 0 ] . virtualAddress . value : return index for i in range ( len ( self . sectionHeaders ) ) : fa = self . ntHeaders . optionalHeader . fileAlignment . value prd = self . sectionHeaders [ i ] . pointerToRawData . value srd = self . sectio...
Given a RVA in the file tries to determine the section this RVA belongs to .
2,086
def _getPaddingToSectionOffset ( self ) : return len ( str ( self . dosHeader ) + str ( self . dosStub ) + str ( self . ntHeaders ) + str ( self . sectionHeaders ) )
Returns the offset just past the last section header present in the PE file .
2,087
def fullLoad(self):
    """Parse every data directory declared in the optional header."""
    dataDirectory = self.ntHeaders.optionalHeader.dataDirectory
    self._parseDirectories(dataDirectory, self.PE_TYPE)
Parse all the directories in the PE file .
2,088
def _fixPe ( self ) : sizeOfImage = 0 for sh in self . sectionHeaders : sizeOfImage += sh . misc self . ntHeaders . optionaHeader . sizeoOfImage . value = self . _sectionAlignment ( sizeOfImage + 0x1000 )
Fixes the necessary fields in the PE file instance in order to create a valid PE32 . i . e . SizeOfImage .
2,089
def getDataAtRva(self, rva, size):
    """Return *size* bytes of file data located at the given RVA."""
    offset = self.getOffsetFromRva(rva)
    return self.getDataAtOffset(offset, size)
Gets binary data at a given RVA .
2,090
def getDataAtOffset(self, offset, size):
    """Return *size* bytes of the serialized PE image starting at *offset*."""
    image = str(self)
    return image[offset:offset + size]
Gets binary data at a given offset .
2,091
def _parseDelayImportDirectory(self, rva, size, magic=consts.PE32):
    """Parse the delay imports directory.

    NOTE(review): currently just returns the raw bytes at rva; no
    structured parsing is performed, and the magic parameter is unused.
    """
    return self.getDataAtRva(rva, size)
Parses the delay imports directory .
2,092
def _parseBoundImportDirectory ( self , rva , size , magic = consts . PE32 ) : data = self . getDataAtRva ( rva , size ) rd = utils . ReadData ( data ) boundImportDirectory = directories . ImageBoundImportDescriptor . parse ( rd ) for i in range ( len ( boundImportDirectory ) - 1 ) : if hasattr ( boundImportDirectory [...
Parses the bound import directory .
2,093
def _parseLoadConfigDirectory ( self , rva , size , magic = consts . PE32 ) : data = self . getDataAtRva ( rva , directories . ImageLoadConfigDirectory ( ) . sizeof ( ) ) rd = utils . ReadData ( data ) if magic == consts . PE32 : return directories . ImageLoadConfigDirectory . parse ( rd ) elif magic == consts . PE64 :...
Parses IMAGE_LOAD_CONFIG_DIRECTORY .
2,094
def _parseTlsDirectory ( self , rva , size , magic = consts . PE32 ) : data = self . getDataAtRva ( rva , size ) rd = utils . ReadData ( data ) if magic == consts . PE32 : return directories . TLSDirectory . parse ( rd ) elif magic == consts . PE64 : return directories . TLSDirectory64 . parse ( rd ) else : raise excep...
Parses the TLS directory .
2,095
def _parseRelocsDirectory ( self , rva , size , magic = consts . PE32 ) : data = self . getDataAtRva ( rva , size ) rd = utils . ReadData ( data ) relocsArray = directories . ImageBaseRelocation ( ) while rd . offset < size : relocEntry = directories . ImageBaseRelocationEntry . parse ( rd ) relocsArray . append ( relo...
Parses the relocation directory .
2,096
def get_addresses_on_both_chains ( wallet_obj , used = None , zero_balance = None ) : mpub = wallet_obj . serialize_b58 ( private = False ) wallet_name = get_blockcypher_walletname_from_mpub ( mpub = mpub , subchain_indices = [ 0 , 1 ] , ) wallet_addresses = get_wallet_addresses ( wallet_name = wallet_name , api_key = ...
Get addresses across both subchains based on the filter criteria passed in
2,097
def dump_all_keys_or_addrs ( wallet_obj ) : print_traversal_warning ( ) puts ( '\nDo you understand this warning?' ) if not confirm ( user_prompt = DEFAULT_PROMPT , default = False ) : puts ( colored . red ( 'Dump Cancelled!' ) ) return mpub = wallet_obj . serialize_b58 ( private = False ) if wallet_obj . private_key :...
Offline - enabled mechanism to dump addresses
2,098
def dump_selected_keys_or_addrs ( wallet_obj , used = None , zero_balance = None ) : if wallet_obj . private_key : content_str = 'private keys' else : content_str = 'addresses' if not USER_ONLINE : puts ( colored . red ( '\nInternet connection required, would you like to dump *all* %s instead?' % ( content_str , conten...
Works for both public key only or private key access
2,099
def dump_private_keys_or_addrs_chooser ( wallet_obj ) : if wallet_obj . private_key : puts ( 'Which private keys and addresses do you want?' ) else : puts ( 'Which addresses do you want?' ) with indent ( 2 ) : puts ( colored . cyan ( '1: Active - have funds to spend' ) ) puts ( colored . cyan ( '2: Spent - no funds to ...
Offline - enabled mechanism to dump everything