idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
3,800
def _pigpio_aio_command_ext(self, cmd, p1, p2, p3, extents, rl=True):
    """Run an extended pigpio socket command.

    cmd/p1/p2/p3 are packed as four unsigned 32-bit ints, followed by the
    raw bytes of each extent (str extents are encoded first).  The last
    word of the 16-byte reply is returned.
    """
    with (yield from self._lock):
        ext = bytearray(struct.pack('IIII', cmd, p1, p2, p3))
        for x in extents:
            if isinstance(x, str):
                ext.extend(_b(x))
            else:
                ext.extend(x)
        # BUG FIX: loop.sock_sendall is a coroutine; without `yield from`
        # it was never scheduled and the request was never sent.
        yield from self._loop.sock_sendall(self.s, ext)
        response = yield from self._loop.sock_recv(self.s, 16)
        _, res = struct.unpack('12sI', response)
        return res
Runs an extended pigpio socket command .
162
10
3,801
def store_script(self, script):
    """Store a script on the pigpio daemon for later execution.

    Returns the converted daemon reply, or 0 when `script` is empty.
    """
    if not len(script):
        return 0
    reply = yield from self._pigpio_aio_command_ext(
        _PI_CMD_PROC, 0, 0, len(script), [script])
    return _u2i(reply)
Store a script for later execution .
69
7
3,802
def run_script(self, script_id, params=None):
    """Run a stored script, optionally passing unsigned int parameters.

    Wire format: p1 = script id, p2 = 0, p3 = byte length of the packed
    parameter block, extension = params packed as unsigned 32-bit ints.
    """
    if params is None:
        nump, extents = 0, []
    else:
        packed = bytearray()
        for value in params:
            packed.extend(struct.pack("I", value))
        nump, extents = len(params), [packed]
    reply = yield from self._pigpio_aio_command_ext(
        _PI_CMD_PROCR, script_id, 0, nump * 4, extents)
    return _u2i(reply)
Runs a stored script .
158
6
3,803
def script_status(self, script_id):
    """Return the run status of a stored script plus parameters 0-9.

    Returns (status, params); on error the negative error code is
    returned as the status together with an empty params tuple.
    """
    res = yield from self._pigpio_aio_command(_PI_CMD_PROCP, script_id, 0)
    # Renamed from `bytes` (shadowed the builtin); `_u2i` used for
    # consistency with the sibling commands (original called bare `u2i`).
    count = _u2i(res)
    if count > 0:
        # FIXME: this should share the same receive loop as _rxbuf
        # BUG FIX: sock_recv returns immutable `bytes`, which has no
        # .extend(); accumulate into a bytearray instead.
        data = bytearray()
        while len(data) < count:
            chunk = yield from self._loop.sock_recv(self.s, count - len(data))
            data.extend(chunk)
        pars = struct.unpack('11i', _str(data))
        status = pars[0]
        params = pars[1:]
    else:
        status = count
        params = ()
    return status, params
Returns the run status of a stored script as well as the current values of parameters 0 to 9 .
185
20
3,804
def stop_script(self, script_id):
    """Stop a running stored script; returns the converted daemon reply."""
    return _u2i((yield from self._pigpio_aio_command(
        _PI_CMD_PROCS, script_id, 0)))
Stops a running script .
53
6
3,805
def delete_script(self, script_id):
    """Delete a stored script; returns the converted daemon reply."""
    return _u2i((yield from self._pigpio_aio_command(
        _PI_CMD_PROCD, script_id, 0)))
Deletes a stored script .
53
6
3,806
def clear_bank_1(self, bits):
    """Clear gpios 0-31 whose corresponding bit in `bits` is set."""
    return _u2i((yield from self._pigpio_aio_command(_PI_CMD_BC1, bits, 0)))
Clears gpios 0 - 31 if the corresponding bit in bits is set .
50
16
3,807
def set_bank_1(self, bits):
    """Set gpios 0-31 whose corresponding bit in `bits` is set."""
    return _u2i((yield from self._pigpio_aio_command(_PI_CMD_BS1, bits, 0)))
Sets gpios 0 - 31 if the corresponding bit in bits is set .
50
16
3,808
def set_mode(self, gpio, mode):
    """Set the mode of `gpio`; returns the converted daemon reply."""
    return _u2i((yield from self._pigpio_aio_command(
        _PI_CMD_MODES, gpio, mode)))
Sets the gpio mode .
52
7
3,809
def get_mode(self, gpio):
    """Return the current mode of `gpio`."""
    return _u2i((yield from self._pigpio_aio_command(_PI_CMD_MODEG, gpio, 0)))
Returns the gpio mode .
50
6
3,810
def write(self, gpio, level):
    """Set the output level of `gpio`; returns the converted daemon reply."""
    return _u2i((yield from self._pigpio_aio_command(
        _PI_CMD_WRITE, gpio, level)))
Sets the gpio level .
50
7
3,811
def camelcase2list(s, lower=False):
    """Split a CamelCase string into its component words.

    With lower=True the words are lower-cased.
    """
    words = re.findall(r'([A-Z][a-z0-9]+)', s)
    if lower:
        words = [word.lower() for word in words]
    return words
Converts a camelcase string to a list .
55
10
3,812
def get_route_param_names(endpoint):
    """Return the parameter names of the route, or {} for an unknown endpoint."""
    try:
        first_rule = next(current_app.url_map.iter_rules(endpoint))
    except KeyError:
        return {}
    return first_rule.arguments
Returns parameter names from the route .
44
7
3,813
def update_meta_info(self):
    """Extract metadata from this calibration object.

    Extends the superclass result with instrument/uuid/tags/type and,
    when available, the observing mode and origin block.
    """
    result = super(BaseStructuredCalibration, self).update_meta_info()
    result['instrument'] = self.instrument
    result['uuid'] = self.uuid
    result['tags'] = self.tags
    result['type'] = self.name()
    minfo = self.meta_info
    try:
        result['mode'] = minfo['mode_name']
        origin = minfo['origin']
        date_obs = origin['date_obs']
    except KeyError:
        # Incomplete metadata: fall back to an empty origin and the epoch.
        origin = {}
        date_obs = "1970-01-01T00:00:00.00"
    result['observation_date'] = conv.convert_date(date_obs)
    result['origin'] = origin
    return result
Extract metadata from myself
181
5
3,814
def gnuplot(script_name, args_dict=None, data=(), silent=True):
    """Call a Gnuplot script, passing it arguments and datasets.

    Parameters
    ----------
    script_name : path of the gnuplot script to run.
    args_dict : mapping of gnuplot variable names to values; str, bool,
        iterables and plain scalars are supported.
    data : datasets written to a temporary file exposed as variable `data`.
    silent : discard gnuplot's output when True.

    Returns the shell command string that was executed.
    """
    # BUG FIX: the previous mutable defaults (args_dict={}, data=[]) were
    # shared between calls, and the injected 'data' entry both leaked
    # across calls and mutated the caller's dict.  Copy instead.
    args_dict = dict(args_dict) if args_dict else {}
    gnuplot_command = 'gnuplot'
    if data:
        assert 'data' not in args_dict, 'Can\'t use \'data\' variable twice.'
        data_temp = _GnuplotDataTemp(*data)
        args_dict['data'] = data_temp.name
    if args_dict:
        gnuplot_command += ' -e "'
        for name, value in args_dict.items():
            gnuplot_command += name + '='
            if isinstance(value, str):
                gnuplot_command += '\'' + value + '\''
            elif isinstance(value, bool):
                # bool must be tested before the generic cases:
                # gnuplot has no True/False literals.
                gnuplot_command += '1' if value else '0'
            elif hasattr(value, '__iter__'):
                gnuplot_command += '\'' + ' '.join(str(v) for v in value) + '\''
            else:
                gnuplot_command += str(value)
            gnuplot_command += '; '
        gnuplot_command = gnuplot_command[:-1]  # drop trailing space
        gnuplot_command += '"'
    gnuplot_command += ' ' + script_name
    if silent:
        gnuplot_command += ' > /dev/null 2>&1'
    # NOTE(review): os.system with an interpolated string is shell-injection
    # prone if any argument comes from untrusted input.
    os.system(gnuplot_command)
    return gnuplot_command
Call a Gnuplot script passing it arguments and datasets .
344
12
3,815
def gnuplot_2d(x, y, filename, title='', x_label='', y_label=''):
    """Produce a general 2D line+point plot of y vs x into a PNG file.

    A '.png' extension is appended when missing.  Rendering is delegated
    to the module-level gnuplot() helper via temporary script/data files.
    """
    _, ext = os.path.splitext(filename)
    if ext != '.png':
        filename += '.png'
    # NOTE(review): the line breaks of this embedded script were lost in
    # transport; each gnuplot command must sit on its own line — confirm
    # against the original file.
    gnuplot_cmds = '''
set datafile separator ","
set term pngcairo size 30cm,25cm
set out filename
unset key
set border lw 1.5
set grid lt -1 lc rgb "gray80"
set title title
set xlabel x_label
set ylabel y_label
plot filename_data u 1:2 w lp pt 6 ps 0.5
'''
    scr = _GnuplotScriptTemp(gnuplot_cmds)
    data = _GnuplotDataTemp(x, y)
    args_dict = {
        'filename': filename,
        'filename_data': data.name,
        'title': title,
        'x_label': x_label,
        'y_label': y_label,
    }
    gnuplot(scr.name, args_dict)
Function to produce a general 2D plot .
233
9
3,816
def gnuplot_3d_matrix(z_matrix, filename, title='', x_label='', y_label=''):
    """Produce a general 3D (map-view pm3d) plot from a 2D matrix.

    A '.png' extension is appended when missing.  Rendering is delegated
    to the module-level gnuplot() helper via temporary script/data files.
    """
    _, ext = os.path.splitext(filename)
    if ext != '.png':
        filename += '.png'
    # NOTE(review): the line breaks of this embedded script were lost in
    # transport; each gnuplot command must sit on its own line — confirm
    # against the original file.
    gnuplot_cmds = '''
set datafile separator ","
set term pngcairo size 30cm,25cm
set out filename
unset key
set border lw 1.5
set view map
set title title
set xlabel x_label
set ylabel y_label
splot filename_data matrix w pm3d
'''
    scr = _GnuplotScriptTemp(gnuplot_cmds)
    data = _GnuplotDataZMatrixTemp(z_matrix)
    args_dict = {
        'filename': filename,
        'filename_data': data.name,
        'title': title,
        'x_label': x_label,
        'y_label': y_label,
    }
    gnuplot(scr.name, args_dict)
Function to produce a general 3D plot from a 2D matrix .
223
14
3,817
def read(self, skip=[], goto_metal=None, goto_reaction=None):
    """Walk the user folder tree and yield reaction key/value pairs.

    skip: folder names to omit from the walk.
    goto_metal / goto_reaction: skip ahead until the named bulk-metal /
    reaction folder is reached, pruning all earlier subtrees.
    NOTE(review): the mutable default `skip=[]` is shared between calls;
    left unchanged here to preserve behavior.
    """
    if len(skip) > 0:
        for skip_f in skip:
            self.omit_folders.append(skip_f)
    """ If publication level is input"""
    if os.path.isfile(self.data_base + '/publication.txt'):
        self.user_base_level -= 1
    self.stdout.write('---------------------- \n')
    self.stdout.write('Starting folderreader! \n')
    self.stdout.write('---------------------- \n')
    found_reaction = False
    for root, dirs, files in os.walk(self.user_base):
        for omit_folder in self.omit_folders:  # user specified omit_folder
            if omit_folder in dirs:
                dirs.remove(omit_folder)
        # Depth below the user base determines what this folder encodes.
        level = len(root.split("/")) - self.user_base_level
        if level == self.pub_level:
            self.read_pub(root)
        if level == self.DFT_level:
            self.DFT_code = os.path.basename(root)
        if level == self.XC_level:
            self.DFT_functional = os.path.basename(root)
            self.gas_folder = root + '/gas/'
            self.read_gas()
        if level == self.reference_level:
            if 'gas' in os.path.basename(root):
                continue
            if goto_metal is not None:
                if os.path.basename(root) == goto_metal:
                    goto_metal = None  # arrived: stop skipping
                else:
                    dirs[:] = []  # don't read any sub_dirs
                    continue
            self.read_bulk(root)
        if level == self.slab_level:
            self.read_slab(root)
        if level == self.reaction_level:
            if goto_reaction is not None:
                if os.path.basename(root) == goto_reaction:
                    goto_reaction = None  # arrived: stop skipping
                else:
                    dirs[:] = []  # don't read any sub_dirs
                    continue
            self.read_reaction(root)
        if level == self.final_level:
            self.root = root
            self.read_energies(root)
            if self.key_value_pairs_reaction is not None:
                yield self.key_value_pairs_reaction
Get reactions from folders .
523
5
3,818
def slice_create(center, block, start=0, stop=None):
    """Return a slice over a symmetric region of half-width `block`
    around `center`, clipped to [start, stop)."""
    lower = max(start, coor_to_pix_1d(center - block))
    upper = coor_to_pix_1d(center + block) + 1
    if stop is not None:
        upper = min(upper, stop)
    return slice(lower, upper, 1)
Return an slice with a symmetric region around center .
92
11
3,819
def image_box(center, shape, box):
    """Create a region of size `box` around `center` in an image of
    `shape`, as a tuple of per-axis slices."""
    slices = [
        slice_create(ctr, half, stop=axis_len)
        for ctr, axis_len, half in zip(center, shape, box)
    ]
    return tuple(slices)
Create a region of size box around a center in a image of shape .
43
15
3,820
def expand_region(tuple_of_s, a, b, start=0, stop=None):
    """Apply expand_slice to every slice in `tuple_of_s`."""
    expanded = [expand_slice(s, a, b, start=start, stop=stop)
                for s in tuple_of_s]
    return tuple(expanded)
Apply expend_slice on a tuple of slices
54
9
3,821
def adapt_obsres(self, obsres):
    """Adapt an observation result after its files were copied:
    strip directory components so filenames resolve in the work dir."""
    _logger.debug('adapt observation result for work dir')
    for frame in obsres.images:
        frame.filename = os.path.basename(frame.filename)
    return obsres
Adapt obsres after file copy
57
6
3,822
def store(self, completed_task, resultsdir):
    """Store the values of the completed task inside `resultsdir`."""
    with working_directory(resultsdir):
        _logger.info('storing result')
        result = completed_task.store(self)
    return result
Store the values of the completed task .
43
8
3,823
def validate(self, obj):
    """Validate that `obj` is convertible to the internal representation.

    Returns True on success; raises ValidationError otherwise.
    """
    if isinstance(obj, self.internal_type):
        return True
    raise ValidationError(obj, self.internal_type)
Validate convertibility to internal representation
37
7
3,824
def delete_user(self, user):
    """Delete a database user and all of its data (schema + role).

    Only allowed when connected as an administrative account; the
    'public' pseudo-role can never be dropped.
    NOTE(review): identifiers are interpolated with str.format, so
    `user` must come from a trusted source.
    """
    assert self.user == 'catroot' or self.user == 'postgres'
    assert not user == 'public'
    con = self.connection or self._connect()
    cur = con.cursor()
    cur.execute('DROP SCHEMA {user} CASCADE;'.format(user=user))
    cur.execute('REVOKE USAGE ON SCHEMA public FROM {user};'.format(user=user))
    cur.execute('REVOKE SELECT ON ALL TABLES IN SCHEMA public FROM {user};'.format(user=user))
    cur.execute('DROP ROLE {user};'.format(user=user))
    self.stdout.write('REMOVED USER {user}\n'.format(user=user))
    # Commit/close only when we opened the connection ourselves.
    if self.connection is None:
        con.commit()
        con.close()
    return self
Delete user and all data
203
5
3,825
def truncate_schema(self):
    """Delete all data in the schema.  Only for test use!

    Guarded to run against localhost only; commits and closes the
    connection unconditionally.
    """
    assert self.server == 'localhost'
    con = self.connection or self._connect()
    self._initialize(con)
    cur = con.cursor()
    cur.execute('DELETE FROM publication;')
    cur.execute('TRUNCATE systems CASCADE;')
    con.commit()
    con.close()
    return
Will delete all data in schema . Only for test use!
85
12
3,826
def write(self, *args, **kwargs):
    """Call the stream's write method without a line break at the end."""
    return self.stream.write(*args, ending="", **kwargs)
Call the stream's write method without linebreaks at line endings .
35
13
3,827
def _update ( self ) : if self . strains . size and self . strains . size == self . values . size : x = np . log ( self . strains ) y = self . values if x . size < 4 : self . _interpolater = interp1d ( x , y , 'linear' , bounds_error = False , fill_value = ( y [ 0 ] , y [ - 1 ] ) ) else : self . _interpolater = interp1d ( x , y , 'cubic' , bounds_error = False , fill_value = ( y [ 0 ] , y [ - 1 ] ) )
Initialize the 1D interpolation .
138
8
3,828
def is_nonlinear(self):
    """True if modulus-reduction or damping is a NonlinearProperty."""
    candidates = (self.mod_reduc, self.damping)
    return any(isinstance(prop, NonlinearProperty) for prop in candidates)
If nonlinear properties are specified .
38
7
3,829
def relative_error(self):
    """Relative error (percent) between the last two iterations.

    Returns 0 on the first iteration and inf on division by zero.
    """
    if self.previous is None:
        return 0
    # FIXME
    # Use the maximum strain value -- this is important for error
    # calculation with frequency dependent properties
    # prev = np.max(self.previous)
    # value = np.max(self.value)
    try:
        return 100. * np.max((self.previous - self.value) / self.value)
    except ZeroDivisionError:
        return np.inf
The relative error in percent between the two iterations .
103
10
3,830
def duplicate(cls, other):
    """Create a copy of `other` with the same soil type, thickness and
    shear-wave velocity."""
    attrs = (other.soil_type, other.thickness, other.shear_vel)
    return cls(*attrs)
Create a copy of the layer .
30
7
3,831
def damping(self):
    """Strain-compatible damping.

    Unwraps an iteration-dependent `.value` attribute when present;
    otherwise the stored damping is returned as-is.
    """
    stored = self._damping
    try:
        return stored.value
    except AttributeError:
        return stored
Strain - compatible damping .
32
7
3,832
def auto_discretize(self, max_freq=50., wave_frac=0.2):
    """Subdivide nonlinear layers so each is at most `wave_frac` of the
    wavelength at `max_freq`; linear layers are kept whole."""
    layers = []
    for layer in self:
        if not layer.soil_type.is_nonlinear:
            layers.append(layer)
            continue
        opt_thickness = layer.shear_vel / max_freq * wave_frac
        count = int(np.ceil(layer.thickness / opt_thickness))
        thickness = layer.thickness / count
        layers.extend(
            Layer(layer.soil_type, thickness, layer.shear_vel)
            for _ in range(count))
    return Profile(layers, wt_depth=self.wt_depth)
Subdivide the layers to capture strain variation .
153
10
3,833
def location(self, wave_field, depth=None, index=None):
    """Create a Location within the profile for a given depth OR index.

    Exactly one of `depth` / `index` may be given; `wave_field` may be
    a WaveField member or its name.
    """
    if not isinstance(wave_field, WaveField):
        wave_field = WaveField[wave_field]
    if index is None and depth is not None:
        # Find the layer whose [depth, depth_base) interval contains `depth`.
        for i, layer in enumerate(self[:-1]):
            if layer.depth <= depth < layer.depth_base:
                depth_within = depth - layer.depth
                break
        else:
            # Bedrock
            i = len(self) - 1
            layer = self[-1]
            depth_within = 0
    elif index is not None and depth is None:
        layer = self[index]
        i = self.index(layer)
        depth_within = 0
    else:
        # Both or neither given: ambiguous request.
        raise NotImplementedError
    return Location(i, layer, wave_field, depth_within)
Create a Location for a specific depth .
169
8
3,834
def time_average_vel(self, depth):
    """Calculate the time-average shear-wave velocity down to `depth`."""
    depths = [layer.depth for layer in self]
    # Final layer is infinite and is treated separately
    travel_times = [0] + [layer.travel_time for layer in self[:-1]]
    # If needed, extend through the half-space down to the requested depth
    if depths[-1] < depth:
        depths.append(depth)
        travel_times.append((depth - self[-1].depth) / self[-1].shear_vel)
    total_travel_times = np.cumsum(travel_times)
    # Interpolate the total travel time at the depth of interest
    return depth / np.interp(depth, depths, total_travel_times)
Calculate the time - average velocity .
171
9
3,835
def simplified_rayliegh_vel(self):
    """Simplified Rayleigh wave velocity of the site.

    Computed from the fundamental-mode frequency estimated via the
    layer mode shape; returns 4 * total thickness / fundamental period.
    """
    # FIXME: What if last layer has no thickness?
    thicks = np.array([l.thickness for l in self])
    depths_mid = np.array([l.depth_mid for l in self])
    shear_vels = np.array([l.shear_vel for l in self])
    mode_incr = depths_mid * thicks / shear_vels ** 2
    # Mode shape is computed as the sumation from the base of
    # the profile. Need to append a 0 for the roll performed in the next
    # step
    shape = np.r_[np.cumsum(mode_incr[::-1])[::-1], 0]
    freq_fund = np.sqrt(
        4 * np.sum(thicks * depths_mid ** 2 / shear_vels ** 2) /
        np.sum(
            thicks *
            # Roll is used to offset the mode_shape so that the sum
            # can be calculated for two adjacent layers
            np.sum(np.c_[shape, np.roll(shape, -1)], axis=1)[:-1] ** 2
        )
    )
    period_fun = 2 * np.pi / freq_fund
    rayleigh_vel = 4 * thicks.sum() / period_fun
    return rayleigh_vel
Simplified Rayleigh velocity of the site .
301
11
3,836
def iter_thickness(self, depth_total):
    """Iterate over randomly varied layer thicknesses down to `depth_total`.

    Yields (thickness, depth_mid) tuples drawn from a non-homogeneous
    Poisson process parameterized by self.c_1 .. self.c_3; the final
    layer is clipped so the stack ends exactly at `depth_total`.
    """
    total = 0
    depth_prev = 0
    while depth_prev < depth_total:
        # Add a random exponential increment
        total += np.random.exponential(1.0)
        # Convert between x and depth using the inverse of \Lambda(t)
        depth = np.power(
            (self.c_2 * total) / self.c_3 + total / self.c_3 +
            np.power(self.c_1, self.c_2 + 1),
            1 / (self.c_2 + 1)
        ) - self.c_1
        thickness = depth - depth_prev
        # Clip the last layer at the requested total depth.
        if depth > depth_total:
            thickness = (depth_total - depth_prev)
            depth = depth_prev + thickness
        depth_mid = (depth_prev + depth) / 2
        yield thickness, depth_mid
        depth_prev = depth
Iterate over the varied thicknesses .
189
8
3,837
def _calc_covar_matrix(self, profile):
    """Calculate the tridiagonal covariance matrix for `profile`.

    Adjacent-layer correlations form the off-diagonals; the (scaled)
    variances form the main diagonal.
    """
    corr = self._calc_corr(profile)
    std = self._calc_ln_std(profile)
    # Modify the standard deviation by the truncated norm scale
    std *= randnorm.scale
    var = std ** 2
    # Off-diagonal terms couple each pair of adjacent layers.
    covar = corr * std[:-1] * std[1:]
    # Main diagonal is the variance
    mat = diags([covar, var, covar], [-1, 0, 1]).toarray()
    return mat
Calculate the covariance matrix .
124
8
3,838
def _calc_corr ( self , profile ) : depth = np . array ( [ l . depth_mid for l in profile [ : - 1 ] ] ) thick = np . diff ( depth ) depth = depth [ 1 : ] # Depth dependent correlation corr_depth = ( self . rho_200 * np . power ( ( depth + self . rho_0 ) / ( 200 + self . rho_0 ) , self . b ) ) corr_depth [ depth > 200 ] = self . rho_200 # Thickness dependent correlation corr_thick = self . rho_0 * np . exp ( - thick / self . delta ) # Final correlation # Correlation coefficient corr = ( 1 - corr_depth ) * corr_thick + corr_depth # Bedrock is perfectly correlated with layer above it corr = np . r_ [ corr , 1 ] return corr
Compute the adjacent - layer correlations
199
7
3,839
def generic_model(cls, site_class, **kwds):
    """Build an instance from the generic parameters for `site_class`,
    overridden by any explicit keyword arguments."""
    params = {**cls.PARAMS[site_class], **kwds}
    return cls(**params)
Use generic model parameters based on site class .
50
9
3,840
def calc_std_damping(damping):
    """Calculate the standard deviation as a function of damping (decimal)."""
    damping = np.asarray(damping).astype(float)
    percent = 100 * damping
    return (np.exp(-5) + np.exp(-0.25) * np.sqrt(percent)) / 100.
Calculate the standard deviation as a function of damping in decimal .
66
15
3,841
def _append_to(self, field, element):
    """Append `element` to the list field `field` of the record,
    ignoring empty values (as defined by EMPTIES)."""
    if element in EMPTIES:
        return
    self.obj.setdefault(field, []).append(element)
Append the element to the field of the record .
46
11
3,842
def add_name_variant(self, name):
    """Add a name variant to the record's `name` field."""
    self._ensure_field('name', {})
    variants = self.obj['name'].setdefault('name_variants', [])
    variants.append(name)
Add name variant .
55
4
3,843
def add_native_name(self, name):
    """Add a native name to the record's `name` field."""
    self._ensure_field('name', {})
    natives = self.obj['name'].setdefault('native_names', [])
    natives.append(name)
Add native name .
53
4
3,844
def add_previous_name(self, name):
    """Add a previous name to the record's `name` field."""
    self._ensure_field('name', {})
    previous = self.obj['name'].setdefault('previous_names', [])
    previous.append(name)
Add previous name .
55
4
3,845
def add_email_address(self, email, hidden=None):
    """Add an email address, or update `hidden` on an existing entry."""
    existing_emails = get_value(self.obj, 'email_addresses', [])
    found_email = next(
        (entry for entry in existing_emails if entry.get('value') == email),
        None)
    if found_email is None:
        new_email = {'value': email}
        if hidden is not None:
            new_email['hidden'] = hidden
        self._append_to('email_addresses', new_email)
    elif hidden is not None:
        found_email['hidden'] = hidden
Add email address .
144
4
3,846
def add_url(self, url, description=None):
    """Add a personal website URL with an optional description."""
    entry = {'value': url}
    if description:
        entry['description'] = description
    self._append_to('urls', entry)
Add a personal website .
49
5
3,847
def add_project(self, name, record=None, start_date=None, end_date=None,
                curated=False, current=False):
    """Add an experiment/project the person worked on; the membership
    list is kept sorted by work priority (most relevant first)."""
    entry = {'name': name}
    if start_date:
        entry['start_date'] = normalize_date(start_date)
    if end_date:
        entry['end_date'] = normalize_date(end_date)
    if record:
        entry['record'] = record
    entry['curated_relation'] = curated
    entry['current'] = current
    self._append_to('project_membership', entry)
    self.obj['project_membership'].sort(
        key=self._get_work_priority_tuple, reverse=True)
Add an experiment that the person worked on .
194
9
3,848
def add_advisor(self, name, ids=None, degree_type=None, record=None,
                curated=False):
    """Add an advisor entry, normalizing the supplied name."""
    entry = {'name': normalize_name(name)}
    if ids:
        entry['ids'] = force_list(ids)
    if degree_type:
        entry['degree_type'] = degree_type
    if record:
        entry['record'] = record
    entry['curated_relation'] = curated
    self._append_to('advisors', entry)
Add an advisor .
140
4
3,849
def add_private_note(self, note, source=None):
    """Add a private note with an optional source."""
    entry = {'value': note}
    if source:
        entry['source'] = source
    self._append_to('_private_notes', entry)
Add a private note .
53
5
3,850
def _compute_value ( power , wg ) : if power not in wg : p1 , p2 = power # y power if p1 == 0 : yy = wg [ ( 0 , - 1 ) ] wg [ power ] = numpy . power ( yy , p2 / 2 ) . sum ( ) / len ( yy ) # x power else : xx = wg [ ( - 1 , 0 ) ] wg [ power ] = numpy . power ( xx , p1 / 2 ) . sum ( ) / len ( xx ) return wg [ power ]
Return the weight corresponding to single power .
128
8
3,851
def _compute_weight(powers, wg):
    """Return the weight for `powers`, split into pure-x and pure-y parts."""
    x_part = _compute_value((powers[0], 0), wg)
    y_part = _compute_value((0, powers[1]), wg)
    return x_part * y_part
Return the weight corresponding to given powers .
73
8
3,852
def imsurfit(data, order, output_fit=False):
    """Fit a bidimensional polynomial of given `order` to an image.

    Returns a 1-tuple with the polynomial coefficients, or
    (coefficients, fitted surface) when `output_fit` is True.
    """
    # we create a grid with the same number of points
    # between -1 and 1
    c0 = complex(0, data.shape[0])
    c1 = complex(0, data.shape[1])
    xx, yy = numpy.ogrid[-1:1:c0, -1:1:c1]
    ncoeff = (order + 1) * (order + 2) // 2
    powerlist = list(_powers(order))
    # Array with ncoff x,y moments of the data
    bb = numpy.zeros(ncoeff)
    # Moments
    for idx, powers in enumerate(powerlist):
        p1, p2 = powers
        bb[idx] = (data * xx ** p1 * yy ** p2).sum() / data.size
    # Now computing aa matrix
    # it contains \sum x^a y^b
    # most of the terms are zero
    # only those with a and b even remain
    # aa is symmetric
    x = xx[:, 0]
    y = yy[0]
    # weights are stored so we compute them only once
    wg = {(0, 0): 1, (-1, 0): x ** 2, (0, -1): y ** 2}
    # wg[(2,0)] = wg[(-1,0)].sum() / len(x)
    # wg[(0,2)] = wg[(0,-1)].sum() / len(y)
    aa = numpy.zeros((ncoeff, ncoeff))
    for j, ci in enumerate(powerlist):
        for i, ri in enumerate(powerlist[j:]):
            p1 = ci[0] + ri[0]
            p2 = ci[1] + ri[1]
            if p1 % 2 == 0 and p2 % 2 == 0:
                # val = (x ** p1).sum() / len(x) * (y ** p2).sum() / len(y)
                val = _compute_weight((p1, p2), wg)
                aa[j, i + j] = val
    # Making symmetric the array
    aa += numpy.triu(aa, k=1).T
    polycoeff = numpy.linalg.solve(aa, bb)
    if output_fit:
        # Evaluate the fitted polynomial on the same grid.
        index = 0
        result = 0
        for o in range(order + 1):
            for b in range(o + 1):
                a = o - b
                result += polycoeff[index] * (xx ** a) * (yy ** b)
                index += 1
        return polycoeff, result
    return (polycoeff,)
Fit a bidimensional polynomial to an image .
609
11
3,853
def _yql_query ( yql ) : url = _YAHOO_BASE_URL . format ( urlencode ( { 'q' : yql } ) ) # send request _LOGGER . debug ( "Send request to url: %s" , url ) try : request = urlopen ( url ) rawData = request . read ( ) # parse jason data = json . loads ( rawData . decode ( "utf-8" ) ) _LOGGER . debug ( "Query data from yahoo: %s" , str ( data ) ) return data . get ( "query" , { } ) . get ( "results" , { } ) except ( urllib . error . HTTPError , urllib . error . URLError ) : _LOGGER . info ( "Can't fetch data from Yahoo!" ) return None
Fetch data from Yahoo! Return a dict if successful or None .
183
15
3,854
def get_woeid(lat, lon):
    """Ask Yahoo! for the WOEID of a GPS position; None on failure."""
    response = _yql_query(_YQL_WOEID.format(lat, lon))
    if response is None:
        _LOGGER.error("No woid is received!")
        return None
    return response.get("place", {}).get("woeid", None)
Ask Yahoo! for the woeid of a GPS position .
97
13
3,855
def updateWeather(self):
    """Fetch weather data from Yahoo!; returns True on success."""
    yql = _YQL_WEATHER.format(self._woeid, self._unit)
    response = _yql_query(yql)
    if response is not None and "channel" in response:
        self._data = response["channel"]
        return True
    _LOGGER.error("Fetch no weather data Yahoo!")
    self._data = {}
    return False
Fetch weather data from Yahoo! True if success .
101
11
3,856
def get_energies(atoms_list):
    """Potential energy for a list of atoms objects.

    Returns a scalar for a single-element list, otherwise a list of
    energies in input order.

    Raises
    ------
    ValueError for an empty list.
    """
    if not atoms_list:
        # BUG FIX: the original fell through both branches and silently
        # returned None for an empty list.
        raise ValueError('atoms_list must not be empty')
    if len(atoms_list) == 1:
        return atoms_list[0].get_potential_energy()
    return [atoms.get_potential_energy() for atoms in atoms_list]
Potential energy for a list of atoms objects
77
9
3,857
def check_in_ase(atoms, ase_db, energy=None):
    """Check if an entry is already in the ASE db.

    Matches rows on identical potential energy AND chemical formula;
    returns (id, unique_id) of the first match, or (None, None).
    """
    db_ase = ase.db.connect(ase_db)
    if energy is None:
        energy = atoms.get_potential_energy()
    formula = get_chemical_formula(atoms)
    rows = db_ase.select(energy=energy)
    n = 0
    ids = []
    for row in rows:
        if formula == row.formula:
            n += 1
            ids.append(row.id)
    if n > 0:
        # NOTE(review): `id` shadows the builtin; left unchanged here.
        id = ids[0]
        unique_id = db_ase.get(id)['unique_id']
        return id, unique_id
    else:
        return None, None
Check if entry is already in ASE db
150
10
3,858
def task(name, deps=None, fn=None):
    """Define a new task.

    `deps` may be passed as the task function itself (two-argument form).
    Empty tasks are rejected with a log message.
    """
    if callable(deps):
        fn, deps = deps, None
    if deps or fn:
        tasks[name] = [fn, deps]
    else:
        logger.log(logger.red("The task '%s' is empty" % name))
Define a new task .
74
6
3,859
def sum_of_gaussian_factory(N):
    """Return a Fittable1DModel subclass: sum of N Gaussians + constant background.

    The generated model exposes amplitude_i / center_i / stddev_i
    parameters for i in 0..N-1 plus `background`, with an analytic
    parameter derivative (fit_deriv).
    """
    name = "SumNGauss%d" % N
    attr = {}
    # parameters
    for i in range(N):
        key = "amplitude_%d" % i
        attr[key] = Parameter(key)
        key = "center_%d" % i
        attr[key] = Parameter(key)
        key = "stddev_%d" % i
        attr[key] = Parameter(key)
    attr['background'] = Parameter('background', default=0.0)

    def fit_eval(self, x, *args):
        # args layout: [amp_0, cen_0, std_0, ..., amp_{N-1}, ..., background]
        result = x * 0 + args[-1]
        for i in range(N):
            result += args[3 * i] * np.exp(
                -0.5 * (x - args[3 * i + 1]) ** 2 / args[3 * i + 2] ** 2)
        return result

    attr['evaluate'] = fit_eval

    def deriv(self, x, *args):
        # Jacobian w.r.t. each parameter; the last row (background) is ones.
        d_result = np.ones(((3 * N + 1), len(x)))
        for i in range(N):
            d_result[3 * i] = (
                np.exp(-0.5 / args[3 * i + 2] ** 2 *
                       (x - args[3 * i + 1]) ** 2))
            d_result[3 * i + 1] = (
                args[3 * i] * d_result[3 * i] *
                (x - args[3 * i + 1]) / args[3 * i + 2] ** 2)
            d_result[3 * i + 2] = (
                args[3 * i] * d_result[3 * i] *
                (x - args[3 * i + 1]) ** 2 / args[3 * i + 2] ** 3)
        return d_result

    attr['fit_deriv'] = deriv
    klass = type(name, (Fittable1DModel,), attr)
    return klass
Return a model of the sum of N Gaussians and a constant background .
447
16
3,860
def Debounce(threshold=100):
    """Simple debouncing decorator for apigpio callbacks.

    `threshold` is in milliseconds; calls arriving closer together than
    that (measured via the pigpio tick argument, in microseconds) are
    filtered out.
    """
    threshold *= 1000  # ms -> us, to match pigpio ticks
    max_tick = 0xFFFFFFFF  # pigpio tick wraps at 32 bits

    class _decorated(object):

        def __init__(self, pigpio_cb):
            self._fn = pigpio_cb
            self.last = 0
            self.is_method = False

        def __call__(self, *args, **kwargs):
            # The tick position differs between plain callbacks and bound
            # methods (self/instance is injected as a leading argument).
            if self.is_method:
                tick = args[3]
            else:
                tick = args[2]
            if self.last > tick:
                # tick counter wrapped around
                delay = max_tick - self.last + tick
            else:
                delay = tick - self.last
            if delay > threshold:
                self._fn(*args, **kwargs)
                print('call passed by debouncer {} {} {}'
                      .format(tick, self.last, threshold))
                self.last = tick
            else:
                print('call filtered out by debouncer {} {} {}'
                      .format(tick, self.last, threshold))

        def __get__(self, instance, type=None):
            # with is called when an instance of `_decorated` is used as a class
            # attribute, which is the case when decorating a method in a class
            self.is_method = True
            return functools.partial(self, instance)

    return _decorated
Simple debouncing decorator for apigpio callbacks .
279
13
3,861
def verify_refresh_request(request):
    """Verify a token-refresh request via JWTIdentityPolicy.verify_refresh.

    Returns the userid when the request is valid (usable with
    remember_identity); otherwise an InvalidTokenError-based exception
    is raised.
    """
    settings = dict(request.app.settings.jwtauth.__dict__)
    policy = JWTIdentityPolicy(**settings)
    return policy.verify_refresh(request)
Wrapper around JWTIdentityPolicy . verify_refresh which verify if the request to refresh the token is valid . If valid it returns the userid which can be used to create to create an updated identity with remember_identity . Otherwise it raises an exception based on InvalidTokenError .
67
60
3,862
def detrend(arr, x=None, deg=5, tol=1e-3, maxloop=10):
    """Compute a baseline trend of a signal via iterative polynomial fits.

    Each pass fits a degree-`deg` polynomial to the running baseline and
    clips the baseline to the fitted trend; iteration stops once the
    coefficient vector changes by less than `tol` (relative) or after
    `maxloop` passes.
    """
    xx = numpy.arange(len(arr)) if x is None else x
    base = arr.copy()
    trend = base
    pol = numpy.ones((deg + 1,))
    for _ in range(maxloop):
        pol_new = numpy.polyfit(xx, base, deg)
        # Relative change of the coefficient vector
        rel_change = (numpy.linalg.norm(pol - pol_new) /
                      numpy.linalg.norm(pol))
        if rel_change < tol:
            break
        pol = pol_new
        trend = numpy.polyval(pol, xx)
        base = numpy.minimum(base, trend)
    return trend
Compute a baseline trend of a signal
177
8
3,863
def calc_osc_accels(self, osc_freqs, osc_damping=0.05, tf=None):
    """Pseudo-acceleration spectral response of SDOF oscillators.

    `tf` is an optional transfer function applied on top of each
    oscillator's own transfer function (defaults to unity).
    """
    if tf is None:
        tf = np.ones_like(self.freqs)
    else:
        tf = np.asarray(tf).astype(complex)
    peaks = [
        self.calc_peak(tf * self._calc_sdof_tf(osc_freq, osc_damping))
        for osc_freq in osc_freqs
    ]
    return np.array(peaks)
Compute the pseudo - acceleration spectral response of an oscillator with a specific frequency and damping .
114
20
3,864
def _calc_fourier_spectrum ( self , fa_length = None ) : if fa_length is None : # Use the next power of 2 for the length n = 1 while n < self . accels . size : n <<= 1 else : n = fa_length self . _fourier_amps = np . fft . rfft ( self . _accels , n ) freq_step = 1. / ( 2 * self . _time_step * ( n / 2 ) ) self . _freqs = freq_step * np . arange ( 1 + n / 2 )
Compute the Fourier Amplitude Spectrum of the time series .
136
13
3,865
def _calc_sdof_tf ( self , osc_freq , damping = 0.05 ) : return ( - osc_freq ** 2. / ( np . square ( self . freqs ) - np . square ( osc_freq ) - 2.j * damping * osc_freq * self . freqs ) )
Compute the transfer function for a single - degree - of - freedom oscillator .
79
17
3,866
def load_at2_file(cls, filename):
    """Read an AT2 formatted time series file.

    Line 2 holds the description, line 4 the point count and time step,
    and the remaining lines the whitespace-separated accelerations.
    """
    with open(filename) as fp:
        next(fp)  # header line
        description = next(fp).strip()
        next(fp)  # units line
        parts = next(fp).split()
        time_step = float(parts[1])
        accels = [float(token)
                  for line in fp
                  for token in line.split()]
    return cls(filename, description, time_step, accels)
Read an AT2 formatted time series .
105
8
3,867
def load_smc_file(cls, filename):
    """Read an SMC formatted time series file.

    NOTE(review): `cls` is ignored and a TimeSeriesMotion is returned
    directly, so subclasses never get instances of themselves — confirm
    whether `cls(...)` was intended.  Also note `raise RuntimeWarning`
    raises (not warns) on uncorrected files.
    """
    from .tools import parse_fixed_width
    with open(filename) as fp:
        lines = list(fp)
    # 11 lines of strings
    lines_str = [lines.pop(0) for _ in range(11)]
    if lines_str[0].strip() != '2 CORRECTED ACCELEROGRAM':
        raise RuntimeWarning("Loading uncorrected SMC file.")
    m = re.search('station =(.+)component=(.+)', lines_str[5])
    description = '; '.join([g.strip() for g in m.groups()])
    # 6 lines of (8i10) formatted integers
    values_int = parse_fixed_width(
        48 * [(10, int)], [lines.pop(0) for _ in range(6)])
    count_comment = values_int[15]
    count = values_int[16]
    # 10 lines of (5e15.7) formatted floats
    values_float = parse_fixed_width(
        50 * [(15, float)], [lines.pop(0) for _ in range(10)])
    # Header stores the sampling rate; invert for the time step.
    time_step = 1 / values_float[1]
    # Skip comments
    lines = lines[count_comment:]
    accels = np.array(parse_fixed_width(count * [(10, float), ], lines))
    return TimeSeriesMotion(filename, description, time_step, accels)
Read an SMC formatted time series .
333
8
3,868
def get_info(self):
    """Return (reconstructed, site, site_type): whether the surface
    reconstructed, plus primary and secondary adsorption site labels."""
    reconstructed = self.is_reconstructed()
    site_label, site_kind = self.get_site()
    return reconstructed, site_label, site_kind
Return surface reconstruction as well as primary and secondary adsorption site labels
40
14
3,869
def check_dissociated(self, cutoff=1.2):
    """Check whether the adsorbate has dissociated.

    The distance between the two adsorbate species is compared against the
    sum of their covalent radii; a bond longer than ``cutoff`` times that
    sum counts as dissociation.

    Parameters
    ----------
    cutoff : float
        Multiplier on the covalent-radii bond length (default 1.2).

    Returns
    -------
    bool
        True when the adsorbate is dissociated.
    """
    if not len(self.B) > self.nslab + 1:
        # Only a single adsorbate atom: nothing to dissociate.
        return False
    adsatoms = [atom for atom in self.B[self.nslab:]]
    # NOTE: unpacking assumes exactly two distinct adsorbate species.
    ads0, ads1 = set(atom.symbol for atom in adsatoms)
    bond_dist = get_ads_dist(self.B, ads0, ads1)
    # Reference bond length: sum of the covalent radii of the two species.
    Cradii = [cradii[atom.number]
              for atom in [ase.Atom(ads0), ase.Atom(ads1)]]
    bond_dist0 = sum(Cradii)
    if bond_dist > cutoff * bond_dist0:
        # FIX: the message previously hard-coded the 1.2 factor even when a
        # different cutoff was passed; report the actual cutoff used.
        print('DISSOCIATED: {} Ang > {} * {} Ang'.format(
            bond_dist, cutoff, bond_dist0))
        return True
    return False
Check if adsorbate dissociates
203
8
3,870
def is_reconstructed ( self , xy_cutoff = 0.3 , z_cutoff = 0.4 ) : assert self . A , 'Initial slab geometry needed to classify reconstruction' # remove adsorbate A = self . A [ : - 1 ] . copy ( ) B = self . B [ : - 1 ] . copy ( ) # Order wrt x-positions x_indices = np . argsort ( A . positions [ : , 0 ] ) A = A [ x_indices ] B = B [ x_indices ] a = A . positions b = B . positions allowed_z_movement = z_cutoff * cradii [ A . get_atomic_numbers ( ) ] allowed_xy_movement = xy_cutoff * np . mean ( cradii [ A . get_atomic_numbers ( ) ] ) D , D_len = get_distances ( p1 = a , p2 = b , cell = A . cell , pbc = True ) d_xy = np . linalg . norm ( np . diagonal ( D ) [ : 2 ] , axis = 0 ) d_z = np . diagonal ( D ) [ 2 : ] [ 0 ] cond1 = np . all ( d_xy < allowed_xy_movement ) cond2 = np . all ( [ d_z [ i ] < allowed_z_movement [ i ] for i in range ( len ( a ) ) ] ) if cond1 and cond2 : # not reconstructed return False else : return True
Compare initial and final slab configuration to determine if slab reconstructs during relaxation
337
14
3,871
def get_under_bridge(self):
    """Return the subsurface-layer element closest to the adsorbate."""
    # Repeat 3x3 in-plane so periodic neighbours are explicit; index 4 is
    # the adsorbate in the central image.
    repeated_ads = self.B[-1:] * (3, 3, 1)
    ads_pos = repeated_ads.positions[4]
    subsurface = self.get_subsurface_layer() * (3, 3, 1)
    closest_symbol = None
    best = self.B.cell[0][0] * 2
    for atom in subsurface:
        separation = np.linalg.norm(ads_pos - atom.position)
        if separation < best:
            best = separation
            closest_symbol = atom.symbol
    return closest_symbol
Return element closest to the adsorbate in the subsurface layer
122
14
3,872
def get_under_hollow(self):
    """Classify the hollow site: 'HCP' when an atom sits directly below the
    adsorbate in the subsurface layer, otherwise 'FCC'."""
    repeated_ads = self.B[-1:] * (3, 3, 1)
    ads_pos = repeated_ads.positions[4]
    subsurface = self.get_subsurface_layer() * (3, 3, 1)
    for atom in subsurface:
        # Lateral (xy) separation only; a close atom underneath marks HCP.
        lateral = np.linalg.norm(ads_pos[:2] - atom.position[:2])
        if lateral < 0.5 * cradii[atom.number]:
            return 'HCP'
    return 'FCC'
Return HCP if an atom is present below the adsorbate in the subsurface layer and FCC if not
125
23
3,873
def fmap(order, aij, bij, x, y):
    """Evaluate the 2D polynomial transformation of the given order.

    Returns ``(u, v)`` where ``u = sum_k aij[k] * x**(i-j) * y**j`` (and
    similarly ``v`` with ``bij``), with ``k`` enumerating all exponent
    pairs ``0 <= j <= i <= order``.
    """
    u = np.zeros_like(x)
    v = np.zeros_like(y)
    exponents = ((i - j, j) for i in range(order + 1) for j in range(i + 1))
    for k, (px, py) in enumerate(exponents):
        monomial = (x ** px) * (y ** py)
        u += aij[k] * monomial
        v += bij[k] * monomial
    return u, v
Evaluate the 2D polynomial transformation .
115
11
3,874
def ncoef_fmap(order):
    """Expected number of coefficients in a 2D transformation of *order*.

    One coefficient exists per monomial ``x**(i-j) * y**j`` with
    ``0 <= j <= i <= order``, i.e. the triangular number
    ``(order + 1) * (order + 2) / 2``.  The original nested counting loop
    is replaced by this closed form.
    """
    return (order + 1) * (order + 2) // 2
Expected number of coefficients in a 2D transformation of a given order .
45
15
3,875
def order_fmap(ncoef):
    """Compute the polynomial order matching a given number of coefficients.

    Raises
    ------
    ValueError
        If no order up to ``NMAX_ORDER`` yields ``ncoef`` coefficients.
    """
    order = 1
    while ncoef != ncoef_fmap(order):
        order += 1
        if order > NMAX_ORDER:
            print('No. of coefficients: ', ncoef)
            raise ValueError("order > " + str(NMAX_ORDER) +
                             " not implemented")
    return order
Compute order corresponding to a given number of coefficients .
92
11
3,876
def get_template_name(self):
    """Return the template name, deriving and caching it on first access.

    A class named ``FooBarBaz`` maps to ``foo/bar_baz.html``.
    """
    if self.template_name is None:
        words = camelcase2list(self.__class__.__name__)
        head = words.pop(0)
        self.template_name = '{}/{}.html'.format(head, '_'.join(words)).lower()
    return self.template_name
Returns the name of the template .
82
7
3,877
def append_file(self, file):
    """Append a new file to the stream, scheduling its transformation."""
    self.files.append(file)
    if self.transformer:
        # Transform asynchronously; completion feeds the next stage.
        task = asyncio.ensure_future(self.transformer.transform(file))
        task.add_done_callback(self.handle_transform)
Append a new file in the stream .
55
9
3,878
def flush_if_ended(self):
    """Flush the transformer once every appended file has been transformed
    and the stream has ended; then signal end-of-stream downstream."""
    all_done = (self.ended and self.next
                and len(self.files) == self.transformed)
    if not all_done:
        return
    future = asyncio.ensure_future(self.transformer.flush())
    future.add_done_callback(lambda _: self.next.end_of_stream())
Call flush function if all files have been transformed .
70
10
3,879
def handle_transform(self, task):
    """Handle completion of a transform task: count it, forward any result
    to the next stream, and flush if the stream has ended."""
    self.transformed += 1
    result = task.result()
    if result:
        self.next.append_file(result)
    self.flush_if_ended()
Handle a transform callback .
44
5
3,880
def pipe(self, transformer):
    """Pipe this stream to another via *transformer*.

    Creates a downstream Stream, wires the two together, hands pending
    files to the transformer, and returns the new stream.  If this stream
    is already piped, it is left untouched.
    NOTE(review): the already-piped branch returns None rather than the
    existing next stream — confirm callers expect that.
    """
    if self.next:
        return
    stream = Stream()
    # Doubly link this stream with the new downstream stream.
    self.next = stream
    stream.prev = self
    self.transformer = transformer
    transformer.stream = self
    transformer.piped()
    # Schedule transformation of files appended before piping.
    for file in self.files:
        future = asyncio.ensure_future(self.transformer.transform(file))
        future.add_done_callback(self.handle_transform)
    # Unblock anything awaiting the piping event.
    self.onpiped.set_result(None)
    self.flush_if_ended()
    return stream
Pipe this stream to another .
107
7
3,881
def pipes(stream, *transformers):
    """Pipe several transformers end to end, returning the final stream."""
    current = stream
    for transformer in transformers:
        current = current.pipe(transformer)
    return current
Pipe several transformers end to end .
26
9
3,882
def convert(self, val):
    """Convert *val* to this parameter's type, then apply the custom
    validator (if any) to the converted value."""
    converted = super(Parameter, self).convert(val)
    if self.custom_validator is None:
        return converted
    return self.custom_validator(converted)
Convert input values to type values .
51
8
3,883
def validate(self, val):
    """Validate *val*; type validation runs only when enabled.

    Validators raise on failure, so reaching the end means success.
    Returns True.
    """
    if self.validation:
        self.type.validate(val)
    custom = self.custom_validator
    if custom is not None:
        custom(val)
    return True
Validate values according to the requirement
43
7
3,884
def route(obj, rule, *args, **kwargs):
    """Class decorator registering a View class as a URL rule on *obj*.

    The endpoint defaults to the snake_case form of the class name unless
    an explicit ``endpoint`` keyword is supplied.
    """
    def decorator(cls):
        default_endpoint = camel_to_snake(cls.__name__)
        endpoint = kwargs.get('endpoint', default_endpoint)
        kwargs['view_func'] = cls.as_view(endpoint)
        obj.add_url_rule(rule, *args, **kwargs)
        return cls
    return decorator
Decorator for the View classes .
98
8
3,885
def fit(self, target_type, target, adjust_thickness=False,
        adjust_site_atten=False, adjust_source_vel=False):
    """Fit the profile to a target crustal amplification or site term.

    Parameters
    ----------
    target_type : str
        'crustal_amp' to fit the crustal amplification; anything else
        fits the site term.
    target : array_like
        Target spectrum the model is fit to.
    adjust_thickness : bool
        Also optimize layer thicknesses (each within [t/2, 2t]).
    adjust_site_atten : bool
        Also optimize the site attenuation (kappa) within [0.0001, 0.2].
    adjust_source_vel : bool
        Allow the half-space (last layer) slowness to vary.

    Returns
    -------
    tuple
        (motion, fitted Profile, loc_input).
    """
    density = self.profile.density
    nl = len(density)
    slowness = self.profile.slowness
    thickness = self.profile.thickness
    site_atten = self._site_atten
    # Slowness bounds: velocities between 100 and 4000 (units of profile).
    initial = slowness
    bounds = 1 / np.tile((4000, 100), (nl, 1))
    if not adjust_source_vel:
        # Pin the half-space slowness to its current value.
        bounds[-1] = (initial[-1], initial[-1])
    # Thickness bounds
    if adjust_thickness:
        bounds = np.r_[bounds, [[t / 2, 2 * t] for t in thickness]]
        initial = np.r_[initial, thickness]
    # Site attenuation bounds
    if adjust_site_atten:
        bounds = np.r_[bounds, [[0.0001, 0.200]]]
        initial = np.r_[initial, self.site_atten]

    def calc_rmse(this, that):
        # Mean squared relative difference.
        return np.mean(((this - that) / that) ** 2)

    def err(x):
        # Unpack the optimization vector: slowness, then (optionally)
        # thickness, then (optionally) site attenuation as the last entry.
        _slowness = x[0:nl]
        if adjust_thickness:
            _thickness = x[nl:(2 * nl)]
        else:
            _thickness = thickness
        if adjust_site_atten:
            # NOTE: mutates instance state so _calc_amp sees the trial kappa.
            self._site_atten = x[-1]
        crustal_amp, site_term = self._calc_amp(
            density, _thickness, _slowness)
        calc = crustal_amp if target_type == 'crustal_amp' else site_term
        err = 10 * calc_rmse(target, calc)
        # Prefer the original values, so add the relative difference from
        # them as a regularization term.
        err += calc_rmse(slowness, _slowness)
        if adjust_thickness:
            err += calc_rmse(thickness, _thickness)
        if adjust_site_atten:
            err += calc_rmse(self._site_atten, site_atten)
        return err

    res = minimize(err, initial, method='L-BFGS-B', bounds=bounds)
    slowness = res.x[0:nl]
    if adjust_thickness:
        thickness = res.x[nl:(2 * nl)]
    # Build a new profile with the fitted thicknesses and velocities.
    profile = Profile([
        Layer(l.soil_type, t, 1 / s)
        for l, t, s in zip(self.profile, thickness, slowness)
    ], self.profile.wt_depth)
    # Update the calculated amplification
    return (self.motion, profile, self.loc_input)
Fit to a target crustal amplification or site term .
603
11
3,886
def wave_at_location(self, l):
    """Compute the wave field at a specific location *l*.

    Combines the up-going (A) and down-going (B) wave amplitudes according
    to the location's wave-field type.
    """
    cterm = 1j * self._wave_nums[l.index] * l.depth_within
    up_going = self._waves_a[l.index] * np.exp(cterm)
    if l.wave_field == WaveField.within:
        down_going = self._waves_b[l.index] * np.exp(-cterm)
        return up_going + down_going
    if l.wave_field == WaveField.outcrop:
        # Free surface doubles the incident wave.
        return 2 * up_going
    if l.wave_field == WaveField.incoming_only:
        return up_going
    raise NotImplementedError
Compute the wave field at specific location .
170
9
3,887
def calc_accel_tf(self, lin, lout):
    """Acceleration transfer function: output wave field over input."""
    numer = self.wave_at_location(lout)
    denom = self.wave_at_location(lin)
    return numer / denom
Compute the acceleration transfer function .
42
7
3,888
def calc_stress_tf(self, lin, lout, damped):
    """Stress transfer function: strain TF scaled by the shear modulus.

    The complex shear modulus folds damping into the stress; the real
    modulus gives the undamped stress.
    """
    strain_tf = self.calc_strain_tf(lin, lout)
    layer = lout.layer
    modulus = layer.comp_shear_mod if damped else layer.shear_mod
    return strain_tf * modulus
Compute the stress transfer function .
84
7
3,889
def calc_strain_tf(self, lin, lout):
    """Compute the strain transfer function from *lin* to *lout*.

    The strain at mid-layer relative to the input acceleration:

        Strain(angFreq, z=h_m/2)
        ------------------------ =
              accel_n(angFreq)

          i k*_m [ A_m exp(i k*_m h_m / 2) - B_m exp(-i k*_m h_m / 2)]
          ------------------------------------------------------------
                            -angFreq^2 (2 * A_n)
    """
    assert lout.wave_field == WaveField.within
    ang_freqs = self.motion.angular_freqs
    # The numerator cannot be computed using wave_at_location() because it
    # needs the difference (A - B) rather than the sum.
    cterm = 1j * self._wave_nums[lout.index, :] * lout.depth_within
    numer = (1j * self._wave_nums[lout.index, :] *
             (self._waves_a[lout.index, :] * np.exp(cterm) -
              self._waves_b[lout.index, :] * np.exp(-cterm)))
    denom = -ang_freqs ** 2 * self.wave_at_location(lin)
    # Only compute the transfer function at non-zero frequencies to avoid
    # dividing by zero.
    mask = ~np.isclose(ang_freqs, 0)
    # FIX: `np.complex` was deprecated in NumPy 1.20 and removed in 1.24;
    # use the builtin `complex` type instead.
    tf = np.zeros_like(mask, dtype=complex)
    # Scale into units from gravity.
    tf[mask] = GRAVITY * numer[mask] / denom[mask]
    return tf
Compute the strain transfer function from lout to location_in .
355
14
3,890
def _estimate_strains ( self ) : # Estimate the strain based on the PGV and shear-wave velocity for l in self . _profile : l . reset ( ) l . strain = self . _motion . pgv / l . initial_shear_vel
Compute an estimate of the strains .
60
8
3,891
def _calc_strain ( self , loc_input , loc_layer , motion , * args ) : strain_max = self . _calc_strain_max ( loc_input , loc_layer , motion , * args ) return self . strain_ratio * strain_max
Compute the strain used for iterations of material properties .
63
11
3,892
def _calc_strain_max ( self , loc_input , loc_layer , motion , * args ) : return motion . calc_peak ( self . calc_strain_tf ( loc_input , loc_layer ) )
Compute the effective strain at the center of a layer .
51
12
3,893
def _estimate_strains(self):
    """Estimate strains by first running an equivalent-linear site
    response on the same motion, profile, and input location."""
    eql_calc = EquivalentLinearCalculator()
    eql_calc(self._motion, self._profile, self._loc_input)
Estimate the strains by running an EQL site response .
43
12
3,894
def timeit(method):
    """Decorator measuring the wall-clock time used by a recipe method.

    The measured interval is stored on the result via ``result.time_it``
    and a log line is emitted on the recipe's logger.
    """
    import datetime

    @functools.wraps(method)
    def timed_method(self, rinput):
        started = datetime.datetime.utcnow()
        result = method(self, rinput)
        finished = datetime.datetime.utcnow()
        result.time_it(started, finished)
        self.logger.info('total time measured')
        return result

    return timed_method
Decorator to measure the time used by the recipe
101
11
3,895
def save_intermediate_img(self, img, name):
    """Save an intermediate FITS object, but only when intermediate
    results are enabled."""
    if not self.intermediate_results:
        return
    img.writeto(name, overwrite=True)
Save intermediate FITS objects .
33
6
3,896
def save_intermediate_array(self, array, name):
    """Save an intermediate array object as a FITS file, but only when
    intermediate results are enabled."""
    if not self.intermediate_results:
        return
    fits.writeto(name, array, overwrite=True)
Save intermediate array object as FITS .
35
8
3,897
def build_recipe_input ( self , ob , dal ) : result = { } # We have to decide if the ob input # is a plain description (ObservingBlock) # or if it contains the nested results (Obsres) # # it has to contain the tags corresponding to the observing modes... ob_query_skip = False ob_query_field = 'obresult' if isinstance ( ob , ObservingBlock ) : import numina . types . obsresult as obtype # We have to build an Obsres for key , req in self . requirements ( ) . items ( ) : if isinstance ( req . type , obtype . ObservationResultType ) : ob_query_field = key ob_query_skip = True query_option = self . query_options . get ( key ) # print('req for ob is named', key, query_option) new_or = ObservationResult ( ) new_or . __dict__ = ob . __dict__ obsres = req . query ( dal , new_or , options = query_option ) tagger = self . mode . tagger if tagger is not None : self . logger . debug ( 'Use mode tagger to fill tags in OB' ) obsres . tags = tagger ( obsres ) else : obsres . tags = None break else : # nothing to do obsres = ob else : obsres = ob # Get tags_names per REQ self . logger . debug ( 'getting query fields per REQ' ) qfields = set ( ) for key , req in self . requirements ( ) . items ( ) : tag_n = req . tag_names ( ) self . logger . debug ( "%s has these query fields %s" , key , tag_n ) qfields . update ( tag_n ) if obsres . tags is None : self . logger . debug ( 'running recipe tagger' ) self . logger . debug ( 'with query fields %s' , qfields ) if qfields : obsres . tags = self . obsres_extractor ( obsres , qfields ) else : obsres . tags = { } for key , req in self . requirements ( ) . items ( ) : try : query_option = self . query_options . get ( key ) if key == ob_query_field and ob_query_skip : result [ key ] = obsres else : result [ key ] = req . query ( dal , obsres , options = query_option ) except NoResultFound as notfound : req . on_query_not_found ( notfound ) return self . create_input ( * * result )
Build a RecipeInput object .
563
6
3,898
def subsets_of_fileinfo_from_txt(filename):
    """Return a dict of subsets of FileInfo instances read from a TXT file.

    The file consists of blocks, each starting with a header line
    ``@ <nfiles> <label>`` followed by ``nfiles`` lines, each holding a
    file name and optional extra info.  Lines starting with ``#`` and
    blank lines are ignored.  The result maps a running integer index to
    ``{'label': ..., 'list_of_fileinfo': [...]}``.

    Raises ValueError on a missing file, malformed block structure, or a
    truncated final block.
    """
    # check for input file
    if not os.path.isfile(filename):
        raise ValueError("File " + filename + " not found!")
    # read input file
    with open(filename) as f:
        file_content = f.read().splitlines()
    # obtain the different subsets of files
    dict_of_subsets_of_fileinfo = {}
    label = None          # label of the block currently being read
    sublist_of_fileinfo = []
    idict = 0             # running key for the output dict
    ifiles = 0            # files collected so far in the current block
    nfiles = 0            # files expected in the current block
    sublist_finished = True
    for line in file_content:
        if len(line) > 0:
            if line[0] != '#':
                if label is None:
                    # Expecting a new block header: "@ <nfiles> <label>".
                    if line[0] == "@":
                        nfiles = int(line[1:].split()[0])
                        label = line[1:].split()[1]
                        sublist_of_fileinfo = []
                        ifiles = 0
                        sublist_finished = False
                    else:
                        raise ValueError("Expected @ symbol not found!")
                else:
                    # Inside a block: expecting a file entry, not a header.
                    if line[0] == "@":
                        raise ValueError("Unexpected @ symbol found!")
                    tmplist = line.split()
                    tmpfile = tmplist[0]
                    if len(tmplist) > 1:
                        tmpinfo = tmplist[1:]
                    else:
                        tmpinfo = None
                    if not os.path.isfile(tmpfile):
                        raise ValueError("File " + tmpfile + " not found!")
                    sublist_of_fileinfo.append(FileInfo(tmpfile, tmpinfo))
                    ifiles += 1
                    if ifiles == nfiles:
                        # Block complete: store it and reset the state.
                        dict_of_subsets_of_fileinfo[idict] = {}
                        tmpdict = dict_of_subsets_of_fileinfo[idict]
                        tmpdict['label'] = label
                        tmpdict['list_of_fileinfo'] = sublist_of_fileinfo
                        idict += 1
                        label = None
                        sublist_of_fileinfo = []
                        ifiles = 0
                        sublist_finished = True
    if not sublist_finished:
        raise ValueError("Unexpected end of sublist of files.")
    return dict_of_subsets_of_fileinfo
Returns a dictionary with subsets of FileInfo instances from a TXT file .
490
16
3,899
def subarray_match(shape, ref, sshape, sref=None):
    """Compute the slice representation of the intersection of two arrays.

    Aligns the point ``ref`` of an array of extent ``shape`` with the
    point ``sref`` (default: the origin) of a subarray of extent
    ``sshape``.  Returns a pair of slice tuples — one into each array —
    covering the overlap, or ``(None, None)`` when they do not intersect.
    """
    ref1 = asarray(ref, dtype='int')
    ref2 = zeros_like(ref1) if sref is None else asarray(sref, dtype='int')
    offset = ref1 - ref2
    # Corners of the overlap region, clipped to each array's extent.
    urc1 = minimum(offset + asarray(sshape) - 1, asarray(shape) - 1)
    blc1 = maximum(offset, 0)
    urc2 = urc1 - offset
    blc2 = blc1 - offset

    def as_slice(lo, hi):
        # An empty extent maps to None so the overlap test below rejects it.
        return None if lo >= hi + 1 else slice(lo, hi + 1)

    first = tuple(as_slice(lo, hi) for lo, hi in zip(blc1, urc1))
    second = tuple(as_slice(lo, hi) for lo, hi in zip(blc2, urc2))
    if all(first) and all(second):
        return (first, second)
    return (None, None)
Compute the slice representation of intersection of two arrays .
246
11