idx
int64
0
24.9k
question
stringlengths
68
4.14k
target
stringlengths
9
749
400
# Returns the on-disk directory name for the box of the given name,
# encoding characters that are unsafe in paths (":" on Windows, "/" always).
def dir_name(name)
  encoded = name.dup
  encoded.gsub!(":", VAGRANT_COLON) if Util::Platform.windows?
  encoded.gsub!("/", VAGRANT_SLASH)
  encoded
end
Returns the directory name for the box of the given name .
401
# Inverse of dir_name: decodes an encoded box directory name back to the
# original box name.
def undir_name(name)
  decoded = name.dup
  decoded.gsub!(VAGRANT_COLON, ":")
  decoded.gsub!(VAGRANT_SLASH, "/")
  decoded
end
Returns the directory name for the box cleaned up
402
# Upgrades a V1 box unpacked in +dir+ and returns the directory holding the
# upgraded contents. Destructive: the contents of +dir+ are moved away.
def v1_upgrade(dir)
  @logger.debug("Upgrading box in directory: #{dir}")
  temp_dir = Pathname.new(Dir.mktmpdir(TEMP_PREFIX, @temp_root))
  @logger.debug("Temporary directory for upgrading: #{temp_dir}")

  # Move every child of the old box directory into the upgrade directory.
  dir.children(true).each do |child|
    next if child == temp_dir
    @logger.debug("Copying to upgrade directory: #{child}")
    FileUtils.mv(child, temp_dir.join(child.basename))
  end

  # V2 boxes require a metadata.json; synthesize one if it is missing.
  metadata_file = temp_dir.join("metadata.json")
  unless metadata_file.file?
    metadata_file.open("w") do |f|
      f.write(JSON.generate(provider: "virtualbox"))
    end
  end

  temp_dir
end
This upgrades the V1 box contained unpacked in the given directory and returns the directory of the upgraded version . This is _destructive_ to the contents of the old directory . That is the contents of the old V1 box will be destroyed or moved .
403
# Yields a Pathname to a temporary directory and removes it afterwards, no
# matter how the block exits.
def with_temp_dir(dir = nil)
  path = Pathname.new(dir || Dir.mktmpdir(TEMP_PREFIX, @temp_root))
  yield path
ensure
  FileUtils.rm_rf(path.to_s)
end
This is a helper that makes sure that our temporary directories are cleaned up no matter what .
404
# Initializes the capability system: detects (or validates) the capability
# host, instantiates it, and builds the parent chain of hosts to search.
def initialize_capabilities!(host, hosts, capabilities, *args)
  @cap_logger = Log4r::Logger.new(
    "vagrant::capability_host::#{self.class.to_s.downcase}")

  # An explicitly requested host must exist.
  if host && !hosts[host]
    raise Errors::CapabilityHostExplicitNotDetected, value: host.to_s
  end

  if !host
    host = autodetect_capability_host(hosts, *args) if !host
    raise Errors::CapabilityHostNotDetected if !host
  end

  if !hosts[host]
    raise "Internal error. Host not found: #{host}"
  end

  name = host
  host_info = hosts[name]
  host = host_info[0].new

  # Build the chain of [name, instance] pairs, following parent links.
  chain = []
  chain << [name, host]
  if host_info[1]
    parent_name = host_info[1]
    parent_info = hosts[parent_name]
    while parent_info
      chain << [parent_name, parent_info[0].new]
      parent_name = parent_info[1]
      parent_info = hosts[parent_name]
    end
  end

  @cap_host_chain = chain
  @cap_args = args
  @cap_caps = capabilities
  true
end
Initializes the capability system by detecting the proper capability host to execute on and building the chain of capabilities to execute .
405
# Executes the capability +cap_name+, forwarding extra arguments, and
# returns whatever the capability returns.
def capability(cap_name, *args)
  cap_mod = capability_module(cap_name.to_sym)
  if !cap_mod
    raise Errors::CapabilityNotFound,
      cap: cap_name.to_s,
      host: @cap_host_chain[0][0].to_s
  end

  cap_method = nil
  begin
    cap_method = cap_mod.method(cap_name)
  rescue NameError
    # The module was registered but doesn't define the expected method.
    raise Errors::CapabilityInvalid,
      cap: cap_name.to_s,
      host: @cap_host_chain[0][0].to_s
  end

  args = @cap_args + args
  @cap_logger.info("Execute capability: #{cap_name} #{args.inspect} (#{@cap_host_chain[0][0]})")
  cap_method.call(*args)
end
Executes the capability with the given name optionally passing more arguments onwards to the capability . If the capability returns a value it will be returned .
406
# Returns the registered module for the capability +cap_name+, searching the
# host chain in order, or nil when no host provides it.
def capability_module(cap_name)
  @cap_logger.debug("Searching for cap: #{cap_name}")
  @cap_host_chain.each do |host_name, host|
    @cap_logger.debug("Checking in: #{host_name}")
    caps = @cap_caps[host_name]
    if caps && caps.key?(cap_name)
      @cap_logger.debug("Found cap: #{cap_name} in #{host_name}")
      return caps[cap_name]
    end
  end
  nil
end
Returns the registered module for a capability with the given name .
407
# Checks whether this box is in use according to the given machine +index+
# and returns the matching entries, or nil when none reference the box.
def in_use?(index)
  matches = []
  index.each do |entry|
    box_data = entry.extra_data["box"]
    next if !box_data
    if box_data["name"] == self.name &&
       box_data["provider"] == self.provider.to_s &&
       box_data["version"] == self.version.to_s
      matches << entry
    end
  end
  return nil if matches.empty?
  matches
end
Checks if this box is in use according to the given machine index and returns the entries that appear to be using the box .
408
# Downloads the metadata URL and returns the latest BoxMetadata for this box.
def load_metadata(**download_options)
  tf = Tempfile.new("vagrant-load-metadata")
  tf.close

  url = @metadata_url
  # NOTE(review): the regexp below looks extraction-garbled; upstream detects
  # URLs lacking a scheme so local paths get a file: prefix — confirm.
  if File.file?(url) || url !~ / /i
    url = File.expand_path(url)
    url = Util::Platform.cygwin_windows_path(url)
    url = "file:#{url}"
  end

  opts = { headers: ["Accept: application/json"] }.merge(download_options)
  Util::Downloader.new(url, tf.path, **opts).download!
  BoxMetadata.new(File.open(tf.path, "r"))
rescue Errors::DownloaderError => e
  raise Errors::BoxMetadataDownloadError, message: e.extra_data[:message]
ensure
  tf.unlink if tf
end
Loads the metadata URL and returns the latest metadata associated with this box .
409
# Checks whether the box has an update and returns [metadata, version,
# provider], or nil when no newer version satisfies the constraints.
#
# @param version [String, nil] optional extra version constraint
# @param download_options [Hash] options forwarded to load_metadata
# @raise [Errors::BoxUpdateNoMetadata] when the box has no metadata URL
def has_update?(version = nil, download_options: {})
  if !@metadata_url
    raise Errors::BoxUpdateNoMetadata, name: @name
  end

  # Automatic checks are rate-limited; skip when the interval hasn't passed.
  if download_options.delete(:automatic_check) && !automatic_update_check_allowed?
    @logger.info("Skipping box update check")
    return
  end

  version += ", " if version
  version ||= ""
  version += "> #{@version}"

  # BUG FIX: load_metadata declares keyword arguments (**download_options);
  # passing the hash positionally raises ArgumentError under Ruby 3's
  # keyword-argument separation. Splat it explicitly.
  md = self.load_metadata(**download_options)
  newer = md.version(version, provider: @provider)
  return nil if !newer

  [md, newer, newer.provider(@provider)]
end
Checks if the box has an update and returns the metadata, version, and provider. If the box doesn't have an update that satisfies the constraints, it will return nil.
410
# Returns true when an automatic box update check may run. Uses the mtime of
# a marker file in the box data directory to enforce the check interval, and
# touches the marker when a check is allowed.
def automatic_update_check_allowed?
  check_path = directory.join("box_update_check")
  if check_path.exist?
    last_check_span = Time.now.to_i - check_path.mtime.to_i
    if last_check_span < BOX_UPDATE_CHECK_INTERVAL
      @logger.info("box update check is under the interval threshold")
      return false
    end
  end
  FileUtils.touch(check_path)
  true
end
Check if a box update check is allowed . Uses a file in the box data directory to track when the last auto update check was performed and returns true if the BOX_UPDATE_CHECK_INTERVAL has passed .
411
# Repackages this box into a tarball at the given +path+. Returns true.
def repackage(path)
  @logger.debug("Repackaging box '#{@name}' to: #{path}")
  Util::SafeChdir.safe_chdir(@directory) do
    # Gather only regular files relative to the box directory.
    files = Dir.glob(File.join(".", "**", "*")).select { |f| File.file?(f) }
    Util::Subprocess.execute("bsdtar", "-czf", path.to_s, *files)
  end
  @logger.info("Repackaged box '#{@name}' successfully: #{path}")
  true
end
This repackages this box and outputs it to the given path .
412
# Interprets a raw line from the aliases file, returning [keyword, command]
# or nil for comment lines.
def interpret(line)
  return nil if line.strip.start_with?("#")
  keyword, command = line.split("=", 2).collect(&:strip)
  # NOTE(review): the regexp looks extraction-garbled; the message says the
  # intent is "any whitespace in the keyword" (/\s/) — confirm upstream.
  if keyword.match(/ \s /i)
    raise Errors::AliasInvalidError,
      alias: line,
      message: "Alias keywords must not contain any whitespace."
  end
  [keyword, command]
end
This interprets a raw line from the aliases file .
413
# Registers an alias: a "!" prefix shells out directly, otherwise the
# command is dispatched back through the vagrant CLI.
def register(keyword, command)
  @aliases.register(keyword.to_sym) do
    lambda do |args|
      if command.start_with?("!")
        return Util::SafeExec.exec "#{command[1..-1]} #{args.join(" ")}".strip
      end
      return CLI.new(command.split.concat(args), @env).execute
    end
  end
end
This registers an alias .
414
# Halts processing and redirects to the given URI. Non-GET requests over
# HTTP/1.1 get a 303 (See Other); everything else a 302.
def redirect(uri, *args)
  if env['HTTP_VERSION'] == 'HTTP/1.1' && env["REQUEST_METHOD"] != 'GET'
    status 303
  else
    status 302
  end
  response['Location'] = uri(uri, settings.absolute_redirects?, settings.prefixed_redirects?)
  halt(*args)
end
Halt processing and redirect to the URI provided .
415
# Generates the (optionally absolute) URI for a path in the app, honoring
# reverse proxies and the mounted script name.
def uri(addr = nil, absolute = true, add_script_name = true)
  # NOTE(review): the regexp looks extraction-garbled; upstream returns +addr+
  # untouched when it already carries a URI scheme — confirm original pattern.
  return addr if addr =~ / \A \+ \. \- /
  uri = [host = ""]
  if absolute
    host << "http#{'s' if request.secure?}://"
    if request.forwarded? || request.port != (request.secure? ? 443 : 80)
      host << request.host_with_port
    else
      host << request.host
    end
  end
  uri << request.script_name.to_s if add_script_name
  uri << (addr ? addr : request.path_info).to_s
  File.join uri
end
Generates the absolute URI for a given path in the app . Takes Rack routers and reverse proxies into account .
416
# Sets (or, with no argument, returns) the Content-Type of the response,
# given a media type or file extension. Merges charset and extra params.
def content_type(type = nil, params = {})
  return response['Content-Type'] unless type
  default = params.delete :default
  mime_type = mime_type(type) || default
  fail "Unknown media type: %p" % type if mime_type.nil?
  mime_type = mime_type.dup
  unless params.include?(:charset) || settings.add_charset.all? { |p| !(p === mime_type) }
    params[:charset] = params.delete('charset') || settings.default_encoding
  end
  params.delete :charset if mime_type.include? 'charset'
  unless params.empty?
    mime_type << (mime_type.include?(';') ? ', ' : ';')
    mime_type << params.map { |kv| kv.join('=') }.join(', ')
  end
  response['Content-Type'] = mime_type
end
Set the Content - Type of the response body given a media type or file extension .
417
# Starts streaming data to the client before the rest of the response body
# has been generated. Captures current params for use inside the stream.
def stream(keep_open = false)
  scheduler = env['async.callback'] ? EventMachine : Stream
  current = @params.dup
  body Stream.new(scheduler, keep_open) { |out| with_params(current) { yield(out) } }
end
Allows to start sending data to the client even though later parts of the response body have not yet been generated .
418
# Checks whether an ETag value +list+ includes the response's current ETag.
# A list of "*" matches any existing resource.
def etag_matches?(list, new_resource = request.post?)
  return !new_resource if list == '*'
  # NOTE(review): the separator regexp looks extraction-garbled; upstream
  # splits on comma with optional whitespace (/\s*,\s*/) — confirm.
  list.to_s.split(/ \s \s /).include? response['ETag']
end
Helper method checking if a ETag value list includes the current ETag .
419
# Logic shared between builder and nokogiri rendering: normalizes the
# template/options arguments and wraps an inline block as the template.
def render_ruby(engine, template, options = {}, locals = {}, &block)
  options, template = template, nil if template.is_a?(Hash)
  template = Proc.new { block } if template.nil?
  render engine, template, options, locals
end
logic shared between builder and nokogiri
420
# Runs filters of the given +type+ defined on +base+ and all superclasses,
# outermost ancestors first.
def filter!(type, base = settings)
  filter! type, base.superclass if base.superclass.respond_to?(:filters)
  base.filters[type].each { |args| process_route(*args) }
end
Run filters defined on the class and all superclasses .
421
# Runs routes defined on +base+ and all superclasses for the current
# request method; falls through to route_missing when nothing matched.
def route!(base = settings, pass_block = nil)
  if routes = base.routes[@request.request_method]
    routes.each do |pattern, keys, conditions, block|
      pass_block = process_route(pattern, keys, conditions) do |*args|
        route_eval { block[*args] }
      end
    end
  end

  # Recurse up the inheritance chain before giving up.
  if base.superclass.respond_to?(:routes)
    return route!(base.superclass, pass_block)
  end

  route_eval(&pass_block) if pass_block
  route_missing
end
Run routes defined on the class and all superclasses .
422
# Creates a Hash with indifferent access: symbol lookups fall back to the
# equivalent string key.
def indifferent_hash
  Hash.new { |h, k| h[k.to_s] if k.is_a?(Symbol) }
end
Creates a Hash with indifferent access .
423
# Parser reduce action: converts a numeric token to Float when it contains
# a decimal point, Integer otherwise.
def _reduce_22(val, _values, result)
  token = val[0]
  result = token.count('.') > 0 ? token.to_f : token.to_i
  result
end
reduce 21 omitted
424
# Runs configuration under Engine dry-run mode; exits hard on config errors
# and always restores dry_run_mode afterwards.
def dry_run
  Fluent::Engine.dry_run_mode = true
  change_privilege
  init_engine
  run_configure
rescue Fluent::ConfigError => e
  $log.error "config error", file: @config_path, error: e
  $log.debug_backtrace
  exit!(1)
ensure
  Fluent::Engine.dry_run_mode = false
end
Sets the Engine's dry_run_mode to true to override all target_id values of worker sections.
425
# Returns a MultiEventStream covering +num+ events starting at +index+.
def slice(index, num)
  ensure_unpacked!
  MultiEventStream.new(@unpacked_times.slice(index, num),
                       @unpacked_records.slice(index, num))
end
This method returns a MultiEventStream because there is no reason to serve binary data serialized by msgpack.
426
# Waits for an ack on +sock+ for a previously-sent chunk and returns the
# chunk id to be committed, or nil when the ack is missing or mismatched.
# The socket is always closed and the waiter entry removed afterwards.
def read_ack_from_sock(sock, unpacker)
  begin
    raw_data = sock.instance_of?(Fluent::PluginHelper::Socket::WrappedSocket::TLS) ? sock.readpartial(@read_length) : sock.recv(@read_length)
  rescue Errno::ECONNRESET, EOFError
    # Peer vanished while we waited; treat as an empty (missing) ack.
    raw_data = ""
  end
  info = @sock_ack_waiting_mutex.synchronize { @sock_ack_waiting.find { |i| i.sock == sock } }

  if raw_data.empty?
    # Connection closed without an ack: mark the node down and roll back.
    log.warn "destination node closed the connection. regard it as unavailable.", host: info.node.host, port: info.node.port
    info.node.disable!
    rollback_write(info.chunk_id, update_retry: false)
    return nil
  else
    unpacker.feed(raw_data)
    res = unpacker.read
    log.trace "getting response from destination", host: info.node.host, port: info.node.port, chunk_id: dump_unique_id_hex(info.chunk_id), response: res
    if res['ack'] != info.chunk_id_base64
      # Ack for a different chunk: do not commit.
      log.warn "ack in response and chunk id in sent data are different", chunk_id: dump_unique_id_hex(info.chunk_id), ack: res['ack']
      rollback_write(info.chunk_id, update_retry: false)
      return nil
    else
      log.trace "got a correct ack response", chunk_id: dump_unique_id_hex(info.chunk_id)
    end
    return info.chunk_id
  end
rescue => e
  log.error "unexpected error while receiving ack message", error: e
  log.error_backtrace
ensure
  info.sock.close_write rescue nil
  info.sock.close rescue nil
  @sock_ack_waiting_mutex.synchronize do
    @sock_ack_waiting.delete(info)
  end
end
return chunk id to be committed
427
# Parses a unix-timestamp-like value into a Fluent::EventTime, with a
# two-slot cache of the most recent lookups.
#
# @param value [String, Numeric] seconds since the epoch
# @raise [TimeParseError] when value has the wrong type or cannot be parsed
def parse_unixtime(value)
  unless value.is_a?(String) || value.is_a?(Numeric)
    # BUG FIX: the class name was not interpolated; the message previously
    # rendered the literal text "(value.class)".
    raise TimeParseError, "value must be a string or a number: #{value}(#{value.class})"
  end

  # Two-slot cache: most-recent value in slot 2, previous in slot 1.
  if @cache1_key == value
    return @cache1_time
  elsif @cache2_key == value
    return @cache2_time
  end

  begin
    time = Fluent::EventTime.new(value.to_i)
  rescue => e
    raise TimeParseError, "invalid time format: value = #{value}, error_class = #{e.class.name}, error = #{e.message}"
  end

  @cache1_key = @cache2_key
  @cache1_time = @cache2_time
  @cache2_key = value
  @cache2_time = time
  time
end
to include TimeParseError
428
# Searches for a plugin by its plugin_id and returns its monitor info,
# or nil when no plugin matches.
def plugin_info_by_id(plugin_id, opts = {})
  found = all_plugins.find { |pe| pe.respond_to?(:plugin_id) && pe.plugin_id.to_s == plugin_id }
  found ? get_monitor_info(found, opts) : nil
end
search a plugin by plugin_id
429
# Returns monitor info for every plugin of the given @type (an array,
# since multiple plugins can share a type).
def plugins_info_by_type(type, opts = {})
  matched = all_plugins.select { |pe| (pe.config['@type'] == type) rescue nil }
  matched.map { |pe| get_monitor_info(pe, opts) }
end
This method returns an array because multiple plugins could have the same type
430
# Builds a hash of monitoring information for plugin +pe+. Options control
# inclusion of config, retry state, and instance variables.
def get_monitor_info(pe, opts = {})
  obj = {}
  obj['plugin_id'] = pe.plugin_id
  obj['plugin_category'] = plugin_category(pe)
  obj['type'] = pe.config['@type']
  obj['config'] = pe.config if opts[:with_config]

  # Evaluate each metric block in the plugin's context; a throw(:skip)
  # omits the key, and per-plugin errors are logged but not fatal.
  MONITOR_INFO.each_pair do |key, code|
    begin
      catch(:skip) do
        obj[key] = pe.instance_exec(&code)
      end
    rescue NoMethodError => e
      unless @first_warn
        log.error "NoMethodError in monitoring plugins", key: key, plugin: pe.class, error: e
        log.error_backtrace
        @first_warn = true
      end
    rescue => e
      log.warn "unexpected error in monitoring plugins", key: key, plugin: pe.class, error: e
    end
  end

  obj['retry'] = get_retry_info(pe.retry) if opts[:with_retry] && pe.instance_variable_defined?(:@retry)

  if opts[:with_debug_info]
    # Dump every instance variable except the blacklisted ones.
    iv = {}
    pe.instance_eval do
      instance_variables.each do |sym|
        next if IGNORE_ATTRIBUTES.include?(sym)
        key = sym.to_s[1..-1]
        iv[key] = instance_variable_get(sym)
      end
    end
    obj['instance_variables'] = iv
  elsif ivars = opts[:ivars]
    # Dump only the explicitly requested instance variables.
    iv = {}
    ivars.each do |name|
      iname = "@#{name}"
      iv[name] = pe.instance_variable_get(iname) if pe.instance_variable_defined?(iname)
    end
    obj['instance_variables'] = iv
  end

  obj
end
get monitor info from the plugin pe and return a hash object
431
# Sanitizes the request parameters for +action+: a callable permission runs
# as a block, a list permits those keys, anything else is an unknown action.
def sanitize(action)
  permissions = @permitted[action]
  if permissions.respond_to?(:call)
    cast_to_hash permissions.call(default_params)
  elsif permissions.present?
    cast_to_hash permit_keys(default_params, permissions)
  else
    unknown_action!(action)
  end
end
Sanitize the parameters for a specific + action + .
432
# Adds (keys:/block) or removes (except:) parameters on the permitted list
# of +action+, seeding from the auth keys when not yet configured.
def permit(action, keys: nil, except: nil, &block)
  @permitted[action] = block if block_given?

  if keys.present?
    @permitted[action] ||= @auth_keys.dup
    @permitted[action].concat(keys)
  end

  if except.present?
    @permitted[action] ||= @auth_keys.dup
    @permitted[action] = @permitted[action] - except
  end
end
Add or remove new parameters to the permitted list of an + action + .
433
# Forces values in a conditions hash to strings (for values needing it) to
# avoid injection against Mongoid-backed queries. Non-hashes pass through.
def stringify_params(conditions)
  return conditions unless conditions.is_a?(Hash)
  conditions.each do |k, v|
    conditions[k] = v.to_s if param_requires_string_conversion?(v)
  end
end
Force keys to be string to avoid injection on mongoid related database .
434
# Parses Liquid source into the template's document tree. Returns self for
# easy chaining; warnings are collected from the parse context.
def parse(source, options = {})
  @options = options
  @profiling = options[:profile]
  @line_numbers = options[:line_numbers] || @profiling
  parse_context = options.is_a?(ParseContext) ? options : ParseContext.new(options)
  @root = Document.parse(tokenize(source), parse_context)
  @warnings = parse_context.warnings
  self
end
Parse source code . Returns self for easy chaining
435
# Renders the parsed template. The first argument may supply the Context
# (or a Drop/Hash/nil to build one); the last may supply options or filters.
def render(*args)
  return ''.freeze if @root.nil?

  # Build or adopt the rendering context from the first argument.
  context = case args.first
  when Liquid::Context
    c = args.shift
    if @rethrow_errors
      c.exception_renderer = ->(e) { raise }
    end
    c
  when Liquid::Drop
    drop = args.shift
    drop.context = Context.new([drop, assigns], instance_assigns, registers, @rethrow_errors, @resource_limits)
  when Hash
    Context.new([args.shift, assigns], instance_assigns, registers, @rethrow_errors, @resource_limits)
  when nil
    Context.new(assigns, instance_assigns, registers, @rethrow_errors, @resource_limits)
  else
    raise ArgumentError, "Expected Hash or Liquid::Context as parameter"
  end

  # The trailing argument may carry render options or extra filters.
  case args.last
  when Hash
    options = args.pop
    registers.merge!(options[:registers]) if options[:registers].is_a?(Hash)
    apply_options_to_context(context, options)
  when Module, Array
    context.add_filters(args.pop)
  end

  context.resource_limits.reset

  begin
    result = with_profiling(context) do
      @root.render(context)
    end
    result.respond_to?(:join) ? result.join : result
  rescue Liquid::MemoryError => e
    context.handle_error(e)
  ensure
    @errors = context.errors
  end
end
Render takes a hash with local variables .
436
# Truncates a string down to +length+ characters, appending
# +truncate_string+ when truncation occurs. nil input yields nil.
def truncate(input, length = 50, truncate_string = "...".freeze)
  return if input.nil?
  input_str = input.to_s
  length = Utils.to_integer(length)
  truncate_string_str = truncate_string.to_s
  # Reserve room for the suffix inside the requested length.
  l = length - truncate_string_str.length
  l = 0 if l < 0
  input_str.length > length ? input_str[0...l] + truncate_string_str : input_str
end
Truncate a string down to x characters
437
# Sorts array elements, optionally by a +property+ for arrays of
# hashes/drops. Nil values are handled via nil_safe_compare.
def sort(input, property = nil)
  ary = InputIterator.new(input)
  return [] if ary.empty?

  if property.nil?
    ary.sort { |a, b| nil_safe_compare(a, b) }
  elsif ary.all? { |el| el.respond_to?(:[]) }
    begin
      ary.sort { |a, b| nil_safe_compare(a[property], b[property]) }
    rescue TypeError
      raise_property_error(property)
    end
  end
end
Sort elements of the array provide optional property with which to sort an array of hashes or drops
438
# Filters array elements to those with a certain property value. Without a
# target value, any truthy property matches.
def where(input, property, target_value = nil)
  ary = InputIterator.new(input)

  if ary.empty?
    []
  elsif ary.first.respond_to?(:[]) && target_value.nil?
    begin
      ary.select { |item| item[property] }
    rescue TypeError
      raise_property_error(property)
    end
  elsif ary.first.respond_to?(:[])
    begin
      ary.select { |item| item[property] == target_value }
    rescue TypeError
      raise_property_error(property)
    end
  end
end
Filter the elements of an array to those with a certain property value . By default the target is any truthy value .
439
# Removes duplicate elements from an array, optionally keyed by +property+
# for arrays of hashes/drops.
def uniq(input, property = nil)
  ary = InputIterator.new(input)

  if property.nil?
    ary.uniq
  elsif ary.empty?
    []
  elsif ary.first.respond_to?(:[])
    begin
      ary.uniq { |a| a[property] }
    rescue TypeError
      raise_property_error(property)
    end
  end
end
Remove duplicate elements from an array provide optional property with which to determine uniqueness
440
# Replaces every occurrence of +string+ in +input+ with +replacement+.
def replace(input, string, replacement = ''.freeze)
  input.to_s.gsub(string.to_s, replacement.to_s)
end
Replace occurrences of a string with another
441
# Replaces only the first occurrence of +string+ in +input+.
def replace_first(input, string, replacement = ''.freeze)
  input.to_s.sub(string.to_s, replacement.to_s)
end
Replace the first occurrences of a string with another
442
# Pushes a new local scope on the stack for the duration of the block,
# popping it afterwards and restoring the previous stack-usage flag.
def stack(new_scope = nil)
  old_stack_used = @this_stack_used
  if new_scope
    push(new_scope)
    @this_stack_used = true
  else
    @this_stack_used = false
  end
  yield
ensure
  pop if @this_stack_used
  @this_stack_used = old_stack_used
end
Pushes a new local scope on the stack pops it at the end of the block
443
# Fetches a variable by +key+, searching local scopes first and then the
# environments, converting the result to its liquid representation.
def find_variable(key, raise_on_not_found: true)
  index = @scopes.find_index { |s| s.key?(key) }
  scope = @scopes[index] if index

  variable = nil
  if scope.nil?
    # Not in any scope: probe the environments in order.
    @environments.each do |e|
      variable = lookup_and_evaluate(e, key, raise_on_not_found: raise_on_not_found)
      if !variable.nil? || @strict_variables && raise_on_not_found
        scope = e
        break
      end
    end
  end

  # Fall back to the outermost environment/scope when nothing matched.
  scope ||= @environments.last || @scopes.last
  variable ||= lookup_and_evaluate(scope, key, raise_on_not_found: raise_on_not_found)

  variable = variable.to_liquid
  variable.context = self if variable.respond_to?(:context=)
  variable
end
Fetches an object starting at the local scope and then moving up the hierarchy.
444
# Restores Offense objects from hashes loaded out of a JSON cache file,
# rebuilding source ranges against the current file contents.
def deserialize_offenses(offenses)
  source_buffer = Parser::Source::Buffer.new(@filename)
  source_buffer.source = File.read(@filename, encoding: Encoding::UTF_8)
  offenses.map! do |o|
    location = Parser::Source::Range.new(source_buffer,
                                         o['location']['begin_pos'],
                                         o['location']['end_pos'])
    Cop::Offense.new(o['severity'], location, o['message'],
                     o['cop_name'], o['status'].to_sym)
  end
end
Restore an offense object loaded from a JSON file .
445
# Checks whether there is whitespace immediately before this token.
def space_before?
  position = begin_pos.zero? ? begin_pos : begin_pos - 1
  # NOTE(review): the regexp looks extraction-garbled; upstream anchors a
  # whitespace match at +position+ (/\G\s/) — confirm.
  pos.source_buffer.source.match(/ \G \s /, position)
end
Checks if there is whitespace before token
446
# Registers a CLI option on +opts+, appending canned help text and storing
# the parsed value under the option's long name.
def option(opts, *args)
  long_opt_symbol = long_opt_symbol(args)
  args += Array(OptionsHelp::TEXT[long_opt_symbol])
  opts.on(*args) do |arg|
    @options[long_opt_symbol] = arg
    yield arg if block_given?
  end
end
Sets a value in the
447
# Returns (and memoizes) whether any Include pattern could possibly match a
# hidden file: a Regexp, or a glob touching a dot-prefixed path segment.
def possibly_include_hidden?
  return @possibly_include_hidden if defined?(@possibly_include_hidden)
  @possibly_include_hidden = patterns_to_include.any? do |s|
    s.is_a?(Regexp) || s.start_with?('.') || s.include?('/.')
  end
end
Returns true if there's a chance that an Include pattern matches hidden files, false if that's definitely not possible.
448
# Raises when autocorrection produced source identical to a previous run —
# a sure sign of an infinite correction loop. Records the checksum otherwise.
def check_for_infinite_loop(processed_source, offenses)
  checksum = processed_source.checksum
  if @processed_sources.include?(checksum)
    raise InfiniteCorrectionLoop.new(processed_source.path, offenses)
  end
  @processed_sources << checksum
end
Check whether a run created source identical to a previous run which means that we definitely have an infinite loop .
449
# Finds all Ruby source files under +base_dir+ that should be inspected,
# honoring per-directory config and hidden-file rules.
def target_files_in_dir(base_dir = Dir.pwd)
  # Support Windows paths: normalize backslashes before globbing.
  if File::ALT_SEPARATOR
    base_dir = base_dir.gsub(File::ALT_SEPARATOR, File::SEPARATOR)
  end

  all_files = find_files(base_dir, File::FNM_DOTMATCH)
  # Hidden files are those only discoverable with FNM_DOTMATCH.
  hidden_files = Set.new(all_files - find_files(base_dir, 0))
  base_dir_config = @config_store.for(base_dir)

  target_files = all_files.select do |file|
    to_inspect?(file, hidden_files, base_dir_config)
  end

  # Under --fail-fast, inspect the most recently modified files first.
  target_files.sort_by! { |path| -Integer(File.mtime(path)) } if fail_fast?
  target_files
end
Finds all Ruby source files under the current or other supplied directory . A Ruby source file is defined as a file with the . rb extension or a file with no extension that has a ruby shebang line as its first line . It is possible to specify includes and excludes using the config file so you can include other Ruby files like Rakefiles and gemspecs .
450
# Computes (and caches class-wide) a checksum over the RuboCop program's own
# loaded source files, so caches invalidate when RuboCop itself changes.
def rubocop_checksum
  ResultCache.source_checksum ||= begin
    lib_root = File.join(File.dirname(__FILE__), '..')
    exe_root = File.join(lib_root, '..', 'exe')
    source_files = $LOADED_FEATURES + Find.find(exe_root).to_a
    sources = source_files
              .select { |path| File.file?(path) }
              .sort
              .map { |path| IO.read(path, encoding: Encoding::UTF_8) }
    Digest::SHA1.hexdigest(sources.join)
  end
end
The checksum of the rubocop program running the inspection .
451
# Returns a digest of the invocation options, minus those that cannot affect
# which offenses are found (and thus don't affect caching).
def relevant_options_digest(options)
  options = options.reject { |key, _| NON_CHANGING.include?(key) }
  # NOTE(review): the regexp looks extraction-garbled; upstream collapses
  # non-alphanumeric runs to '_' (/[^a-z]+/i) — confirm.
  options = options.to_s.gsub(/ /i, '_')
  options.length <= 32 ? options : Digest::SHA1.hexdigest(options)
end
Return a hash of the options given at invocation minus the ones that have no effect on which offenses and disabled line ranges are found and thus don t affect caching .
452
# Recursively merges +derived_hash+ over +base_hash+: overlapping hash
# values merge deep, union-mode keys merge with |, and unset_nil removes
# keys explicitly set to nil in the derived hash.
def merge(base_hash, derived_hash, **opts)
  result = base_hash.merge(derived_hash)
  keys_appearing_in_both = base_hash.keys & derived_hash.keys
  keys_appearing_in_both.each do |key|
    if opts[:unset_nil] && derived_hash[key].nil?
      result.delete(key)
    elsif base_hash[key].is_a?(Hash)
      result[key] = merge(base_hash[key], derived_hash[key], **opts)
    elsif should_union?(base_hash, key, opts[:inherit_mode])
      result[key] = base_hash[key] | derived_hash[key]
    elsif opts[:debug]
      warn_on_duplicate_setting(base_hash, derived_hash, key, opts)
    end
  end
  result
end
Return a recursive merge of two hashes . That is a normal hash merge with the addition that any value that is a hash and occurs in both arguments will also be merged . And so on .
453
# Builds the node object: merges ohai data with the node's automatic
# attributes, expands the run list, and returns the node (no server save).
def build_node
  Chef::Log.trace("Building node object for #{@node_name}")
  @node = Chef::Node.find_or_create(node_name)

  ohai_data = @ohai.data.merge(@node.automatic_attrs)
  @node.consume_external_attrs(ohai_data, nil)

  @run_list_expansion = @node.expand!("server")
  @expanded_run_list_with_versions = @run_list_expansion.recipes.with_version_constraints_strings

  Chef::Log.info("Run List is [#{@node.run_list}]")
  Chef::Log.info("Run List expands to [#{@expanded_run_list_with_versions.join(', ')}]")
  @node
end
DoppelGanger implementation of build_node. Preserves as many of the node's attributes as possible, and does not save updates to the server.
454
# Papers over inconsistencies in the model classes' list APIs and returns
# the actual model objects rather than URI => object pairs.
def list_objects
  objects = @model_class.method(:list).arity == 0 ? @model_class.list : @model_class.list(true)
  objects.map { |obj| Array(obj).find { |o| o.kind_of?(@model_class) } }
end
Papers over inconsistencies in the model classes' APIs and returns the objects the user wanted instead of the URI => object mapping.
455
# Applies default configuration values for workstation-style tools:
# node name from the login, plus default client and validation keys.
def apply_defaults
  Config[:node_name] ||= Etc.getlogin

  unless Config.key?(:client_key) || Config.key?(:client_key_contents)
    key_path = find_default_key(["#{Config[:node_name]}.pem", "user.pem"])
    Config[:client_key] = key_path if key_path
  end

  unless Config.key?(:validation_key) || Config.key?(:validation_key_contents)
    key_path = find_default_key(["#{Config[:validation_client_name]}.pem", "validator.pem", "validation.pem"])
    Config[:validation_key] = key_path if key_path
  end
end
Apply default configuration values for workstation - style tools .
456
# Looks for the first existing default key file among +key_names+, first
# next to the config file, then in the home chef directory. Returns nil
# when nothing exists.
def find_default_key(key_names)
  key_names.each do |filename|
    path = Pathname.new(filename)
    if config_location
      local_path = path.expand_path(File.dirname(config_location))
      return local_path.to_s if local_path.exist?
    end
    home_path = path.expand_path(home_chef_dir)
    return home_path.to_s if home_path.exist?
  end
  nil
end
Look for a default key file .
457
# Sets the response status code when given a value; always returns the
# current status.
def status(value = nil)
  response.status = Rack::Utils.status_code(value) if value
  response.status
end
Set or retrieve the response status code .
458
# Loads configuration from YAML files matching the given path globs
# (ERB-preprocessed), filtered for the current environment.
def config_file(*paths)
  Dir.chdir(root || '.') do
    paths.each do |pattern|
      Dir.glob(pattern) do |file|
        raise UnsupportedConfigType unless ['.yml', '.erb'].include?(File.extname(file))
        logger.info "loading config file '#{file}'" if logging? && respond_to?(:logger)
        document = ERB.new(IO.read(file)).result
        yaml = YAML.load(document)
        config = config_for_env(yaml)
        config.each_pair { |key, value| set(key, value) }
      end
    end
  end
end
Loads the configuration from the YAML files whose + paths + are passed as arguments filtering the settings for the current environment . Note that these + paths + can actually be globs .
459
# Returns true when the argument is a hash whose root keys include any
# recognized environment name.
def environment_keys?(hash)
  hash.is_a?(Hash) && hash.any? { |k, _| environments.include?(k.to_s) }
end
Returns true if supplied with a hash that has any recognized + environments + in its root keys .
460
# Sets the Link HTTP header for stylesheets and returns the matching HTML
# tags, defaulting the type to text/css.
def stylesheet(*urls)
  urls << {} unless urls.last.respond_to? :to_hash
  urls.last[:type] ||= mime_type(:css)
  link(:stylesheet, *urls)
end
Sets Link HTTP header and returns HTML tags for using stylesheets .
461
# Appends entries to the Link HTTP header and returns the corresponding
# HTML <link> tags, one per URL.
def link(*urls)
  opts = urls.last.respond_to?(:to_hash) ? urls.pop : {}
  opts[:rel] = urls.shift unless urls.first.respond_to? :to_str
  options = opts.map { |k, v| " #{k}=#{v.to_s.inspect}" }
  html_pattern = "<link href=\"%s\"#{options.join} />"
  http_pattern = ["<%s>", *options].join ";"
  link = (response["Link"] ||= "")

  urls.map do |url|
    link << ",\n" unless link.empty?
    link << (http_pattern % url)
    html_pattern % url
  end.join "\n"
end
Sets Link HTTP header and returns corresponding HTML tags .
462
# Parses a YAML file of run definitions (ERB-preprocessed) and yields each
# run hash merged over DEFAULT_RUN, with its index. With no file, yields
# DEFAULT_RUN once.
#
# @param file [String, nil] path to the run-matrix YAML file
def each_run(file)
  if file
    file = File.new(file)
    matrix = YAML.load(ERB.new(file.read).result)
    file.close
    matrix.each_with_index do |run, i|
      # BUG FIX: the merged hash was previously discarded
      # (`DEFAULT_RUN.merge(run)` with no assignment — Hash#merge returns a
      # new hash), so defaults were never applied to the yielded run.
      yield(DEFAULT_RUN.merge(run), i)
    end
  else
    yield(DEFAULT_RUN)
  end
end
Parse a file of run definitions and yield each run .
463
# Adds a lazily-evaluated attribute to the factory; the block runs per
# generated instance unless the attribute is overridden.
def add_attribute(name, &block)
  declaration = Declaration::Dynamic.new(name, @ignore, block)
  @definition.declare_attribute(declaration)
end
Adds an attribute to the factory . The attribute value will be generated lazily by calling the block whenever an instance is generated . The block will not be called if the attribute is overridden for a specific instance .
464
# Adds an attribute whose values come from a registered inline sequence,
# incremented on each use.
def sequence(name, *args, &block)
  sequence = Sequence.new(name, *args, &block)
  FactoryBot::Internal.register_inline_sequence(sequence)
  add_attribute(name) { increment_sequence(sequence) }
end
Adds an attribute that will have unique values generated by a sequence with a specified format .
465
# Adds an attribute that builds an association using the parent's build
# strategy. Blocks are not supported and raise an error.
def association(name, *options)
  if block_given?
    raise AssociationDefinitionError.new(
      "Unexpected block passed to '#{name}' association " \
      "in '#{@definition.name}' factory",
    )
  else
    declaration = Declaration::Association.new(name, *options)
    @definition.declare_attribute(declaration)
  end
end
Adds an attribute that builds an association . The associated instance will be built using the same build strategy as the parent instance .
466
# Returns (and memoizes) a Lookahead for the root selections of this query.
# Only works with class-based types.
def lookahead
  @lookahead ||= begin
    ast_node = selected_operation
    root_type = warden.root_type_for_operation(ast_node.operation_type || "query")
    root_type = root_type.metadata[:type_class] || raise("Invariant: `lookahead` only works with class-based types")
    GraphQL::Execution::Lookahead.new(query: self, root_type: root_type, ast_nodes: [ast_node])
  end
end
A lookahead for the root selections of this query
467
# Executes the query once (if not already executed) and returns the
# memoized Result.
def result
  unless @executed
    with_prepared_ast {
      Execution::Multiplex.run_queries(@schema, [self], context: @context)
    }
  end
  @result ||= Query::Result.new(query: self, values: @result_values)
end
Get the result for this query executing it once
468
# Declare that this object type implements the given interfaces. The
# declaration is validated when the schema is defined.
#
# @raise [ArgumentError] when interfaces is not an Array
def implements(interfaces, inherit: false)
  unless interfaces.is_a?(Array)
    raise ArgumentError, "`implements(interfaces)` must be an array, not #{interfaces.class} (#{interfaces})"
  end

  # Invalidate caches derived from the interface lists.
  @clean_interfaces = nil
  @clean_inherited_fields = nil
  target = inherit ? @dirty_inherited_interfaces : @dirty_interfaces
  target.concat(interfaces)
end
Declare that this object implements this interface . This declaration will be validated when the schema is defined .
469
# Given a callable whose API used to take `from` arguments, check its
# arity and, if needed, apply a wrapper so it can be called with `to`
# arguments, warning the application with `name`.
#
# @raise [RuntimeError] when the arity matches neither from nor to
def wrap_arity(callable, from:, to:, name:, last: false)
  arity = get_arity(callable)
  if arity == to || arity < 0
    # Already compatible (or variadic): use as-is.
    callable
  elsif arity == from
    message = "#{name} with #{from} arguments is deprecated, it now accepts #{to} arguments, see:"
    backtrace = caller(0, 20)
    # First frame not matching the pattern — presumably the application's
    # own code rather than library frames (NOTE(review): regex looks
    # mangled in this copy; confirm the intended pattern).
    user_line = backtrace.find { |l| l !~ / \/ / }
    warn(message + "\n" + user_line + "\n")
    # `last` picks whether the legacy args occupy the tail or the head.
    wrapper = last ? LastArgumentsWrapper : FirstArgumentsWrapper
    wrapper.new(callable, from)
  else
    raise "Can't wrap #{callable} (arity: #{arity}) to have arity #{to}"
  end
end
Given a callable whose API used to take from arguments check its arity and if needed apply a wrapper so that it can be called with to arguments . If a wrapper is applied warn the application with name .
470
# A subscription `event` fired for `object`; re-run the stored query for
# `subscription_id` and deliver the result to the subscriber.
def execute(subscription_id, event, object)
  data = read_subscription(subscription_id)
  query_string = data.fetch(:query_string)
  variables = data.fetch(:variables)
  context = data.fetch(:context)
  operation_name = data.fetch(:operation_name)
  result = @schema.execute({
    query: query_string,
    context: context,
    subscription_topic: event.topic,
    operation_name: operation_name,
    variables: variables,
    root_value: object,
  })
  deliver(subscription_id, result)
rescue GraphQL::Schema::Subscription::NoUpdateError
  # Resolver opted out of this update: deliver nothing, keep subscription.
rescue GraphQL::Schema::Subscription::UnsubscribedError
  # Resolver asked to stop: drop the stored subscription.
  delete_subscription(subscription_id)
end
event was triggered on object and subscription_id was subscribed so it should be updated .
471
# `event` was triggered on `object`: push an update to every subscriber.
def execute_all(event, object)
  each_subscription_id(event) { |sid| execute(sid, event, object) }
end
Event `event` occurred on `object`; update all subscribers.
472
# Render this type as a GraphQL IDL string, using the given printer or a
# freshly built one.
def to_definition(schema, printer: nil, **args)
  writer = printer || GraphQL::Schema::Printer.new(schema, **args)
  writer.print_type(self)
end
Return a GraphQL string for the type definition
473
# Serialize the schema via `method_name` and write the string to `file`,
# creating the parent directory first if necessary.
def write_outfile(method_name, file)
  schema = @load_schema.call(self)
  context = @load_context.call(self)
  output = schema.public_send(method_name, only: @only, except: @except, context: context)
  FileUtils.mkdir_p(File.dirname(file))
  File.write(file, output)
end
Use the provided method_name to generate a string from the specified schema then write it to file .
474
# Register the schema dump tasks (`schema:idl`, `schema:json`,
# `schema:dump`) under the configured namespace using the Rake DSL.
def define_task
  namespace(@namespace) do
    namespace("schema") do
      desc("Dump the schema to IDL in #{idl_path}")
      task(:idl => @dependencies) do
        write_outfile(:to_definition, idl_path)
        puts("Schema IDL dumped into #{idl_path}")
      end

      desc("Dump the schema to JSON in #{json_path}")
      task(:json => @dependencies) do
        write_outfile(:to_json, json_path)
        puts("Schema JSON dumped into #{json_path}")
      end

      desc("Dump the schema to JSON and IDL")
      task(:dump => [:idl, :json])
    end
  end
end
Use the Rake DSL to add tasks
475
# Wrap this field's resolution in a Lazy so it can be .then-ed and
# resolved later.
def prepare_lazy(obj, args, ctx)
  GraphQL::Execution::Lazy.new do
    lazy_resolve(obj, args, ctx)
  end
end
Prepare a lazy value for this field . It may be then - ed and resolved later .
476
# Returns true when (member, ctx) passes both the `only` whitelist and
# the `except` blacklist; a nil filter means "no restriction".
def call(member, ctx)
  return false if @only && !@only.call(member, ctx)
  return false if @except && @except.call(member, ctx)

  true
end
Returns true if (member, ctx) passes this filter.
477
# Visit the query's internal representation, driving each analyzer's
# enter/leave hooks along the way.
#
# @param query [GraphQL::Query]
# @param analyzers [Array] analyzers to run (each may opt out via analyze?)
# @param multiplex_states [Array] reducer states shared across a multiplex
# @return [Array] one finalized result per reducer state
def analyze_query(query, analyzers, multiplex_states: [])
  query.trace("analyze_query", { query: query }) do
    # Let each analyzer opt out of this particular query.
    analyzers_to_run = analyzers.select do |analyzer|
      if analyzer.respond_to?(:analyze?)
        analyzer.analyze?(query)
      else
        true
      end
    end
    reducer_states = analyzers_to_run.map { |r| ReducerState.new(r, query) } + multiplex_states
    irep = query.internal_representation
    # Walk every operation definition depth-first.
    irep.operation_definitions.each do |name, op_node|
      reduce_node(op_node, reducer_states)
    end
    reducer_states.map(&:finalize_reducer)
  end
end
Visit query s internal representation calling analyzers along the way .
478
# Depth-first walk: enter the node, recurse into every typed child,
# then leave the node.
def reduce_node(irep_node, reducer_states)
  visit_analyzers(:enter, irep_node, reducer_states)
  irep_node.typed_children.each_value do |children|
    children.each_value do |child_node|
      reduce_node(child_node, reducer_states)
    end
  end
  visit_analyzers(:leave, irep_node, reducer_states)
end
Enter the node visit its children then leave the node .
479
# Validate a query string (or parsed document) against this schema.
#
# @return [Array] the static validation errors
def validate(string_or_document, rules: nil, context: nil)
  document =
    case string_or_document
    when String then GraphQL.parse(string_or_document)
    else string_or_document
    end
  query = GraphQL::Query.new(self, document: document, context: context)
  opts = { schema: self }
  opts[:rules] = rules if rules
  validator = GraphQL::StaticValidation::Validator.new(opts)
  validator.validate(query)[:errors]
end
Validate a query string according to this schema .
480
# Execute a single query on this schema by delegating to multiplex.
# Raises an error if the schema definition is invalid.
def execute(query_str = nil, **kwargs)
  kwargs[:query] = query_str if query_str
  ctx = kwargs[:context]
  multiplex_context =
    if ctx
      # Propagate only the multiplex-relevant context keys.
      { backtrace: ctx[:backtrace], tracers: ctx[:tracers] }
    else
      {}
    end
  multiplex([kwargs], max_complexity: nil, context: multiplex_context).first
end
Execute a query on itself . Raises an error if the schema definition is invalid .
481
# Compatibility shim so instance-level and class-level callers can run
# correctness checks on resolve_type results without calling one another.
# May be invoked as (type, object, ctx) or (object, ctx); in the 2-arg
# form the arguments shift and `type` becomes nil.
def check_resolved_type(type, object, ctx = :__undefined__)
  if ctx == :__undefined__
    # 2-arg call: shift (type, object) into (object, ctx).
    ctx = object
    object = type
    type = nil
  end

  if object.is_a?(GraphQL::Schema::Object)
    # Unwrap the class-based wrapper to the underlying application object.
    object = object.object
  end

  if type.respond_to?(:graphql_definition)
    type = type.graphql_definition
  end

  # Prefer a type-specific resolve_type proc; otherwise fall back to the
  # caller-supplied block.
  type_proc = type && type.resolve_type_proc
  type_result = if type_proc
    type_proc.call(object, ctx)
  else
    yield(type, object, ctx)
  end

  if type_result.nil?
    nil
  else
    # The resolver may return a lazy value; validate after it resolves.
    after_lazy(type_result) do |resolved_type_result|
      if resolved_type_result.respond_to?(:graphql_definition)
        resolved_type_result = resolved_type_result.graphql_definition
      end
      if !resolved_type_result.is_a?(GraphQL::BaseType)
        type_str = "#{resolved_type_result} (#{resolved_type_result.class.name})"
        raise "resolve_type(#{object}) returned #{type_str}, but it should return a GraphQL type"
      else
        resolved_type_result
      end
    end
  end
end
This is a compatibility hack so that instance - level and class - level methods can get correctness checks without calling one another
482
# Return a unique identifier for `object`, delegating to the configured
# `id_from_object` proc.
#
# @raise [NotImplementedError] when no proc has been configured
def id_from_object(object, type, ctx)
  unless @id_from_object_proc
    raise NotImplementedError, "Can't generate an ID for #{object.inspect} of type #{type}, schema's `id_from_object` must be defined"
  end

  @id_from_object_proc.call(object, type, ctx)
end
Get a unique identifier from this object
483
# Return the GraphQL IDL (SDL) for this schema.
def to_definition(only: nil, except: nil, context: {})
  GraphQL::Schema::Printer.print_schema(
    self,
    only: only,
    except: except,
    context: context
  )
end
Return the GraphQL IDL for the schema
484
# Get the underlying value for this enum: first by name, then by a
# linear scan over values; nil when nothing matches.
def coerce_non_null_input(value_name, ctx)
  if @values_by_name.key?(value_name)
    return @values_by_name.fetch(value_name).value
  end

  match = @values_by_name.find { |_name, v| v.value == value_name }
  match && match[1].value
end
Get the underlying value for this enum value
485
# Exercise the various InputObject access styles (method call, [] access,
# key?) — used for testing input object behavior.
def inspect_input(input:)
  result = []
  result << input.class.name
  result << input.helper_method
  result << input.string_value
  result << input[:string_value]
  result << input.key?(:string_value).to_s
  result << input[:string_value]
  result << input.ensemble
  result << input.key?(:ensemble).to_s
  result
end
This is for testing input object behavior
486
# Customize the JSON document indexed into Elasticsearch: embed category
# titles, author name/department, and comment text.
def as_indexed_json(options = {})
  author_fields = [:full_name, :department]
  self.as_json(
    include: {
      categories: { only: :title },
      authors: { methods: author_fields, only: author_fields },
      comments: { only: :text }
    }
  )
end
Customize the JSON serialization for Elasticsearch
487
# Text representation of the client with credentials masked. Passwords
# and bearer tokens are fully starred; access tokens and client secrets
# keep their tail beyond position 36 so they remain identifiable.
def inspect
  inspected = super
  # gsub! with a String pattern matches literally, so no regex escaping
  # of the secret is needed; the `if` guards skip unset credentials.
  inspected.gsub! @password, '*******' if @password
  inspected.gsub! @management_console_password, '*******' if @management_console_password
  inspected.gsub! @bearer_token, '********' if @bearer_token
  inspected.gsub! @access_token, "#{'*'*36}#{@access_token[36..-1]}" if @access_token
  inspected.gsub! @client_secret, "#{'*'*36}#{@client_secret[36..-1]}" if @client_secret
  inspected
end
Text representation of the client masking tokens and passwords
488
# Memoized Sawyer hypermedia agent used for all API requests. Sets the
# default headers and applies whichever authentication scheme the client
# was configured with.
def agent
  @agent ||= Sawyer::Agent.new(endpoint, sawyer_options) do |http|
    http.headers[:accept] = default_media_type
    http.headers[:content_type] = "application/json"
    http.headers[:user_agent] = user_agent
    # Exactly one auth strategy applies, checked in priority order.
    if basic_authenticated?
      http.basic_auth(@login, @password)
    elsif token_authenticated?
      http.authorization 'token', @access_token
    elsif bearer_authenticated?
      http.authorization 'Bearer', @bearer_token
    elsif application_authenticated?
      # Application auth (client id/secret) goes in the query params,
      # not an Authorization header.
      http.params = http.params.merge application_authentication
    end
  end
end
Hypermedia agent for the GitHub API
489
# Perform the request and report success as a boolean: true only for an
# HTTP 204, false when the resource was not found.
def boolean_from_response(method, path, options = {})
  begin
    request(method, path, options)
    204 == @last_response.status
  rescue Octokit::NotFound
    false
  end
end
Executes the request checking if it was successful
490
# link_to variant that adds `aria-selected` depending on whether the
# link targets the current path. Same API as link_to.
def aria_selected_link_to(text, link, options = {})
  link_type = options[:aria_link_type] || :inclusive
  selected = is_active_link?(link, link_type)
  link_to(text, link, options.merge("aria-selected": selected))
end
Adds the aria-selected attribute to a link when it's pointing to the current path. The API is the same as the link_to one and uses this helper internally.
491
# Render every attribute declared by the handler, each wrapped in a
# div.field, joined safely for output.
def all_fields
  rendered = public_attributes.map do |attribute_name, attribute_type|
    @template.content_tag(:div, input_field(attribute_name, attribute_type), class: "field")
  end
  safe_join(rendered)
end
Renders all form attributes defined by the handler .
492
# Render a single attribute from the form handler; `as:` forces a
# specific input helper instead of the inferred type.
def input(name, options = {})
  custom = options[:as]
  if custom
    send(custom.to_s, name, options[:input] || {})
  else
    input_field(name, find_input_type(name.to_s))
  end
end
Renders a single attribute from the form handlers .
493
# All resource types eligible to appear as an activity, restricted to
# the classes actually loaded in this installation.
def resource_types
  @resource_types ||= begin
    candidates = %w(
      Decidim::Accountability::Result
      Decidim::Blogs::Post
      Decidim::Comments::Comment
      Decidim::Consultations::Question
      Decidim::Debates::Debate
      Decidim::Meetings::Meeting
      Decidim::Proposals::Proposal
      Decidim::Surveys::Survey
      Decidim::Assembly
      Decidim::Consultation
      Decidim::Initiative
      Decidim::ParticipatoryProcess
    )
    candidates.select { |klass| klass.safe_constantize.present? }
  end
end
All the resource types that are eligible to be included as an activity .
494
# Build an authorization form in a view; accepts the same arguments as
# form_for, with authorization-specific defaults merged in.
def authorization_form_for(record, options = {}, &block)
  defaults = {
    builder: AuthorizationFormBuilder,
    as: "authorization_handler",
    url: decidim_verifications.authorizations_path
  }
  decidim_form_for(record, defaults.merge(options), &block)
end
Creates a new authorization form in a view; accepts the same arguments as form_for.
495
# Remember the URL the user should be redirected to after login.
# Skipped on Devise controllers without an explicit redirect_url, and
# for non-HTML requests.
def store_current_location
  return if devise_controller? && params[:redirect_url].blank?
  return unless request.format.html?

  store_location_for(:user, params[:redirect_url] || request.url)
end
Stores the url where the user will be redirected after login .
496
# Whether the resource's scope should be displayed: the space must have
# scopes enabled, the resource must be scoped, and its scope must differ
# from the space's own scope.
def has_visible_scopes?(resource)
  space = resource.participatory_space
  space.scopes_enabled? &&
    resource.scope.present? &&
    space.scope != resource.scope
end
Checks if the resource should show its scope or not. resource - the resource to analyze
497
# Render a scopes picker field in a form not linked to a specific model.
#
# name  - name for the input
# value - value for the input
def scopes_picker_tag(name, value, options = {})
  root = try(:current_participatory_space)&.scope
  field = options[:field] || name

  scopes_picker_field_tag(name, value, id: options[:id]) do |scope|
    {
      url: decidim.scopes_picker_path(root: root, current: scope&.id, field: field),
      text: scope_name_for_picker(scope, I18n.t("decidim.scopes.global"))
    }
  end
end
Renders a scopes picker field in a form not linked to a specific model . name - name for the input value - value for the input
498
# Render the "Amendments" section for an amendable resource: a heading
# with the emendation count plus a collapsible list of the emendations.
# Returns nil when the resource is not amendable or has none.
def amendments_for(amendable)
  return unless amendable.amendable? && amendable.emendations.count.positive?

  content = content_tag(:h2, class: "section-heading", id: "amendments") do
    t("section_heading", scope: "decidim.amendments.amendable", count: amendable.emendations.count)
  end
  # Collapsible list cell showing up to 4 emendations per row layout.
  content << cell(
    "decidim/collapsible_list",
    amendable.emendations,
    cell_options: { context: { current_user: current_user } },
    list_class: "row small-up-1 medium-up-2 card-grid",
    size: 4
  ).to_s
  content_tag :div, content.html_safe, class: "section"
end
Renders the emendations of an amendable resource
499
# Only while the amendment is under evaluation may the amendable's
# author — or an admin — accept/reject the emendation.
def allowed_to_accept_and_reject?(emendation)
  return unless emendation.amendment.evaluating?

  is_author = emendation.amendable.created_by?(current_user)
  is_author || current_user.admin?
end
Checks if the user can accept and reject the emendation