idx
int64
0
24.9k
question
stringlengths
68
4.14k
target
stringlengths
9
749
800
# Compares the yard documentation coverage reported in +stat_lines+ against
# the configured 'min_coverage_percentage' option.
#
# Returns :warn when no coverage figure can be parsed from the last stat
# line, :pass when coverage meets the minimum, otherwise the coverage float.
#
# NOTE(review): the match pattern appears whitespace-mangled — as written it
# has no capture group, so `match.captures[0]` is always nil (nil.to_f is
# 0.0). Upstream presumably captured the percentage, e.g. /\s(\d+\.\d+)%\s/
# — verify against the original hook source before relying on this.
def check_yard_coverage ( stat_lines ) if config [ 'min_coverage_percentage' ] match = stat_lines . last . match ( / \s \d \s \s / ) unless match return :warn end yard_coverage = match . captures [ 0 ] . to_f if yard_coverage >= config [ 'min_coverage_percentage' ] . to_f return :pass end yard_coverage end end
Check the yard coverage
801
# Builds the list of Overcommit error messages for a yard coverage failure:
# one summary message comparing measured vs. required coverage, plus one
# message per undocumented object parsed from +error_text+.
#
# NOTE(review): both regexes appear whitespace-mangled — the split pattern
# and the capture-less "(file:line)" pattern mean `captures[0]` and
# `captures[1]` are always nil. Upstream presumably used something like
# /\((.+):(\d+)\)/ to extract file and line — verify before relying on this.
def error_messages ( yard_coverage , error_text ) first_message = "You have a #{yard_coverage}% yard documentation coverage. " "#{config['min_coverage_percentage']}% is the minimum required." messages = [ Overcommit :: Hook :: Message . new ( :error , nil , nil , first_message ) ] errors = error_text . strip . split ( "\n" ) errors . each do | undocumented_object | undocumented_object_message , file_info = undocumented_object . split ( / \s / ) file_info_match = file_info . match ( / \( \d \) / ) if file_info_match file = file_info_match . captures [ 0 ] line = file_info_match . captures [ 1 ] messages << Overcommit :: Hook :: Message . new ( :error , file , line , "#{file}:#{line}: #{undocumented_object_message}" ) end end messages end
Create the error messages
802
# Returns the files added, copied, or modified by the commit under check.
#
# For a merge commit the diff is taken against 'HEAD^ HEAD' so only the
# merge's own changes are inspected; otherwise the staged changes are used
# when this is a squash. Memoized after the first call.
def modified_files
  diff_refs = ('HEAD^ HEAD' if merge_commit?)
  @modified_files ||=
    Overcommit::GitRepo.modified_files(staged: squash?, refs: diff_refs)
end
Get a list of files that were added, copied, or modified in the merge commit. Renames and deletions are ignored, since there should be nothing to check.
803
# Collects the files touched by any rewritten commit by diffing each
# old-hash/new-hash pair and unioning the results, then filtering the
# combined list once. Memoized after the first call.
def modified_files
  @modified_files ||= begin
    @modified_files = []
    rewritten_commits.each do |commit|
      pair = "#{commit.old_hash} #{commit.new_hash}"
      @modified_files |= Overcommit::GitRepo.modified_files(refs: pair)
    end
    filter_modified_files(@modified_files)
  end
end
Get a list of files that have been added or modified as part of a rewritten commit. Renames and deletions are ignored, since there should be nothing to check.
804
# Derives an overall [status, output] pair from a list of hook messages.
#
# Any :error message fails the run; otherwise any :warning produces :warn;
# otherwise the run passes. Output is the newline-joined messages (with a
# trailing newline), or the empty string when there are none.
def basic_status_and_output(messages)
  status =
    if messages.any? { |msg| msg.type == :error }
      :fail
    elsif messages.any? { |msg| msg.type == :warning }
      :warn
    else
      :pass
    end

  output = messages.empty? ? '' : messages.join("\n") + "\n"
  [status, output]
end
Returns status and output for messages assuming no special treatment of messages occurring on unmodified lines .
805
# Runs the hook, converting its raw result into a [status, output] pair and
# applying any configured status transformation (e.g. demoting failures).
#
# If required executables/libraries are missing, fails immediately with the
# requirement-check message as output instead of running the hook.
def run_and_transform
  output = check_for_requirements
  if output
    status = :fail
  else
    hook_env = @config.fetch('env') { {} }
    raw_result = Overcommit::Utils.with_environment(hook_env) { run }
    status, output = process_hook_return_value(raw_result)
  end

  [transform_status(status), output]
end
Runs the hook and transforms the status returned based on the hook's configuration.
806
# Attempts to `require` each library the hook declares as a prerequisite.
#
# Returns nil when everything loads; otherwise a newline-joined list of
# failure messages, each including the configured install command (if any).
def check_for_libraries
  failures = []
  required_libraries.each do |library|
    begin
      require library
    rescue LoadError
      hint = @config['install_command']
      hint = " -- install via #{hint}" if hint
      failures << "Unable to load '#{library}'#{hint}"
    end
  end
  failures.join("\n") unless failures.empty?
end
If the hook defines required library paths that it wants to load attempt to load them .
807
# Stashes unstaged changes (keeping the index) so hooks only see what is
# about to be committed; also snapshots merge/cherry-pick state and file
# mtimes so they can be restored afterwards. Raises HookSetupFailed when
# `git stash save` fails.
#
# NOTE(review): the backtick command used to detect whether a stash entry
# was actually created is empty here — almost certainly lost in formatting
# (upstream shells out to something like `git stash list`). Verify and
# restore before use.
def setup_environment store_modified_times Overcommit :: GitRepo . store_merge_state Overcommit :: GitRepo . store_cherry_pick_state if ! initial_commit? && any_changes? @stash_attempted = true stash_message = "Overcommit: Stash of repo state before hook run at #{Time.now}" result = Overcommit :: Utils . execute ( %w[ git -c commit.gpgsign=false stash save --keep-index --quiet ] + [ stash_message ] ) unless result . success? raise Overcommit :: Exceptions :: HookSetupFailed , "Unable to setup environment for #{hook_script_name} hook run:" "\nSTDOUT:#{result.stdout}\nSTDERR:#{result.stderr}" end @changes_stashed = ` ` . include? ( stash_message ) end restore_modified_times end
Stash unstaged contents of files so hooks don't see changes that aren't about to be committed.
808
# Undoes setup_environment: clears and/or restores the working tree
# depending on whether a stash was created, then restores merge and
# cherry-pick state and file mtimes so the tree looks untouched.
def cleanup_environment
  # De Morgan of `unless initial_commit? || (@stash_attempted && !@changes_stashed)`
  if !initial_commit? && (!@stash_attempted || @changes_stashed)
    clear_working_tree
    restore_modified_times
  end

  if @changes_stashed
    restore_working_tree
    restore_modified_times
  end

  Overcommit::GitRepo.restore_merge_state
  Overcommit::GitRepo.restore_cherry_pick_state
  restore_modified_times
end
Restore unstaged changes and reset file modification times so it appears as if nothing ever changed .
809
# Returns the staged files to check, memoized. When amending a commit, the
# files touched by the previous commit are unioned in (after filtering) so
# they are re-checked as well.
def modified_files
  @modified_files ||= begin
    files = Overcommit::GitRepo.modified_files(staged: true)
    if amendment?
      previous = Overcommit::GitRepo.modified_files(subcmd: 'show --format=%n')
      files |= filter_modified_files(previous)
    end
    files
  end
end
Get a list of added, copied, or modified files that have been staged. Renames and deletions are ignored, since there should be nothing to check.
810
# Hard-resets the working tree so the stash can be applied cleanly, then
# removes the directories left behind by submodules that were staged for
# removal (a hard reset recreates them as empty directories).
#
# Raises HookCleanupFailed when `git reset --hard` fails.
def clear_working_tree
  removed_submodules = Overcommit::GitRepo.staged_submodule_removals

  result = Overcommit::Utils.execute(%w[git reset --hard])
  unless result.success?
    raise Overcommit::Exceptions::HookCleanupFailed,
          "Unable to cleanup working tree after #{hook_script_name} hooks run:" \
          "\nSTDOUT:#{result.stdout}\nSTDERR:#{result.stderr}"
  end

  removed_submodules.each { |submodule| FileUtils.rmdir(submodule.path) }
end
Clears the working tree so that the stash can be applied .
811
# Pops the stash created during setup, restoring both the working tree and
# the index to the user's pre-hook state.
#
# Raises HookCleanupFailed when `git stash pop` fails.
def restore_working_tree
  result = Overcommit::Utils.execute(%w[git stash pop --index --quiet])
  return if result.success?

  raise Overcommit::Exceptions::HookCleanupFailed,
        "Unable to restore working tree after #{hook_script_name} hooks run:" \
        "\nSTDOUT:#{result.stdout}\nSTDERR:#{result.stderr}"
end
Applies the stash to the working tree to restore the user's state.
812
# Records the mtime of every staged and unstaged modified file so the
# times can be put back later, making it appear files never changed.
# Broken symlinks and missing files are skipped.
def store_modified_times
  @modified_times = {}

  candidates = modified_files + Overcommit::GitRepo.modified_files(staged: false)
  candidates.each do |file|
    next if Overcommit::Utils.broken_symlink?(file)
    next unless File.exist?(file)

    @modified_times[file] = File.mtime(file)
  end
end
Stores the modification times for all modified files to make it appear like they never changed .
813
# Restores the previously recorded mtimes (atime and mtime both set) so the
# files appear untouched. Broken symlinks and now-missing files are skipped.
def restore_modified_times
  @modified_times.each do |file, recorded_time|
    next if Overcommit::Utils.broken_symlink?(file)
    next unless File.exist?(file)

    File.utime(recorded_time, recorded_time, file)
  end
end
Restores the file modification times for all modified files to make it appear like they never changed .
814
# Normalizes and validates a configuration hash, raising on fatal problems.
#
# Returns the normalized hash. When options[:default] is set, the missing
# `enabled` check is skipped (the default config need not be explicit).
def validate(config, hash, options)
  @options = options.dup
  @log = options[:logger]

  hash = convert_nils_to_empty_hashes(hash)
  ensure_hook_type_sections_exist(hash)
  check_hook_name_format(hash)
  check_hook_env(hash)
  check_for_missing_enabled_option(hash) unless @options[:default]
  check_for_too_many_processors(config, hash)
  check_for_verify_plugin_signatures_option(hash)

  hash
end
Validates hash for any invalid options normalizing where possible .
815
# Recursively replaces nil values with empty hashes (descending into nested
# hashes) so downstream code can index into sections without nil checks.
# Returns a new hash; the input is not mutated.
def convert_nils_to_empty_hashes(hash)
  normalized = {}
  hash.each do |key, value|
    normalized[key] =
      case value
      when nil  then {}
      when Hash then convert_nils_to_empty_hashes(value)
      else value
      end
  end
  normalized
end
Normalizes nil values to empty hashes .
816
# Logs and raises ConfigurationError when any configured hook name (other
# than the special 'ALL' section) fails the name-format check, since bad
# names cause confusing errors later.
#
# NOTE(review): the validation regex appears whitespace-mangled — as
# written it can never match (a literal space precedes the \A anchor), so
# every real hook name would be flagged invalid. Upstream validates
# alphanumeric names, e.g. /\A[A-Za-z0-9]+\z/ — verify and restore.
def check_hook_name_format ( hash ) errors = [ ] Overcommit :: Utils . supported_hook_type_classes . each do | hook_type | hash . fetch ( hook_type ) { { } } . each_key do | hook_name | next if hook_name == 'ALL' unless hook_name . match? ( / \A \z / ) errors << "#{hook_type}::#{hook_name} has an invalid name " "#{hook_name}. It must contain only alphanumeric " 'characters (no underscores or dashes, etc.)' end end end if errors . any? if @log @log . error errors . join ( "\n" ) @log . newline end raise Overcommit :: Exceptions :: ConfigurationError , 'One or more hooks had invalid names' end end
Prints an error message and raises an exception if a hook has an invalid name since this can result in strange errors elsewhere .
817
# Warns (via the configured logger) about every hook that does not set the
# `enabled` option explicitly. No-op when no logger is available.
def check_for_missing_enabled_option(hash)
  return unless @log

  any_warnings = false
  Overcommit::Utils.supported_hook_type_classes.each do |hook_type|
    hash.fetch(hook_type) { {} }.each do |hook_name, hook_config|
      next if hook_name == 'ALL'
      next unless hook_config['enabled'].nil?

      @log.warning "#{hook_type}::#{hook_name} hook does not explicitly " \
                   'set `enabled` option in .overcommit.yml'
      any_warnings = true
    end
  end

  @log.newline if any_warnings
end
Prints a warning if there are any hooks listed in the configuration without enabled explicitly set .
818
# Validates that no hook requests more `processors` than the global
# `concurrency` setting allows; logs the offenders and raises
# ConfigurationError when any are found.
def check_for_too_many_processors(config, hash)
  concurrency = config.concurrency

  errors = []
  Overcommit::Utils.supported_hook_type_classes.each do |hook_type|
    hash.fetch(hook_type) { {} }.each do |hook_name, hook_config|
      processors = hook_config.fetch('processors') { 1 }
      next unless processors > concurrency

      errors << "#{hook_type}::#{hook_name} `processors` value " \
                "(#{processors}) is larger than the global `concurrency` " \
                "option (#{concurrency})"
    end
  end
  return if errors.empty?

  if @log
    @log.error errors.join("\n")
    @log.newline
  end
  raise Overcommit::Exceptions::ConfigurationError,
        'One or more hooks had invalid `processor` value configured'
end
Prints a warning if any hook has a number of processors larger than the global concurrency setting .
819
# Returns a hash of { hook_type => { hook_name => config } } for every
# built-in hook configured under each supported hook type ('ALL' excluded).
def all_builtin_hook_configs
  Overcommit::Utils.supported_hook_type_classes.each_with_object({}) do |hook_type, configs|
    names = @hash[hook_type].keys.reject { |name| name == 'ALL' }
    configs[hook_type] = Hash[names.map { |name| [name, for_hook(name, hook_type)] }]
  end
end
Returns configuration for all built - in hooks in each hook type .
820
# Returns a hash of { HookTypeClassName => { HookName => config } } for
# every plugin hook found under the plugin directory, one subdirectory per
# hook type (dashes mapped to underscores), one .rb file per hook.
def all_plugin_hook_configs
  Overcommit::Utils.supported_hook_types.each_with_object({}) do |hook_type, configs|
    # Both the result key and for_hook use the camel-cased type name.
    type_class = Overcommit::Utils.camel_case(hook_type)

    plugin_dir = File.join(plugin_directory, hook_type.tr('-', '_'))
    hook_names = Dir[File.join(plugin_dir, '*.rb')].sort.map do |path|
      Overcommit::Utils.camel_case(File.basename(path, '.rb'))
    end

    configs[type_class] = Hash[hook_names.map { |name| [name, for_hook(name, type_class)] }]
  end
end
Returns configuration for all plugin hooks in each hook type .
821
# Returns the names of built-in hooks that are enabled for the given hook
# context's type ('ALL' excluded).
def enabled_builtin_hooks(hook_context)
  @hash[hook_context.hook_class_name].keys.select do |hook_name|
    hook_name != 'ALL' &&
      built_in_hook?(hook_context, hook_name) &&
      hook_enabled?(hook_context, hook_name)
  end
end
Returns the built - in hooks that have been enabled for a hook type .
822
# Returns the names of ad hoc (user-defined, non-plugin) hooks that are
# enabled for the given hook context's type ('ALL' excluded).
def enabled_ad_hoc_hooks(hook_context)
  @hash[hook_context.hook_class_name].keys.select do |hook_name|
    hook_name != 'ALL' &&
      ad_hoc_hook?(hook_context, hook_name) &&
      hook_enabled?(hook_context, hook_name)
  end
end
Returns the ad hoc hooks that have been enabled for a hook type .
823
# Returns a frozen configuration hash for a hook: the type's 'ALL' section
# smart-merged with the hook's own section, plus a computed 'enabled' flag.
#
# When +hook_type+ is omitted, +hook+ is assumed to be a hook instance and
# both name and type are derived from its class name.
def for_hook(hook, hook_type = nil)
  unless hook_type
    components = hook.class.name.split('::')
    hook = components.last
    hook_type = components[-2]
  end

  merged = smart_merge(@hash[hook_type]['ALL'], @hash[hook_type][hook] || {})
  merged['enabled'] = hook_enabled?(hook_type, hook)
  merged.freeze
end
Returns a non - modifiable configuration for a hook .
824
# Applies SKIP/SKIP_CHECKS/SKIP_HOOKS and ONLY environment variables to the
# configuration: when ONLY is used (or 'all'/'ALL' is skipped) everything is
# skipped first, then each hook listed in ONLY is force-enabled and each
# hook listed in the SKIP variables is force-skipped.
#
# NOTE(review): the split pattern / / (single space) looks truncated by
# formatting — upstream splits on a character class (e.g. /[:, ]/) so
# comma/colon separated hook lists also work. Verify.
def apply_environment! ( hook_context , env ) skipped_hooks = "#{env['SKIP']} #{env['SKIP_CHECKS']} #{env['SKIP_HOOKS']}" . split ( / / ) only_hooks = env . fetch ( 'ONLY' ) { '' } . split ( / / ) hook_type = hook_context . hook_class_name if only_hooks . any? || skipped_hooks . include? ( 'all' ) || skipped_hooks . include? ( 'ALL' ) @hash [ hook_type ] [ 'ALL' ] [ 'skip' ] = true end only_hooks . select { | hook_name | hook_exists? ( hook_context , hook_name ) } . map { | hook_name | Overcommit :: Utils . camel_case ( hook_name ) } . each do | hook_name | @hash [ hook_type ] [ hook_name ] ||= { } @hash [ hook_type ] [ hook_name ] [ 'skip' ] = false end skipped_hooks . select { | hook_name | hook_exists? ( hook_context , hook_name ) } . map { | hook_name | Overcommit :: Utils . camel_case ( hook_name ) } . each do | hook_name | @hash [ hook_type ] [ hook_name ] ||= { } @hash [ hook_type ] [ hook_name ] [ 'skip' ] = true end end
Applies additional configuration settings based on the provided environment variables .
825
# Reads the stored Overcommit signature from the repo-local git config.
#
# Returns '' when the key is not set yet (git exits with 1), the chomped
# value on success, and raises GitConfigError for any other failure.
def stored_signature
  result = Overcommit::Utils.execute(
    %w[git config --local --get] + [signature_config_key]
  )

  case result.status
  when 1 then '' # key simply not present yet
  when 0 then result.stdout.chomp
  else
    raise Overcommit::Exceptions::GitConfigError,
          "Unable to read from local repo git config: #{result.stderr}"
  end
end
Returns the stored signature of this repo's Overcommit configuration.
826
# Returns absolute paths for every file tracked by git, excluding
# directories (e.g. submodule entries).
#
# NOTE(review): the backtick command is empty and the split pattern is
# whitespace-mangled — upstream shells out to `git ls-files` and splits on
# newlines. Verify and restore before use.
def all_files ` ` . split ( / \n / ) . map { | relative_file | File . expand_path ( relative_file ) } . reject { | file | File . directory? ( file ) } end
Returns the names of all files that are tracked by git .
827
# Re-creates the MERGE_MODE/MERGE_HEAD/MERGE_MSG files in .git so the repo
# returns to its mid-merge state, then clears the cached values.
def restore_merge_state
  if @merge_head
    FileUtils.touch(File.expand_path('MERGE_MODE', Overcommit::Utils.git_dir))
    File.write(File.expand_path('MERGE_HEAD', Overcommit::Utils.git_dir), @merge_head)
    @merge_head = nil
  end

  if @merge_msg
    File.write(File.expand_path('MERGE_MSG', Overcommit::Utils.git_dir), "#{@merge_msg}\n")
    @merge_msg = nil
  end
end
Restore any relevant files that were present when repo was in the middle of a merge .
828
# Re-creates CHERRY_PICK_HEAD in .git so the repo returns to its
# mid-cherry-pick state, then clears the cached value.
def restore_cherry_pick_state
  return unless @cherry_head

  File.write(File.expand_path('CHERRY_PICK_HEAD', Overcommit::Utils.git_dir), @cherry_head)
  @cherry_head = nil
end
Restore any relevant files that were present when repo was in the middle of a cherry - pick .
829
# Persists the current hook signature into the repo-local git config.
#
# Raises GitConfigError when the git config write fails.
def update_signature!
  result = Overcommit::Utils.execute(
    %w[git config --local] + [signature_config_key, signature]
  )
  return if result.success?

  raise Overcommit::Exceptions::GitConfigError,
        "Unable to write to local repo git config: #{result.stderr}"
end
Update the current stored signature for this hook .
830
# Computes the SHA256 signature of a hook from its configuration (minus
# ignored keys) plus its file contents — but only when the hook file is
# signable and tracked by git; untracked files contribute nothing.
def signature
  hook_config = @config.for_hook(@hook_name, @context.hook_class_name).dup
  IGNORED_CONFIG_KEYS.each { |key| hook_config.delete(key) }

  signable = signable_file?(hook_path) && Overcommit::GitRepo.tracked?(hook_path)
  content_to_sign = signable ? hook_contents : nil

  Digest::SHA256.hexdigest(content_to_sign.to_s + hook_config.to_s)
end
Calculates a hash of a hook using a combination of its configuration and file contents .
831
# Called after an individual hook run: prints the hook header unless the
# hook passed quietly (quiet hook or global quiet mode), then the result.
def end_hook(hook, status, output)
  loud = !hook.quiet? && !@config['quiet']
  print_header(hook) if loud || status != :pass
  print_result(hook, status, output)
end
Executed at the end of an individual hook run .
832
# Processes one logical line of makefile data: targets before the first
# ':' become file tasks depending on the space-separated names after it,
# with escaped spaces restored via +respace+.
#
# NOTE(review): the scan pattern / \S / looks truncated by formatting —
# scanning a single non-space character cannot yield whole target names.
# Upstream scans whole words (e.g. /\S+/) — verify and restore.
def process_line ( line ) file_tasks , args = line . split ( ":" , 2 ) return if args . nil? dependents = args . split . map { | d | respace ( d ) } file_tasks . scan ( / \S / ) do | file_task | file_task = respace ( file_task ) file file_task => dependents end end
Process one logical line of makefile data .
833
# Invokes the task (prerequisites first) if it is needed, starting a fresh
# invocation chain so circular dependencies can be detected.
def invoke(*args)
  bound_args = TaskArguments.new(arg_names, args)
  invoke_with_call_chain(bound_args, InvocationChain::EMPTY)
end
Invoke the task if it is needed . Prerequisites are invoked first .
834
# Maps each comment through +block+ (identity when no block is given) and
# joins the results with +separator+. Returns nil when there are no
# comments at all.
def transform_comments(separator, &block)
  return nil if @comments.empty?

  block ||= lambda { |comment| comment }
  @comments.map(&block).join(separator)
end
Transform the list of comments as specified by the block and join with the separator .
835
# Merges +defaults+ with a trailing options hash on +args+ (explicit
# options win), replacing it — or appending +defaults+ when no options
# hash is present. Returns the modified argument list.
def rake_merge_option(args, defaults)
  defaults.update(args.pop) if Hash === args.last
  args.push(defaults)
  args
end
Merge the given options with the default values .
836
# Ensures +options+ contains only keys listed in +optdecl+, raising
# ArgumentError naming any undeclared keys.
def rake_check_options(options, *optdecl)
  unknown = options.dup
  optdecl.each { |declared| unknown.delete(declared) }
  raise ArgumentError, "no such option: #{unknown.keys.join(' ')}" unless unknown.empty?
end
Check that the options do not contain options not listed in + optdecl + . An ArgumentError exception is thrown if non - declared options are found .
837
# Drops up to +n+ innermost scope levels, stopping early at the toplevel
# scope (never trims past empty). Non-positive +n+ is a no-op.
def trim(n)
  scope = self
  n.times do
    break if scope.empty?
    scope = scope.tail
  end
  scope
end
Trim + n + innermost scope levels from the scope . In no case will this trim beyond the toplevel scope .
838
# Initializes the application name and command-line state, collecting the
# tasks to run from +argv+ inside the standard exception handler.
def init(app_name = "rake", argv = ARGV)
  standard_exception_handling do
    @name = app_name
    args =
      begin
        handle_options(argv)
      rescue ArgumentError
        # Fall back to the legacy zero-argument signature for overrides
        # written against the old handle_options API.
        handle_options
      end
    collect_command_line_tasks(args)
  end
end
Initialize the command line parameters and app name .
839
# Returns the first candidate Rakefile present in the current directory
# (resolving the on-disk casing when exactly one case-insensitive match
# exists), the empty-string sentinel as-is, or nil when none is found.
def have_rakefile
  @rakefiles.each do |fn|
    if File.exist?(fn)
      matches = FileList.glob(fn, File::FNM_CASEFOLD)
      return matches.size == 1 ? matches.first : fn
    elsif fn == ""
      return fn
    end
  end
  nil
end
True if one of the files in RAKEFILES is in the current directory . If a match is found it is copied into
840
# Returns a new FileList with String#sub(pat, rep) applied to every entry.
def sub(pat, rep)
  each_with_object(self.class.new) { |path, acc| acc << path.sub(pat, rep) }
end
Return a new FileList with the results of running + sub + against each element of the original list .
841
# Returns a new FileList with String#gsub(pat, rep) applied to every entry.
def gsub(pat, rep)
  each_with_object(self.class.new) { |path, acc| acc << path.gsub(pat, rep) }
end
Return a new FileList with the results of running + gsub + against each element of the original list .
842
# In-place variant of #sub: replaces each entry with entry.sub(pat, rep).
# Returns self.
def sub!(pat, rep)
  each_with_index { |path, idx| self[idx] = path.sub(pat, rep) }
  self
end
Same as + sub + except that the original file list is modified .
843
# In-place variant of #gsub: replaces each entry with entry.gsub(pat, rep).
# Returns self.
def gsub!(pat, rep)
  each_with_index { |path, idx| self[idx] = path.gsub(pat, rep) }
  self
end
Same as + gsub + except that the original file list is modified .
844
# Defines the full set of packaging tasks for this task library:
# :package (build all requested formats), :repackage, :clobber_package
# (hooked into :clobber), one archive file task per enabled format
# (tar.gz / tar.bz2 / tar.xz via the tar flag table, zip separately), and
# the staging-directory task that copies @package_files into place
# (symlinking files, creating directories). Requires @version to have been
# set (or the :noversion sentinel). Returns self.
# NOTE(review): `rm_r package_dir rescue nil` deliberately ignores a
# missing package directory.
def define fail "Version required (or :noversion)" if @version . nil? @version = nil if :noversion == @version desc "Build all the packages" task :package desc "Force a rebuild of the package files" task repackage : [ :clobber_package , :package ] desc "Remove package products" task :clobber_package do rm_r package_dir rescue nil end task clobber : [ :clobber_package ] [ [ need_tar , tgz_file , "z" ] , [ need_tar_gz , tar_gz_file , "z" ] , [ need_tar_bz2 , tar_bz2_file , "j" ] , [ need_tar_xz , tar_xz_file , "J" ] ] . each do | need , file , flag | if need task package : [ "#{package_dir}/#{file}" ] file "#{package_dir}/#{file}" => [ package_dir_path ] + package_files do chdir ( package_dir ) { sh @tar_command , "#{flag}cvf" , file , package_name } end end end if need_zip task package : [ "#{package_dir}/#{zip_file}" ] file "#{package_dir}/#{zip_file}" => [ package_dir_path ] + package_files do chdir ( package_dir ) { sh @zip_command , "-r" , zip_file , package_name } end end directory package_dir_path => @package_files do @package_files . each do | fn | f = File . join ( package_dir_path , fn ) fdir = File . dirname ( f ) mkdir_p ( fdir ) unless File . exist? ( fdir ) if File . directory? ( fn ) mkdir_p ( f ) else rm_f f safe_ln ( fn , f ) end end end self end
Create the tasks defined by this task library .
845
# Blocks until the futures queue is drained and all pool threads have
# exited, waiting on @join_cond under the pool monitor. On any error it
# dumps queue size and per-thread status/backtraces to stderr before
# re-raising, to aid debugging hung pools.
# NOTE(review): rescuing Exception is deliberate here so that even fatal
# errors produce the diagnostic dump before propagating.
def join @threads_mon . synchronize do begin stat :joining @join_cond . wait unless @threads . empty? stat :joined rescue Exception => e stat :joined $stderr . puts e $stderr . print "Queue contains #{@queue.size} items. " + "Thread pool contains #{@threads.count} threads\n" $stderr . print "Current Thread #{Thread.current} status = " + "#{Thread.current.status}\n" $stderr . puts e . backtrace . join ( "\n" ) @threads . each do | t | $stderr . print "Thread #{t} status = #{t.status}\n" $stderr . puts t . backtrace . join ( "\n" ) end raise e end end end
Waits until the queue of futures is empty and all threads have exited .
846
# Dequeues and works exactly one promise from the queue.
#
# Returns true when an item was processed, false when the queue was empty
# — including the race where it empties between the check and the
# non-blocking deq (which then raises ThreadError).
def process_queue_item
  return false if @queue.empty?

  promise = @queue.deq(true) # non-blocking pop
  stat :dequeued, item_id: promise.object_id
  promise.work
  true
rescue ThreadError
  false
end
Processes one item on the queue. Returns true if there was an item to process, false if there was no item.
847
# Splits raw task arguments into [task_name, arg_names, deps] for a task
# or rule declared without dependencies (deps is always empty). A single
# array-like argument after the name supplies the argument names directly.
def resolve_args_without_dependencies(args)
  task_name = args.shift
  arg_names =
    if args.size == 1 && args.first.respond_to?(:to_ary)
      args.first.to_ary
    else
      args
    end
  [task_name, arg_names, []]
end
Resolve task arguments for a task or rule when there are no dependencies declared .
848
# Searches the registered rules for one whose pattern matches +task_name+
# and, if an attempt succeeds, returns the synthesized task; nil when no
# rule applies. Recursion depth is capped at 16 to detect rule cycles, and
# overflow errors are annotated with this target before re-raising.
def enhance_with_matching_rule(task_name, level = 0)
  fail Rake::RuleRecursionOverflowError, "Rule Recursion Too Deep" if level >= 16

  @rules.each do |pattern, args, extensions, block|
    next unless pattern && pattern.match(task_name)

    task = attempt_rule(task_name, pattern, args, extensions, block, level)
    return task if task
  end
  nil
rescue Rake::RuleRecursionOverflowError => ex
  ex.add_target(task_name)
  fail ex
end
If a rule can be found that matches the task name enhance the task with the prerequisites and actions from the rule . Set the source attribute of the task appropriately for the rule . Return the enhanced task or nil of no rule was found .
849
# Walks the caller stack looking for the DSL-layer frame and returns the
# frame just past it (i.e. the user code that invoked the DSL), or nil.
#
# NOTE(review): the frame-matching regex / \/ / looks truncated by
# formatting — upstream matches the DSL definition file's path. Verify
# and restore before use.
def find_location locations = caller i = 0 while locations [ i ] return locations [ i + 1 ] if locations [ i ] =~ / \/ / i += 1 end nil end
Find the location that called into the dsl layer .
850
# Tries to synthesize a FileTask for +task_name+ from one rule: builds the
# candidate source list from the rule's extensions; each source must
# already exist on disk, be a defined task, or itself be derivable from
# another rule (recursing with level + 1). If any source fails all three,
# the whole attempt aborts (the `return nil` inside the map block exits
# this method). Otherwise defines the file task with the resolved
# prerequisites, records them as its sources, and returns it.
def attempt_rule ( task_name , task_pattern , args , extensions , block , level ) sources = make_sources ( task_name , task_pattern , extensions ) prereqs = sources . map { | source | trace_rule level , "Attempting Rule #{task_name} => #{source}" if File . exist? ( source ) || Rake :: Task . task_defined? ( source ) trace_rule level , "(#{task_name} => #{source} ... EXIST)" source elsif parent = enhance_with_matching_rule ( source , level + 1 ) trace_rule level , "(#{task_name} => #{source} ... ENHANCE)" parent . name else trace_rule level , "(#{task_name} => #{source} ... FAIL)" return nil end } task = FileTask . define_task ( task_name , { args => prereqs } , & block ) task . sources = prereqs task end
Attempt to create a rule given the list of prerequisites .
851
# True when any prerequisite task has a timestamp newer than +stamp+.
# FileTask prerequisites additionally count as out of date when the
# --build-all option is set.
def out_of_date?(stamp)
  all_prerequisite_tasks.any? do |prereq_name|
    prereq = application[prereq_name, @scope]
    newer = prereq.timestamp > stamp
    if prereq.instance_of?(Rake::FileTask)
      newer || @application.options.build_all
    else
      newer
    end
  end
end
Are there any prerequisites with a later time than the given time stamp?
852
# Builds a child argument scope for the given prerequisite argument
# +names+, seeding each with its value from this scope and carrying any
# extra positional arguments along; this scope becomes the parent.
def new_scope(names)
  looked_up = names.map { |name| self[name] }
  self.class.new(names, looked_up + extras, self)
end
Create a new argument scope using the prerequisite argument names .
853
# Performs the promise's chore, but only if this thread wins the try-lock;
# otherwise another worker already owns it and we bail out. Each phase is
# reported via +stat+ for diagnostics.
def work
  stat :attempting_lock_on, item_id: object_id
  if @mutex.try_lock
    stat :has_lock_on, item_id: object_id
    chore
    stat :releasing_lock_on, item_id: object_id
    @mutex.unlock
  else
    stat :bailed_on, item_id: object_id
  end
end
If no one else is working this promise go ahead and do the chore .
854
# Loops calling the block (with the attempt count and the previous retry
# exception) until it returns a truthy value, which is returned. Raises
# RetryCountExceeded when the limit is reached on falsy results; raised
# exceptions are retried only when they match the configured matchers and
# message pattern and the limit is not exhausted. The ensure callback is
# always invoked with the final attempt count.
# NOTE(review): rescuing Exception is deliberate — the matchers decide
# which exceptions are retryable; everything else is re-raised.
def retryer ( & blk ) loop do @try_count += 1 y = blk . call ( @try_count , @retry_exception ) @retry_exception = nil return y if y raise Bosh :: Common :: RetryCountExceeded if @try_count >= @retry_limit wait end rescue Exception => exception raise unless @matchers . any? { | m | m . matches? ( exception ) } raise unless exception . message =~ @matching raise if @try_count >= @retry_limit @retry_exception = exception wait retry ensure @ensure_callback . call ( @try_count ) end
Loops until the block returns a true value
855
# Flushes the OS DNS cache by shelling out to the configured command.
#
# No-op when no flush command is configured. Logs the flushed-record count
# (from the command's stdout) on success, or a warning with stderr on
# failure.
def flush_dns_cache
  return unless @flush_command && !@flush_command.empty?

  stdout, stderr, status = Open3.capture3(@flush_command)
  # Fix: Open3.capture3 yields a Process::Status; comparing it to the
  # Integer 0 relies on deprecated Integer-coercion behavior. Use the
  # canonical success? predicate instead.
  if status.success?
    @logger.debug("Flushed #{stdout.chomp} records from DNS cache")
  else
    @logger.warn("Failed to flush DNS cache: #{stderr.chomp}")
  end
end
Purge cached DNS records
856
# Looks up a single instance by instance-group name and index-or-id within
# the given deployment (defaults to the default deployment name).
def instance(instance_group_name, index_or_id, options = { deployment_name: Deployments::DEFAULT_DEPLOYMENT_NAME })
  all = instances(options)
  find_instance(all, instance_group_name, index_or_id)
end
Returns the instance matching the given instance group name and index or id.
857
# Registers or refreshes the Agent object for an instance reported by the
# BOSH Director and updates it with the instance data. Instances with no
# agent id that are expected to have a VM but don't get a placeholder
# "agent_with_no_vm" keyed by instance id instead. Returns true when a
# real agent was added or updated, false when only a placeholder (or
# nothing) was recorded. A placeholder is discarded once the real agent id
# shows up for the same instance.
def upsert_agent ( instance ) @logger . info ( "Adding agent #{instance.agent_id} (#{instance.job}/#{instance.id}) to #{name}..." ) agent_id = instance . agent_id if agent_id . nil? @logger . warn ( "No agent id for instance #{instance.job}/#{instance.id} in deployment #{name}" ) if instance . expects_vm? && ! instance . has_vm? agent = Agent . new ( "agent_with_no_vm" , deployment : name ) @instance_id_to_agent [ instance . id ] = agent agent . update_instance ( instance ) end return false end if instance . job . nil? @logger . debug ( "VM with no job found: #{agent_id}" ) end agent = @agent_id_to_agent [ agent_id ] if agent . nil? @logger . debug ( "Discovered agent #{agent_id}" ) agent = Agent . new ( agent_id , deployment : name ) @agent_id_to_agent [ agent_id ] = agent @instance_id_to_agent . delete ( instance . id ) if @instance_id_to_agent [ instance . id ] end agent . update_instance ( instance ) true end
Processes VM data from BOSH Director extracts relevant agent data wraps it into Agent object and adds it to a list of managed agents .
858
# Subscribes to the NATS inbox wildcard subject exactly once, using
# double-checked locking on @subject_id (cheap unlocked check, then a
# re-check under @lock) so concurrent callers cannot subscribe twice.
# Each received message marks @handled_response and is dispatched to
# handle_response.
def subscribe_inbox if @subject_id . nil? client = nats @lock . synchronize do if @subject_id . nil? @subject_id = client . subscribe ( "#{@inbox_name}.>" ) do | message , _ , subject | @handled_response = true handle_response ( message , subject ) end end end end end
subscribe to an inbox if not already subscribed
859
# Builds the dependency spec hash for this package, keyed by compiled
# package name. Only valid once every dependency has been compiled;
# raises DirectorError otherwise.
def dependency_spec
  @dependencies.each_with_object({}) do |dependency, spec|
    unless dependency.compiled?
      raise DirectorError,
            'Cannot generate package dependency spec ' \
            "for '#{@package.name}', " \
            "'#{dependency.package.name}' hasn't been compiled yet"
    end

    compiled = dependency.compiled_package
    spec[compiled.name] = {
      'name' => compiled.name,
      'version' => "#{compiled.version}.#{compiled.build}",
      'sha1' => compiled.sha1,
      'blobstore_id' => compiled.blobstore_id,
    }
  end
end
This call only makes sense if all dependencies have already been compiled otherwise it raises an exception
860
# Renders exception information as plain text. Non-Hash input is simply
# stringified; a Hash contributes its 'message', an optional newline-joined
# 'backtrace', and the contents of an optional 'blobstore_id' blob (which
# is downloaded and deleted).
def format_exception(exception)
  return exception.to_s unless exception.is_a?(Hash)

  parts = [exception['message'].to_s]
  parts << Array(exception['backtrace']).join("\n") if exception['backtrace']
  parts << download_and_delete_blob(exception['blobstore_id']).to_s if exception['blobstore_id']
  parts.join("\n")
end
Returns formatted exception information
861
# Replaces a compile_log_id in a task response with the actual compile log
# contents, fetched (and then deleted) from the blobstore — done eagerly
# because the blob is gone once fetched and would be lost on a crash.
# Mutates +response+ in place; a no-op for any other response shape.
def inject_compile_log(response)
  value = response['value']
  return unless value.is_a?(Hash)

  result = value['result']
  return unless result.is_a?(Hash)

  blob_id = result['compile_log_id']
  return unless blob_id

  result['compile_log'] = download_and_delete_blob(blob_id)
end
the blob is removed from the blobstore once we have fetched it but if there is a crash before it is injected into the response and then logged there is a chance that we lose it
862
# Downloads +remote_file+ to +local_file+ over HTTP(S), streaming the body
# to disk in chunks. Sends basic auth when the URL embeds credentials,
# follows up to 9 redirects (recursing with num_redirects + 1, requiring a
# Location header), raises ResourceNotFound on 404, and maps every other
# HTTP failure and the listed network/protocol errors to ResourceError
# after logging.
def download_remote_file ( resource , remote_file , local_file , num_redirects = 0 ) @logger . info ( "Downloading remote #{resource} from #{remote_file}" ) if @logger uri = URI . parse ( remote_file ) req = Net :: HTTP :: Get . new ( uri ) if uri . user && uri . password req . basic_auth uri . user , uri . password end Net :: HTTP . start ( uri . host , uri . port , :ENV , :use_ssl => uri . scheme == 'https' ) do | http | http . request req do | response | case response when Net :: HTTPSuccess File . open ( local_file , 'wb' ) do | file | response . read_body do | chunk | file . write ( chunk ) end end when Net :: HTTPFound , Net :: HTTPMovedPermanently raise ResourceError , "Too many redirects at '#{remote_file}'." if num_redirects >= 9 location = response . header [ 'location' ] raise ResourceError , "No location header for redirect found at '#{remote_file}'." if location . nil? location = URI . join ( uri , location ) . to_s download_remote_file ( resource , location , local_file , num_redirects + 1 ) when Net :: HTTPNotFound @logger . error ( "Downloading remote #{resource} from #{remote_file} failed: #{response.message}" ) if @logger raise ResourceNotFound , "No #{resource} found at '#{remote_file}'." else @logger . error ( "Downloading remote #{resource} from #{remote_file} failed: #{response.message}" ) if @logger raise ResourceError , "Downloading remote #{resource} failed. Check task debug log for details." end end end rescue URI :: Error , SocketError , :: Timeout :: Error , Errno :: EINVAL , Errno :: ECONNRESET , Errno :: ECONNREFUSED , EOFError , Net :: HTTPBadResponse , Net :: HTTPHeaderSyntaxError , Net :: ProtocolError => e @logger . error ( "Downloading remote #{resource} from #{remote_file} failed: #{e.inspect}" ) if @logger raise ResourceError , "Downloading remote #{resource} failed. Check task debug log for details." end
Downloads a remote file
863
# Creates or updates the registry record for +instance_id+, storing
# +settings+ on it.
def update_settings(instance_id, settings)
  params = { :instance_id => instance_id }
  record = Models::RegistryInstance[params] || Models::RegistryInstance.new(params)
  record.settings = settings
  record.save
end
Updates instance settings
864
# Returns the stored settings for +instance_id+. When +remote_ip+ is
# given, first verifies the request comes from one of the instance's own
# IPs.
def read_settings(instance_id, remote_ip = nil)
  check_instance_ips(remote_ip, instance_id) if remote_ip
  get_instance(instance_id).settings
end
Reads instance settings
865
# Finds the provider intent for (provider, original name), creating it
# with +link_type+ when absent or refreshing the type when present —
# lookup deliberately ignores the alias so alias changes update the
# existing intent. Bumps serial_id to the current value if stale, then
# saves.
def find_or_create_provider_intent(link_provider:, link_original_name:, link_type:)
  intent = Bosh::Director::Models::Links::LinkProviderIntent.find(
    link_provider: link_provider,
    original_name: link_original_name,
  )

  if intent
    intent.type = link_type
  else
    intent = Bosh::Director::Models::Links::LinkProviderIntent.create(
      link_provider: link_provider,
      original_name: link_original_name,
      type: link_type,
    )
  end

  intent.serial_id = @serial_id if intent.serial_id != @serial_id
  intent.save
end
Used by provider not using alias because want to update existing provider intent when alias changes
866
# True when the provider intent is shared, or when some consumer in the
# same deployment (at the current serial id) has an intent that can
# consume it.
def consumer?(provider_intent, deployment_plan)
  return true if provider_intent.shared

  current_consumers = deployment_plan.model.link_consumers.select do |consumer|
    consumer.serial_id == @serial_id
  end

  current_consumers.any? do |consumer|
    consumer.intents.any? do |intent|
      can_be_consumed?(consumer, provider_intent, intent, @serial_id)
    end
  end
end
A consumer which is within the same deployment
867
# Calls the block, retrying after a 0.5s pause while attempts remain;
# re-raises once +retries_left+ is exhausted. No default is provided on
# purpose — callers must state their budget explicitly.
# NOTE: intentionally rescues Exception so even severe errors are retried.
def wait(retries_left, &blk)
  begin
    blk.call
  rescue Exception
    retries_left -= 1
    raise if retries_left <= 0

    sleep(0.5)
    retry
  end
end
Do not add retries_left default value
868
# Acquires the named lock and starts a background thread that renews its
# expiration every @expiration/2 seconds (minimum 1s), waking early when
# signalled for release. If a renewal update matches no row (lock row gone
# or uid changed) the thread stops renewing; if the thread exits without
# an explicit unlock, a 'lost' event is recorded and the owning task is
# flipped to 'cancelling'. When called with a block, yields and releases
# the lock afterwards.
def lock acquire @refresh_thread = Thread . new do renew_interval = [ 1.0 , @expiration / 2 ] . max begin done_refreshing = false until @unlock || done_refreshing @refresh_mutex . synchronize do @refresh_signal . wait ( @refresh_mutex , renew_interval ) break if @unlock @logger . debug ( "Renewing lock: #@name" ) lock_expiration = Time . now . to_f + @expiration + 1 if Models :: Lock . where ( name : @name , uid : @uid ) . update ( expired_at : Time . at ( lock_expiration ) ) == 0 done_refreshing = true end end end ensure if ! @unlock Models :: Event . create ( user : Config . current_job . username , action : 'lost' , object_type : 'lock' , object_name : @name , task : @task_id , deployment : @deployment_name , error : 'Lock renewal thread exiting' , timestamp : Time . now , ) Models :: Task [ @task_id ] . update ( state : 'cancelling' ) @logger . debug ( 'Lock renewal thread exiting' ) end end end if block_given? begin yield ensure release end end end
Creates new lock with the given name .
869
# Releases a lock that was not automatically released by the lock method:
# flags the refresh thread to stop, deletes the lock row, wakes the thread,
# waits for it to exit, and records a 'release' event.
def release
  @refresh_mutex.synchronize {
    # Order matters: mark unlocked before signaling so the refresh thread's
    # post-wait check sees @unlock and exits cleanly.
    @unlock = true
    delete
    @refresh_signal.signal
  }

  @refresh_thread.join if @refresh_thread

  @event_manager.create_event(
    {
      user: Config.current_job.username,
      action: 'release',
      object_type: 'lock',
      object_name: @name,
      task: @task_id,
      deployment: @deployment_name,
    }
  )
end
Releases a lock that was not automatically released by the lock method.
870
# Returns an array of hashes describing every orphaned network: name, type,
# and creation/orphaning timestamps rendered as strings.
#
# @return [Array<Hash>]
def list_orphan_networks
  orphans = Models::Network.where(orphaned: true)
  orphans.map do |orphan|
    {
      'name' => orphan.name,
      'type' => orphan.type,
      'created_at' => orphan.created_at.to_s,
      'orphaned_at' => orphan.orphaned_at.to_s,
    }
  end
end
Returns a list of orphaned networks.
871
# Instantiates and performs a director job: wires the job to the current task,
# starts checkpointing, runs the job, and records the final task state.
# Cancellation is reported as :cancelled; any other error as :error.
def perform_job(*args)
  @task_logger.info('Creating job')
  job = @job_class.new(*args)
  Config.current_job = job
  job.task_id = @task_id
  job.task_checkpoint

  # Background thread that keeps the task's checkpoint_time fresh.
  run_checkpointing

  @task_logger.info("Performing task: #{@task.inspect}")
  @task.timestamp = Time.now
  @task.started_at = Time.now
  @task.checkpoint_time = Time.now
  @task.save

  result = job.perform

  @task_logger.info('Done')
  finish_task(:done, result)
rescue Bosh::Director::TaskCancelled => e
  log_exception(e)
  @task_logger.info("Task #{@task.id} cancelled")
  finish_task(:cancelled, 'task cancelled')
rescue Exception => e
  # Rescues Exception (not just StandardError) so the task row always ends in
  # a terminal state, even for severe failures — deliberate for a job runner.
  log_exception(e)
  @task_logger.error("#{e}\n#{e.backtrace.join("\n")}")
  finish_task(:error, e)
end
Instantiates and performs director job .
872
# Spawns a background thread that periodically (every
# Config.task_checkpoint_interval seconds) records a checkpoint for the
# current task. The thread is never killed explicitly: the job execution
# lifetime matches the worker process lifetime.
def run_checkpointing
  checkpointer = TaskCheckPointer.new(@task.id)
  Thread.new do
    with_thread_name("task:#{@task.id}-checkpoint") do
      loop do
        sleep(Config.task_checkpoint_interval)
        checkpointer.checkpoint
      end
    end
  end
end
Spawns a thread that periodically updates task checkpoint time . There is no need to kill this thread as job execution lifetime is the same as worker process lifetime .
873
# Truncates the string so it fits within the task result length limit.
#
# Strips surrounding whitespace, cuts to at most len+1 characters, and when
# truncation occurred trims the trailing partial word and appends '...'.
#
# Fix: the previous pattern (/\s\S/) globally deleted every
# whitespace-plus-character pair throughout the string; the intent is to trim
# only the trailing (possibly cut-off) word, as upstream does with
# /\s+?(\S+)?$/.
#
# @param string [String] text to truncate
# @param len [Integer] maximum length before truncation kicks in
# @return [String] the (possibly truncated) text
def truncate(string, len = 128)
  stripped = string.strip[0..len]
  if stripped.length > len
    # Drop the final whitespace run and any word fragment after it, then mark
    # the truncation with an ellipsis.
    stripped.gsub(/\s+?(\S+)?$/, "") + "..."
  else
    stripped
  end
end
Truncates the string so it fits within the task result length limit.
874
# Persists the final state of the task: refreshes the model first (so we do
# not clobber concurrent updates), then records the state, the truncated
# result, and the completion timestamp.
#
# @param state [Symbol] terminal state (:done, :cancelled, :error)
# @param result [Object] result object; stringified and truncated for storage
def finish_task(state, result)
  @task.refresh
  @task.timestamp = Time.now
  @task.result = truncate(result.to_s)
  @task.state = state
  @task.save
end
Marks task completion
875
# Records the given exception in the event log, normalising it into a
# DirectorError first so user-facing output stays consistent.
#
# @param exception [Exception]
def log_exception(exception)
  Config.event_log.log_error(DirectorError.create_from_exception(exception))
end
Logs the exception in the event log
876
# Registers (or reuses) a compiled-package requirement for compiling `package`
# against `stemcell`, recursing into every package dependency and wiring the
# resulting requirements together. The caller's `requirements` hash is
# populated as a side effect, keyed by [package id, "<os>/<version>"].
#
# @return the requirement for `package` on `stemcell`
def generate!(requirements, instance_group, job, package, stemcell)
  @logger.info("Checking whether package '#{package.desc}' needs to be compiled for stemcell '#{stemcell.desc}'")

  requirement_key = [package.id, "#{stemcell.os}/#{stemcell.version}"]
  requirement = requirements[requirement_key]
  if requirement
    # Already planned for this stemcell; just record the extra instance group.
    requirement.add_instance_group(instance_group)
    return requirement
  end

  package_dependency_manager = PackageDependenciesManager.new(job.release.model)
  requirement = create_requirement(instance_group, job, package, stemcell, package_dependency_manager)

  @logger.info("Processing package '#{package.desc}' dependencies")
  dependencies = package_dependency_manager.dependencies(package)
  dependencies.each do |dependency|
    @logger.info("Package '#{package.desc}' depends on package '#{dependency.desc}'")
    # Recursion shares the same requirements hash, so diamond dependencies
    # resolve to a single requirement instance.
    dependency_requirement = generate!(requirements, instance_group, job, dependency, stemcell)
    requirement.add_dependency(dependency_requirement)
  end

  requirements[requirement_key] = requirement
  requirement
end
The requirements hash passed in by the caller will be populated with CompiledPackageRequirement objects.
877
# Resolves job (template) models for every release in the deployment plan,
# then validates all instance groups. The two validation passes are kept
# separate so failures surface in the same order as before: first every
# package-name collision check, then every exported_from/stemcell check.
def bind_jobs
  @deployment_plan.releases.each(&:bind_jobs)

  instance_groups = @deployment_plan.instance_groups
  instance_groups.each { |group| group.validate_package_names_do_not_collide! }
  instance_groups.each { |group| group.validate_exported_from_matches_stemcell! }
end
Binds template models for each release spec in the deployment plan
878
# Ensures the given value is properly double quoted for PG, which also avoids
# conflicts with reserved keywords. The wildcard "*" and dotted (already
# qualified) identifiers pass through untouched; nil returns nil.
#
# @param value [Object, nil] identifier-ish value
# @return [Object, String, nil] the original value, or a quoted identifier
def double_quote(value)
  return if value.nil?

  str = value.to_s
  if str == "*" || str =~ /\./
    value
  else
    PG::Connection.quote_ident(str)
  end
end
Ensures the given value is properly double quoted. This also ensures we don't have conflicts with reserved keywords.
879
# Renders the key as a single-quoted PG literal. Booleans map to 't'/'f',
# numerics pass through untouched, and anything else is stringified and
# wrapped in single quotes unless it already carries them.
#
# @param key [Object]
# @return [String, Numeric]
def literal_key(key)
  return "'t'" if key.is_a?(TrueClass)
  return "'f'" if key.is_a?(FalseClass)
  return key if key.is_a?(Numeric)

  str = key.to_s
  already_quoted = str.start_with?("'") && str.end_with?("'")
  already_quoted ? str : "'#{str}'"
end
Ensures the key is properly single quoted and treated as an actual PG key reference.
880
# Converts a potential subquery into a compatible Arel SQL node.
# Arel nodes / SQL literals / nil pass through untouched; relations are
# spawned (so the caller's scope is not mutated) and rendered to SQL; anything
# else is rendered via to_sql when available, otherwise to_s.
#
# NOTE(review): `Arel::Node` is unusual — upstream typically matches
# `Arel::Nodes::Node`; confirm this constant exists in this codebase.
def to_arel_sql(value)
  case value
  when Arel::Node, Arel::Nodes::SqlLiteral, nil
    value
  when ActiveRecord::Relation
    Arel.sql(value.spawn.to_sql)
  else
    Arel.sql(value.respond_to?(:to_sql) ? value.to_sql : value.to_s)
  end
end
Converts a potential subquery into a compatible Arel SQL node .
881
# `where.contains` support: builds a Postgres containment predicate for
# hstore / jsonb / array columns from an equality or IN predicate.
#
# Raises ArgumentError when the predicate is not equality/IN, or when the
# resolved column's type supports neither hstore/jsonb nor array containment.
def contains(opts, *rest)
  build_where_chain(opts, rest) do |arel|
    case arel
    when Arel::Nodes::In, Arel::Nodes::Equality
      # Resolve the column either directly or through an association.
      column = left_column(arel) || column_from_association(arel)

      if [:hstore, :jsonb].include?(column.type)
        Arel::Nodes::ContainsHStore.new(arel.left, arel.right)
      elsif column.try(:array)
        Arel::Nodes::ContainsArray.new(arel.left, arel.right)
      else
        raise ArgumentError, "Invalid argument for .where.contains(), got #{arel.class}"
      end
    else
      raise ArgumentError, "Invalid argument for .where.contains(), got #{arel.class}"
    end
  end
end
Finds records that contain the given nested set of elements.
882
# Absolute, normalized URL for the API representation of this resource.
# Memoized after the first computation.
#
# @return [String]
def api_url
  @api_url ||= begin
    uri = Addressable::URI.new(
      scheme: scheme,
      host: host,
      port: port,
      path: path_with_base("/_api", resource_path)
    )
    uri.normalize.to_s
  end
end
Absolute URL to the API representation of this resource
883
# Returns a hash suitable for use as an API response, starting from the base
# representation and layering in URL fields, optional content fields, and
# type-specific adjustments for collections, documents, and static files.
#
# @param include_content [Boolean] whether to embed content fields
# @return [Hash]
def to_api(include_content: false)
  output = hash_for_api
  output = output.merge(url_fields)

  # Include content when requested; otherwise strip all content fields.
  if include_content
    output = output.merge(content_fields)
  else
    CONTENT_FIELDS.each { |field| output.delete(field) }
  end

  # Rendered output duplicates content; drop it for API consistency.
  output.delete("output")

  if is_a?(Jekyll::Collection)
    # Individual docs are exposed via a separate endpoint.
    output.delete("docs")
    output["entries_url"] = entries_url
  end

  if is_a?(Jekyll::Document)
    # Drafts report their path as if already published.
    output["relative_path"] = relative_path.sub("_drafts/", "") if draft?
    output["name"] = basename
  end

  if is_a?(Jekyll::StaticFile)
    output["from_theme"] = from_theme_gem?
  end

  output
end
Returns a hash suitable for use as an API response .
884
# Builds the hash of content-related fields for the API payload. Static files
# expose encoded content, data files expose parsed and raw content, and
# everything else exposes raw content plus front matter. Documents also embed
# their neighbouring documents' API representations.
#
# @return [Hash]
def content_fields
  output = {}

  if is_a?(Jekyll::StaticFile)
    output["encoded_content"] = encoded_content
  elsif is_a?(JekyllAdmin::DataFile)
    output["content"] = content
    output["raw_content"] = raw_content
  else
    output["raw_content"] = raw_content
    output["front_matter"] = front_matter
  end

  if is_a?(Jekyll::Document)
    { "next" => :next_doc, "previous" => :previous_doc }.each do |field, reader|
      neighbour = public_send(reader)
      output[field] = neighbour.to_api if neighbour
    end
  end

  output
end
Returns a hash of content fields for inclusion in the API output
885
# Returns the sanitized path to the requested file's containing directory,
# scoping the wildcard portion of the URL to the namespace's base directory.
#
# @return [String]
def directory_path
  splat = params["splat"].first
  relative =
    case namespace
    when "collections"
      File.join(collection.relative_directory, splat)
    when "data"
      File.join(DataFile.data_dir, splat)
    when "drafts"
      File.join("_drafts", splat)
    else
      splat
    end
  sanitized_path(relative)
end
Returns the path to the requested file's containing directory.
886
# Writes content to the sanitized path in binary mode, creating parent
# directories as needed, then refreshes the site: in production only a
# re-read is performed, otherwise the site is fully processed.
#
# @param path [String] requested (unsanitized) path
# @param content [String] file contents to write
def write_file(path, content)
  Jekyll.logger.debug "WRITING:", path
  target = sanitized_path(path)
  FileUtils.mkdir_p(File.dirname(target))
  File.binwrite(target, content)
  ENV["RACK_ENV"] == "production" ? site.read : site.process
end
Write a file to disk with the given content
887
# Removes the file at the given path (after sanitizing it), ignoring missing
# files, then regenerates the site.
#
# @param path [String] requested (unsanitized) path
def delete_file(path)
  Jekyll.logger.debug "DELETING:", path
  target = sanitized_path(path)
  FileUtils.rm_f(target)
  site.process
end
Delete the file at the given path
888
# Returns the front matter with nil values replaced by the literal string
# "null" so they remain visible when serialized.
#
# @return [Hash]
def restored_front_matter
  front_matter.each_with_object({}) do |(key, value), restored|
    restored[key] = value.nil? ? "null" : value
  end
end
Restores the front matter, replacing nil values with the literal string "null" so they remain visible.
889
# Returns true when the rule set occupies a single line, i.e. every child sits
# on the same line on which the rule's source range ends.
#
# @param rule [Sass::Tree::RuleNode]
# @return [Boolean]
def single_line_rule_set?(rule)
  closing_line = rule.source_range.end_pos.line
  rule.children.all? { |child| child.line == closing_line }
end
Return whether this rule set occupies a single line .
890
# Walks consecutive property pairs and reports any property that starts on the
# same line as the property immediately before it.
#
# @param properties [Array] property nodes in document order
def check_adjacent_properties(properties)
  properties.each_cons(2) do |previous, current|
    next unless previous.line == current.line
    add_lint(current, "Property '#{current.name.join}' should be placed on own line")
  end
end
Compare each property against the next property to see if they are on the same line .
891
# Executed before a node has been visited. Extracts any lint control commands
# attached to the node and applies those addressed to this linter (or to
# 'all'), popping the control-comment stack when appropriate.
def before_node_visit(node)
  return unless (commands = Array(extract_commands(node))).any?

  commands.each do |command|
    linters = command[:linters]
    # Only act on commands aimed at this linter (or at every linter).
    next unless linters.include?('all') || linters.include?(@linter.name)

    process_command(command, node)

    # Rule nodes manage their own scope; likewise skip popping when the
    # command's source line matches the pattern below.
    # NOTE(review): this regex may have lost characters during formatting —
    # confirm the intended pattern against upstream scss-lint.
    next if node.is_a?(Sass::Tree::RuleNode) ||
            %r{\s\*}.match(@linter.engine.lines[command[:line] - 1])

    pop_control_comment_stack(node)
  end
end
Executed before a node has been visited .
892
# Gets the child of the node that resides on the lowest line in the file
# (i.e. the highest line number). Returns nil when the node itself is the
# lowest — meaning it has no qualifying child.
def last_child(node)
  last = node.children.inject(node) do |lowest, child|
    # A child without line info ends the scan early: `return` exits the whole
    # method with the lowest-line node found so far.
    return lowest unless child.respond_to?(:line)
    lowest.line < child.line ? child : lowest
  end

  # The node itself won — report "no child".
  return if last == node
  last
end
Gets the child of the node that resides on the lowest line in the file .
893
# Checks whether a simple sequence holds at least one simple selector of the
# given class.
#
# @param seq [#members] simple sequence
# @param selector_class [Class]
# @return [Boolean]
def seq_contains_sel_class?(seq, selector_class)
  seq.members.each do |member|
    return true if member.is_a?(selector_class)
  end
  false
end
Checks if a simple sequence contains a simple selector of a certain class .
894
# Computes the maximum nesting depth across all sequences in the given comma
# sequence. A missing comma sequence terminates the recursion one level deeper
# than the current depth.
#
# @param comma_sequence [Sass::Selector::CommaSequence, nil]
# @param current_depth [Integer]
# @return [Integer]
def max_sequence_depth(comma_sequence, current_depth)
  return current_depth + 1 unless comma_sequence

  depths = comma_sequence.members.map { |member| sequence_depth(member, current_depth) }
  depths.max
end
Find the maximum depth of all sequences in a comma sequence .
895
# At the document root (zero expected indent), rule sets may sit at any
# indentation as long as it is a multiple of the configured indent width.
# Returns true when a lint was reported, false otherwise.
#
# @param node [Sass::Tree::Node]
# @param actual_indent [Integer]
# @return [Boolean]
def check_root_ruleset_indent(node, actual_indent)
  return false unless @indent == 0 && node.is_a?(Sass::Tree::RuleNode)
  return false if (actual_indent % @indent_width).zero?

  add_lint(node.line, lint_message("a multiple of #{@indent_width}", actual_indent))
  true
end
Allow rule sets to be indented any amount when the expected indent is zero, as long as it's a multiple of the indent width.
896
# True when the node's indentation exceeds its parent's by exactly one
# configured indent width.
#
# @param node [Sass::Tree::Node]
# @param actual_indent [Integer]
# @return [Boolean]
def one_shift_greater_than_parent?(node, actual_indent)
  parent_width = node_indent(node_indent_parent(node)).length
  actual_indent == parent_width + @indent_width
end
Returns whether node is indented exactly one indent width greater than its parent .
897
# An expression enclosed in parens will include or not include each paren in
# its source range depending on whitespace. Feel outward from the range for
# enclosing parens; when both are found, widen the range to include them and
# return the widened source, otherwise return the original source unchanged.
def feel_for_enclosing_parens(node)
  range = node.source_range
  original_source = source_from_range(range)
  left_offset = -1
  right_offset = 0

  if original_source[-1] != ')'
    # Walk right past whitespace; give up unless we land on ')'.
    right_offset += 1 while character_at(range.end_pos, right_offset) =~ /\s/
    return original_source if character_at(range.end_pos, right_offset) != ')'
  end

  # Walk left past whitespace; give up unless we land on '('.
  left_offset -= 1 while character_at(range.start_pos, left_offset) =~ /\s/
  return original_source if character_at(range.start_pos, left_offset) != '('

  # Inspect the character just before the '(' and bail out when it matches.
  # NOTE(review): the pattern below (a lone space) may have been garbled
  # during formatting — confirm the intended character class against upstream.
  return original_source if character_at(range.start_pos, left_offset - 1).match?(/ /)

  # Both parens found: widen the range to cover them.
  range.start_pos.offset += left_offset
  range.end_pos.offset += right_offset
  source_from_range(range)
end
An expression enclosed in parens will include or not include each paren depending on whitespace . Here we feel out for enclosing parens and return them as the new source for the node .
898
# Block declarations (and comments) are validated when they themselves are
# visited; anything else is not. So when the node before this one is not one
# of those, verify here that the line immediately above is empty.
#
# @param node [Sass::Tree::Node]
# @param type [String] human-readable node kind for the message
def check_preceding_node(node, type)
  preceding = prev_node(node)
  exempt_types = [
    Sass::Tree::FunctionNode,
    Sass::Tree::MixinNode,
    Sass::Tree::MixinDefNode,
    Sass::Tree::RuleNode,
    Sass::Tree::CommentNode,
  ]
  return if preceding.nil? || exempt_types.any? { |klass| preceding.is_a?(klass) }
  return if engine.lines[node.line - 2].strip.empty?

  add_lint(node.line, MESSAGE_FORMAT % [type, 'preceded'])
end
In cases where the previous node is not a block declaration, we won't have run any checks against it, so we need to check here whether the previous line is empty.
899
# Column offset at which the property's value begins: the end of the property
# name plus the run of plain spaces after the colon. (The original
# start + (end - start) arithmetic collapses to the name range's end offset.)
#
# @param prop [Sass::Tree::PropNode]
# @return [Integer]
def value_offset(prop)
  name_range = prop.name_source_range
  leading_spaces = whitespace_after_colon(prop).take_while { |char| char == ' ' }
  name_range.end_pos.offset + leading_spaces.size
end
Offset of value for property