idx int64 0 24.9k | question stringlengths 68 4.14k | target stringlengths 9 749 |
|---|---|---|
600 | def detect_link_tag_time ( newer_tag ) newer_tag_time = newer_tag . nil? ? Time . new : get_time_of_tag ( newer_tag ) if newer_tag . nil? && options [ :future_release ] newer_tag_name = options [ :future_release ] newer_tag_link = options [ :future_release ] else newer_tag_name = newer_tag . nil? ? options [ :unreleased_label ] : newer_tag [ "name" ] newer_tag_link = newer_tag . nil? ? "HEAD" : newer_tag_name end [ newer_tag_link , newer_tag_name , newer_tag_time ] end | Detect link name and time for specified tag . |
601 | def parse_heading ( heading ) captures = { "version" => nil , "url" => nil , "date" => nil } @heading_structures . each do | regexp | matches = Regexp . new ( regexp ) . match ( heading ) if matches captures . merge! ( Hash [ matches . names . zip ( matches . captures ) ] ) break end end captures end | Parse a single heading and return a Hash |
602 | def parse ( data ) sections = data . split ( / / ) headings = data . scan ( / / ) headings . each_with_index . map do | heading , index | section = parse_heading ( heading ) section [ "content" ] = sections . at ( index + 1 ) section end end | Parse the given ChangeLog data into a list of Hashes |
603 | def find_issues_to_add ( all_issues , tag_name ) all_issues . select do | issue | if issue [ "milestone" ] . nil? false else milestone_is_tag = @filtered_tags . find do | tag | tag [ "name" ] == issue [ "milestone" ] [ "title" ] end if milestone_is_tag . nil? false else issue [ "milestone" ] [ "title" ] == tag_name end end end end | Add all issues that should be in that tag according milestone |
604 | def filter_array_by_labels ( all_issues ) filtered_issues = include_issues_by_labels ( all_issues ) filtered_issues = exclude_issues_by_labels ( filtered_issues ) exclude_issues_without_labels ( filtered_issues ) end | General filtered function |
605 | def generate_entry_for_tag ( pull_requests , issues , newer_tag_name , newer_tag_link , newer_tag_time , older_tag_name ) github_site = @options [ :github_site ] || "https://github.com" project_url = "#{github_site}/#{@options[:user]}/#{@options[:project]}" create_sections @content = generate_header ( newer_tag_name , newer_tag_link , newer_tag_time , older_tag_name , project_url ) @content += generate_body ( pull_requests , issues ) @content end | Generates log entry with header and body |
606 | def generate_header ( newer_tag_name , newer_tag_link , newer_tag_time , older_tag_name , project_url ) header = "" time_string = newer_tag_time . strftime ( @options [ :date_format ] ) release_url = if @options [ :release_url ] format ( @options [ :release_url ] , newer_tag_link ) else "#{project_url}/tree/#{newer_tag_link}" end header += if newer_tag_name . equal? ( @options [ :unreleased_label ] ) "## [#{newer_tag_name}](#{release_url})\n\n" else "## [#{newer_tag_name}](#{release_url}) (#{time_string})\n\n" end if @options [ :compare_link ] && older_tag_name header += "[Full Changelog](#{project_url}/compare/#{older_tag_name}...#{newer_tag_link})\n\n" end header end | Generates header text for an entry . |
607 | def sort_into_sections ( pull_requests , issues ) if @options [ :issues ] unmapped_issues = sort_labeled_issues ( issues ) add_unmapped_section ( unmapped_issues ) end if @options [ :pulls ] unmapped_pull_requests = sort_labeled_issues ( pull_requests ) add_unmapped_section ( unmapped_pull_requests ) end nil end | Sorts issues and PRs into entry sections by labels and lack of labels . |
608 | def sort_labeled_issues ( issues ) sorted_issues = [ ] issues . each do | issue | label_names = issue [ "labels" ] . collect { | l | l [ "name" ] } @sections . each do | section | unless ( section . labels & label_names ) . empty? section . issues << issue sorted_issues << issue break end end end issues - sorted_issues end | Iterates through sections and sorts labeled issues into them based on the label mapping . Returns any unmapped or unlabeled issues . |
609 | def extract_pair ( line ) key , value = line . split ( "=" , 2 ) [ key . tr ( "-" , "_" ) . to_sym , value . gsub ( / \n \r / , "" ) ] end | Returns a the option name as a symbol and its string value sans newlines . |
610 | def run log = @generator . compound_changelog if @options . write_to_file? output_filename = @options [ :output ] . to_s File . open ( output_filename , "wb" ) { | file | file . write ( log ) } puts "Done!" puts "Generated log placed in #{Dir.pwd}/#{output_filename}" else puts log end end | Class responsible for whole changelog generation cycle |
611 | def generate_entry_between_tags ( older_tag , newer_tag ) filtered_issues , filtered_pull_requests = filter_issues_for_tags ( newer_tag , older_tag ) if newer_tag . nil? && filtered_issues . empty? && filtered_pull_requests . empty? return "" end newer_tag_link , newer_tag_name , newer_tag_time = detect_link_tag_time ( newer_tag ) older_tag_name = if older_tag . nil? @fetcher . oldest_commit [ "sha" ] else older_tag [ "name" ] end Entry . new ( options ) . generate_entry_for_tag ( filtered_pull_requests , filtered_issues , newer_tag_name , newer_tag_link , newer_tag_time , older_tag_name ) end | Generate log only between 2 specified tags |
612 | def filter_issues_for_tags ( newer_tag , older_tag ) filtered_pull_requests = filter_by_tag ( @pull_requests , newer_tag ) filtered_issues = delete_by_time ( @issues , "actual_date" , older_tag , newer_tag ) newer_tag_name = newer_tag . nil? ? nil : newer_tag [ "name" ] if options [ :filter_issues_by_milestone ] filtered_issues = filter_by_milestone ( filtered_issues , newer_tag_name , @issues ) filtered_pull_requests = filter_by_milestone ( filtered_pull_requests , newer_tag_name , @pull_requests ) end [ filtered_issues , filtered_pull_requests ] end | Filters issues and pull requests based on respectively actual_date and merged_at timestamp fields . actual_date is the detected form of closed_at based on merge event SHA commit times . |
613 | def generate_entries_for_all_tags puts "Generating entry..." if options [ :verbose ] entries = generate_unreleased_entry @tag_section_mapping . each_pair do | _tag_section , left_right_tags | older_tag , newer_tag = left_right_tags entries += generate_entry_between_tags ( older_tag , newer_tag ) end entries end | The full cycle of generation for whole project |
614 | def to_options @options . merge ( environment : environment , env_config : env_config , apps_path : apps_path , rackup : rackup , host : host , port : port ) end | Serialize the most relevant settings into a Hash |
615 | def root ( value = nil ) if value @root = value else Utils :: Kernel . Pathname ( @root || Dir . pwd ) . realpath end end | The root of the application |
616 | def routes ( path = nil , & blk ) if path or block_given? @routes = Config :: Routes . new ( root , path , & blk ) else @routes end end | Application routes . |
617 | def port ( value = nil ) if value @port = Integer ( value ) else return @port if defined? ( @port ) return @env . port unless @env . default_port? return DEFAULT_SSL_PORT if force_ssl @env . port end end | The URI port for this application . This is used by the router helpers to generate absolute URLs . |
618 | def load! ( path ) return unless defined? ( Dotenv :: Parser ) contents = :: File . open ( path , "rb:bom|utf-8" , & :read ) parsed = Dotenv :: Parser . call ( contents ) parsed . each do | k , v | next if @env . has_key? ( k ) @env [ k ] = v end nil end | Loads a dotenv file and updates self |
619 | def default_options @default_options ||= Utils :: Hash . symbolize ( { PROJECT_NAME => project_name , TEST_KEY => DEFAULT_TEST_SUITE , TEMPLATE_KEY => DEFAULT_TEMPLATE } ) . freeze end | Default values for writing the hanamirc file |
620 | def parse_file ( path ) { } . tap do | hash | File . readlines ( path ) . each do | line | key , value = line . split ( SEPARATOR ) hash [ key ] = value . strip end end end | Read hanamirc file and parse it s values |
621 | def load! load_default_stack stack . each { | m , args , block | builder . use ( load_middleware ( m ) , * args , & block ) } builder . run routes self end | Instantiate a middleware stack |
622 | def use ( middleware , * args , & blk ) stack . push [ middleware , args , blk ] stack . uniq! end | Append a middleware to the stack . |
623 | def prepend ( middleware , * args , & blk ) stack . unshift [ middleware , args , blk ] stack . uniq! end | Prepend a middleware to the stack . |
624 | def path ( name , * args ) Utils :: Escape :: SafeString . new ( @routes . path ( name , * args ) ) end | Initialize the factory |
625 | def url ( name , * args ) Utils :: Escape :: SafeString . new ( @routes . url ( name , * args ) ) end | Return an absolute path for the given route name |
626 | def exists? ( service_name ) open_service ( service_name , SC_MANAGER_CONNECT , SERVICE_QUERY_STATUS ) do | _ | true end rescue Puppet :: Util :: Windows :: Error => e return false if e . code == ERROR_SERVICE_DOES_NOT_EXIST raise e end | Returns true if the service exists false otherwise . |
627 | def start ( service_name , timeout : DEFAULT_TIMEOUT ) Puppet . debug _ ( "Starting the %{service_name} service. Timeout set to: %{timeout} seconds" ) % { service_name : service_name , timeout : timeout } valid_initial_states = [ SERVICE_STOP_PENDING , SERVICE_STOPPED , SERVICE_START_PENDING ] transition_service_state ( service_name , valid_initial_states , SERVICE_RUNNING , timeout ) do | service | if StartServiceW ( service , 0 , FFI :: Pointer :: NULL ) == FFI :: WIN32_FALSE raise Puppet :: Util :: Windows :: Error , _ ( "Failed to start the service" ) end end Puppet . debug _ ( "Successfully started the %{service_name} service" ) % { service_name : service_name } end | Start a windows service |
628 | def stop ( service_name , timeout : DEFAULT_TIMEOUT ) Puppet . debug _ ( "Stopping the %{service_name} service. Timeout set to: %{timeout} seconds" ) % { service_name : service_name , timeout : timeout } valid_initial_states = SERVICE_STATES . keys - [ SERVICE_STOPPED ] transition_service_state ( service_name , valid_initial_states , SERVICE_STOPPED , timeout ) do | service | send_service_control_signal ( service , SERVICE_CONTROL_STOP ) end Puppet . debug _ ( "Successfully stopped the %{service_name} service" ) % { service_name : service_name } end | Stop a windows service |
629 | def resume ( service_name , timeout : DEFAULT_TIMEOUT ) Puppet . debug _ ( "Resuming the %{service_name} service. Timeout set to: %{timeout} seconds" ) % { service_name : service_name , timeout : timeout } valid_initial_states = [ SERVICE_PAUSE_PENDING , SERVICE_PAUSED , SERVICE_CONTINUE_PENDING ] transition_service_state ( service_name , valid_initial_states , SERVICE_RUNNING , timeout ) do | service | wait_on_pending_state ( service , SERVICE_PAUSE_PENDING , timeout ) send_service_control_signal ( service , SERVICE_CONTROL_CONTINUE ) end Puppet . debug _ ( "Successfully resumed the %{service_name} service" ) % { service_name : service_name } end | Resume a paused windows service |
630 | def service_state ( service_name ) state = nil open_service ( service_name , SC_MANAGER_CONNECT , SERVICE_QUERY_STATUS ) do | service | query_status ( service ) do | status | state = SERVICE_STATES [ status [ :dwCurrentState ] ] end end if state . nil? raise Puppet :: Error . new ( _ ( "Unknown Service state '%{current_state}' for '%{service_name}'" ) % { current_state : state . to_s , service_name : service_name } ) end state end | Query the state of a service using QueryServiceStatusEx |
631 | def service_start_type ( service_name ) start_type = nil open_service ( service_name , SC_MANAGER_CONNECT , SERVICE_QUERY_CONFIG ) do | service | query_config ( service ) do | config | start_type = SERVICE_START_TYPES [ config [ :dwStartType ] ] end end if start_type . nil? raise Puppet :: Error . new ( _ ( "Unknown start type '%{start_type}' for '%{service_name}'" ) % { start_type : start_type . to_s , service_name : service_name } ) end start_type end | Query the configuration of a service using QueryServiceConfigW |
632 | def set_startup_mode ( service_name , startup_type ) startup_code = SERVICE_START_TYPES . key ( startup_type ) if startup_code . nil? raise Puppet :: Error . new ( _ ( "Unknown start type %{start_type}" ) % { startup_type : startup_type . to_s } ) end open_service ( service_name , SC_MANAGER_CONNECT , SERVICE_CHANGE_CONFIG ) do | service | success = ChangeServiceConfigW ( service , SERVICE_NO_CHANGE , startup_code , SERVICE_NO_CHANGE , FFI :: Pointer :: NULL , FFI :: Pointer :: NULL , FFI :: Pointer :: NULL , FFI :: Pointer :: NULL , FFI :: Pointer :: NULL , FFI :: Pointer :: NULL , FFI :: Pointer :: NULL ) if success == FFI :: WIN32_FALSE raise Puppet :: Util :: Windows :: Error . new ( _ ( "Failed to update service configuration" ) ) end end end | Change the startup mode of a windows service |
633 | def services services = { } open_scm ( SC_MANAGER_ENUMERATE_SERVICE ) do | scm | size_required = 0 services_returned = 0 FFI :: MemoryPointer . new ( :dword ) do | bytes_pointer | FFI :: MemoryPointer . new ( :dword ) do | svcs_ret_ptr | FFI :: MemoryPointer . new ( :dword ) do | resume_ptr | resume_ptr . write_dword ( 0 ) EnumServicesStatusExW ( scm , :SC_ENUM_PROCESS_INFO , ALL_SERVICE_TYPES , SERVICE_STATE_ALL , FFI :: Pointer :: NULL , 0 , bytes_pointer , svcs_ret_ptr , resume_ptr , FFI :: Pointer :: NULL ) size_required = bytes_pointer . read_dword FFI :: MemoryPointer . new ( size_required ) do | buffer_ptr | resume_ptr . write_dword ( 0 ) svcs_ret_ptr . write_dword ( 0 ) success = EnumServicesStatusExW ( scm , :SC_ENUM_PROCESS_INFO , ALL_SERVICE_TYPES , SERVICE_STATE_ALL , buffer_ptr , buffer_ptr . size , bytes_pointer , svcs_ret_ptr , resume_ptr , FFI :: Pointer :: NULL ) if success == FFI :: WIN32_FALSE raise Puppet :: Util :: Windows :: Error . new ( _ ( "Failed to fetch services" ) ) end services_returned = svcs_ret_ptr . read_dword cursor_ptr = FFI :: Pointer . new ( ENUM_SERVICE_STATUS_PROCESSW , buffer_ptr ) 0 . upto ( services_returned - 1 ) do | index | service = ENUM_SERVICE_STATUS_PROCESSW . new ( cursor_ptr [ index ] ) services [ service [ :lpServiceName ] . read_arbitrary_wide_string_up_to ( SERVICENAME_MAX ) ] = { :display_name => service [ :lpDisplayName ] . read_arbitrary_wide_string_up_to ( SERVICENAME_MAX ) , :service_status_process => service [ :ServiceStatusProcess ] } end end end end end end services end | enumerate over all services in all states and return them as a hash |
634 | def gen_sub_directories super File . makedirs ( MODULE_DIR ) File . makedirs ( NODE_DIR ) File . makedirs ( PLUGIN_DIR ) rescue $stderr . puts $ERROR_INFO . message exit 1 end | generate all the subdirectories modules classes and files |
635 | def gen_top_index ( collection , title , template , filename ) template = TemplatePage . new ( RDoc :: Page :: FR_INDEX_BODY , template ) res = [ ] collection . sort . each do | f | if f . document_self res << { "classlist" => CGI . escapeHTML ( "#{MODULE_DIR}/fr_#{f.index_name}.html" ) , "module" => CGI . escapeHTML ( "#{CLASS_DIR}/#{f.index_name}.html" ) , "name" => CGI . escapeHTML ( f . index_name ) } end end values = { "entries" => res , 'list_title' => CGI . escapeHTML ( title ) , 'index_url' => main_url , 'charset' => @options . charset , 'style_url' => style_url ( '' , @options . css ) , } Puppet :: FileSystem . open ( filename , nil , "w:UTF-8" ) do | f | template . write_html_on ( f , values ) end end | generate a top index |
636 | def gen_class_index gen_an_index ( @classes , 'All Classes' , RDoc :: Page :: CLASS_INDEX , "fr_class_index.html" ) @allfiles . each do | file | unless file [ 'file' ] . context . file_relative_name =~ / \. / gen_composite_index ( file , RDoc :: Page :: COMBO_INDEX , "#{MODULE_DIR}/fr_#{file["file"].context.module_name}.html" ) end end end | generate the all classes index file and the combo index |
637 | def main_url main_page = @options . main_page ref = nil if main_page ref = AllReferences [ main_page ] if ref ref = ref . path else $stderr . puts "Could not find main page #{main_page}" end end unless ref for file in @files if file . document_self and file . context . global ref = CGI . escapeHTML ( "#{CLASS_DIR}/#{file.context.module_name}.html" ) break end end end unless ref for file in @files if file . document_self and ! file . context . global ref = CGI . escapeHTML ( "#{CLASS_DIR}/#{file.context.module_name}.html" ) break end end end unless ref $stderr . puts "Couldn't find anything to document" $stderr . puts "Perhaps you've used :stopdoc: in all classes" exit ( 1 ) end ref end | returns the initial_page url |
638 | def http_url ( full_name , prefix ) path = full_name . dup path . gsub! ( / \s \w / ) { "from-#$1" } if path [ '<<' ] File . join ( prefix , path . split ( "::" ) . collect { | p | Digest :: MD5 . hexdigest ( p ) } ) + ".html" end | return the relative file name to store this class in which is also its url |
639 | def methods_available? ( obj ) methods . each do | m | if obj . is_a? ( Class ) return false unless obj . public_method_defined? ( m ) else return false unless obj . respond_to? ( m ) end end true end | Checks whether all feature predicate methods are available . |
640 | def update ( data ) process_name ( data ) if data [ 'name' ] process_version ( data ) if data [ 'version' ] process_source ( data ) if data [ 'source' ] process_data_provider ( data ) if data [ 'data_provider' ] merge_dependencies ( data ) if data [ 'dependencies' ] @data . merge! ( data ) return self end | Merges the current set of metadata with another metadata hash . This method also handles the validation of module names and versions in an effort to be proactive about module publishing constraints . |
641 | def add_dependency ( name , version_requirement = nil , repository = nil ) validate_name ( name ) validate_version_range ( version_requirement ) if version_requirement if dup = @data [ 'dependencies' ] . find { | d | d . full_module_name == name && d . version_requirement != version_requirement } raise ArgumentError , _ ( "Dependency conflict for %{module_name}: Dependency %{name} was given conflicting version requirements %{version_requirement} and %{dup_version}. Verify that there are no duplicates in the metadata.json." ) % { module_name : full_module_name , name : name , version_requirement : version_requirement , dup_version : dup . version_requirement } end dep = Dependency . new ( name , version_requirement , repository ) @data [ 'dependencies' ] . add ( dep ) dep end | Validates the name and version_requirement for a dependency then creates the Dependency and adds it . Returns the Dependency that was added . |
642 | def process_name ( data ) validate_name ( data [ 'name' ] ) author , @module_name = data [ 'name' ] . split ( / \/ / , 2 ) data [ 'author' ] ||= author if @data [ 'author' ] == DEFAULTS [ 'author' ] end | Do basic validation and parsing of the name parameter . |
643 | def process_source ( data ) if data [ 'source' ] =~ %r[ ] source_uri = URI . parse ( data [ 'source' ] ) else source_uri = URI . parse ( "http://#{data['source']}" ) end if source_uri . host =~ / \. \. / source_uri . scheme = 'https' source_uri . path . sub! ( / \. / , '' ) data [ 'project_page' ] ||= @data [ 'project_page' ] || source_uri . to_s data [ 'issues_url' ] ||= @data [ 'issues_url' ] || source_uri . to_s . sub ( / \/ / , '' ) + '/issues' end rescue URI :: Error return end | Do basic parsing of the source parameter . If the source is hosted on GitHub we can predict sensible defaults for both project_page and issues_url . |
644 | def merge_dependencies ( data ) data [ 'dependencies' ] . each do | dep | add_dependency ( dep [ 'name' ] , dep [ 'version_requirement' ] , dep [ 'repository' ] ) end data . delete 'dependencies' end | Validates and parses the dependencies . |
645 | def validate_name ( name ) return if name =~ / \A \/ \Z /i namespace , modname = name . split ( / \/ / , 2 ) modname = :namespace_missing if namespace == '' err = case modname when nil , '' , :namespace_missing _ ( "the field must be a namespaced module name" ) when / /i _ ( "the module name contains non-alphanumeric (or underscore) characters" ) when / /i _ ( "the module name must begin with a letter" ) else _ ( "the namespace contains non-alphanumeric characters" ) end raise ArgumentError , _ ( "Invalid 'name' field in metadata.json: %{err}" ) % { err : err } end | Validates that the given module name is both namespaced and well - formed . |
646 | def validate_version ( version ) return if SemanticPuppet :: Version . valid? ( version ) err = _ ( "version string cannot be parsed as a valid Semantic Version" ) raise ArgumentError , _ ( "Invalid 'version' field in metadata.json: %{err}" ) % { err : err } end | Validates that the version string can be parsed as per SemVer . |
647 | def validate_data_provider ( value ) if value . is_a? ( String ) unless value =~ / / if value =~ / / raise ArgumentError , _ ( "field 'data_provider' contains non-alphanumeric characters" ) else raise ArgumentError , _ ( "field 'data_provider' must begin with a letter" ) end end else raise ArgumentError , _ ( "field 'data_provider' must be a string" ) end end | Validates that the given _value_ is a symbolic name that starts with a letter and then contains only letters digits or underscore . Will raise an ArgumentError if that s not the case . |
648 | def validate_version_range ( version_range ) SemanticPuppet :: VersionRange . parse ( version_range ) rescue ArgumentError => e raise ArgumentError , _ ( "Invalid 'version_range' field in metadata.json: %{err}" ) % { err : e } end | Validates that the version range can be parsed by Semantic . |
649 | def insert_default_acl self . class . default_acl . each do | acl | unless rights [ acl [ :acl ] ] Puppet . info _ ( "Inserting default '%{acl}' (auth %{auth}) ACL" ) % { acl : acl [ :acl ] , auth : acl [ :authenticated ] } mk_acl ( acl ) end end unless rights [ "/" ] rights . newright ( "/" ) . restrict_authenticated ( :any ) end end | force regular ACLs to be present |
650 | def op_userflags ( * flags , & block ) return if flags . empty? unrecognized_flags = flags . reject { | flag | ADS_USERFLAGS . keys . include? ( flag ) } unless unrecognized_flags . empty? raise ArgumentError , _ ( "Unrecognized ADS UserFlags: %{unrecognized_flags}" ) % { unrecognized_flags : unrecognized_flags . join ( ', ' ) } end self [ 'UserFlags' ] = flags . inject ( self [ 'UserFlags' ] , & block ) end | Common helper for set_userflags and unset_userflags . |
651 | def each_srv_record ( domain , service_name = :puppet , & block ) if ( domain . nil? or domain . empty? ) Puppet . debug "Domain not known; skipping SRV lookup" return end Puppet . debug "Searching for SRV records for domain: #{domain}" case service_name when :puppet then service = '_x-puppet' when :file then service = '_x-puppet-fileserver' else service = "_x-puppet-#{service_name.to_s}" end record_name = "#{service}._tcp.#{domain}" if @record_cache . has_key? ( service_name ) && ! expired? ( service_name ) records = @record_cache [ service_name ] . records Puppet . debug "Using cached record for #{record_name}" else records = @resolver . getresources ( record_name , Resolv :: DNS :: Resource :: IN :: SRV ) if records . size > 0 @record_cache [ service_name ] = CacheEntry . new ( records ) end Puppet . debug "Found #{records.size} SRV records for: #{record_name}" end if records . size == 0 && service_name != :puppet each_srv_record ( domain , :puppet , & block ) else each_priority ( records ) do | recs | while next_rr = recs . delete ( find_weighted_server ( recs ) ) Puppet . debug "Yielding next server of #{next_rr.target.to_s}:#{next_rr.port}" yield next_rr . target . to_s , next_rr . port end end end end | Iterate through the list of records for this service and yield each server and port pair . Records are only fetched via DNS query the first time and cached for the duration of their service s TTL thereafter . |
652 | def find_weighted_server ( records ) return nil if records . nil? || records . empty? return records . first if records . size == 1 total_weight = records . inject ( 0 ) { | sum , record | sum + weight ( record ) } current_weight = 0 chosen_weight = 1 + Kernel . rand ( total_weight ) records . each do | record | current_weight += weight ( record ) return record if current_weight >= chosen_weight end end | Given a list of records of the same priority chooses a random one from among them favoring those with higher weights . |
653 | def expired? ( service_name ) if entry = @record_cache [ service_name ] return Time . now > ( entry . resolution_time + entry . ttl ) else return true end end | Checks if the cached entry for the given service has expired . |
654 | def clear_environment ( mode = default_env ) case mode when :posix ENV . clear when :windows Puppet :: Util :: Windows :: Process . get_environment_strings . each do | key , _ | Puppet :: Util :: Windows :: Process . set_environment_variable ( key , nil ) end else raise _ ( "Unable to clear the environment for mode %{mode}" ) % { mode : mode } end end | Removes all environment variables |
655 | def which ( bin ) if absolute_path? ( bin ) return bin if FileTest . file? bin and FileTest . executable? bin else exts = Puppet :: Util . get_env ( 'PATHEXT' ) exts = exts ? exts . split ( File :: PATH_SEPARATOR ) : %w[ .COM .EXE .BAT .CMD ] Puppet :: Util . get_env ( 'PATH' ) . split ( File :: PATH_SEPARATOR ) . each do | dir | begin dest = File . expand_path ( File . join ( dir , bin ) ) rescue ArgumentError => e if e . to_s =~ / / and ( Puppet :: Util . get_env ( 'HOME' ) . nil? || Puppet :: Util . get_env ( 'HOME' ) == "" ) Puppet :: Util :: Warnings . warnonce ( _ ( "PATH contains a ~ character, and HOME is not set; ignoring PATH element '%{dir}'." ) % { dir : dir } ) elsif e . to_s =~ / / Puppet :: Util :: Warnings . warnonce ( _ ( "Couldn't expand PATH containing a ~ character; ignoring PATH element '%{dir}'." ) % { dir : dir } ) else raise end else if Puppet :: Util :: Platform . windows? && File . extname ( dest ) . empty? exts . each do | ext | destext = File . expand_path ( dest + ext ) return destext if FileTest . file? destext and FileTest . executable? destext end end return dest if FileTest . file? dest and FileTest . executable? dest end end end nil end | Resolve a path for an executable to the absolute path . This tries to behave in the same manner as the unix which command and uses the PATH environment variable . |
656 | def path_to_uri ( path ) return unless path params = { :scheme => 'file' } if Puppet :: Util :: Platform . windows? path = path . gsub ( / \\ / , '/' ) if unc = / \/ \/ \/ \/ / . match ( path ) params [ :host ] = unc [ 1 ] path = unc [ 2 ] elsif path =~ / \/ /i path = '/' + path end end params [ :path ] , params [ :query ] = uri_encode ( path ) . split ( '?' ) search_for_fragment = params [ :query ] ? :query : :path if params [ search_for_fragment ] . include? ( '#' ) params [ search_for_fragment ] , _ , params [ :fragment ] = params [ search_for_fragment ] . rpartition ( '#' ) end begin URI :: Generic . build ( params ) rescue => detail raise Puppet :: Error , _ ( "Failed to convert '%{path}' to URI: %{detail}" ) % { path : path , detail : detail } , detail . backtrace end end | Convert a path to a file URI |
657 | def uri_to_path ( uri ) return unless uri . is_a? ( URI ) path = URI . unescape ( uri . path . encode ( Encoding :: UTF_8 ) ) if Puppet :: Util :: Platform . windows? && uri . scheme == 'file' if uri . host path = "//#{uri.host}" + path else path . sub! ( / \/ / , '' ) end end path end | Get the path component of a URI |
658 | def exit_on_fail ( message , code = 1 ) yield rescue SystemExit => err raise err rescue Exception => err Puppet . log_exception ( err , "#{message}: #{err}" ) Puppet :: Util :: Log . force_flushqueue ( ) exit ( code ) end | Executes a block of code wrapped with some special exception handling . Causes the ruby interpreter to exit if the block throws an exception . |
659 | def map_args ( args , scope , undef_value ) args . map { | a | convert ( a , scope , undef_value ) } end | Converts 4x supported values to a 3x values . |
660 | def delete ( attr ) attr = attr . intern if @parameters . has_key? ( attr ) @parameters . delete ( attr ) else raise Puppet :: DevError . new ( _ ( "Undefined attribute '%{attribute}' in %{name}" ) % { attribute : attr , name : self } ) end end | Removes an attribute from the object ; useful in testing or in cleanup when an error has been encountered |
661 | def managed? if @managed return @managed else @managed = false properties . each { | property | s = property . should if s and ! property . class . unmanaged @managed = true break end } return @managed end end | Returns true if the instance is a managed instance . A yes here means that the instance was created from the language vs . being created in order resolve other questions such as finding a package in a list . |
662 | def retrieve fail "Provider #{provider.class.name} is not functional on this host" if self . provider . is_a? ( Puppet :: Provider ) and ! provider . class . suitable? result = Puppet :: Resource . new ( self . class , title ) result [ :name ] = self [ :name ] unless self [ :name ] == title if ensure_prop = property ( :ensure ) or ( self . class . needs_ensure_retrieved and self . class . validattr? ( :ensure ) and ensure_prop = newattr ( :ensure ) ) result [ :ensure ] = ensure_state = ensure_prop . retrieve else ensure_state = nil end properties . each do | property | next if property . name == :ensure if ensure_state == :absent result [ property ] = :absent else result [ property ] = property . retrieve end end result end | Retrieves the current value of all contained properties . Parameters and meta - parameters are not included in the result . |
663 | def builddepends self . class . relationship_params . collect do | klass | if param = @parameters [ klass . name ] param . to_edges end end . flatten . reject { | r | r . nil? } end | Builds the dependencies associated with this resource . |
664 | def set_sensitive_parameters ( sensitive_parameters ) sensitive_parameters . each do | name | p = parameter ( name ) if p . is_a? ( Puppet :: Property ) p . sensitive = true elsif p . is_a? ( Puppet :: Parameter ) warning ( _ ( "Unable to mark '%{name}' as sensitive: %{name} is a parameter and not a property, and cannot be automatically redacted." ) % { name : name } ) elsif self . class . attrclass ( name ) warning ( _ ( "Unable to mark '%{name}' as sensitive: the property itself was not assigned a value." ) % { name : name } ) else err ( _ ( "Unable to mark '%{name}' as sensitive: the property itself is not defined on %{type}." ) % { name : name , type : type } ) end end parameters . each do | name , param | next if param . sensitive if param . is_a? ( Puppet :: Parameter ) param . sensitive = param . is_sensitive if param . respond_to? ( :is_sensitive ) end end end | Mark parameters associated with this type as sensitive based on the associated resource . |
665 | def finish eachparameter do | parameter | parameter . post_compile if parameter . respond_to? :post_compile end self . class . relationship_params . collect do | klass | if param = @parameters [ klass . name ] param . validate_relationship end end . flatten . reject { | r | r . nil? } end | Finishes any outstanding processing . This method should be called as a final step in setup to allow the parameters that have associated auto - require needs to be processed . |
666 | def for ( module_path , manifest ) Puppet :: Node :: Environment . create ( :anonymous , module_path . split ( File :: PATH_SEPARATOR ) , manifest ) end | Create an anonymous environment . |
667 | def get_conf ( name ) env = get ( name ) if env Puppet :: Settings :: EnvironmentConf . static_for ( env , Puppet [ :environment_timeout ] , Puppet [ :static_catalogs ] , Puppet [ :rich_data ] ) else nil end end | Returns a basic environment configuration object tied to the environment s implementation values . Will not interpolate . |
668 | def add_entry ( name , cache_entry ) Puppet . debug { "Caching environment '#{name}' #{cache_entry.label}" } @cache [ name ] = cache_entry expires = cache_entry . expires @expirations . add ( expires ) if @next_expiration > expires @next_expiration = expires end end | Adds a cache entry to the cache |
669 | def clear_all_expired ( ) t = Time . now return if t < @next_expiration && ! @cache . any? { | name , _ | @cache_expiration_service . expired? ( name . to_sym ) } to_expire = @cache . select { | name , entry | entry . expires < t || @cache_expiration_service . expired? ( name . to_sym ) } to_expire . each do | name , entry | Puppet . debug { "Evicting cache entry for environment '#{name}'" } @cache_expiration_service . evicted ( name ) clear ( name ) @expirations . delete ( entry . expires ) Puppet . settings . clear_environment_settings ( name ) end @next_expiration = @expirations . first || END_OF_TIME end | Clears all environments that have expired either by exceeding their time to live or through an explicit eviction determined by the cache expiration service . |
670 | def entry ( env ) ttl = ( conf = get_conf ( env . name ) ) ? conf . environment_timeout : Puppet . settings . value ( :environment_timeout ) case ttl when 0 NotCachedEntry . new ( env ) when Float :: INFINITY Entry . new ( env ) else TTLEntry . new ( env , ttl ) end end | Creates a suitable cache entry given the time to live for one environment |
671 | def evict_if_expired ( name ) if ( result = @cache [ name ] ) && ( result . expired? || @cache_expiration_service . expired? ( name ) ) Puppet . debug { "Evicting cache entry for environment '#{name}'" } @cache_expiration_service . evicted ( name ) clear ( name ) Puppet . settings . clear_environment_settings ( name ) end end | Evicts the entry if it has expired Also clears caches in Settings that may prevent the entry from being updated |
672 | def data unless @data @data = { } @path . find do | descendant | if Puppet :: ModuleTool . artifact? ( descendant ) Find . prune elsif descendant . file? path = descendant . relative_path_from ( @path ) @data [ path . to_s ] = checksum ( descendant ) end end end return @data end | Return checksums for object s + Pathname + generate if it s needed . Result is a hash of path strings to checksum strings . |
673 | def with_base_url ( dns_resolver ) if @server && @port begin return yield ( base_url ) rescue SystemCallError => e if Puppet [ :use_srv_records ] Puppet . debug "Connection to cached server and port #{@server}:#{@port} failed, reselecting." else raise Puppet :: Error , _ ( "Connection to cached server and port %{server}:%{port} failed: %{message}" ) % { server : @server , port : @port , message : e . message } end end end if Puppet [ :use_srv_records ] dns_resolver . each_srv_record ( Puppet [ :srv_domain ] , @srv_service ) do | srv_server , srv_port | begin @server = srv_server @port = srv_port return yield ( base_url ) rescue SystemCallError Puppet . debug "Connection to selected server and port #{@server}:#{@port} failed. Trying next cached SRV record." @server = nil @port = nil end end end @server = @default_server @port = @default_port Puppet . debug "No more servers in SRV record, falling back to #{@server}:#{@port}" if Puppet [ :use_srv_records ] return yield ( base_url ) end | Create a Route containing information for querying the given API hosted at a server determined either by SRV service or by the fallback server on the fallback port . |
674 | def run exit_on_fail ( _ ( "Could not get application-specific default settings" ) ) do initialize_app_defaults end Puppet :: ApplicationSupport . push_application_context ( self . class . run_mode , self . class . get_environment_mode ) exit_on_fail ( _ ( "Could not initialize" ) ) { preinit } exit_on_fail ( _ ( "Could not parse application options" ) ) { parse_options } exit_on_fail ( _ ( "Could not prepare for execution" ) ) { setup } if deprecated? Puppet . deprecation_warning ( _ ( "`puppet %{name}` is deprecated and will be removed in a future release." ) % { name : name } ) end exit_on_fail ( _ ( "Could not configure routes from %{route_file}" ) % { route_file : Puppet [ :route_file ] } ) { configure_indirector_routes } exit_on_fail ( _ ( "Could not log runtime debug info" ) ) { log_runtime_environment } exit_on_fail ( _ ( "Could not run" ) ) { run_command } end | Execute the application . |
675 | def log_runtime_environment ( extra_info = nil ) runtime_info = { 'puppet_version' => Puppet . version , 'ruby_version' => RUBY_VERSION , 'run_mode' => self . class . run_mode . name , } runtime_info [ 'default_encoding' ] = Encoding . default_external runtime_info . merge! ( extra_info ) unless extra_info . nil? Puppet . debug 'Runtime environment: ' + runtime_info . map { | k , v | k + '=' + v . to_s } . join ( ', ' ) end | Output basic information about the runtime environment for debugging purposes . |
676 | def required_repeated_param ( type , name ) internal_param ( type , name , true ) raise ArgumentError , _ ( 'A required repeated parameter cannot be added after an optional parameter' ) if @min != @max @min += 1 @max = :default end | Defines a repeated positional parameter with _type_ and _name_ that may occur 1 to infinite number of times . It may only appear last or just before a block parameter . |
677 | def block_param ( * type_and_name ) case type_and_name . size when 0 type = @all_callables name = :block when 1 type = @all_callables name = type_and_name [ 0 ] when 2 type , name = type_and_name type = Puppet :: Pops :: Types :: TypeParser . singleton . parse ( type , loader ) unless type . is_a? ( Puppet :: Pops :: Types :: PAnyType ) else raise ArgumentError , _ ( "block_param accepts max 2 arguments (type, name), got %{size}." ) % { size : type_and_name . size } end unless Puppet :: Pops :: Types :: TypeCalculator . is_kind_of_callable? ( type , false ) raise ArgumentError , _ ( "Expected PCallableType or PVariantType thereof, got %{type_class}" ) % { type_class : type . class } end unless name . is_a? ( Symbol ) raise ArgumentError , _ ( "Expected block_param name to be a Symbol, got %{name_class}" ) % { name_class : name . class } end if @block_type . nil? @block_type = type @block_name = name else raise ArgumentError , _ ( 'Attempt to redefine block' ) end end | Defines one required block parameter that may appear last . If type and name is missing the default type is Callable and the name is block . If only one parameter is given then that is the name and the type is Callable . |
678 | def type ( assignment_string ) rb_location = caller [ 0 ] begin result = parser . parse_string ( "type #{assignment_string}" , nil ) rescue StandardError => e rb_location = rb_location . gsub ( / / , '' ) raise ArgumentError , _ ( "Parsing of 'type \"%{assignment_string}\"' failed with message: <%{message}>.\n" + "Called from <%{ruby_file_location}>" ) % { assignment_string : assignment_string , message : e . message , ruby_file_location : rb_location } end unless result . body . kind_of? ( Puppet :: Pops :: Model :: TypeAlias ) rb_location = rb_location . gsub ( / / , '' ) raise ArgumentError , _ ( "Expected a type alias assignment on the form 'AliasType = T', got '%{assignment_string}'.\n" + "Called from <%{ruby_file_location}>" ) % { assignment_string : assignment_string , ruby_file_location : rb_location } end @local_types << result . body end | Defines a local type alias the given string should be a Puppet Language type alias expression in string form without the leading type keyword . Calls to local_type must be made before the first parameter definition or an error will be raised . |
679 | def lookup ( lookup_variants , lookup_invocation ) case lookup_variants . size when 0 throw :no_such_key when 1 merge_single ( yield ( lookup_variants [ 0 ] ) ) else lookup_invocation . with ( :merge , self ) do result = lookup_variants . reduce ( NOT_FOUND ) do | memo , lookup_variant | not_found = true value = catch ( :no_such_key ) do v = yield ( lookup_variant ) not_found = false v end if not_found memo else memo . equal? ( NOT_FOUND ) ? convert_value ( value ) : merge ( memo , value ) end end throw :no_such_key if result == NOT_FOUND lookup_invocation . report_result ( result ) end end end | Merges the result of yielding the given _lookup_variants_ to a given block . |
680 | def uncompress_body ( response ) case response [ 'content-encoding' ] when 'gzip' return Zlib :: GzipReader . new ( StringIO . new ( response . body ) , :encoding => Encoding :: BINARY ) . read when 'deflate' return Zlib :: Inflate . new . inflate ( response . body ) when nil , 'identity' return response . body else raise Net :: HTTPError . new ( _ ( "Unknown content encoding - %{encoding}" ) % { encoding : response [ 'content-encoding' ] } , response ) end end | return an uncompressed body if the response has been compressed |
681 | def newright ( name , line = nil , file = nil ) add_right ( Right . new ( name , line , file ) ) end | Define a new right to which access can be provided . |
682 | def allowed? ( name , ip ) if name or ip raise Puppet :: DevError , _ ( "Name and IP must be passed to 'allowed?'" ) unless name and ip else return true end return true if globalallow? if decl = declarations . find { | d | d . match? ( name , ip ) } return decl . result end info _ ( "defaulting to no access for %{name}" ) % { name : name } false end | Is a given combination of name and ip address allowed? If either input is non - nil then both inputs must be provided . If neither input is provided then the authstore is considered local and defaults to true . |
683 | def type ( name ) if name . to_s . include? ( ':' ) return nil end @types ||= { } return @types [ name ] if @types . include? name if name . is_a? String name = name . downcase . intern return @types [ name ] if @types . include? name end if typeloader . load ( name , Puppet . lookup ( :current_environment ) ) Puppet . warning ( _ ( "Loaded puppet/type/%{name} but no class was created" ) % { name : name } ) unless @types . include? name elsif ! Puppet [ :always_retry_plugins ] @types [ name ] = nil end return @types [ name ] end | Returns a Type instance by name . This will load the type if not already defined . |
684 | def [] ( loader_name ) loader = @loaders_by_name [ loader_name ] if loader . nil? loader = private_loader_for_module ( loader_name [ 0 .. - 9 ] ) if loader_name . end_with? ( ' private' ) raise Puppet :: ParseError , _ ( "Unable to find loader named '%{loader_name}'" ) % { loader_name : loader_name } if loader . nil? end loader end | Lookup a loader by its unique name . |
685 | def find_loader ( module_name ) if module_name . nil? || EMPTY_STRING == module_name public_environment_loader else loader = public_loader_for_module ( module_name ) if loader . nil? raise Puppet :: ParseError , _ ( "Internal Error: did not find public loader for module: '%{module_name}'" ) % { module_name : module_name } end loader end end | Finds the appropriate loader for the given module_name or for the environment in case module_name is nil or empty . |
686 | def load_main_manifest parser = Parser :: EvaluatingParser . singleton parsed_code = Puppet [ :code ] program = if parsed_code != "" parser . parse_string ( parsed_code , 'unknown-source-location' ) else file = @environment . manifest if file == Puppet :: Node :: Environment :: NO_MANIFEST nil elsif File . directory? ( file ) raise Puppet :: Error , "manifest of environment '#{@environment.name}' appoints directory '#{file}'. It must be a file" elsif File . exists? ( file ) parser . parse_file ( file ) else raise Puppet :: Error , "manifest of environment '#{@environment.name}' appoints '#{file}'. It does not exist" end end instantiate_definitions ( program , public_environment_loader ) unless program . nil? program rescue Puppet :: ParseErrorWithIssue => detail detail . environment = @environment . name raise rescue => detail msg = _ ( 'Could not parse for environment %{env}: %{detail}' ) % { env : @environment , detail : detail } error = Puppet :: Error . new ( msg ) error . set_backtrace ( detail . backtrace ) raise error end | Load the main manifest for the given environment |
687 | def instantiate_definitions ( program , loader ) program . definitions . each { | d | instantiate_definition ( d , loader ) } nil end | Add 4 . x definitions found in the given program to the given loader . |
688 | def instantiate_definition ( definition , loader ) case definition when Model :: PlanDefinition instantiate_PlanDefinition ( definition , loader ) when Model :: FunctionDefinition instantiate_FunctionDefinition ( definition , loader ) when Model :: TypeAlias instantiate_TypeAlias ( definition , loader ) when Model :: TypeMapping instantiate_TypeMapping ( definition , loader ) else raise Puppet :: ParseError , "Internal Error: Unknown type of definition - got '#{definition.class}'" end end | Add given 4 . x definition to the given loader . |
689 | def []= ( key , value ) entry = find_entry ( key ) @dirty = true if entry . nil? @entries << [ key , value ] else entry [ 1 ] = value end end | Set the entry key = value . If no entry with the given key exists one is appended to the end of the section |
690 | def format if @destroy text = "" else text = "[#{name}]\n" @entries . each do | entry | if entry . is_a? ( Array ) key , value = entry text << "#{key}=#{value}\n" unless value . nil? else text << entry end end end text end | Format the section as text in the way it should be written to file |
691 | def read text = @filetype . read if text . nil? raise IniParseError , _ ( "Cannot read nonexistent file %{file}" ) % { file : @file . inspect } end parse ( text ) end | Read and parse the on - disk file associated with this object |
692 | def add_section ( name ) if section_exists? ( name ) raise IniParseError . new ( _ ( "Section %{name} is already defined, cannot redefine" ) % { name : name . inspect } , @file ) end section = Section . new ( name , @file ) @contents << section section end | Create a new section and store it in the file contents |
693 | def to_s msg = super @file = nil if ( @file . is_a? ( String ) && @file . empty? ) msg += Puppet :: Util :: Errors . error_location_with_space ( @file , @line , @pos ) msg end | May be called with 3 arguments for message file line and exception or 4 args including the position on the line . |
694 | def handle_retry_after ( response ) retry_after = response [ 'Retry-After' ] return response if retry_after . nil? retry_sleep = parse_retry_after_header ( retry_after ) server_hostname = if response . uri . is_a? ( URI ) response . uri . host else _ ( 'the remote server' ) end if retry_sleep . nil? Puppet . err ( _ ( 'Received a %{status_code} response from %{server_hostname}, but the Retry-After header value of "%{retry_after}" could not be converted to an integer or RFC 2822 date.' ) % { status_code : response . code , server_hostname : server_hostname , retry_after : retry_after . inspect } ) return response end retry_sleep = [ retry_sleep , Puppet [ :runinterval ] ] . min Puppet . warning ( _ ( 'Received a %{status_code} response from %{server_hostname}. Sleeping for %{retry_sleep} seconds before retrying the request.' ) % { status_code : response . code , server_hostname : server_hostname , retry_sleep : retry_sleep } ) :: Kernel . sleep ( retry_sleep ) return nil end | Handles the Retry - After header of a HTTPResponse |
695 | def parse_retry_after_header ( header_value ) retry_after = begin Integer ( header_value ) rescue TypeError , ArgumentError begin DateTime . rfc2822 ( header_value ) rescue ArgumentError return nil end end case retry_after when Integer retry_after when DateTime sleep = ( retry_after . to_time - DateTime . now . to_time ) . to_i ( sleep > 0 ) ? sleep : 0 end end | Parse the value of a Retry - After header |
696 | def values_by_name ( key , names ) vals = { } names . each do | name | FFI :: Pointer . from_string_to_wide_string ( name ) do | subkeyname_ptr | begin _ , vals [ name ] = read ( key , subkeyname_ptr ) rescue Puppet :: Util :: Windows :: Error => e raise e unless e . code == Puppet :: Util :: Windows :: Error :: ERROR_FILE_NOT_FOUND end end end vals end | Retrieve a set of values from a registry key given their names Value names listed but not found in the registry will not be added to the resultant Hashtable |
697 | def fields = ( fields ) @fields = fields . collect do | field | r = field . intern raise ArgumentError . new ( _ ( "Cannot have fields named %{name}" ) % { name : r } ) if INVALID_FIELDS . include? ( r ) r end end | Customize this so we can do a bit of validation . |
698 | def join ( details ) joinchar = self . joiner fields . collect { | field | if details [ field ] == :absent or details [ field ] == [ :absent ] or details [ field ] . nil? if self . optional . include? ( field ) self . absent else raise ArgumentError , _ ( "Field '%{field}' is required" ) % { field : field } end else details [ field ] . to_s end } . reject { | c | c . nil? } . join ( joinchar ) end | Convert a record into a line by joining the fields together appropriately . This is pulled into a separate method so it can be called by the hooks . |
699 | def assign ( members : , topics : ) group_assignment = { } members . each do | member_id | group_assignment [ member_id ] = Protocol :: MemberAssignment . new end topic_partitions = topics . flat_map do | topic | begin partitions = @cluster . partitions_for ( topic ) . map ( & :partition_id ) rescue UnknownTopicOrPartition raise UnknownTopicOrPartition , "unknown topic #{topic}" end Array . new ( partitions . count ) { topic } . zip ( partitions ) end partitions_per_member = topic_partitions . group_by . with_index do | _ , index | index % members . count end . values members . zip ( partitions_per_member ) . each do | member_id , member_partitions | unless member_partitions . nil? member_partitions . each do | topic , partition | group_assignment [ member_id ] . assign ( topic , [ partition ] ) end end end group_assignment rescue Kafka :: LeaderNotAvailable sleep 1 retry end | Assign the topic partitions to the group members . |