idx
int64
0
24.9k
question
stringlengths
68
4.14k
target
stringlengths
9
749
4,200
# Create a prepared query or prepared query template.
#
# @param definition [Hash,String] the query definition payload
# @param options [Hash] :dc scopes the request to a datacenter
# @return [OpenStruct] parsed response body
# @raise [Diplomat::QueryAlreadyExists] when Consul rejects the creation
def create(definition, options = {})
  dc_param = options[:dc] ? use_named_parameter('dc', options[:dc]) : nil
  @raw = send_post_request(@conn, ['/v1/query'], options, definition, dc_param)
  parse_body
rescue Faraday::ClientError
  raise Diplomat::QueryAlreadyExists
end
Create a prepared query or prepared query template
4,201
# Delete a prepared query or prepared query template by ID.
#
# @return [Boolean] true when Consul answers HTTP 200
def delete(key, options = {})
  dc_param = nil
  dc_param = use_named_parameter('dc', options[:dc]) if options[:dc]
  response = send_delete_request(@conn, ["/v1/query/#{key}"], options, dc_param)
  response.status == 200
end
Delete a prepared query or prepared query template
4,202
# Update a prepared query or prepared query template.
#
# @return [Boolean] true when Consul answers HTTP 200
def update(key, definition, options = {})
  dc_param = nil
  dc_param = use_named_parameter('dc', options[:dc]) if options[:dc]
  response = send_put_request(@conn, ["/v1/query/#{key}"], options, definition, dc_param)
  response.status == 200
end
Update a prepared query or prepared query template
4,203
# Execute a prepared query or prepared query template.
#
# @param options [Hash] :dc, :near and :limit tune the execution
# @return [OpenStruct] the parsed execution result
def execute(key, options = {})
  params = %w[dc near limit].each_with_object([]) do |opt, acc|
    acc << use_named_parameter(opt, options[opt.to_sym]) if options[opt.to_sym]
  end
  response = send_get_request(@conn, ["/v1/query/#{key}/execute"], options, params)
  OpenStruct.new JSON.parse(response.body)
end
Execute a prepared query or prepared query template
4,204
# Register a script check with the local agent.
#
# @param args [Array] the check command as an argv array (string form is deprecated)
# @return [Boolean] true when Consul answers HTTP 200
# @raise [Diplomat::DeprecatedArgument] when args is not an Array
def register_script(check_id, name, notes, args, interval, options = {})
  unless args.is_a?(Array)
    raise(Diplomat::DeprecatedArgument, 'Script usage is deprecated, replace by an array of args')
  end

  payload = JSON.generate(
    'ID' => check_id,
    'Name' => name,
    'Notes' => notes,
    'Args' => args,
    'Interval' => interval
  )
  response = send_put_request(@conn, ['/v1/agent/check/register'], options, payload)
  response.status == 200
end
Register a check
4,205
# Update a TTL check's status (and optional output) on the local agent.
#
# @return [Boolean] true when Consul answers HTTP 200
def update_ttl(check_id, status, output = nil, options = {})
  payload = JSON.generate('Status' => status, 'Output' => output)
  response = send_put_request(@conn, ["/v1/agent/check/update/#{check_id}"], options, payload)
  response.status == 200
end
Update a TTL check
4,206
# Get a value by its key, potentially blocking for the first or next value.
#
# not_found / found control behaviour when the key is absent / present:
#   :reject -> raise; :return -> return immediately; :wait -> long-poll
#   Consul's blocking-query index until the value (next) changes.
def get(key, options = {}, not_found = :reject, found = :return)
  # Strip a single leading slash so the request path is well-formed.
  key_subst = if key.start_with? '/'
                key[1..-1]
              else
                key.freeze
              end
  @key = key_subst
  @options = options
  custom_params = []
  custom_params << recurse_get(@options)
  custom_params << use_consistency(options)
  custom_params << dc(@options)
  custom_params << keys(@options)
  custom_params << separator(@options)
  return_nil_values = @options && @options[:nil_values]
  # A :transformation option is honoured only when it responds to #call.
  transformation = @options && @options[:transformation] && @options[:transformation].methods.find_index(:call) ? @options[:transformation] : nil
  raw = send_get_request(@conn_no_err, ["/v1/kv/#{@key}"], options, custom_params)
  if raw.status == 404
    # Key absent.
    case not_found
    when :reject
      raise Diplomat::KeyNotFound, key
    when :return
      return @value = ''
    when :wait
      index = raw.headers['x-consul-index']
    end
  elsif raw.status == 200
    # Key present.
    case found
    when :reject
      raise Diplomat::KeyAlreadyExists, key
    when :return
      @raw = raw
      @raw = parse_body
      # Several option flags short-circuit with alternate projections of the body.
      return @raw.first['ModifyIndex'] if @options && @options[:modify_index]
      return @raw.first['Session'] if @options && @options[:session]
      return decode_values if @options && @options[:decode_values]
      return convert_to_hash(return_value(return_nil_values, transformation, true)) if @options && @options[:convert_to_hash]

      return return_value(return_nil_values, transformation)
    when :wait
      index = raw.headers['x-consul-index']
    end
  else
    raise Diplomat::UnknownStatus, "status #{raw.status}: #{raw.body}"
  end
  # Blocking (:wait) path: re-issue the request with the last-seen index
  # and a 24-hour client timeout so it parks until the key changes.
  custom_params << use_named_parameter('index', index)
  if options.nil?
    options = { timeout: 86_400 }
  else
    options[:timeout] = 86_400
  end
  @raw = send_get_request(@conn, ["/v1/kv/#{@key}"], options, custom_params)
  @raw = parse_body
  return_value(return_nil_values, transformation)
end
Get a value by its key potentially blocking for the first or next value
4,207
# Delete a value by its key (optionally recursive and/or scoped to a dc).
def delete(key, options = {})
  @key = key
  @options = options
  params = [recurse_get(@options), dc(@options)]
  @raw = send_delete_request(@conn, ["/v1/kv/#{@key}"], options, params)
end
Delete a value by its key
4,208
# Fire a user event, optionally filtered by service, node, tag and datacenter.
#
# @return [nil]
def fire(name, value = nil, service = nil, node = nil, tag = nil, dc = nil, options = {})
  filters = []
  { 'service' => service, 'node' => node, 'tag' => tag, 'dc' => dc }.each do |param, setting|
    filters << use_named_parameter(param, setting) if setting
  end
  send_put_request(@conn, ["/v1/event/fire/#{name}"], options, value, filters)
  nil
end
Send an event
4,209
# Get a specific event in the sequence matching name.
#
# token selects the position: :first, :last, :next (one past the end,
# i.e. "wait for a new event"), or a concrete event ID (the event after it).
# not_found / found select behaviour (:reject raises, :return yields a value,
# :wait long-polls for the next event).
def get(name = nil, token = :last, not_found = :wait, found = :return, options = {})
  @raw = send_get_request(@conn, ['/v1/event/list'], options, use_named_parameter('name', name))
  body = JSON.parse(@raw.body)
  # Translate the token into an index in the current event list.
  idx = case token
        when :first then 0
        when :last then body.length - 1
        when :next then body.length
        else body.find_index { |e| e['ID'] == token } + 1
        end
  if JSON.parse(@raw.body).count.zero? || idx == body.length
    # No event at the requested position yet.
    case not_found
    when :reject
      raise Diplomat::EventNotFound, name
    when :return
      event_name = ''
      event_payload = ''
      event_token = :last
    when :wait
      # Block until a new event arrives, then report the newest one.
      @raw = wait_for_next_event(['/v1/event/list'], options, use_named_parameter('name', name))
      @raw = parse_body
      event = @raw.last
      event_name = event['Name']
      event_payload = Base64.decode64(event['Payload'])
      event_token = event['ID']
    end
  else
    case found
    when :reject
      raise Diplomat::EventAlreadyExits, name
    when :return
      event = body[idx]
      event_name = event['Name']
      # Payload may legitimately be absent.
      event_payload = event['Payload'].nil? ? nil : Base64.decode64(event['Payload'])
      event_token = event['ID']
    end
  end
  { value: { name: event_name, payload: event_payload }, token: event_token }
end
Get a specific event in the sequence matching name
4,210
# Get a service by its key from the catalog.
#
# @param scope [Symbol] :first returns one OpenStruct, :all an array of them
# @param meta [Hash,nil] when given, populated with consul index headers
def get(key, scope = :first, options = {}, meta = nil)
  query_params = []
  query_params << use_named_parameter('wait', options[:wait]) if options[:wait]
  query_params << use_named_parameter('index', options[:index]) if options[:index]
  query_params << use_named_parameter('dc', options[:dc]) if options[:dc]
  if options[:tag]
    # A tag option may be a single value or a list; emit one parameter each.
    [*options[:tag]].each do |tag|
      query_params << use_named_parameter('tag', tag)
    end
  end
  # Repeated parameters (tags) must not be collapsed by Faraday.
  options[:params_encoder] = Faraday::FlatParamsEncoder
  ret = send_get_request(@conn, ["/v1/catalog/service/#{key}"], options, query_params)
  if meta && ret.headers
    %w[index knownleader lastcontact].each do |field|
      header = ret.headers["x-consul-#{field}"]
      meta[field.to_sym] = header if header
    end
  end
  parsed = JSON.parse(ret.body)
  if scope == :all
    parsed.map { |service| OpenStruct.new service }
  else
    OpenStruct.new parsed.first
  end
end
Get a service by its key
4,211
# Register a service with the local agent.
#
# @param options [Hash] :path overrides the registration endpoint
# @return [Boolean] true when Consul answers HTTP 200
def register(definition, options = {})
  endpoint = options[:path] || ['/v1/agent/service/register']
  send_put_request(@conn, endpoint, options, definition).status == 200
end
Register a service
4,212
# Enable or disable maintenance mode for a service.
#
# @param options [Hash] :enable toggles the mode, :reason is an optional note
# @return [Boolean] true when Consul answers HTTP 200
def maintenance(service_id, options = { enable: true })
  params = [["enable=#{options[:enable]}"]]
  params << ["reason=#{options[:reason].split(' ').join('+')}"] if options[:reason]
  response = send_put_request(@conn, ["/v1/agent/service/maintenance/#{service_id}"], options, nil, params)
  response.status == 200
end
Enable or disable maintenance for a service
4,213
# Get all health checks registered on a node.
#
# @return [Array<OpenStruct>] one entry per check
def node(n, options = {})
  params = options[:dc] ? [use_named_parameter('dc', options[:dc])] : []
  response = send_get_request(@conn, ["/v1/health/node/#{n}"], options, params)
  JSON.parse(response.body).map { |entry| OpenStruct.new entry }
end
Get node health
4,214
# Get the checks associated with a service.
#
# @return [Array<OpenStruct>] one entry per check
def checks(s, options = {})
  params = options[:dc] ? [use_named_parameter('dc', options[:dc])] : []
  response = send_get_request(@conn, ["/v1/health/checks/#{s}"], options, params)
  JSON.parse(response.body).map { |entry| OpenStruct.new entry }
end
Get service checks
4,215
# Get the health entries for a service.
#
# @param options [Hash] :dc, :passing, :tag and :near filter the results
# @return [Array<OpenStruct>] one entry per node providing the service
def service(s, options = {})
  filters = []
  filters << use_named_parameter('dc', options[:dc]) if options[:dc]
  filters << ['passing'] if options[:passing]
  filters << use_named_parameter('tag', options[:tag]) if options[:tag]
  filters << use_named_parameter('near', options[:near]) if options[:near]
  response = send_get_request(@conn, ["/v1/health/service/#{s}"], options, filters)
  JSON.parse(response.body).map { |entry| OpenStruct.new entry }
end
Get service health
4,216
# Acquire a lock on a key with the given session.
#
# @return [Boolean] true when Consul granted the lock
def acquire(key, session, value = nil, options = {})
  params = [use_named_parameter('acquire', session)]
  params << use_named_parameter('dc', options[:dc]) if options[:dc]
  params << use_named_parameter('flags', options[:flags]) if options && options[:flags]
  payload = value.nil? ? nil : value
  response = send_put_request(@conn, ["/v1/kv/#{key}"], options, payload, params)
  response.body.chomp == 'true'
end
Acquire a lock
4,217
# Block until the lock on key is acquired, retrying every check_interval
# seconds.
#
# @return [true]
def wait_to_acquire(key, session, value = nil, check_interval = 10, options = {})
  loop do
    return true if acquire(key, session, value, options)

    sleep(check_interval)
  end
end
Wait to acquire a lock
4,218
# Release a lock held by the given session.
#
# @return [String] the raw response body
def release(key, session, options = {})
  params = [use_named_parameter('release', session)]
  params << use_named_parameter('dc', options[:dc]) if options[:dc]
  params << use_named_parameter('flags', options[:flags]) if options && options[:flags]
  send_put_request(@conn, ["/v1/kv/#{key}"], options, nil, params).body
end
Release a lock
4,219
# Assemble a URL from an array of parts: the first part is the path and
# the remaining parts become the query string, joined with '&'.
# Empty parts are discarded first.
def concat_url(parts)
  parts.reject!(&:empty?)
  return parts.first unless parts.length > 1

  "#{parts.first}?#{parts.drop(1).join('&')}"
end
Assemble a url from an array of parts .
4,220
# Get ACL info by ID.
#
# Consul answers 200 with a body of "null" when the ACL does not exist,
# hence the body check in addition to the status code.
# found / not_found select behaviour (:reject raises, :return returns the
# parsed body or nil). Unlisted symbols fall through and return nil.
def info(id, options = {}, not_found = :reject, found = :return)
  @id = id
  @options = options
  custom_params = []
  custom_params << use_consistency(options)
  raw = send_get_request(@conn_no_err, ["/v1/acl/info/#{id}"], options, custom_params)
  if raw.status == 200 && raw.body.chomp != 'null'
    # ACL exists.
    case found
    when :reject
      raise Diplomat::AclAlreadyExists, id
    when :return
      @raw = raw
      return parse_body
    end
  elsif raw.status == 200 && raw.body.chomp == 'null'
    # ACL does not exist.
    case not_found
    when :reject
      raise Diplomat::AclNotFound, id
    when :return
      return nil
    end
  else
    raise Diplomat::UnknownStatus, "status #{raw.status}: #{raw.body}"
  end
end
Get Acl info by ID
4,221
# Update an ACL definition; Consul creates it when not present.
#
# @param value [Hash] the ACL definition; must contain 'ID' or :ID
# @return [OpenStruct] the parsed response body
# @raise [Diplomat::IdParameterRequired] when the ID is missing
def update(value, options = {})
  raise Diplomat::IdParameterRequired unless value['ID'] || value[:ID]

  # Fix: derive CAS parameters from the options passed to this call.
  # The previous code read @options, which ignored the caller's :cas setting.
  custom_params = use_cas(options)
  @raw = send_put_request(@conn, ['/v1/acl/update'], options, value, custom_params)
  parse_body
end
Update an Acl definition create if not present
4,222
# Create an ACL definition.
#
# @param value [Hash] the ACL definition
# @return [OpenStruct] the parsed response body
def create(value, options = {})
  # Fix: derive CAS parameters from the options passed to this call.
  # The previous code read @options, which ignored the caller's :cas setting.
  custom_params = use_cas(options)
  @raw = send_put_request(@conn, ['/v1/acl/create'], options, value, custom_params)
  parse_body
end
Create an Acl definition
4,223
# Get an array of all available datacenters accessible by the local agent.
#
# @param meta [Hash,nil] when given, populated with consul index headers
# @return [Array<String>] datacenter names
def get(meta = nil, options = {})
  response = send_get_request(@conn, ['/v1/catalog/datacenters'], options)
  if meta && response.headers
    %w[index knownleader lastcontact].each do |field|
      header = response.headers["x-consul-#{field}"]
      meta[field.to_sym] = header if header
    end
  end
  JSON.parse(response.body)
end
Get an array of all available datacenters accessible by the local consul agent
4,224
# Update the metadata properties of this resource. The +props+ are merged
# with any existing properties; nested hashes are merged recursively.
#
# @param props [Hash,nil] new properties to merge in
# @return [self]
def update_properties(props)
  load_properties(true)
  # Recursively merge nested hashes; non-hash values are simply overwritten.
  deep_merge = proc do |_, old_value, new_value|
    if old_value.is_a?(Hash) && new_value.is_a?(Hash)
      old_value.merge(new_value, &deep_merge)
    else
      new_value
    end
  end
  merged = properties.merge(props || {}, &deep_merge)
  # `merged` already contains every key of `properties`, so save it directly.
  # (The previous code redundantly merged it into `properties` a second time,
  # which produced the identical hash.)
  save_properties(merged)
  self
end
Update the metadata properties of this resource . The + props + will be merged with any existing properties . Nested hashes in the properties will also be merged .
4,225
# Check whether the metadata properties include the nested path +keys+.
# Each key descends one level into the properties hash; returns true only
# when every level exists (vacuously true for no keys).
def property?(*keys)
  node = properties
  keys.each do |key|
    return false unless node.is_a?(Hash) && node.include?(key)

    node = node[key]
  end
  true
end
Check if the metadata properties includes the + keys + . The + keys + represent a nested path in the properties to check .
4,226
# The serializable hash is the Entity's primary output: the transformed
# hash for the wrapped object, used as the basis for JSON serialization.
# Returns nil when there is no wrapped object.
def serializable_hash(runtime_options = {})
  return nil if object.nil?

  merged_options = options.merge(runtime_options || {})
  root_exposure.serializable_value(self, merged_options)
end
The serializable hash is the Entity's primary output . It is the transformed hash for the given data model and is used as the basis for serialization to JSON and other formats .
4,227
# Capture the SQL queries executed inside the block and return them.
# The notification subscription is always removed, even on error.
def logged_queries(&block)
  captured = []
  subscriber = ActiveSupport::Notifications.subscribe('sql.active_record') do |_name, _started, _finished, _id, payload|
    captured << payload[:sql]
  end
  block.call
  captured
ensure
  ActiveSupport::Notifications.unsubscribe(subscriber)
end
Logs the queries run inside the block and return them .
4,228
# Return a Hash of interned word => frequency for the given string.
#
# enable_stemmer interacts with the token_filters list: the Stemmer filter
# is removed when stemming is disabled, and appended when enabled but absent.
def word_hash(str, enable_stemmer = true, tokenizer: Tokenizer::Whitespace, token_filters: [TokenFilter::Stopword])
  if token_filters.include?(TokenFilter::Stemmer)
    # Caller supplied the stemmer explicitly; drop it if stemming is disabled.
    unless enable_stemmer
      token_filters.reject! do |token_filter|
        token_filter == TokenFilter::Stemmer
      end
    end
  else
    # NOTE: this mutates the caller-supplied array when one is passed in.
    token_filters << TokenFilter::Stemmer if enable_stemmer
  end
  words = tokenizer.call(str)
  # Apply each filter in order over the token stream.
  token_filters.each do |token_filter|
    words = token_filter.call(words)
  end
  d = Hash.new(0)
  words.each do |word|
    d[word.intern] += 1
  end
  d
end
Return a Hash of strings = > ints . Each word in the string is stemmed interned and indexes to its frequency in the document .
4,229
# Return the classification without the score, or nil when the score is
# below the configured threshold (or infinite) while thresholding is on.
def classify(text)
  result, score = classify_with_score(text)
  below_threshold = threshold_enabled? && (score < @threshold || score == Float::INFINITY)
  below_threshold ? nil : result
end
Return the classification without the score
4,230
# Overwrite the default stopwords for the current language. Accepts an
# Enumerable of words, a whitespace-separated string, or a path to a file
# of words; anything else is ignored.
def custom_stopwords(stopwords)
  if stopwords.is_a?(Enumerable)
    word_list = stopwords
  elsif stopwords.strip.empty?
    word_list = []
  elsif File.exist?(stopwords)
    word_list = File.read(stopwords).force_encoding('utf-8').split
  else
    # Unusable input: leave the existing stopwords untouched.
    return
  end
  TokenFilter::Stopword::STOPWORDS[@language] = Set.new word_list
end
Overwrites the default stopwords for current language with supplied list of stopwords or file
4,231
# Rebuild the LSI index when needs_rebuild? is true. For very large
# document spaces this may take a while, so consider running it in a
# separate thread.
#
# Two code paths: a GSL-backed one (when $GSL is set) and a pure-Ruby
# Matrix one; both build a term-document matrix, reduce it via SVD, and
# store the resulting vector (and its norm) on each document node.
def build_index(cutoff = 0.75)
  return unless needs_rebuild?

  make_word_list
  doc_list = @items.values
  # Term-document array: one raw vector per document over the word list.
  tda = doc_list.collect { |node| node.raw_vector_with(@word_list) }
  if $GSL
    tdm = GSL::Matrix.alloc(*tda).trans
    ntdm = build_reduced_matrix(tdm, cutoff)
    ntdm.size[1].times do |col|
      vec = GSL::Vector.alloc(ntdm.column(col)).row
      doc_list[col].lsi_vector = vec
      doc_list[col].lsi_norm = vec.normalize
    end
  else
    tdm = Matrix.rows(tda).trans
    ntdm = build_reduced_matrix(tdm, cutoff)
    ntdm.column_size.times do |col|
      doc_list[col].lsi_vector = ntdm.column(col) if doc_list[col]
      # A zero column cannot be normalized; store it as-is.
      if ntdm.column(col).zero?
        doc_list[col].lsi_norm = ntdm.column(col) if doc_list[col]
      else
        doc_list[col].lsi_norm = ntdm.column(col).normalize if doc_list[col]
      end
    end
  end
  @built_at_version = @version
end
This function rebuilds the index if needs_rebuild? returns true . For very large document spaces this indexing operation may take some time to complete so it may be wise to place the operation in another thread .
4,232
# Return up to max_chunks items ordered by their average semantic rating
# (the summed proximity of each item to all other items), highest first.
#
# @return [Array] item keys, best first; empty when the index is stale
def highest_relative_content(max_chunks = 10)
  return [] if needs_rebuild?

  avg_density = {}
  @items.each_key do |item|
    avg_density[item] = proximity_array_for_content(item).inject(0.0) { |sum, pair| sum + pair[1] }
  end
  # Fix: the previous version ended with a bare `.map` (no block), which
  # returned an Enumerator instead of the Array of keys.
  avg_density.keys.sort_by { |x| avg_density[x] }.reverse[0..max_chunks - 1]
end
This method returns max_chunks entries ordered by their average semantic rating . Essentially the average distance of each entry from all other entries is calculated the highest are returned .
4,233
# The primitive that find_related and classify build upon: returns an
# array of [item, score] pairs, sorted by score descending, where the
# score measures how close each indexed item is to doc.
def proximity_array_for_content(doc, &block)
  return [] if needs_rebuild?

  content_node = node_for_content(doc, &block)
  result = @items.keys.collect do |item|
    # Dot product of search vectors; GSL and pure-Ruby Matrix differ in API.
    val = if $GSL
            content_node.search_vector * @items[item].transposed_search_vector
          else
            (Matrix[content_node.search_vector] * @items[item].search_vector)[0]
          end
    [item, val]
  end
  result.sort_by { |x| x[1] }.reverse
end
This function is the primitive that find_related and classify build upon . It returns an array of 2 - element arrays . The first element of this array is a document and the second is its score defining how close it is to other indexed items .
4,234
# Like proximity_array_for_content, but uses the normalized vectors, which
# works better when the query content is much smaller than the indexed
# text. Used by search.
def proximity_norms_for_content(doc, &block)
  return [] if needs_rebuild?

  content_node = node_for_content(doc, &block)
  if $GSL && content_node.raw_norm.isnan?.all?
    # NOTE(review): this branch prints a message and implicitly returns nil
    # (puts returns nil) rather than an array — callers must handle nil.
    puts "There are no documents that are similar to #{doc}"
  else
    content_node_norms(content_node)
  end
end
Similar to proximity_array_for_content this function takes similar arguments and returns a similar array . However it uses the normalized calculated vectors instead of their full versions . This is useful when you re trying to perform operations on content that is much smaller than the text you re working with . search uses this primitive .
4,235
# Text-based search of the index. Intended for short, google-like search
# terms; repeated words are ignored. Returns up to max_nearest item keys,
# or nil when no norms could be computed.
def search(string, max_nearest = 3)
  return [] if needs_rebuild?

  carry = proximity_norms_for_content(string)
  return if carry.nil?

  carry.collect { |pair| pair[0] }[0..max_nearest - 1]
end
This function allows for text - based search of your index . Unlike other functions like find_related and classify search only takes short strings . It will also ignore factors like repeated words . It is best for short google - like search terms . A search will first prioritize lexical relationships then semantic ones .
4,236
# Find documents semantically close to doc, sorted most-relevant first.
# max_nearest limits the result; the document itself is excluded.
def find_related(doc, max_nearest = 3, &block)
  neighbours = proximity_array_for_content(doc, &block).reject { |pair| pair[0].eql?(doc) }
  neighbours.collect { |pair| pair[0] }[0..max_nearest - 1]
end
This function takes content and finds other documents that are semantically close returning an array of documents sorted from most to least relevant . max_nearest specifies the number of documents to return . A value of 0 means that it returns all the indexed documents sorted by relevance .
4,237
# Return the most likely category for doc, without its score.
def classify(doc, cutoff = 0.30, &block)
  ranked = scored_categories(doc, cutoff, &block)
  ranked.last.first
end
Return the most obvious category without the score
4,238
# Categorize doc by letting its nearest neighbours (within the cutoff
# fraction of the index) vote with their proximity scores.
#
# @return [Array<Array>] [category, score] pairs sorted by ascending score
def scored_categories(doc, cutoff = 0.30, &block)
  icutoff = (@items.size * cutoff).round
  nearest = proximity_array_for_content(doc, &block)[0..icutoff - 1]
  votes = Hash.new(0.0)
  nearest.each do |item, score|
    @items[item].categories.each do |category|
      votes[category] += score
    end
  end
  votes.sort_by { |_, score| score }
end
This function uses a voting system to categorize documents based on the categories of other documents . It uses the same logic as the find_related function to find related documents then returns the list of sorted categories .
4,239
# Return the count highest-weighted stems for an indexed document.
# Only works on content that has already been indexed.
def highest_ranked_stems(doc, count = 3)
  raise 'Requested stem ranking on non-indexed content!' unless @items[doc]

  vector = node_for_content(doc).lsi_vector.to_a
  top_values = vector.sort.reverse[0..count - 1]
  top_values.collect { |value| @word_list.word_for_index(vector.index(value)) }
end
Prototype only works on indexed documents . I have no clue if this is going to work but in theory it s supposed to .
4,240
# Create the raw frequency vector from @word_hash, using word_list to map
# words into vector positions, then apply an entropy-based weighting.
# Stores both the vector and its normalized form on the node.
def raw_vector_with(word_list)
  vec = if $GSL
          GSL::Vector.alloc(word_list.size)
        else
          Array.new(word_list.size, 0)
        end
  # Fill in raw term counts for every word known to the word list.
  @word_hash.each_key do |word|
    vec[word_list[word]] = @word_hash[word] if word_list[word]
  end
  if $GSL
    sum = 0.0
    vec.each { |v| sum += v }
    total_words = sum
  else
    total_words = vec.reduce(0, :+).to_f
  end
  total_unique_words = 0
  if $GSL
    vec.each { |word| total_unique_words += 1 if word != 0.0 }
  else
    total_unique_words = vec.count { |word| word != 0 }
  end
  # Entropy-style weighting only makes sense with more than one word and
  # more than one distinct term; otherwise the raw counts are kept.
  if total_words > 1.0 && total_unique_words > 1
    weighted_total = 0.0
    # Memoize p*log(p) per distinct count to avoid recomputation.
    cached_calcs = Hash.new do |hash, term|
      hash[term] = ((term / total_words) * Math.log(term / total_words))
    end
    vec.each do |term|
      weighted_total += cached_calcs[term] if term > 0.0
    end
    # Second memoized pass rescales each entry by the (negative) entropy.
    cached_calcs = Hash.new do |hash, val|
      hash[val] = Math.log(val + 1) / -weighted_total
    end
    vec.collect! do |val|
      cached_calcs[val]
    end
  end
  if $GSL
    @raw_norm = vec.normalize
    @raw_vector = vec
  else
    @raw_norm = Vector[*vec].normalize
    @raw_vector = Vector[*vec]
  end
end
Creates the raw vector out of word_hash using word_list as the key for mapping the vector space .
4,241
# Rotate the table back to horizontal orientation. No-op unless the table
# is currently rotated.
def rotate_horizontal
  return unless rotated?

  # orientation.slice splits the rotated data into header and body rows.
  head, body = orientation.slice(self)
  if header && header.empty?
    # Restore the header from the sliced head and rebuild rows against it.
    @header = head[0]
    @rows = body.map { |row| to_row(row, @header) }
  else
    @rows = body.map { |row| to_row(row) }
  end
end
Rotate the table horizontally
4,242
# Return the row at index as an Array, or nil when out of range. With a
# block, iterate over the row's elements and return self (also self when
# the index is out of range).
def row(index, &block)
  unless block_given?
    return rows.fetch(index) { return nil }
  end

  rows.fetch(index) { return self }.each(&block)
  self
end
Return a row number at the index of the table as an Array . When a block is given the elements of that Array are iterated over .
4,243
# Return the column at index as an Array (nils removed). Non-integer
# indexes allow lookup by header name. With a block, yield each cell;
# out-of-range integer indexes return self (block form) or nil.
def column(index)
  out_of_range = index.is_a?(Integer) && (index >= columns_size || index < 0)
  if block_given?
    return self if out_of_range

    rows.map { |row| yield row[index] }
  else
    return nil if out_of_range

    rows.map { |row| row[index] }.compact
  end
end
Return a column number at the index of the table as an Array . If the table has a header then column can be searched by header name . When a block is given the elements of that Array are iterated over .
4,244
# Add a row (or a separator marker) to the table, validating its size
# against the existing rows first. Returns self for chaining.
def <<(row)
  if row == Border::SEPARATOR
    separators << columns_size - (header ? 0 : 2)
  else
    # Validate against a copy including the new row before committing it.
    snapshot = rows.dup
    assert_row_sizes snapshot << row
    rows << to_row(row)
  end
  self
end
Add row to table
4,245
# Render the table using a custom border class.
#
# renderer_type uses a sentinel default (not_set) so the method can tell
# "no second argument" apart from an explicit nil/hash: when given and
# hash-like it is treated as the options, otherwise as options[:renderer].
def render_with(border_class, renderer_type = (not_set = true), options = {}, &block)
  unless not_set
    if renderer_type.respond_to?(:to_hash)
      options = renderer_type
    else
      options[:renderer] = renderer_type
    end
  end
  Renderer.render_with(border_class, self, options, &block)
end
Render a given table using custom border class .
4,246
# Coerce an Enumerable into table rows, recording separator positions
# along the way.
#
# @return [Array] the coerced row objects
def coerce(rows)
  collected = []
  @converter.convert(rows).to(:array).each do |row|
    if row == Border::SEPARATOR
      separators << collected.length - (header ? 0 : 1)
    else
      collected << to_row(row, header)
    end
  end
  collected
end
Coerce an Enumerable into a Table This coercion mechanism is used by Table to handle Enumerable types and force them into array type .
4,247
# Wrap method so its execution duration is reported as a StatsD timing.
def statsd_measure(method, name, *metric_options)
  add_to_method(method, name, :measure) do
    define_method(method) do |*args, &block|
      metric = StatsD::Instrument.generate_metric_name(name, self, *args)
      StatsD.measure(metric, *metric_options) { super(*args, &block) }
    end
  end
end
Adds execution duration instrumentation to a method as a timing .
4,248
# Wrap method so its execution duration is reported as a StatsD distribution.
def statsd_distribution(method, name, *metric_options)
  add_to_method(method, name, :distribution) do
    define_method(method) do |*args, &block|
      metric = StatsD::Instrument.generate_metric_name(name, self, *args)
      StatsD.distribution(metric, *metric_options) { super(*args, &block) }
    end
  end
end
Adds execution duration instrumentation to a method as a distribution .
4,249
# Wrap method so each invocation increments a StatsD counter by one.
def statsd_count(method, name, *metric_options)
  add_to_method(method, name, :count) do
    define_method(method) do |*args, &block|
      metric = StatsD::Instrument.generate_metric_name(name, self, *args)
      StatsD.increment(metric, 1, *metric_options)
      super(*args, &block)
    end
  end
end
Adds counter instrumentation to a method .
4,250
# Build the String shown when a command exits with an unexpected status:
# the exact command, its STDOUT/STDERR, and the exit code. Output is
# suppressed entirely for sensitive resources.
def format_for_exception
  return "Command execution failed. STDOUT/STDERR suppressed for sensitive resource" if sensitive

  report = +""
  report << "#{@terminate_reason}\n" if @terminate_reason
  report << "---- Begin output of #{command} ----\n"
  report << "STDOUT: #{stdout.strip}\n"
  report << "STDERR: #{stderr.strip}\n"
  report << "---- End output of #{command} ----\n"
  report << "Ran #{command} returned #{status.exitstatus}" if status
  report
end
Creates a String showing the output of the command including a banner showing the exact command executed . Used by + invalid! + to show command results when the command exited with an unexpected status .
4,251
# Normalize a comment block by removing any consistent leading whitespace
# (the same indent on every line) and stripping whitespace from the start
# and end of the whole block. Skipped when :preserve_whitespace is set.
#
# NOTE(review): the regexes below were reconstructed from a whitespace-
# mangled source; confirm /\s\*/ (strip " *" comment decoration) and /\s/
# against the original file.
def normalize(text_block)
  return text_block if @options[:preserve_whitespace]

  text_block = text_block.gsub(/\s\*/, '')
  indent_size = nil
  unindented = text_block.split("\n").collect do |line|
    # The first line fixes the indent size used for all subsequent lines.
    preceding_whitespace = line.scan(/\s/)[0].to_s.size
    indent_size = preceding_whitespace if indent_size.nil?
    if line == ""
      ""
    elsif indent_size <= preceding_whitespace && indent_size > 0
      line.slice(indent_size, line.length - 1)
    else
      line
    end
  end.join("\n")
  unindented.strip
end
Normalizes the comment block to ignore any consistent preceding whitespace . Consistent means the same amount of whitespace on every line of the comment block . Also strips any whitespace at the start and end of the whole block .
4,252
# Execute one state-machine transition for the given line. Returns true
# and moves to the rule's target state on the first matching rule;
# otherwise resets to :start_state and returns false.
def transition(line)
  matching_rule = @rules[@state].find { |rule| line =~ rule.pattern }
  if matching_rule
    @state = matching_rule.to_state
    true
  else
    @state = :start_state
    false
  end
end
Executes a transition of the state machine for the given line . Returns false if the line does not match any transition rule and the state machine was reset to the initial state .
4,253
# Logger#add override: format and append a message unless its severity is
# below the configured level. The message may come from the message
# argument, the block, or (falling back) the progname argument.
def add(severity, message = nil, progname = nil, &block)
  return true if severity < @level

  if message.nil?
    if block_given?
      message = yield
    else
      # progname was actually the message; fall back to the default progname.
      message = progname
      progname = nil
    end
  end
  progname ||= @progname
  self << format_message(SEVERITY_FORMATS_[severity + 1], Time.now, progname, message)
  true
end
override add method
4,254
# Check whether the local knowledge base is packed, i.e. whether any
# listed file is a tar.gz archive present under $path.
#
# @return [Boolean]
def __packed?
  FILES.each do |fn|
    # Fix: File.exist? replaces File.exists?, which was deprecated and
    # removed in Ruby 3.2; && replaces `and` in a boolean expression.
    return true if fn.end_with?('tar.gz') && File.exist?(File.join($path, fn))
  end
  false
end
Check if the local KB is packed or not .
4,255
# Compute the results output directory for the current target, named by
# today's date; when that directory already exists, append _1, _2, ...
# until an unused name is found. Does not create the directory.
#
# @return [String] the chosen directory path
def output_dir
  base = File.join(Dir.home, 'dawnscanner', 'results', File.basename(@target))
  @output_dir_name = File.join(base, Time.now.strftime('%Y%m%d'))
  # The while loop alone covers the "already exists" case; the previous
  # redundant outer `if Dir.exist?` wrapper has been removed.
  i = 1
  while Dir.exist?(@output_dir_name)
    @output_dir_name = File.join(base, "#{Time.now.strftime('%Y%m%d')}_#{i}")
    i += 1
  end
  @output_dir_name
end
Output stuff - START
4,256
# Apply every loaded security check whose name matches +name+.
# Returns false when the knowledge base has not been loaded or is empty.
#
# NOTE(review): the method also returns false after successfully applying
# matching checks — presumably callers ignore the return value; confirm
# before relying on it.
def apply(name)
  if @checks.nil?
    $logger.err "you must load knowledge base before trying to apply security checks"
    return false
  end
  return false if @checks.empty?
  @checks.each do |check|
    _do_apply(check) if check.name == name
  end
  false
end
Output stuff - END
4,257
# Called when loading the relationship from the database: records the type
# and persisted object, resets dirty tracking, converts the persisted
# properties into attributes, and wires up the from/to nodes.
def init_on_load(persisted_rel, from_node_id, to_node_id, type)
  @rel_type = type
  @_persisted_obj = persisted_rel
  # A freshly loaded object has no pending changes.
  changed_attributes_clear!
  @attributes = convert_and_assign_attributes(persisted_rel.props)
  load_nodes(from_node_id, to_node_id)
end
called when loading the rel from the database
4,258
# Support for Rails date_select/datetime_select/time_select helpers:
# collect "attr(1i)"-style multiparameter keys into per-attribute hashes
# and pass them on for reassembly; plain keys pass through unchanged.
def process_attributes(attributes = nil)
  return attributes if attributes.blank?

  multi_parameter_attributes = {}
  new_attributes = {}
  attributes.each_pair do |key, value|
    match = key.to_s.match(DATE_KEY_REGEX)
    if match
      found_key = match[1]
      index = match[2].to_i
      # Fix: use the local MatchData (match[3], the cast suffix such as "i")
      # instead of the fragile $3 global; also avoids matching the key twice.
      (multi_parameter_attributes[found_key] ||= {})[index] = value.empty? ? nil : value.send("to_#{match[3]}")
    else
      new_attributes[key] = value
    end
  end
  multi_parameter_attributes.empty? ? new_attributes : process_multiparameter_attributes(multi_parameter_attributes, new_attributes)
end
Gives support for Rails date_select datetime_select time_select helpers .
4,259
# A frozen, memoized hash of every declared property name (string) to its
# default value. Built once from the registered properties and used as a
# template when wrapping loaded nodes/rels, avoiding per-load recomputation.
def attributes_nil_hash
  @_attributes_nil_hash ||= registered_properties.each_with_object({}) do |(name, prop_obj), attr_hash|
    attr_hash[name.to_s] = prop_obj.default_value
  end.freeze
end
During object wrap a hash is needed that contains each declared property with a nil value . The active_attr dependency is capable of providing this but it is expensive and calculated on the fly each time it is called . Rather than rely on that we build this progressively as properties are registered . When the node or rel is loaded this is used as a template .
4,260
# A frozen, memoized symbol-to-string map of attribute names, used to
# avoid to_s/symbolize_keys churn on every load: trades a little memory
# for fewer allocations and less GC.
def attributes_string_map
  @_attributes_string_map ||= attributes_nil_hash.keys.each_with_object({}) do |key, map|
    map[key.to_sym] = key
  end.freeze
end
During object wrapping a props hash is built with string keys but Neo4j - core provides symbols . Rather than a to_s or symbolize_keys during every load we build a map of symbol - to - string to speed up the process . This increases memory used by the gem but reduces object allocation and GC so it is faster in practice .
4,261
# Run the given block inside the :create callbacks of both endpoint nodes.
# conditional_callback skips the callback when its condition is true, so
# passing persisted? means callbacks only fire for unpersisted nodes.
def node_before_callbacks!
  validate_unpersisted_nodes!
  from_node.conditional_callback(:create, from_node.persisted?) do
    to_node.conditional_callback(:create, to_node.persisted?) do
      yield
    end
  end
end
Node callbacks only need to be executed if the node is not persisted . We let the conditional_callback method do the work we only have to give it the type of callback we expect to be run and the condition which if true will prevent it from executing .
4,262
# Assign all attributes from the given Hash and save inside a transaction;
# the transaction is marked failed when the record is invalid.
#
# @return [Boolean] whether the save succeeded
def update(attributes)
  self.class.run_transaction do |tx|
    self.attributes = process_attributes(attributes)
    save.tap { |saved| tx.mark_failed unless saved }
  end
end
Updates this resource with all the attributes from the passed - in Hash and requests that the record be saved . If saving fails because the resource is invalid then false will be returned .
4,263
# Load the node from the database, or return it directly when it is
# already a node object (responds to neo_id). @node may initially hold a
# bare neo_id, in which case it is resolved via a query.
def loaded
  fail UnsetRelatedNodeError, 'Node not set, cannot load' if @node.nil?
  return @node if @node.respond_to?(:neo_id)

  @node = Neo4j::ActiveBase.new_query.match(:n).where(n: { neo_id: @node }).pluck(:n).first
end
Loads a node from the database or returns the node if already loaded
4,264
# Convert a hash's values in place into types acceptable to Neo4j
# (medium :db) or into the user-declared Ruby types (medium :ruby),
# skipping nils and undeclared attributes.
def convert_properties_to(obj, medium, properties)
  direction = (medium == :ruby) ? :to_ruby : :to_db
  properties.each_pair do |name, value|
    properties[name] = convert_property(name, value, direction) unless skip_conversion?(obj, name, value)
  end
end
Modifies a hash s values to be of types acceptable to Neo4j or matching what the user defined using type in property definitions .
4,265
def convert_property ( key , value , direction ) converted_property ( primitive_type ( key . to_sym ) , value , direction ) end
Converts a single property from its current format to its db - or Ruby - expected output type .
4,266
def primitive_type ( attr ) case when serialized_properties . include? ( attr ) serialized_properties [ attr ] when magic_typecast_properties . include? ( attr ) magic_typecast_properties [ attr ] else fetch_upstream_primitive ( attr ) end end
If the attribute is to be typecast using a custom converter , which converter should it use ? If not , returns the type used to find a native serializer .
4,267
def skip_conversion? ( obj , attr , value ) value . nil? || ! obj . class . attributes . key? ( attr ) end
Returns true if the property isn't defined in the model or if it is nil
4,268
def matches_content? return true if @expected_content . empty? @actual_content = ChefSpec :: Renderer . new ( @runner , resource ) . content return false if @actual_content . nil? @expected_content . delete_if do | expected | if expected . is_a? ( Regexp ) @actual_content =~ expected elsif RSpec :: Matchers . is_a_matcher? ( expected ) expected . matches? ( @actual_content ) elsif expected . is_a? ( Proc ) expected . call ( @actual_content ) true else @actual_content . include? ( expected ) end end @expected_content . empty? end
Determines if the resource's content matches the expected content .
4,269
def load_data ( name , key , data = { } ) ChefSpec :: ZeroServer . load_data ( name , key , data ) end
Shortcut method for loading data into Chef Zero .
4,270
def reset! if RSpec . configuration . server_runner_clear_cookbooks @server . clear_data @cookbooks_uploaded = false else @data_loaded . each do | key , names | if key == "data" names . each { | n | @server . data_store . delete_dir ( [ "organizations" , "chef" , key , n ] ) } else names . each { | n | @server . data_store . delete ( [ "organizations" , "chef" , key , n ] ) } end end end @data_loaded = { } end
Remove all the data we just loaded from the ChefZero server
4,271
def nuke! @server = ChefZero :: Server . new ( log_level : RSpec . configuration . log_level || :warn , port : RSpec . configuration . server_runner_port , data_store : data_store ( RSpec . configuration . server_runner_data_store ) , ) @cookbooks_uploaded = false @data_loaded = { } end
Really reset everything and reload the configuration
4,272
def upload_cookbooks! return if @cookbooks_uploaded loader = Chef :: CookbookLoader . new ( Chef :: Config [ :cookbook_path ] ) loader . load_cookbooks cookbook_uploader_for ( loader ) . upload_cookbooks @cookbooks_uploaded = true end
Upload the cookbooks to the Chef Server .
4,273
def converge ( * recipe_names ) apply_chef_config! @converging = false node . run_list . reset! recipe_names . each { | recipe_name | node . run_list . add ( recipe_name ) } return self if dry_run? expand_run_list! node . attributes . role_default = Chef :: Mixin :: DeepMerge . merge ( node . attributes . role_default , options [ :default_attributes ] ) if options [ :default_attributes ] node . attributes . normal = Chef :: Mixin :: DeepMerge . merge ( node . attributes . normal , options [ :normal_attributes ] ) if options [ :normal_attributes ] node . attributes . role_override = Chef :: Mixin :: DeepMerge . merge ( node . attributes . role_override , options [ :override_attributes ] ) if options [ :override_attributes ] node . attributes . automatic = Chef :: Mixin :: DeepMerge . merge ( node . attributes . automatic , options [ :automatic_attributes ] ) if options [ :automatic_attributes ] begin @run_context = client . setup_run_context rescue Chef :: Exceptions :: NoSuchResourceType => e raise Error :: MayNeedToSpecifyPlatform . new ( original_error : e . message ) end yield node if block_given? @converging = true converge_val = @client . converge ( @run_context ) if converge_val . is_a? ( Exception ) raise converge_val end self end
Expand the given run list , merge any configured default , normal , override and automatic attributes onto the node and converge the resources .
4,274
def converge_block ( & block ) converge do recipe = Chef :: Recipe . new ( cookbook_name , '_test' , run_context ) recipe . instance_exec ( & block ) end end
Execute a block of recipe code .
4,275
def find_resource ( type , name , action = nil ) resource_collection . all_resources . reverse_each . find do | resource | resource . declared_type == type . to_sym && ( name === resource . identity || name === resource . name ) && ( action . nil? || resource . performed_action? ( action ) ) end end
Find the resource with the declared type and resource name and optionally match a performed action .
4,276
def find_resources ( type ) resource_collection . all_resources . select do | resource | resource_name ( resource ) == type . to_sym end end
Find all resources with the declared type .
4,277
def step_into? ( resource ) key = resource_name ( resource ) Array ( options [ :step_into ] ) . map ( & method ( :resource_name ) ) . include? ( key ) end
Determines if the runner should step into the given resource . The + step_into + option takes a string but this method coerces everything to symbols for safety .
4,278
def method_missing ( m , * args , & block ) if block = ChefSpec . matchers [ resource_name ( m . to_sym ) ] instance_exec ( args . first , & block ) else super end end
Respond to custom matchers defined by the user .
4,279
def with_default_options ( options ) config = RSpec . configuration { cookbook_root : config . cookbook_root || calling_cookbook_root ( options , caller ) , cookbook_path : config . cookbook_path || calling_cookbook_path ( options , caller ) , role_path : config . role_path || default_role_path , environment_path : config . environment_path || default_environment_path , file_cache_path : config . file_cache_path , log_level : config . log_level , path : config . path , platform : config . platform , version : config . version , } . merge ( options ) end
Set the default options with the given options taking precedence .
4,280
def calling_cookbook_root ( options , kaller ) calling_spec = options [ :spec_declaration_locations ] || kaller . find { | line | line =~ / \/ / } raise Error :: CookbookPathNotFound if calling_spec . nil? bits = calling_spec . split ( / / , 2 ) . first . split ( File :: SEPARATOR ) spec_dir = bits . index ( 'spec' ) || 0 File . join ( bits . slice ( 0 , spec_dir ) ) end
The inferred cookbook root from the calling spec .
4,281
def calling_cookbook_path ( options , kaller ) File . expand_path ( File . join ( calling_cookbook_root ( options , kaller ) , '..' ) ) end
The inferred path from the calling spec .
4,282
def default_role_path Pathname . new ( Dir . pwd ) . ascend do | path | possible = File . join ( path , 'roles' ) return possible if File . exist? ( possible ) end nil end
The inferred path to roles .
4,283
def start! ( & block ) warn ( "ChefSpec's coverage reporting is deprecated and will be removed in a future version" ) instance_eval ( & block ) if block at_exit { ChefSpec :: Coverage . report! } end
Create a new coverage object singleton .
4,284
def add_filter ( filter = nil , & block ) id = "#{filter.inspect}/#{block.inspect}" . hash @filters [ id ] = if filter . kind_of? ( Filter ) filter elsif filter . kind_of? ( String ) StringFilter . new ( filter ) elsif filter . kind_of? ( Regexp ) RegexpFilter . new ( filter ) elsif block BlockFilter . new ( block ) else raise ArgumentError , 'Please specify either a string, ' 'filter, or block to filter source files with!' end true end
Add a filter to the coverage analysis .
4,285
def set_template ( file = 'human.erb' ) [ ChefSpec . root . join ( 'templates' , 'coverage' , file ) , File . expand_path ( file , Dir . pwd ) ] . each do | temp | if File . exist? ( temp ) @template = temp return end end raise Error :: TemplateNotFound . new ( path : file ) end
Change the template for reporting of coverage analysis .
4,286
def teardown! env = :: Librarian :: Chef :: Environment . new ( project_path : Dir . pwd ) env . config_db . local [ 'path' ] = @originalpath FileUtils . rm_rf ( @tmpdir ) if File . exist? ( @tmpdir ) end
Remove the temporary directory and restore the librarian - chef cookbook path .
4,287
def setup! policyfile_path = File . join ( Dir . pwd , 'Policyfile.rb' ) installer = ChefDK :: PolicyfileServices :: Install . new ( policyfile : policyfile_path , ui : ChefDK :: UI . null ) installer . run exporter = ChefDK :: PolicyfileServices :: ExportRepo . new ( policyfile : policyfile_path , export_dir : @tmpdir ) FileUtils . rm_rf ( @tmpdir ) exporter . run :: RSpec . configure do | config | config . cookbook_path = [ File . join ( @tmpdir , 'cookbooks' ) , File . join ( @tmpdir , 'cookbook_artifacts' ) ] end end
Setup and install the necessary dependencies in the temporary directory
4,288
def resource_name ( thing ) if thing . respond_to? ( :declared_type ) && thing . declared_type name = thing . declared_type elsif thing . respond_to? ( :resource_name ) name = thing . resource_name else name = thing end name . to_s . gsub ( '-' , '_' ) . to_sym end
Calculate the name of a resource replacing dashes with underscores and converting symbols to strings and back again .
4,289
def node_load_failed ( node_name , exception , config ) expecting_exception ( exception ) do description = Chef :: Formatters :: ErrorMapper . node_load_failed ( node_name , exception , config ) display_error ( description ) end end
Failed to load node data from the server
4,290
def run_list_expand_failed ( node , exception ) expecting_exception ( exception ) do description = Chef :: Formatters :: ErrorMapper . run_list_expand_failed ( node , exception ) display_error ( description ) end end
Error expanding the run list
4,291
def cookbook_resolution_failed ( expanded_run_list , exception ) expecting_exception ( exception ) do description = Chef :: Formatters :: ErrorMapper . cookbook_resolution_failed ( expanded_run_list , exception ) display_error ( description ) end end
Called when there is an error getting the cookbook collection from the server .
4,292
def cookbook_sync_failed ( cookbooks , exception ) expecting_exception ( exception ) do description = Chef :: Formatters :: ErrorMapper . cookbook_sync_failed ( cookbooks , exception ) display_error ( description ) end end
Called when an error occurs during cookbook sync
4,293
def recipe_not_found ( exception ) expecting_exception ( exception ) do description = Chef :: Formatters :: ErrorMapper . file_load_failed ( nil , exception ) display_error ( description ) end end
Called when a recipe cannot be resolved
4,294
def resource_failed ( resource , action , exception ) expecting_exception ( exception ) do description = Chef :: Formatters :: ErrorMapper . resource_failed ( resource , action , exception ) display_error ( description ) end end
Called when a resource fails and will not be retried .
4,295
def content case resource_name ( resource ) when :template content_from_template ( chef_run , resource ) when :file content_from_file ( chef_run , resource ) when :cookbook_file content_from_cookbook_file ( chef_run , resource ) else nil end end
Compute the contents of the resource based on its type ( template , file or cookbook_file ) .
4,296
def content_from_template ( chef_run , template ) cookbook_name = template . cookbook || template . cookbook_name template_location = cookbook_collection ( chef_run . node ) [ cookbook_name ] . preferred_filename_on_disk_location ( chef_run . node , :templates , template . source ) if Chef :: Mixin :: Template . const_defined? ( :TemplateContext ) template_context = Chef :: Mixin :: Template :: TemplateContext . new ( [ ] ) template_context . update ( { :node => chef_run . node , :template_finder => template_finder ( chef_run , cookbook_name ) , } . merge ( template . variables ) ) if template . respond_to? ( :helper_modules ) template_context . _extend_modules ( template . helper_modules ) end template_context . render_template ( template_location ) else template . provider . new ( template , chef_run . run_context ) . send ( :render_with_context , template_location ) do | file | File . read ( file . path ) end end end
Compute the contents of a template using Chef's templating logic .
4,297
def content_from_cookbook_file ( chef_run , cookbook_file ) cookbook_name = cookbook_file . cookbook || cookbook_file . cookbook_name cookbook = cookbook_collection ( chef_run . node ) [ cookbook_name ] File . read ( cookbook . preferred_filename_on_disk_location ( chef_run . node , :files , cookbook_file . source ) ) end
Get the contents of a cookbook file using Chef .
4,298
def cookbook_collection ( node ) if chef_run . respond_to? ( :run_context ) chef_run . run_context . cookbook_collection elsif node . respond_to? ( :run_context ) node . run_context . cookbook_collection else node . cookbook_collection end end
The cookbook collection for the current Chef run context . Handles the differing cases between Chef 10 and Chef 11 .
4,299
def template_finder ( chef_run , cookbook_name ) if Chef :: Provider . const_defined? ( :TemplateFinder ) Chef :: Provider :: TemplateFinder . new ( chef_run . run_context , cookbook_name , chef_run . node ) else nil end end
Return a new instance of the TemplateFinder if we are running on Chef 11 .