idx int64 0 24.9k | question stringlengths 68 4.14k | target stringlengths 9 749 |
|---|---|---|
20,800 | def nest ( label , nested ) indented = indent indented . puts ( "#{label}:" ) nested . pretty_dump ( indented . indent ) self end | Write nest with label |
20,801 | def puts ( string ) util = output util . write ( prefix ) util . puts ( string ) self end | Write string with indentation |
20,802 | def build_context context return unless @format [ :context ] . is_a? Hash return unless context . is_a? Hash return @format [ :context ] . collect do | key , format | sprintf ( format , context [ key ] ) end . join rescue KeyError , ArgumentError => ex return "[context: #{ex.message}]" end | Builds prefix from |
20,803 | def expect name , retval , args = [ ] method_definition = Module . new do define_method name do | * args , & block | __tracer . assert name , args retval end end extend method_definition __tracer . register name , args self end | Expect that method + name + is called optionally with + args + and returns + retval + . |
20,804 | def report ( test ) case test . result when :success if @type == :test print_test_success ( test ) else print_generate_success ( test ) end @success += 1 when :skipped if @type == :test print_test_skipped ( test ) else print_generate_skipped ( test ) end @skipped += 1 when :failed print_test_failed ( test ) @failed += 1 when :timedout if @type == :test print_test_timeout ( test ) else print_generate_timeout ( test ) end @failed += 1 end end | Initialize a new Reporter |
20,805 | def url_for ( option ) host = SermepaWebTpv . response_host path = SermepaWebTpv . send ( option ) return if ! host . present? || ! path . present? URI . join ( host , path ) . to_s end | Available options redirect_success_path redirect_failure_path callback_response_path |
20,806 | def background = ( background ) if ! background . is_a? ( TrueClass ) && ! background . is_a? ( FalseClass ) fail ( ArgumentError , "Background should be boolean" ) end manager . allowsBackgroundLocationUpdates = background @background = background end | Sets whether location should be updated on the background or not . |
20,807 | def define_unary_method ( mod , op , conversion = :identity ) mod . module_eval do define_method "#{op}_with_hornetseye" do | a | if a . matched? if a . dimension == 0 and a . variables . empty? target = a . typecode . send conversion target . new mod . send ( op , a . simplify . get ) else Hornetseye :: ElementWise ( proc { | x | mod . send op , x } , "#{mod}.#{op}" , proc { | x | x . send conversion } ) . new ( a ) . force end else send "#{op}_without_hornetseye" , a end end alias_method_chain op , :hornetseye module_function "#{op}_without_hornetseye" module_function op end end | Extend unary method with capability to handle arrays |
20,808 | def define_binary_method ( mod , op , coercion = :coercion ) mod . module_eval do define_method "#{op}_with_hornetseye" do | a , b | if a . matched? or b . matched? a = Node . match ( a , b ) . new a unless a . matched? b = Node . match ( b , a ) . new b unless b . matched? if a . dimension == 0 and a . variables . empty? and b . dimension == 0 and b . variables . empty? target = a . typecode . send coercion , b . typecode target . new mod . send ( op , a . simplify . get , b . simplify . get ) else Hornetseye :: ElementWise ( proc { | x , y | mod . send op , x , y } , "#{mod}.#{op}" , proc { | t , u | t . send coercion , u } ) . new ( a , b ) . force end else send "#{op}_without_hornetseye" , a , b end end alias_method_chain op , :hornetseye module_function "#{op}_without_hornetseye" module_function op end end | Extend binary method with capability to handle arrays |
20,809 | def expand_to_class_path ( path ) sep = path [ WINDOWS_PATH_SEP ] ? WINDOWS_PATH_SEP : UNIX_PATH_SEP dirs = path . split ( sep ) . map { | dir | File . expand_path ( dir ) } expanded = expand_jars ( dirs ) expanded . each { | dir | add_to_classpath ( dir ) } end | Adds the directories in the given path and all Java jar files contained in the directories to the Java classpath . |
20,810 | def add_to_classpath ( file ) unless File . exist? ( file ) then logger . warn ( "File to place on Java classpath does not exist: #{file}" ) return end if File . extname ( file ) == '.jar' then require file else if File . directory? ( file ) then last = file [ - 1 , 1 ] if last == "\\" then file = file [ 0 ... - 1 ] + '/' elsif last != '/' then file = file + '/' end end $CLASSPATH << file end end | Adds the given jar file or directory to the classpath . |
20,811 | def expand_jars ( directories ) expanded = directories . map do | dir | jars = Dir [ File . join ( dir , "**" , "*.jar" ) ] . sort jars . empty? ? [ dir ] : jars end expanded . flatten end | Expands the given directories to include the contained jar files . If a directory contains jar files then the jar files are included in the resulting array . Otherwise the directory itself is included in the resulting array . |
20,812 | def validate_message_broker message_broker = @connection . message_broker raise TypeError , "'message_broker' must respond to #close" unless message_broker . respond_to? ( :close ) raise TypeError , "'message_broker' must respond to #kill" unless message_broker . respond_to? ( :kill ) raise TypeError , "'message_broker' must respond to #object_request_broker=" unless message_broker . respond_to? ( :object_request_broker= ) raise TypeError , "'message_broker' must respond to #send" unless message_broker . respond_to? ( :send ) end | Validates that the message broker will respond to the necessary methods . |
20,813 | def list ( options = { } , & block ) builder = List . new ( self , options ) capture ( builder , & block ) if block_given? builder . to_s end | Typography Headings not implemented Lead not implemented Small not implemented Bold not implemented Italics not implemented muted text - warning text - error text - info text - success not implemented Abbreviations not implemented Addresses not implemented Blockquotes not implemented Lists |
20,814 | def set_defaults_for model options [ :page ] = 1 options [ :per_page ] ||= 10 options [ :name ] ||= model . to_s . downcase options [ :order ] ||= 'created_at ASC' self . name = options [ :name ] end | set defaults for options |
20,815 | def get_data_for model model = apply_search_to ( model ) @data = model . limit ( options [ :per_page ] ) . offset ( start_item ) . order ( options [ :order ] ) options [ :total_count ] = model . count options [ :page_total ] = ( options [ :total_count ] / options [ :per_page ] . to_f ) . ceil end | make the model queries to pull back the data based on pagination and search results if given |
20,816 | def retrieve ( fieldList , from , ids ) soap . retrieve ( Retrieve . new ( fieldList , from , ids ) ) . result end | Retrieve a list of specific objects |
20,817 | def rediskey if parent? suffix = parent . kind_of? ( Familia ) && parent . class . suffix != :object ? parent . class . suffix : name k = parent . rediskey ( name , nil ) else k = [ name ] . flatten . compact . join ( Familia . delim ) end if @opts [ :quantize ] args = case @opts [ :quantize ] when Numeric [ @opts [ :quantize ] ] when Array @opts [ :quantize ] else [ ] end k = [ k , qstamp ( * args ) ] . join ( Familia . delim ) end k end | returns a redis key based on the parent object so it will include the proper index . |
20,818 | def clean! HotTub . logger . info "[HotTub] Cleaning pool #{@name}!" if HotTub . logger @mutex . synchronize do begin @_pool . each do | clnt | clean_client ( clnt ) end ensure @cond . signal end end nil end | Clean all clients currently checked into the pool . Its possible clients may be returned to the pool after cleaning |
20,819 | def drain! HotTub . logger . info "[HotTub] Draining pool #{@name}!" if HotTub . logger @mutex . synchronize do begin while clnt = @_pool . pop close_client ( clnt ) end ensure @_out . clear @_pool . clear @pid = Process . pid @cond . broadcast end end nil end | Drain the pool of all clients currently checked into the pool . After draining wake all sleeping threads to allow repopulating the pool or if shutdown allow threads to quickly finish their work Its possible clients may be returned to the pool after cleaning |
20,820 | def shutdown! HotTub . logger . info "[HotTub] Shutting down pool #{@name}!" if HotTub . logger @shutdown = true kill_reaper if @reaper drain! @shutdown = false nil end | Kills the reaper and drains the pool . |
20,821 | def reap! HotTub . logger . info "[HotTub] Reaping pool #{@name}!" if HotTub . log_trace? while ! @shutdown reaped = nil @mutex . synchronize do begin if _reap? if _dead_clients? reaped = @_out . select { | clnt , thrd | ! thrd . alive? } . keys @_out . delete_if { | k , v | reaped . include? k } else reaped = [ @_pool . shift ] end else reaped = nil end ensure @cond . signal end end if reaped reaped . each do | clnt | close_client ( clnt ) end else break end end nil end | Remove and close extra clients Releases mutex each iteration because reaping is a low priority action |
20,822 | def push ( clnt ) if clnt orphaned = false @mutex . synchronize do begin if ! @shutdown && @_out . delete ( clnt ) @_pool << clnt else orphaned = true end ensure @cond . signal end end close_orphan ( clnt ) if orphaned reap! if @blocking_reap end nil end | Safely add client back to pool only if that client is registered |
20,823 | def pop alarm = ( Time . now + @wait_timeout ) clnt = nil dirty = false while ! @shutdown raise_alarm if ( Time . now > alarm ) @mutex . synchronize do begin if clnt = @_pool . pop dirty = true else clnt = _fetch_new ( & @client_block ) end ensure if clnt _checkout ( clnt ) @cond . signal else @reaper . wakeup if @reaper && _dead_clients? @cond . wait ( @mutex , @wait_timeout ) end end end break if clnt end clean_client ( clnt ) if dirty && clnt clnt end | Safely pull client from pool adding if allowed If a client is not available check for dead resources and schedule reap if nesseccary |
20,824 | def _fetch_new ( & client_block ) if ( @never_block || ( _total_current_size < @max_size ) ) if client_block . arity == 0 nc = yield else nc = yield @sessions_key end HotTub . logger . info "[HotTub] Adding client: #{nc.class.name} to #{@name}." if HotTub . log_trace? nc end end | Returns a new client if its allowed . _add is volatile ; and may cause threading issues if called outside |
20,825 | def each ( options = { } , & block ) options = { :source => compares_files? ? 'files' : 'strings' , :context => 3 } . merge options Diffy :: Diff . new ( preprocessed_expected . to_s , preprocessed_produced . to_s , options ) . each & block end | Enumerate all lines which differ . |
20,826 | def new ( * args , & block ) instance = allocate . tap { | obj | obj . __send__ ( :initialize , * args , & block ) } IceNine . deep_freeze ( instance ) end | Reloads instance s constructor to make it immutable |
20,827 | def whitelist_redact_hash redact_hash digest_hash = { } redact_hash . each do | key , how | if ( how . to_sym == :digest ) digest_hash [ digest_key ( key ) ] = :keep end end digest_hash . merge redact_hash end | Calculate all keys that should be kept in whitelist mode In multiple iterations of redact - > decrypt - digest keys will remain and then get deleted in the second iteration so we have to add the digest keys so they re not wiped out on later iteration |
20,828 | def parse ( str ) node = MecabNode . new mecab_sparse_tonode ( @mecab , str ) result = [ ] while ! node . null? do if node . surface . empty? node = node . next next end result << node node = node . next end result end | Initialize the mecab tagger with the given option . |
20,829 | def siblings_and_self ( options = { } ) scopes = options [ :scope ] || self . default_sibling_scope scope_values = options [ :scope_values ] || { } scopes = Array . wrap ( scopes ) . compact criteria = base_document_class . all detail_scopes = [ ] scopes . reverse_each do | scope | scope_value = scope_values . fetch ( scope ) { self . send ( scope ) } relation_metadata = self . reflect_on_association ( scope ) if relation_metadata && scope_value proxy = self . siblings_through_relation ( scope , scope_value ) if proxy criteria = proxy . criteria next end end detail_scopes << scope end detail_scopes . each do | scope | scope_value = scope_values . fetch ( scope ) { self . send ( scope ) } relation_metadata = self . reflect_on_association ( scope ) if relation_metadata criteria = criteria . where ( relation_metadata . key => scope_value ) if scope_value && relation_metadata . polymorphic? type = scope_value . class . name inverse_of = send ( relation_metadata . inverse_of_field ) criteria = criteria . where ( relation_metadata . inverse_type => type ) criteria = criteria . any_in ( relation_metadata . inverse_of_field => [ inverse_of , nil ] ) end else criteria = criteria . where ( scope => scope_value ) end end criteria end | Returns this document s siblings and itself . |
20,830 | def sibling_of? ( other , options = { } ) scopes = options [ :scope ] || self . default_sibling_scope scope_values = options [ :scope_values ] || { } other_scope_values = options [ :other_scope_values ] || { } scopes = Array . wrap ( scopes ) . compact return false if base_document_class != base_document_class ( other ) scopes . each do | scope | scope_value = scope_values . fetch ( scope ) { self . send ( scope ) } other_scope_value = other_scope_values . fetch ( scope ) { other . send ( scope ) } return false if scope_value != other_scope_value end true end | Is this document a sibling of the other document? |
20,831 | def become_sibling_of ( other , options = { } ) return true if self . sibling_of? ( other , options ) scopes = options [ :scope ] || self . default_sibling_scope other_scope_values = options [ :other_scope_values ] || { } scopes = Array . wrap ( scopes ) . compact return false if base_document_class != base_document_class ( other ) scopes . each do | scope | other_scope_value = other_scope_values . fetch ( scope ) { other . send ( scope ) } relation_metadata = self . reflect_on_association ( scope ) if relation_metadata && other_scope_value inverse_metadata = other . intelligent_inverse_metadata ( scope , other_scope_value ) if inverse_metadata inverse = inverse_metadata . name if inverse_metadata . many? other_scope_value . send ( inverse ) << self else other_scope_value . send ( "#{inverse}=" , self ) end next end end self . send ( "#{scope}=" , other_scope_value ) end end | Makes this document a sibling of the other document . |
20,832 | def search ( dependency ) @cache [ dependency . hash ] ||= begin find_by_name ( dependency . name ) . select do | spec | dependency =~ spec end . sort_by { | s | s . version } end end | Takes an array of gem sources and fetches the full index of gems from each one . It then combines the indexes together keeping track of the original source so that any resolved gem can be fetched from the correct source . |
20,833 | def search_current_el xpath desired_el = @current_el . at_xpath xpath return desired_el unless desired_el . nil? @current_el_parents . each do | parent | desired_el = parent . at_xpath xpath return desired_el unless desired_el . nil? end nil end | Search each parent of some verb for a given element . Used for rule inheritance . |
20,834 | def process ( data ) @buffer << data lines = @buffer . split "\n" @buffer = @buffer . end_with? ( "\n" ) ? '' : lines . pop lines . each do | line | yield line . strip end end | Appends a new piece of ASCII data to this buffer and yields back any lines that are now complete . |
20,835 | def perform_spread ( * args ) spread_duration = get_sidekiq_options [ 'spread_duration' ] || 1 . hour spread_in = 0 spread_at = nil spread_method = get_sidekiq_options [ 'spread_method' ] || :rand spread_mod_value = nil spread_method = spread_method . to_sym if spread_method . present? has_options = false opts = if ! args . empty? && args . last . is_a? ( :: Hash ) has_options = true args . pop else { } end sd = _extract_spread_opt ( opts , :duration ) spread_duration = sd if sd . present? si = _extract_spread_opt ( opts , :in ) spread_in = si if si . present? sa = _extract_spread_opt ( opts , :at ) spread_at = sa if sa . present? sm = _extract_spread_opt ( opts , :method ) spread_method = sm . to_sym if sm . present? smv = _extract_spread_opt ( opts , :mod_value ) spread_mod_value = smv if smv . present? remaining_opts = opts . reject { | o | PERFORM_SPREAD_OPTS . include? ( o . to_sym ) } num_args = args . length params = new . method ( :perform ) . parameters num_req_args = params . select { | p | p [ 0 ] == :req } . length num_opt_args = params . select { | p | p [ 0 ] == :opt } . length num_req_key_args = params . select { | p | p [ 0 ] == :keyreq } . length num_opt_key_args = params . select { | p | p [ 0 ] == :key } . length raise ArgumentError , "#{name}#perform should not use keyword args" if num_req_key_args > 0 || num_opt_key_args > 0 if has_options if num_args < num_req_args args . push ( remaining_opts ) elsif num_args < ( num_req_args + num_opt_args ) && ! remaining_opts . empty? args . push ( remaining_opts ) end end spread_mod_value = args . first if spread_mod_value . blank? && spread_method == :mod _check_spread_args! ( spread_duration , spread_method , spread_mod_value ) spread = _set_spread ( spread_method , spread_duration . to_i , spread_mod_value ) if spread_at . present? t = spread_at . to_i + spread perform_at ( t , * args ) else t = spread_in . to_i + spread if t . zero? 
perform_async ( * args ) else perform_in ( t , * args ) end end end | Randomly schedule worker over a window of time . Arguments are keys of the final options hash . |
20,836 | def translation_key key = name . gsub ( / \z / , '' ) key . gsub! ( / / , '' ) key . gsub! ( / \d / , '\1_\2' ) key . gsub! ( / \d / , '\1_\2' ) key . tr! ( "-" , "_" ) key . downcase! end | Returns the underscored name used in the full i18n translation key |
20,837 | def check ( name = @current_step_name , & block ) raise "No step name" unless name @current_step_name ||= name checks << CheckBlock . new ( "progression_name" , name , & block ) define_method ( "#{name}?" ) do check = checks . find { | s | s . name == name } ! ! check . run ( self ) end end | Create a predicate method for the current step name to test if it s complete |
20,838 | def complete ( & block ) name = @current_step_name or raise "No step name" completions << CompletionBlock . new ( "progression_name" , name , & block ) define_method ( "complete_#{name}" ) do completion = completions . find { | c | c . name == name } completion . run ( self ) end end | Check a method like complete_current_step_name to mark a step complete . This is not always needed but useful if you want to persist a completion for performance purposes . |
20,839 | def tag ( type , * paths , & block ) paths . map do | path | item = :: Asset . manifest . find { | i | i . path == path } item ? item . sources . map { | f | yield ( asset_url ( f ) ) } : yield ( path ) end . flatten . join ( "\n" ) end | Build the tags |
20,840 | def each_cargo_section ( name ) raise CargoStreamerError . new ( "Mode must be 'r' to read cargo data" ) unless @mode == "r" locations = @cargo_locations [ name ] or return locations . each do | seek_location | @ioh . seek ( seek_location ) digest = "" encoded_data = "" @ioh . each_line do | line | line . chomp! if line == CARGO_END break elsif digest == "" digest = line else encoded_data += line end end yield verify_and_decode_cargo ( digest , encoded_data ) end end | Reads verifies and decodes each cargo section with a given name passing each section s decoded data to the block |
20,841 | def extract_features ( options = { } ) rate = options [ :sample_rate ] || "8000" begin parameters = [ ] parameters << "-r #{rate}" parameters << ":source" parameters = parameters . flatten . compact . join ( " " ) . strip . squeeze ( " " ) success = Cocaine :: CommandLine . new ( "yaafehero" , parameters ) . run ( :source => get_path ( @file ) ) rescue => e raise AudioHeroError , "These was an issue getting stats from #{@basename}" end garbage_collect ( @file ) if options [ :gc ] == "true" MessagePack . unpack ( success ) end | Requires custom version of yaafe |
20,842 | def restore post = Post . find ( params [ :id ] ) restore = Post . restore ( post ) url = if restore . post_type == 'page' "/admin/pages/#{restore.id}/edit" elsif restore . post_type == 'post' "/admin/articles/#{restore.id}/edit" end redirect_to URI . parse ( url ) . path , notice : I18n . t ( "controllers.admin.revisions.restore.flash.notice" , post_type : restore . post_type . capitalize ) end | restore the post to the given post data |
20,843 | def rewrite_asset_path ( source , path = nil ) if Fingerjam :: Base . enabled? && Fingerjam :: Base . cached? ( source ) Fingerjam :: Base . cached_url ( source ) else if path && path . respond_to? ( :call ) return path . call ( source ) elsif path && path . is_a? ( String ) return path % [ source ] end asset_id = rails_asset_id ( source ) if asset_id . blank? source else source + "?#{asset_id}" end end end | Used by Rails view helpers |
20,844 | def put key , value key = convert_key key if ( prop = self . class . metadata [ key ] ) prop . set self , value else _raw_put key , value end end | Store a value at a key . If the key is a prop name store it through the prop which will check it s type . |
20,845 | def sign ( params ) RubyDesk . logger . debug { "Params to sign: #{params.inspect}" } sorted_params = params . sort { | a , b | a . to_s <=> b . to_s } RubyDesk . logger . debug { "Sorted params: #{sorted_params.inspect}" } sorted_params . map! do | k , v | [ k , URI . unescape ( v ) ] end concatenated = @api_secret + sorted_params . join RubyDesk . logger . debug { "concatenated: #{concatenated}" } md5 = Digest :: MD5 . hexdigest ( concatenated ) RubyDesk . logger . debug { "md5: #{md5}" } return md5 end | Sign the given parameters and returns the signature |
20,846 | def invoke_api_call ( api_call ) url = URI . parse ( api_call [ :url ] ) http = Net :: HTTP . new ( url . host , url . port ) http . use_ssl = true http . verify_mode = OpenSSL :: SSL :: VERIFY_NONE data = api_call [ :params ] . to_a . map { | pair | pair . map { | x | URI . escape ( x . to_s ) } . join '=' } . join ( '&' ) headers = { 'Content-Type' => 'application/x-www-form-urlencoded' } RubyDesk . logger . info "URL: #{api_call[:url]}" RubyDesk . logger . info "method: #{api_call[:method]}" RubyDesk . logger . info "Params: #{data}" case api_call [ :method ] when :get , 'get' then resp , data = http . request ( Net :: HTTP :: Get . new ( url . path + "?" + data , headers ) ) when :post , 'post' then resp , data = http . request ( Net :: HTTP :: Post . new ( url . path , headers ) , data ) when :delete , 'delete' then resp , data = http . request ( Net :: HTTP :: Delete . new ( url . path , headers ) , data ) end RubyDesk . logger . info "Response code: #{resp.code}" RubyDesk . logger . info "Returned data: #{data}" case resp . code when "200" then return data when "400" then raise RubyDesk :: BadRequest , data when "401" , "403" then raise RubyDesk :: UnauthorizedError , data when "404" then raise RubyDesk :: PageNotFound , data when "500" then raise RubyDesk :: ServerError , data else raise RubyDesk :: Error , data end end | invokes the given API call and returns body of the response as text |
20,847 | def prepare_and_invoke_api_call ( path , options = { } ) api_call = prepare_api_call ( path , options ) data = invoke_api_call ( api_call ) parsed_data = case options [ :format ] when 'json' then JSON . parse ( data ) when 'xml' then REXML :: Document . new ( data ) else JSON . parse ( data ) rescue REXML :: Document . new ( data ) rescue data end RubyDesk . logger . info "Parsed data: #{parsed_data.inspect}" return parsed_data end | Prepares an API call with the given arguments then invokes it and returns its body |
20,848 | def auth_url auth_call = prepare_api_call ( "" , :params => { :api_key => @api_key } , :base_url => ODESK_AUTH_URL , :format => nil , :method => :get , :auth => false ) data = auth_call [ :params ] . to_a . map { | pair | pair . join '=' } . join ( '&' ) return auth_call [ :url ] + "?" + data end | Returns the URL that authenticates the application for the current user . This is used for web applications only |
20,849 | def desktop_auth_url raise "Frob should be requested first. Use RubyDesk::Controller#get_frob()" unless @frob auth_call = prepare_api_call ( "" , :params => { :api_key => @api_key , :frob => @frob } , :base_url => ODESK_AUTH_URL , :format => nil , :method => :get , :auth => false ) data = auth_call [ :params ] . to_a . map { | pair | pair . join '=' } . join ( '&' ) return auth_call [ :url ] + "?" + data end | Returns a URL that the desktop user should visit to activate current frob . This method should not be called before a frob has been requested |
20,850 | def handle? ( req ) if @opts . key? ( :if ) cond = @opts [ :if ] cond = cond . call ( req ) if cond . respond_to? ( :call ) return cond end true end | Returns false if the current request should not be handled by the middleware |
20,851 | def parent_of? ( metric ) if new_record? start = ( started_at - metric . started_at ) * 1000.0 start <= 0 && ( start + duration >= metric . duration ) else self . id == metric . parent_id end end | Returns if the current node is the parent of the given node . If this is a new record we can use started_at values to detect parenting . However if it was already saved we lose microseconds information from timestamps and we must rely solely in id and parent_id information . |
20,852 | def to_request item_pedido = { codigo_produto : self . codigo_produto , codigo_categoria : self . codigo_categoria , nome_produto : self . nome_produto , quantidade_produto : self . quantidade_produto , valor_unitario_produto : self . valor_unitario_produto , nome_categoria : self . nome_categoria , } return item_pedido end | Nova instancia da classe ItemPedido |
20,853 | def values ( key ) h = hash ( key ) hoff = @hashes [ ( h % 256 ) * 2 ] hlen = @hashes [ ( h % 256 ) * 2 + 1 ] return [ ] if hlen == 0 off = ( h / 256 ) % hlen vals = [ ] while ( slot = read ( hoff + off * hashref_size .. hoff + off * hashref_size + hashref_size - 1 ) ) && ( dslot = ary_unpack ( slot , 2 ) ) && dslot [ 1 ] != 0 if dslot [ 0 ] == h pos = dslot [ 1 ] rkey , value = read_entry ( pos ) if rkey == key vals << value end end off = ( off + 1 ) % hlen end return vals end | Returns all values for + key + in an array |
20,854 | def errors_for_actionpack e0 = :: ActiveModel :: Errors . new ( self ) @errors . each do | e | e0 . add :base , e end e0 end | actionpack 4 requires a more robust Errors object |
20,855 | def add_property ( accessor , predicate , object ) new_property = RDF :: Statement . new ( bnode , predicate , object ) attributes_list [ accessor ] = new_property end | Add a property without defining it on the class . This will stay will use the subject and the regular infrastructure . |
20,856 | def update ( attributes , collection ) each_resource_with_edit_url ( collection ) do | resource , edit_url | put_updated_resource ( edit_url , resource ) end collection . size end | Constructs and executes UPDATE statement for given attributes and a query |
20,857 | def architectures architectures = Hash . new stdin , stdout , stderr = Open3 . popen3 ( 'dwarfdump' , '-u' , @dsym ) stdout . each_line do | line | if line =~ / \- \( \) / architectures [ $2 ] = $1 end end return architectures end | Creates a new symbolicator for a given dSYM file . |
20,858 | def announce apply_environment unless @approved mailopts = self . mailopts if mailto . empty? report "No recipents given." else if trial? subject = mailopts [ 'subject' ] mailto = mailopts [ 'to' ] . flatten . join ( ", " ) report "email '#{subject}' to #{mailto}" else if @approved email ( mailopts ) else exit - 1 end end end end | Send announcement message . |
20,859 | def message @message ||= ( path = Dir [ file ] . first if file if path project . announcement ( File . new ( file ) ) else parts . map { | part | / \/ \/ / =~ part . to_s ? $' : part } project . announcement ( * parts ) end ) end | Message to send . Defaults to a generated release announcement . |
20,860 | def apply_environment return if noenv @server ||= ENV [ 'EMAIL_SERVER' ] @from ||= ENV [ 'EMAIL_FROM' ] || ENV [ 'EMAIL_ACCOUNT' ] @account ||= ENV [ 'EMAIL_ACCOUNT' ] || ENV [ 'EMAIL_FROM' ] @password ||= ENV [ 'EMAIL_PASSWORD' ] @port ||= ENV [ 'EMAIL_PORT' ] @domain ||= ENV [ 'EMAIL_DOMAIN' ] @login ||= ENV [ 'EMAIL_LOGIN' ] @secure ||= ENV [ 'EMAIL_SECURE' ] end | Apply environment settings . |
20,861 | def apply started_at = DateTime . now . to_s check_puppetfile_content copy_static_modules process_status = apply_step create_step_checkpoint ( started_at ) if process_status . success? process_status end | Puppet apply Fundamental step |
20,862 | def create_step_checkpoint ( started_at ) self . node . started_at = started_at self . node . finished_at = DateTime . now . to_s Bebox :: Environment . create_checkpoint_directories ( project_root , environment ) generate_file_from_template ( "#{Bebox::FilesHelper::templates_path}/node/provisioned_node.yml.erb" , "#{self.project_root}/.checkpoints/environments/#{self.environment}/steps/#{self.step}/#{self.node.hostname}.yml" , { node : self . node } ) end | Create checkpoint for step |
20,863 | def in_edges ( filter = nil , & block ) r = self . collect { | v | v . in_edges ( filter , & block ) } EdgeSet . new ( r ) . flatten . uniq end | Walking section Returns incoming edges of all vertices of this set |
20,864 | def in_adjacent ( filter = nil , & block ) r = self . collect { | v | v . in_adjacent ( filter , & block ) } VertexSet . new ( r ) . flatten . uniq end | Returns all back - adjacent vertices reachable from this set |
20,865 | def adjacent ( filter = nil , & block ) ( in_adjacent ( filter , & block ) + out_adjacent ( filter , & block ) ) . uniq end | Returns all adjacent vertices reachable from this set |
20,866 | def config @parsed_config ||= Nokogiri :: XML ( config_xml ) . xpath ( "//property" ) . inject ( { } ) { | props , xprop | props [ xprop . xpath ( "./name" ) . text ] = xprop . xpath ( "./value" ) . text props } end | Get the job s config as a Hash |
20,867 | def push ( queue_name , message_body ) sqs . send_message ( queue_url : url_for_queue ( queue_name ) , message_body : message_body ) end | Push a message onto a queue |
20,868 | def reserve ( queue_name ) resp = sqs . receive_message ( queue_url : url_for_queue ( queue_name ) , max_number_of_messages : 1 ) return nil unless resp . messages . any? Message . new queue_client : self , queue : queue_name , sqs_message : resp . messages . first end | Reserve a message from the specified queue |
20,869 | def delete ( message ) sqs . delete_message queue_url : url_for_queue ( message . queue ) , receipt_handle : message . receipt_handle end | Delete a message from the queue |
20,870 | def defer_retry ( message ) sqs . change_message_visibility queue_url : url_for_queue ( message . queue ) , receipt_handle : message . receipt_handle , visibility_timeout : BetterSqs . configuration . sqs_message_deferral_seconds end | Updates the message visibility timeout to create some delay before an attempt will be made to reprocess the message |
20,871 | def resize! begin image = MiniMagick :: Image . open ( @file . path ) image . resize ( "#{@width}x#{@height}" ) rescue else image . write ( @file . path ) rescue nil end end | Resize file . Keeping aspect ratio . |
20,872 | def thumbnail! begin image = MiniMagick :: Image . open ( @file . path ) image . resize ( "#{Pushfile.settings[:images][:thumb][:width]}x" ) rescue @thumb = nil else t = @name . split ( '.' ) ; ext = t . pop @thumb = t . join ( "." ) . concat ( "_thumb.#{ext}" ) image . write ( "/tmp/#{@thumb}" ) rescue @thumb = nil end end | Create thumbnail same name but with _thumb at the end |
20,873 | def element ( i ) unless i . matched? unless ( 0 ... shape . last ) . member? i raise "Index must be in 0 ... #{shape.last} (was #{i})" end i = INT . new i end i . size = @index . size if i . is_a? ( Variable ) and @index . size . get @term . subst @index => i end | Get element of this term |
20,874 | def report_event ( name , parrent_page_id = nil , space = nil ) page = find_page_by_name ( name , parrent_page_id ) if page append_to_page ( page [ "id" ] , parrent_page_id ) else create_page ( name , space , parrent_page_id ) end clear_log end | Appends the log to the Confluence page if found ; otherwise creates a new page , then clears the log
20,875 | def create_page ( title , space , parrent_page_id = nil ) params = { 'type' => 'page' , 'title' => title , 'space' => { 'key' => space } , 'body' => { 'storage' => { 'value' => ( "#{ @body_message.to_json.gsub("&&", "&&").gsub(/\\u001b.../, " ") }" ) . force_encoding ( 'UTF-8' ) , 'representation' => 'storage' } } } if parrent_page_id params [ 'ancestors' ] = [ { 'type' => 'page' , 'id' => parrent_page_id } ] end uri = URI . parse ( @base_url ) https = Net :: HTTP . new ( uri . host , uri . port ) https . use_ssl = true req = Net :: HTTP :: Post . new ( uri . path , initheader = { 'Content-Type' => 'application/json' } ) req . basic_auth ( @user , @password ) req [ 'Accept' ] = 'application/json' req . body = "#{params.to_json}" response = https . request ( req ) response = JSON . parse ( response . body ) if response [ "statusCode" ] == 400 puts response . inspect puts req . body . inspect puts "Create page: Error reporting to confluence: #{response["message"]}" raise "Create page: Error reporting to confluence: #{response["message"]}" else puts "Reported page creation." end end | Creates a new page with the given title under the given space key ; if parrent_page_id is provided , sets the page ancestors accordingly
20,876 | def constrained? if self . constrain if self . constrain == true true elsif self . constrain . is_a? String true elsif self . constrain . is_a? Array and not self . constrain . empty? true else false end else false end end | Returns true if this atom has any relaxation constraints set
20,877 | def distance_to ( atom ) Math . sqrt ( ( self . x - atom . x ) ** 2 + ( self . y - atom . y ) ** 2 + ( self . z - atom . z ) ** 2 ) end | Return the distance to another atom |
20,878 | def displace ( x , y , z ) Atom . new ( self . x + x , self . y + y , self . z + z , self . species , self . constrain ) end | Return a new atom with the same species and relaxation constraints but with coordinates displaced by + x + + y + + z +
20,879 | def displace! ( x , y , z ) self . x += x self . y += y self . z += z end | Displace this atom in place |
20,880 | def format_geometry_in line = "atom %16.6f %16.6f %16.6f %s" % [ self . x , self . y , self . z , self . species ] if self . constrain if self . constrain == true line << "\nconstrain_relaxation .true." elsif self . constrain . is_a? String line << "\nconstrain_relaxation #{self.constrain}" elsif self . constrain . is_a? Array and not self . constrain . empty? self . constrain . each { | c | line << "\nconstrain_relaxation #{c}" } line << "\n" end end line end | Print a string representation of this atom formatted in the geometry . in format used by Aims |
20,881 | def build_commands ( strategy_name , type , action_name ) action = Arsenal . actions [ action_name ] if action options = action . options case type when :prerequisite commands = action . prerequisite_call config Trooper . logger . action "Prerequisite: #{action.description}" else commands = action . call config Trooper . logger . action action . description end [ commands , options ] else raise MissingActionError , "Cant find action: #{action_name}" end end | build the commands to be sent to the host object |
20,882 | def hosts @hosts ||= begin r , h , u = [ ] , ( config [ :hosts ] rescue nil ) , ( config [ :user ] rescue nil ) h . each { | host | r << Host . new ( host , u ) } if h && u ; r end end | returns an array of host objects |
20,883 | def runner_execute! ( host , commands , options = { } ) result = host . execute commands , options if result && result [ 1 ] == :stdout Trooper . logger . info "#{result[2]}\n" true else false end end | runs the commands on a host and deals with output |
20,884 | def add_row ( row ) size = row . size raise ArgumentError . new ( "Given a row of data with #{size} entries, but there are only #{@table_columns.size} columns in the table" ) unless size == @table_columns . size @data << row end | Adds a single row to the table |
20,885 | def add_rows ( rows ) sizes = rows . collect { | r | r . size } . uniq expected_size = @table_columns . size errors = sizes . select { | s | s != expected_size } raise ArgumentError . new ( "Given a row of data with #{errors.to_sentence} entries, but there are only #{expected_size} columns in the table" ) if errors . any? @data += rows end | Adds multiple rows to the table |
20,886 | def format_data formatted_rows = [ ] @data . each do | row | values = [ ] row . each_with_index do | entry , index | values << Gvis :: DataCell . new ( entry , @column_types . to_a [ index ] [ 1 ] ) . to_js end rowstring = "[#{values.join(", ")}]" formatted_rows << rowstring end "[#{formatted_rows.join(', ')}]" end | Outputs the data within this table as a javascript array ready for use by google . visualization . DataTable This is where conversions of ruby date objects to javascript Date objects and escaping strings and formatting options is done |
20,887 | def register_column ( type , name ) type = type . to_s . downcase raise ArgumentError . new ( "invalid column type #{type}, permitted types are #{COLUMN_TYPES.join(', ')}" ) unless COLUMN_TYPES . include? ( type ) @table_columns << name . to_s @column_types . merge! ( name . to_s => type ) end | Registers each column explicitly with data type and a name associated |
20,888 | def extract_books booklist = [ ] if @doc . count != 0 @doc . each do | book_data | book = { } book [ 'title' ] = book_data [ 'titleMain' ] book [ 'book_url' ] = BOOK_URL + book_data [ 'prodId' ] book [ 'crt_time' ] = book_data [ 'crtTime' ] . split ( ' ' ) [ 0 ] booklist << book end end booklist end | Return the books in the format specified in spec . |
20,889 | def collect_urls @fetch_queue << @crawl_url . resolved_base_url until @fetch_queue . empty? || @processed . length >= @options [ :max_requests ] url = @fetch_queue . pop yield ( url ) page_urls_for ( url ) end result = @processed + @fetch_queue Logger . log "Crawling finished:" Logger . log "Processed links: #{@processed.length}" Logger . log "Found links: #{result.length}" result . to_a rescue Interrupt , IRB :: Abort Logger . err_log 'Crawl interrupted.' @fetch_queue . to_a end | Collects all links found on the domain
20,890 | def liner liner_keys . inject ( { } ) { | h , k | h [ k ] = self [ k ] ; h } . freeze end | Build a hash of liner attributes |
20,891 | def build return false if @generated logger . info ( " ) @dependencies = Set . new file . parent . mkpath if input . template? evaluate_template else copy_resource end @generated = true end | Generate output for this template |
20,892 | def each_vertex ( filter = nil , & block ) if filter . nil? @vertices . each & block else vertices ( filter ) . each & block end end | Calls block on each graph vertex for which the filter predicate evaluates to true .
20,893 | def remove_vertices ( * vertices ) vertices = to_vertices ( * vertices ) . sort { | v1 , v2 | v2 <=> v1 } vertices . each do | vertex | remove_edges ( vertex . in_edges + vertex . out_edges ) @vertices . delete_at ( vertex . index ) vertex . index = - 1 end @vertices . each_with_index { | v , i | v . index = i } self end | Removes all vertices returned by evaluating the _vertices_ selection expression . |
20,894 | def each_edge ( filter = nil , & block ) if filter . nil? @edges . each & block else edges ( filter ) . each & block end end | Calls block on each graph edge for which the filter predicate evaluates to true .
20,895 | def remove_edges ( * edges ) edges = to_edges ( edges ) . sort { | e1 , e2 | e2 <=> e1 } edges . each do | edge | edge . source . remove_out_edge ( edge ) edge . target . remove_in_edge ( edge ) @edges . delete_at ( edge . index ) edge . index = - 1 end @edges . each_with_index { | edge , i | edge . index = i } self end | Removes all edges returned by evaluating the _edges_ selection expression . |
20,896 | def to_dot ( buffer = '' ) buffer << "digraph G {\n" buffer << " graph[#{to_dot_attributes(self.to_h(true))}]\n" each_vertex do | v | buffer << " V#{v.index} [#{to_dot_attributes(v.to_h(true))}]\n" end each_edge do | e | buffer << " V#{e.source.index} -> V#{e.target.index} [#{to_dot_attributes(e.to_h(true))}]\n" end buffer << "}\n" end | Standard exports Encodes this graph for dot graphviz |
20,897 | def to_dot_attributes ( hash ) buffer = "" hash . each_pair do | k , v | buffer << " " unless buffer . empty? v = case v when Array if v . all? { | elm | Array === elm and elm . length == 2 and elm . all? { | subelm | Numeric === subelm } } v . inject ( '' ) { | memo , elm | "#{memo} #{elm.join(',')}" } . strip else v . join ( ', ' ) end else v . to_s end buffer << "#{k}=\"#{v}\"" end buffer end | Converts a hash to dot attributes |
20,898 | def check_sanity @vertices . each_with_index do | v , i | raise "Removed vertex in vertex list" unless v . index == i v . in_edges . each do | ine | raise "Removed edge in vertex incoming edges" if ine . index < 0 raise "Vertex and edge don't agree on target" unless ine . target == v end v . out_edges . each do | oute | raise "Removed edge in vertex outgoing edges" if oute . index < 0 raise "Vertex and edge don't agree on source" unless oute . source == v end end @edges . each_with_index do | e , i | raise "Removed edge in edge list" unless e . index == i raise "Edge in-connected to a removed vertex" if e . source . index < 0 raise "Edge out-connected to a removed vertex" if e . target . index < 0 end end | Checks graph sanity |
20,899 | def to_vertices ( * args ) selected = args . collect do | arg | case arg when Integer [ @vertices [ arg ] ] when VertexSet arg when Array arg . collect { | v | to_vertices ( v ) } . flatten . uniq when Digraph :: Vertex [ arg ] else pred = Predicate . to_predicate ( arg ) vertices ( pred ) end end . flatten . uniq VertexSet . new ( selected ) end | Applies argument conventions about selection of vertices |