idx
int64
0
24.9k
question
stringlengths
68
4.14k
target
stringlengths
9
749
22,000
# Sets the migration references for each valid migrated object.
# Walks the migrated objects in reverse dependency order and partitions them
# into valid and invalid sets via migration_valid?; valid objects get their
# references migrated, invalid ones have their owner attributes cleared.
# A second pass invalidates valid objects without a valid owner, and a third
# pass invalidates owners that were created solely to hold dependents which
# were themselves invalidated.
# NOTE(review): relies on migration_valid?/owner_valid? and the
# @target_class/@attr_flt_hash/@owners state defined elsewhere in this class.
def migrate_valid_references ( row , migrated ) ordered = migrated . transitive_closure ( :dependents ) ordered . keep_if { | obj | migrated . include? ( obj ) } . reverse! valid , invalid = ordered . partition do | obj | if migration_valid? ( obj ) then obj . migrate_references ( row , migrated , @target_class , @attr_flt_hash [ obj . class ] ) true else obj . class . owner_attributes . each { | pa | obj . clear_attribute ( pa ) } false end end valid . reverse . each do | obj | unless owner_valid? ( obj , valid , invalid ) then invalid << valid . delete ( obj ) logger . debug { "The migrator invalidated #{obj} since it does not have a valid owner." } end end valid . reject do | obj | if @owners . include? ( obj . class ) and obj . dependents . all? { | dep | invalid . include? ( dep ) } then obj . class . domain_attributes . each { | pa | obj . clear_attribute ( pa ) } invalid << obj logger . debug { "The migrator invalidated #{obj.qp} since it was created solely to hold subsequently invalidated dependents." } true end end end
Sets the migration references for each valid migrated object .
22,001
# Creates an instance of the given klass from the given row.
# The new instance and all intermediate migrated instances are added to the
# created set. Returns the new instance.
def create_instance(klass, row, created)
  logger.debug { "The migrator is building #{klass.qp}..." }
  instance = klass.new
  created << instance
  migrate_properties(instance, row, created)
  add_defaults(instance, row, created)
  logger.debug { "The migrator built #{instance}." }
  instance
end
Creates an instance of the given klass from the given row . The new klass instance and all intermediate migrated instances are added to the created set .
22,002
# Migrates each input field to the associated domain object attribute.
# String input values are stripped; missing (nil) input values are skipped.
def migrate_properties(obj, row, created)
  @cls_paths_hash[obj.class].each do |path|
    header = @header_map[path][obj.class]
    value = row[header]
    value.strip! if value.is_a?(String)
    next if value.nil?
    # Fill in any intermediate references, then migrate the terminal property.
    ref = fill_path(obj, path[0...-1], row, created)
    migrate_property(ref, path.last, value, row)
  end
end
Migrates each input field to the associated domain object attribute . String input values are stripped . Missing input values are ignored .
22,003
# Fills the given reference Property path starting at obj, creating missing
# intermediate references as needed. Returns the final reference.
def fill_path(obj, path, row, created)
  path.inject(obj) do |parent, prop|
    existing = parent.send(prop.reader)
    existing || create_reference(parent, prop, row, created)
  end
end
Fills the given reference Property path starting at obj .
22,004
# Sets the given migrated object's reference attribute to a new referenced
# domain object. Raises MigrationError if the property type is abstract.
def create_reference(obj, property, row, created)
  if property.type.abstract?
    raise MigrationError.new("Cannot create #{obj.qp} #{property} with abstract type #{property.type}")
  end
  ref = property.type.new
  ref.migrate(row, Array::EMPTY_ARRAY)
  obj.send(property.writer, ref)
  created << ref
  logger.debug { "The migrator created #{obj.qp} #{property} #{ref}." }
  ref
end
Sets the given migrated object's reference attribute to a new referenced domain object .
22,005
# Loads the defaults configuration files into a lazily-populated
# {class => {path => value}} hash.
def load_defaults_files(files)
  defaults = LazyHash.new { Hash.new }
  files.enumerate { |f| load_defaults_file(f, defaults) }
  defaults
end
Loads the defaults configuration files .
22,006
# Loads one defaults config file into the given hash.
# Raises MigrationError if the YAML file cannot be read.
def load_defaults_file(file, hash)
  begin
    config = YAML.load_file(file)
  rescue
    raise MigrationError.new("Could not read defaults file #{file}: " + $!)
  end
  config.each do |path_s, value|
    next if value.nil_or_empty?
    klass, path = create_attribute_path(path_s)
    hash[klass][path] = value
  end
end
Loads the defaults config file into the given hash .
22,007
# Loads the filter config files into a new {class => {attribute => filter}}
# hash and logs the result.
def load_filter_files(files)
  hash = {}
  files.enumerate { |file| load_filter_file(file, hash) }
  logger.debug { "The migrator loaded the filters #{hash.qp}." }
  hash
end
Loads the filter config files .
22,008
# Loads one filter config file into the given hash.
# Each config key must be a "Class.property" path with exactly one property;
# anything else raises MigrationError.
def load_filter_file(file, hash)
  logger.debug { "Loading the migration filter configuration #{file}..." }
  begin
    config = YAML.load_file(file)
  rescue
    raise MigrationError.new("Could not read filter file #{file}: " + $!)
  end
  config.each do |path_s, flt|
    next if flt.nil_or_empty?
    klass, path = create_attribute_path(path_s)
    if path.empty?
      raise MigrationError.new("Migration filter configuration path does not include a property: #{path_s}")
    elsif path.size > 1
      raise MigrationError.new("Migration filter configuration path with more than one property is not supported: #{path_s}")
    end
    pa = klass.standard_attribute(path.first.to_sym)
    (hash[klass] ||= {})[pa] = flt
  end
end
Loads the filter config file into the given hash .
22,009
# Creates a new chat room.
# ID-list params are normalized to comma-separated strings before posting.
def create_room(name, members_admin_ids, params = {})
  admin_ids = array_to_string(members_admin_ids)
  [:members_member_ids, :members_readonly_ids].each do |key|
    ids = params[key]
    params[key] = array_to_string(ids) if ids
  end
  post('rooms', params.merge(name: name, members_admin_ids: admin_ids))
end
Create new chat room
22,010
# Updates the chat room members.
# ID-list params are normalized to comma-separated strings before the PUT.
def update_room_members(room_id, members_admin_ids, params = {})
  admin_ids = array_to_string(members_admin_ids)
  [:members_member_ids, :members_readonly_ids].each do |key|
    ids = params[key]
    params[key] = array_to_string(ids) if ids
  end
  put("rooms/#{room_id}/members", params.merge(members_admin_ids: admin_ids))
end
Update chat room members
22,011
# Creates a new task in the chat room.
# The optional :limit param (a time) is converted to an integer timestamp.
def create_room_task(room_id, body, to_ids, params = {})
  ids = array_to_string(to_ids)
  limit = params[:limit]
  params[:limit] = time_to_integer(limit) if limit
  post("rooms/#{room_id}/tasks", params.merge(body: body, to_ids: ids))
end
Create new task of chat room
22,012
# Gets file information for a room file.
# :create_download_url, if present (including false), is converted to 0/1.
def room_file(room_id, file_id, params = {})
  flag = params[:create_download_url]
  params[:create_download_url] = boolean_to_integer(flag) unless flag.nil?
  get("rooms/#{room_id}/files/#{file_id}", params)
end
Get file information
22,013
# Calculates Easter Sunday for the given year.
# Uses the Julian computus for years up to 1752 and the Gregorian computus
# (with solar and lunar corrections) afterwards. Returns a new date instance
# in March or April of the year.
def easter(year)
  golden_number = (year % 19) + 1
  if year <= 1752
    # Julian calendar computus.
    dominical_number = (year + (year / 4) + 5) % 7
    paschal_full_moon = (3 - (11 * golden_number) - 7) % 30
  else
    # Gregorian calendar computus.
    dominical_number = (year + (year / 4) - (year / 100) + (year / 400)) % 7
    solar_correction = (year - 1600) / 100 - (year - 1600) / 400
    lunar_correction = (((year - 1400) / 100) * 8) / 25
    paschal_full_moon = (3 - 11 * golden_number + solar_correction - lunar_correction) % 30
  end
  dominical_number += 7 until dominical_number > 0
  paschal_full_moon += 30 until paschal_full_moon > 0
  # Clamp the full moon date per the standard computus corrections.
  if paschal_full_moon == 29 || (paschal_full_moon == 28 && golden_number > 11)
    paschal_full_moon -= 1
  end
  difference = (4 - paschal_full_moon - dominical_number) % 7
  difference += 7 if difference < 0
  day_easter = paschal_full_moon + difference + 1
  day_easter < 11 ? new(year, 3, day_easter + 21) : new(year, 4, day_easter - 10)
end
Calculate easter sunday for the given year
22,014
# Converts an options-style nested array (pairs of [name, value]) into a
# {value => name} hash for easy name lookup. A String or Symbol argument is
# first resolved through @data.
def names_by_value(a)
  a = @data[a] if a.is_a?(String) || a.is_a?(Symbol)
  Hash[a.map(&:reverse)]
end
Converts an options - style nested array into a hash for easy name lookup
22,015
# Reads orders from filename that date back at most +months+ months.
# @param filename [String] UTF-8 orders file, one order per line
# @param months [Integer, nil] optional look-back window; nil reads everything
def read(filename, months = nil)
  @orders = []
  # Earliest acceptable date (YYYY-MM-DD); nil means no lower bound.
  start = months ? (Date.today << months.to_i).strftime('%Y-%m-%d') : nil
  File.open(filename, "r:UTF-8") do |f|
    while line = f.gets
      order = Halffare::Model::Order.new(line)
      # Each line starts with its YYYY-MM-DD date, so a lexicographic
      # comparison on the first 10 characters is a date comparison.
      if (start.nil? || line[0, 10] >= start) && (order.note != Fetch::ORDER_NOTE_FILE_CREATED)
        @orders.push(order)
      end
    end
  end
  # BUG FIX: the log message contained a broken placeholder instead of the
  # filename interpolation.
  log_info "read #{@orders.length} orders from #{filename}"
  if @orders.length == 0
    if start.nil?
      log_notice "no orders found"
    else
      log_notice "no orders found after #{start}, maybe tweak the --months param"
    end
  end
end
Reads orders from filename that date back to max months months .
22,016
# Calculates half-fare and full prices for all loaded orders according to the
# given strategy name, accumulating totals in @halfprice/@fullprice and
# tracking the travel date range in @date_min/@date_max.
def calculate(strategy, halffare)
  @halfprice = 0
  @fullprice = 0
  if halffare
    log_info "assuming order prices as half-fare"
  else
    log_info "assuming order prices as full"
  end
  log_notice "please note that you are using a strategy that involves guessing the real price" if ['guess', 'sbbguess'].include? strategy
  strategy = price_factory(strategy)
  strategy.halffare = halffare
  log_info "using price strategy: #{strategy.class}"
  price = Price.new(strategy)
  log_info "calculating prices..."
  @date_min = false
  @date_max = false
  @orders.each do |order|
    log_order(order) if Halffare.debug
    halfprice, fullprice = price.get(order)
    # Verbose per-order reporting in debug mode only.
    if Halffare.debug
      if halfprice != 0 && fullprice != 0
        log_result "FOUND: #{order.description} (#{order.price}): half=#{currency(halfprice)}, full=#{currency(fullprice)}"
        if halffare
          log_emphasize "You would pay (full price): #{currency(fullprice)}, you save #{currency(fullprice - order.price)}"
        else
          log_emphasize "You would pay (half-fare): #{currency(halfprice)}, you pay #{currency(order.price - halfprice)} more"
        end
      end
    end
    @halfprice += halfprice
    @fullprice += fullprice
    @date_min = order.travel_date if !@date_min || order.travel_date < @date_min
    @date_max = order.travel_date if !@date_max || order.travel_date > @date_max
  end
end
Calculates prices according to given strategy .
22,017
# Sets the block to call when data is received; ANSI escapes are stripped
# before the data is appended to the stdout/stderr buffers.
# Raises Shells::NotRunning unless the shell is running.
def buffer_output(&block)
  raise Shells::NotRunning unless running?
  block ||= Proc.new {}
  stdout_received do |data|
    self.last_output = Time.now
    append_stdout strip_ansi_escape(data), &block
  end
  stderr_received do |data|
    self.last_output = Time.now
    append_stderr strip_ansi_escape(data), &block
  end
end
Sets the block to call when data is received .
22,018
# Pushes the current stdout/stderr/output buffers onto the capture stack and
# resets them to empty strings.
def push_buffer
  raise Shells::NotRunning unless running?
  debug 'Pushing buffer >>'
  sync do
    output_stack.push [stdout, stderr, output]
    self.stdout = ''
    self.stderr = ''
    self.output = ''
  end
end
Pushes the buffers for output capture .
22,019
# Pops the buffer stack and merges the captured output by prepending the
# popped historical buffers to the current ones.
def pop_merge_buffer
  raise Shells::NotRunning unless running?
  debug 'Merging buffer <<'
  sync do
    hist_stdout, hist_stderr, hist_output = (output_stack.pop || [])
    self.stdout = hist_stdout + stdout if hist_stdout
    self.stderr = hist_stderr + stderr if hist_stderr
    self.output = hist_output + output if hist_output
  end
end
Pops the buffers and merges the captured output .
22,020
# Pops the buffer stack and discards the currently captured output, restoring
# the previous buffers (or empty strings if the stack was empty).
def pop_discard_buffer
  raise Shells::NotRunning unless running?
  debug 'Discarding buffer <<'
  sync do
    prev_stdout, prev_stderr, prev_output = (output_stack.pop || [])
    self.stdout = prev_stdout || ''
    self.stderr = prev_stderr || ''
    self.output = prev_output || ''
  end
end
Pops the buffers and discards the captured output .
22,021
# Calls the block, resolving each argument by looking up the block's own
# parameter names in the given context (hash-like).
def block_call(context, &block)
  args = block.parameters.map { |param| context[param[1]] }
  block.call(*args)
end
Calls the block resolving the parameters by getting the parameter name from the given context .
22,022
# Calls the added components by sending the configured @message with the
# given args. Stops at the first truthy result unless @call_all is set;
# returns the last result obtained.
def call(*args)
  result = nil
  @components.each do |component|
    break if result && !@call_all
    result = component.send(@message, *args)
  end
  result
end
Calls the added components by sending the configured message and the given args .
22,023
# Sets the scriptSig for this input using a string of bytes, caching its
# textual form and forwarding the raw blob to the native object.
def script_sig=(blob)
  script = Script.new(:blob => blob)
  @script_sig = script.to_s
  @native.script_sig = blob
end
Set the scriptSig for this input using a string of bytes .
22,024
# Appends a node of the given type to the stacks and the result tree, at the
# given indent level. When add is true the node also becomes a parent
# candidate at that level. Returns the new node.
def append_node(type, indent: @indents.length, add: false, value: nil)
  # Grow the stack list until the requested indent level exists.
  @stacks << @stacks.last.dup while indent >= @stacks.length
  parent = @stacks[indent].last
  node = AST::NodeRegistrator.create(type, @lineno)
  parent.children << node
  node.value = value unless value.nil?
  @stacks[indent] << node if add
  node
end
Append element to stacks and result tree
22,025
# Attempts to clean the provided client using the configured @clean_client
# action; errors are logged and swallowed. Always returns the client.
def clean_client(clnt)
  if @clean_client
    begin
      perform_action(clnt, @clean_client)
    rescue => e
      HotTub.logger.error "[HotTub] There was an error cleaning one of your #{self.class.name} clients: #{e}" if HotTub.logger
    end
  end
  clnt
end
Attempts to clean the provided client checking the options first for a clean block then checking the known clients
22,026
# Attempts to close the provided client, lazily resolving the close action
# from the known client actions on first use; errors are logged and
# swallowed. Always returns nil.
def close_client(clnt)
  @close_client = (known_client_action(clnt, :close) || false) if @close_client.nil?
  if @close_client
    begin
      perform_action(clnt, @close_client)
    rescue => e
      HotTub.logger.error "[HotTub] There was an error closing one of your #{self.class.name} clients: #{e}" if HotTub.logger
    end
  end
  nil
end
Attempts to close the provided client checking the options first for a close block then checking the known clients
22,027
# Determines whether the given client should be reaped by running the
# configured @reap_client action (expected to return a boolean); errors are
# logged and treated as false.
def reap_client?(clnt)
  should_reap = false
  if @reap_client
    begin
      should_reap = perform_action(clnt, @reap_client)
    rescue => e
      HotTub.logger.error "[HotTub] There was an error reaping one of your #{self.class.name} clients: #{e}" if HotTub.logger
    end
  end
  should_reap
end
Attempts to determine if a client should be reaped block should return a boolean
22,028
# Obtains the environment from the command parameters or an interactive menu.
# Exits with an error message if the chosen environment does not exist.
def get_environment(options)
  environment = options[:environment]
  environment ||= choose_option(Environment.list(project_root), _('cli.choose_environment'))
  if Bebox::Environment.environment_exists?(project_root, environment)
    environment
  else
    exit_now!(error(_('cli.not_exist_environment') % { environment: environment }))
  end
end
Obtain the environment from command parameters or menu
22,029
# Obtains the default environment for a project: 'vagrant' if present,
# otherwise the first listed environment, or '' when none exist.
def default_environment
  environments = Bebox::Environment.list(project_root)
  return '' if environments.count == 0
  environments.include?('vagrant') ? 'vagrant' : environments.first
end
Obtain the default environment for a project
22,030
# Converts the markdown text fields (resumo, abstract, bibliografia) to LaTeX
# by piping each one through pandoc.
def converte_configuracao_para_latex
  @artigo_latex.merge!(@artigo)
  ['resumo', 'abstract', 'bibliografia'].each do |key|
    Open3.popen3("pandoc --smart -f markdown -t latex --no-wrap") do |stdin, stdout, stderr, wait_thr|
      pid = wait_thr.pid
      stdin.write @artigo[key]
      stdin.close
      @artigo_latex[key] = stdout.read
    end
  end
end
Converte os arquivos de texto markdown para texto latex
22,031
# Writes the LaTeX article configuration as YAML, with a leading newline and
# trailing "---" so pandoc can consume it as a metadata block.
def salva_configuracao_yaml_para_inclusao_em_pandoc
  File.open(@arquivo_saida_yaml, 'w') do |file|
    file.write("\n")
    file.write @artigo_latex.to_yaml
    file.write("---\n")
  end
end
Precisa gerar arquivos com quebra de linha antes e depois porque pandoc utiliza
22,032
# Returns the last recorded entries for the given tag from the configured
# state backend. The 'file' and 'memory' backends both read the in-memory
# @data hash (the original had two identical branches, merged here); the
# 'redis' backend queries Redis directly.
def last_records(tag = @state_tag)
  case @state_type
  when 'file', 'memory'
    @data['last_records'][tag]
  when 'redis'
    begin
      @redis.get(tag)
    rescue Exception => e
      # NOTE(review): rescuing Exception is overly broad; kept for
      # compatibility, but StandardError would normally suffice.
      puts e.message
      puts e.backtrace.inspect
    end
  end
end
end of initialize
22,033
# Grabs the proper query operator from the string part, returning the pair
# [raw operator, Montage operator].
def get_query_operator(part)
  found = Montage::Operators.find_operator(part)
  [found.operator, found.montage_operator]
end
Grabs the proper query operator from the string
22,034
# Parses a single portion of the query string. Values parse via JSON when
# possible (falling back to the raw part); integer- and float-looking values
# are coerced to numbers, paren-wrapped lists become arrays, arrays pass
# through, and remaining strings are gsub-cleaned.
# NOTE(review): the regex literals (/ \( \) / and the gsub pattern / /)
# appear garbled by whitespace tokenization; confirm the intended patterns
# against the original source before reformatting this method.
def parse_part ( part ) parsed_part = JSON . parse ( part ) rescue part if is_i? ( parsed_part ) parsed_part . to_i elsif is_f? ( parsed_part ) parsed_part . to_f elsif parsed_part =~ / \( \) / to_array ( parsed_part ) elsif parsed_part . is_a? ( Array ) parsed_part else parsed_part . gsub ( / / , "" ) end end
Parse a single portion of the query string . String values representing a float or integer are coerced into actual numerical values . Newline characters are removed and single quotes are replaced with double quotes
22,035
# Gets all the parts of one query condition string, returning
# [column_name, montage_operator, parsed_value].
# Fails with QueryError on an unknown operator or undetermined column.
def get_parts(str)
  operator, montage_operator = get_query_operator(str)
  fail QueryError, "Invalid Montage query operator!" unless montage_operator
  column_name = get_column_name(str, operator)
  fail QueryError, "Your query has an undetermined error" unless column_name
  value = parse_part(parse_condition_set(str, operator))
  [column_name, montage_operator, value]
end
Get all the parts of the query string
22,036
# Parses a hash type query: array values become ["$in", value] pairs and keys
# are stringified.
def parse_hash
  query.map do |key, value|
    mapped = value.is_a?(Array) ? ["$in", value] : value
    [key.to_s, mapped]
  end
end
Parse a hash type query
22,037
# Parses a string type query by splitting on a case-insensitive separator
# between word boundaries and converting each condition via get_parts.
# The Montage equals operator is represented by a blank string.
# NOTE(review): the split regex (/ \b \b /i) appears garbled by whitespace
# tokenization — the literal between the word boundaries (per the original
# description, a case-insensitive "and") has been lost; restore it from the
# original source before reformatting this method.
def parse_string query . split ( / \b \b /i ) . map do | part | column_name , operator , value = get_parts ( part ) if operator == "" [ "#{column_name}" , value ] else [ "#{column_name}" , [ "#{operator}" , value ] ] end end end
Parse a string type query . Splits multiple conditions on case insensitive and strings that do not fall within single quotations . Note that the Montage equals operator is supplied as a blank string
22,038
# Takes a paren-wrapped string value and splits it into an array, coercing
# every element to the type of the first element (integer, float, or string
# via TYPE_MAP).
# NOTE(review): the gsub regex (/ \( \) /) appears garbled by whitespace
# tokenization — it likely stripped the parentheses (e.g. /\(|\)/); confirm
# against the original source before reformatting this method.
def to_array ( value ) values = value . gsub ( / \( \) / , "" ) . split ( ',' ) type = [ :is_i? , :is_f? ] . find ( Proc . new { :is_s? } ) { | t | send ( t , values . first ) } values . map { | v | v . send ( TYPE_MAP [ type ] ) } end
Takes a string value and splits it into an array Will coerce all values into the type of the first type
22,039
# Tells a class that its resources may be nested within another named
# resource; the parent is registered at most once.
def belongs_to(parent_resource)
  parent = parent_resource.to_sym
  parent_resources << parent unless parent.in?(parent_resources)
end
Tell a class that it s resources may be nested within another named resource
22,040
# Waits to receive up to max_len bytes (default 1024) from the client socket.
# Retries on IO::WaitReadable; returns '' when the client disconnected.
def receive(max_len = nil)
  max_len ||= 1024
  begin
    data = @socket.recv(max_len)
  rescue IO::WaitReadable
    @logger.logv 'Retrying receive...'
    IO.select([@socket])
    retry
  rescue Exception => e
    # BUG FIX: the original compared e.class (a Class object) to the string
    # 'IOError', which is always unequal, so the intended "ignore closed
    # stream IOError" filter never matched. Compare the class name instead.
    # NOTE(review): rescuing Exception is overly broad but kept for
    # compatibility with existing behavior.
    unless e.class.name == 'IOError' && e.message == 'closed stream'
      @logger.log_exception(e, 'EXCEPTION during receive!')
    end
  end
  if data.nil? || data.empty?
    @logger.log "Client #{@socket.inspect} disconnected!"
    data = ''
  else
    @logger.log "Received #{data.length} bytes"
  end
  return data
end
Wait to receive data from the client
22,041
# Validates that the attribute value is a valid IP address, and that it
# does/does not contain a mask per the :no_mask / :require_mask options.
# Blank values are accepted.
def validate_each(record, attribute, value)
  begin
    unless value.blank?
      IPAddr.new(value)
      if options[:no_mask]
        if value =~ /\//
          record.errors[attribute] << (options[:message] || 'must not contain a mask')
        end
      elsif options[:require_mask]
        unless value =~ /\//
          record.errors[attribute] << (options[:message] || 'must contain a mask')
        end
      end
    end
  rescue IPAddr::InvalidAddressError
    record.errors[attribute] << (options[:message] || 'is not a valid IP address')
  end
end
Validates attributes to determine if the values contain valid IP addresses .
22,042
# Overrides the ActiveResource method to parse only the client part of the
# response body, then flattens each item's attributes and marks the record
# persisted. Responses with no body content are ignored.
def load_attributes_from_response(response)
  chunked = response['Transfer-Encoding'] == 'chunked'
  has_length = !response['Content-Length'].blank? && response['Content-Length'] != "0"
  return unless (chunked || has_length) && !response.body.nil? && response.body.strip.size > 0
  load(self.class.format.decode(response.body)[self.class.element_name])
  if self.respond_to?(:items)
    # Replace each wrapper item with its first attribute value.
    self.items = self.items.map { |item| item.attributes.first[1] }
  end
  @persisted = true
end
override ARes method to parse only the client part
22,043
# Dynamic attribute dispatch: a name ending in '=' sets a value, a name
# ending in '?' queries existence (as a boolean), a call with args or a block
# adds, and a bare name returns (or creates) a nested namespace.
# NOTE(review): both regex literals (/ \w \= / and / \w \? /) reference $1
# afterwards yet contain no capture group as written — the patterns appear
# garbled by whitespace tokenization (likely something like /^(\w+)=$/ and
# /^(\w+)\?$/); restore them from the original source before reformatting.
def method_missing ( meth , * args , & block ) meth_str = meth . to_s if / \w \= / . match ( meth_str ) _set ( $1 , * args , & block ) elsif args . length > 0 || block_given? _add ( meth , * args , & block ) elsif / \w \? / . match ( meth_str ) ! ! _get ( $1 ) else _get_or_create_namespace ( meth ) end end
Deep dup all the values .
22,044
# Creates a default message if none was provided, assembling the available
# object/expected/actual parts; falls back to super when nothing applies.
def default_message
  parts = []
  if value? && name?
    parts << format_message(value.class, "object", value.inspect, "has invalid ##{ name } attribute")
  end
  parts << format_message("expected", expected) if expected?
  parts << format_message("found", actual) if actual?
  parts.empty? ? super : parts.join(', ')
end
Create a default message if none was provided .
22,045
# Returns the JSON payload for a push message, choosing the notification
# builder by type (:normal/:silent/:nosound) and the serializer by env.
def message(text, options = {}, env = :prod, type = :normal)
  data =
    case type
    when :normal then normal_notification(text, options)
    when :silent then silent_notification(text, options)
    when :nosound then nosound_notification(text, options)
    end
  return dev_json(data) if env == :dev
  prod_json(data)
end
Return json payload
22,046
# Subscribes an AMQP queue to an AMQP exchange (and optionally a second
# exchange via options[:exchange2]) without waiting for broker confirmation.
# Received messages are dispatched to the required block via receive, either
# directly or on a fiber pool when one is configured; failures during
# dispatch are acked (when :ack), logged, and tracked.
# Returns false when the broker is unusable, true when the queue is already
# subscribed, the subscription object on success, or false on error.
# NOTE(review): left unreformatted — the begin/rescue nesting and the
# binding/queue reassignment (q = binding) are intricate enough that a
# restyle risks altering behavior.
def subscribe ( queue , exchange = nil , options = { } , & block ) raise ArgumentError , "Must call this method with a block" unless block return false unless usable? return true unless @queues . select { | q | q . name == queue [ :name ] } . empty? to_exchange = if exchange if options [ :exchange2 ] " to exchanges #{exchange[:name]} and #{options[:exchange2][:name]}" else " to exchange #{exchange[:name]}" end end queue_options = queue [ :options ] || { } exchange_options = ( exchange && exchange [ :options ] ) || { } begin logger . info ( "[setup] Subscribing queue #{queue[:name]}#{to_exchange} on broker #{@alias}" ) q = @channel . queue ( queue [ :name ] , queue_options ) @queues << q if exchange x = @channel . __send__ ( exchange [ :type ] , exchange [ :name ] , exchange_options ) binding = q . bind ( x , options [ :key ] ? { :key => options [ :key ] } : { } ) if ( exchange2 = options [ :exchange2 ] ) q . bind ( @channel . __send__ ( exchange2 [ :type ] , exchange2 [ :name ] , exchange2 [ :options ] || { } ) ) end q = binding end q . subscribe ( options [ :ack ] ? { :ack => true } : { } ) do | header , message | begin if ( pool = ( options [ :fiber_pool ] || @options [ :fiber_pool ] ) ) pool . spawn { receive ( queue [ :name ] , header , message , options , & block ) } else receive ( queue [ :name ] , header , message , options , & block ) end rescue StandardError => e header . ack if options [ :ack ] logger . exception ( "Failed setting up to receive message from queue #{queue.inspect} " + "on broker #{@alias}" , e , :trace ) @exception_stats . track ( "receive" , e ) update_non_delivery_stats ( "receive failure" , e ) end end rescue StandardError => e logger . exception ( "Failed subscribing queue #{queue.inspect}#{to_exchange} on broker #{@alias}" , e , :trace ) @exception_stats . track ( "subscribe" , e ) false end end
Subscribe an AMQP queue to an AMQP exchange Do not wait for confirmation from broker that subscription is complete When a message is received acknowledge unserialize and log it as specified If the message is unserialized and it is not of the right type it is dropped after logging an error
22,047
# Unsubscribes from the specified queues, silently ignoring unknown queue
# names. The optional block is called after each unsubscribe (even when the
# unsubscribe itself fails). Always returns true.
def unsubscribe(queue_names, &block)
  unless failed?
    @queues.reject! do |q|
      next false unless queue_names.include?(q.name)
      begin
        logger.info("[stop] Unsubscribing queue #{q.name} on broker #{@alias}")
        q.unsubscribe { block.call if block }
      rescue StandardError => e
        logger.exception("Failed unsubscribing queue #{q.name} on broker #{@alias}", e, :trace)
        @exception_stats.track("unsubscribe", e)
        block.call if block
      end
      true
    end
  end
  true
end
Unsubscribe from the specified queues Silently ignore unknown queues
22,048
# Checks the status of the specified queues, silently ignoring unknown queue
# names. The block receives (name, messages, consumers), or (name, nil, nil)
# on failure. Returns false when not connected, true otherwise.
def queue_status(queue_names, &block)
  return false unless connected?
  @queues.each do |q|
    next unless queue_names.include?(q.name)
    begin
      q.status { |messages, consumers| block.call(q.name, messages, consumers) if block }
    rescue StandardError => e
      logger.exception("Failed checking status of queue #{q.name} on broker #{@alias}", e, :trace)
      @exception_stats.track("queue_status", e)
      block.call(q.name, nil, nil) if block
    end
  end
  true
end
Check status of specified queues Silently ignore unknown queues If a queue whose status is being checked does not exist in the broker this broker connection will fail and become unusable
22,049
# Publishes a message to the AMQP exchange, with optional serialization-aware
# logging (log line content depends on logger level, :no_log, :log_filter and
# :log_data options). Declared exchanges have their cached AMQP resources
# deleted first. Returns true on success, false when not connected or on
# error (errors are logged and tracked).
# NOTE(review): left unreformatted — the interleaved logger-level and option
# conditionals are intricate enough that a restyle risks altering which log
# lines are produced.
def publish ( exchange , packet , message , options = { } ) return false unless connected? begin exchange_options = exchange [ :options ] || { } unless options [ :no_serialize ] log_data = "" unless options [ :no_log ] && logger . level != :debug re = "RE-" if packet . respond_to? ( :tries ) && ! packet . tries . empty? log_filter = options [ :log_filter ] unless logger . level == :debug log_data = "#{re}SEND #{@alias} #{packet.to_s(log_filter, :send_version)}" if logger . level == :debug log_data += ", publish options #{options.inspect}, exchange #{exchange[:name]}, " + "type #{exchange[:type]}, options #{exchange[:options].inspect}" end log_data += ", #{options[:log_data]}" if options [ :log_data ] logger . info ( log_data ) unless log_data . empty? end end delete_amqp_resources ( exchange [ :type ] , exchange [ :name ] ) if exchange_options [ :declare ] @channel . __send__ ( exchange [ :type ] , exchange [ :name ] , exchange_options ) . publish ( message , options ) true rescue StandardError => e logger . exception ( "Failed publishing to exchange #{exchange.inspect} on broker #{@alias}" , e , :trace ) @exception_stats . track ( "publish" , e ) update_non_delivery_stats ( "publish failure" , e ) false end end
Publish message to AMQP exchange
22,050
# Closes the broker connection, setting the final status (:closed or :failed
# per +normal+) and invoking the optional block when done. If the connection
# is already closed/failed, just updates the status. Always returns true.
def close(propagate = true, normal = true, log = true, &block)
  final_status = normal ? :closed : :failed
  if [:closed, :failed].include?(@status)
    @status = final_status
    yield if block_given?
    return true
  end
  begin
    logger.info("[stop] Closed connection to broker #{@alias}") if log
    update_status(final_status) if propagate
    @connection.close do
      @status = final_status
      yield if block_given?
    end
  rescue StandardError => e
    logger.exception("Failed to close broker #{@alias}", e, :trace)
    @exception_stats.track("close", e)
    @status = final_status
    yield if block_given?
  end
  true
end
Close broker connection
22,051
# Connects to the broker at the given address, registers for status updates,
# sets the prefetch value if configured, and installs the returned-message
# handler. On failure the status becomes :failed, stats are updated, and any
# partial connection is closed.
def connect(address, reconnect_interval)
  begin
    logger.info("[setup] Connecting to broker #{@identity}, alias #{@alias}")
    @status = :connecting
    @connection = AMQP.connect(
      :user => @options[:user],
      :pass => @options[:pass],
      :vhost => @options[:vhost],
      :host => address[:host],
      :port => address[:port],
      :ssl => @options[:ssl],
      :identity => @identity,
      :insist => @options[:insist] || false,
      :heartbeat => @options[:heartbeat],
      :reconnect_delay => lambda { rand(reconnect_interval) },
      :reconnect_interval => reconnect_interval
    )
    @channel = MQ.new(@connection)
    @channel.__send__(:connection).connection_status { |status| update_status(status) }
    @channel.prefetch(@options[:prefetch]) if @options[:prefetch]
    @channel.return_message { |header, message| handle_return(header, message) }
  rescue StandardError => e
    @status = :failed
    @failure_stats.update
    logger.exception("Failed connecting to broker #{@alias}", e, :trace)
    @exception_stats.track("connect", e)
    @connection.close if @connection
  end
end
Connect to broker and register for status updates Also set prefetch value if specified and setup for message returns
22,052
# Receives a message: optionally unserializes it, passes it to the callback,
# and optionally acknowledges it. Literal "nil" messages are acked and
# ignored; unserialization failures fall through to the ack-only branch.
# Returns true on success; errors are acked (when :ack), logged and tracked.
def receive(queue, header, message, options, &block)
  begin
    if options[:no_unserialize] || @serializer.nil?
      execute_callback(block, @identity, message, header)
    elsif message == "nil"
      header.ack if options[:ack]
      logger.debug("RECV #{@alias} nil message ignored")
    elsif (packet = unserialize(queue, message, options))
      execute_callback(block, @identity, packet, header)
    elsif options[:ack]
      header.ack
    end
    true
  rescue StandardError => e
    header.ack if options[:ack]
    logger.exception("Failed receiving message from queue #{queue.inspect} on broker #{@alias}", e, :trace)
    @exception_stats.track("receive", e)
    update_non_delivery_stats("receive failure", e)
  end
end
Receive message by optionally unserializing it passing it to the callback and optionally acknowledging it
22,053
# Unserializes a message, checks that its packet class is an acceptable type
# (a key of the options hash), logs it, and returns the packet (or nil when
# the type is invalid or unserialization fails).
# NOTE(review): several regex literals here (the e.class.name.sub pattern and
# the e.to_s match in the rescue) appear garbled by whitespace tokenization
# (/ /) — their original contents have been lost; restore them from the
# original source before reformatting this method.
def unserialize ( queue , message , options = { } ) begin received_at = Time . now . to_f packet = @serializer . method ( :load ) . arity . abs > 1 ? @serializer . load ( message , queue ) : @serializer . load ( message ) if options . key? ( packet . class ) unless options [ :no_log ] && logger . level != :debug re = "RE-" if packet . respond_to? ( :tries ) && ! packet . tries . empty? packet . received_at = received_at if packet . respond_to? ( :received_at ) log_filter = options [ packet . class ] unless logger . level == :debug logger . info ( "#{re}RECV #{@alias} #{packet.to_s(log_filter, :recv_version)} #{options[:log_data]}" ) end packet else category = options [ :category ] + " " if options [ :category ] logger . error ( "Received invalid #{category}packet type from queue #{queue} on broker #{@alias}: #{packet.class}\n" + caller . join ( "\n" ) ) nil end rescue StandardError => e trace , track = case e . class . name . sub ( / / , "" ) when "SerializationError" then [ :caller , e . to_s !~ / / ] when "ConnectivityFailure" then [ :caller , false ] else [ :trace , true ] end logger . exception ( "Failed unserializing message from queue #{queue.inspect} on broker #{@alias}" , e , trace ) @exception_stats . track ( "receive" , e ) if track @options [ :exception_on_receive_callback ] . call ( message , e ) if @options [ :exception_on_receive_callback ] update_non_delivery_stats ( "receive failure" , e ) nil end end
Unserialize message check that it is an acceptable type and log it
22,054
# Handles a message returned by the broker because it could not be delivered:
# logs the destination and reason, then invokes the configured
# :return_message_callback if any. Always returns true.
def handle_return(header, message)
  begin
    to = header.exchange && !header.exchange.empty? ? header.exchange : header.routing_key
    reason = header.reply_text
    callback = @options[:return_message_callback]
    # Log at debug when a callback will handle it, info otherwise.
    logger.__send__(callback ? :debug : :info, "RETURN #{@alias} for #{to} because #{reason}")
    callback.call(@identity, to, reason, message) if callback
  rescue Exception => e
    logger.exception("Failed return #{header.inspect} of message from broker #{@alias}", e, :trace)
    @exception_stats.track("return", e)
  end
  true
end
Handle message returned by broker because it could not deliver it
22,055
# Executes the packet receive callback; kept as a separate method to ease
# instrumentation. Two-arity callbacks receive only the first two args.
def execute_callback(callback, *args)
  return unless callback
  callback.arity == 2 ? callback.call(*args[0, 2]) : callback.call(*args)
end
Execute packet receive callback make it a separate method to ease instrumentation
22,056
# Verifies the response is a successful HTTP response; otherwise raises
# Crapi::BadHttpResponseError with the status line and any body.
def ensure_success!(response)
  return if response.is_a?(Net::HTTPSuccess)
  details = "#{response.code} - #{response.message}"
  details = "#{details}\n#{response.body}" if response.body.present?
  raise Crapi::BadHttpResponseError, details
end
Verifies the given value is that of a successful HTTP response .
22,057
# Serializes the given payload per the requested content type. Non-Hash
# payloads pass through untouched; Hashes become JSON, form-encoded, or
# plain strings.
def format_payload(payload, as: JSON_CONTENT_TYPE)
  return payload unless payload.is_a?(Hash)
  if as == JSON_CONTENT_TYPE
    JSON.generate(payload.as_json)
  elsif as == FORM_CONTENT_TYPE
    payload.to_query
  else
    payload.to_s
  end
end
Serializes the given payload per the requested content - type .
22,058
# Parses the given response body as its claimed content type: JSON responses
# are decoded (with symbol keys), everything else is returned raw.
def parse_response(response)
  if response.content_type == JSON_CONTENT_TYPE
    JSON.parse(response.body, quirks_mode: true, symbolize_names: true)
  else
    response.body
  end
end
Parses the given response as its claimed content - type .
22,059
# Prints this classifier's content to the pretty-print buffer, skipping
# empty attributes.
def pretty_print(q)
  map = pretty_print_attribute_hash.delete_if { |_k, v| v.nil_or_empty? }
  lines = map.map { |label, value| " #{label}=>#{format_print_value(value)}" }
  q.text("#{qp} structure:\n#{lines.join(",\n")}")
end
Prints this classifier's content to the log .
22,060
# Performs a threaded iteration across all connections in the climber's
# connection pool, yielding each element to the block. Waits for all threads
# and returns nil. Not usable for Enumerable enumeration (break inside a
# thread raises LocalJumpError).
def each_threaded(&block)
  threads = climber.connection_pool.connections.map do |connection|
    Thread.new { connection.send(self.class.enumerator_method, &block) }
  end
  threads.each(&:join)
  return
end
Perform a threaded iteration across all connections in the climber s connection pool . This method cannot be used for enumerable enumeration because a break called within one of the threads will cause a LocalJumpError . This could be fixed but expected behavior on break varies as to whether or not to wait for all threads before returning a result . However still useful for operations that always visit all elements . An instance of the element is yielded with each iteration .
22,061
# Returns a temporary URL to the object that expires at the given timestamp
# (default: one hour from now).
def temporary_url(expires_at = Time.now + 3600)
  url = URI.escape("#{protocol}#{host(true)}/#{path_prefix}#{key}")
  signature = Signature.generate_temporary_url_signature(
    :bucket => name,
    :resource => key,
    :expires_at => expires_at,
    :secret_access_key => secret_access_key
  )
  "#{url}?SNDAAccessKeyId=#{access_key_id}&Expires=#{expires_at.to_i.to_s}&Signature=#{signature}"
end
Returns a temporary url to the object that expires on the timestamp given . Defaults to a 1 hour ( 3600 second ) expire time .
22,062
# AuditorAPI inversion-of-control hook: validates a configuration hash.
# Requires a non-empty 'file_name' and a 'standard_stream' of
# 'stdout', 'stderr' or 'none'.
def configuration_is_valid?(configuration)
  return false unless %w[file_name standard_stream].all? { |key| configuration.include?(key) }
  return false if configuration['file_name'].empty?
  %w[stdout stderr none].include?(configuration['standard_stream'])
end
inversion of control method required by the AuditorAPI to validate the configuration
22,063
# Determines whether this group belongs to +group+; non-AccessGroup
# arguments are resolved via AccessGroup.get first.
def belongs_to?(group)
  group = AccessGroup.get(group) unless group.is_a?(::Incline::AccessGroup)
  group ? safe_belongs_to?(group) : false
end
Determines if this group belongs to the specified group .
22,064
# Returns every group this group provides effective (transitive)
# membership to, including itself, sorted by name.
def effective_groups
  result = [self]
  memberships.each do |membership|
    next if result.include?(membership)
    membership.effective_groups.each do |grp|
      result << grp unless result.include?(grp)
    end
  end
  result.sort_by(&:name)
end
Gets a list of all the groups this group provides effective membership to .
22,065
# Replaces this group's members with the users matching the given IDs.
# Accepts nil, a scalar, or an array; blank entries are discarded.
def user_ids=(values)
  list = values.nil? ? [] : values
  list = [list] unless list.is_a?(::Array)
  ids = list.reject(&:blank?).map(&:to_i)
  self.users = Incline::User.where(id: ids).to_a
end
Sets the user IDs for the members of this group .
22,066
# Boots the Konfig system: loads the settings directory, requires the
# built-in and user adapters, then instantiates and runs every adapter.
def load_settings(path)
  Konfig.load_directory(path)
  require_all File.join(File.dirname(__FILE__), 'adapters', '*.rb')
  require_all File.join(path, 'adapters', '*_adapter.rb')
  Adapter.create_child_instances(Konfig.default_store.data)
  Adapter.send_to_child_instances :adapt
end
Set up the Konfig system
22,067
# Encrypts +plain_text+ with AES-256-CBC, prepending a random IV and
# returning the Base64-encoded result. Password/salt default to the
# Hoodie crypto configuration.
def encrypt(plain_text, password = nil, salt = nil)
  password = Hoodie.crypto.password if password.nil?
  salt = Hoodie.crypto.salt if salt.nil?
  cipher = new_cipher(:encrypt, password, salt)
  cipher.iv = iv = cipher.random_iv
  encrypted = cipher.update(plain_text) << cipher.final
  Base64.encode64(combine_iv_ciphertext(iv, encrypted))
end
Encrypt the given string using the AES - 256 - CBC algorithm .
22,068
# Generates the hiera data file for this environment from the ERB
# template of every provision step.
def generate_hiera_template
  ssh_key = Bebox::Project.public_ssh_key_from_file(self.project_root, self.name)
  project_name = Bebox::Project.shortname_from_file(self.project_root)
  Bebox::PROVISION_STEPS.each do |step|
    step_dir = Bebox::Provision.step_name(step)
    locals = { step_dir: step_dir, ssh_key: ssh_key, project_name: project_name }
    generate_file_from_template(
      "#{templates_path}/puppet/#{step}/hiera/data/environment.yaml.erb",
      "#{self.project_root}/puppet/steps/#{step_dir}/hiera/data/#{self.name}.yaml",
      locals
    )
  end
end
Generate the hiera data template for the environment
22,069
# Registers +path+ as a pattern whose occurrences in the output are
# replaced by +name+ (defaults to the path's basename).
def replace_path(path, name = nil)
  replace_pattern(path, name || File.basename(path))
end
Define a path whose occurrences in the output should be replaced by either its basename or a given placeholder .
22,070
# Executes +command_line+ with the configured environment variables,
# returning [combined stdout+stderr, Process::Status].
def run(command_line)
  require 'open3'
  env = environment_vars.each_with_object({}) { |(k, v), acc| acc[k.to_s] = v.to_s }
  Open3.capture2e(env, command_line.to_s)
end
Run the command .
22,071
# Builds the full command line: executable followed by head, default
# and tail arguments with empties removed.
def command_line(head_arguments = '', tail_arguments = '')
  parts = [head_arguments, default_args, tail_arguments].flatten.compact
  args = parts.select { |arg| arg.length > 0 }.join(' ')
  "#{executable} #{args}"
end
Merges the given with the configured arguments and returns the command line to execute .
22,072
# Applies every configured replacement pattern to +output+ in turn.
def apply_replacements(output)
  replace_patterns.inject(output) { |text, pattern| pattern.replace(text) }
end
Apply the configured replacements to the output .
22,073
# Fetches a translation for +key+/+locale+ from storage, falling back
# to opts[:default] and finally the computed default.
def get(key, locale, opts = {})
  scoped_keys = resolve_scopes(key)
  candidate_locales = resolve_locales(locale)
  read_first(candidate_locales, scoped_keys) ||
    opts[:default] ||
    default(candidate_locales, scoped_keys, opts)
end
Retrieves translation from the storage or return default value .
22,074
# Writes the flattened translations to storage, skipping keys that
# already exist.
def put(translations)
  Scope.flatten(translations).each do |key, value|
    next if storage.exist?(key)
    storage.write(key, value)
  end
end
Writes translations to the storage .
22,075
# Builds the SOAP request body: delegates to a custom build_<method>
# builder when one exists (without calling .target! on it), otherwise
# renders the default SOAP envelope.
def build_request(method, options)
  builder = underscore("build_#{method}")
  if respond_to?(builder)
    send(builder, options)
  else
    soap_envelope(options).target!
  end
end
Same as the original, but doesn't call .target! on a custom builder.
22,076
# Complex multiplication: handles InternalComplex/Complex operands,
# scalar ("generic") operands, and falls back to coercion otherwise.
def *(other)
  if other.is_a?(InternalComplex) || other.is_a?(Complex)
    InternalComplex.new(@real * other.real - @imag * other.imag,
                        @real * other.imag + @imag * other.real)
  elsif InternalComplex.generic?(other)
    InternalComplex.new(@real * other, @imag * other)
  else
    x, y = other.coerce(self)
    x * y
  end
end
Multiply complex values
22,077
# Complex division: multiplies by the conjugate over the squared
# magnitude for complex operands; divides componentwise for scalars;
# falls back to coercion otherwise.
def /(other)
  if other.is_a?(InternalComplex) || other.is_a?(Complex)
    self * other.conj / other.abs2
  elsif InternalComplex.generic?(other)
    InternalComplex.new(@real / other, @imag / other)
  else
    x, y = other.coerce(self)
    x / y
  end
end
Divide complex values
22,078
# Assigns +value+ (simplified first) to this complex lvalue, writing the
# real and imaginary components separately. Returns the simplified value.
def assign(value)
  value = value.simplify
  # Prefer component-wise #assign when the target component supports
  # lvalue semantics; otherwise fall back to plain attribute assignment.
  if @value.real.respond_to? :assign
    @value.real.assign value.get.real
  else
    @value.real = value.get.real
  end
  if @value.imag.respond_to? :assign
    @value.imag.assign value.get.imag
  else
    @value.imag = value.get.imag
  end
  value
end
Constructor for native complex number
22,079
# Fast extraction of the real components of a complex array.
# Falls back to the aliased original implementation for OBJECT element
# types, unbound Variables, or inside a lazy-evaluation context.
def real_with_decompose
  if typecode == OBJECT or is_a?(Variable) or Thread.current[:lazy]
    real_without_decompose
  elsif typecode < COMPLEX_
    # Component 0 of the decomposed representation holds the real part.
    decompose 0
  else
    # Non-complex arrays are their own real part.
    self
  end
end
Fast extraction for real values of complex array
22,080
# Assigns +value+ to the real components of a complex array.
def real=(value)
  if typecode < COMPLEX_
    # Write through to component 0 (the real part) of the decomposition.
    decompose(0)[] = value
  elsif typecode == OBJECT
    # Object arrays: lazily rebuild each element from the new real part
    # and the existing imaginary part.
    self[] = Hornetseye::lazy do
      value + imag * Complex::I
    end
  else
    # Non-complex arrays: the value is the whole (real) content.
    self[] = value
  end
end
Assignment for real values of complex array
22,081
# Fast extraction of the imaginary components of a complex array.
# Falls back to the aliased original implementation for OBJECT element
# types, unbound Variables, or inside a lazy-evaluation context.
def imag_with_decompose
  if typecode == OBJECT or is_a?(Variable) or Thread.current[:lazy]
    imag_without_decompose
  elsif typecode < COMPLEX_
    # Component 1 of the decomposed representation holds the imaginary part.
    decompose 1
  else
    # Real-valued arrays have an all-zero imaginary part.
    Hornetseye::lazy(*shape) { typecode.new(0) }
  end
end
Fast extraction of imaginary values of complex array
22,082
# Sets BALANCER_PROFILE to +value+ after verifying the corresponding
# profile file exists; aborts the process otherwise.
def set_profile(value)
  path = "#{root}/.balancer/profiles/#{value}.yml"
  if File.exist?(path)
    ENV['BALANCER_PROFILE'] = value
  else
    puts "The profile file #{path} does not exist. Exiting.".colorize(:red)
    exit 1
  end
end
Only set the BALANCER_PROFILE if not set already at the CLI . CLI takes highest precedence .
22,083
def to_s actual = @inicio cadena = "|" while ! actual . nil? cadena << actual [ :valor ] . to_s if ! actual [ :sig ] . nil? cadena << ", " end actual = actual [ :sig ] end cadena << "|" return cadena end
Initialize the list's inicio and final pointers (belongs to the constructor). Method that prints the list formatted as |v1, v2, ...|.
22,084
# Inserts +val+ at the head of the doubly linked list, updating the
# @inicio (and, for an empty list, @final) pointers.
def insertar_inicio(val)
  nodo = Struct::Nodo.new(nil, val, @inicio)
  if @inicio.nil?
    @inicio = nodo
    @final = nodo
  else
    @inicio[:ant] = nodo
    @inicio = nodo
  end
end
Metodo que nos permite insertar algo al inicio de la lista
22,085
# Appends +val+ at the tail of the doubly linked list, updating the
# @final (and, for an empty list, @inicio) pointers.
def insertar_final(val)
  if @final.nil?
    @final = @inicio = Struct::Nodo.new(nil, val, nil)
  else
    nodo = Struct::Nodo.new(@final, val, nil)
    @final[:sig] = nodo
    @final = nodo
  end
end
Metodo que nos permite insertar algo al final de la lista
22,086
# Returns the number of elements in the list.
#
# Fix: the original left the counter unassigned when the list was empty
# and therefore returned nil; an empty list now correctly returns 0
# (backward-compatible — callers doing arithmetic on the result would
# previously have crashed on nil).
def tamano()
  count = 0
  node = @inicio
  until node.nil?
    count += 1
    node = node[:sig]
  end
  count
end
Metodo que nos devuelve la cantidad de elementos en la lista
22,087
# Returns the value stored at zero-based position +pos+.
# Raises RuntimeError when the list is empty or +pos+ is out of range.
def posicion(pos)
  raise RuntimeError, "La lista esta vacia" if @inicio.nil?
  raise RuntimeError, "La posicion no es correcta" if pos < 0 || pos > tamano - 1
  node = @inicio
  pos.times { node = node[:sig] }
  node[:valor]
end
Metodo que devuelve lo contenido en una posicion de la lista
22,088
# Bubble-sorts the list in place by swapping node values (links are
# left untouched).
#
# Fix: the original dereferenced @inicio[:sig] unconditionally and so
# raised NoMethodError on an empty list; an empty list is now a no-op.
def ordenar!
  return if @inicio.nil?
  swapped = true
  while swapped
    swapped = false
    node = @inicio
    succ = @inicio[:sig]
    while succ != nil
      if node[:valor] > succ[:valor]
        node[:valor], succ[:valor] = succ[:valor], node[:valor]
        swapped = true
      end
      node = succ
      succ = succ[:sig]
    end
  end
end
Metodo que nos ordenada la lista segun los criterios de la APA
22,089
# Renders a column-ordering link for a filtered collection.
# Heavily modified from SearchLogic.
#
# filter            - current filter object; nil renders the plain label
# fields            - one or more field names the link orders by
# label:            - link text (defaults to first field, titleized)
# order_link_class: - class used to build the per-field order links
def order_by(filter, *fields, label: fields.first.to_s.titleize, order_link_class: default_order_link_class)
  return label if filter.nil?
  exprs = Hash[filter.order_expressions]
  # For each field: toggle an already-active order expression (invert
  # asc/desc), otherwise start ascending.
  order_links = fields.map do |field|
    if exprs[field]
      order_link_class.new exprs[field].invert, active: true
    else
      order_link_class.new Sequel.asc(field)
    end
  end
  # Carry existing filter parameters through, replacing only :order.
  filter_params = filter.filter_parameters.dup
  filter_params[:order] = unify_array(order_links.map(&:name))
  # Nest the parameters under the filter model's param key.
  params_hash = { filter.class::Model.model_name.param_key.to_sym => filter_params }
  # The first order link supplies the direction icon and the CSS class.
  link_text = raw([label, order_links.first.andand.icon].compact.join(' '))
  link_to link_text, params_hash, { class: order_links.first.andand.css_class }
end
Heavily modified from SearchLogic .
22,090
# Issues an unconditional DELETE (If-Match: *) for every resource in
# +collection+ and returns the collection size.
def delete(collection)
  each_resource_with_edit_url(collection) do |_resource, edit_url|
    connection.delete(edit_url, 'If-Match' => "*")
  end
  collection.size
end
Constructs and executes DELETE statement for given query
22,091
# Sends +fields+ to BASE_URL + path via the given HTTP method and
# returns the parsed JSON response body. Raises UnexpectedHttpResponse
# on any non-success status.
def send_request(method, path, fields = {})
  uri = URI(BASE_URL + path)
  response = send_logged_request(uri, method, request_data(fields))
  raise UnexpectedHttpResponse, response unless response.is_a?(Net::HTTPSuccess)
  JSON.parse(response.body)
end
Accepts hash of fields to send . Returns parsed response body always a hash .
22,092
# Looks up a user by +uri+: returns an Aspire::Object::User built from
# the stored JSON data, or nil when the URI is unknown.
def [](uri, factory = nil)
  data = store[uri]
  return nil if data.nil?
  Aspire::Object::User.new(uri, factory, json: data)
end
Looks up a user by URI, returning an Aspire::Object::User built from the stored JSON data, or nil when the URI is not in the store.
22,093
# Populates the store from an "All User Profiles" report CSV file,
# keyed by the profile URI in column 3.
def load(filename = nil)
  # NOTE(review): this delimiter regex contains literal spaces around the
  # \s atoms as written — confirm whether /\s\s/ (two whitespace chars,
  # i.e. a double-space separator) was intended.
  delim = / \s \s /
  enum = Aspire::Enumerator::ReportEnumerator.new(filename).enumerator
  enum.each do |row|
    uri = row[3]
    # Build the JSON-API-shaped fields, then add the non-API extras.
    data = csv_to_json_api(row, email_delim: delim, role_delim: delim)
    csv_to_json_other(row, data)
    store[uri] = data
  end
end
Populates the store from an All User Profiles report CSV file
22,094
# Proxies missing methods to the underlying store; raises NoMethodError
# (via super) when the store does not respond either.
def method_missing(method, *args, &block)
  super unless store.respond_to?(method)
  store.public_send(method, *args, &block)
end

# Fix: method_missing proxies without a matching respond_to_missing?
# override, so respond_to? lied about the proxied methods. Keep the two
# in sync, as the Ruby Object documentation recommends.
def respond_to_missing?(method, include_private = false)
  store.respond_to?(method, include_private) || super
end
Proxies missing methods to the store
22,095
# Adds the CSV fields that mirror the Aspire user-profile JSON API
# fields to +data+ and returns it. Email and role cells are split on
# the given delimiters; nil cells yield empty lists.
def csv_to_json_api(row, data = {}, email_delim: nil, role_delim: nil)
  split_cell = ->(cell, delim) { (cell || '').split(delim) }
  data['email'] = split_cell.call(row[4], email_delim)
  data['firstName'] = row[0]
  data['role'] = split_cell.call(row[7], role_delim)
  data['surname'] = row[1]
  data['uri'] = row[3]
  data
end
Adds CSV fields which mirror the Aspire user profile JSON API fields
22,096
# Adds the CSV fields that are not part of the Aspire user-profile JSON
# API (job role, last login, display name, visibility) to +data+ and
# returns it.
def csv_to_json_other(row, data = {})
  data.merge!(
    'jobRole'    => row[5] || '',
    'lastLogin'  => row[8],
    'name'       => row[2] || '',
    'visibility' => row[6] || ''
  )
end
Adds CSV fields which aren t part of the Aspire user profile JSON API
22,097
# Generates and assigns a cluster-unique document id, if none is set.
#
# Uses two bucket counters scoped by design document and CLUSTER_ID: an
# incrementing "count" plus an "overflow" generation number bumped
# whenever the counter resets (count == 0) or is unknown. The id is
# produced by the class's __class_id_generator__ proc from
# (overflow, count); on collision the overflow is bumped and a fresh id
# generated until an unused key is found.
def generate_id
  if self.id.nil?
    # Cached overflow, or read the persisted one (:quiet => no error when missing).
    overflow = self.class.__overflow__ ||= self.class.bucket.get("#{self.class.design_document}:#{CLUSTER_ID}:overflow", :quiet => true)
    count = self.class.bucket.incr("#{self.class.design_document}:#{CLUSTER_ID}:count", :create => true)
    if count == 0 || overflow.nil?
      # New counter cycle: advance and persist the overflow generation.
      overflow ||= 0
      overflow += 1
      self.class.bucket.set("#{self.class.design_document}:#{CLUSTER_ID}:overflow", overflow)
      self.class.__overflow__ = overflow
    end
    self.id = self.class.__class_id_generator__.call(overflow, count)
    # Collision loop: the generated key already exists, so persist a
    # bumped overflow and retry with a fresh count.
    while self.class.bucket.get(self.id, :quiet => true).present?
      self.class.bucket.set("#{self.class.design_document}:#{CLUSTER_ID}:overflow", overflow + 1)
      count = self.class.bucket.incr("#{self.class.design_document}:#{CLUSTER_ID}:count")
      if self.class.__overflow__ == overflow
        # Invalidate the cached overflow so the next call re-reads it.
        self.class.__overflow__ = nil
      end
      self.id = self.class.__class_id_generator__.call(overflow + 1, count)
    end
  end
end
Cluster ID number instance method
22,098
# Deep-merges option hashes in precedence order:
# defaults < per-template configuration < custom options.
def merge_template_options(default_options, template_key, custom_options = {})
  template_options = configuration.template_options.fetch(template_key, {})
  merged = Wingtips::HashUtils.deep_merge(default_options, template_options)
  Wingtips::HashUtils.deep_merge(merged, custom_options)
end
merge order is = defaults template custom
22,099
# Registers a new cache instance under +cache+ using the named caching
# +strategy+. Both arguments may be strings or symbols.
#
# Fix: the existence check previously looked up the raw +strategy+
# argument while the registry lookup used the normalized symbol, so a
# string strategy name could fail the check even though its symbol was
# registered. Both now use the normalized symbol key.
def add(cache, strategy)
  cache_key = cache.to_sym
  strategy_key = strategy.to_sym
  unless caching_strategies.key?(strategy_key)
    fail Exceptions::UnknownCachingStrategy,
         "Unknown caching strategy \":#{strategy}\" given. Did you register it in advance?"
  end
  caches[cache_key] = caching_strategies[strategy_key].new
  caches[cache_key]
end
Add a new cache