idx
int64
0
24.9k
question
stringlengths
68
4.14k
target
stringlengths
9
749
2,900
# Create a write concern object for the provided options.
#
# Returns the options unchanged when they are already a write concern
# instance, nil when options is falsy, and otherwise builds an
# Unacknowledged or Acknowledged concern from the validated options.
def get(options)
  return options if options.is_a?(Unacknowledged) || options.is_a?(Acknowledged)
  if options
    validate!(options)
    if unacknowledged?(options)
      Unacknowledged.new(options)
    else
      Acknowledged.new(options)
    end
  end
end
Create a write concern object for the provided options .
2,901
# Execute a command on the database.
#
# Uses the session's transaction read preference when in a transaction,
# otherwise the :read option or the primary server selector.
def command(operation, opts = {})
  txn_read_pref =
    if opts[:session] && opts[:session].in_transaction?
      opts[:session].txn_read_preference
    else
      nil
    end
  txn_read_pref ||= opts[:read] || ServerSelector::PRIMARY
  Lint.validate_underscore_read_preference(txn_read_pref)
  preference = ServerSelector.get(txn_read_pref)

  client.send(:with_session, opts) do |session|
    read_with_retry(session, preference) do |server|
      Operation::Command.new({
        :selector => operation.dup,
        :db_name => name,
        :read => preference,
        :session => session
      }).execute(server)
    end
  end
end
Execute a command on the database .
2,902
# Drop the database and all its associated information.
def drop(options = {})
  operation = { :dropDatabase => 1 }
  client.send(:with_session, options) do |session|
    Operation::DropDatabase.new({
      selector: operation,
      db_name: name,
      write_concern: write_concern,
      session: session
    }).execute(next_primary)
  end
end
Drop the database and all its associated information .
2,903
# Read exactly `length` bytes from the socket, looping until the full
# amount has been received. Raises IOError when the socket yields no
# data (unless zero bytes were requested).
def read(length)
  handle_errors do
    data = read_from_socket(length)
    raise IOError unless data.length > 0 || length == 0
    while data.length < length
      chunk = read_from_socket(length - data.length)
      raise IOError unless chunk.length > 0 || length == 0
      data << chunk
    end
    data
  end
end
Create the new socket for the provided family - ipv4, ipv6 or unix.
2,904
# End this session. Aborts any transaction still in progress (Mongo
# errors from the abort are swallowed) and checks the server session
# back into the pool. The server session reference is always cleared.
def end_session
  if !ended? && @client
    if within_states?(TRANSACTION_IN_PROGRESS_STATE)
      begin
        abort_transaction
      rescue Mongo::Error
      end
    end
    @client.cluster.session_pool.checkin(@server_session)
  end
ensure
  @server_session = nil
end
End this session .
2,905
# Add the transaction number to a command document if applicable.
# Returns the (possibly mutated) command document.
def add_txn_num!(command)
  command.tap do |doc|
    doc[:txnNumber] = BSON::Int64.new(@server_session.txn_num) if in_transaction?
  end
end
Add the transaction number to a command document if applicable .
2,906
# Add the transaction options to a command document if applicable:
# read preference (for reads), read concern (when starting the
# transaction) and write concern (when ending it).
def add_txn_opts!(command, read)
  command.tap do |c|
    # Combine the read preference from the transaction options.
    if read && txn_read_pref = txn_read_preference
      Mongo::Lint.validate_underscore_read_preference(txn_read_pref)
      txn_read_pref = txn_read_pref.dup
      # Convert the snake_case mode to camelCase for the server.
      # FIX: the regex must be /_\w/ so that `match` is e.g. "_p" and
      # match[1] (string index) is the letter to upcase; the previous
      # /\w/ produced one-character matches where match[1] is nil.
      txn_read_pref[:mode] = txn_read_pref[:mode].to_s.gsub(/_\w/) { |match| match[1].upcase }
      Mongo::Lint.validate_camel_case_read_preference(txn_read_pref)
      c['$readPreference'] = txn_read_pref
    end

    # Combine the read concern when the transaction is starting.
    if starting_transaction?
      if rc = c[:readConcern]
        rc = rc.dup
        rc.delete(:level)
      end
      if txn_read_concern
        if rc
          rc.update(txn_read_concern)
        else
          rc = txn_read_concern.dup
        end
      end
      if rc.nil? || rc.empty?
        c.delete(:readConcern)
      else
        c[:readConcern] = rc
      end
    end

    # The server expects the read concern level as a string.
    if c[:readConcern] && c[:readConcern][:level]
      c[:readConcern][:level] = c[:readConcern][:level].to_s
    end

    # Combine the write concern when ending the transaction.
    if c[:abortTransaction] || c[:commitTransaction]
      if @already_committed
        wc = BSON::Document.new(c[:writeConcern] || txn_write_concern || {})
        wc.merge!(w: :majority)
        wc[:wtimeout] ||= 10000
        c[:writeConcern] = wc
      elsif txn_write_concern
        c[:writeConcern] ||= txn_write_concern
      end
    end

    # The server expects the :w value as a string when it is a symbol.
    if c[:writeConcern] && c[:writeConcern][:w] && c[:writeConcern][:w].is_a?(Symbol)
      c[:writeConcern][:w] = c[:writeConcern][:w].to_s
    end
  end
end
Add the transactions options if applicable .
2,907
# Ensure that the read preference of a command is primary while in a
# transaction; raises InvalidTransactionOperation otherwise.
def validate_read_preference!(command)
  return unless in_transaction? && non_primary_read_preference_mode?(command)
  raise Mongo::Error::InvalidTransactionOperation.new(
    Mongo::Error::InvalidTransactionOperation::INVALID_READ_PREFERENCE)
end
Ensure that the read preference of a command is primary.
2,908
# Places subsequent operations in this session into a new transaction.
# Raises when a transaction is already active or when the effective
# write concern is unacknowledged.
def start_transaction(options = nil)
  Lint.validate_read_concern_option(options[:read_concern]) if options
  check_if_ended!

  if within_states?(STARTING_TRANSACTION_STATE, TRANSACTION_IN_PROGRESS_STATE)
    raise Mongo::Error::InvalidTransactionOperation.new(
      Mongo::Error::InvalidTransactionOperation::TRANSACTION_ALREADY_IN_PROGRESS)
  end

  next_txn_num
  @txn_options = options || @options[:default_transaction_options] || {}

  if txn_write_concern && WriteConcern.send(:unacknowledged?, txn_write_concern)
    raise Mongo::Error::InvalidTransactionOperation.new(
      Mongo::Error::InvalidTransactionOperation::UNACKNOWLEDGED_WRITE_CONCERN)
  end

  @state = STARTING_TRANSACTION_STATE
  @already_committed = false
end
Places subsequent operations in this session into a new transaction .
2,909
# Commit the currently active transaction on the session.
#
# An empty (just-started) transaction skips the server round trip. On a
# retried commit the write concern is upgraded to w:majority with a
# wtimeout. Network and retryable failures are labeled with
# UnknownTransactionCommitResult before being re-raised.
def commit_transaction(options = nil)
  check_if_ended!
  check_if_no_transaction!

  if within_states?(TRANSACTION_ABORTED_STATE)
    raise Mongo::Error::InvalidTransactionOperation.new(
      Mongo::Error::InvalidTransactionOperation.cannot_call_after_msg(
        :abortTransaction, :commitTransaction))
  end

  options ||= {}

  begin
    # A re-commit after a successful commit replays the transaction state.
    if within_states?(TRANSACTION_COMMITTED_STATE)
      @state = @last_commit_skipped ? STARTING_TRANSACTION_STATE : TRANSACTION_IN_PROGRESS_STATE
      @already_committed = true
    end

    if starting_transaction?
      @last_commit_skipped = true
    else
      @last_commit_skipped = false

      write_concern = options[:write_concern] || txn_options[:write_concern]
      if write_concern && !write_concern.is_a?(WriteConcern::Base)
        write_concern = WriteConcern.get(write_concern)
      end

      write_with_retry(self, write_concern, true) do |server, txn_num, is_retry|
        if is_retry
          # Retried commits must use majority write concern with wtimeout.
          if write_concern
            wco = write_concern.options.merge(w: :majority)
            wco[:wtimeout] ||= 10000
            write_concern = WriteConcern.get(wco)
          else
            write_concern = WriteConcern.get(w: :majority, wtimeout: 10000)
          end
        end
        Operation::Command.new(
          selector: { commitTransaction: 1 },
          db_name: 'admin',
          session: self,
          txn_num: txn_num,
          write_concern: write_concern,
        ).execute(server)
      end
    end
  rescue Mongo::Error::NoServerAvailable, Mongo::Error::SocketError => e
    e.send(:add_label, Mongo::Error::UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)
    raise e
  rescue Mongo::Error::OperationFailure => e
    err_doc = e.instance_variable_get(:@result).send(:first_document)
    if e.write_retryable? ||
       (err_doc['writeConcernError'] &&
        !UNLABELED_WRITE_CONCERN_CODES.include?(err_doc['writeConcernError']['code']))
      e.send(:add_label, Mongo::Error::UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)
    end
    raise e
  ensure
    @state = TRANSACTION_COMMITTED_STATE
  end
end
Commit the currently active transaction on the session .
2,910
# Abort the currently active transaction without making any changes to
# the database. The session always transitions to the aborted state;
# Mongo errors from the server abort are swallowed, other exceptions
# are re-raised.
def abort_transaction
  check_if_ended!
  check_if_no_transaction!

  if within_states?(TRANSACTION_COMMITTED_STATE)
    raise Mongo::Error::InvalidTransactionOperation.new(
      Mongo::Error::InvalidTransactionOperation.cannot_call_after_msg(
        :commitTransaction, :abortTransaction))
  end

  if within_states?(TRANSACTION_ABORTED_STATE)
    raise Mongo::Error::InvalidTransactionOperation.new(
      Mongo::Error::InvalidTransactionOperation.cannot_call_twice_msg(:abortTransaction))
  end

  begin
    unless starting_transaction?
      write_with_retry(self, txn_options[:write_concern], true) do |server, txn_num|
        Operation::Command.new(
          selector: { abortTransaction: 1 },
          db_name: 'admin',
          session: self,
          txn_num: txn_num
        ).execute(server)
      end
    end
    @state = TRANSACTION_ABORTED_STATE
  rescue Mongo::Error::InvalidTransactionOperation
    raise
  rescue Mongo::Error
    @state = TRANSACTION_ABORTED_STATE
  rescue Exception
    @state = TRANSACTION_ABORTED_STATE
    raise
  end
end
Abort the currently active transaction without making any changes to the database .
2,911
# Executes the provided block in a transaction, retrying as necessary.
#
# Transient transaction errors restart the whole transaction; unknown
# commit results retry the commit with a majority write concern. Both
# retry loops are bounded by a 120-second deadline. If the callback
# leaves a transaction dangling it is aborted in the ensure clause.
def with_transaction(options = nil)
  deadline = Time.now + 120
  transaction_in_progress = false
  loop do
    commit_options = {}
    commit_options[:write_concern] = options[:write_concern] if options
    start_transaction(options)
    transaction_in_progress = true
    begin
      rv = yield self
    rescue Exception => e
      if within_states?(STARTING_TRANSACTION_STATE, TRANSACTION_IN_PROGRESS_STATE)
        abort_transaction
        transaction_in_progress = false
      end
      if Time.now >= deadline
        transaction_in_progress = false
        raise
      end
      if e.is_a?(Mongo::Error) && e.label?(Mongo::Error::TRANSIENT_TRANSACTION_ERROR_LABEL)
        next
      end
      raise
    else
      # The callback may have committed or aborted on its own.
      if within_states?(TRANSACTION_ABORTED_STATE, NO_TRANSACTION_STATE, TRANSACTION_COMMITTED_STATE)
        transaction_in_progress = false
        return rv
      end
      begin
        commit_transaction(commit_options)
        transaction_in_progress = false
        return rv
      rescue Mongo::Error => e
        if e.label?(Mongo::Error::UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)
          # Code 64 (write concern failed) with wtimeout is not retryable.
          if e.is_a?(Mongo::Error::OperationFailure) && e.code == 64 && e.wtimeout?
            transaction_in_progress = false
            raise
          end
          if Time.now >= deadline
            transaction_in_progress = false
            raise
          end
          wc_options =
            case v = commit_options[:write_concern]
            when WriteConcern::Base
              v.options
            when nil
              {}
            else
              v
            end
          commit_options[:write_concern] = wc_options.merge(w: :majority)
          retry
        elsif e.label?(Mongo::Error::TRANSIENT_TRANSACTION_ERROR_LABEL)
          if Time.now >= deadline
            transaction_in_progress = false
            raise
          end
          next
        else
          transaction_in_progress = false
          raise
        end
      end
    end
  end
ensure
  if transaction_in_progress
    log_warn('with_transaction callback altered with_transaction loop, aborting transaction')
    begin
      abort_transaction
    rescue Error::OperationFailure, Error::InvalidTransactionOperation
    end
  end
end
Executes the provided block in a transaction retrying as necessary .
2,912
# Get the read preference the session will use in the currently active
# transaction, falling back to the client's read preference.
def txn_read_preference
  rp = txn_options && txn_options[:read_preference] || @client.read_preference
  Mongo::Lint.validate_underscore_read_preference(rp)
  rp
end
Get the read preference the session will use in the currently active transaction .
2,913
# Create a server selector object from the given read preference hash,
# or return it unchanged when it already is a selector instance.
def get(preference = {})
  return preference if PREFERENCES.values.include?(preference.class)
  Mongo::Lint.validate_underscore_read_preference(preference)
  mode = (preference[:mode] || :primary).to_sym
  PREFERENCES.fetch(mode).new(preference)
end
Create a server selector object .
2,914
# Execute a read operation returning a cursor, with retrying.
def read_with_retry_cursor(session, server_selector, view, &block)
  read_with_retry(session, server_selector) do |server|
    result = yield server
    Cursor.new(view, result, server, session: session)
  end
end
Execute a read operation returning a cursor with retrying .
2,915
# Execute a read operation with retrying. Dispatches between the
# legacy (argument-less) invocation, modern retryable reads, legacy
# retries, and a direct single attempt.
def read_with_retry(session = nil, server_selector = nil, &block)
  if session.nil? && server_selector.nil?
    # Legacy invocation: warn once per process via a global flag.
    unless $_mongo_read_with_retry_warned
      $_mongo_read_with_retry_warned = true
      Logger.logger.warn("Legacy read_with_retry invocation - please update the application and/or its dependencies")
    end
    server_selector = ServerSelector.get(mode: :primary_preferred)
    legacy_read_with_retry(nil, server_selector, &block)
  elsif session && session.retry_reads?
    modern_read_with_retry(session, server_selector, &block)
  elsif client.max_read_retries > 0
    legacy_read_with_retry(session, server_selector, &block)
  else
    server = select_server(cluster, server_selector)
    yield server
  end
end
Execute a read operation with retrying .
2,916
# Execute a read operation with a single retry on network errors.
# An optional :retry_message is logged before retrying.
def read_with_one_retry(options = nil)
  yield
rescue Error::SocketError, Error::SocketTimeoutError => e
  retry_message = options && options[:retry_message]
  log_retry(e, message: retry_message)
  yield
end
Execute a read operation with a single retry on network errors .
2,917
# Implements write retrying functionality by yielding to the passed
# block one or more times. Falls back to legacy retries when modern
# retryable writes are not applicable; inside a transaction (other than
# when ending it) failures are re-raised without retry.
def write_with_retry(session, write_concern, ending_transaction = false, &block)
  if ending_transaction && !session
    raise ArgumentError, 'Cannot end a transaction without a session'
  end

  unless ending_transaction || retry_write_allowed?(session, write_concern)
    return legacy_write_with_retry(nil, session, &block)
  end

  server = cluster.next_primary
  unless ending_transaction || server.retry_writes?
    return legacy_write_with_retry(server, session, &block)
  end

  begin
    txn_num = session.in_transaction? ? session.txn_num : session.next_txn_num
    yield(server, txn_num, false)
  rescue Error::SocketError, Error::SocketTimeoutError => e
    raise if session.in_transaction? && !ending_transaction
    retry_write(e, txn_num, &block)
  rescue Error::OperationFailure => e
    raise if (session.in_transaction? && !ending_transaction) || !e.write_retryable?
    retry_write(e, txn_num, &block)
  end
end
Implements write retrying functionality by yielding to the passed block one or more times .
2,918
# Log a warning so that any application slow down caused by a retry is
# immediately obvious.
def log_retry(e, options = nil)
  message =
    if options && options[:message]
      options[:message]
    else
      "Retry"
    end
  Logger.logger.warn "#{message} due to: #{e.class.name} #{e.message}"
end
Log a warning so that any application slow down is immediately obvious .
2,919
# Force the collection to be created in the database.
#
# NOTE(review): `options` here refers to the collection's own options
# attribute, while `opts` carries per-call options such as the session.
def create(opts = {})
  operation = { :create => name }.merge(options)
  operation.delete(:write)
  server = next_primary
  if (options[:collation] || options[Operation::COLLATION]) && !server.features.collation_enabled?
    raise Error::UnsupportedCollation.new
  end
  client.send(:with_session, opts) do |session|
    Operation::Create.new({
      selector: operation,
      db_name: database.name,
      write_concern: write_concern,
      session: session
    }).execute(server)
  end
end
Force the collection to be created in the database .
2,920
# Drop the collection. Will also drop all indexes associated with the
# collection. Returns false when the collection does not exist instead
# of raising.
def drop(opts = {})
  client.send(:with_session, opts) do |session|
    Operation::Drop.new({
      selector: { :drop => name },
      db_name: database.name,
      write_concern: write_concern,
      session: session
    }).execute(next_primary)
  end
rescue Error::OperationFailure => ex
  # FIX: the rescue previously matched on / / (any message containing a
  # space), silently converting nearly every operation failure to false.
  # Only the server's "ns not found" (collection missing) error should
  # be translated into a false return.
  raise ex unless ex.message =~ /ns not found/
  false
end
Drop the collection . Will also drop all indexes associated with the collection .
2,921
# Get a list of distinct values for a specific field.
def distinct(field_name, filter = nil, options = {})
  view = View.new(self, filter || {}, options)
  view.distinct(field_name, options)
end
Get a list of distinct values for a specific field .
2,922
# Insert a single document into the collection.
def insert_one(document, opts = {})
  client.send(:with_session, opts) do |session|
    write_with_retry(session, write_concern) do |server, txn_num|
      Operation::Insert.new(
        :documents => [document],
        :db_name => database.name,
        :coll_name => name,
        :write_concern => write_concern,
        :bypass_document_validation => !!opts[:bypass_document_validation],
        :options => opts,
        :id_generator => client.options[:id_generator],
        :session => session,
        :txn_num => txn_num
      ).execute(server)
    end
  end
end
Insert a single document into the collection .
2,923
# Insert the provided documents into the collection via a bulk write.
def insert_many(documents, options = {})
  operations = documents.map { |doc| { :insert_one => doc } }
  bulk_write(operations, options)
end
Insert the provided documents into the collection .
2,924
# Replaces a single document in the collection with the new document.
def replace_one(filter, replacement, options = {})
  view = find(filter, options)
  view.replace_one(replacement, options)
end
Replaces a single document in the collection with the new document .
2,925
# Update documents in the collection matching the filter.
def update_many(filter, update, options = {})
  view = find(filter, options)
  view.update_many(update, options)
end
Update documents in the collection .
2,926
# Update a single document in the collection matching the filter.
def update_one(filter, update, options = {})
  view = find(filter, options)
  view.update_one(update, options)
end
Update a single document in the collection .
2,927
# Finds a single document via findAndModify and updates it, returning
# the original document unless otherwise specified.
def find_one_and_update(filter, update, options = {})
  view = find(filter, options)
  view.find_one_and_update(update, options)
end
Finds a single document via findAndModify and updates it returning the original doc unless otherwise specified .
2,928
# Finds a single document and replaces it, returning the original
# document unless otherwise specified. Delegates to the view's
# find_one_and_update since a replacement is a form of update.
def find_one_and_replace(filter, replacement, options = {})
  view = find(filter, options)
  view.find_one_and_update(replacement, options)
end
Finds a single document and replaces it returning the original doc unless otherwise specified .
2,929
# Render a "bad parameters" (HTTP 400-style) response to the user.
# Accepts any mix of an Exception and/or a String message in *args.
def render_bad_parameters(*args)
  default_message =
    if request.xhr?
      _('Invalid parameters sent in the request for this operation. Please contact a system administrator.')
    else
      _('Invalid parameters sent. You may have mistyped the address. If you continue having trouble with this, please contact an Administrator.')
    end

  exception = args.find { |o| o.is_a? Exception }
  message = args.find { |o| o.is_a? String } || exception.try(:message) || default_message
  status =
    if exception && exception.respond_to?(:status_code)
      exception.status_code
    else
      400
    end

  if exception
    log_exception exception
  else
    Rails.logger.warn message
  end

  respond_to do |format|
    format.html do
      render :template => 'common/400',
             :layout => !request.xhr?,
             :status => status,
             :locals => { :message => message }
    end
    # NOTE(review): the non-HTML formats call exception.status_code
    # directly and would raise when no exception was given - preserved
    # as in the original.
    format.atom { head exception.status_code }
    format.xml { head exception.status_code }
    format.json { head exception.status_code }
  end
  User.current = nil
end
render bad params to user
2,930
# Find the subject of synchronization (a product or a repository) from
# the request params and memoize it in @obj.
#
# FIX: the NotFound guard was previously nested inside the else branch,
# so it never ran when :product_id or :repository_id was present but
# the lookup returned nil. Check @obj after all branches instead.
def find_object
  if params.key?(:product_id)
    @obj = find_product
  elsif params.key?(:repository_id)
    @obj = find_repository
  end
  fail HttpErrors::NotFound, N_("Couldn't find subject of synchronization") if @obj.nil?
  @obj
end
used in unit tests
2,931
# Creates a new environment from create_params with self as its prior,
# inside a transaction. Re-links the old successor (if any) to point at
# the newly inserted environment.
def insert_successor(create_params, path)
  self.class.transaction do
    new_successor = self.class.create!(create_params)
    if library?
      if path
        old_successor = path.first
        old_successor.prior = new_successor
      end
      save_successor new_successor
    elsif successor.nil?
      save_successor new_successor
    else
      old_successor = successor
      old_successor.prior = new_successor
      save_successor new_successor
    end
    fail HttpErrors::UnprocessableEntity, _('An environment is missing a prior') unless all_have_prior?
    new_successor
  end
end
creates new env from create_params with self as a prior
2,932
# Get the full path this environment is on: walk back through priors to
# the head of the path, then return that entire path.
def full_path
  env = self
  env = env.prior until env.prior.nil? || env.prior.library
  env.prior.nil? ? env.path : [env.prior] + env.path
end
Unlike path, which only gives the path from this environment going forward, full_path walks to the HEAD of the path this environment is on and returns that entire path.
2,933
# Generic method to provide a list of <option> elements for the UI,
# grouped per organization when the host spans more than one.
def content_options(host, selected_id, object_type, options = {})
  include_blank = options.fetch(:include_blank, nil)
  include_blank = '<option></option>' if include_blank == true

  orgs = relevant_organizations(host)
  all_options = []
  orgs.each do |org|
    content_object_options = ""
    accessible_content_objects =
      if object_type == :lifecycle_environment
        accessible_lifecycle_environments(org, host)
      elsif object_type == :content_source
        accessible_content_proxies(host)
      end
    accessible_content_objects.each do |content_object|
      selected = selected_id == content_object.id ? 'selected' : ''
      content_object_options << %(<option value="#{content_object.id}" class="kt-env" #{selected}>#{h(content_object.name)}</option>)
    end
    if orgs.count > 1
      all_options << %(<optgroup label="#{org.name}">#{content_object_options}</optgroup>)
    else
      all_options << content_object_options
    end
  end

  all_options = all_options.join
  all_options.insert(0, include_blank) if include_blank
  all_options.html_safe
end
Generic method to provide a list of options in the UI
2,934
# Kick off a sync for each syncable repository in repo_ids that is not
# already running, and collect the formatted sync progress for each.
def sync_repos(repo_ids)
  Repository.where(:id => repo_ids).syncable.map do |repo|
    unless latest_task(repo).try(:state) == 'running'
      ForemanTasks.async_task(::Actions::Katello::Repository::Sync, repo)
    end
    format_sync_progress(repo)
  end
end
loop through checkbox list of products and sync
2,935
# Returns true if pulp_task_id belongs to the most recent Dynflow sync
# task for this repository (meaning the sync is already handled).
def dynflow_handled_last_sync?(pulp_task_id)
  last_sync_task = ForemanTasks::Task::DynflowTask
                   .for_action(::Actions::Katello::Repository::Sync)
                   .for_resource(self)
                   .order(:started_at)
                   .last
  last_sync_task && last_sync_task.main_action.pulp_task_id == pulp_task_id
end
Returns true if the pulp_task_id was triggered by the last synchronization action for the repository. The Dynflow action handles the synchronization on its own, so there is no need to synchronize it again in this callback. Since the callbacks are run just after synchronization is finished, it should be enough to check for the last synchronization task.
2,936
# Whether this repository may be destroyed. Named destroyable? because
# deletable? is already taken by the authorization mixin. Adds a
# validation error and returns false when deletion is not allowed.
def destroyable?
  if self.environment.try(:library?) && self.content_view.default?
    if self.environment.organization.being_deleted?
      return true
    elsif self.custom? && self.deletable?
      return true
    elsif !self.custom? && self.redhat_deletable?
      return true
    else
      errors.add(:base,
                 _("Repository cannot be deleted since it has already been included in a published Content View. " \
                   "Please delete all Content View versions containing this repository before attempting to delete it."))
      return false
    end
  end
  return true
end
deleteable? is already taken by the authorization mixin
2,937
# WARNING: wipes out the existing component associations and replaces
# them with the component version ids passed in.
def component_ids=(component_version_ids_to_set)
  content_view_components.destroy_all
  component_version_ids_to_set.each do |content_view_version_id|
    version = ContentViewVersion.find(content_view_version_id)
    content_view_components.build(
      :content_view_version => version,
      :latest => false,
      :composite_content_view => self
    )
  end
end
Warning: this call wipes out existing associations and replaces them with the component version ids passed in.
2,938
# Get the library instances of all repos within this view: repos that
# are themselves library instances plus the library instances of the
# rest.
def all_version_library_instances
  repo_ids = all_version_repos.where(:library_instance_id => nil).pluck("#{Katello::Repository.table_name}.id")
  repo_ids += all_version_repos.pluck(:library_instance_id)
  Repository.where(:id => repo_ids)
end
get the library instances of all repos within this view
2,939
# Associate an environment with this content view so candlepin becomes
# aware the view is available for consumers. No-op when the
# association already exists.
def add_environment(env, version)
  return unless self.content_view_environments.where(:environment_id => env.id).empty?
  label = generate_cp_environment_label(env)
  ContentViewEnvironment.create!(
    :name => label,
    :label => label,
    :cp_id => generate_cp_environment_id(env),
    :environment_id => env.id,
    :content_view => self,
    :content_view_version => version
  )
end
Associate an environment with this content view . This can occur whenever a version of the view is promoted to an environment . It is necessary for candlepin to become aware that the view is available for consumers .
2,940
# Unassociate an environment from this content view so candlepin knows
# the view is no longer available for consumers. Only removes the
# association when no versions remain in the environment.
def remove_environment(env)
  return unless self.versions.in_environment(env).blank?
  view_env = self.content_view_environments.where(:environment_id => env.id)
  view_env.first.destroy unless view_env.blank?
end
Unassociate an environment from this content view . This can occur whenever a view is deleted from an environment . It is necessary to make candlepin aware that the view is no longer available for consumers .
2,941
# Normalize the possible error formats coming from pulp into a hash
# with :messages and :details arrays. Progress reports that carry a
# :finished_count are not error payloads and are skipped.
def format_errors(details)
  errors = { messages: [], details: [] }
  if details && !details.key?(:finished_count)
    details.each do |step, report|
      step == "content" ? parse_content(report, errors) : parse_generic(report, errors)
    end
  end
  errors
end
Possible formats coming from pulp
2,942
# Returns a copy of the stored data as a Hash keyed by store root,
# using a read-only transaction.
def read
  @store.transaction(true) do
    @store.roots.each_with_object({}) do |root, snapshot|
      snapshot[root] = @store[root]
    end
  end
end
returns copy of data stored
2,943
# Update remotes, validating the new remotes hash before it is stored;
# raise_invalid is called when validation fails.
def update
  config.update(:remotes) do |rmts|
    yield((rmts || {}).tap do |new_rmts|
      raise_invalid unless validate(new_rmts)
    end)
  end
end
Update remotes. Performs validation of the new remotes before storing them.
2,944
# Subscribe to one or more topics, letting Kafka handle partition
# assignments. Raises RdkafkaError on failure.
def subscribe(*topics)
  tpl = TopicPartitionList.new_native_tpl(topics.length)
  topics.each do |topic|
    Rdkafka::Bindings.rd_kafka_topic_partition_list_add(tpl, topic, -1)
  end
  response = Rdkafka::Bindings.rd_kafka_subscribe(@native_kafka, tpl)
  if response != 0
    raise Rdkafka::RdkafkaError.new(response, "Error subscribing to '#{topics.join(', ')}'")
  end
end
Subscribe to one or more topics letting Kafka handle partition assignments .
2,945
# Unsubscribe from all subscribed topics. Raises RdkafkaError on failure.
def unsubscribe
  response = Rdkafka::Bindings.rd_kafka_unsubscribe(@native_kafka)
  raise Rdkafka::RdkafkaError.new(response) if response != 0
end
Unsubscribe from all subscribed topics .
2,946
# Pause production or consumption for the provided list of partitions.
# On failure, raises RdkafkaTopicPartitionListError carrying the list.
def pause(list)
  raise TypeError.new("list has to be a TopicPartitionList") unless list.is_a?(TopicPartitionList)
  tpl = list.to_native_tpl
  response = Rdkafka::Bindings.rd_kafka_pause_partitions(@native_kafka, tpl)
  if response != 0
    list = TopicPartitionList.from_native_tpl(tpl)
    raise Rdkafka::RdkafkaTopicPartitionListError.new(response, list, "Error pausing '#{list.to_h}'")
  end
end
Pause production or consumption for the provided list of partitions.
2,947
# Resume production or consumption for the provided list of partitions.
def resume(list)
  raise TypeError.new("list has to be a TopicPartitionList") unless list.is_a?(TopicPartitionList)
  tpl = list.to_native_tpl
  response = Rdkafka::Bindings.rd_kafka_resume_partitions(@native_kafka, tpl)
  if response != 0
    raise Rdkafka::RdkafkaError.new(response, "Error resume '#{list.to_h}'")
  end
end
Resume production or consumption for the provided list of partitions.
2,948
# Return the current subscription to topics and partitions as a
# TopicPartitionList. The native list is destroyed after conversion.
def subscription
  out_ptr = FFI::MemoryPointer.new(:pointer)
  response = Rdkafka::Bindings.rd_kafka_subscription(@native_kafka, out_ptr)
  raise Rdkafka::RdkafkaError.new(response) if response != 0

  native_tpl = out_ptr.read(:pointer).tap { |it| it.autorelease = false }
  begin
    Rdkafka::Consumer::TopicPartitionList.from_native_tpl(native_tpl)
  ensure
    Rdkafka::Bindings.rd_kafka_topic_partition_list_destroy(native_tpl)
  end
end
Return the current subscription to topics and partitions
2,949
# Atomic assignment of partitions to consume.
def assign(list)
  raise TypeError.new("list has to be a TopicPartitionList") unless list.is_a?(TopicPartitionList)
  tpl = list.to_native_tpl
  response = Rdkafka::Bindings.rd_kafka_assign(@native_kafka, tpl)
  if response != 0
    raise Rdkafka::RdkafkaError.new(response, "Error assigning '#{list.to_h}'")
  end
end
Atomic assignment of partitions to consume
2,950
# Return the committed offsets per partition for this consumer group.
# When list is nil the current assignment is queried. The offset field
# is the stored offset or -1001 when none was stored.
def committed(list = nil, timeout_ms = 1200)
  if list.nil?
    list = assignment
  elsif !list.is_a?(TopicPartitionList)
    raise TypeError.new("list has to be nil or a TopicPartitionList")
  end
  tpl = list.to_native_tpl
  response = Rdkafka::Bindings.rd_kafka_committed(@native_kafka, tpl, timeout_ms)
  raise Rdkafka::RdkafkaError.new(response) if response != 0
  TopicPartitionList.from_native_tpl(tpl)
end
Return the current committed offset per partition for this consumer group. The offset field of each requested partition will either be set to the stored offset or to -1001 in case there was no stored offset for that partition.
2,951
# Store the offset of a message to be used in the next commit of this
# consumer. The temporary native topic handle is always destroyed.
def store_offset(message)
  native_topic = Rdkafka::Bindings.rd_kafka_topic_new(@native_kafka, message.topic, nil)
  response = Rdkafka::Bindings.rd_kafka_offset_store(native_topic, message.partition, message.offset)
  raise Rdkafka::RdkafkaError.new(response) if response != 0
ensure
  if native_topic && !native_topic.null?
    Rdkafka::Bindings.rd_kafka_topic_destroy(native_topic)
  end
end
Store offset of a message to be used in the next commit of this consumer
2,952
# Commit the current offsets of this consumer, optionally restricted to
# the given TopicPartitionList and optionally asynchronously.
def commit(list = nil, async = false)
  if !list.nil? && !list.is_a?(TopicPartitionList)
    raise TypeError.new("list has to be nil or a TopicPartitionList")
  end
  tpl = list ? list.to_native_tpl : nil
  response = Rdkafka::Bindings.rd_kafka_commit(@native_kafka, tpl, async)
  raise Rdkafka::RdkafkaError.new(response) if response != 0
end
Commit the current offsets of this consumer
2,953
# Poll for the next message on one of the subscribed topics. Returns
# nil on timeout; raises RdkafkaError for error events. The native
# message is always destroyed.
def poll(timeout_ms)
  message_ptr = Rdkafka::Bindings.rd_kafka_consumer_poll(@native_kafka, timeout_ms)
  if message_ptr.null?
    nil
  else
    native_message = Rdkafka::Bindings::Message.new(message_ptr)
    raise Rdkafka::RdkafkaError.new(native_message[:err]) if native_message[:err] != 0
    Rdkafka::Consumer::Message.new(native_message)
  end
ensure
  if !message_ptr.nil? && !message_ptr.null?
    Rdkafka::Bindings.rd_kafka_message_destroy(message_ptr)
  end
end
Poll for the next message on one of the subscribed topics
2,954
# Create a consumer with this configuration, wiring up the rebalance
# callback when a listener was configured.
def consumer
  opaque = Opaque.new
  config = native_config(opaque)
  if @consumer_rebalance_listener
    opaque.consumer_rebalance_listener = @consumer_rebalance_listener
    Rdkafka::Bindings.rd_kafka_conf_set_rebalance_cb(config, Rdkafka::Bindings::RebalanceCallback)
  end
  kafka = native_kafka(config, :rd_kafka_consumer)
  # Redirect the main queue to the consumer queue.
  Rdkafka::Bindings.rd_kafka_poll_set_consumer(kafka)
  Rdkafka::Consumer.new(kafka)
end
Create a consumer with this configuration .
2,955
# Create a producer with this configuration, installing the delivery
# report callback.
def producer
  opaque = Opaque.new
  config = native_config(opaque)
  Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::Bindings::DeliveryCallback)
  Rdkafka::Producer.new(native_kafka(config, :rd_kafka_producer)).tap do |producer|
    opaque.producer = producer
  end
end
Create a producer with this configuration .
2,956
# Build a native librdkafka config from the config hash. This method
# is only intended for creating a client; other uses leak memory.
def native_config(opaque = nil)
  Rdkafka::Bindings.rd_kafka_conf_new.tap do |config|
    @config_hash.merge(REQUIRED_CONFIG).each do |key, value|
      error_buffer = FFI::MemoryPointer.from_string(" " * 256)
      result = Rdkafka::Bindings.rd_kafka_conf_set(config, key.to_s, value.to_s, error_buffer, 256)
      raise ConfigError.new(error_buffer.read_string) unless result == :config_ok
    end
    if opaque
      pointer = ::FFI::Pointer.new(:pointer, opaque.object_id)
      Rdkafka::Bindings.rd_kafka_conf_set_opaque(config, pointer)
      # Keep the opaque alive; the GC must not collect it while librdkafka
      # holds the raw pointer.
      Rdkafka::Config.opaques[pointer.to_i] = opaque
    end
    Rdkafka::Bindings.rd_kafka_conf_set_log_cb(config, Rdkafka::Bindings::LogCallback)
    Rdkafka::Bindings.rd_kafka_conf_set_stats_cb(config, Rdkafka::Bindings::StatsCallback)
  end
end
This method is only intended to be used to create a client; using it in another way will leak memory.
2,957
# Searches upwards from start_dir (defaults to the current working
# directory) for the given target file. Returns the absolute path of
# the first match, or nil when the filesystem root is reached.
def find_upwards(target, start_dir = nil)
  previous = nil
  current = File.expand_path(start_dir || Dir.pwd)
  until !File.directory?(current) || current == previous
    candidate = File.join(current, target)
    return candidate if File.file?(candidate)
    previous = current
    current = File.expand_path('..', current)
  end
end
Searches upwards from current working directory for the given target file .
2,958
# Generate a unique name for a temporary directory under Dir.tmpdir,
# combining the base prefix, date, PID and a random base36 suffix.
def make_tmpdir_name(base)
  datestamp = Time.now.strftime('%Y%m%d')
  dirname = "#{base}#{datestamp}-#{Process.pid}-#{rand(0x100000000).to_s(36)}"
  File.join(Dir.tmpdir, dirname)
end
Generate a name for a temporary directory .
2,959
# Return an expanded, absolute path. On Windows the path must already
# exist so the long pathname can be resolved.
def canonical_path(path)
  if Gem.win_platform?
    unless File.exist?(path)
      raise PDK::CLI::FatalError, _("Cannot resolve a full path to '%{path}', as it does not currently exist.") % { path: path }
    end
    PDK::Util::Windows::File.get_long_pathname(path)
  else
    File.expand_path(path)
  end
end
Return an expanded absolute path
2,960
# Returns the fully qualified path to a per-user PDK cachedir,
# platform-dependent.
def cachedir
  if Gem.win_platform?
    File.join(ENV['LOCALAPPDATA'], 'PDK', 'cache')
  else
    File.join(Dir.home, '.pdk', 'cache')
  end
end
Returns the fully qualified path to a per - user PDK cachedir .
2,961
# Returns the path to the root of the module being worked on: the
# directory containing metadata.json, or the cwd when already at a
# module root, or nil.
def module_root
  metadata_path = find_upwards('metadata.json')
  if metadata_path
    File.dirname(metadata_path)
  elsif in_module_root?
    Dir.pwd
  end
end
Returns path to the root of the module being worked on .
2,962
# Iterate through possible JSON documents in text until we find one
# that is valid (break_on_first, the default), or collect every valid
# document into an array (break_on_first: false).
#
# FIX: the brace-matching regex had been reduced to an invalid bare \g
# call; restore the recursive subexpression call so nested objects
# match as a whole.
def find_valid_json_in(text, opts = {})
  break_on_first = opts.key?(:break_on_first) ? opts[:break_on_first] : true

  json_result = break_on_first ? nil : []

  # \g<0> recursively matches the whole pattern, so balanced nested
  # braces are consumed as a single candidate document.
  text.scan(%r{ \{ (?: [^{}] | \g<0> )* \} }x) do |str|
    begin
      if break_on_first
        json_result = JSON.parse(str)
        break
      else
        json_result.push(JSON.parse(str))
      end
    rescue JSON::ParserError
      next
    end
  end

  json_result
end
Iterate through possible JSON documents until we find one that is valid .
2,963
# Returns the target paths relative to the working directory. Absolute
# targets become Pathname objects relative to pwd; relative targets are
# passed through unchanged.
def targets_relative_to_pwd(targets)
  targets.map do |target|
    path = Pathname.new(target)
    path.absolute? ? path.relative_path_from(Pathname.pwd) : target
  end
end
Returns the targets paths relative to the working directory
2,964
# Renders the report as a JUnit XML document and writes it to +target+.
#
# @param target [String, IO] file path or an already-open IO to write to.
def write_junit(target = self.class.default_target)
  # Accept a path: open it ourselves, and close it in the ensure block.
  target = File.open(target, 'w') if target.is_a? String

  document = REXML::Document.new
  document << REXML::XMLDecl.new
  testsuites = REXML::Element.new('testsuites')

  id = 0
  # `events` presumably maps suite/tool name => array of event objects
  # responding to error?/failure?/skipped?/to_junit — confirm with callers.
  events.each do |testsuite_name, testcases|
    testsuite = REXML::Element.new('testsuite')
    testsuite.attributes['name'] = testsuite_name
    testsuite.attributes['tests'] = testcases.length
    testsuite.attributes['errors'] = testcases.select(&:error?).length
    testsuite.attributes['failures'] = testcases.select(&:failure?).length
    testsuite.attributes['skipped'] = testcases.select(&:skipped?).length
    testsuite.attributes['time'] = 0
    testsuite.attributes['timestamp'] = Time.now.strftime('%Y-%m-%dT%H:%M:%S')
    testsuite.attributes['hostname'] = Socket.gethostname
    testsuite.attributes['id'] = id
    testsuite.attributes['package'] = testsuite_name
    testsuite.add_element('properties')
    testcases.each { |r| testsuite.elements << r.to_junit }
    testsuite.add_element('system-out')
    testsuite.add_element('system-err')
    testsuites.elements << testsuite
    id += 1
  end

  document.elements << testsuites
  document.write(target, 2)
ensure
  # Only close handles we opened; a caller-supplied IO stays open.
  target.close if target.is_a? File
end
Renders the report as a JUnit XML document .
2,965
# Renders the report as plain text and writes it to +target+.
#
# Passing events are suppressed. An rspec-puppet coverage report, if one
# is present, is held back and appended at the very end of the output.
#
# @param target [String, IO] file path or an already-open IO to write to.
def write_text(target = self.class.default_target)
  target = File.open(target, 'w') if target.is_a? String
  coverage_report = nil

  events.each do |_tool, tool_events|
    tool_events.each do |event|
      if event.rspec_puppet_coverage?
        # Defer the coverage summary so it prints after everything else.
        coverage_report = event.to_text
      else
        target.puts(event.to_text) unless event.pass?
      end
    end
  end
ensure
  target.puts "\n#{coverage_report}" if coverage_report
  target.close if target.is_a? File
end
Renders the report as plain text .
2,966
# Merge +new_answers+ into the stored answers in memory, then persist
# everything to disk.
#
# @param new_answers [Hash] answers to merge into the current set
# @raise [PDK::CLI::FatalError] when given anything other than a Hash
def update!(new_answers = {})
  raise PDK::CLI::FatalError, _('Answer file can be updated only with a Hash') unless new_answers.is_a?(Hash)

  answers.merge!(new_answers)
  save_to_disk
end
Update the stored answers in memory and then save them to disk .
2,967
# Read existing answers into memory from the answer file on disk.
#
# Returns an empty Hash when the file is missing or empty; warns and
# returns an empty Hash (so the file gets recreated) when it contains
# invalid JSON or a non-Hash document.
#
# @return [Hash] the stored answers
# @raise [PDK::CLI::FatalError] when the file exists but is unreadable
def read_from_disk
  return {} if !File.file?(answer_file_path) || File.zero?(answer_file_path)

  unless File.readable?(answer_file_path)
    raise PDK::CLI::FatalError, _("Unable to open '%{file}' for reading") % {
      file: answer_file_path,
    }
  end

  answers = JSON.parse(File.read(answer_file_path))
  if answers.is_a?(Hash)
    answers
  else
    # Parsed, but the top-level document is not a Hash of answers.
    PDK.logger.warn _("Answer file '%{path}' did not contain a valid set of answers, recreating it") % {
      path: answer_file_path,
    }
    {}
  end
rescue JSON::JSONError
  PDK.logger.warn _("Answer file '%{path}' did not contain valid JSON, recreating it") % {
    path: answer_file_path,
  }
  {}
end
Read existing answers into memory from the answer file on disk .
2,968
# Save the in-memory answer set to the answer file on disk as pretty JSON,
# creating the parent directory if needed.
#
# @raise [PDK::CLI::FatalError] when the file cannot be written
def save_to_disk
  FileUtils.mkdir_p(File.dirname(answer_file_path))
  # write_file is defined elsewhere in this class/module.
  write_file(answer_file_path, JSON.pretty_generate(answers))
rescue SystemCallError, IOError => e
  raise PDK::CLI::FatalError, _("Unable to write '%{file}': %{msg}") % {
    file: answer_file_path,
    msg: e.message,
  }
end
Save the in memory answer set to the answer file on disk .
2,969
# Reads the content of the template file into memory.
#
# @return [String] contents of @template_file
# @raise [ArgumentError] when the file is missing or unreadable
def template_content
  readable = File.file?(@template_file) && File.readable?(@template_file)
  raise ArgumentError, _("'%{template}' is not a readable file") % { template: @template_file } unless readable

  File.read(@template_file)
end
Reads the content of the template file into memory .
2,970
# Renders the content of the template file as an ERB template.
#
# Uses the `trim_mode:` keyword argument: the legacy positional
# safe_level/trim_mode arguments (`ERB.new(src, nil, '-')`) were
# deprecated in Ruby 2.6 and removed in erb 4.0 (Ruby 3.x), where they
# raise ArgumentError.
#
# @return [String] the rendered template output
def render_erb
  renderer = ERB.new(template_content, trim_mode: '-')
  # Report the template's own path in any error backtraces.
  renderer.filename = @template_file
  renderer.result(binding)
end
Renders the content of the template file as an ERB template .
2,971
# If the attribute is not set, add it. If it is already set, append the
# value (whitespace-separated) without overwriting or duplicating the
# existing values.
#
# Fix: the split regex had been corrupted to `/ \s /` (spaces inserted
# around the token); restored to /\s/ so whitespace-separated values are
# actually split apart.
#
# @param node [#get_attribute, #set_attribute] element to modify
# @param attribute [String] attribute name
# @param value [String] value to append if not already present
def append_attribute(node, attribute, value)
  current_value = node.get_attribute(attribute) || ''
  current_values = current_value.split(/\s/)
  updated_value = current_values | [value]
  node.set_attribute(attribute, updated_value.join(' '))
end
If the attribute is not set, add it. If the attribute is already set, append the value rather than overwriting the existing value.
2,972
# Returns a plain-text version of the markup contained by the document,
# with HTML entities encoded unless options[:encode_special_chars] is
# explicitly false.
#
# @param options [Hash] :encode_special_chars => false to skip encoding
# @return [String] plain text ("" when serialization fails)
def text(options = {})
  plain = serialize_root.children.inner_text rescue ""
  return plain if options[:encode_special_chars] == false

  encode_special_chars plain
end
Returns a plain - text version of the markup contained by the document with HTML entities encoded .
2,973
# Add an option to this command.
#
# The trailing String argument is the description; the preceding args are
# the option switches. A handler may be supplied as a block; otherwise a
# default proc is generated for the switches.
def option(*args, &block)
  switches, description = Runner.separate_switches_from_description(*args)
  handler = block || option_proc(switches)
  @options << {
    args: args,
    proc: handler,
    switches: switches,
    description: description,
  }
end
Add an option .
2,974
# Call the command's registered when_called handler with +args+.
#
# The handler stored in @when_called may be:
#   * a Proc  — called with (args, options)
#   * a Class — instantiated; if a method name was given it is sent to a
#               new instance, otherwise new(args, options) is the call
#   * any other object — the method name is sent with (args, options)
#
# @param args [Array] parsed command-line arguments
def call(args = [])
  object, meth = @when_called[0, 2]
  meth ||= :call
  options = proxy_option_struct

  case object
  when Proc then object.call(args, options)
  when Class then meth != :call ? object.new.send(meth, args, options) : object.new(args, options)
  else object.send(meth, args, options) if object
  end
end
Call the commands when_called block with _args_ .
2,975
# Run the command parsing and execution process.
#
# Registers the built-in global options (--help, --version, and --trace
# unless tracing is forced on/off), installs INT handlers if configured,
# parses and strips global options, then dispatches the active command.
# Errors abort with a friendly message unless tracing is enabled.
def run!
  trace = @always_trace || false
  require_program :version, :description
  trap('INT') { abort program(:int_message) } if program(:int_message)
  trap('INT') { program(:int_block).call } if program(:int_block)
  global_option('-h', '--help', 'Display help documentation') do
    # Re-dispatch to the help command; `return` exits run! from the block.
    args = @args - %w(-h --help)
    command(:help).run(*args)
    return
  end
  global_option('-v', '--version', 'Display version information') do
    say version
    return
  end
  global_option('-t', '--trace', 'Display backtrace when an error occurs') { trace = true } unless @never_trace || @always_trace
  parse_global_options
  remove_global_options options, @args
  if trace
    # With tracing, let exceptions propagate with full backtraces.
    run_active_command
  else
    begin
      run_active_command
    rescue InvalidCommandError => e
      abort "#{e}. Use --help for more information"
    rescue OptionParser::InvalidOption, OptionParser::InvalidArgument, OptionParser::MissingArgument => e
      abort e.to_s
    rescue => e
      if @never_trace
        abort "error: #{e}."
      else
        abort "error: #{e}. Use --trace to view backtrace"
      end
    end
  end
end
Run command parsing and execution process .
2,976
# Assign or fetch program information.
#
#   program :name, 'Commander'   # set
#   program :name                # get
#
# :help accumulates custom help sections keyed by title; :help_formatter
# resolves formatter aliases; a block may be stored as the value.
def program(key, *args, &block)
  if key == :help && !args.empty?
    @program[:help] ||= {}
    @program[:help][args.first] = args.at(1)
  elsif key == :help_formatter && !args.empty?
    @program[key] = @help_formatter_aliases[args.first] || args.first
  elsif block
    @program[key] = block
  else
    @program[key] = (args.count == 1 ? args[0] : args) unless args.empty?
    @program[key]
  end
end
Assign program information .
2,977
# Alias command +name+ as +alias_name+. Optionally +args+ may be passed,
# as if they were being passed straight to the original command via the
# command line.
def alias_command(alias_name, name, *args)
  key = alias_name.to_s
  @commands[key] = command(name)
  @aliases[key] = args
end
Alias command _name_ with _alias_name_ . Optionally _args_ may be passed as if they were being passed straight to the original command via the command - line .
2,978
# Return the arguments with the (possibly multi-word) command name
# removed. Only the first occurrence of each name part is dropped, so
# repeated values that happen to match the command name survive.
def args_without_command_name
  name_parts = begin
    command_name_from_args.split
  rescue
    []
  end

  consumed = []
  @args.dup.delete_if do |arg|
    if name_parts.include?(arg) && !consumed.include?(arg)
      consumed << arg
      true
    else
      false
    end
  end
end
Return arguments without the command name .
2,979
# Removes global +options+ from +args+. This prevents an invalid-option
# error when the options are parsed again for the subcommand.
#
# NOTE(review): both regex literals below appear to have lost their
# patterns during extraction (they are literally `/ /`). The first
# presumably detected switches that take an argument (e.g. /[ =]/), the
# second matched option-like args; confirm against upstream commander.
def remove_global_options(options, args)
  options.each do |option|
    switches = option[:switches].dup
    next if switches.empty?

    # Strip the argument placeholder off switches like "--file FILE".
    if (switch_has_arg = switches.any? { |s| s =~ / / })
      switches.map! { |s| s[0, s.index('=') || s.index(' ') || s.length] }
    end

    switches = expand_optionally_negative_switches(switches)

    past_switch, arg_removed = false, false
    args.delete_if do |arg|
      if switches.any? { |s| s[0, arg.length] == arg }
        # The switch itself; remember whether its value still follows.
        arg_removed = !switch_has_arg
        past_switch = true
      elsif past_switch && !arg_removed && arg !~ / /
        # The value belonging to the previous switch.
        arg_removed = true
      else
        arg_removed = true
        false
      end
    end
  end
end
Removes global _options_ from _args_ . This prevents an invalid option error from occurring when options are parsed again for the command .
2,980
# Parse global command options.
#
# Unrecognized options are removed from the working copy of the args and
# parsing is retried — they belong to subcommands, not the global parser.
def parse_global_options
  parser = options.inject(OptionParser.new) do |options, option|
    options.on(*option[:args], &global_option_proc(option[:switches], &option[:proc]))
  end

  # Parse a copy so the original @args remain intact for the subcommand.
  options = @args.dup
  begin
    parser.parse!(options)
  rescue OptionParser::InvalidOption => e
    # Drop the offending option and re-parse what remains.
    options = options.reject { |o| e.args.include?(o) }
    retry
  end
end
Parse global command options .
2,981
# Raises a CommandError when any of the program +keys+ are not present
# or are empty.
def require_program(*keys)
  keys.each do |key|
    value = program(key)
    fail CommandError, "program #{key} required" if value.nil? || value.empty?
  end
end
Raises a CommandError when any of the program _keys_ are not present or are empty.
2,982
# Run the active command, prepending the stored alias arguments when the
# invoked name is an alias.
def run_active_command
  require_valid_command
  invoked = command_name_from_args
  if alias? invoked
    active_command.run(*(@aliases[invoked.to_s] + args_without_command_name))
  else
    active_command.run(*args_without_command_name)
  end
end
Run the active command .
2,983
# Prompt an editor for input. Optionally supply initial +input+, which is
# written to the temp file the editor is opened on.
#
# @param input [String, nil] initial contents for the editor buffer
# @param preferred_editor [String, nil] editor to prefer if available
# @return [String, nil] the edited text, or nil if the editor exits non-zero
def ask_editor(input = nil, preferred_editor = nil)
  editor = available_editor preferred_editor
  # Use the program name for the temp file; fall back when no Runner exists.
  program = Commander::Runner.instance.program(:name).downcase rescue 'commander'
  tmpfile = Tempfile.new program
  begin
    tmpfile.write input if input
    tmpfile.close
    # shellescape guards against spaces/metacharacters in the temp path.
    system("#{editor} #{tmpfile.path.shellescape}") ? IO.read(tmpfile.path) : nil
  ensure
    tmpfile.unlink
  end
end
Prompt an editor for input . Optionally supply initial _input_ which is written to the editor .
2,984
# Enable paging of all output produced after this call.
#
# Forks the process: the original (parent) process turns itself into the
# pager reading from a pipe, while the forked child continues running the
# program with stdout/stderr redirected into that pipe. No-ops on
# non-tty stdout and on platforms without fork (NotImplementedError is
# swallowed, e.g. Windows/JRuby).
def enable_paging
  return unless $stdout.tty?
  return unless Process.respond_to? :fork

  read, write = IO.pipe

  # Kernel.fork returns the child pid in the parent, nil in the child.
  if Kernel.fork
    # Parent: become the pager, reading program output from the pipe.
    $stdin.reopen read
    write.close
    read.close
    # Block until the child has produced some output.
    Kernel.select [$stdin]
    ENV['LESS'] = 'FSRX' unless ENV.key? 'LESS'
    pager = ENV['PAGER'] || 'less'
    exec pager rescue exec '/bin/sh', '-c', pager
  else
    # Child: keep running the program, writing into the pipe.
    $stdout.reopen write
    $stderr.reopen write if $stderr.tty?
    write.close
    read.close
  end
rescue NotImplementedError
ensure
  write.close if write && !write.closed?
  read.close if read && !read.closed?
end
Enable paging of output after called .
2,985
# Output a progress bar while iterating +arr+, yielding each element to
# the supplied block.
#
# @param arr [Array] items to iterate
# @param options [Hash] options forwarded to ProgressBar
def progress(arr, options = {})
  bar = ProgressBar.new(arr.length, options)
  bar.show
  arr.each do |item|
    bar.increment yield(item)
  end
end
Output progress while iterating _arr_ .
2,986
# Substitute each of +hash+'s keys (written as ":key" in +str+) with its
# associated value.
#
# @param str [String] template string containing :key tokens
# @param hash [Hash] token => replacement pairs
# @return [String] the substituted string
def replace_tokens(str, hash)
  result = str
  hash.each do |key, value|
    result = result.gsub(":#{key}", value.to_s)
  end
  result
end
Substitute _hash_ s keys with their associated values in _str_ .
2,987
# The ruby aws-sdk expects snake_case symbol keys, while the AWS docs for
# the task definition use camelCase JSON keys. Transform the keys to the
# expected ruby aws-sdk format.
#
# Log-configuration option keys (e.g. awslogs-group) must keep their
# original dasherized form, so they are restored from the raw input.
def rubyize_format(original_data)
  data = original_data.to_snake_keys.deep_symbolize_keys

  data[:container_definitions].each_with_index do |definition, index|
    log_conf = definition[:log_configuration]
    next unless log_conf
    next unless log_conf[:options]

    raw_definition = original_data["containerDefinitions"][index]
    log_conf[:options] = raw_definition["logConfiguration"]["options"]
  end

  data
end
The ruby aws - sdk expects symbols for keys and AWS docs for the task definition uses json camelCase for the keys . This method transforms the keys to the expected ruby aws - sdk format .
2,988
# If the running count is less than the desired count, check the recent
# service events and show a message with helpful debugging information.
#
# NOTE(review): the event-matching regex has lost its pattern (it is
# literally `/ /`); it presumably matched scaling-failure messages such
# as "unable to place a task" — confirm against upstream before relying
# on this.
def display_scale_help
  return if service.running_count >= service.desired_count

  # Only inspect the four most recent events.
  events = service["events"][0..3]
  error_event = events.find do |e|
    e.message =~ / /
  end
  return unless error_event

  puts "There is an issue scaling the #{@service.color(:green)} service to #{service.desired_count}. Here's the error:"
  puts error_event.message.color(:red)

  if service.launch_type == "EC2"
    puts "If AutoScaling is set up for the container instances, it can take a little time to add additional instances. You'll see this message until the capacity is added."
  end
end
If the running count is less than the desired count, check the service events and show a message with helpful debugging information.
2,989
# Resolve the ufo env without calling Ufo.env (Ufo.env consults
# Settings.new.data, which would loop back here and recurse forever).
#
# UFO_ENV wins; otherwise a settings.yml section whose aws_profile
# matches AWS_PROFILE; otherwise 'development'.
def ufo_env
  settings = YAML.load_file("#{Ufo.root}/.ufo/settings.yml")
  matched = settings.find do |_name, section|
    section ||= {}
    ENV['AWS_PROFILE'] && ENV['AWS_PROFILE'] == section['aws_profile']
  end

  env = matched ? matched.first : nil
  env = ENV['UFO_ENV'] if ENV['UFO_ENV']
  env || 'development'
end
Resolves an infinite-loop problem: Ufo.env can be determined from UFO_ENV or from the settings.yml file. When it is determined from settings, it must not call Ufo.env, since that in turn calls Settings.new.data, which could cause an infinite loop.
2,990
# Evaluate the template definitions file; all we're doing at this point
# is saving blocks of code into memory. The instance_eval provides the
# task_definition and helper methods, as they are part of this class.
#
# Any evaluation error (other than SystemExit) is reported with a
# friendly, line-annotated message before being re-raised.
def evaluate_template_definitions
  source_code = IO.read(@template_definitions_path)
  begin
    instance_eval(source_code, @template_definitions_path)
  rescue Exception => e
    # rescue Exception deliberately: user-supplied definition code can
    # raise anything, and we still want the friendly report.
    if e.class == SystemExit
      # Let user-invoked `exit` pass through untouched.
      raise
    else
      task_definition_error(e)
      puts "\nFull error:"
      raise
    end
  end
end
All we're doing at this point is saving blocks of code into memory. The instance_eval provides the task_definition and helper methods, as they are part of this class.
2,991
# Prints a user-friendly task_definition error message: the exception
# message followed by the offending file excerpt, five lines of context
# on each side, with the failing line highlighted in red.
#
# @param e [Exception] error raised while evaluating the definitions file
def task_definition_error(e)
  # Backtrace entries look like "path:line:in `method'".
  error_info = e.backtrace.first
  path, line_no, _ = error_info.split(':')
  line_no = line_no.to_i
  puts "Error evaluating #{path}:".color(:red)
  puts e.message
  puts "Here's the line in #{path} with the error:\n\n"

  contents = IO.read(path)
  content_lines = contents.split("\n")
  context = 5 # lines of context shown on each side of the failing line
  top, bottom = [line_no - context - 1, 0].max, line_no + context - 1
  # Width needed to right-align the largest line number.
  spacing = content_lines.size.to_s.size
  content_lines[top..bottom].each_with_index do |line_content, index|
    line_number = top + index + 1
    if line_number == line_no
      printf("%#{spacing}d %s\n".color(:red), line_number, line_content)
    else
      printf("%#{spacing}d %s\n", line_number, line_content)
    end
  end
end
Prints out a user friendly task_definition error message
2,992
# Render the CloudFormation stack template body. A project-level template
# at .ufo/settings/cfn/stack.yml takes precedence over the built-in one.
#
# Deliberately not memoized: the body can change for a rename retry.
def template_body
  custom_template = "#{Ufo.root}/.ufo/settings/cfn/stack.yml"
  default_template = File.expand_path("../cfn/stack.yml", File.dirname(__FILE__))
  chosen = File.exist?(custom_template) ? custom_template : default_template
  RenderMePretty.result(chosen, context: context.scope)
end
Do not memoize template_body; it can change for a rename retry.
2,993
# Store the generated template and parameters under /tmp for debugging.
def save_template
  base_dir = "/tmp/ufo/#{@stack_name}"

  template_path = "#{base_dir}/stack.yml"
  FileUtils.mkdir_p(File.dirname(template_path))
  IO.write(template_path, template_body)
  puts "Generated template saved at: #{template_path}"

  params_path = "#{base_dir}/parameters.yml"
  IO.write(params_path, JSON.pretty_generate(parameters))
  puts "Generated parameters saved at: #{params_path}"
end
Store the generated template in /tmp for debugging purposes.
2,994
# Start a thread that will repeatedly stop tasks belonging to old ECS
# deployments. This must be done in a thread because the stack update
# process is blocking.
#
# Only runs when the --wait option was requested. The thread loops
# forever; it dies with the process once the blocking update finishes.
def stop_old_tasks
  return unless @options[:wait]

  Thread.new do
    # mute: true keeps the background stopper from spamming normal output.
    stop = Ufo::Stop.new(@service, @options.merge(mute: true))
    while true
      stop.log "checking for old tasks and waiting for 10 seconds"
      stop.run
      sleep 10
    end
  end
end
Start a thread that will poll for ECS deployments and kill off tasks in old deployments. This must be done in a thread because the stack update process is blocking.
2,995
# Find the ELB load balancer associated with +service+, if any.
# The service is passed in so this method can be used elsewhere.
#
# @param service [#load_balancers] ECS service
# @return [Object, nil] the load balancer description, or nil when the
#   service has no load balancer attached
def load_balancer(service)
  service_lb = service.load_balancers.first
  return unless service_lb

  target_groups = elb.describe_target_groups(target_group_arns: [service_lb.target_group_arn]).target_groups
  lb_arn = target_groups.first.load_balancer_arns.first
  elb.describe_load_balancers(load_balancer_arns: [lb_arn]).load_balancers.first
end
Passing in the service so the method can be used elsewhere.
2,996
# All visible top-level command names (hidden Thor commands excluded).
#
# @return [Array<String>] command names
def all_commands
  visible = @command_class.all_commands.reject do |_name, cmd|
    cmd.is_a?(Thor::HiddenCommand)
  end
  visible.keys
end
all top - level commands
2,997
# Implements the `ufo status` command: print the stack's current status
# and either tail events (while an operation is in progress) or show the
# recent events once.
#
# NOTE(review): the status-matching regex has lost its pattern (it is
# literally `/ /`); it presumably matched in-progress states such as
# /IN_PROGRESS/ — confirm against upstream before relying on it.
def run
  unless stack_exists?(@stack_name)
    puts "The stack #{@stack_name.color(:green)} does not exist."
    return
  end

  resp = cloudformation.describe_stacks(stack_name: @stack_name)
  stack = resp.stacks.first
  puts "The current status for the stack #{@stack_name.color(:green)} is #{stack.stack_status.color(:green)}"
  status_poller = Stack::Status.new(@stack_name)
  if stack.stack_status =~ / /
    # Operation still running: tail the events until it completes.
    puts "Stack events (tailing):"
    status_poller.hide_time_took = true
    status_poller.wait
  else
    puts "Stack events:"
    status_poller.refresh_events
    status_poller.show_events(true)
  end
end
used for the ufo status command
2,998
# Adjust network_configuration based on Fargate and a network mode of
# awsvpc in the most recent task definition.
#
# @param options [Hash] run/service options, mutated in place
# @return [Hash] the (possibly updated) options
def adjust_fargate_options(options)
  task_def = recent_task_definition
  return options unless task_def[:network_mode] == "awsvpc"

  vpc_conf = { subnets: network[:ecs_subnets] }
  if task_def[:requires_compatibilities] == ["FARGATE"]
    # Fargate tasks in public subnets need a public IP to pull images.
    vpc_conf[:assign_public_ip] = "ENABLED"
    options[:launch_type] = "FARGATE"
  end
  options[:network_configuration] = { awsvpc_configuration: vpc_conf }

  options
end
adjust network_configuration based on fargate and network mode of awsvpc
2,999
# Ensures at least one security group is assigned whenever an
# awsvpc_configuration is provided; falls back to the default security
# group for the configured VPC.
#
# @param options [Hash] run/service options, mutated in place
# @return [Hash] the (possibly updated) options
def adjust_security_groups(options)
  network_conf = options[:network_configuration]
  awsvpc_conf = network_conf && network_conf[:awsvpc_configuration]
  return options unless awsvpc_conf

  groups = awsvpc_conf[:security_groups]
  # Treat unset/blank/"nil" sentinel values as an empty list.
  groups = [] if [nil, '', 'nil'].include?(groups)

  if groups.empty?
    default_sg = Network::Fetch.new(network[:vpc]).security_group_id
    groups = (groups + [default_sg]).uniq
  end

  options[:network_configuration][:awsvpc_configuration][:security_groups] = groups
  options
end
Ensures at least 1 security group is assigned if awsvpc_configuration is provided .