Dataset schema:
  _id               string (length 2 to 6)
  title             string (length 9 to 130)
  partition         string (3 distinct values)
  text              string (length 66 to 10.5k)
  language          string (1 distinct value)
  meta_information  dict
q26900
Easyzpl.StoredLabel.add_field
test
def add_field(value)
  return if value.nil?
  return if value.strip.empty?
  # Increment the variable field count
  self.variable_fields_count += 1
  # Add the field
  label_data.push('^FN' + variable_fields_count.to_s + '^FD' + value + '^FS')
end
ruby
{ "resource": "" }
q26901
Mongoid.Slug.build_slug
test
def build_slug
  if localized?
    begin
      orig_locale = I18n.locale
      all_locales.each do |target_locale|
        I18n.locale = target_locale
        apply_slug
      end
    ensure
      I18n.locale = orig_locale
    end
  else
    apply_slug
  end
  true
end
ruby
{ "resource": "" }
q26902
Mongoid.Slug.new_with_slugs?
test
def new_with_slugs?
  if localized?
    # We need to check if slugs are present for the locale without falling back
    # to a default
    new_record? && _slugs_translations.fetch(I18n.locale.to_s, []).any?
  else
    new_record? && _slugs.present?
  end
end
ruby
{ "resource": "" }
q26903
Mongoid.Slug.persisted_with_slug_changes?
test
def persisted_with_slug_changes?
  if localized?
    changes = _slugs_change
    return (persisted? && false) if changes.nil?

    # ensure we check for changes only between the same locale
    original = changes.first.try(:fetch, I18n.locale.to_s, nil)
    compare = changes.last.try(:fetch, I18n.locale.to_s, nil)
    persisted? && original != compare
  else
    persisted? && _slugs_changed?
  end
end
ruby
{ "resource": "" }
q26904
ETL.Util.distance_of_time_in_words
test
def distance_of_time_in_words(from_time, to_time=Time.now)
  from_time = from_time.to_time if from_time.respond_to?(:to_time)
  to_time = to_time.to_time if to_time.respond_to?(:to_time)
  seconds = (to_time - from_time).round
  distance_in_days = (seconds/(60*60*24)).round
  seconds = seconds % (60*60*24)
  distance_in_hours = (seconds/(60*60)).round
  seconds = seconds % (60*60)
  distance_in_minutes = (seconds/60).round
  seconds = seconds % 60
  distance_in_seconds = seconds

  s = ''
  s << "#{distance_in_days} days," if distance_in_days > 0
  s << "#{distance_in_hours} hours, " if distance_in_hours > 0
  s << "#{distance_in_minutes} minutes, " if distance_in_minutes > 0
  s << "#{distance_in_seconds} seconds"
  s
end
ruby
{ "resource": "" }
q26905
ETL.Util.approximate_distance_of_time_in_words
test
def approximate_distance_of_time_in_words(from_time, to_time=Time.now, include_seconds=true)
  from_time = from_time.to_time if from_time.respond_to?(:to_time)
  to_time = to_time.to_time if to_time.respond_to?(:to_time)
  distance_in_minutes = (((to_time - from_time).abs)/60).round
  distance_in_seconds = ((to_time - from_time).abs).round

  case distance_in_minutes
  when 0..1
    return (distance_in_minutes == 0) ? 'less than a minute' : '1 minute' unless include_seconds
    case distance_in_seconds
    when 0..4   then 'less than 5 seconds'
    when 5..9   then 'less than 10 seconds'
    when 10..19 then 'less than 20 seconds'
    when 20..39 then 'half a minute'
    when 40..59 then 'less than a minute'
    else             '1 minute'
    end
  when 2..44           then "#{distance_in_minutes} minutes"
  when 45..89          then 'about 1 hour'
  when 90..1439        then "about #{(distance_in_minutes.to_f / 60.0).round} hours"
  when 1440..2879      then '1 day'
  when 2880..43199     then "#{(distance_in_minutes / 1440).round} days"
  when 43200..86399    then 'about 1 month'
  when 86400..525959   then "#{(distance_in_minutes / 43200).round} months"
  when 525960..1051919 then 'about 1 year'
  else                      "over #{(distance_in_minutes / 525960).round} years"
  end
end
ruby
{ "resource": "" }
q26906
ETL.Engine.track_error
test
def track_error(control, msg)
  errors << msg
  control.error_handlers.each do |handler|
    handler.call(msg)
  end
end
ruby
{ "resource": "" }
q26907
ETL.Engine.process_batch
test
def process_batch(batch)
  batch = ETL::Batch::Batch.resolve(batch, self)
  say "Processing batch #{batch.file}"

  ETL::Engine.batch = ETL::Execution::Batch.create!(
    :batch_file => batch.file,
    :status => 'executing'
  )

  batch.execute

  ETL::Engine.batch.completed_at = Time.now
  ETL::Engine.batch.status = (errors.length > 0 ? 'completed with errors' : 'completed')
  ETL::Engine.batch.save!
end
ruby
{ "resource": "" }
q26908
ETL.Engine.pre_process
test
def pre_process(control)
  Engine.logger.debug "Pre-processing #{control.file}"
  control.pre_processors.each do |processor|
    processor.process
  end
  Engine.logger.debug "Pre-processing complete"
end
ruby
{ "resource": "" }
q26909
ETL.Engine.post_process
test
def post_process(control)
  say_on_own_line "Executing post processes"
  Engine.logger.debug "Post-processing #{control.file}"
  control.post_processors.each do |processor|
    processor.process
  end
  Engine.logger.debug "Post-processing complete"
  say "Post-processing complete"
end
ruby
{ "resource": "" }
q26910
ETL.Engine.execute_dependencies
test
def execute_dependencies(control)
  Engine.logger.debug "Executing dependencies"
  control.dependencies.flatten.each do |dependency|
    case dependency
    when Symbol
      f = dependency.to_s + '.ctl'
      Engine.logger.debug "Executing dependency: #{f}"
      say "Executing dependency: #{f}"
      process(f)
    when String
      Engine.logger.debug "Executing dependency: #{dependency}"
      say "Executing dependency: #{dependency}"
      process(dependency)
    else
      raise "Invalid dependency type: #{dependency.class}"
    end
  end
end
ruby
{ "resource": "" }
q26911
ETL.Engine.execute_screens
test
def execute_screens(control, timing = :before_post_process)
  screens = case timing
            when :after_post_process
              control.after_post_process_screens
            else # default to before post-process screens
              control.screens
            end
  [:fatal, :error, :warn].each do |type|
    screens[type].each do |block|
      begin
        block.call
      rescue => e
        case type
        when :fatal
          raise FatalScreenError, e
        when :error
          raise ScreenError, e
        when :warn
          say "Screen warning: #{e}"
        end
      end
    end
  end
end
ruby
{ "resource": "" }
q26912
RedisModelExtension.ClassInitialize.redis_key
test
def redis_key *fields
  @redis_key_config = fields.flatten
  validate_redis_key

  # own specification of redis key - delete autoincrement
  remove_redis_autoincrement_key unless redis_user_field_config.include?(:id) || @redis_key_config.include?(:id)

  # automaticaly add all fields from key to validation
  # if any of fields in redis key is nil
  # then prevent to save it
  @redis_key_config.each do |field|
    validates field, :presence => :true if field != :id
  end
end
ruby
{ "resource": "" }
q26913
RedisModelExtension.ClassInitialize.redis_key_normalize
test
def redis_key_normalize *metrics
  @redis_key_normalize_conf ||= []
  metrics.each do |metric|
    raise ArgumentError, "Please provide valid normalization: #{VALID_NORMALIZATIONS.join(", ")}" unless VALID_NORMALIZATIONS.include?(metric)
    @redis_key_normalize_conf << metric
  end
end
ruby
{ "resource": "" }
q26914
RedisModelExtension.ClassInitialize.redis_alias
test
def redis_alias name, main_fields, name_of_field_for_order = nil, name_of_field_for_args = nil
  # set fields if they are not allready set!
  if name_of_field_for_order && name_of_field_for_args
    redis_field name_of_field_for_order, :array, [] unless redis_fields_config.has_key?(name_of_field_for_order)
    redis_field name_of_field_for_args, :hash, {} unless redis_fields_config.has_key?(name_of_field_for_args)
  end

  @redis_alias_config ||= {}

  # add specification of dynamic alias
  @redis_alias_config[name] = {
    main_fields: main_fields,
    order_field: name_of_field_for_order,
    args_field: name_of_field_for_args,
  }

  # create alias methods for find and get (find_by_name, get_by_name)
  create_class_alias_method(name)
end
ruby
{ "resource": "" }
q26915
RedisModelExtension.StoreKeys.store_redis_keys
test
def store_redis_keys
  args = to_arg
  # store main key
  redis_old_keys[:key] = self.class.generate_key(args)
  # store alias keys
  redis_old_keys[:aliases] = []
  redis_alias_config.each do |alias_name, fields|
    redis_old_keys[:aliases] << redis_alias_key(alias_name) if valid_alias_key? alias_name
  end
end
ruby
{ "resource": "" }
q26916
RedisModelExtension.ClassOldInitialize.conf
test
def conf
  fields = {}
  redis_fields_config.each do |key, type|
    fields[key] = TYPE_TRANSLATIONS[type] if TYPE_TRANSLATIONS.has_key?(type)
  end
  {
    fields: fields,
    required: @required_config.sort,
    redis_key: redis_key_config,
    redis_aliases: redis_alias_config.inject({}){ |o, (k, v)| o[k] = v[:main_fields]; o },
    reject_nil_values: !redis_save_fields_with_nil_conf,
  }
end
ruby
{ "resource": "" }
q26917
RedisModelExtension.ClassRedisKey.exists?
test
def exists? args = {}
  RedisModelExtension::Database.redis.exists(self.name.constantize.generate_key(args))
end
ruby
{ "resource": "" }
q26918
RedisModelExtension.ClassRedisKey.alias_exists?
test
def alias_exists? alias_name, args = {}
  RedisModelExtension::Database.redis.exists(self.name.constantize.generate_alias_key(alias_name, args))
end
ruby
{ "resource": "" }
q26919
RedisModelExtension.ClassValidations.valid_item_for_redis_key?
test
def valid_item_for_redis_key? args, key
  (args.has_key?(key) && !args[key].nil?) || redis_fields_config[key] == :autoincrement
end
ruby
{ "resource": "" }
q26920
RedisModelExtension.ClassValidations.validate_redis_key
test
def validate_redis_key
  valid_fields = redis_fields_config.select{ |k, v| v != :array && v != :hash }.keys
  bad_fields = redis_key_config - valid_fields
  raise ArgumentError, "Sorry, but you cannot use as redis key [nonexisting | array | hash] fields: [#{bad_fields.join(",")}], availible are: #{valid_fields.join(", ")}" unless bad_fields.size == 0
end
ruby
{ "resource": "" }
q26921
RedisModelExtension.Attributes.to_arg
test
def to_arg
  redis_fields_config.inject({}) do |args, (key, type)|
    args[key] = self.send(key)
    args
  end
end
ruby
{ "resource": "" }
q26922
RedisModelExtension.ClassGetFind.find_by_alias
test
def find_by_alias(alias_name, args = {})
  # check if asked dynamic alias exists
  raise ArgumentError, "Unknown dynamic alias: '#{alias_name}', use: #{redis_alias_config.keys.join(", ")} " unless redis_alias_config.has_key?(alias_name.to_sym)

  # normalize input hash of arguments
  args = HashWithIndifferentAccess.new(args)

  out = []
  klass = self.name.constantize
  search_key = klass.generate_alias_key(alias_name, args)

  # is key specified directly? -> no needs of looking for other keys! -> faster
  unless search_key =~ /\*/
    out = klass.get_by_alias(alias_name, args) if klass.alias_exists?(alias_name, args)
  else
    RedisModelExtension::Database.redis.keys(search_key).each do |key|
      out << klass.get_by_alias_key(key)
    end
  end
  out.flatten
end
ruby
{ "resource": "" }
q26923
RedisModelExtension.ClassGetFind.get
test
def get(args = {})
  # when argument is integer - search by id
  args = { id: args } if args.is_a?(Integer)

  # normalize input hash of arguments
  args = HashWithIndifferentAccess.new(args)

  klass = self.name.constantize
  if klass.valid_key?(args) && klass.exists?(args)
    klass.new_by_key(klass.generate_key(args))
  else
    nil
  end
end
ruby
{ "resource": "" }
q26924
RedisModelExtension.ClassGetFind.get_by_alias_key
test
def get_by_alias_key(alias_key)
  klass = self.name.constantize
  if RedisModelExtension::Database.redis.exists(alias_key)
    out = []
    RedisModelExtension::Database.redis.smembers(alias_key).each do |key|
      item = klass.new_by_key(key)
      out << item if item
    end
    return out
  end
  nil
end
ruby
{ "resource": "" }
q26925
RedisModelExtension.ClassGetFind.new_by_key
test
def new_by_key(key)
  args = RedisModelExtension::Database.redis.hgetall(key)
  return nil unless args && args.any?
  args.symbolize_keys!
  new_instance = new(args)
  new_instance.store_keys
  return new_instance
end
ruby
{ "resource": "" }
q26926
RedisModelExtension.ValueTransform.value_to_redis
test
def value_to_redis name, value
  if redis_fields_config.has_key?(name)
    value_transform value, redis_fields_config[name]
  else
    value
  end
end
ruby
{ "resource": "" }
q26927
RedisModelExtension.ValueTransform.value_transform
test
def value_transform value, type
  return nil if value.nil? || value.to_s.size == 0
  case type
  when :integer       then value.to_i
  when :autoincrement then value.to_i
  when :string        then value.to_s
  when :float         then value.to_f
  when :bool          then value.to_s
  when :symbol        then value.to_s
  when :marshal       then Marshal.dump(value)
  when :array         then Yajl::Encoder.encode(value)
  when :hash          then Yajl::Encoder.encode(value)
  when :time          then Time.parse(value.to_s).strftime("%Y.%m.%d %H:%M:%S")
  when :date          then Date.parse(value.to_s).strftime("%Y-%m-%d")
  else value
  end
end
ruby
{ "resource": "" }
q26928
RedisModelExtension.ValueTransform.value_parse
test
def value_parse value, type
  return nil if value.nil? || value.to_s.size == 0
  case type
  when :integer       then value.to_i
  when :autoincrement then value.to_i
  when :string        then value.to_s
  when :float         then value.to_f
  when :bool          then value.to_s.to_bool
  when :symbol        then value.to_s.to_sym
  when :marshal       then value.is_a?(String) ? Marshal.load(value) : value
  when :array         then value.is_a?(String) ? Yajl::Parser.parse(value) : value
  when :hash          then value.is_a?(String) ? Hashr.new(Yajl::Parser.parse(value)) : Hashr.new(value)
  when :time          then value.is_a?(String) ? Time.parse(value) : value
  when :date          then value.is_a?(String) ? Date.parse(value) : value
  else value
  end
end
ruby
{ "resource": "" }
q26929
RedisModelExtension.SaveDestroy.update
test
def update args
  args.each do |key, value|
    method = "#{key}=".to_sym
    if self.respond_to? method
      self.send(method, value)
    end
  end
end
ruby
{ "resource": "" }
q26930
RedisModelExtension.SaveDestroy.destroy_aliases!
test
def destroy_aliases!
  # do it only if it is existing object!
  if redis_old_keys[:aliases].size > 0
    redis_old_keys[:aliases].each do |alias_key|
      RedisModelExtension::Database.redis.srem alias_key, redis_old_keys[:key]
      # delete alias with 0 keys
      RedisModelExtension::Database.redis.del(alias_key) if RedisModelExtension::Database.redis.scard(alias_key).to_i == 0
    end
  end
end
ruby
{ "resource": "" }
q26931
Firim.CommandsGenerator.add
test
def add(username, token)
  Firim::AccountManager.new(
    user: username,
    token: token
  ).add_to_keychain
end
ruby
{ "resource": "" }
q26932
Nsq.Discovery.get_nsqds
test
def get_nsqds(lookupd, topic = nil)
  uri_scheme = 'http://' unless lookupd.match(%r(https?://))
  uri = URI.parse("#{uri_scheme}#{lookupd}")

  uri.query = "ts=#{Time.now.to_i}"
  if topic
    uri.path = '/lookup'
    uri.query += "&topic=#{URI.escape(topic)}"
  else
    uri.path = '/nodes'
  end

  begin
    body = Net::HTTP.get(uri)
    data = JSON.parse(body)
    producers = data['producers'] || # v1.0.0-compat
                (data['data'] && data['data']['producers'])
    if producers
      producers.map do |producer|
        "#{producer['broadcast_address']}:#{producer['tcp_port']}"
      end
    else
      []
    end
  rescue Exception => e
    error "Error during discovery for #{lookupd}: #{e}"
    nil
  end
end
ruby
{ "resource": "" }
q26933
Nsq.ClientBase.discover_repeatedly
test
def discover_repeatedly(opts = {})
  @discovery_thread = Thread.new do
    @discovery = Discovery.new(opts[:nsqlookupds])
    loop do
      begin
        nsqds = nsqds_from_lookupd(opts[:topic])
        drop_and_add_connections(nsqds)
      rescue DiscoveryException
        # We can't connect to any nsqlookupds. That's okay, we'll just
        # leave our current nsqd connections alone and try again later.
        warn 'Could not connect to any nsqlookupd instances in discovery loop'
      end
      sleep opts[:interval]
    end
  end
  @discovery_thread.abort_on_exception = true
end
ruby
{ "resource": "" }
q26934
Nsq.Connection.with_retries
test
def with_retries(&block)
  base_sleep_seconds = 0.5
  max_sleep_seconds = 300 # 5 minutes

  # Let's do this thing
  attempts = 0

  begin
    attempts += 1
    return block.call(attempts)
  rescue Errno::ECONNREFUSED, Errno::ECONNRESET, Errno::EHOSTUNREACH,
         Errno::ENETDOWN, Errno::ENETUNREACH, Errno::ETIMEDOUT, Timeout::Error => ex
    raise ex if attempts >= 100

    # The sleep time is an exponentially-increasing function of base_sleep_seconds.
    # But, it never exceeds max_sleep_seconds.
    sleep_seconds = [base_sleep_seconds * (2 ** (attempts - 1)), max_sleep_seconds].min
    # Randomize to a random value in the range sleep_seconds/2 .. sleep_seconds
    sleep_seconds = sleep_seconds * (0.5 * (1 + rand()))
    # But never sleep less than base_sleep_seconds
    sleep_seconds = [base_sleep_seconds, sleep_seconds].max

    warn "Failed to connect: #{ex}. Retrying in #{sleep_seconds.round(1)} seconds."
    snooze(sleep_seconds)
    retry
  end
end
ruby
{ "resource": "" }
q26935
X12.Base.show
test
def show(ind = '')
  count = 0
  self.to_a.each{|i|
    #puts "#{ind}#{i.name} #{i.object_id} #{i.super.object_id} [#{count}]: #{i.parsed_str} #{i.super.class}"
    puts "#{ind}#{i.name} [#{count}]: #{i.to_s.sub(/^(.{30})(.*?)(.{30})$/, '\1...\3')}"
    # Force parsing a segment
    if i.kind_of?(X12::Segment) && i.nodes[0]
      i.find_field(i.nodes[0].name)
    end
    i.nodes.each{|j|
      case
      when j.kind_of?(X12::Base)  then j.show(ind+' ')
      when j.kind_of?(X12::Field) then puts "#{ind+' '}#{j.name} -> '#{j.to_s}'"
      end
    }
    count += 1
  }
end
ruby
{ "resource": "" }
q26936
X12.Base.do_repeats
test
def do_repeats(s)
  if self.repeats.end > 1
    possible_repeat = self.dup
    p_s = possible_repeat.parse(s)
    if p_s
      s = p_s
      self.next_repeat = possible_repeat
    end # if parsed
  end # more repeats
  s
end
ruby
{ "resource": "" }
q26937
X12.Base.find
test
def find(e)
  #puts "Finding [#{e}] in #{self.class} #{name}"
  case self
  when X12::Loop
    # Breadth first
    res = nodes.find{|i| e==i.name }
    return res if res
    # Depth now
    nodes.each{|i|
      res = i.find(e) if i.kind_of?(X12::Loop) # otherwise keep looping
      return res unless res.nil? or EMPTY==res
    }
  when X12::Segment
    return find_field(e).to_s
  end # case
  return EMPTY
end
ruby
{ "resource": "" }
q26938
X12.Base.method_missing
test
def method_missing(meth, *args, &block)
  str = meth.id2name
  str = str[1..str.length] if str =~ /^_\d+$/ # to avoid pure number names like 270, 997, etc.
  #puts "Missing #{str}"
  if str =~ /=$/
    # Assignment
    str.chop!
    #puts str
    case self
    when X12::Segment
      res = find_field(str)
      throw Exception.new("No field '#{str}' in segment '#{self.name}'") if EMPTY == res
      res.content = args[0].to_s
      #puts res.inspect
    else
      throw Exception.new("Illegal assignment to #{meth} of #{self.class}")
    end # case
  else
    # Retrieval
    res = find(str)
    yield res if block_given?
    res
  end # if assignment
end
ruby
{ "resource": "" }
q26939
X12.Segment.parse
test
def parse(str)
  s = str
  #puts "Parsing segment #{name} from #{s} with regexp [#{regexp.source}]"
  m = regexp.match(s)
  #puts "Matched #{m ? m[0] : 'nothing'}"
  return nil unless m

  s = m.post_match
  self.parsed_str = m[0]
  s = do_repeats(s)

  #puts "Parsed segment "+self.inspect
  return s
end
ruby
{ "resource": "" }
q26940
X12.Segment.render
test
def render
  self.to_a.inject(''){|repeat_str, i|
    if i.repeats.begin < 1 and !i.has_content?
      # Skip optional empty segments
      repeat_str
    else
      # Have to render no matter how empty
      repeat_str += i.name + i.nodes.reverse.inject(''){|nodes_str, j|
        field = j.render
        (j.required or nodes_str != '' or field != '') ? field_separator+field+nodes_str : nodes_str
      } + segment_separator
    end
  }
end
ruby
{ "resource": "" }
q26941
X12.Segment.regexp
test
def regexp
  unless @regexp
    if self.nodes.find{|i| i.type =~ /^".+"$/ }
      # It's a very special regexp if there are constant fields
      re_str = self.nodes.inject("^#{name}#{Regexp.escape(field_separator)}"){|s, i|
        field_re = i.simple_regexp(field_separator, segment_separator)+Regexp.escape(field_separator)+'?'
        field_re = "(#{field_re})?" unless i.required
        s+field_re
      } + Regexp.escape(segment_separator)
      @regexp = Regexp.new(re_str)
    else
      # Simple match
      @regexp = Regexp.new("^#{name}#{Regexp.escape(field_separator)}[^#{Regexp.escape(segment_separator)}]*#{Regexp.escape(segment_separator)}")
    end
    #puts sprintf("%s %p", name, @regexp)
  end
  @regexp
end
ruby
{ "resource": "" }
q26942
X12.Segment.find_field
test
def find_field(str)
  #puts "Finding field [#{str}] in #{self.class} #{name}"
  # If there is such a field to begin with
  field_num = nil
  self.nodes.each_index{|i|
    field_num = i if str == self.nodes[i].name
  }
  return EMPTY if field_num.nil?
  #puts field_num

  # Parse the segment if not parsed already
  unless @fields
    @fields = self.to_s.chop.split(Regexp.new(Regexp.escape(field_separator)))
    self.nodes.each_index{|i| self.nodes[i].content = @fields[i+1] }
  end
  #puts self.nodes[field_num].inspect
  return self.nodes[field_num]
end
ruby
{ "resource": "" }
q26943
X12.Parser.parse
test
def parse(loop_name, str)
  loop = @x12_definition[X12::Loop][loop_name]
  #puts "Loops to parse #{@x12_definition[X12::Loop].keys}"
  throw Exception.new("Cannot find a definition for loop #{loop_name}") unless loop
  loop = loop.dup
  loop.parse(str)
  return loop
end
ruby
{ "resource": "" }
q26944
X12.Parser.factory
test
def factory(loop_name)
  loop = @x12_definition[X12::Loop][loop_name]
  throw Exception.new("Cannot find a definition for loop #{loop_name}") unless loop
  loop = loop.dup
  return loop
end
ruby
{ "resource": "" }
q26945
X12.Parser.process_loop
test
def process_loop(loop)
  loop.nodes.each{|i|
    case i
    when X12::Loop    then process_loop(i)
    when X12::Segment then process_segment(i) unless i.nodes.size > 0
    else return
    end
  }
end
ruby
{ "resource": "" }
q26946
X12.Parser.process_segment
test
def process_segment(segment)
  #puts "Trying to process segment #{segment.inspect}"
  unless @x12_definition[X12::Segment] && @x12_definition[X12::Segment][segment.name]
    # Try to find it in a separate file if missing from the @x12_definition structure
    initialize(segment.name+'.xml')
    segment_definition = @x12_definition[X12::Segment][segment.name]
    throw Exception.new("Cannot find a definition for segment #{segment.name}") unless segment_definition
  else
    segment_definition = @x12_definition[X12::Segment][segment.name]
  end
  segment_definition.nodes.each_index{|i|
    segment.nodes[i] = segment_definition.nodes[i]
    # Make sure we have the validation table if any for this field. Try to read one in if missing.
    table = segment.nodes[i].validation
    if table
      unless @x12_definition[X12::Table] && @x12_definition[X12::Table][table]
        initialize(table+'.xml')
        throw Exception.new("Cannot find a definition for table #{table}") unless @x12_definition[X12::Table] && @x12_definition[X12::Table][table]
      end
    end
  }
end
ruby
{ "resource": "" }
q26947
X12.Loop.render
test
def render
  if self.has_content?
    self.to_a.inject(''){|loop_str, i|
      loop_str += i.nodes.inject(''){|nodes_str, j|
        nodes_str += j.render
      }
    }
  else
    ''
  end
end
ruby
{ "resource": "" }
q26948
Formbuilder.Entry.calculate_sortable_values
test
def calculate_sortable_values
  response_fieldable.input_fields.each do |response_field|
    if (x = response_value(response_field)).present?
      get_responses["#{response_field.id}_sortable_value"] = response_field.sortable_value(x)
    end
  end

  mark_responses_as_changed!
end
ruby
{ "resource": "" }
q26949
Formbuilder.Entry.normalize_responses
test
def normalize_responses
  return if form.blank?

  form.response_fields.each do |response_field|
    if (x = self.response_value(response_field))
      response_field.normalize_response(x, get_responses)
    end
  end

  mark_responses_as_changed!
end
ruby
{ "resource": "" }
q26950
Formbuilder.Entry.audit_responses
test
def audit_responses
  form.response_fields.each do |response_field|
    response_field.audit_response(self.response_value(response_field), get_responses)
  end

  mark_responses_as_changed!
end
ruby
{ "resource": "" }
q26951
Hexp.Builder.tag!
test
def tag!(tag, *args, &block)
  text, attributes = nil, {}
  args.each do |arg|
    case arg
    when ::Hash
      attributes.merge!(arg)
    when ::String
      text ||= ''
      text << arg
    end
  end
  @stack << [tag, attributes, text ? [text] : []]
  if block
    _process(&block)
  end
  if @stack.length > 1
    node = @stack.pop
    @stack.last[2] << node
    NodeBuilder.new(node, self)
  else
    NodeBuilder.new(@stack.last, self)
  end
end
ruby
{ "resource": "" }
q26952
Hexp.Builder.<<
test
def <<(*args)
  args.each do |arg|
    if arg.respond_to?(:to_hexp)
      @stack.last[2] << arg
      self
    else
      ::Kernel.raise ::Hexp::FormatError,
        "Inserting literal HTML into a builder with << is deliberately not supported by Hexp"
    end
  end
end
ruby
{ "resource": "" }
q26953
Hexp.Node.rewrite
test
def rewrite(css_selector = nil, &block)
  return Rewriter.new(self, block) if css_selector.nil?
  CssSelection.new(self, css_selector).rewrite(&block)
end
ruby
{ "resource": "" }
q26954
Hexp.Node.select
test
def select(css_selector = nil, &block)
  if css_selector
    CssSelection.new(self, css_selector).each(&block)
  else
    Selection.new(self, block)
  end
end
ruby
{ "resource": "" }
q26955
IronHide.Configuration.add_configuration
test
def add_configuration(config_hash)
  config_hash.each do |key, val|
    instance_eval { instance_variable_set("@#{key}", val) }
    self.class.instance_eval { attr_accessor key }
  end
end
ruby
{ "resource": "" }
q26956
PosixPsutil.POSIX.pid_exists
test
def pid_exists(pid)
  return false if pid < 0
  # According to "man 2 kill" PID 0 has a special meaning:
  # it refers to <<every process in the process group of the
  # calling process>> so we don't want to go any further.
  # If we get here it means this UNIX platform *does* have
  # a process with id 0.
  return true if pid == 0
  ::Process.kill(0, pid)
  return true
rescue Errno::ESRCH
  # No such process
  return false
rescue Errno::EPERM
  # EPERM clearly means there's a process to deny access to
  return true
rescue RangeError
  # the given pid is invalid.
  return false
end
ruby
{ "resource": "" }
q26957
PosixPsutil.POSIX.wait_pid
test
def wait_pid(pid, timeout=nil)
  def check_timeout(delay, stop_at, timeout)
    if timeout
      raise Timeout::Error.new("when waiting for (pid=#{pid})") if Time.now >= stop_at
    end
    sleep(delay)
    delay * 2 < 0.04 ? delay * 2 : 0.04
  end

  if timeout
    waitcall = proc { ::Process.wait(pid, ::Process::WNOHANG) }
    stop_at = Time.now + timeout
  else
    waitcall = proc { ::Process.wait(pid) }
  end

  delay = 0.0001
  loop do
    begin
      retpid = waitcall.call()
    rescue Errno::EINTR
      delay = check_timeout(delay, stop_at, timeout)
      next
    rescue Errno::ECHILD
      # This has two meanings:
      # - pid is not a child of Process.pid in which case
      #   we keep polling until it's gone
      # - pid never existed in the first place
      # In both cases we'll eventually return nil as we
      # can't determine its exit status code.
      loop do
        return nil unless pid_exists(pid)
        delay = check_timeout(delay, stop_at, timeout)
      end
    end

    unless retpid
      # WNOHANG was used, pid is still running
      delay = check_timeout(delay, stop_at, timeout)
      next
    end

    # process exited due to a signal; return the integer of
    # that signal
    if $?.signaled?
      return $?.termsig
    # process exited using exit(2) system call; return the
    # integer exit(2) system call has been called with
    elsif $?.exited?
      return $?.exitstatus
    else
      # should never happen
      raise RuntimeError.new("unknown process exit status")
    end
  end
end
ruby
{ "resource": "" }
q26958
RIM.UploadModuleHelper.upload_module_changes
test
def upload_module_changes(parent_sha1, sha1s)
  remote_path = fetch_module
  # search for the first revision that is not
  tmp_git_path = clone_or_fetch_repository(remote_path, module_tmp_git_path(@remote_path))
  RIM::git_session(tmp_git_path) do |dest|
    local_branch = nil
    remote_branch = nil
    infos = nil
    if @module_info.subdir
      dest_path = File.join([tmp_git_path] + @module_info.subdir.split("/"))
    else
      dest_path = tmp_git_path
    end
    RIM::git_session(@ws_root) do |src|
      infos = get_branches_and_revision_infos(src, dest, parent_sha1, sha1s)
      if infos.branches.size == 1
        remote_branch = infos.branches[0]
        if dest.has_remote_branch?(remote_branch)
          infos.rev_infos.each do |rev_info|
            local_branch = create_update_branch(dest, infos.parent_sha1, rev_info.src_sha1) if !local_branch
            copy_revision_files(src, rev_info.src_sha1, dest_path, rev_info.rim_info.ignores)
            commit_changes(dest, local_branch, rev_info.src_sha1, rev_info.message)
          end
        else
          raise RimException.new("The target revision '#{@module_info.target_revision}' of module #{@module_info.local_path} is not a branch. No push can be performed.")
        end
      elsif infos.branches.size > 1
        raise RimException.new("There are commits for module #{@module_info.local_path} on multiple target revisions (#{infos.branches.join(", ")}).")
      end
    end
    # Finally we're done. Push the changes
    if local_branch && dest.rev_sha1(local_branch) != infos.parent_sha1
      push_branch = @review && @module_info.remote_branch_format && !@module_info.remote_branch_format.empty? \
        ? @module_info.remote_branch_format % remote_branch : remote_branch
      dest.execute("git push #{@remote_url} #{local_branch}:#{push_branch}")
      dest.execute("git checkout --detach #{local_branch}")
      dest.execute("git branch -D #{local_branch}")
      @logger.info("Commited changes for module #{@module_info.local_path} to remote branch #{push_branch}.")
    else
      @logger.info("No changes to module #{@module_info.local_path}.")
    end
  end
end
ruby
{ "resource": "" }
q26959
RIM.UploadModuleHelper.get_branches_and_revision_infos
test
def get_branches_and_revision_infos(src_session, dest_session, parent_sha1, sha1s)
  infos = []
  branches = []
  dest_parent_sha1 = nil
  (sha1s.size() - 1).step(0, -1) do |i|
    info = get_revision_info(src_session, dest_session, sha1s[i])
    if !info.dest_sha1 && info.rim_info.target_revision
      infos.unshift(info)
      branches.push(info.rim_info.target_revision) if !branches.include?(info.rim_info.target_revision)
    else
      dest_parent_sha1 = info.dest_sha1
      break
    end
  end
  dest_parent_sha1 = get_riminfo_for_revision(src_session, parent_sha1).revision_sha1 if !dest_parent_sha1
  dest_parent_sha1 = infos.first.rim_info.revision_sha1 if !dest_parent_sha1 && !infos.empty?
  return Struct.new(:branches, :parent_sha1, :rev_infos).new(branches, dest_parent_sha1, infos)
end
ruby
{ "resource": "" }
q26960
RIM.UploadModuleHelper.get_revision_info
test
def get_revision_info(src_session, dest_session, src_sha1)
  module_status = StatusBuilder.new.rev_module_status(src_session, src_sha1, @module_info.local_path)
  rim_info = get_riminfo_for_revision(src_session, src_sha1)
  dest_sha1 = dest_session.rev_sha1("rim-#{src_sha1}")
  msg = src_session.execute("git show -s --format=%B #{src_sha1}")
  RevisionInfo.new(module_status && module_status.dirty? ? dest_sha1 : rim_info.revision_sha1, src_sha1, rim_info, msg)
end
ruby
{ "resource": "" }
q26961
RIM.UploadModuleHelper.commit_changes
test
def commit_changes(session, branch, sha1, msg)
  if session.status.lines.any?
    # add before commit because the path can be below a not yet added path
    session.execute("git add --all")
    msg_file = Tempfile.new('message')
    begin
      msg_file << msg
      msg_file.close
      session.execute("git commit -F #{msg_file.path}")
    ensure
      msg_file.close(true)
    end
    # create tag
    session.execute("git tag rim-#{sha1} refs/heads/#{branch}")
  end
end
ruby
{ "resource": "" }
q26962
RIM.UploadModuleHelper.get_riminfo_for_revision
test
def get_riminfo_for_revision(session, sha1)
  session.execute("git show #{sha1}:#{File.join(@module_info.local_path, RimInfo::InfoFileName)}") do |out, e|
    return RimInfo.from_s(!e ? out : "")
  end
end
ruby
{ "resource": "" }
q26963
RIM.UploadModuleHelper.copy_revision_files
test
def copy_revision_files(src_session, src_sha1, dest_dir, ignores)
  Dir.mktmpdir do |tmp_dir|
    tmp_dir = Dir.glob(tmp_dir)[0]
    src_session.execute("git archive --format tar #{src_sha1} #{@module_info.local_path} | tar -C #{tmp_dir} -xf -")
    tmp_module_dir = File.join(tmp_dir, @module_info.local_path)
    files = FileHelper.find_matching_files(tmp_module_dir, false, "/**/*", File::FNM_DOTMATCH)
    files.delete(".")
    files.delete("..")
    files.delete(RimInfo::InfoFileName)
    files -= FileHelper.find_matching_files(tmp_module_dir, false, ignores)
    # have source files now. Now clear destination folder and copy
    prepare_empty_folder(dest_dir, ".git/**/*")
    files.each do |f|
      src_path = File.join(tmp_module_dir, f)
      if File.file?(src_path)
        path = File.join(dest_dir, f)
        FileUtils.mkdir_p(File.dirname(path))
        FileUtils.cp(src_path, path)
      end
    end
  end
end
ruby
{ "resource": "" }
q26964
RIM.StatusBuilder.rev_history_status
test
def rev_history_status(git_session, rev, options={})
  stop_rev = options[:stop_rev]
  relevant_revs = {}
  if stop_rev
    git_session.execute("git rev-list #{rev} \"^#{stop_rev}\"").split("\n").each do |r|
      relevant_revs[r] = true
    end
  elsif options[:gerrit]
    # in gerrit mode, stop on all known commits
    git_session.execute("git rev-list #{rev} --not --all --").split("\n").each do |r|
      relevant_revs[r] = true
    end
  else
    # remote revs are where we stop traversal
    git_session.all_reachable_non_remote_revs(rev).each do |r|
      relevant_revs[r] = true
    end
  end
  # make sure we deal only with sha1s
  rev = git_session.rev_sha1(rev)
  build_rev_history_status(git_session, rev, relevant_revs, {}, :fast => options[:fast])
end
ruby
{ "resource": "" }
q26965
RIM.StatusBuilder.rev_status
test
def rev_status(git_session, rev)
  mod_dirs = module_dirs(git_session, rev)
  mod_stats = []
  # export all relevant modules at once
  # this makes status calculation significantly faster compared
  # to exporting each module separately
  # (e.g. 1.0s instead of 1.5s on linux for a commit with 20 modules)
  git_session.within_exported_rev(rev, mod_dirs) do |d|
    mod_dirs.each do |rel_path|
      mod_stats << build_module_status(d, d+"/"+rel_path)
    end
  end
  stat = RevStatus.new(mod_stats)
  stat.git_rev = git_session.rev_sha1(rev)
  stat
end
ruby
{ "resource": "" }
q26966
RIM.StatusBuilder.rev_module_status
test
def rev_module_status(git_session, rev, local_path)
  mod_stat = nil
  if git_session.execute("git ls-tree -r --name-only #{rev}").split("\n").include?(File.join(local_path, ".riminfo"))
    git_session.within_exported_rev(rev, [local_path]) do |d|
      mod_stat = build_module_status(d, File.join(d, local_path))
    end
  end
  mod_stat
end
ruby
{ "resource": "" }
q26967
RIM.StatusBuilder.fs_status
test
def fs_status(dir)
  RevStatus.new(
    fs_rim_dirs(dir).collect { |d| build_module_status(dir, d) })
end
ruby
{ "resource": "" }
q26968
RIM.StatusBuilder.build_rev_history_status
test
def build_rev_history_status(gs, rev, relevant_revs, status_cache={}, options={})
  return status_cache[rev] if status_cache[rev]
  stat = nil
  if relevant_revs[rev]
    parent_revs = gs.parent_revs(rev)
    if parent_revs.size > 0
      # build status for all parent nodes
      parent_stats = parent_revs.collect do |p|
        build_rev_history_status(gs, p, relevant_revs, status_cache, options)
      end
      # if this is a merge commit with multiple parents
      # we decide to use the first commit (git primary parent)
      # note that it's not really important, which one we choose
      # just make sure to use the same commit when checking for changed files
      base_stat = parent_stats.first
      changed_files = gs.changed_files(rev, parent_revs.first)
      # build list of modules in this commit
      module_dirs = base_stat.modules.collect{|m| m.dir}
      changed_files.each do |f|
        if File.basename(f.path) == RimInfo::InfoFileName
          if f.kind == :added
            module_dirs << File.dirname(f.path)
          elsif f.kind == :deleted
            module_dirs.delete(File.dirname(f.path))
          end
        end
      end
      # a module needs to be checked if any of the files within were touched
      check_dirs = module_dirs.select{|d| changed_files.any?{|f| f.path.start_with?(d)} }
      module_stats = []
      # check out all modules to be checked at once
      if check_dirs.size > 0
        gs.within_exported_rev(rev, check_dirs) do |ws|
          check_dirs.each do |d|
            module_stats << build_module_status(ws, File.join(ws, d))
          end
        end
      end
      (module_dirs - check_dirs).each do |d|
        base_mod = base_stat.modules.find{|m| m.dir == d}
        module_stats << RevStatus::ModuleStatus.new(d, base_mod.rim_info, base_mod.dirty?)
      end
      stat = RevStatus.new(module_stats)
      stat.git_rev = gs.rev_sha1(rev)
      stat.parents.concat(parent_stats)
    else
      # no parents, need to do a full check
      if options[:fast]
        stat = rev_status_fast(gs, rev)
      else
        stat = rev_status(gs, rev)
      end
    end
  else
    # first "non-relevant", do the full check
    if options[:fast]
      stat = rev_status_fast(gs, rev)
    else
      stat = rev_status(gs, rev)
    end
  end
  status_cache[rev] = stat
end
ruby
{ "resource": "" }
q26969
RIM.StatusBuilder.rev_status_fast
test
def rev_status_fast(git_session, rev)
  mod_dirs = module_dirs(git_session, rev)
  mod_stats = []
  git_session.within_exported_rev(rev, mod_dirs.collect{|d| "#{d}/#{RimInfo::InfoFileName}"}) do |temp_dir|
    mod_dirs.each do |rel_path|
      mod_stats << RevStatus::ModuleStatus.new(
        rel_path,
        RimInfo.from_dir("#{temp_dir}/#{rel_path}"),
        # never dirty
        false
      )
    end
  end
  stat = RevStatus.new(mod_stats)
  stat.git_rev = git_session.rev_sha1(rev)
  stat
end
ruby
{ "resource": "" }
q26970
RIM.SyncHelper.sync
test
def sync(message = nil, rebase = nil, split = true)
  # get the name of the current workspace branch
  RIM::git_session(@ws_root) do |s|
    branch = s.current_branch || ''
    rim_branch = "rim/" + branch
    branch_sha1 = nil
    changed_modules = nil
    if branch.empty?
      raise RimException.new("Not on a git branch.")
    elsif branch.start_with?("rim/")
      raise RimException.new("The current git branch '#{branch}' is a rim integration branch. Please switch to a non rim branch to proceed.")
    else
      branch = "refs/heads/#{branch}"
      branch_sha1 = s.rev_sha1(rim_branch)
      remote_rev = get_latest_remote_revision(s, branch)
      rev = get_latest_clean_path_revision(s, branch, remote_rev)
      if !s.has_branch?(rim_branch) || has_ancestor?(s, branch, s.rev_sha1(rim_branch)) || !has_ancestor?(s, rim_branch, remote_rev)
        s.execute("git branch -f #{rim_branch} #{rev}")
        branch_sha1 = s.rev_sha1(rim_branch)
      end
      remote_url = "file://" + @ws_root
      @logger.debug("Folder for temporary git repositories: #{@rim_path}")
      tmpdir = clone_or_fetch_repository(remote_url, module_tmp_git_path(".ws"), "Cloning workspace git...")
      RIM::git_session(tmpdir) do |tmp_session|
        tmp_session.execute("git reset --hard")
        tmp_session.execute("git clean -xdf")
        # use -f here to prevent git checkout from checking for untracked files which might be overwritten.
        # this is safe since we removed any untracked files before.
        # this is a workaround for a name case problem on windows:
        # if a file's name changes case between the current head and the checkout target,
        # git checkout will report the file with the new name as untracked and will fail
        tmp_session.execute("git checkout -B #{rim_branch} -f remotes/origin/#{rim_branch}")
        changed_modules = sync_modules(tmp_session, message)
        if !split
          tmp_session.execute("git reset --soft #{branch_sha1}")
          commit(tmp_session, message ? message : get_commit_message(changed_modules)) if tmp_session.uncommited_changes?
        end
        tmp_session.execute("git push #{remote_url} #{rim_branch}:#{rim_branch}")
      end
    end
    if !changed_modules.empty?
      if rebase
        s.execute("git rebase #{rim_branch}")
        @logger.info("Changes have been commited to branch #{rim_branch} and workspace has been rebased successfully.")
      else
        @logger.info("Changes have been commited to branch #{rim_branch}. Rebase to apply changes to workspace.")
      end
    else
      @logger.info("No changes.")
    end
  end
end
ruby
{ "resource": "" }
q26971
RIM.SyncHelper.sync_modules
test
def sync_modules(session, message)
  module_helpers = []
  @module_infos.each do |module_info|
    module_helpers.push(SyncModuleHelper.new(session.execute_dir, @ws_root, module_info, @logger))
  end
  changed_modules = []
  module_helpers.each do |m|
    @logger.info("Synchronizing #{m.module_info.local_path}...")
    if m.sync(message)
      changed_modules << m.module_info
    end
  end
  changed_modules
end
ruby
{ "resource": "" }
q26972
RIM.SyncHelper.has_ancestor?
test
def has_ancestor?(session, rev, ancestor)
  # make sure we deal only with sha1s
  rev = session.rev_sha1(rev)
  return rev == ancestor || session.is_ancestor?(ancestor, rev)
end
ruby
{ "resource": "" }
q26973
RIM.SyncHelper.get_parent
test
def get_parent(session, rev)
  parents = session.parent_revs(rev)
  !parents.empty? ? parents.first : nil
end
ruby
{ "resource": "" }
q26974
RIM.SyncHelper.get_commit_message
test
def get_commit_message(changed_modules)
  StringIO.open do |s|
    s.puts "rim sync."
    s.puts
    changed_modules.each do |m|
      s.puts m.local_path
    end
    s.string
  end
end
ruby
{ "resource": "" }
q26975
PosixPsutil.PlatformSpecificProcess.pmmap_ext
test
def pmmap_ext(data)
  pmmap_ext = ['addr', 'perms', 'path', 'rss', 'size', 'pss',
               'shared_clean', 'shared_dirty', 'private_clean',
               'private_dirty', 'referenced', 'anonymous', 'swap']
  os_list = []
  data.each do |datum|
    os = OpenStruct.new
    pmmap_ext.each_index {|i| os[pmmap_ext[i]] = datum[i]}
    os_list.push(os)
  end
  os_list
end
ruby
{ "resource": "" }
q26976
PosixPsutil.PlatformSpecificProcess.pmmap_grouped
test
def pmmap_grouped(data)
  pmmap_grouped = ['rss', 'size', 'pss', 'shared_clean', 'shared_dirty',
                   'private_clean', 'private_dirty', 'referenced',
                   'anonymous', 'swap']
  os_list = []
  data.each do |k, v|
    os = OpenStruct.new
    os.path = k
    pmmap_grouped.each_index {|i| os[pmmap_grouped[i]] = v[i]}
    os_list.push(os)
  end
  os_list
end
ruby
{ "resource": "" }
q26977
RIM.DirtyCheck.calc_checksum
test
def calc_checksum(mi, dir)
  if check_required_attributes(mi)
    sha1 = Digest::SHA1.new
    # all files and directories within dir
    files = FileHelper.find_matching_files(dir, false, "/**/*", File::FNM_DOTMATCH)
    # Dir.glob with FNM_DOTMATCH might return . and ..
    files.delete(".")
    files.delete("..")
    # ignore the info file itself
    files.delete(RimInfo::InfoFileName)
    # ignores defined by user
    files -= FileHelper.find_matching_files(dir, false, mi.ignores)
    # order of files makes a difference
    # sort to eliminate platform specific glob behavior
    files.sort!
    files.each do |fn|
      update_file(sha1, dir, fn)
    end
    ChecksumAttributes.each do |a|
      sha1.update(mi.send(a))
    end
    sha1.hexdigest
  else
    # can't calc checksum
    nil
  end
end
ruby
{ "resource": "" }
q26978
RIM.GitSession.current_branch
test
def current_branch
  out = execute "git branch"
  out.split("\n").each do |l|
    if !l.include?('(') && (l =~ /^\*\s+(\S+)/)
      return $1
    end
  end
  nil
end
ruby
{ "resource": "" }
q26979
RIM.GitSession.has_remote_branch?
test
def has_remote_branch?(branch)
  out = execute("git ls-remote --heads")
  out.split("\n").each do |l|
    return true if l.split(/\s+/)[1] == "refs/heads/#{branch}"
  end
  false
end
ruby
{ "resource": "" }
q26980
RIM.GitSession.rev_sha1
test
def rev_sha1(rev)
  sha1 = nil
  execute "git rev-list -n 1 #{rev} --" do |out, e|
    sha1 = out.strip if !e
  end
  sha1
end
ruby
{ "resource": "" }
q26981
RIM.GitSession.rev_infos
test
def rev_infos(rev, desired)
  info = {}
  desired.each_pair do |key, value|
    execute "git log -1 --format=#{value} #{rev} --" do |out, e|
      info[key] = out.strip if !e
    end
  end
  info
end
ruby
{ "resource": "" }
q26982
RIM.GitSession.remote_branch_revs
test
def remote_branch_revs
  out = execute "git show-ref"
  out.split("\n").collect { |l|
    if l =~ /refs\/remotes\//
      l.split[0]
    else
      nil
    end
  }.compact
end
ruby
{ "resource": "" }
q26983
RIM.GitSession.export_rev
test
def export_rev(rev, dir, paths=[])
  paths = paths.dup
  loop do
    path_args = ""
    # max command line length on Windows XP and higher is 8191
    # consider the following extra characters which will be added:
    # up to 3 paths in execute, 1 path for tar, max path length 260 = 1040
    # plus some "glue" characters, plus the last path item with 260 max;
    # use 6000 to be on the safe side
    while !paths.empty? && path_args.size < 6000
      path_args << " "
      path_args << paths.shift
    end
    execute "git archive --format tar #{rev} #{path_args} | tar -C #{dir} -xf -"
    break if paths.empty?
  end
end
ruby
{ "resource": "" }
q26984
RIM.GitSession.within_exported_rev
test
def within_exported_rev(rev, paths=[])
  Dir.mktmpdir("rim") do |d|
    d = Dir.glob(d)[0]
    c = File.join(d, "content")
    FileUtils.mkdir(c)
    export_rev(rev, c, paths)
    # return contents of yielded block
    # mktmpdir returns value return by our block
    yield c
    FileUtils.rm_rf(c)
    # retry to delete if it hasn't been deleted yet
    # this could be due to Windows keeping the files locked for some time
    # this is especially a problem if the machine is at its limits
    retries = 600
    while File.exist?(c) && retries > 0
      sleep(0.1)
      FileUtils.rm_rf(c)
      retries -= 1
    end
    if File.exist?(c)
      @logger.warn "could not delete temp dir: #{c}"
    end
  end
end
ruby
{ "resource": "" }
q26985
RIM.UploadHelper.upload
test
def upload
  # get the name of the current workspace branch
  RIM::git_session(@ws_root) do |s|
    branch = s.current_branch
    if branch.nil?
      raise RimException.new("Not on a git branch.")
    elsif !branch.start_with?("rim/")
      begin
        sha1 = s.rev_sha1(branch)
        @logger.info("Uploading modules...")
        upload_modules(get_upload_revisions(s, sha1))
      ensure
        s.execute("git checkout -B #{branch}")
      end
    else
      raise RimException.new("The current git branch '#{branch}' is a rim integration branch. Please switch to a non rim branch to proceed.")
    end
  end
end
ruby
{ "resource": "" }
q26986
RIM.UploadHelper.upload_modules
test
def upload_modules(info)
  each_module_parallel("uploading", @module_helpers) do |m|
    m.upload(info.parent, info.sha1s)
  end
end
ruby
{ "resource": "" }
q26987
RIM.UploadHelper.get_upload_revisions
test
def get_upload_revisions(session, rev)
  # remote revs are where we stop traversal
  non_remote_revs = {}
  session.all_reachable_non_remote_revs(rev).each do |r|
    non_remote_revs[r] = true
  end
  revisions = []
  # make sure we deal only with sha1s
  rev = session.rev_sha1(rev)
  while rev && non_remote_revs[rev]
    revisions.push(rev)
    parents = session.parent_revs(rev)
    rev = parents.size > 0 ? parents.first : nil
  end
  Struct.new(:parent, :sha1s).new(rev, revisions.reverse!)
end
ruby
{ "resource": "" }
q26988
RIM.ModuleHelper.fetch_module
test
def fetch_module
  FileUtils.mkdir_p git_path
  RIM::git_session(git_path) do |s|
    if !File.exist?(git_path + "/config")
      s.execute("git clone --mirror #{@remote_url} #{git_path}") do |out, e|
        raise RimException.new("Remote repository '#{@remote_url}' of module '#{@module_info.local_path}' not found.") if e
      end
    else
      s.execute("git remote update")
    end
  end
  git_path
end
ruby
{ "resource": "" }
q26989
Cranky.Job.assign
test
def assign(attribute, value)
  unless value == :skip || attribute == :class
    if item.respond_to?("#{attribute}=")
      item.send("#{attribute}=", value)
    elsif item.is_a?(Hash)
      item[attribute] = value
    end
  end
end
ruby
{ "resource": "" }
q26990
Cranky.FactoryBase.debug
test
def debug(*args)
  item = build(*args)
  invalid_item = Array(item).find(&:invalid?)
  if invalid_item
    if invalid_item.errors.respond_to?(:messages)
      errors = invalid_item.errors.messages
    else
      errors = invalid_item.errors
    end
    raise "Oops, the #{invalid_item.class} created by the Factory has the following errors: #{errors}"
  end
  item
end
ruby
{ "resource": "" }
q26991
Cranky.FactoryBase.crank_it
test
def crank_it(what, overrides)
  if what.to_s =~ /(.*)_attrs$/
    what = $1
    overrides = overrides.merge(:_return_attributes => true)
  end
  item = "TBD"
  new_job(what, overrides) do
    item = self.send(what) # Invoke the factory method
    item = apply_traits(what, item)
  end
  item
end
ruby
{ "resource": "" }
q26992
Pres.ViewDelegation.method_missing
test
def method_missing(method, *args, &block)
  if view_context.respond_to?(method, true)
    view_context.send(method, *args, &block)
  else
    super
  end
end
ruby
{ "resource": "" }
q26993
Pres.Presents.present
test
def present(object, presenter: nil, **args)
  if object.respond_to?(:to_ary)
    object.map { |item| present(item, presenter: presenter, **args) }
  else
    presenter ||= presenter_klass(object)
    wrapper = presenter.new(object, view_context, **args)
    block_given? ? yield(wrapper) : wrapper
  end
end
ruby
{ "resource": "" }
q26994
GemPublisher.Pusher.push
test
def push(gem, method, options = {})
  push_command = PUSH_METHODS[method.to_s] or raise "Unknown Gem push method #{method.inspect}."
  push_command += [gem]
  push_command += ["--as", options[:as]] if options[:as]
  @cli_facade.execute(*push_command)
end
ruby
{ "resource": "" }
q26995
Interpolation.OneDimensional.interpolate
test
def interpolate interpolant
  case @opts[:type]
  when :linear
    for_each (interpolant) { |x| linear_interpolation(x) }
  when :cubic
    cubic_spline_interpolation interpolant
  else
    raise ArgumentError, "1 D interpolation of type #{@opts[:type]} not supported"
  end
end
ruby
{ "resource": "" }
q26996
Asciidoctor::IncludeExt.IncludeProcessor.lines_selector_for
test
def lines_selector_for(target, attributes)
  if (klass = @selectors.find { |s| s.handles? target, attributes })
    klass.new(target, attributes, logger: logger)
  end
end
ruby
{ "resource": "" }
q26997
Bottleneck.Core.run
test
def run
  client_ip = @ip
  key = "request_count:#{client_ip}"
  result = { status: Constants::SUCCESS_STATUS, message: Constants::OK_MESSAGE }

  requests_count = @storage.get(key)
  unless requests_count
    @storage.set(key, 0)
    @storage.expire(key, @limits["time_period_seconds"])
  end

  if requests_count.to_i >= @limits["max_requests_count"]
    result[:status] = Constants::EXPIRED_STATUS
    result[:message] = message(period(key))
  else
    @storage.incr(key)
  end

  result
end
ruby
{ "resource": "" }
q26998
Informant.Standard.date_select
test
def date_select(method, options = {})
  options[:include_blank] ||= false
  options[:start_year]    ||= 1801
  options[:end_year]      ||= Time.now.year
  options[:label_for]       = "#{object_name}_#{method}_1i"
  build_shell(method, options) { super }
end
ruby
{ "resource": "" }
q26999
Informant.Standard.label
test
def label(method, text = nil, options = {})
  colon = false if options[:colon].nil?
  options[:for] = options[:label_for]
  required = options[:required]

  # remove special options
  options.delete :colon
  options.delete :label_for
  options.delete :required

  text = @template.send(:h, text.blank?? method.to_s.humanize : text.to_s)
  text << ':'.html_safe if colon
  text << @template.content_tag(:span, "*", :class => "required") if required
  super
end
ruby
{ "resource": "" }