_id (string, 2–6 chars) | title (string, 9–130 chars) | partition (3 classes) | text (string, 30–4.3k chars) | language (1 class) | meta_information (dict) |
---|---|---|---|---|---|
q26900 | Easyzpl.StoredLabel.add_field | test | def add_field(value)
return if value.nil?
return if value.strip.empty?
# Increment the variable field count
self.variable_fields_count += 1
# | ruby | {
"resource": ""
} |
q26901 | Mongoid.Slug.build_slug | test | def build_slug
if localized?
begin
orig_locale = I18n.locale
all_locales.each do |target_locale|
I18n.locale = target_locale
apply_slug
| ruby | {
"resource": ""
} |
q26902 | Mongoid.Slug.new_with_slugs? | test | def new_with_slugs?
if localized?
# We need to check if slugs are present for the locale without falling back
# to a default
| ruby | {
"resource": ""
} |
q26903 | Mongoid.Slug.persisted_with_slug_changes? | test | def persisted_with_slug_changes?
if localized?
changes = _slugs_change
return (persisted? && false) if changes.nil?
# ensure we check for changes only between the same locale
original = changes.first.try(:fetch, I18n.locale.to_s, nil)
| ruby | {
"resource": ""
} |
q26904 | ETL.Util.distance_of_time_in_words | test | def distance_of_time_in_words(from_time, to_time=Time.now)
from_time = from_time.to_time if from_time.respond_to?(:to_time)
to_time = to_time.to_time if to_time.respond_to?(:to_time)
seconds = (to_time - from_time).round
distance_in_days = (seconds/(60*60*24)).round
seconds = seconds % (60*60*24)
| ruby | {
"resource": ""
} |
q26905 | ETL.Util.approximate_distance_of_time_in_words | test | def approximate_distance_of_time_in_words(from_time, to_time=Time.now, include_seconds=true)
from_time = from_time.to_time if from_time.respond_to?(:to_time)
to_time = to_time.to_time if to_time.respond_to?(:to_time)
distance_in_minutes = (((to_time - from_time).abs)/60).round
distance_in_seconds = ((to_time - from_time).abs).round
case distance_in_minutes
when 0..1
return (distance_in_minutes == 0) ? 'less than a minute' : '1 minute' unless include_seconds
case distance_in_seconds
when 0..4 then 'less than 5 seconds'
when 5..9 then 'less than 10 seconds'
when 10..19 then 'less than 20 seconds'
when 20..39 then 'half a minute'
when 40..59 then 'less than a minute'
else '1 minute'
end
when 2..44 then "#{distance_in_minutes} minutes"
when 45..89 | ruby | {
"resource": ""
} |
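The two ETL::Util rows above are cut off mid-`case`. As a rough, self-contained sketch of the same range-bucketing idea (the branch wording beyond 45 minutes is assumed here, not taken from the library):

```ruby
# Stand-alone sketch of range-based time bucketing in the spirit of the
# truncated ETL::Util helpers above; branches past 45 minutes are assumptions.
def rough_distance_in_words(from_time, to_time = Time.now)
  seconds = (to_time - from_time).abs.round
  minutes = (seconds / 60.0).round

  case minutes
  when 0..1
    case seconds
    when 0..4   then 'less than 5 seconds'
    when 5..9   then 'less than 10 seconds'
    when 10..19 then 'less than 20 seconds'
    when 20..39 then 'half a minute'
    when 40..59 then 'less than a minute'
    else             '1 minute'
    end
  when 2..44  then "#{minutes} minutes"
  when 45..89 then 'about 1 hour'
  else             "about #{(minutes / 60.0).round} hours"
  end
end

rough_distance_in_words(Time.now - 90)  # => "2 minutes"
```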
q26906 | ETL.Engine.track_error | test | def track_error(control, msg)
errors << msg
control.error_handlers.each do |handler|
| ruby | {
"resource": ""
} |
q26907 | ETL.Engine.process_batch | test | def process_batch(batch)
batch = ETL::Batch::Batch.resolve(batch, self)
say "Processing batch #{batch.file}"
ETL::Engine.batch = ETL::Execution::Batch.create!(
:batch_file => batch.file,
:status => 'executing'
)
| ruby | {
"resource": ""
} |
q26908 | ETL.Engine.pre_process | test | def pre_process(control)
Engine.logger.debug "Pre-processing #{control.file}"
control.pre_processors.each do |processor|
| ruby | {
"resource": ""
} |
q26909 | ETL.Engine.post_process | test | def post_process(control)
say_on_own_line "Executing post processes"
Engine.logger.debug "Post-processing #{control.file}"
control.post_processors.each do |processor|
processor.process | ruby | {
"resource": ""
} |
q26910 | ETL.Engine.execute_dependencies | test | def execute_dependencies(control)
Engine.logger.debug "Executing dependencies"
control.dependencies.flatten.each do |dependency|
case dependency
when Symbol
f = dependency.to_s + '.ctl'
Engine.logger.debug "Executing dependency: #{f}"
say "Executing dependency: #{f}"
process(f)
| ruby | {
"resource": ""
} |
q26911 | ETL.Engine.execute_screens | test | def execute_screens(control, timing = :before_post_process)
screens = case timing
when :after_post_process
control.after_post_process_screens
else # default to before post-process screens
control.screens
end
[:fatal,:error,:warn].each do |type|
screens[type].each do |block|
begin
block.call
rescue => e
case type
| ruby | {
"resource": ""
} |
q26912 | RedisModelExtension.ClassInitialize.redis_key | test | def redis_key *fields
@redis_key_config = fields.flatten
validate_redis_key
#own specification of redis key - delete autoincrement
remove_redis_autoincrement_key unless redis_user_field_config.include?(:id) || @redis_key_config.include?(:id) | ruby | {
"resource": ""
} |
q26913 | RedisModelExtension.ClassInitialize.redis_key_normalize | test | def redis_key_normalize *metrics
@redis_key_normalize_conf ||= []
metrics.each do |metric|
raise ArgumentError, | ruby | {
"resource": ""
} |
q26914 | RedisModelExtension.ClassInitialize.redis_alias | test | def redis_alias name, main_fields, name_of_field_for_order = nil, name_of_field_for_args = nil
#set fields if they are not already set!
if name_of_field_for_order && name_of_field_for_args
redis_field name_of_field_for_order, :array, [] unless redis_fields_config.has_key?(name_of_field_for_order)
redis_field name_of_field_for_args, :hash, {} unless redis_fields_config.has_key?(name_of_field_for_args)
end
@redis_alias_config ||= {}
#add specification of | ruby | {
"resource": ""
} |
q26915 | RedisModelExtension.StoreKeys.store_redis_keys | test | def store_redis_keys
args = to_arg
#store main key
redis_old_keys[:key] = self.class.generate_key(args) #store main key
#store alias keys
redis_old_keys[:aliases] = []
redis_alias_config.each | ruby | {
"resource": ""
} |
q26916 | RedisModelExtension.ClassOldInitialize.conf | test | def conf
fields = {}
redis_fields_config.each do |key, type|
fields[key] = TYPE_TRANSLATIONS[type] if TYPE_TRANSLATIONS.has_key?(type)
end
{
| ruby | {
"resource": ""
} |
q26917 | RedisModelExtension.ClassRedisKey.exists? | test | def exists? args = {}
RedisModelExtension::Database.redis.exists(self.name.con | ruby | {
"resource": ""
} |
q26918 | RedisModelExtension.ClassRedisKey.alias_exists? | test | def alias_exists? alias_name, args = {}
RedisModelExtension::Database.redis.exists(self.name | ruby | {
"resource": ""
} |
q26919 | RedisModelExtension.ClassValidations.valid_item_for_redis_key? | test | def valid_item_for_redis_key? args, key
(args.has_key?(key) | ruby | {
"resource": ""
} |
q26920 | RedisModelExtension.ClassValidations.validate_redis_key | test | def validate_redis_key
valid_fields = redis_fields_config.select{|k,v| v != :array && v != :hash }.keys
bad_fields = redis_key_config - valid_fields
raise ArgumentError, "Sorry, but you cannot use as redis key [nonexisting | ruby | {
"resource": ""
} |
q26921 | RedisModelExtension.Attributes.to_arg | test | def to_arg
redis_fields_config.inject({}) do |args, (key, type)|
| ruby | {
"resource": ""
} |
q26922 | RedisModelExtension.ClassGetFind.find_by_alias | test | def find_by_alias(alias_name, args = {})
#check if asked dynamic alias exists
raise ArgumentError, "Unknown dynamic alias: '#{alias_name}', use: #{redis_alias_config.keys.join(", ")} " unless redis_alias_config.has_key?(alias_name.to_sym)
#normalize input hash of arguments
args = HashWithIndifferentAccess.new(args)
| ruby | {
"resource": ""
} |
q26923 | RedisModelExtension.ClassGetFind.get | test | def get(args = {})
# when argument is integer - search by id
args = { id: args } if args.is_a?(Integer)
#normalize input hash of arguments
args = HashWithIndifferentAccess.new(args)
klass = | ruby | {
"resource": ""
} |
q26924 | RedisModelExtension.ClassGetFind.get_by_alias_key | test | def get_by_alias_key(alias_key)
klass = self.name.constantize
if RedisModelExtension::Database.redis.exists(alias_key)
out = []
RedisModelExtension::Database.redis.smembers(alias_key).each do |key|
| ruby | {
"resource": ""
} |
q26925 | RedisModelExtension.ClassGetFind.new_by_key | test | def new_by_key(key)
args = RedisModelExtension::Database.redis.hgetall(key)
return nil unless args && args.any?
args.symbolize_keys!
| ruby | {
"resource": ""
} |
q26926 | RedisModelExtension.ValueTransform.value_to_redis | test | def value_to_redis name, value
if redis_fields_config.has_key?(name)
value_transform | ruby | {
"resource": ""
} |
q26927 | RedisModelExtension.ValueTransform.value_transform | test | def value_transform value, type
return nil if value.nil? || value.to_s.size == 0
case type
when :integer then value.to_i
when :autoincrement then value.to_i
when :string then value.to_s
when :float then value.to_f
when :bool then value.to_s
when :symbol then value.to_s
when :marshal then Marshal.dump(value)
when :array then Yajl::Encoder.encode(value)
| ruby | {
"resource": ""
} |
q26928 | RedisModelExtension.ValueTransform.value_parse | test | def value_parse value, type
return nil if value.nil? || value.to_s.size == 0
case type
when :integer then value.to_i
when :autoincrement then value.to_i
when :string then value.to_s
when :float then value.to_f
when :bool then value.to_s.to_bool
when :symbol then value.to_s.to_sym
when :marshal then value.is_a?(String) ? Marshal.load(value) : value
when :array then value.is_a?(String) ? Yajl::Parser.parse(value) : value
| ruby | {
"resource": ""
} |
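The `value_to_redis`/`value_transform`/`value_parse` trio above dispatches on a declared field type in both directions. A dependency-free sketch of that round trip, substituting the stdlib JSON module for Yajl purely to keep the example self-contained:

```ruby
require 'json'

# Cast a Ruby value into a storable string form, keyed by a declared type.
def cast_for_storage(value, type)
  return nil if value.nil? || value.to_s.empty?

  case type
  when :integer then value.to_i
  when :float   then value.to_f
  when :string, :symbol, :bool then value.to_s
  when :marshal then Marshal.dump(value)
  when :array   then JSON.generate(value)
  else value
  end
end

# Parse the stored form back into a Ruby value of the declared type.
def parse_from_storage(value, type)
  return nil if value.nil? || value.to_s.empty?

  case type
  when :integer then value.to_i
  when :float   then value.to_f
  when :symbol  then value.to_s.to_sym
  when :bool    then %w[true 1].include?(value.to_s)
  when :marshal then value.is_a?(String) ? Marshal.load(value) : value
  when :array   then value.is_a?(String) ? JSON.parse(value) : value
  else value
  end
end

parse_from_storage(cast_for_storage([1, 2, 3], :array), :array)  # => [1, 2, 3]
```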
q26929 | RedisModelExtension.SaveDestroy.update | test | def update args
args.each do |key, value|
method = "#{key}=".to_sym
if self.respond_to? method
| ruby | {
"resource": ""
} |
q26930 | RedisModelExtension.SaveDestroy.destroy_aliases! | test | def destroy_aliases!
#do this only for an existing object!
if redis_old_keys[:aliases].size > 0
redis_old_keys[:aliases].each do |alias_key|
RedisModelExtension::Database.redis.srem alias_key, redis_old_keys[:key]
#delete alias with 0 keys
| ruby | {
"resource": ""
} |
q26931 | Firim.CommandsGenerator.add | test | def add(username, token)
Firim::AccountManager.new(
user: username,
| ruby | {
"resource": ""
} |
q26932 | Nsq.Discovery.get_nsqds | test | def get_nsqds(lookupd, topic = nil)
uri_scheme = 'http://' unless lookupd.match(%r(https?://))
uri = URI.parse("#{uri_scheme}#{lookupd}")
uri.query = "ts=#{Time.now.to_i}"
if topic
uri.path = '/lookup'
uri.query += "&topic=#{URI.escape(topic)}"
else
uri.path = '/nodes'
end
begin
body = Net::HTTP.get(uri)
data = JSON.parse(body)
producers = data['producers'] || # v1.0.0-compat
(data['data'] && data['data']['producers'])
| ruby | {
"resource": ""
} |
q26933 | Nsq.ClientBase.discover_repeatedly | test | def discover_repeatedly(opts = {})
@discovery_thread = Thread.new do
@discovery = Discovery.new(opts[:nsqlookupds])
loop do
begin
nsqds = nsqds_from_lookupd(opts[:topic])
drop_and_add_connections(nsqds)
| ruby | {
"resource": ""
} |
q26934 | Nsq.Connection.with_retries | test | def with_retries(&block)
base_sleep_seconds = 0.5
max_sleep_seconds = 300 # 5 minutes
# Let's do this thing
attempts = 0
begin
attempts += 1
return block.call(attempts)
rescue Errno::ECONNREFUSED, Errno::ECONNRESET, Errno::EHOSTUNREACH,
Errno::ENETDOWN, Errno::ENETUNREACH, Errno::ETIMEDOUT, Timeout::Error => ex
raise ex if attempts >= 100
# The sleep time | ruby | {
"resource": ""
} |
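`with_retries` above is cut off right before its sleep calculation. A generic sketch of the same retry-with-backoff pattern follows; the exponential-with-jitter formula is an assumption, not Nsq's actual code:

```ruby
require 'timeout'

# Generic retry-with-backoff sketch; the sleep formula (exponential, capped,
# with random jitter) is assumed, since the original is truncated.
def retry_with_backoff(max_attempts: 100, base_sleep: 0.5, max_sleep: 300)
  attempts = 0
  begin
    attempts += 1
    yield attempts
  rescue Errno::ECONNREFUSED, Errno::ECONNRESET, Errno::ETIMEDOUT, Timeout::Error => ex
    raise ex if attempts >= max_attempts
    sleep(rand * [base_sleep * (2**(attempts - 1)), max_sleep].min)
    retry
  end
end

retry_with_backoff { |n| raise Errno::ECONNRESET, 'flaky' if n < 3; "ok after #{n} tries" }
```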
q26935 | X12.Base.show | test | def show(ind = '')
count = 0
self.to_a.each{|i|
#puts "#{ind}#{i.name} #{i.object_id} #{i.super.object_id} [#{count}]: #{i.parsed_str} #{i.super.class}"
puts "#{ind}#{i.name} [#{count}]: #{i.to_s.sub(/^(.{30})(.*?)(.{30})$/, '\1...\3')}"
# Force parsing a segment
if i.kind_of?(X12::Segment) && i.nodes[0]
i.find_field(i.nodes[0].name)
end
i.nodes.each{|j|
case
| ruby | {
"resource": ""
} |
q26936 | X12.Base.do_repeats | test | def do_repeats(s)
if self.repeats.end > 1
possible_repeat = self.dup
p_s = possible_repeat.parse(s)
| ruby | {
"resource": ""
} |
q26937 | X12.Base.find | test | def find(e)
#puts "Finding [#{e}] in #{self.class} #{name}"
case self
when X12::Loop
# Breadth first
res = nodes.find{|i| e==i.name }
return res if res
# Depth now
nodes.each{|i|
res = i.find(e) if i.kind_of?(X12::Loop)
| ruby | {
"resource": ""
} |
q26938 | X12.Base.method_missing | test | def method_missing(meth, *args, &block)
str = meth.id2name
str = str[1..str.length] if str =~ /^_\d+$/ # to avoid pure number names like 270, 997, etc.
#puts "Missing #{str}"
if str =~ /=$/
# Assignment
str.chop!
#puts str
case self
when X12::Segment
res = find_field(str)
throw Exception.new("No field '#{str}' in segment '#{self.name}'") if EMPTY == res
res.content = args[0].to_s
#puts res.inspect
else
| ruby | {
"resource": ""
} |
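The X12 `method_missing` above maps unknown `foo=` calls to field writes. A hypothetical, stripped-down record class showing the same accessor pattern (not the X12 implementation):

```ruby
# Hypothetical record illustrating method_missing-based accessors: names
# ending in '=' become field writes, known names become field reads.
class DynamicRecord
  def initialize
    @fields = {}
  end

  def method_missing(meth, *args, &block)
    name = meth.to_s
    if name.end_with?('=')
      @fields[name.chomp('=')] = args.first
    elsif @fields.key?(name)
      @fields[name]
    else
      super
    end
  end

  def respond_to_missing?(meth, include_private = false)
    name = meth.to_s
    name.end_with?('=') || @fields.key?(name) || super
  end
end

rec = DynamicRecord.new
rec.patient_name = 'DOE JOHN'
rec.patient_name  # => "DOE JOHN"
```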
q26939 | X12.Segment.parse | test | def parse(str)
s = str
#puts "Parsing segment #{name} from #{s} with regexp [#{regexp.source}]"
m = regexp.match(s)
#puts "Matched #{m ? m[0] : 'nothing'}"
return nil unless m
s = m.post_match
| ruby | {
"resource": ""
} |
q26940 | X12.Segment.render | test | def render
self.to_a.inject(''){|repeat_str, i|
if i.repeats.begin < 1 and !i.has_content?
# Skip optional empty segments
repeat_str
else
# Have to render no matter how empty
repeat_str += i.name+i.nodes.reverse.inject(''){|nodes_str, j|
field = j.render
| ruby | {
"resource": ""
} |
q26941 | X12.Segment.regexp | test | def regexp
unless @regexp
if self.nodes.find{|i| i.type =~ /^".+"$/ }
# It's a very special regexp if there are constant fields
re_str = self.nodes.inject("^#{name}#{Regexp.escape(field_separator)}"){|s, i|
field_re = i.simple_regexp(field_separator, segment_separator)+Regexp.escape(field_separator)+'?'
field_re = "(#{field_re})?" unless i.required
s+field_re
} + Regexp.escape(segment_separator)
@regexp = Regexp.new(re_str)
else
| ruby | {
"resource": ""
} |
q26942 | X12.Segment.find_field | test | def find_field(str)
#puts "Finding field [#{str}] in #{self.class} #{name}"
# If there is such a field to begin with
field_num = nil
self.nodes.each_index{|i|
field_num = i if str == self.nodes[i].name
}
return EMPTY if field_num.nil?
#puts field_num
# Parse the segment if not parsed already
unless @fields
| ruby | {
"resource": ""
} |
q26943 | X12.Parser.parse | test | def parse(loop_name, str)
loop = @x12_definition[X12::Loop][loop_name]
#puts "Loops to parse #{@x12_definition[X12::Loop].keys}"
throw Exception.new("Cannot find | ruby | {
"resource": ""
} |
q26944 | X12.Parser.factory | test | def factory(loop_name)
loop = @x12_definition[X12::Loop][loop_name]
throw Exception.new("Cannot find a definition | ruby | {
"resource": ""
} |
q26945 | X12.Parser.process_loop | test | def process_loop(loop)
loop.nodes.each{|i|
case i
when X12::Loop then process_loop(i)
| ruby | {
"resource": ""
} |
q26946 | X12.Parser.process_segment | test | def process_segment(segment)
#puts "Trying to process segment #{segment.inspect}"
unless @x12_definition[X12::Segment] && @x12_definition[X12::Segment][segment.name]
# Try to find it in a separate file if missing from the @x12_definition structure
initialize(segment.name+'.xml')
segment_definition = @x12_definition[X12::Segment][segment.name]
throw Exception.new("Cannot find a definition for segment #{segment.name}") unless segment_definition
else
segment_definition = @x12_definition[X12::Segment][segment.name]
end
segment_definition.nodes.each_index{|i|
segment.nodes[i] = segment_definition.nodes[i]
# Make sure we have the validation table if any for this field. Try to read one in if missing.
| ruby | {
"resource": ""
} |
q26947 | X12.Loop.render | test | def render
if self.has_content?
self.to_a.inject(''){|loop_str, i|
loop_str += | ruby | {
"resource": ""
} |
q26948 | Formbuilder.Entry.calculate_sortable_values | test | def calculate_sortable_values
response_fieldable.input_fields.each do |response_field|
if (x = response_value(response_field)).present?
| ruby | {
"resource": ""
} |
q26949 | Formbuilder.Entry.normalize_responses | test | def normalize_responses
return if form.blank?
form.response_fields.each do |response_field|
if (x = self.response_value(response_field))
| ruby | {
"resource": ""
} |
q26950 | Formbuilder.Entry.audit_responses | test | def audit_responses
form.response_fields.each do |response_field|
response_field.audit_response(self.response_value(response_field), | ruby | {
"resource": ""
} |
q26951 | Hexp.Builder.tag! | test | def tag!(tag, *args, &block)
text, attributes = nil, {}
args.each do |arg|
case arg
when ::Hash
attributes.merge!(arg)
when ::String
text ||= ''
text << arg
| ruby | {
"resource": ""
} |
q26952 | Hexp.Builder.<< | test | def <<(*args)
args.each do |arg|
if arg.respond_to?(:to_hexp)
@stack.last[2] << arg
self
else
| ruby | {
"resource": ""
} |
q26953 | Hexp.Node.rewrite | test | def rewrite(css_selector = nil, &block)
return Rewriter.new(self, block) if css_selector.nil?
| ruby | {
"resource": ""
} |
q26954 | Hexp.Node.select | test | def select(css_selector = nil, &block)
if css_selector
CssSelection.new(self, | ruby | {
"resource": ""
} |
q26955 | IronHide.Configuration.add_configuration | test | def add_configuration(config_hash)
config_hash.each do |key, val|
instance_eval { instance_variable_set("@#{key}",val) }
| ruby | {
"resource": ""
} |
q26956 | PosixPsutil.POSIX.pid_exists | test | def pid_exists(pid)
return false if pid < 0
# According to "man 2 kill" PID 0 has a special meaning:
# it refers to <<every process in the process group of the
# calling process>> so we don't want to go any further.
# If we get here it means this UNIX platform *does* have
# a process with id 0.
return true if pid == 0
::Process.kill(0, pid)
return true
rescue Errno::ESRCH | ruby | {
"resource": ""
} |
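The `pid_exists` snippet is cut off after the `ESRCH` rescue. A self-contained version of the signal-0 liveness check; the `EPERM` branch is an addition here and is not visible in the excerpt:

```ruby
# Signal 0 probes a process without delivering a signal.
def pid_exists?(pid)
  return false if pid < 0
  return true  if pid == 0  # PID 0 means "this process group"; treat as alive

  Process.kill(0, pid)
  true
rescue Errno::ESRCH
  false                     # no such process
rescue Errno::EPERM
  true                      # exists, but owned by another user
end

pid_exists?(Process.pid)    # => true
```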
q26957 | PosixPsutil.POSIX.wait_pid | test | def wait_pid(pid, timeout=nil)
def check_timeout(delay, stop_at, timeout)
if timeout
raise Timeout::Error.new("when waiting for (pid=#{pid})") if Time.now >= stop_at
end
sleep(delay)
delay * 2 < 0.04 ? delay * 2 : 0.04
end
if timeout
waitcall = proc { ::Process.wait(pid, ::Process::WNOHANG)}
stop_at = Time.now + timeout
else
waitcall = proc { ::Process.wait(pid)}
end
delay = 0.0001
loop do
begin
retpid = waitcall.call()
rescue Errno::EINTR
delay = check_timeout(delay, stop_at, timeout)
next
rescue Errno::ECHILD
# This has two meanings:
# - pid is not a child of Process.pid in which case
# we keep polling until it's gone
# - pid never existed in the first place
# In both cases we'll eventually return nil as we
# can't determine its exit status code.
loop do
return nil unless pid_exists(pid)
delay = check_timeout(delay, stop_at, timeout)
end
end
unless retpid
# WNOHANG was used, pid is still running
| ruby | {
"resource": ""
} |
q26958 | RIM.UploadModuleHelper.upload_module_changes | test | def upload_module_changes(parent_sha1, sha1s)
remote_path = fetch_module
# search for the first revision that is not
tmp_git_path = clone_or_fetch_repository(remote_path, module_tmp_git_path(@remote_path))
RIM::git_session(tmp_git_path) do |dest|
local_branch = nil
remote_branch = nil
infos = nil
if @module_info.subdir
dest_path = File.join([tmp_git_path] + @module_info.subdir.split("/"))
else
dest_path = tmp_git_path
end
RIM::git_session(@ws_root) do |src|
infos = get_branches_and_revision_infos(src, dest, parent_sha1, sha1s)
if infos.branches.size == 1
remote_branch = infos.branches[0]
if dest.has_remote_branch?(remote_branch)
infos.rev_infos.each do |rev_info|
local_branch = create_update_branch(dest, infos.parent_sha1, rev_info.src_sha1) if !local_branch
copy_revision_files(
src,
rev_info.src_sha1,
dest_path,
rev_info.rim_info.ignores
)
commit_changes(dest, local_branch, rev_info.src_sha1, rev_info.message)
end
else
raise RimException.new("The target revision '#{@module_info.target_revision}' of module #{@module_info.local_path} is not a | ruby | {
"resource": ""
} |
q26959 | RIM.UploadModuleHelper.get_branches_and_revision_infos | test | def get_branches_and_revision_infos(src_session, dest_session, parent_sha1, sha1s)
infos = []
branches = []
dest_parent_sha1 = nil
(sha1s.size() - 1).step(0, -1) do |i|
info = get_revision_info(src_session, dest_session, sha1s[i])
if !info.dest_sha1 && info.rim_info.target_revision
infos.unshift(info)
branches.push(info.rim_info.target_revision) if !branches.include?(info.rim_info.target_revision)
else
dest_parent_sha1 = info.dest_sha1
| ruby | {
"resource": ""
} |
q26960 | RIM.UploadModuleHelper.get_revision_info | test | def get_revision_info(src_session, dest_session, src_sha1)
module_status = StatusBuilder.new.rev_module_status(src_session, src_sha1, @module_info.local_path)
rim_info = get_riminfo_for_revision(src_session, src_sha1) | ruby | {
"resource": ""
} |
q26961 | RIM.UploadModuleHelper.commit_changes | test | def commit_changes(session, branch, sha1, msg)
if session.status.lines.any?
# add before commit because the path can be below a not yet added path
session.execute("git add --all")
msg_file = Tempfile.new('message')
begin
msg_file << msg
msg_file.close
| ruby | {
"resource": ""
} |
q26962 | RIM.UploadModuleHelper.get_riminfo_for_revision | test | def get_riminfo_for_revision(session, sha1)
session.execute("git show #{sha1}:#{File.join(@module_info.local_path, RimInfo::InfoFileName)}") do | ruby | {
"resource": ""
} |
q26963 | RIM.UploadModuleHelper.copy_revision_files | test | def copy_revision_files(src_session, src_sha1, dest_dir, ignores)
Dir.mktmpdir do |tmp_dir|
tmp_dir = Dir.glob(tmp_dir)[0]
src_session.execute("git archive --format tar #{src_sha1} #{@module_info.local_path} | tar -C #{tmp_dir} -xf -")
tmp_module_dir = File.join(tmp_dir, @module_info.local_path)
files = FileHelper.find_matching_files(tmp_module_dir, false, "/**/*", File::FNM_DOTMATCH)
files.delete(".")
files.delete("..")
files.delete(RimInfo::InfoFileName)
files -= FileHelper.find_matching_files(tmp_module_dir, false, ignores)
| ruby | {
"resource": ""
} |
q26964 | RIM.StatusBuilder.rev_history_status | test | def rev_history_status(git_session, rev, options={})
stop_rev = options[:stop_rev]
relevant_revs = {}
if stop_rev
git_session.execute("git rev-list #{rev} \"^#{stop_rev}\"").split("\n").each do |r|
relevant_revs[r] = true
end
elsif options[:gerrit]
# in gerrit mode, stop on all known commits
git_session.execute("git rev-list #{rev} --not --all --").split("\n").each do |r|
relevant_revs[r] = true
end
| ruby | {
"resource": ""
} |
q26965 | RIM.StatusBuilder.rev_status | test | def rev_status(git_session, rev)
mod_dirs = module_dirs(git_session, rev)
mod_stats = []
# export all relevant modules at once
# this makes status calculation significantly faster compared
# to exporting each module separately
# (e.g. 1.0s instead of 1.5s on linux for a commit with 20 modules)
git_session.within_exported_rev(rev, mod_dirs) do |d|
mod_dirs.each do | ruby | {
"resource": ""
} |
q26966 | RIM.StatusBuilder.rev_module_status | test | def rev_module_status(git_session, rev, local_path)
mod_stat = nil
if git_session.execute("git ls-tree -r --name-only #{rev}").split("\n").include?(File.join(local_path, ".riminfo"))
git_session.within_exported_rev(rev, [local_path]) | ruby | {
"resource": ""
} |
q26967 | RIM.StatusBuilder.fs_status | test | def fs_status(dir)
RevStatus.new(
fs_rim_dirs(dir).collect { |d|
| ruby | {
"resource": ""
} |
q26968 | RIM.StatusBuilder.build_rev_history_status | test | def build_rev_history_status(gs, rev, relevant_revs, status_cache={}, options={})
return status_cache[rev] if status_cache[rev]
stat = nil
if relevant_revs[rev]
parent_revs = gs.parent_revs(rev)
if parent_revs.size > 0
# build status for all parent nodes
parent_stats = parent_revs.collect do |p|
build_rev_history_status(gs, p, relevant_revs, status_cache, options)
end
# if this is a merge commit with multiple parents
# we decide to use the first commit (git primary parent)
# note that it's not really important, which one we choose
# just make sure to use the same commit when checking for changed files
base_stat = parent_stats.first
changed_files = gs.changed_files(rev, parent_revs.first)
# build list of modules in this commit
module_dirs = base_stat.modules.collect{|m| m.dir}
changed_files.each do |f|
if File.basename(f.path) == RimInfo::InfoFileName
if f.kind == :added
| ruby | {
"resource": ""
} |
q26969 | RIM.StatusBuilder.rev_status_fast | test | def rev_status_fast(git_session, rev)
mod_dirs = module_dirs(git_session, rev)
mod_stats = []
git_session.within_exported_rev(rev, mod_dirs.collect{|d| "#{d}/#{RimInfo::InfoFileName}"}) do |temp_dir|
mod_dirs.each do |rel_path|
mod_stats << RevStatus::ModuleStatus.new(
rel_path,
| ruby | {
"resource": ""
} |
q26970 | RIM.SyncHelper.sync | test | def sync(message = nil, rebase = nil, split = true)
# get the name of the current workspace branch
RIM::git_session(@ws_root) do |s|
branch = s.current_branch || ''
rim_branch = "rim/" + branch
branch_sha1 = nil
changed_modules = nil
if branch.empty?
raise RimException.new("Not on a git branch.")
elsif branch.start_with?("rim/")
raise RimException.new("The current git branch '#{branch}' is a rim integration branch. Please switch to a non rim branch to proceed.")
else
branch = "refs/heads/#{branch}"
branch_sha1 = s.rev_sha1(rim_branch)
remote_rev = get_latest_remote_revision(s, branch)
rev = get_latest_clean_path_revision(s, branch, remote_rev)
if !s.has_branch?(rim_branch) || has_ancestor?(s, branch, s.rev_sha1(rim_branch)) || !has_ancestor?(s, rim_branch, remote_rev)
s.execute("git branch -f #{rim_branch} #{rev}")
branch_sha1 = s.rev_sha1(rim_branch)
end
remote_url = "file://" + @ws_root
@logger.debug("Folder for temporary git repositories: #{@rim_path}")
tmpdir = clone_or_fetch_repository(remote_url, module_tmp_git_path(".ws"), "Cloning workspace git...")
RIM::git_session(tmpdir) do |tmp_session|
tmp_session.execute("git reset --hard")
tmp_session.execute("git clean -xdf")
# use -f here to prevent git checkout from checking for untracked files which might be overwritten.
# this is safe since we removed any untracked files before.
# this is a workaround for a name case problem on windows:
# if a file's name changes case between the current head and | ruby | {
"resource": ""
} |
q26971 | RIM.SyncHelper.sync_modules | test | def sync_modules(session, message)
module_helpers = []
@module_infos.each do |module_info|
module_helpers.push(SyncModuleHelper.new(session.execute_dir, @ws_root, module_info, @logger))
end
changed_modules = []
| ruby | {
"resource": ""
} |
q26972 | RIM.SyncHelper.has_ancestor? | test | def has_ancestor?(session, rev, ancestor)
# make sure we deal only with sha1s
rev = session.rev_sha1(rev)
| ruby | {
"resource": ""
} |
q26973 | RIM.SyncHelper.get_parent | test | def get_parent(session, rev)
parents = session.parent_revs(rev)
| ruby | {
"resource": ""
} |
q26974 | RIM.SyncHelper.get_commit_message | test | def get_commit_message(changed_modules)
StringIO.open do |s|
s.puts "rim sync."
s.puts
changed_modules.each do |m|
| ruby | {
"resource": ""
} |
q26975 | PosixPsutil.PlatformSpecificProcess.pmmap_ext | test | def pmmap_ext(data)
pmmap_ext = ['addr', 'perms', 'path', 'rss', 'size', 'pss',
'shared_clean', 'shared_dirty', 'private_clean',
| ruby | {
"resource": ""
} |
q26976 | PosixPsutil.PlatformSpecificProcess.pmmap_grouped | test | def pmmap_grouped(data)
pmmap_grouped = ['rss', 'size', 'pss', 'shared_clean',
'shared_dirty', 'private_clean', 'private_dirty',
'referenced', 'anonymous', 'swap']
os_list = []
data.each do |k, v|
| ruby | {
"resource": ""
} |
q26977 | RIM.DirtyCheck.calc_checksum | test | def calc_checksum(mi, dir)
if check_required_attributes(mi)
sha1 = Digest::SHA1.new
# all files and directories within dir
files = FileHelper.find_matching_files(dir, false, "/**/*", File::FNM_DOTMATCH)
# Dir.glob with FNM_DOTMATCH might return . and ..
files.delete(".")
files.delete("..")
# ignore the info file itself
files.delete(RimInfo::InfoFileName)
# ignores defined by user
files -= FileHelper.find_matching_files(dir, false, mi.ignores)
| ruby | {
"resource": ""
} |
q26978 | RIM.GitSession.current_branch | test | def current_branch
out = execute "git branch"
out.split("\n").each do |l|
| ruby | {
"resource": ""
} |
q26979 | RIM.GitSession.has_remote_branch? | test | def has_remote_branch?(branch)
out = execute("git ls-remote --heads")
out.split("\n").each do |l|
return | ruby | {
"resource": ""
} |
q26980 | RIM.GitSession.rev_sha1 | test | def rev_sha1(rev)
sha1 = nil
execute "git rev-list -n 1 #{rev} --" do |out, e|
| ruby | {
"resource": ""
} |
q26981 | RIM.GitSession.rev_infos | test | def rev_infos(rev, desired)
info = {}
desired.each_pair do |key, value|
execute "git | ruby | {
"resource": ""
} |
q26982 | RIM.GitSession.remote_branch_revs | test | def remote_branch_revs
out = execute "git show-ref"
out.split("\n").collect { |l|
if l =~ /refs\/remotes\//
| ruby | {
"resource": ""
} |
q26983 | RIM.GitSession.export_rev | test | def export_rev(rev, dir, paths=[])
paths = paths.dup
loop do
path_args = ""
# max command line length on Windows XP and higher is 8191
# consider the following extra characters which will be added:
# up to 3 paths in execute, 1 path for tar, max path length 260 = 1040
# plus some "glue" characters, plus the last path item with 260 max;
# use 6000 | ruby | {
"resource": ""
} |
q26984 | RIM.GitSession.within_exported_rev | test | def within_exported_rev(rev, paths=[])
Dir.mktmpdir("rim") do |d|
d = Dir.glob(d)[0]
c = File.join(d, "content")
FileUtils.mkdir(c)
export_rev(rev, c, paths)
# return contents of yielded block
# mktmpdir returns value return by our block
yield c
FileUtils.rm_rf(c)
| ruby | {
"resource": ""
} |
q26985 | RIM.UploadHelper.upload | test | def upload
# get the name of the current workspace branch
RIM::git_session(@ws_root) do |s|
branch = s.current_branch
if branch.nil?
raise RimException.new("Not on a git branch.")
elsif !branch.start_with?("rim/")
begin
sha1 = s.rev_sha1(branch)
@logger.info("Uploading modules...")
upload_modules(get_upload_revisions(s, sha1)) | ruby | {
"resource": ""
} |
q26986 | RIM.UploadHelper.upload_modules | test | def upload_modules(info)
each_module_parallel("uploading", @module_helpers) do |m|
| ruby | {
"resource": ""
} |
q26987 | RIM.UploadHelper.get_upload_revisions | test | def get_upload_revisions(session, rev)
# remote revs are where we stop traversal
non_remote_revs = {}
session.all_reachable_non_remote_revs(rev).each do |r|
non_remote_revs[r] = true
end
revisions = []
# make sure we deal only with sha1s
rev = session.rev_sha1(rev)
while rev && non_remote_revs[rev]
| ruby | {
"resource": ""
} |
q26988 | RIM.ModuleHelper.fetch_module | test | def fetch_module
FileUtils.mkdir_p git_path
RIM::git_session(git_path) do |s|
if !File.exist?(git_path + "/config")
s.execute("git clone --mirror #{@remote_url} #{git_path}") do |out, e|
raise RimException.new("Remote repository '#{@remote_url}' | ruby | {
"resource": ""
} |
q26989 | Cranky.Job.assign | test | def assign(attribute, value)
unless value == :skip || attribute == :class
if item.respond_to?("#{attribute}=")
item.send("#{attribute}=", value)
| ruby | {
"resource": ""
} |
q26990 | Cranky.FactoryBase.debug | test | def debug(*args)
item = build(*args)
invalid_item = Array(item).find(&:invalid?)
if invalid_item
if invalid_item.errors.respond_to?(:messages)
errors = invalid_item.errors.messages
else
errors = invalid_item.errors
| ruby | {
"resource": ""
} |
q26991 | Cranky.FactoryBase.crank_it | test | def crank_it(what, overrides)
if what.to_s =~ /(.*)_attrs$/
what = $1
overrides = overrides.merge(:_return_attributes => true)
end
item = "TBD"
new_job(what, overrides) do
| ruby | {
"resource": ""
} |
q26992 | Pres.ViewDelegation.method_missing | test | def method_missing(method, *args, &block)
if view_context.respond_to?(method, true)
| ruby | {
"resource": ""
} |
q26993 | Pres.Presents.present | test | def present(object, presenter: nil, **args)
if object.respond_to?(:to_ary)
object.map { |item| present(item, presenter: presenter, **args) }
else
presenter | ruby | {
"resource": ""
} |
q26994 | GemPublisher.Pusher.push | test | def push(gem, method, options = {})
push_command = PUSH_METHODS[method.to_s] or raise "Unknown Gem push method #{method.inspect}."
push_command += [gem]
push_command | ruby | {
"resource": ""
} |
q26995 | Interpolation.OneDimensional.interpolate | test | def interpolate interpolant
case @opts[:type]
when :linear
for_each (interpolant) { |x| linear_interpolation(x) } | ruby | {
"resource": ""
} |
q26996 | Asciidoctor::IncludeExt.IncludeProcessor.lines_selector_for | test | def lines_selector_for(target, attributes)
if (klass = @selectors.find { |s| s.handles? target, attributes })
| ruby | {
"resource": ""
} |
q26997 | Bottleneck.Core.run | test | def run
client_ip = @ip
key = "request_count:#{client_ip}"
result = { status: Constants::SUCCESS_STATUS, message: Constants::OK_MESSAGE }
requests_count = @storage.get(key)
unless requests_count
@storage.set(key, 0)
@storage.expire(key, @limits["time_period_seconds"])
end
if requests_count.to_i >= | ruby | {
"resource": ""
} |
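The Bottleneck row above counts requests per client IP in an external store via `get`/`set`/`expire`. A minimal in-memory sketch of the same fixed-window idea; the limit and window values are hypothetical stand-ins, not the gem's defaults:

```ruby
# In-memory fixed-window rate limiter: one counter per client, reset when the
# window elapses. A real deployment would use a shared store instead of a Hash.
class FixedWindowLimiter
  def initialize(limit: 100, window_seconds: 60)
    @limit = limit
    @window = window_seconds
    @counts = Hash.new { |h, k| h[k] = { count: 0, reset_at: Time.now + @window } }
  end

  def allow?(client_ip)
    entry = @counts[client_ip]
    if Time.now >= entry[:reset_at]
      entry[:count] = 0
      entry[:reset_at] = Time.now + @window
    end
    entry[:count] += 1
    entry[:count] <= @limit
  end
end

limiter = FixedWindowLimiter.new(limit: 3, window_seconds: 60)
4.times.map { limiter.allow?('10.0.0.1') }  # => [true, true, true, false]
```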
q26998 | Informant.Standard.date_select | test | def date_select(method, options = {})
options[:include_blank] ||= false
options[:start_year] ||= 1801
options[:end_year] ||= Time.now.year
| ruby | {
"resource": ""
} |
q26999 | Informant.Standard.label | test | def label(method, text = nil, options = {})
colon = false if options[:colon].nil?
options[:for] = options[:label_for]
required = options[:required]
# remove special options
options.delete :colon
options.delete :label_for
options.delete | ruby | {
"resource": ""
} |