_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
---|---|---|---|---|---|
q26100 | CaTissue.Database.add_position_to_specimen_template | validation | def add_position_to_specimen_template(specimen, template)
# Copies the specimen's position onto the update template, using the
# snapshot (pre-change) values when available so caTissue sees the old state.
pos = specimen.position
# the non-domain position attributes
pas = pos.class.nondomain_attributes
# the template position reflects the old values, if available
ss = pos.snapshot
# the attribute => value hash
# NOTE(review): the block parameter shadows the outer 'pas' list — it works
# (each yielded element is a single attribute symbol), but is confusing.
vh = ss ? pas.to_compact_hash { |pas| ss[pas] } : pos.value_hash(pas)
vh[:specimen] = template
# copy the container so the template does not reference the live container
vh[:storage_container] = pos.storage_container.copy
# the template position reflects the old values
template.position = pos.class.new(vh)
logger.debug { "Work around #{specimen} update anomaly by copying position #{template.position.qp} to update template #{template.qp} as #{template.position.qp} with values #{vh.qp}..." }
end | ruby | {
"resource": ""
} |
q26101 | CaTissue.Database.ensure_primary_annotation_has_hook | validation | def ensure_primary_annotation_has_hook(annotation)
hook = annotation.hook
if hook.nil? then
raise CaRuby::DatabaseError.new("Cannot save annotation #{annotation} since it does not reference a hook entity")
end
if hook.identifier.nil? then
logger.debug { "Ensuring that the annotation #{annotation.qp} hook entity #{hook.qp} exists in the database..." }
ensure_exists(hook)
end
end | ruby | {
"resource": ""
} |
q26102 | CaTissue.Database.copy_annotation_proxy_owner_to_template | validation | def copy_annotation_proxy_owner_to_template(obj, template)
prop = obj.class.proxy_property
# Ignore the proxy attribute if it is defined by caRuby rather than caTissue.
return unless prop and prop.java_property?
rdr, wtr = prop.java_accessors
pxy = obj.send(rdr)
logger.debug { "Setting #{obj.qp} template #{template.qp} proxy owner to #{pxy}..." }
template.send(wtr, pxy)
end | ruby | {
"resource": ""
} |
q26103 | Sycsvpro.Table.create_table_data | validation | def create_table_data
# Build the in-memory table from infile: the first non-empty line feeds the
# header, subsequent lines that pass the row filter become table rows.
# File.foreach closes the file handle deterministically; the original
# File.open(infile).each_with_index relied on GC to close it (handle leak).
processed_header = false
File.foreach(infile).each_with_index do |line, index|
  line = line.chomp
  next if line.empty?
  line = unstring(line).chomp
  header.process line, processed_header
  unless processed_header
    processed_header = true
    next
  end
  next if row_filter.process(line, row: index).nil?
  @columns = line.split(';')
  create_row(create_key, line)
end
end | ruby | {
"resource": ""
} |
q26104 | Sycsvpro.Table.write_to_file | validation | def write_to_file
# Writes the table to outfile: header line first, optional sum row at TOP,
# one semicolon-joined line per row, optional sum row at EOF.
File.open(outfile, 'w') do |out|
out.puts header.to_s
out.puts create_sum_row if @sum_row_pos == 'TOP'
rows.each do |key, row|
line = [] << row[:key]
header.clear_header_cols.each_with_index do |col, index|
# skip the leading header columns already covered by the row's key values
next if index < row[:key].size
line << row[:cols][col]
end
out.puts line.flatten.join(';')
end
out.puts create_sum_row if @sum_row_pos == 'EOF'
end
end | ruby | {
"resource": ""
} |
q26105 | Sycsvpro.Table.to_number | validation | def to_number(value)
# Normalize the number string to English decimal notation, then return an
# Integer for whole numbers and a Float when a decimal point is present.
value = convert_to_en(value)
value =~ /\./ ? value.to_f : value.to_i
end | ruby | {
"resource": ""
} |
q26106 | Sycsvpro.Table.prepare_sum_row | validation | def prepare_sum_row(pattern)
return if pattern.nil? || pattern.empty?
@sum_row_pos, sum_row_pattern = pattern.split(/(?<=^top|^eof):/i)
@sum_row_pos.upcase!
@sum_row = Hash.new
@sum_row_patterns = split_by_comma_regex(sum_row_pattern)
end | ruby | {
"resource": ""
} |
q26107 | Sycsvpro.Table.add_to_sum_row | validation | def add_to_sum_row(value, column)
return unless @sum_row_patterns
@sum_row_patterns.each do |pattern|
if pattern =~ /^\(?c\d+[=~+.]/
header_column = evaluate(pattern, "")
else
header_column = pattern
end
if header_column == column
@sum_row[header_column] ||= 0
@sum_row[header_column] += value
end
end
end | ruby | {
"resource": ""
} |
q26108 | Sycsvpro.Table.create_sum_row | validation | def create_sum_row
# Render the sum row as a semicolon-joined line, one cell per header column.
line = []
header.clear_header_cols.each do |col|
  # Parenthesized: '<<' binds tighter than '||', so the original evaluated
  # (line << @sum_row[col]) || "" and the '|| ""' was dead code — nil was
  # pushed for missing columns. join rendered nil as an empty field, but
  # this now produces the intended "" explicitly.
  line << (@sum_row[col] || "")
end
line.flatten.join(';')
end | ruby | {
"resource": ""
} |
q26109 | TranslatedCollection.Wrapper._rewrap_array | validation | def _rewrap_array(result)
if @wrap_results
newcoll = @collection.class.new(result)
self.class.new(newcoll, @wrapfunc_in, @wrapfunc_out)
else
@collection.class.new(result.map(&@wrapfunc_out))
end
end | ruby | {
"resource": ""
} |
q26110 | DeepEnd.Graph.add_dependency | validation | def add_dependency(key, dependencies = [])
raise SelfDependencyError, "An object's dependencies cannot contain itself" if dependencies.include? key
node = node_for_key_or_new key
dependencies.each do |dependency|
node.addEdge(node_for_key_or_new(dependency))
end
resolve_dependencies
end | ruby | {
"resource": ""
} |
q26111 | DeepEnd.Graph.resolve_dependency | validation | def resolve_dependency(node)
# Depth-first dependency resolution: mark the node as seen, walk each
# unresolved edge (raising if the same pass revisits a node, i.e. a cycle),
# then append the node to the resolved list.
node.seen = true
@seen_this_pass << node
node.edges.each do |edge|
  next if @resolved.include? edge
  if @seen_this_pass.include? edge
    raise CircularDependencyError, "Circular reference detected: #{node.key.to_s} - #{edge.key.to_s}"
  elsif !edge.seen?
    resolve_dependency edge
  end
end
@resolved << node
end | ruby | {
"resource": ""
} |
q26112 | Turntabler.RoomDirectory.with_friends | validation | def with_friends
data = api('room.directory_graph')
data['rooms'].map do |(attrs, friends)|
Room.new(client, attrs.merge(:friends => friends))
end
end | ruby | {
"resource": ""
} |
q26113 | Turntabler.RoomDirectory.find | validation | def find(query, options = {})
# Search the room directory for rooms matching the query.
assert_valid_keys(options, :limit, :skip)
options = {:limit => 20, :skip => 0}.merge(options)
# NOTE(review): :limit is validated and defaulted above but never passed to
# the API call — confirm whether 'room.search' should receive it.
data = api('room.search', :query => query, :skip => options[:skip])
data['rooms'].map {|(attrs, *)| Room.new(client, attrs)}
end | ruby | {
"resource": ""
} |
q26114 | Turntabler.Playlist.load | validation | def load(options = {})
assert_valid_keys(options, :minimal)
options = {:minimal => false}.merge(options)
data = api('playlist.all', options)
self.attributes = data
super()
end | ruby | {
"resource": ""
} |
q26115 | Turntabler.Playlist.update | validation | def update(attributes = {})
assert_valid_keys(attributes, :id)
# Update id
id = attributes.delete(:id)
update_id(id) if id
true
end | ruby | {
"resource": ""
} |
q26116 | Turntabler.Playlist.active | validation | def active
@active = client.user.playlists.all.any? {|playlist| playlist == self && playlist.active?} if @active.nil?
@active
end | ruby | {
"resource": ""
} |
q26117 | AttributesDSL.Attributes.add | validation | def add(name, options = {}, &coercer)
name = name.to_sym
value = Attribute.new(name, options, &coercer)
clone_with do
@attributes = attributes.merge(name => value)
@transformer = nil
end
end | ruby | {
"resource": ""
} |
q26118 | CaTissue.CollectionProtocol.add_specimens | validation | def add_specimens(*args)
hash = args.pop
spcs = args
# validate arguments
unless Hash === hash then
raise ArgumentError.new("Collection parameters are missing when adding specimens to protocol #{self}")
end
# Make the default registration, if necessary.
unless hash.has_key?(:registration) || hash.has_key?(:collection_protocol_registration) then
# the participant
pnt = hash.delete(:participant)
unless pnt then
raise ArgumentError.new("Registration or participant missing from collection parameters: #{hash.qp}")
end
hash[:registration] = registration(pnt) || make_cpr(pnt)
end
# the new SCG
scg = SpecimenCollectionGroup.new(hash)
# set each Specimen SCG
spcs.each { |spc| spc.specimen_collection_group = scg }
scg
end | ruby | {
"resource": ""
} |
q26119 | Sightstone.SummonerModule.summoner | validation | def summoner(name_or_id, optional={})
region = optional[:region] || @sightstone.region
uri = if name_or_id.is_a? Integer
"https://prod.api.pvp.net/api/lol/#{region}/v1.3/summoner/#{name_or_id}"
else
"https://prod.api.pvp.net/api/lol/#{region}/v1.3/summoner/by-name/#{URI::encode(name_or_id)}"
end
response = _get_api_response(uri)
_parse_response(response) { |resp|
data = JSON.parse(resp)
s = Summoner.new(data.values[0])
if block_given?
yield s
else
return s
end
}
end | ruby | {
"resource": ""
} |
q26120 | Sightstone.SummonerModule.names | validation | def names(ids, optional={})
region = optional[:region] || @sightstone.region
ids = ids.join(',')
uri = "https://prod.api.pvp.net/api/lol/#{region}/v1.3/summoner/#{ids}/name"
response = _get_api_response(uri)
_parse_response(response) { |resp|
data = JSON.parse(resp)
names_hash = Hash.new
data.each do |id, name|
names_hash[id.to_i] = name
end
if block_given?
yield names_hash
else
return names_hash
end
}
end | ruby | {
"resource": ""
} |
q26121 | Sightstone.SummonerModule.runebook | validation | def runebook(summoner, optional={})
region = optional[:region] || @sightstone.region
id = if summoner.is_a? Summoner
summoner.id
else
summoner
end
uri = "https://prod.api.pvp.net/api/lol/#{region}/v1.3/summoner/#{id}/runes"
response = _get_api_response(uri)
_parse_response(response) { |resp|
data = JSON.parse(resp)
book = RuneBook.new(data.values[0])
if block_given?
yield book
else
return book
end
}
end | ruby | {
"resource": ""
} |
q26122 | Sightstone.SummonerModule.runebooks | validation | def runebooks(summoners, optional={})
return {} if summoners.empty?
region = optional[:region] || @sightstone.region
ids = summoners.collect { |summoner|
if summoner.is_a? Summoner
summoner.id
else
summoner
end
}
uri = "https://prod.api.pvp.net/api/lol/#{region}/v1.3/summoner/#{ids.join(',')}/runes"
response = _get_api_response(uri)
_parse_response(response) { |resp|
data = JSON.parse(resp)
books = {}
data.each do |key, raw_book|
books[key] = RuneBook.new(raw_book)
end
if block_given?
yield books
else
return books
end
}
end | ruby | {
"resource": ""
} |
q26123 | Zipper.ZipFileGenerator.write | validation | def write
buffer = create_zip(@entries, @ignore_entries)
puts "\nwrite file #{@output_file}"
File.open(@output_file, "wb") {|f| f.write buffer.string }
end | ruby | {
"resource": ""
} |
q26124 | CaTissue.Collectible.collect | validation | def collect(opts)
raise Jinx::ValidationError.new("#{self} is already collected") if received?
specimen_event_parameters.merge!(extract_event_parameters(opts))
end | ruby | {
"resource": ""
} |
q26125 | Sycsvpro.Filter.method_missing | validation | def method_missing(id, *args, &block)
# Dispatch filter-expression method names (e.g. "1-5", "s:2", "/re/",
# "4:>2015", "BEGIN...END") to the matching filter builder. Falls through
# to super (NoMethodError) when no pattern matches.
boolean_row_regex = %r{
BEGIN(\(*[nsd]\d+[<!=~>]{1,2}
(?:[A-Z][A-Za-z]*\.new\(.*?\)|\d+|['"].*?['"])
(?:\)*(?:&&|\|\||$)
\(*[nsd]\d+[<!=~>]{1,2}
(?:[A-Z][A-Za-z]*\.new\(.*?\)|\d+|['"].*?['"])\)*)*)END
}xi
# Each branch extracts the regex captures from the method name and
# delegates; order matters — more specific patterns come first.
return boolean_row($1, args, block) if id =~ boolean_row_regex
return equal($1, args, block) if id =~ /^(\d+)$/
return equal_type($1, $2, args, block) if id =~ /^(s|n|d):(\d+)$/
return range($1, $2, args, block) if id =~ /^(\d+)-(\d+)$/
return range_type($1, $2, $3, args, block) if id =~ /^(s|n|d):(\d+)-(\d+)$/
return regex($1, args, block) if id =~ /^\/(.*)\/$/
return col_regex($1, $2, args, block) if id =~ /^(\d+):\/(.*)\/$/
# NOTE(review): '.' between date parts matches any character — presumably
# intended to accept '.', '/', '-' separators; confirm.
return date($1, $2, $3, args, block) if id =~ /^(\d+):(<|=|>)(\d+.\d+.\d+)$/
return date_range($1, $2, $3, args, block) if id =~ /^(\d+):(\d+.\d+.\d+.)-(\d+.\d+.\d+)$/
return number($1, $2, $3, args, block) if id =~ /^(\d+):(<|=|>)(\d+)$/
return number_range($1, $2, $3, args, block) if id =~ /^(\d):(\d+)-(\d+)$/
super
end | ruby | {
"resource": ""
} |
q26126 | Sycsvpro.Filter.match_boolean_filter? | validation | def match_boolean_filter?(values=[])
# Evaluate the boolean row filter against the row's column values.
# Returns false when no filter is set or no values are given; otherwise
# substitutes each column reference (n/s/d + index) with the row value
# and evaluates the resulting boolean expression.
return false if boolean_filter.empty? or values.empty?
expression = boolean_filter
# Each match: [whole reference, type (n/s/d), column index, comparator, operand]
columns = expression.scan(/(([nsd])(\d+))([<!=~>]{1,2})(.*?)(?:[\|&]{2}|$)/)
columns.each do |c|
  value = case c[1]
          when 'n'
            values[c[2].to_i].empty? ? '0' : values[c[2].to_i]
          when 's'
            "\"#{values[c[2].to_i]}\""
          when 'd'
            begin
              Date.strptime(values[c[2].to_i], date_format)
            # Date.strptime raises ArgumentError (a StandardError); the
            # original 'rescue Exception' also swallowed SystemExit,
            # Interrupt and NoMemoryError, which must propagate.
            rescue StandardError => e
              # Unparsable date: substitute a value that decides the
              # comparison deterministically for the given operator.
              case c[3]
              when '<', '<=', '=='
                "#{c[4]}+1"
              when '>', '='
                '0'
              when '!='
                c[4]
              end
            else
              "Date.strptime('#{values[c[2].to_i]}', '#{date_format}')"
            end
          end
  expression = expression.gsub(c[0], value)
end
# NOTE(review): eval on a user-supplied filter expression — only safe with
# trusted filter strings.
eval(expression)
end | ruby | {
"resource": ""
} |
q26127 | Sycsvpro.Filter.pivot_each_column | validation | def pivot_each_column(values=[])
# Yield each pivot column name together with whether the row's value for
# that column matches the pivot's stored operation.
pivot.each do |column, parameters|
  value = values[parameters[:col].to_i]
  value = value.strip.gsub(/^"|"$/, "") unless value.nil?
  match = false
  begin
    # NOTE(review): evaluates a stored operation string with the cell value
    # substituted in — assumes trusted filter definitions.
    match = eval(parameters[:operation].gsub('[value]', value))
  # Was 'rescue Exception', which also swallowed SystemExit/Interrupt;
  # evaluation errors simply leave match == false.
  rescue StandardError => e
  end
  yield column, match
end
end | ruby | {
"resource": ""
} |
q26128 | Sycsvpro.Filter.create_filter | validation | def create_filter(values)
values.scan(/(?<=,|^)(BEGIN.*?END|\/.*?\/|.*?)(?=,|$)/i).flatten.each do |value|
send(value)
end unless values.nil?
end | ruby | {
"resource": ""
} |
q26129 | Sycsvpro.Filter.equal | validation | def equal(value, args, block)
filter << value.to_i unless filter.index(value.to_i)
end | ruby | {
"resource": ""
} |
q26130 | Sycsvpro.Filter.equal_type | validation | def equal_type(type, value, args, block)
filter_size_before = filter.size
equal(value, args, block)
types << type if filter_size_before < filter.size
end | ruby | {
"resource": ""
} |
q26131 | Sycsvpro.Filter.range | validation | def range(start_value, end_value, args, block)
filter << (start_value.to_i..end_value.to_i).to_a
end | ruby | {
"resource": ""
} |
q26132 | Sycsvpro.Filter.range_type | validation | def range_type(type, start_value, end_value, args, block)
filter_size_before = filter.size
range(start_value, end_value, args, block)
(filter.size - filter_size_before).times { types << type }
end | ruby | {
"resource": ""
} |
q26133 | Sycsvpro.Filter.col_regex | validation | def col_regex(col, r, args, block)
operation = "'[value]' =~ Regexp.new('#{r}')"
pivot[r] = { col: col, operation: operation }
end | ruby | {
"resource": ""
} |
q26134 | Sycsvpro.Filter.date | validation | def date(col, comparator, date, args, block)
comparator = '==' if comparator == '='
operation = "Date.strptime(\"[value]\", \"#{date_format}\") #{comparator} " +
"Date.strptime(\"#{date}\", \"#{date_format}\")"
pivot["#{comparator}#{date}"] = { col: col, operation: operation }
end | ruby | {
"resource": ""
} |
q26135 | Sycsvpro.Filter.date_range | validation | def date_range(col, start_date, end_date, args, block)
operation = " Date.strptime(\"#{start_date}\", \"#{date_format}\") " +
"<= Date.strptime(\"[value]\", \"#{date_format}\") && " +
" Date.strptime(\"[value]\", \"#{date_format}\") " +
"<= Date.strptime(\"#{end_date}\", \"#{date_format}\")"
pivot["#{start_date}-#{end_date}"] = { col: col, operation: operation }
end | ruby | {
"resource": ""
} |
q26136 | Sycsvpro.Filter.number | validation | def number(col, comparator, number, args, block)
comparator = '==' if comparator == '='
operation = "[value] #{comparator} #{number}"
pivot["#{comparator}#{number}"] = { col: col, operation: operation }
end | ruby | {
"resource": ""
} |
q26137 | Sycsvpro.Filter.number_range | validation | def number_range(col, start_number, end_number, arg, block)
operation = " #{start_number} <= [value] && [value] <= #{end_number}"
pivot["#{start_number}-#{end_number}"] = { col: col, operation: operation }
end | ruby | {
"resource": ""
} |
q26138 | CaTissue.JBridge.create_annotation | validation | def create_annotation(hook, annotation)
# validate the arguments
if hook.nil? then raise ArgumentError.new("Annotated caTissue object is missing") end
if annotation.nil? then raise ArgumentError.new("Annotation caTissue object is missing") end
# the annotated object must exist in the database
unless hook.identifier then
raise AnnotationError.new("Annotation writer does not support annotation of a caTissue object without an identifier: #{hook}")
end
# load the caRuby annotations if necessary
hook.class.ensure_annotations_loaded
# set the annotation hook reference
annotation.hook = hook
# create the annotation in the database
annotation.create
end | ruby | {
"resource": ""
} |
q26139 | SeriesJoiner.ArrayAdditions.join_as_series | validation | def join_as_series(options = {})
# Join the array into a natural-language series, e.g. "a, b and c".
# Delimiter, final delimiter and conjunction come from options, falling
# back to I18n translations when available, else to hard-coded defaults.
# Returns nil for an empty array, the sole element for a one-element array.
if defined?(I18n)
  default_delimiter = I18n.translate(:'series_joiner.default_delimiter', :locale => options[:locale])
  default_final_delimiter = I18n.translate(:'series_joiner.default_final_delimiter', :locale => options[:locale])
  default_conjunction = I18n.translate(:'series_joiner.default_conjunction', :locale => options[:locale])
else
  default_delimiter = ', '
  default_final_delimiter = ''
  default_conjunction = ' and '
end
delimiter = options[:delimiter] || default_delimiter
final_delimiter = options[:final_delimiter] || default_final_delimiter
conjunction = options[:conjunction] || default_conjunction
count = size
return nil if count == 0
return self[0] if count == 1
joined = self[0]
if count > 2
  # delimiter between all items except the last pair
  (1..(count - 2)).each { |i| joined += delimiter + self[i] }
  joined += final_delimiter
end
return joined + conjunction + self[count - 1]
end | ruby | {
"resource": ""
} |
q26140 | Ragabash.AwesomeStringFormatter.awesome_string | validation | def awesome_string(string)
lexers = ::Rouge::Guessers::Source.new(string).filter(R_LEXERS)
if !lexers.empty?
format_syntax_string(string, lexers.first)
elsif string =~ /(?:\r?\n)(?!\z)/
format_multiline_string(string)
else
format_plain_string(string)
end
end | ruby | {
"resource": ""
} |
q26141 | Papercat.Page.meta= | validation | def meta=val
val = JSON.parse(val) if val.is_a?(String)
write_store_attribute(:data, :meta, val)
end | ruby | {
"resource": ""
} |
q26142 | Sycsvpro.Collector.execute | validation | def execute
# Collect unique values per category from infile, then write them grouped
# under "[category]" section headers to outfile.
# File.foreach closes the input handle; the original File.new(infile).each
# left it open until GC (handle leak).
File.foreach(infile).each_with_index do |line, index|
  row = row_filter.process(line, row: index)
  next if row.nil? or row.chomp.empty?
  collection.each do |_category, elements|
    values = elements[:filter].process(row)
    values.chomp.split(';').each do |value|
      elements[:entries] << value.chomp if elements[:entries].index(value.chomp).nil?
    end
  end
end
File.open(outfile, 'w') do |out|
  collection.each do |category, elements|
    out.puts "[#{category}]"
    elements[:entries].sort.each { |c| out.puts c }
  end
end
end | ruby | {
"resource": ""
} |
q26143 | Sycsvpro.Collector.init_collection | validation | def init_collection(column_filter)
column_filter.split('+').each do |f|
category, filter = f.split(':')
collection[category] = { entries: [], filter: ColumnFilter.new(filter) }
end
end | ruby | {
"resource": ""
} |
q26144 | Spark.Fire.log | validation | def log
out.sync = true
@log ||= Logger.new(out)
@log.formatter = proc do |severity, datetime, progname, msg|
if verbose
string = "#{severity} [#{datetime.strftime('%Y-%m-%d %H:%M:%S.%2N')}]: "
else
string = "[#{datetime.strftime('%H:%M:%S')}]: "
end
string += "#{msg}\n"
string
end
@log
end | ruby | {
"resource": ""
} |
q26145 | MiniAether.Require.require_aether | validation | def require_aether *deps
@mini_aether_require_spec ||= MiniAether::Spec.new
@mini_aether_require_resolver ||= MiniAether::Resolver.new
spec = @mini_aether_require_spec
resolver = @mini_aether_require_resolver
if deps.last.kind_of?(Hash)
hash = deps.pop
[hash[:source], hash[:sources]].flatten.compact.each do |source|
spec.source(source)
end
end
deps.each {|coords| spec.jar(coords) }
resolver.require(spec.dependencies, spec.sources)
nil
end | ruby | {
"resource": ""
} |
q26146 | Sycsvpro.Join.create_lookup_table | validation | def create_lookup_table
# Build each joiner's lookup table from the source file, keyed by the
# joiner's join column and holding the joiner's selected column values.
# File.foreach closes the handle; the original File.open(source) leaked it,
# and its each_with_index never used the index.
File.foreach(source) do |line|
  next if line.chomp.empty?
  values = unstring(line).chomp.split(';')
  next if values.empty?
  @joiners.each do |joiner|
    key = values[joiner.join[0]]
    joiner.lookup[:rows][key] = []
    joiner.cols.each do |i|
      joiner.lookup[:rows][key] << values[i]
    end
  end
end
end | ruby | {
"resource": ""
} |
q26147 | Sycsvpro.Join.col_positions | validation | def col_positions(pos, cols)
if pos.nil? || pos.empty?
pos = []
cols.each { |c| pos << Array.new(c.size) { |c| c } }
pos
else
pos.split(';').collect { |p| p.split(',').collect { |p| p.to_i } }
end
end | ruby | {
"resource": ""
} |
q26148 | Sycsvpro.Join.create_joiners | validation | def create_joiners(j, c, p)
js = j.split(';').collect { |j| j.split('=').collect { |j| j.to_i } }
cs = c.split(';').collect { |c| c.split(',').collect { |c| c.to_i } }
ps = col_positions(p, cs)
@joiners = []
(0...js.size).each do |i|
@joiners << Joiner.new(js[i], ps[i], cs[i], { rows: { } })
end
ps.flatten
end | ruby | {
"resource": ""
} |
q26149 | CaTissue.SpecimenProtocol.add_defaults_local | validation | def add_defaults_local
super
self.title ||= short_title
self.short_title ||= title
self.start_date ||= Java::JavaUtil::Date.new
end | ruby | {
"resource": ""
} |
q26150 | Sycsvpro.SpreadSheet.[] | validation | def [](*range)
r, c = range
r ||= 0..(nrows-1)
c ||= 0..(ncols-1)
row_selection = rows.values_at(*r)
col_selection = []
if rows_are_arrays?(row_selection)
row_selection.each do |row|
values = row.values_at(*c)
col_selection << (values.respond_to?(:to_ary) ? values : [values])
end
else
col_selection << row_selection[*c]
end
SpreadSheet.new(*col_selection,
row_labels: row_labels.values_at(*r),
col_labels: col_labels.values_at(*c))
end | ruby | {
"resource": ""
} |
q26151 | Sycsvpro.SpreadSheet.column_collect | validation | def column_collect(&block)
result = []
0.upto(ncols-1) { |i| result << block.call(self[nil,i]) }
result
end | ruby | {
"resource": ""
} |
q26152 | Sycsvpro.SpreadSheet.rename | validation | def rename(opts = {})
if opts[:rows]
opts[:rows] = opts[:rows][0,nrows]
opts[:rows] += row_labels[opts[:rows].size, nrows]
end
if opts[:cols]
opts[:cols] = opts[:cols][0,ncols]
opts[:cols] += col_labels[opts[:cols].size, ncols]
end
@row_labels = opts[:rows] if opts[:rows]
@col_labels = opts[:cols] if opts[:cols]
end | ruby | {
"resource": ""
} |
q26153 | Sycsvpro.SpreadSheet.to_s | validation | def to_s
col_label_sizes = col_labels.collect { |c| c.to_s.size + 2 }
row_label_size = row_labels.collect { |r| r.to_s.size + 2 }.max
row_col_sizes = rows.transpose.collect { |r| r.collect { |c| c.to_s.size } }
i = -1
col_sizes = col_label_sizes.collect do |s|
i += 1
[row_col_sizes[i],s].flatten.max + 1
end
s = (sprintf("%#{row_label_size}s", " "))
col_labels.each_with_index { |l,i| s << (sprintf("%#{col_sizes[i]}s",
"[#{l}]")) }
s << "\n"
rows.each_with_index do |row, i|
s << (sprintf("%#{row_label_size}s", "[#{row_labels[i]}]"))
row.each_with_index { |c,j| s << (sprintf("%#{col_sizes[j]}s", c)) }
s << "\n"
end
s
end | ruby | {
"resource": ""
} |
q26154 | Sycsvpro.SpreadSheet.rows_from_params | validation | def rows_from_params(opts)
col_count = opts[:cols]
row_count = opts[:rows]
size = row_count * col_count if row_count && col_count
rows = []
if values = opts[:values]
if size
values += [NotAvailable] * (size - values.size)
elsif col_count
values += [NotAvailable] * ((col_count - values.size) % col_count)
elsif row_count
values += [NotAvailable] * ((row_count - values.size) % row_count)
col_count = values.size / row_count
else
col_count = Math.sqrt(values.size).ceil
values += [NotAvailable] * ((col_count - values.size) % col_count)
end
values.each_slice(col_count) { |row| rows << row }
elsif opts[:file]
File.foreach(opts[:file]) do |line|
next if line.chomp.empty?
values = line.split(SEMICOLON) rescue str2utf8(line).split(SEMICOLON)
rows << values.collect { |v|
v.strip.empty? ? NotAvailable : str2num(v.chomp, opts[:ds])
}
end
end
rows
end | ruby | {
"resource": ""
} |
q26155 | Sycsvpro.SpreadSheet.equalize_rows | validation | def equalize_rows(rows)
# Pad shorter rows with NotAvailable so every row has the max column count.
column_sizes = rows.collect { |r| r.size }
# '<= 1' also covers an empty row set, where the original '== 1' fell
# through and column_sizes.max returned nil, crashing the padding below.
return rows if column_sizes.uniq.size <= 1
max_size = column_sizes.max
small_rows = []
column_sizes.each_with_index { |c,i| small_rows << i if c < max_size }
small_rows.each do |i|
  rows[i] += [NotAvailable] * (max_size - rows[i].size)
end
rows
end | ruby | {
"resource": ""
} |
q26156 | Sycsvpro.SpreadSheet.same_column_size? | validation | def same_column_size?(rows)
offset = opts[:c] ? 1 : 0
return true if rows.size == 1 + offset
(0 + offset).upto(rows.size - 2) do |i|
return false unless rows[i].size == rows[i+1].size
end
true
end | ruby | {
"resource": ""
} |
q26157 | Sycsvpro.SpreadSheet.coerce | validation | def coerce(value)
return SpreadSheet.new([value]) if value.is_a?(Numeric)
return SpreadSheet.new(value) if value.is_a?(Array)
end | ruby | {
"resource": ""
} |
q26158 | Sycsvpro.SpreadSheet.process | validation | def process(operator, s)
s = coerce(s) || s
raise "operand needs to be a SpreadSheet, "+
"Numeric or Array" unless s.is_a?(SpreadSheet)
result = []
rlabel = []
clabel = []
s1_row_count, s1_col_count = dim
s2_row_count, s2_col_count = s.dim
row_count = [s1_row_count, s2_row_count].max
col_count = [s1_col_count, s2_col_count].max
0.upto(row_count - 1) do |r|
r1 = r % s1_row_count
r2 = r % s2_row_count
rlabel << "#{row_labels[r1]}#{operator}#{s.row_labels[r2]}"
element = []
0.upto(col_count - 1) do |c|
c1 = c % s1_col_count
c2 = c % s2_col_count
clabel << "#{col_labels[c1]}#{operator}#{s.col_labels[c2]}"
element << rows[r1][c1].send(operator, s.rows[r2][c2])
end
result << element
end
SpreadSheet.new(*result, row_labels: rlabel, col_labels: clabel)
end | ruby | {
"resource": ""
} |
q26159 | Sycsvpro.Counter.process_count | validation | def process_count
# Tally filtered column values per key: per-key element counts, per-key
# sums and global per-column sums, collecting the heading as it goes.
# File.foreach closes the handle; File.new(infile).each_with_index left the
# file open until GC (handle leak).
File.foreach(infile).each_with_index do |line, index|
  result = col_filter.process(row_filter.process(line.chomp, row: index))
  unless result.nil? or result.empty?
    key = unstring(line).split(';').values_at(*key_columns)
    key_value = key_values[key] || key_values[key] = { name: key,
                                                      elements: Hash.new(0),
                                                      sum: 0 }
    result.chomp.split(';').each do |column|
      heading << column if heading.index(column).nil?
      key_value[:elements][column] += 1
      key_value[:sum] += 1
      sums[column] += 1
    end
  end
end
end | ruby | {
"resource": ""
} |
q26160 | Sycsvpro.Counter.write_result | validation | def write_result
sum_line = [sum_row_title] + [''] * (key_titles.size - 1)
headline = heading_sort ? heading.sort : original_pivot_sequence_heading
headline << add_sum_col unless sum_col_title.nil?
headline.each do |h|
sum_line << sums[h]
end
row = 0;
File.open(outfile, 'w') do |out|
out.puts sum_line.join(';') if row == sum_row ; row += 1
out.puts (key_titles + headline).join(';')
key_values.each do |k,v|
out.puts sum_line.join(';') if row == sum_row ; row += 1
line = [k]
headline.each do |h|
line << v[:elements][h] unless h == sum_col_title
end
line << v[:sum] unless sum_col_title.nil?
out.puts line.join(';')
end
end
end | ruby | {
"resource": ""
} |
q26161 | Sycsvpro.Counter.init_sum_scheme | validation | def init_sum_scheme(sum_scheme)
return if sum_scheme.nil?
re = /(\w+):(\d+)|(\w+)/
sum_scheme.scan(re).each do |part|
if part.compact.size == 2
@sum_row_title = part[0]
@sum_row = part[1].to_i
else
@sum_col_title = part[2]
end
end
end | ruby | {
"resource": ""
} |
q26162 | Sycsvpro.Counter.init_key_columns | validation | def init_key_columns(key_scheme)
@key_titles = []
@key_columns = []
keys = key_scheme.scan(/(\d+):(\w+)/)
keys.each do |key|
@key_titles << key[1]
@key_columns << key[0].to_i
end
end | ruby | {
"resource": ""
} |
q26163 | Sycsvpro.Counter.original_pivot_sequence_heading | validation | def original_pivot_sequence_heading
(heading.sort - col_filter.pivot.keys << col_filter.pivot.keys).flatten
end | ruby | {
"resource": ""
} |
q26164 | Sycsvpro.RowFilter.process | validation | def process(object, options={})
object = unstring(object)
return object unless has_filter?
filtered = !filter.flatten.uniq.index(options[:row]).nil?
pattern.each do |p|
filtered = (filtered or !(object =~ Regexp.new(p)).nil?)
end
filtered = (filtered or match_boolean_filter?(object.split(';')))
filtered ? object : nil
end | ruby | {
"resource": ""
} |
q26165 | Turntabler.User.messages | validation | def messages
data = api('pm.history', :receiverid => id)
data['history'].map {|attrs| Message.new(client, attrs)}
end | ruby | {
"resource": ""
} |
q26166 | Turntabler.User.stalk | validation | def stalk
become_fan unless client.user.fan_of.include?(self)
client.rooms.with_friends.detect do |room|
room.listener(id)
end
end | ruby | {
"resource": ""
} |
q26167 | Turntabler.User.boot | validation | def boot(reason = '')
api('room.boot_user', :roomid => room.id, :section => room.section, :target_userid => id, :reason => reason)
true
end | ruby | {
"resource": ""
} |
q26168 | Turntabler.User.report | validation | def report(reason = '')
api('room.report_user', :roomid => room.id, :section => room.section, :reported => id, :reason => reason)
true
end | ruby | {
"resource": ""
} |
q26169 | CaTissue.AbstractSpecimen.default_derived_characteristics | validation | def default_derived_characteristics
chrs = specimen_characteristics || return
pas = chrs.class.nondomain_attributes.reject { |pa| pa == :identifier }
chrs.copy(pas)
end | ruby | {
"resource": ""
} |
q26170 | MapKit.MapView.region= | validation | def region=(args)
case args
when Hash
self.setRegion(CoordinateRegion.new(args[:region]).api, animated: args[:animated])
else
self.setRegion(CoordinateRegion.new(args).api, animated: false)
end
end | ruby | {
"resource": ""
} |
q26171 | CaTissue.Participant.collection_site | validation | def collection_site
return unless medical_identifiers.size == 1
site = medical_identifiers.first.site
return if site.nil?
site.site_type == Site::SiteType::COLLECTION ? site : nil
end | ruby | {
"resource": ""
} |
q26172 | CaTissue.SpecimenRequirement.match_characteristics | validation | def match_characteristics(other)
# True when both requirements carry characteristics that agree on tissue
# side and tissue site; falsy when either side has no characteristics.
mine = characteristics
theirs = other.characteristics
mine && theirs && mine.tissue_side == theirs.tissue_side && mine.tissue_site == theirs.tissue_site
end | ruby | {
"resource": ""
} |
q26173 | CaTissue.ReceivedEventParameters.default_user | validation | def default_user
# Default receiver: the protocol's first coordinator, or — when the
# protocol has exactly one site — that site's coordinator. Returns nil
# when no SCG or protocol can be reached.
scg = specimen_collection_group || (specimen.specimen_collection_group if specimen) || return
protocol = scg.collection_protocol || return
protocol.coordinators.first || (protocol.sites.first.coordinator if protocol.sites.size == 1)
end | ruby | {
"resource": ""
} |
q26174 | Dune::Dashboard.I18n.export! | validation | def export!
puts "Exporting translations:\n"
if config[:split]
translations.keys.each do |locale|
if translations[:en].nil?
puts 'Missing english translation'
exit
end
puts "\nLocale: #{locale}"
fallback_english_hash = flat_hash(translations[:en])
translations_hash = flat_hash(translations[locale])
if locale != :en
translations_hash.each do |key, value|
english_fallback = fallback_english_hash[key]
if value == nil || value == ""
puts " #{key} missing!"
puts " taking english default: '#{english_fallback}'"
translations_hash[key] = english_fallback
end
end
end
save(translations_hash, File.join(export_dir, "translations_#{locale}.js"))
end
else
save(flat_hash(translations), File.join(export_dir, 'translations.js'))
end
end | ruby | {
"resource": ""
} |
q26175 | Dune::Dashboard.I18n.save | validation | def save(translations, file)
file = ::Rails.root.join(file)
FileUtils.mkdir_p File.dirname(file)
variable_to_assign = config.fetch(:variable, 'Ember.I18n.translations')
File.open(file, 'w+') do |f|
f << variable_to_assign
f << ' = '
f << JSON.pretty_generate(translations).html_safe
f << ';'
end
end | ruby | {
"resource": ""
} |
q26176 | Dune::Dashboard.I18n.translations | validation | def translations
::I18n.load_path = default_locales_path
::I18n.backend.instance_eval do
init_translations unless initialized?
translations
end
end | ruby | {
"resource": ""
} |
q26177 | CaTissue.PropertiesLoader.load_properties | validation | def load_properties
# Assemble the application access properties from the properties file, the
# inferred classpath, remoteService.xml (host/port) and database inference.
# the properties file
file = default_properties_file
# File.exist? — File.exists? is deprecated
props = file && File.exist?(file) ? load_properties_file(file) : {}
# Load the Java application jar path.
path = props[:classpath] || props[:path] || infer_classpath
Java.expand_to_class_path(path) if path
# Get the application login properties from the remoteService.xml, if necessary.
unless props.has_key?(:host) or props.has_key?(:port) then
  url = remote_service_url
  if url then
    host, port = url.split(':')
    props[:host] = host
    props[:port] = port
  end
end
unless props.has_key?(:database) then
  # merge! — the original called the non-mutating merge and discarded the
  # result, so the inferred database properties were never applied.
  props.merge!(infer_database_properties)
end
props
end | ruby | {
"resource": ""
} |
q26178 | Vatsim.Data.parse | validation | def parse
download_files
parsing_clients = false
parsing_prefile = false
parsing_general = false
parsing_servers = false
parsing_voice_servers = false
File.open(DATA_FILE_PATH, 'r:ascii-8bit').each { |line|
if line.start_with? ";"
parsing_clients = false
parsing_prefile = false
parsing_general = false
parsing_servers = false
parsing_voice_servers = false
elsif parsing_clients
clienttype = line.split(":")[3]
if clienttype.eql? "PILOT"
@pilots << Pilot.new(line)
elsif clienttype.eql? "ATC"
@atc << ATC.new(line)
end
elsif parsing_prefile
@prefiles << Prefile.new(line)
elsif parsing_general
line_split = line.split("=")
@general[line_split[0].strip.downcase.gsub(" ", "_")] = line_split[1].strip
elsif parsing_servers
@servers << Server.new(line)
elsif parsing_voice_servers
@voice_servers << VoiceServer.new(line) if line.length > 2 # ignore last, empty line for voice server that contains 2 characters
end
parsing_clients = true if line.start_with? "!CLIENTS:"
parsing_prefile = true if line.start_with? "!PREFILE:"
parsing_general = true if line.start_with? "!GENERAL:"
parsing_servers = true if line.start_with? "!SERVERS:"
parsing_voice_servers = true if line.start_with? "!VOICE SERVERS:"
}
end | ruby | {
"resource": ""
} |
q26179 | Vatsim.Data.download_files | validation | def download_files
if !File.exists?(STATUS_FILE_PATH) or File.mtime(STATUS_FILE_PATH) < Time.now - STATUS_DOWNLOAD_INTERVAL
download_to_file STATUS_URL, STATUS_FILE_PATH
end
if !File.exists?(DATA_FILE_PATH) or File.mtime(DATA_FILE_PATH) < Time.now - DATA_DOWNLOAD_INTERVAL
download_to_file random_data_url, DATA_FILE_PATH
end
end | ruby | {
"resource": ""
} |
q26180 | Vatsim.Data.download_to_file | validation | def download_to_file url, file
url = URI.parse(URI.encode(url.strip))
File.new(file, File::CREAT)
Net::HTTP.start(url.host) { |http|
resp = http.get(url.path)
open(file, "wb") { |file|
file.write(resp.body)
}
}
end | ruby | {
"resource": ""
} |
q26181 | Vatsim.Data.random_data_url | validation | def random_data_url
url0s = Array.new
file = File.open(STATUS_FILE_PATH)
file.each {|line|
if line.start_with? "url0"
url0s << line.split("=").last
end
}
return url0s[rand(url0s.length)]
end | ruby | {
"resource": ""
} |
q26182 | NewRelicManagement.Util.cachier! | validation | def cachier!(var = nil)
if var && instance_variable_get("@#{var}")
# => Clear the Single Variable
remove_instance_variable("@#{var}")
else
# => Clear the Whole Damned Cache
instance_variables.each { |x| remove_instance_variable(x) }
end
end | ruby | {
"resource": ""
} |
q26183 | NewRelicManagement.Util.write_json | validation | def write_json(file, object)
return unless file && object
begin
File.open(file, 'w') { |f| f.write(JSON.pretty_generate(object)) }
end
end | ruby | {
"resource": ""
} |
q26184 | NewRelicManagement.Util.filestring | validation | def filestring(file, size = 8192)
return unless file
return file unless file.is_a?(String) && File.file?(file) && File.size(file) <= size
File.read(file)
end | ruby | {
"resource": ""
} |
q26185 | NewRelicManagement.Util.common_array | validation | def common_array(ary) # rubocop: disable AbcSize
return ary unless ary.is_a? Array
count = ary.count
return ary if count.zero?
return ary.flatten.uniq if count == 1
common = ary[0] & ary[1]
return common if count == 2
(count - 2).times { |x| common &= ary[x + 2] } if count > 2
common
end | ruby | {
"resource": ""
} |
q26186 | Isimud.EventListener.run | validation | def run
bind_queues and return if test_env?
start_shutdown_thread
start_error_counter_thread
client.on_exception do |e|
count_error(e)
end
client.connect
start_event_thread
puts 'EventListener started. Hit Ctrl-C to exit'
Thread.stop
puts 'Main thread wakeup - exiting.'
client.close
end | ruby | {
"resource": ""
} |
q26187 | Isimud.EventListener.register_observer_class | validation | def register_observer_class(observer_class)
@observer_mutex.synchronize do
return if @observed_models.include?(observer_class)
@observed_models << observer_class
log "EventListener: registering observer class #{observer_class}"
observer_queue.bind(models_exchange, routing_key: "#{Isimud.model_watcher_schema}.#{observer_class.base_class.name}.*")
end
end | ruby | {
"resource": ""
} |
q26188 | Isimud.EventListener.register_observer | validation | def register_observer(observer)
@observer_mutex.synchronize do
log "EventListener: registering observer #{observer.class} #{observer.id}"
@observers[observer_key_for(observer.class, observer.id)] = observer.observe_events(client)
end
end | ruby | {
"resource": ""
} |
q26189 | Isimud.EventListener.unregister_observer | validation | def unregister_observer(observer_class, observer_id)
@observer_mutex.synchronize do
log "EventListener: un-registering observer #{observer_class} #{observer_id}"
if (consumer = @observers.delete(observer_key_for(observer_class, observer_id)))
consumer.cancel
end
end
end | ruby | {
"resource": ""
} |
q26190 | Isimud.EventListener.observer_queue | validation | def observer_queue
@observer_queue ||= client.create_queue([name, 'listener', Socket.gethostname, Process.pid].join('.'),
models_exchange,
queue_options: {exclusive: true},
subscribe_options: {manual_ack: true})
end | ruby | {
"resource": ""
} |
q26191 | Isimud.Event.as_json | validation | def as_json(options = {})
session_id = parameters.delete(:session_id) || Thread.current[:keas_session_id]
data = {type: type,
action: action,
user_id: user_id,
occurred_at: occurred_at,
eventful_type: eventful_type,
eventful_id: eventful_id,
session_id: session_id}
unless options[:omit_parameters]
data[:parameters] = parameters
data[:attributes] = attributes
end
data
end | ruby | {
"resource": ""
} |
q26192 | MrPoole.CLI.do_create | validation | def do_create(action)
options = do_creation_options
options.title ||= @params.first
@helper.send("#{action}_usage") unless options.title
fn = @commands.send(action, options)
puts "#{@src_dir}/#{fn}"
end | ruby | {
"resource": ""
} |
q26193 | MrPoole.CLI.do_move | validation | def do_move(action)
options = do_move_options(action)
path = @params.first
@helper.send("#{action}_usage") unless path
fn = @commands.send(action, path, options)
puts "#{@src_dir}/#{fn}"
end | ruby | {
"resource": ""
} |
q26194 | MrPoole.CLI.do_move_options | validation | def do_move_options(type)
options = OpenStruct.new
opt_parser = OptionParser.new do |opts|
if type == 'publish'
opts.on('-d', '--keep-draft', "Keep draft post") do |d|
options.keep_draft = d
end
else
opts.on('-p', '--keep-post', "Do not delete post") do |p|
options.keep_post = p
end
end
opts.on('-t', '--keep-timestamp', "Keep existing timestamp") do |t|
options.keep_timestamp = t
end
end
opt_parser.parse! @params
options
end | ruby | {
"resource": ""
} |
q26195 | YARD.RelativeMarkdownLinks.resolve_links | validation | def resolve_links(text)
html = Nokogiri::HTML.fragment(text)
html.css("a[href]").each do |link|
href = URI(link["href"])
next unless href.relative? && markup_for_file(nil, href.path) == :markdown
link.replace "{file:#{href} #{link.inner_html}}"
end
super(html.to_s)
end | ruby | {
"resource": ""
} |
q26196 | NewRelicManagement.Client.nr_api | validation | def nr_api
# => Build the Faraday Connection
@conn ||= Faraday::Connection.new('https://api.newrelic.com', conn_opts) do |client|
client.use Faraday::Response::RaiseError
client.use FaradayMiddleware::EncodeJson
client.use FaradayMiddleware::ParseJson, content_type: /\bjson$/
client.response :logger if Config.environment.to_s.casecmp('development').zero? # => Log Requests to STDOUT
client.adapter Faraday.default_adapter #:net_http_persistent
end
end | ruby | {
"resource": ""
} |
q26197 | NewRelicManagement.Client.alert_add_entity | validation | def alert_add_entity(entity_id, condition_id, entity_type = 'Server')
nr_api.put do |req|
req.url url('alerts_entity_conditions', entity_id)
req.params['entity_type'] = entity_type
req.params['condition_id'] = condition_id
end
end | ruby | {
"resource": ""
} |
q26198 | NewRelicManagement.Client.alert_delete_entity | validation | def alert_delete_entity(entity_id, condition_id, entity_type = 'Server')
nr_api.delete do |req|
req.url url('alerts_entity_conditions', entity_id)
req.params['entity_type'] = entity_type
req.params['condition_id'] = condition_id
end
end | ruby | {
"resource": ""
} |
q26199 | NewRelicManagement.Client.get_server_id | validation | def get_server_id(server_id)
return nil unless server_id =~ /^[0-9]+$/
ret = nr_api.get(url('servers', server_id)).body
ret['server']
rescue Faraday::ResourceNotFound, NoMethodError
nil
end | ruby | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.