_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
---|---|---|---|---|---|
q24300 | DEVp2p.Command.receive | train | def receive(proto, data)
# Fan a decoded command payload out to every registered receive callback.
#
# proto - the protocol instance the message arrived on.
# data  - the decoded payload.
#
# NOTE(review): assumes `structure` and `receive_callbacks` are provided by
# the including command class — confirm against DEVp2p::Command.
if structure.instance_of?(RLP::Sedes::CountableList)
# List-structured payloads are handed over as a single positional argument.
receive_callbacks.each {|cb| cb.call(proto, data) }
else
# Otherwise the payload is assumed Hash-like and splatted into keywords.
receive_callbacks.each {|cb| cb.call(proto, **data) }
end
end | ruby | {
"resource": ""
} |
q24301 | Protobuf.Encoder.write_pair | train | def write_pair(field, value)
# Serialize one field/value pair onto the output stream.
#
# Protobuf wire format: every field is preceded by a varint "key" built as
# (field_number << 3) | wire_type.
key = (field.tag << 3) | field.wire_type
stream << ::Protobuf::Field::VarintField.encode(key)
# The field object encodes its own value according to its wire type.
stream << field.encode(value)
end | ruby | {
"resource": ""
} |
# Return the most recent backup recorded for +path+, or nil when none exist.
#
# path - String path the backups were taken from.
#
# Returns a Rudy::Backup or nil.
def get(path)
  # The original also built `Rudy::Backup.new path` into an unused local
  # (`tmp`) — dead allocation, removed.
  backups = Rudy::Backups.list(:path => path)
  # Guard against a nil/empty listing; callers treat nil as "no backup".
  return nil unless backups.is_a?(Array) && !backups.empty?
  backups.first
end
"resource": ""
} |
# Pop the next batch of frames, serving protocols round-robin.
#
# Starts at `next_protocol` and cycles through every other protocol queue,
# returning the first non-empty batch of frames.
#
# Returns an Array of frames, or [] when every queue is empty.
def pop_frames
  protocols = @queues.keys
  # Rotate the list so next_protocol is served first (Array#rotate replaces
  # the hand-rolled slice-and-concat and behaves identically for a valid index).
  protocols = protocols.rotate(protocols.index(next_protocol))
  protocols.each do |id|
    frames = pop_frames_for_protocol(id)
    return frames unless frames.empty?
  end
  []
end
"resource": ""
} |
# Render a line-by-line diff of two arrays (one element per line).
#
# array1, array2 - arrays whose elements respond to #to_s.
# format         - any output format accepted by Diffy (default :text).
#
# Returns the diff as a String.
def array_diff(array1, array2, format = :text)
  left, right = [array1, array2].map { |lines| lines.join("\n") << "\n" }
  Diffy::Diff.new(left, right).to_s(format)
end
"resource": ""
} |
# ECIES-encrypt +data+ for the holder of +raw_pubkey+.
#
# raw_pubkey - the raw 64-byte public key.
#
# Raises ArgumentError when the key length is wrong.
def encrypt(data, raw_pubkey)
  unless raw_pubkey.size == 64
    raise ArgumentError, "invalid pubkey of length #{raw_pubkey.size}"
  end
  Crypto::ECIES.encrypt(data, raw_pubkey)
end
"resource": ""
} |
q24306 | Scrapifier.Support.sf_eval_uri | train | def sf_eval_uri(uri, exts = [])
doc = Nokogiri::HTML(open(uri).read)
doc.encoding, meta = 'utf-8', { uri: uri }
[:title, :description, :keywords, :lang, :encode, :reply_to, :author].each do |k|
node = doc.xpath(sf_xpaths[k])[0]
meta[k] = node.nil? ? '-' : node.text
end
meta[:images] = sf_fix_imgs(doc.xpath(sf_xpaths[:image]), uri, exts)
meta
rescue SocketError
{}
end | ruby | {
"resource": ""
} |
q24307 | Scrapifier.Support.sf_check_img_ext | train | def sf_check_img_ext(images, allowed = [])
allowed ||= []
if images.is_a?(String)
images = images.split
elsif !images.is_a?(Array)
images = []
end
images.select { |i| i =~ sf_regex(:image, allowed) }
end | ruby | {
"resource": ""
} |
q24308 | Scrapifier.Support.sf_regex | train | def sf_regex(type, *args)
type = type.to_sym unless type.is_a? Symbol
type == :image && sf_img_regex(args.flatten) || sf_uri_regex[type]
end | ruby | {
"resource": ""
} |
q24309 | Scrapifier.Support.sf_xpaths | train | def sf_xpaths
{ title: XPath::TITLE,
description: XPath::DESC,
keywords: XPath::KEYWORDS,
lang: XPath::LANG,
encode: XPath::ENCODE,
reply_to: XPath::REPLY_TO,
author: XPath::AUTHOR,
image: XPath::IMG }
end | ruby | {
"resource": ""
} |
q24310 | Scrapifier.Support.sf_fix_imgs | train | def sf_fix_imgs(imgs, uri, exts = [])
sf_check_img_ext(imgs.map do |img|
img = img.to_s
unless img =~ sf_regex(:protocol)
img = sf_fix_protocol(img, sf_domain(uri))
end
img if img =~ sf_regex(:image)
end.compact, exts)
end | ruby | {
"resource": ""
} |
# True when the current global zone/environment/role maps to a machine
# group defined in the machines config (the default group always counts).
#
# Raises NoConfig / NoMachinesConfig when configuration is missing.
def known_machine_group?
  raise NoConfig unless @@config
  return true if default_machine_group?
  raise NoMachinesConfig unless @@config.machines
  # Bug fix: the old guard `!@@config && !@@global` was dead code — a nil
  # @@config already raised above, so the condition could never be true.
  # The intent was clearly to bail out when no global settings exist
  # (otherwise the next line raised NoMethodError on nil).
  return false unless @@global
  zon, env, rol = @@global.zone, @@global.environment, @@global.role
  conf = @@config.machines.find_deferred(@@global.region, zon, [env, rol])
  conf ||= @@config.machines.find_deferred(zon, [env, rol])
  !conf.nil?
end
"resource": ""
} |
q24312 | Rudy.Huxtable.fetch_routine_config | train | def fetch_routine_config(action)
raise "No action specified" unless action
raise NoConfig unless @@config
raise NoRoutinesConfig unless @@config.routines
raise NoGlobal unless @@global
action = action.to_s.tr('-:', '_')
zon, env, rol = @@global.zone, @@global.environment, @@global.role
disk_defs = fetch_machine_param(:disks) || {}
# We want to find only one routines config with the name +action+.
# This is unlike the routines config where it's okay to merge via
# precedence.
routine = @@config.routines.find_deferred(@@global.environment, @@global.role, action)
routine ||= @@config.routines.find_deferred([@@global.environment, @@global.role], action)
routine ||= @@config.routines.find_deferred(@@global.role, action)
return nil unless routine
return routine unless routine.has_key?(:disks)
routine.disks.each_pair do |raction,disks|
unless disks.kind_of?(Hash)
li "#{raction} is not defined. Check your #{action} routines config.".color(:red)
next
end
disks.each_pair do |path, props|
unless disk_defs.has_key?(path)
li "#{path} is not defined. Check your machines config.".color(:red)
routine.disks[raction].delete(path)
next
end
routine.disks[raction][path] = disk_defs[path].merge(props)
end
end
routine
end | ruby | {
"resource": ""
} |
q24313 | Rudy.Huxtable.default_machine_group? | train | def default_machine_group?
default_env = @@config.defaults.environment || Rudy::DEFAULT_ENVIRONMENT
default_rol = @@config.defaults.role || Rudy::DEFAULT_ROLE
@@global.environment == default_env && @@global.role == default_rol
end | ruby | {
"resource": ""
} |
# Validate every `<%= ENV['NAME'] %>` reference embedded in +param+.
#
# Logs an error for names missing from ENV and a warning for names that
# exist but are blank.
#
# Returns nil.
def check_embed_envvar(param)
  # Bug fix: the old single-match regex used a greedy `(.+)`, so a line
  # with several ENV references captured garbage spanning from the first
  # reference to the last, and only one (corrupted) name was checked.
  # Non-greedy capture + scan checks each reference individually.
  param.scan(/<%=\s*ENV\[['"]?(.+?)['"]?\]\s*%>/) do |(envvar_name)|
    if !ENV.key?(envvar_name)
      @logger.error "Variable name: #{envvar_name} is not found in ENV"
    elsif ENV[envvar_name] =~ /^\s*$/
      @logger.warn "Env var: #{envvar_name} exists, but null string"
    end
  end
  nil
end
"resource": ""
} |
q24315 | Expectacle.ThrowerBase.embed_var | train | def embed_var(param)
check_embed_envvar(param)
erb = ERB.new(param)
erb.result(binding)
end | ruby | {
"resource": ""
} |
q24316 | ActionPool.Pool.fill_pool | train | def fill_pool
threads = []
if(@open)
@lock.synchronize do
required = min - size
if(required > 0)
required.times do
thread = ActionPool::Thread.new(:pool => self, :respond_thread => @respond_to,
:a_timeout => @action_timeout, :t_timeout => @thread_timeout, :logger => @logger,
:autostart => false)
@threads << thread
threads << thread
end
end
end
end
threads.each{|t|t.start}
threads
end | ruby | {
"resource": ""
} |
q24317 | ActionPool.Pool.flush | train | def flush
mon = Splib::Monitor.new
@threads.size.times{ queue{ mon.wait } }
@queue.wait_empty
sleep(0.01)
mon.broadcast
end | ruby | {
"resource": ""
} |
q24318 | ActionPool.Pool.resize | train | def resize
@logger.info("Pool is being resized to stated maximum: #{max}")
until(size <= max) do
t = nil
t = @threads.find{|x|x.waiting?}
t = @threads.shift unless t
t.stop
end
flush
nil
end | ruby | {
"resource": ""
} |
q24319 | Expectacle.ThrowerBase.open_interactive_process | train | def open_interactive_process(spawn_cmd)
@logger.info "Begin spawn: #{spawn_cmd}"
PTY.spawn(spawn_cmd) do |reader, writer, _pid|
@enable_mode = false
@reader = reader
@writer = writer
@writer.sync = true
yield
end
end | ruby | {
"resource": ""
} |
q24320 | Expectacle.ThrowerBase.do_on_interactive_process | train | def do_on_interactive_process
until @reader.closed? || @reader.eof?
@reader.expect(expect_regexp, @timeout) do |match|
yield match
end
end
rescue Errno::EIO => error
# on linux, PTY raises Errno::EIO when spawned process closed.
@logger.debug "PTY raises Errno::EIO, #{error.message}"
end | ruby | {
"resource": ""
} |
q24321 | WebPurify.ImageFilters.imgcheck | train | def imgcheck(imgurl, options={})
params = {
:method => WebPurify::Constants.methods[:imgcheck],
:imgurl => imgurl
}
parsed = WebPurify::Request.query(image_request_base, @query_base, params.merge(options))
return parsed[:imgid]
end | ruby | {
"resource": ""
} |
q24322 | WebPurify.ImageFilters.imgstatus | train | def imgstatus(imgid, options={})
params = {
:method => WebPurify::Constants.methods[:imgstatus],
:imgid => imgid
}
parsed = WebPurify::Request.query(image_request_base, @query_base, params.merge(options))
return parsed[:status]
end | ruby | {
"resource": ""
} |
q24323 | WebPurify.ImageFilters.imgaccount | train | def imgaccount
params = {
:method => WebPurify::Constants.methods[:imgaccount]
}
parsed = WebPurify::Request.query(image_request_base, @query_base, params)
return parsed[:remaining].to_i
end | ruby | {
"resource": ""
} |
# Byte-wise XOR of two equal-length strings.
#
# Returns the XOR-ed string.
# Raises ArgumentError when the lengths differ.
def sxor(s1, s2)
  raise ArgumentError, "strings must have equal size" unless s1.size == s2.size
  s1.unpack("C*").zip(s2.unpack("C*")).map { |x, y| x ^ y }.pack("C*")
end
"resource": ""
} |
# Register log color matchers with Capistrano's logger.
#
# options - a single matcher or an Array of matchers.
#
# Returns the result of the last registration.
def colorize(options)
  # Idiom fix: compare with is_a? instead of `.class == Array`, so Array
  # subclasses are handled too.
  if options.is_a?(Array)
    options.each do |opt|
      Capistrano::Logger.add_color_matcher(opt)
    end
  else
    Capistrano::Logger.add_color_matcher(options)
  end
end
"resource": ""
} |
q24326 | Nutcracker.Wrapper.start | train | def start *args
return self if attached? or running?
@pid = ::Process.spawn Nutcracker.executable, *command
Process.detach(@pid)
sleep 2
raise "Nutcracker failed to start" unless running?
Kernel.at_exit { kill if running? }
self
end | ruby | {
"resource": ""
} |
q24327 | Nutcracker.Wrapper.use | train | def use plugin, *args
Nutcracker.const_get(plugin.to_s.capitalize).start(self,*args)
end | ruby | {
"resource": ""
} |
q24328 | Nutcracker.Wrapper.overview | train | def overview
data = { :clusters => [], :config => config }
stats.each do |cluster_name, cluster_data|
# Setting global server attributes ( like hostname, version etc...)
unless cluster_data.is_a? Hash
data[cluster_name] = cluster_data
next
end
#next unless redis? cluster_name # skip memcached clusters
aliases = node_aliases cluster_name
cluster = { nodes: [], name: cluster_name }
cluster_data.each do |node, node_value|
# Adding node
if node_value.kind_of? Hash
node_data = cluster_data[node]
node = aliases[node] || node
url = ( node =~ /redis\:\/\// ) ? node : "redis://#{node}"
info = redis_info(url, config[cluster_name]["redis_auth"])
cluster[:nodes] << {
server_url: url, info: info, running: info.any?
}.merge(node_data)
else # Cluster attribute
cluster[node] = node_value
end
end
data[:clusters].push cluster
end
data
end | ruby | {
"resource": ""
} |
# Collect a normalized stats hash from a single Redis node.
#
# url      - redis:// URL of the node.
# password - password for AUTH, or nil.
#
# Returns a Hash of numeric stats, or {} when the node is unreachable.
def redis_info(url, password)
  r = nil
  begin
    r = Redis.new url: url, password: password
    info = r.info.merge 'dbsize' => r.dbsize
  rescue StandardError => e
    # Was `rescue Exception`, which also swallowed SignalException/SystemExit.
    STDERR.puts "[ERROR][#{__FILE__}:#{__LINE__}] Failed to get data from Redis - " +
      "#{url.inspect} (using password #{password.inspect}): #{e.message}\n#{e.backtrace.join("\n")}"
    return {}
  end
  begin
    info['maxmemory'] = @options.fetch(:max_memory) { r.config(:get, 'maxmemory')['maxmemory'] }
  rescue StandardError
    # CONFIG GET may be disabled (e.g. managed Redis); fall back to RSS.
    info['maxmemory'] = info['used_memory_rss']
  end
  {
    'connections' => info['connected_clients'].to_i,
    'used_memory' => info['used_memory'].to_f,
    'used_memory_rss' => info['used_memory_rss'].to_f,
    'fragmentation' => info['mem_fragmentation_ratio'].to_f,
    'expired_keys' => info['expired_keys'].to_i,
    'evicted_keys' => info['evicted_keys'].to_i,
    'hits' => info['keyspace_hits'].to_i,
    'misses' => info['keyspace_misses'].to_i,
    'keys' => info['dbsize'].to_i,
    'max_memory' => info['maxmemory'].to_i,
    'hit_ratio' => 0
  }.tap {|d| d['hit_ratio'] = d['hits'].to_f / (d['hits']+d['misses']).to_f if d['hits'] > 0 }
ensure
  # Leak fix: the old code only called r.quit on the success path; always
  # release the connection, best-effort.
  begin
    r.quit if r
  rescue StandardError
    # ignore errors while disconnecting
  end
end
"resource": ""
} |
q24330 | WebPurify.TextFilters.check | train | def check(text, options={})
params = {
:method => WebPurify::Constants.methods[:check],
:text => text
}
parsed = WebPurify::Request.query(text_request_base, @query_base, params.merge(options))
return parsed[:found]=='1'
end | ruby | {
"resource": ""
} |
q24331 | WebPurify.TextFilters.check_count | train | def check_count(text, options={})
params = {
:method => WebPurify::Constants.methods[:check_count],
:text => text
}
parsed = WebPurify::Request.query(text_request_base, @query_base, params.merge(options))
return parsed[:found].to_i
end | ruby | {
"resource": ""
} |
q24332 | WebPurify.TextFilters.replace | train | def replace(text, symbol, options={})
params = {
:method => WebPurify::Constants.methods[:replace],
:text => text,
:replacesymbol => symbol
}
parsed = WebPurify::Request.query(text_request_base, @query_base, params.merge(options))
return parsed[:text]
end | ruby | {
"resource": ""
} |
# Return the list of expletives found in +text+.
#
# The API returns a String for a single hit and an Array for several;
# both cases are normalized to an Array ([] when nothing was found).
def return(text, options={})
  params = {
    :method => WebPurify::Constants.methods[:return],
    :text => text
  }
  parsed = WebPurify::Request.query(text_request_base, @query_base, params.merge(options))
  expletive = parsed.fetch(:expletive, [])
  # Idiom fix: `return [] << x` built an empty literal then pushed into it;
  # an array literal expresses the same thing directly.
  expletive.is_a?(String) ? [expletive] : expletive
end
"resource": ""
} |
q24334 | Rudy::AWS.EC2::Group.to_s | train | def to_s(with_title=false)
lines = [liner_note]
(self.addresses || {}).each_pair do |address,rules|
lines << "%18s -> %s" % [address.to_s, rules.collect { |p| p.to_s}.join(', ')]
end
lines.join($/)
end | ruby | {
"resource": ""
} |
q24335 | ActionPool.Thread.start_thread | train | def start_thread
begin
@logger.info("New pool thread is starting (#{self})")
until(@kill) do
begin
@action = nil
if(@pool.size > @pool.min && !@thread_timeout.zero?)
Timeout::timeout(@thread_timeout) do
@action = @pool.action
end
else
@action = @pool.action
end
run(@action[0], @action[1]) unless @action.nil?
rescue Timeout::Error
@kill = true
rescue Wakeup
@logger.info("Thread #{::Thread.current} was woken up.")
rescue Retimeout
@logger.warn('Thread was woken up to reset thread timeout')
rescue Exception => boom
@logger.error("Pool thread caught an exception: #{boom}\n#{boom.backtrace.join("\n")}")
@respond_to.raise boom
end
end
rescue Retimeout
@logger.warn('Thread was woken up to reset thread timeout')
retry
rescue Wakeup
@logger.info("Thread #{::Thread.current} was woken up.")
rescue Exception => boom
@logger.error("Pool thread caught an exception: #{boom}\n#{boom.backtrace.join("\n")}")
@respond_to.raise boom
ensure
@logger.info("Pool thread is shutting down (#{self})")
@pool.remove(self)
end
end | ruby | {
"resource": ""
} |
q24336 | Paymill.Subscription.parse_timestamps | train | def parse_timestamps
super
@next_capture_at = Time.at(next_capture_at) if next_capture_at
@canceled_at = Time.at(canceled_at) if canceled_at
@trial_start = Time.at(trial_start) if trial_start
@trial_end = Time.at(trial_end) if trial_end
end | ruby | {
"resource": ""
} |
# True when the DNS challenge TXT record for +domain+ exists and its
# content equals +key+.
#
# Returns true or false.
def challenge_record_valid?(domain, key)
  # Predicate fix: the old `record && record.data == key` returned nil
  # (not false) for a missing record; safe navigation keeps it boolean
  # while preserving the truthiness callers observe.
  challenge_record(domain)&.data == key
end
"resource": ""
} |
q24338 | YAMG.Splash.splash_composite | train | def splash_composite
max = size.min / 9
assets.each do |over|
other = MiniMagick::Image.open(File.join(src, over))
other.resize(max) if other.dimensions.max >= max
self.img = compose(other, over)
end
end | ruby | {
"resource": ""
} |
q24339 | YAMG.Splash.image | train | def image(out = nil)
splash_start
splash_composite
return img unless out
FileUtils.mkdir_p File.dirname(out)
img.write(out)
rescue Errno::ENOENT
YAMG.puts_and_exit("Path not found '#{out}'")
end | ruby | {
"resource": ""
} |
# Instantiate and run the managed process +name+ (defaults to the
# client's configured @managed_process).
#
# Raises CollinsError when no name is available or the process fails.
def manage_process name = nil
  name = @managed_process if name.nil?
  # Guard clause instead of the trailing else branch.
  raise CollinsError.new("No managed process specified") unless name
  begin
    Collins.const_get(name).new(self).run
  rescue StandardError => e
    # Was `rescue Exception`, which would also trap SystemExit/SignalException.
    raise CollinsError.new(e.message)
  end
end
"resource": ""
} |
q24341 | Tabular.Row.[]= | train | def []=(key, value)
if columns.key?(key)
@array[columns.index(key)] = value
else
@array << value
columns << key
end
hash[key] = value
end | ruby | {
"resource": ""
} |
# Build the template-binding Hash used by the code generator.
#
# Aggregates storyboards, cells, view controllers and segues across all
# resource groups and computes presence flags for template conditionals.
#
# Returns a Hash.
def create_dictionary_representation
  dict = { storyboards: [],
           table_cells: [],
           collection_cells: [],
           view_controllers: [],
           segues: [],
           prefix: @config.prefix }
  @project.resources.each do |group|
    dict[:storyboards] << group.storyboard
    dict[:table_cells] += group.table_cells
    dict[:collection_cells] += group.collection_cells
    dict[:view_controllers] += group.view_controllers
    dict[:segues] += group.segues
  end
  # Presence flags for the templates; `!empty?` is the exact boolean
  # equivalent of `count > 0` without walking the arrays.
  dict[:has_table_cells] = !dict[:table_cells].empty?
  dict[:has_collection_cells] = !dict[:collection_cells].empty?
  dict[:has_segues] = !dict[:segues].empty?
  dict
end
"resource": ""
} |
q24343 | YAMG.Screenshot.work | train | def work(path)
out = "#{path}/#{@name}.png"
@fetcher.fetch(output: out, width: @size[0], height: @size[1], dpi: @dpi)
rescue Screencap::Error
puts "Fail to capture screenshot #{@url}"
end | ruby | {
"resource": ""
} |
q24344 | Tabular.Columns.<< | train | def <<(key)
column = Column.new(@table, self, key)
return if is_blank?(column.key) || key?(key)
@column_indexes[column.key] = @columns.size
@column_indexes[@columns.size] = column
@columns_by_key[column.key] = column
@columns << column
end | ruby | {
"resource": ""
} |
q24345 | ElmSprockets.Processor.add_elm_dependencies | train | def add_elm_dependencies(filepath, context)
# Turn e.g. ~/NoRedInk/app/assets/javascripts/Quiz/QuestionStoreAPI.js.elm
# into just ~/NoRedInk/app/assets/javascripts/
dirname = context.pathname.to_s.gsub Regexp.new(context.logical_path + ".+$"), ""
File.read(filepath).each_line do |line|
# e.g. `import Quiz.QuestionStore exposing (..)`
match = line.match(/^import\s+([^\s]+)/)
next unless match
# e.g. Quiz.QuestionStore
module_name = match.captures[0]
# e.g. Quiz/QuestionStore
dependency_logical_name = module_name.tr(".", "/")
# e.g. ~/NoRedInk/app/assets/javascripts/Quiz/QuestionStore.elm
dependency_filepath = dirname + dependency_logical_name + ".elm"
# If we don't find the dependency in our filesystem, assume it's because
# it comes in through a third-party package rather than our sources.
next unless File.file? dependency_filepath
context.depend_on dependency_logical_name
add_elm_dependencies dependency_filepath, context
end
end | ruby | {
"resource": ""
} |
# Log at TRACE level when the underlying logger supports it, otherwise
# fall back to DEBUG. The message block is forwarded untouched.
def trace(progname = nil, &block)
  level = logger.respond_to?(:trace) ? :trace : :debug
  logger.public_send(level, progname, &block)
end
"resource": ""
} |
q24347 | FreebaseAPI.Session.surl | train | def surl(service)
service_url = @env == :stable ? API_URL : SANDBOX_API_URL
service_url = service_url + "/" + service
service_url.gsub!('www', 'usercontent') if service.to_s == 'image'
service_url
end | ruby | {
"resource": ""
} |
q24348 | FreebaseAPI.Session.get | train | def get(url, params={}, options={})
FreebaseAPI.logger.debug("GET #{url}")
params[:key] = @key if @key
options = { format: options[:format], query: params }
options.merge!(@proxy_options)
response = self.class.get(url, options)
handle_response(response)
end | ruby | {
"resource": ""
} |
q24349 | FreebaseAPI.Session.handle_response | train | def handle_response(response)
case response.code
when 200..299
response
else
if response.request.format == :json
raise FreebaseAPI::ServiceError.new(response['error'])
else
raise FreebaseAPI::NetError.new('code' => response.code, 'message' => response.response.message)
end
end
end | ruby | {
"resource": ""
} |
# Build HTTParty proxy options from an explicit URL or the conventional
# proxy environment variables.
#
# url - optional proxy URL; when nil the environment is consulted in the
#       order HTTPS_PROXY, https_proxy, HTTP_PROXY, http_proxy.
#
# Returns a Hash (empty when no proxy is configured).
def get_proxy_options(url=nil)
  options = {}
  # Readability fix: the old `if url = url || ENV[...]` assigned inside the
  # condition (and shadowed the parameter); same precedence, stated plainly.
  proxy_url = url || ENV['HTTPS_PROXY'] || ENV['https_proxy'] || ENV['HTTP_PROXY'] || ENV['http_proxy']
  if proxy_url
    proxy_uri = URI.parse(proxy_url)
    options[:http_proxyaddr] = proxy_uri.host
    options[:http_proxyport] = proxy_uri.port
    options[:http_proxyuser] = proxy_uri.user
    options[:http_proxypass] = proxy_uri.password
  end
  options
end
"resource": ""
} |
q24351 | Tabular.Column.precision | train | def precision
# Maximum number of decimal places (at most 3, due to round(3)) across this
# column's cells: each cell is coerced to Float, rounded, and the length of
# its decimal part after stripping trailing zeros is measured. Memoized.
#
# NOTE(review): a whole-number cell like 2.0 contributes 0 ("0" strips to
# ""), and non-numeric cells coerce to 0.0 via to_f — confirm intended.
@precision ||= cells.map(&:to_f).map { |n| n.round(3) }.map { |n| n.to_s.split(".").last.gsub(/0+$/, "").length }.max
end | ruby | {
"resource": ""
} |
q24352 | Markdiff.Differ.apply_patch | train | def apply_patch(operations, node)
i = 0
operations.sort_by {|operation| i += 1; [-operation.priority, i] }.each do |operation|
case operation
when ::Markdiff::Operations::AddChildOperation
operation.target_node.add_child(operation.inserted_node)
mark_li_or_tr_as_changed(operation.target_node)
mark_top_level_node_as_changed(operation.target_node)
when ::Markdiff::Operations::AddDataBeforeHrefOperation
operation.target_node["data-before-href"] = operation.target_node["href"]
operation.target_node["href"] = operation.after_href
mark_li_or_tr_as_changed(operation.target_node)
mark_top_level_node_as_changed(operation.target_node)
when ::Markdiff::Operations::AddDataBeforeTagNameOperation
operation.target_node["data-before-tag-name"] = operation.target_node.name
operation.target_node.name = operation.after_tag_name
mark_li_or_tr_as_changed(operation.target_node)
mark_top_level_node_as_changed(operation.target_node)
when ::Markdiff::Operations::AddPreviousSiblingOperation
operation.target_node.add_previous_sibling(operation.inserted_node)
mark_li_or_tr_as_changed(operation.target_node) if operation.target_node.name != "li" && operation.target_node.name != "tr"
mark_top_level_node_as_changed(operation.target_node.parent)
when ::Markdiff::Operations::RemoveOperation
operation.target_node.replace(operation.inserted_node) if operation.target_node != operation.inserted_node
mark_li_or_tr_as_changed(operation.target_node)
mark_top_level_node_as_changed(operation.target_node)
when ::Markdiff::Operations::TextDiffOperation
parent = operation.target_node.parent
operation.target_node.replace(operation.inserted_node)
mark_li_or_tr_as_changed(parent)
mark_top_level_node_as_changed(parent)
end
end
node
end | ruby | {
"resource": ""
} |
q24353 | Markdiff.Differ.create_patch | train | def create_patch(before_node, after_node)
if before_node.to_html == after_node.to_html
[]
else
create_patch_from_children(before_node, after_node)
end
end | ruby | {
"resource": ""
} |
q24354 | Markdiff.Differ.render | train | def render(before_string, after_string)
before_node = ::Nokogiri::HTML.fragment(before_string)
after_node = ::Nokogiri::HTML.fragment(after_string)
patch = create_patch(before_node, after_node)
apply_patch(patch, before_node)
end | ruby | {
"resource": ""
} |
q24355 | Markdiff.Differ.create_patch_from_children | train | def create_patch_from_children(before_node, after_node)
operations = []
identity_map = {}
inverted_identity_map = {}
::Diff::LCS.sdiff(before_node.children.map(&:to_s), after_node.children.map(&:to_s)).each do |element|
type, before, after = *element
if type == "="
before_child = before_node.children[before[0]]
after_child = after_node.children[after[0]]
identity_map[before_child] = after_child
inverted_identity_map[after_child] = before_child
end
end
# Partial matching
before_node.children.each do |before_child|
if identity_map[before_child]
next
end
after_node.children.each do |after_child|
case
when identity_map[before_child]
break
when inverted_identity_map[after_child]
when before_child.text?
if after_child.text?
identity_map[before_child] = after_child
inverted_identity_map[after_child] = before_child
operations << ::Markdiff::Operations::TextDiffOperation.new(target_node: before_child, after_node: after_child)
end
when before_child.name == after_child.name
if before_child.attributes == after_child.attributes
identity_map[before_child] = after_child
inverted_identity_map[after_child] = before_child
operations += create_patch(before_child, after_child)
elsif detect_href_difference(before_child, after_child)
operations << ::Markdiff::Operations::AddDataBeforeHrefOperation.new(after_href: after_child["href"], target_node: before_child)
identity_map[before_child] = after_child
inverted_identity_map[after_child] = before_child
operations += create_patch(before_child, after_child)
end
when detect_heading_level_difference(before_child, after_child)
operations << ::Markdiff::Operations::AddDataBeforeTagNameOperation.new(after_tag_name: after_child.name, target_node: before_child)
identity_map[before_child] = after_child
inverted_identity_map[after_child] = before_child
end
end
end
before_node.children.each do |before_child|
unless identity_map[before_child]
operations << ::Markdiff::Operations::RemoveOperation.new(target_node: before_child)
end
end
after_node.children.each do |after_child|
unless inverted_identity_map[after_child]
right_node = after_child.next_sibling
loop do
case
when inverted_identity_map[right_node]
operations << ::Markdiff::Operations::AddPreviousSiblingOperation.new(inserted_node: after_child, target_node: inverted_identity_map[right_node])
break
when right_node.nil?
operations << ::Markdiff::Operations::AddChildOperation.new(inserted_node: after_child, target_node: before_node)
break
else
right_node = right_node.next_sibling
end
end
end
end
operations
end | ruby | {
"resource": ""
} |
q24356 | Tabular.Table.delete_blank_columns! | train | def delete_blank_columns!(*options)
exceptions = extract_exceptions(options)
(columns.map(&:key) - exceptions).each do |key|
if rows.all? { |row| is_blank?(row[key]) || is_zero?(row[key]) } # rubocop:disable Style/IfUnlessModifier
delete_column key
end
end
end | ruby | {
"resource": ""
} |
q24357 | Tabular.Table.delete_homogenous_columns! | train | def delete_homogenous_columns!(*options)
return if rows.size < 2
exceptions = extract_exceptions(options)
(columns.map(&:key) - exceptions).each do |key|
value = rows.first[key]
delete_column key if rows.all? { |row| row[key] == value }
end
end | ruby | {
"resource": ""
} |
q24358 | Tabular.Table.strip! | train | def strip!
rows.each do |row|
columns.each do |column|
value = row[column.key]
if value.respond_to?(:strip)
row[column.key] = value.strip
elsif value.is_a?(Float)
row[column.key] = strip_decimal(value)
end
end
end
end | ruby | {
"resource": ""
} |
q24359 | Collins.Asset.gateway_address | train | def gateway_address pool = "default"
address = addresses.select{|a| a.pool == pool}.map{|a| a.gateway}.first
return address if address
if addresses.length > 0 then
addresses.first.gateway
else
nil
end
end | ruby | {
"resource": ""
} |
q24360 | Collins.Util.deep_copy_hash | train | def deep_copy_hash hash
require_that(hash.is_a?(Hash), "deep_copy_hash requires a hash be specified, got #{hash.class}")
Marshal.load Marshal.dump(hash)
end | ruby | {
"resource": ""
} |
q24361 | Collins.Util.require_non_empty | train | def require_non_empty value, message, return_value = false
guard_value = if return_value == true then
value
elsif return_value != false then
return_value
else
false
end
if value.is_a?(String) then
require_that(!value.strip.empty?, message, guard_value)
elsif value.respond_to?(:empty?) then
require_that(!value.empty?, message, guard_value)
else
require_that(!value.nil?, message, guard_value)
end
end | ruby | {
"resource": ""
} |
# Assert that +guard+ is truthy, raising ExpectationFailedError(+message+)
# otherwise.
#
# return_guard - false (default): return nil on success;
#                true: return the guard value itself;
#                any other value: return that value.
def require_that guard, message, return_guard = false
  raise ExpectationFailedError.new(message) unless guard
  case return_guard
  when true  then guard
  when false then nil
  else return_guard
  end
end
"resource": ""
} |
q24363 | Collins.Util.get_asset_or_tag | train | def get_asset_or_tag asset_or_tag
asset =
case asset_or_tag
when Collins::Asset then asset_or_tag
when String then Collins::Asset.new(asset_or_tag)
when Symbol then Collins::Asset.new(asset_or_tag.to_s)
else
error_message = "Expected Collins::Asset, String or Symbol. Got #{asset_or_tag.class}"
raise ExpectationFailedError.new(error_message)
end
if asset.nil? || asset.tag.nil? then
raise ExpectationFailedError.new("Empty asset tag, but a tag is required")
end
asset
end | ruby | {
"resource": ""
} |
# Recursively convert a Hash's keys to Symbols.
#
# options:
#   :downcase      - lowercase keys before symbolizing.
#   :rewrite_regex - replace Regexp values with their source text.
#
# Returns a new Hash ({} for nil/empty input).
# Raises ExpectationFailedError when a non-hash is supplied.
def symbolize_hash hash, options = {}
  return {} if (hash.nil? or hash.empty?)
  (raise ExpectationFailedError.new("symbolize_hash called without a hash")) unless hash.is_a?(Hash)
  hash.inject({}) do |result, (k,v)|
    key = options[:downcase] ? k.to_s.downcase.to_sym : k.to_s.to_sym
    if v.is_a?(Hash) then
      # Bug fix: propagate options so :downcase/:rewrite_regex also apply to
      # nested hashes (the old recursion silently dropped them).
      result[key] = symbolize_hash(v, options)
    elsif v.is_a?(Regexp) && options[:rewrite_regex] then
      # Strip the surrounding slashes from the Regexp literal form.
      result[key] = v.inspect[1..-2]
    else
      result[key] = v
    end
    result
  end
end
"resource": ""
} |
# Recursively convert a Hash's keys to Strings.
#
# options:
#   :downcase      - lowercase keys while stringifying.
#   :rewrite_regex - replace Regexp values with their source text.
#
# Returns a new Hash.
# Raises ExpectationFailedError when a non-hash is supplied.
def stringify_hash hash, options = {}
  (raise ExpectationFailedError.new("stringify_hash called without a hash")) unless hash.is_a?(Hash)
  hash.inject({}) do |result, (k,v)|
    key = options[:downcase] ? k.to_s.downcase : k.to_s
    if v.is_a?(Hash) then
      # Bug fix: propagate options so :downcase/:rewrite_regex also apply to
      # nested hashes (the old recursion silently dropped them).
      result[key] = stringify_hash(v, options)
    elsif v.is_a?(Regexp) && options[:rewrite_regex] then
      # Strip the surrounding slashes from the Regexp literal form.
      result[key] = v.inspect[1..-2]
    else
      result[key] = v
    end
    result
  end
end
"resource": ""
} |
q24366 | Okura.Tagger.parse | train | def parse str
chars=str.split(//)
nodes=Nodes.new(chars.length+2,@mat)
nodes.add(0,Node.mk_bos_eos)
nodes.add(chars.length+1,Node.mk_bos_eos)
str.length.times{|i|
@dic.possible_words(str,i).each{|w|
nodes.add(i+1,Node.new(w))
}
}
nodes
end | ruby | {
"resource": ""
} |
q24367 | Okura.UnkDic.define | train | def define type_name,left,right,cost
type=@char_types.named type_name
(@templates[type_name]||=[]).push Word.new '',left,right,cost
end | ruby | {
"resource": ""
} |
# Normalize a column key: a Column yields its own key, a String becomes a
# Symbol, and anything else passes through unchanged.
def key_to_sym(key)
  if key.is_a?(Column)
    key.key
  elsif key.is_a?(String)
    key.to_sym
  else
    key
  end
end
"resource": ""
} |
q24369 | LeSSL.Manager.authorize_for_domain | train | def authorize_for_domain(domain, options={})
authorization = client.authorize(domain: domain)
# Default challenge is via HTTP
# but the developer can also use
# a DNS TXT record to authorize.
if options[:challenge] == :dns
challenge = authorization.dns01
unless options[:skip_puts]
puts "===================================================================="
puts "Record:"
puts
puts " - Name: #{challenge.record_name}.#{domain}"
puts " - Type: #{challenge.record_type}"
puts " - Value: #{challenge.record_content}"
puts
puts "Create the record; Wait a minute (or two); Request for verification!"
puts "===================================================================="
end
# With this option the dns verification is
# done automatically. LeSSL waits until a
# valid record on your DNS servers was found
# and requests a verification.
#
# CAUTION! This is a blocking the thread!
if options[:automatic_verification]
dns = begin
if ns = options[:custom_nameservers]
LeSSL::DNS.new(ns)
else
LeSSL::DNS.new
end
end
puts
puts 'Wait until the TXT record was set...'
# Wait with verification until the
# challenge record is valid.
while dns.challenge_record_invalid?(domain, challenge.record_content)
puts 'DNS record not valid' if options[:verbose]
sleep(2) # Wait 2 seconds
end
puts 'Valid TXT record found. Continue with verification...'
return request_verification(challenge)
else
return challenge
end
else
challenge = authorization.http01
file_name = Rails.root.join('public', challenge.filename)
dir = File.dirname(Rails.root.join('public', challenge.filename))
FileUtils.mkdir_p(dir)
File.write(file_name, challenge.file_content)
return challenge.verify_status
end
end | ruby | {
"resource": ""
} |
# Delegate unknown methods to the wrapped client, automatically injecting
# this asset's tag wherever the target method declares an +asset_or_tag+
# parameter.
def method_missing meth, *args, &block
  if @client.respond_to?(meth) then
    method_parameters = @client.class.instance_method(meth).parameters
    # Position of the asset_or_tag parameter, if the method accepts one.
    asset_idx = method_parameters.find_index do |item|
      item[1] == :asset_or_tag
    end
    if asset_idx.nil? then
      @client.send(meth, *args, &block)
    else
      args_with_asset = args.insert(asset_idx, @tag)
      logger.debug("Doing #{meth}(#{args_with_asset.join(',')}) for #{@tag}")
      @client.send(meth, *args_with_asset, &block)
    end
  else
    super
  end
end

# Defect fix: every method_missing should be paired with respond_to_missing?
# so respond_to? and Object#method stay honest about the delegation above.
def respond_to_missing?(meth, include_private = false)
  @client.respond_to?(meth, include_private) || super
end
"resource": ""
} |
# Issues a GET for +url+ (with optional query parameters) and returns
# the response body parsed as JSON with symbolized keys.
def get(url, query_params = nil)
  raw = perform('get', url, query: query_params)
  JSON.parse(raw, symbolize_names: true, quirks_mode: true)
end
"resource": ""
} |
# Returns self when a value is present. Otherwise evaluates the fallback
# (the given block, or the first positional default) and returns it,
# wrapping it in an Option unless it already is one.
def or_else *default
  return self unless empty?

  fallback = block_given? ? yield : default.first
  fallback.is_a?(Option) ? fallback : ::Collins::Option(fallback)
end
"resource": ""
} |
# Applies the block to the contained value, yielding Some(result), or
# propagates None when there is no value.
def map &block
  empty? ? None.new : Some.new(block.call(get))
end
"resource": ""
} |
# Applies the block to the contained value and flattens the result:
# an Option returned by the block is passed through unchanged, any other
# value is wrapped in Some. Propagates None when empty.
#
# Fix: the previous check only recognised +Some+, so a block returning
# +None+ was incorrectly re-wrapped as Some(None). Checking against
# +Option+ matches the behaviour of the sibling +or_else+.
def flat_map &block
  return None.new if empty?

  res = block.call(get)
  res.is_a?(Option) ? res : Some.new(res)
end
"resource": ""
} |
# Looks up the named relation — trying the key as given first, then its
# string form — and wraps it in a Relation bound to this resource's
# context. Raises KeyError (via #fetch) when neither form is present.
def relation(name)
  key = @relations.key?(name) ? name : name.to_s
  Relation.new @context, @relations.fetch(key)
end
"resource": ""
} |
# Writes each generated annotation into the matching model file under
# +path+. An existing annotation banner for the model is replaced in
# place; otherwise the annotation is appended to the file. A no-op when
# +path+ does not exist.
#
# @param path [String] directory containing the files to annotate
# @param suffix [String, nil] optional file-name suffix (prefixed with "_")
# @param extension [String, nil] file extension (without the dot)
# @param plural [Boolean] use the pluralized model name for file names
def apply_annotation(path, suffix=nil, extension="rb", plural=false)
  pn_models = Pathname.new(path)
  return unless pn_models.exist?
  suffix = "_#{suffix}" unless suffix.nil?
  extension = extension.nil? ? "" : ".#{extension}"
  @annotations.each do |model, annotation|
    prefix = plural ? model.name.pluralize : model.name
    pn = pn_models + "#{ActiveSupport::Inflector.underscore(prefix)}#{suffix}#{extension}"
    text = File.read(pn.to_path)
    # Matches a previously written annotation banner for this model.
    # NOTE(review): model.name is interpolated unescaped into the regex;
    # fine for constant-like class names, but Regexp.escape would be safer.
    re = Regexp.new("^#-(?:--)+-\n# #{model.name}.*\n(?:#.+\n)+#-(?:--)+-\n", Regexp::MULTILINE)
    text = re =~ text ? text.sub(re, annotation) : "#{text}\n#{annotation}"
    File.write(pn.to_path, text)
    puts " Annotated #{pn.to_path}."
  end
end
"resource": ""
} |
# Scans app/models and records an annotation for every concrete
# ActiveRecord model not already processed. Abstract classes and
# non-ActiveRecord constants are skipped.
def generate
  Dir["app/models/*.rb"].each do |path|
    model_name = ActiveSupport::Inflector.camelize(File.basename(path, ".rb"))
    # constantize resolves the class name without the arbitrary-code
    # execution risk of the previous eval-based lookup.
    model = ActiveSupport::Inflector.constantize(model_name)
    next if model.respond_to?(:abstract_class) && model.abstract_class
    next unless model < ActiveRecord::Base
    @annotations[model] = generate_annotation(model) unless @annotations.key?(model)
  end
end
"resource": ""
} |
# Builds the comment-banner annotation for a single model: a divider,
# the model name, a column-table header, then one formatted row per
# database column (name, SQL type, nullability, primary-key flag,
# default).
#
# @param model [#name, #columns, #primary_key] an ActiveRecord-like model
# @return [String] the annotation block, terminated by a newline
def generate_annotation(model)
  name_width = model.columns.map { |column| column.name.length }.max
  divider = "#-#{'--' * 38}-"
  row_format = "# %-#{name_width}s %-20s %-7s %-7s %-10s"

  lines = [divider, "# #{model.name}", "#"]
  lines << sprintf("# %-#{name_width}s SQL Type Null Primary Default", "Name")
  lines << sprintf("# %s -------------------- ------- ------- ----------", "-" * name_width)
  model.columns.each do |column|
    lines << sprintf(
      row_format,
      column.name,
      column.sql_type,
      column.null,
      column.name == model.primary_key,
      (column.default || "")
    )
  end
  lines << "#"
  lines << divider
  lines.join("\n") + "\n"
end
"resource": ""
} |
# Builds an HTTPI request for +url+ (query string included) and performs
# a GET through the standard request pipeline.
def get(url, params)
  target = resource_url(url, query_params(params[:query]))
  execute_request(:get, HTTPI::Request.new(target), params)
end
"resource": ""
} |
# Builds an HTTPI request for +url+, attaches the JSON-encoded body and
# performs a PUT through the standard request pipeline.
def put(url, params)
  target = resource_url(url, query_params(params[:query]))
  request = HTTPI::Request.new(target)
  request.body = params[:body].to_json
  execute_request(:put, request, params)
end
"resource": ""
} |
# Authenticates against the API with the configured credentials.
# On HTTP 200 the parsed token is cached in @token and true is returned;
# any other status leaves the token untouched and returns false.
def login
  credentials = { username: configuration.username, password: configuration.password }
  response = post('/login', body: credentials,
                            headers: { 'Content-Type' => 'application/json' })
  return false unless response.code == 200

  @token = JSON.parse(response.body, symbolize_names: true, quirks_mode: true)
  true
end
"resource": ""
} |
# Persists the icon image to +path+ (creating intermediate directories
# as needed) and returns the path. With no path, the in-memory image
# itself is returned. An unreachable path aborts via puts_and_exit.
def write_out(path = nil)
  return img unless path

  FileUtils.mkdir_p(File.dirname(path))
  img.write(path)
  path
rescue Errno::ENOENT
  puts_and_exit("Path not found '#{path}'")
end
"resource": ""
} |
# Extracts error details from the response: the 'errors' entry of the
# decoded body (string or symbol key), falling back to the whole decoded
# body, or to the raw body when nothing could be decoded.
def errors
  decoded = response.decoded_body
  return response.body unless decoded

  decoded['errors'] || decoded[:errors] || decoded
end
"resource": ""
} |
# Reads the header row of the CSV source and maps each cell to an
# attribute: an explicit alias wins, otherwise the header is normalized
# (whitespace to underscores, non-word characters stripped, downcased).
# Ensures parsing starts at row 1 or later and rewinds the data stream.
def read_attributes_from_file aliases = {}
  header = FasterCSV.new(@csv_data, @parser_options).readline
  @start_at_row = [@start_at_row, 1].max
  @csv_data.rewind
  header.each_with_index do |cell, index|
    cell.strip!
    attribute = aliases[cell] || cell.gsub(/\s+/, '_').gsub(/[\W]+/, '').downcase
    add_attribute attribute, index
  end
end
"resource": ""
} |
# Materializes a target object from a CSV row: runs the before filters,
# assigns every mapped attribute through its writer, runs the after
# filters and returns the populated instance.
def parse(csv_row)
  map_to_class.new.tap do |record|
    @before_filters.each { |filter| filter.call(csv_row, record) }
    mapped_attributes.each do |attr_map|
      record.send("#{attr_map.name}=", attr_map.parse(csv_row))
    end
    @after_filters.each { |filter| filter.call(csv_row, record) }
  end
end
"resource": ""
} |
# Seeds a new invoice with defaults taken from the owning debtor.
# Only fills values the invoice does not already have (||=); a no-op
# when the proxy's owner is not a Debtor.
def initialize_properties_with_values_from_owner(invoice)
  return unless owner.is_a?(Debtor)

  invoice.debtor = owner
  invoice.debtor_name ||= owner.name
  invoice.debtor_address ||= owner.address
  invoice.debtor_postal_code ||= owner.postal_code
  invoice.debtor_city ||= owner.city
  invoice.term_of_payment_handle ||= owner.term_of_payment_handle
  invoice.layout_handle ||= owner.layout_handle
  invoice.currency_handle ||= owner.currency_handle
end
"resource": ""
} |
# Finds an invoice line by handle. Bare values (e.g. integers) are first
# coerced into a number-based Entity::Handle before delegating to the
# generic proxy lookup.
def find(handle)
  coerced = if handle.is_a?(Entity::Handle)
              handle
            else
              Entity::Handle.build(:number => handle)
            end
  super(coerced)
end
"resource": ""
} |
# Emits one structured "Started ..." log line for the current request:
# request method, path, time and client IP, followed by HTTP_* headers
# (minus the blacklist), request params (detail level controlled by
# :param_level) and whitelisted cookies. Requires Imprint.configuration
# to have been set up beforehand. Any failure inside is caught and
# logged so logging can never break request handling.
def log_entrypoint
raise "you must call Imprint.configuration and configure the gem before using LogHelpers" if Imprint.configuration.nil?
# Filter used to scrub sensitive parameter values before they hit the log.
log_filter = ActionDispatch::Http::ParameterFilter.new(Imprint.configuration[:log_filters] || Rails.application.config.filter_parameters)
# I should probably switch this to be a whitelist as well, or support both white and black lists for both cookies and headers
header_blacklist = Imprint.configuration[:header_blacklist] || []
cookies_whitelist = Imprint.configuration[:cookies_whitelist] || []
param_level = Imprint.configuration[:param_level] || Imprint::QUERY_PARAMS
# Only HTTP_* request headers are considered, minus the blacklist.
http_request_headers = request.headers.select{|header_name, header_value| header_name.match("^HTTP.*") && !header_blacklist.include?(header_name) }
data_append = "headers: "
# #select may yield a Hash-like or an Array of pairs here —
# NOTE(review): presumably differs across Rails versions; both are handled.
if http_request_headers.respond_to?(:each_pair)
http_request_headers.each_pair{|k,v| data_append << " #{k}=\"#{v}\"" }
else
http_request_headers.each{|el| data_append << " #{el.first}=\"#{el.last}\"" }
end
data_append << " params: "
# :param_level picks between full params, full params for GETs only,
# or query-string params only (the default).
if param_level==Imprint::FULL_PARAMS
set_full_params(log_filter, data_append)
elsif param_level==Imprint::FULL_GET_PARAMS
if request.get?
set_full_params(log_filter, data_append)
else
set_query_params(log_filter, data_append)
end
else
set_query_params(log_filter, data_append)
end
# Cookies are opt-in only: log just the whitelisted keys.
if defined? cookies
cookies_whitelist.each do |cookie_key|
cookie_val = cookies[cookie_key] ? cookies[cookie_key] : 'nil'
data_append << " #{cookie_key}=\"#{cookie_val}\""
end
end
logger.info "Started request_method=#{request.method.inspect} request_url=\"#{request.path}\" request_time=\"#{Time.now.to_default_s}\" request_ip=#{request.remote_ip.inspect} #{data_append}"
rescue => e
# Never let logging break the request: record the failure and move on.
logger.error "error logging log_entrypoint for request: #{e.inspect}"
logger.error e.backtrace.take(10).join("\n")
end
"resource": ""
} |
# Fetches all entities whose date falls within [from, unto] (inclusive,
# ISO-8601 encoded for the SOAP call) and returns them as persisted
# entity instances.
def find_by_date_interval(from, unto)
  response = request(:find_by_date_interval, "first" => from.iso8601,
                                             "last" => unto.iso8601)
  handle_key = "#{Support::String.underscore(entity_class_name)}_handle".intern
  raw_handles = [response[handle_key]].flatten.reject(&:blank?)
  handles = raw_handles.map { |handle| Entity::Handle.build(handle) }
  get_data_array(handles).map do |entity_hash|
    entity = build(entity_hash)
    entity.persisted = true
    entity
  end
end
"resource": ""
} |
# Loads the full data set for this entity from its proxy and marks the
# instance as complete (not partial) and persisted.
def get_data
  update_properties(proxy.get_data(handle))
  self.partial = false
  self.persisted = true
end
"resource": ""
} |
# Deletes the entity on the server and marks this instance as no longer
# persisted (and only partially loaded). Returns the server response.
def destroy
  handle_key = "#{Support::String.camel_back(class_name)}Handle"
  response = request(:delete, handle_key => handle.to_hash)
  @persisted = false
  @partial = true
  response
end
"resource": ""
} |
# Mass-assigns properties from +hash+: every key with a matching writer
# ("<key>=") has the corresponding value assigned; keys without a writer
# are silently ignored.
def update_properties(hash)
  hash.each do |property, value|
    writer = "#{property}="
    send(writer, value) if respond_to?(writer)
  end
end
"resource": ""
} |
# Sends the SMS via the Voipfone API. Requires @to, @from and @message
# to be present; spaces are stripped from the numbers and the message is
# truncated to 160 characters. Returns true on success; raises
# VoipfoneAPIError with the API's reply otherwise.
def send
  if @to.nil? || @from.nil? || @message.nil?
    raise ArgumentError, "You need to include 'to' and 'from' numbers and a message to send an SMS"
  end
  parameters = {
    "sms-send-to" => @to.gsub(" ", ""),
    "sms-send-from" => @from.gsub(" ", ""),
    "sms-message" => @message[0..159]
  }
  request = @browser.post("#{VoipfoneClient::API_POST_URL}?smsSend", parameters)
  response = parse_response(request)
  return true if response == "ok"

  raise VoipfoneAPIError, response
end
"resource": ""
} |
# Fetches every entity handle known to the endpoint, loads their data
# into the proxy and returns the proxy itself for chaining.
def all
  handles = request(:get_all).values.flatten.map do |handle|
    Entity::Handle.build(handle)
  end
  get_data_for_handles(handles)
  self
end
"resource": ""
} |
# Looks up a single entity by handle (coerced via build_handle) and
# returns it flagged as persisted.
def find(handle)
  entity = build(get_data(build_handle(handle)))
  entity.persisted = true
  entity
end
"resource": ""
} |
# Fetches the raw data hash for the entity identified by +handle+,
# wrapping it in an Entity::Handle for the SOAP payload.
def get_data(handle)
  wrapped = Entity::Handle.new(handle)
  request(:get_data, "entityHandle" => wrapped.to_hash)
end
"resource": ""
} |
# Fetches data hashes for many handles in one SOAP round trip.
# Returns an empty array when no (truthy) handles are given.
def get_data_array(handles)
  return [] unless handles && handles.any?

  soap_class_name = entity_class.name.split("::").last
  payload = { "#{soap_class_name}Handle" => handles.map(&:to_hash) }
  response = request(:get_data_array, "entityHandles" => payload)
  [response["#{entity_class.key}_data".intern]].flatten
end
"resource": ""
} |
# Performs a SOAP call for this proxy's entity class: resolves the
# action name via the endpoint and forwards it with the payload to the
# session.
def request(action, data = nil)
  soap_action = Endpoint.new.soap_action_name(entity_class, action)
  session.request(soap_action, data)
end
"resource": ""
} |
# Fetches all current (open) orders: collects their handles, resets the
# proxy's item list and loads the data. Returns the proxy for chaining.
def current
  handles = request(:get_all_current).values.flatten.map do |handle|
    Entity::Handle.build(handle)
  end
  initialize_items
  get_data_for_handles(handles)
  self
end
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.