_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
---|---|---|---|---|---|
q24400 | Rammer.ModuleGenerator.require_module_to_base | train | def require_module_to_base
file = File.open("#{@target_dir}/app/apis/#{@project_name}/base.rb", "r+")
file.each do |line|
while line == "require_relative './modules/#{@module_name}_apis'\n" do
$stdout.puts "\e[33mModule already mounted.\e[0m"
return true
end
end
File.open("#{@target_dir}/app/apis/#{@project_name}/base.rb", "r+") do |f|
pos = f.pos
rest = f.read
f.seek pos
f.write("require_relative './modules/")
f.write(@module_name)
f.write("_apis'\n")
f.write(rest)
end
return false
end | ruby | {
"resource": ""
} |
q24401 | Rammer.ModuleGenerator.copy_module | train | def copy_module
src = "#{@gem_path}/lib/modules/#{@module_name}/#{@module_name}_apis.rb"
dest = "#{@target_dir}/app/apis/#{@project_name}/modules"
presence = File.exists?("#{dest}/#{@module_name}_apis.rb")? true : false
FileUtils.mkdir dest unless File.exists?(dest)
FileUtils.cp(src,dest) unless presence
configure_module_files
$stdout.puts "\e[1;32m \tcreate\e[0m\tapp/apis/#{@project_name}/modules/#{@module_name}_apis.rb" unless presence
end | ruby | {
"resource": ""
} |
q24402 | Rammer.ModuleGenerator.create_migrations_and_models | train | def create_migrations_and_models
src = "#{@gem_path}/lib/modules/migrations"
dest = "#{@target_dir}/db/migrate"
copy_files(src,dest,AUTH_MIGRATE)
if @module_name == "oauth"
copy_files(src,dest,OAUTH_MIGRATE)
end
src_path = "#{@gem_path}/lib/modules/models"
dest_path = "#{@target_dir}/app/models"
copy_files(src_path,dest_path,AUTH_MODELS)
if @module_name == "oauth"
copy_files(src_path,dest_path,OAUTH_MODELS)
end
end | ruby | {
"resource": ""
} |
q24403 | Rammer.ModuleGenerator.copy_files | train | def copy_files(src,dest,module_model)
module_model.each do |file|
presence = File.exists?("#{dest}/#{file}")? true : false
unless presence
FileUtils.cp("#{src}/#{file}",dest)
path = if dest.include? "app" then "app/models" else "db/migrate" end
$stdout.puts "\e[1;32m \tcreate\e[0m\t#{path}/#{file}"
end
end
end | ruby | {
"resource": ""
} |
q24404 | Rammer.ModuleGenerator.configure_module_files | train | def configure_module_files
source = "#{@target_dir}/app/apis/#{@project_name}/modules/#{@module_name}_apis.rb"
application_module = @project_name.split('_').map(&:capitalize)*''
file = File.read(source)
replace = file.gsub(/module Rammer/, "module #{application_module}")
File.open(source, "w"){|f|
f.puts replace
}
end | ruby | {
"resource": ""
} |
q24405 | Rammer.ModuleGenerator.add_gems | train | def add_gems
file = File.open("#{@target_dir}/Gemfile", "r+")
file.each do |line|
while line == "gem 'oauth2'\n" do
return
end
end
File.open("#{@target_dir}/Gemfile", "a+") do |f|
f.write("gem 'multi_json'\ngem 'oauth2'\ngem 'songkick-oauth2-provider'\ngem 'ruby_regex'\ngem 'oauth'\n")
end
$stdout.puts "\e[1;35m \tGemfile\e[0m\tgem 'multi_json'\n\t\tgem 'oauth2'
\t\tgem 'songkick-oauth2-provider'\n\t\tgem 'ruby_regex'\n\t\tgem 'oauth'\n"
$stdout.puts "\e[1;32m \trun\e[0m\tbundle install"
system("bundle install")
end | ruby | {
"resource": ""
} |
q24406 | Rammer.ModuleGenerator.unmount_module | train | def unmount_module
path = "#{@target_dir}/app/apis/#{@project_name}"
temp_file = "#{path}/tmp.rb"
source = "#{path}/base.rb"
delete_file = "#{path}/modules/#{@module_name}_apis.rb"
File.open(temp_file, "w") do |out_file|
File.foreach(source) do |line|
unless line == "require_relative './modules/#{@module_name}_apis'\n"
out_file.puts line unless line == "\t\tmount #{@module_class}\n"
end
end
FileUtils.mv(temp_file, source)
end
if File.exists?(delete_file)
FileUtils.rm(delete_file)
$stdout.puts "\e[1;35m\tunmounted\e[0m\t#{@module_class}"
$stdout.puts "\e[1;31m\tdelete\e[0m\t\tapp/apis/#{@project_name}/modules/#{@module_name}_apis.rb"
else
$stdout.puts "\e[33mModule already unmounted.\e[0m"
end
end | ruby | {
"resource": ""
} |
q24407 | Validation.Rules.rule | train | def rule(field, definition)
field = field.to_sym
rules[field] = [] if rules[field].nil?
begin
if definition.respond_to?(:each_pair)
add_parameterized_rules(field, definition)
elsif definition.respond_to?(:each)
definition.each do |item|
if item.respond_to?(:each_pair)
add_parameterized_rules(field, item)
else
add_single_rule(field, item)
end
end
else
add_single_rule(field, definition)
end
rescue NameError => e
raise InvalidRule.new(e)
end
self
end | ruby | {
"resource": ""
} |
q24408 | Validation.Rules.valid? | train | def valid?
valid = true
rules.each_pair do |field, rules|
if ! @obj.respond_to?(field)
raise InvalidKey, "cannot validate non-existent field '#{field}'"
end
rules.each do |r|
if ! r.valid_value?(@obj.send(field))
valid = false
errors[field] = {:rule => r.error_key, :params => r.params}
break
end
end
end
@valid = valid
end | ruby | {
"resource": ""
} |
q24409 | Validation.Rules.add_single_rule | train | def add_single_rule(field, key_or_klass, params = nil)
klass = if key_or_klass.respond_to?(:new)
key_or_klass
else
get_rule_class_by_name(key_or_klass)
end
args = [params].compact
rule = klass.new(*args)
rule.obj = @obj if rule.respond_to?(:obj=)
rules[field] << rule
end | ruby | {
"resource": ""
} |
q24410 | Validation.Rules.add_parameterized_rules | train | def add_parameterized_rules(field, rules)
rules.each_pair do |key, params|
add_single_rule(field, key, params)
end
end | ruby | {
"resource": ""
} |
q24411 | Validation.Rules.get_rule_class_by_name | train | def get_rule_class_by_name(klass)
klass = camelize(klass)
Validation::Rule.const_get(klass)
rescue NameError => e
raise InvalidRule.new(e)
end | ruby | {
"resource": ""
} |
q24412 | Validation.Rules.camelize | train | def camelize(term)
string = term.to_s
string = string.sub(/^[a-z\d]*/) { $&.capitalize }
string.gsub(/(?:_|(\/))([a-z\d]*)/i) { $2.capitalize }.gsub('/', '::')
end | ruby | {
"resource": ""
} |
q24413 | ValidationReflection.ClassMethods.remember_validation_metadata | train | def remember_validation_metadata(validation_type, *attr_names)
configuration = attr_names.last.is_a?(::Hash) ? attr_names.pop : {}
self.validations ||= []
attr_names.flatten.each do |attr_name|
self.validations << ::ActiveRecord::Reflection::MacroReflection.new(validation_type, attr_name.to_sym, configuration, self)
end
end | ruby | {
"resource": ""
} |
q24414 | Capypage.Element.element | train | def element(name, selector, options = {})
define_singleton_method(name) { Element.new(selector, options.merge(:base_element => self)) }
end | ruby | {
"resource": ""
} |
q24415 | Rammer.RammerGenerator.create_base_dirs | train | def create_base_dirs
BASE_DIR.each do |dir|
FileUtils.mkdir "#{@project_name}/#{dir}"
$stdout.puts "\e[1;32m \tcreate\e[0m\t#{dir}"
end
FileUtils.mkdir "#{@project_name}/app/apis/#{@project_name}"
$stdout.puts "\e[1;32m \tcreate\e[0m\tapp/apis/#{@project_name}"
end | ruby | {
"resource": ""
} |
q24416 | Rammer.RammerGenerator.create_api_module | train | def create_api_module
File.open("#{@project_name}/app/apis/#{@project_name}/base.rb", "w") do |f|
f.write('module ')
f.puts(@module_name)
f.write("\tclass Base < Grape::API\n\tend\nend")
end
$stdout.puts "\e[1;32m \tcreate\e[0m\tapp/apis/#{@project_name}/base.rb"
end | ruby | {
"resource": ""
} |
q24417 | Rammer.RammerGenerator.config_server | train | def config_server
file = File.open("#{@project_name}/server.rb", "r+")
file.each do |line|
while line == " def response(env)\n" do
pos = file.pos
rest = file.read
file.seek pos
file.write("\t::")
file.write(@module_name)
file.write("::Base.call(env)\n")
file.write(rest)
$stdout.puts "\e[1;35m \tconfig\e[0m\tserver.rb"
return
end
end
end | ruby | {
"resource": ""
} |
q24418 | Rammer.RammerGenerator.copy_files_to_target | train | def copy_files_to_target
COMMON_RAMMER_FILES.each do |file|
source = File.join("#{@gem_path}/lib/modules/common/",file)
FileUtils.cp(source,"#{@project_name}")
$stdout.puts "\e[1;32m \tcreate\e[0m\t#{file}"
end
end | ruby | {
"resource": ""
} |
q24419 | Rammer.ScaffoldGenerator.create_model_file | train | def create_model_file
dir = "/app/models/#{@scaffold_name}.rb"
unless File.exists?(File.join(Dir.pwd,dir))
File.join(Dir.pwd,dir)
source = "#{@gem_path}/lib/modules/scaffold/model.rb"
FileUtils.cp(source,File.join(Dir.pwd,dir))
config_model
@valid = true
$stdout.puts "\e[1;32m \tcreate\e[0m\t#{dir}"
else
$stdout.puts "\e[1;31mError:\e[0m Model named #{@scaffold_name} already exists, aborting."
end
end | ruby | {
"resource": ""
} |
q24420 | Rammer.ScaffoldGenerator.create_migration | train | def create_migration
migration_version = Time.now.to_i
dir = "/db/migrate/#{migration_version}_create_#{@scaffold_name}s.rb"
unless File.exists?(File.join(Dir.pwd,dir))
source = "#{@gem_path}/lib/modules/scaffold/migration.rb"
FileUtils.cp(source,File.join(Dir.pwd,dir))
config_migration(migration_version)
$stdout.puts "\e[1;32m \tcreate\e[0m\t#{dir}"
end
end | ruby | {
"resource": ""
} |
q24421 | Rammer.ScaffoldGenerator.config_migration | train | def config_migration(migration_version)
source = "#{Dir.pwd}/db/migrate/#{migration_version}_create_#{@scaffold_name}s.rb"
modify_content(source, 'CreateMigration', "Create#{@model_class}s")
modify_content(source, 'migration', "#{@scaffold_name}s")
@arguments.each do |value|
@attributes << value.split(':').first
@data_types << value.split(':').last
end
attribute_data_types = @data_types.reverse
@attributes.reverse.each_with_index do |value,index|
add_attributes(source, value, attribute_data_types[index])
end
end | ruby | {
"resource": ""
} |
q24422 | Rammer.ScaffoldGenerator.add_attributes | train | def add_attributes(source,attribute,data_type)
file = File.open(source, "r+")
file.each do |line|
while line == " create_table :#{@scaffold_name}s do |t|\n" do
pos = file.pos
rest = file.read
file.seek pos
file.write(" t.#{data_type} :#{attribute}\n")
file.write(rest)
break
end
end
end | ruby | {
"resource": ""
} |
q24423 | Rammer.ScaffoldGenerator.enable_apis | train | def enable_apis
dir = "/app/apis/#{@project_name}/#{@scaffold_name}s/base_apis.rb"
base_dir = "#{Dir.pwd}/app/apis/#{@project_name}/#{@scaffold_name}s"
unless File.exists?(File.join(Dir.pwd,dir))
FileUtils.mkdir base_dir unless File.exists?(base_dir)
source = "#{@gem_path}/lib/modules/scaffold/base_apis.rb"
FileUtils.cp(source,File.join(Dir.pwd,dir))
config_apis
$stdout.puts "\e[1;32m \tcreate\e[0m\t#{dir}"
mount_apis
end
end | ruby | {
"resource": ""
} |
q24424 | Rammer.ScaffoldGenerator.config_apis | train | def config_apis
source = "#{Dir.pwd}/app/apis/#{@project_name}/#{@scaffold_name}s/base_apis.rb"
content = ['AppName','ScaffoldName', 'Model', 'model']
replacement = ["#{@project_class}", "#{model_class}s", "#{model_class}", "#{@scaffold_name}"]
for i in 0..3 do
modify_content(source, content[i], replacement[i])
end
end | ruby | {
"resource": ""
} |
q24425 | Rammer.ScaffoldGenerator.mount_apis | train | def mount_apis
require_apis_to_base
mount_class = "::#{@project_class}::#{@model_class}s::BaseApis"
file = File.open("#{Dir.pwd}/app/apis/#{@project_name}/base.rb", "r+")
file.each do |line|
while line == "\tclass Base < Grape::API\n" do
pos = file.pos
rest = file.read
file.seek pos
file.write("\t\tmount ")
file.puts(mount_class)
file.write(rest)
break
end
end
$stdout.puts "\e[1;35m\tmounted\e[0m\t#{mount_class}"
end | ruby | {
"resource": ""
} |
q24426 | Rammer.ScaffoldGenerator.require_apis_to_base | train | def require_apis_to_base
File.open("#{Dir.pwd}/app/apis/#{@project_name}/base.rb", "r+") do |f|
pos = f.pos
rest = f.read
f.seek pos
f.write("require_relative '#{@scaffold_name}s/base_apis'\n")
f.write(rest)
end
end | ruby | {
"resource": ""
} |
q24427 | Rammer.ScaffoldGenerator.to_underscore | train | def to_underscore(value)
underscore_value = value.gsub(/::/, '/').gsub(/([A-Z]+)([A-Z][a-z])/,'\1_\2').
gsub(/([a-z\d])([A-Z])/,'\1_\2').tr("-", "_").downcase
return underscore_value
end | ruby | {
"resource": ""
} |
q24428 | Reth.ChainService.knows_block | train | def knows_block(blockhash)
return true if @chain.include?(blockhash)
@block_queue.queue.any? {|(block, proto)| block.header.full_hash == blockhash }
end | ruby | {
"resource": ""
} |
q24429 | HaystackRuby.Config.load! | train | def load!(path, environment = nil)
require 'yaml'
environment ||= Rails.env
conf = YAML.load(File.new(path).read).with_indifferent_access[environment]
load_configuration(conf)
end | ruby | {
"resource": ""
} |
q24430 | OpenNlp.Chunker.chunk | train | def chunk(str)
raise ArgumentError, 'str must be a String' unless str.is_a?(String)
tokens = tokenizer.tokenize(str)
pos_tags = pos_tagger.tag(tokens).to_ary
chunks = j_instance.chunk(tokens.to_java(:String), pos_tags.to_java(:String)).to_ary
build_chunks(chunks, tokens, pos_tags)
end | ruby | {
"resource": ""
} |
q24431 | Greeklish.GreeklishGenerator.generate_greeklish_words | train | def generate_greeklish_words(greek_words)
@greeklish_list.clear
greek_words.each do |greek_word|
@per_word_greeklish.clear
initial_token = greek_word
digraphs.each_key do |key|
greek_word = greek_word.gsub(key, digraphs[key])
end
# Convert it back to array of characters. The iterations of each
# character will take place through this array.
input_token = greek_word.split(//)
# Iterate through the characters of the token and generate
# greeklish words.
input_token.each do |greek_char|
add_character(conversions[greek_char])
end
@greeklish_list << per_word_greeklish.flatten
end
@greeklish_list.flatten
end | ruby | {
"resource": ""
} |
q24432 | OpenNlp.SentenceDetector.detect | train | def detect(str)
raise ArgumentError, 'str must be a String' unless str.is_a?(String)
j_instance.sentDetect(str).to_ary
end | ruby | {
"resource": ""
} |
q24433 | OpenNlp.SentenceDetector.pos_detect | train | def pos_detect(str)
raise ArgumentError, 'str must be a String' unless str.is_a?(String)
j_instance.sentPosDetect(str).map do |span|
OpenNlp::Util::Span.new(span.getStart, span.getEnd)
end
end | ruby | {
"resource": ""
} |
q24434 | Greeklish.GreekReverseStemmer.generate_greek_variants | train | def generate_greek_variants(token_string)
# clear the list from variations of the previous greek token
@greek_words.clear
# add the initial greek token in the greek words
@greek_words << token_string
# Find the first matching suffix and generate the variants
# of this word.
SUFFIX_STRINGS.each do |suffix|
if (token_string.end_with?(suffix[0]))
# Add to greek_words the tokens with the desired suffixes
generate_more_greek_words(token_string, suffix[0])
break
end
end
greek_words
end | ruby | {
"resource": ""
} |
q24435 | OpenNlp.NamedEntityDetector.detect | train | def detect(tokens)
raise ArgumentError, 'tokens must be an instance of Array' unless tokens.is_a?(Array)
j_instance.find(tokens.to_java(:String)).to_ary
end | ruby | {
"resource": ""
} |
q24436 | Reth.Synchronizer.receive_newblock | train | def receive_newblock(proto, t_block, chain_difficulty)
logger.debug 'newblock', proto: proto, block: t_block, chain_difficulty: chain_difficulty, client: proto.peer.remote_client_version
if @chain.include?(t_block.header.full_hash)
raise AssertError, 'chain difficulty mismatch' unless chain_difficulty == @chain.get(t_block.header.full_hash).chain_difficulty
end
@protocols[proto] = chain_difficulty
if @chainservice.knows_block(t_block.header.full_hash)
logger.debug 'known block'
return
end
expected_difficulty = @chain.head.chain_difficulty + t_block.header.difficulty
if chain_difficulty >= @chain.head.chain_difficulty
# broadcast duplicates filtering is done in chainservice
logger.debug 'sufficient difficulty, broadcasting', client: proto.peer.remote_client_version
@chainservice.broadcast_newblock t_block, chain_difficulty, proto
else
age = @chain.head.number - t_block.header.number
logger.debug "low difficulty", client: proto.peer.remote_client_version, chain_difficulty: chain_difficulty, expected_difficulty: expected_difficulty, block_age: age
if age > MAX_NEWBLOCK_AGE
logger.debug 'newblock is too old, not adding', block_age: age, max_age: MAX_NEWBLOCK_AGE
return
end
end
if @chainservice.knows_block(t_block.header.prevhash)
logger.debug 'adding block'
@chainservice.add_block t_block, proto
else
logger.debug 'missing parent'
if @synctask
logger.debug 'existing task, discarding'
else
@synctask = SyncTask.new self, proto, t_block.header.full_hash, chain_difficulty
end
end
end | ruby | {
"resource": ""
} |
q24437 | Reth.Synchronizer.receive_status | train | def receive_status(proto, blockhash, chain_difficulty)
logger.debug 'status received', proto: proto, chain_difficulty: chain_difficulty
@protocols[proto] = chain_difficulty
if @chainservice.knows_block(blockhash) || @synctask
logger.debug 'existing task or known hash, discarding'
return
end
if @force_sync
blockhash, difficulty = force_sync
logger.debug 'starting forced synctask', blockhash: Utils.encode_hex(blockhash)
@synctask = SyncTask.new self, proto, blockhash, difficulty
elsif chain_difficulty > @chain.head.chain_difficulty
logger.debug 'sufficient difficulty'
@synctask = SyncTask.new self, proto, blockhash, chain_difficulty
end
rescue
logger.debug $!
logger.debug $!.backtrace[0,10].join("\n")
end | ruby | {
"resource": ""
} |
q24438 | Reth.Synchronizer.receive_newblockhashes | train | def receive_newblockhashes(proto, newblockhashes)
logger.debug 'received newblockhashes', num: newblockhashes.size, proto: proto
newblockhashes = newblockhashes.select {|h| !@chainservice.knows_block(h) }
known = @protocols.include?(proto)
if !known || newblockhashes.empty? || @synctask
logger.debug 'discarding', known: known, synctask: syncing?, num: newblockhashes.size
return
end
if newblockhashes.size != 1
logger.warn 'supporting only one newblockhash', num: newblockhashes.size
end
blockhash = newblockhashes[0]
logger.debug 'starting synctask for newblockhashes', blockhash: Utils.encode_hex(blockhash)
@synctask = SyncTask.new self, proto, blockhash, 0, true
end | ruby | {
"resource": ""
} |
q24439 | HaystackRuby.Project.api_eval | train | def api_eval(expr_str)
body = ["ver:\"#{@haystack_version}\""]
body << "expr"
body << '"'+expr_str+'"'
res = self.connection.post('eval') do |req|
req.headers['Content-Type'] = 'text/plain'
req.body = body.join("\n")
end
JSON.parse! res.body
end | ruby | {
"resource": ""
} |
q24440 | HaystackRuby.Project.equip_point_meta | train | def equip_point_meta
begin
equips = read({filter: '"equip"'})['rows']
puts equips
equips.map! do |eq|
eq.delete('disMacro')
eq['description'] = eq['id'].match(/[(NWTC)|(\$siteRef)] (.*)/)[1]
eq['id'] = eq['id'].match(/:([a-z0-9\-]*)/)[1]
eq['points'] = []
read({filter: "\"point and equipRef==#{eq['id']}\""})['rows'].each do |p|
p.delete('analytics')
p.delete('disMacro')
p.delete('csvUnit')
p.delete('csvColumn')
p.delete('equipRef')
p.delete('point')
p.delete('siteRef')
p['id'] = p['id'].match(/:([a-z0-9\-]*)/)[1]
p['name'] = p['navName']
p.delete('navName')
eq['points'] << p
end
eq
end
rescue Exception => e
puts "error: #{e}"
nil
end
end | ruby | {
"resource": ""
} |
q24441 | OpenNlp.Parser.parse | train | def parse(text)
raise ArgumentError, 'passed text must be a String' unless text.is_a?(String)
text.empty? ? {} : parse_tokens(tokenizer.tokenize(text), text)
end | ruby | {
"resource": ""
} |
q24442 | IGMarkets.ResponseParser.parse | train | def parse(response)
if response.is_a? Hash
response.each_with_object({}) do |(key, value), new_hash|
new_hash[camel_case_to_snake_case(key).to_sym] = parse(value)
end
elsif response.is_a? Array
response.map { |item| parse item }
else
response
end
end | ruby | {
"resource": ""
} |
q24443 | OpenNlp.Categorizer.categorize | train | def categorize(str)
raise ArgumentError, 'str param must be a String' unless str.is_a?(String)
outcomes = j_instance.categorize(str)
j_instance.getBestCategory(outcomes)
end | ruby | {
"resource": ""
} |
q24444 | Udongo::Pages.TreeNode.data | train | def data
{
text: @page.description,
type: :file,
li_attr: list_attributes,
data: {
id: @page.id,
url: @context.edit_translation_backend_page_path(@page, Udongo.config.i18n.app.default_locale),
delete_url: @context.backend_page_path(@page, format: :json),
deletable: @page.deletable?,
draggable: @page.draggable?,
update_position_url: @context.tree_drag_and_drop_backend_page_path(@page),
visible: @page.visible?,
toggle_visibility_url: @context.toggle_visibility_backend_page_path(@page, format: :json)
},
children: [] # This gets filled through Udongo::Pages::Tree
}
end | ruby | {
"resource": ""
} |
q24445 | Greeklish.GreeklishConverter.identify_greek_word | train | def identify_greek_word(input)
input.each_char do |char|
if (!GREEK_CHARACTERS.include?(char))
return false
end
end
true
end | ruby | {
"resource": ""
} |
q24446 | IGMarkets.Position.close | train | def close(options = {})
options[:deal_id] = deal_id
options[:direction] = { buy: :sell, sell: :buy }.fetch(direction)
options[:size] ||= size
model = PositionCloseAttributes.build options
model.validate
body = RequestBodyFormatter.format model
@dealing_platform.session.delete('positions/otc', body).fetch :deal_reference
end | ruby | {
"resource": ""
} |
q24447 | IGMarkets.Position.update | train | def update(new_attributes)
new_attributes = { limit_level: limit_level, stop_level: stop_level, trailing_stop: trailing_stop?,
trailing_stop_distance: trailing_stop_distance, trailing_stop_increment: trailing_step }
.merge new_attributes
unless new_attributes[:trailing_stop]
new_attributes[:trailing_stop_distance] = new_attributes[:trailing_stop_increment] = nil
end
body = RequestBodyFormatter.format PositionUpdateAttributes.new(new_attributes)
@dealing_platform.session.put("positions/otc/#{deal_id}", body, API_V2).fetch(:deal_reference)
end | ruby | {
"resource": ""
} |
q24448 | IGMarkets.PasswordEncryptor.encoded_public_key= | train | def encoded_public_key=(encoded_public_key)
self.public_key = OpenSSL::PKey::RSA.new Base64.strict_decode64 encoded_public_key
end | ruby | {
"resource": ""
} |
q24449 | IGMarkets.PasswordEncryptor.encrypt | train | def encrypt(password)
encoded_password = Base64.strict_encode64 "#{password}|#{time_stamp}"
encrypted_password = public_key.public_encrypt encoded_password
Base64.strict_encode64 encrypted_password
end | ruby | {
"resource": ""
} |
q24450 | Smurfville.TypographyParser.is_typography_selector? | train | def is_typography_selector?(node)
node.is_a?(Sass::Tree::RuleNode) && node.rule[0].start_with?("%f-") rescue false
end | ruby | {
"resource": ""
} |
q24451 | Reth.AccountService.coinbase | train | def coinbase
cb_hex = (app.config[:pow] || {})[:coinbase_hex]
if cb_hex
raise ValueError, 'coinbase must be String' unless cb_hex.is_a?(String)
begin
cb = Utils.decode_hex Utils.remove_0x_head(cb_hex)
rescue TypeError
raise ValueError, 'invalid coinbase'
end
else
accts = accounts_with_address
return DEFAULT_COINBASE if accts.empty?
cb = accts[0].address
end
raise ValueError, 'wrong coinbase length' if cb.size != 20
if config[:accounts][:must_include_coinbase]
raise ValueError, 'no account for coinbase' if !@accounts.map(&:address).include?(cb)
end
cb
end | ruby | {
"resource": ""
} |
q24452 | Reth.AccountService.add_account | train | def add_account(account, store=true, include_address=true, include_id=true)
logger.info "adding account", account: account
if account.uuid && @accounts.any? {|acct| acct.uuid == account.uuid }
logger.error 'could not add account (UUID collision)', uuid: account.uuid
raise ValueError, 'Could not add account (UUID collision)'
end
if store
raise ValueError, 'Cannot store account without path' if account.path.nil?
if File.exist?(account.path)
logger.error 'File does already exist', path: account.path
raise IOError, 'File does already exist'
end
raise AssertError if @accounts.any? {|acct| acct.path == account.path }
begin
directory = File.dirname account.path
FileUtils.mkdir_p(directory) unless File.exist?(directory)
File.open(account.path, 'w') do |f|
f.write account.dump(include_address, include_id)
end
rescue IOError => e
logger.error "Could not write to file", path: account.path, message: e.to_s
raise e
end
end
@accounts.push account
@accounts.sort_by! {|acct| acct.path.to_s }
end | ruby | {
"resource": ""
} |
q24453 | Reth.AccountService.update_account | train | def update_account(account, new_password, include_address=true, include_id=true)
raise ValueError, "Account not managed by account service" unless @accounts.include?(account)
raise ValueError, "Cannot update locked account" if account.locked?
raise ValueError, 'Account not stored on disk' unless account.path
logger.debug "creating new account"
new_account = Account.create new_password, account.privkey, account.uuid, account.path
backup_path = account.path + '~'
i = 1
while File.exist?(backup_path)
backup_path = backup_path[0, backup_path.rindex('~')+1] + i.to_s
i += 1
end
raise AssertError if File.exist?(backup_path)
logger.info 'moving old keystore file to backup location', from: account.path, to: backup_path
begin
FileUtils.mv account.path, backup_path
rescue
logger.error "could not backup keystore, stopping account update", from: account.path, to: backup_path
raise $!
end
raise AssertError unless File.exist?(backup_path)
raise AssertError if File.exist?(new_account.path)
account.path = backup_path
@accounts.delete account
begin
add_account new_account, include_address, include_id
rescue
logger.error 'adding new account failed, recovering from backup'
FileUtils.mv backup_path, new_account.path
account.path = new_account.path
@accounts.push account
@accounts.sort_by! {|acct| acct.path.to_s }
raise $!
end
raise AssertError unless File.exist?(new_account.path)
logger.info "deleting backup of old keystore", path: backup_path
begin
FileUtils.rm backup_path
rescue
logger.error 'failed to delete no longer needed backup of old keystore', path: account.path
raise $!
end
account.keystore = new_account.keystore
account.path = new_account.path
@accounts.push account
@accounts.delete new_account
@accounts.sort_by! {|acct| acct.path.to_s }
logger.debug "account update successful"
end | ruby | {
"resource": ""
} |
q24454 | Reth.AccountService.find | train | def find(identifier)
identifier = identifier.downcase
if identifier =~ /\A[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\z/ # uuid
return get_by_id(identifier)
end
begin
address = Address.new(identifier).to_bytes
raise AssertError unless address.size == 20
return self[address]
rescue
# do nothing
end
index = identifier.to_i
raise ValueError, 'Index must be 1 or greater' if index <= 0
raise KeyError if index > @accounts.size
@accounts[index-1]
end | ruby | {
"resource": ""
} |
q24455 | Reth.AccountService.get_by_id | train | def get_by_id(id)
accts = @accounts.select {|acct| acct.uuid == id }
if accts.size == 0
raise KeyError, "account with id #{id} unknown"
elsif accts.size > 1
logger.warn "multiple accounts with same UUID found", uuid: id
end
accts[0]
end | ruby | {
"resource": ""
} |
q24456 | Reth.AccountService.get_by_address | train | def get_by_address(address)
raise ArgumentError, 'address must be 20 bytes' unless address.size == 20
accts = @accounts.select {|acct| acct.address == address }
if accts.size == 0
raise KeyError, "account not found by address #{Utils.encode_hex(address)}"
elsif accts.size > 1
logger.warn "multiple accounts with same address found", address: Utils.encode_hex(address)
end
accts[0]
end | ruby | {
"resource": ""
} |
q24457 | IGMarkets.Model.to_h | train | def to_h
attributes.each_with_object({}) do |(key, value), hash|
hash[key] = if value.is_a? Model
value.to_h
else
value
end
end
end | ruby | {
"resource": ""
} |
q24458 | IGMarkets.DealingPlatform.sign_in | train | def sign_in(username, password, api_key, platform)
session.username = username
session.password = password
session.api_key = api_key
session.platform = platform
result = session.sign_in
@client_account_summary = instantiate_models ClientAccountSummary, result
end | ruby | {
"resource": ""
} |
q24459 | IGMarkets.Format.colored_currency | train | def colored_currency(amount, currency_name)
return '' unless amount
color = amount < 0 ? :red : :green
ColorizedString[currency(amount, currency_name)].colorize color
end | ruby | {
"resource": ""
} |
q24460 | IGMarkets.RequestBodyFormatter.snake_case_to_camel_case | train | def snake_case_to_camel_case(value)
pieces = value.to_s.split '_'
(pieces.first + pieces[1..-1].map(&:capitalize).join).to_sym
end | ruby | {
"resource": ""
} |
q24461 | Reth.Account.dump | train | def dump(include_address=true, include_id=true)
h = {}
h[:crypto] = @keystore[:crypto]
h[:version] = @keystore[:version]
h[:address] = Utils.encode_hex address if include_address && address
h[:id] = uuid if include_id && uuid
JSON.dump(h)
end | ruby | {
"resource": ""
} |
q24462 | Reth.Account.sign_tx | train | def sign_tx(tx)
if privkey
logger.info "signing tx", tx: tx, account: self
tx.sign privkey
else
raise ValueError, "Locked account cannot sign tx"
end
end | ruby | {
"resource": ""
} |
q24463 | SimpleXml.Document.detect_unstratified | train | def detect_unstratified
missing_populations = []
# populations are keyed off of values rather than the codes
existing_populations = @populations.map{|p| p.values.join('-')}.uniq
@populations.each do |population|
keys = population.keys - ['STRAT','stratification']
missing_populations |= [population.values_at(*keys).compact.join('-')]
end
missing_populations -= existing_populations
# reverse the order and prepend them to @populations
missing_populations.reverse.each do |population|
p = {}
population.split('-').each do |code|
p[code.split('_').first] = code
end
@populations.unshift p
end
end | ruby | {
"resource": ""
} |
q24464 | Ganapati.Client.put | train | def put(localpath, destpath)
create(destpath) { |dest|
Kernel.open(localpath) { |source|
# read 1 MB at a time
while record = source.read(1048576)
dest.write(record)
end
}
}
end | ruby | {
"resource": ""
} |
q24465 | Ganapati.Client.get | train | def get(remotepath, destpath)
Kernel.open(destpath, 'w') { |dest|
readchunks(remotepath) { |chunk|
dest.write chunk
}
}
end | ruby | {
"resource": ""
} |
q24466 | Ganapati.Client.readchunks | train | def readchunks(path, chunksize=1048576)
open(path) { |source|
size = source.length
index = 0
while index < size
yield source.read(index, chunksize)
index += chunksize
end
}
end | ruby | {
"resource": ""
} |
q24467 | OpenNlp.Tokenizer.tokenize | train | def tokenize(str)
raise ArgumentError, 'str must be a String' unless str.is_a?(String)
j_instance.tokenize(str).to_ary
end | ruby | {
"resource": ""
} |
q24468 | OpenNlp.POSTagger.tag | train | def tag(tokens)
!tokens.is_a?(Array) && !tokens.is_a?(String) &&
raise(ArgumentError, 'tokens must be an instance of String or Array')
j_instance.tag(tokens.to_java(:String))
end | ruby | {
"resource": ""
} |
q24469 | IGMarkets.Account.reload | train | def reload
self.attributes = @dealing_platform.account.all.detect { |a| a.account_id == account_id }.attributes
end | ruby | {
"resource": ""
} |
q24470 | Contentstack.Query.only | train | def only(fields, fields_with_base=nil)
q = {}
if [Array, String].include?(fields_with_base.class)
fields_with_base = [fields_with_base] if fields_with_base.class == String
q[fields.to_sym] = fields_with_base
else
fields = [fields] if fields.class == String
q = {BASE: fields}
end
@query[:only] = q
self
end | ruby | {
"resource": ""
} |
# Update this working order with the given new attributes, merging them over
# the order's current values.
#
# new_attributes - Hash of attributes to change
#
# Returns the deal reference of the update request.
def update(new_attributes)
  current = {
    good_till_date: good_till_date,
    level: order_level,
    limit_distance: limit_distance,
    stop_distance: stop_distance,
    time_in_force: time_in_force,
    type: order_type
  }
  model = WorkingOrderUpdateAttributes.new current, new_attributes
  model.validate
  payload = RequestBodyFormatter.format model
  @dealing_platform.session.put("workingorders/otc/#{deal_id}", payload, API_V2).fetch(:deal_reference)
end
"resource": ""
} |
# Record a Sass color variable node in the colors registry.
#
# node - a Sass variable node responding to #name and #expr
# key  - registry key; defaults to the color expression's string form
#
# Collects every variable name pointing at the same color, plus any
# alternate textual spellings of the value that differ from the key.
def add_color(node, key = nil)
  key ||= node.expr.to_s
  entry = (self.colors[key] ||= { :variables => [], :alternate_values => [] })
  entry[:variables] << node.name
  entry[:alternate_values] |= ([node.expr.to_sass, node.expr.inspect] - [key])
end
"resource": ""
} |
# Wrap rendered content in the layout stored in the Rack env, if any.
#
# env     - Rack environment; env['layout'] is the layout object (or absent)
# content - the rendered body to embed
# opts    - rendering options (:format, optional :layout template name)
#
# Returns the configured layout, or the bare content when no layout is set.
def apply_layout(env, content, opts)
  layout = env['layout']
  return content unless layout
  layout.format = opts[:format]
  layout.content = content
  layout.template_name = opts[:layout] if layout.template_name?(opts[:layout], opts)
  layout
end
"resource": ""
} |
# Read +num_elements+ objects from the process memory at +addr+.
# +addr+ may be an expression resolved against the process's symbol bases.
def read(addr, num_elements, **options)
  mem_io(:read) do |io|
    io.read(num_elements, from: MemoryIO::Util.safe_eval(addr, bases), **options)
  end
end
"resource": ""
} |
# Write +objects+ into the process memory at +addr+.
# +addr+ may be an expression resolved against the process's symbol bases.
def write(addr, objects, **options)
  mem_io(:write) do |io|
    io.write(objects, from: MemoryIO::Util.safe_eval(addr, bases), **options)
  end
end
"resource": ""
} |
# Path to the netrc credentials file, preferring a GPG-encrypted variant
# (<default>.gpg) when one exists.
#
# Returns the path String.
def netrc_path
  unencrypted = Netrc.default_path
  encrypted = unencrypted + '.gpg'
  # File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
  if File.exist?(encrypted)
    encrypted
  else
    unencrypted
  end
end
"resource": ""
} |
# Read and return the parsed netrc file, reporting a user-facing error
# (via the CLI +error+ helper) when it cannot be parsed.
def open_netrc
  Netrc.read(netrc_path)
rescue Netrc::Error => e
  error e.message
end
"resource": ""
} |
# Persist API credentials for the current host into the user's netrc file.
#
# email_address - account email stored as the netrc login
# api_key       - API key stored as the netrc password
def save_netrc_entry(email_address, api_key)
  netrc = open_netrc
  # Entries are keyed by the API hostname, e.g. "api.example.com".
  netrc["api.#{host}"] = [email_address, api_key]
  netrc.save
end
"resource": ""
} |
# Restore the test database from a fixture dump produced by +dump_prod+.
#
# id - identifier of the dump fixture (fixtures/trunk-<id>.dump)
#
# Drops and recreates the trunk_cocoapods_org_test database, then restores
# it with pg_restore. Raises when the fixture file is missing; pg_restore
# errors are reported but tolerated (restores from Heroku dumps commonly
# emit ignorable ACL/ownership warnings).
def seed_from_dump id
  target_path = File.expand_path("../../fixtures/trunk-#{id}.dump", __FILE__)
  # File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
  raise "Dump #{id} could not be found." unless File.exist? target_path
  puts "Restoring #{ENV['RACK_ENV']} database from #{target_path}"
  # Ensure we're starting from a clean DB.
  system "dropdb trunk_cocoapods_org_test"
  system "createdb trunk_cocoapods_org_test"
  # Restore the DB.
  command = "pg_restore --no-privileges --clean --no-acl --no-owner -h localhost -d trunk_cocoapods_org_test #{target_path}"
  puts "Executing:"
  puts command
  puts
  result = system command
  if result
    puts "Database #{ENV['RACK_ENV']} restored from #{target_path}"
  else
    warn "Database #{ENV['RACK_ENV']} restored from #{target_path} with some errors."
    # exit 1
  end
end
"resource": ""
} |
# Download a production database snapshot from Heroku into the fixtures
# directory as trunk-<id>.dump.
#
# id - Heroku backup identifier (passed to `heroku pg:backups public-url`)
#
# Requires access to the cocoapods-trunk-service Heroku app; raises when
# the download command fails.
def dump_prod id
  target_path = File.expand_path("../../fixtures/trunk-#{id}.dump", __FILE__)
  puts "Dumping production database from Heroku (works only if you have access to the database)"
  # The backticks are evaluated by the shell: heroku prints a signed URL,
  # which curl then downloads to target_path.
  command = "curl -o #{target_path} \`heroku pg:backups public-url #{id} -a cocoapods-trunk-service\`"
  puts "Executing command:"
  puts command
  result = system command
  if result
    puts "Production database snapshot #{id} dumped into #{target_path}"
  else
    raise "Could not dump #{id} from production database."
  end
end
"resource": ""
} |
# Like #function, but tolerates functions missing on the current platform:
# instead of raising Win::Errors::NotFoundError, returns an explanatory
# String. All other arguments are forwarded unchanged to #function.
def try_function(name, params, returns, options={}, &def_block)
  function name, params, returns, options, &def_block
rescue Win::Errors::NotFoundError
  "This platform does not support function #{name}"
end
"resource": ""
} |
# Attach a Windows API function via FFI and build its API descriptor.
#
# name            - original (possibly decorated) API name
# camel_name      - CamelCase Ruby method name to define
# effective_names - candidate exported names to try attaching, in order
# params/returns  - FFI signature specs (normalized by generate_signature)
# options         - :dll to add a library, :alternative for a second
#                   signature selected at call time by a condition proc
#
# Returns an API object describing the attached function.
# Raises Win::Errors::NotFoundError when no candidate name can be attached.
def define_api(name, camel_name, effective_names, params, returns, options)
  params, returns = generate_signature(params.dup, returns)
  ffi_lib *(ffi_libraries.map(&:name) << options[:dll]) if options[:dll]
  libs = ffi_libraries.map(&:name)
  alternative = options.delete(:alternative) # Function may have alternative signature
  effective_name = if alternative
    # Two signatures: attach both under private camel names, then define
    # a dispatching method that picks one based on the condition proc
    # applied to the actual call arguments.
    alt_params, alt_returns, condition = generate_signature(*alternative)
    api = function name, params, returns,
                   options.merge( camel_only: true, camel_name: "#{camel_name}Original")
    alt_api = function name, alt_params, alt_returns,
                       options.merge( camel_only: true, camel_name: "#{camel_name}Alternative")
    define_method camel_name do |*args|
      (condition[*args] ? alt_api : api).call(*args)
    end
    module_function camel_name
    public camel_name
    api.effective_name
  else
    # Single signature: try each candidate export name until one attaches.
    effective_names.inject(nil) do |func, effective_name|
      func || begin
        # Try to attach basic CamelCase method via FFI
        attach_function(camel_name, effective_name, params.dup, returns)
        effective_name
      rescue FFI::NotFoundError
        nil
      end
    end
  end
  raise Win::Errors::NotFoundError.new(name, libs) unless effective_name
  # Create API object that holds information about defined and effective function names, params, etc.
  # This object is further used by enhanced snake_case method to reflect on underlying API and
  # intelligently call it.
  API.new(namespace, camel_name, effective_name, params, returns, libs)
end
"resource": ""
} |
# Expand every $(...) command substitution found in +input+, innermost
# first, by evaluating each substituted command and splicing its captured
# stdout (collapsed and double-quoted) back into the string.
#
# input - the raw command line String (not mutated; a copy is edited)
#
# Returns the input with all substitutions replaced by quoted output.
def recursively_find_and_replace_command_substitutions(input)
  input = input.dup
  Parser.each_command_substitution_for(input) do |substitution_result, start_position, end_position|
    debug_log "found command substitution at position=#{(start_position..end_position)} #{substitution_result.inspect}"
    # Depth-first: resolve nested substitutions inside this one before
    # evaluating it.
    result = recursively_find_and_replace_command_substitutions(substitution_result.str)
    position = substitution_result.position
    ast = Parser.parse(result)
    # Temporarily redirect stdout into a pipe so the substituted command's
    # output can be captured; streams are restored by the wrapper.
    with_standard_streams do |stdin, stdout, stderr|
      r,w = IO.pipe
      @stdout = w
      ast.accept(self)
      output = r.read.chomp
      # Treat consecutive newlines in output as a single space
      output = output.gsub(/\n+/, ' ')
      # Double quote the output and escape any double quotes already
      # existing
      output = %|"#{output.gsub(/"/, '\\"')}"|
      # Put thd output back into the original input
      debug_log "replacing command substitution at position=#{(position.min...position.max)} with #{output.inspect}"
      input[position.min...position.max] = output
    end
  end
  input
end
"resource": ""
} |
# AST visitor for a command node: either expands a matching alias (by
# re-parsing the alias text and visiting the resulting AST) or builds and
# executes the command with the current argument stack and redirections.
#
# node - the CommandNode being visited
def visit_CommandNode(node)
  debug_visit(node)
  @aliases_expanded ||= []
  @command_node_args_stack ||= []
  with_standard_streams do |stdin, stdout, stderr|
    args = process_ArgumentNodes(node.args)
    # Expand an alias only once per name (@aliases_expanded guards against
    # infinite recursion for self-referencing aliases); literal commands
    # (e.g. \foo) skip alias expansion entirely.
    if !node.literal? && !@aliases_expanded.include?(node.command) && Aliases.instance.has_key?(node.command)
      _alias=Aliases.instance.fetch_alias(node.command)
      @suppress_events = true
      # Stash this node's args so the expanded alias command picks them up.
      @command_node_args_stack << args
      ast = Parser.parse(_alias)
      @aliases_expanded.push(node.command)
      ast.accept(self)
      @aliases_expanded.pop
      @suppress_events = false
    else
      cmd2execute = variable_expand(node.command)
      # Merge node args with any args stashed by alias expansion above.
      final_args = (args + @command_node_args_stack).flatten.map(&:shellescape)
      expanded_args = final_args
      command = CommandFactory.build_command_for(
        world: world,
        command: cmd2execute,
        args: expanded_args,
        heredoc: (node.heredoc && node.heredoc.value),
        internally_evaluate: node.internally_evaluate?,
        line: @input)
      @stdin, @stdout, @stderr = stream_redirections_for(node)
      # pipeline_stack.empty? tells the executor whether this command is
      # the last stage (i.e. not inside a pipeline).
      set_last_result @blk.call command, @stdin, @stdout, @stderr, pipeline_stack.empty?
      @command_node_args_stack.clear
    end
  end
end
"resource": ""
} |
# Return the undocumented symbols reported by Jazzy for the given scope.
#
# scope - :modified (default), :all, or :ignore (always returns [])
#
# Results are lazily loaded from the Jazzy undocumented.json file and
# memoized per scope; an empty Array is returned when the report file
# does not exist.
def undocumented(scope = :modified)
  return [] if scope == :ignore || !File.exist?(undocumented_path)
  @undocumented = { modified: [], all: [] } if @undocumented.nil?
  load_undocumented(scope) if @undocumented[scope].empty?
  @undocumented[scope]
end
"resource": ""
} |
# LiqPay server-to-server payment callback.
#
# Verifies the callback signature, cross-checks the payload against the
# order (currency, amount, operation type, status — accepting 'sandbox'
# only in test mode), then records and completes a payment.
#
# Responds 401 on a bad signature; raises ArgumentError on any mismatch
# so nothing is recorded for a tampered or duplicate callback.
def update
  @payment_method = PaymentMethod.find params[:payment_method_id]
  # LiqPay sends the payload Base64-encoded alongside a signature.
  data = JSON.parse Base64.strict_decode64 params[:data]
  # Signature is verified over the raw encoded data, before trusting any field.
  render text: "Bad signature\n", status: 401 and return unless @payment_method.check_signature params[:data], params[:signature]
  @order = Order.find data['order_id']
  raise ArgumentError unless @order.payments.completed.empty? &&
    data['currency'] == @order.currency &&
    BigDecimal(data['amount']) == @order.total &&
    data['type'] == 'buy' &&
    (data['status'] == 'success' || (@payment_method.preferred_test_mode && data['status'] == 'sandbox'))
  payment = @order.payments.create amount: @order.total, payment_method: @payment_method
  payment.complete!
  render text: "Thank you.\n"
end
"resource": ""
} |
# Return the cluster's brokers as a Hash of broker id (Integer) => Broker,
# fetched from Zookeeper and memoized under a mutex.
#
# Raises NoClusterRegistered when /brokers/ids is absent, and Kazoo::Error
# when an individual broker record cannot be read.
def brokers
  @brokers_mutex.synchronize do
    @brokers ||= begin
      brokers = zk.get_children(path: "/brokers/ids")
      if brokers.fetch(:rc) != Zookeeper::Constants::ZOK
        raise NoClusterRegistered, "No Kafka cluster registered on this Zookeeper location."
      end
      # Fetch each broker's metadata concurrently; the inner mutex guards
      # the shared result Hash.
      result, mutex = {}, Mutex.new
      threads = brokers.fetch(:children).map do |id|
        Thread.new do
          Thread.abort_on_exception = true
          broker_info = zk.get(path: "/brokers/ids/#{id}")
          raise Kazoo::Error, "Failed to retrieve broker info. Error code: #{broker_info.fetch(:rc)}" unless broker_info.fetch(:rc) == Zookeeper::Constants::ZOK
          broker = Kazoo::Broker.from_json(self, id, JSON.parse(broker_info.fetch(:data)))
          mutex.synchronize { result[id.to_i] = broker }
        end
      end
      threads.each(&:join)
      result
    end
  end
end
"resource": ""
} |
# Return the consumer groups registered under /consumers in Zookeeper,
# memoized as Kazoo::Consumergroup instances.
def consumergroups
  @consumergroups ||= zk.get_children(path: "/consumers")
                        .fetch(:children)
                        .map { |name| Kazoo::Consumergroup.new(self, name) }
end
"resource": ""
} |
# Return the cluster's topics, fetched from /brokers/topics and memoized
# under a mutex.
#
# preload - which topic attributes to eagerly load (defaults to
#           Kazoo::Topic::DEFAULT_PRELOAD_METHODS)
#
# Raises Kazoo::Error when the Zookeeper listing fails.
def topics(preload: Kazoo::Topic::DEFAULT_PRELOAD_METHODS)
  @topics_mutex.synchronize do
    @topics ||= begin
      response = zk.get_children(path: "/brokers/topics")
      unless response.fetch(:rc) == Zookeeper::Constants::ZOK
        raise Kazoo::Error, "Failed to list topics. Error code: #{response.fetch(:rc)}"
      end
      preload_topics_from_names(response.fetch(:children), preload: preload)
    end
  end
end
"resource": ""
} |
# Create a new topic on the cluster.
#
# name               - topic name
# partitions         - positive Integer partition count (required)
# replication_factor - positive Integer replica count (required)
# config             - optional topic-level config Hash
#
# Raises ArgumentError when partitions or replication_factor is not a
# positive integer.
def create_topic(name, partitions: nil, replication_factor: nil, config: nil)
  num_partitions = Integer(partitions)
  raise ArgumentError, "partitions must be a positive integer" unless num_partitions > 0
  replicas = Integer(replication_factor)
  raise ArgumentError, "replication_factor must be a positive integer" unless replicas > 0
  Kazoo::Topic.create(self, name, partitions: num_partitions, replication_factor: replicas, config: config)
end
"resource": ""
} |
# Trigger a preferred replica leader election for the given partitions
# (defaults to every partition in the cluster).
#
# Returns true when the election was scheduled.
# Raises Kazoo::Error when another election is already in progress or the
# Zookeeper write fails.
def preferred_leader_election(partitions: nil)
  partitions = self.partitions if partitions.nil?
  payload = JSON.generate(version: 1, partitions: partitions)
  result = zk.create(path: "/admin/preferred_replica_election", data: payload)
  case result.fetch(:rc)
  when Zookeeper::Constants::ZOK
    true
  when Zookeeper::Constants::ZNODEEXISTS
    # The election znode lingers until the controller finishes processing.
    raise Kazoo::Error, "Another preferred leader election is still in progress"
  else
    raise Kazoo::Error, "Failed to start preferred leadership election. Result code: #{result.fetch(:rc)}"
  end
end
"resource": ""
} |
# Create a Zookeeper node at +path+, creating missing parent nodes first
# (like mkdir -p). Succeeds silently when the node already exists.
#
# path - absolute znode path (required keyword)
#
# Raises ArgumentError when path is nil, Kazoo::Error on any other failure.
def recursive_create(path: nil)
  raise ArgumentError, "path is a required argument" if path.nil?
  result = zk.stat(path: path)
  case result.fetch(:rc)
  when Zookeeper::Constants::ZOK
    # Node already exists; nothing to do.
    return
  when Zookeeper::Constants::ZNONODE
    # Parent chain may be missing: create ancestors, then this node.
    # ZNODEEXISTS is tolerated to stay race-safe against concurrent creators.
    recursive_create(path: File.dirname(path))
    result = zk.create(path: path)
    case result.fetch(:rc)
    when Zookeeper::Constants::ZOK, Zookeeper::Constants::ZNODEEXISTS
      return
    else
      raise Kazoo::Error, "Failed to create node #{path}. Result code: #{result.fetch(:rc)}"
    end
  else
    raise Kazoo::Error, "Failed to create node #{path}. Result code: #{result.fetch(:rc)}"
  end
end
"resource": ""
} |
# Delete a Zookeeper node at +path+ together with all of its descendants
# (children are deleted concurrently, depth-first).
#
# path - absolute znode path (required keyword)
#
# Raises ArgumentError when path is nil, Kazoo::Error when listing or
# deleting any node fails.
def recursive_delete(path: nil)
  raise ArgumentError, "path is a required argument" if path.nil?
  result = zk.get_children(path: path)
  raise Kazoo::Error, "Failed to list children of #{path} to delete them. Result code: #{result.fetch(:rc)}" if result.fetch(:rc) != Zookeeper::Constants::ZOK
  # Delete each child subtree in its own thread; abort_on_exception
  # propagates any child failure to this caller.
  threads = result.fetch(:children).map do |name|
    Thread.new do
      Thread.abort_on_exception = true
      recursive_delete(path: File.join(path, name))
    end
  end
  threads.each(&:join)
  result = zk.delete(path: path)
  raise Kazoo::Error, "Failed to delete node #{path}. Result code: #{result.fetch(:rc)}" if result.fetch(:rc) != Zookeeper::Constants::ZOK
end
"resource": ""
} |
# Find the first haiku (Song) starting at any token position in +text+.
#
# text - the String to scan
#
# Returns the first valid Song, or nil when none is found.
def find(text)
  nodes = parser.parse(text)
  (0...nodes.length).each do |index|
    song = Song.new(nodes[index..-1], rule: @rule)
    return song if song.valid?
  end
  nil
end
"resource": ""
} |
# Check whether +text+ is exactly a haiku (no leading or trailing tokens).
#
# Returns true or false.
def judge(text)
  song = Song.new(parser.parse(text), exactly: true, rule: @rule)
  song.valid?
end
"resource": ""
} |
# Collect every haiku (Song) starting at any token position in +text+.
#
# Returns an Array of valid Songs (possibly empty).
def search(text)
  nodes = parser.parse(text)
  candidates = (0...nodes.length).map do |index|
    Song.new(nodes[index..-1], rule: @rule)
  end
  candidates.select(&:valid?)
end
"resource": ""
} |
# Return every partition in the cluster whose current leader is this
# broker. Leadership is checked concurrently, one thread per partition.
def led_partitions
  mutex = Mutex.new
  led = []
  workers = cluster.partitions.map do |partition|
    Thread.new do
      Thread.abort_on_exception = true
      leads = partition.leader == self
      mutex.synchronize { led << partition } if leads
    end
  end
  workers.each(&:join)
  led
end
"resource": ""
} |
# Return every partition in the cluster that lists this broker among its
# replicas. Membership is checked concurrently, one thread per partition.
def replicated_partitions
  mutex = Mutex.new
  replicated = []
  workers = cluster.partitions.map do |partition|
    Thread.new do
      Thread.abort_on_exception = true
      hosts_replica = partition.replicas.include?(self)
      mutex.synchronize { replicated << partition } if hosts_replica
    end
  end
  workers.each(&:join)
  replicated
end
"resource": ""
} |
# Whether taking this broker down would leave any of its partitions with
# fewer than +replicas+ in-sync replicas.
#
# replicas - minimum number of surviving in-sync replicas required
#
# Returns true when at least one partition would fall below the threshold.
def critical?(replicas: 1)
  mutex = Mutex.new
  critical = false
  workers = replicated_partitions.map do |partition|
    Thread.new do
      Thread.abort_on_exception = true
      surviving_isr = partition.isr.reject { |r| r == self }
      mutex.synchronize { critical = true if surviving_isr.length < Integer(replicas) }
    end
  end
  workers.each(&:join)
  critical
end
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.