_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
---|---|---|---|---|---|
q25200 | Spidr.Agent.visit? | validation | def visit?(url)
!visited?(url) &&
visit_scheme?(url.scheme) &&
visit_host?(url.host) &&
visit_port?(url.port) &&
visit_link?(url.to_s) &&
visit_url?(url) &&
visit_ext?(url.path) &&
robot_allowed?(url.to_s)
end | ruby | {
"resource": ""
} |
q25201 | Spidr.Rules.accept? | validation | def accept?(data)
unless @accept.empty?
@accept.any? { |rule| test_data(data,rule) }
else
!@reject.any? { |rule| test_data(data,rule) }
end
end | ruby | {
"resource": ""
} |
q25202 | Spidr.Agent.every_html_doc | validation | def every_html_doc
every_page do |page|
if (block_given? && page.html?)
if (doc = page.doc)
yield doc
end
end
end
end | ruby | {
"resource": ""
} |
q25203 | Spidr.Agent.every_xml_doc | validation | def every_xml_doc
every_page do |page|
if (block_given? && page.xml?)
if (doc = page.doc)
yield doc
end
end
end
end | ruby | {
"resource": ""
} |
q25204 | Spidr.Agent.every_rss_doc | validation | def every_rss_doc
every_page do |page|
if (block_given? && page.rss?)
if (doc = page.doc)
yield doc
end
end
end
end | ruby | {
"resource": ""
} |
q25205 | Spidr.Agent.every_atom_doc | validation | def every_atom_doc
every_page do |page|
if (block_given? && page.atom?)
if (doc = page.doc)
yield doc
end
end
end
end | ruby | {
"resource": ""
} |
q25206 | Spidr.Agent.initialize_filters | validation | def initialize_filters(options={})
@schemes = []
if options[:schemes]
self.schemes = options[:schemes]
else
@schemes << 'http'
begin
require 'net/https'
@schemes << 'https'
rescue Gem::LoadError => e
raise(e)
rescue ::LoadError
warn "Warning: cannot load 'net/https', https support disabled"
end
end
@host_rules = Rules.new(
accept: options[:hosts],
reject: options[:ignore_hosts]
)
@port_rules = Rules.new(
accept: options[:ports],
reject: options[:ignore_ports]
)
@link_rules = Rules.new(
accept: options[:links],
reject: options[:ignore_links]
)
@url_rules = Rules.new(
accept: options[:urls],
reject: options[:ignore_urls]
)
@ext_rules = Rules.new(
accept: options[:exts],
reject: options[:ignore_exts]
)
if options[:host]
visit_hosts_like(options[:host])
end
end | ruby | {
"resource": ""
} |
q25207 | Spidr.Page.each_meta_redirect | validation | def each_meta_redirect
return enum_for(__method__) unless block_given?
if (html? && doc)
search('//meta[@http-equiv and @content]').each do |node|
if node.get_attribute('http-equiv') =~ /refresh/i
content = node.get_attribute('content')
if (redirect = content.match(/url=(\S+)$/))
yield redirect[1]
end
end
end
end
end | ruby | {
"resource": ""
} |
q25208 | Spidr.Page.each_redirect | validation | def each_redirect(&block)
return enum_for(__method__) unless block
if (locations = @response.get_fields('Location'))
# Location headers override any meta-refresh redirects in the HTML
locations.each(&block)
else
# check page-level meta redirects if there isn't a location header
each_meta_redirect(&block)
end
end | ruby | {
"resource": ""
} |
q25209 | Spidr.Page.each_link | validation | def each_link
return enum_for(__method__) unless block_given?
filter = lambda { |url|
yield url unless (url.nil? || url.empty?)
}
each_redirect(&filter) if is_redirect?
if (html? && doc)
doc.search('//a[@href]').each do |a|
filter.call(a.get_attribute('href'))
end
doc.search('//frame[@src]').each do |iframe|
filter.call(iframe.get_attribute('src'))
end
doc.search('//iframe[@src]').each do |iframe|
filter.call(iframe.get_attribute('src'))
end
doc.search('//link[@href]').each do |link|
filter.call(link.get_attribute('href'))
end
doc.search('//script[@src]').each do |script|
filter.call(script.get_attribute('src'))
end
end
end | ruby | {
"resource": ""
} |
q25210 | Spidr.Page.each_url | validation | def each_url
return enum_for(__method__) unless block_given?
each_link do |link|
if (url = to_absolute(link))
yield url
end
end
end | ruby | {
"resource": ""
} |
q25211 | Spidr.Page.to_absolute | validation | def to_absolute(link)
link = link.to_s
new_url = begin
url.merge(link)
rescue Exception
return
end
if (!new_url.opaque) && (path = new_url.path)
# ensure that paths begin with a leading '/' for URI::FTP
if (new_url.scheme == 'ftp' && !path.start_with?('/'))
path.insert(0,'/')
end
# make sure the path does not contain any .. or . directories,
# since URI::Generic#merge cannot normalize paths such as
# "/stuff/../"
new_url.path = URI.expand_path(path)
end
return new_url
end | ruby | {
"resource": ""
} |
q25212 | Roar.HttpVerbs.post | validation | def post(options={}, &block)
response = http.post_uri(options.merge(:body => serialize), &block)
handle_response(response)
end | ruby | {
"resource": ""
} |
q25213 | Roar.HttpVerbs.get | validation | def get(options={}, &block)
response = http.get_uri(options, &block)
handle_response(response)
end | ruby | {
"resource": ""
} |
q25214 | Roar.HttpVerbs.put | validation | def put(options={}, &block)
response = http.put_uri(options.merge(:body => serialize), &block)
handle_response(response)
self
end | ruby | {
"resource": ""
} |
q25215 | Chain.Query.each | validation | def each
page = fetch(@first_query)
loop do
if page['items'].empty? # we consume this array as we iterate
break if page['last_page']
page = fetch(page['next'])
# The second predicate (empty?) *should* be redundant, but we check it
# anyway as a defensive measure.
break if page['items'].empty?
end
item = page['items'].shift
yield translate(item)
end
end | ruby | {
"resource": ""
} |
q25216 | Chain.HSMSigner.sign | validation | def sign(tx_template)
return tx_template if @xpubs_by_signer.empty?
@xpubs_by_signer.each do |signer_conn, xpubs|
tx_template = signer_conn.singleton_batch_request(
'/sign-transaction',
transactions: [tx_template],
xpubs: xpubs,
) { |item| Transaction::Template.new(item) }
end
tx_template
end | ruby | {
"resource": ""
} |
q25217 | Chain.HSMSigner.sign_batch | validation | def sign_batch(tx_templates)
if @xpubs_by_signer.empty?
# Treat all templates as if signed successfully.
successes = tx_templates.each_with_index.reduce({}) do |memo, (t, i)|
memo[i] = t
memo
end
BatchResponse.new(successes: successes)
end
# We need to work towards a single, final BatchResponse that uses the
# original indexes. For the next cycle, we should retain only those
# templates for which the most recent sign response was successful, and
# maintain a mapping of each template's index in the upcoming request
# to its original index.
orig_index = (0...tx_templates.size).to_a
errors = {}
@xpubs_by_signer.each do |signer_conn, xpubs|
next_tx_templates = []
next_orig_index = []
batch = signer_conn.batch_request(
'/sign-transaction',
transactions: tx_templates,
xpubs: xpubs,
) { |item| Transaction::Template.new(item) }
batch.successes.each do |i, template|
next_tx_templates << template
next_orig_index << orig_index[i]
end
batch.errors.each do |i, err|
errors[orig_index[i]] = err
end
tx_templates = next_tx_templates
orig_index = next_orig_index
# Early-exit if all templates have encountered an error.
break if tx_templates.empty?
end
successes = tx_templates.each_with_index.reduce({}) do |memo, (t, i)|
memo[orig_index[i]] = t
memo
end
BatchResponse.new(
successes: successes,
errors: errors,
)
end | ruby | {
"resource": ""
} |
q25218 | FHIR.Client.set_no_auth | validation | def set_no_auth
FHIR.logger.info 'Configuring the client to use no authentication.'
@use_oauth2_auth = false
@use_basic_auth = false
@security_headers = {}
@client = RestClient
@client.proxy = proxy unless proxy.nil?
@client
end | ruby | {
"resource": ""
} |
q25219 | FHIR.Client.set_basic_auth | validation | def set_basic_auth(client, secret)
FHIR.logger.info 'Configuring the client to use HTTP Basic authentication.'
token = Base64.encode64("#{client}:#{secret}")
value = "Basic #{token}"
@security_headers = { 'Authorization' => value }
@use_oauth2_auth = false
@use_basic_auth = true
@client = RestClient
@client.proxy = proxy unless proxy.nil?
@client
end | ruby | {
"resource": ""
} |
q25220 | FHIR.Client.set_bearer_token | validation | def set_bearer_token(token)
FHIR.logger.info 'Configuring the client to use Bearer Token authentication.'
value = "Bearer #{token}"
@security_headers = { 'Authorization' => value }
@use_oauth2_auth = false
@use_basic_auth = true
@client = RestClient
@client.proxy = proxy unless proxy.nil?
@client
end | ruby | {
"resource": ""
} |
q25221 | FHIR.Client.request_payload | validation | def request_payload(resource, headers)
if headers
format_specified = headers['Content-Type']
if format_specified.nil?
resource.to_xml
elsif format_specified.downcase.include?('xml')
resource.to_xml
elsif format_specified.downcase.include?('json')
resource.to_json
else
resource.to_xml
end
else
resource.to_xml
end
end | ruby | {
"resource": ""
} |
q25222 | IPAddress.IPv4.split | validation | def split(subnets=2)
unless (1..(2**@prefix.host_prefix)).include? subnets
raise ArgumentError, "Value #{subnets} out of range"
end
networks = subnet(newprefix(subnets))
until networks.size == subnets
networks = sum_first_found(networks)
end
return networks
end | ruby | {
"resource": ""
} |
q25223 | IPAddress.IPv4.supernet | validation | def supernet(new_prefix)
raise ArgumentError, "New prefix must be smaller than existing prefix" if new_prefix >= @prefix.to_i
return self.class.new("0.0.0.0/0") if new_prefix < 1
return self.class.new(@address+"/#{new_prefix}").network
end | ruby | {
"resource": ""
} |
q25224 | IPAddress.IPv4.subnet | validation | def subnet(subprefix)
unless ((@prefix.to_i)..32).include? subprefix
raise ArgumentError, "New prefix must be between #@prefix and 32"
end
Array.new(2**(subprefix-@prefix.to_i)) do |i|
self.class.parse_u32(network_u32+(i*(2**(32-subprefix))), subprefix)
end
end | ruby | {
"resource": ""
} |
q25225 | IPAddress.Prefix.- | validation | def -(oth)
if oth.is_a? Integer
self.prefix - oth
else
(self.prefix - oth.prefix).abs
end
end | ruby | {
"resource": ""
} |
q25226 | Chatterbot.DSL.bot | validation | def bot
return @bot unless @bot.nil?
@bot_command = nil
#
# parse any command-line options and use them to initialize the bot
#
params = {}
#:nocov:
opts = OptionParser.new
opts.banner = "Usage: #{File.basename($0)} [options]"
opts.separator ""
opts.separator "Specific options:"
opts.on('-c', '--config [ARG]', "Specify a config file to use") { |c| ENV["chatterbot_config"] = c }
opts.on('-t', '--test', "Run the bot without actually sending any tweets") { params[:debug_mode] = true }
opts.on('-v', '--verbose', "verbose output to stdout") { params[:verbose] = true }
opts.on('--dry-run', "Run the bot in test mode, and also don't update the database") { params[:debug_mode] = true ; params[:no_update] = true }
opts.on('-r', '--reset', "Reset your bot to ignore old tweets") {
@bot_command = :reset_since_id_counters
}
opts.on('--profile [ARG]', "get/set your bot's profile text") { |p|
@bot_command = :profile_text
@bot_command_args = [ p ]
}
opts.on('--website [ARG]', "get/set your bot's profile URL") { |u|
@bot_command = :profile_website
@bot_command_args = [ u ]
}
opts.on_tail("-h", "--help", "Show this message") do
puts opts
exit
end
opts.parse!(ARGV)
#:nocov:
@bot = Chatterbot::Bot.new(params)
if @bot_command != nil
@bot.skip_run = true
result = @bot.send(@bot_command, *@bot_command_args)
puts result
end
@bot
end | ruby | {
"resource": ""
} |
q25227 | Chatterbot.DSL.blocklist | validation | def blocklist(*args)
list = flatten_list_of_strings(args)
if list.nil? || list.empty?
bot.blocklist = []
else
bot.blocklist += list
end
end | ruby | {
"resource": ""
} |
q25228 | Chatterbot.DSL.safelist | validation | def safelist(*args)
list = flatten_list_of_strings(args)
if list.nil? || list.empty?
bot.safelist = []
else
bot.safelist += list
end
end | ruby | {
"resource": ""
} |
q25229 | Chatterbot.DSL.exclude | validation | def exclude(*args)
e = flatten_list_of_strings(args)
if e.nil? || e.empty?
bot.exclude = []
else
bot.exclude += e
end
end | ruby | {
"resource": ""
} |
q25230 | Chatterbot.DSL.consumer_secret | validation | def consumer_secret(s)
bot.deprecated "Setting consumer_secret outside of your config file is deprecated!", Kernel.caller.first
bot.config[:consumer_secret] = s
end | ruby | {
"resource": ""
} |
q25231 | Chatterbot.DSL.consumer_key | validation | def consumer_key(k)
bot.deprecated "Setting consumer_key outside of your config file is deprecated!", Kernel.caller.first
bot.config[:consumer_key] = k
end | ruby | {
"resource": ""
} |
q25232 | Chatterbot.DSL.secret | validation | def secret(s)
bot.deprecated "Setting access_token_secret outside of your config file is deprecated!", Kernel.caller.first
bot.config[:access_token_secret] = s
end | ruby | {
"resource": ""
} |
q25233 | Chatterbot.DSL.token | validation | def token(s)
bot.deprecated "Setting access_token outside of your config file is deprecated!", Kernel.caller.first
bot.config[:access_token] = s
end | ruby | {
"resource": ""
} |
q25234 | Chatterbot.DSL.flatten_list_of_strings | validation | def flatten_list_of_strings(args)
args.collect do |b|
if b.is_a?(String)
# string, split on commas and turn into array
b.split(",").collect { |s| s.strip }
else
# presumably an array
b
end
end.flatten
end | ruby | {
"resource": ""
} |
q25235 | Chatterbot.Favorite.favorite | validation | def favorite(id=@current_tweet)
return if require_login == false
id = id_from_tweet(id)
#:nocov:
if debug_mode?
debug "I'm in debug mode, otherwise I would favorite tweet id: #{id}"
return
end
#:nocov:
client.favorite id
end | ruby | {
"resource": ""
} |
q25236 | Chatterbot.HomeTimeline.home_timeline | validation | def home_timeline(*args, &block)
return unless require_login
debug "check for home_timeline tweets since #{since_id_home_timeline}"
opts = {
:since_id => since_id_home_timeline,
:count => 200
}
results = client.home_timeline(opts)
@current_tweet = nil
results.each { |s|
update_since_id_home_timeline(s)
if block_given? && valid_tweet?(s)
@current_tweet = s
yield s
end
}
@current_tweet = nil
end | ruby | {
"resource": ""
} |
q25237 | Chatterbot.Safelist.on_safelist? | validation | def on_safelist?(s)
search = from_user(s).downcase
safelist.any? { |b| search.include?(b.downcase) }
end | ruby | {
"resource": ""
} |
q25238 | Chatterbot.Search.search | validation | def search(queries, opts = {}, &block)
debug "check for tweets since #{since_id}"
max_tweets = opts.delete(:limit) || MAX_SEARCH_TWEETS
exact_match = if opts.key?(:exact)
opts.delete(:exact)
else
true
end
if queries.is_a?(String)
queries = [
queries
]
end
query = queries.map { |q|
if exact_match == true
q = wrap_search_query(q)
end
q
}.join(" OR ")
#
# search twitter
#
debug "search: #{query} #{default_opts.merge(opts)}"
@current_tweet = nil
client.search( query, default_opts.merge(opts) ).take(max_tweets).each { |s|
update_since_id(s)
debug s.text
if block_given? && valid_tweet?(s)
@current_tweet = s
yield s
end
}
@current_tweet = nil
end | ruby | {
"resource": ""
} |
q25239 | Chatterbot.Tweet.tweet | validation | def tweet(txt, params = {}, original = nil)
return if require_login == false
txt = replace_variables(txt, original)
if debug_mode?
debug "I'm in debug mode, otherwise I would tweet: #{txt}"
else
debug txt
if params.has_key?(:media)
file = params.delete(:media)
if ! file.is_a?(File)
file = File.new(file)
end
client.update_with_media txt, file, params
else
client.update txt, params
end
end
rescue Twitter::Error::Forbidden => e
#:nocov:
debug e
false
#:nocov:
end | ruby | {
"resource": ""
} |
q25240 | Chatterbot.Tweet.reply | validation | def reply(txt, source, params = {})
debug txt
params = {:in_reply_to_status_id => source.id}.merge(params)
tweet txt, params, source
end | ruby | {
"resource": ""
} |
q25241 | Chatterbot.DirectMessages.direct_message | validation | def direct_message(txt, user=nil)
return unless require_login
if user.nil?
user = current_user
end
client.create_direct_message(user, txt)
end | ruby | {
"resource": ""
} |
q25242 | Chatterbot.DirectMessages.direct_messages | validation | def direct_messages(opts = {}, &block)
return unless require_login
debug "check for DMs since #{since_id_dm}"
#
# search twitter
#
@current_tweet = nil
client.direct_messages_received(since_id:since_id_dm, count:200).each { |s|
update_since_id_dm(s)
debug s.text
if has_safelist? && !on_safelist?(s.sender)
debug "skipping because user not on safelist"
elsif block_given? && !on_blocklist?(s.sender) && !skip_me?(s)
@current_tweet = s
yield s
end
}
@current_tweet = nil
rescue Twitter::Error::Forbidden => e
puts "sorry, looks like we're not allowed to check DMs for this account"
end | ruby | {
"resource": ""
} |
q25243 | Chatterbot.UI.get_oauth_verifier | validation | def get_oauth_verifier
green "****************************************"
green "****************************************"
green "**** BOT AUTH TIME! ****"
green "****************************************"
green "****************************************"
puts "You need to authorize your bot with Twitter.\n\nPlease login to Twitter under the bot's account. When you're ready, hit Enter.\n\nYour browser will open with the following URL, where you can authorize the bot.\n\n"
url = request_token.authorize_url
puts url
puts "\nIf that doesn't work, you can open the URL in your browser manually."
puts "\n\nHit enter to start.\n\n"
STDIN.readline.chomp
Launchy.open(url)
# sleep here so that if launchy has any output (which it does
# sometimes), it doesn't interfere with our input prompt
sleep(2)
puts "Paste your PIN and hit enter when you have completed authorization.\n\n"
print "> "
STDIN.readline.chomp.strip
rescue Interrupt => e
exit
end | ruby | {
"resource": ""
} |
q25244 | Chatterbot.UI.get_api_key | validation | def get_api_key
green "****************************************"
green "****************************************"
green "**** API SETUP TIME! ****"
green "****************************************"
green "****************************************"
puts "\n\nWelcome to Chatterbot. Let's walk through the steps to get a bot running.\n\n"
#
# At this point, we don't have any API credentials at all for
# this bot, but it's possible the user has already setup an app.
# Let's ask!
#
puts "Hey, looks like you need to get an API key from Twitter before you can get started.\n\n"
app_already_exists = ask_yes_no("Have you already set up an app with Twitter?")
if app_already_exists
puts "Terrific! Let's get your bot running!\n\n"
else
puts "OK, I can help with that!\n\n"
send_to_app_creation
end
print "\n\nPaste the 'Consumer Key' here: "
STDOUT.flush
config[:consumer_key] = STDIN.readline.chomp.strip
print "Paste the 'Consumer Secret' here: "
STDOUT.flush
config[:consumer_secret] = STDIN.readline.chomp.strip
puts "\n\nNow it's time to authorize your bot!\n\n"
if ! app_already_exists && ask_yes_no("Do you want to authorize a bot using the account that created the app?")
puts "OK, on the app page, you can click the 'Create my access token' button to proceed.\n\n"
print "Paste the 'Access Token' here: "
STDOUT.flush
config[:access_token] = STDIN.readline.chomp.strip
print "\n\nPaste the 'Access Token Secret' here: "
STDOUT.flush
config[:access_token_secret] = STDIN.readline.chomp.strip
# reset the client so we can re-init with new OAuth credentials
reset_client
# at this point we should have a fully validated client, so grab
# the screen name
@screen_name = client.user.screen_name rescue nil
else
reset_client
end
#
# capture ctrl-c and exit without a stack trace
#
rescue Interrupt => e
exit
end | ruby | {
"resource": ""
} |
q25245 | Chatterbot.Config.max_id_from | validation | def max_id_from(s)
if ! s.respond_to?(:max)
if s.respond_to?(:id)
return s.id
else
return s
end
end
sorted = s.max { |a, b| a.id.to_i <=> b.id.to_i }
sorted && sorted.id
end | ruby | {
"resource": ""
} |
q25246 | Chatterbot.Config.slurp_file | validation | def slurp_file(f)
f = File.expand_path(f)
tmp = {}
if File.exist?(f)
File.open( f ) { |yf|
tmp = YAML::load( yf )
}
end
tmp.symbolize_keys! unless tmp == false
end | ruby | {
"resource": ""
} |
q25247 | Chatterbot.Config.global_config | validation | def global_config
tmp = {}
global_config_files.each { |f|
tmp.merge!(slurp_file(f) || {})
}
tmp
end | ruby | {
"resource": ""
} |
q25248 | Chatterbot.Config.bot_config | validation | def bot_config
{
:consumer_key => ENV["chatterbot_consumer_key"],
:consumer_secret => ENV["chatterbot_consumer_secret"],
:access_token => ENV["chatterbot_access_token"],
:access_token_secret => ENV["chatterbot_access_secret"] || ENV["chatterbot_access_token_secret"]
}.delete_if { |k, v| v.nil? }.merge(slurp_file(config_file) || {})
end | ruby | {
"resource": ""
} |
q25249 | Chatterbot.Config.load_config | validation | def load_config(params={})
read_only_data = global_config.merge(bot_config).merge(params)
@config = Chatterbot::ConfigManager.new(config_file, read_only_data)
end | ruby | {
"resource": ""
} |
q25250 | Chatterbot.Retweet.retweet | validation | def retweet(id=@current_tweet)
return if require_login == false || id.nil?
id = id_from_tweet(id)
#:nocov:
if debug_mode?
debug "I'm in debug mode, otherwise I would retweet with tweet id: #{id}"
return
end
#:nocov:
client.retweet id
end | ruby | {
"resource": ""
} |
q25251 | Chatterbot.Bot.run! | validation | def run!
before_run
HANDLER_CALLS.each { |c|
if (h = @handlers[c])
send(c, *(h.opts)) do |obj|
h.call(obj)
end
end
}
after_run
end | ruby | {
"resource": ""
} |
q25252 | Chatterbot.Reply.replies | validation | def replies(*args, &block)
return unless require_login
debug "check for replies since #{since_id_reply}"
opts = {
:since_id => since_id_reply,
:count => 200
}
results = client.mentions_timeline(opts)
@current_tweet = nil
results.each { |s|
update_since_id_reply(s)
if block_given? && valid_tweet?(s)
@current_tweet = s
yield s
end
}
@current_tweet = nil
end | ruby | {
"resource": ""
} |
q25253 | Chatterbot.Blocklist.skip_me? | validation | def skip_me?(s)
search = s.respond_to?(:text) ? s.text : s
exclude.detect { |e| search.downcase.include?(e) } != nil
end | ruby | {
"resource": ""
} |
q25254 | Chatterbot.Blocklist.on_blocklist? | validation | def on_blocklist?(s)
search = if s.is_a?(Twitter::User)
s.name
elsif s.respond_to?(:user) && !s.is_a?(Twitter::NullObject)
from_user(s)
else
s
end.downcase
blocklist.any? { |b| search.include?(b.downcase) }
end | ruby | {
"resource": ""
} |
q25255 | Chatterbot.Client.reset_since_id | validation | def reset_since_id
config[:since_id] = 1
# do a search of recent tweets with the letter 'a' in them to
# get a rough max tweet id
result = client.search("a", since:Time.now - 10).max_by(&:id)
update_since_id(result)
end | ruby | {
"resource": ""
} |
q25256 | Chatterbot.Client.generate_authorize_url | validation | def generate_authorize_url(request_token)
request = consumer.create_signed_request(:get,
consumer.authorize_path, request_token,
{:oauth_callback => 'oob'})
params = request['Authorization'].sub(/^OAuth\s+/, '').split(/,\s+/).map do |param|
key, value = param.split('=')
value =~ /"(.*?)"/
"#{key}=#{CGI::escape($1)}"
end.join('&')
"#{base_url}#{request.path}?#{params}"
end | ruby | {
"resource": ""
} |
q25257 | Chatterbot.Client.get_screen_name | validation | def get_screen_name(t = @access_token)
return unless @screen_name.nil?
return if t.nil?
oauth_response = t.get('/1.1/account/verify_credentials.json')
@screen_name = JSON.parse(oauth_response.body)["screen_name"]
end | ruby | {
"resource": ""
} |
q25258 | Chatterbot.Client.login | validation | def login(do_update_config=true)
if needs_api_key?
get_api_key
end
if needs_auth_token?
pin = get_oauth_verifier
return false if pin.nil?
begin
# this will throw an error that we can try and catch
@access_token = request_token.get_access_token(:oauth_verifier => pin.chomp)
get_screen_name
self.config[:access_token] = @access_token.token
self.config[:access_token_secret] = @access_token.secret
#update_config unless ! do_update_config
reset_client
rescue OAuth::Unauthorized => e
display_oauth_error
warn e.inspect
return false
end
end
return true
end | ruby | {
"resource": ""
} |
q25259 | Voom.ContainerMethods.reset! | validation | def reset!
registered_keys.each { |key| ClassConstants.new(key).deconstantize }
@registered_keys = []
container._container.clear
end | ruby | {
"resource": ""
} |
q25260 | Voom.Symbol.class_name | validation | def class_name(classname)
classname = sym_to_str(classname)
classname.split('.').map { |m| inflector.camelize(m) }.join('::')
end | ruby | {
"resource": ""
} |
q25261 | Zlib.ZWriter.close | validation | def close
flush()
@deflate_buffer << @deflater.finish unless @deflater.finished?
begin
until @deflate_buffer.empty? do
@deflate_buffer.slice!(0, delegate.write(@deflate_buffer))
end
rescue Errno::EAGAIN, Errno::EINTR
retry if write_ready?
end
@checksum = @deflater.adler
@compressed_size = @deflater.total_out
@uncompressed_size = @deflater.total_in
@deflater.close
super()
nil
end | ruby | {
"resource": ""
} |
q25262 | Archive.DOSTime.to_time | validation | def to_time
second = ((0b11111 & @dos_time) ) * 2
minute = ((0b111111 << 5 & @dos_time) >> 5)
hour = ((0b11111 << 11 & @dos_time) >> 11)
day = ((0b11111 << 16 & @dos_time) >> 16)
month = ((0b1111 << 21 & @dos_time) >> 21)
year = ((0b1111111 << 25 & @dos_time) >> 25) + 1980
return Time.local(year, month, day, hour, minute, second)
end | ruby | {
"resource": ""
} |
q25263 | Archive.Zip.each | validation | def each(&b)
raise IOError, 'non-readable archive' unless readable?
raise IOError, 'closed archive' if closed?
unless @parse_complete then
parse(@archive)
@parse_complete = true
end
@entries.each(&b)
end | ruby | {
"resource": ""
} |
q25264 | Archive.Zip.add_entry | validation | def add_entry(entry)
raise IOError, 'non-writable archive' unless writable?
raise IOError, 'closed archive' if closed?
unless entry.kind_of?(Entry) then
raise ArgumentError, 'Archive::Zip::Entry instance required'
end
@entries << entry
self
end | ruby | {
"resource": ""
} |
q25265 | Archive.Zip.extract | validation | def extract(destination, options = {})
raise IOError, 'non-readable archive' unless readable?
raise IOError, 'closed archive' if closed?
# Ensure that unspecified options have default values.
options[:directories] = true unless options.has_key?(:directories)
options[:symlinks] = false unless options.has_key?(:symlinks)
options[:overwrite] = :all unless options[:overwrite] == :older ||
options[:overwrite] == :never
options[:create] = true unless options.has_key?(:create)
options[:flatten] = false unless options.has_key?(:flatten)
# Flattening the archive structure implies that directory entries are
# skipped.
options[:directories] = false if options[:flatten]
# First extract all non-directory entries.
directories = []
each do |entry|
# Compute the target file path.
file_path = entry.zip_path
file_path = File.basename(file_path) if options[:flatten]
file_path = File.join(destination, file_path)
# Cache some information about the file path.
file_exists = File.exist?(file_path)
file_mtime = File.mtime(file_path) if file_exists
begin
# Skip this entry if so directed.
if (! file_exists && ! options[:create]) ||
(file_exists &&
(options[:overwrite] == :never ||
options[:overwrite] == :older && entry.mtime <= file_mtime)) ||
(! options[:exclude].nil? && options[:exclude][entry]) then
next
end
# Set the decryption key for the entry.
if options[:password].kind_of?(String) then
entry.password = options[:password]
elsif ! options[:password].nil? then
entry.password = options[:password][entry]
end
if entry.directory? then
# Record the directories as they are encountered.
directories << entry
elsif entry.file? || (entry.symlink? && options[:symlinks]) then
# Extract files and symlinks.
entry.extract(
options.merge(:file_path => file_path)
)
end
rescue StandardError => error
unless options[:on_error].nil? then
case options[:on_error][entry, error]
when :retry
retry
when :skip
else
raise
end
else
raise
end
end
end
if options[:directories] then
# Then extract the directory entries in depth first order so that time
# stamps, ownerships, and permissions can be properly restored.
directories.sort { |a, b| b.zip_path <=> a.zip_path }.each do |entry|
begin
entry.extract(
options.merge(
:file_path => File.join(destination, entry.zip_path)
)
)
rescue StandardError => error
unless options[:on_error].nil? then
case options[:on_error][entry, error]
when :retry
retry
when :skip
else
raise
end
else
raise
end
end
end
end
nil
end | ruby | {
"resource": ""
} |
q25266 | Archive.Zip.find_central_directory | validation | def find_central_directory(io)
# First find the offset to the end of central directory record.
# It is expected that the variable length comment field will usually be
# empty and as a result the initial value of eocd_offset is all that is
# necessary.
#
# NOTE: A cleverly crafted comment could throw this thing off if the
# comment itself looks like a valid end of central directory record.
eocd_offset = -22
loop do
io.seek(eocd_offset, IO::SEEK_END)
if IOExtensions.read_exactly(io, 4) == EOCD_SIGNATURE then
io.seek(16, IO::SEEK_CUR)
if IOExtensions.read_exactly(io, 2).unpack('v')[0] ==
(eocd_offset + 22).abs then
break
end
end
eocd_offset -= 1
end
# At this point, eocd_offset should point to the location of the end of
# central directory record relative to the end of the archive.
# Now, jump into the location in the record which contains a pointer to
# the start of the central directory record and return the value.
io.seek(eocd_offset + 16, IO::SEEK_END)
return IOExtensions.read_exactly(io, 4).unpack('V')[0]
rescue Errno::EINVAL
raise Zip::UnzipError, 'unable to locate end-of-central-directory record'
end | ruby | {
"resource": ""
} |
q25267 | Archive.Zip.dump | validation | def dump(io)
bytes_written = 0
@entries.each do |entry|
bytes_written += entry.dump_local_file_record(io, bytes_written)
end
central_directory_offset = bytes_written
@entries.each do |entry|
bytes_written += entry.dump_central_file_record(io)
end
central_directory_length = bytes_written - central_directory_offset
bytes_written += io.write(EOCD_SIGNATURE)
bytes_written += io.write(
[
0,
0,
@entries.length,
@entries.length,
central_directory_length,
central_directory_offset,
comment.bytesize
].pack('vvvvVVv')
)
bytes_written += io.write(comment)
bytes_written
end | ruby | {
"resource": ""
} |
q25268 | Effective.Datatable.view= | validation | def view=(view)
@view = (view.respond_to?(:view_context) ? view.view_context : view)
raise 'expected view to respond to params' unless @view.respond_to?(:params)
load_cookie!
assert_cookie!
load_attributes!
# We need early access to filter and scope, to define defaults from the model first
# This means filters do knows about attributes but not about columns.
initialize_filters if respond_to?(:initialize_filters)
load_filters!
load_state!
# Bulk actions called first so it can add the bulk_actions_col first
initialize_bulk_actions if respond_to?(:initialize_bulk_actions)
# Now we initialize all the columns. columns knows about attributes and filters and scope
initialize_datatable if respond_to?(:initialize_datatable)
load_columns!
# Execute any additional DSL methods
initialize_charts if respond_to?(:initialize_charts)
# Load the collection. This is the first time def collection is called on the Datatable itself
initialize_collection if respond_to?(:initialize_collection)
load_collection!
# Figure out the class, and if it's activerecord, do all the resource discovery on it
load_resource!
apply_belongs_to_attributes!
load_resource_search!
# Check everything is okay
validate_datatable!
# Save for next time
save_cookie!
end | ruby | {
"resource": ""
} |
q25269 | Effective.DatatablesController.show | validation | def show
# AJAX endpoint for server-side DataTables requests: looks up the datatable
# by its id param, authorizes :index on its collection class and renders the
# table payload as JSON.
begin
@datatable = EffectiveDatatables.find(params[:id])
@datatable.view = view_context
EffectiveDatatables.authorize!(self, :index, @datatable.collection_class)
render json: @datatable.to_json
rescue => e
# On failure render an error payload instead. The authorized? return value
# is discarded — NOTE(review): presumably this call exists so authorization
# failures surface consistently; confirm against the EffectiveDatatables API.
EffectiveDatatables.authorized?(self, :index, @datatable.try(:collection_class))
render json: error_json(e)
# Notify if the exception_notification gem is loaded; re-raise locally so
# developers see the original error.
ExceptionNotifier.notify_exception(e) if defined?(ExceptionNotifier)
raise e if Rails.env.development?
end
end | ruby | {
"resource": ""
} |
# Registers a render engine for a file extension.
#
# The handler (an explicit callable or the given block) is stored under the
# String form of +extention+ and returned to the caller.
#
# Raises RuntimeError when neither a handler nor a block is supplied.
def register(extention, handler = nil, &block)
  callback = handler || block
  raise 'Handler or block required.' unless callback
  @render_library[extention.to_s] = callback
  callback
end
"resource": ""
} |
# Resolves the effective HTTP verb for this request — honoring a `_method`
# param override, falling back to the actual request method — caches it back
# into params as a Symbol, and asks the controller class which handler
# method should serve it.
def requested_method
  override = params['_method'.freeze] || request.request_method.downcase
  params['_method'.freeze] = override.to_sym
  self.class._pl_params2method(params, request.env)
end
"resource": ""
} |
# Streams +data+ to the client as a download (or inline content).
#
# Options: :filename (also used to derive the mime type), :inline (use an
# inline content-disposition instead of attachment) and :mime (explicit
# content-type; cached back into the options hash). Returns true.
def send_data(data, options = {})
  response.write(data) if data

  filename = options[:filename]

  # Build the content-disposition header.
  disposition = options[:inline] ? 'inline'.dup : 'attachment'.dup
  disposition << "; filename=#{::File.basename(filename)}" if filename

  # Derive the content-type from the extension when not given explicitly.
  mime_type = (options[:mime] ||= filename && Rack::Mime.mime_type(::File.extname(filename)))
  response['content-type'.freeze] = mime_type if mime_type
  response['content-disposition'.freeze] = disposition
  true
end
"resource": ""
} |
# Computes a feedback edge set: the edges whose removal leaves the graph
# acyclic. Starting from the mandatory +fixed_edges+, each remaining edge is
# admitted only if it does not close a cycle; the offenders are collected
# and returned.
def build_feedback_edge_set(edges, fixed_edges)
  pending = edges.dup
  acyclic_edges = fixed_edges.dup
  feedback_edge_set = []
  while pending.present?
    candidate = pending.shift
    if detect_cycle(candidate, acyclic_edges)
      feedback_edge_set << candidate
    else
      acyclic_edges << candidate
    end
  end
  feedback_edge_set
end
"resource": ""
} |
# Tests whether adding +edge+ to +acyclic_edges+ would introduce a cycle by
# walking the graph from the edge's target node through all of its
# dependencies. With +escalation+ == :exception the traversal raises
# instead of returning true.
def detect_cycle(edge, acyclic_edges, escalation = nil)
  origin = edge.second
  candidate_edges = [edge] + acyclic_edges
  traverse_dependecies([], origin, origin, candidate_edges,
                       node_edges(candidate_edges, origin), escalation)
end
"resource": ""
} |
q25275 | InventoryRefresh.Graph.traverse_dependecies | validation | def traverse_dependecies(traversed_nodes, starting_node, current_node, edges, dependencies, escalation)
# Depth-first walk over +dependencies+ (edges adjacent to +current_node+;
# each edge's first element is the node visited next). Returns true as soon
# as the walk reaches +starting_node+ again — i.e. a cycle exists — or, with
# escalation == :exception, raises a descriptive error instead.
# +traversed_nodes+ accumulates every visited node across the recursion (it
# is mutated, not copied).
dependencies.each do |node_edge|
node = node_edge.first
traversed_nodes << node
# NOTE(review): membership is checked via the accumulated list, which only
# contains +starting_node+ once some visited node equals it; an O(1)
# equality check on +node+ would presumably behave the same — confirm.
if traversed_nodes.include?(starting_node)
if escalation == :exception
raise "Cycle from #{current_node} to #{node}, starting from #{starting_node} passing #{traversed_nodes}"
else
return true
end
end
# Recurse into the edges adjacent to this node.
return true if traverse_dependecies(traversed_nodes, starting_node, node, edges, node_edges(edges, node), escalation)
end
false
end | ruby | {
"resource": ""
} |
# Yields the attributes index in batches, each wrapped by the inventory
# collection into a relation suitable for comparison against the database.
#
# @param batch_size [Integer] maximum number of index entries per batch
# @param attributes_index [Hash] index of attributes to iterate over
def find_in_batches(batch_size: 1000, attributes_index: {})
  attributes_index.each_slice(batch_size) do |slice|
    yield inventory_collection.db_collection_for_comparison_for(slice)
  end
end
"resource": ""
} |
q25277 | InventoryRefresh.InventoryObject.assign_attributes | validation | def assign_attributes(attributes)
# Merges +attributes+ into this inventory object. When both the stored data
# and the incoming attributes carry a row version (resource_timestamp or
# resource_counter), each value is assigned only if newer (see
# #assign_only_newest); otherwise values are assigned unconditionally.
# Returns self.
attributes.each do |k, v|
# We don't want timestamps or resource versions to be overwritten here, since those are driving the conditions
next if %i(resource_timestamps resource_timestamps_max resource_timestamp).include?(k)
next if %i(resource_counters resource_counters_max resource_counter).include?(k)
if data[:resource_timestamp] && attributes[:resource_timestamp]
assign_only_newest(:resource_timestamp, :resource_timestamps, attributes, data, k, v)
elsif data[:resource_counter] && attributes[:resource_counter]
assign_only_newest(:resource_counter, :resource_counters, attributes, data, k, v)
else
public_send("#{k}=", v)
end
end
# Finally keep the newest full-row version on the stored data.
if attributes[:resource_timestamp]
assign_full_row_version_attr(:resource_timestamp, attributes, data)
elsif attributes[:resource_counter]
assign_full_row_version_attr(:resource_counter, attributes, data)
end
self
end | ruby | {
"resource": ""
} |
# Assigns attribute +k+ = +v+ only when it is newer than what is already
# stored, using per-attribute (partial) versions when available and falling
# back to the full-row version otherwise.
#
# Fix: a stray trailing `true` previously made the "data has the attribute
# but no partial version" branch unconditionally truthy, so the full-row
# version comparison documented below was dead code and stale values could
# overwrite newer ones.
#
# @param full_row_version_attr [Symbol] :resource_timestamp or :resource_counter
# @param partial_row_version_attr [Symbol] :resource_timestamps or :resource_counters
# @param attributes [Hash] incoming attributes
# @param data [Hash] currently stored attributes (mutated to record versions)
# @param k [Symbol] attribute name
# @param v [Object] attribute value
def assign_only_newest(full_row_version_attr, partial_row_version_attr, attributes, data, k, v)
  specific_attr_version = attributes[partial_row_version_attr].try(:[], k)
  specific_data_version = data[partial_row_version_attr].try(:[], k)

  assign =
    if !specific_attr_version
      # Incoming data has no per-attribute version, ignore the check.
      true
    elsif !specific_data_version
      # We have a new per-attribute version but none stored to compare with.
      if data.key?(k)
        # The attribute is already stored: accept the incoming value only
        # when its full-row version is at least as new as the stored one.
        attributes[full_row_version_attr] >= data[full_row_version_attr]
      else
        # The attribute is not stored yet, so the incoming value is newest.
        true
      end
    else
      # Both per-attribute versions present: the newer one must be bigger.
      specific_attr_version > specific_data_version
    end

  if assign
    public_send("#{k}=", v) # Attribute is newer than the current one, use it
    # ... and remember the per-attribute version we just accepted.
    (data[partial_row_version_attr] ||= {})[k] = specific_attr_version if specific_attr_version
  end
end
"resource": ""
} |
# Stores the newer of the incoming and stored full-row versions
# (resource_timestamp or resource_counter) into +data+. Missing stored
# versions are simply filled in; a missing incoming version is a no-op.
def assign_full_row_version_attr(full_row_version_attr, attributes, data)
  incoming = attributes[full_row_version_attr]
  return unless incoming
  current = data[full_row_version_attr]
  data[full_row_version_attr] = incoming if !current || incoming > current
end
"resource": ""
} |
q25280 | InventoryRefresh.InventoryCollection.uniq_keys_candidates | validation | def uniq_keys_candidates(keys)
# Returns every unique DB index whose columns are fully covered by +keys+.
# Raises when none exists, because the :concurrent_safe_batch saver relies
# on a covering unique index for its upserts.
# Find all uniq indexes that are covering our keys
uniq_key_candidates = unique_indexes.each_with_object([]) { |i, obj| obj << i if (keys - i.columns.map(&:to_sym)).empty? }
if unique_indexes.blank? || uniq_key_candidates.blank?
raise "#{self} and its table #{model_class.table_name} must have a unique index defined "\
"covering columns #{keys} to be able to use saver_strategy :concurrent_safe_batch."
end
uniq_key_candidates
end | ruby | {
"resource": ""
} |
# Returns dependency_attributes filtered through the configured blacklist
# and whitelist: blacklisted keys are removed first, then — when a whitelist
# is present — only whitelisted keys survive.
def filtered_dependency_attributes
  filtered = dependency_attributes
  filtered = filtered.reject { |key, _| attributes_blacklist.include?(key) } if attributes_blacklist.present?
  filtered = filtered.select { |key, _| attributes_whitelist.include?(key) } if attributes_whitelist.present?
  filtered
end
"resource": ""
} |
# Returns the attributes that must always be present on the entity: the
# columns forming the unique index (manager_ref) plus every attribute backed
# by an ActiveRecord presence validation.
#
# Fix: `validators.detect` returned only the *first* presence validator, so
# attributes from any additional presence validators were silently dropped;
# `select` considers them all.
#
# @return [Array<Symbol>] attribute names
def fixed_attributes
  presence_validators =
    if model_class
      model_class.validators.select { |validator| validator.kind_of?(ActiveRecord::Validations::PresenceValidator) }
    else
      []
    end

  fixed = manager_ref
  presence_validators.each { |validator| fixed += validator.attributes }
  fixed
end
"resource": ""
} |
q25283 | InventoryRefresh.InventoryCollection.fixed_dependencies | validation | def fixed_dependencies
# Returns the not-yet-saved dependencies reachable through fixed attributes
# (attributes that must always be present, see #fixed_attributes).
fixed_attrs = fixed_attributes
# Note: Enumerable#reject on the Set yields an Array of unsaved deps.
filtered_dependency_attributes.each_with_object(Set.new) do |(key, value), fixed_deps|
fixed_deps.merge(value) if fixed_attrs.include?(key)
end.reject(&:saved?)
end | ruby | {
"resource": ""
} |
# Returns the set of attribute names through which this collection depends
# on any of the given inventory collections.
#
# @return [Set<Symbol>]
def dependency_attributes_for(inventory_collections)
  inventory_collections.each_with_object(Set.new) do |collection, attributes|
    filtered_dependency_attributes.each do |key, refs|
      attributes << key if refs.include?(collection)
    end
  end
end
"resource": ""
} |
# Maps a single record or a collection of records to their identities.
def records_identities(records)
  collection = records.respond_to?(:map) ? records : [records]
  collection.map { |record| record_identity(record) }
end
"resource": ""
} |
# Returns the unique identity of a record as { :id => ... }.
#
# Works with hashes keyed by :id or "id" as well as AR-like objects that
# respond to #id. Raises when no identity can be derived.
def record_identity(record)
  identity = record.try(:[], :id)
  identity ||= record.try(:[], "id")
  identity ||= record.try(:id)
  raise "Cannot obtain identity of the #{record}" if identity.blank?
  { :id => identity }
end
"resource": ""
} |
q25287 | ActiveadminSettingsCached.DSL.active_admin_settings_page | validation | def active_admin_settings_page(options = {}, &block)
# ActiveAdmin DSL entry point that renders a settings page and registers the
# POST :update page action persisting the submitted settings.
options.assert_valid_keys(*ActiveadminSettingsCached::Options::VALID_OPTIONS)
options = ActiveadminSettingsCached::Options.options_for(options)
# Coercions cast incoming params back to the types declared by the defaults.
coercion =
ActiveadminSettingsCached::Coercions.new(options[:template_object].defaults, options[:template_object].display)
# Page body: render the settings form partial.
content title: options[:title] do
render partial: options[:template], locals: { settings_model: options[:template_object] }
end
# POST handler: cast and save each submitted setting, then redirect back.
page_action :update, method: :post do
settings_params = params.require(:settings).permit!
coercion.cast_params(settings_params) do |name, value|
options[:template_object].save(name, value)
end
flash[:success] = t('activeadmin_settings_cached.settings.update.success'.freeze)
Rails.version.to_i >= 5 ? redirect_back(fallback_location: admin_root_path) : redirect_to(:back)
# NOTE(review): the after_save hook runs after the redirect is prepared,
# not before — confirm this ordering is intended.
options[:after_save].call if options[:after_save].respond_to?(:call)
end
# Allow callers to extend the page with additional DSL.
instance_eval(&block) if block_given?
end | ruby | {
"resource": ""
} |
# Replaces a failed value: a Failure becomes Failure(value), or — when a
# block is given — Failure(block result of the wrapped value). Successful
# values pass through untouched.
def or(value = nil, &block)
  return self unless failure?
  return Failure(block.call(@value)) if block_given?
  Failure(value)
end
"resource": ""
} |
# Emits a C function declaration: the prototype followed by ";" and a
# newline, appended to the code buffer.
def write_func_declaration type:, c_name:, args: [], static: true
  write_func_prototype(type, c_name, args, static: static)
  @code << ";"
  new_line
end
"resource": ""
} |
# Generates benchmark input sizes from +start+ to +limit+, multiplying by
# +ratio+ at each step; both endpoints are always included.
#
# @raise [ArgumentError] when start < 1, limit <= start, or ratio <= 2
def range(start, limit, ratio: 8)
  check_greater(start, 0)
  check_greater(limit, start)
  check_greater(ratio, 2)

  sizes = [start]
  current = start
  (limit / ratio).times do
    current *= ratio
    break if current >= limit
    sizes << current
  end
  sizes << limit unless start == limit
  sizes
end
"resource": ""
} |
# Measures the average execution time of +work+ for each input.
#
# Uses the given +data+ inputs (or a default geometric range), runs the
# block +repeat+ times per input (with a GC pass before each input) and
# averages the timings.
#
# @return [Array(Array, Array)] the inputs and their averaged times
def measure_execution_time(data = nil, repeat: 1, &work)
  inputs = data || range(1, 10_000)
  times = inputs.each_with_index.map do |input, index|
    GC.start
    samples = Array.new(repeat) { clock_time { work.(input, index) } }
    samples.inject(:+).to_f / samples.size
  end
  [inputs, times]
end
"resource": ""
} |
# Finds a logarithmic fit y = slope * ln(x) + intercept for the series.
#
# @return [Array(Numeric, Numeric, Numeric)] slope, intercept, r^2
def fit_logarithmic(xs, ys)
  fit(xs, ys, tran_x: ->(value) { Math.log(value) })
end
"resource": ""
} |
# Finds a power-law fit y = b * x**a by fitting a line in log-log space.
#
# @return [Array(Numeric, Numeric, Numeric)] exponent a, coefficient b, r^2
def fit_power(xs, ys)
  slope, intercept, residual_sq = fit(xs, ys,
                                      tran_x: ->(value) { Math.log(value) },
                                      tran_y: ->(value) { Math.log(value) })
  [slope, Math.exp(intercept), residual_sq]
end
"resource": ""
} |
# Finds an exponential fit y = b * a**x by fitting a line in semi-log space.
#
# @return [Array(Numeric, Numeric, Numeric)] base a, coefficient b, r^2
def fit_exponential(xs, ys)
  slope, intercept, residual_sq = fit(xs, ys, tran_y: ->(value) { Math.log(value) })
  [Math.exp(slope), Math.exp(intercept), residual_sq]
end
"resource": ""
} |
# Least-squares fit of a line y' = slope * x' + intercept, where x' and y'
# are the inputs passed through the optional transforms. Non-identity
# transforms let the same routine fit logarithmic, power and exponential
# models.
#
# @return [Array(Numeric, Numeric, Numeric)] slope, intercept, r^2
# @raise [ArgumentError] when the (transformed) xs have no variation
def fit(xs, ys, tran_x: ->(x) { x }, tran_y: ->(y) { y })
  eps = (10 ** -10)
  n = 0
  sum_x = 0.0
  sum_x2 = 0.0
  sum_y = 0.0
  sum_y2 = 0.0
  sum_xy = 0.0
  xs.zip(ys).each do |x, y|
    fx = tran_x.(x)
    fy = tran_y.(y)
    n += 1
    sum_x += fx
    sum_y += fy
    sum_x2 += fx ** 2
    sum_y2 += fy ** 2
    sum_xy += fx * fy
  end

  cov_xy = n * sum_xy - sum_x * sum_y
  var_x  = n * sum_x2 - sum_x ** 2
  var_y  = n * sum_y2 - sum_y ** 2

  # Both transforms are the identity iff this product equals e**2.
  identity_transforms = tran_x.(Math::E) * tran_y.(Math::E) == Math::E ** 2

  if var_x.abs < eps
    # No variation in xs — a vertical line cannot be fitted.
    raise ArgumentError, "No variation in data #{xs}"
  elsif var_y.abs < eps && identity_transforms
    # Constant data: horizontal line through the mean; r^2 is undefined, so
    # report a perfect fit.
    [0, sum_y / n, 1]
  else
    slope = cov_xy / var_x
    intercept = (sum_y - slope * sum_x) / n
    residual_sq = (cov_xy ** 2) / (var_x * var_y)
    [slope, intercept, residual_sq]
  end
end
"resource": ""
} |
# Evaluates a fitted model of the given +type+ at input size +n+.
#
# @param type [Symbol] :logarithmic/:log, :linear, :power, :exponential/:exp
# @raise [ArgumentError] for non-positive +n+ or an unknown fit type
def fit_at(type, slope: nil, intercept: nil, n: nil)
  raise ArgumentError, "Incorrect input size: #{n}" unless n > 0

  evaluators = {
    [:logarithmic, :log] => -> { intercept + slope * Math.log(n) },
    [:linear]            => -> { intercept + slope * n },
    [:power]             => -> { intercept * (n ** slope) },
    [:exponential, :exp] => -> { intercept * (slope ** n) }
  }
  _, evaluator = evaluators.find { |types, _| types.include?(type) }
  raise ArgumentError, "Unknown fit type: #{type}" unless evaluator
  evaluator.call
end
"resource": ""
} |
# Marks the given symbols as exported from the module. Accepts either a
# splat of symbols or a single array of symbols.
def export(*symbols)
  exported = symbols.first.is_a?(Array) ? symbols.first : symbols
  __exported_symbols.concat(exported)
end
"resource": ""
} |
q25298 | Modulation.ModuleMixin.__expose! | validation | def __expose!
# Testing/debugging helper: makes every private singleton method public and
# copies the constants recorded as private onto the module itself. Returns
# self.
singleton = singleton_class
singleton.private_instance_methods.each do |sym|
singleton.send(:public, sym)
end
# Re-register private constants so they resolve publicly on the module.
__module_info[:private_constants].each do |sym|
const_set(sym, singleton.const_get(sym))
end
self
end | ruby | {
"resource": ""
} |
q25299 | Rack.Unreloader.require | validation | def require(paths, &block)
# Requires the given file(s). When a reloader is active, the paths are
# registered with it as tracked dependencies (so they can be re-required on
# change); otherwise each expanded path is handed straight to Kernel#require
# via super. +block+ can map a file to the constants it defines.
if @reloader
@reloader.require_dependencies(paths, &block)
else
Unreloader.expand_directory_paths(paths).each{|f| super(f)}
end
end | ruby | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.