repository_name
stringlengths 7
56
| func_path_in_repository
stringlengths 10
101
| func_name
stringlengths 12
78
| language
stringclasses 1
value | func_code_string
stringlengths 74
11.9k
| func_documentation_string
stringlengths 3
8.03k
| split_name
stringclasses 1
value | func_code_url
stringlengths 98
213
| enclosing_scope
stringlengths 42
98.2k
|
---|---|---|---|---|---|---|---|---|
wied03/opal-factory_girl | opal/opal/active_support/inflector/methods.rb | ActiveSupport.Inflector.titleize | ruby | def titleize(word)
# negative lookbehind doesn't work in Firefox / Safari
# humanize(underscore(word)).gsub(/\b(?<!['’`])[a-z]/) { |match| match.capitalize }
humanized = humanize(underscore(word))
humanized.reverse.gsub(/[a-z](?!['’`])\b/) { |match| match.capitalize }.reverse
end | Capitalizes all the words and replaces some characters in the string to
create a nicer looking title. +titleize+ is meant for creating pretty
output. It is not used in the Rails internals.
+titleize+ is also aliased as +titlecase+.
titleize('man from the boondocks') # => "Man From The Boondocks"
titleize('x-men: the last stand') # => "X Men: The Last Stand"
titleize('TheManWithoutAPast') # => "The Man Without A Past"
titleize('raiders_of_the_lost_ark') # => "Raiders Of The Lost Ark" | train | https://github.com/wied03/opal-factory_girl/blob/697114a8c63f4cba38b84d27d1f7b823c8d0bb38/opal/opal/active_support/inflector/methods.rb#L160-L165 | module Inflector
extend self
# Returns the plural form of the word in the string.
#
# If passed an optional +locale+ parameter, the word will be
# pluralized using rules defined for that language. By default,
# this parameter is set to <tt>:en</tt>.
#
# pluralize('post') # => "posts"
# pluralize('octopus') # => "octopi"
# pluralize('sheep') # => "sheep"
# pluralize('words') # => "words"
# pluralize('CamelOctopus') # => "CamelOctopi"
# pluralize('ley', :es) # => "leyes"
# Applies the plural inflection rules configured for +locale+ to +word+.
def pluralize(word, locale = :en)
  rules = inflections(locale).plurals
  apply_inflections(word, rules)
end
# The reverse of #pluralize, returns the singular form of a word in a
# string.
#
# If passed an optional +locale+ parameter, the word will be
# singularized using rules defined for that language. By default,
# this parameter is set to <tt>:en</tt>.
#
# singularize('posts') # => "post"
# singularize('octopi') # => "octopus"
# singularize('sheep') # => "sheep"
# singularize('word') # => "word"
# singularize('CamelOctopi') # => "CamelOctopus"
# singularize('leyes', :es) # => "ley"
# Applies the singular inflection rules configured for +locale+ to +word+.
def singularize(word, locale = :en)
  rules = inflections(locale).singulars
  apply_inflections(word, rules)
end
# Converts strings to UpperCamelCase.
# If the +uppercase_first_letter+ parameter is set to false, then produces
# lowerCamelCase.
#
# Also converts '/' to '::' which is useful for converting
# paths to namespaces.
#
# camelize('active_model') # => "ActiveModel"
# camelize('active_model', false) # => "activeModel"
# camelize('active_model/errors') # => "ActiveModel::Errors"
# camelize('active_model/errors', false) # => "activeModel::Errors"
#
# As a rule of thumb you can think of +camelize+ as the inverse of
# #underscore, though there are cases where that does not hold:
#
# camelize(underscore('SSLError')) # => "SslError"
# Converts an underscored/slashed term to CamelCase (or lowerCamelCase).
# Honours acronyms registered on +inflections+; '/' becomes '::'.
def camelize(term, uppercase_first_letter = true)
  string = term.to_s
  if uppercase_first_letter
    # Upcase the leading lowercase/digit run, preferring a configured acronym.
    string = string.sub(/^[a-z\d]*/) { |match| inflections.acronyms[match] || match.capitalize }
  else
    # Downcase a leading acronym (or just the first character).
    string = string.sub(/^(?:#{inflections.acronym_regex}(?=\b|[A-Z_])|\w)/) { |match| match.downcase }
  end
  # Capitalize each segment after '_' or '/', again preferring acronyms.
  string.gsub!(/(?:_|(\/))([a-z\d]*)/i) { "#{$1}#{inflections.acronyms[$2] || $2.capitalize}" }
  string.gsub!('/'.freeze, '::'.freeze)
  string
end
# Makes an underscored, lowercase form from the expression in the string.
#
# Changes '::' to '/' to convert namespaces to paths.
#
# underscore('ActiveModel') # => "active_model"
# underscore('ActiveModel::Errors') # => "active_model/errors"
#
# As a rule of thumb you can think of +underscore+ as the inverse of
# #camelize, though there are cases where that does not hold:
#
# camelize(underscore('SSLError')) # => "SslError"
# Makes an underscored, lowercase form of +camel_cased_word+;
# '::' becomes '/'.
def underscore(camel_cased_word)
  # Fast path: no uppercase letters, dashes or namespaces to convert.
  return camel_cased_word unless camel_cased_word =~ /[A-Z-]|::/
  word = camel_cased_word.to_s.gsub('::'.freeze, '/'.freeze)
  # Downcase configured acronyms, inserting '_' when preceded by a letter/digit.
  word.gsub!(/(?:(?<=([A-Za-z\d]))|\b)(#{inflections.acronym_regex})(?=\b|[^a-z])/) { "#{$1 && '_'.freeze }#{$2.downcase}" }
  # Split runs of capitals ("HTMLParser" -> "HTML_Parser") ...
  word.gsub!(/([A-Z\d]+)([A-Z][a-z])/, '\1_\2'.freeze)
  # ... and lower/upper boundaries ("fooBar" -> "foo_Bar").
  word.gsub!(/([a-z\d])([A-Z])/, '\1_\2'.freeze)
  word.tr!("-".freeze, "_".freeze)
  word.downcase!
  word
end
# Tweaks an attribute name for display to end users.
#
# Specifically, performs these transformations:
#
# * Applies human inflection rules to the argument.
# * Deletes leading underscores, if any.
# * Removes a "_id" suffix if present.
# * Replaces underscores with spaces, if any.
# * Downcases all words except acronyms.
# * Capitalizes the first word.
#
# The capitalization of the first word can be turned off by setting the
# +:capitalize+ option to false (default is true).
#
# humanize('employee_salary') # => "Employee salary"
# humanize('author_id') # => "Author"
# humanize('author_id', capitalize: false) # => "author"
# humanize('_id') # => "Id"
#
# If "SSL" was defined to be an acronym:
#
# humanize('ssl_error') # => "SSL error"
#
# Tweaks an attribute name for display: applies human rules, strips a
# leading "_" run and a "_id" suffix, replaces '_' with spaces, keeps
# acronyms, and (optionally) capitalizes the first word.
def humanize(lower_case_and_underscored_word, options = {})
  result = lower_case_and_underscored_word.to_s.dup
  # no humans attr exists on inflections, need to port that over
  # opal - string mutation
  # First matching human rule wins.
  inflections.humans.each { |(rule, replacement)| break if (result = result.sub(rule, replacement)) }
  # opal - \A and \z not supported
  #result = result.sub(/\A_+/, ''.freeze)
  result = result.sub(/^_+/, ''.freeze)
  #result = result.sub(/_id\z/, ''.freeze)
  result = result.sub(/_id$/, ''.freeze)
  result = result.tr('_'.freeze, ' '.freeze)
  # Downcase every word unless it is a registered acronym.
  result = result.gsub(/([a-z\d]*)/i) do |match|
    "#{inflections.acronyms[match] || match.downcase}"
  end
  if options.fetch(:capitalize, true)
    #result = result.sub(/\A\w/) { |match| match.upcase }
    result = result.sub(/^\w/) { |match| match.upcase }
  end
  result
end
# Capitalizes all the words and replaces some characters in the string to
# create a nicer looking title. +titleize+ is meant for creating pretty
# output. It is not used in the Rails internals.
#
# +titleize+ is also aliased as +titlecase+.
#
# titleize('man from the boondocks') # => "Man From The Boondocks"
# titleize('x-men: the last stand') # => "X Men: The Last Stand"
# titleize('TheManWithoutAPast') # => "The Man Without A Past"
# titleize('raiders_of_the_lost_ark') # => "Raiders Of The Lost Ark"
# Creates the name of a table like Rails does for models to table names.
# This method uses the #pluralize method on the last word in the string.
#
# tableize('RawScaledScorer') # => "raw_scaled_scorers"
# tableize('ham_and_egg') # => "ham_and_eggs"
# tableize('fancyCategory') # => "fancy_categories"
# Underscores +class_name+ and then pluralizes the result.
def tableize(class_name)
  underscored = underscore(class_name)
  pluralize(underscored)
end
# Creates a class name from a plural table name like Rails does for table
# names to models. Note that this returns a string and not a Class (To
# convert to an actual class follow +classify+ with #constantize).
#
# classify('ham_and_eggs') # => "HamAndEgg"
# classify('posts') # => "Post"
#
# Singular names are not handled correctly:
#
# classify('calculus') # => "Calculu"
# Turns a (possibly schema-qualified) table name into a class name.
def classify(table_name)
  # strip out any leading schema name — keep only the part after the last '.'
  camelize(singularize(table_name.to_s.sub(/.*\./, ''.freeze)))
end
# Replaces underscores with dashes in the string.
#
# dasherize('puni_puni') # => "puni-puni"
# Replaces every underscore in +word+ with a dash.
#
#   dasherize('puni_puni') # => "puni-puni"
def dasherize(word)
  word.tr('_', '-')
end
# Removes the module part from the expression in the string.
#
# demodulize('ActiveRecord::CoreExtensions::String::Inflections') # => "Inflections"
# demodulize('Inflections') # => "Inflections"
# demodulize('::Inflections') # => "Inflections"
# demodulize('') # => ""
#
# See also #deconstantize.
# Returns the constant name after the final '::' separator, or the whole
# string when there is no namespace.
#
#   demodulize('A::B::C') # => "C"
#   demodulize('C')       # => "C"
def demodulize(path)
  name = path.to_s
  separator = name.rindex('::')
  separator ? name[(separator + 2)..-1] : name
end
# Removes the rightmost segment from the constant expression in the string.
#
# deconstantize('Net::HTTP') # => "Net"
# deconstantize('::Net::HTTP') # => "::Net"
# deconstantize('String') # => ""
# deconstantize('::String') # => ""
# deconstantize('') # => ""
#
# See also #demodulize.
# Removes the rightmost '::Segment' from +path+; returns "" when there is
# no '::' at all.
def deconstantize(path)
  # Everything before the last '::' (implementation based on the one in
  # facets' Module#spacename).
  boundary = path.rindex('::') || 0
  path.to_s[0, boundary]
end
# Creates a foreign key name from a class name.
# +separate_class_name_and_id_with_underscore+ sets whether
# the method should put '_' between the name and 'id'.
#
# foreign_key('Message') # => "message_id"
# foreign_key('Message', false) # => "messageid"
# foreign_key('Admin::Post') # => "post_id"
# Builds a foreign-key column name from +class_name+, optionally without
# the underscore before "id".
def foreign_key(class_name, separate_class_name_and_id_with_underscore = true)
  suffix = separate_class_name_and_id_with_underscore ? "_id" : "id"
  underscore(demodulize(class_name)) + suffix
end
# Tries to find a constant with the name specified in the argument string.
#
# 'Module'.constantize # => Module
# 'Foo::Bar'.constantize # => Foo::Bar
#
# The name is assumed to be the one of a top-level constant, no matter
# whether it starts with "::" or not. No lexical context is taken into
# account:
#
# C = 'outside'
# module M
# C = 'inside'
# C # => 'inside'
# 'C'.constantize # => 'outside', same as ::C
# end
#
# NameError is raised when the name is not in CamelCase or the constant is
# unknown.
# Resolves +camel_cased_word+ to a top-level constant, raising NameError
# for ill-formed names or unknown constants. No lexical context is used.
def constantize(camel_cased_word)
  names = camel_cased_word.split('::'.freeze)
  # Trigger a built-in NameError exception including the ill-formed constant in the message.
  Object.const_get(camel_cased_word) if names.empty?
  # Remove the first blank element in case of '::ClassName' notation.
  names.shift if names.size > 1 && names.first.empty?
  # Walk the segments from Object downward.
  names.inject(Object) do |constant, name|
    if constant == Object
      constant.const_get(name)
    else
      candidate = constant.const_get(name)
      # Accept the candidate when it is defined directly on this constant,
      # or when no top-level constant could have shadowed it.
      next candidate if constant.const_defined?(name, false)
      next candidate unless Object.const_defined?(name)
      # Go down the ancestors to check if it is owned directly. The check
      # stops when we reach Object or the end of ancestors tree.
      constant = constant.ancestors.inject do |const, ancestor|
        break const if ancestor == Object
        break ancestor if ancestor.const_defined?(name, false)
        const
      end
      # owner is in Object, so raise
      constant.const_get(name, false)
    end
  end
end
# Tries to find a constant with the name specified in the argument string.
#
# safe_constantize('Module') # => Module
# safe_constantize('Foo::Bar') # => Foo::Bar
#
# The name is assumed to be the one of a top-level constant, no matter
# whether it starts with "::" or not. No lexical context is taken into
# account:
#
# C = 'outside'
# module M
# C = 'inside'
# C # => 'inside'
# safe_constantize('C') # => 'outside', same as ::C
# end
#
# +nil+ is returned when the name is not in CamelCase or the constant (or
# part of it) is unknown.
#
# safe_constantize('blargle') # => nil
# safe_constantize('UnknownModule') # => nil
# safe_constantize('UnknownModule::Foo::Bar') # => nil
# Like +constantize+ but returns +nil+ when the name is ill-formed or any
# part of the constant is unknown; unrelated NameErrors are re-raised.
def safe_constantize(camel_cased_word)
  constantize(camel_cased_word)
rescue NameError => e
  # Swallow the error only if it refers to (a segment of) the requested name.
  raise if e.name && !(camel_cased_word.to_s.split("::").include?(e.name.to_s) ||
    e.name.to_s == camel_cased_word.to_s)
rescue ArgumentError => e
  raise unless e.message =~ /not missing constant #{const_regexp(camel_cased_word)}\!$/
end
# Returns the suffix that should be added to a number to denote the position
# in an ordered sequence such as 1st, 2nd, 3rd, 4th.
#
# ordinal(1) # => "st"
# ordinal(2) # => "nd"
# ordinal(1002) # => "nd"
# ordinal(1003) # => "rd"
# ordinal(-11) # => "th"
# ordinal(-1021) # => "st"
# Returns the ordinal suffix ("st", "nd", "rd" or "th") for +number+.
# The sign is ignored; 11-13 are special-cased to "th".
def ordinal(number)
  abs_number = number.to_i.abs
  # 11, 12 and 13 (and 111, 212, ...) always take "th".
  return "th" if (11..13).cover?(abs_number % 100)
  case abs_number % 10
  when 1 then "st"
  when 2 then "nd"
  when 3 then "rd"
  else        "th"
  end
end
# Turns a number into an ordinal string used to denote the position in an
# ordered sequence such as 1st, 2nd, 3rd, 4th.
#
# ordinalize(1) # => "1st"
# ordinalize(2) # => "2nd"
# ordinalize(1002) # => "1002nd"
# ordinalize(1003) # => "1003rd"
# ordinalize(-11) # => "-11th"
# ordinalize(-1021) # => "-1021st"
# Appends the ordinal suffix to +number+: ordinalize(1) # => "1st".
def ordinalize(number)
  suffix = ordinal(number)
  "#{number}#{suffix}"
end
private
# Mounts a regular expression, returned as a string to ease interpolation,
# that will match part by part the given constant.
#
# const_regexp("Foo::Bar::Baz") # => "Foo(::Bar(::Baz)?)?"
# const_regexp("::") # => "::"
# Builds a regexp source string that matches the constant part by part:
#   const_regexp("Foo::Bar::Baz") # => "Foo(::Bar(::Baz)?)?"
def const_regexp(camel_cased_word) #:nodoc:
  parts = camel_cased_word.split("::".freeze)
  # NOTE(review): Array#blank? is an ActiveSupport extension, not core Ruby.
  return Regexp.escape(camel_cased_word) if parts.blank?
  last = parts.pop
  # Nest each leading part in an optional group around the accumulator.
  parts.reverse.inject(last) do |acc, part|
    part.empty? ? acc : "#{part}(::#{acc})?"
  end
end
# Applies inflection rules for +singularize+ and +pluralize+.
#
# apply_inflections('post', inflections.plurals) # => "posts"
# apply_inflections('posts', inflections.singulars) # => "post"
# Applies the first matching rule from +rules+ to +word+; uncountable or
# empty words are returned unchanged.
def apply_inflections(word, rules)
  result = word.to_s.dup
  if word.empty? || inflections.uncountables.uncountable?(result)
    result
  else
    # sub! returns nil when the rule did not match, so iteration continues
    # until the first rule that actually rewrites the word.
    rules.each { |(rule, replacement)| break if result.sub!(rule, replacement) }
    result
  end
end
end
|
bitbucket-rest-api/bitbucket | lib/bitbucket_rest_api/teams.rb | BitBucket.Teams.followers | ruby | def followers(team_name)
response = get_request("/2.0/teams/#{team_name.to_s}/followers")
return response["values"] unless block_given?
response["values"].each { |el| yield el }
end | List followers of the provided team
= Examples
bitbucket = BitBucket.new :oauth_token => '...', :oauth_secret => '...'
bitbucket.teams.followers(:team_name_here)
bitbucket.teams.followers(:team_name_here) { |follower| ... } | train | https://github.com/bitbucket-rest-api/bitbucket/blob/e03b6935104d59b3d9a922474c3dc210a5ef76d2/lib/bitbucket_rest_api/teams.rb#L54-L58 | class Teams < API
extend AutoloadHelper
# Forwards the options hash to the API superclass untouched.
def initialize(options = { })
  super(options)
end
# List teams for the authenticated user where the user has the provided role
# Roles are :admin, :contributor, :member
#
# = Examples
# bitbucket = BitBucket.new :oauth_token => '...', :oauth_secret => '...'
# bitbucket.teams.list(:admin)
# bitbucket.teams.list('member')
# bitbucket.teams.list(:contributor) { |team| ... }
# Fetches the teams for the authenticated user filtered by +user_role+.
# Yields each team when a block is given, otherwise returns the array.
def list(user_role)
  response = get_request("/2.0/teams/?role=#{user_role.to_s}")
  teams = response["values"]
  return teams unless block_given?
  teams.each { |team| yield team }
end
alias :all :list
# Return the profile for the provided team
#
# = Example
# bitbucket = BitBucket.new :oauth_token => '...', :oauth_secret => '...'
# bitbucket.teams.profile(:team_name_here)
# Returns the raw profile response for +team_name+.
def profile(team_name)
  get_request("/2.0/teams/#{team_name.to_s}")
end
# List members of the provided team
#
# = Examples
# bitbucket = BitBucket.new :oauth_token => '...', :oauth_secret => '...'
# bitbucket.teams.members(:team_name_here)
# bitbucket.teams.members(:team_name_here) { |member| ... }
# Lists members of +team_name+; yields each member when a block is given,
# otherwise returns the "values" array from the API response.
def members(team_name)
  response = get_request("/2.0/teams/#{team_name.to_s}/members")
  return response["values"] unless block_given?
  response["values"].each { |el| yield el }
end
# List followers of the provided team
#
# = Examples
# bitbucket = BitBucket.new :oauth_token => '...', :oauth_secret => '...'
# bitbucket.teams.followers(:team_name_here)
# bitbucket.teams.followers(:team_name_here) { |follower| ... }
# List accounts following the provided team
#
# = Examples
# bitbucket = BitBucket.new :oauth_token => '...', :oauth_secret => '...'
# bitbucket.teams.following(:team_name_here)
# bitbucket.teams.following(:team_name_here) { |followee| ... }
# Lists accounts the team follows; yields each when a block is given,
# otherwise returns the "values" array from the API response.
def following(team_name)
  response = get_request("/2.0/teams/#{team_name.to_s}/following")
  return response["values"] unless block_given?
  response["values"].each { |el| yield el }
end
# List repos for provided team
# Private repos will only be returned if the user is authorized to view them
#
# = Examples
# bitbucket = BitBucket.new :oauth_token => '...', :oauth_secret => '...'
# bitbucket.teams.repos(:team_name_here)
# bitbucket.teams.repos(:team_name_here) { |repo| ... }
# Lists the team's repositories; yields each when a block is given,
# otherwise returns the "values" array from the API response.
def repos(team_name)
  response = get_request("/2.0/repositories/#{team_name.to_s}")
  return response["values"] unless block_given?
  response["values"].each { |el| yield el }
end
alias :repositories :repos
end # Team
|
senchalabs/jsduck | lib/jsduck/news.rb | JsDuck.News.filter_new_members | ruby | def filter_new_members(cls)
members = cls.all_local_members.find_all do |m|
visible?(m) && (m[:new] || new_params?(m))
end
members = discard_accessors(members)
members.sort! {|a, b| a[:name] <=> b[:name] }
end | Returns all members of a class that have been marked as new, or
have parameters marked as new. | train | https://github.com/senchalabs/jsduck/blob/febef5558ecd05da25f5c260365acc3afd0cafd8/lib/jsduck/news.rb#L68-L74 | class News
# Creates News object from relations data when --import option
# specified.
# Factory: builds a News object only when --import versions were given;
# otherwise returns a null object whose +to_html+ is "".
def self.create(relations, doc_formatter, opts)
  if opts.import.length > 0
    News.new(relations, doc_formatter)
  else
    Util::NullObject.new(:to_html => "")
  end
end
# Generates list of new classes & members in this version.
# Collects the new classes/members from +relations+ up front; rendering
# happens later in #to_html.
def initialize(relations, doc_formatter)
  @doc_formatter = doc_formatter
  @columns = Columns.new(:members)
  @new_items = filter_new_items(relations)
end
# Returns the HTML
# Renders the "New in this version" section; +style+ is inlined into the
# wrapper div's style attribute.
def to_html(style="")
  return [
    "<div id='news-content' style='#{style}'>",
    "<div class='section'>",
    "<h1>New in this version</h1>",
    render_news(@new_items),
    "<div style='clear:both'></div>",
    "</div>",
    "</div>",
  ].flatten.join("\n")
end
private
# Splits non-private classes into brand-new classes and classes with new
# members; returns the sorted item list, new classes section first.
def filter_new_items(relations)
  classes = []
  new_items = []
  relations.each do |cls|
    if !cls[:private]
      if cls[:new]
        classes << cls
      else
        members = filter_new_members(cls)
        if members.length > 0
          new_items << {:name => cls[:name], :members => members}
        end
      end
    end
  end
  new_items.sort! {|a, b| a[:name] <=> b[:name] }
  # Place the new classes section at the beginning
  if classes.length > 0
    new_items.unshift({:name => "New classes", :members => classes})
  end
  new_items
end
# Returns all members of a class that have been marked as new, or
# have parameters marked as new.
# A member is visible unless it is flagged private or hidden.
def visible?(member)
  !(member[:private] || member[:hide])
end
# True when any of the member's parameters carries the :new flag.
def new_params?(member)
  params = Array(member[:params])
  params.any? { |param| param[:new] }
end
# Drops auto-generated accessor methods (setX/getX and Xchange events for
# evented configs) from the member list.
def discard_accessors(members)
  accessors = {}
  # Build a lookup of generated accessor names from the accessor configs.
  members.find_all {|m| m[:accessor] }.each do |cfg|
    accessors["set" + upcase_first(cfg[:name])] = true
    accessors["get" + upcase_first(cfg[:name])] = true
    accessors[cfg[:name].downcase + "change"] = true if cfg[:evented]
  end
  members.reject {|m| accessors[m[:name]] }
end
# Upcases only the first character of +str+, leaving the rest untouched.
def upcase_first(str)
  head = str[0, 1]
  tail = str[1..-1]
  head.upcase + tail
end
# Renders the item columns, or a placeholder heading when nothing is new.
def render_news(new_items)
  return "<h3>Nothing new.</h3>" if new_items.empty?
  render_columns(new_items)
end
# Splits the items into three columns and wraps each in a positional div.
def render_columns(new_items)
  align = ["left-column", "middle-column", "right-column"]
  i = -1
  return @columns.split(new_items, 3).map do |col|
    # i tracks the column position to pick the matching CSS class.
    i += 1
    [
      "<div class='#{align[i]}'>",
      render_col(col),
      "</div>",
    ]
  end
end
# Renders one column: a heading plus a link list per item.
def render_col(col)
  return col.map do |item|
    [
      "<h3>#{item[:name]}</h3>",
      "<ul class='links'>",
      item[:members].map {|m| "<li>" + link(m) + "</li>" },
      "</ul>",
    ]
  end
end
# Builds the docs link for a class or a member (members get a note when
# only their parameters are new).
def link(m)
  if m[:tagname] == :class
    @doc_formatter.link(m[:name], nil, m[:name])
  else
    @doc_formatter.link(m[:owner], m[:name], m[:name], m[:tagname], m[:static]) + params_note(m)
  end
end
# Appends a "+parameters" note for members that are not themselves new
# but gained new parameters.
def params_note(m)
  return "" if m[:new] || !new_params?(m)
  " <small>+parameters</small>"
end
end
|
rossf7/elasticrawl | lib/elasticrawl/config.rb | Elasticrawl.Config.load_config | ruby | def load_config(config_file)
if dir_exists?
begin
config_file = File.join(config_dir, "#{config_file}.yml")
config = YAML::load(File.open(config_file))
rescue StandardError => e
raise FileAccessError, e.message
end
else
raise ConfigDirMissingError, 'Config dir missing. Run init command'
end
end | Loads a YAML configuration file. | train | https://github.com/rossf7/elasticrawl/blob/db70bb6819c86805869f389daf1920f3acc87cef/lib/elasticrawl/config.rb#L57-L69 | class Config
CONFIG_DIR = '.elasticrawl'
DATABASE_FILE = 'elasticrawl.sqlite3'
TEMPLATES_DIR = '../../templates'
TEMPLATE_FILES = ['aws.yml', 'cluster.yml', 'jobs.yml']
attr_reader :access_key_id
attr_reader :secret_access_key
# Sets the AWS access credentials needed for the S3 and EMR API calls.
# Sets the AWS access credentials needed for the S3 and EMR API calls.
# Resolution order: explicit arguments, then aws.yml (ignoring the
# template placeholders), then AWS_* environment variables.
def initialize(access_key_id = nil, secret_access_key = nil)
  # Credentials have been provided to the init command.
  @access_key_id = access_key_id
  @secret_access_key = secret_access_key
  # If credentials are not set then check if they are available in aws.yml.
  if dir_exists?
    config = load_config('aws')
    key = config['access_key_id']
    secret = config['secret_access_key']
    # Placeholder values from the template are treated as "not configured".
    @access_key_id ||= key unless key == 'ACCESS_KEY_ID'
    @secret_access_key ||= secret unless secret == 'SECRET_ACCESS_KEY'
  end
  # If credentials are still not set then check AWS environment variables.
  @access_key_id ||= ENV['AWS_ACCESS_KEY_ID']
  @secret_access_key ||= ENV['AWS_SECRET_ACCESS_KEY']
  # Set AWS credentials for use when accessing the S3 API.
  AWS.config(:access_key_id => @access_key_id,
             :secret_access_key => @secret_access_key)
end
# Returns the location of the config directory.
# Absolute path of the per-user configuration directory (~/.elasticrawl).
def config_dir
  File.join(Dir.home, CONFIG_DIR)
end
# Checks if the configuration directory exists.
# Checks if the configuration directory exists.
#
# @return [Boolean]
def dir_exists?
  # Dir.exists? was deprecated and removed in Ruby 3.2; Dir.exist? is the
  # supported, behaviorally identical spelling.
  Dir.exist?(config_dir)
end
# Loads a YAML configuration file.
# Loads the sqlite database. If no database exists it will be created
# and the database migrations will be run.
# Connects ActiveRecord to the sqlite database in the config dir and runs
# pending migrations (creating the database on first use).
def load_database
  if dir_exists?
    config = {
      'adapter' => 'sqlite3',
      'database' => File.join(config_dir, DATABASE_FILE),
      'pool' => 5,
      'timeout' => 5000
    }
    begin
      ActiveRecord::Base.establish_connection(config)
      # VERSION env var allows migrating to a specific schema version.
      ActiveRecord::Migrator.migrate(File.join(File.dirname(__FILE__), \
        '../../db/migrate'), ENV['VERSION'] ? ENV['VERSION'].to_i : nil )
    rescue StandardError => e
      raise DatabaseAccessError, e.message
    end
  else
    raise ConfigDirMissingError, 'Config dir missing. Run init command'
  end
end
# Checks if a S3 bucket name is in use.
# Checks if a S3 bucket name is in use; wraps AWS errors in domain errors.
def bucket_exists?(bucket_name)
  begin
    s3 = AWS::S3.new
    s3.buckets[bucket_name].exists?
  rescue AWS::S3::Errors::SignatureDoesNotMatch => e
    # A signature mismatch means the configured credentials are wrong.
    raise AWSCredentialsInvalidError, 'AWS access credentials are invalid'
  rescue AWS::Errors::Base => s3e
    raise S3AccessError.new(s3e.http_response), s3e.message
  end
end
# Creates the S3 bucket and config directory. Deploys the config templates
# and creates the sqlite database.
# Creates the S3 bucket and config directory, deploys the config templates
# and creates the sqlite database; returns a status message string.
def create(bucket_name)
  create_bucket(bucket_name)
  deploy_templates(bucket_name)
  load_database
  status_message(bucket_name, 'created')
end
# Deletes the S3 bucket and config directory.
# Deletes the S3 bucket (read from jobs.yml) and the config directory;
# returns a status message string.
def delete
  bucket_name = load_config('jobs')['s3_bucket_name']
  delete_bucket(bucket_name)
  delete_config_dir
  status_message(bucket_name, 'deleted')
end
# Displayed by destroy command to confirm deletion.
# Builds the multi-line warning shown by the destroy command before
# anything is deleted.
def delete_warning
  bucket_name = load_config('jobs')['s3_bucket_name']
  message = ['WARNING:']
  message << "Bucket s3://#{bucket_name} and its data will be deleted"
  message << "Config dir #{config_dir} will be deleted"
  message.join("\n")
end
# Displayed by init command.
# Prompt string for the init command; shows the current key as default
# when one is already configured.
def access_key_prompt
  prompt = "Enter AWS Access Key ID:"
  prompt += " [#{@access_key_id}]" if @access_key_id.present?
  prompt
end
# Displayed by init command.
# Prompt string for the init command; shows the current secret as default
# when one is already configured.
def secret_key_prompt
  prompt = "Enter AWS Secret Access Key:"
  prompt += " [#{@secret_access_key}]" if @secret_access_key.present?
  prompt
end
private
# Creates a bucket using the S3 API.
# Creates a bucket using the S3 API; AWS errors become S3AccessError.
def create_bucket(bucket_name)
  begin
    s3 = AWS::S3.new
    s3.buckets.create(bucket_name)
  rescue AWS::Errors::Base => s3e
    raise S3AccessError.new(s3e.http_response), s3e.message
  end
end
# Deletes a bucket and its contents using the S3 API.
# Deletes a bucket and all of its contents using the S3 API.
def delete_bucket(bucket_name)
  begin
    s3 = AWS::S3.new
    bucket = s3.buckets[bucket_name]
    # delete! also removes every object in the bucket.
    bucket.delete!
  rescue AWS::Errors::Base => s3e
    raise S3AccessError.new(s3e.http_response), s3e.message
  end
end
# Creates config directory and copies config templates into it.
# Saves S3 bucket name to jobs.yml and AWS credentials to aws.yml.
# Creates the config directory, copies the YAML templates into it, and
# writes the bucket name and AWS credentials into the copied files.
def deploy_templates(bucket_name)
  begin
    Dir.mkdir(config_dir, 0755) if dir_exists? == false
    TEMPLATE_FILES.each do |template_file|
      FileUtils.cp(File.join(File.dirname(__FILE__), TEMPLATES_DIR, template_file),
                   File.join(config_dir, template_file))
    end
    save_config('jobs', { 'BUCKET_NAME' => bucket_name })
    save_aws_config
  rescue StandardError => e
    raise FileAccessError, e.message
  end
end
# Saves AWS access credentials to aws.yml unless they are configured as
# environment variables.
# Saves AWS access credentials to aws.yml, but only those that did not
# come from the environment variables (those stay out of the file).
def save_aws_config
  env_key = ENV['AWS_ACCESS_KEY_ID']
  env_secret = ENV['AWS_SECRET_ACCESS_KEY']
  creds = {}
  creds['ACCESS_KEY_ID'] = @access_key_id unless @access_key_id == env_key
  creds['SECRET_ACCESS_KEY'] = @secret_access_key \
    unless @secret_access_key == env_secret
  save_config('aws', creds)
end
# Saves config values by overwriting placeholder values in template.
# Saves config values by replacing placeholder tokens in the named
# template file inside the config directory.
#
# @param template [String] template base name, without the ".yml" suffix
# @param params [Hash] placeholder => replacement pairs
def save_config(template, params)
  config_file = File.join(config_dir, "#{template}.yml")
  # reduce applies each replacement in turn; the original used map purely
  # for its side effects, which obscured the intent.
  config = params.reduce(File.read(config_file)) do |content, (key, value)|
    content.gsub(key, value)
  end
  File.open(config_file, 'w') { |file| file.write(config) }
end
# Deletes the config directory including its contents.
# Deletes the config directory including its contents (no-op when the
# directory does not exist).
def delete_config_dir
  begin
    FileUtils.rm_r(config_dir) if dir_exists?
  rescue StandardError => e
    raise FileAccessError, e.message
  end
end
# Notifies user of results of init or destroy commands.
# Builds the multi-line status message shown after init or destroy.
# For the final line, 'created' is reported as 'complete'.
def status_message(bucket_name, state)
  final_state = state == 'created' ? 'complete' : state
  lines = ['']
  lines << "Bucket s3://#{bucket_name} #{state}"
  lines << "Config dir #{config_dir} #{state}"
  lines << "Config #{final_state}"
  lines.join("\n")
end
end
|
tarcieri/cool.io | lib/cool.io/meta.rb | Coolio.Meta.watcher_delegate | ruby | def watcher_delegate(proxy_var)
%w{attach attached? detach enable disable}.each do |method|
module_eval <<-EOD
def #{method}(*args)
if defined? #{proxy_var} and #{proxy_var}
#{proxy_var}.#{method}(*args)
return self
end
super
end
EOD
end
end | Use an alternate watcher with the attach/detach/enable/disable methods
if it is presently assigned. This is useful if you are waiting for
an event to occur before the current watcher can be used in earnest,
such as making an outgoing TCP connection. | train | https://github.com/tarcieri/cool.io/blob/0fd3fd1d8e8d81e24f79f809979367abc3f52b92/lib/cool.io/meta.rb#L13-L26 | module Meta
# Use an alternate watcher with the attach/detach/enable/disable methods
# if it is presently assigned. This is useful if you are waiting for
# an event to occur before the current watcher can be used in earnest,
# such as making an outgoing TCP connection.
# Define callbacks whose behavior can be changed on-the-fly per instance.
# This is done by giving a block to the callback method, which is captured
# as a proc and stored for later. If the method is called without a block,
# the stored block is executed if present, otherwise it's a noop.
# For each named method, redefines it so that calling it with a block
# stores the block as the callback, and calling it without a block invokes
# the stored callback (a no-op when none is set).
def event_callback(*methods)
  methods.each do |method|
    # module_eval builds the replacement method from a string so the
    # callback ivar name (@<method>_callback) can be interpolated.
    module_eval <<-EOD
      remove_method "#{method}"
      def #{method}(*args, &block)
        if block
          @#{method}_callback = block
          return
        end
        if defined? @#{method}_callback and @#{method}_callback
          @#{method}_callback.call(*args)
        end
      end
    EOD
  end
end
end
|
dennisreimann/masq | app/controllers/masq/server_controller.rb | Masq.ServerController.decide | ruby | def decide
@site = current_account.sites.find_or_initialize_by_url(checkid_request.trust_root)
@site.persona = current_account.personas.find(params[:persona_id] || :first) if sreg_request || ax_store_request || ax_fetch_request
end | Displays the decision page on that the user can confirm the request and
choose which data should be transfered to the relying party. | train | https://github.com/dennisreimann/masq/blob/bc6b6d84fe06811b9de19e7863c53c6bfad201fe/app/controllers/masq/server_controller.rb#L64-L67 | class ServerController < BaseController
# CSRF-protection must be skipped, because incoming
# OpenID requests lack an authenticity token
skip_before_filter :verify_authenticity_token
# Error handling
rescue_from OpenID::Server::ProtocolError, :with => :render_openid_error
# Actions other than index require a logged in user
before_filter :login_required, :except => [:index, :cancel, :seatbelt_config, :seatbelt_login_state]
before_filter :ensure_valid_checkid_request, :except => [:index, :cancel, :seatbelt_config, :seatbelt_login_state]
after_filter :clear_checkid_request, :only => [:cancel, :complete]
# These methods are used to display information about the request to the user
helper_method :sreg_request, :ax_fetch_request, :ax_store_request
# This is the server endpoint which handles all incoming OpenID requests.
# Associate and CheckAuth requests are answered directly - functionality
# therefor is provided by the ruby-openid gem. Handling of CheckId requests
# dependents on the users login state (see handle_checkid_request).
# Yadis requests return information about this endpoint.
# OpenID server endpoint: dispatches CheckID requests, answers other
# OpenID requests, and serves the XRDS document for Yadis discovery.
def index
  clear_checkid_request
  respond_to do |format|
    format.html do
      if openid_request.is_a?(OpenID::Server::CheckIDRequest)
        handle_checkid_request
      elsif openid_request
        # Associate / CheckAuth requests are answered directly.
        handle_non_checkid_request
      else
        render :text => t(:this_is_openid_not_a_human_ressource)
      end
    end
    format.xrds
  end
end
# This action decides how to process the current request and serves as
# dispatcher and re-entry in case the request could not be processed
# directly (for instance if the user had to log in first).
# When the user has already trusted the relying party, the request will
# be answered based on the users release policy. If the request is immediate
# (relying party wants no user interaction, used e.g. for ajax requests)
# the request can only be answered if no further information (like simple
# registration data) is requested. Otherwise the user will be redirected
# to the decision page.
# Answers the pending CheckID request: trusted sites are answered per the
# stored release policy; immediate requests needing extra data fail;
# otherwise the user is sent to the decision page.
def proceed
  identity = identifier(current_account)
  if @site = current_account.sites.find_by_url(checkid_request.trust_root)
    # Relying party already trusted: answer with the site's policies.
    resp = checkid_request.answer(true, nil, identity)
    resp = add_sreg(resp, @site.sreg_properties) if sreg_request
    resp = add_ax(resp, @site.ax_properties) if ax_fetch_request
    resp = add_pape(resp, auth_policies, auth_level, auth_time)
    render_response(resp)
  elsif checkid_request.immediate && (sreg_request || ax_store_request || ax_fetch_request)
    # Immediate mode cannot ask the user for registration data.
    render_response(checkid_request.answer(false))
  elsif checkid_request.immediate
    render_response(checkid_request.answer(true, nil, identity))
  else
    redirect_to decide_path
  end
end
# Displays the decision page on that the user can confirm the request and
# choose which data should be transfered to the relying party.
# This action is called by submitting the decision form, the information entered by
# the user is used to answer the request. If the user decides to always trust the
# relying party, a new site according to the release policies the will be created.
# Handles the submitted decision form: cancels, or answers the request
# using the chosen persona/site settings; "always" persists the site.
def complete
  if params[:cancel]
    cancel
  else
    resp = checkid_request.answer(true, nil, identifier(current_account))
    if params[:always]
      # Persist the site so future requests are answered automatically.
      @site = current_account.sites.find_or_create_by_persona_id_and_url(params[:site][:persona_id], params[:site][:url])
      @site.update_attributes(params[:site])
    elsif sreg_request || ax_fetch_request
      @site = current_account.sites.find_or_initialize_by_persona_id_and_url(params[:site][:persona_id], params[:site][:url])
      @site.attributes = params[:site]
    elsif ax_store_request
      @site = current_account.sites.find_or_initialize_by_persona_id_and_url(params[:site][:persona_id], params[:site][:url])
      # Store incoming AX attributes on the persona, tracking which type
      # URIs were accepted, declined by the user, or unsupported.
      not_supported, not_accepted, accepted = [], [], []
      ax_store_request.data.each do |type_uri, values|
        if property = Persona.attribute_name_for_type_uri(type_uri)
          store_attribute = params[:site][:ax_store][property.to_sym]
          if store_attribute && !store_attribute[:value].blank?
            @site.persona.update_attribute(property, values.first)
            accepted << type_uri
          else
            not_accepted << type_uri
          end
        else
          not_supported << type_uri
        end
      end
      ax_store_response = (accepted.count > 0) ? OpenID::AX::StoreResponse.new : OpenID::AX::StoreResponse.new(false, "None of the attributes were accepted.")
      resp.add_extension(ax_store_response)
    end
    resp = add_pape(resp, auth_policies, auth_level, auth_time)
    resp = add_sreg(resp, @site.sreg_properties) if sreg_request && @site.sreg_properties
    resp = add_ax(resp, @site.ax_properties) if ax_fetch_request && @site.ax_properties
    render_response(resp)
  end
end
# Cancels the current OpenID request
def cancel
redirect_to checkid_request.cancel_url
end
protected
# Decides how to process an incoming checkid request. If the user is
# already logged in he will be forwarded to the proceed action. If
# the user is not logged in and the request is immediate, the request
# cannot be answered successfully. In case the user is not logged in,
# the request will be stored and the user is asked to log in.
def handle_checkid_request
if allow_verification?
save_checkid_request
redirect_to proceed_path
elsif openid_request.immediate
render_response(openid_request.answer(false))
else
reset_session
request = save_checkid_request
session[:return_to] = proceed_path
redirect_to( request.from_trusted_domain? ? login_path : safe_login_path )
end
end
# Stores the current OpenID request.
# Returns the OpenIdRequest
def save_checkid_request
clear_checkid_request
request = OpenIdRequest.create!(:parameters => openid_params)
session[:request_token] = request.token
request
end
# Deletes the old request when a new one comes in.
def clear_checkid_request
unless session[:request_token].blank?
OpenIdRequest.destroy_all :token => session[:request_token]
session[:request_token] = nil
end
end
# Use this as before_filter for every CheckID request based action.
# Loads the current openid request and cancels if none can be found.
# The user has to log in, if he has not verified his ownership of
# the identifier, yet.
def ensure_valid_checkid_request
self.openid_request = checkid_request
if !openid_request.is_a?(OpenID::Server::CheckIDRequest)
redirect_to root_path, :alert => t(:identity_verification_request_invalid)
elsif !allow_verification?
flash[:notice] = logged_in? && !pape_requirements_met?(auth_time) ?
t(:service_provider_requires_reauthentication_last_login_too_long_ago) :
t(:login_to_verify_identity)
session[:return_to] = proceed_path
redirect_to login_path
end
end
# The user must be logged in, he must be the owner of the claimed identifier
# and the PAPE requirements must be met if applicable.
def allow_verification?
logged_in? && correct_identifier? && pape_requirements_met?(auth_time)
end
# Is the user allowed to verify the claimed identifier? The user
# must be logged in, so that we know his identifier or the identifier
# has to be selected by the server (id_select).
def correct_identifier?
(openid_request.identity == identifier(current_account) || openid_request.id_select)
end
# Clears the stored request and answers
def render_response(resp)
clear_checkid_request
render_openid_response(resp)
end
# Transforms the parameters from the form to valid AX response values
def transform_ax_data(parameters)
data = {}
parameters.each_pair do |key, details|
if details['value']
data["type.#{key}"] = details['type']
data["value.#{key}"] = details['value']
end
end
data
end
# Renders the exception message as text output
def render_openid_error(exception)
error = case exception
when OpenID::Server::MalformedTrustRoot then "Malformed trust root '#{exception.to_s}'"
else exception.to_s
end
render :text => h("Invalid OpenID request: #{error}"), :status => 500
end
private
# The NIST Assurance Level, see:
# http://openid.net/specs/openid-provider-authentication-policy-extension-1_0-01.html#anchor12
def auth_level
if Masq::Engine.config.masq['use_ssl']
current_account.last_authenticated_with_yubikey? ? 3 : 2
else
0
end
end
def auth_time
current_account.last_authenticated_at
end
def auth_policies
current_account.last_authenticated_with_yubikey? ?
[OpenID::PAPE::AUTH_MULTI_FACTOR, OpenID::PAPE::AUTH_PHISHING_RESISTANT] :
[]
end
end
|
arvicco/win_gui | old_code/lib/win_gui/def_api.rb | WinGui.DefApi.def_api | ruby | def def_api(function, params, returns, options={}, &define_block)
name, aliases = generate_names(function, options)
boolean = options[:boolean]
zeronil = options[:zeronil]
proto = params.respond_to?(:join) ? params.join : params # Convert params into prototype string
api = Win32::API.new(function, proto.upcase, returns.upcase, options[:dll] || DEFAULT_DLL)
define_method(function) {|*args| api.call(*args)} # define CamelCase method wrapper for api call
define_method(name) do |*args, &runtime_block| # define snake_case method with enhanced api
return api if args == [:api]
return define_block[api, *args, &runtime_block] if define_block
WinGui.enforce_count(args, proto)
result = api.call(*args)
result = runtime_block[result] if runtime_block
return result != 0 if boolean # Boolean function returns true/false instead of nonzero/zero
return nil if zeronil && result == 0 # Zeronil function returns nil instead of zero
result
end
aliases.each {|ali| alias_method ali, name } # define aliases
end | Defines new method wrappers for Windows API function call:
- Defines method with original (CamelCase) API function name and original signature (matches MSDN description)
- Defines method with snake_case name (converted from CamelCase function name) with enhanced API signature
When the defined wrapper method is called, it checks the argument count, executes underlying API
function call and (optionally) transforms the result before returning it. If block is attached to
method invocation, raw result is yielded to this block before final transformations
- Defines aliases for enhanced method with more Rubyesque names for getters, setters and tests:
GetWindowText -> window_test, SetWindowText -> window_text=, IsZoomed -> zoomed?
You may modify default behavior of defined method by providing optional &define_block to def_api.
If you do so, instead of directly calling API function, defined method just yields callable api
object, arguments and (optional) runtime block to your &define_block and returns result coming out of it.
So, &define_block should define all the behavior of defined method. You can use define_block to:
- Change original signature of API function, provide argument defaults, check argument types
- Pack arguments into strings for [in] or [in/out] parameters that expect a pointer
- Allocate string buffers for pointers required by API functions [out] parameters
- Unpack [out] and [in/out] parameters returned as pointers
- Explicitly return results of API call that are returned in [out] and [in/out] parameters
- Convert attached runtime blocks into callback functions and stuff them into [in] callback parameters
Accepts following options:
:dll:: Use this dll instead of default 'user32'
:rename:: Use this name instead of standard (conventional) function name
:alias(es):: Provides additional alias(es) for defined method
:boolean:: Forces method to return true/false instead of nonzero/zero
:zeronil:: Forces method to return nil if function result is zero | train | https://github.com/arvicco/win_gui/blob/a3a4c18db2391144fcb535e4be2f0fb47e9dcec7/old_code/lib/win_gui/def_api.rb#L36-L56 | module DefApi
# DLL to use with API decarations by default ('user32')
DEFAULT_DLL = 'user32'
##
# Defines new method wrappers for Windows API function call:
# - Defines method with original (CamelCase) API function name and original signature (matches MSDN description)
# - Defines method with snake_case name (converted from CamelCase function name) with enhanced API signature
# When the defined wrapper method is called, it checks the argument count, executes underlying API
# function call and (optionally) transforms the result before returning it. If block is attached to
# method invocation, raw result is yielded to this block before final transformations
# - Defines aliases for enhanced method with more Rubyesque names for getters, setters and tests:
# GetWindowText -> window_test, SetWindowText -> window_text=, IsZoomed -> zoomed?
#
# You may modify default behavior of defined method by providing optional &define_block to def_api.
# If you do so, instead of directly calling API function, defined method just yields callable api
# object, arguments and (optional) runtime block to your &define_block and returns result coming out of it.
# So, &define_block should define all the behavior of defined method. You can use define_block to:
# - Change original signature of API function, provide argument defaults, check argument types
# - Pack arguments into strings for [in] or [in/out] parameters that expect a pointer
# - Allocate string buffers for pointers required by API functions [out] parameters
# - Unpack [out] and [in/out] parameters returned as pointers
# - Explicitly return results of API call that are returned in [out] and [in/out] parameters
# - Convert attached runtime blocks into callback functions and stuff them into [in] callback parameters
#
# Accepts following options:
# :dll:: Use this dll instead of default 'user32'
# :rename:: Use this name instead of standard (conventional) function name
# :alias(es):: Provides additional alias(es) for defined method
# :boolean:: Forces method to return true/false instead of nonzero/zero
# :zeronil:: Forces method to return nil if function result is zero
#
# Generates name and aliases for defined method based on function name,
# sets boolean flag for test functions (Is...)
#
def generate_names(function, options)
aliases = ([options[:alias]] + [options[:aliases]]).flatten.compact
name = options[:rename] || function.snake_case
case name
when /^is_/
aliases << name.sub(/^is_/, '') + '?'
options[:boolean] = true
when /^set_/
aliases << name.sub(/^set_/, '')+ '='
when /^get_/
aliases << name.sub(/^get_/, '')
end
[name, aliases]
end
# Ensures that args count is equal to params count plus diff
#
def enforce_count(args, params, diff = 0)
num_args = args.size
num_params = params == 'V' ? 0 : params.size + diff
if num_args != num_params
raise ArgumentError, "wrong number of parameters: expected #{num_params}, got #{num_args}"
end
end
# Converts block into API::Callback object that can be used as API callback argument
#
def callback(params, returns, &block)
Win32::API::Callback.new(params, returns, &block)
end
private # Helper methods:
# # Returns FFI string buffer - used to supply string pointer reference to API functions
# #
# def buffer(size = 1024, char = "\x00")
# FFI.MemoryPointer.from_string(char * size)
# end
# Returns array of given args if none of them is zero,
# if any arg is zero, returns array of nils
#
def nonzero_array(*args)
args.any?{|arg| arg == 0 } ? args.map{||nil} : args
end
# Procedure that returns (possibly encoded) string as a result of api function call
# or nil if zero characters was returned by api call
#
def return_string( encode = nil )
lambda do |api, *args|
WinGui.enforce_count( args, api.prototype, -2)
args += [string = buffer, string.length]
num_chars = api.call(*args)
return nil if num_chars == 0
string = string.force_encoding('utf-16LE').encode(encode) if encode
string.rstrip
end
end
# Procedure that calls api function expecting a callback. If runtime block is given
# it is converted into actual callback, otherwise procedure returns an array of all
# handles pushed into callback by api enumeration
#
def return_enum
lambda do |api, *args, &block|
WinGui.enforce_count( args, api.prototype, -1)
handles = []
cb = if block
callback('LP', 'I', &block)
else
callback('LP', 'I') do |handle, message|
handles << handle
true
end
end
args[api.prototype.find_index('K'), 0] = cb # Insert callback into appropriate place of args Array
api.call *args
handles
end
end
# Procedure that calls (DdeInitialize) function expecting a DdeCallback. Runtime block is converted
# into Dde callback and registered with DdeInitialize. Returns DDE init status and DDE instance id.
#
# TODO: Pushed into this module since RubyMine (wrongly) reports error on lambda args
#
def return_id_status
lambda do |api, id=0, cmd, &block|
raise ArgumentError, 'No callback block' unless block
callback = callback 'IIPPPPPP', 'L', &block
status = api.call(id = [id].pack('L'), callback, cmd, 0)
id = status == 0 ? id.unpack('L').first : nil
[id, status]
end
end
end
|
sup-heliotrope/sup | lib/sup/index.rb | Redwood.Index.parse_query | ruby | def parse_query s
query = {}
subs = HookManager.run("custom-search", :subs => s) || s
begin
subs = SearchManager.expand subs
rescue SearchManager::ExpansionError => e
raise ParseError, e.message
end
subs = subs.gsub(/\b(to|from):(\S+)\b/) do
field, value = $1, $2
email_field, name_field = %w(email name).map { |x| "#{field}_#{x}" }
if(p = ContactManager.contact_for(value))
"#{email_field}:#{p.email}"
elsif value == "me"
'(' + AccountManager.user_emails.map { |e| "#{email_field}:#{e}" }.join(' OR ') + ')'
else
"(#{email_field}:#{value} OR #{name_field}:#{value})"
end
end
## gmail style "is" operator
subs = subs.gsub(/\b(is|has):(\S+)\b/) do
field, label = $1, $2
case label
when "read"
"-label:unread"
when "spam"
query[:load_spam] = true
"label:spam"
when "deleted"
query[:load_deleted] = true
"label:deleted"
else
"label:#{$2}"
end
end
## labels are stored lower-case in the index
subs = subs.gsub(/\blabel:(\S+)\b/) do
label = $1
"label:#{label.downcase}"
end
## if we see a label:deleted or a label:spam term anywhere in the query
## string, we set the extra load_spam or load_deleted options to true.
## bizarre? well, because the query allows arbitrary parenthesized boolean
## expressions, without fully parsing the query, we can't tell whether
## the user is explicitly directing us to search spam messages or not.
## e.g. if the string is -(-(-(-(-label:spam)))), does the user want to
## search spam messages or not?
##
## so, we rely on the fact that turning these extra options ON turns OFF
## the adding of "-label:deleted" or "-label:spam" terms at the very
## final stage of query processing. if the user wants to search spam
## messages, not adding that is the right thing; if he doesn't want to
## search spam messages, then not adding it won't have any effect.
query[:load_spam] = true if subs =~ /\blabel:spam\b/
query[:load_deleted] = true if subs =~ /\blabel:deleted\b/
query[:load_killed] = true if subs =~ /\blabel:killed\b/
## gmail style attachments "filename" and "filetype" searches
subs = subs.gsub(/\b(filename|filetype):(\((.+?)\)\B|(\S+)\b)/) do
field, name = $1, ($3 || $4)
case field
when "filename"
debug "filename: translated #{field}:#{name} to attachment:\"#{name.downcase}\""
"attachment:\"#{name.downcase}\""
when "filetype"
debug "filetype: translated #{field}:#{name} to attachment_extension:#{name.downcase}"
"attachment_extension:#{name.downcase}"
end
end
lastdate = 2<<32 - 1
firstdate = 0
subs = subs.gsub(/\b(before|on|in|during|after):(\((.+?)\)\B|(\S+)\b)/) do
field, datestr = $1, ($3 || $4)
realdate = Chronic.parse datestr, :guess => false, :context => :past
if realdate
case field
when "after"
debug "chronic: translated #{field}:#{datestr} to #{realdate.end}"
"date:#{realdate.end.to_i}..#{lastdate}"
when "before"
debug "chronic: translated #{field}:#{datestr} to #{realdate.begin}"
"date:#{firstdate}..#{realdate.end.to_i}"
else
debug "chronic: translated #{field}:#{datestr} to #{realdate}"
"date:#{realdate.begin.to_i}..#{realdate.end.to_i}"
end
else
raise ParseError, "can't understand date #{datestr.inspect}"
end
end
## limit:42 restrict the search to 42 results
subs = subs.gsub(/\blimit:(\S+)\b/) do
lim = $1
if lim =~ /^\d+$/
query[:limit] = lim.to_i
''
else
raise ParseError, "non-numeric limit #{lim.inspect}"
end
end
debug "translated query: #{subs.inspect}"
qp = Xapian::QueryParser.new
qp.database = @xapian
qp.stemmer = Xapian::Stem.new($config[:stem_language])
qp.stemming_strategy = Xapian::QueryParser::STEM_SOME
qp.default_op = Xapian::Query::OP_AND
valuerangeprocessor = Xapian::NumberValueRangeProcessor.new(DATE_VALUENO,
'date:', true)
qp.add_valuerangeprocessor(valuerangeprocessor)
NORMAL_PREFIX.each { |k,info| info[:prefix].each {
|v| qp.add_prefix k, v }
}
BOOLEAN_PREFIX.each { |k,info| info[:prefix].each {
|v| qp.add_boolean_prefix k, v, info[:exclusive] }
}
begin
xapian_query = qp.parse_query(subs, Xapian::QueryParser::FLAG_PHRASE |
Xapian::QueryParser::FLAG_BOOLEAN |
Xapian::QueryParser::FLAG_LOVEHATE |
Xapian::QueryParser::FLAG_WILDCARD)
rescue RuntimeError => e
raise ParseError, "xapian query parser error: #{e}"
end
debug "parsed xapian query: #{Util::Query.describe(xapian_query, subs)}"
if xapian_query.nil? or xapian_query.empty?
raise ParseError, "couldn't parse \"#{s}\" as xapian query " \
"(special characters aren't indexed)"
end
query[:qobj] = xapian_query
query[:text] = s
query
end | parse a query string from the user. returns a query object
that can be passed to any index method with a 'query'
argument.
raises a ParseError if something went wrong. | train | https://github.com/sup-heliotrope/sup/blob/36f95462e3014c354c577d63a78ba030c4b84474/lib/sup/index.rb#L405-L548 | class Index
include InteractiveLock
INDEX_VERSION = '4'
## dates are converted to integers for xapian, and are used for document ids,
## so we must ensure they're reasonably valid. this typically only affect
## spam.
MIN_DATE = Time.at 0
MAX_DATE = Time.at(2**31-1)
HookManager.register "custom-search", <<EOS
Executes before a string search is applied to the index,
returning a new search string.
Variables:
subs: The string being searched.
EOS
class LockError < StandardError
def initialize h
@h = h
end
def method_missing m; @h[m.to_s] end
end
include Redwood::Singleton
def initialize dir=BASE_DIR
@dir = dir
FileUtils.mkdir_p @dir
@lock = Lockfile.new lockfile, :retries => 0, :max_age => nil
@sync_worker = nil
@sync_queue = Queue.new
@index_mutex = Monitor.new
end
def lockfile; File.join @dir, "lock" end
def lock
debug "locking #{lockfile}..."
begin
@lock.lock
rescue Lockfile::MaxTriesLockError
raise LockError, @lock.lockinfo_on_disk
end
end
def start_lock_update_thread
@lock_update_thread = Redwood::reporting_thread("lock update") do
while true
sleep 30
@lock.touch_yourself
end
end
end
def stop_lock_update_thread
@lock_update_thread.kill if @lock_update_thread
@lock_update_thread = nil
end
def unlock
if @lock && @lock.locked?
debug "unlocking #{lockfile}..."
@lock.unlock
end
end
def load failsafe=false
SourceManager.load_sources
load_index failsafe
end
def save
debug "saving index and sources..."
FileUtils.mkdir_p @dir unless File.exist? @dir
SourceManager.save_sources
save_index
end
def get_xapian
@xapian
end
def load_index failsafe=false
path = File.join(@dir, 'xapian')
if File.exist? path
@xapian = Xapian::WritableDatabase.new(path, Xapian::DB_OPEN)
db_version = @xapian.get_metadata 'version'
db_version = '0' if db_version.empty?
if false
info "Upgrading index format #{db_version} to #{INDEX_VERSION}"
@xapian.set_metadata 'version', INDEX_VERSION
elsif db_version != INDEX_VERSION
fail "This Sup version expects a v#{INDEX_VERSION} index, but you have an existing v#{db_version} index. Please run sup-dump to save your labels, move #{path} out of the way, and run sup-sync --restore."
end
else
@xapian = Xapian::WritableDatabase.new(path, Xapian::DB_CREATE)
@xapian.set_metadata 'version', INDEX_VERSION
@xapian.set_metadata 'rescue-version', '0'
end
@enquire = Xapian::Enquire.new @xapian
@enquire.weighting_scheme = Xapian::BoolWeight.new
@enquire.docid_order = Xapian::Enquire::ASCENDING
end
def add_message m; sync_message m, true end
def update_message m; sync_message m, true end
def update_message_state m; sync_message m[0], false, m[1] end
def save_index
info "Flushing Xapian updates to disk. This may take a while..."
@xapian.flush
end
def contains_id? id
synchronize { find_docid(id) && true }
end
def contains? m; contains_id? m.id end
def size
synchronize { @xapian.doccount }
end
def empty?; size == 0 end
## Yields a message-id and message-building lambda for each
## message that matches the given query, in descending date order.
## You should probably not call this on a block that doesn't break
## rather quickly because the results can be very large.
def each_id_by_date query={}
each_id(query) { |id| yield id, lambda { build_message id } }
end
## Return the number of matches for query in the index
def num_results_for query={}
xapian_query = build_xapian_query query
matchset = run_query xapian_query, 0, 0, 100
matchset.matches_estimated
end
## check if a message is part of a killed thread
## (warning: duplicates code below)
## NOTE: We can be more efficient if we assume every
## killed message that hasn't been initially added
## to the indexi s this way
def message_joining_killed? m
return false unless doc = find_doc(m.id)
queue = doc.value(THREAD_VALUENO).split(',')
seen_threads = Set.new
seen_messages = Set.new [m.id]
while not queue.empty?
thread_id = queue.pop
next if seen_threads.member? thread_id
return true if thread_killed?(thread_id)
seen_threads << thread_id
docs = term_docids(mkterm(:thread, thread_id)).map { |x| @xapian.document x }
docs.each do |doc|
msgid = doc.value MSGID_VALUENO
next if seen_messages.member? msgid
seen_messages << msgid
queue.concat doc.value(THREAD_VALUENO).split(',')
end
end
false
end
## yield all messages in the thread containing 'm' by repeatedly
## querying the index. yields pairs of message ids and
## message-building lambdas, so that building an unwanted message
## can be skipped in the block if desired.
##
## only two options, :limit and :skip_killed. if :skip_killed is
## true, stops loading any thread if a message with a :killed flag
## is found.
def each_message_in_thread_for m, opts={}
# TODO thread by subject
return unless doc = find_doc(m.id)
queue = doc.value(THREAD_VALUENO).split(',')
msgids = [m.id]
seen_threads = Set.new
seen_messages = Set.new [m.id]
while not queue.empty?
thread_id = queue.pop
next if seen_threads.member? thread_id
return false if opts[:skip_killed] && thread_killed?(thread_id)
seen_threads << thread_id
docs = term_docids(mkterm(:thread, thread_id)).map { |x| @xapian.document x }
docs.each do |doc|
msgid = doc.value MSGID_VALUENO
next if seen_messages.member? msgid
msgids << msgid
seen_messages << msgid
queue.concat doc.value(THREAD_VALUENO).split(',')
end
end
msgids.each { |id| yield id, lambda { build_message id } }
true
end
## Load message with the given message-id from the index
def build_message id
entry = synchronize { get_entry id }
return unless entry
locations = entry[:locations].map do |source_id,source_info|
source = SourceManager[source_id]
raise "invalid source #{source_id}" unless source
Location.new source, source_info
end
m = Message.new :locations => locations,
:labels => entry[:labels],
:snippet => entry[:snippet]
# Try to find person from contacts before falling back to
# generating it from the address.
mk_person = lambda { |x| Person.from_name_and_email(*x.reverse!) }
entry[:from] = mk_person[entry[:from]]
entry[:to].map!(&mk_person)
entry[:cc].map!(&mk_person)
entry[:bcc].map!(&mk_person)
m.load_from_index! entry
m
end
## Delete message with the given message-id from the index
def delete id
synchronize { @xapian.delete_document mkterm(:msgid, id) }
end
## Given an array of email addresses, return an array of Person objects that
## have sent mail to or received mail from any of the given addresses.
def load_contacts email_addresses, opts={}
contacts = Set.new
num = opts[:num] || 20
each_id_by_date :participants => email_addresses do |id,b|
break if contacts.size >= num
m = b.call
([m.from]+m.to+m.cc+m.bcc).compact.each { |p| contacts << [p.name, p.email] }
end
contacts.to_a.compact[0...num].map { |n,e| Person.from_name_and_email n, e }
end
## Yield each message-id matching query
EACH_ID_PAGE = 100
def each_id query={}, ignore_neg_terms = true
offset = 0
page = EACH_ID_PAGE
xapian_query = build_xapian_query query, ignore_neg_terms
while true
ids = run_query_ids xapian_query, offset, (offset+page)
ids.each { |id| yield id }
break if ids.size < page
offset += page
end
end
## Yield each message matching query
## The ignore_neg_terms parameter is used to display result even if
## it contains "forbidden" labels such as :deleted, it is used in
## Poll#poll_from when we need to get the location of a message that
## may contain these labels
def each_message query={}, ignore_neg_terms = true, &b
each_id query, ignore_neg_terms do |id|
yield build_message(id)
end
end
# Search messages. Returns an Enumerator.
def find_messages query_expr
enum_for :each_message, parse_query(query_expr)
end
# wrap all future changes inside a transaction so they're done atomically
def begin_transaction
synchronize { @xapian.begin_transaction }
end
# complete the transaction and write all previous changes to disk
def commit_transaction
synchronize { @xapian.commit_transaction }
end
# abort the transaction and revert all changes made since begin_transaction
def cancel_transaction
synchronize { @xapian.cancel_transaction }
end
## xapian-compact takes too long, so this is a no-op
## until we think of something better
def optimize
end
## Return the id source of the source the message with the given message-id
## was synced from
def source_for_id id
synchronize { get_entry(id)[:source_id] }
end
## Yields each term in the index that starts with prefix
def each_prefixed_term prefix
term = @xapian._dangerous_allterms_begin prefix
lastTerm = @xapian._dangerous_allterms_end prefix
until term.equals lastTerm
yield term.term
term.next
end
nil
end
## Yields (in lexicographical order) the source infos of all locations from
## the given source with the given source_info prefix
def each_source_info source_id, prefix='', &b
p = mkterm :location, source_id, prefix
each_prefixed_term p do |x|
yield prefix + x[p.length..-1]
end
end
class ParseError < StandardError; end
# Stemmed
NORMAL_PREFIX = {
'subject' => {:prefix => 'S', :exclusive => false},
'body' => {:prefix => 'B', :exclusive => false},
'from_name' => {:prefix => 'FN', :exclusive => false},
'to_name' => {:prefix => 'TN', :exclusive => false},
'name' => {:prefix => %w(FN TN), :exclusive => false},
'attachment' => {:prefix => 'A', :exclusive => false},
'email_text' => {:prefix => 'E', :exclusive => false},
'' => {:prefix => %w(S B FN TN A E), :exclusive => false},
}
# Unstemmed
BOOLEAN_PREFIX = {
'type' => {:prefix => 'K', :exclusive => true},
'from_email' => {:prefix => 'FE', :exclusive => false},
'to_email' => {:prefix => 'TE', :exclusive => false},
'email' => {:prefix => %w(FE TE), :exclusive => false},
'date' => {:prefix => 'D', :exclusive => true},
'label' => {:prefix => 'L', :exclusive => false},
'source_id' => {:prefix => 'I', :exclusive => true},
'attachment_extension' => {:prefix => 'O', :exclusive => false},
'msgid' => {:prefix => 'Q', :exclusive => true},
'id' => {:prefix => 'Q', :exclusive => true},
'thread' => {:prefix => 'H', :exclusive => false},
'ref' => {:prefix => 'R', :exclusive => false},
'location' => {:prefix => 'J', :exclusive => false},
}
PREFIX = NORMAL_PREFIX.merge BOOLEAN_PREFIX
COMPL_OPERATORS = %w[AND OR NOT]
COMPL_PREFIXES = (
%w[
from to
is has label
filename filetypem
before on in during after
limit
] + NORMAL_PREFIX.keys + BOOLEAN_PREFIX.keys
).map{|p|"#{p}:"} + COMPL_OPERATORS
## parse a query string from the user. returns a query object
## that can be passed to any index method with a 'query'
## argument.
##
## raises a ParseError if something went wrong.
def save_message m, sync_back = true
if @sync_worker
@sync_queue << [m, sync_back]
else
update_message_state [m, sync_back]
end
m.clear_dirty
end
def save_thread t, sync_back = true
t.each_dirty_message do |m|
save_message m, sync_back
end
end
def start_sync_worker
@sync_worker = Redwood::reporting_thread('index sync') { run_sync_worker }
end
def stop_sync_worker
return unless worker = @sync_worker
@sync_worker = nil
@sync_queue << :die
worker.join
end
def run_sync_worker
while m = @sync_queue.deq
return if m == :die
update_message_state m
# Necessary to keep Xapian calls from lagging the UI too much.
sleep 0.03
end
end
private
MSGID_VALUENO = 0
THREAD_VALUENO = 1
DATE_VALUENO = 2
MAX_TERM_LENGTH = 245
# Xapian can very efficiently sort in ascending docid order. Sup always wants
# to sort by descending date, so this method maps between them. In order to
# handle multiple messages per second, we use a logistic curve centered
# around MIDDLE_DATE so that the slope (docid/s) is greatest in this time
# period. A docid collision is not an error - the code will pick the next
# smallest unused one.
DOCID_SCALE = 2.0**32
TIME_SCALE = 2.0**27
MIDDLE_DATE = Time.gm(2011)
def assign_docid m, truncated_date
t = (truncated_date.to_i - MIDDLE_DATE.to_i).to_f
docid = (DOCID_SCALE - DOCID_SCALE/(Math::E**(-(t/TIME_SCALE)) + 1)).to_i
while docid > 0 and docid_exists? docid
docid -= 1
end
docid > 0 ? docid : nil
end
# XXX is there a better way?
def docid_exists? docid
begin
@xapian.doclength docid
true
rescue RuntimeError #Xapian::DocNotFoundError
raise unless $!.message =~ /DocNotFoundError/
false
end
end
def term_docids term
@xapian.postlist(term).map { |x| x.docid }
end
def find_docid id
docids = term_docids(mkterm(:msgid,id))
fail unless docids.size <= 1
docids.first
end
def find_doc id
return unless docid = find_docid(id)
@xapian.document docid
end
def get_id docid
return unless doc = @xapian.document(docid)
doc.value MSGID_VALUENO
end
def get_entry id
return unless doc = find_doc(id)
doc.entry
end
def thread_killed? thread_id
not run_query(Q.new(Q::OP_AND, mkterm(:thread, thread_id), mkterm(:label, :Killed)), 0, 1).empty?
end
def synchronize &b
@index_mutex.synchronize &b
end
def run_query xapian_query, offset, limit, checkatleast=0
synchronize do
@enquire.query = xapian_query
@enquire.mset(offset, limit-offset, checkatleast)
end
end
def run_query_ids xapian_query, offset, limit
matchset = run_query xapian_query, offset, limit
matchset.matches.map { |r| r.document.value MSGID_VALUENO }
end
Q = Xapian::Query
def build_xapian_query opts, ignore_neg_terms = true
labels = ([opts[:label]] + (opts[:labels] || [])).compact
neglabels = [:spam, :deleted, :killed].reject { |l| (labels.include? l) || opts.member?("load_#{l}".intern) }
pos_terms, neg_terms = [], []
pos_terms << mkterm(:type, 'mail')
pos_terms.concat(labels.map { |l| mkterm(:label,l) })
pos_terms << opts[:qobj] if opts[:qobj]
pos_terms << mkterm(:source_id, opts[:source_id]) if opts[:source_id]
pos_terms << mkterm(:location, *opts[:location]) if opts[:location]
if opts[:participants]
participant_terms = opts[:participants].map { |p| [:from,:to].map { |d| mkterm(:email, d, (Redwood::Person === p) ? p.email : p) } }.flatten
pos_terms << Q.new(Q::OP_OR, participant_terms)
end
neg_terms.concat(neglabels.map { |l| mkterm(:label,l) }) if ignore_neg_terms
pos_query = Q.new(Q::OP_AND, pos_terms)
neg_query = Q.new(Q::OP_OR, neg_terms)
if neg_query.empty?
pos_query
else
Q.new(Q::OP_AND_NOT, [pos_query, neg_query])
end
end
def sync_message m, overwrite, sync_back = true
## TODO: we should not save the message if the sync_back failed
## since it would overwrite the location field
m.sync_back if sync_back
doc = synchronize { find_doc(m.id) }
existed = doc != nil
doc ||= Xapian::Document.new
do_index_static = overwrite || !existed
old_entry = !do_index_static && doc.entry
snippet = do_index_static ? m.snippet : old_entry[:snippet]
entry = {
:message_id => m.id,
:locations => m.locations.map { |x| [x.source.id, x.info] },
:date => truncate_date(m.date),
:snippet => snippet,
:labels => m.labels.to_a,
:from => [m.from.email, m.from.name],
:to => m.to.map { |p| [p.email, p.name] },
:cc => m.cc.map { |p| [p.email, p.name] },
:bcc => m.bcc.map { |p| [p.email, p.name] },
:subject => m.subj,
:refs => m.refs.to_a,
:replytos => m.replytos.to_a,
}
if do_index_static
doc.clear_terms
doc.clear_values
index_message_static m, doc, entry
end
index_message_locations doc, entry, old_entry
index_message_threading doc, entry, old_entry
index_message_labels doc, entry[:labels], (do_index_static ? [] : old_entry[:labels])
doc.entry = entry
synchronize do
unless docid = existed ? doc.docid : assign_docid(m, truncate_date(m.date))
# Could be triggered by spam
warn "docid underflow, dropping #{m.id.inspect}"
return
end
@xapian.replace_document docid, doc
end
m.labels.each { |l| LabelManager << l }
true
end
## Index content that can't be changed by the user
## Indexes the parts of message +m+ that the user cannot change: sender and
## recipient identities, subject/body full text, attachment names and
## extensions, and the fixed terms/values (type, msgid, date) used for
## searching and date-range queries. Mutable state (labels, locations,
## threading) is handled by the other index_message_* methods.
def index_message_static m, doc, entry
# Person names are indexed with several prefixes
# person_termer is curried: person_termer[direction] yields a lambda that
# indexes a single person under that direction's prefixes.
person_termer = lambda do |d|
lambda do |p|
doc.index_text p.name, PREFIX["#{d}_name"][:prefix] if p.name
doc.index_text p.email, PREFIX['email_text'][:prefix]
doc.add_term mkterm(:email, d, p.email)
end
end
person_termer[:from][m.from] if m.from
# to, cc and bcc recipients are all indexed under the :to direction.
(m.to+m.cc+m.bcc).each(&(person_termer[:to]))
# Full text search content
subject_text = m.indexable_subject
body_text = m.indexable_body
doc.index_text subject_text, PREFIX['subject'][:prefix]
doc.index_text body_text, PREFIX['body'][:prefix]
m.attachments.each { |a| doc.index_text a, PREFIX['attachment'][:prefix] }
# Miscellaneous terms
doc.add_term mkterm(:date, m.date) if m.date
doc.add_term mkterm(:type, 'mail')
doc.add_term mkterm(:msgid, m.id)
# Index each attachment's file extension, when it has one.
m.attachments.each do |a|
a =~ /\.(\w+)$/ or next
doc.add_term mkterm(:attachment_extension, $1)
end
# Date value for range queries
# Fall back to serialising 0 when the date cannot be serialised (TypeError).
date_value = begin
Xapian.sortable_serialise m.date.to_i
rescue TypeError
Xapian.sortable_serialise 0
end
doc.add_value MSGID_VALUENO, m.id
doc.add_value DATE_VALUENO, date_value
end
# Refreshes the location-related terms on +doc+ so they reflect +entry+.
#
# Terms derived from +old_entry+ (the previously indexed state; nil/false
# when the message is being indexed from scratch) are removed first, then
# the current source-id and per-location terms are added. Removal of a
# location term that is no longer present is best-effort.
def index_message_locations doc, entry, old_entry
  if old_entry
    stale_source_ids = old_entry[:locations].map { |loc| loc.first }.uniq
    stale_source_ids.each { |source_id| doc.remove_term mkterm(:source_id, source_id) }
  end
  current_source_ids = entry[:locations].map { |loc| loc.first }.uniq
  current_source_ids.each { |source_id| doc.add_term mkterm(:source_id, source_id) }
  if old_entry
    old_entry[:locations].each do |loc|
      begin
        doc.remove_term mkterm(:location, *loc)
      rescue
        nil
      end
    end
  end
  entry[:locations].each { |loc| doc.add_term mkterm(:location, *loc) }
end
# Diffs +new_labels+ against +old_labels+ and applies only the difference
# to +doc+: a term is added for each newly-present label and removed for
# each label that disappeared. Does nothing when the sets are equal.
def index_message_labels doc, new_labels, old_labels
  unless new_labels == old_labels
    (new_labels.to_a - old_labels.to_a).each do |label|
      doc.add_term mkterm(:label, label)
    end
    (old_labels.to_a - new_labels.to_a).each do |label|
      doc.remove_term mkterm(:label, label)
    end
  end
end
## Assign a set of thread ids to the document. This is a hybrid of the runtime
## search done by the Ferret index and the index-time union done by previous
## versions of the Xapian index. We first find the thread ids of all messages
## with a reference to or from us. If that set is empty, we use our own
## message id. Otherwise, we use all the thread ids we previously found. In
## the common case there's only one member in that set, but if we're the
## missing link between multiple previously unrelated threads we can have
## more. XapianIndex#each_message_in_thread_for follows the thread ids when
## searching so the user sees a single unified thread.
## Updates the thread terms and the stored thread-id value on +doc+.
## Returns early (no work) when neither refs nor replytos changed compared
## with +old_entry+. Thread ids are gathered from related documents in both
## directions; when no related document exists, the message's own id seeds
## a brand-new thread.
def index_message_threading doc, entry, old_entry
return if old_entry && (entry[:refs] == old_entry[:refs]) && (entry[:replytos] == old_entry[:replytos])
# Documents holding a :ref term pointing at this message are its children.
children = term_docids(mkterm(:ref, entry[:message_id])).map { |docid| @xapian.document docid }
parent_ids = entry[:refs] + entry[:replytos]
parents = parent_ids.map { |id| find_doc id }.compact
# Collect every thread id already attached to a related document.
thread_members = SavingHash.new { [] }
(children + parents).each do |doc2|
thread_ids = doc2.value(THREAD_VALUENO).split ','
thread_ids.each { |thread_id| thread_members[thread_id] << doc2 }
end
thread_ids = thread_members.empty? ? [entry[:message_id]] : thread_members.keys
thread_ids.each { |thread_id| doc.add_term mkterm(:thread, thread_id) }
parent_ids.each { |ref| doc.add_term mkterm(:ref, ref) }
# The full thread-id list is stored comma-separated for later retrieval.
doc.add_value THREAD_VALUENO, (thread_ids * ',')
end
# Clamps +date+ into the [MIN_DATE, MAX_DATE] range the index can store.
# Dates already inside the range are returned unchanged; out-of-range
# dates are logged via debug and replaced with the nearest bound.
def truncate_date date
  return date if date >= MIN_DATE && date <= MAX_DATE
  if date < MIN_DATE
    debug "warning: adjusting too-low date #{date} for indexing"
    MIN_DATE
  else
    debug "warning: adjusting too-high date #{date} for indexing"
    MAX_DATE
  end
end
# Construct a Xapian term
# Builds the raw Xapian term string for the given term +type+ by prepending
# the configured PREFIX entry to a normalised representation of the
# argument(s). Raises for unknown term types or email directions.
def mkterm type, *args
case type
when :label
# args[0]: label name; lowercased so lookups are case-insensitive
PREFIX['label'][:prefix] + args[0].to_s.downcase
when :type
PREFIX['type'][:prefix] + args[0].to_s.downcase
when :date
# args[0]: a Time; rendered in UTC so terms sort/compare consistently
PREFIX['date'][:prefix] + args[0].getutc.strftime("%Y%m%d%H%M%S")
when :email
# args[0]: direction (:from or :to), args[1]: the address
case args[0]
when :from then PREFIX['from_email'][:prefix]
when :to then PREFIX['to_email'][:prefix]
else raise "Invalid email term type #{args[0]}"
end + args[1].to_s.downcase
when :source_id
PREFIX['source_id'][:prefix] + args[0].to_s.downcase
when :location
# args[0]: source id packed as a 16-bit big-endian int, args[1]: info
PREFIX['location'][:prefix] + [args[0]].pack('n') + args[1].to_s
when :attachment_extension
PREFIX['attachment_extension'][:prefix] + args[0].to_s.downcase
when :msgid, :ref, :thread
# ids are truncated so the prefixed term stays within MAX_TERM_LENGTH
PREFIX[type.to_s][:prefix] + args[0][0...(MAX_TERM_LENGTH-1)]
else
raise "Invalid term type #{type}"
end
end
end
|
rmagick/rmagick | lib/rmagick_internal.rb | Magick.ImageList.method_missing | ruby | def method_missing(meth_id, *args, &block)
if @scene
@images[@scene].send(meth_id, *args, &block)
else
super
end
rescue NoMethodError
Kernel.raise NoMethodError, "undefined method `#{meth_id.id2name}' for #{self.class}"
rescue Exception
$ERROR_POSITION.delete_if { |s| /:in `send'$/.match(s) || /:in `method_missing'$/.match(s) }
Kernel.raise
end | The ImageList class supports the Magick::Image class methods by simply sending
the method to the current image. If the method isn't explicitly supported,
send it to the current image in the array. If there are no images, send
it up the line. Catch a NameError and emit a useful message. | train | https://github.com/rmagick/rmagick/blob/ef6688ed9d76bf123c2ea1a483eff8635051adb7/lib/rmagick_internal.rb#L1603-L1614 | class ImageList
include Comparable
include Enumerable
attr_reader :scene
private
# Returns the object id (__id__) of the currently selected image, or nil
# when it cannot be determined (e.g. no image array or nil scene index).
def get_current
  selected = @images[@scene]
  selected.__id__
rescue StandardError
  nil
end
protected
# Asserts that +obj+ is a Magick::Image. Returns true when it is;
# raises ArgumentError (via Kernel.raise) otherwise.
def is_an_image(obj)
  return true if obj.is_a?(Magick::Image)
  Kernel.raise ArgumentError, "Magick::Image required (#{obj.class} given)"
end
# Ensure array is always an array of Magick::Image objects
# Asserts that +ary+ is enumerable and that every element is a
# Magick::Image (delegating the per-element check to is_an_image).
# Returns true on success; raises ArgumentError otherwise.
def is_an_image_array(ary)
  unless ary.respond_to?(:each)
    Kernel.raise ArgumentError, "Magick::ImageList or array of Magick::Images required (#{ary.class} given)"
  end
  ary.each do |element|
    is_an_image element
  end
  true
end
# Find old current image, update scene number
# current is the id of the old current image.
# Re-points @scene after the underlying image array has changed.
#
# current - the object id (__id__) of the image that was current before
# the change, or nil when there was none.
#
# Resulting scene: nil for an empty list; the last index when the old
# scene is gone/out of range or no id was supplied; otherwise the index
# of the last occurrence of +current+ in the list (falling back to the
# last index when that id is no longer present).
def set_current(current)
if length.zero?
self.scene = nil
return
# Don't bother looking for current image
elsif scene.nil? || scene >= length
self.scene = length - 1
return
elsif !current.nil?
# Find last instance of "current" in the list.
# If "current" isn't in the list, set current to last image.
self.scene = length - 1
each_with_index do |f, i|
self.scene = i if f.__id__ == current
end
return
end
self.scene = length - 1
end
public
# Allow scene to be set to nil
# Sets the current scene (image index) of the list.
#
# n - the new scene index, nil, or any value Integer() accepts.
#
# nil is only legal when the list is empty; for a non-empty list n must
# convert to an Integer within 0..(length - 1). Raises IndexError when
# these bounds are violated. Returns the new value of @scene.
def scene=(n)
if n.nil?
# a nil scene is only valid when there are no images
Kernel.raise IndexError, 'scene number out of bounds' unless @images.length.zero?
@scene = nil
return @scene
elsif @images.length.zero?
# conversely, a numeric scene makes no sense with no images
Kernel.raise IndexError, 'scene number out of bounds'
end
n = Integer(n)
Kernel.raise IndexError, 'scene number out of bounds' if n < 0 || n > length - 1
@scene = n
@scene
end
# All the binary operators work the same way.
# 'other' should be either an ImageList or an Array
%w[& + - |].each do |op|
module_eval <<-END_BINOPS
def #{op}(other)
ilist = self.class.new
begin
a = other #{op} @images
rescue TypeError
Kernel.raise ArgumentError, "Magick::ImageList expected, got " + other.class.to_s
end
current = get_current()
a.each do |image|
is_an_image image
ilist << image
end
ilist.set_current current
return ilist
end
END_BINOPS
end
def *(other)
Kernel.raise ArgumentError, "Integer required (#{other.class} given)" unless other.is_a? Integer
current = get_current
ilist = self.class.new
(@images * other).each { |image| ilist << image }
ilist.set_current current
ilist
end
def <<(obj)
is_an_image obj
@images << obj
@scene = @images.length - 1
self
end
# Compare ImageLists
# Compare each image in turn until the result of a comparison
# is not 0. If all comparisons return 0, then
# return if A.scene != B.scene
# return A.length <=> B.length
def <=>(other)
Kernel.raise TypeError, "#{self.class} required (#{other.class} given)" unless other.is_a? self.class
size = [length, other.length].min
size.times do |x|
r = self[x] <=> other[x]
return r unless r.zero?
end
return 0 if @scene.nil? && other.scene.nil?
Kernel.raise TypeError, "cannot convert nil into #{other.scene.class}" if @scene.nil? && !other.scene.nil?
Kernel.raise TypeError, "cannot convert nil into #{scene.class}" if !@scene.nil? && other.scene.nil?
r = scene <=> other.scene
return r unless r.zero?
length <=> other.length
end
def [](*args)
a = @images[*args]
if a.respond_to?(:each)
ilist = self.class.new
a.each { |image| ilist << image }
a = ilist
end
a
end
def []=(*args)
obj = @images.[]=(*args)
if obj && obj.respond_to?(:each)
is_an_image_array(obj)
set_current obj.last.__id__
elsif obj
is_an_image(obj)
set_current obj.__id__
else
set_current nil
end
obj
end
%i[at each each_index empty? fetch
first hash include? index length rindex sort!].each do |mth|
module_eval <<-END_SIMPLE_DELEGATES
def #{mth}(*args, &block)
@images.#{mth}(*args, &block)
end
END_SIMPLE_DELEGATES
end
alias size length
# Array#nitems is not available in 1.9
if Array.instance_methods.include?('nitems')
def nitems
@images.nitems
end
end
def clear
@scene = nil
@images.clear
end
def clone
ditto = dup
ditto.freeze if frozen?
ditto
end
# override Enumerable#collect
def collect(&block)
current = get_current
a = @images.collect(&block)
ilist = self.class.new
a.each { |image| ilist << image }
ilist.set_current current
ilist
end
def collect!(&block)
@images.collect!(&block)
is_an_image_array @images
self
end
# Make a deep copy
def copy
ditto = self.class.new
@images.each { |f| ditto << f.copy }
ditto.scene = @scene
ditto.taint if tainted?
ditto
end
# Return the current image
def cur_image
Kernel.raise IndexError, 'no images in this list' unless @scene
@images[@scene]
end
# ImageList#map took over the "map" name. Use alternatives.
alias __map__ collect
alias map! collect!
alias __map__! collect!
# ImageMagic used affinity in 6.4.3, switch to remap in 6.4.4.
alias affinity remap
def compact
current = get_current
ilist = self.class.new
a = @images.compact
a.each { |image| ilist << image }
ilist.set_current current
ilist
end
def compact!
current = get_current
a = @images.compact! # returns nil if no changes were made
set_current current
a.nil? ? nil : self
end
def concat(other)
is_an_image_array other
other.each { |image| @images << image }
@scene = length - 1
self
end
# Set same delay for all images
def delay=(d)
raise ArgumentError, 'delay must be greater than or equal to 0' if Integer(d) < 0
@images.each { |f| f.delay = Integer(d) }
end
def delete(obj, &block)
is_an_image obj
current = get_current
a = @images.delete(obj, &block)
set_current current
a
end
def delete_at(ndx)
current = get_current
a = @images.delete_at(ndx)
set_current current
a
end
def delete_if(&block)
current = get_current
@images.delete_if(&block)
set_current current
self
end
def dup
ditto = self.class.new
@images.each { |img| ditto << img }
ditto.scene = @scene
ditto.taint if tainted?
ditto
end
def eql?(other)
is_an_image_array other
eql = other.eql?(@images)
begin # "other" is another ImageList
eql &&= @scene == other.scene
rescue NoMethodError
# "other" is a plain Array
end
eql
end
def fill(*args, &block)
is_an_image args[0] unless block_given?
current = get_current
@images.fill(*args, &block)
is_an_image_array self
set_current current
self
end
# Override Enumerable's find_all
def find_all(&block)
current = get_current
a = @images.find_all(&block)
ilist = self.class.new
a.each { |image| ilist << image }
ilist.set_current current
ilist
end
alias select find_all
def from_blob(*blobs, &block)
Kernel.raise ArgumentError, 'no blobs given' if blobs.length.zero?
blobs.each do |b|
Magick::Image.from_blob(b, &block).each { |n| @images << n }
end
@scene = length - 1
self
end
# Initialize new instances
def initialize(*filenames, &block)
@images = []
@scene = nil
filenames.each do |f|
Magick::Image.read(f, &block).each { |n| @images << n }
end
if length > 0
@scene = length - 1 # last image in array
end
self
end
def insert(index, *args)
args.each { |image| is_an_image image }
current = get_current
@images.insert(index, *args)
set_current current
self
end
# Call inspect for all the images
def inspect
img = []
@images.each { |image| img << image.inspect }
img = '[' + img.join(",\n") + "]\nscene=#{@scene}"
end
# Set the number of iterations of an animated GIF
def iterations=(n)
n = Integer(n)
Kernel.raise ArgumentError, 'iterations must be between 0 and 65535' if n < 0 || n > 65_535
@images.each { |f| f.iterations = n }
self
end
def last(*args)
if args.length.zero?
a = @images.last
else
a = @images.last(*args)
ilist = self.class.new
a.each { |img| ilist << img }
@scene = a.length - 1
a = ilist
end
a
end
# Custom marshal/unmarshal for Ruby 1.8.
def marshal_dump
ary = [@scene]
@images.each { |i| ary << Marshal.dump(i) }
ary
end
def marshal_load(ary)
@scene = ary.shift
@images = []
ary.each { |a| @images << Marshal.load(a) }
end
# The ImageList class supports the Magick::Image class methods by simply sending
# the method to the current image. If the method isn't explicitly supported,
# send it to the current image in the array. If there are no images, send
# it up the line. Catch a NameError and emit a useful message.
# Create a new image and add it to the end
def new_image(cols, rows, *fill, &info_blk)
self << Magick::Image.new(cols, rows, *fill, &info_blk)
end
def partition(&block)
a = @images.partition(&block)
t = self.class.new
a[0].each { |img| t << img }
t.set_current nil
f = self.class.new
a[1].each { |img| f << img }
f.set_current nil
[t, f]
end
# Ping files and concatenate the new images
def ping(*files, &block)
Kernel.raise ArgumentError, 'no files given' if files.length.zero?
files.each do |f|
Magick::Image.ping(f, &block).each { |n| @images << n }
end
@scene = length - 1
self
end
def pop
current = get_current
a = @images.pop # can return nil
set_current current
a
end
def push(*objs)
objs.each do |image|
is_an_image image
@images << image
end
@scene = length - 1
self
end
# Read files and concatenate the new images
def read(*files, &block)
Kernel.raise ArgumentError, 'no files given' if files.length.zero?
files.each do |f|
Magick::Image.read(f, &block).each { |n| @images << n }
end
@scene = length - 1
self
end
# override Enumerable's reject
def reject(&block)
current = get_current
ilist = self.class.new
a = @images.reject(&block)
a.each { |image| ilist << image }
ilist.set_current current
ilist
end
def reject!(&block)
current = get_current
a = @images.reject!(&block)
@images = a unless a.nil?
set_current current
a.nil? ? nil : self
end
def replace(other)
is_an_image_array other
current = get_current
@images.clear
other.each { |image| @images << image }
@scene = length.zero? ? nil : 0
set_current current
self
end
# Ensure respond_to? answers correctly when we are delegating to Image
alias __respond_to__? respond_to?
def respond_to?(meth_id, priv = false)
return true if __respond_to__?(meth_id, priv)
if @scene
@images[@scene].respond_to?(meth_id, priv)
else
super
end
end
def reverse
current = get_current
a = self.class.new
@images.reverse_each { |image| a << image }
a.set_current current
a
end
def reverse!
current = get_current
@images.reverse!
set_current current
self
end
def reverse_each
@images.reverse_each { |image| yield(image) }
self
end
def shift
current = get_current
a = @images.shift
set_current current
a
end
def slice(*args)
slice = @images.slice(*args)
if slice
ilist = self.class.new
if slice.respond_to?(:each)
slice.each { |image| ilist << image }
else
ilist << slice
end
else
ilist = nil
end
ilist
end
def slice!(*args)
current = get_current
a = @images.slice!(*args)
set_current current
a
end
def ticks_per_second=(t)
Kernel.raise ArgumentError, 'ticks_per_second must be greater than or equal to 0' if Integer(t) < 0
@images.each { |f| f.ticks_per_second = Integer(t) }
end
def to_a
a = []
@images.each { |image| a << image }
a
end
def uniq
current = get_current
a = self.class.new
@images.uniq.each { |image| a << image }
a.set_current current
a
end
def uniq!(*_args)
current = get_current
a = @images.uniq!
set_current current
a.nil? ? nil : self
end
# @scene -> new object
def unshift(obj)
is_an_image obj
@images.unshift(obj)
@scene = 0
self
end
def values_at(*args)
a = @images.values_at(*args)
a = self.class.new
@images.values_at(*args).each { |image| a << image }
a.scene = a.length - 1
a
end
alias indexes values_at
alias indices values_at
end # Magick::ImageList
|
documentcloud/jammit | lib/jammit/helper.rb | Jammit.Helper.tags_with_options | ruby | def tags_with_options(packages, options)
packages.dup.map {|package|
yield package
}.flatten.map {|package|
stylesheet_link_tag package, options
}.join("\n")
end | Generate the stylesheet tags for a batch of packages, with options, by
yielding each package to a block. | train | https://github.com/documentcloud/jammit/blob/dc866f1ac3eb069d65215599c451db39d66119a7/lib/jammit/helper.rb#L76-L82 | module Helper
DATA_URI_START = "<!--[if (!IE)|(gte IE 8)]><!-->" unless defined?(DATA_URI_START)
DATA_URI_END = "<!--<![endif]-->" unless defined?(DATA_URI_END)
MHTML_START = "<!--[if lte IE 7]>" unless defined?(MHTML_START)
MHTML_END = "<![endif]-->" unless defined?(MHTML_END)
# If embed_assets is turned on, writes out links to the Data-URI and MHTML
# versions of the stylesheet package, otherwise the package is regular
# compressed CSS, and in development the stylesheet URLs are passed verbatim.
def include_stylesheets(*packages)
options = packages.extract_options!
return html_safe(individual_stylesheets(packages, options)) unless should_package?
disabled = (options.delete(:embed_assets) == false) || (options.delete(:embed_images) == false)
return html_safe(packaged_stylesheets(packages, options)) if disabled || !Jammit.embed_assets
return html_safe(embedded_image_stylesheets(packages, options))
end
# Writes out the URL to the bundled and compressed javascript package,
# except in development, where it references the individual scripts.
def include_javascripts(*packages)
options = packages.extract_options!
options.merge!(:extname=>false)
html_safe packages.map {|pack|
should_package? ? Jammit.asset_url(pack, :js) : Jammit.packager.individual_urls(pack.to_sym, :js)
}.flatten.map {|pack|
"<script src=\"#{pack}\"></script>"
}.join("\n")
end
# Writes out the URL to the concatenated and compiled JST file -- we always
# have to pre-process it, even in development.
def include_templates(*packages)
raise DeprecationError, "Jammit 0.5+ no longer supports separate packages for templates.\nYou can include your JST alongside your JS, and use include_javascripts."
end
private
def should_package?
Jammit.package_assets && !(Jammit.allow_debugging && params[:debug_assets])
end
def html_safe(string)
string.respond_to?(:html_safe) ? string.html_safe : string
end
# HTML tags, in order, for all of the individual stylesheets.
def individual_stylesheets(packages, options)
tags_with_options(packages, options) {|p| Jammit.packager.individual_urls(p.to_sym, :css) }
end
# HTML tags for the stylesheet packages.
def packaged_stylesheets(packages, options)
tags_with_options(packages, options) {|p| Jammit.asset_url(p, :css) }
end
# HTML tags for the 'datauri', and 'mhtml' versions of the packaged
# stylesheets, using conditional comments to load the correct variant.
def embedded_image_stylesheets(packages, options)
datauri_tags = tags_with_options(packages, options) {|p| Jammit.asset_url(p, :css, :datauri) }
ie_tags = Jammit.mhtml_enabled ?
tags_with_options(packages, options) {|p| Jammit.asset_url(p, :css, :mhtml) } :
packaged_stylesheets(packages, options)
[DATA_URI_START, datauri_tags, DATA_URI_END, MHTML_START, ie_tags, MHTML_END].join("\n")
end
# Generate the stylesheet tags for a batch of packages, with options, by
# yielding each package to a block.
end
|
quixoten/queue_to_the_future | lib/queue_to_the_future/job.rb | QueueToTheFuture.Job.method_missing | ruby | def method_missing(*args, &block)
Thread.pass until defined?(@result)
case @result
when Exception
def self.method_missing(*args, &block); raise @result; end
else
def self.method_missing(*args, &block); @result.send(*args, &block); end
end
self.method_missing(*args, &block)
end | Allows the job to behave as the return value of the block.
Accessing any method on the job will cause code to block
until the job is completed. | train | https://github.com/quixoten/queue_to_the_future/blob/dd8260fa165ee42b95e6d76bc665fdf68339dfd6/lib/queue_to_the_future/job.rb#L34-L45 | class Job
instance_methods.each { |meth| undef_method(meth) unless %w(__send__ __id__ object_id inspect).include?(meth.to_s) }
# Creates a job and schedules it by calling {Coordinator#schedule}.
#
# @param [List] *args The list of arguments to pass to the given block
# @param [Proc] &block The block to be executed
def initialize(*args, &block)
@args = args
@block = block
Coordinator.schedule(self)
end
# Execute the job.
#
# This is called by the worker the job gets assigned to.
# @return [nil]
def __execute__
@result = @block[*@args]
rescue Exception => e
@result = e
ensure
# Prevent multiple executions
def self.__execute__; nil; end
end
# Allows the job to behave as the return value of the block.
#
# Accessing any method on the job will cause code to block
# until the job is completed.
end
|
simplymadeapps/simple_scheduler | lib/simple_scheduler/task.rb | SimpleScheduler.Task.future_run_times | ruby | def future_run_times
future_run_times = existing_run_times.dup
last_run_time = future_run_times.last || at - frequency
last_run_time = last_run_time.in_time_zone(time_zone)
# Ensure there are at least two future jobs scheduled and that the queue ahead time is filled
while future_run_times.length < 2 || minutes_queued_ahead(last_run_time) < queue_ahead
last_run_time = frequency.from_now(last_run_time)
# The hour may not match because of a shift caused by DST in previous run times,
# so we need to ensure that the hour matches the specified hour if given.
last_run_time = last_run_time.change(hour: at.hour, min: at.min) if at.hour?
future_run_times << last_run_time
end
future_run_times
end | Returns an array of Time objects for future run times based on
the current time and the given minutes to look ahead.
@return [Array<Time>]
rubocop:disable Metrics/AbcSize | train | https://github.com/simplymadeapps/simple_scheduler/blob/4d186042507c1397ee79a5e8fe929cc14008c026/lib/simple_scheduler/task.rb#L67-L82 | class Task
attr_reader :job_class, :params
DEFAULT_QUEUE_AHEAD_MINUTES = 360
# Initializes a task by parsing the params so the task can be queued in the future.
# @param params [Hash]
# @option params [String] :class The class of the Active Job or Sidekiq Worker
# @option params [String] :every How frequently the job will be performed
# @option params [String] :at The starting time for the interval
# @option params [String] :expires_after The interval used to determine how late the job is allowed to run
# @option params [Integer] :queue_ahead The number of minutes that jobs should be queued in the future
# @option params [String] :task_name The name of the task as defined in the YAML config
# @option params [String] :tz The time zone to use when parsing the `at` option
def initialize(params)
validate_params!(params)
@params = params
end
# The task's first run time as a Time-like object.
# @return [SimpleScheduler::At]
def at
@at ||= At.new(@params[:at], time_zone)
end
# The time between the scheduled and actual run time that should cause the job not to run.
# @return [String]
def expires_after
@params[:expires_after]
end
# Returns an array of existing jobs matching the job class of the task.
# @return [Array<Sidekiq::SortedEntry>]
def existing_jobs
@existing_jobs ||= SimpleScheduler::Task.scheduled_set.select do |job|
next unless job.display_class == "SimpleScheduler::FutureJob"
task_params = job.display_args[0].symbolize_keys
task_params[:class] == job_class_name && task_params[:name] == name
end.to_a
end
# Returns an array of existing future run times that have already been scheduled.
# @return [Array<Time>]
def existing_run_times
@existing_run_times ||= existing_jobs.map(&:at)
end
# How often the job will be run.
# @return [ActiveSupport::Duration]
def frequency
@frequency ||= parse_frequency(@params[:every])
end
# Returns an array Time objects for future run times based on
# the current time and the given minutes to look ahead.
# @return [Array<Time>]
# rubocop:disable Metrics/AbcSize
# rubocop:enable Metrics/AbcSize
# The class name of the job or worker.
# @return [String]
def job_class_name
@params[:class]
end
# The name of the task as defined in the YAML config.
# @return [String]
def name
@params[:name]
end
# The number of minutes that jobs should be queued in the future.
# @return [Integer]
def queue_ahead
@queue_ahead ||= @params[:queue_ahead] || DEFAULT_QUEUE_AHEAD_MINUTES
end
# The time zone to use when parsing the `at` option.
# @return [ActiveSupport::TimeZone]
def time_zone
@time_zone ||= params[:tz] ? ActiveSupport::TimeZone.new(params[:tz]) : Time.zone
end
# Loads the scheduled jobs from Sidekiq once to avoid loading from
# Redis for each task when looking up existing scheduled jobs.
# @return [Sidekiq::ScheduledSet]
def self.scheduled_set
@scheduled_set ||= Sidekiq::ScheduledSet.new
end
private
def minutes_queued_ahead(last_run_time)
(last_run_time - Time.now) / 60
end
def parse_frequency(every_string)
split_duration = every_string.split(".")
frequency = split_duration[0].to_i
frequency_units = split_duration[1]
frequency.send(frequency_units)
end
def validate_params!(params)
raise ArgumentError, "Missing param `class` specifying the class of the job to run." unless params.key?(:class)
raise ArgumentError, "Missing param `every` specifying how often the job should run." unless params.key?(:every)
@job_class = params[:class].constantize
params[:name] ||= params[:class]
end
end
|
SamSaffron/message_bus | lib/message_bus/http_client.rb | MessageBus.HTTPClient.start | ruby | def start
@mutex.synchronize do
return if started?
@status = STARTED
thread = Thread.new do
begin
while started?
unless @channels.empty?
poll
@stats.success += 1
@stats.failed = 0
end
sleep interval
end
rescue StandardError => e
@stats.failed += 1
warn("#{e.class} #{e.message}: #{e.backtrace.join("\n")}")
sleep interval
retry
ensure
stop
end
end
thread.abort_on_exception = true
end
self
end | @param base_url [String] Base URL of the message_bus server to connect to
@param enable_long_polling [Boolean] Enable long polling
@param enable_chunked_encoding [Boolean] Enable chunk encoding
@param min_poll_interval [Float, Integer] Min poll interval when long polling in seconds
@param max_poll_interval [Float, Integer] Max poll interval when long polling in seconds.
When requests fail, the client will backoff and this is the upper limit.
@param background_callback_interval [Float, Integer] Interval to poll when
when polling in seconds.
@param headers [Hash] extra HTTP headers to be set on the polling requests.
@return [Object] Instance of MessageBus::HTTPClient
Starts a background thread that polls the message bus endpoint
for the given base_url.
Intervals for long polling can be configured via min_poll_interval and
max_poll_interval.
Intervals for polling can be configured via background_callback_interval.
@return [Object] Instance of MessageBus::HTTPClient | train | https://github.com/SamSaffron/message_bus/blob/90fba639eb5d332ca8e87fd35f1d603a5743076d/lib/message_bus/http_client.rb#L96-L127 | class HTTPClient
class InvalidChannel < StandardError; end
class MissingBlock < StandardError; end
attr_reader :channels,
:stats
attr_accessor :enable_long_polling,
:status,
:enable_chunked_encoding,
:min_poll_interval,
:max_poll_interval,
:background_callback_interval
CHUNK_SEPARATOR = "\r\n|\r\n".freeze
private_constant :CHUNK_SEPARATOR
STATUS_CHANNEL = "/__status".freeze
private_constant :STATUS_CHANNEL
STOPPED = 0
STARTED = 1
Stats = Struct.new(:failed, :success)
private_constant :Stats
# @param base_url [String] Base URL of the message_bus server to connect to
# @param enable_long_polling [Boolean] Enable long polling
# @param enable_chunked_encoding [Boolean] Enable chunk encoding
# @param min_poll_interval [Float, Integer] Min poll interval when long polling in seconds
# @param max_poll_interval [Float, Integer] Max poll interval when long polling in seconds.
# When requests fail, the client will backoff and this is the upper limit.
# @param background_callback_interval [Float, Integer] Interval to poll when
# when polling in seconds.
# @param headers [Hash] extra HTTP headers to be set on the polling requests.
#
# @return [Object] Instance of MessageBus::HTTPClient
def initialize(base_url, enable_long_polling: true,
enable_chunked_encoding: true,
min_poll_interval: 0.1,
max_poll_interval: 180,
background_callback_interval: 60,
headers: {})
@uri = URI(base_url)
@enable_long_polling = enable_long_polling
@enable_chunked_encoding = enable_chunked_encoding
@min_poll_interval = min_poll_interval
@max_poll_interval = max_poll_interval
@background_callback_interval = background_callback_interval
@headers = headers
@client_id = SecureRandom.hex
@channels = {}
@status = STOPPED
@mutex = Mutex.new
@stats = Stats.new(0, 0)
end
# Starts a background thread that polls the message bus endpoint
# for the given base_url.
#
# Intervals for long polling can be configured via min_poll_interval and
# max_poll_interval.
#
# Intervals for polling can be configured via background_callback_interval.
#
# @return [Object] Instance of MessageBus::HTTPClient
# Stops the client from polling the message bus endpoint.
#
# @return [Integer] the current status of the client
def stop
@status = STOPPED
end
# Subscribes to a channel which executes the given callback when a message
# is published to the channel
#
# @example Subscribing to a channel for message
# client = MessageBus::HTTPClient.new('http://some.test.com')
#
# client.subscribe("/test") do |payload, _message_id, _global_id|
# puts payload
# end
#
# A last_message_id may be provided.
# * -1 will subscribe to all new messages
# * -2 will recieve last message + all new messages
# * -3 will recieve last 2 message + all new messages
#
# @example Subscribing to a channel with `last_message_id`
# client.subscribe("/test", last_message_id: -2) do |payload|
# puts payload
# end
#
# @param channel [String] channel to listen for messages on
# @param last_message_id [Integer] last message id to start polling on.
#
# @yield [data, message_id, global_id]
# callback to be executed whenever a message is received
#
# @yieldparam data [Hash] data payload of the message received on the channel
# @yieldparam message_id [Integer] id of the message in the channel
# @yieldparam global_id [Integer] id of the message in the global backlog
# @yieldreturn [void]
#
# @return [Integer] the current status of the client
def subscribe(channel, last_message_id: nil, &callback)
raise InvalidChannel unless channel.to_s.start_with?("/")
raise MissingBlock unless block_given?
last_message_id = -1 if last_message_id && !last_message_id.is_a?(Integer)
@channels[channel] ||= Channel.new
channel = @channels[channel]
channel.last_message_id = last_message_id if last_message_id
channel.callbacks.push(callback)
start if stopped?
end
# unsubscribes from a channel
#
# @example Unsubscribing from a channel
# client = MessageBus::HTTPClient.new('http://some.test.com')
# callback = -> { |payload| puts payload }
# client.subscribe("/test", &callback)
# client.unsubscribe("/test")
#
# If a callback is given, only the specific callback will be unsubscribed.
#
# @example Unsubscribing a callback from a channel
# client.unsubscribe("/test", &callback)
#
# When the client does not have any channels left, it will stop polling and
# waits until a new subscription is started.
#
# @param channel [String] channel to unsubscribe
# @yield [data, global_id, message_id] specific callback to unsubscribe
#
# @return [Integer] the current status of the client
def unsubscribe(channel, &callback)
if callback
@channels[channel].callbacks.delete(callback)
remove_channel(channel) if @channels[channel].callbacks.empty?
else
remove_channel(channel)
end
stop if @channels.empty?
@status
end
private
def stopped?
@status == STOPPED
end
# True while the client is actively polling.
def started?
  STARTED == @status
end
# Drops +channel+ (and with it all registered callbacks) from the
# subscription map. Returns the removed Channel, or nil if it was not
# subscribed.
def remove_channel(channel)
@channels.delete(channel)
end
# Seconds to wait between poll cycles.
#
# Without long polling this is simply the background callback interval.
# With long polling, more than two consecutive failures (as reported by
# @stats.failed) back off exponentially, capped at @max_poll_interval.
def interval
  return @background_callback_interval unless @enable_long_polling

  failure_count = @stats.failed
  return @min_poll_interval if failure_count <= 2

  (@min_poll_interval * 2**failure_count)
    .clamp(@min_poll_interval, @max_poll_interval)
end
# Performs one poll cycle against the message-bus endpoint.
#
# In long-polling mode the chunked response body is accumulated into a
# buffer and parsed incrementally via #process_buffer; otherwise the whole
# body is parsed in one go and dispatched via #notify_channels.
def poll
http = Net::HTTP.new(@uri.host, @uri.port)
http.use_ssl = true if @uri.scheme == 'https'
request = Net::HTTP::Post.new(request_path, headers)
request.body = poll_payload
if @enable_long_polling
# the buffer persists across chunks: one payload may span chunk
# boundaries
buffer = ''
http.request(request) do |response|
response.read_body do |chunk|
unless chunk.empty?
buffer << chunk
process_buffer(buffer)
end
end
end
else
response = http.request(request)
notify_channels(JSON.parse(response.body))
end
end
# True when the poll response is expected as separator-delimited chunks,
# i.e. unless #headers sets "Dont-Chunk".
# NOTE(review): rebuilds the header hash on every call; wasteful but
# harmless. The `is_` prefix is non-idiomatic but kept for compatibility.
def is_chunked?
!headers["Dont-Chunk"]
end
# Extracts the next complete payload from +buffer+ (mutating it in place)
# and dispatches the parsed messages via #notify_channels.
#
# In chunked mode only the text before the first CHUNK_SEPARATOR is
# consumed; anything after it stays buffered for the next call. In
# non-chunked mode the whole buffer is consumed at once.
#
# Fixes over the previous version:
# * positional slicing is used, so a separator at position 0 no longer
#   makes `buffer[0..(index - 1)]` wrap around and treat the whole
#   (still incomplete) buffer as one message
# * an empty leading chunk is skipped instead of being fed to JSON.parse
def process_buffer(buffer)
  index = buffer.index(CHUNK_SEPARATOR)

  if is_chunked?
    return unless index

    messages = buffer[0, index]
    buffer.slice!(0, index + CHUNK_SEPARATOR.length)
    return if messages.empty?
  else
    messages = buffer.dup
    buffer.clear
  end

  notify_channels(JSON.parse(messages))
end
# Dispatches parsed poll +messages+.
#
# Messages on STATUS_CHANNEL carry a channel-name => last-message-id map
# used to fast-forward local cursors; every other message is delivered to
# the callbacks of its channel (if subscribed) together with the channel
# message id and the global backlog id.
def notify_channels(messages)
messages.each do |message|
current_channel = message['channel']
if current_channel == STATUS_CHANNEL
message["data"].each do |channel_name, last_message_id|
if (channel = @channels[channel_name])
channel.last_message_id = last_message_id
end
end
else
@channels.each do |channel_name, channel|
next unless channel_name == current_channel
# the cursor is advanced before the callbacks run
channel.last_message_id = message['message_id']
channel.callbacks.each do |callback|
callback.call(
message['data'],
channel.last_message_id,
message['global_id']
)
end
end
end
end
end
# Serializes the current subscription state into the JSON body sent to
# the poll endpoint: a map of channel name => last seen message id.
def poll_payload
  @channels
    .each_with_object({}) { |(name, channel), payload| payload[name] = channel.last_message_id }
    .to_json
end
# Path of the message-bus poll endpoint for this client.
def request_path
  format('/message-bus/%s/poll', @client_id)
end
# Request headers for the poll endpoint. Custom headers supplied at
# construction time (@headers) override the defaults. "Dont-Chunk" is
# set whenever long polling or chunked encoding is disabled.
def headers
  base = {
    'Content-Type' => 'application/json',
    'X-Silence-logger' => 'true'
  }
  base['Dont-Chunk'] = 'true' if !@enable_long_polling || !@enable_chunked_encoding
  base.merge!(@headers)
end
end
|
bitbucket-rest-api/bitbucket | lib/bitbucket_rest_api/issues/components.rb | BitBucket.Issues::Components.get | ruby | def get(user_name, repo_name, component_id, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
_validate_presence_of component_id
normalize! params
get_request("/1.0/repositories/#{user}/#{repo.downcase}/issues/components/#{component_id}", params)
end | Get a single component
= Examples
bitbucket = BitBucket.new
bitbucket.issues.components.find 'user-name', 'repo-name', 'component-id' | train | https://github.com/bitbucket-rest-api/bitbucket/blob/e03b6935104d59b3d9a922474c3dc210a5ef76d2/lib/bitbucket_rest_api/issues/components.rb#L36-L43 | class Issues::Components < API
VALID_COMPONENT_INPUTS = %w[ name ].freeze
# Creates new Issues::Components API
def initialize(options = {})
super(options)
end
# List all components for a repository
#
# = Examples
# bitbucket = BitBucket.new :user => 'user-name', :repo => 'repo-name'
# bitbucket.issues.components.list
# bitbucket.issues.components.list { |component| ... }
#
def list(user_name, repo_name, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
normalize! params
response = get_request("/1.0/repositories/#{user}/#{repo.downcase}/issues/components", params)
return response unless block_given?
response.each { |el| yield el }
end
alias :all :list
# Get a single component
#
# = Examples
# bitbucket = BitBucket.new
# bitbucket.issues.components.find 'user-name', 'repo-name', 'component-id'
#
alias :find :get
# Create a component
#
# = Inputs
# <tt>:name</tt> - Required string
#
# = Examples
# bitbucket = BitBucket.new :user => 'user-name', :repo => 'repo-name'
# bitbucket.issues.components.create :name => 'API'
#
def create(user_name, repo_name, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
normalize! params
filter! VALID_COMPONENT_INPUTS, params
assert_required_keys(VALID_COMPONENT_INPUTS, params)
post_request("/1.0/repositories/#{user}/#{repo.downcase}/issues/components", params)
end
# Update a component
#
# = Inputs
# <tt>:name</tt> - Required string
#
# = Examples
# @bitbucket = BitBucket.new
# @bitbucket.issues.components.update 'user-name', 'repo-name', 'component-id',
# :name => 'API'
#
def update(user_name, repo_name, component_id, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
_validate_presence_of component_id
normalize! params
filter! VALID_COMPONENT_INPUTS, params
assert_required_keys(VALID_COMPONENT_INPUTS, params)
put_request("/1.0/repositories/#{user}/#{repo.downcase}/issues/components/#{component_id}", params)
end
alias :edit :update
# Delete a component
#
# = Examples
# bitbucket = BitBucket.new
# bitbucket.issues.components.delete 'user-name', 'repo-name', 'component-id'
#
def delete(user_name, repo_name, component_id, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
_validate_presence_of component_id
normalize! params
delete_request("/1.0/repositories/#{user}/#{repo.downcase}/issues/components/#{component_id}", params)
end
end # Issues::Components
|
cookpad/rrrspec | rrrspec-client/lib/rrrspec/redis_models.rb | RRRSpec.Task.add_trial | ruby | def add_trial(trial)
RRRSpec.redis.rpush(RRRSpec.make_key(key, 'trial'),
trial.key)
end | Public: Add a trial of the task. | train | https://github.com/cookpad/rrrspec/blob/a5bde2b062ce68b1e32b8caddf194389c2ce28b0/rrrspec-client/lib/rrrspec/redis_models.rb#L640-L643 | class Task
attr_reader :key
# Wraps an existing task redis key.
def initialize(task_key)
@key = task_key
end
# Creates and persists a task under +taskset+, storing its estimate and
# spec file in a redis hash keyed by <taskset key>/task/<spec_file>.
#
# Returns the new Task.
def self.create(taskset, estimate_sec, spec_file)
task_key = RRRSpec.make_key(taskset.key, 'task', spec_file)
RRRSpec.redis.hmset(
task_key,
'taskset', taskset.key,
'estimate_sec', estimate_sec,
'spec_file', spec_file
)
return new(task_key)
end
# Tasks are equal when they point at the same redis key.
#
# Robustness fix: comparing against +nil+ or any object without a #key
# method now returns false instead of raising NoMethodError. Objects
# that do respond to #key compare exactly as before.
def ==(other)
  other.respond_to?(:key) && @key == other.key
end
# ==========================================================================
# Property

# Public: Estimated time to finish the task.
#
# Returns seconds, or nil if there is no estimation.
# NOTE(review): String#present? comes from ActiveSupport.
def estimate_sec
v = RRRSpec.redis.hget(key, 'estimate_sec')
v.present? ? v.to_i : nil
end
# Public: Spec file to run.
#
# Returns the stored path to the spec, or nil when the hash field is
# missing.
def spec_file
RRRSpec.redis.hget(key, 'spec_file')
end
# Public: Taskset this task belongs to.
#
# Returns a Taskset built from the stored taskset key.
def taskset
Taskset.new(RRRSpec.redis.hget(key, 'taskset'))
end
# ==========================================================================
# Trial

# Public: Returns the trials of the task, in the order they were added.
#
# Returns an Array of Trials.
def trials
  trial_keys = RRRSpec.redis.lrange(RRRSpec.make_key(key, 'trial'), 0, -1)
  trial_keys.map { |trial_key| Trial.new(trial_key) }
end
# ==========================================================================
# Status

# Public: Current status
#
# Returns either nil, "running", "passed", "pending" or "failed"
def status
RRRSpec.redis.hget(key, 'status')
end
# Public: Update the status. It should be one of:
# [nil, "running", "passed", "pending", "failed"]
#
# A blank status deletes the hash field instead of storing an empty
# value, so #status returns nil afterwards.
def update_status(status)
if status.present?
RRRSpec.redis.hset(key, 'status', status)
else
RRRSpec.redis.hdel(key, 'status')
end
end
# ==========================================================================
# Serialize

# Public: Hash representation: the raw redis hash plus key, trial and
# taskset references. 'estimate_sec' is converted back to an Integer
# when present.
def to_h
h = RRRSpec.redis.hgetall(key)
h['key'] = key
h['trials'] = trials.map { |trial| { 'key' => trial.key } }
h['taskset'] = { 'key' => h['taskset'] }
RRRSpec.convert_if_present(h, 'estimate_sec') { |v| v.to_i }
h
end
# Public: JSON representation of #to_h.
def to_json(options=nil)
to_h.to_json(options)
end
# ==========================================================================
# Persistence

# Public: Expire this task, all of its trials and the trial list itself
# after +sec+ seconds.
def expire(sec)
trials.each { |trial| trial.expire(sec) }
RRRSpec.redis.expire(key, sec)
RRRSpec.redis.expire(RRRSpec.make_key(key, 'trial'), sec)
end
end
|
ImpressCMS/vagrant-impressbox | lib/vagrant-impressbox/provisioner.rb | Impressbox.Provisioner.xaml_config | ruby | def xaml_config
require_relative File.join('objects', 'config_file')
file = detect_file(config.file)
@machine.ui.info "\t" + I18n.t('config.loaded_from_file', file: file)
Impressbox::Objects::ConfigFile.new file
end | Loads xaml config
@return [::Impressbox::Objects::ConfigFile] | train | https://github.com/ImpressCMS/vagrant-impressbox/blob/78dcd119a15ea6fbfd1f28c1e78f1cbf371bc6a2/lib/vagrant-impressbox/provisioner.rb#L81-L86 | class Provisioner < Vagrant.plugin('2', :provisioner)
# Stores loaded ConfigFile instance
#
#@return [::Impressbox::Objects::ConfigFile,nil]
@@__loaded_config = nil
# Object with loaded config from file
#
# NOTE(review): stored in a class variable, so the value is shared across
# all Provisioner instances (and any subclasses).
#
#@return [::Impressbox::Objects::ConfigFile,nil]
def self.loaded_config
@@__loaded_config
end
# Cleanup operations — intentionally a no-op.
def cleanup
end
# Do configuration operations
#
# Loads the config file (via #xaml_config), caches it in
# @@__loaded_config and then runs all "primary" configurators.
#
#@param root_config [Object] Current Vagrantfile configuration instance
def configure(root_config)
@@__loaded_config = xaml_config
run_primaty_configuration root_config
end
# Do provision tasks
#
# Runs every "provision" configurator that reports it can be configured
# for this machine and the cached config, logging each description.
def provision
mass_loader('provision').each do |configurator|
next unless configurator.can_be_configured?(@machine, @@__loaded_config)
@machine.ui.info configurator.description if configurator.description
configurator.configure @machine, @@__loaded_config
end
end
private
# Runs primary configuration
#
# NOTE(review): the method name has a typo ("primaty"); it is kept as-is
# because #configure calls it by this name.
#
# Shallow copies of the root config and loaded config are taken before
# the loop, so each configurator's #can_be_configured? check observes the
# state from before any configurator ran, while #configure receives the
# live root_config.
#
#@param root_config [Object] Root Vagrant config
def run_primaty_configuration(root_config)
old_root = root_config.dup
old_loaded = @@__loaded_config.dup
mass_loader('primary').each do |configurator|
next unless configurator.can_be_configured?(old_root, old_loaded)
@machine.ui.info configurator.description if configurator.description
configurator.configure root_config, old_loaded
end
end
# Builds a MassFileLoader preconfigured for the given configurator type
# ('primary' or 'provision').
#
#@param type [String] Files type
#
#@return [::Impressbox::Objects::MassFileLoader]
def mass_loader(type)
  Impressbox::Objects::MassFileLoader.new(
    "Impressbox::Configurators::#{ucfirst(type)}",
    File.join('..', 'configurators', type)
  )
end
# Returns a copy of +str+ with its first character upcased.
#
# Bug fix: the previous implementation mutated the argument in place via
# String#[]=, which corrupted the caller's string, raised FrozenError for
# frozen input and raised IndexError for an empty string.
#
#@param str [String] string to capitalize
#
#@return [String] new string with an upcased first character
def ucfirst(str)
  str.sub(/\A./) { |first_char| first_char.upcase }
end
# Loads xaml config
#
#@return [::Impressbox::Objects::ConfigFile]
# Returns +file+ when it exists on disk, otherwise falls back to the
# default 'config.yaml'.
#
#@param file [String] candidate config file path
#
#@return [String]
def detect_file(file)
  File.exist?(file) ? file : 'config.yaml'
end
end
|
grpc/grpc | src/ruby/lib/grpc/generic/client_stub.rb | GRPC.ClientStub.request_response | ruby | def request_response(method, req, marshal, unmarshal,
deadline: nil,
return_op: false,
parent: nil,
credentials: nil,
metadata: {})
c = new_active_call(method, marshal, unmarshal,
deadline: deadline,
parent: parent,
credentials: credentials)
interception_context = @interceptors.build_context
intercept_args = {
method: method,
request: req,
call: c.interceptable,
metadata: metadata
}
if return_op
# return the operation view of the active_call; define #execute as a
# new method for this instance that invokes #request_response.
c.merge_metadata_to_send(metadata)
op = c.operation
op.define_singleton_method(:execute) do
interception_context.intercept!(:request_response, intercept_args) do
c.request_response(req, metadata: metadata)
end
end
op
else
interception_context.intercept!(:request_response, intercept_args) do
c.request_response(req, metadata: metadata)
end
end
end | Creates a new ClientStub.
Minimally, a stub is created with the just the host of the gRPC service
it wishes to access, e.g.,
my_stub = ClientStub.new(example.host.com:50505,
:this_channel_is_insecure)
If a channel_override argument is passed, it will be used as the
underlying channel. Otherwise, the channel_args argument will be used
to construct a new underlying channel.
There are some specific keyword args that are not used to configure the
channel:
- :channel_override
when present, this must be a pre-created GRPC::Core::Channel. If it's
present the host and arbitrary keyword arg areignored, and the RPC
connection uses this channel.
- :timeout
when present, this is the default timeout used for calls
@param host [String] the host the stub connects to
@param creds [Core::ChannelCredentials|Symbol] the channel credentials, or
:this_channel_is_insecure, which explicitly indicates that the client
should be created with an insecure connection. Note: this argument is
ignored if the channel_override argument is provided.
@param channel_override [Core::Channel] a pre-created channel
@param timeout [Number] the default timeout to use in requests
@param propagate_mask [Number] A bitwise combination of flags in
GRPC::Core::PropagateMasks. Indicates how data should be propagated
from parent server calls to child client calls if this client is being
used within a gRPC server.
@param channel_args [Hash] the channel arguments. Note: this argument is
ignored if the channel_override argument is provided.
@param interceptors [Array<GRPC::ClientInterceptor>] An array of
GRPC::ClientInterceptor objects that will be used for
intercepting calls before they are executed
Interceptors are an EXPERIMENTAL API.
request_response sends a request to a GRPC server, and returns the
response.
== Flow Control ==
This is a blocking call.
* it does not return until a response is received.
* the requests is sent only when GRPC core's flow control allows it to
be sent.
== Errors ==
An RuntimeError is raised if
* the server responds with a non-OK status
* the deadline is exceeded
== Return Value ==
If return_op is false, the call returns the response
If return_op is true, the call returns an Operation, calling execute
on the Operation returns the response.
@param method [String] the RPC method to call on the GRPC server
@param req [Object] the request sent to the server
@param marshal [Function] f(obj)->string that marshals requests
@param unmarshal [Function] f(string)->obj that unmarshals responses
@param deadline [Time] (optional) the time the request should complete
@param return_op [true|false] return an Operation if true
@param parent [Core::Call] a prior call whose reserved metadata
will be propagated by this one.
@param credentials [Core::CallCredentials] credentials to use when making
the call
@param metadata [Hash] metadata to be sent to the server
@return [Object] the response received from the server | train | https://github.com/grpc/grpc/blob/f3937f0e55227a4ef3a23f895d3b204a947610f8/src/ruby/lib/grpc/generic/client_stub.rb#L148-L181 | class ClientStub
include Core::StatusCodes
include Core::TimeConsts
# Default timeout is infinity.
DEFAULT_TIMEOUT = INFINITE_FUTURE
# Builds (or passes through) the Core::Channel used by #initialize.
#
# A pre-created +alt_chan+ must be a Core::Channel and is returned
# unchanged. Otherwise a "grpc-ruby/<VERSION>" token is appended to the
# 'grpc.primary_user_agent' channel argument and a new channel is built
# from host/creds.
#
# NOTE(review): mutates the caller-supplied +channel_args+ hash in place.
def self.setup_channel(alt_chan, host, creds, channel_args = {})
unless alt_chan.nil?
fail(TypeError, '!Channel') unless alt_chan.is_a?(Core::Channel)
return alt_chan
end
if channel_args['grpc.primary_user_agent'].nil?
channel_args['grpc.primary_user_agent'] = ''
else
# separate our token from an existing user-agent string
channel_args['grpc.primary_user_agent'] += ' '
end
channel_args['grpc.primary_user_agent'] += "grpc-ruby/#{VERSION}"
unless creds.is_a?(Core::ChannelCredentials) || creds.is_a?(Symbol)
fail(TypeError, '!ChannelCredentials or Symbol')
end
Core::Channel.new(host, channel_args, creds)
end
# Allows users of the stub to modify the propagate mask.
#
# This is an advanced feature for use when making calls to another gRPC
# server whilst running in the handler of an existing one.
attr_writer :propagate_mask
# Creates a new ClientStub.
#
# Minimally, a stub is created with the just the host of the gRPC service
# it wishes to access, e.g.,
#
# my_stub = ClientStub.new(example.host.com:50505,
# :this_channel_is_insecure)
#
# If a channel_override argument is passed, it will be used as the
# underlying channel. Otherwise, the channel_args argument will be used
# to construct a new underlying channel.
#
# There are some specific keyword args that are not used to configure the
# channel:
#
# - :channel_override
# when present, this must be a pre-created GRPC::Core::Channel. If it's
# present the host and arbitrary keyword arg areignored, and the RPC
# connection uses this channel.
#
# - :timeout
# when present, this is the default timeout used for calls
#
# @param host [String] the host the stub connects to
# @param creds [Core::ChannelCredentials|Symbol] the channel credentials, or
# :this_channel_is_insecure, which explicitly indicates that the client
# should be created with an insecure connection. Note: this argument is
# ignored if the channel_override argument is provided.
# @param channel_override [Core::Channel] a pre-created channel
# @param timeout [Number] the default timeout to use in requests
# @param propagate_mask [Number] A bitwise combination of flags in
# GRPC::Core::PropagateMasks. Indicates how data should be propagated
# from parent server calls to child client calls if this client is being
# used within a gRPC server.
# @param channel_args [Hash] the channel arguments. Note: this argument is
# ignored if the channel_override argument is provided.
# @param interceptors [Array<GRPC::ClientInterceptor>] An array of
# GRPC::ClientInterceptor objects that will be used for
# intercepting calls before they are executed
# Interceptors are an EXPERIMENTAL API.
def initialize(host, creds,
               channel_override: nil,
               timeout: nil,
               propagate_mask: nil,
               channel_args: {},
               interceptors: [])
  @ch = ClientStub.setup_channel(channel_override, host, creds, channel_args)
  # an ssl target set in the channel args takes precedence over host
  ssl_target = channel_args[Core::Channel::SSL_TARGET]
  @host = ssl_target.nil? ? host : ssl_target
  @propagate_mask = propagate_mask
  @timeout = timeout.nil? ? DEFAULT_TIMEOUT : timeout
  @interceptors = InterceptorRegistry.new(interceptors)
end
# request_response sends a request to a GRPC server, and returns the
# response.
#
# == Flow Control ==
# This is a blocking call.
#
# * it does not return until a response is received.
#
# * the requests is sent only when GRPC core's flow control allows it to
# be sent.
#
# == Errors ==
# An RuntimeError is raised if
#
# * the server responds with a non-OK status
#
# * the deadline is exceeded
#
# == Return Value ==
#
# If return_op is false, the call returns the response
#
# If return_op is true, the call returns an Operation, calling execute
# on the Operation returns the response.
#
# @param method [String] the RPC method to call on the GRPC server
# @param req [Object] the request sent to the server
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param deadline [Time] (optional) the time the request should complete
# @param return_op [true|false] return an Operation if true
# @param parent [Core::Call] a prior call whose reserved metadata
# will be propagated by this one.
# @param credentials [Core::CallCredentials] credentials to use when making
# the call
# @param metadata [Hash] metadata to be sent to the server
# @return [Object] the response received from the server
# client_streamer sends a stream of requests to a GRPC server, and
# returns a single response.
#
# requests provides an 'iterable' of Requests. I.e. it follows Ruby's
# #each enumeration protocol. In the simplest case, requests will be an
# array of marshallable objects; in typical case it will be an Enumerable
# that allows dynamic construction of the marshallable objects.
#
# == Flow Control ==
# This is a blocking call.
#
# * it does not return until a response is received.
#
# * each requests is sent only when GRPC core's flow control allows it to
# be sent.
#
# == Errors ==
# An RuntimeError is raised if
#
# * the server responds with a non-OK status
#
# * the deadline is exceeded
#
# == Return Value ==
#
# If return_op is false, the call consumes the requests and returns
# the response.
#
# If return_op is true, the call returns the response.
#
# @param method [String] the RPC method to call on the GRPC server
# @param requests [Object] an Enumerable of requests to send
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param deadline [Time] (optional) the time the request should complete
# @param return_op [true|false] return an Operation if true
# @param parent [Core::Call] a prior call whose reserved metadata
# will be propagated by this one.
# @param credentials [Core::CallCredentials] credentials to use when making
# the call
# @param metadata [Hash] metadata to be sent to the server
# @return [Object|Operation] the response received from the server
# Sends an Enumerable of requests as a client-streaming RPC and returns
# the single response. With return_op: true a lazy Operation is returned
# instead; the call only happens when its #execute is invoked.
def client_streamer(method, requests, marshal, unmarshal,
deadline: nil,
return_op: false,
parent: nil,
credentials: nil,
metadata: {})
c = new_active_call(method, marshal, unmarshal,
deadline: deadline,
parent: parent,
credentials: credentials)
# every invocation gets a fresh interception context for the registered
# client interceptors
interception_context = @interceptors.build_context
intercept_args = {
method: method,
requests: requests,
call: c.interceptable,
metadata: metadata
}
if return_op
# return the operation view of the active_call; define #execute as a
# new method for this instance that invokes #client_streamer.
# Metadata is merged up-front so it is in place when #execute runs.
c.merge_metadata_to_send(metadata)
op = c.operation
op.define_singleton_method(:execute) do
interception_context.intercept!(:client_streamer, intercept_args) do
c.client_streamer(requests)
end
end
op
else
interception_context.intercept!(:client_streamer, intercept_args) do
c.client_streamer(requests, metadata: metadata)
end
end
end
# server_streamer sends one request to the GRPC server, which yields a
# stream of responses.
#
# responses provides an enumerator over the streamed responses, i.e. it
# follows Ruby's #each iteration protocol. The enumerator blocks while
# waiting for each response, stops when the server signals that no
# further responses will be supplied. If the implicit block is provided,
# it is executed with each response as the argument and no result is
# returned.
#
# == Flow Control ==
# This is a blocking call.
#
# * the request is sent only when GRPC core's flow control allows it to
# be sent.
#
# * the request will not complete until the server sends the final
# response followed by a status message.
#
# == Errors ==
# An RuntimeError is raised if
#
# * the server responds with a non-OK status when any response is
# * retrieved
#
# * the deadline is exceeded
#
# == Return Value ==
#
# if the return_op is false, the return value is an Enumerator of the
# results, unless a block is provided, in which case the block is
# executed with each response.
#
# if return_op is true, the function returns an Operation whose #execute
# method runs server streamer call. Again, Operation#execute either
# calls the given block with each response or returns an Enumerator of the
# responses.
#
# == Keyword Args ==
#
# Unspecified keyword arguments are treated as metadata to be sent to the
# server.
#
# @param method [String] the RPC method to call on the GRPC server
# @param req [Object] the request sent to the server
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param deadline [Time] (optional) the time the request should complete
# @param return_op [true|false]return an Operation if true
# @param parent [Core::Call] a prior call whose reserved metadata
# will be propagated by this one.
# @param credentials [Core::CallCredentials] credentials to use when making
# the call
# @param metadata [Hash] metadata to be sent to the server
# @param blk [Block] when provided, is executed for each response
# @return [Enumerator|Operation|nil] as discussed above
# Sends one request as a server-streaming RPC; responses are delivered to
# the given block or returned as an Enumerator. With return_op: true a
# lazy Operation is returned instead.
def server_streamer(method, req, marshal, unmarshal,
deadline: nil,
return_op: false,
parent: nil,
credentials: nil,
metadata: {},
&blk)
c = new_active_call(method, marshal, unmarshal,
deadline: deadline,
parent: parent,
credentials: credentials)
interception_context = @interceptors.build_context
intercept_args = {
method: method,
request: req,
call: c.interceptable,
metadata: metadata
}
if return_op
# return the operation view of the active_call; define #execute
# as a new method for this instance that invokes #server_streamer.
# Metadata is merged up-front so it is in place when #execute runs.
c.merge_metadata_to_send(metadata)
op = c.operation
op.define_singleton_method(:execute) do
interception_context.intercept!(:server_streamer, intercept_args) do
c.server_streamer(req, &blk)
end
end
op
else
interception_context.intercept!(:server_streamer, intercept_args) do
c.server_streamer(req, metadata: metadata, &blk)
end
end
end
# bidi_streamer sends a stream of requests to the GRPC server, and yields
# a stream of responses.
#
# This method takes an Enumerable of requests, and returns and enumerable
# of responses.
#
# == requests ==
#
# requests provides an 'iterable' of Requests. I.e. it follows Ruby's
# #each enumeration protocol. In the simplest case, requests will be an
# array of marshallable objects; in typical case it will be an
# Enumerable that allows dynamic construction of the marshallable
# objects.
#
# == responses ==
#
# This is an enumerator of responses. I.e, its #next method blocks
# waiting for the next response. Also, if at any point the block needs
# to consume all the remaining responses, this can be done using #each or
# #collect. Calling #each or #collect should only be done if
# the_call#writes_done has been called, otherwise the block will loop
# forever.
#
# == Flow Control ==
# This is a blocking call.
#
# * the call completes when the next call to provided block returns
# false
#
# * the execution block parameters are two objects for sending and
# receiving responses, each of which blocks waiting for flow control.
# E.g, calles to bidi_call#remote_send will wait until flow control
# allows another write before returning; and obviously calls to
# responses#next block until the next response is available.
#
# == Termination ==
#
# As well as sending and receiving messages, the block passed to the
# function is also responsible for:
#
# * calling bidi_call#writes_done to indicate no further reqs will be
# sent.
#
# * returning false if once the bidi stream is functionally completed.
#
# Note that response#next will indicate that there are no further
# responses by throwing StopIteration, but can only happen either
# if bidi_call#writes_done is called.
#
# To properly terminate the RPC, the responses should be completely iterated
# through; one way to do this is to loop on responses#next until no further
# responses are available.
#
# == Errors ==
# An RuntimeError is raised if
#
# * the server responds with a non-OK status when any response is
# * retrieved
#
# * the deadline is exceeded
#
#
# == Return Value ==
#
# if the return_op is false, the return value is an Enumerator of the
# results, unless a block is provided, in which case the block is
# executed with each response.
#
# if return_op is true, the function returns an Operation whose #execute
# method runs the Bidi call. Again, Operation#execute either calls a
# given block with each response or returns an Enumerator of the
# responses.
#
# @param method [String] the RPC method to call on the GRPC server
# @param requests [Object] an Enumerable of requests to send
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param deadline [Time] (optional) the time the request should complete
# @param return_op [true|false] return an Operation if true
# @param parent [Core::Call] a prior call whose reserved metadata
# will be propagated by this one.
# @param credentials [Core::CallCredentials] credentials to use when making
# the call
# @param metadata [Hash] metadata to be sent to the server
# @param blk [Block] when provided, is executed for each response
# @return [Enumerator|nil|Operation] as discussed above
# Sends an Enumerable of requests as a bidirectional-streaming RPC;
# responses are delivered to the given block or returned as an
# Enumerator. With return_op: true a lazy Operation is returned instead.
def bidi_streamer(method, requests, marshal, unmarshal,
deadline: nil,
return_op: false,
parent: nil,
credentials: nil,
metadata: {},
&blk)
c = new_active_call(method, marshal, unmarshal,
deadline: deadline,
parent: parent,
credentials: credentials)
interception_context = @interceptors.build_context
intercept_args = {
method: method,
requests: requests,
call: c.interceptable,
metadata: metadata
}
if return_op
# return the operation view of the active_call; define #execute
# as a new method for this instance that invokes #bidi_streamer.
# Metadata is merged up-front so it is in place when #execute runs.
c.merge_metadata_to_send(metadata)
op = c.operation
op.define_singleton_method(:execute) do
interception_context.intercept!(:bidi_streamer, intercept_args) do
c.bidi_streamer(requests, &blk)
end
end
op
else
interception_context.intercept!(:bidi_streamer, intercept_args) do
c.bidi_streamer(requests, metadata: metadata, &blk)
end
end
end
private
# Creates a new active stub
#
# @param method [string] the method being called.
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param parent [Grpc::Call] a parent call, available when calls are
# made from server
# @param credentials [Core::CallCredentials] credentials to use when making
# the call
def new_active_call(method, marshal, unmarshal,
deadline: nil,
parent: nil,
credentials: nil)
# fall back to the stub-wide timeout when the caller gave no deadline
deadline = from_relative_time(@timeout) if deadline.nil?
# Provide each new client call with its own completion queue
call = @ch.create_call(parent, # parent call
@propagate_mask, # propagation options
method,
nil, # host use nil,
deadline)
call.set_credentials! credentials unless credentials.nil?
# started: false — the call is only issued later by the caller
ActiveCall.new(call, marshal, unmarshal, deadline,
started: false)
end
end
|
CocoaPods/Xcodeproj | lib/xcodeproj/workspace.rb | Xcodeproj.Workspace.save_as | ruby | def save_as(path)
FileUtils.mkdir_p(path)
File.open(File.join(path, 'contents.xcworkspacedata'), 'w') do |out|
out << to_s
end
end | Saves the workspace at the given `xcworkspace` path.
@param [String] path
the path where to save the project.
@return [void] | train | https://github.com/CocoaPods/Xcodeproj/blob/3be1684437a6f8e69c7836ad4c85a2b78663272f/lib/xcodeproj/workspace.rb#L179-L184 | class Workspace
# @return [REXML::Document] the parsed XML model for the workspace contents
attr_reader :document
# @return [Hash<String => String>] a mapping from scheme name to project full path
# containing the scheme
attr_reader :schemes
# @return [Array<FileReference>] the paths of the projects contained in the
# workspace.
#
def file_references
  # An unparsed / absent workspace document contains no project entries.
  nodes = @document ? @document.get_elements('/Workspace//FileRef') : []
  nodes.map { |node| FileReference.from_node(node) }
end
# @return [Array<GroupReference>] the groups contained in the workspace
#
def group_references
  # An unparsed / absent workspace document contains no groups.
  nodes = @document ? @document.get_elements('/Workspace//Group') : []
  nodes.map { |node| GroupReference.from_node(node) }
end
# @param [REXML::Document] document @see document
# @param [Array<FileReference>] file_references additional projects to add
#
# @note The document parameter is passed to the << operator if it is not a
# valid REXML::Document. It is optional, but may also be passed as nil
#
def initialize(document, *file_references)
@schemes = {}
if document.nil?
@document = REXML::Document.new(root_xml(''))
elsif document.is_a?(REXML::Document)
@document = document
else
@document = REXML::Document.new(root_xml(''))
self << document
end
file_references.each { |ref| self << ref }
end
#-------------------------------------------------------------------------#
# Returns a workspace generated by reading the contents of the given path.
#
# @param [String] path
# the path of the `xcworkspace` file.
#
# @return [Workspace] the generated workspace.
#
# Builds a workspace from the `contents.xcworkspacedata` file inside the
# given bundle path; a missing file yields an empty workspace
# (Errno::ENOENT is rescued).
def self.new_from_xcworkspace(path)
from_s(File.read(File.join(path, 'contents.xcworkspacedata')),
File.expand_path(path))
rescue Errno::ENOENT
new(nil)
end
#-------------------------------------------------------------------------#
# Returns a workspace generated by reading the contents of the given
# XML representation.
#
# @param [String] xml
#        the XML representation of the workspace.
#
# @return [Workspace] the generated workspace.
#
def self.from_s(xml, workspace_path = '')
  workspace = new(REXML::Document.new(xml))
  workspace.load_schemes(workspace_path)
  workspace
end
# Adds a new path to the list of projects contained in the workspace.
#
# @param [String, Xcodeproj::Workspace::FileReference] path_or_reference
#        A string or Xcode::Workspace::FileReference containing a path to an Xcode project
#
# @raise [ArgumentError] Raised if the input is neither a String nor a FileReference
#
# @return [void]
#
def <<(path_or_reference)
  return unless @document && @document.respond_to?(:root)
  project_file_reference =
    case path_or_reference
    when String
      Xcodeproj::Workspace::FileReference.new(path_or_reference)
    when Xcodeproj::Workspace::FileReference
      path_or_reference
    else
      raise ArgumentError, "Input to the << operator must be a file path or FileReference, got #{path_or_reference.inspect}"
    end
  @document.root.add_element(project_file_reference.to_node)
  # NOTE: a previous revision threaded a `projpath` local through here, but it
  # was always nil, so the scheme lookup always used the reference's own path.
  load_schemes_from_project File.expand_path(project_file_reference.path)
end
#-------------------------------------------------------------------------#
# Adds a new group container to the workspace.
#
# @param [String] name The name of the group
#
# @yield [Xcodeproj::Workspace::GroupReference, REXML::Element]
#        Yields the GroupReference and underlying XML element for mutation
#
# @return [Xcodeproj::Workspace::GroupReference] The added group reference,
#         or nil when the workspace has no backing document.
#
def add_group(name)
  return nil unless @document
  group_reference = Xcodeproj::Workspace::GroupReference.new(name)
  element = @document.root.add_element(group_reference.to_node)
  yield group_reference, element if block_given?
  group_reference
end
# Checks if the workspace contains the project with the given file
# reference.
#
# @param [FileReference] file_reference
#        The file_reference to the project.
#
# @return [Boolean] whether the project is contained in the workspace.
#
def include?(file_reference)
  file_references.any? { |reference| reference == file_reference }
end
# @return [String] the XML representation of the workspace, serialized with
#         the element layout Xcode itself uses for `contents.xcworkspacedata`.
#
def to_s
  # Use in-place `<<` appends: `+=` would reallocate the whole accumulator
  # string on every element.
  contents = ''.dup
  stack = []
  @document.root.each_recursive do |elem|
    # Pop (and close) elements until the top of the stack is the parent of
    # the element being visited.
    until stack.empty?
      last = stack.last
      break if last == elem.parent
      contents << xcworkspace_element_end_xml(stack.length, last)
      stack.pop
    end
    stack << elem
    contents << xcworkspace_element_start_xml(stack.length, elem)
  end
  # Close any elements still open after the traversal.
  until stack.empty?
    contents << xcworkspace_element_end_xml(stack.length, stack.last)
    stack.pop
  end
  root_xml(contents)
end
# Saves the workspace at the given `xcworkspace` path.
#
# @param [String] path
# the path where to save the project.
#
# @return [void]
#
#-------------------------------------------------------------------------#
# Load all schemes from all projects in workspace or in the workspace
# container itself.
#
# @param [String] workspace_dir_path
#        path of workspaces dir
#
# @return [void]
#
def load_schemes(workspace_dir_path)
  # Normalize to the directory containing the workspace; needed for
  # file_reference.absolute_path.
  workspaces_dir =
    if File.extname(workspace_dir_path) == '.xcworkspace'
      File.expand_path('..', workspace_dir_path)
    else
      workspace_dir_path
    end
  file_references.each do |reference|
    load_schemes_from_project(reference.absolute_path(workspaces_dir))
  end
  # Schemes shared in the workspace container itself.
  workspace_abs_path = File.absolute_path(workspace_dir_path)
  pattern = File.join(workspace_dir_path, 'xcshareddata', 'xcschemes', '*.xcscheme')
  Dir[pattern].each do |scheme_path|
    scheme_name = File.basename(scheme_path, '.xcscheme')
    @schemes[scheme_name] = workspace_abs_path
  end
end
private

# Load all schemes from a single project and record which project file each
# scheme belongs to.
#
# @param [String] project_full_path
#        project full path
#
# @return [void]
#
def load_schemes_from_project(project_full_path)
  Xcodeproj::Project.schemes(project_full_path).each do |scheme_name|
    @schemes[scheme_name] = project_full_path
  end
end
# @return [String] The template of the workspace XML as formatted by Xcode.
#
# @param [String] contents The XML contents of the workspace.
#
# NOTE(review): the heredoc body is whitespace-sensitive — it is emitted
# verbatim into `contents.xcworkspacedata` — so do not re-indent it.
def root_xml(contents)
<<-DOC
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
#{contents.rstrip}
</Workspace>
DOC
end
#
# @param [Integer] depth The depth of the element in the tree
# @param [REXML::Document::Element] elem The XML element to format.
#
# @return [String] The Xcode-specific XML formatting of an element start
#
def xcworkspace_element_start_xml(depth, elem)
  # Group elements carry a name in addition to a location; FileRef only a
  # location. Any other element name is unexpected and will raise below.
  attr_names =
    case elem.name
    when 'Group'   then %w(location name)
    when 'FileRef' then %w(location)
    end
  indent = ' ' * depth
  body = "<#{elem.name}"
  attr_names.each do |attr_name|
    body += "\n #{attr_name} = \"#{elem.attribute(attr_name)}\""
  end
  lines = body.split("\n")
  lines.map { |line| "#{indent}#{line}" }.join("\n") + ">\n"
end
#
# @param [Integer] depth The depth of the element in the tree
# @param [REXML::Document::Element] elem The XML element to format.
#
# @return [String] The Xcode-specific XML formatting of an element end
#
def xcworkspace_element_end_xml(depth, elem)
  indent = ' ' * depth
  "#{indent}</#{elem.name}>\n"
end
#-------------------------------------------------------------------------#
end
|
End of preview. Expand
in Dataset Viewer.
- Downloads last month
- 34