repository_name (string, 7-56 chars) | func_path_in_repository (string, 10-101 chars) | func_name (string, 12-78 chars) | language (1 class) | func_code_string (string, 74-11.9k chars) | func_documentation_string (string, 3-8.03k chars) | split_name (1 class) | func_code_url (string, 98-213 chars) | enclosing_scope (string, 42-98.2k chars) |
---|---|---|---|---|---|---|---|---|
barkerest/incline | lib/incline/extensions/jbuilder_template.rb | Incline::Extensions.JbuilderTemplate.api_errors! | ruby | def api_errors!(model_name, model_errors)
base_error = model_errors[:base]
field_errors = model_errors.reject{ |k,_| k == :base }
unless base_error.blank?
set! 'error', "#{model_name.humanize} #{base_error.map{|e| h(e.to_s)}.join("<br>\n#{model_name.humanize} ")}"
end
unless field_errors.blank?
set! 'fieldErrors' do
array! field_errors do |k,v|
set! 'name', "#{model_name}.#{k}"
set! 'status', v.is_a?(::Array) ?
"#{k.to_s.humanize} #{v.map{|e| h(e.to_s)}.join("<br>\n#{k.to_s.humanize} ")}" :
"#{k.to_s.humanize} #{h v.to_s}"
end
end
end
end | List out the errors for the model.
model_name:: The singular name for the model (e.g. - "user_account")
model_errors:: The errors collection from the model.
json.api_errors! "user_account", user.errors | train | https://github.com/barkerest/incline/blob/1ff08db7aa8ab7f86b223268b700bc67d15bb8aa/lib/incline/extensions/jbuilder_template.rb#L17-L33 | module JbuilderTemplate
##
# List out the errors for the model.
#
# model_name:: The singular name for the model (e.g. - "user_account")
# model_errors:: The errors collection from the model.
#
# json.api_errors! "user_account", user.errors
#
end
|
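A brief usage sketch for the `api_errors!` helper in the row above, assuming a Jbuilder view rendered after a failed save; the view path, model, field, and messages are hypothetical, but the "error"/"fieldErrors" keys and the name/status shapes follow directly from the method body.

# app/views/user_accounts/create.json.jbuilder (hypothetical path)
json.api_errors! "user_account", user.errors

# Illustrative output for one base error plus one field error:
# {
#   "error": "User account could not be saved",
#   "fieldErrors": [
#     { "name": "user_account.email", "status": "Email can't be blank" }
#   ]
# }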
ankane/ahoy | lib/ahoy/tracker.rb | Ahoy.Tracker.track | ruby | def track(name, properties = {}, options = {})
if exclude?
debug "Event excluded"
elsif missing_params?
debug "Missing required parameters"
else
data = {
visit_token: visit_token,
user_id: user.try(:id),
name: name.to_s,
properties: properties,
time: trusted_time(options[:time]),
event_id: options[:id] || generate_id
}.select { |_, v| v }
@store.track_event(data)
end
true
rescue => e
report_exception(e)
end | can't use keyword arguments here | train | https://github.com/ankane/ahoy/blob/514e4f9aed4ff87be791e4d8b73b0f2788233ba8/lib/ahoy/tracker.rb#L18-L38 | class Tracker
UUID_NAMESPACE = "a82ae811-5011-45ab-a728-569df7499c5f"
attr_reader :request, :controller
def initialize(**options)
@store = Ahoy::Store.new(options.merge(ahoy: self))
@controller = options[:controller]
@request = options[:request] || @controller.try(:request)
@visit_token = options[:visit_token]
@options = options
end
# can't use keyword arguments here
def track_visit(defer: false, started_at: nil)
if exclude?
debug "Visit excluded"
elsif missing_params?
debug "Missing required parameters"
else
if defer
set_cookie("ahoy_track", true, nil, false)
else
delete_cookie("ahoy_track")
data = {
visit_token: visit_token,
visitor_token: visitor_token,
user_id: user.try(:id),
started_at: trusted_time(started_at),
}.merge(visit_properties).select { |_, v| v }
@store.track_visit(data)
Ahoy::GeocodeV2Job.perform_later(visit_token, data[:ip]) if Ahoy.geocode
end
end
true
rescue => e
report_exception(e)
end
def geocode(data)
if exclude?
debug "Geocode excluded"
else
data = {
visit_token: visit_token
}.merge(data).select { |_, v| v }
@store.geocode(data)
true
end
rescue => e
report_exception(e)
end
def authenticate(user)
if exclude?
debug "Authentication excluded"
else
@store.user = user
data = {
visit_token: visit_token,
user_id: user.try(:id)
}
@store.authenticate(data)
end
true
rescue => e
report_exception(e)
end
def visit
@visit ||= @store.visit
end
def visit_or_create
@visit ||= @store.visit_or_create
end
def new_visit?
Ahoy.cookies ? !existing_visit_token : visit.nil?
end
def new_visitor?
!existing_visitor_token
end
def set_visit_cookie
set_cookie("ahoy_visit", visit_token, Ahoy.visit_duration)
end
def set_visitor_cookie
if new_visitor?
set_cookie("ahoy_visitor", visitor_token, Ahoy.visitor_duration)
end
end
def user
@user ||= @store.user
end
# TODO better name
def visit_properties
@visit_properties ||= Ahoy::VisitProperties.new(request, api: api?).generate
end
def visit_token
@visit_token ||= ensure_token(visit_token_helper)
end
alias_method :visit_id, :visit_token
def visitor_token
@visitor_token ||= ensure_token(visitor_token_helper)
end
alias_method :visitor_id, :visitor_token
def reset
reset_visit
delete_cookie("ahoy_visitor")
end
def reset_visit
delete_cookie("ahoy_visit")
delete_cookie("ahoy_events")
delete_cookie("ahoy_track")
end
protected
def api?
@options[:api]
end
def missing_params?
if Ahoy.cookies && api? && Ahoy.protect_from_forgery
!(existing_visit_token && existing_visitor_token)
else
false
end
end
def set_cookie(name, value, duration = nil, use_domain = true)
# safety net
return unless Ahoy.cookies
cookie = {
value: value
}
cookie[:expires] = duration.from_now if duration
domain = Ahoy.cookie_domain
cookie[:domain] = domain if domain && use_domain
request.cookie_jar[name] = cookie
end
def delete_cookie(name)
request.cookie_jar.delete(name) if request.cookie_jar[name]
end
def trusted_time(time = nil)
if !time || (api? && !(1.minute.ago..Time.now).cover?(time))
Time.zone.now
else
time
end
end
def exclude?
@store.exclude?
end
def report_exception(e)
raise e if Rails.env.development? || Rails.env.test?
Safely.report_exception(e)
end
def generate_id
@store.generate_id
end
def visit_token_helper
@visit_token_helper ||= begin
token = existing_visit_token
token ||= visit_anonymity_set unless Ahoy.cookies
token ||= generate_id unless Ahoy.api_only
token
end
end
def visitor_token_helper
@visitor_token_helper ||= begin
token = existing_visitor_token
token ||= visitor_anonymity_set unless Ahoy.cookies
token ||= generate_id unless Ahoy.api_only
token
end
end
def existing_visit_token
@existing_visit_token ||= begin
token = visit_header
token ||= visit_cookie if Ahoy.cookies && !(api? && Ahoy.protect_from_forgery)
token ||= visit_param if api?
token
end
end
def existing_visitor_token
@existing_visitor_token ||= begin
token = visitor_header
token ||= visitor_cookie if Ahoy.cookies && !(api? && Ahoy.protect_from_forgery)
token ||= visitor_param if api?
token
end
end
def visit_anonymity_set
@visit_anonymity_set ||= Digest::UUID.uuid_v5(UUID_NAMESPACE, ["visit", Ahoy.mask_ip(request.remote_ip), request.user_agent].join("/"))
end
def visitor_anonymity_set
@visitor_anonymity_set ||= Digest::UUID.uuid_v5(UUID_NAMESPACE, ["visitor", Ahoy.mask_ip(request.remote_ip), request.user_agent].join("/"))
end
def visit_cookie
@visit_cookie ||= request && request.cookies["ahoy_visit"]
end
def visitor_cookie
@visitor_cookie ||= request && request.cookies["ahoy_visitor"]
end
def visit_header
@visit_header ||= request && request.headers["Ahoy-Visit"]
end
def visitor_header
@visitor_header ||= request && request.headers["Ahoy-Visitor"]
end
def visit_param
@visit_param ||= request && request.params["visit_token"]
end
def visitor_param
@visitor_param ||= request && request.params["visitor_token"]
end
def ensure_token(token)
token = Ahoy::Utils.ensure_utf8(token)
token.to_s.gsub(/[^a-z0-9\-]/i, "").first(64) if token
end
def debug(message)
Ahoy.log message
end
end
|
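A short controller-side sketch of the `track` call documented above; Ahoy exposes the tracker as `ahoy` inside Rails controllers, but the controller, event name, and properties here are made up.

class OrdersController < ApplicationController
  def create
    # name, properties hash, then an options hash (:time / :id), matching
    # track(name, properties = {}, options = {}) above.
    ahoy.track "Order created", product_id: params[:product_id]
    # With explicit options:
    # ahoy.track "Order created", { product_id: params[:product_id] }, time: Time.zone.now
    head :ok
  end
end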
wvanbergen/request-log-analyzer | lib/request_log_analyzer/output/html.rb | RequestLogAnalyzer::Output.HTML.tag | ruby | def tag(tag, content = nil, attributes = nil)
if block_given?
attributes = content.nil? ? '' : ' ' + content.map { |(key, value)| "#{key}=\"#{value}\"" }.join(' ')
content_string = ''
content = yield(content_string)
content = content_string unless content_string.empty?
"<#{tag}#{attributes}>#{content}</#{tag}>"
else
attributes = attributes.nil? ? '' : ' ' + attributes.map { |(key, value)| "#{key}=\"#{value}\"" }.join(' ')
if content.nil?
"<#{tag}#{attributes} />"
else
if content.class == Float
"<#{tag}#{attributes}><div class='color_bar' style=\"width:#{(content * 200).floor}px;\"/></#{tag}>"
else
"<#{tag}#{attributes}>#{content}</#{tag}>"
end
end
end
end | HTML tag writer helper
<tt>tag</tt> The tag to generate
<tt>content</tt> The content inside the tag
<tt>attributes</tt> Attributes to write in the tag | train | https://github.com/wvanbergen/request-log-analyzer/blob/b83865d440278583ac8e4901bb33878244fd7c75/lib/request_log_analyzer/output/html.rb#L163-L182 | class HTML < Base
# def initialize(io, options = {})
# super(io, options)
# end
def colorize(text, *style)
if style.include?(:bold)
tag(:strong, text)
else
text
end
end
# Print a string to the io object.
def print(str)
@io << str
end
alias_method :<<, :print
# Put a string with newline
def puts(str = '')
@io << str << "<br/>\n"
end
# Place a title
def title(title)
@io.puts(tag(:h2, title))
end
# Render a single line
# <tt>*font</tt> The font.
def line(*_font)
@io.puts(tag(:hr))
end
# Write a link
# <tt>text</tt> The text in the link
# <tt>url</tt> The url to link to.
def link(text, url = nil)
url = text if url.nil?
tag(:a, text, href: url)
end
# Generate a report table in HTML and push it into the output object.
# <tt>*colums<tt> Columns hash
# <tt>&block</tt>: A block yeilding the rows.
def table(*columns, &_block)
rows = []
yield(rows)
@io << tag(:table, class: 'rla-report-table', cellspacing: 0) do |content|
if table_has_header?(columns)
content << tag(:tr) do
columns.map { |col| tag(:th, col[:title]) }.join("\n")
end
end
odd = false
rows.each do |row|
odd = !odd
content << tag(:tr) do
if odd
row.map { |cell| tag(:td, cell, class: 'alt') }.join("\n")
else
row.map { |cell| tag(:td, cell) }.join("\n")
end
end
end
end
end
# Genrate HTML header and associated stylesheet
def header
@io.content_type = content_type if @io.respond_to?(:content_type)
@io << '<html>'
@io << tag(:head) do |headers|
headers << tag(:title, 'Request-log-analyzer report')
headers << tag(:style, '
body {
font: normal 11px auto "Trebuchet MS", Verdana, Arial, Helvetica, sans-serif;
color: #4f6b72;
background: #E6EAE9;
padding-left:20px;
padding-top:20px;
padding-bottom:20px;
}
a {
color: #c75f3e;
}
.color_bar {
border: 1px solid;
height:10px;
background: #CAE8EA;
}
.rla-report-table {
width: 700px;
padding: 0;
margin: 0;
padding-bottom:10px;
}
caption {
padding: 0 0 5px 0;
width: 700px;
font: italic 11px "Trebuchet MS", Verdana, Arial, Helvetica, sans-serif;
text-align: right;
}
th {
font: bold 11px "Trebuchet MS", Verdana, Arial, Helvetica, sans-serif;
color: #4f6b72;
border-right: 1px solid #C1DAD7;
border-bottom: 1px solid #C1DAD7;
border-top: 1px solid #C1DAD7;
letter-spacing: 2px;
text-transform: uppercase;
text-align: left;
padding: 6px 6px 6px 12px;
background: #CAE8EA url(images/bg_header.jpg) no-repeat;
}
td {
font: bold 11px "Trebuchet MS", Verdana, Arial, Helvetica, sans-serif;
border-right: 1px solid #C1DAD7;
border-bottom: 1px solid #C1DAD7;
background: #fff;
padding: 6px 6px 6px 12px;
color: #4f6b72;
}
td.alt {
background: #F5FAFA;
color: #797268;
}
', type: 'text/css')
end
@io << '<body>'
@io << tag(:h1, 'Request-log-analyzer summary report')
@io << tag(:p, "Version #{RequestLogAnalyzer::VERSION} - written by Willem van Bergen and Bart ten Brinke")
end
# Generate a footer for a report
def footer
@io << tag(:hr) << tag(:h2, 'Thanks for using request-log-analyzer')
@io << tag(:p, 'For more information please visit the ' + link('Request-log-analyzer website', 'http://github.com/wvanbergen/request-log-analyzer'))
@io << tag(:p, 'If you need an expert who can analyze your application, mail to ' + link('contact@railsdoctors.com', 'mailto:contact@railsdoctors.com') + ' or visit us at ' + link('http://railsdoctors.com', 'http://railsdoctors.com') + '.')
@io << "</body></html>\n"
end
protected
# HTML tag writer helper
# <tt>tag</tt> The tag to generate
# <tt>content</tt> The content inside the tag
# <tt>attributes</tt> Attributes to write in the tag
end
|
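A sketch of what the protected `tag` helper above produces, plus the public `table` call that exercises it; the io-based constructor, column titles, and row values are assumptions for illustration.

require "stringio"

# tag(:td, "0.39s", class: "alt")  #=> <td class="alt">0.39s</td>
# tag(:td, 0.75)                   #=> <td><div class='color_bar' style="width:150px;"/></td>
# tag(:br)                         #=> <br />

out = StringIO.new
report = RequestLogAnalyzer::Output::HTML.new(out)   # io-based Base constructor assumed
report.table({ title: "Request" }, { title: "Hits" }) do |rows|
  rows << ["GET /users", 120]   # each row becomes a <tr> of <td> cells
end
puts out.string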
yaauie/implements | lib/implements/implementation/registry/finder.rb | Implements.Implementation::Registry::Finder.find | ruby | def find(*args)
@registry.elements(@selectors).each do |config|
next unless config.check?(*args)
return config.implementation
end
fail(Implementation::NotFound,
"no compatible implementation for #{inspect}")
end | Find a suitable implementation of the given interface,
given the args that would be passed to its #initialize
and our selectors
@api private | train | https://github.com/yaauie/implements/blob/27c698d283dbf71d04721b4cf4929d53b4a99cb7/lib/implements/implementation/registry/finder.rb#L27-L35 | class Implementation::Registry::Finder
# @api private
# @param registry [Implementation::Registry]
# @param selectors [Array<#===>] Typically an array of strings
def initialize(registry, selectors)
@registry = registry
@selectors = selectors
end
# Returns an instance of the @registry.interface that supports the given
# arguments.
# @api private
def new(*args, &block)
implementation = find(*args)
implementation.new(*args, &block)
end
# Find a suitable implementation of the given interface,
# given the args that would be passed to its #initialize
# and our selectors
# @api private
# @api private
def inspect
"<#{@registry.interface}::implementation(#{@selectors.join(', ')})>"
end
end
|
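A self-contained sketch of the finder protocol implied by the code above: a registry exposing #elements(selectors) and configs answering #check? and #implementation. FakeRegistry and FakeConfig are duck-typed stand-ins, not part of the implements gem, and the require path is assumed.

require "implements"   # assumed require path for the gem

# Stand-ins for the duck types Finder touches.
FakeConfig = Struct.new(:implementation) do
  def check?(*args)
    args.first.is_a?(String)
  end
end
FakeRegistry = Struct.new(:interface) do
  def elements(_selectors)
    [FakeConfig.new(String)]
  end
end

finder = Implements::Implementation::Registry::Finder.new(FakeRegistry.new("Parser"), ["default"])
finder.new("hello")   # => "hello" -- the first config whose check? passes supplies the class
# finder.new(42)      # would raise Implementation::NotFound (no config accepts an Integer)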
projectcypress/health-data-standards | lib/hqmf-parser/2.0/population_criteria.rb | HQMF2.PopulationCriteria.handle_observation_criteria | ruby | def handle_observation_criteria
exp = @entry.at_xpath('./cda:measureObservationDefinition/cda:value/cda:expression/@value',
HQMF2::Document::NAMESPACES)
# Measure Observations criteria rely on computed expressions. If it doesn't have one,
# then it is likely formatted improperly.
fail 'Measure Observations criteria is missing computed expression(s) ' if exp.nil?
parts = exp.to_s.split('-')
dc = parse_parts_to_dc(parts)
@doc.add_data_criteria(dc)
# Update reference_ids with any newly referenced data criteria
dc.children_criteria.each { |cc| @doc.add_reference_id(cc) } unless dc.children_criteria.nil?
dc
end | extracts out any measure observation definitions, creating from them the proper criteria to generate a precondition | train | https://github.com/projectcypress/health-data-standards/blob/252d4f0927c513eacde6b9ea41b76faa1423c34b/lib/hqmf-parser/2.0/population_criteria.rb#L69-L81 | class PopulationCriteria
include HQMF2::Utilities
attr_reader :preconditions, :id, :hqmf_id, :title, :aggregator, :comments
# need to do this to allow for setting the type to OBSERV for
attr_accessor :type
# Create a new population criteria from the supplied HQMF entry
# @param [Nokogiri::XML::Element] the HQMF entry
def initialize(entry, doc, id_generator)
@id_generator = id_generator
@doc = doc
@entry = entry
setup_derived_entry_elements(id_generator)
# modify type to meet current expected population names
@type = 'IPP' if @type == 'IPOP' || @type == 'IPPOP'
@comments = nil if comments.empty?
# MEAN is handled in current code. Changed since it should have the same effect
@aggregator = 'MEAN' if @aggregator == 'AVERAGE'
@hqmf_id = @type unless @hqmf_id # The id extension is not required, if it's not provided use the code
handle_type(id_generator)
end
# Handles how the code should deal with the type definition (aggregate vs non-aggregate)
def handle_type(id_generator)
if @type != 'AGGREGATE'
# Generate the precondition for this population
if @preconditions.length > 1 ||
(@preconditions.length == 1 && @preconditions[0].conjunction != conjunction_code)
@preconditions = [Precondition.new(id_generator.next_id, conjunction_code, @preconditions)]
end
else
# Extract the data criteria this population references
dc = handle_observation_criteria
@preconditions = [Precondition.new(id_generator.next_id, nil, nil, false, HQMF2::Reference.new(dc.id))]
end
end
# Handles extracting elements from the entry
def setup_derived_entry_elements(id_generator)
@hqmf_id = attr_val('./*/cda:id/@root') || attr_val('./*/cda:typeId/@extension')
@title = attr_val('./*/cda:code/cda:displayName/@value').try(:titleize)
@type = attr_val('./*/cda:code/@code')
@comments = @entry.xpath('./*/cda:text/cda:xml/cda:qdmUserComments/cda:item/text()', HQMF2::Document::NAMESPACES)
.map(&:content)
handle_preconditions(id_generator)
obs_test = attr_val('./cda:measureObservationDefinition/@classCode')
# If there are no measure observations, or there is a title, then there are no aggregations to extract
return unless !@title && obs_test.to_s == 'OBS'
@title = attr_val('../cda:code/cda:displayName/@value')
@aggregator = attr_val('./cda:measureObservationDefinition/cda:methodCode/cda:item/@code')
end
# specifically handles extracting the preconditions for the population criteria
def handle_preconditions(id_generator)
# Nest multiple preconditions under a single root precondition
@preconditions = @entry.xpath('./*/cda:precondition[not(@nullFlavor)]', HQMF2::Document::NAMESPACES)
.collect do |pre|
precondition = Precondition.parse(pre, @doc, id_generator)
precondition.reference.nil? && precondition.preconditions.empty? ? nil : precondition
end
# Remove uneeded nils from the array
@preconditions.compact!
end
# extracts out any measure observation definitions, creating from them the proper criteria to generate a precondition
# generates the value given in an expression based on the number of criteria it references.
def parse_parts_to_dc(parts)
case parts.length
when 1
# If there is only one part, it is a reference to an existing data criteria's value
@doc.find_criteria_by_lvn(parts.first.strip.split('.')[0])
when 2
# If there are two parts, there is a computation performed, specifically time difference, on the two criteria
children = parts.collect { |p| @doc.find_criteria_by_lvn(p.strip.split('.')[0]).id }
id = "GROUP_TIMEDIFF_#{@id_generator.next_id}"
HQMF2::DataCriteriaWrapper.new(id: id,
title: id,
subset_operators: [HQMF::SubsetOperator.new('DATETIMEDIFF', nil)],
children_criteria: children,
derivation_operator: HQMF::DataCriteria::XPRODUCT,
type: 'derived',
definition: 'derived',
negation: false,
source_data_criteria: id
)
else
# If there are neither one or 2 parts, the code should fail
fail "No defined extraction method to handle #{parts.length} parts"
end
end
def create_human_readable_id(id)
@id = id
end
# Get the conjunction code, ALL_TRUE or AT_LEAST_ONE_TRUE
# @return [String] conjunction code
def conjunction_code
case @type
when HQMF::PopulationCriteria::IPP, HQMF::PopulationCriteria::DENOM, HQMF::PopulationCriteria::NUMER,
HQMF::PopulationCriteria::MSRPOPL, HQMF::PopulationCriteria::STRAT
HQMF::Precondition::ALL_TRUE
when HQMF::PopulationCriteria::DENEXCEP, HQMF::PopulationCriteria::DENEX, HQMF::PopulationCriteria::MSRPOPLEX,
HQMF::PopulationCriteria::NUMEX
HQMF::Precondition::AT_LEAST_ONE_TRUE
else
fail "Unknown population type [#{@type}]"
end
end
# Generates this classes hqmf-model equivalent
def to_model
mps = preconditions.collect(&:to_model)
HQMF::PopulationCriteria.new(id, hqmf_id, type, mps, title, aggregator, comments)
end
end
|
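A small worked example of the expression handling above, using a hypothetical computed-expression value; only the string splitting is executed here, and the data-criteria lookup is described in comments.

exp = "MeasurePeriod.high - OccurrenceAEncounter.admissionDatetime"   # hypothetical @value
parts = exp.split('-')
# => ["MeasurePeriod.high ", " OccurrenceAEncounter.admissionDatetime"]
parts.map { |p| p.strip.split('.')[0] }
# => ["MeasurePeriod", "OccurrenceAEncounter"]
# With two parts, parse_parts_to_dc wraps the two referenced criteria in a derived
# DATETIMEDIFF data criteria; with a single part it simply returns the criteria
# found by @doc.find_criteria_by_lvn for that local variable name.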
sugaryourcoffee/syclink | lib/syclink/website.rb | SycLink.Website.link_attribute_list | ruby | def link_attribute_list(attribute, separator = nil)
links.map {|link| link.send(attribute).split(separator)}.flatten.uniq.sort
end | List all attributes of the links | train | https://github.com/sugaryourcoffee/syclink/blob/941ee2045c946daa1e0db394eb643aa82c1254cc/lib/syclink/website.rb#L106-L108 | class Website
include Exporter
include Formatter
# The links of the website
attr_reader :links
# The title of the website
attr_reader :title
# Create a new Website
def initialize(title = "Link List")
@links = []
@title = title
end
# Add a link to the website
def add_link(link)
links << link
end
# Remove a link from the website
def remove_link(link)
links.delete(link)
end
# List links that match the attributes
def list_links(args = {})
if args.empty?
links
else
links.select { |link| link.match? args }
end
end
# Check availability of links. Returns the links' url with code '200' if
# reachable other wise with code 'Error'.
def report_links_availability
links.map { |link| [link.url, link.response] }
end
# Finds all links that contain the search string
def find_links(search)
links.select { |link| link.contains? search }
end
# Merge links based on the provided attribue to one link by combining the
# values. The first link will be updated and the obsolete links are deleted
# and will be returned
def merge_links_on(attribute, concat_string = ',')
links_group_by(attribute)
.select { |key, link_list| links.size > 1 }
.map do |key, link_list|
merge_attributes = Link::ATTRS - [attribute]
link_list.first
.update(Hash[extract_columns(link_list, merge_attributes)
.map { |c| c.uniq.join(concat_string) }
.collect { |v| [merge_attributes.shift, v] }])
link_list.shift
link_list.each { |link| links.delete(link) }
end
end
# Groups the links on the provided attribute. If no links array is provided
# the links from self are used
def links_group_by(attribute, linkz = links)
linkz.map { |link| { key: link.send(attribute), link: link } }
.group_by { |entry| entry[:key] }
.each { |key, link| link.map! { |l| l[:link] }}
end
# Groups the links on the provided attribute. If the attribute's value
# contains the provided separator, the value is split up and each of the
# values is used as group key
def links_group_by_separated(attribute, separator)
links_group_by(attribute, links_duplicate_on(attribute, separator))
end
# Create multiple Links based on the attribute provided. The specified
# spearator will splitt the attribute value in distinct values and for each
# different value a Link will be created
def links_duplicate_on(attribute, separator)
links.map do |link|
link.send(attribute).split(separator).collect do |value|
link.dup.update(Hash[attribute, value])
end
end.flatten
end
# Return an array of all link values as rows
def rows
links.map { |link| link.row }
end
# List all attributes of the links
end
|
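A minimal sketch of `link_attribute_list` using duck-typed link objects, since SycLink::Link's constructor isn't shown in this row; the :tags attribute name and the "," separator are assumptions.

require "syclink"   # assumed gem require
require "ostruct"

website = SycLink::Website.new("Bookmarks")
# Stand-in links that only need to respond to #tags for this call.
website.add_link(OpenStruct.new(tags: "ruby,web"))
website.add_link(OpenStruct.new(tags: "web,blog"))
website.link_attribute_list(:tags, ",")   # => ["blog", "ruby", "web"] (split, unique, sorted)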
senchalabs/jsduck | lib/jsduck/type_parser.rb | JsDuck.TypeParser.type_name | ruby | def type_name
name = @input.scan(/[a-zA-Z0-9_]+(\.[a-zA-Z0-9_]+)*|\*/)
if !name
return false
elsif @relations[name]
@out << @formatter.link(name, nil, name)
elsif @primitives[name]
if @relations[@primitives[name]]
@out << @formatter.link(@primitives[name], nil, name)
else
@out << name
end
elsif @relations.ignore?(name) || name == "*"
@out << name
else
@error = :name
return false
end
# All type names besides * can be followed by .<arguments>
if name != "*" && @input.scan(/\.</)
@out << ".<"
return false unless type_arguments
return false unless @input.scan(/>/)
@out << ">"
end
true
end | <type-name> ::= <type-application> | "*"
<type-application> ::= <ident-chain> [ "." "<" <type-arguments> ">" ]
<type-arguments> ::= <alteration-type> [ "," <alteration-type> ]*
<ident-chain> ::= <ident> [ "." <ident> ]*
<ident> ::= [a-zA-Z0-9_]+ | train | https://github.com/senchalabs/jsduck/blob/febef5558ecd05da25f5c260365acc3afd0cafd8/lib/jsduck/type_parser.rb#L327-L356 | class TypeParser
# Allows to check the type of error that was encountered.
# It will be either of the two:
# - :syntax - type definition syntax is incorrect
# - :name - one of the names of the types is unknown
attr_reader :error
# When parsing was successful, then contains the output HTML - the
# input type-definition with types themselves replaced with links.
attr_reader :out
# Initializes the parser with a Format::Doc instance.
def initialize(formatter)
@relations = formatter.relations
@formatter = formatter
@primitives = {
"boolean" => "Boolean",
"number" => "Number",
"string" => "String",
"null" => "null",
"undefined" => "undefined",
"void" => "void",
}
end
# Parses the type definition
#
# <type> ::= <alteration-type>
#
def parse(str)
@input = StringScanner.new(str)
@error = :syntax
@out = []
# Return immediately if the base type doesn't match
return false unless alteration_type
# Concatenate all output
@out = @out.join
# Success if we have reached the end of input
return @input.eos?
end
private
#
# <alteration-type> ::= <varargs-type> [ ("/" | "|") <varargs-type> ]*
#
def alteration_type
skip_whitespace
# Return immediately if varargs-type doesn't match
return false unless varargs_type
skip_whitespace
# Go through enumeration of types, separated with "/" or "|"
while @input.check(/[\/|]/)
@out << @input.scan(/[\/|]/)
skip_whitespace
return false unless varargs_type
skip_whitespace
end
true
end
#
# <varargs-type> ::= "..." <null-type>
# | "..." "[" <null-type> "]"
# | <null-type> "..."
# | <null-type>
#
def varargs_type
if @input.scan(/\.\.\./)
varargs = true
@out << "..."
if @input.scan(/\[/)
varargs_bracketed = true
@out << "["
end
end
return false unless null_type
if !varargs
@out << "..." if @input.scan(/\.\.\./)
end
if varargs_bracketed
return false unless @input.scan(/\]/)
@out << "]"
end
true
end
#
# <null-type> ::= [ "?" | "!" ] <array-type>
#
# <array-type> ::= <atomic-type> [ "[]" ]*
#
# <atomic-type> ::= <union-type> | <record-type> | <function-type> | <string-literal> | <type-name>
#
def null_type
if nullability = @input.scan(/[?!]/)
@out << nullability
end
if @input.check(/\(/)
return false unless union_type
elsif @input.check(/\{/)
return false unless record_type
elsif @input.check(/function\(/)
return false unless function_type
elsif @input.check(/['"]/)
return false unless string_literal
elsif @input.check(/[\d-]/)
return false unless number_literal
else
return false unless type_name
end
while @input.scan(/\[\]/)
@out << "[]"
end
true
end
#
# <union-type> ::= "(" <alteration-type> ")"
#
def union_type
@out << @input.scan(/\(/)
return false unless alteration_type
return false unless @input.scan(/\)/)
@out << ")"
true
end
#
# <record-type> ::= "{" <rtype-item> [ "," <rtype-item> ]* "}"
#
def record_type
@out << @input.scan(/\{/)
return false unless rtype_item
while @input.scan(/,/)
@out << ","
return false unless rtype_item
end
return false unless @input.scan(/\}/)
@out << "}"
true
end
#
# <rtype-item> ::= <ident> ":" <null-type>
# | <ident>
#
def rtype_item
skip_whitespace
key = @input.scan(/[a-zA-Z0-9_]+/)
return false unless key
@out << key
skip_whitespace
if @input.scan(/:/)
@out << ":"
skip_whitespace
return false unless null_type
skip_whitespace
end
true
end
#
# <function-type> ::= "function(" <function-type-arguments> ")" [ ":" <null-type> ]
#
def function_type
@out << @input.scan(/function\(/)
skip_whitespace
if !@input.check(/\)/)
return false unless function_type_arguments
end
return false unless @input.scan(/\)/)
@out << ")"
skip_whitespace
if @input.scan(/:/)
@out << ":"
skip_whitespace
return false unless null_type
end
true
end
#
# <function-type-arguments> ::= <ftype-first-arg> [ "," <ftype-arg> ]*
#
# <ftype-first-arg> ::= "new" ":" <type-name>
# | "this" ":" <type-name>
# | <ftype-arg>
#
def function_type_arguments
skip_whitespace
# First argument is special
if s = @input.scan(/new\s*:\s*/)
@out << s
return false unless type_name
elsif s = @input.scan(/this\s*:\s*/)
@out << s
return false unless type_name
else
return false unless ftype_arg
end
skip_whitespace
# Go through additional arguments, separated with ","
while @input.check(/,/)
@out << @input.scan(/,/)
return false unless ftype_arg
end
true
end
#
# <ftype-arg> ::= <alteration-type> [ "=" ]
#
def ftype_arg
return false unless alteration_type
# Each argument can be optional (ending with "=")
@out << "=" if @input.scan(/[=]/)
skip_whitespace
true
end
#
# <string-literal> ::= '.*' | ".*"
#
def string_literal
@out << @input.scan(/"([^\\"]|\\.)*?"|'([^\\']|\\.)*?'/)
true
end
#
# <number-literal> ::= [ "-" ] <digit>+ [ "." <digit>+ ]
#
def number_literal
@out << @input.scan(/-?\d+(\.\d+)?/)
true
end
#
# <type-name> ::= <type-application> | "*"
#
# <type-application> ::= <ident-chain> [ "." "<" <type-arguments> ">" ]
#
# <type-arguments> ::= <alteration-type> [ "," <alteration-type> ]*
#
# <ident-chain> ::= <ident> [ "." <ident> ]*
#
# <ident> ::= [a-zA-Z0-9_]+
#
#
# <type-arguments> ::= <alteration-type> [ "," <alteration-type> ]*
#
def type_arguments
skip_whitespace
# First argument is required
return false unless alteration_type
skip_whitespace
# Go through additional arguments, separated with ","
while @input.check(/,/)
@out << @input.scan(/,/)
skip_whitespace
return false unless alteration_type
skip_whitespace
end
true
end
def skip_whitespace
ws = @input.scan(/\s*/)
@out << ws if ws
end
end
|
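A sketch that drives the type parser with a stubbed formatter; FakeRelations and FakeFormatter only cover the #relations, #ignore? and #link calls the parser makes, and the link markup they return is invented. It assumes the jsduck gem is loaded.

class FakeRelations
  def [](name)
    %w[String Ext.Element].include?(name)
  end
  def ignore?(_name)
    false
  end
end

class FakeFormatter
  def relations
    FakeRelations.new
  end
  def link(cls, _member, text)
    "<a href='##{cls}'>#{text}</a>"
  end
end

parser = JsDuck::TypeParser.new(FakeFormatter.new)
parser.parse("String/Ext.Element[]")   # => true
parser.out                             # => "<a href='#String'>String</a>/<a href='#Ext.Element'>Ext.Element</a>[]"
parser.parse("Strign")                 # => false; parser.error == :name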
hashicorp/vagrant | lib/vagrant/bundler.rb | Vagrant.Bundler.generate_plugin_set | ruby | def generate_plugin_set(*args)
plugin_path = args.detect{|i| i.is_a?(Pathname) } || plugin_gem_path
skip = args.detect{|i| i.is_a?(Array) } || []
plugin_set = PluginSet.new
@logger.debug("Generating new plugin set instance. Skip gems - #{skip}")
Dir.glob(plugin_path.join('specifications/*.gemspec').to_s).each do |spec_path|
spec = Gem::Specification.load(spec_path)
desired_spec_path = File.join(spec.gem_dir, "#{spec.name}.gemspec")
# Vendor set requires the spec to be within the gem directory. Some gems will package their
# spec file, and that's not what we want to load.
if !File.exist?(desired_spec_path) || !FileUtils.cmp(spec.spec_file, desired_spec_path)
File.write(desired_spec_path, spec.to_ruby)
end
next if skip.include?(spec.name) || skip.include?(spec.full_name)
plugin_set.add_vendor_gem(spec.name, spec.gem_dir)
end
plugin_set
end | Generate the plugin resolver set. Optionally provide specification names (short or
full) that should be ignored
@param [Pathname] path to plugins
@param [Array<String>] gems to skip
@return [PluginSet] | train | https://github.com/hashicorp/vagrant/blob/c22a145c59790c098f95d50141d9afb48e1ef55f/lib/vagrant/bundler.rb#L463-L480 | class Bundler
# Location of HashiCorp gem repository
HASHICORP_GEMSTORE = "https://gems.hashicorp.com/".freeze
# Default gem repositories
DEFAULT_GEM_SOURCES = [
HASHICORP_GEMSTORE,
"https://rubygems.org/".freeze
].freeze
def self.instance
@bundler ||= self.new
end
# @return [Pathname] Global plugin path
attr_reader :plugin_gem_path
# @return [Pathname] Vagrant environment specific plugin path
attr_reader :env_plugin_gem_path
def initialize
@plugin_gem_path = Vagrant.user_data_path.join("gems", RUBY_VERSION).freeze
@logger = Log4r::Logger.new("vagrant::bundler")
end
# Enable Vagrant environment specific plugins at given data path
#
# @param [Pathname] Path to Vagrant::Environment data directory
# @return [Pathname] Path to environment specific gem directory
def environment_path=(env_data_path)
@env_plugin_gem_path = env_data_path.join("plugins", "gems", RUBY_VERSION).freeze
end
# Initializes Bundler and the various gem paths so that we can begin
# loading gems.
def init!(plugins, repair=false)
if !@initial_specifications
@initial_specifications = Gem::Specification.find_all{true}
else
Gem::Specification.all = @initial_specifications
Gem::Specification.reset
end
# Add HashiCorp RubyGems source
if !Gem.sources.include?(HASHICORP_GEMSTORE)
current_sources = Gem.sources.sources.dup
Gem.sources.clear
Gem.sources << HASHICORP_GEMSTORE
current_sources.each do |src|
Gem.sources << src
end
end
# Generate dependencies for all registered plugins
plugin_deps = plugins.map do |name, info|
Gem::Dependency.new(name, info['installed_gem_version'].to_s.empty? ? '> 0' : info['installed_gem_version'])
end
@logger.debug("Current generated plugin dependency list: #{plugin_deps}")
# Load dependencies into a request set for resolution
request_set = Gem::RequestSet.new(*plugin_deps)
# Never allow dependencies to be remotely satisfied during init
request_set.remote = false
repair_result = nil
begin
# Compose set for resolution
composed_set = generate_vagrant_set
# Resolve the request set to ensure proper activation order
solution = request_set.resolve(composed_set)
rescue Gem::UnsatisfiableDependencyError => failure
if repair
raise failure if @init_retried
@logger.debug("Resolution failed but attempting to repair. Failure: #{failure}")
install(plugins)
@init_retried = true
retry
else
raise
end
end
# Activate the gems
activate_solution(solution)
full_vagrant_spec_list = @initial_specifications +
solution.map(&:full_spec)
if(defined?(::Bundler))
@logger.debug("Updating Bundler with full specification list")
::Bundler.rubygems.replace_entrypoints(full_vagrant_spec_list)
end
Gem.post_reset do
Gem::Specification.all = full_vagrant_spec_list
end
Gem::Specification.reset
nil
end
# Removes any temporary files created by init
def deinit
# no-op
end
# Installs the list of plugins.
#
# @param [Hash] plugins
# @param [Boolean] env_local Environment local plugin install
# @return [Array<Gem::Specification>]
def install(plugins, env_local=false)
internal_install(plugins, nil, env_local: env_local)
end
# Installs a local '*.gem' file so that Bundler can find it.
#
# @param [String] path Path to a local gem file.
# @return [Gem::Specification]
def install_local(path, opts={})
plugin_source = Gem::Source::SpecificFile.new(path)
plugin_info = {
plugin_source.spec.name => {
"gem_version" => plugin_source.spec.version.to_s,
"local_source" => plugin_source,
"sources" => opts.fetch(:sources, [])
}
}
@logger.debug("Installing local plugin - #{plugin_info}")
internal_install(plugin_info, nil, env_local: opts[:env_local])
plugin_source.spec
end
# Update updates the given plugins, or every plugin if none is given.
#
# @param [Hash] plugins
# @param [Array<String>] specific Specific plugin names to update. If
# empty or nil, all plugins will be updated.
def update(plugins, specific, **opts)
specific ||= []
update = opts.merge({gems: specific.empty? ? true : specific})
internal_install(plugins, update)
end
# Clean removes any unused gems.
def clean(plugins, **opts)
@logger.debug("Cleaning Vagrant plugins of stale gems.")
# Generate dependencies for all registered plugins
plugin_deps = plugins.map do |name, info|
gem_version = info['installed_gem_version']
gem_version = info['gem_version'] if gem_version.to_s.empty?
gem_version = "> 0" if gem_version.to_s.empty?
Gem::Dependency.new(name, gem_version)
end
@logger.debug("Current plugin dependency list: #{plugin_deps}")
# Load dependencies into a request set for resolution
request_set = Gem::RequestSet.new(*plugin_deps)
# Never allow dependencies to be remotely satisfied during cleaning
request_set.remote = false
# Sets that we can resolve our dependencies from. Note that we only
# resolve from the current set as all required deps are activated during
# init.
current_set = generate_vagrant_set
# Collect all plugin specifications
plugin_specs = Dir.glob(plugin_gem_path.join('specifications/*.gemspec').to_s).map do |spec_path|
Gem::Specification.load(spec_path)
end
# Include environment specific specification if enabled
if env_plugin_gem_path
plugin_specs += Dir.glob(env_plugin_gem_path.join('specifications/*.gemspec').to_s).map do |spec_path|
Gem::Specification.load(spec_path)
end
end
@logger.debug("Generating current plugin state solution set.")
# Resolve the request set to ensure proper activation order
solution = request_set.resolve(current_set)
solution_specs = solution.map(&:full_spec)
solution_full_names = solution_specs.map(&:full_name)
# Find all specs installed to plugins directory that are not
# found within the solution set.
plugin_specs.delete_if do |spec|
solution_full_names.include?(spec.full_name)
end
if env_plugin_gem_path
# If we are cleaning locally, remove any global specs. If
# not, remove any local specs
if opts[:env_local]
@logger.debug("Removing specifications that are not environment local")
plugin_specs.delete_if do |spec|
spec.full_gem_path.to_s.include?(plugin_gem_path.realpath.to_s)
end
else
@logger.debug("Removing specifications that are environment local")
plugin_specs.delete_if do |spec|
spec.full_gem_path.to_s.include?(env_plugin_gem_path.realpath.to_s)
end
end
end
@logger.debug("Specifications to be removed - #{plugin_specs.map(&:full_name)}")
# Now delete all unused specs
plugin_specs.each do |spec|
@logger.debug("Uninstalling gem - #{spec.full_name}")
Gem::Uninstaller.new(spec.name,
version: spec.version,
install_dir: plugin_gem_path,
all: true,
executables: true,
force: true,
ignore: true,
).uninstall_gem(spec)
end
solution.find_all do |spec|
plugins.keys.include?(spec.name)
end
end
# During the duration of the yielded block, Bundler loud output
# is enabled.
def verbose
if block_given?
initial_state = @verbose
@verbose = true
yield
@verbose = initial_state
else
@verbose = true
end
end
protected
def internal_install(plugins, update, **extra)
update = {} if !update.is_a?(Hash)
skips = []
source_list = {}
system_plugins = plugins.map do |plugin_name, plugin_info|
plugin_name if plugin_info["system"]
end.compact
installer_set = VagrantSet.new(:both)
installer_set.system_plugins = system_plugins
# Generate all required plugin deps
plugin_deps = plugins.map do |name, info|
gem_version = info['gem_version'].to_s.empty? ? '> 0' : info['gem_version']
if update[:gems] == true || (update[:gems].respond_to?(:include?) && update[:gems].include?(name))
if Gem::Requirement.new(gem_version).exact?
gem_version = "> 0"
@logger.debug("Detected exact version match for `#{name}` plugin update. Reset to loose constraint #{gem_version.inspect}.")
end
skips << name
end
source_list[name] ||= []
if plugin_source = info.delete("local_source")
installer_set.add_local(plugin_source.spec.name, plugin_source.spec, plugin_source)
source_list[name] << plugin_source.path
end
Array(info["sources"]).each do |source|
if !source.end_with?("/")
source = source + "/"
end
source_list[name] << source
end
Gem::Dependency.new(name, gem_version)
end
if Vagrant.strict_dependency_enforcement
@logger.debug("Enabling strict dependency enforcement")
plugin_deps += vagrant_internal_specs.map do |spec|
next if system_plugins.include?(spec.name)
Gem::Dependency.new(spec.name, spec.version)
end.compact
else
@logger.debug("Disabling strict dependency enforcement")
end
@logger.debug("Dependency list for installation:\n - " \
"#{plugin_deps.map{|d| "#{d.name} #{d.requirement}"}.join("\n - ")}")
all_sources = source_list.values.flatten.uniq
default_sources = DEFAULT_GEM_SOURCES & all_sources
all_sources -= DEFAULT_GEM_SOURCES
# Only allow defined Gem sources
Gem.sources.clear
@logger.debug("Enabling user defined remote RubyGems sources")
all_sources.each do |src|
begin
next if File.file?(src) || URI.parse(src).scheme.nil?
rescue URI::InvalidURIError
next
end
@logger.debug("Adding RubyGems source #{src}")
Gem.sources << src
end
@logger.debug("Enabling default remote RubyGems sources")
default_sources.each do |src|
@logger.debug("Adding source - #{src}")
Gem.sources << src
end
validate_configured_sources!
source_list.values.each{|srcs| srcs.delete_if{|src| default_sources.include?(src)}}
installer_set.prefer_sources = source_list
@logger.debug("Current source list for install: #{Gem.sources.to_a}")
# Create the request set for the new plugins
request_set = Gem::RequestSet.new(*plugin_deps)
installer_set = Gem::Resolver.compose_sets(
installer_set,
generate_builtin_set(system_plugins),
generate_plugin_set(skips)
)
@logger.debug("Generating solution set for installation.")
# Generate the required solution set for new plugins
solution = request_set.resolve(installer_set)
activate_solution(solution)
# Remove gems which are already installed
request_set.sorted_requests.delete_if do |activation_req|
rs_spec = activation_req.spec
if vagrant_internal_specs.detect{|ispec| ispec.name == rs_spec.name && ispec.version == rs_spec.version }
@logger.debug("Removing activation request from install. Already installed. (#{rs_spec.spec.full_name})")
true
end
end
@logger.debug("Installing required gems.")
# Install all remote gems into plugin path. Set the installer to ignore dependencies
# as we know the dependencies are satisfied and it will attempt to validate a gem's
# dependencies are satisfied by gems in the install directory (which will likely not
# be true)
install_path = extra[:env_local] ? env_plugin_gem_path : plugin_gem_path
result = request_set.install_into(install_path.to_s, true,
ignore_dependencies: true,
prerelease: Vagrant.prerelease?,
wrappers: true
)
result = result.map(&:full_spec)
result.each do |spec|
existing_paths = $LOAD_PATH.find_all{|s| s.include?(spec.full_name) }
if !existing_paths.empty?
@logger.debug("Removing existing LOAD_PATHs for #{spec.full_name} - " +
existing_paths.join(", "))
existing_paths.each{|s| $LOAD_PATH.delete(s) }
end
spec.full_require_paths.each do |r_path|
if !$LOAD_PATH.include?(r_path)
@logger.debug("Adding path to LOAD_PATH - #{r_path}")
$LOAD_PATH.unshift(r_path)
end
end
end
result
end
# Generate the composite resolver set totally all of vagrant (builtin + plugin set)
def generate_vagrant_set
sets = [generate_builtin_set, generate_plugin_set]
if env_plugin_gem_path && env_plugin_gem_path.exist?
sets << generate_plugin_set(env_plugin_gem_path)
end
Gem::Resolver.compose_sets(*sets)
end
# @return [Array<[Gem::Specification, String]>] spec and directory pairs
def vagrant_internal_specs
list = {}
directories = [Gem::Specification.default_specifications_dir]
Gem::Specification.find_all{true}.each do |spec|
list[spec.full_name] = spec
end
if(!defined?(::Bundler))
directories += Gem::Specification.dirs.find_all do |path|
!path.start_with?(Gem.user_dir)
end
end
Gem::Specification.each_spec(directories) do |spec|
if !list[spec.full_name]
list[spec.full_name] = spec
end
end
list.values
end
# Iterates each configured RubyGem source to validate that it is properly
# available. If source is unavailable an exception is raised.
def validate_configured_sources!
Gem.sources.each_source do |src|
begin
src.load_specs(:released)
rescue Gem::Exception => source_error
if ENV["VAGRANT_ALLOW_PLUGIN_SOURCE_ERRORS"]
@logger.warn("Failed to load configured plugin source: #{src}!")
@logger.warn("Error received attempting to load source (#{src}): #{source_error}")
@logger.warn("Ignoring plugin source load failure due user request via env variable")
else
@logger.error("Failed to load configured plugin source `#{src}`: #{source_error}")
raise Vagrant::Errors::PluginSourceError,
source: src.uri.to_s,
error_msg: source_error.message
end
end
end
end
# Generate the builtin resolver set
def generate_builtin_set(system_plugins=[])
builtin_set = BuiltinSet.new
@logger.debug("Generating new builtin set instance.")
vagrant_internal_specs.each do |spec|
if !system_plugins.include?(spec.name)
builtin_set.add_builtin_spec(spec)
end
end
builtin_set
end
# Generate the plugin resolver set. Optionally provide specification names (short or
# full) that should be ignored
#
# @param [Pathname] path to plugins
# @param [Array<String>] gems to skip
# @return [PluginSet]
# Activate a given solution
def activate_solution(solution)
retried = false
begin
@logger.debug("Activating solution set: #{solution.map(&:full_name)}")
solution.each do |activation_request|
unless activation_request.full_spec.activated?
@logger.debug("Activating gem #{activation_request.full_spec.full_name}")
activation_request.full_spec.activate
if(defined?(::Bundler))
@logger.debug("Marking gem #{activation_request.full_spec.full_name} loaded within Bundler.")
::Bundler.rubygems.mark_loaded activation_request.full_spec
end
end
end
rescue Gem::LoadError => e
# Depending on the version of Ruby, the ordering of the solution set
# will be either 0..n (molinillo) or n..0 (pre-molinillo). Instead of
# attempting to determine what's in use, or if it has some how changed
# again, just reverse order on failure and attempt again.
if retried
@logger.error("Failed to load solution set - #{e.class}: #{e}")
matcher = e.message.match(/Could not find '(?<gem_name>[^']+)'/)
if matcher && !matcher["gem_name"].empty?
desired_activation_request = solution.detect do |request|
request.name == matcher["gem_name"]
end
if desired_activation_request && !desired_activation_request.full_spec.activated?
activation_request = desired_activation_request
@logger.warn("Found misordered activation request for #{desired_activation_request.full_name}. Moving to solution HEAD.")
solution.delete(desired_activation_request)
solution.unshift(desired_activation_request)
retry
end
end
raise
else
@logger.debug("Failed to load solution set. Retrying with reverse order.")
retried = true
solution.reverse!
retry
end
end
end
# This is a custom Gem::Resolver::InstallerSet. It will prefer sources which are
# explicitly provided over default sources when matches are found. This is generally
# the entire set used for performing full resolutions on install.
class VagrantSet < Gem::Resolver::InstallerSet
attr_accessor :prefer_sources
attr_accessor :system_plugins
def initialize(domain, defined_sources={})
@prefer_sources = defined_sources
@system_plugins = []
super(domain)
end
# Allow InstallerSet to find matching specs, then filter
# for preferred sources
def find_all(req)
result = super
if system_plugins.include?(req.name)
result.delete_if do |spec|
spec.is_a?(Gem::Resolver::InstalledSpecification)
end
end
subset = result.find_all do |idx_spec|
preferred = false
if prefer_sources[req.name]
if idx_spec.source.respond_to?(:path)
preferred = prefer_sources[req.name].include?(idx_spec.source.path.to_s)
end
if !preferred
preferred = prefer_sources[req.name].include?(idx_spec.source.uri.to_s)
end
end
preferred
end
subset.empty? ? result : subset
end
end
# This is a custom Gem::Resolver::Set for use with vagrant "system" gems. It
# allows the installed set of gems to be used for providing a solution while
# enforcing strict constraints. This ensures that plugins cannot "upgrade"
# gems that are builtin to vagrant itself.
class BuiltinSet < Gem::Resolver::Set
def initialize
super
@remote = false
@specs = []
end
def add_builtin_spec(spec)
@specs.push(spec).uniq!
end
def find_all(req)
@specs.select do |spec|
allow_prerelease = spec.name == "vagrant" && Vagrant.prerelease?
req.match?(spec, allow_prerelease)
end.map do |spec|
Gem::Resolver::InstalledSpecification.new(self, spec)
end
end
end
# This is a custom Gem::Resolver::Set for use with Vagrant plugins. It is
# a modified Gem::Resolver::VendorSet that supports multiple versions of
# a specific gem
class PluginSet < Gem::Resolver::VendorSet
##
# Adds a specification to the set with the given +name+ which has been
# unpacked into the given +directory+.
def add_vendor_gem(name, directory)
gemspec = File.join(directory, "#{name}.gemspec")
spec = Gem::Specification.load(gemspec)
if !spec
raise Gem::GemNotFoundException,
"unable to find #{gemspec} for gem #{name}"
end
spec.full_gem_path = File.expand_path(directory)
spec.base_dir = File.dirname(spec.base_dir)
@specs[spec.name] ||= []
@specs[spec.name] << spec
@directories[spec] = directory
spec
end
##
# Returns an Array of VendorSpecification objects matching the
# DependencyRequest +req+.
def find_all(req)
@specs.values.flatten.select do |spec|
req.match?(spec)
end.map do |spec|
source = Gem::Source::Vendor.new(@directories[spec])
Gem::Resolver::VendorSpecification.new(self, spec, source)
end
end
##
# Loads a spec with the given +name+. +version+, +platform+ and +source+ are
# ignored.
def load_spec (name, version, platform, source)
version = Gem::Version.new(version) if !version.is_a?(Gem::Version)
@specs.fetch(name, []).detect{|s| s.name == name && s.version == version}
end
end
end
|
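A hedged sketch of the two argument shapes generate_plugin_set sniffs for above; the method is protected, so send is used here, and the path and plugin name are made up.

require "pathname"

bundler = Vagrant::Bundler.instance

# Default global plugin gem path, nothing skipped:
plugin_set = bundler.send(:generate_plugin_set)

# Explicit path plus a skip list of short or full gem names:
custom_path = Pathname.new("/tmp/vagrant-plugins/gems/#{RUBY_VERSION}")   # hypothetical
plugin_set  = bundler.send(:generate_plugin_set, custom_path, ["vagrant-example"])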
agios/simple_form-dojo | lib/simple_form-dojo/form_builder.rb | SimpleFormDojo.FormBuilder.button_default_value | ruby | def button_default_value
obj = object.respond_to?(:to_model) ? object.to_model : object
key = obj ? (obj.persisted? ? :edit : :new) : :submit
model = if obj.class.respond_to?(:model_name)
obj.class.model_name.human
else
object_name.to_s.humanize
end
defaults = []
defaults << "helpers.submit.#{object_name}.#{key}"
defaults << "#{key.to_s.humanize} #{model}"
I18n.t(defaults.shift, :default => defaults)
end | Basically the same as rails submit_default_value | train | https://github.com/agios/simple_form-dojo/blob/c4b134f56f4cb68cba81d583038965360c70fba4/lib/simple_form-dojo/form_builder.rb#L62-L76 | class FormBuilder < SimpleForm::FormBuilder
include SimpleFormDojo::Inputs
# need to include this in order to
# get the html_escape method
include ERB::Util
attr_accessor :dojo_props
map_type :currency, :to => SimpleFormDojo::Inputs::CurrencyInput
map_type :date, :time, :datetime, :to => SimpleFormDojo::Inputs::DateTimeInput
map_type :select, :radio_buttons, :check_boxes, :to => SimpleFormDojo::Inputs::CollectionInput
map_type :integer, :decimal, :float, :to => SimpleFormDojo::Inputs::NumericInput
#map_type :password, :text, :text_simple, :file, :to => SimpleFormDojo::Inputs::MappingInput
map_type :string, :email, :search, :tel, :url, :to => SimpleFormDojo::Inputs::StringInput
map_type :text, :text_simple, :to => SimpleFormDojo::Inputs::TextInput
map_type :password, :to => SimpleFormDojo::Inputs::PasswordInput
map_type :boolean, :to => SimpleFormDojo::Inputs::BooleanInput
# Simple override of initializer in order to add in the dojo_props attribute
def initialize(object_name, object, template, options, proc)
@dojo_props = nil
super(object_name, object, template, options, proc)
end
# Creates a button
#
# overrides simple_form's button method
#
# dojo_form_for @user do |f|
# f.button :submit, :value => 'Save Me'
# end
#
# To use dojox/form/BusyButton, pass :busy => true
# dojo_form_for @uswer do |f|
# f.button :submit, :busy => true, :value => 'Save Me'
# end
#
# If :value doesn't exist, tries to determine the
# the value based on the current object
def button(type, *args, &block)
# set options to value if first arg is a Hash
options = args.extract_options!
button_type = 'dijit/form/Button'
button_type = 'dojox/form/BusyButton' if options[:busy]
options.reverse_merge!(:'data-dojo-type' => button_type)
content = ''
if value = options.delete(:value)
content = value.html_safe
else
content = button_default_value
end
options.reverse_merge!({ :type => type, :value => content })
dojo_props = {}
dojo_props.merge!(options[:dojo_html]) if options.include?(:dojo_html)
options[:'data-dojo-props'] = SimpleFormDojo::FormBuilder.encode_as_dojo_props(dojo_props)
options[:class] = "button #{options[:class]}".strip
template.content_tag(:button, content, *(args << options), &block)
end
# Basically the same as rails submit_default_value
def dojo_collection_radio_buttons(attribute, collection, value_method,
text_method, options={}, html_options={})
rendered_collection = render_collection(
collection, value_method, text_method, options, html_options
) do |item, value, text, default_html_options|
local_dojo_props = @dojo_props.dup
## Checked?
#if values_are_equal?(local_dojo_props[:value], value)
#local_dojo_props[:checked] = "checked"
#default_html_options[:checked] = "checked"
#end
default_html_options[:'data-dojo-props'] = SimpleFormDojo::FormBuilder.encode_as_dojo_props(local_dojo_props) if !local_dojo_props.nil?
# append the object id to the html id
default_html_options["id"] = "#{html_options["id"]}_#{value.to_s.gsub(/\s/, "_").gsub(/[^-\w]/, "").downcase}" if html_options["id"].present?
builder = instantiate_collection_builder(SimpleForm::ActionViewExtensions::RadioButtonBuilder, attribute, item, value, text, default_html_options)
if block_given?
yield builder
else
builder.radio_button + builder.label(:class => "collection_radio_buttons")
end
end
wrap_rendered_collection(rendered_collection, options)
end
def dojo_collection_check_boxes(attribute, collection, value_method,
text_method, options={}, html_options={})
rendered_collection = render_collection(
collection, value_method, text_method, options, html_options
) do |item, value, text, default_html_options|
local_dojo_props = @dojo_props.dup
## Checked
#if values_are_equal?(local_dojo_props[:value], value)
#local_dojo_props[:checked] = "checked"
#default_html_options[:checked] = "checked"
#end
default_html_options[:multiple] = true
default_html_options[:'data-dojo-props'] = SimpleFormDojo::FormBuilder.encode_as_dojo_props(local_dojo_props)
# append the object id to the html id
default_html_options["id"] = "#{html_options["id"]}_#{value.to_s.gsub(/\s/, "_").gsub(/[^-\w]/, "").downcase}" if html_options["id"].present?
builder = instantiate_collection_builder(SimpleForm::ActionViewExtensions::CheckBoxBuilder, attribute, item, value, text, default_html_options)
if block_given?
yield builder
else
builder.check_box + builder.label(:class => "collection_check_boxes")
end
end
wrap_rendered_collection(rendered_collection, options)
end
def association(association, options={}, &block)
options = options.dup
reflection = find_association_reflection(association)
raise "Association #{association.inspect} not found" unless reflection
if reflection.macro == :belongs_to
options[:input_html] ||= {}
options[:dojo_html] ||= {}
attribute = (reflection.respond_to?(:options) && reflection.options[:foreign_key]) || :"#{reflection.name}_id"
options[:input_html][:value] ||= object.send(attribute).as_json
#Prevent collections from being loaded if using a store
options[:collection] = [] if options[:dojo_html][:store]
end
super(association, options, &block)
end
##
# The dojo props string is evaluated as javascript,
# can therefore contain any valid javascript object
# and cannot be encoded as JSON
def self.encode_as_dojo_props(options)
encode_obj(options)
.slice(1..-2)
.html_safe
end
def self.encode_obj(obj)
case obj
when Hash
"{#{obj.collect{|k, v| "#{k}:#{encode_obj(v)}"}.join(', ')}}"
when Array
"[#{obj.collect{|v| encode_obj(v)}.join(', ')}]"
else
obj.to_s
end
end
private
def values_are_equal?(obj_value, item_value)
value = obj_value
if value.is_a?(String)
values = obj_value[/\[([,0-9\s]+)\]/,1]
unless values.nil?
return values.tr(' ','').split(',').include?(item_value.to_s)
end
end
(value.to_s == item_value.to_s ? true : false)
end
end
|
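A comment-only walk-through of the fallback labels button_default_value produces for a hypothetical User form; the I18n keys helpers.submit.user.new / .edit still take precedence when defined.

# dojo_form_for User.new do |f|
#   f.button :submit                      # -> "New User"  (key :new, fallback "New User")
# end
#
# dojo_form_for @persisted_user do |f|
#   f.button :submit                      # -> "Edit User" (key :edit)
#   f.button :submit, value: "Save Me"    # an explicit :value bypasses the lookup
#   f.button :submit, busy: true          # rendered as dojox/form/BusyButton
# end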
rmagick/rmagick | lib/rmagick_internal.rb | Magick.Draw.opacity | ruby | def opacity(opacity)
if opacity.is_a?(Numeric)
Kernel.raise ArgumentError, 'opacity must be >= 0 and <= 1.0' if opacity < 0 || opacity > 1.0
end
primitive "opacity #{opacity}"
end | Specify drawing fill and stroke opacities. If the value is a string
ending with a %, the number will be multiplied by 0.01. | train | https://github.com/rmagick/rmagick/blob/ef6688ed9d76bf123c2ea1a483eff8635051adb7/lib/rmagick_internal.rb#L418-L423 | class Draw
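# A minimal illustrative sketch of the opacity primitive documented above (values arbitrary):
#   gc = Magick::Draw.new
#   gc.opacity(0.5)     # numeric values are validated to the 0..1 range
#   gc.opacity('50%')   # string percentages are scaled by 0.01
#   gc.opacity(1.5)     # raises ArgumentError (outside 0..1)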
# These hashes are used to map Magick constant
# values to the strings used in the primitives.
ALIGN_TYPE_NAMES = {
LeftAlign.to_i => 'left',
RightAlign.to_i => 'right',
CenterAlign.to_i => 'center'
}.freeze
ANCHOR_TYPE_NAMES = {
StartAnchor.to_i => 'start',
MiddleAnchor.to_i => 'middle',
EndAnchor.to_i => 'end'
}.freeze
DECORATION_TYPE_NAMES = {
NoDecoration.to_i => 'none',
UnderlineDecoration.to_i => 'underline',
OverlineDecoration.to_i => 'overline',
LineThroughDecoration.to_i => 'line-through'
}.freeze
FONT_WEIGHT_NAMES = {
AnyWeight.to_i => 'all',
NormalWeight.to_i => 'normal',
BoldWeight.to_i => 'bold',
BolderWeight.to_i => 'bolder',
LighterWeight.to_i => 'lighter'
}.freeze
GRAVITY_NAMES = {
NorthWestGravity.to_i => 'northwest',
NorthGravity.to_i => 'north',
NorthEastGravity.to_i => 'northeast',
WestGravity.to_i => 'west',
CenterGravity.to_i => 'center',
EastGravity.to_i => 'east',
SouthWestGravity.to_i => 'southwest',
SouthGravity.to_i => 'south',
SouthEastGravity.to_i => 'southeast'
}.freeze
PAINT_METHOD_NAMES = {
PointMethod.to_i => 'point',
ReplaceMethod.to_i => 'replace',
FloodfillMethod.to_i => 'floodfill',
FillToBorderMethod.to_i => 'filltoborder',
ResetMethod.to_i => 'reset'
}.freeze
STRETCH_TYPE_NAMES = {
NormalStretch.to_i => 'normal',
UltraCondensedStretch.to_i => 'ultra-condensed',
ExtraCondensedStretch.to_i => 'extra-condensed',
CondensedStretch.to_i => 'condensed',
SemiCondensedStretch.to_i => 'semi-condensed',
SemiExpandedStretch.to_i => 'semi-expanded',
ExpandedStretch.to_i => 'expanded',
ExtraExpandedStretch.to_i => 'extra-expanded',
UltraExpandedStretch.to_i => 'ultra-expanded',
AnyStretch.to_i => 'all'
}.freeze
STYLE_TYPE_NAMES = {
NormalStyle.to_i => 'normal',
ItalicStyle.to_i => 'italic',
ObliqueStyle.to_i => 'oblique',
AnyStyle.to_i => 'all'
}.freeze
private
def enquote(str)
if str.length > 2 && /\A(?:\"[^\"]+\"|\'[^\']+\'|\{[^\}]+\})\z/.match(str)
str
else
'"' + str + '"'
end
end
public
# Apply coordinate transformations to support scaling (s), rotation (r),
# and translation (t). Angles are specified in radians.
def affine(sx, rx, ry, sy, tx, ty)
primitive 'affine ' + format('%g,%g,%g,%g,%g,%g', sx, rx, ry, sy, tx, ty)
end
# Draw an arc.
def arc(start_x, start_y, end_x, end_y, start_degrees, end_degrees)
primitive 'arc ' + format('%g,%g %g,%g %g,%g',
start_x, start_y, end_x, end_y, start_degrees, end_degrees)
end
# Draw a bezier curve.
def bezier(*points)
if points.length.zero?
Kernel.raise ArgumentError, 'no points specified'
elsif points.length.odd?
Kernel.raise ArgumentError, 'odd number of arguments specified'
end
primitive 'bezier ' + points.join(',')
end
# Draw a circle
def circle(origin_x, origin_y, perim_x, perim_y)
primitive 'circle ' + format('%g,%g %g,%g', origin_x, origin_y, perim_x, perim_y)
end
# Invoke a clip-path defined by def_clip_path.
def clip_path(name)
primitive "clip-path #{name}"
end
# Define the clipping rule.
def clip_rule(rule)
Kernel.raise ArgumentError, "Unknown clipping rule #{rule}" unless %w[evenodd nonzero].include?(rule.downcase)
primitive "clip-rule #{rule}"
end
# Define the clip units
def clip_units(unit)
Kernel.raise ArgumentError, "Unknown clip unit #{unit}" unless %w[userspace userspaceonuse objectboundingbox].include?(unit.downcase)
primitive "clip-units #{unit}"
end
# Set color in image according to specified colorization rule. Rule is one of
# point, replace, floodfill, filltoborder, reset
def color(x, y, method)
Kernel.raise ArgumentError, "Unknown PaintMethod: #{method}" unless PAINT_METHOD_NAMES.key?(method.to_i)
primitive "color #{x},#{y},#{PAINT_METHOD_NAMES[method.to_i]}"
end
# Specify EITHER the text decoration (none, underline, overline,
# line-through) OR the text solid background color (any color name or spec)
def decorate(decoration)
if DECORATION_TYPE_NAMES.key?(decoration.to_i)
primitive "decorate #{DECORATION_TYPE_NAMES[decoration.to_i]}"
else
primitive "decorate #{enquote(decoration)}"
end
end
# Define a clip-path. A clip-path is a sequence of primitives
# bracketed by the "push clip-path <name>" and "pop clip-path"
# primitives. Upon advice from the IM guys, we also bracket
# the clip-path primitives with "push(pop) defs" and "push
# (pop) graphic-context".
def define_clip_path(name)
push('defs')
push("clip-path \"#{name}\"")
push('graphic-context')
yield
ensure
pop('graphic-context')
pop('clip-path')
pop('defs')
end
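# For instance (an illustrative sketch; the clip-path name and coordinates are
# arbitrary), a circular clip region can be defined once and then applied with
# clip_path:
#   gc = Magick::Draw.new
#   gc.define_clip_path('circle_clip') { gc.circle(100, 100, 100, 50) }
#   gc.clip_path('circle_clip')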
# Draw an ellipse
def ellipse(origin_x, origin_y, width, height, arc_start, arc_end)
primitive 'ellipse ' + format('%g,%g %g,%g %g,%g',
origin_x, origin_y, width, height, arc_start, arc_end)
end
# Let anything through, but the only defined argument
# is "UTF-8". All others are apparently ignored.
def encoding(encoding)
primitive "encoding #{encoding}"
end
# Specify object fill, a color name or pattern name
def fill(colorspec)
primitive "fill #{enquote(colorspec)}"
end
alias fill_color fill
alias fill_pattern fill
# Specify fill opacity (use "xx%" to indicate percentage)
def fill_opacity(opacity)
primitive "fill-opacity #{opacity}"
end
def fill_rule(rule)
Kernel.raise ArgumentError, "Unknown fill rule #{rule}" unless %w[evenodd nonzero].include?(rule.downcase)
primitive "fill-rule #{rule}"
end
# Specify text drawing font
def font(name)
primitive "font \'#{name}\'"
end
def font_family(name)
primitive "font-family \'#{name}\'"
end
def font_stretch(stretch)
Kernel.raise ArgumentError, 'Unknown stretch type' unless STRETCH_TYPE_NAMES.key?(stretch.to_i)
primitive "font-stretch #{STRETCH_TYPE_NAMES[stretch.to_i]}"
end
def font_style(style)
Kernel.raise ArgumentError, 'Unknown style type' unless STYLE_TYPE_NAMES.key?(style.to_i)
primitive "font-style #{STYLE_TYPE_NAMES[style.to_i]}"
end
# The font weight argument can be either a font weight
# constant or [100,200,...,900]
def font_weight(weight)
if FONT_WEIGHT_NAMES.key?(weight.to_i)
primitive "font-weight #{FONT_WEIGHT_NAMES[weight.to_i]}"
else
primitive "font-weight #{weight}"
end
end
# Specify the text positioning gravity, one of:
# NorthWest, North, NorthEast, West, Center, East, SouthWest, South, SouthEast
def gravity(grav)
Kernel.raise ArgumentError, 'Unknown text positioning gravity' unless GRAVITY_NAMES.key?(grav.to_i)
primitive "gravity #{GRAVITY_NAMES[grav.to_i]}"
end
# IM 6.5.5-8 and later
def interline_spacing(space)
begin
Float(space)
rescue ArgumentError
Kernel.raise ArgumentError, 'invalid value for interline_spacing'
rescue TypeError
Kernel.raise TypeError, "can't convert #{space.class} into Float"
end
primitive "interline-spacing #{space}"
end
# IM 6.4.8-3 and later
def interword_spacing(space)
begin
Float(space)
rescue ArgumentError
Kernel.raise ArgumentError, 'invalid value for interword_spacing'
rescue TypeError
Kernel.raise TypeError, "can't convert #{space.class} into Float"
end
primitive "interword-spacing #{space}"
end
# IM 6.4.8-3 and later
def kerning(space)
begin
Float(space)
rescue ArgumentError
Kernel.raise ArgumentError, 'invalid value for kerning'
rescue TypeError
Kernel.raise TypeError, "can't convert #{space.class} into Float"
end
primitive "kerning #{space}"
end
# Draw a line
def line(start_x, start_y, end_x, end_y)
primitive 'line ' + format('%g,%g %g,%g', start_x, start_y, end_x, end_y)
end
# Set matte (make transparent) in image according to the specified
# colorization rule
def matte(x, y, method)
Kernel.raise ArgumentError, 'Unknown paint method' unless PAINT_METHOD_NAMES.key?(method.to_i)
primitive "matte #{x},#{y} #{PAINT_METHOD_NAMES[method.to_i]}"
end
# Specify drawing fill and stroke opacities. If the value is a string
# ending with a %, the number will be multiplied by 0.01.
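# A brief usage sketch (values are illustrative):
#   gc = Magick::Draw.new
#   gc.opacity(0.5)     # numeric form, must be between 0 and 1.0
#   gc.opacity('50%')   # string form, multiplied by 0.01 to give 0.5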
# Draw using SVG-compatible path drawing commands. Note that the
# primitive requires that the commands be surrounded by quotes or
# apostrophes. Here we simply use apostrophes.
def path(cmds)
primitive "path '" + cmds + "'"
end
# Define a pattern. In the block, call primitive methods to
# draw the pattern. Reference the pattern by using its name
# as the argument to the 'fill' or 'stroke' methods
def pattern(name, x, y, width, height)
push('defs')
push("pattern #{name} #{x} #{y} #{width} #{height}")
push('graphic-context')
yield
ensure
pop('graphic-context')
pop('pattern')
pop('defs')
end
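# Sketch of typical use (the pattern name, geometry, and primitives are
# illustrative):
#   gc.pattern('hatch', 0, 0, 8, 8) do
#     gc.line(0, 0, 8, 8)
#   end
#   gc.fill('hatch')   # reference the pattern by name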
# Set point to fill color.
def point(x, y)
primitive "point #{x},#{y}"
end
# Specify the font size in points. Yes, the primitive is "font-size" but
# in other places this value is called the "pointsize". Give it both names.
def pointsize(points)
primitive "font-size #{points}"
end
alias font_size pointsize
# Draw a polygon
def polygon(*points)
if points.length.zero?
Kernel.raise ArgumentError, 'no points specified'
elsif points.length.odd?
Kernel.raise ArgumentError, 'odd number of points specified'
end
primitive 'polygon ' + points.join(',')
end
# Draw a polyline
def polyline(*points)
if points.length.zero?
Kernel.raise ArgumentError, 'no points specified'
elsif points.length.odd?
Kernel.raise ArgumentError, 'odd number of points specified'
end
primitive 'polyline ' + points.join(',')
end
# Return to the previously-saved set of drawing options. Also you can use
# pop('graphic-context') (the default if no arguments)
# pop('defs')
# pop('gradient')
# pop('pattern')
def pop(*what)
if what.length.zero?
primitive 'pop graphic-context'
else
# to_s allows a Symbol to be used instead of a String
primitive 'pop ' + what.map(&:to_s).join(' ')
end
end
# Push the current set of drawing options. Also you can use
# push('graphic-context') (the default if no arguments)
# push('defs')
# push('gradient')
# push('pattern')
def push(*what)
if what.length.zero?
primitive 'push graphic-context'
else
# to_s allows a Symbol to be used instead of a String
primitive 'push ' + what.map(&:to_s).join(' ')
end
end
# Draw a rectangle
def rectangle(upper_left_x, upper_left_y, lower_right_x, lower_right_y)
primitive 'rectangle ' + format('%g,%g %g,%g',
upper_left_x, upper_left_y, lower_right_x, lower_right_y)
end
# Specify coordinate space rotation. "angle" is measured in degrees
def rotate(angle)
primitive "rotate #{angle}"
end
# Draw a rectangle with rounded corners
def roundrectangle(center_x, center_y, width, height, corner_width, corner_height)
primitive 'roundrectangle ' + format('%g,%g,%g,%g,%g,%g',
center_x, center_y, width, height, corner_width, corner_height)
end
# Specify scaling to be applied to coordinate space on subsequent drawing commands.
def scale(x, y)
primitive "scale #{x},#{y}"
end
def skewx(angle)
primitive "skewX #{angle}"
end
def skewy(angle)
primitive "skewY #{angle}"
end
# Specify the object stroke, a color name or pattern name.
def stroke(colorspec)
primitive "stroke #{enquote(colorspec)}"
end
alias stroke_color stroke
alias stroke_pattern stroke
# Specify if stroke should be antialiased or not
def stroke_antialias(bool)
bool = bool ? '1' : '0'
primitive "stroke-antialias #{bool}"
end
# Specify a stroke dash pattern
def stroke_dasharray(*list)
if list.length.zero?
primitive 'stroke-dasharray none'
else
list.each do |x|
Kernel.raise ArgumentError, "dash array elements must be > 0 (#{x} given)" if x <= 0
end
primitive "stroke-dasharray #{list.join(',')}"
end
end
# Specify the initial offset in the dash pattern
def stroke_dashoffset(value = 0)
primitive "stroke-dashoffset #{value}"
end
def stroke_linecap(value)
Kernel.raise ArgumentError, "Unknown linecap type: #{value}" unless %w[butt round square].include?(value.downcase)
primitive "stroke-linecap #{value}"
end
def stroke_linejoin(value)
Kernel.raise ArgumentError, "Unknown linejoin type: #{value}" unless %w[round miter bevel].include?(value.downcase)
primitive "stroke-linejoin #{value}"
end
def stroke_miterlimit(value)
Kernel.raise ArgumentError, 'miterlimit must be >= 1' if value < 1
primitive "stroke-miterlimit #{value}"
end
# Specify opacity of stroke drawing color
# (use "xx%" to indicate percentage)
def stroke_opacity(value)
primitive "stroke-opacity #{value}"
end
# Specify stroke (outline) width in pixels.
def stroke_width(pixels)
primitive "stroke-width #{pixels}"
end
# Draw text at position x,y. Add quotes to text that is not already quoted.
def text(x, y, text)
Kernel.raise ArgumentError, 'missing text argument' if text.to_s.empty?
if text.length > 2 && /\A(?:\"[^\"]+\"|\'[^\']+\'|\{[^\}]+\})\z/.match(text)
# text already quoted
elsif !text['\'']
text = '\'' + text + '\''
elsif !text['"']
text = '"' + text + '"'
elsif !(text['{'] || text['}'])
text = '{' + text + '}'
else
# escape existing braces, surround with braces
text = '{' + text.gsub(/[}]/) { |b| '\\' + b } + '}'
end
primitive "text #{x},#{y} #{text}"
end
# Specify text alignment relative to a given point
def text_align(alignment)
Kernel.raise ArgumentError, "Unknown alignment constant: #{alignment}" unless ALIGN_TYPE_NAMES.key?(alignment.to_i)
primitive "text-align #{ALIGN_TYPE_NAMES[alignment.to_i]}"
end
# SVG-compatible version of text_align
def text_anchor(anchor)
Kernel.raise ArgumentError, "Unknown anchor constant: #{anchor}" unless ANCHOR_TYPE_NAMES.key?(anchor.to_i)
primitive "text-anchor #{ANCHOR_TYPE_NAMES[anchor.to_i]}"
end
# Specify if rendered text is to be antialiased.
def text_antialias(boolean)
boolean = boolean ? '1' : '0'
primitive "text-antialias #{boolean}"
end
# Specify color underneath text
def text_undercolor(color)
primitive "text-undercolor #{enquote(color)}"
end
# Specify center of coordinate space to use for subsequent drawing
# commands.
def translate(x, y)
primitive "translate #{x},#{y}"
end
end # class Magick::Draw
|
mailgun/mailgun-ruby | lib/mailgun/client.rb | Mailgun.Client.post | ruby | def post(resource_path, data, headers = {})
response = @http_client[resource_path].post(data, headers)
Response.new(response)
rescue => err
raise communication_error err
end | Generic Mailgun POST Handler
@param [String] resource_path This is the API resource you wish to interact
with. Be sure to include your domain, where necessary.
@param [Hash] data This should be a standard Hash
containing required parameters for the requested resource.
@param [Hash] headers Additional headers to pass to the resource.
@return [Mailgun::Response] A Mailgun::Response object. | train | https://github.com/mailgun/mailgun-ruby/blob/265efffd51209b0170a3225bbe945b649643465a/lib/mailgun/client.rb#L103-L108 | class Client
def initialize(api_key = Mailgun.api_key,
api_host = 'api.mailgun.net',
api_version = 'v3',
ssl = true,
test_mode = false,
timeout = nil)
endpoint = endpoint_generator(api_host, api_version, ssl)
@http_client = RestClient::Resource.new(endpoint,
user: 'api',
password: api_key,
user_agent: "mailgun-sdk-ruby/#{Mailgun::VERSION}",
timeout: timeout)
@test_mode = test_mode
end
# Enable test mode
#
# Prevents sending of any messages.
def enable_test_mode!
@test_mode = true
end
# Disable test mode
#
# Reverts the test_mode flag and allows the client to send messages.
def disable_test_mode!
@test_mode = false
end
# Client is in test mode?
#
# @return [Boolean] Is the client set in test mode?
def test_mode?
@test_mode
end
# Provides a store of all the emails sent in test mode so you can check them.
#
# @return [Hash]
def self.deliveries
@@deliveries ||= []
end
# Simple Message Sending
#
# @param [String] working_domain This is the domain you wish to send from.
# @param [Hash] data This should be a standard Hash
# containing required parameters for the requested resource.
# @return [Mailgun::Response] A Mailgun::Response object.
def send_message(working_domain, data)
if test_mode? then
Mailgun::Client.deliveries << data
return Response.from_hash(
{
:body => '{"id": "test-mode-mail@localhost", "message": "Queued. Thank you."}',
:code => 200,
}
)
end
case data
when Hash
# Remove nil values from the data hash
# Submitting nils to the API will likely cause an error.
# See also: https://github.com/mailgun/mailgun-ruby/issues/32
data = data.select { |k, v| v != nil }
if data.key?(:message)
if data[:message].is_a?(String)
data[:message] = convert_string_to_file(data[:message])
end
return post("#{working_domain}/messages.mime", data)
end
post("#{working_domain}/messages", data)
when MessageBuilder
post("#{working_domain}/messages", data.message)
else
fail ParameterError.new('Unknown data type for data parameter.', data)
end
end
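# A minimal usage sketch; the API key, domain, and addresses below are
# placeholders, not values shipped with this library:
#   mg_client = Mailgun::Client.new('your-api-key')
#   mg_client.send_message('example.mailgun.org',
#                          from: 'sender@example.mailgun.org',
#                          to: 'recipient@example.com',
#                          subject: 'Hello',
#                          text: 'Sent with mailgun-ruby')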
# Generic Mailgun POST Handler
#
# @param [String] resource_path This is the API resource you wish to interact
# with. Be sure to include your domain, where necessary.
# @param [Hash] data This should be a standard Hash
# containing required parameters for the requested resource.
# @param [Hash] headers Additional headers to pass to the resource.
# @return [Mailgun::Response] A Mailgun::Response object.
# Generic Mailgun GET Handler
#
# @param [String] resource_path This is the API resource you wish to interact
# with. Be sure to include your domain, where necessary.
# @param [Hash] params This should be a standard Hash
# containing required parameters for the requested resource.
# @param [String] accept Acceptable Content-Type of the response body.
# @return [Mailgun::Response] A Mailgun::Response object.
def get(resource_path, params = nil, accept = '*/*')
if params
response = @http_client[resource_path].get(params: params, accept: accept)
else
response = @http_client[resource_path].get(accept: accept)
end
Response.new(response)
rescue => err
raise communication_error err
end
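# For example (a sketch; the domain and resource path are placeholders):
#   mg_client.get('example.mailgun.org/events', { limit: 10 })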
# Generic Mailgun PUT Handler
#
# @param [String] resource_path This is the API resource you wish to interact
# with. Be sure to include your domain, where necessary.
# @param [Hash] data This should be a standard Hash
# containing required parameters for the requested resource.
# @return [Mailgun::Response] A Mailgun::Response object.
def put(resource_path, data)
response = @http_client[resource_path].put(data)
Response.new(response)
rescue => err
raise communication_error err
end
# Generic Mailgun DELETE Handler
#
# @param [String] resource_path This is the API resource you wish to interact
# with. Be sure to include your domain, where necessary.
# @return [Mailgun::Response] A Mailgun::Response object.
def delete(resource_path)
response = @http_client[resource_path].delete
Response.new(response)
rescue => err
raise communication_error err
end
# Constructs a Suppressions client for the given domain.
#
# @param [String] domain Domain which suppressions requests will be made for
# @return [Mailgun::Suppressions]
def suppressions(domain)
Suppressions.new(self, domain)
end
private
# Converts MIME string to file for easy uploading to API
#
# @param [String] string MIME string to post to API
# @return [File] File object
def convert_string_to_file(string)
file = Tempfile.new('MG_TMP_MIME')
file.write(string)
file.rewind
file
end
# Generates the endpoint URL for the API. Allows overriding
# API endpoint, API versions, and toggling SSL.
#
# @param [String] api_host URL endpoint the library will hit
# @param [String] api_version The version of the API to hit
# @param [Boolean] ssl True, SSL. False, No SSL.
# @return [string] concatenated URL string
def endpoint_generator(api_host, api_version, ssl)
ssl ? scheme = 'https' : scheme = 'http'
if api_version
"#{scheme}://#{api_host}/#{api_version}"
else
"#{scheme}://#{api_host}"
end
end
# Raises CommunicationError and stores response in it if present
#
# @param [StandardException] e upstream exception object
def communication_error(e)
return CommunicationError.new(e.message, e.response) if e.respond_to? :response
CommunicationError.new(e.message)
end
end
|
dmitrizagidulin/riagent | lib/riagent/persistence.rb | Riagent.Persistence.save! | ruby | def save!(options={:validate => true})
unless save(options)
raise Riagent::InvalidDocumentError.new(self)
end
true
end | Attempts to validate and save the document just like +save+ but will raise a +Riagent::InvalidDocumentError+
exception instead of returning +false+ if the doc is not valid. | train | https://github.com/dmitrizagidulin/riagent/blob/074bbb9c354abc1ba2037d704b0706caa3f34f37/lib/riagent/persistence.rb#L73-L78 | module Persistence
extend ActiveSupport::Concern
COLLECTION_TYPES = [:riak_kv]
# Key Listing strategies for +:riak_kv+ collections
VALID_KEY_LISTS = [:streaming_list_keys, :riak_dt_set]
included do
extend ActiveModel::Callbacks
define_model_callbacks :create, :update, :save, :destroy
end
# Delete the document from its collection
def destroy
return nil if self.new_record?
run_callbacks(:destroy) do
self.class.persistence.remove(self)
@destroyed = true
end
end
# Performs validations and saves the document
# The validation process can be skipped by passing <tt>validate: false</tt>.
# Also triggers :before_create / :after_create type callbacks
# @return [String] Returns the key for the inserted document
def save(options={:validate => true})
context = self.new_record? ? :create : :update
return false if options[:validate] && !valid?(context)
run_callbacks(context) do
if context == :create
key = self.class.persistence.insert(self)
else
key = self.class.persistence.update(self)
end
self.persist!
key
end
end
# Attempts to validate and save the document just like +save+ but will raise a +Riagent::InvalidDocumentError+
# exception instead of returning +false+ if the doc is not valid.
# Update an object's attributes and save it
def update(attrs)
run_callbacks(:update) do
self.attributes = attrs
self.save
end
end
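# Illustrative call sequence, assuming a hypothetical User ActiveDocument
# with a :name attribute:
#   user = User.new(name: 'joe')
#   user.save!                        # raises Riagent::InvalidDocumentError if invalid
#   user.update(name: 'joe armstrong')
#   user.destroy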
# Perform an update(), raise an error if the doc is not valid
def update!(attrs)
unless update(attrs)
raise Riagent::InvalidDocumentError.new(self)
end
true
end
# Update attributes (alias for update() for Rails versions < 4)
def update_attributes(attrs)
self.update(attrs)
end
module ClassMethods
# Return all the documents in the collection
# @param [Integer] results_limit Number of results returned
# @return [Array|nil] of ActiveDocument instances
def all(results_limit=1000)
self.persistence.all(results_limit)
end
# Set the document's persistence strategy
# Usage:
# <code>
# class SomeModel
# include Riagent::ActiveDocument
# collection_type :riak_kv, # Persist to a Riak::Bucket
# list_keys_using: :riak_dt_set #keep track of keys in a Set CRDT data type
# end
# </code>
def collection_type(coll_type, options={})
unless COLLECTION_TYPES.include? coll_type
raise ArgumentError, "Invalid collection type: #{coll_type.to_s}"
end
@collection_type = coll_type
case @collection_type
when :riak_kv
self.persistence = Riagent::Persistence::RiakKVStrategy.new(self)
if options.has_key? :list_keys_using
if options[:list_keys_using] == :streaming_list_keys
self.persistence = Riagent::Persistence::RiakNoIndexStrategy.new(self)
elsif options[:list_keys_using] == :riak_dt_set
self.persistence = Riagent::Persistence::RiakDTSetStrategy.new(self)
end
end
end
end
# Load a document by key.
def find(key)
return nil if key.nil? or key.empty?
self.persistence.find(key)
end
# Return the first document that matches the query
def find_one(query)
unless self.persistence.allows_query?
raise NotImplementedError, "This collection type does not support querying"
end
self.persistence.find_one(query)
end
def get_collection_type
@collection_type ||= nil
end
def persistence
@persistence ||= nil
end
def persistence=(persistence_strategy)
@persistence = persistence_strategy
end
# Return all documents that match the query
def where(query)
unless self.persistence.allows_query?
raise NotImplementedError, "This collection type does not support querying"
end
self.persistence.where(query)
end
end
end
|
davidbarral/sugarfree-config | lib/sugarfree-config/config.rb | SugarfreeConfig.ConfigIterator.next | ruby | def next
if (value = @scoped_config[@path_elements.last]).nil?
raise ConfigKeyException.new(@path_elements)
elsif value.is_a?(Hash)
@scoped_config = value
self
else
value
end
end | Iterate to the next element in the path
Algorithm:
1. Get the last element of the key path
2. Try to find it in the scoped config
3. If not present raise an error
4. If present and is a hash we are not in a config leaf, so the scoped
config is reset to this new value and self is returned
5. If present and is a value then return the value | train | https://github.com/davidbarral/sugarfree-config/blob/76b590627d50cd50b237c21fdf8ea3022ebbdf42/lib/sugarfree-config/config.rb#L117-L126 | class ConfigIterator
#
# Create a new iterator with a given +configuration+ and the first
# element of the path to be iterated (+first_path_element+)
#
def initialize(configuration, first_path_element)
@scoped_config = configuration
@path_elements = [first_path_element.to_s]
end
#
# Returns the current scope as a hash. Useful to get a big hash of config
# that will be used later.
#
def to_hash
@scoped_config
end
#
# Here is the magic. When an unknown symbol is passed this symbol is set
# as the last path element of this iteration, and the iterator is then
# forced to make that movement
#
def method_missing(symbol, *args)
@path_elements << symbol.to_s
self.next
end
#
# Iterate to the next element in the path
#
# Algorithm:
# 1. Get the last element of the key path
# 2. Try to find it in the scoped config
# 3. If not present raise an error
# 4. If present and is a hash we are not in a config leaf, so the scoped
# config is reset to this new value and self is returned
# 5. If present and is a value then return the value
#
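# Walk-through sketch, assuming a configuration hash such as
# { "db" => { "host" => "localhost" } }:
#
#   it = ConfigIterator.new(config, :db)
#   it.next   # scoped to { "host" => "localhost" }, returns the iterator
#   it.host   # => "localhost" (method_missing appends "host" and iterates again)
#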
end
|
visoft/ruby_odata | lib/ruby_odata/service.rb | OData.Service.build_property_metadata | ruby | def build_property_metadata(props, keys=[])
metadata = {}
props.each do |property_element|
prop_meta = PropertyMetadata.new(property_element)
prop_meta.is_key = keys.include?(prop_meta.name)
# If this is a navigation property, we need to add the association to the property metadata
prop_meta.association = Association.new(property_element, @edmx) if prop_meta.nav_prop
metadata[prop_meta.name] = prop_meta
end
metadata
end | Builds the metadata needed for each property for things like feed customizations and navigation properties | train | https://github.com/visoft/ruby_odata/blob/ca3d441494aa2f745c7f7fb2cd90173956f73663/lib/ruby_odata/service.rb#L353-L364 | class Service
attr_reader :classes, :class_metadata, :options, :collections, :edmx, :function_imports, :response
# Creates a new instance of the Service class
#
# @param [String] service_uri the root URI of the OData service
# @param [Hash] options the options to pass to the service
# @option options [String] :username for http basic auth
# @option options [String] :password for http basic auth
# @option options [Object] :verify_ssl false if no verification, otherwise mode (OpenSSL::SSL::VERIFY_PEER is default)
# @option options [Hash] :rest_options a hash of rest-client options that will be passed to all OData::Resource.new calls
# @option options [Hash] :additional_params a hash of query string params that will be passed on all calls
# @option options [Boolean, true] :eager_partial true if queries should consume partial feeds until the feed is complete, false if explicit calls to next must be performed
def initialize(service_uri, options = {})
@uri = service_uri.gsub!(/\/?$/, '')
set_options! options
default_instance_vars!
set_namespaces
build_collections_and_classes
end
# Handles the dynamic `AddTo<EntityName>` methods as well as the collections on the service
def method_missing(name, *args)
# Queries
if @collections.include?(name.to_s)
@query = build_collection_query_object(name,@additional_params, *args)
return @query
# Adds
elsif name.to_s =~ /^AddTo(.*)/
type = $1
if @collections.include?(type)
@save_operations << Operation.new("Add", $1, args[0])
else
super
end
elsif @function_imports.include?(name.to_s)
execute_import_function(name.to_s, args)
else
super
end
end
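# Typical flow (a sketch; the service URL and the "Products" collection are
# illustrative):
#   svc = OData::Service.new('http://example.com/Example.svc')
#   svc.Products(1)                  # builds a query against the Products collection
#   product = svc.execute.first
#   svc.AddToProducts(new_product)   # queues an insert
#   svc.save_changes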
# Queues an object for deletion. To actually remove it from the server, you must call save_changes as well.
#
# @param [Object] obj the object to mark for deletion
#
# @raise [NotSupportedError] if the `obj` isn't a tracked entity
def delete_object(obj)
type = obj.class.to_s
if obj.respond_to?(:__metadata) && !obj.send(:__metadata).nil?
@save_operations << Operation.new("Delete", type, obj)
else
raise OData::NotSupportedError.new "You cannot delete a non-tracked entity"
end
end
# Queues an object for update. To actually update it on the server, you must call save_changes as well.
#
# @param [Object] obj the object to queue for update
#
# @raise [NotSupportedError] if the `obj` isn't a tracked entity
def update_object(obj)
type = obj.class.to_s
if obj.respond_to?(:__metadata) && !obj.send(:__metadata).nil?
@save_operations << Operation.new("Update", type, obj)
else
raise OData::NotSupportedError.new "You cannot update a non-tracked entity"
end
end
# Performs save operations (Create/Update/Delete) against the server
def save_changes
return nil if @save_operations.empty?
result = nil
begin
if @save_operations.length == 1
result = single_save(@save_operations[0])
else
result = batch_save(@save_operations)
end
# TODO: We should probably perform a check here
# to make sure everything worked before clearing it out
@save_operations.clear
return result
rescue Exception => e
handle_exception(e)
end
end
# Performs query operations (Read) against the server.
# Typically this returns an array of record instances, except in the case of count queries
# @raise [ServiceError] if there is an error when talking to the service
def execute
begin
@response = OData::Resource.new(build_query_uri, @rest_options).get
rescue Exception => e
handle_exception(e)
end
return Integer(@response.body) if @response.body =~ /\A\d+\z/
handle_collection_result(@response.body)
end
# Overridden to identify methods handled by method_missing
def respond_to?(method)
if @collections.include?(method.to_s)
return true
# Adds
elsif method.to_s =~ /^AddTo(.*)/
type = $1
if @collections.include?(type)
return true
else
super
end
# Function Imports
elsif @function_imports.include?(method.to_s)
return true
else
super
end
end
# Retrieves the next resultset of a partial result (if any). Does not honor the `:eager_partial` option.
def next
return if not partial?
handle_partial
end
# Does the most recent collection returned represent a partial collection? Will always be false if a query hasn't executed, even if the query would have a partial
def partial?
@has_partial
end
# Lazy loads a navigation property on a model
#
# @param [Object] obj the object to fill
# @param [String] nav_prop the navigation property to fill
#
# @raise [NotSupportedError] if the `obj` isn't a tracked entity
# @raise [ArgumentError] if the `nav_prop` isn't a valid navigation property
def load_property(obj, nav_prop)
raise NotSupportedError, "You cannot load a property on an entity that isn't tracked" if obj.send(:__metadata).nil?
raise ArgumentError, "'#{nav_prop}' is not a valid navigation property" unless obj.respond_to?(nav_prop.to_sym)
raise ArgumentError, "'#{nav_prop}' is not a valid navigation property" unless @class_metadata[obj.class.to_s][nav_prop].nav_prop
results = OData::Resource.new(build_load_property_uri(obj, nav_prop), @rest_options).get
prop_results = build_classes_from_result(results.body)
obj.send "#{nav_prop}=", (singular?(nav_prop) ? prop_results.first : prop_results)
end
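# For example (illustrative names):
#   svc.load_property(product, 'Category')
#   product.Category   # now populated without building a separate query by hand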
# Adds a child object to a parent object's collection
#
# @param [Object] parent the parent object
# @param [String] nav_prop the name of the navigation property to add the child to
# @param [Object] child the child object
# @raise [NotSupportedError] if the `parent` isn't a tracked entity
# @raise [ArgumentError] if the `nav_prop` isn't a valid navigation property
# @raise [NotSupportedError] if the `child` isn't a tracked entity
def add_link(parent, nav_prop, child)
raise NotSupportedError, "You cannot add a link on an entity that isn't tracked (#{parent.class})" if parent.send(:__metadata).nil?
raise ArgumentError, "'#{nav_prop}' is not a valid navigation property for #{parent.class}" unless parent.respond_to?(nav_prop.to_sym)
raise ArgumentError, "'#{nav_prop}' is not a valid navigation property for #{parent.class}" unless @class_metadata[parent.class.to_s][nav_prop].nav_prop
raise NotSupportedError, "You cannot add a link on a child entity that isn't tracked (#{child.class})" if child.send(:__metadata).nil?
@save_operations << Operation.new("AddLink", nav_prop, parent, child)
end
private
# Constructs a QueryBuilder instance for a collection using the arguments provided.
#
# @param [String] name the name of the collection
# @param [Hash] additional_parameters the additional parameters
# @param [Array] args the arguments to use for query
def build_collection_query_object(name, additional_parameters, *args)
root = "/#{name.to_s}"
if args.empty?
#nothing to add
elsif args.size == 1
if args.first.to_s =~ /\d+/
id_metadata = find_id_metadata(name.to_s)
root << build_id_path(args.first, id_metadata)
else
root << "(#{args.first})"
end
else
root << "(#{args.join(',')})"
end
QueryBuilder.new(root, additional_parameters)
end
# Finds the metadata associated with the given collection's first id property
# Remarks: This is used for single item lookup queries using the ID, e.g. Products(1), not complex primary keys
#
# @param [String] collection_name the name of the collection
def find_id_metadata(collection_name)
collection_data = @collections.fetch(collection_name)
class_metadata = @class_metadata.fetch(collection_data[:type].to_s)
key = class_metadata.select{|k,h| h.is_key }.collect{|k,h| h.name }[0]
class_metadata[key]
end
# Builds the ID expression of a given id for query
#
# @param [Object] id_value the actual value to be used
# @param [PropertyMetadata] id_metadata the property metadata object for the id
def build_id_path(id_value, id_metadata)
if id_metadata.type == "Edm.Int64"
"(#{id_value}L)"
else
"(#{id_value})"
end
end
def set_options!(options)
@options = options
if @options[:eager_partial].nil?
@options[:eager_partial] = true
end
@rest_options = { :verify_ssl => get_verify_mode, :user => @options[:username], :password => @options[:password] }
@rest_options.merge!(options[:rest_options] || {})
@additional_params = options[:additional_params] || {}
@namespace = options[:namespace]
@json_type = options[:json_type] || 'application/json'
end
def default_instance_vars!
@collections = {}
@function_imports = {}
@save_operations = []
@has_partial = false
@next_uri = nil
end
def set_namespaces
@edmx = Nokogiri::XML(OData::Resource.new(build_metadata_uri, @rest_options).get.body)
@ds_namespaces = {
"m" => "http://schemas.microsoft.com/ado/2007/08/dataservices/metadata",
"edmx" => "http://schemas.microsoft.com/ado/2007/06/edmx",
"ds" => "http://schemas.microsoft.com/ado/2007/08/dataservices",
"atom" => "http://www.w3.org/2005/Atom"
}
# Get the edm namespace from the edmx
edm_ns = @edmx.xpath("edmx:Edmx/edmx:DataServices/*", @namespaces).first.namespaces['xmlns'].to_s
@ds_namespaces.merge! "edm" => edm_ns
end
# Gets ssl certificate verification mode, or defaults to verify_peer
def get_verify_mode
if @options[:verify_ssl].nil?
return OpenSSL::SSL::VERIFY_PEER
else
return @options[:verify_ssl]
end
end
# Build the classes required by the metadata
def build_collections_and_classes
@classes = Hash.new
@class_metadata = Hash.new # This is used to store property information about a class
# Build complex types first, these will be used for entities
complex_types = @edmx.xpath("//edm:ComplexType", @ds_namespaces) || []
complex_types.each do |c|
name = qualify_class_name(c['Name'])
props = c.xpath(".//edm:Property", @ds_namespaces)
methods = props.collect { |p| p['Name'] } # Standard Properties
@classes[name] = ClassBuilder.new(name, methods, [], self, @namespace).build unless @classes.keys.include?(name)
end
entity_types = @edmx.xpath("//edm:EntityType", @ds_namespaces)
entity_types.each do |e|
next if e['Abstract'] == "true"
klass_name = qualify_class_name(e['Name'])
methods = collect_properties(klass_name, e, @edmx)
nav_props = collect_navigation_properties(klass_name, e, @edmx)
@classes[klass_name] = ClassBuilder.new(klass_name, methods, nav_props, self, @namespace).build unless @classes.keys.include?(klass_name)
end
# Fill in the collections instance variable
collections = @edmx.xpath("//edm:EntityContainer/edm:EntitySet", @ds_namespaces)
collections.each do |c|
entity_type = c["EntityType"]
@collections[c["Name"]] = { :edmx_type => entity_type, :type => convert_to_local_type(entity_type) }
end
build_function_imports
end
# Parses the function imports and fills the @function_imports collection
def build_function_imports
# Fill in the function imports
functions = @edmx.xpath("//edm:EntityContainer/edm:FunctionImport", @ds_namespaces)
functions.each do |f|
http_method_attribute = f.xpath("@m:HttpMethod", @ds_namespaces).first # HttpMethod is no longer required http://www.odata.org/2011/10/actions-in-odata/
is_side_effecting_attribute = f.xpath("@edm:IsSideEffecting", @ds_namespaces).first
http_method = 'POST' # default to POST
if http_method_attribute
http_method = http_method_attribute.content
elsif is_side_effecting_attribute
is_side_effecting = is_side_effecting_attribute.content
http_method = is_side_effecting ? 'POST' : 'GET'
end
return_type = f["ReturnType"]
inner_return_type = nil
unless return_type.nil?
return_type = (return_type =~ /^Collection/) ? Array : convert_to_local_type(return_type)
if f["ReturnType"] =~ /\((.*)\)/
inner_return_type = convert_to_local_type($~[1])
end
end
params = f.xpath("edm:Parameter", @ds_namespaces)
parameters = nil
if params.length > 0
parameters = {}
params.each do |p|
parameters[p["Name"]] = p["Type"]
end
end
@function_imports[f["Name"]] = {
:http_method => http_method,
:return_type => return_type,
:inner_return_type => inner_return_type,
:parameters => parameters }
end
end
# Converts the EDMX model type to the local model type
def convert_to_local_type(edmx_type)
return edm_to_ruby_type(edmx_type) if edmx_type =~ /^Edm/
klass_name = qualify_class_name(edmx_type.split('.').last)
klass_name.camelize.constantize
end
# Converts a class name to its fully qualified name (if applicable) and returns the new name
def qualify_class_name(klass_name)
unless @namespace.nil? || @namespace.blank? || klass_name.include?('::')
namespaces = @namespace.split(/\.|::/)
namespaces << klass_name
klass_name = namespaces.join '::'
end
klass_name.camelize
end
# Builds the metadata need for each property for things like feed customizations and navigation properties
# Handle parsing of OData Atom result and return an array of Entry classes
def handle_collection_result(result)
results = build_classes_from_result(result)
while partial? && @options[:eager_partial]
results.concat handle_partial
end
results
end
# Handles errors from the OData service
def handle_exception(e)
raise e unless defined?(e.response) && e.response != nil
code = e.response[:status]
error = Nokogiri::XML(e.response[:body])
message = if error.xpath("m:error/m:message", @ds_namespaces).first
error.xpath("m:error/m:message", @ds_namespaces).first.content
else
"Server returned error but no message."
end
raise ServiceError.new(code), message
end
# Loops through the standard properties (non-navigation) for a given class and returns the appropriate list of methods
def collect_properties(klass_name, element, doc)
props = element.xpath(".//edm:Property", @ds_namespaces)
key_elemnts = element.xpath(".//edm:Key//edm:PropertyRef", @ds_namespaces)
keys = key_elemnts.collect { |k| k['Name'] }
@class_metadata[klass_name] = build_property_metadata(props, keys)
methods = props.collect { |p| p['Name'] }
unless element["BaseType"].nil?
base = element["BaseType"].split(".").last()
baseType = doc.xpath("//edm:EntityType[@Name=\"#{base}\"]", @ds_namespaces).first()
props = baseType.xpath(".//edm:Property", @ds_namespaces)
@class_metadata[klass_name].merge!(build_property_metadata(props))
methods = methods.concat(props.collect { |p| p['Name']})
end
methods
end
# Similar to +collect_properties+, but handles the navigation properties
def collect_navigation_properties(klass_name, element, doc)
nav_props = element.xpath(".//edm:NavigationProperty", @ds_namespaces)
@class_metadata[klass_name].merge!(build_property_metadata(nav_props))
nav_props.collect { |p| p['Name'] }
end
# Helper to loop through a result and create an instance for each entity in the results
def build_classes_from_result(result)
doc = Nokogiri::XML(result)
is_links = doc.at_xpath("/ds:links", @ds_namespaces)
return parse_link_results(doc) if is_links
entries = doc.xpath("//atom:entry[not(ancestor::atom:entry)]", @ds_namespaces)
extract_partial(doc)
results = []
entries.each do |entry|
results << entry_to_class(entry)
end
return results
end
# Converts an XML Entry into a class
def entry_to_class(entry)
# Retrieve the class name from the fully qualified name (the last string after the last dot)
klass_name = entry.xpath("./atom:category/@term", @ds_namespaces).to_s.split('.')[-1]
# Is the category missing? See if there is a title that we can use to build the class
if klass_name.nil?
title = entry.xpath("./atom:title", @ds_namespaces).first
return nil if title.nil?
klass_name = title.content.to_s
end
return nil if klass_name.nil?
properties = entry.xpath("./atom:content/m:properties/*", @ds_namespaces)
klass = @classes[qualify_class_name(klass_name)].new
# Fill metadata
meta_id = entry.xpath("./atom:id", @ds_namespaces)[0].content
klass.send :__metadata=, { :uri => meta_id }
# Fill properties
for prop in properties
prop_name = prop.name
klass.send "#{prop_name}=", parse_value_xml(prop)
end
# Fill properties represented outside of the properties collection
@class_metadata[qualify_class_name(klass_name)].select { |k,v| v.fc_keep_in_content == false }.each do |k, meta|
if meta.fc_target_path == "SyndicationTitle"
title = entry.xpath("./atom:title", @ds_namespaces).first
klass.send "#{meta.name}=", title.content
elsif meta.fc_target_path == "SyndicationSummary"
summary = entry.xpath("./atom:summary", @ds_namespaces).first
klass.send "#{meta.name}=", summary.content
end
end
inline_links = entry.xpath("./atom:link[m:inline]", @ds_namespaces)
for link in inline_links
# TODO: Use the metadata's associations to determine the multiplicity instead of this "hack"
property_name = link.attributes['title'].to_s
if singular?(property_name)
inline_entry = link.xpath("./m:inline/atom:entry", @ds_namespaces).first
inline_klass = build_inline_class(klass, inline_entry, property_name)
klass.send "#{property_name}=", inline_klass
else
inline_classes, inline_entries = [], link.xpath("./m:inline/atom:feed/atom:entry", @ds_namespaces)
for inline_entry in inline_entries
# Build the class
inline_klass = entry_to_class(inline_entry)
# Add the property to the temp collection
inline_classes << inline_klass
end
# Assign the array of classes to the property
property_name = link.xpath("@title", @ds_namespaces)
klass.send "#{property_name}=", inline_classes
end
end
klass
end
# Tests for and extracts the next href of a partial
def extract_partial(doc)
next_links = doc.xpath('//atom:link[@rel="next"]', @ds_namespaces)
@has_partial = next_links.any?
if @has_partial
uri = Addressable::URI.parse(next_links[0]['href'])
uri.query_values = uri.query_values.merge @additional_params unless @additional_params.empty?
@next_uri = uri.to_s
end
end
def handle_partial
if @next_uri
result = OData::Resource.new(@next_uri, @rest_options).get
results = handle_collection_result(result.body)
end
results
end
# Handle link results
def parse_link_results(doc)
uris = doc.xpath("/ds:links/ds:uri", @ds_namespaces)
results = []
uris.each do |uri_el|
link = uri_el.content
results << URI.parse(link)
end
results
end
# Build URIs
def build_metadata_uri
uri = "#{@uri}/$metadata"
uri << "?#{@additional_params.to_query}" unless @additional_params.empty?
uri
end
def build_query_uri
"#{@uri}#{@query.query}"
end
def build_save_uri(operation)
uri = "#{@uri}/#{operation.klass_name}"
uri << "?#{@additional_params.to_query}" unless @additional_params.empty?
uri
end
def build_add_link_uri(operation)
uri = operation.klass.send(:__metadata)[:uri].dup
uri << "/$links/#{operation.klass_name}"
uri << "?#{@additional_params.to_query}" unless @additional_params.empty?
uri
end
def build_resource_uri(operation)
uri = operation.klass.send(:__metadata)[:uri].dup
uri << "?#{@additional_params.to_query}" unless @additional_params.empty?
uri
end
def build_batch_uri
uri = "#{@uri}/$batch"
uri << "?#{@additional_params.to_query}" unless @additional_params.empty?
uri
end
def build_load_property_uri(obj, property)
uri = obj.__metadata[:uri].dup
uri << "/#{property}"
uri
end
def build_function_import_uri(name, params)
uri = "#{@uri}/#{name}"
params.merge! @additional_params
uri << "?#{params.to_query}" unless params.empty?
uri
end
def build_inline_class(klass, entry, property_name)
# Build the class
inline_klass = entry_to_class(entry)
# Add the property
klass.send "#{property_name}=", inline_klass
end
# Used to link a child object to its parent and vice-versa after an add_link operation
def link_child_to_parent(operation)
child_collection = operation.klass.send("#{operation.klass_name}") || []
child_collection << operation.child_klass
operation.klass.send("#{operation.klass_name}=", child_collection)
# Attach the parent to the child
parent_meta = @class_metadata[operation.klass.class.to_s][operation.klass_name]
child_meta = @class_metadata[operation.child_klass.class.to_s]
# Find the matching relationship on the child object
child_properties = Helpers.normalize_to_hash(
child_meta.select { |k, prop|
prop.nav_prop &&
prop.association.relationship == parent_meta.association.relationship })
child_property_to_set = child_properties.keys.first # There should be only one match
# TODO: Handle many to many scenarios where the child property is an enumerable
operation.child_klass.send("#{child_property_to_set}=", operation.klass)
end
def single_save(operation)
if operation.kind == "Add"
save_uri = build_save_uri(operation)
json_klass = operation.klass.to_json(:type => :add)
post_result = OData::Resource.new(save_uri, @rest_options).post json_klass, {:content_type => @json_type}
return build_classes_from_result(post_result.body)
elsif operation.kind == "Update"
update_uri = build_resource_uri(operation)
json_klass = operation.klass.to_json
update_result = OData::Resource.new(update_uri, @rest_options).put json_klass, {:content_type => @json_type}
return (update_result.status == 204)
elsif operation.kind == "Delete"
delete_uri = build_resource_uri(operation)
delete_result = OData::Resource.new(delete_uri, @rest_options).delete
return (delete_result.status == 204)
elsif operation.kind == "AddLink"
save_uri = build_add_link_uri(operation)
json_klass = operation.child_klass.to_json(:type => :link)
post_result = OData::Resource.new(save_uri, @rest_options).post json_klass, {:content_type => @json_type}
# Attach the child to the parent
link_child_to_parent(operation) if (post_result.status == 204)
return(post_result.status == 204)
end
end
# Batch Saves
def generate_guid
rand(36**12).to_s(36).insert(4, "-").insert(9, "-")
end
def batch_save(operations)
batch_num = generate_guid
changeset_num = generate_guid
batch_uri = build_batch_uri
body = build_batch_body(operations, batch_num, changeset_num)
result = OData::Resource.new( batch_uri, @rest_options).post body, {:content_type => "multipart/mixed; boundary=batch_#{batch_num}"}
# TODO: More result validation needs to be done.
# The result returns HTTP 202 even if there is an error in the batch
return (result.status == 202)
end
def build_batch_body(operations, batch_num, changeset_num)
# Header
body = "--batch_#{batch_num}\n"
body << "Content-Type: multipart/mixed;boundary=changeset_#{changeset_num}\n\n"
# Operations
operations.each do |operation|
body << build_batch_operation(operation, changeset_num)
body << "\n"
end
# Footer
body << "\n\n--changeset_#{changeset_num}--\n"
body << "--batch_#{batch_num}--"
return body
end
def build_batch_operation(operation, changeset_num)
accept_headers = "Accept-Charset: utf-8\n"
accept_headers << "Content-Type: application/json;charset=utf-8\n" unless operation.kind == "Delete"
accept_headers << "\n"
content = "--changeset_#{changeset_num}\n"
content << "Content-Type: application/http\n"
content << "Content-Transfer-Encoding: binary\n\n"
if operation.kind == "Add"
save_uri = "#{@uri}/#{operation.klass_name}"
json_klass = operation.klass.to_json(:type => :add)
content << "POST #{save_uri} HTTP/1.1\n"
content << accept_headers
content << json_klass
elsif operation.kind == "Update"
update_uri = operation.klass.send(:__metadata)[:uri]
json_klass = operation.klass.to_json
content << "PUT #{update_uri} HTTP/1.1\n"
content << accept_headers
content << json_klass
elsif operation.kind == "Delete"
delete_uri = operation.klass.send(:__metadata)[:uri]
content << "DELETE #{delete_uri} HTTP/1.1\n"
content << accept_headers
elsif operation.kind == "AddLink"
save_uri = build_add_link_uri(operation)
json_klass = operation.child_klass.to_json(:type => :link)
content << "POST #{save_uri} HTTP/1.1\n"
content << accept_headers
content << json_klass
link_child_to_parent(operation)
end
return content
end
# Complex Types
def complex_type_to_class(complex_type_xml)
type = Helpers.get_namespaced_attribute(complex_type_xml, 'type', 'm')
is_collection = false
# Extract the class name in case this is a Collection
if type =~ /\(([^)]*)\)/m
type = $~[1]
is_collection = true
collection = []
end
klass_name = qualify_class_name(type.split('.')[-1])
if is_collection
# extract the elements from the collection
elements = complex_type_xml.xpath(".//d:element", @namespaces)
elements.each do |e|
if type.match(/^Edm/)
collection << parse_value(e.content, type)
else
element = @classes[klass_name].new
fill_complex_type_properties(e, element)
collection << element
end
end
return collection
else
klass = @classes[klass_name].new
# Fill in the properties
fill_complex_type_properties(complex_type_xml, klass)
return klass
end
end
# Helper method for complex_type_to_class
def fill_complex_type_properties(complex_type_xml, klass)
properties = complex_type_xml.xpath(".//*")
properties.each do |prop|
klass.send "#{prop.name}=", parse_value_xml(prop)
end
end
# Field Converters
# Handles parsing datetimes from a string
def parse_date(sdate)
# Assume this is UTC if no timezone is specified
sdate = sdate + "Z" unless sdate.match(/Z|([+|-]\d{2}:\d{2})$/)
# This is to handle older versions of Ruby (e.g. ruby 1.8.7 (2010-12-23 patchlevel 330) [i386-mingw32])
# See http://makandra.com/notes/1017-maximum-representable-value-for-a-ruby-time-object
# In recent versions of Ruby, Time has a much larger range
begin
result = Time.parse(sdate)
rescue ArgumentError
result = DateTime.parse(sdate)
end
return result
end
# Parses a value into the proper type based on an xml property element
def parse_value_xml(property_xml)
property_type = Helpers.get_namespaced_attribute(property_xml, 'type', 'm')
property_null = Helpers.get_namespaced_attribute(property_xml, 'null', 'm')
if property_type.nil? || (property_type && property_type.match(/^Edm/))
return parse_value(property_xml.content, property_type, property_null)
end
complex_type_to_class(property_xml)
end
def parse_value(content, property_type = nil, property_null = nil)
# Handle anything marked as null
return nil if !property_null.nil? && property_null == "true"
# Handle a nil property type, this is a string
return content if property_type.nil?
# Handle integers
return content.to_i if property_type.match(/^Edm.Int/)
# Handle decimals
return content.to_d if property_type.match(/Edm.Decimal/)
# Handle DateTimes
# return Time.parse(property_xml.content) if property_type.match(/Edm.DateTime/)
return parse_date(content) if property_type.match(/Edm.DateTime/)
# If we can't parse the value, just return the element's content
content
end
# Parses a value into the proper type based on a specified return type
def parse_primative_type(value, return_type)
return value.to_i if return_type == Fixnum
return value.to_d if return_type == Float
return parse_date(value.to_s) if return_type == Time
return value.to_s
end
# Converts an edm type (string) to a ruby type
def edm_to_ruby_type(edm_type)
return String if edm_type =~ /Edm.String/
return Fixnum if edm_type =~ /^Edm.Int/
return Float if edm_type =~ /Edm.Decimal/
return Time if edm_type =~ /Edm.DateTime/
return String
end
# Method Missing Handlers
# Executes an import function
def execute_import_function(name, *args)
func = @function_imports[name]
# Check the args making sure that more weren't passed in than the function needs
param_count = func[:parameters].nil? ? 0 : func[:parameters].count
arg_count = args.nil? ? 0 : args[0].count
if arg_count > param_count
raise ArgumentError, "wrong number of arguments (#{arg_count} for #{param_count})"
end
# Convert the parameters to a hash
params = {}
func[:parameters].keys.each_with_index { |key, i| params[key] = args[0][i] } unless func[:parameters].nil?
function_uri = build_function_import_uri(name, params)
result = OData::Resource.new(function_uri, @rest_options).send(func[:http_method].downcase, {})
# Is this a 204 (No content) result?
return true if result.status == 204
# No? Then we need to parse the results. There are 4 kinds...
if func[:return_type] == Array
# a collection of entites
return build_classes_from_result(result.body) if @classes.include?(func[:inner_return_type].to_s)
# a collection of native types
elements = Nokogiri::XML(result.body).xpath("//ds:element", @ds_namespaces)
results = []
elements.each do |e|
results << parse_primative_type(e.content, func[:inner_return_type])
end
return results
end
# a single entity
if @classes.include?(func[:return_type].to_s)
entry = Nokogiri::XML(result.body).xpath("atom:entry[not(ancestor::atom:entry)]", @ds_namespaces)
return entry_to_class(entry)
end
# or a single native type
unless func[:return_type].nil?
e = Nokogiri::XML(result.body).xpath("/*").first
return parse_primative_type(e.content, func[:return_type])
end
# Nothing could be parsed, so just return whether or not we got a 200
return (result.status == 200)
end
# Helpers
def singular?(value)
value.singularize == value
end
end
|
xi-livecode/xi | lib/xi/pattern.rb | Xi.Pattern.reverse_each | ruby | def reverse_each
return enum_for(__method__) unless block_given?
each.to_a.reverse.each { |v| yield v }
end | Same as {#each} but in reverse order
@example
Pattern.new([1, 2, 3]).reverse_each.to_a
# => [3, 2, 1]
@return [Enumerator]
@yield [Object] value | train | https://github.com/xi-livecode/xi/blob/215dfb84899b3dd00f11089ae3eab0febf498e95/lib/xi/pattern.rb#L259-L262 | class Pattern
extend Generators
include Transforms
# Array or Proc that produces values or events
attr_reader :source
# Event delta in terms of cycles (default: 1)
attr_reader :delta
# Hash that contains metadata related to pattern usage
attr_reader :metadata
# Size of pattern
attr_reader :size
# Duration of pattern
attr_reader :duration
# Creates a new Pattern given either a +source+ or a +block+ that yields
# events.
#
# If a block is given, +yielder+ parameter must yield +value+ and +start+
# (optional) for each event.
#
# @example Pattern from an Array
# Pattern.new(['a', 'b', 'c']).take(5)
# # => [['a', 0, 1, 0],
# # ['b', 1, 1, 0],
# # ['c', 2, 1, 0],
# # ['a', 3, 1, 1], # starts cycling...
# # ['b', 4, 1, 1]]
#
# @example Pattern from a block that yields only values.
# Pattern.new { |y| y << rand(100) }.take(5)
# # => [[52, 0, 1, 0],
# # [8, 1, 1, 0],
# # [83, 2, 1, 0],
# # [25, 3, 1, 0],
# # [3, 4, 1, 0]]
#
# @param source [Array]
# @param size [Integer] number of events per iteration
# @param delta [Numeric, Array<Numeric>, Pattern<Numeric>] event delta
# @param metadata [Hash]
# @yield [yielder, delta] yielder and event delta
# @yieldreturn [value, start, duration]
# @return [Pattern]
#
def initialize(source=nil, size: nil, delta: nil, **metadata, &block)
if source.nil? && block.nil?
fail ArgumentError, 'must provide source or block'
end
if delta && delta.respond_to?(:size) && !(delta.size < Float::INFINITY)
fail ArgumentError, 'delta cannot be infinite'
end
# If delta is an array of 1 or 0 values, flatten array
delta = delta.first if delta.is_a?(Array) && delta.size <= 1
# Block takes precedence as source, even though +source+ can be used to
# infer attributes
@source = block || source
# Infer attributes from +source+ if it is a pattern
if source.is_a?(Pattern)
@delta = source.delta
@size = source.size
@metadata = source.metadata
else
@delta = 1
@size = (source.respond_to?(:size) ? source.size : nil) ||
Float::INFINITY
@metadata = {}
end
# Flatten source if it is a pattern
@source = @source.source if @source.is_a?(Pattern)
# Override or merge custom attributes if they were specified
@size = size if size
@delta = delta if delta
@metadata.merge!(metadata)
# Flatten delta values to an array, if it is an enumerable or pattern
@delta = @delta.to_a if @delta.respond_to?(:to_a)
# Set duration based on delta values
@duration = delta_values.reduce(:+) || 0
end
# Create a new Pattern given an array of +args+
#
# @see Pattern#initialize
#
# @param args [Array]
# @param kwargs [Hash]
# @return [Pattern]
#
def self.[](*args, **kwargs)
new(args, **kwargs)
end
# Returns a new Pattern with the same +source+, but with +delta+ overridden
# and +metadata+ merged.
#
# @param delta [Array<Numeric>, Pattern<Numeric>, Numeric]
# @param metadata [Hash]
# @return [Pattern]
#
def p(*delta, **metadata)
delta = delta.compact.empty? ? @delta : delta
Pattern.new(@source, delta: delta, size: @size, **@metadata.merge(metadata))
end
# Returns true if pattern is infinite
#
# A Pattern is infinite if it was created from a Proc or another infinite
# pattern, and size was not specified.
#
# @return [Boolean]
# @see #finite?
#
def infinite?
@size == Float::INFINITY
end
# Returns true if pattern is finite
#
# A pattern is finite if it has a finite size.
#
# @return [Boolean]
# @see #infinite?
#
def finite?
!infinite?
end
# Calls the given block once for each event, passing its value, start
# position, duration and iteration as parameters.
#
# +cycle+ can be any number, even if there is no event that starts exactly
# at that moment. It will start from the next event.
#
# If no block is given, an enumerator is returned instead.
#
# Enumeration loops forever, and starts yielding events based on pattern's
# delta and from the +cycle+ position, which is by default 0.
#
# @example block yields value, start, duration and iteration
# Pattern.new([1, 2], delta: 0.25).each_event.take(4)
# # => [[1, 0.0, 0.25, 0],
# # [2, 0.25, 0.25, 0],
# # [1, 0.5, 0.25, 1],
# # [2, 0.75, 0.25, 1]]
#
# @example +cycle+ is used to start iterating from that moment in time
# Pattern.new([:a, :b, :c], delta: 1/2).each_event(42).take(4)
# # => [[:a, (42/1), (1/2), 28],
# # [:b, (85/2), (1/2), 28],
# # [:c, (43/1), (1/2), 28],
# # [:a, (87/2), (1/2), 29]]
#
# @example +cycle+ can also be a fractional number
# Pattern.new([:a, :b, :c]).each_event(0.97).take(3)
# # => [[:b, 1, 1, 0],
# # [:c, 2, 1, 0],
# # [:a, 3, 1, 1]]
#
# @param cycle [Numeric]
# @yield [v, s, d, i] value, start, duration and iteration
# @return [Enumerator]
#
def each_event(cycle=0)
return enum_for(__method__, cycle) unless block_given?
EventEnumerator.new(self, cycle).each { |v, s, d, i| yield v, s, d, i }
end
# Calls the given block passing the delta of each value in pattern
#
# This method is used internally by {#each_event} to calculate when each
# event in pattern occurs in time. If no block is given, an Enumerator is
# returned instead.
#
# @param index [Numeric]
# @yield [d] duration
# @return [Enumerator]
#
def each_delta(index=0)
return enum_for(__method__, index) unless block_given?
delta = @delta
if delta.is_a?(Array)
size = delta.size
return if size == 0
start = index.floor
i = start % size
loop do
yield delta[i]
i = (i + 1) % size
start += 1
end
elsif delta.is_a?(Pattern)
delta.each_event(index) { |v, _| yield v }
else
loop { yield delta }
end
end
# Calls the given block once for each value in source
#
# @example
# Pattern.new([1, 2, 3]).each.to_a
# # => [1, 2, 3]
#
# @return [Enumerator]
# @yield [Object] value
#
def each
return enum_for(__method__) unless block_given?
each_event { |v, _, _, i|
break if i > 0
yield v
}
end
# Same as {#each} but in reverse order
#
# @example
# Pattern.new([1, 2, 3]).reverse_each.to_a
# # => [3, 2, 1]
#
# @return [Enumerator]
# @yield [Object] value
#
def reverse_each
  return enum_for(__method__) unless block_given?
  # NOTE: the method body was missing from this dump; this is a minimal
  # reconstruction based on the doc comment above and assumes a finite pattern.
  each.to_a.reverse_each { |v| yield v }
end
# Returns an array of values from a single iteration of pattern
#
# @return [Array] values
# @see #to_events
#
def to_a
fail StandardError, 'pattern is infinite' if infinite?
each.to_a
end
# Returns an array of events (i.e. a tuple [value, start, duration,
# iteration]) from the first iteration.
#
# Only applies to finite patterns.
#
# @return [Array] events
# @see #to_a
#
def to_events
fail StandardError, 'pattern is infinite' if infinite?
each_event.take(size)
end
# Returns a new Pattern with the results of running +block+ once for every
# value in +self+
#
# If no block is given, an Enumerator is returned.
#
# @yield [v, s, d, i] value, start, duration and iteration
# @yieldreturn [v, s, d] value, start (optional) and duration (optional)
# @return [Pattern]
#
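# @example Scale every value (illustrative sketch, not from the original docs)
#   Pattern.new([1, 2, 3]).map { |v| v * 10 }.to_a
#   # => [10, 20, 30]
#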
def map
return enum_for(__method__) unless block_given?
Pattern.new(self) do |y, d|
each_event do |v, s, ed, i|
y << yield(v, s, ed, i)
end
end
end
alias_method :collect, :map
# Returns a Pattern containing all events of +self+ for which +block+ is
# true.
#
# If no block is given, an Enumerator is returned.
#
# @see Pattern#reject
#
# @yield [v, s, d, i] value, start, duration and iteration
# @yieldreturn [Boolean] whether value is selected
# @return [Pattern]
#
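# @example Keep only even values (illustrative sketch, not from the original docs)
#   Pattern.new([1, 2, 3, 4]).select { |v| v.even? }.take_values(2)
#   # => [2, 4]
#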
def select
return enum_for(__method__) unless block_given?
Pattern.new(self) do |y, d|
each_event do |v, s, ed, i|
y << v if yield(v, s, ed, i)
end
end
end
alias_method :find_all, :select
# Returns a Pattern containing all events of +self+ for which +block+
# is false.
#
# If no block is given, an Enumerator is returned.
#
# @see Pattern#select
#
# @yield [v, s, d, i] value, start, duration and iteration
# @yieldreturn [Boolean] whether event is rejected
# @return [Pattern]
#
def reject
return enum_for(__method__) unless block_given?
select { |v, s, d, i| !yield(v, s, d, i) }
end
# Returns the first +n+ events from the pattern, starting from +cycle+
#
# @param n [Integer]
# @param cycle [Numeric]
# @return [Array] values
#
def take(n, cycle=0)
each_event(cycle).take(n)
end
# Returns the first +n+ values from +self+, starting from +cycle+.
#
# Only values are returned, start position and duration are ignored.
#
# @see #take
#
def take_values(*args)
take(*args).map(&:first)
end
# @see #take_values
def peek(n=10, *args)
take_values(n, *args)
end
# @see #take
def peek_events(n=10, cycle=0)
take(n, cycle)
end
# Returns the first element, or the first +n+ elements, of the pattern.
#
# If the pattern is empty, the first form returns nil, and the second form
# returns an empty array.
#
# @see #take
#
# @param n [Integer]
# @param args same arguments as {#take}
# @return [Object, Array]
#
def first(n=nil, *args)
res = take(n || 1, *args)
n.nil? ? res.first : res
end
# Returns a string containing a human-readable representation
#
# When source is not a Proc, this string can be evaluated to construct the
# same instance.
#
# @return [String]
#
def inspect
ss = if @source.respond_to?(:join)
@source.map(&:inspect).join(', ')
elsif @source.is_a?(Proc)
"?proc"
else
@source.inspect
end
ms = @metadata.reject { |_, v| v.nil? }
ms.merge!(delta: delta) if delta != 1
ms = ms.map { |k, v| "#{k}: #{v.inspect}" }.join(', ')
"P[#{ss}#{", #{ms}" unless ms.empty?}]"
end
alias_method :to_s, :inspect
# Returns pattern iteration size or length
#
# This is usually calculated as the least common multiple of the number of
# delta values and the size of the pattern.  If the pattern is infinite,
# the pattern size is assumed to be 1, so the iteration size depends only on
# the delta values.
#
# @return [Integer]
#
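# @example Three values against two deltas (illustrative sketch, not from the
#   original docs)
#   Pattern.new([1, 2, 3], delta: [1, 1/2r]).iteration_size
#   # => 6   # lcm of 2 delta values and 3 source values
#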
def iteration_size
finite? ? delta_size.lcm(@size) : delta_size
end
# @private
def ==(o)
self.class == o.class &&
delta == o.delta &&
size == o.size &&
duration == o.duration &&
metadata == o.metadata &&
(finite? && to_a == o.to_a)
end
private
class EventEnumerator
def initialize(pattern, cycle)
@cycle = cycle
@source = pattern.source
@size = pattern.size
@iter_size = pattern.iteration_size
@iter = pattern.duration > 0 ? (cycle / pattern.duration).floor : 0
@delta_enum = pattern.each_delta(@iter * @iter_size)
@start = @iter * pattern.duration
@prev_ev = nil
@i = 0
end
def each(&block)
return enum_for(__method__, @cycle) unless block_given?
return if @size == 0
if @source.respond_to?(:call)
loop do
yielder = ::Enumerator::Yielder.new do |value|
each_block(value, &block)
end
@source.call(yielder, @delta_enum.peek)
end
elsif @source.respond_to?(:each_event)
@source.each_event(@start) do |value, _|
each_block(value, &block)
end
elsif @source.respond_to?(:[])
loop do
each_block(@source[@i % @size], &block)
end
else
fail StandardError, 'invalid source'
end
end
private
def each_block(value)
delta = @delta_enum.peek
if @start >= @cycle
if @prev_ev
yield @prev_ev if @start > @cycle
@prev_ev = nil
end
yield value, @start, delta, @iter
else
@prev_ev = [value, @start, delta, @iter]
end
@iter += 1 if @i + 1 == @iter_size
@i = (@i + 1) % @iter_size
@start += delta
@delta_enum.next
end
end
def delta_values
each_delta.take(iteration_size)
end
def delta_size
@delta.respond_to?(:each) && @delta.respond_to?(:size) ? @delta.size : 1
end
end
|
HewlettPackard/hpe3par_ruby_sdk | lib/Hpe3parSdk/client.rb | Hpe3parSdk.Client.create_physical_copy | ruby | def create_physical_copy(src_name, dest_name, dest_cpg, optional = nil)
if @current_version < @min_version_with_compression && !optional.nil?
[:compression, :allowRemoteCopyParent, :skipZero].each { |key| optional.delete key }
end
begin
@volume.create_physical_copy(src_name, dest_name, dest_cpg, optional)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end | Creates a physical copy of a VirtualVolume
==== Attributes
* src_name - the source volume name
type src_name: String
* dest_name - the destination volume name
type dest_name: String
* dest_cpg - the destination CPG
type dest_cpg: String
* optional - Hash of optional parameters
type optional: Hash
optional = {
'online' => false, # should physical copy be
# performed online?
'tpvv' => false, # use thin provisioned space
# for destination
# (online copy only)
'snapCPG' => 'OpenStack_SnapCPG', # snapshot CPG for the
# destination
# (online copy only)
'saveSnapshot' => false, # save the snapshot of the
# source volume
'priority' => 1 # taskPriorityEnum (does not
# apply to online copy - Hpe3parSdk::TaskPriority)
} | train | https://github.com/HewlettPackard/hpe3par_ruby_sdk/blob/f8cfc6e597741be593cf7fe013accadf982ee68b/lib/Hpe3parSdk/client.rb#L1674-L1684 | class Client
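  # Usage sketch for create_physical_copy documented above (the API URL,
  # credentials, volume and CPG names are illustrative assumptions, not values
  # from the SDK):
  #
  #   client = Hpe3parSdk::Client.new('https://10.0.0.1:8080/api/v1')
  #   client.login('username', 'password')
  #   client.create_physical_copy('src_vol', 'dst_vol', 'FC_r1',
  #                               { 'online' => true, 'tpvv' => true })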
def initialize(api_url,debug:false, secure: false, timeout: nil, suppress_ssl_warnings: false, app_type: 'ruby_SDK_3par', log_file_path: nil)
unless api_url.is_a?(String)
raise Hpe3parSdk::HPE3PARException.new(nil,
"'api_url' parameter is mandatory and should be of type String")
end
@api_url = api_url
@debug = debug
@secure = secure
@timeout = timeout
@suppress_ssl_warnings = suppress_ssl_warnings
@log_level = Logger::INFO
@log_file_path = log_file_path
init_log
@http = HTTPJSONRestClient.new(
@api_url, @secure, @debug,
@suppress_ssl_warnings, @timeout = nil
)
check_WSAPI_version
@vlun_query_supported = false
@cpg = CPGManager.new(@http)
@qos = QOSManager.new(@http)
@flash_cache = FlashCacheManager.new(@http)
@port = PortManager.new(@http)
@task = TaskManager.new(@http)
@host_and_vv_set_filter_supported = false
@ssh = nil
@vlun = VlunManager.new(@http, @vlun_query_supported)
@host = HostManager.new(@http, @vlun_query_supported)
@volume_set = VolumeSetManager.new(@http, @host_and_vv_set_filter_supported)
@host_set = HostSetManager.new(@http, @host_and_vv_set_filter_supported)
@app_type = app_type
end
private def init_log
unless @log_file_path.nil?
client_logger = Logger.new(@log_file_path, 'daily', formatter: CustomFormatter.new)
else
client_logger = Logger.new(STDOUT)
end
if @debug
@log_level = Logger::DEBUG
end
Hpe3parSdk.logger = MultiLog.new(:level => @log_level, :loggers => client_logger)
end
private def check_WSAPI_version
begin
@api_version = get_ws_api_version
rescue HPE3PARException => ex
ex_message = ex.message
if ex_message && ex_message.include?('SSL Certificate Verification Failed')
raise Hpe3parSdk::SSLCertFailed
else
msg = "Error: #{ex_message} - Error communicating with 3PAR WSAPI. '
'Check proxy settings. If error persists, either the '
'3PAR WSAPI is not running OR the version of the WSAPI is '
'not supported."
raise Hpe3parSdk::HPE3PARException(message: msg)
end
end
compare_version(@api_version)
end
private def set_ssh_options(username, password, port=22, conn_timeout=nil)
@ssh=Hpe3parSdk::SSH.new(@api_url.split("//")[1].split(":")[0], username, password)
end
private def compare_version(api_version)
@min_version = WSAPIVersion
.parse(WSAPIVersionSupport::WSAPI_MIN_SUPPORTED_VERSION)
@min_version_with_compression = WSAPIVersion
.parse(WSAPIVersionSupport::WSAPI_MIN_VERSION_COMPRESSION_SUPPORT)
@current_version = WSAPIVersion.new(api_version['major'], api_version['minor'],
api_version['revision'])
if @current_version < @min_version
err_msg = "Unsupported 3PAR WS API version #{@current_version}, min supported version is, #{WSAPIVersionSupport::WSAPI_MIN_SUPPORTED_VERSION}"
raise Hpe3parSdk::UnsupportedVersion.new(nil, err_msg)
end
# Check for VLUN query support.
min_vlun_query_support_version = WSAPIVersion
.parse(WSAPIVersionSupport::WSAPI_MIN_VERSION_VLUN_QUERY_SUPPORT)
if @current_version >= min_vlun_query_support_version
@vlun_query_supported = true
end
# Check for Host and VV Set query support
if @current_version >= @min_version_with_compression
@host_and_vv_set_filter_supported = true
end
end
# Get the 3PAR WS API version.
#
# ==== Returns
#
# WSAPI version hash
def get_ws_api_version
# remove everything down to host:port
host_url = @api_url.split('/api')
@http.set_url(host_url[0])
begin
# get the api version
response = @http.get('/api')
response[1]
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
ensure
# reset the url
@http.set_url(@api_url)
end
# Gets the WSAPI Configuration.
#
# ==== Returns
#
# WSAPI configuration hash
def get_ws_api_configuration_info
begin
response = @http.get('/wsapiconfiguration')
response[1]
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a new FlashCache
#
# ==== Attributes
#
# * size_in_gib - Specifies the node pair size of the Flash Cache on the system
# type size_in_gib: Integer
# * mode - Values supported Simulator: 1, Real: 2 (default)
# type mode: Integer
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - NO_SPACE - Not enough space is available for the operation.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EXCEEDS_RANGE - A JSON input object contains a name-value pair with a numeric value that exceeds the expected range. Flash Cache exceeds the expected range. The HTTP ref member contains the name.
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_FLASH_CACHE - The Flash Cache already exists.
# * Hpe3parSdk::HTTPForbidden
# - FLASH_CACHE_NOT_SUPPORTED - Flash Cache is not supported.
# * Hpe3parSdk::HTTPBadRequest
# - INV_FLASH_CACHE_SIZE - Invalid Flash Cache size. The size must be a multiple of 16 G.
def create_flash_cache(size_in_gib, mode = nil)
begin
@flash_cache.create_flash_cache(size_in_gib, mode)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Get Flash Cache information
#
# ==== Returns
#
# FlashCache - Details of the specified flash cache
def get_flash_cache
begin
@flash_cache.get_flash_cache
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes an existing Flash Cache
#
# ==== Raises
#
# * Hpe3parSdk::HTTPForbidden
# - FLASH_CACHE_IS_BEING_REMOVED - Unable to delete the Flash Cache, the Flash Cache is being removed.
# * Hpe3parSdk::HTTPForbidden
# - FLASH_CACHE_NOT_SUPPORTED - Flash Cache is not supported on this system.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_FLASH_CACHE - The Flash Cache does not exist.
def delete_flash_cache
begin
@flash_cache.delete_flash_cache
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets the Storage System Information
#
# ==== Returns
#
# Hash of Storage System Info
def get_storage_system_info
begin
response = @http.get('/system')
response[1]
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets the overall system capacity for the 3PAR server.
#
# ==== Returns
#
# Hash of system capacity information
#
#
# capacity = {
# "allCapacity"=> { # Overall system capacity
# # includes FC, NL, SSD
# # device types
# "totalMiB"=>20054016, # Total system capacity
# # in MiB
# "allocated"=>{ # Allocated space info
# "totalAllocatedMiB"=>12535808, # Total allocated
# # capacity
# "volumes"=> { # Volume capacity info
# "totalVolumesMiB"=>10919936, # Total capacity
# # allocated to volumes
# "nonCPGsMiB"=> 0, # Total non-CPG capacity
# "nonCPGUserMiB"=> 0, # The capacity allocated
# # to non-CPG user space
# "nonCPGSnapshotMiB"=>0, # The capacity allocated
# # to non-CPG snapshot
# # volumes
# "nonCPGAdminMiB"=> 0, # The capacity allocated
# # to non-CPG
# # administrative volumes
# "CPGsMiB"=>10919936, # Total capacity
# # allocated to CPGs
# "CPGUserMiB"=>7205538, # User CPG space
# "CPGUserUsedMiB"=>7092550, # The CPG allocated to
# # user space that is
# # in use
# "CPGUserUnusedMiB"=>112988, # The CPG allocated to
# # user space that is not
# # in use
# "CPGSnapshotMiB"=>2411870, # Snapshot CPG space
# "CPGSnapshotUsedMiB"=>210256, # CPG allocated to
# # snapshot that is in use
# "CPGSnapshotUnusedMiB"=>2201614, # CPG allocated to
# # snapshot space that is
# # not in use
# "CPGAdminMiB"=>1302528, # Administrative volume
# # CPG space
# "CPGAdminUsedMiB"=> 115200, # The CPG allocated to
# # administrative space
# # that is in use
# "CPGAdminUnusedMiB"=>1187328, # The CPG allocated to
# # administrative space
# # that is not in use
# "unmappedMiB"=>0 # Allocated volume space
# # that is unmapped
# },
# "system"=> { # System capacity info
# "totalSystemMiB"=> 1615872, # System space capacity
# "internalMiB"=>780288, # The system capacity
# # allocated to internal
# # resources
# "spareMiB"=> 835584, # Total spare capacity
# "spareUsedMiB"=> 0, # The system capacity
# # allocated to spare resources
# # in use
# "spareUnusedMiB"=> 835584 # The system capacity
# # allocated to spare resources
# # that are unused
# }
# },
# "freeMiB"=> 7518208, # Free capacity
# "freeInitializedMiB"=> 7518208, # Free initialized capacity
# "freeUninitializedMiB"=> 0, # Free uninitialized capacity
# "unavailableCapacityMiB"=> 0, # Unavailable capacity in MiB
# "failedCapacityMiB"=> 0 # Failed capacity in MiB
# },
# "FCCapacity"=> { # System capacity from FC devices only
# ... # Same structure as above
# },
# "NLCapacity"=> { # System capacity from NL devices only
# ... # Same structure as above
# },
# "SSDCapacity"=> { # System capacity from SSD devices only
# ... # Same structure as above
# }
# }
def get_overall_system_capacity
begin
response = @http.get('/capacity')
response[1]
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# This authenticates against the 3PAR WSAPI server and creates a session.
# ==== Attributes
#
# * username - The username
# type username: String
# * password - The Password
# type password: String
def login(username, password, optional = nil)
set_ssh_options(username, password, port=22, conn_timeout=nil)
@volume = VolumeManager.new(@http, @ssh, @app_type)
@http.authenticate(username, password, optional)
end
# Get the list of all 3PAR Tasks
#
# ==== Returns
#
# Array of Task
def get_all_tasks
begin
@task.get_all_tasks
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Get the status of a 3PAR Task
#
# ==== Attributes
#
# * task_id - the task id
# type task_id: Integer
#
# ==== Returns
#
# Task
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_BELOW_RANGE - Bad Request Task ID must be a positive value.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EXCEEDS_RANGE - Bad Request Task ID is too large.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_TASK - Task with the specified Task ID does not exist.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_WRONG_TYPE - Task ID is not an integer.
def get_task(task_id)
begin
@task.get_task(task_id)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def vlun_exists?(volname,lunid,host=nil,port=nil)
begin
@vlun.vlun_exists?(volname,lunid,host,port)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a new VLUN.
#
# When creating a VLUN, the volumeName is required. The lun member is
# not required if auto is set to True.
# Either hostname or portPos (or both in the case of matched sets) is
# also required. The noVcn and overrideLowerPriority members are
# optional.
# * volume_name: Name of the volume to be exported
# type volume_name: String
# * lun: LUN id
# type lun: Integer
# * host_name: Name of the host which the volume is to be exported.
# type host_name: String
# * port_pos: System port of VLUN exported to. It includes node number, slot number, and card port number
# type port_pos: Hash
# port_pos = {'node'=> 1, # System node (0-7)
# 'slot'=> 2, # PCI bus slot in the node (0-5)
# 'port'=> 1} # Port number on the FC card (0-4)
# * no_vcn: A VLUN change notification (VCN) not be issued after export (-novcn).
# type no_vcn: Boolean
# * override_lower_priority: Existing lower priority VLUNs will be overridden (-ovrd). Use only if hostname member exists.
# type override_lower_priority: Boolean
#
# ==== Returns
#
# VLUN id
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
  #   - INV_INPUT_MISSING_REQUIRED - Missing volume or hostname or lunid.
# * Hpe3parSdk::HTTPNotFound
  #   - NON_EXISTENT_VOL - Specified volume does not exist.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - Specified hostname not found.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_PORT - Specified port does not exist.
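  #
  # A minimal usage sketch (assumes +client+ is a logged-in Hpe3parSdk::Client;
  # volume, host and LUN values are illustrative assumptions):
  #
  #   # Export volume 'vol01' to host 'host01' as LUN 1
  #   client.create_vlun('vol01', 1, 'host01')
  #
  #   # Or let the array pick the LUN id automatically
  #   client.create_vlun('vol01', nil, 'host01', nil, false, false, true)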
def create_vlun(volume_name, lun = nil, host_name = nil, port_pos = nil, no_vcn = false, override_lower_priority = false, auto = false)
begin
@vlun.create_vlun(volume_name, host_name, lun, port_pos, no_vcn, override_lower_priority, auto)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets VLUNs.
#
# ==== Returns
#
# Array of VLUN objects
def get_vluns
begin
@vlun.get_vluns
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets information about a VLUN.
#
# ==== Attributes
#
# * volume_name: The volume name of the VLUN to find
# type volume_name: String
#
# ==== Returns
#
# VLUN object
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VLUN - VLUN doesn't exist
def get_vlun(volume_name)
begin
@vlun.get_vlun(volume_name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes a VLUN.
#
# ==== Attributes
#
# * volume_name: Volume name of the VLUN
# type volume_name: String
# * lun_id: LUN ID
# type lun_id: Integer
# * host_name: Name of the host which the volume is exported. For VLUN of port type,the value is empty
# type host_name: String
# * port: Specifies the system port of the VLUN export. It includes the system node number, PCI bus slot number, and card port number on the FC card in the format<node>:<slot>:<cardPort>
# type port: Hash
#
# port = {'node'=> 1, # System node (0-7)
# 'slot'=> 2, # PCI bus slot in the node (0-5)
# 'port'=>1} # Port number on the FC card (0-4)
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_MISSING_REQUIRED - Incomplete VLUN info. Missing
# volumeName or lun, or both hostname and port.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_PORT_SELECTION - Specified port is invalid.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EXCEEDS_RANGE - The LUN specified exceeds expected
# range.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - The host does not exist
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VLUN - The VLUN does not exist
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_PORT - The port does not exist
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
def delete_vlun(volume_name, lun_id, host_name = nil, port = nil)
begin
@vlun.delete_vlun(volume_name, lun_id, host_name, port)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets QoS Rules.
#
# ==== Returns
#
# Array of QoSRule objects
#
def query_qos_rules
begin
@qos.query_qos_rules
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Queries a QoS rule
#
# ==== Attributes
#
# * target_name : Name of the target. When targetType is sys, target name must be sys:all_others.
# type target_name: String
# * target_type : Target type is vvset or sys
# type target_type: String
# ==== Returns
#
# QoSRule object
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_QOS_RULE - QoS rule does not exist.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - Illegal character in the input.
def query_qos_rule(target_name, target_type = 'vvset')
begin
@qos.query_qos_rule(target_name, target_type)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def qos_rule_exists?(target_name, target_type = 'vvset')
begin
@qos.qos_rule_exists?(target_name, target_type)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates QOS rules
# The QoS rule can be applied to VV sets. By using sys:all_others,
# you can apply the rule to all volumes in the system for which no
# QoS rule has been defined.
# ioMinGoal and ioMaxLimit must be used together to set I/O limits.
# Similarly, bwMinGoalKB and bwMaxLimitKB must be used together.
  # If ioMaxLimitOP is set to 2 (no limit), ioMinGoalOP must also be
  # set to 2 (no limit), and vice versa; they cannot be set to
  # 'none' individually. Similarly, if bwMaxLimitOP is set to 2 (no
  # limit), then bwMinGoalOP must also be set to 2.
  # If ioMaxLimitOP is set to 1 (zero), ioMinGoalOP must also be
  # set to 1 (zero), and vice versa. Similarly, if bwMaxLimitOP is
  # set to 1 (zero), then bwMinGoalOP must also be set to 1.
# The ioMinGoalOP and ioMaxLimitOP fields take precedence over
# the ioMinGoal and ioMaxLimit fields.
# The bwMinGoalOP and bwMaxLimitOP fields take precedence over
# the bwMinGoalKB and bwMaxLimitKB fields
#
# ==== Attributes
#
# * target_type: Type of QoS target, either enum TARGET_TYPE_VVS or TARGET_TYPE_SYS.
# type target_type: VVSET or SYS. Refer QoStargetType::VVSET for complete enumeration
# * target_name: Name of the target object on which the QoS rule will be created.
# type target_name: String
# * qos_rules: QoS options
# type qos_rules: Hash
# qos_rules = {
# 'priority'=> 2, # Refer Hpe3parSdk::QoSpriorityEnumeration for complete enumeration
# 'bwMinGoalKB'=> 1024, # bandwidth rate minimum goal in
# # kilobytes per second
# 'bwMaxLimitKB'=> 1024, # bandwidth rate maximum limit in
# # kilobytes per second
# 'ioMinGoal'=> 10000, # I/O-per-second minimum goal
# 'ioMaxLimit'=> 2000000, # I/0-per-second maximum limit
# 'enable'=> false, # QoS rule for target enabled?
# 'bwMinGoalOP'=> 1, # zero none operation enum, when set to
# # 1, bandwidth minimum goal is 0
  #                            # when set to 2, the bandwidth minimum
# # goal is none (NoLimit)
# 'bwMaxLimitOP'=> 1, # zero none operation enum, when set to
# # 1, bandwidth maximum limit is 0
# # when set to 2, the bandwidth maximum
# # limit is none (NoLimit)
# 'ioMinGoalOP'=>1, # zero none operation enum, when set to
# # 1, I/O minimum goal is 0
# # when set to 2, the I/O minimum goal is
# # none (NoLimit)
# 'ioMaxLimitOP'=> 1, # zero none operation enum, when set to
# # 1, I/O maximum limit is 0
# # when set to 2, the I/O maximum limit
# # is none (NoLimit)
# 'latencyGoal'=>5000, # Latency goal in milliseconds
# 'defaultLatency'=> false# Use latencyGoal or defaultLatency?
# }
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EXCEEDS_RANGE - Invalid input: number exceeds expected range.
# * Hpe3parSdk::HTTPNotFound
  #   - NON_EXISTENT_QOS_RULE - QoS rule does not exist.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - Illegal character in the input.
# * Hpe3parSdk::HTTPBadRequest
# - EXISTENT_QOS_RULE - QoS rule already exists.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_MIN_GOAL_GRT_MAX_LIMIT - I/O-per-second maximum limit should be greater than the minimum goal.
# * Hpe3parSdk::HTTPBadRequest
  #   - INV_INPUT_BW_MIN_GOAL_GRT_MAX_LIMIT - Bandwidth maximum limit should be greater than the minimum goal.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_BELOW_RANGE - I/O-per-second limit is below range.Bandwidth limit is below range.
# * Hpe3parSdk::HTTPBadRequest
# - UNLICENSED_FEATURE - The system is not licensed for QoS.
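  #
  # A minimal usage sketch (assumes +client+ is a logged-in Hpe3parSdk::Client;
  # the set name and limit values are illustrative assumptions, see the
  # qos_rules hash above for the full option list):
  #
  #   qos_rules = {
  #     'priority'   => 2,      # see Hpe3parSdk::QoSpriorityEnumeration
  #     'ioMinGoal'  => 300,
  #     'ioMaxLimit' => 1000
  #   }
  #   client.create_qos_rules('my_vv_set', qos_rules, Hpe3parSdk::QoStargetType::VVSET)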
def create_qos_rules(target_name, qos_rules, target_type = QoStargetType::VVSET)
if @current_version < @min_version && !qos_rules.nil?
qos_rules.delete_if { |key, _value| key == :latencyGoaluSecs }
end
begin
@qos.create_qos_rules(target_name, qos_rules, target_type)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Modifies an existing QOS rules
#
# The QoS rule can be applied to VV sets. By using sys:all_others,
# you can apply the rule to all volumes in the system for which no
# QoS rule has been defined.
# ioMinGoal and ioMaxLimit must be used together to set I/O limits.
# Similarly, bwMinGoalKB and bwMaxLimitKB must be used together.
  # If ioMaxLimitOP is set to 2 (no limit), ioMinGoalOP must also be
  # set to 2 (no limit), and vice versa; they cannot be set to
  # 'none' individually. Similarly, if bwMaxLimitOP is set to 2 (no
  # limit), then bwMinGoalOP must also be set to 2.
  # If ioMaxLimitOP is set to 1 (zero), ioMinGoalOP must also be
  # set to 1 (zero), and vice versa. Similarly, if bwMaxLimitOP is
  # set to 1 (zero), then bwMinGoalOP must also be set to 1.
# The ioMinGoalOP and ioMaxLimitOP fields take precedence over
# the ioMinGoal and ioMaxLimit fields.
# The bwMinGoalOP and bwMaxLimitOP fields take precedence over
# the bwMinGoalKB and bwMaxLimitKB fields
#
# ==== Attributes
#
# * target_name: Name of the target object on which the QoS rule will be created.
# type target_name: String
# * target_type: Type of QoS target, either vvset or sys.Refer Hpe3parSdk::QoStargetTypeConstants for complete enumeration
# type target_type: String
# * qos_rules: QoS options
# type qos_rules: Hash
# qos_rules = {
# 'priority'=> 2, # Refer Hpe3parSdk::QoSpriorityEnumeration for complete enumeration
# 'bwMinGoalKB'=> 1024, # bandwidth rate minimum goal in
# # kilobytes per second
# 'bwMaxLimitKB'=> 1024, # bandwidth rate maximum limit in
# # kilobytes per second
# 'ioMinGoal'=> 10000, # I/O-per-second minimum goal.
# 'ioMaxLimit'=> 2000000, # I/0-per-second maximum limit
# 'enable'=> True, # QoS rule for target enabled?
# 'bwMinGoalOP'=> 1, # zero none operation enum, when set to
# # 1, bandwidth minimum goal is 0
# # when set to 2, the bandwidth minimum
# # goal is none (NoLimit)
# 'bwMaxLimitOP'=> 1, # zero none operation enum, when set to
# # 1, bandwidth maximum limit is 0
# # when set to 2, the bandwidth maximum
# # limit is none (NoLimit)
# 'ioMinGoalOP'=> 1, # zero none operation enum, when set to
  #                              # 1, I/O minimum goal is 0
# # when set to 2, the I/O minimum goal is
# # none (NoLimit)
# 'ioMaxLimitOP'=> 1, # zero none operation enum, when set to
# # 1, I/O maximum limit is 0
# # when set to 2, the I/O maximum limit
# # is none (NoLimit)
# 'latencyGoal'=> 5000, # Latency goal in milliseconds
# 'defaultLatency'=> false# Use latencyGoal or defaultLatency?
# }
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# INV_INPUT_EXCEEDS_RANGE - Invalid input: number exceeds expected
# range.
# * Hpe3parSdk::HTTPNotFound
  #   NON_EXISTENT_QOS_RULE - QoS rule does not exist.
# * Hpe3parSdk::HTTPBadRequest
# INV_INPUT_ILLEGAL_CHAR - Illegal character in the input.
# * Hpe3parSdk::HTTPBadRequest
# EXISTENT_QOS_RULE - QoS rule already exists.
# * Hpe3parSdk::HTTPBadRequest
# INV_INPUT_IO_MIN_GOAL_GRT_MAX_LIMIT - I/O-per-second maximum limit
# should be greater than the minimum goal.
# * Hpe3parSdk::HTTPBadRequest
# INV_INPUT_BW_MIN_GOAL_GRT_MAX_LIMIT - Bandwidth maximum limit
# should be greater than the minimum goal.
# * Hpe3parSdk::HTTPBadRequest
# INV_INPUT_BELOW_RANGE - I/O-per-second limit is below
# range. Bandwidth limit is below range.
# * Hpe3parSdk::HTTPBadRequest
# UNLICENSED_FEATURE - The system is not licensed for QoS.
def modify_qos_rules(target_name, qos_rules, target_type = QoStargetTypeConstants::VVSET)
if @current_version < @min_version && !qos_rules.nil?
qos_rules.delete_if { |key, _value| key == :latencyGoaluSecs }
end
begin
@qos.modify_qos_rules(target_name, qos_rules, target_type)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes QoS rules.
#
# ==== Attributes
#
# * target_name: Name of the target. When target_type is sys, target_name must be sys:all_others.
# type target_name: String
# * target_type: target type is vvset or sys
# type target_type: String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# NON_EXISTENT_QOS_RULE - QoS rule does not exist.
# * Hpe3parSdk::HTTPBadRequest
# INV_INPUT_ILLEGAL_CHAR - Illegal character in the input
def delete_qos_rules(target_name, target_type = QoStargetTypeConstants::VVSET)
begin
@qos.delete_qos_rules(target_name, target_type)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets all hosts.
#
# ==== Returns
#
# Array of Host.
def get_hosts
begin
@host.get_hosts
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets host information by name.
#
# ==== Attributes
#
# * name - The name of the host to find.
# type name: String
#
# ==== Returns
#
# Host.
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT - Invalid URI syntax.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - Host not found.
# * Hpe3parSdk::HTTPInternalServerError
# - INT_SERV_ERR - Internal server error.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - Host name contains invalid character.
def get_host(name)
begin
@host.get_host(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a new Host.
#
# ==== Attributes
#
# * name - The name of the host.
# type name: String
# * iscsi_names - Array of iSCSI iqns.
# type iscsi_names: Array
# * fcwwns - Array of Fibre Channel World Wide Names.
# type fcwwns: Array
# * optional - The optional stuff.
# type optional: Hash
# optional = {
# 'persona'=> 1, # Refer Hpe3parSdk::HostPersona for complete enumeration.
# # 3.1.3 default: Generic-ALUA
# # 3.1.2 default: General
# 'domain'=> 'myDomain', # Create the host in the
# # specified domain, or default
# # domain if unspecified.
# 'forceTearDown'=> false, # If True, force to tear down
# # low-priority VLUN exports.
# 'descriptors'=>
# {'location'=> 'earth', # The host's location
# 'IPAddr'=> '10.10.10.10', # The host's IP address
# 'os'=> 'linux', # The operating system running on the host.
# 'model'=> 'ex', # The host's model
# 'contact'=> 'Smith', # The host's owner and contact
# 'comment'=> "Joe's box"} # Additional host information
# }
#
# ==== Raises
#
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_MISSING_REQUIRED - Name not specified.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_PARAM_CONFLICT - FCWWNs and iSCSINames are both specified.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EXCEEDS_LENGTH - Host name, domain name, or iSCSI name is too long.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EMPTY_STR - Input string (for domain name, iSCSI name, etc.) is empty.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - Any error from host-name or domain-name parsing.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_TOO_MANY_WWN_OR_iSCSI - More than 1024 WWNs or iSCSI names are specified.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_WRONG_TYPE - The length of WWN is not 16. WWN specification contains non-hexadecimal digit.
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_PATH - host WWN/iSCSI name already used by another host.
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_HOST - host name is already used.
# * Hpe3parSdk::HTTPBadRequest
# - NO_SPACE - No space to create host.
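  #
  # A minimal usage sketch (assumes +client+ is a logged-in Hpe3parSdk::Client;
  # host name, WWNs and optional values are illustrative assumptions):
  #
  #   client.create_host('host01',
  #                      nil,                                       # no iSCSI names
  #                      ['1122334455667788', '8877665544332211'],  # FC WWNs
  #                      { 'domain' => 'myDomain', 'persona' => 2 })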
def create_host(name, iscsi_names = nil, fcwwns = nil, optional = nil)
begin
@host.create_host(name, iscsi_names, fcwwns, optional)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Modifies an existing Host.
#
# ==== Attributes
#
# * name - Name of the host.
# type name: String
# * mod_request - Objects for host modification request.
# type mod_request: Hash
# mod_request = {
# 'newName'=> 'myNewName', # New name of the host
# 'pathOperation'=> 1, # Refer Hpe3parSdk::HostEditOperation for complete enumeration
# 'FCWWNs'=> [], # One or more WWN to set for the host.
# 'iSCSINames'=> [], # One or more iSCSI names to set for the host.
# 'forcePathRemoval'=> false, # If True, remove SSN(s) or
# # iSCSI(s) even if there are
# # VLUNs exported to host
# 'persona'=> 1, # Refer Hpe3parSdk::HostPersona for complete enumeration.
# 'descriptors'=>
# {'location'=> 'earth', # The host's location
# 'IPAddr'=> '10.10.10.10', # The host's IP address
# 'os'=> 'linux', # The operating system running on the host.
# 'model'=> 'ex', # The host's model
# 'contact'=> 'Smith', # The host's owner and contact
# 'comment'=> 'Joes box'} # Additional host information
# 'chapOperation'=> 1, # Refer Hpe3parSdk::HostEditOperation for complete enumeration
# 'chapOperationMode'=> TARGET, # Refer Hpe3parSdk::ChapOperationMode for complete enumeration
# 'chapName'=> 'MyChapName', # The chap name
# 'chapSecret'=> 'xyz', # The chap secret for the host or the target
# 'chapSecretHex'=> false, # If True, the chapSecret is treated as Hex.
# 'chapRemoveTargetOnly'=> true # If True, then remove target chap only
# }
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT - Missing host name.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_PARAM_CONFLICT - Both iSCSINames & FCWWNs are specified. (lot of other possibilities).
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ONE_REQUIRED - iSCSINames or FCWwns missing.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ONE_REQUIRED - No path operation specified.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_BAD_ENUM_VALUE - Invalid enum value.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_MISSING_REQUIRED - Required fields missing.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EXCEEDS_LENGTH - Host descriptor argument length, new host name, or iSCSI name is too long.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - Error parsing host or iSCSI name.
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_HOST - New host name is already used.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - Host to be modified does not exist.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_TOO_MANY_WWN_OR_iSCSI - More than 1024 WWNs or iSCSI names are specified.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_WRONG_TYPE - Input value is of the wrong type.
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_PATH - WWN or iSCSI name is already claimed by other host.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_BAD_LENGTH - CHAP hex secret length is not 16 bytes, or chap ASCII secret length is not 12 to 16 characters.
# * Hpe3parSdk::HTTPNotFound
# - NO_INITIATOR_CHAP - Setting target CHAP without initiator CHAP.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_CHAP - Remove non-existing CHAP.
# * Hpe3parSdk::HTTPConflict
# - NON_UNIQUE_CHAP_SECRET - CHAP secret is not unique.
# * Hpe3parSdk::HTTPConflict
# - EXPORTED_VLUN - Setting persona with active export; remove a host path on an active export.
# * Hpe3parSdk::HTTPBadRequest
# - NON_EXISTENT_PATH - Remove a non-existing path.
# * Hpe3parSdk::HTTPConflict
# - LUN_HOSTPERSONA_CONFLICT - LUN number and persona capability conflict.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_DUP_PATH - Duplicate path specified.
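  #
  # A minimal usage sketch (assumes +client+ is a logged-in Hpe3parSdk::Client;
  # the host name and WWN are illustrative assumptions, and the pathOperation
  # value refers to Hpe3parSdk::HostEditOperation):
  #
  #   mod_request = {
  #     'pathOperation' => 1,
  #     'FCWWNs'        => ['1122334455667788']
  #   }
  #   client.modify_host('host01', mod_request)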
def modify_host(name, mod_request)
begin
@host.modify_host(name, mod_request)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes a host.
#
# ==== Attributes
#
# * name - The name of host to be deleted.
# type name: String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - Host not found
# * Hpe3parSdk::HTTPConflict
# - HOST_IN_SET - Host is a member of a set
def delete_host(name)
begin
@host.delete_host(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Finds the host with the specified FC WWN path.
#
# ==== Attributes
#
# * wwn - Lookup based on WWN.
# type wwn: String
#
# ==== Returns
#
# Host with specified FC WWN.
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT - Invalid URI syntax.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - HOST Not Found
# * Hpe3parSdk::HTTPInternalServerError
# - INTERNAL_SERVER_ERR - Internal server error.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - Host name contains invalid character.
def query_host_by_fc_path(wwn = nil)
begin
@host.query_host_by_fc_path(wwn)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Finds the host with the specified iSCSI initiator.
#
# ==== Attributes
#
# * iqn - Lookup based on iSCSI initiator.
# type iqn: String
#
# ==== Returns
#
# Host with specified IQN.
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT - Invalid URI syntax.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - The specified host not found.
# * Hpe3parSdk::HTTPInternalServerError
# - INTERNAL_SERVER_ERR - Internal server error.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - The host name contains invalid character.
def query_host_by_iscsi_path(iqn = nil)
begin
@host.query_host_by_iscsi_path(iqn)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets all host sets.
#
# ==== Returns
#
# Array of HostSet.
def get_host_sets
begin
@host_set.get_host_sets
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a new HostSet.
#
# ==== Attributes
#
# * name - Name of the host set to be created.
# type name: String
# * domain - The domain in which the host set will be created.
# type domain: String
# * comment - Comment for the host set.
# type comment: String
# * setmembers - The hosts to be added to the set. The existence of the host will not be checked.
# type setmembers: Array of String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - EXISTENT_SET - The set already exits.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_DOMAIN - The domain does not exist.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_IN_DOMAINSET - The host is in a domain set.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_IN_SET - The object is already part of the set.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_NOT_IN_SAME_DOMAIN - Objects must be in the same domain to perform this operation.
# * Hpe3parSdk::HTTPNotFound
  #   - NON_EXISTENT_HOST - The host does not exist.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_DUP_NAME - Invalid input (duplicate name).
def create_host_set(name, domain = nil, comment = nil, setmembers = nil)
begin
@host_set.create_host_set(name, domain, comment, setmembers)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes a HostSet.
#
# ==== Attributes
#
# * name - The hostset to delete.
# type name: String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
  #   - NON_EXISTENT_SET - The set does not exist.
# * Hpe3parSdk::HTTPConflict
# - EXPORTED_VLUN - The host set has exported VLUNs.
def delete_host_set(name)
begin
@host_set.delete_host_set(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Modifies a HostSet.
#
# ==== Attributes
#
# * name - Hostset name
# type name: String
# * action - Add or Remove host(s) from the set
# type action: Refer values of Hpe3parSdk::SetCustomAction::MEM_ADD and Hpe3parSdk::SetCustomAction::MEM_REMOVE
# * setmembers - Host(s) to add to the set, the existence of the host(s) will not be checked
# type setmembers: Array of String
# * new_name - New name of set
# type new_name: String
# * comment - New comment for the set
# type comment: String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - EXISTENT_SET - The set already exits.
# * Hpe3parSdk::HTTPNotFound
  #   - NON_EXISTENT_SET - The set does not exist.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_IN_DOMAINSET - The host is in a domain set.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_IN_SET - The object is already part of the set.
# * Hpe3parSdk::HTTPNotFound
# - MEMBER_NOT_IN_SET - The object is not part of the set.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_NOT_IN_SAME_DOMAIN - Objects must be in the same domain to perform this operation.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_DUP_NAME - Invalid input (duplicate name).
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_PARAM_CONFLICT - Invalid input (parameters cannot be present at the same time).
# * Hpe3parSdk::HTTPBadRequest
  #   - INV_INPUT_ILLEGAL_CHAR - Input contains one or more illegal characters.
def modify_host_set(name, action = nil, setmembers = nil, new_name = nil, comment = nil)
begin
@host_set.modify_host_set(name, action, setmembers, new_name, comment)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Adds host(s) to a host set.
#
# ==== Attributes
#
# * set_name - Hostset name.
# type set_name: String
# * setmembers - Array of host names to add to the set.
# type setmembers: Array of String
def add_hosts_to_host_set(set_name, setmembers)
begin
@host_set.add_hosts_to_host_set(set_name, setmembers)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Removes host(s) from a host set.
#
# ==== Attributes
#
# * set_name - The host set name.
# type set_name: String
# * setmembers - Array of host names to remove from the set.
# type setmembers: Array of String
def remove_hosts_from_host_set(set_name, setmembers)
begin
@host_set.remove_hosts_from_host_set(set_name, setmembers)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Returns an array of every Hostset the given host is a part of. The array can contain zero, one, or multiple items.
#
# ==== Attributes
#
# * host_name - The host name of whose hostset is to be found.
# type host_name: String
#
# ==== Returns
#
# Array of HostSet.
def find_host_sets(host_name)
begin
@host_set.find_host_sets(host_name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets hostset information by name.
#
# ==== Attributes
#
# * name - The name of the hostset to find.
# type name: String
#
# ==== Returns
#
# HostSet.
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_SET - The set does not exist.
def get_host_set(name)
begin
@host_set.get_host_set(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets all of the VLUNs on a specific host.
#
# ==== Attributes
#
# * host_name - Name of the host.
# type host_name: String
#
# ==== Returns
#
# Array of VLUN.
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - The specified host not found.
def get_host_vluns(host_name)
begin
@host.get_host_vluns(host_name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets all Volumes in the array
#
# ==== Returns
#
# Array of VirtualVolume
def get_volumes
begin
@volume.get_volumes(VolumeCopyType::BASE_VOLUME)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets the list of snapshots in the array
#
# ==== Returns
#
# Array of VirtualVolume
def get_snapshots
begin
@volume.get_volumes(VolumeCopyType::VIRTUAL_COPY)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets information about a volume by name
#
# ==== Attributes
#
# * name - The name of the volume to find
# type name: String
#
# ==== Returns
#
# VirtualVolume
#
# ==== Raises
#
# * Hpe3parSdk::HPE3PARException
# Error with code: 23 message: volume does not exist
def get_volume(name)
begin
@volume.get_volume(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets information about a volume by wwn
#
# ==== Attributes
#
# * wwn - The wwn of the volume to find
# type wwn: String
#
# ==== Returns
#
# * VirtualVolume
#
# ==== Raises
#
# * Hpe3parSdk::HPE3PARException
# Error with code: 23 message: volume does not exist
def get_volume_by_wwn(wwn)
begin
@volume.get_volume_by_wwn(wwn)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a new volume.
#
# ==== Attributes
#
# * name - the name of the volume
# type name: String
# * cpg_name - the name of the destination CPG
# type cpg_name: String
# * size_MiB - size in MiB for the volume
# type size_MiB: Integer
# * optional - hash of other optional items
# type optional: hash
#
# optional = {
# 'id' => 12, # Volume ID. If not specified, next
# # available is chosen
# 'comment' => 'some comment', # Additional information up to 511
# # characters
# 'policies: { # Specifies VV policies
# 'staleSS' => false, # True allows stale snapshots.
# 'oneHost' => true, # True constrains volume export to
# # single host or host cluster
# 'zeroDetect' => true, # True requests Storage System to
# # scan for zeros in incoming write
# # data
# 'system' => false, # True special volume used by system
# # False is normal user volume
# 'caching' => true}, # Read-only. True indicates write &
# # read caching & read ahead enabled
# 'snapCPG' => 'CPG name', # CPG Used for snapshots
# 'ssSpcAllocWarningPct' => 12, # Snapshot space allocation warning
# 'ssSpcAllocLimitPct' => 22, # Snapshot space allocation limit
# 'tpvv' => true, # True: Create TPVV
# # False (default) Create FPVV
# 'usrSpcAllocWarningPct' => 22, # Enable user space allocation
# # warning
# 'usrSpcAllocLimitPct' => 22, # User space allocation limit
# 'expirationHours' => 256, # Relative time from now to expire
# # volume (max 43,800 hours)
  #   'retentionHours' => 256           # Relative time from now to retain
  #                                     # volume (max 43,800 hours)
# }
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT - Invalid Parameter
# * Hpe3parSdk::HTTPBadRequest
# - TOO_LARGE - Volume size above limit
# * Hpe3parSdk::HTTPBadRequest
# - NO_SPACE - Not Enough space is available
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_SV - Volume Exists already
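  #
  # A minimal usage sketch (assumes +client+ is a logged-in Hpe3parSdk::Client;
  # volume and CPG names are illustrative assumptions):
  #
  #   # 10 GiB thin-provisioned volume in CPG 'FC_r1'
  #   client.create_volume('vol01', 'FC_r1', 10 * 1024,
  #                        { 'tpvv' => true, 'comment' => 'created via SDK' })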
def create_volume(name, cpg_name, size_MiB, optional = nil)
if @current_version < @min_version_with_compression && !optional.nil?
optional.delete_if { |key, _value| key == :compression }
end
begin
@volume.create_volume(name, cpg_name, size_MiB, optional)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes a volume
#
# ==== Attributes
#
# * name - the name of the volume
# type name: String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOL - The volume does not exist
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
# * Hpe3parSdk::HTTPForbidden
# - RETAINED - Volume retention time has not expired
# * Hpe3parSdk::HTTPForbidden
# - HAS_RO_CHILD - Volume has read-only child
# * Hpe3parSdk::HTTPConflict
# - HAS_CHILD - The volume has a child volume
# * Hpe3parSdk::HTTPConflict
# - IN_USE - The volume is in use by VV set, VLUN, etc
def delete_volume(name)
begin
@volume.delete_volume(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Modifies a volume
#
# ==== Attributes
#
# * name - the name of the volume
# type name: String
# * volumeMods - Hash of volume attributes to change
# type volumeMods: Hash
# volumeMods = {
# 'newName' => 'newName', # New volume name
# 'comment' => 'some comment', # New volume comment
# 'snapCPG' => 'CPG name', # Snapshot CPG name
# 'policies: { # Specifies VV policies
# 'staleSS' => false, # True allows stale snapshots.
# 'oneHost' => true, # True constrains volume export to
# # single host or host cluster
# 'zeroDetect' => true, # True requests Storage System to
# # scan for zeros in incoming write
# # data
# 'system' => false, # True special volume used by system
# # False is normal user volume
# 'caching' => true}, # Read-only. True indicates write &
# # read caching & read ahead enabled
# 'ssSpcAllocWarningPct' => 12, # Snapshot space allocation warning
# 'ssSpcAllocLimitPct' => 22, # Snapshot space allocation limit
# 'tpvv' => true, # True: Create TPVV
# # False: (default) Create FPVV
# 'usrSpcAllocWarningPct' => 22, # Enable user space allocation
# # warning
# 'usrSpcAllocLimitPct' => 22, # User space allocation limit
# 'userCPG' => 'User CPG name', # User CPG name
# 'expirationHours' => 256, # Relative time from now to expire
# # volume (max 43,800 hours)
# 'retentionHours' => 256, # Relative time from now to retain
# # volume (max 43,800 hours)
# 'rmSsSpcAllocWarning' => false, # True removes snapshot space
# # allocation warning.
# # False sets it when value > 0
  #   'rmUsrSpcAllocWarning' => false,  # True removes user space
# # allocation warning.
# # False sets it when value > 0
# 'rmExpTime' => false, # True resets expiration time to 0.
# # False sets it when value > 0
# 'rmSsSpcAllocLimit' => false, # True removes snapshot space
# # allocation limit.
# # False sets it when value > 0
# 'rmUsrSpcAllocLimit' => false # True removes user space
# # allocation limit.
# # False sets it when value > 0
# }
#
# ==== Raises:
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_WARN_GT_LIMIT - Allocation warning level is higher than
# the limit.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_USR_ALRT_NON_TPVV - User space allocation alerts are
# valid only with a TPVV.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_RETAIN_GT_EXPIRE - Retention time is greater than
# expiration time.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_VV_POLICY - Invalid policy specification (for example,
# caching or system is set to true).
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EXCEEDS_LENGTH - Invalid input: string length exceeds
# limit.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_TIME - Invalid time specified.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_MODIFY_USR_CPG_TPVV - usr_cpg cannot be modified
# on a TPVV.
# * Hpe3parSdk::HTTPBadRequest
# - UNLICENSED_FEATURE - Retention time cannot be modified on a
# system without the Virtual Lock license.
# * Hpe3parSdk::HTTPForbidden
# - CPG_NOT_IN_SAME_DOMAIN - Snap CPG is not in the same domain as
# the user CPG.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_PEER_VOLUME - Cannot modify a peer volume.
# * Hpe3parSdk::HTTPInternalServerError
# - INT_SERV_ERR - Metadata of the VV is corrupted.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_SYS_VOLUME - Cannot modify retention time on a
# system volume.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_INTERNAL_VOLUME - Cannot modify an internal
# volume
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_VOLUME_NOT_DEFINED_ALL_NODES - Cannot modify a
  #     volume until the volume is defined on all nodes.
# * Hpe3parSdk::HTTPConflict
# - INVALID_OPERATION_VV_ONLINE_COPY_IN_PROGRESS - Cannot modify a
# volume when an online copy for that volume is in progress.
# * Hpe3parSdk::HTTPConflict
# - INVALID_OPERATION_VV_VOLUME_CONV_IN_PROGRESS - Cannot modify a
# volume in the middle of a conversion operation.
# * Hpe3parSdk::HTTPConflict
# - INVALID_OPERATION_VV_SNAPSPACE_NOT_MOVED_TO_CPG - Snapshot space
# of a volume needs to be moved to a CPG before the user space.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_VOLUME_ACCOUNTING_IN_PROGRESS - The volume
# cannot be renamed until snapshot accounting has finished.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_ZERO_DETECT_TPVV - The zero_detect policy can be
# used only on TPVVs.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_CPG_ON_SNAPSHOT - CPG cannot be assigned to a
# snapshot.
def modify_volume(name, volume_mods)
begin
@volume.modify_volume(name, volume_mods)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Grows an existing volume by 'amount' Mebibytes.
#
# ==== Attributes
#
# * name - the name of the volume
# type name: String
# * amount: the additional size in MiB to add, rounded up to the next chunklet size (e.g. 256 or 1000 MiB)
# type amount: Integer
#
# ==== Raises:
#
# * Hpe3parSdk::HTTPForbidden
# - VV_NOT_IN_SAME_DOMAIN - The volume is not in the same domain.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOL - The volume does not exist.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_UNSUPPORTED_VV_TYPE - Invalid operation: Cannot
# grow this type of volume.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_TUNE_IN_PROGRESS - Invalid operation: Volume
# tuning is in progress.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EXCEEDS_LENGTH - Invalid input: String length exceeds
# limit.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_VV_GROW_SIZE - Invalid grow size.
# * Hpe3parSdk::HTTPForbidden
# - VV_NEW_SIZE_EXCEEDS_CPG_LIMIT - New volume size exceeds CPG limit
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_INTERNAL_VOLUME - This operation is not allowed
# on an internal volume.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_VOLUME_CONV_IN_PROGRESS - Invalid operation: VV
# conversion is in progress.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_VOLUME_COPY_IN_PROGRESS - Invalid operation:
# online copy is in progress.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_CLEANUP_IN_PROGRESS - Internal volume cleanup is
# in progress.
# * Hpe3parSdk::HTTPForbidden
# - VV_IS_BEING_REMOVED - The volume is being removed.
# * Hpe3parSdk::HTTPForbidden
# - VV_IN_INCONSISTENT_STATE - The volume has an internal consistency
# error.
# * Hpe3parSdk::HTTPForbidden
# - VV_SIZE_CANNOT_REDUCE - New volume size is smaller than the
# current size.
# * Hpe3parSdk::HTTPForbidden
# - VV_NEW_SIZE_EXCEEDS_LIMITS - New volume size exceeds the limit.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_SA_SD_SPACE_REMOVED - Invalid operation: Volume
# SA/SD space is being removed.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_IS_BUSY - Invalid operation: Volume is currently
# busy.
# * Hpe3parSdk::HTTPForbidden
# - VV_NOT_STARTED - Volume is not started.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_IS_PCOPY - Invalid operation: Volume is a
# physical copy.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_NOT_IN_NORMAL_STATE - Volume state is not normal
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_PROMOTE_IN_PROGRESS - Invalid operation: Volume
# promotion is in progress.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_PARENT_OF_PCOPY - Invalid operation: Volume is
# the parent of physical copy.
# * Hpe3parSdk::HTTPBadRequest
  #   - NO_SPACE - Insufficient space for requested operation.
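  #
  # A minimal usage sketch (assumes +client+ is a logged-in Hpe3parSdk::Client;
  # volume name and size are illustrative assumptions):
  #
  #   # Grow 'vol01' by 10 GiB (10240 MiB)
  #   client.grow_volume('vol01', 10240)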
def grow_volume(name, amount)
begin
@volume.grow_volume(name, amount)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a physical copy of a VirtualVolume
#
# ==== Attributes
#
# * src_name - the source volume name
# type src_name: String
# * dest_name - the destination volume name
# type dest_name: String
# * dest_cpg - the destination CPG
# type dest_cpg: String
# * optional - Hash of optional parameters
# type optional: Hash
#
# optional = {
# 'online' => false, # should physical copy be
# # performed online?
# 'tpvv' => false, # use thin provisioned space
# # for destination
# # (online copy only)
# 'snapCPG' => 'OpenStack_SnapCPG', # snapshot CPG for the
# # destination
# # (online copy only)
# 'saveSnapshot' => false, # save the snapshot of the
# # source volume
# 'priority' => 1 # taskPriorityEnum (does not
# # apply to online copy - Hpe3parSdk::TaskPriority)
# }
# Deletes a physical copy
#
# ==== Attributes
#
# * name - the name of the clone volume
# type name: String
#
# ==== Raises:
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOL - The volume does not exist
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
# * Hpe3parSdk::HTTPForbidden
# - RETAINED - Volume retention time has not expired
# * Hpe3parSdk::HTTPForbidden
# - HAS_RO_CHILD - Volume has read-only child
# * Hpe3parSdk::HTTPConflict
# - HAS_CHILD - The volume has a child volume
# * Hpe3parSdk::HTTPConflict
# - IN_USE - The volume is in use by VV set, VLUN, etc
def delete_physical_copy(name)
begin
@volume.delete_volume(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Tunes a volume
#
# ==== Attributes
#
# * name - the volume name
# type name: String
# * tune_operation - Enum of tune operation - 1: Change User CPG, 2: Change snap CPG
# type tune_operation: Integer
# * optional - hash of optional parameters
# type optional: hash
#
# optional = {
# 'userCPG' => 'user_cpg', # Specifies the new user
# # CPG to which the volume
# # will be tuned.
# 'snapCPG' => 'snap_cpg', # Specifies the snap CPG to
# # which the volume will be
# # tuned.
# 'conversionOperation' => 1, # conversion operation enum. Refer Hpe3parSdk::VolumeConversionOperation
# 'keepVV' => 'new_volume', # Name of the new volume
# # where the original logical disks are saved.
# 'compression' => true # Enables (true) or disables (false) compression.
# # You cannot compress a fully provisioned volume.
# }
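#
# ==== Example
#
# Illustrative sketch only: the client instance +cl+, the volume name and
# the CPG name are hypothetical; operation 1 (change user CPG) follows the
# enum described above.
#
#   cl.tune_volume('test_volume', 1, { 'userCPG' => 'FC_r6_cpg' })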
def tune_volume(name, tune_operation, optional = nil)
if @current_version < @min_version_with_compression && !optional.nil?
optional.delete_if { |key, _value| key == :compression }
end
begin
object_hash = @volume.tune_volume(name, tune_operation, optional)
get_task(object_hash['taskid'])
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Returns an array of every VolumeSet the given volume is a part of.
# The array can contain zero, one, or multiple items.
#
# ==== Attributes
#
# * name - the volume name
# type name: String
#
# ==== Returns
#
# Array of VolumeSet
#
# ==== Raises
#
# * Hpe3parSdk::HTTPForbidden
# - VV_IN_INCONSISTENT_STATE - Internal inconsistency error in vol
# * Hpe3parSdk::HTTPForbidden
# - VV_IS_BEING_REMOVED - The volume is being removed
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOLUME - The volume does not exist
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_SYS_VOLUME - Illegal op on system vol
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_INTERNAL_VOLUME - Illegal op on internal vol
def find_all_volume_sets(name)
begin
@volume_set.find_all_volume_sets(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets the Volume Sets
#
# ==== Returns
#
# Array of VolumeSet
def get_volume_sets
begin
@volume_set.get_volume_sets
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets the information about a Volume Set.
#
# ==== Attributes
#
# * name - The name of the volume set to find
# type name: String
#
# ==== Returns
#
# VolumeSet
#
# ==== Raises
#
# * Hpe3parSdk::HPE3PARException
# Error with code: 102 message: Set does not exist
def get_volume_set(name)
begin
@volume_set.get_volume_set(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a new volume set
#
# ==== Attributes
#
# * name - the volume set to create
# type name: String
# * domain: the domain where the set lives
# type domain: String
# * comment: the comment for the vv set
# type comment: String
# * setmembers: the vv(s) to add to the set, the existence of the vv(s) will not be checked
# type name: Array of String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT Invalid URI Syntax.
# * Hpe3parSdk::HTTPBadRequest
# - NON_EXISTENT_DOMAIN - Domain doesn't exist.
# * Hpe3parSdk::HTTPBadRequest
# - NO_SPACE - Not Enough space is available.
# * Hpe3parSdk::HTTPBadRequest
# - BAD_CPG_PATTERN A Pattern in a CPG specifies illegal values.
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_CPG - CPG Exists already
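#
# ==== Example
#
# Illustrative sketch only: the client instance +cl+, the set name and the
# member volume names are hypothetical.
#
#   cl.create_volume_set('vvset_db', nil, 'database volumes', ['vol_db_01', 'vol_db_02'])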
def create_volume_set(name, domain = nil, comment = nil, setmembers = nil)
begin
@volume_set.create_volume_set(name, domain, comment, setmembers)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes the volume set. You must clear all QOS rules before a volume set can be deleted.
#
# ==== Attributes
#
# * name - The name of the VolumeSet
# type name: String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_SET - The set does not exist.
# * Hpe3parSdk::HTTPConflict
# - EXPORTED_VLUN - The host set has exported VLUNs. The VV set was exported.
# * Hpe3parSdk::HTTPConflict
# - VVSET_QOS_TARGET - The object is already part of the set.
def delete_volume_set(name)
begin
@volume_set.delete_volume_set(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Modifies a volume set by adding or removing a volume from the volume
# set. Its action is based on the enums MEM_ADD or MEM_REMOVE.
#
# ==== Attributes
#
# * action: add or remove volume from the set
# type name: Hpe3parSdk::SetCustomAction
# * name: the volume set name
# type name: String
# * newName: new name of set
# type newName: String
# * comment: the comment on the vv set
# type comment: String
# * flash_cache_policy: the flash-cache policy for the vv set
# type flash_cache_policy: enum
# * setmembers: the vv to add to the set, the existence of the vv will not be checked
# type name: Array of String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - EXISTENT_SET - The set already exists.
# * Hpe3parSdk::HTTPBadRequest
# - EXISTENT_SET - The set already exists.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_SET - The set does not exist.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_IN_DOMAINSET - The host is in a domain set.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_IN_SET - The object is already part of the set.
# * Hpe3parSdk::HTTPNotFound
# - MEMBER_NOT_IN_SET - The object is not part of the set.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_NOT_IN_SAME_DOMAIN - Objects must be in the same domain to
# perform this operation.
# * Hpe3parSdk::HTTPForbidden
# - VV_IN_INCONSISTENT_STATE - The volume has an internal
# inconsistency error.
# * Hpe3parSdk::HTTPForbidden
# - VV_IS_BEING_REMOVED - The volume is being removed.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOLUME - The volume does not exist.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_SYS_VOLUME - The operation is not allowed on a
# system volume.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_INTERNAL_VOLUME - The operation is not allowed
# on an internal volume.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_DUP_NAME - Invalid input (duplicate name).
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_PARAM_CONFLICT - Invalid input (parameters cannot be
# present at the same time).
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - Invalid input: contains one or more illegal
# characters.
def modify_volume_set(name, action = nil, newName = nil, comment = nil, flash_cache_policy = nil, setmembers = nil)
begin
@volume_set.modify_volume_set(name, action, newName, comment, flash_cache_policy, setmembers)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Adds volume(s) to a volume set.
#
# ==== Attributes
#
# * set_name - the volume set name
# type set_name: String
# * setmembers - the volume(s) name to add
# type setmembers: Array of String
def add_volumes_to_volume_set(set_name, setmembers)
begin
@volume_set.add_volumes_to_volume_set(set_name, setmembers)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Removes a volume from a volume set
#
# ==== Attributes
#
# * set_name - the volume set name
# type set_name: String
# * name - the volume name to remove
# type name: String
def remove_volumes_from_volume_set(set_name, setmembers)
begin
@volume_set.remove_volumes_from_volume_set(set_name, setmembers)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a snapshot of an existing VolumeSet
#
# ==== Attributes
#
# * name: Name of the Snapshot. The vvname pattern is described in "VV Name Patterns" in the HPE 3PAR Command Line Interface Reference, which is available at the following website: http://www.hp.com/go/storage/docs
# type name: String
# * copy_of_name: the name of the parent volume
# type copy_of_name: String
# * comment: the comment on the vv set
# type comment: String
# * optional: Hash of optional params
# type optional: Hash
# optional = {
# 'id' => 12, # Specifies ID of the volume set
# # set, next by default
# 'comment' => "some comment",
# 'readOnly' => true, # Read Only
# 'expirationHours' => 36, # time from now to expire
# 'retentionHours' => 12 # time from now to expire
# }
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INVALID_INPUT_VV_PATTERN - Invalid volume pattern specified
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_SET - The set does not exist.
# * Hpe3parSdk::HTTPNotFound
# - EMPTY_SET - The set is empty
# * Hpe3parSdk::HTTPServiceUnavailable
# - VV_LIMIT_REACHED - Maximum number of volumes reached
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOL - The storage volume does not exist
# * Hpe3parSdk::HTTPForbidden
# - VV_IS_BEING_REMOVED - The volume is being removed
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_READONLY_TO_READONLY_SNAP - Creating a read-only copy from a read-only volume is not permitted
# * Hpe3parSdk::HTTPConflict
# - NO_SNAP_CPG - No snapshot CPG has been configured for the volume
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_DUP_NAME - Invalid input (duplicate name).
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_SNAP_PARENT_SAME_BASE - Two parent snapshots share the same base volume
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_ONLINE_COPY_IN_PROGRESS - Invalid operation. Online copy is in progress
# * Hpe3parSdk::HTTPServiceUnavailable
# - VV_ID_LIMIT_REACHED - Max number of volumeIDs has been reached
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOLUME - The volume does not exist
# * Hpe3parSdk::HTTPForbidden
# - VV_IN_STALE_STATE - The volume is in a stale state.
# * Hpe3parSdk::HTTPForbidden
# - VV_NOT_STARTED - Volume is not started
# * Hpe3parSdk::HTTPForbidden
# - VV_UNAVAILABLE - The volume is not accessible
# * Hpe3parSdk::HTTPServiceUnavailable
# - SNAPSHOT_LIMIT_REACHED - Max number of snapshots has been reached
# * Hpe3parSdk::HTTPServiceUnavailable
# - CPG_ALLOCATION_WARNING_REACHED - The CPG has reached the allocation warning
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_VOLUME_CONV_IN_PROGRESS - Invalid operation: VV conversion is in progress.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_CLEANUP_IN_PROGRESS - Internal volume cleanup is in progress.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_PEER_VOLUME - Cannot modify a peer volume.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_VOLUME_CONV_IN_PROGRESS - INV_OPERATION_VV_ONLINE_COPY_IN_PROGRESS - The volume is the target of an online copy.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_INTERNAL_VOLUME - Illegal op on internal vol
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_ID - An ID exists
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_NOT_IN_NORMAL_STATE - Volume state is not normal
# * Hpe3parSdk::HTTPForbidden
# - VV_IN_INCONSISTENT_STATE - Internal inconsistency error in vol
# * Hpe3parSdk::HTTPBadRequest
# - INVALID_INPUT_VV_PATTERN - - INV_INPUT_RETAIN_GT_EXPIRE - Retention time is greater than expiration time.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_TIME - Invalid time specified.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_SNAPSHOT_NOT_SAME_TYPE - Some snapshots in the volume set are read-only, some are read-write
def create_snapshot_of_volume_set(name, copy_of_name, optional = nil)
begin
@volume_set.create_snapshot_of_volume_set(name, copy_of_name, optional)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a snapshot of an existing Volume.
#
# ==== Attributes
#
# * name - the name of the Snapshot
# type name: String
# * copy_of_name - the name of the parent volume
# type copy_of_name: String
# * optional - Hash of other optional items
# type optional: Hash
#
# optional = {
# 'id' => 12, # Specifies the ID of the volume,
# # next by default
# 'comment' => "some comment",
# 'readOnly' => true, # Read Only
# 'expirationHours' => 36, # time from now to expire
# 'retentionHours' => 12 # time from now to expire
# }
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOL - The volume does not exist
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
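#
# ==== Example
#
# Illustrative sketch only: the client instance +cl+ and the volume names
# are hypothetical; the optional keys follow the hash documented above.
#
#   cl.create_snapshot('vol_db_01_snap', 'vol_db_01', { 'readOnly' => true, 'expirationHours' => 36 })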
def create_snapshot(name, copy_of_name, optional = nil)
if @current_version < @min_version_with_compression && !optional.nil?
optional.delete_if { |key, _value| key == :allowRemoteCopyParent }
end
begin
@volume.create_snapshot(name, copy_of_name, optional)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Restores from a snapshot to a volume
#
# ==== Attributes
#
# * name - the name of the Snapshot
# type name: String
# * optional - hash of other optional items
# type name: Hash
#
# optional = {
# 'online' => false, # Enables (true) or disables
# #(false) executing the promote
# #operation on an online volume.
# #The default setting is false
#
# 'priority' => 2 #Does not apply to online promote
# #operation or to stop promote
# #operation.
#
# 'allowRemoteCopyParent' => false #Allows the promote operation to
# #proceed even if the RW parent
# #volume is currently in a Remote
# #Copy volume group, if that group
# #has not been started. If the
# #Remote Copy group has been
# #started, this command fails.
# #(WSAPI 1.6 and later.)
# }
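#
# ==== Example
#
# Illustrative sketch only: the client instance +cl+ and the snapshot name
# are hypothetical; default options promote the snapshot to its base volume.
#
#   cl.restore_snapshot('vol_db_01_snap')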
#
def restore_snapshot(name, optional = nil)
if @current_version < @min_version_with_compression && !optional.nil?
optional.delete_if { |key, _value| key == :allowRemoteCopyParent }
end
begin
@volume.restore_snapshot(name, optional)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes a snapshot
#
# ==== Attributes
#
# * name - the name of the snapshot volume
# type name: String
#
# ==== Raises:
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOL - The volume does not exist
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
# * Hpe3parSdk::HTTPForbidden
# - RETAINED - Volume retention time has not expired
# * Hpe3parSdk::HTTPForbidden
# - HAS_RO_CHILD - Volume has read-only child
# * Hpe3parSdk::HTTPConflict
# - HAS_CHILD - The volume has a child volume
# * Hpe3parSdk::HTTPConflict
# - IN_USE - The volume is in use by VV set, VLUN, etc
def delete_snapshot(name)
begin
@volume.delete_volume(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets the snapshots of a particular volume
#
# ==== Attributes
#
# * name - the name of the volume
# type name: String
#
# ==== Returns
#
# Array of VirtualVolume
def get_volume_snapshots(name)
begin
@volume.get_volume_snapshots(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets an array of all ports on the 3PAR.
#
# ==== Returns
#
# Array of Port.
def get_ports
begin
@port.get_ports
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets an array of Fibre Channel Ports.
#
# * state - Port link state.
# type name: Integer. Refer Hpe3parSdk::PortLinkState for complete enumeration.
#
# ==== Returns
#
# Array of Fibre Channel Port.
def get_fc_ports(state = nil)
begin
@port.get_fc_ports(state)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets an array of iSCSI Ports.
#
# * state - Port link state.
# type name: Integer. Refer Hpe3parSdk::PortLinkState for complete enumeration.
#
# ==== Returns
#
# Array of iSCSI Port.
def get_iscsi_ports(state = nil)
begin
@port.get_iscsi_ports(state)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets an array of IP Ports.
#
# ==== Attributes
#
# * state - Port link state.
# type name: Integer. Refer Hpe3parSdk::PortLinkState for complete enumeration.
#
# ==== Returns
#
# Array of IP Port.
def get_ip_ports(state = nil)
begin
@port.get_ip_ports(state)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets entire list of CPGs.
#
# ==== Returns
#
# CPG array
def get_cpgs
begin
@cpg.get_cpgs
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets information about a Cpg.
#
# ==== Attributes
#
# * name - The name of the cpg to find
# type name: String
#
# ==== Returns
#
# CPG
#
# ==== Raises
#
# * Hpe3parSdk::HPE3PARException
# Error with code: 15 message: cpg does not exist
def get_cpg(name)
begin
@cpg.get_cpg(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a new CPG.
#
# ==== Attributes
#
# * name - Name of the cpg
# type name: String
# * optional - Hash of other optional items
# type optional: Hash
#
# optional = {
# 'growthIncrementMiB' => 100, # Growth increment in MiB for
# # each auto-grown operation
# 'growthLimitMiB': 1024, # Auto-grow operation is limited
# # to specified storage amount
# 'usedLDWarningAlertMiB': 200, # Threshold to trigger warning
# # of used logical disk space
# 'domain': 'MyDomain', # Name of the domain object
# 'LDLayout': {
# 'RAIDType': 1, # Disk Raid Type
# 'setSize': 100, # Size in number of chunklets
# 'HA': 0, # Layout supports failure of
# # one port pair (1),
# # one cage (2),
# # or one magazine (3)
# 'chunkletPosPref': 2, # Chunklet location preference
# # characteristics.
# # Lowest Number/Fastest transfer
# # = 1
# # Higher Number/Slower transfer
# # = 2
# 'diskPatterns': []} # Patterns for candidate disks
# }
#
# ==== Raises
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT Invalid URI Syntax.
# * Hpe3parSdk::HTTPBadRequest
# - NON_EXISTENT_DOMAIN - Domain doesn't exist.
# * Hpe3parSdk::HTTPBadRequest
# - NO_SPACE - Not Enough space is available.
# * Hpe3parSdk::HTTPBadRequest
# - BAD_CPG_PATTERN A Pattern in a CPG specifies illegal values.
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_CPG - Cpg Exists already
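#
# ==== Example
#
# Illustrative sketch only: the client instance +cl+, the CPG name and the
# domain name are hypothetical; any of the optional keys above may be supplied.
#
#   cl.create_cpg('FC_r6_cpg', { 'domain' => 'MyDomain' })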
def create_cpg(name, optional = nil)
begin
@cpg.create_cpg(name, optional)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Modifies a CPG.
#
# ==== Attributes
#
# * name - Name of the CPG
# type name: String
# * optional - hash of other optional items
# type optional: Hash
#
# optional = {
# 'newName' => 'newCPG', # Specifies the name of the
# # CPG to update.
# 'disableAutoGrow'=>false, # Enables (false) or
# # disables (true) CPG auto
# # grow. Defaults to false.
# 'rmGrowthLimit'=> false, # Enables (false) or
# # disables (true) auto grow
# # limit enforcement. Defaults
# # to false.
# 'rmWarningAlert'=> false, # Enables (false) or
# # disables (true) warning
# # limit enforcement. Defaults
# # to false.
# }
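#
# ==== Example
#
# Illustrative sketch only: the client instance +cl+ and the CPG names are
# hypothetical.
#
#   cl.modify_cpg('FC_r6_cpg', { 'newName' => 'FC_r6_cpg_renamed' })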
#
def modify_cpg(name, cpg_mods)
begin
@cpg.modify_cpg(name, cpg_mods)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets available space information about a cpg.
#
# ==== Attributes
#
# * name - The name of the cpg to find
# type name: String
#
# ==== Returns
#
# Available space details in form of LDLayoutCapacity object
#
# ==== Raises
#
# * Hpe3parSdk::HPE3PARException
# Error with code: 15 message: cpg does not exist
def get_cpg_available_space(name)
begin
@cpg.get_cpg_available_space(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes a CPG.
#
# ==== Attributes
#
# * name - The name of the CPG
# type name: String
#
# ==== Raises
#
# * Hpe3parSdk::HPE3PARException
# Error with code: 15 message: CPG does not exist
# * Hpe3parSdk::HTTPForbidden
# - IN_USE - The CPG cannot be removed because it's in use.
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
def delete_cpg(name)
begin
@cpg.delete_cpg(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets the status of an online physical copy
#
# ==== Attributes
#
# * name - The name of the volume
# type name: String
#
# ==== Returns
#
# Status of online copy (String)
#
# ==== Raises
#
# * Hpe3parSdk::HPE3PARException
# Error: message: Volume not an online physical copy
def get_online_physical_copy_status(name)
begin
@volume.get_online_physical_copy_status(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Stops an offline physical copy operation
#
# ==== Attributes
#
# * name - The name of the volume
# type name: String
def stop_offline_physical_copy(name)
begin
@volume.stop_offline_physical_copy(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Stops an online physical copy operation
#
# ==== Attributes
#
# * name - The name of the volume
# type name: String
def stop_online_physical_copy(name)
begin
@volume.stop_online_physical_copy(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Resynchronizes a physical copy.
#
# ==== Attributes
#
# * name - The name of the volume
# type name: String
def resync_physical_copy(name)
begin
@volume.resync_physical_copy(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Waits for a 3PAR task to end.
#
# ==== Attributes
#
# * task_id - The Id of the task to be waited upon.
# type task_id: Integer
# * poll_rate_secs - The polling interval in seconds.
# type poll_rate_secs: Integer
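#
# ==== Example
#
# Illustrative sketch only: the client instance +cl+ and the task ID are
# hypothetical; the ID would normally come from a long-running operation
# such as a tune or an offline copy.
#
#   cl.wait_for_task_to_end(12345, 10)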
def wait_for_task_to_end(task_id, poll_rate_secs = 15)
begin
@task.wait_for_task_to_end(task_id, poll_rate_secs)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Cancel a 3PAR task
#
# ==== Attributes
#
# * task_id - The Id of the task to be cancelled.
# type task_id: Integer
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - NON_ACTIVE_TASK - The task is not active at this time.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_CANNOT_CANCEL_TASK - Invalid operation: Task cannot be cancelled.
def cancel_task(task_id)
begin
@task.cancel_task(task_id)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def flash_cache_exists?
begin
@flash_cache.flash_cache_exists?
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def volume_exists?(name)
begin
@volume.volume_exists?(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def volume_set_exists?(name)
begin
@volume_set.volume_set_exists?(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def host_exists?(host_name)
begin
@host.host_exists?(host_name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def host_set_exists?(host_name)
begin
@host_set.host_set_exists?(host_name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def cpg_exists?(name)
begin
@cpg.cpg_exists?(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def flash_cache_exists?
begin
@flash_cache.flash_cache_exists?
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def online_physical_copy_exists?(src_name, phy_copy_name)
begin
@volume.online_physical_copy_exists?(src_name, phy_copy_name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def offline_physical_copy_exists?(src_name, phy_copy_name)
begin
@volume.offline_physical_copy_exists?(src_name, phy_copy_name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Logout from the 3PAR Array
def logout
unless @log_file_path.nil?
if Hpe3parSdk.logger != nil
Hpe3parSdk.logger.close
Hpe3parSdk.logger = nil
end
end
begin
@http.unauthenticate
rescue Hpe3parSdk::HPE3PARException => ex
#Do nothing
end
end
end
|
state-machines/state_machines | lib/state_machines/node_collection.rb | StateMachines.NodeCollection.<< | ruby | def <<(node)
@nodes << node
@index_names.each { |name| add_to_index(name, value(node, name), node) }
@contexts.each { |context| eval_context(context, node) }
self
end | Adds a new node to the collection. By doing so, this will also add it to
the configured indices. This will also evaluate any existing contexts
that match the new node. | train | https://github.com/state-machines/state_machines/blob/10b03af5fc9245bcb09bbd9c40c58ffba9a85422/lib/state_machines/node_collection.rb#L85-L90 | class NodeCollection
include Enumerable
# The machine associated with the nodes
attr_reader :machine
# Creates a new collection of nodes for the given state machine. By default,
# the collection is empty.
#
# Configuration options:
# * <tt>:index</tt> - One or more attributes to automatically generate
# hashed indices for in order to perform quick lookups. Default is to
# index by the :name attribute
def initialize(machine, options = {})
options.assert_valid_keys(:index)
options = { index: :name }.merge(options)
@machine = machine
@nodes = []
@index_names = Array(options[:index])
@indices = @index_names.reduce({}) do |indices, name|
indices[name] = {}
indices[:"#{name}_to_s"] = {}
indices[:"#{name}_to_sym"] = {}
indices
end
@default_index = Array(options[:index]).first
@contexts = []
end
# Creates a copy of this collection such that modifications don't affect
# the original collection
def initialize_copy(orig) #:nodoc:
super
nodes = @nodes
contexts = @contexts
@nodes = []
@contexts = []
@indices = @indices.reduce({}) { |indices, (name, *)| indices[name] = {}; indices }
# Add nodes *prior* to copying over the contexts so that they don't get
# evaluated multiple times
concat(nodes.map { |n| n.dup })
@contexts = contexts.dup
end
# Changes the current machine associated with the collection. In turn, this
# will change the state machine associated with each node in the collection.
def machine=(new_machine)
@machine = new_machine
each { |node| node.machine = new_machine }
end
# Gets the number of nodes in this collection
def length
@nodes.length
end
# Gets the set of unique keys for the given index
def keys(index_name = @default_index)
index(index_name).keys
end
# Tracks a context that should be evaluated for any nodes that get added
# which match the given set of nodes. Matchers can be used so that the
# context can get added once and evaluated after multiple adds.
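#
# Illustrative sketch (the collection and node names are hypothetical); the
# block is re-evaluated for any matching node added later:
#
#   states.context([:parked, :idling]) do
#     def speed
#       0
#     end
#   end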
def context(nodes, &block)
nodes = nodes.first.is_a?(Matcher) ? nodes.first : WhitelistMatcher.new(nodes)
@contexts << context = { nodes: nodes, block: block }
# Evaluate the new context for existing nodes
each { |node| eval_context(context, node) }
context
end
# Adds a new node to the collection. By doing so, this will also add it to
# the configured indices. This will also evaluate any existing contexts
# that match the new node.
# Appends a group of nodes to the collection
def concat(nodes)
nodes.each { |node| self << node }
end
# Updates the indexed keys for the given node. If the node's attribute
# has changed since it was added to the collection, the old indexed keys
# will be replaced with the updated ones.
def update(node)
@index_names.each { |name| update_index(name, node) }
end
# Calls the block once for each element in self, passing that element as a
# parameter.
#
# states = StateMachines::NodeCollection.new
# states << StateMachines::State.new(machine, :parked)
# states << StateMachines::State.new(machine, :idling)
# states.each {|state| puts state.name, ' -- '}
#
# ...produces:
#
# parked -- idling --
def each
@nodes.each { |node| yield node }
self
end
# Gets the node at the given index.
#
# states = StateMachines::NodeCollection.new
# states << StateMachines::State.new(machine, :parked)
# states << StateMachines::State.new(machine, :idling)
#
# states.at(0).name # => :parked
# states.at(1).name # => :idling
def at(index)
@nodes[index]
end
# Gets the node indexed by the given key. By default, this will look up the
# key in the first index configured for the collection. A custom index can
# be specified like so:
#
# collection['parked', :value]
#
# The above will look up the "parked" key in a hash indexed by each node's
# +value+ attribute.
#
# If the key cannot be found, then nil will be returned.
def [](key, index_name = @default_index)
index(index_name)[key] ||
index(:"#{index_name}_to_s")[key.to_s] ||
to_sym?(key) && index(:"#{index_name}_to_sym")[:"#{key}"] ||
nil
end
# Gets the node indexed by the given key. By default, this will look up the
# key in the first index configured for the collection. A custom index can
# be specified like so:
#
# collection['parked', :value]
#
# The above will look up the "parked" key in a hash indexed by each node's
# +value+ attribute.
#
# If the key cannot be found, then an IndexError exception will be raised:
#
# collection['invalid', :value] # => IndexError: "invalid" is an invalid value
def fetch(key, index_name = @default_index)
self[key, index_name] || fail(IndexError, "#{key.inspect} is an invalid #{index_name}")
end
protected
# Gets the given index. If the index does not exist, then an ArgumentError
# is raised.
def index(name)
fail ArgumentError, 'No indices configured' unless @indices.any?
@indices[name] || fail(ArgumentError, "Invalid index: #{name.inspect}")
end
# Gets the value for the given attribute on the node
def value(node, attribute)
node.send(attribute)
end
# Adds the given key / node combination to an index, including the string
# and symbol versions of the index
def add_to_index(name, key, node)
index(name)[key] = node
index(:"#{name}_to_s")[key.to_s] = node
index(:"#{name}_to_sym")[:"#{key}"] = node if to_sym?(key)
end
# Removes the given key from an index, including the string and symbol
# versions of the index
def remove_from_index(name, key)
index(name).delete(key)
index(:"#{name}_to_s").delete(key.to_s)
index(:"#{name}_to_sym").delete(:"#{key}") if to_sym?(key)
end
# Updates the node for the given index, including the string and symbol
# versions of the index
def update_index(name, node)
index = self.index(name)
old_key = index.key(node)
new_key = value(node, name)
# Only replace the key if it's changed
if old_key != new_key
remove_from_index(name, old_key)
add_to_index(name, new_key, node)
end
end
# Determines whether the given value can be converted to a symbol
def to_sym?(value)
"#{value}" != ''
end
# Evaluates the given context for a particular node. This will only
# evaluate the context if the node matches.
def eval_context(context, node)
node.context(&context[:block]) if context[:nodes].matches?(node.name)
end
end
|
ideonetwork/lato-blog | app/controllers/lato_blog/back/posts_controller.rb | LatoBlog.Back::PostsController.create | ruby | def create
@post = LatoBlog::Post.new(new_post_params)
unless @post.save
flash[:danger] = @post.errors.full_messages.to_sentence
redirect_to lato_blog.new_post_path
return
end
flash[:success] = LANGUAGES[:lato_blog][:flashes][:post_create_success]
redirect_to lato_blog.post_path(@post.id)
end | This function creates a new post. | train | https://github.com/ideonetwork/lato-blog/blob/a0d92de299a0e285851743b9d4a902f611187cba/app/controllers/lato_blog/back/posts_controller.rb#L49-L60 | class Back::PostsController < Back::BackController
before_action do
core__set_menu_active_item('blog_articles')
end
# This function shows the list of published posts.
def index
core__set_header_active_page_title(LANGUAGES[:lato_blog][:pages][:posts])
# find correct status to show
@posts_status = 'published'
@posts_status = 'drafted' if params[:status] && params[:status] === 'drafted'
@posts_status = 'deleted' if params[:status] && params[:status] === 'deleted'
# find informations data
@posts_informations = {
published_length: LatoBlog::Post.published.where(meta_language: cookies[:lato_blog__current_language]).length,
drafted_length: LatoBlog::Post.drafted.where(meta_language: cookies[:lato_blog__current_language]).length,
deleted_length: LatoBlog::Post.deleted.where(meta_language: cookies[:lato_blog__current_language]).length
}
# find posts to show
@posts = LatoBlog::Post.where(meta_status: @posts_status,
meta_language: cookies[:lato_blog__current_language]).joins(:post_parent).order('lato_blog_post_parents.publication_datetime DESC')
@widget_index_posts = core__widgets_index(@posts, search: 'title', pagination: 10)
end
# This function shows a single post. It create a redirect to the edit path.
def show
# use edit as default post show page
redirect_to lato_blog.edit_post_path(params[:id])
end
# This function shows the view to create a new post.
def new
core__set_header_active_page_title(LANGUAGES[:lato_blog][:pages][:posts_new])
@post = LatoBlog::Post.new
set_current_language params[:language] if params[:language]
if params[:parent]
@post_parent = LatoBlog::PostParent.find_by(id: params[:parent])
end
fetch_external_objects
end
# This function creates a new post.
# This function shows the view to edit a post.
def edit
core__set_header_active_page_title(LANGUAGES[:lato_blog][:pages][:posts_edit])
@post = LatoBlog::Post.find_by(id: params[:id])
return unless check_post_presence
if @post.meta_language != cookies[:lato_blog__current_language]
set_current_language @post.meta_language
end
fetch_external_objects
end
# This function updates a post.
def update
@post = LatoBlog::Post.find_by(id: params[:id])
return unless check_post_presence
# update for autosaving
autosaving = params[:autosave] && params[:autosave] == 'true'
if autosaving
@post.update(edit_post_params)
update_fields
render status: 200, json: {} # render something positive :)
return
end
# check post data update
unless @post.update(edit_post_params)
flash[:danger] = @post.errors.full_messages.to_sentence
redirect_to lato_blog.edit_post_path(@post.id)
return
end
# update single fields
unless update_fields
flash[:warning] = LANGUAGES[:lato_blog][:flashes][:post_update_fields_warning]
redirect_to lato_blog.edit_post_path(@post.id)
return
end
# render positive response
flash[:success] = LANGUAGES[:lato_blog][:flashes][:post_update_success]
redirect_to lato_blog.post_path(@post.id)
end
# This function updates the status of a post.
def update_status
@post = LatoBlog::Post.find_by(id: params[:id])
return unless check_post_presence
@post.update(meta_status: params[:status])
end
# This function updates the publication datetime of a post (update the post parent).
def update_publication_datetime
@post = LatoBlog::Post.find_by(id: params[:id])
return unless check_post_presence
@post.post_parent.update(publication_datetime: params[:publication_datetime])
end
# This function updates the categories of a post.
def update_categories
@post = LatoBlog::Post.find_by(id: params[:id])
return unless check_post_presence
params[:categories].each do |category_id, value|
category = LatoBlog::Category.find_by(id: category_id)
next if !category || category.meta_language != @post.meta_language
category_post = LatoBlog::CategoryPost.find_by(lato_blog_post_id: @post.id, lato_blog_category_id: category.id)
if value == 'true'
LatoBlog::CategoryPost.create(lato_blog_post_id: @post.id, lato_blog_category_id: category.id) unless category_post
else
category_post.destroy if category_post
end
end
end
# This function updates the tags of a post.
def update_tags
@post = LatoBlog::Post.find_by(id: params[:id])
return unless check_post_presence
params_tags = params[:tags].map(&:to_i)
tag_posts = LatoBlog::TagPost.where(lato_blog_post_id: @post.id)
params_tags.each do |tag_id|
tag = LatoBlog::Tag.find_by(id: tag_id)
next if !tag || tag.meta_language != @post.meta_language
tag_post = tag_posts.find_by(lato_blog_tag_id: tag.id)
LatoBlog::TagPost.create(lato_blog_post_id: @post.id, lato_blog_tag_id: tag.id) unless tag_post
end
tag_ids = tag_posts.pluck(:lato_blog_tag_id)
tag_ids.each do |tag_id|
next if params_tags.include?(tag_id)
tag_post = tag_posts.find_by(lato_blog_tag_id: tag_id)
tag_post.destroy if tag_post
end
end
# This function updates the seo description of a post.
def update_seo_description
@post = LatoBlog::Post.find_by(id: params[:id])
return unless check_post_presence
@post.update(seo_description: params[:seo_description])
end
# This function destroyes a post.
def destroy
@post = LatoBlog::Post.find_by(id: params[:id])
return unless check_post_presence
unless @post.destroy
flash[:danger] = @post.post_parent.errors.full_messages.to_sentence
redirect_to lato_blog.edit_post_path(@post.id)
return
end
flash[:success] = LANGUAGES[:lato_blog][:flashes][:post_destroy_success]
redirect_to lato_blog.posts_path(status: 'deleted')
end
# This function destroys all posts with status deleted.
def destroy_all_deleted
@posts = LatoBlog::Post.deleted
if !@posts || @posts.empty?
flash[:warning] = LANGUAGES[:lato_blog][:flashes][:deleted_posts_not_found]
redirect_to lato_blog.posts_path(status: 'deleted')
return
end
@posts.each do |post|
unless post.destroy
flash[:danger] = post.errors.full_messages.to_sentence
redirect_to lato_blog.edit_post_path(post.id)
return
end
end
flash[:success] = LANGUAGES[:lato_blog][:flashes][:deleted_posts_destroy_success]
redirect_to lato_blog.posts_path(status: 'deleted')
end
# Private functions:
# **************************************************************************
private
def fetch_external_objects
@categories = LatoBlog::Category.all.where(meta_language: cookies[:lato_blog__current_language])
@tags = LatoBlog::Tag.all.where(meta_language: cookies[:lato_blog__current_language])
@medias = LatoMedia::Media.all
end
# This function checks the @post variable is present and redirect to index if it not exist.
def check_post_presence
if !@post
flash[:warning] = LANGUAGES[:lato_blog][:flashes][:post_not_found]
redirect_to lato_blog.posts_path
return false
end
true
end
# Update fields helpers:
# **************************************************************************
# This function checks all fields params and updates the value
# on the database for the field.
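# Expected params shape (a hypothetical illustration; ids and values are
# made up): params[:fields] = { '12' => 'Some text', '34' => '2018-01-01' },
# where each key is a post field id and each value is the new content for
# that field (nested hashes are used for composed, relay and
# geolocalization fields).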
def update_fields
return true unless params[:fields]
params[:fields].each do |id, value|
field = @post.post_fields.find_by(id: id)
return false unless field
return false unless update_field(field, value)
end
true
end
# This function updates a single field from its key and value.
def update_field(field, value)
case field.typology
when 'text'
update_field_text(field, value)
when 'textarea'
update_field_textarea(field, value)
when 'datetime'
update_field_datetime(field, value)
when 'editor'
update_field_editor(field, value)
when 'geolocalization'
update_field_geolocalization(field, value)
when 'image'
update_field_image(field, value)
when 'gallery'
update_field_gallery(field, value)
when 'youtube'
update_field_youtube(field, value)
when 'composed'
update_field_composed(field, value)
when 'relay'
update_field_relay(field, value)
end
end
# Update specific fields helpers:
# **************************************************************************
# Text.
def update_field_text(field, value)
field.update(value: value)
end
# Textarea.
def update_field_textarea(field, value)
field.update(value: value)
end
# Datetime.
def update_field_datetime(field, value)
field.update(value: value)
end
# Editor.
def update_field_editor(field, value)
field.update(value: value)
end
# Geolocalization.
def update_field_geolocalization(field, value)
final_value = {
lat: value[:lat],
lng: value[:lng],
address: value[:address]
}
field.update(value: final_value)
end
# Image.
def update_field_image(field, value)
field.update(value: value)
end
# Gallery.
def update_field_gallery(field, value)
field.update(value: value)
end
# Youtube.
def update_field_youtube(field, value)
field.update(value: value)
end
# Composed.
def update_field_composed(field, value)
# find composed children
child_fields = field.post_fields.visibles
# loop values and update single children
value.each do |child_id, child_value|
child_field = child_fields.find_by(id: child_id)
return false unless child_field
return false unless update_field(child_field, child_value)
end
true
end
# Relay.
def update_field_relay(field, value)
# find composed children
child_fields = field.post_fields.visibles
# loop values and update single children
value.each do |child_id, child_value|
if child_id.include?('position')
child_id = child_id.dup
child_id.slice! 'position'
return false unless update_field_relay_single_position(child_id, child_value, child_fields)
else
return false unless update_field_relay_single_value(child_id, child_value, child_fields)
end
end
true
end
def update_field_relay_single_value(id, value, child_fields)
child_field = child_fields.find_by(id: id)
return false unless child_field
update_field(child_field, value)
end
def update_field_relay_single_position(id, value, child_fields)
child_field = child_fields.find_by(id: id)
return false unless child_field
child_field.update(position: value)
end
# Params helpers:
# **************************************************************************
# This function generate params for a new post.
def new_post_params
# take params from front-end request
post_params = params.require(:post).permit(:title, :subtitle).to_h
# add current superuser id
post_params[:lato_core_superuser_creator_id] = @core__current_superuser.id
# add post parent id
post_params[:lato_blog_post_parent_id] = (params[:parent] && !params[:parent].blank? ? params[:parent] : generate_post_parent)
# add metadata
post_params[:meta_language] = cookies[:lato_blog__current_language]
post_params[:meta_status] = BLOG_POSTS_STATUS[:drafted]
# return final post object
post_params
end
# This function generate params for a edit post.
def edit_post_params
params.require(:post).permit(:meta_permalink, :title, :subtitle, :content, :excerpt)
end
# This function generate and save a new post parent and return the id.
def generate_post_parent
post_parent = LatoBlog::PostParent.create
post_parent.id
end
end
|
chikamichi/logg | lib/logg/core.rb | Logg.Dispatcher.as | ruby | def as(method, &block)
raise ArgumentError, 'Missing mandatory block' unless block_given?
method = method.to_sym
# Define the guard at class-level, if not already defined.
if !eigenclass.respond_to?(method)
eigenclass.send(:define_method, method) do |*args|
Render.new.instance_exec(*args, &block)
end
end
# Define the guard at instance-level by overriding #initialize, if not
# already defined.
eigenclass.send(:define_method, :new) do
o = super
if !o.respond_to?(method)
o.send(:define_method, method) do |*args|
Render.new.instance_exec(*args, &block)
end
end
o
end
end | Define a custom logger, using a template. The template may be defined
within the block as a (multi-line) string, or one may reference a
file.
# do whatever you want with data or anything else, for instance,
send mails, tweet, then…
Inline templates (defined within the block) make use of #render_inline
(indentation broken for the sake of example readability):
logger.as(:custom) do |response|
tpl = <<-TPL
%h2 Query log report
%span
Status:
= data.status
%span
Response:
= data.body
%br/
TPL
puts render_inline(tpl, :as => :haml, :data => response)
end
With an external template, one should use the #render helper to, well,
render the template file. The extension will be used to infer the proper
rendering engine. If not provided or when a custom extension is used, one
may declare the template syntax.
logger.as(:custom) do |data|
# do whatever you want with data or anything else, then…
out = render('my/template.erb', :data => data)
# one may then use out to send mails, log to file, tweet…
end
logger.as(:custom) do |data|
render('my/template', :as => :erb, :data => data)
end
See #render and #render_inline for more details.
TODO: memoize the Render instance somehow? Or find another trick to
execute the block. | train | https://github.com/chikamichi/logg/blob/fadc70f80ee48930058db131888aabf7da21da2d/lib/logg/core.rb#L138-L161 | class Dispatcher
class Render
# Render a template. Just a mere proxy for Tilt::Template#render method,
# the first argument being the filepath or file, and the latter,
# the usual arguments for Tilt's #render.
#
# @param [String, #path, #realpath] path filepath or an object behaving
# like a legacy File
# @param [Object] obj context object the template will be rendered within
# @param [Hash] args rendering context
# @option [Symbol] :as syntax engine
# @option [Object] :data template's rendering contextual object
# @option [Hash] :locals template's locals
# @return [String] the interpolated template
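#
# @example Illustrative sketch (the template path and the +report+ object are hypothetical)
#   render('templates/report.erb', :data => report, :locals => { :title => 'Query report' })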
#
def render(path, *args)
args = args.first
path = detect_path(path)
tpl = fetch_template(args, path)
tpl.render(args[:data], args[:locals])
end
def render_inline(content, *args)
args = args.first
syntax = detect_syntax(args)
res = Object
Better::Tempfile.open(['dummylogg', ".#{syntax}"]) do |f|
f.write(content)
f.rewind
res = Tilt.new(f.path).render(args[:data], args[:locals])
end
res
end
def detect_path(path)
if path.respond_to?(:path)
path.path
elsif path.respond_to?(:realpath)
path.to_s
elsif path.respond_to?(:to_s)
path.to_s
else
raise ArgumentError, 'Missing file or a filepath.'
end
end
def fetch_template(args, path)
if args[:as]
begin
test_path = Pathname.new(path)
raise ArgumentError, "Invalid filepath #{path}" unless test_path.file?
rescue
test_path = Pathname.new(path + ".#{args[:as].to_s.downcase}")
raise ArgumentError, "Invalid filepath #{path}" unless test_path.file?
path = test_path.to_s
end
Tilt.const_get("#{args[:as].to_s.downcase.capitalize}Template").new(path)
else
Tilt.new(path)
end
end
def detect_syntax(options)
unless options.has_key?(:as)
raise ArgumentError, 'Missing template syntax specified as the :as option.'
end
options[:as].to_s
end
end
attr_reader :message, :namespace
# The Dispatcher default behavior relies on #method_missing. It sets both the
# message and a namespace, then auto-sends the order to output.
#
def method_missing(meth, *args, &block)
@namespace = meth.to_s
@message = (args.first.to_s == 'debug') ? nil : args.first.to_s
self.send :output!
end
def eigenclass
class << self; self; end
end
# Define a custom logger, using a template. The template may be defined
# within the block as a (multi-line) string, or one may reference a
# file.
# # do whatever you want with data or anything else, for instance,
# send mails, tweet, then…
#
# Inline templates (defined within the block) make use of #render_inline
# (indentation broken for the sake of example readability):
#
# logger.as(:custom) do |response|
# tpl = <<-TPL
# %h2 Query log report
# %span
# Status:
# = data.status
# %span
# Response:
# = data.body
# %br/
# TPL
# puts render_inline(tpl, :as => :haml, :data => response)
# end
#
# With an external template, one should use the #render helper to, well,
# render the template file. The extension will be used to infer the proper
# rendering engine. If not provided or when a custom extension is used, one
# may declare the template syntax.
#
# logger.as(:custom) do |data|
# # do whatever you want with data or anything else, then…
# out = render('my/template.erb', :data => data)
# # one may then use out to send mails, log to file, tweet…
# end
#
# logger.as(:custom) do |data|
# render('my/template', :as => :erb, :data => data)
# end
#
# See #render and #render_inline for more details.
#
# TODO: memoize the Render instance somehow? Or find another trick to
# execute the block.
#
private
# Default logging behavior. Outputs to $stdout using #puts and returns
# the message.
#
def output!
output = "#{Time.now} | "
output += "[#{@namespace.gsub('_', ' ')}] " unless @namespace.nil?
output += @message
puts output if defined?(Logg::ALWAYS_PUTS) && Logg::ALWAYS_PUTS
return output
end
end
|
lostisland/faraday | lib/faraday/connection.rb | Faraday.Connection.initialize_proxy | ruby | def initialize_proxy(url, options)
@manual_proxy = !!options.proxy
@proxy =
if options.proxy
ProxyOptions.from(options.proxy)
else
proxy_from_env(url)
end
@temp_proxy = @proxy
end | Initializes a new Faraday::Connection.
@param url [URI, String] URI or String base URL to use as a prefix for all
requests (optional).
@param options [Hash, Faraday::ConnectionOptions]
@option options [URI, String] :url ('http:/') URI or String base URL
@option options [Hash<String => String>] :params URI query unencoded
key/value pairs.
@option options [Hash<String => String>] :headers Hash of unencoded HTTP
header key/value pairs.
@option options [Hash] :request Hash of request options.
@option options [Hash] :ssl Hash of SSL options.
@option options [Hash, URI, String] :proxy proxy options, either as a URL
or as a Hash
@option options [URI, String] :proxy[:uri]
@option options [String] :proxy[:user]
@option options [String] :proxy[:password]
@yield [self] after all setup has been done | train | https://github.com/lostisland/faraday/blob/3abe9d1eea4bdf61cdf7b76ff9f1ae7e09482e70/lib/faraday/connection.rb#L94-L103 | class Connection
# A Set of allowed HTTP verbs.
METHODS = Set.new %i[get post put delete head patch options trace connect]
# @return [Hash] URI query unencoded key/value pairs.
attr_reader :params
# @return [Hash] unencoded HTTP header key/value pairs.
attr_reader :headers
# @return [String] a URI with the prefix used for all requests from this
# Connection. This includes a default host name, scheme, port, and path.
attr_reader :url_prefix
# @return [Faraday::Builder] Builder for this Connection.
attr_reader :builder
# @return [Hash] SSL options.
attr_reader :ssl
# @return [Object] the parallel manager for this Connection.
attr_reader :parallel_manager
# Sets the default parallel manager for this connection.
attr_writer :default_parallel_manager
# @return [Hash] proxy options.
attr_reader :proxy
# Initializes a new Faraday::Connection.
#
# @param url [URI, String] URI or String base URL to use as a prefix for all
# requests (optional).
# @param options [Hash, Faraday::ConnectionOptions]
# @option options [URI, String] :url ('http:/') URI or String base URL
# @option options [Hash<String => String>] :params URI query unencoded
# key/value pairs.
# @option options [Hash<String => String>] :headers Hash of unencoded HTTP
# header key/value pairs.
# @option options [Hash] :request Hash of request options.
# @option options [Hash] :ssl Hash of SSL options.
# @option options [Hash, URI, String] :proxy proxy options, either as a URL
# or as a Hash
# @option options [URI, String] :proxy[:uri]
# @option options [String] :proxy[:user]
# @option options [String] :proxy[:password]
# @yield [self] after all setup has been done
def initialize(url = nil, options = nil)
options = ConnectionOptions.from(options)
if url.is_a?(Hash) || url.is_a?(ConnectionOptions)
options = options.merge(url)
url = options.url
end
@parallel_manager = nil
@headers = Utils::Headers.new
@params = Utils::ParamsHash.new
@options = options.request
@ssl = options.ssl
@default_parallel_manager = options.parallel_manager
@builder = options.builder || begin
# pass an empty block to Builder so it doesn't assume default middleware
options.new_builder(block_given? ? proc { |b| } : nil)
end
self.url_prefix = url || 'http:/'
@params.update(options.params) if options.params
@headers.update(options.headers) if options.headers
initialize_proxy(url, options)
yield(self) if block_given?
@headers[:user_agent] ||= "Faraday v#{VERSION}"
end
# Sets the Hash of URI query unencoded key/value pairs.
# @param hash [Hash]
def params=(hash)
@params.replace hash
end
# Sets the Hash of unencoded HTTP header key/value pairs.
# @param hash [Hash]
def headers=(hash)
@headers.replace hash
end
extend Forwardable
def_delegators :builder, :build, :use, :request, :response, :adapter, :app
# @!method get(url = nil, params = nil, headers = nil)
# Makes a GET HTTP request without a body.
# @!scope class
#
# @param url [String] The optional String base URL to use as a prefix for
# all requests. Can also be the options Hash.
# @param params [Hash] Hash of URI query unencoded key/value pairs.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @example
# conn.get '/items', { page: 1 }, :accept => 'application/json'
#
# # ElasticSearch example sending a body with GET.
# conn.get '/twitter/tweet/_search' do |req|
# req.headers[:content_type] = 'application/json'
# req.params[:routing] = 'kimchy'
# req.body = JSON.generate(query: {...})
# end
#
# @yield [Faraday::Request] for further request customizations
# @return [Faraday::Response]
# @!method head(url = nil, params = nil, headers = nil)
# Makes a HEAD HTTP request without a body.
# @!scope class
#
# @param url [String] The optional String base URL to use as a prefix for
# all requests. Can also be the options Hash.
# @param params [Hash] Hash of URI query unencoded key/value pairs.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @example
# conn.head '/items/1'
#
# @yield [Faraday::Request] for further request customizations
# @return [Faraday::Response]
# @!method delete(url = nil, params = nil, headers = nil)
# Makes a DELETE HTTP request without a body.
# @!scope class
#
# @param url [String] The optional String base URL to use as a prefix for
# all requests. Can also be the options Hash.
# @param params [Hash] Hash of URI query unencoded key/value pairs.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @example
# conn.delete '/items/1'
#
# @yield [Faraday::Request] for further request customizations
# @return [Faraday::Response]
# @!method connect(url = nil, params = nil, headers = nil)
# Makes a CONNECT HTTP request without a body.
# @!scope class
#
# @param url [String] The optional String base URL to use as a prefix for
# all requests. Can also be the options Hash.
# @param params [Hash] Hash of URI query unencoded key/value pairs.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @example
# conn.connect '/items/1'
#
# @yield [Faraday::Request] for further request customizations
# @return [Faraday::Response]
# @!method trace(url = nil, params = nil, headers = nil)
# Makes a TRACE HTTP request without a body.
# @!scope class
#
# @param url [String] The optional String base URL to use as a prefix for
# all requests. Can also be the options Hash.
# @param params [Hash] Hash of URI query unencoded key/value pairs.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @example
# conn.connect '/items/1'
#
# @yield [Faraday::Request] for further request customizations
# @return [Faraday::Response]
# @!visibility private
METHODS_WITH_QUERY.each do |method|
class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{method}(url = nil, params = nil, headers = nil)
run_request(:#{method}, url, nil, headers) do |request|
request.params.update(params) if params
yield request if block_given?
end
end
RUBY
end
# @overload options()
# Returns current Connection options.
#
# @overload options(url, params = nil, headers = nil)
# Makes an OPTIONS HTTP request to the given URL.
# @param url [String] String base URL to use as a prefix for all requests.
# @param params [Hash] Hash of URI query unencoded key/value pairs.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @example
# conn.options '/items/1'
#
# @yield [Faraday::Request] for further request customizations
# @return [Faraday::Response]
def options(*args)
return @options if args.size.zero?
url, params, headers = *args
run_request(:options, url, nil, headers) do |request|
request.params.update(params) if params
yield request if block_given?
end
end
# @!method post(url = nil, body = nil, headers = nil)
# Makes a POST HTTP request with a body.
# @!scope class
#
# @param url [String] The optional String base URL to use as a prefix for
# all requests. Can also be the options Hash.
# @param body [String] body for the request.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @example
# conn.post '/items', data, content_type: 'application/json'
#
# # Simple ElasticSearch indexing sample.
# conn.post '/twitter/tweet' do |req|
# req.headers[:content_type] = 'application/json'
# req.params[:routing] = 'kimchy'
# req.body = JSON.generate(user: 'kimchy', ...)
# end
#
# @yield [Faraday::Request] for further request customizations
# @return [Faraday::Response]
# @!method put(url = nil, body = nil, headers = nil)
# Makes a PUT HTTP request with a body.
# @!scope class
#
# @param url [String] The optional String base URL to use as a prefix for
# all requests. Can also be the options Hash.
# @param body [String] body for the request.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @example
# # TODO: Make it a PUT example
# conn.post '/items', data, content_type: 'application/json'
#
# # Simple ElasticSearch indexing sample.
# conn.post '/twitter/tweet' do |req|
# req.headers[:content_type] = 'application/json'
# req.params[:routing] = 'kimchy'
# req.body = JSON.generate(user: 'kimchy', ...)
# end
#
# @yield [Faraday::Request] for further request customizations
# @return [Faraday::Response]
# @!visibility private
METHODS_WITH_BODY.each do |method|
class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{method}(url = nil, body = nil, headers = nil, &block)
run_request(:#{method}, url, body, headers, &block)
end
RUBY
end
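# For illustration, the class_eval above expands the :post entry documented
# before it into a method roughly equivalent to:
#
#   def post(url = nil, body = nil, headers = nil, &block)
#     run_request(:post, url, body, headers, &block)
#   end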
# Sets up the Authorization header with these credentials, encoded
# with base64.
#
# @param login [String] The authentication login.
# @param pass [String] The authentication password.
#
# @example
#
# conn.basic_auth 'Aladdin', 'open sesame'
# conn.headers['Authorization']
# # => "Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=="
#
# @return [void]
def basic_auth(login, pass)
set_authorization_header(:basic_auth, login, pass)
end
# Sets up the Authorization header with the given token.
#
# @param token [String]
# @param options [Hash] extra token options.
#
# @example
#
# conn.token_auth 'abcdef', foo: 'bar'
# conn.headers['Authorization']
# # => "Token token=\"abcdef\",
# foo=\"bar\""
#
# @return [void]
def token_auth(token, options = nil)
set_authorization_header(:token_auth, token, options)
end
# Sets up a custom Authorization header.
#
# @param type [String] authorization type
# @param token [String, Hash] token. A String value is taken literally, and
# a Hash is encoded into comma-separated key/value pairs.
#
# @example
#
# conn.authorization :Bearer, 'mF_9.B5f-4.1JqM'
# conn.headers['Authorization']
# # => "Bearer mF_9.B5f-4.1JqM"
#
# conn.authorization :Token, token: 'abcdef', foo: 'bar'
# conn.headers['Authorization']
# # => "Token token=\"abcdef\",
# foo=\"bar\""
#
# @return [void]
def authorization(type, token)
set_authorization_header(:authorization, type, token)
end
# Check if the adapter is parallel-capable.
#
# @yield if the adapter isn't parallel-capable, or if no adapter is set yet.
#
# @return [Object, nil] a parallel manager or nil if yielded
# @api private
def default_parallel_manager
@default_parallel_manager ||= begin
adapter = @builder.adapter.klass if @builder.adapter
if support_parallel?(adapter)
adapter.setup_parallel_manager
elsif block_given?
yield
end
end
end
# Determine if this Faraday::Connection can make parallel requests.
#
# @return [Boolean]
def in_parallel?
!!@parallel_manager
end
# Sets up the parallel manager to make a set of requests.
#
# @param manager [Object] The parallel manager that this Connection's
# Adapter uses.
#
# @yield a block to execute multiple requests.
# @return [void]
def in_parallel(manager = nil)
@parallel_manager = manager || default_parallel_manager do
warn 'Warning: `in_parallel` called but no parallel-capable adapter ' \
'on Faraday stack'
warn caller[2, 10].join("\n")
nil
end
yield
@parallel_manager&.run
ensure
@parallel_manager = nil
end
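# Illustrative sketch of in_parallel above (assumes a parallel-capable adapter
# such as :typhoeus is configured; otherwise the warning is printed and the
# requests run serially):
#
#   conn.in_parallel do
#     @first  = conn.get('/items/1')
#     @second = conn.get('/items/2')
#   end
#   @first.body # responses are populated only after the parallel run finishes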
# Sets the Hash proxy options.
#
# @param new_value [Object]
def proxy=(new_value)
@manual_proxy = true
@proxy = new_value ? ProxyOptions.from(new_value) : nil
end
def_delegators :url_prefix, :scheme, :scheme=, :host, :host=, :port, :port=
def_delegator :url_prefix, :path, :path_prefix
# Parses the given URL with URI and stores the individual
# components in this connection. These components serve as defaults for
# requests made by this connection.
#
# @param url [String, URI]
# @param encoder [Object]
#
# @example
#
# conn = Faraday::Connection.new { ... }
# conn.url_prefix = "https://sushi.com/api"
# conn.scheme # => https
# conn.path_prefix # => "/api"
#
# conn.get("nigiri?page=2") # accesses https://sushi.com/api/nigiri
def url_prefix=(url, encoder = nil)
uri = @url_prefix = Utils.URI(url)
self.path_prefix = uri.path
params.merge_query(uri.query, encoder)
uri.query = nil
with_uri_credentials(uri) do |user, password|
basic_auth user, password
uri.user = uri.password = nil
end
end
# Sets the path prefix and ensures that it always has a leading
# slash.
#
# @param value [String]
#
# @return [String] the new path prefix
def path_prefix=(value)
url_prefix.path = if value
value = '/' + value unless value[0, 1] == '/'
value
end
end
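# For example, path_prefix= above adds a missing leading slash automatically:
#
#   conn.path_prefix = 'api'
#   conn.path_prefix # => "/api"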
# Takes a relative url for a request and combines it with the defaults
# set on the connection instance.
#
# @param url [String]
# @param extra_params [Hash]
#
# @example
# conn = Faraday::Connection.new { ... }
# conn.url_prefix = "https://sushi.com/api?token=abc"
# conn.scheme # => https
# conn.path_prefix # => "/api"
#
# conn.build_url("nigiri?page=2")
# # => https://sushi.com/api/nigiri?token=abc&page=2
#
# conn.build_url("nigiri", page: 2)
# # => https://sushi.com/api/nigiri?token=abc&page=2
#
def build_url(url = nil, extra_params = nil)
uri = build_exclusive_url(url)
query_values = params.dup.merge_query(uri.query, options.params_encoder)
query_values.update(extra_params) if extra_params
uri.query =
if query_values.empty?
nil
else
query_values.to_query(options.params_encoder)
end
uri
end
# Builds and runs the Faraday::Request.
#
# @param method [Symbol] HTTP method.
# @param url [String, URI] String or URI to access.
# @param body [Object] The request body that will eventually be converted to
# a string.
# @param headers [Hash] unencoded HTTP header key/value pairs.
#
# @return [Faraday::Response]
def run_request(method, url, body, headers)
unless METHODS.include?(method)
raise ArgumentError, "unknown http method: #{method}"
end
# Resets temp_proxy
@temp_proxy = proxy_for_request(url)
request = build_request(method) do |req|
req.options = req.options.merge(proxy: @temp_proxy)
req.url(url) if url
req.headers.update(headers) if headers
req.body = body if body
yield(req) if block_given?
end
builder.build_response(self, request)
end
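# Illustrative example of calling run_request above directly (the verb helpers
# delegate here):
#
#   conn.run_request(:get, '/items/1', nil, 'Accept' => 'application/json') do |req|
#     req.params['page'] = 2
#   end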
# Creates and configures the request object.
#
# @param method [Symbol]
#
# @yield [Faraday::Request] if block given
# @return [Faraday::Request]
def build_request(method)
Request.create(method) do |req|
req.params = params.dup
req.headers = headers.dup
req.options = options
yield(req) if block_given?
end
end
# Build an absolute URL based on url_prefix.
#
# @param url [String, URI]
# @param params [Faraday::Utils::ParamsHash] A Faraday::Utils::ParamsHash to
# replace the query values
# of the resulting url (default: nil).
#
# @return [URI]
def build_exclusive_url(url = nil, params = nil, params_encoder = nil)
url = nil if url.respond_to?(:empty?) && url.empty?
base = url_prefix
if url && base.path && base.path !~ %r{/$}
base = base.dup
base.path = base.path + '/' # ensure trailing slash
end
uri = url ? base + url : base
if params
uri.query = params.to_query(params_encoder || options.params_encoder)
end
# rubocop:disable Style/SafeNavigation
uri.query = nil if uri.query && uri.query.empty?
# rubocop:enable Style/SafeNavigation
uri
end
# Creates a duplicate of this Faraday::Connection.
#
# @api private
#
# @return [Faraday::Connection]
def dup
self.class.new(build_exclusive_url,
headers: headers.dup,
params: params.dup,
builder: builder.dup,
ssl: ssl.dup,
request: options.dup)
end
# Yields username and password extracted from a URI if they both exist.
#
# @param uri [URI]
# @yield [username, password] any username and password
# @yieldparam username [String] any username from URI
# @yieldparam password [String] any password from URI
# @return [void]
# @api private
def with_uri_credentials(uri)
return unless uri.user && uri.password
yield(Utils.unescape(uri.user), Utils.unescape(uri.password))
end
def set_authorization_header(header_type, *args)
header = Faraday::Request
.lookup_middleware(header_type)
.header(*args)
headers[Faraday::Request::Authorization::KEY] = header
end
def proxy_from_env(url)
return if Faraday.ignore_env_proxy
uri = nil
if URI.parse('').respond_to?(:find_proxy)
case url
when String
uri = Utils.URI(url)
uri = URI.parse("#{uri.scheme}://#{uri.hostname}").find_proxy
when URI
uri = url.find_proxy
when nil
uri = find_default_proxy
end
else
warn 'no_proxy is unsupported' if ENV['no_proxy'] || ENV['NO_PROXY']
uri = find_default_proxy
end
ProxyOptions.from(uri) if uri
end
def find_default_proxy
uri = ENV['http_proxy']
return unless uri && !uri.empty?
uri = 'http://' + uri if uri !~ /^http/i
uri
end
def proxy_for_request(url)
return proxy if @manual_proxy
if url && Utils.URI(url).absolute?
proxy_from_env(url)
else
proxy
end
end
def support_parallel?(adapter)
adapter&.respond_to?(:supports_parallel?) && adapter&.supports_parallel?
end
end
|
tbuehlmann/ponder | lib/ponder/thaum.rb | Ponder.Thaum.setup_default_callbacks | ruby | def setup_default_callbacks
on :query, /^\001PING \d+\001$/ do |event_data|
time = event_data[:message].scan(/\d+/)[0]
notice event_data[:nick], "\001PING #{time}\001"
end
on :query, /^\001VERSION\001$/ do |event_data|
notice event_data[:nick], "\001VERSION Ponder #{Ponder::VERSION} (https://github.com/tbuehlmann/ponder)\001"
end
on :query, /^\001TIME\001$/ do |event_data|
notice event_data[:nick], "\001TIME #{Time.now.strftime('%a %b %d %H:%M:%S %Y')}\001"
end
on 005 do |event_data|
@isupport.parse event_data[:params]
end
end | Default callbacks for PING, VERSION, TIME and ISUPPORT processing. | train | https://github.com/tbuehlmann/ponder/blob/930912e1b78b41afa1359121aca46197e9edff9c/lib/ponder/thaum.rb#L135-L152 | class Thaum
include IRC
attr_reader :config, :callbacks, :isupport, :channel_list, :user_list, :connection, :loggers
attr_accessor :connected, :deferrables
def initialize(&block)
# default settings
@config = OpenStruct.new(
:server => 'chat.freenode.org',
:port => 6667,
:ssl => false,
:nick => "Ponder#{rand(10_000)}",
:username => 'Ponder',
:real_name => 'Ponder Stibbons',
:verbose => true,
:logging => false,
:reconnect => true,
:reconnect_interval => 30,
:hide_ping_pongs => true,
:rejoin_after_kick => false
)
# custom settings
block.call(@config) if block_given?
# setting up loggers
@console_logger = if @config.verbose
Logging::Twoflogger.new($stdout)
else
Logging::BlindIo.new
end
@logger = if @config.logging
if @config.logger
@config.logger
else
log_path = File.join($0, 'logs', 'log.log')
log_dir = File.dirname(log_path)
FileUtils.mkdir_p(log_dir) unless File.exist?(log_dir)
Logging::Twoflogger.new(log_path, File::WRONLY | File::APPEND)
end
else
Logging::BlindIo.new
end
@loggers = Logging::LoggerList.new
@loggers.push(@console_logger, @logger)
@connected = false
# user callbacks
@callbacks = Hash.new { |hash, key| hash[key] = [] }
# setting up isuport
@isupport = ISupport.new
setup_default_callbacks
setup_channel_and_user_tracking
end
def on(event_type = :channel, match = //, *options, &block)
options = options.extract_options!
callback = Callback.new(match, options, block)
@callbacks[event_type] << callback
callback
end
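# Illustrative example of using #on (handler body omitted); custom callbacks
# are registered the same way as the defaults in setup_default_callbacks below:
#
#   thaum = Ponder::Thaum.new { |config| config.nick = 'Clock' }
#   thaum.on :channel, /^!time$/ do |event_data|
#     # reply via one of the IRC helpers mixed in through `include IRC`
#   end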
def connect
@loggers.info '-- Starting Ponder'
EventMachine::run do
@connection = EventMachine::connect(@config.server, @config.port, Connection, self)
end
end
# parsing incoming traffic
def parse(message)
message.chomp!
if message =~ /^PING \S+$/
if @config.hide_ping_pongs
send_data message.sub(/PING/, 'PONG')
else
@loggers.info "<< #{message}"
raw message.sub(/PING/, 'PONG')
end
else
@loggers.info "<< #{message}"
event_data = IRC::Events::Parser.parse(message, @isupport['CHANTYPES'])
parse_event_data(event_data) unless event_data.empty?
end
end
# Each matching callback will run in its own fiber. So the execution
# of code can be stopped until necessary data (eg from a WHOIS) gets in.
#
# The callback processing is exception handled, so the EM reactor won't die
# from exceptions.
def process_callbacks(event_type, event_data)
@callbacks[event_type].each do |callback|
fiber = Fiber.new do
begin
callback.call(event_data)
rescue => e
@loggers.error "-- #{e.class}: #{e.message}"
e.backtrace.each { |line| @loggers.error("-- #{line}") }
end
end
# If the callback has a :defer option, call it in a thread
# from the EM thread pool. Else call it in the reactor thread.
if callback.options[:defer]
EM.defer { fiber.resume }
else
fiber.resume
end
end
end
private
# parses incoming traffic (types)
def parse_event_data(event_data)
if ((event_data[:type] == 376) || (event_data[:type] == 422)) && !@connected
@connected = true
process_callbacks(:connect, event_data)
end
process_callbacks(event_data[:type], event_data)
end
# Default callbacks for PING, VERSION, TIME and ISUPPORT processing.
def setup_channel_and_user_tracking
@channel_list = ChannelList.new
@user_list = UserList.new
on :connect do
thaum = User.new(@config.nick, self)
@user_list.add(thaum, true)
end
on :join do |event_data|
joined_user = {
:nick => event_data.delete(:nick),
:user => event_data.delete(:user),
:host => event_data.delete(:host)
}
channel = event_data.delete(:channel)
# TODO: Update existing users with user/host information.
# Refactor
user = @user_list.find(joined_user[:nick])
if user
if user.thaum?
channel = Channel.new(channel, self)
channel.get_mode
@channel_list.add channel
else
channel = @channel_list.find(channel)
end
else
channel = @channel_list.find(channel)
user = User.new(joined_user[:nick], self)
@user_list.add user
end
channel.add_user(user, [])
event_data[:join] = IRC::Events::Join.new(user, channel)
end
on 353 do |event_data|
channel_name = event_data[:params].split(' ')[2]
channel = @channel_list.find(channel_name)
nicks_with_prefixes = event_data[:params].scan(/:(.*)/)[0][0].split(' ')
nicks, prefixes = [], []
channel_prefixes = @isupport['PREFIX'].values.map do |p|
Regexp.escape(p)
end.join('|')
nicks_with_prefixes.each do |nick_with_prefixes|
nick = nick_with_prefixes.gsub(/#{channel_prefixes}/, '')
prefixes = nick_with_prefixes.scan(/#{channel_prefixes}/)
user = @user_list.find(nick)
unless user
user = User.new(nick, self)
@user_list.add(user)
end
channel.add_user(user, prefixes)
end
end
on :part do |event_data|
nick = event_data.delete(:nick)
user = event_data.delete(:user)
host = event_data.delete(:host)
channel = event_data.delete(:channel)
message = event_data.delete(:message)
# TODO: Update existing users with user/host information.
user = @user_list.find(nick)
channel = @channel_list.find(channel)
if user && user.thaum?
# Remove the channel from the channel_list.
@channel_list.remove(channel)
# Remove all users from the user_list that do not share channels
# with the Thaum.
all_known_users = @channel_list.channels.map(&:users).flatten
@user_list.kill_zombie_users(all_known_users)
else
channel.remove_user nick
remove_user = @channel_list.channels.none? do |_channel|
_channel.has_user? nick
end
@user_list.remove(nick) if remove_user
end
event_data[:part] = IRC::Events::Part.new(user, channel, message)
end
on :kick do |event_data|
nick = event_data.delete(:nick)
user = event_data.delete(:user)
host = event_data.delete(:host)
channel = event_data.delete(:channel)
victim = event_data.delete(:victim)
message = event_data.delete(:message)
channel = @channel_list.find(channel)
kicker = @user_list.find(nick)
victim = @user_list.find(victim)
channel.remove_user victim.nick
if victim.thaum?
# Remove the channel from the channel_list.
@channel_list.remove(channel)
# Remove all users from the user_list that do not share channels
# with the Thaum.
all_known_users = @channel_list.channels.map(&:users).flatten
@user_list.kill_zombie_users(all_known_users)
else
remove_user = @channel_list.channels.none? do |_channel|
_channel.has_user?(victim)
end
@user_list.remove(victim.nick) if remove_user
end
event_data[:kick] = Ponder::IRC::Events::Kick.new(kicker, victim, channel, message)
end
# If @config.rejoin_after_kick is set to `true`, let
# the Thaum rejoin a channel after being kicked.
on :kick do |event_data|
if @config.rejoin_after_kick && event_data[:kick].victim.thaum?
key = event_data[:kick].channel.modes['k']
event_data[:kick].channel.join key
end
end
on :quit do |event_data|
nick = event_data.delete(:nick)
user = event_data.delete(:user)
host = event_data.delete(:host)
message = event_data.delete(:message)
# TODO: Update existing users with user/host information.
user = @user_list.find nick
if user && user.thaum?
channels = @channel_list.clear
@user_list.clear
else
channels = @channel_list.remove_user(nick)
@user_list.remove nick
end
event_data[:quit] = IRC::Events::Quit.new(user, channels, message)
end
on :disconnect do |event_data|
@channel_list.clear
@user_list.clear
end
on :channel do |event_data|
nick = event_data[:nick]
user = event_data[:user]
host = event_data[:host]
channel = event_data[:channel]
message = event_data[:message]
channel = @channel_list.find channel
user = @user_list.find nick
# TODO: Update existing users with user/host information.
event_data[:message] = IRC::Events::Message.new(user, message, channel)
end
on :query do |event_data|
nick = event_data[:nick]
user = event_data[:user]
host = event_data[:host]
message = event_data[:message]
user = @user_list.find nick
# TODO: Update existing users with user/host information.
event_data[:message] = IRC::Events::Message.new(user, message)
end
on :channel_mode do |event_data|
# TODO: Update existing users with user/host information.
# nick = event_data[:nick]
# user = event_data[:user]
# host = event_data[:host]
channel = event_data.delete(:channel)
nick = event_data.delete(:nick)
params = event_data.delete(:params)
modes = event_data.delete(:modes)
channel = @channel_list.find(channel)
event_data[:channel] = channel
event_data[:user] = @user_list.find(nick)
mode_changes = IRC::Events::ModeParser.parse(modes, params, @isupport)
event_data[:channel_modes] = mode_changes.map do |mode_change|
IRC::Events::ChannelMode.new(mode_change.merge(:channel => channel))
end
event_data[:channel_modes].each do |mode|
channel.set_mode(mode, isupport)
end
end
# Response to MODE command, giving back the channel modes.
on 324 do |event_data|
split = event_data[:params].split(/ /)
channel_name = split[1]
channel = @channel_list.find(channel_name)
if channel
modes = split[2]
params = split[3..-1]
mode_changes = IRC::Events::ModeParser.parse(modes, params, @isupport)
channel_modes = mode_changes.map do |mode_change|
IRC::Events::ChannelMode.new(mode_change.merge(:channel => channel))
end
channel_modes.each do |mode|
channel.set_mode(mode, isupport)
end
end
end
# Response to MODE command, giving back the time,
# the channel was created.
on 329 do |event_data|
split = event_data[:params].split(/ /)
channel_name = split[1]
channel = @channel_list.find(channel_name)
# Only set created_at if the Thaum is on the channel.
if channel
epoch_time = split[2].to_i
channel.created_at = Time.at(epoch_time)
end
end
end
end
|
state-machines/state_machines | lib/state_machines/machine.rb | StateMachines.Machine.write | ruby | def write(object, attribute, value, ivar = false)
attribute = self.attribute(attribute)
ivar ? object.instance_variable_set("@#{attribute}", value) : object.send("#{attribute}=", value)
end | Sets a new value in the given object's attribute.
For example,
class Vehicle
state_machine :initial => :parked do
...
end
end
vehicle = Vehicle.new # => #<Vehicle:0xb7d94ab0 @state="parked">
Vehicle.state_machine.write(vehicle, :state, 'idling') # => Equivalent to vehicle.state = 'idling'
Vehicle.state_machine.write(vehicle, :event, 'park') # => Equivalent to vehicle.state_event = 'park'
vehicle.state # => "idling"
vehicle.event # => "park" | train | https://github.com/state-machines/state_machines/blob/10b03af5fc9245bcb09bbd9c40c58ffba9a85422/lib/state_machines/machine.rb#L1076-L1079 | class Machine
include EvalHelpers
include MatcherHelpers
class << self
# Attempts to find or create a state machine for the given class. For
# example,
#
# StateMachines::Machine.find_or_create(Vehicle)
# StateMachines::Machine.find_or_create(Vehicle, :initial => :parked)
# StateMachines::Machine.find_or_create(Vehicle, :status)
# StateMachines::Machine.find_or_create(Vehicle, :status, :initial => :parked)
#
# If a machine of the given name already exists in one of the class's
# superclasses, then a copy of that machine will be created and stored
# in the new owner class (the original will remain unchanged).
def find_or_create(owner_class, *args, &block)
options = args.last.is_a?(Hash) ? args.pop : {}
name = args.first || :state
# Find an existing machine
machine = owner_class.respond_to?(:state_machines) &&
(args.first && owner_class.state_machines[name] || !args.first &&
owner_class.state_machines.values.first) || nil
if machine
# Only create a new copy if changes are being made to the machine in
# a subclass
if machine.owner_class != owner_class && (options.any? || block_given?)
machine = machine.clone
machine.initial_state = options[:initial] if options.include?(:initial)
machine.owner_class = owner_class
end
# Evaluate DSL
machine.instance_eval(&block) if block_given?
else
# No existing machine: create a new one
machine = new(owner_class, name, options, &block)
end
machine
end
def draw(*)
fail NotImplementedError
end
# Default messages to use for validation errors in ORM integrations
attr_accessor :default_messages
attr_accessor :ignore_method_conflicts
end
@default_messages = {
:invalid => 'is invalid',
:invalid_event => 'cannot transition when %s',
:invalid_transition => 'cannot transition via "%1$s"'
}
# Whether to ignore any conflicts that are detected for helper methods that
# get generated for a machine's owner class. Default is false.
@ignore_method_conflicts = false
# The class that the machine is defined in
attr_reader :owner_class
# The name of the machine, used for scoping methods generated for the
# machine as a whole (not states or events)
attr_reader :name
# The events that trigger transitions. These are sorted, by default, in
# the order in which they were defined.
attr_reader :events
# A list of all of the states known to this state machine. This will pull
# states from the following sources:
# * Initial state
# * State behaviors
# * Event transitions (:to, :from, and :except_from options)
# * Transition callbacks (:to, :from, :except_to, and :except_from options)
# * Unreferenced states (using +other_states+ helper)
#
# These are sorted, by default, in the order in which they were referenced.
attr_reader :states
# The callbacks to invoke before/after a transition is performed
#
# Maps :before => callbacks and :after => callbacks
attr_reader :callbacks
# The action to invoke when an object transitions
attr_reader :action
# An identifier that forces all methods (including state predicates and
# event methods) to be generated with the value prefixed or suffixed,
# depending on the context.
attr_reader :namespace
# Whether the machine will use transactions when firing events
attr_reader :use_transactions
# Creates a new state machine for the given attribute
def initialize(owner_class, *args, &block)
options = args.last.is_a?(Hash) ? args.pop : {}
options.assert_valid_keys(:attribute, :initial, :initialize, :action, :plural, :namespace, :integration, :messages, :use_transactions)
# Find an integration that matches this machine's owner class
if options.include?(:integration)
@integration = options[:integration] && StateMachines::Integrations.find_by_name(options[:integration])
else
@integration = StateMachines::Integrations.match(owner_class)
end
if @integration
extend @integration
options = (@integration.defaults || {}).merge(options)
end
# Add machine-wide defaults
options = {:use_transactions => true, :initialize => true}.merge(options)
# Set machine configuration
@name = args.first || :state
@attribute = options[:attribute] || @name
@events = EventCollection.new(self)
@states = StateCollection.new(self)
@callbacks = {:before => [], :after => [], :failure => []}
@namespace = options[:namespace]
@messages = options[:messages] || {}
@action = options[:action]
@use_transactions = options[:use_transactions]
@initialize_state = options[:initialize]
@action_hook_defined = false
self.owner_class = owner_class
# Merge with sibling machine configurations
add_sibling_machine_configs
# Define class integration
define_helpers
define_scopes(options[:plural])
after_initialize
# Evaluate DSL
instance_eval(&block) if block_given?
self.initial_state = options[:initial] unless sibling_machines.any?
end
# Creates a copy of this machine in addition to copies of each associated
# event/states/callback, so that the modifications to those collections do
# not affect the original machine.
def initialize_copy(orig) #:nodoc:
super
@events = @events.dup
@events.machine = self
@states = @states.dup
@states.machine = self
@callbacks = {:before => @callbacks[:before].dup, :after => @callbacks[:after].dup, :failure => @callbacks[:failure].dup}
end
# Sets the class which is the owner of this state machine. Any methods
# generated by states, events, or other parts of the machine will be defined
# on the given owner class.
def owner_class=(klass)
@owner_class = klass
# Create modules for extending the class with state/event-specific methods
@helper_modules = helper_modules = {:instance => HelperModule.new(self, :instance), :class => HelperModule.new(self, :class)}
owner_class.class_eval do
extend helper_modules[:class]
include helper_modules[:instance]
end
# Add class-/instance-level methods to the owner class for state initialization
unless owner_class < StateMachines::InstanceMethods
owner_class.class_eval do
extend StateMachines::ClassMethods
include StateMachines::InstanceMethods
end
define_state_initializer if @initialize_state
end
# Record this machine as matched to the name in the current owner class.
# This will override any machines mapped to the same name in any superclasses.
owner_class.state_machines[name] = self
end
# Sets the initial state of the machine. This can be either the static name
# of a state or a lambda block which determines the initial state at
# creation time.
def initial_state=(new_initial_state)
@initial_state = new_initial_state
add_states([@initial_state]) unless dynamic_initial_state?
# Update all states to reflect the new initial state
states.each { |state| state.initial = (state.name == @initial_state) }
# Output a warning if there are conflicting initial states for the machine's
# attribute
initial_state = states.detect { |state| state.initial }
if !owner_class_attribute_default.nil? && (dynamic_initial_state? || !owner_class_attribute_default_matches?(initial_state))
warn(
"Both #{owner_class.name} and its #{name.inspect} machine have defined "\
"a different default for \"#{attribute}\". Use only one or the other for "\
"defining defaults to avoid unexpected behaviors."
)
end
end
# Gets the initial state of the machine for the given object. If a dynamic
# initial state was configured for this machine, then the object will be
# passed into the lambda block to help determine the actual state.
#
# == Examples
#
# With a static initial state:
#
# class Vehicle
# state_machine :initial => :parked do
# ...
# end
# end
#
# vehicle = Vehicle.new
# Vehicle.state_machine.initial_state(vehicle) # => #<StateMachines::State name=:parked value="parked" initial=true>
#
# With a dynamic initial state:
#
# class Vehicle
# attr_accessor :force_idle
#
# state_machine :initial => lambda {|vehicle| vehicle.force_idle ? :idling : :parked} do
# ...
# end
# end
#
# vehicle = Vehicle.new
#
# vehicle.force_idle = true
# Vehicle.state_machine.initial_state(vehicle) # => #<StateMachines::State name=:idling value="idling" initial=false>
#
# vehicle.force_idle = false
# Vehicle.state_machine.initial_state(vehicle) # => #<StateMachines::State name=:parked value="parked" initial=false>
def initial_state(object)
states.fetch(dynamic_initial_state? ? evaluate_method(object, @initial_state) : @initial_state) if instance_variable_defined?('@initial_state')
end
# Whether a dynamic initial state is being used in the machine
def dynamic_initial_state?
instance_variable_defined?('@initial_state') && @initial_state.is_a?(Proc)
end
# Initializes the state on the given object. Initial values are only set if
# the machine's attribute hasn't been previously initialized.
#
# Configuration options:
# * <tt>:force</tt> - Whether to initialize the state regardless of its
# current value
# * <tt>:to</tt> - A hash to set the initial value in instead of writing
# directly to the object
def initialize_state(object, options = {})
state = initial_state(object)
if state && (options[:force] || initialize_state?(object))
value = state.value
if hash = options[:to]
hash[attribute.to_s] = value
else
write(object, :state, value)
end
end
end
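# Illustrative sketch of initialize_state above, reusing the Vehicle example
# from the docs (names assumed from those examples):
#
#   vehicle = Vehicle.new
#   machine = Vehicle.state_machine
#   machine.initialize_state(vehicle, :force => true)                   # rewrites the attribute to "parked"
#   machine.initialize_state(vehicle, :force => true, :to => hash = {})
#   hash                                                                 # => {"state" => "parked"}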
# Gets the actual name of the attribute on the machine's owner class that
# stores data with the given name.
def attribute(name = :state)
name == :state ? @attribute : :"#{self.name}_#{name}"
end
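# For example, for a machine using the default name of :state, the method
# above returns:
#
#   machine.attribute         # => :state
#   machine.attribute(:event) # => :state_event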
# Defines a new helper method in an instance or class scope with the given
# name. If the method is already defined in the scope, then this will not
# override it.
#
# If passing in a block, there are two side effects to be aware of
# 1. The method cannot be chained, meaning that the block cannot call +super+
# 2. If the method is already defined in an ancestor, then it will not get
# overridden and a warning will be output.
#
# Example:
#
# # Instance helper
# machine.define_helper(:instance, :state_name) do |machine, object|
# machine.states.match(object).name
# end
#
# # Class helper
# machine.define_helper(:class, :state_machine_name) do |machine, klass|
# "State"
# end
#
# You can also define helpers using string evaluation like so:
#
# # Instance helper
# machine.define_helper :instance, <<-end_eval, __FILE__, __LINE__ + 1
# def state_name
# self.class.state_machine(:state).states.match(self).name
# end
# end_eval
#
# # Class helper
# machine.define_helper :class, <<-end_eval, __FILE__, __LINE__ + 1
# def state_machine_name
# "State"
# end
# end_eval
def define_helper(scope, method, *args, &block)
helper_module = @helper_modules.fetch(scope)
if block_given?
if !self.class.ignore_method_conflicts && conflicting_ancestor = owner_class_ancestor_has_method?(scope, method)
ancestor_name = conflicting_ancestor.name && !conflicting_ancestor.name.empty? ? conflicting_ancestor.name : conflicting_ancestor.to_s
warn "#{scope == :class ? 'Class' : 'Instance'} method \"#{method}\" is already defined in #{ancestor_name}, use generic helper instead or set StateMachines::Machine.ignore_method_conflicts = true."
else
name = self.name
helper_module.class_eval do
define_method(method) do |*block_args|
block.call((scope == :instance ? self.class : self).state_machine(name), self, *block_args)
end
end
end
else
helper_module.class_eval(method, *args)
end
end
# Customizes the definition of one or more states in the machine.
#
# Configuration options:
# * <tt>:value</tt> - The actual value to store when an object transitions
# to the state. Default is the name (stringified).
# * <tt>:cache</tt> - If a dynamic value (via a lambda block) is being used,
# then setting this to true will cache the evaluated result
# * <tt>:if</tt> - Determines whether an object's value matches the state
# (e.g. :value => lambda {Time.now}, :if => lambda {|state| !state.nil?}).
# By default, the configured value is matched.
# * <tt>:human_name</tt> - The human-readable version of this state's name.
# By default, this is either defined by the integration or stringifies the
# name and converts underscores to spaces.
#
# == Customizing the stored value
#
# Whenever a state is automatically discovered in the state machine, its
# default value is assumed to be the stringified version of the name. For
# example,
#
# class Vehicle
# state_machine :initial => :parked do
# event :ignite do
# transition :parked => :idling
# end
# end
# end
#
# In the above state machine, there are two states automatically discovered:
# :parked and :idling. These states, by default, will store their stringified
# equivalents when an object moves into that state (e.g. "parked" / "idling").
#
# For legacy systems or when tying state machines into existing frameworks,
# it's oftentimes necessary to need to store a different value for a state
# than the default. In order to continue taking advantage of an expressive
# state machine and helper methods, every defined state can be re-configured
# with a custom stored value. For example,
#
# class Vehicle
# state_machine :initial => :parked do
# event :ignite do
# transition :parked => :idling
# end
#
# state :idling, :value => 'IDLING'
# state :parked, :value => 'PARKED'
# end
# end
#
# This is also useful if being used in association with a database and,
# instead of storing the state name in a column, you want to store the
# state's foreign key:
#
# class VehicleState < ActiveRecord::Base
# end
#
# class Vehicle < ActiveRecord::Base
# state_machine :attribute => :state_id, :initial => :parked do
# event :ignite do
# transition :parked => :idling
# end
#
# states.each do |state|
# self.state(state.name, :value => lambda { VehicleState.find_by_name(state.name.to_s).id }, :cache => true)
# end
# end
# end
#
# In the above example, each known state is configured to store its
# associated database id in the +state_id+ attribute. Also, notice that a
# lambda block is used to define the state's value. This is required in
# situations (like testing) where the model is loaded without any existing
# data (i.e. no VehicleState records available).
#
# One caveat to the above example is to keep performance in mind. To avoid
# constant db hits for looking up the VehicleState ids, the value is cached
# by specifying the <tt>:cache</tt> option. Alternatively, a custom
# caching strategy can be used like so:
#
# class VehicleState < ActiveRecord::Base
# cattr_accessor :cache_store
# self.cache_store = ActiveSupport::Cache::MemoryStore.new
#
# def self.find_by_name(name)
# cache_store.fetch(name) { find(:first, :conditions => {:name => name}) }
# end
# end
#
# === Dynamic values
#
# In addition to customizing states with other value types, lambda blocks
# can also be specified to allow for a state's value to be determined
# dynamically at runtime. For example,
#
# class Vehicle
# state_machine :purchased_at, :initial => :available do
# event :purchase do
# transition all => :purchased
# end
#
# event :restock do
# transition all => :available
# end
#
# state :available, :value => nil
# state :purchased, :if => lambda {|value| !value.nil?}, :value => lambda {Time.now}
# end
# end
#
# In the above definition, the <tt>:purchased</tt> state is customized with
# both a dynamic value *and* a value matcher.
#
# When an object transitions to the purchased state, the value's lambda
# block will be called. This will get the current time and store it in the
# object's +purchased_at+ attribute.
#
# *Note* that the custom matcher is very important here. Since there's no
# way for the state machine to figure out an object's state when it's set to
# a runtime value, it must be explicitly defined. If the <tt>:if</tt> option
# were not configured for the state, then an ArgumentError exception would
# be raised at runtime, indicating that the state machine could not figure
# out what the current state of the object was.
#
# == Behaviors
#
# Behaviors define a series of methods to mixin with objects when the current
# state matches the given one(s). This allows instance methods to behave
# a specific way depending on what the value of the object's state is.
#
# For example,
#
# class Vehicle
# attr_accessor :driver
# attr_accessor :passenger
#
# state_machine :initial => :parked do
# event :ignite do
# transition :parked => :idling
# end
#
# state :parked do
# def speed
# 0
# end
#
# def rotate_driver
# driver = self.driver
# self.driver = passenger
# self.passenger = driver
# true
# end
# end
#
# state :idling, :first_gear do
# def speed
# 20
# end
#
# def rotate_driver
# self.state = 'parked'
# rotate_driver
# end
# end
#
# other_states :backing_up
# end
# end
#
# In the above example, there are two dynamic behaviors defined for the
# class:
# * +speed+
# * +rotate_driver+
#
# Each of these behaviors are instance methods on the Vehicle class. However,
# which method actually gets invoked is based on the current state of the
# object. Using the above class as the example:
#
# vehicle = Vehicle.new
# vehicle.driver = 'John'
# vehicle.passenger = 'Jane'
#
# # Behaviors in the "parked" state
# vehicle.state # => "parked"
# vehicle.speed # => 0
# vehicle.rotate_driver # => true
# vehicle.driver # => "Jane"
# vehicle.passenger # => "John"
#
# vehicle.ignite # => true
#
# # Behaviors in the "idling" state
# vehicle.state # => "idling"
# vehicle.speed # => 20
# vehicle.rotate_driver # => true
# vehicle.driver # => "John"
# vehicle.passenger # => "Jane"
#
# As can be seen, both the +speed+ and +rotate_driver+ instance method
# implementations changed how they behave based on what the current state
# of the vehicle was.
#
# === Invalid behaviors
#
# If a specific behavior has not been defined for a state, then a
# NoMethodError exception will be raised, indicating that that method would
# not normally exist for an object with that state.
#
# Using the example from before:
#
# vehicle = Vehicle.new
# vehicle.state = 'backing_up'
# vehicle.speed # => NoMethodError: undefined method 'speed' for #<Vehicle:0xb7d296ac> in state "backing_up"
#
# === Using matchers
#
# The +all+ / +any+ matchers can be used to easily define behaviors for a
# group of states. Note, however, that you cannot use these matchers to
# set configurations for states. Behaviors using these matchers can be
# defined at any point in the state machine and will always get applied to
# the proper states.
#
# For example:
#
# state_machine :initial => :parked do
# ...
#
# state all - [:parked, :idling, :stalled] do
# validates_presence_of :speed
#
# def speed
# gear * 10
# end
# end
# end
#
# == State-aware class methods
#
# In addition to defining scopes for instance methods that are state-aware,
# the same can be done for certain types of class methods.
#
# Some libraries have support for class-level methods that only run certain
# behaviors based on a conditions hash passed in. For example:
#
# class Vehicle < ActiveRecord::Base
# state_machine do
# ...
# state :first_gear, :second_gear, :third_gear do
# validates_presence_of :speed
# validates_inclusion_of :speed, :in => 0..25, :if => :in_school_zone?
# end
# end
# end
#
# In the above ActiveRecord model, two validations have been defined which
# will *only* run when the Vehicle object is in one of the three states:
# +first_gear+, +second_gear+, or +third_gear+. Notice, also, that if/unless
# conditions can continue to be used.
#
# This functionality is not library-specific and can work for any class-level
# method that is defined like so:
#
# def validates_presence_of(attribute, options = {})
# ...
# end
#
# The minimum requirement is that the last argument in the method be an
# options hash which contains at least <tt>:if</tt> condition support.
def state(*names, &block)
options = names.last.is_a?(Hash) ? names.pop : {}
options.assert_valid_keys(:value, :cache, :if, :human_name)
# Store the context so that it can be used for / matched against any state
# that gets added
@states.context(names, &block) if block_given?
if names.first.is_a?(Matcher)
# Add any states referenced in the matcher. When matchers are used,
# states are not allowed to be configured.
raise ArgumentError, "Cannot configure states when using matchers (using #{options.inspect})" if options.any?
states = add_states(names.first.values)
else
states = add_states(names)
# Update the configuration for the state(s)
states.each do |state|
if options.include?(:value)
state.value = options[:value]
self.states.update(state)
end
state.human_name = options[:human_name] if options.include?(:human_name)
state.cache = options[:cache] if options.include?(:cache)
state.matcher = options[:if] if options.include?(:if)
end
end
states.length == 1 ? states.first : states
end
alias_method :other_states, :state
# Gets the current value stored in the given object's attribute.
#
# For example,
#
# class Vehicle
# state_machine :initial => :parked do
# ...
# end
# end
#
# vehicle = Vehicle.new # => #<Vehicle:0xb7d94ab0 @state="parked">
# Vehicle.state_machine.read(vehicle, :state) # => "parked" # Equivalent to vehicle.state
# Vehicle.state_machine.read(vehicle, :event) # => nil # Equivalent to vehicle.state_event
def read(object, attribute, ivar = false)
attribute = self.attribute(attribute)
if ivar
object.instance_variable_defined?("@#{attribute}") ? object.instance_variable_get("@#{attribute}") : nil
else
object.send(attribute)
end
end
# Sets a new value in the given object's attribute.
#
# For example,
#
# class Vehicle
# state_machine :initial => :parked do
# ...
# end
# end
#
# vehicle = Vehicle.new # => #<Vehicle:0xb7d94ab0 @state="parked">
# Vehicle.state_machine.write(vehicle, :state, 'idling') # => Equivalent to vehicle.state = 'idling'
# Vehicle.state_machine.write(vehicle, :event, 'park') # => Equivalent to vehicle.state_event = 'park'
# vehicle.state # => "idling"
# vehicle.event # => "park"
# Defines one or more events for the machine and the transitions that can
# be performed when those events are run.
#
# This method is also aliased as +on+ for improved compatibility with
# using a domain-specific language.
#
# Configuration options:
# * <tt>:human_name</tt> - The human-readable version of this event's name.
# By default, this is either defined by the integration or stringifies the
# name and converts underscores to spaces.
#
# == Instance methods
#
# The following instance methods are generated when a new event is defined
# (the "park" event is used as an example):
# * <tt>park(..., run_action = true)</tt> - Fires the "park" event,
# transitioning from the current state to the next valid state. If the
# last argument is a boolean, it will control whether the machine's action
# gets run.
# * <tt>park!(..., run_action = true)</tt> - Fires the "park" event,
# transitioning from the current state to the next valid state. If the
# transition fails, then a StateMachines::InvalidTransition error will be
# raised. If the last argument is a boolean, it will control whether the
# machine's action gets run.
# * <tt>can_park?(requirements = {})</tt> - Checks whether the "park" event
# can be fired given the current state of the object. This will *not* run
# validations or callbacks in ORM integrations. It will only determine if
# the state machine defines a valid transition for the event. To check
# whether an event can fire *and* passes validations, use event attributes
# (e.g. state_event) as described in the "Events" documentation of each
# ORM integration.
# * <tt>park_transition(requirements = {})</tt> - Gets the next transition
# that would be performed if the "park" event were to be fired now on the
# object or nil if no transitions can be performed. Like <tt>can_park?</tt>
# this will also *not* run validations or callbacks. It will only
# determine if the state machine defines a valid transition for the event.
#
# With a namespace of "car", the above names map to the following methods:
# * <tt>can_park_car?</tt>
# * <tt>park_car_transition</tt>
# * <tt>park_car</tt>
# * <tt>park_car!</tt>
#
# The <tt>can_park?</tt> and <tt>park_transition</tt> helpers both take an
# optional set of requirements for determining what transitions are available
# for the current object. These requirements include:
# * <tt>:from</tt> - One or more states to transition from. If none are
# specified, then this will be the object's current state.
# * <tt>:to</tt> - One or more states to transition to. If none are
# specified, then this will match any to state.
# * <tt>:guard</tt> - Whether to guard transitions with the if/unless
# conditionals defined for each one. Default is true.
#
# == Defining transitions
#
# +event+ requires a block which allows you to define the possible
# transitions that can happen as a result of that event. For example,
#
# event :park, :stop do
# transition :idling => :parked
# end
#
# event :first_gear do
# transition :parked => :first_gear, :if => :seatbelt_on?
# transition :parked => same # Allow to loopback if seatbelt is off
# end
#
# See StateMachines::Event#transition for more information on
# the possible options that can be passed in.
#
# *Note* that this block is executed within the context of the actual event
# object. As a result, you will not be able to reference any class methods
# on the model without referencing the class itself. For example,
#
# class Vehicle
# def self.safe_states
# [:parked, :idling, :stalled]
# end
#
# state_machine do
# event :park do
# transition Vehicle.safe_states => :parked
# end
# end
# end
#
# == Overriding the event method
#
# By default, this will define an instance method (with the same name as the
# event) that will fire the next possible transition for that. Although the
# +before_transition+, +after_transition+, and +around_transition+ hooks
# allow you to define behavior that gets executed as a result of the event's
# transition, you can also override the event method in order to have a
# little more fine-grained control.
#
# For example:
#
# class Vehicle
# state_machine do
# event :park do
# ...
# end
# end
#
# def park(*)
# take_deep_breath # Executes before the transition (and before_transition hooks) even if no transition is possible
# if result = super # Runs the transition and all before/after/around hooks
# applaud # Executes after the transition (and after_transition hooks)
# end
# result
# end
# end
#
# There are a few important things to note here. First, the method
# signature is defined with an unlimited argument list in order to allow
# callers to continue passing arguments that are expected by state_machine.
# For example, it will still allow calls to +park+ with a single parameter
# for skipping the configured action.
#
# Second, the overridden event method must call +super+ in order to run the
# logic for running the next possible transition. In order to remain
# consistent with other events, the result of +super+ is returned.
#
# Third, any behavior defined in this method will *not* get executed if
# you're taking advantage of attribute-based event transitions. For example:
#
# vehicle = Vehicle.new
# vehicle.state_event = 'park'
# vehicle.save
#
# In this case, the +park+ event will run the before/after/around transition
# hooks and transition the state, but the behavior defined in the overridden
# +park+ method will *not* be executed.
#
# == Defining additional arguments
#
# Additional arguments can be passed into events and accessed by transition
# hooks like so:
#
# class Vehicle
# state_machine do
# after_transition :on => :park do |vehicle, transition|
# kind = *transition.args # :parallel
# ...
# end
# after_transition :on => :park, :do => :take_deep_breath
#
# event :park do
# ...
# end
#
# def take_deep_breath(transition)
# kind = *transition.args # :parallel
# ...
# end
# end
# end
#
# vehicle = Vehicle.new
# vehicle.park(:parallel)
#
# *Remember* that if the last argument is a boolean, it will be used as the
# +run_action+ parameter to the event action. Using the +park+ action
# example from above, you might call it like so:
#
# vehicle.park # => Uses default args and runs machine action
# vehicle.park(:parallel) # => Specifies the +kind+ argument and runs the machine action
# vehicle.park(:parallel, false) # => Specifies the +kind+ argument and *skips* the machine action
#
# If you decide to override the +park+ event method *and* define additional
# arguments, you can do so as shown below:
#
# class Vehicle
# state_machine do
# event :park do
# ...
# end
# end
#
# def park(kind = :parallel, *args)
# take_deep_breath if kind == :parallel
# super
# end
# end
#
# Note that +super+ is called instead of <tt>super(*args)</tt>. This allows
# the entire arguments list to be accessed by transition callbacks through
# StateMachines::Transition#args.
#
# === Using matchers
#
# The +all+ / +any+ matchers can be used to easily execute blocks for a
# group of events. Note, however, that you cannot use these matchers to
# set configurations for events. Blocks using these matchers can be
# defined at any point in the state machine and will always get applied to
# the proper events.
#
# For example:
#
# state_machine :initial => :parked do
# ...
#
# event all - [:crash] do
# transition :stalled => :parked
# end
# end
#
# == Example
#
# class Vehicle
# state_machine do
# # The park, stop, and halt events will all share the given transitions
# event :park, :stop, :halt do
# transition [:idling, :backing_up] => :parked
# end
#
# event :stop do
# transition :first_gear => :idling
# end
#
# event :ignite do
# transition :parked => :idling
# transition :idling => same # Allow ignite while still idling
# end
# end
# end
def event(*names, &block)
options = names.last.is_a?(Hash) ? names.pop : {}
options.assert_valid_keys(:human_name)
# Store the context so that it can be used for / matched against any event
# that gets added
@events.context(names, &block) if block_given?
if names.first.is_a?(Matcher)
# Add any events referenced in the matcher. When matchers are used,
# events are not allowed to be configured.
raise ArgumentError, "Cannot configure events when using matchers (using #{options.inspect})" if options.any?
events = add_events(names.first.values)
else
events = add_events(names)
# Update the configuration for the event(s)
events.each do |event|
event.human_name = options[:human_name] if options.include?(:human_name)
# Add any states that may have been referenced within the event
add_states(event.known_states)
end
end
events.length == 1 ? events.first : events
end
alias_method :on, :event
# Creates a new transition that determines what to change the current state
# to when an event fires.
#
# == Defining transitions
#
# The options for a new transition uses the Hash syntax to map beginning
# states to ending states. For example,
#
# transition :parked => :idling, :idling => :first_gear, :on => :ignite
#
# In this case, when the +ignite+ event is fired, this transition will cause
# the state to be +idling+ if it's current state is +parked+ or +first_gear+
# if it's current state is +idling+.
#
# To help define these implicit transitions, a set of helpers are available
# for slightly more complex matching:
# * <tt>all</tt> - Matches every state in the machine
# * <tt>all - [:parked, :idling, ...]</tt> - Matches every state except those specified
# * <tt>any</tt> - An alias for +all+ (matches every state in the machine)
# * <tt>same</tt> - Matches the same state being transitioned from
#
# See StateMachines::MatcherHelpers for more information.
#
# Examples:
#
# transition all => nil, :on => :ignite # Transitions to nil regardless of the current state
# transition all => :idling, :on => :ignite # Transitions to :idling regardless of the current state
# transition all - [:idling, :first_gear] => :idling, :on => :ignite # Transitions every state but :idling and :first_gear to :idling
# transition nil => :idling, :on => :ignite # Transitions to :idling from the nil state
# transition :parked => :idling, :on => :ignite # Transitions to :idling if :parked
# transition [:parked, :stalled] => :idling, :on => :ignite # Transitions to :idling if :parked or :stalled
#
# transition :parked => same, :on => :park # Loops :parked back to :parked
# transition [:parked, :stalled] => same, :on => [:park, :stall] # Loops either :parked or :stalled back to the same state on the park and stall events
# transition all - :parked => same, :on => :noop # Loops every state but :parked back to the same state
#
# # Transitions to :idling if :parked, :first_gear if :idling, or :second_gear if :first_gear
# transition :parked => :idling, :idling => :first_gear, :first_gear => :second_gear, :on => :shift_up
#
# == Verbose transitions
#
# Transitions can also be defined using an explicit set of configuration
# options:
# * <tt>:from</tt> - A state or array of states that can be transitioned from.
# If not specified, then the transition can occur for *any* state.
# * <tt>:to</tt> - The state that's being transitioned to. If not specified,
# then the transition will simply loop back (i.e. the state will not change).
# * <tt>:except_from</tt> - A state or array of states that *cannot* be
# transitioned from.
#
# These options must be used when defining transitions within the context
# of a state.
#
# Examples:
#
# transition :to => nil, :on => :park
# transition :to => :idling, :on => :ignite
# transition :except_from => [:idling, :first_gear], :to => :idling, :on => :ignite
# transition :from => nil, :to => :idling, :on => :ignite
# transition :from => [:parked, :stalled], :to => :idling, :on => :ignite
#
# == Conditions
#
# In addition to the state requirements for each transition, a condition
# can also be defined to help determine whether that transition is
# available. These options will work on both the normal and verbose syntax.
#
# Configuration options:
# * <tt>:if</tt> - A method, proc or string to call to determine if the
# transition should occur (e.g. :if => :moving?, or :if => lambda {|vehicle| vehicle.speed > 60}).
# The condition should return or evaluate to true or false.
# * <tt>:unless</tt> - A method, proc or string to call to determine if the
# transition should not occur (e.g. :unless => :stopped?, or :unless => lambda {|vehicle| vehicle.speed <= 60}).
# The condition should return or evaluate to true or false.
#
# Examples:
#
# transition :parked => :idling, :on => :ignite, :if => :moving?
# transition :parked => :idling, :on => :ignite, :unless => :stopped?
# transition :idling => :first_gear, :first_gear => :second_gear, :on => :shift_up, :if => :seatbelt_on?
#
# transition :from => :parked, :to => :idling, :on => ignite, :if => :moving?
# transition :from => :parked, :to => :idling, :on => ignite, :unless => :stopped?
#
# == Order of operations
#
# Transitions are evaluated in the order in which they're defined. As a
# result, if more than one transition applies to a given object, then the
# first transition that matches will be performed.
def transition(options)
raise ArgumentError, 'Must specify :on event' unless options[:on]
branches = []
options = options.dup
event(*Array(options.delete(:on))) { branches << transition(options) }
branches.length == 1 ? branches.first : branches
end
# Creates a callback that will be invoked *before* a transition is
# performed so long as the given requirements match the transition.
#
# == The callback
#
# Callbacks must be defined as either an argument, in the :do option, or
# as a block. For example,
#
# class Vehicle
# state_machine do
# before_transition :set_alarm
# before_transition :set_alarm, all => :parked
# before_transition all => :parked, :do => :set_alarm
# before_transition all => :parked do |vehicle, transition|
# vehicle.set_alarm
# end
# ...
# end
# end
#
# Notice that the first three callbacks are the same in terms of how the
# methods to invoke are defined. However, using the <tt>:do</tt> can
# provide for a more fluid DSL.
#
# In addition, multiple callbacks can be defined like so:
#
# class Vehicle
# state_machine do
# before_transition :set_alarm, :lock_doors, all => :parked
# before_transition all => :parked, :do => [:set_alarm, :lock_doors]
# before_transition :set_alarm do |vehicle, transition|
# vehicle.lock_doors
# end
# end
# end
#
# Notice that the different ways of configuring methods can be mixed.
#
# == State requirements
#
# Callbacks can require that the machine be transitioning from and to
# specific states. These requirements use a Hash syntax to map beginning
# states to ending states. For example,
#
# before_transition :parked => :idling, :idling => :first_gear, :do => :set_alarm
#
# In this case, the +set_alarm+ callback will only be called if the machine
# is transitioning from +parked+ to +idling+ or from +idling+ to +first_gear+.
#
# To help define state requirements, a set of helpers are available for
# slightly more complex matching:
# * <tt>all</tt> - Matches every state/event in the machine
# * <tt>all - [:parked, :idling, ...]</tt> - Matches every state/event except those specified
# * <tt>any</tt> - An alias for +all+ (matches every state/event in the machine)
# * <tt>same</tt> - Matches the same state being transitioned from
#
# See StateMachines::MatcherHelpers for more information.
#
# Examples:
#
# before_transition :parked => [:idling, :first_gear], :do => ... # Matches from parked to idling or first_gear
# before_transition all - [:parked, :idling] => :idling, :do => ... # Matches from every state except parked and idling to idling
# before_transition all => :parked, :do => ... # Matches all states to parked
# before_transition any => same, :do => ... # Matches every loopback
#
# == Event requirements
#
# In addition to state requirements, an event requirement can be defined so
# that the callback is only invoked on specific events using the +on+
# option. This can also use the same matcher helpers as the state
# requirements.
#
# Examples:
#
# before_transition :on => :ignite, :do => ... # Matches only on ignite
# before_transition :on => all - :ignite, :do => ... # Matches on every event except ignite
# before_transition :parked => :idling, :on => :ignite, :do => ... # Matches from parked to idling on ignite
#
# == Verbose Requirements
#
# Requirements can also be defined using verbose options rather than the
# implicit Hash syntax and helper methods described above.
#
# Configuration options:
# * <tt>:from</tt> - One or more states being transitioned from. If none
# are specified, then all states will match.
# * <tt>:to</tt> - One or more states being transitioned to. If none are
# specified, then all states will match.
# * <tt>:on</tt> - One or more events that fired the transition. If none
# are specified, then all events will match.
# * <tt>:except_from</tt> - One or more states *not* being transitioned from
# * <tt>:except_to</tt> - One or more states *not* being transitioned to
# * <tt>:except_on</tt> - One or more events that *did not* fire the transition
#
# Examples:
#
# before_transition :from => :ignite, :to => :idling, :on => :park, :do => ...
# before_transition :except_from => :ignite, :except_to => :idling, :except_on => :park, :do => ...
#
# == Conditions
#
# In addition to the state/event requirements, a condition can also be
# defined to help determine whether the callback should be invoked.
#
# Configuration options:
# * <tt>:if</tt> - A method, proc or string to call to determine if the
# callback should occur (e.g. :if => :allow_callbacks, or
# :if => lambda {|user| user.signup_step > 2}). The method, proc or string
# should return or evaluate to a true or false value.
# * <tt>:unless</tt> - A method, proc or string to call to determine if the
# callback should not occur (e.g. :unless => :skip_callbacks, or
# :unless => lambda {|user| user.signup_step <= 2}). The method, proc or
# string should return or evaluate to a true or false value.
#
# Examples:
#
# before_transition :parked => :idling, :if => :moving?, :do => ...
# before_transition :on => :ignite, :unless => :seatbelt_on?, :do => ...
#
# == Accessing the transition
#
# In addition to passing the object being transitioned, the actual
# transition describing the context (e.g. event, from, to) can be accessed
# as well. This additional argument is only passed if the callback allows
# for it.
#
# For example,
#
# class Vehicle
# # Only specifies one parameter (the object being transitioned)
# before_transition all => :parked do |vehicle|
# vehicle.set_alarm
# end
#
# # Specifies 2 parameters (object being transitioned and actual transition)
# before_transition all => :parked do |vehicle, transition|
# vehicle.set_alarm(transition)
# end
# end
#
# *Note* that the object in the callback will only be passed in as an
# argument if callbacks are configured to *not* be bound to the object
# involved. This is the default and may change on a per-integration basis.
#
# See StateMachines::Transition for more information about the
# attributes available on the transition.
#
# == Usage with delegates
#
# As noted above, state_machine uses the callback method's argument list
# arity to determine whether to include the transition in the method call.
# If you're using delegates, such as those defined in ActiveSupport or
# Forwardable, the actual arity of the delegated method gets masked. This
# means that callbacks which reference delegates will always get passed the
# transition as an argument. For example:
#
# class Vehicle
# extend Forwardable
# delegate :refresh => :dashboard
#
# state_machine do
# before_transition :refresh
# ...
# end
#
# def dashboard
# @dashboard ||= Dashboard.new
# end
# end
#
# class Dashboard
# def refresh(transition)
# # ...
# end
# end
#
# In the above example, <tt>Dashboard#refresh</tt> *must* define a
# +transition+ argument. Otherwise, an +ArgumentError+ exception will get
# raised. The only way around this is to avoid the use of delegates and
# manually define the delegate method so that the correct arity is used.
#
# == Examples
#
# Below is an example of a class with one state machine and various types
# of +before+ transitions defined for it:
#
# class Vehicle
# state_machine do
# # Before all transitions
# before_transition :update_dashboard
#
# # Before specific transition:
# before_transition [:first_gear, :idling] => :parked, :on => :park, :do => :take_off_seatbelt
#
# # With conditional callback:
# before_transition all => :parked, :do => :take_off_seatbelt, :if => :seatbelt_on?
#
# # Using helpers:
# before_transition all - :stalled => same, :on => any - :crash, :do => :update_dashboard
# ...
# end
# end
#
# As can be seen, any number of transitions can be created using various
# combinations of configuration options.
def before_transition(*args, &block)
options = (args.last.is_a?(Hash) ? args.pop : {})
options[:do] = args if args.any?
add_callback(:before, options, &block)
end
# Creates a callback that will be invoked *after* a transition is
# performed so long as the given requirements match the transition.
#
# See +before_transition+ for a description of the possible configurations
# for defining callbacks.
def after_transition(*args, &block)
options = (args.last.is_a?(Hash) ? args.pop : {})
options[:do] = args if args.any?
add_callback(:after, options, &block)
end
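# Example (sketch): an after callback that records which event parked the
# vehicle; the logger call is illustrative and assumes the object exposes one
#
#   class Vehicle
#     state_machine do
#       after_transition all => :parked do |vehicle, transition|
#         vehicle.logger.info "parked via #{transition.event}"
#       end
#     end
#   end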
# Creates a callback that will be invoked *around* a transition so long as
# the given requirements match the transition.
#
# == The callback
#
# Around callbacks wrap transitions, executing code both before and after.
# These callbacks are defined in the exact same manner as before / after
# callbacks with the exception that the transition must be yielded to in
# order to finish running it.
#
# If defining +around+ callbacks using blocks, you must yield within the
# transition by directly calling the block (since yielding is not allowed
# within blocks).
#
# For example,
#
# class Vehicle
# state_machine do
# around_transition do |block|
# Benchmark.measure { block.call }
# end
#
# around_transition do |vehicle, block|
# logger.info "vehicle was #{state}..."
# block.call
# logger.info "...and is now #{state}"
# end
#
# around_transition do |vehicle, transition, block|
# logger.info "before #{transition.event}: #{vehicle.state}"
# block.call
# logger.info "after #{transition.event}: #{vehicle.state}"
# end
# end
# end
#
# Notice that referencing the block is similar to doing so within an
# actual method definition in that it is always the last argument.
#
# On the other hand, if you're defining +around+ callbacks using method
# references, you can yield like normal:
#
# class Vehicle
# state_machine do
# around_transition :benchmark
# ...
# end
#
# def benchmark
# Benchmark.measure { yield }
# end
# end
#
# See +before_transition+ for a description of the possible configurations
# for defining callbacks.
def around_transition(*args, &block)
options = (args.last.is_a?(Hash) ? args.pop : {})
options[:do] = args if args.any?
add_callback(:around, options, &block)
end
# Creates a callback that will be invoked *after* a transition fails to
# be performed so long as the given requirements match the transition.
#
# See +before_transition+ for a description of the possible configurations
# for defining callbacks. *Note* however that you cannot define the state
# requirements in these callbacks. You may only define event requirements.
#
# = The callback
#
# Failure callbacks get invoked whenever an event fails to execute. This
# can happen when no transition is available, a +before+ callback halts
# execution, or the action associated with this machine fails to succeed.
# In any of these cases, any failure callback that matches the attempted
# transition will be run.
#
# For example,
#
# class Vehicle
# state_machine do
# after_failure do |vehicle, transition|
# logger.error "vehicle #{vehicle} failed to transition on #{transition.event}"
# end
#
# after_failure :on => :ignite, :do => :log_ignition_failure
#
# ...
# end
# end
def after_failure(*args, &block)
options = (args.last.is_a?(Hash) ? args.pop : {})
options[:do] = args if args.any?
options.assert_valid_keys(:on, :do, :if, :unless)
add_callback(:failure, options, &block)
end
# Generates a list of the possible transition sequences that can be run on
# the given object. These paths can reveal all of the possible states and
# events that can be encountered in the object's state machine based on the
# object's current state.
#
# Configuration options:
# * +from+ - The initial state to start all paths from. By default, this
# is the object's current state.
# * +to+ - The target state to end all paths on. By default, paths will
# end when they loop back to the first transition on the path.
# * +deep+ - Whether to allow the target state to be crossed more than once
# in a path. By default, paths will immediately stop when the target
# state (if specified) is reached. If this is enabled, then paths can
# continue even after reaching the target state; they will stop when
# reaching the target state a second time.
#
# *Note* that the object is never modified when the list of paths is
# generated.
#
# == Examples
#
# class Vehicle
# state_machine :initial => :parked do
# event :ignite do
# transition :parked => :idling
# end
#
# event :shift_up do
# transition :idling => :first_gear, :first_gear => :second_gear
# end
#
# event :shift_down do
# transition :second_gear => :first_gear, :first_gear => :idling
# end
# end
# end
#
# vehicle = Vehicle.new # => #<Vehicle:0xb7c27024 @state="parked">
# vehicle.state # => "parked"
#
# vehicle.state_paths
# # => [
# # [#<StateMachines::Transition attribute=:state event=:ignite from="parked" from_name=:parked to="idling" to_name=:idling>,
# # #<StateMachines::Transition attribute=:state event=:shift_up from="idling" from_name=:idling to="first_gear" to_name=:first_gear>,
# # #<StateMachines::Transition attribute=:state event=:shift_up from="first_gear" from_name=:first_gear to="second_gear" to_name=:second_gear>,
# # #<StateMachines::Transition attribute=:state event=:shift_down from="second_gear" from_name=:second_gear to="first_gear" to_name=:first_gear>,
# # #<StateMachines::Transition attribute=:state event=:shift_down from="first_gear" from_name=:first_gear to="idling" to_name=:idling>],
# #
# # [#<StateMachines::Transition attribute=:state event=:ignite from="parked" from_name=:parked to="idling" to_name=:idling>,
# # #<StateMachines::Transition attribute=:state event=:shift_up from="idling" from_name=:idling to="first_gear" to_name=:first_gear>,
# # #<StateMachines::Transition attribute=:state event=:shift_down from="first_gear" from_name=:first_gear to="idling" to_name=:idling>]
# # ]
#
# vehicle.state_paths(:from => :parked, :to => :second_gear)
# # => [
# # [#<StateMachines::Transition attribute=:state event=:ignite from="parked" from_name=:parked to="idling" to_name=:idling>,
# # #<StateMachines::Transition attribute=:state event=:shift_up from="idling" from_name=:idling to="first_gear" to_name=:first_gear>,
# # #<StateMachines::Transition attribute=:state event=:shift_up from="first_gear" from_name=:first_gear to="second_gear" to_name=:second_gear>]
# # ]
#
# In addition to getting the possible paths that can be accessed, you can
# also get summary information about the states / events that can be
# accessed at some point along one of the paths. For example:
#
# # Get the list of states that can be accessed from the current state
# vehicle.state_paths.to_states # => [:idling, :first_gear, :second_gear]
#
# # Get the list of events that can be accessed from the current state
# vehicle.state_paths.events # => [:ignite, :shift_up, :shift_down]
def paths_for(object, requirements = {})
PathCollection.new(object, self, requirements)
end
# Marks the given object as invalid with the given message.
#
# By default, this is a no-op.
def invalidate(_object, _attribute, _message, _values = [])
end
# Gets a description of the errors for the given object. This is used to
# provide more detailed information when an InvalidTransition exception is
# raised.
def errors_for(_object)
''
end
# Resets any errors previously added when invalidating the given object.
#
# By default, this is a no-op.
def reset(_object)
end
# Generates the message to use when invalidating the given object after
# failing to transition on a specific event
def generate_message(name, values = [])
message = (@messages[name] || self.class.default_messages[name])
# Check whether there are actually any values to interpolate to avoid
# any warnings
if message.scan(/%./).any? { |match| match != '%%' }
message % values.map { |value| value.last }
else
message
end
end
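# Example (sketch): interpolating values into a failure message; the exact
# wording comes from the machine's :messages option or the default_messages
#
#   machine.generate_message(:invalid_event, [[:state, 'parked']])
#   # => e.g. "cannot transition when parked"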
# Runs a transaction, rolling back any changes if the yielded block fails.
#
# This is only applicable to integrations that involve databases. By
# default, this will not run any transactions since the changes aren't
# taking place within the context of a database.
def within_transaction(object)
if use_transactions
transaction(object) { yield }
else
yield
end
end
def draw(*)
fail NotImplementedError
end
# Determines whether an action hook was defined for firing attribute-based
# event transitions when the configured action gets called.
def action_hook?(self_only = false)
@action_hook_defined || !self_only && owner_class.state_machines.any? { |name, machine| machine.action == action && machine != self && machine.action_hook?(true) }
end
protected
# Runs additional initialization hooks. By default, this is a no-op.
def after_initialize
end
# Looks up other machines that have been defined in the owner class and
# are targeting the same attribute as this machine. When accessing
# sibling machines, they will be automatically copied for the current
# class if they haven't been already. This ensures that any configuration
# changes made to the sibling machines only affect this class and not any
# base class that may have originally defined the machine.
def sibling_machines
owner_class.state_machines.inject([]) do |machines, (name, machine)|
if machine.attribute == attribute && machine != self
machines << (owner_class.state_machine(name) {})
end
machines
end
end
# Determines if the machine's attribute needs to be initialized. This
# will only be true if the machine's attribute is blank.
def initialize_state?(object)
value = read(object, :state)
(value.nil? || value.respond_to?(:empty?) && value.empty?) && !states[value, :value]
end
# Adds helper methods for interacting with the state machine, including
# for states, events, and transitions
def define_helpers
define_state_accessor
define_state_predicate
define_event_helpers
define_path_helpers
define_action_helpers if define_action_helpers?
define_name_helpers
end
# Defines the initial values for state machine attributes. Static values
# are set prior to the original initialize method and dynamic values are
# set *after* the initialize method in case it is dependent on it.
def define_state_initializer
define_helper :instance, <<-end_eval, __FILE__, __LINE__ + 1
def initialize(*)
self.class.state_machines.initialize_states(self) { super }
end
end_eval
end
# Adds reader/writer methods for accessing the state attribute
def define_state_accessor
attribute = self.attribute
@helper_modules[:instance].class_eval { attr_reader attribute } unless owner_class_ancestor_has_method?(:instance, attribute)
@helper_modules[:instance].class_eval { attr_writer attribute } unless owner_class_ancestor_has_method?(:instance, "#{attribute}=")
end
# Adds predicate method to the owner class for determining the name of the
# current state
def define_state_predicate
call_super = !!owner_class_ancestor_has_method?(:instance, "#{name}?")
define_helper :instance, <<-end_eval, __FILE__, __LINE__ + 1
def #{name}?(*args)
args.empty? && (#{call_super} || defined?(super)) ? super : self.class.state_machine(#{name.inspect}).states.matches?(self, *args)
end
end_eval
end
# Adds helper methods for getting information about this state machine's
# events
def define_event_helpers
# Gets the events that are allowed to fire on the current object
define_helper(:instance, attribute(:events)) do |machine, object, *args|
machine.events.valid_for(object, *args).map { |event| event.name }
end
# Gets the next possible transitions that can be run on the current
# object
define_helper(:instance, attribute(:transitions)) do |machine, object, *args|
machine.events.transitions_for(object, *args)
end
# Fire an arbitrary event for this machine
define_helper(:instance, "fire_#{attribute(:event)}") do |machine, object, event, *args|
machine.events.fetch(event).fire(object, *args)
end
# Add helpers for tracking the event / transition to invoke when the
# action is called
if action
event_attribute = attribute(:event)
define_helper(:instance, event_attribute) do |machine, object|
# Interpret non-blank events as present
event = machine.read(object, :event, true)
event && !(event.respond_to?(:empty?) && event.empty?) ? event.to_sym : nil
end
# A roundabout way of writing the attribute is used here so that
# integrations can hook into this modification
define_helper(:instance, "#{event_attribute}=") do |machine, object, value|
machine.write(object, :event, value, true)
end
event_transition_attribute = attribute(:event_transition)
define_helper :instance, <<-end_eval, __FILE__, __LINE__ + 1
protected; attr_accessor #{event_transition_attribute.inspect}
end_eval
end
end
# Adds helper methods for getting information about this state machine's
# available transition paths
def define_path_helpers
# Gets the paths of transitions available to the current object
define_helper(:instance, attribute(:paths)) do |machine, object, *args|
machine.paths_for(object, *args)
end
end
# Determines whether action helpers should be defined for this machine.
# This is only true if there is an action configured and no other machines
# have processed this same configuration already.
def define_action_helpers?
action && !owner_class.state_machines.any? { |name, machine| machine.action == action && machine != self }
end
# Adds helper methods for automatically firing events when an action
# is invoked
def define_action_helpers
if action_hook
@action_hook_defined = true
define_action_hook
end
end
# Hooks directly into actions by defining the same method in an included
# module. As a result, when the action gets invoked, any state events
# defined for the object will get run. Method visibility is preserved.
def define_action_hook
action_hook = self.action_hook
action = self.action
private_action_hook = owner_class.private_method_defined?(action_hook)
# Only define the helper if it hasn't been defined already
define_helper :instance, <<-end_eval, __FILE__, __LINE__ + 1
def #{action_hook}(*)
self.class.state_machines.transitions(self, #{action.inspect}).perform { super }
end
private #{action_hook.inspect} if #{private_action_hook}
end_eval
end
# The method to hook into for triggering transitions when invoked. By
# default, this is the action configured for the machine.
#
# Since the default hook technique relies on module inheritance, the
# action must be defined in an ancestor of the owner class in order for
# it to be the action hook.
def action_hook
action && owner_class_ancestor_has_method?(:instance, action) ? action : nil
end
# Determines whether there's already a helper method defined within the
# given scope. This is true only if one of the owner's ancestors defines
# the method and is further along in the ancestor chain than this
# machine's helper module.
def owner_class_ancestor_has_method?(scope, method)
return false unless owner_class_has_method?(scope, method)
superclasses = owner_class.ancestors.select { |ancestor| ancestor.is_a?(Class) }[1..-1]
if scope == :class
current = owner_class.singleton_class
superclass = superclasses.first
else
current = owner_class
superclass = owner_class.superclass
end
# Generate the list of modules that *only* occur in the owner class, but
# were included *prior* to the helper modules, in addition to the
# superclasses
ancestors = current.ancestors - superclass.ancestors + superclasses
ancestors = ancestors[ancestors.index(@helper_modules[scope])..-1].reverse
# Search for the first ancestor that defined this method
ancestors.detect do |ancestor|
ancestor = ancestor.singleton_class if scope == :class && ancestor.is_a?(Class)
ancestor.method_defined?(method) || ancestor.private_method_defined?(method)
end
end
def owner_class_has_method?(scope, method)
target = scope == :class ? owner_class.singleton_class : owner_class
target.method_defined?(method) || target.private_method_defined?(method)
end
# Adds helper methods for accessing naming information about states and
# events on the owner class
def define_name_helpers
# Gets the humanized version of a state
define_helper(:class, "human_#{attribute(:name)}") do |machine, klass, state|
machine.states.fetch(state).human_name(klass)
end
# Gets the humanized version of an event
define_helper(:class, "human_#{attribute(:event_name)}") do |machine, klass, event|
machine.events.fetch(event).human_name(klass)
end
# Gets the state name for the current value
define_helper(:instance, attribute(:name)) do |machine, object|
machine.states.match!(object).name
end
# Gets the human state name for the current value
define_helper(:instance, "human_#{attribute(:name)}") do |machine, object|
machine.states.match!(object).human_name(object.class)
end
end
# Defines the with/without scope helpers for this attribute. Both the
# singular and plural versions of the attribute are defined for each
# scope helper. A custom plural can be specified if it cannot be
# automatically determined by either calling +pluralize+ on the attribute
# name or adding an "s" to the end of the name.
def define_scopes(custom_plural = nil)
plural = custom_plural || pluralize(name)
[:with, :without].each do |kind|
[name, plural].map { |s| s.to_s }.uniq.each do |suffix|
method = "#{kind}_#{suffix}"
if scope = send("create_#{kind}_scope", method)
# Converts state names to their corresponding values so that they
# can be looked up properly
define_helper(:class, method) do |machine, klass, *states|
run_scope(scope, machine, klass, states)
end
end
end
end
end
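# Example (sketch): scope helpers generated for a :state machine, available
# when the integration implements create_with_scope / create_without_scope
#
#   Vehicle.with_state(:parked)
#   Vehicle.without_states(:parked, :idling)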
# Generates the results for the given scope based on one or more states to
# filter by
def run_scope(scope, machine, klass, states)
values = states.flatten.map { |state| machine.states.fetch(state).value }
scope.call(klass, values)
end
# Pluralizes the given word using #pluralize (if available) or simply
# adding an "s" to the end of the word
def pluralize(word)
word = word.to_s
if word.respond_to?(:pluralize)
word.pluralize
else
"#{word}s"
end
end
# Creates a scope for finding objects *with* a particular value or values
# for the attribute.
#
# By default, this is a no-op.
def create_with_scope(name)
end
# Creates a scope for finding objects *without* a particular value or
# values for the attribute.
#
# By default, this is a no-op.
def create_without_scope(name)
end
# Always yields
def transaction(object)
yield
end
# Gets the initial attribute value defined by the owner class (outside of
# the machine's definition). By default, this is always nil.
def owner_class_attribute_default
nil
end
# Checks whether the given state matches the attribute default specified
# by the owner class
def owner_class_attribute_default_matches?(state)
state.matches?(owner_class_attribute_default)
end
# Updates this machine based on the configuration of other machines in the
# owner class that share the same target attribute.
def add_sibling_machine_configs
# Add existing states
sibling_machines.each do |machine|
machine.states.each { |state| states << state unless states[state.name] }
end
end
# Adds a new transition callback of the given type.
def add_callback(type, options, &block)
callbacks[type == :around ? :before : type] << callback = Callback.new(type, options, &block)
add_states(callback.known_states)
callback
end
# Tracks the given set of states in the list of all known states for
# this machine
def add_states(new_states)
new_states.map do |new_state|
# Check for other states that use a different class type for their name.
# This typically prevents string / symbol misuse.
if new_state && conflict = states.detect { |state| state.name && state.name.class != new_state.class }
raise ArgumentError, "#{new_state.inspect} state defined as #{new_state.class}, #{conflict.name.inspect} defined as #{conflict.name.class}; all states must be consistent"
end
unless state = states[new_state]
states << state = State.new(self, new_state)
# Copy states over to sibling machines
sibling_machines.each { |machine| machine.states << state }
end
state
end
end
# Tracks the given set of events in the list of all known events for
# this machine
def add_events(new_events)
new_events.map do |new_event|
# Check for other events that use a different class type for their name.
# This typically prevents string / symbol misuse.
if conflict = events.detect { |event| event.name.class != new_event.class }
raise ArgumentError, "#{new_event.inspect} event defined as #{new_event.class}, #{conflict.name.inspect} defined as #{conflict.name.class}; all events must be consistent"
end
unless event = events[new_event]
events << event = Event.new(self, new_event)
end
event
end
end
end
|
kontena/kontena | agent/lib/kontena/websocket_client.rb | Kontena.WebsocketClient.send_request | ruby | def send_request(id, method, params)
data = MessagePack.dump([0, id, method, params]).bytes
ws.send(data)
rescue => exc
warn exc
abort exc
end | Called from RpcClient, does not crash the Actor on errors.
@param [Integer] id
@param [String] method
@param [Array] params
@raise [RuntimeError] not connected | train | https://github.com/kontena/kontena/blob/5cb5b4457895985231ac88e78c8cbc5a8ffb5ec7/agent/lib/kontena/websocket_client.rb#L291-L297 | class WebsocketClient
include Celluloid
include Celluloid::Notifications
include Kontena::Logging
STRFTIME = '%F %T.%NZ'
CONNECT_INTERVAL = 1.0
RECONNECT_BACKOFF = 90.0
CONNECT_TIMEOUT = 10.0
OPEN_TIMEOUT = 10.0
PING_INTERVAL = 30.0 # seconds
PING_TIMEOUT = Kernel::Float(ENV['WEBSOCKET_TIMEOUT'] || 5.0)
CLOSE_TIMEOUT = 10.0
WRITE_TIMEOUT = 10.0 # this one is a little odd
# @param [String] api_uri
# @param [String] node_id
# @param [String] node_name
# @param [String] grid_token
# @param [String] node_token
# @param [Array<String>] node_labels
# @param [Hash] ssl_params
# @param [String] ssl_hostname
def initialize(api_uri, node_id, node_name:, grid_token: nil, node_token: nil, node_labels: [], ssl_params: {}, ssl_hostname: nil, autostart: true)
@api_uri = api_uri
@node_id = node_id
@node_name = node_name
@grid_token = grid_token
@node_token = node_token
@node_labels = node_labels
@ssl_params = ssl_params
@ssl_hostname = ssl_hostname
@connected = false
@reconnect_attempt = 0
if @node_token
info "initialized with node token #{@node_token[0..8]}..., node ID #{@node_id}"
elsif @grid_token
info "initialized with grid token #{@grid_token[0..8]}..., node ID #{@node_id}"
else
fail "Missing grid, node token"
end
async.start if autostart
end
# @return [Boolean]
def connected?
@connected
end
# @return [Boolean]
def reconnecting?
@reconnect_attempt > 0
end
def rpc_server
Celluloid::Actor[:rpc_server]
end
def rpc_client
Celluloid::Actor[:rpc_client]
end
def start
after(CONNECT_INTERVAL) do
connect!
end
end
# Using randomized full jitter to spread reconnect attempts across clients and reduce server contention
# See https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/
#
# @param attempt [Integer]
# @return [Float]
def reconnect_backoff(attempt)
if attempt > 16
backoff = RECONNECT_BACKOFF
else
backoff = [RECONNECT_BACKOFF, CONNECT_INTERVAL * 2 ** attempt].min
end
backoff *= rand
end
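# Example (sketch): upper bound of the randomized backoff per attempt
#
#   reconnect_backoff(0)  # somewhere in 0.0...1.0  (CONNECT_INTERVAL * 2**0)
#   reconnect_backoff(4)  # somewhere in 0.0...16.0 (CONNECT_INTERVAL * 2**4)
#   reconnect_backoff(20) # somewhere in 0.0...90.0 (capped at RECONNECT_BACKOFF)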
def reconnect!
backoff = reconnect_backoff(@reconnect_attempt)
@reconnect_attempt += 1
info "reconnect attempt #{@reconnect_attempt} in #{'%.2fs' % backoff}..."
after(backoff) do
connect!
end
end
# Connect to server, and start connect_client task
#
# Calls reconnect! on errors
def connect!
info "connecting to master at #{@api_uri}"
headers = {
'Kontena-Node-Id' => @node_id.to_s,
'Kontena-Node-Name' => @node_name,
'Kontena-Version' => Kontena::Agent::VERSION,
'Kontena-Node-Labels' => @node_labels.join(','),
'Kontena-Connected-At' => Time.now.utc.strftime(STRFTIME),
}
if @node_token
headers['Kontena-Node-Token'] = @node_token.to_s
elsif @grid_token
headers['Kontena-Grid-Token'] = @grid_token.to_s
else
fail "Missing grid, node token"
end
@ws = Kontena::Websocket::Client.new(@api_uri,
headers: headers,
ssl_params: @ssl_params,
ssl_hostname: @ssl_hostname,
connect_timeout: CONNECT_TIMEOUT,
open_timeout: OPEN_TIMEOUT,
ping_interval: PING_INTERVAL,
ping_timeout: PING_TIMEOUT,
close_timeout: CLOSE_TIMEOUT,
)
async.connect_client @ws
publish('websocket:connect', nil)
rescue => exc
error exc
reconnect!
end
# Connect the websocket client, and read messages.
#
# Keeps running as a separate defer thread as long as the websocket client is connected.
#
# Calls disconnected! -> reconnect! when done.
#
# @param ws [Kontena::Websocket::Client]
def connect_client(ws)
actor = Actor.current
# run the blocking websocket client connect+read in a separate thread
defer {
ws.on_pong do |delay|
# XXX: called with the client mutex locked, do not block
actor.async.on_pong(delay)
end
# blocks until open, raises on errors
ws.connect
# These are called from the read_ws -> defer thread, proxy back to actor
actor.on_open
ws.read do |message|
actor.on_message(message)
end
}
rescue Kontena::Websocket::CloseError => exc
# server closed connection
on_close(exc.code, exc.reason)
disconnected!
rescue Kontena::Websocket::Error => exc
# handle known errors, will reconnect or shutdown
on_error exc
disconnected!
rescue => exc
# XXX: crash instead of reconnecting on unknown errors?
error exc
disconnected!
else
# impossible: agent closed connection?!
info "Agent closed connection with code #{ws.close_code}: #{ws.close_reason}"
disconnected!
ensure
ws.disconnect # close socket
end
# Websocket handshake complete.
def on_open
ssl_verify = ws.ssl_verify?
begin
ssl_cert = ws.ssl_cert!
ssl_error = nil
rescue Kontena::Websocket::SSLVerifyError => exc
ssl_cert = exc.cert
ssl_error = exc
end
if ssl_error
if ssl_cert
warn "insecure connection established with SSL errors: #{ssl_error}: #{ssl_cert.subject} (issuer #{ssl_cert.issuer})"
else
warn "insecure connection established with SSL errors: #{ssl_error}"
end
elsif ssl_cert
if !ssl_verify
warn "secure connection established without KONTENA_SSL_VERIFY=true: #{ssl_cert.subject} (issuer #{ssl_cert.issuer})"
else
info "secure connection established with KONTENA_SSL_VERIFY: #{ssl_cert.subject} (issuer #{ssl_cert.issuer})"
end
else
info "insecure connection established without SSL"
end
connected!
end
# The websocket is connected: @ws is now valid and wen can send message
def connected!
@connected = true
@reconnect_attempt = 0
# NOTE: the server may still reject the websocket connection by closing it after the open handshake
# wait for the /agent/master_info RPC before emitting websocket:connected
publish('websocket:open', nil)
end
def ws
fail "not connected" unless @ws
@ws
end
# The websocket is disconnected: @ws is invalid and we can no longer send messages
def disconnected!
@ws = nil # prevent further send_message calls until reconnected
@connected = false
# any queued up send_message calls will fail
publish('websocket:disconnected', nil)
reconnect!
end
# Called from RpcServer, does not crash the Actor on errors.
#
# @param [String, Array] msg
# @raise [RuntimeError] not connected
def send_message(msg)
ws.send(msg)
rescue => exc
warn exc
abort exc
end
# Called from RpcClient, does not crash the Actor on errors.
#
# @param [String] method
# @param [Array] params
# @raise [RuntimeError] not connected
def send_notification(method, params)
data = MessagePack.dump([2, method, params]).bytes
ws.send(data)
rescue => exc
warn exc
abort exc
end
# Called from RpcClient, does not crash the Actor on errors.
#
# @param [Integer] id
# @param [String] method
# @param [Array] params
# @raise [RuntimeError] not connected
# @param [String] message
def on_message(message)
data = MessagePack.unpack(message.pack('c*'))
if request_message?(data)
rpc_server.async.handle_request(Actor.current, data)
elsif response_message?(data)
rpc_client.async.handle_response(data)
elsif notification_message?(data)
rpc_server.async.handle_notification(data)
end
end
# Websocket connection failed
#
# @param exc [Kontena::Websocket::Error]
def on_error(exc)
case exc
when Kontena::Websocket::SSLVerifyError
if exc.cert
error "unable to connect to SSL server with KONTENA_SSL_VERIFY=true: #{exc} (subject #{exc.subject}, issuer #{exc.issuer})"
else
error "unable to connect to SSL server with KONTENA_SSL_VERIFY=true: #{exc}"
end
when Kontena::Websocket::SSLConnectError
error "unable to connect to SSL server: #{exc}"
when Kontena::Websocket::ConnectError
error "unable to connect to server: #{exc}"
when Kontena::Websocket::ProtocolError
error "unexpected response from server, check url: #{exc}"
else
error "websocket error: #{exc}"
end
end
# Server closed websocket connection
#
# @param code [Integer]
# @param reason [String]
def on_close(code, reason)
debug "Server closed connection with code #{code}: #{reason}"
case code
when 4001
handle_invalid_token
when 4010
handle_invalid_version(reason)
when 4040, 4041
handle_invalid_connection(reason)
else
warn "connection closed with code #{code}: #{reason}"
end
end
def handle_invalid_token
error 'master does not accept our token, shutting down ...'
Kontena::Agent.shutdown
end
def handle_invalid_version(reason)
agent_version = Kontena::Agent::VERSION
error "master does not accept our version (#{agent_version}): #{reason}"
Kontena::Agent.shutdown
end
def handle_invalid_connection(reason)
error "master indicates that this agent should not reconnect: #{reason}"
Kontena::Agent.shutdown
end
# @param [Array] msg
# @return [Boolean]
def request_message?(msg)
msg.is_a?(Array) && msg.size == 4 && msg[0] == 0
end
# @param [Array] msg
# @return [Boolean]
def notification_message?(msg)
msg.is_a?(Array) && msg.size == 3 && msg[0] == 2
end
# @param [Array] msg
# @return [Boolean]
def response_message?(msg)
msg.is_a?(Array) && msg.size == 4 && msg[0] == 1
end
# @param delay [Float]
def on_pong(delay)
if delay > PING_TIMEOUT / 2
warn "server ping %.2fs of %.2fs timeout" % [delay, PING_TIMEOUT]
else
debug "server ping %.2fs of %.2fs timeout" % [delay, PING_TIMEOUT]
end
end
end
|
mojombo/chronic | lib/chronic/handlers.rb | Chronic.Handlers.handle_sm_sy | ruby | def handle_sm_sy(tokens, options)
month = tokens[0].get_tag(ScalarMonth).type
year = tokens[1].get_tag(ScalarYear).type
handle_year_and_month(year, month)
end | Handle scalar-month/scalar-year | train | https://github.com/mojombo/chronic/blob/2b1eae7ec440d767c09e0b1a7f0e9bcf30ce1d6c/lib/chronic/handlers.rb#L293-L297 | module Handlers
module_function
# Handle month/day
def handle_m_d(month, day, time_tokens, options)
month.start = self.now
span = month.this(options[:context])
year, month = span.begin.year, span.begin.month
day_start = Chronic.time_class.local(year, month, day)
day_start = Chronic.time_class.local(year + 1, month, day) if options[:context] == :future && day_start < now
day_or_time(day_start, time_tokens, options)
end
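# Example (sketch): month/day resolution relative to "now"
#
#   with now = 2014-04-01, a month/day pair such as March 3 resolves to
#   2015-03-03 under :context => :future and to 2014-03-03 under :past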
# Handle repeater-month-name/scalar-day
def handle_rmn_sd(tokens, options)
month = tokens[0].get_tag(RepeaterMonthName)
day = tokens[1].get_tag(ScalarDay).type
return if month_overflow?(self.now.year, month.index, day)
handle_m_d(month, day, tokens[2..tokens.size], options)
end
# Handle repeater-month-name/scalar-day with separator-on
def handle_rmn_sd_on(tokens, options)
if tokens.size > 3
month = tokens[2].get_tag(RepeaterMonthName)
day = tokens[3].get_tag(ScalarDay).type
token_range = 0..1
else
month = tokens[1].get_tag(RepeaterMonthName)
day = tokens[2].get_tag(ScalarDay).type
token_range = 0..0
end
return if month_overflow?(self.now.year, month.index, day)
handle_m_d(month, day, tokens[token_range], options)
end
# Handle repeater-month-name/ordinal-day
def handle_rmn_od(tokens, options)
month = tokens[0].get_tag(RepeaterMonthName)
day = tokens[1].get_tag(OrdinalDay).type
return if month_overflow?(self.now.year, month.index, day)
handle_m_d(month, day, tokens[2..tokens.size], options)
end
# Handle ordinal this month
def handle_od_rm(tokens, options)
day = tokens[0].get_tag(OrdinalDay).type
month = tokens[2].get_tag(RepeaterMonth)
handle_m_d(month, day, tokens[3..tokens.size], options)
end
# Handle ordinal-day/repeater-month-name
def handle_od_rmn(tokens, options)
month = tokens[1].get_tag(RepeaterMonthName)
day = tokens[0].get_tag(OrdinalDay).type
return if month_overflow?(self.now.year, month.index, day)
handle_m_d(month, day, tokens[2..tokens.size], options)
end
def handle_sy_rmn_od(tokens, options)
year = tokens[0].get_tag(ScalarYear).type
month = tokens[1].get_tag(RepeaterMonthName).index
day = tokens[2].get_tag(OrdinalDay).type
time_tokens = tokens.last(tokens.size - 3)
return if month_overflow?(year, month, day)
begin
day_start = Chronic.time_class.local(year, month, day)
day_or_time(day_start, time_tokens, options)
rescue ArgumentError
nil
end
end
# Handle scalar-day/repeater-month-name
def handle_sd_rmn(tokens, options)
month = tokens[1].get_tag(RepeaterMonthName)
day = tokens[0].get_tag(ScalarDay).type
return if month_overflow?(self.now.year, month.index, day)
handle_m_d(month, day, tokens[2..tokens.size], options)
end
# Handle repeater-month-name/ordinal-day with separator-on
def handle_rmn_od_on(tokens, options)
if tokens.size > 3
month = tokens[2].get_tag(RepeaterMonthName)
day = tokens[3].get_tag(OrdinalDay).type
token_range = 0..1
else
month = tokens[1].get_tag(RepeaterMonthName)
day = tokens[2].get_tag(OrdinalDay).type
token_range = 0..0
end
return if month_overflow?(self.now.year, month.index, day)
handle_m_d(month, day, tokens[token_range], options)
end
# Handle scalar-year/repeater-quarter-name
def handle_sy_rqn(tokens, options)
handle_rqn_sy(tokens[0..1].reverse, options)
end
# Handle repeater-quarter-name/scalar-year
def handle_rqn_sy(tokens, options)
year = tokens[1].get_tag(ScalarYear).type
quarter_tag = tokens[0].get_tag(RepeaterQuarterName)
quarter_tag.start = Chronic.construct(year)
quarter_tag.this(:none)
end
# Handle repeater-month-name/scalar-year
def handle_rmn_sy(tokens, options)
month = tokens[0].get_tag(RepeaterMonthName).index
year = tokens[1].get_tag(ScalarYear).type
if month == 12
next_month_year = year + 1
next_month_month = 1
else
next_month_year = year
next_month_month = month + 1
end
begin
end_time = Chronic.time_class.local(next_month_year, next_month_month)
Span.new(Chronic.time_class.local(year, month), end_time)
rescue ArgumentError
nil
end
end
# Handle generic timestamp (ruby 1.8)
def handle_generic(tokens, options)
t = Chronic.time_class.parse(options[:text])
Span.new(t, t + 1)
rescue ArgumentError => e
raise e unless e.message =~ /out of range/
end
# Handle repeater-month-name/scalar-day/scalar-year
def handle_rmn_sd_sy(tokens, options)
month = tokens[0].get_tag(RepeaterMonthName).index
day = tokens[1].get_tag(ScalarDay).type
year = tokens[2].get_tag(ScalarYear).type
time_tokens = tokens.last(tokens.size - 3)
return if month_overflow?(year, month, day)
begin
day_start = Chronic.time_class.local(year, month, day)
day_or_time(day_start, time_tokens, options)
rescue ArgumentError
nil
end
end
# Handle repeater-month-name/ordinal-day/scalar-year
def handle_rmn_od_sy(tokens, options)
month = tokens[0].get_tag(RepeaterMonthName).index
day = tokens[1].get_tag(OrdinalDay).type
year = tokens[2].get_tag(ScalarYear).type
time_tokens = tokens.last(tokens.size - 3)
return if month_overflow?(year, month, day)
begin
day_start = Chronic.time_class.local(year, month, day)
day_or_time(day_start, time_tokens, options)
rescue ArgumentError
nil
end
end
# Handle ordinal-day/repeater-month-name/scalar-year
def handle_od_rmn_sy(tokens, options)
day = tokens[0].get_tag(OrdinalDay).type
month = tokens[1].get_tag(RepeaterMonthName).index
year = tokens[2].get_tag(ScalarYear).type
time_tokens = tokens.last(tokens.size - 3)
return if month_overflow?(year, month, day)
begin
day_start = Chronic.time_class.local(year, month, day)
day_or_time(day_start, time_tokens, options)
rescue ArgumentError
nil
end
end
# Handle scalar-day/repeater-month-name/scalar-year
def handle_sd_rmn_sy(tokens, options)
new_tokens = [tokens[1], tokens[0], tokens[2]]
time_tokens = tokens.last(tokens.size - 3)
handle_rmn_sd_sy(new_tokens + time_tokens, options)
end
# Handle scalar-month/scalar-day/scalar-year (endian middle)
def handle_sm_sd_sy(tokens, options)
month = tokens[0].get_tag(ScalarMonth).type
day = tokens[1].get_tag(ScalarDay).type
year = tokens[2].get_tag(ScalarYear).type
time_tokens = tokens.last(tokens.size - 3)
return if month_overflow?(year, month, day)
begin
day_start = Chronic.time_class.local(year, month, day)
day_or_time(day_start, time_tokens, options)
rescue ArgumentError
nil
end
end
# Handle scalar-day/scalar-month/scalar-year (endian little)
def handle_sd_sm_sy(tokens, options)
new_tokens = [tokens[1], tokens[0], tokens[2]]
time_tokens = tokens.last(tokens.size - 3)
handle_sm_sd_sy(new_tokens + time_tokens, options)
end
# Handle scalar-year/scalar-month/scalar-day
def handle_sy_sm_sd(tokens, options)
new_tokens = [tokens[1], tokens[2], tokens[0]]
time_tokens = tokens.last(tokens.size - 3)
handle_sm_sd_sy(new_tokens + time_tokens, options)
end
# Handle scalar-month/scalar-day
def handle_sm_sd(tokens, options)
month = tokens[0].get_tag(ScalarMonth).type
day = tokens[1].get_tag(ScalarDay).type
year = self.now.year
time_tokens = tokens.last(tokens.size - 2)
return if month_overflow?(year, month, day)
begin
day_start = Chronic.time_class.local(year, month, day)
if options[:context] == :future && day_start < now
day_start = Chronic.time_class.local(year + 1, month, day)
elsif options[:context] == :past && day_start > now
day_start = Chronic.time_class.local(year - 1, month, day)
end
day_or_time(day_start, time_tokens, options)
rescue ArgumentError
nil
end
end
# Handle scalar-day/scalar-month
def handle_sd_sm(tokens, options)
new_tokens = [tokens[1], tokens[0]]
time_tokens = tokens.last(tokens.size - 2)
handle_sm_sd(new_tokens + time_tokens, options)
end
def handle_year_and_month(year, month)
if month == 12
next_month_year = year + 1
next_month_month = 1
else
next_month_year = year
next_month_month = month + 1
end
begin
end_time = Chronic.time_class.local(next_month_year, next_month_month)
Span.new(Chronic.time_class.local(year, month), end_time)
rescue ArgumentError
nil
end
end
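# Example (sketch): the span produced for a year/month pair
#
#   handle_year_and_month(2014, 12)
#   # => Span from 2014-12-01 00:00:00 to 2015-01-01 00:00:00 (in Chronic.time_class)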
# Handle scalar-month/scalar-year
# Handle scalar-year/scalar-month
def handle_sy_sm(tokens, options)
year = tokens[0].get_tag(ScalarYear).type
month = tokens[1].get_tag(ScalarMonth).type
handle_year_and_month(year, month)
end
# Handle RepeaterDayName RepeaterMonthName OrdinalDay
def handle_rdn_rmn_od(tokens, options)
month = tokens[1].get_tag(RepeaterMonthName)
day = tokens[2].get_tag(OrdinalDay).type
time_tokens = tokens.last(tokens.size - 3)
year = self.now.year
return if month_overflow?(year, month.index, day)
begin
if time_tokens.empty?
start_time = Chronic.time_class.local(year, month.index, day)
end_time = time_with_rollover(year, month.index, day + 1)
Span.new(start_time, end_time)
else
day_start = Chronic.time_class.local(year, month.index, day)
day_or_time(day_start, time_tokens, options)
end
rescue ArgumentError
nil
end
end
# Handle RepeaterDayName RepeaterMonthName OrdinalDay ScalarYear
def handle_rdn_rmn_od_sy(tokens, options)
month = tokens[1].get_tag(RepeaterMonthName)
day = tokens[2].get_tag(OrdinalDay).type
year = tokens[3].get_tag(ScalarYear).type
return if month_overflow?(year, month.index, day)
begin
start_time = Chronic.time_class.local(year, month.index, day)
end_time = time_with_rollover(year, month.index, day + 1)
Span.new(start_time, end_time)
rescue ArgumentError
nil
end
end
# Handle RepeaterDayName OrdinalDay
def handle_rdn_od(tokens, options)
day = tokens[1].get_tag(OrdinalDay).type
time_tokens = tokens.last(tokens.size - 2)
year = self.now.year
month = self.now.month
if options[:context] == :future
self.now.day > day ? month += 1 : month
end
return if month_overflow?(year, month, day)
begin
if time_tokens.empty?
start_time = Chronic.time_class.local(year, month, day)
end_time = time_with_rollover(year, month, day + 1)
Span.new(start_time, end_time)
else
day_start = Chronic.time_class.local(year, month, day)
day_or_time(day_start, time_tokens, options)
end
rescue ArgumentError
nil
end
end
# Handle RepeaterDayName RepeaterMonthName ScalarDay
def handle_rdn_rmn_sd(tokens, options)
month = tokens[1].get_tag(RepeaterMonthName)
day = tokens[2].get_tag(ScalarDay).type
time_tokens = tokens.last(tokens.size - 3)
year = self.now.year
return if month_overflow?(year, month.index, day)
begin
if time_tokens.empty?
start_time = Chronic.time_class.local(year, month.index, day)
end_time = time_with_rollover(year, month.index, day + 1)
Span.new(start_time, end_time)
else
day_start = Chronic.time_class.local(year, month.index, day)
day_or_time(day_start, time_tokens, options)
end
rescue ArgumentError
nil
end
end
# Handle RepeaterDayName RepeaterMonthName ScalarDay ScalarYear
def handle_rdn_rmn_sd_sy(tokens, options)
month = tokens[1].get_tag(RepeaterMonthName)
day = tokens[2].get_tag(ScalarDay).type
year = tokens[3].get_tag(ScalarYear).type
return if month_overflow?(year, month.index, day)
begin
start_time = Chronic.time_class.local(year, month.index, day)
end_time = time_with_rollover(year, month.index, day + 1)
Span.new(start_time, end_time)
rescue ArgumentError
nil
end
end
def handle_sm_rmn_sy(tokens, options)
day = tokens[0].get_tag(ScalarDay).type
month = tokens[1].get_tag(RepeaterMonthName).index
year = tokens[2].get_tag(ScalarYear).type
if tokens.size > 3
time = get_anchor([tokens.last], options).begin
h, m, s = time.hour, time.min, time.sec
time = Chronic.time_class.local(year, month, day, h, m, s)
end_time = Chronic.time_class.local(year, month, day + 1, h, m, s)
else
time = Chronic.time_class.local(year, month, day)
day += 1 unless day >= 31
end_time = Chronic.time_class.local(year, month, day)
end
Span.new(time, end_time)
end
# anchors
# Handle repeaters
def handle_r(tokens, options)
dd_tokens = dealias_and_disambiguate_times(tokens, options)
get_anchor(dd_tokens, options)
end
# Handle repeater/grabber/repeater
def handle_r_g_r(tokens, options)
new_tokens = [tokens[1], tokens[0], tokens[2]]
handle_r(new_tokens, options)
end
# arrows
# Handle scalar/repeater/pointer helper
def handle_srp(tokens, span, options)
distance = tokens[0].get_tag(Scalar).type
repeater = tokens[1].get_tag(Repeater)
pointer = tokens[2].get_tag(Pointer).type
repeater.offset(span, distance, pointer) if repeater.respond_to?(:offset)
end
# Handle scalar/repeater/pointer
def handle_s_r_p(tokens, options)
span = Span.new(self.now, self.now + 1)
handle_srp(tokens, span, options)
end
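# Example (sketch): how "3 days ago" flows through handle_s_r_p
#
#   scalar 3, repeater RepeaterDay, pointer :past
#   => RepeaterDay#offset(Span.new(now, now + 1), 3, :past)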
# Handle pointer/scalar/repeater
def handle_p_s_r(tokens, options)
new_tokens = [tokens[1], tokens[2], tokens[0]]
handle_s_r_p(new_tokens, options)
end
# Handle scalar/repeater/pointer/anchor
def handle_s_r_p_a(tokens, options)
anchor_span = get_anchor(tokens[3..tokens.size - 1], options)
handle_srp(tokens, anchor_span, options)
end
# Handle repeater/scalar/repeater/pointer
def handle_rmn_s_r_p(tokens, options)
handle_s_r_p_a(tokens[1..3] + tokens[0..0], options)
end
def handle_s_r_a_s_r_p_a(tokens, options)
anchor_span = get_anchor(tokens[4..tokens.size - 1], options)
span = handle_srp(tokens[0..1]+tokens[4..6], anchor_span, options)
handle_srp(tokens[2..3]+tokens[4..6], span, options)
end
# narrows
# Handle ordinal repeaters
def handle_orr(tokens, outer_span, options)
repeater = tokens[1].get_tag(Repeater)
repeater.start = outer_span.begin - 1
ordinal = tokens[0].get_tag(Ordinal).type
span = nil
ordinal.times do
span = repeater.next(:future)
if span.begin >= outer_span.end
span = nil
break
end
end
span
end
# Handle ordinal/repeater/separator/repeater
def handle_o_r_s_r(tokens, options)
outer_span = get_anchor([tokens[3]], options)
handle_orr(tokens[0..1], outer_span, options)
end
# Handle ordinal/repeater/grabber/repeater
def handle_o_r_g_r(tokens, options)
outer_span = get_anchor(tokens[2..3], options)
handle_orr(tokens[0..1], outer_span, options)
end
# support methods
def day_or_time(day_start, time_tokens, options)
outer_span = Span.new(day_start, day_start + (24 * 60 * 60))
unless time_tokens.empty?
self.now = outer_span.begin
get_anchor(dealias_and_disambiguate_times(time_tokens, options), options.merge(:context => :future))
else
outer_span
end
end
def get_anchor(tokens, options)
grabber = Grabber.new(:this)
pointer = :future
repeaters = get_repeaters(tokens)
repeaters.size.times { tokens.pop }
if tokens.first && tokens.first.get_tag(Grabber)
grabber = tokens.shift.get_tag(Grabber)
end
head = repeaters.shift
head.start = self.now
case grabber.type
when :last
outer_span = head.next(:past)
when :this
if options[:context] != :past and repeaters.size > 0
outer_span = head.this(:none)
else
outer_span = head.this(options[:context])
end
when :next
outer_span = head.next(:future)
else
raise 'Invalid grabber'
end
if Chronic.debug
puts "Handler-class: #{head.class}"
puts "--#{outer_span}"
end
find_within(repeaters, outer_span, pointer)
end
def get_repeaters(tokens)
tokens.map { |token| token.get_tag(Repeater) }.compact.sort.reverse
end
def month_overflow?(year, month, day)
if ::Date.leap?(year)
day > RepeaterMonth::MONTH_DAYS_LEAP[month - 1]
else
day > RepeaterMonth::MONTH_DAYS[month - 1]
end
rescue ArgumentError
false
end
# Recursively finds repeaters within other repeaters.
# Returns a Span representing the innermost time span
# or nil if no repeater union could be found
def find_within(tags, span, pointer)
puts "--#{span}" if Chronic.debug
return span if tags.empty?
head = tags.shift
head.start = (pointer == :future ? span.begin : span.end)
h = head.this(:none)
if span.cover?(h.begin) || span.cover?(h.end)
find_within(tags, h, pointer)
end
end
def time_with_rollover(year, month, day)
date_parts =
if month_overflow?(year, month, day)
if month == 12
[year + 1, 1, 1]
else
[year, month + 1, 1]
end
else
[year, month, day]
end
Chronic.time_class.local(*date_parts)
end
def dealias_and_disambiguate_times(tokens, options)
# handle aliases of am/pm
# 5:00 in the morning -> 5:00 am
# 7:00 in the evening -> 7:00 pm
day_portion_index = nil
tokens.each_with_index do |t, i|
if t.get_tag(RepeaterDayPortion)
day_portion_index = i
break
end
end
time_index = nil
tokens.each_with_index do |t, i|
if t.get_tag(RepeaterTime)
time_index = i
break
end
end
if day_portion_index && time_index
t1 = tokens[day_portion_index]
t1tag = t1.get_tag(RepeaterDayPortion)
case t1tag.type
when :morning
puts '--morning->am' if Chronic.debug
t1.untag(RepeaterDayPortion)
t1.tag(RepeaterDayPortion.new(:am))
when :afternoon, :evening, :night
puts "--#{t1tag.type}->pm" if Chronic.debug
t1.untag(RepeaterDayPortion)
t1.tag(RepeaterDayPortion.new(:pm))
end
end
# handle ambiguous times if :ambiguous_time_range is specified
if options[:ambiguous_time_range] != :none
ambiguous_tokens = []
tokens.each_with_index do |token, i|
ambiguous_tokens << token
next_token = tokens[i + 1]
if token.get_tag(RepeaterTime) && token.get_tag(RepeaterTime).type.ambiguous? && (!next_token || !next_token.get_tag(RepeaterDayPortion))
distoken = Token.new('disambiguator')
distoken.tag(RepeaterDayPortion.new(options[:ambiguous_time_range]))
ambiguous_tokens << distoken
end
end
tokens = ambiguous_tokens
end
tokens
end
end
|
NCSU-Libraries/quick_search | app/controllers/quick_search/logging_controller.rb | QuickSearch.LoggingController.log_event | ruby | def log_event
if params[:category].present? && params[:event_action].present? && params[:label].present?
# if an action isn't passed in, assume that it is a click
action = params.fetch(:action_type, 'click')
# create a new event on the current session
@session.events.create(category: params[:category], item: params[:event_action], query: params[:label][0..250], action: action)
if params[:ga].present? and params[:ga]
send_event_to_ga(params[:category], params[:event_action], params[:label])
end
# check whether this is a jsonp request
if params[:callback].present?
render :json => { 'response': 'success' }, :callback => params[:callback]
else
render :json => { 'response': 'success' }
end
else
head :bad_request
end
end | Logs an event to the database. Typically, these can be clicks or serves.
This is an API endpoint for logging an event. It requires that at least a category, an event action, and a label are
present in the query parameters. It returns a 200 OK HTTP status if the request was successful, or
a 400 BAD REQUEST HTTP status if any parameters are missing. This endpoint supports JSONP requests. | train | https://github.com/NCSU-Libraries/quick_search/blob/2e2c3f8682eed63a2bf2c008fa77f04ff9dd6a03/app/controllers/quick_search/logging_controller.rb#L36-L57 | class LoggingController < ApplicationController
include QuickSearch::OnCampus
before_action :handle_session
protect_from_forgery except: :log_event
##
# Logs a search to the database
#
# This is an API endpoint for logging a search. It requires that at least a search query and a page are
# present in the query parameters. It returns a 200 OK HTTP status if the request was successful, or
# a 400 BAD REQUEST HTTP status if any parameters are missing.
def log_search
if params[:query].present? && params[:page].present?
@session.searches.create(query: params[:query], page: params[:page])
head :ok
else
head :bad_request
end
end
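# Example (sketch): a client request against this endpoint (route path assumed)
#
#   POST /quick_search/log_search?query=ruby&page=%2Fsearch
#   => 200 OK when :query and :page are present, 400 Bad Request otherwise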
##
# Logs an event to the database. Typically, these can be clicks or serves.
#
# This is an API endpoint for logging an event. It requires that at least a category, an event action, and a label are
# present in the query parameters. It returns a 200 OK HTTP status if the request was successful, or
# a 400 BAD REQUEST HTTP status if any parameters are missing. This endpoint supports JSONP requests.
private
##
# Logs an event to Google Analytics using the Measurement Protocol API
# https://developers.google.com/analytics/devguides/collection/protocol/v1/
def send_event_to_ga(category, action, label)
# google_analytics_client_id is a UUID that identifies a particular client to the GA Measurement Protocol API
if QuickSearch::Engine::APP_CONFIG['google_analytics_tracking_id'].blank? or QuickSearch::Engine::APP_CONFIG['google_analytics_client_id'].blank?
return false
end
# Pass along the client user agent and IP address so they are associated
# with the event in Google Analytics
params = {
v: 1,
t: 'event',
tid: QuickSearch::Engine::APP_CONFIG['google_analytics_tracking_id'],
cid: QuickSearch::Engine::APP_CONFIG['google_analytics_client_id'],
ec: category,
ea: action,
el: label,
uip: request.remote_ip,
ua: request.user_agent,
}
client = HTTPClient.new
url = "https://www.google-analytics.com/collect?" + params.to_query
# The measurement protocol API does not validate responses.
# The best way to test this is to check the query string against:
# https://ga-dev-tools.appspot.com/hit-builder/
client.post(url)
end
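# Example (sketch): the Measurement Protocol hit built above (values illustrative)
#
#   https://www.google-analytics.com/collect?v=1&t=event&tid=UA-XXXXX-Y&cid=<uuid>
#     &ec=best-bets&ea=click&el=library+hours&uip=<client ip>&ua=<client user agent>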
##
# Handles creating/updating a session on every request
def handle_session
if is_existing_session?
update_session
else
new_session
end
end
##
# Returns true if current request has an existing session, false otherwise
def is_existing_session?
cookies.has_key? :session_id and Session.find_by(session_uuid: cookies[:session_id])
end
##
# Returns true if current request was from a mobile device
#
# Uses User-Agent from request to make the determination, which may not be all-encompassing
# but works for most modern devices/browsers (iOS, Android). Looks for the string "Mobi" within
# the user-agent, which normally contains either Mobi or Mobile if the request was from a mobile browser
def is_mobile?
# TODO: better test for mobile?
# Recommended here as simple test: https://developer.mozilla.org/en-US/docs/Web/HTTP/Browser_detection_using_the_user_agent
request.user_agent.include? "Mobi"
end
##
# Creates a new session, and logs it in the database
#
# A session is tracked by a UUID that is stored in a cookie, and has a 5 minute expiry time.
# Sessions are stored in the database with the time they were initiated, their expiry time (or end time),
# whether the request originated from a campus IP address, and whether the request originated from a mobile device
def new_session
on_campus = on_campus?(request.remote_ip)
is_mobile = is_mobile?
session_expiry = 5.minutes.from_now
session_uuid = SecureRandom.uuid
# create session in db
@session = Session.create(session_uuid: session_uuid, expiry: session_expiry, on_campus: on_campus, is_mobile: is_mobile)
# set cookie
cookies[:session_id] = { :value => session_uuid, :expires => session_expiry }
end
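# Example (sketch): what a freshly created session looks like
#
#   cookie:  session_id=<uuid>, expiring in 5 minutes
#   db row:  Session(session_uuid: <uuid>, expiry: 5.minutes.from_now,
#            on_campus: true/false, is_mobile: true/false)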
##
# Updates a session's expiration time on cookie and in database
#
# When a request is made with a non-expired session, the expiration time is updated to 5 minutes from the current time.
# This update is reflected in the cookie as well as in the database entry for the session.
def update_session
# update session expiry in the database
session_id = cookies[:session_id]
@session = Session.find_by session_uuid: session_id
@session.expiry = 5.minutes.from_now
@session.save
# update session expiry on cookie
cookies[:session_id] = { :value => session_id, :expires => @session.expiry }
end
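# Hedged illustration (editor's note, not from the original source): with the
# 5-minute sliding window above, a request at 12:00 creates a session whose
# cookie and database row expire at 12:05; another request at 12:03 pushes both
# expiry times to 12:08; a request arriving after expiry starts a brand new session.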
end
|
hashicorp/vault-ruby | lib/vault/api/sys/auth.rb | Vault.Sys.auth_tune | ruby | def auth_tune(path)
json = client.get("/v1/sys/auth/#{encode_path(path)}/tune")
return AuthConfig.decode(json)
rescue HTTPError => e
return nil if e.code == 404
raise
end | Read the given auth path's configuration.
@example
Vault.sys.auth_tune("github") #=> #<Vault::AuthConfig "default_lease_ttl"=3600, "max_lease_ttl"=7200>
@param [String] path
the path to retrieve configuration for
@return [AuthConfig]
configuration of the given auth path | train | https://github.com/hashicorp/vault-ruby/blob/02f0532a802ba1a2a0d8703a4585dab76eb9d864/lib/vault/api/sys/auth.rb#L89-L95 | class Sys
# List all auths in Vault.
#
# @example
# Vault.sys.auths #=> {:token => #<Vault::Auth type="token", description="token based credentials">}
#
# @return [Hash<Symbol, Auth>]
def auths
json = client.get("/v1/sys/auth")
json = json[:data] if json[:data]
return Hash[*json.map do |k,v|
[k.to_s.chomp("/").to_sym, Auth.decode(v)]
end.flatten]
end
# Enable a particular authentication at the given path.
#
# @example
# Vault.sys.enable_auth("github", "github") #=> true
#
# @param [String] path
# the path to mount the auth
# @param [String] type
# the type of authentication
# @param [String] description
# a human-friendly description (optional)
#
# @return [true]
def enable_auth(path, type, description = nil)
payload = { type: type }
payload[:description] = description if !description.nil?
client.post("/v1/sys/auth/#{encode_path(path)}", JSON.fast_generate(payload))
return true
end
# Disable a particular authentication at the given path. If no auth
# exists at that path, an error will be raised.
#
# @example
# Vault.sys.disable_auth("github") #=> true
#
# @param [String] path
# the path to disable
#
# @return [true]
def disable_auth(path)
client.delete("/v1/sys/auth/#{encode_path(path)}")
return true
end
# Read the given auth path's configuration.
#
# @example
# Vault.sys.auth_tune("github") #=> #<Vault::AuthConfig "default_lease_ttl"=3600, "max_lease_ttl"=7200>
#
# @param [String] path
# the path to retrieve configuration for
#
# @return [AuthConfig]
# configuration of the given auth path
# Write the given auth path's configuration.
#
# @example
# Vault.sys.auth_tune("github", "default_lease_ttl" => 600, "max_lease_ttl" => 1200 ) #=> true
#
# @param [String] path
# the path to retrieve configuration for
#
# @return [AuthConfig]
# configuration of the given auth path
def put_auth_tune(path, config = {})
json = client.put("/v1/sys/auth/#{encode_path(path)}/tune", JSON.fast_generate(config))
if json.nil?
return true
else
return Secret.decode(json)
end
end
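# Hedged usage sketch (editor's illustration; the path and TTL values are made up):
#   Vault.sys.put_auth_tune("github", "default_lease_ttl" => 600) #=> true when Vault returns no body
#   Vault.sys.auth_tune("github") #=> #<Vault::AuthConfig ...>, or nil if the path does not exist (HTTP 404)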
end
|
NullVoxPopuli/lazy_crud | lib/lazy_crud/instance_methods.rb | LazyCrud.InstanceMethods.resource_proxy | ruby | def resource_proxy(with_deleted = false)
proxy = if parent_instance.present?
parent_instance.send(resource_plural_name)
else
self.class.resource_class
end
if with_deleted and proxy.respond_to?(:with_deleted)
proxy = proxy.with_deleted
end
proxy
end | determines if we want to use the parent class if available or
if we just use the resource class | train | https://github.com/NullVoxPopuli/lazy_crud/blob/80997de5de9eba4f96121c2bdb11fc4e4b8b754a/lib/lazy_crud/instance_methods.rb#L95-L107 | module InstanceMethods
def index
respond_with(set_collection_instance)
end
def show
# instance variable set in before_action
respond_with(get_resource_instance)
end
def new
set_resource_instance(resource_proxy.new)
respond_with(get_resource_instance)
end
def edit
# instance variable set in before_action
end
def create
@resource = resource_proxy.send(build_method, resource_params)
# ensure we can still use model name-based instance variables
# such as @discount, or @event
set_resource_instance
run_before_create_hooks
flash[:notice] = "#{resource_name} has been created." if @resource.save
respond_with(@resource, location: { action: :index })
# if @resource.save
# flash[:notice] = "#{resource_name} has been created."
# redirect_to action: :index
# else
# render action: :new
# end
end
def update
run_before_update_hooks
@resource.update(resource_params)
respond_with(@resource, location: { action: :index })
# if @resource.update(resource_params)
# redirect_to action: :index
# else
# redirect_to action: :edit
# end
end
def destroy
run_before_destroy_hooks
@resource.destroy
respond_with(@resource, location: { action: :index })
# flash[:notice] = "#{resource_name} has been deleted."
# redirect_to action: :index
end
# only works if deleting of resources occurs by setting
# the deleted_at field
def undestroy
@resource = resource_proxy(true).find(params[:id])
set_resource_instance
@resource.deleted_at = nil
@resource.save
respond_with(@resource, location: { action: :index })
# flash[:notice] = "#{resource_name} has been undeleted"
# redirect_to action: :index
end
private
def resource_name
@resource.try(:name) || @resource.class.name
end
def set_resource
@resource = resource_proxy.find(params[:id])
end
# use .try() on the params hash, in case the user forgot to provide
# the attributes
def resource_params
params[resource_singular_name].try(:permit, self.class.param_whitelist)
end
# determines if we want to use the parent class if available or
# if we just use the resource class
# if the resource_proxy has a parent, we can use the
# build method. Otherwise, resource_proxy is just the
# resource's class - in which case we'll use new
def build_method
resource_proxy.respond_to?(:build) ? :build : :new
end
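# Hedged illustration (editor's note; Event and Ticket are hypothetical models):
# with a parent class of Event and a resource class of Ticket, resource_proxy
# returns @event.tickets (an association, so build_method is :build); without a
# parent it returns the Ticket class itself (so build_method is :new).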
# allows all of our views to still use things like
# @level, @event, @whatever
# rather than just @resource
def set_resource_instance(resource = @resource)
instance_variable_set("@#{resource_singular_name}", resource)
end
def get_resource_instance
instance_variable_get("@#{resource_singular_name}")
end
# sets the plural instance variable for a collection of objects
def set_collection_instance
instance_variable_set("@#{resource_plural_name}", resource_proxy)
end
def parent_instance
if (not @parent) and self.class.parent_class.present?
# e.g.: Event => 'event'
parent_instance_name = self.class.parent_class.name.underscore
@parent = instance_variable_get("@#{parent_instance_name}")
end
@parent
end
# e.g.: Event => 'events'
def resource_plural_name
@association_name ||= self.class.resource_class.name.tableize
end
# e.g.: Event => 'event'
# alternatively, @resource.class.name.underscore
def resource_singular_name
@singular_name ||= resource_plural_name.singularize
end
end
|
ynab/ynab-sdk-ruby | lib/ynab/api/transactions_api.rb | YNAB.TransactionsApi.get_transactions | ruby | def get_transactions(budget_id, opts = {})
data, _status_code, _headers = get_transactions_with_http_info(budget_id, opts)
data
end | List transactions
Returns budget transactions
@param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
@param [Hash] opts the optional parameters
@option opts [Date] :since_date If specified, only transactions on or after this date will be included. The date should be ISO formatted (e.g. 2016-12-30).
@option opts [String] :type If specified, only transactions of the specified type will be included. 'uncategorized' and 'unapproved' are currently supported.
@option opts [Integer] :last_knowledge_of_server The starting server knowledge. If provided, only entities that have changed since last_knowledge_of_server will be included.
@return [TransactionsResponse] | train | https://github.com/ynab/ynab-sdk-ruby/blob/389a959e482b6fa9643c7e7bfb55d8640cc952fd/lib/ynab/api/transactions_api.rb#L146-L149 | class TransactionsApi
attr_accessor :api_client
def initialize(api_client = ApiClient.default)
@api_client = api_client
end
# Create a single transaction or multiple transactions
# Creates a single transaction or multiple transactions. If you provide a body containing a 'transaction' object, a single transaction will be created and if you provide a body containing a 'transactions' array, multiple transactions will be created.
# @param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
# @param data The transaction or transactions to create. To create a single transaction you can specify a value for the 'transaction' object and to create multiple transactions you can specify an array of 'transactions'. It is expected that you will only provide a value for one of these objects.
# @param [Hash] opts the optional parameters
# @return [SaveTransactionsResponse]
def create_transaction(budget_id, data, opts = {})
data, _status_code, _headers = create_transaction_with_http_info(budget_id, data, opts)
data
end
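# Hedged payload sketch (editor's illustration; account ids, dates and the
# milliunit amount are made up, and plain hashes are assumed to be accepted):
#   api.create_transaction(budget_id, transaction: { account_id: 'a1b2...', date: '2019-01-01', amount: -10_000 })
#   api.create_transaction(budget_id, transactions: [{ ... }, { ... }])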
# Create a single transaction or multiple transactions
# Creates a single transaction or multiple transactions. If you provide a body containing a 'transaction' object, a single transaction will be created and if you provide a body containing a 'transactions' array, multiple transactions will be created.
# @param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
# @param data The transaction or transactions to create. To create a single transaction you can specify a value for the 'transaction' object and to create multiple transactions you can specify an array of 'transactions'. It is expected that you will only provide a value for one of these objects.
# @param [Hash] opts the optional parameters
# @return [Array<(SaveTransactionsResponse, Fixnum, Hash)>] SaveTransactionsResponse data, response status code and response headers
def create_transaction_with_http_info(budget_id, data, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: TransactionsApi.create_transaction ...'
end
# verify the required parameter 'budget_id' is set
if @api_client.config.client_side_validation && budget_id.nil?
fail ArgumentError, "Missing the required parameter 'budget_id' when calling TransactionsApi.create_transaction"
end
# verify the required parameter 'data' is set
if @api_client.config.client_side_validation && data.nil?
fail ArgumentError, "Missing the required parameter 'data' when calling TransactionsApi.create_transaction"
end
# resource path
local_var_path = '/budgets/{budget_id}/transactions'.sub('{' + 'budget_id' + '}', budget_id.to_s)
# query parameters
query_params = {}
# header parameters
header_params = {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/json'])
# form parameters
form_params = {}
# http body (model)
post_body = @api_client.object_to_http_body(data)
auth_names = ['bearer']
data, status_code, headers = @api_client.call_api(:POST, local_var_path,
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => 'SaveTransactionsResponse')
if @api_client.config.debugging
@api_client.config.logger.debug "API called: TransactionsApi#create_transaction\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
# Single transaction
# Returns a single transaction
# @param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
# @param transaction_id The id of the transaction
# @param [Hash] opts the optional parameters
# @return [TransactionResponse]
def get_transaction_by_id(budget_id, transaction_id, opts = {})
data, _status_code, _headers = get_transaction_by_id_with_http_info(budget_id, transaction_id, opts)
data
end
# Single transaction
# Returns a single transaction
# @param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
# @param transaction_id The id of the transaction
# @param [Hash] opts the optional parameters
# @return [Array<(TransactionResponse, Fixnum, Hash)>] TransactionResponse data, response status code and response headers
def get_transaction_by_id_with_http_info(budget_id, transaction_id, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: TransactionsApi.get_transaction_by_id ...'
end
# verify the required parameter 'budget_id' is set
if @api_client.config.client_side_validation && budget_id.nil?
fail ArgumentError, "Missing the required parameter 'budget_id' when calling TransactionsApi.get_transaction_by_id"
end
# verify the required parameter 'transaction_id' is set
if @api_client.config.client_side_validation && transaction_id.nil?
fail ArgumentError, "Missing the required parameter 'transaction_id' when calling TransactionsApi.get_transaction_by_id"
end
# resource path
local_var_path = '/budgets/{budget_id}/transactions/{transaction_id}'.sub('{' + 'budget_id' + '}', budget_id.to_s).sub('{' + 'transaction_id' + '}', transaction_id.to_s)
# query parameters
query_params = {}
# header parameters
header_params = {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/json'])
# form parameters
form_params = {}
# http body (model)
post_body = nil
auth_names = ['bearer']
data, status_code, headers = @api_client.call_api(:GET, local_var_path,
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => 'TransactionResponse')
if @api_client.config.debugging
@api_client.config.logger.debug "API called: TransactionsApi#get_transaction_by_id\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
# List transactions
# Returns budget transactions
# @param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
# @param [Hash] opts the optional parameters
# @option opts [Date] :since_date If specified, only transactions on or after this date will be included. The date should be ISO formatted (e.g. 2016-12-30).
# @option opts [String] :type If specified, only transactions of the specified type will be included. 'uncategorized' and 'unapproved' are currently supported.
# @option opts [Integer] :last_knowledge_of_server The starting server knowledge. If provided, only entities that have changed since last_knowledge_of_server will be included.
# @return [TransactionsResponse]
# List transactions
# Returns budget transactions
# @param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
# @param [Hash] opts the optional parameters
# @option opts [Date] :since_date If specified, only transactions on or after this date will be included. The date should be ISO formatted (e.g. 2016-12-30).
# @option opts [String] :type If specified, only transactions of the specified type will be included. 'uncategorized' and 'unapproved' are currently supported.
# @option opts [Integer] :last_knowledge_of_server The starting server knowledge. If provided, only entities that have changed since last_knowledge_of_server will be included.
# @return [Array<(TransactionsResponse, Fixnum, Hash)>] TransactionsResponse data, response status code and response headers
def get_transactions_with_http_info(budget_id, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: TransactionsApi.get_transactions ...'
end
# verify the required parameter 'budget_id' is set
if @api_client.config.client_side_validation && budget_id.nil?
fail ArgumentError, "Missing the required parameter 'budget_id' when calling TransactionsApi.get_transactions"
end
if @api_client.config.client_side_validation && opts[:'type'] && !['uncategorized', 'unapproved'].include?(opts[:'type'])
fail ArgumentError, 'invalid value for "type", must be one of uncategorized, unapproved'
end
# resource path
local_var_path = '/budgets/{budget_id}/transactions'.sub('{' + 'budget_id' + '}', budget_id.to_s)
# query parameters
query_params = {}
query_params[:'since_date'] = opts[:'since_date'] if !opts[:'since_date'].nil?
query_params[:'type'] = opts[:'type'] if !opts[:'type'].nil?
query_params[:'last_knowledge_of_server'] = opts[:'last_knowledge_of_server'] if !opts[:'last_knowledge_of_server'].nil?
# header parameters
header_params = {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/json'])
# form parameters
form_params = {}
# http body (model)
post_body = nil
auth_names = ['bearer']
data, status_code, headers = @api_client.call_api(:GET, local_var_path,
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => 'TransactionsResponse')
if @api_client.config.debugging
@api_client.config.logger.debug "API called: TransactionsApi#get_transactions\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
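# Hedged usage sketch (editor's illustration; assumes the TransactionsResponse
# exposes the parsed payload as data.transactions):
#   api = YNAB::TransactionsApi.new
#   resp = api.get_transactions('last-used', since_date: Date.new(2016, 12, 30), type: 'unapproved')
#   resp.data.transactions.each { |t| puts t.id }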
# List account transactions
# Returns all transactions for a specified account
# @param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
# @param account_id The id of the account
# @param [Hash] opts the optional parameters
# @option opts [Date] :since_date If specified, only transactions on or after this date will be included. The date should be ISO formatted (e.g. 2016-12-30).
# @option opts [String] :type If specified, only transactions of the specified type will be included. 'uncategorized' and 'unapproved' are currently supported.
# @option opts [Integer] :last_knowledge_of_server The starting server knowledge. If provided, only entities that have changed since last_knowledge_of_server will be included.
# @return [TransactionsResponse]
def get_transactions_by_account(budget_id, account_id, opts = {})
data, _status_code, _headers = get_transactions_by_account_with_http_info(budget_id, account_id, opts)
data
end
# List account transactions
# Returns all transactions for a specified account
# @param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
# @param account_id The id of the account
# @param [Hash] opts the optional parameters
# @option opts [Date] :since_date If specified, only transactions on or after this date will be included. The date should be ISO formatted (e.g. 2016-12-30).
# @option opts [String] :type If specified, only transactions of the specified type will be included. 'uncategorized' and 'unapproved' are currently supported.
# @option opts [Integer] :last_knowledge_of_server The starting server knowledge. If provided, only entities that have changed since last_knowledge_of_server will be included.
# @return [Array<(TransactionsResponse, Fixnum, Hash)>] TransactionsResponse data, response status code and response headers
def get_transactions_by_account_with_http_info(budget_id, account_id, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: TransactionsApi.get_transactions_by_account ...'
end
# verify the required parameter 'budget_id' is set
if @api_client.config.client_side_validation && budget_id.nil?
fail ArgumentError, "Missing the required parameter 'budget_id' when calling TransactionsApi.get_transactions_by_account"
end
# verify the required parameter 'account_id' is set
if @api_client.config.client_side_validation && account_id.nil?
fail ArgumentError, "Missing the required parameter 'account_id' when calling TransactionsApi.get_transactions_by_account"
end
if @api_client.config.client_side_validation && opts[:'type'] && !['uncategorized', 'unapproved'].include?(opts[:'type'])
fail ArgumentError, 'invalid value for "type", must be one of uncategorized, unapproved'
end
# resource path
local_var_path = '/budgets/{budget_id}/accounts/{account_id}/transactions'.sub('{' + 'budget_id' + '}', budget_id.to_s).sub('{' + 'account_id' + '}', account_id.to_s)
# query parameters
query_params = {}
query_params[:'since_date'] = opts[:'since_date'] if !opts[:'since_date'].nil?
query_params[:'type'] = opts[:'type'] if !opts[:'type'].nil?
query_params[:'last_knowledge_of_server'] = opts[:'last_knowledge_of_server'] if !opts[:'last_knowledge_of_server'].nil?
# header parameters
header_params = {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/json'])
# form parameters
form_params = {}
# http body (model)
post_body = nil
auth_names = ['bearer']
data, status_code, headers = @api_client.call_api(:GET, local_var_path,
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => 'TransactionsResponse')
if @api_client.config.debugging
@api_client.config.logger.debug "API called: TransactionsApi#get_transactions_by_account\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
# List category transactions
# Returns all transactions for a specified category
# @param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
# @param category_id The id of the category
# @param [Hash] opts the optional parameters
# @option opts [Date] :since_date If specified, only transactions on or after this date will be included. The date should be ISO formatted (e.g. 2016-12-30).
# @option opts [String] :type If specified, only transactions of the specified type will be included. 'uncategorized' and 'unapproved' are currently supported.
# @option opts [Integer] :last_knowledge_of_server The starting server knowledge. If provided, only entities that have changed since last_knowledge_of_server will be included.
# @return [HybridTransactionsResponse]
def get_transactions_by_category(budget_id, category_id, opts = {})
data, _status_code, _headers = get_transactions_by_category_with_http_info(budget_id, category_id, opts)
data
end
# List category transactions
# Returns all transactions for a specified category
# @param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
# @param category_id The id of the category
# @param [Hash] opts the optional parameters
# @option opts [Date] :since_date If specified, only transactions on or after this date will be included. The date should be ISO formatted (e.g. 2016-12-30).
# @option opts [String] :type If specified, only transactions of the specified type will be included. 'uncategorized' and 'unapproved' are currently supported.
# @option opts [Integer] :last_knowledge_of_server The starting server knowledge. If provided, only entities that have changed since last_knowledge_of_server will be included.
# @return [Array<(HybridTransactionsResponse, Fixnum, Hash)>] HybridTransactionsResponse data, response status code and response headers
def get_transactions_by_category_with_http_info(budget_id, category_id, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: TransactionsApi.get_transactions_by_category ...'
end
# verify the required parameter 'budget_id' is set
if @api_client.config.client_side_validation && budget_id.nil?
fail ArgumentError, "Missing the required parameter 'budget_id' when calling TransactionsApi.get_transactions_by_category"
end
# verify the required parameter 'category_id' is set
if @api_client.config.client_side_validation && category_id.nil?
fail ArgumentError, "Missing the required parameter 'category_id' when calling TransactionsApi.get_transactions_by_category"
end
if @api_client.config.client_side_validation && opts[:'type'] && !['uncategorized', 'unapproved'].include?(opts[:'type'])
fail ArgumentError, 'invalid value for "type", must be one of uncategorized, unapproved'
end
# resource path
local_var_path = '/budgets/{budget_id}/categories/{category_id}/transactions'.sub('{' + 'budget_id' + '}', budget_id.to_s).sub('{' + 'category_id' + '}', category_id.to_s)
# query parameters
query_params = {}
query_params[:'since_date'] = opts[:'since_date'] if !opts[:'since_date'].nil?
query_params[:'type'] = opts[:'type'] if !opts[:'type'].nil?
query_params[:'last_knowledge_of_server'] = opts[:'last_knowledge_of_server'] if !opts[:'last_knowledge_of_server'].nil?
# header parameters
header_params = {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/json'])
# form parameters
form_params = {}
# http body (model)
post_body = nil
auth_names = ['bearer']
data, status_code, headers = @api_client.call_api(:GET, local_var_path,
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => 'HybridTransactionsResponse')
if @api_client.config.debugging
@api_client.config.logger.debug "API called: TransactionsApi#get_transactions_by_category\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
# List payee transactions
# Returns all transactions for a specified payee
# @param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
# @param payee_id The id of the payee
# @param [Hash] opts the optional parameters
# @option opts [Date] :since_date If specified, only transactions on or after this date will be included. The date should be ISO formatted (e.g. 2016-12-30).
# @option opts [String] :type If specified, only transactions of the specified type will be included. 'uncategorized' and 'unapproved' are currently supported.
# @option opts [Integer] :last_knowledge_of_server The starting server knowledge. If provided, only entities that have changed since last_knowledge_of_server will be included.
# @return [HybridTransactionsResponse]
def get_transactions_by_payee(budget_id, payee_id, opts = {})
data, _status_code, _headers = get_transactions_by_payee_with_http_info(budget_id, payee_id, opts)
data
end
# List payee transactions
# Returns all transactions for a specified payee
# @param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
# @param payee_id The id of the payee
# @param [Hash] opts the optional parameters
# @option opts [Date] :since_date If specified, only transactions on or after this date will be included. The date should be ISO formatted (e.g. 2016-12-30).
# @option opts [String] :type If specified, only transactions of the specified type will be included. 'uncategorized' and 'unapproved' are currently supported.
# @option opts [Integer] :last_knowledge_of_server The starting server knowledge. If provided, only entities that have changed since last_knowledge_of_server will be included.
# @return [Array<(HybridTransactionsResponse, Fixnum, Hash)>] HybridTransactionsResponse data, response status code and response headers
def get_transactions_by_payee_with_http_info(budget_id, payee_id, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: TransactionsApi.get_transactions_by_payee ...'
end
# verify the required parameter 'budget_id' is set
if @api_client.config.client_side_validation && budget_id.nil?
fail ArgumentError, "Missing the required parameter 'budget_id' when calling TransactionsApi.get_transactions_by_payee"
end
# verify the required parameter 'payee_id' is set
if @api_client.config.client_side_validation && payee_id.nil?
fail ArgumentError, "Missing the required parameter 'payee_id' when calling TransactionsApi.get_transactions_by_payee"
end
if @api_client.config.client_side_validation && opts[:'type'] && !['uncategorized', 'unapproved'].include?(opts[:'type'])
fail ArgumentError, 'invalid value for "type", must be one of uncategorized, unapproved'
end
# resource path
local_var_path = '/budgets/{budget_id}/payees/{payee_id}/transactions'.sub('{' + 'budget_id' + '}', budget_id.to_s).sub('{' + 'payee_id' + '}', payee_id.to_s)
# query parameters
query_params = {}
query_params[:'since_date'] = opts[:'since_date'] if !opts[:'since_date'].nil?
query_params[:'type'] = opts[:'type'] if !opts[:'type'].nil?
query_params[:'last_knowledge_of_server'] = opts[:'last_knowledge_of_server'] if !opts[:'last_knowledge_of_server'].nil?
# header parameters
header_params = {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/json'])
# form parameters
form_params = {}
# http body (model)
post_body = nil
auth_names = ['bearer']
data, status_code, headers = @api_client.call_api(:GET, local_var_path,
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => 'HybridTransactionsResponse')
if @api_client.config.debugging
@api_client.config.logger.debug "API called: TransactionsApi#get_transactions_by_payee\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
# Updates an existing transaction
# Updates a transaction
# @param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
# @param transaction_id The id of the transaction
# @param data The transaction to update
# @param [Hash] opts the optional parameters
# @return [TransactionResponse]
def update_transaction(budget_id, transaction_id, data, opts = {})
data, _status_code, _headers = update_transaction_with_http_info(budget_id, transaction_id, data, opts)
data
end
# Updates an existing transaction
# Updates a transaction
# @param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
# @param transaction_id The id of the transaction
# @param data The transaction to update
# @param [Hash] opts the optional parameters
# @return [Array<(TransactionResponse, Fixnum, Hash)>] TransactionResponse data, response status code and response headers
def update_transaction_with_http_info(budget_id, transaction_id, data, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: TransactionsApi.update_transaction ...'
end
# verify the required parameter 'budget_id' is set
if @api_client.config.client_side_validation && budget_id.nil?
fail ArgumentError, "Missing the required parameter 'budget_id' when calling TransactionsApi.update_transaction"
end
# verify the required parameter 'transaction_id' is set
if @api_client.config.client_side_validation && transaction_id.nil?
fail ArgumentError, "Missing the required parameter 'transaction_id' when calling TransactionsApi.update_transaction"
end
# verify the required parameter 'data' is set
if @api_client.config.client_side_validation && data.nil?
fail ArgumentError, "Missing the required parameter 'data' when calling TransactionsApi.update_transaction"
end
# resource path
local_var_path = '/budgets/{budget_id}/transactions/{transaction_id}'.sub('{' + 'budget_id' + '}', budget_id.to_s).sub('{' + 'transaction_id' + '}', transaction_id.to_s)
# query parameters
query_params = {}
# header parameters
header_params = {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/json'])
# form parameters
form_params = {}
# http body (model)
post_body = @api_client.object_to_http_body(data)
auth_names = ['bearer']
data, status_code, headers = @api_client.call_api(:PUT, local_var_path,
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => 'TransactionResponse')
if @api_client.config.debugging
@api_client.config.logger.debug "API called: TransactionsApi#update_transaction\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
# Update multiple transactions
# Updates multiple transactions, by 'id' or 'import_id'.
# @param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
# @param data The transactions to update. Optionally, transaction 'id' value(s) can be specified as null and an 'import_id' value can be provided which will allow transaction(s) to be updated by their import_id.
# @param [Hash] opts the optional parameters
# @return [SaveTransactionsResponse]
def update_transactions(budget_id, data, opts = {})
data, _status_code, _headers = update_transactions_with_http_info(budget_id, data, opts)
data
end
# Update multiple transactions
# Updates multiple transactions, by 'id' or 'import_id'.
# @param budget_id The id of the budget (\"last-used\" can also be used to specify the last used budget)
# @param data The transactions to update. Optionally, transaction 'id' value(s) can be specified as null and an 'import_id' value can be provided which will allow transaction(s) to be updated by their import_id.
# @param [Hash] opts the optional parameters
# @return [Array<(SaveTransactionsResponse, Fixnum, Hash)>] SaveTransactionsResponse data, response status code and response headers
def update_transactions_with_http_info(budget_id, data, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: TransactionsApi.update_transactions ...'
end
# verify the required parameter 'budget_id' is set
if @api_client.config.client_side_validation && budget_id.nil?
fail ArgumentError, "Missing the required parameter 'budget_id' when calling TransactionsApi.update_transactions"
end
# verify the required parameter 'data' is set
if @api_client.config.client_side_validation && data.nil?
fail ArgumentError, "Missing the required parameter 'data' when calling TransactionsApi.update_transactions"
end
# resource path
local_var_path = '/budgets/{budget_id}/transactions'.sub('{' + 'budget_id' + '}', budget_id.to_s)
# query parameters
query_params = {}
# header parameters
header_params = {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/json'])
# form parameters
form_params = {}
# http body (model)
post_body = @api_client.object_to_http_body(data)
auth_names = ['bearer']
data, status_code, headers = @api_client.call_api(:PATCH, local_var_path,
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => 'SaveTransactionsResponse')
if @api_client.config.debugging
@api_client.config.logger.debug "API called: TransactionsApi#update_transactions\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
end
|
sunspot/sunspot | sunspot/lib/sunspot/indexer.rb | Sunspot.Indexer.prepare_full_update | ruby | def prepare_full_update(model)
document = document_for_full_update(model)
setup = setup_for_object(model)
if boost = setup.document_boost_for(model)
document.attrs[:boost] = boost
end
setup.all_field_factories.each do |field_factory|
field_factory.populate_document(document, model)
end
document
end | Convert documents into hash of indexed properties | train | https://github.com/sunspot/sunspot/blob/31dd76cd7a14a4ef7bd541de97483d8cd72ff685/sunspot/lib/sunspot/indexer.rb#L104-L114 | class Indexer #:nodoc:
def initialize(connection)
@connection = connection
end
#
# Construct a representation of the model for indexing and send it to the
# connection for indexing
#
# ==== Parameters
#
# model<Object>:: the model to index
#
def add(model)
documents = Util.Array(model).map { |m| prepare_full_update(m) }
add_batch_documents(documents)
end
#
# Construct a representation of the given class instances for atomic properties update
# and send it to the connection for indexing
#
# ==== Parameters
#
# clazz<Class>:: the class of the models to be updated
# updates<Hash>:: hash of updates where keys are model ids
# and values are hash with property name/values to be updated
#
def add_atomic_update(clazz, updates={})
documents = updates.map { |id, m| prepare_atomic_update(clazz, id, m) }
add_batch_documents(documents)
end
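# Hedged usage sketch (editor's illustration; Post and :title are hypothetical
# and must correspond to fields configured in the Sunspot setup):
#   indexer.add_atomic_update(Post, 1 => { title: 'New title' })
# Each listed property is sent with update: :set semantics; other fields are left untouched.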
#
# Remove the given model from the Solr index
#
def remove(*models)
@connection.delete_by_id(
models.map { |model| Adapters::InstanceAdapter.adapt(model).index_id }
)
end
#
# Remove the model from the Solr index by specifying the class and ID
#
def remove_by_id(class_name, *ids)
ids.flatten!
@connection.delete_by_id(
ids.map { |id| Adapters::InstanceAdapter.index_id_for(class_name, id) }
)
end
#
# Delete all documents of the class indexed by this indexer from Solr.
#
def remove_all(clazz = nil)
if clazz
@connection.delete_by_query("type:#{Util.escape(clazz.name)}")
else
@connection.delete_by_query("*:*")
end
end
#
# Remove all documents that match the scope given in the Query
#
def remove_by_scope(scope)
@connection.delete_by_query(scope.to_boolean_phrase)
end
#
# Start batch processing
#
def start_batch
batcher.start_new
end
#
# Write batch out to Solr and clear it
#
def flush_batch
add_documents(batcher.end_current)
end
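# Hedged usage sketch (editor's illustration, not from the original source):
#   indexer.start_batch
#   posts.each { |post| indexer.add(post) }  # documents are buffered, not sent
#   indexer.flush_batch                      # a single add() call delivers the batch to Solr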
private
def batcher
@batcher ||= Batcher.new
end
#
# Convert documents into hash of indexed properties
#
def prepare_atomic_update(clazz, id, updates = {})
document = document_for_atomic_update(clazz, id)
setup_for_class(clazz).all_field_factories.each do |field_factory|
if updates.has_key?(field_factory.name)
field_factory.populate_document(document, nil, value: updates[field_factory.name], update: :set)
end
end
document
end
def add_documents(documents)
@connection.add(documents)
end
def add_batch_documents(documents)
if batcher.batching?
batcher.concat(documents)
else
add_documents(documents)
end
end
#
# All indexed documents index and store the +id+ and +type+ fields.
# These methods construct the document hash containing those key-value
# pairs.
#
def document_for_full_update(model)
RSolr::Xml::Document.new(
id: Adapters::InstanceAdapter.adapt(model).index_id,
type: Util.superclasses_for(model.class).map(&:name)
)
end
def document_for_atomic_update(clazz, id)
if Adapters::InstanceAdapter.for(clazz)
RSolr::Xml::Document.new(
id: Adapters::InstanceAdapter.index_id_for(clazz.name, id),
type: Util.superclasses_for(clazz).map(&:name)
)
end
end
#
# Get the Setup object for the given object's class.
#
# ==== Parameters
#
# object<Object>:: The object whose setup is to be retrieved
#
# ==== Returns
#
# Sunspot::Setup:: The setup for the object's class
#
def setup_for_object(object)
setup_for_class(object.class)
end
#
# Get the Setup object for the given class.
#
# ==== Parameters
#
# clazz<Class>:: The class whose setup is to be retrieved
#
# ==== Returns
#
# Sunspot::Setup:: The setup for the class
#
def setup_for_class(clazz)
Setup.for(clazz) || raise(NoSetupError, "Sunspot is not configured for #{clazz.inspect}")
end
end
|
oleganza/btcruby | lib/btcruby/wire_format.rb | BTC.WireFormat.read_string | ruby | def read_string(data: nil, stream: nil, offset: 0)
if data && !stream
string_length, read_length = read_varint(data: data, offset: offset)
# If failed to read the length prefix, return nil.
return [nil, read_length] if !string_length
# Check if we have enough bytes to read the string itself
return [nil, read_length] if data.bytesize < read_length + string_length
string = BTC::Data.ensure_binary_encoding(data)[read_length, string_length]
return [string, read_length + string_length]
elsif stream && !data
string_length, read_length = read_varint(stream: stream, offset: offset)
return [nil, read_length] if !string_length
buf = stream.read(string_length)
return [nil, read_length] if !buf
return [nil, read_length + buf.bytesize] if buf.bytesize < string_length
return [buf, read_length + buf.bytesize]
else
raise ArgumentError, "Either data or stream must be specified."
end
end | Reads variable-length string from data buffer or IO stream.
Either data or stream must be present (and only one of them).
Returns [string, length] where length is a number of bytes read (includes length prefix and offset bytes).
In case of failure, returns [nil, length] where length is a number of bytes read before the error was encountered. | train | https://github.com/oleganza/btcruby/blob/0aa0231a29dfc3c9f7fc54b39686aed10b6d9808/lib/btcruby/wire_format.rb#L134-L163 | module WireFormat
extend self
# Reads varint from data or stream.
# Either data or stream must be present (and only one of them).
# Optional offset is useful when reading from data.
# Returns [value, length] where value is a decoded integer value and length is number of bytes read (including offset bytes).
# Value may be nil when decoding failed (length might be zero or greater, depending on how much data was consumed before failing).
# Usage:
# i, _ = read_varint(data: buffer, offset: 42)
# i, _ = read_varint(stream: File.open('someblock','r'))
def read_varint(data: nil, stream: nil, offset: 0)
if data && !stream
return [nil, 0] if data.bytesize < 1 + offset
bytes = BTC::Data.bytes_from_data(data, offset: offset, limit: 9) # we don't need more than 9 bytes.
byte = bytes[0]
if byte < 0xfd
return [byte, offset + 1]
elsif byte == 0xfd
return [nil, 1] if data.bytesize < 3 + offset # 1 byte prefix, 2 bytes uint16
return [bytes[1] +
bytes[2]*256, offset + 3]
elsif byte == 0xfe
return [nil, 1] if data.bytesize < 5 + offset # 1 byte prefix, 4 bytes uint32
return [bytes[1] +
bytes[2]*256 +
bytes[3]*256*256 +
bytes[4]*256*256*256, offset + 5]
elsif byte == 0xff
return [nil, 1] if data.bytesize < 9 + offset # 1 byte prefix, 8 bytes uint64
return [bytes[1] +
bytes[2]*256 +
bytes[3]*256*256 +
bytes[4]*256*256*256 +
bytes[5]*256*256*256*256 +
bytes[6]*256*256*256*256*256 +
bytes[7]*256*256*256*256*256*256 +
bytes[8]*256*256*256*256*256*256*256, offset + 9]
end
elsif stream && !data
if stream.eof?
raise ArgumentError, "Can't parse varint from stream because it is already closed."
end
if offset > 0
buf = stream.read(offset)
return [nil, 0] if !buf
return [nil, buf.bytesize] if buf.bytesize < offset
end
prefix = stream.read(1)
return [nil, offset] if !prefix || prefix.bytesize == 0
byte = prefix.bytes[0]
if byte < 0xfd
return [byte, offset + 1]
elsif byte == 0xfd
buf = stream.read(2)
return [nil, offset + 1] if !buf
return [nil, offset + 1 + buf.bytesize] if buf.bytesize < 2
return [buf.unpack("v").first, offset + 3]
elsif byte == 0xfe
buf = stream.read(4)
return [nil, offset + 1] if !buf
return [nil, offset + 1 + buf.bytesize] if buf.bytesize < 4
return [buf.unpack("V").first, offset + 5]
elsif byte == 0xff
buf = stream.read(8)
return [nil, offset + 1] if !buf
return [nil, offset + 1 + buf.bytesize] if buf.bytesize < 8
return [buf.unpack("Q<").first, offset + 9]
end
else
raise ArgumentError, "Either data or stream must be specified."
end
end # read_varint
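# Hedged examples (editor's illustration of the varint format handled by
# read_varint/encode_varint; byte strings shown are derived from the code above):
#   encode_varint(10) == "\x0A"                # single byte for values below 0xfd
#   encode_varint(300) == "\xFD\x2C\x01"       # 0xfd prefix + uint16 little-endian
#   read_varint(data: "\xFD\x2C\x01") == [300, 3]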
# Encodes integer and returns its binary varint representation.
def encode_varint(i)
raise ArgumentError, "int must be present" if !i
raise ArgumentError, "int must be non-negative" if i < 0
buf = if i < 0xfd
[i].pack("C")
elsif i <= 0xffff
[0xfd, i].pack("Cv")
elsif i <= 0xffffffff
[0xfe, i].pack("CV")
elsif i <= 0xffffffffffffffff
[0xff, i].pack("CQ<")
else
raise ArgumentError, "Does not support integers larger 0xffffffffffffffff (i = 0x#{i.to_s(16)})"
end
buf
end
# Encodes integer and returns its binary varint representation.
# If data is given, appends to a data.
# If stream is given, writes to a stream.
def write_varint(i, data: nil, stream: nil)
buf = encode_varint(i)
data << buf if data
stream.write(buf) if stream
buf
end
# Reads variable-length string from data buffer or IO stream.
# Either data or stream must be present (and only one of them).
# Returns [string, length] where length is a number of bytes read (includes length prefix and offset bytes).
# In case of failure, returns [nil, length] where length is a number of bytes read before the error was encountered.
# Returns the binary representation of the var-length string.
def encode_string(string)
raise ArgumentError, "String must be present" if !string
encode_varint(string.bytesize) + BTC::Data.ensure_binary_encoding(string)
end
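# Hedged round-trip sketch (editor's illustration):
#   encode_string("hello") == "\x05hello"      # 1-byte varint length prefix + payload
#   read_string(data: encode_string("hello")) == ["hello", 6]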
# Writes variable-length string to a data buffer or IO stream.
# If data is given, appends to a data.
# If stream is given, writes to a stream.
# Returns the binary representation of the var-length string.
def write_string(string, data: nil, stream: nil)
raise ArgumentError, "String must be present" if !string
intbuf = write_varint(string.bytesize, data: data, stream: stream)
stringbuf = BTC::Data.ensure_binary_encoding(string)
data << stringbuf if data
stream.write(stringbuf) if stream
intbuf + stringbuf
end
# Reads varint length prefix, then calls the block appropriate number of times to read the items.
# Returns an array of items.
def read_array(data: nil, stream: nil, offset: 0)
count, len = read_varint(data: data, stream: stream, offset: offset)
return [nil, len] if !count
(0...count).map do |i|
yield
end
end
def encode_array(array, &block)
write_array(array, &block)
end
def write_array(array, data: nil, stream: nil, &block)
raise ArgumentError, "Array must be present" if !array
raise ArgumentError, "Parsing block must be present" if !block
intbuf = write_varint(array.size, data: data, stream: stream)
array.inject(intbuf) do |buf, e|
string = block.call(e)
stringbuf = BTC::Data.ensure_binary_encoding(string)
data << stringbuf if data
stream.write(stringbuf) if stream
buf << stringbuf
buf
end
end
# LEB128 encoding used in Open Assets protocol
# Decodes an unsigned integer encoded in LEB128.
# Returns `[value, length]` where `value` is an integer decoded from LEB128 and `length`
# is a number of bytes read (including offset bytes).
def read_uleb128(data: nil, stream: nil, offset: 0)
if (data && stream) || (!data && !stream)
raise ArgumentError, "Either data or stream must be specified."
end
if data
data = BTC::Data.ensure_binary_encoding(data)
end
if stream
if stream.eof?
raise ArgumentError, "Can't read LEB128 from stream because it is already closed."
end
if offset > 0
buf = stream.read(offset)
return [nil, 0] if !buf
return [nil, buf.bytesize] if buf.bytesize < offset
end
end
result = 0
shift = 0
while true
byte = if data
return [nil, offset] if data.bytesize < 1 + offset
BTC::Data.bytes_from_data(data, offset: offset, limit: 1)[0]
elsif stream
buf = stream.read(1)
return [nil, offset] if !buf || buf.bytesize == 0
buf.bytes[0]
end
result |= (byte & 0x7f) << shift
break if byte & 0x80 == 0
shift += 7
offset += 1
end
[result, offset + 1]
end
# Encodes an unsigned integer using LEB128.
def encode_uleb128(value)
raise ArgumentError, "Signed integers are not supported" if value < 0
return "\x00" if value == 0
bytes = []
while value != 0
byte = value & 0b01111111 # 0x7f
value >>= 7
if value != 0
byte |= 0b10000000 # 0x80
end
bytes << byte
end
return BTC::Data.data_from_bytes(bytes)
end
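# Hedged example (editor's illustration): 300 = 0b1_0010_1100, so the low seven
# bits are emitted first with the continuation bit set:
#   encode_uleb128(300) == "\xAC\x02"
#   read_uleb128(data: "\xAC\x02") == [300, 2]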
# Writes an unsigned integer encoded in LEB128 to a data buffer or a stream.
# Returns LEB128-encoded binary string.
def write_uleb128(value, data: nil, stream: nil)
raise ArgumentError, "Integer must be present" if !value
buf = encode_uleb128(value)
data << buf if data
stream.write(buf) if stream
buf
end
PACK_FORMAT_UINT8 = "C".freeze
PACK_FORMAT_INT8 = "c".freeze
PACK_FORMAT_UINT16LE = "S<".freeze
PACK_FORMAT_INT16LE = "s<".freeze
PACK_FORMAT_UINT32LE = "L<".freeze
PACK_FORMAT_INT32LE = "l<".freeze
PACK_FORMAT_UINT32BE = "L>".freeze # used in BIP32
PACK_FORMAT_INT32BE = "l>".freeze
PACK_FORMAT_UINT64LE = "Q<".freeze
PACK_FORMAT_INT64LE = "q<".freeze
# These read fixed-length integer in appropriate format ("le" stands for "little-endian")
# Return [value, length] or [nil, length] just like #read_varint method (see above).
def read_uint8(data: nil, stream: nil, offset: 0)
_read_fixint(name: :uint8, length: 1, pack_format: PACK_FORMAT_UINT8, data: data, stream: stream, offset: offset)
end
def read_int8(data: nil, stream: nil, offset: 0)
_read_fixint(name: :int8, length: 1, pack_format: PACK_FORMAT_INT8, data: data, stream: stream, offset: offset)
end
def read_uint16le(data: nil, stream: nil, offset: 0)
_read_fixint(name: :uint16le, length: 2, pack_format: PACK_FORMAT_UINT16LE, data: data, stream: stream, offset: offset)
end
def read_int16le(data: nil, stream: nil, offset: 0)
_read_fixint(name: :int16le, length: 2, pack_format: PACK_FORMAT_INT16LE, data: data, stream: stream, offset: offset)
end
def read_uint32le(data: nil, stream: nil, offset: 0)
_read_fixint(name: :uint32le, length: 4, pack_format: PACK_FORMAT_UINT32LE, data: data, stream: stream, offset: offset)
end
def read_int32le(data: nil, stream: nil, offset: 0)
_read_fixint(name: :int32le, length: 4, pack_format: PACK_FORMAT_INT32LE, data: data, stream: stream, offset: offset)
end
def read_uint32be(data: nil, stream: nil, offset: 0) # used in BIP32
_read_fixint(name: :uint32be, length: 4, pack_format: PACK_FORMAT_UINT32BE, data: data, stream: stream, offset: offset)
end
def read_int32be(data: nil, stream: nil, offset: 0)
_read_fixint(name: :int32be, length: 4, pack_format: PACK_FORMAT_INT32BE, data: data, stream: stream, offset: offset)
end
def read_uint64le(data: nil, stream: nil, offset: 0)
_read_fixint(name: :uint64le, length: 8, pack_format: PACK_FORMAT_UINT64LE, data: data, stream: stream, offset: offset)
end
def read_int64le(data: nil, stream: nil, offset: 0)
_read_fixint(name: :int64le, length: 8, pack_format: PACK_FORMAT_INT64LE, data: data, stream: stream, offset: offset)
end
# Encode int into one of the formats
def encode_uint8(int); [int].pack(PACK_FORMAT_UINT8); end
def encode_int8(int); [int].pack(PACK_FORMAT_INT8); end
def encode_uint16le(int); [int].pack(PACK_FORMAT_UINT16LE); end
def encode_int16le(int); [int].pack(PACK_FORMAT_INT16LE); end
def encode_int32be(int); [int].pack(PACK_FORMAT_INT32BE); end
def encode_uint32le(int); [int].pack(PACK_FORMAT_UINT32LE); end
def encode_int32le(int); [int].pack(PACK_FORMAT_INT32LE); end
def encode_uint32be(int); [int].pack(PACK_FORMAT_UINT32BE); end # used in BIP32
def encode_uint64le(int); [int].pack(PACK_FORMAT_UINT64LE); end
def encode_int64le(int); [int].pack(PACK_FORMAT_INT64LE); end
protected
def _read_fixint(name: nil, length: nil, pack_format: nil, data: nil, stream: nil, offset: 0)
if data && !stream
if data.bytesize < offset + length
Diagnostics.current.add_message("BTC::WireFormat#read_#{name}: Not enough bytes to read #{name} in binary string.")
return [nil, 0]
end
if offset > 0
pack_format = "@#{offset}" + pack_format
end
return [data.unpack(pack_format).first, offset + length]
elsif stream && !data
if offset > 0
buf = stream.read(offset)
return [nil, 0] if !buf
return [nil, buf.bytesize] if buf.bytesize < offset
end
buf = stream.read(length)
if !buf
Diagnostics.current.add_message("BTC::WireFormat#read_#{name}: Failed to read #{name} from stream.")
return [nil, offset]
end
if buf.bytesize < length
Diagnostics.current.add_message("BTC::WireFormat#read_#{name}: Not enough bytes to read #{name} from stream.")
return [nil, offset + buf.bytesize]
end
return [buf.unpack(pack_format).first, offset + length]
else
raise ArgumentError, "BTC::WireFormat#read_#{name}: Either data or stream must be specified."
end
end
end
|
ideasasylum/tinycert | lib/tinycert/request.rb | Tinycert.Request.build_request | ruby | def build_request
req = Net::HTTP::Post.new(@uri)
req.add_field "Content-Type", "application/x-www-form-urlencoded; charset=utf-8"
req.body = params_string_with_digest
# puts @uri
# puts req.body
req
end | Create Request | train | https://github.com/ideasasylum/tinycert/blob/6176e740e7d14eb3e9468e442d6c3575fb5810dc/lib/tinycert/request.rb#L39-L46 | class Request
attr_reader :params
def initialize api_key, url, params
@api_key = api_key
@uri = URI(url)
@params = prepare_params(params)
# Create client
@client = Net::HTTP.new(@uri.host, @uri.port)
@client.use_ssl = true
@client.verify_mode = OpenSSL::SSL::VERIFY_PEER
end
# Sort the params consistently
def prepare_params p
results = {}
# Build a new hash with string keys
p.each { |k, v| results[k.to_s] = v }
# Sort the top-level keys so the digest is computed over a consistent ordering
results.sort.to_h
end
def digest
OpenSSL::HMAC.hexdigest(OpenSSL::Digest.new('sha256'), @api_key, params_string)
end
def params_string
URI.encode_www_form(@params)
end
# Create Request
def params_string_with_digest
params_string << "&digest=#{digest}"
end
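# Hedged illustration (editor's note; the parameter names and values are made up):
# for params { name: 'test', accountId: 'me@example.com' } the keys are
# stringified and sorted, URL-encoded as "accountId=me%40example.com&name=test",
# HMAC-SHA256-signed with the API key, and the hex digest is appended as
# "&digest=<hex>" to form the request body.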
# Fetch Request
def response
res = @client.request(build_request)
raise Tinycert::Error.new(res) if res.code != '200'
puts res.body
res
end
def results
results = JSON.parse(response.body)
puts results
results
end
end
|
Falkor/falkorlib | lib/falkorlib/git/base.rb | FalkorLib.Git.dirty? | ruby | def dirty?(path = Dir.pwd)
g = MiniGit.new(path)
a = g.capturing.diff :shortstat => true
#ap a
!a.empty?
end | Check if a git directory is in dirty mode
git diff --shortstat 2> /dev/null | tail -n1 | train | https://github.com/Falkor/falkorlib/blob/1a6d732e8fd5550efb7c98a87ee97fcd2e051858/lib/falkorlib/git/base.rb#L261-L266 | module Git
module_function
## Check if a git directory has been initialized
def init?(path = Dir.pwd)
begin
MiniGit.new(path)
rescue Exception
return false
end
true
end
## Check if the repository already holds some commits
def commits?(path)
res = false
Dir.chdir(path) do
_stdout, _stderr, exit_status = Open3.capture3( "git rev-parse HEAD" )
res = (exit_status.to_i.zero?)
end
res
end
## Check the availability of a given git command
def command?(cmd)
cg = MiniGit::Capturing.new
cmd_list = cg.help :a => true
# typical run:
# usage: git [--version] [--help] [-C <path>] [-c name=value]
# [--exec-path[=<path>]] [--html-path] [--man-path] [--info-path]
# [-p|--paginate|--no-pager] [--no-replace-objects] [--bare]
# [--git-dir=<path>] [--work-tree=<path>] [--namespace=<name>]
# <command> [<args>]
#
# available git commands in '/usr/local/Cellar/git/1.8.5.2/libexec/git-core'
#
# add [...] \
# [...] | The part we are interested in, delimited by '\n\n' sequence
# [...] /
#
# 'git help -a' and 'git help -g' lists available subcommands and some
# concept guides. See 'git help <command>' or 'git help <concept>'
# to read about a specific subcommand or concept
l = cmd_list.split("\n\n")
l.shift # useless first part
#ap l
subl = l.each_index.select { |i| l[i] =~ /^\s\s+/ } # find sublines that starts with at least two whitespaces
#ap subl
return false if subl.empty?
subl.any? { |i| l[i].split.include?(cmd) }
end
###
# Initialize a git repository
##
def init(path = Dir.pwd, _options = {})
# FIXME: for travis test: ensure the global git configurations
# 'user.email' and 'user.name' are set
[ 'user.name', 'user.email' ].each do |userconf|
next unless MiniGit[userconf].nil?
warn "The Git global configuration '#{userconf}' is not set so"
warn "you should *seriously* consider setting them by running\n\t git config --global #{userconf} 'your_#{userconf.sub(/\./, '_')}'"
default_val = ENV['USER']
default_val += '@domain.org' if userconf =~ /email/
warn "Now putting a default value '#{default_val}' you could change later on"
run %(
git config --global #{userconf} "#{default_val}"
)
#MiniGit[userconf] = default_val
end
exit_status = 1
Dir.mkdir( path ) unless Dir.exist?( path )
Dir.chdir( path ) do
execute "git init" unless FalkorLib.config.debug
exit_status = $?.to_i
end
# #puts "#init #{path}"
# Dir.chdir( "#{path}" ) do
# %x[ pwd && git init ] unless FalkorLib.config.debug
# end
exit_status
end
# Return the Git working tree from the proposed path (current directory by default)
def rootdir(path = Dir.pwd)
g = MiniGit.new
g.find_git_dir(path)[1]
end
# Return the git root directory for the path (current directory by default)
def gitdir(path = Dir.pwd)
g = MiniGit.new
g.find_git_dir(path)[0]
end
# Create a new branch
def create_branch(branch, path = Dir.pwd)
#ap method(__method__).parameters.map { |arg| arg[1] }
g = MiniGit.new(path)
error "not yet any commit performed -- You shall do one" unless commits?(path)
g.branch branch.to_s
end
# Delete a branch.
def delete_branch(branch, path = Dir.pwd, opts = { :force => false })
g = MiniGit.new(path)
error "'#{branch}' is not a valid existing branch" unless list_branch(path).include?( branch )
g.branch ((opts[:force]) ? :D : :d) => branch.to_s
end
###### config ######
# Retrieve the Git configuration
# You can propose a pattern as key
# Supported options:
# * :list [boolean] list all configurations
# * :hash [boolean] return a Hash
##
def config(key, dir = Dir.pwd, options = {})
#info "Retrieve the Git configuration"
res = nil
if (options[:list] || (key.is_a? Regexp) || (key =~ /\*/))
cg = MiniGit::Capturing.new(dir)
res = (cg.config :list => true).split("\n")
res.select! { |e| e.match(/^#{key}/) } unless key == '*'
#res = res.map { |e| e.split('=') }.to_h if options[:hash]
res = Hash[ res.map { |e| e.split('=') } ] if options[:hash]
else
g = MiniGit.new(dir)
res = g[key]
res = { key => g[key] } if options[:hash]
end
#ap res
res
end
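# Hedged usage sketch (editor's illustration; the returned values are made up):
#   FalkorLib::Git.config('user.*', path, :hash => true)
#   #=> { "user.name" => "Jane Doe", "user.email" => "jane@example.com" }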
## Fetch the latest changes
def fetch(path = Dir.pwd)
Dir.chdir( path ) do
execute "git fetch --all -v"
end
end
## Get an array of the local branches present (first element is always the
## current branch)
def list_branch(path = Dir.pwd)
cg = MiniGit::Capturing.new(path)
res = cg.branch :a => true
res = res.split("\n")
# If needed, reorder to make the first element of the array the current branch
i = res.find_index { |e| e =~ /^\*\s/ }
res[0], res[i] = res[i], res[0] unless (i.nil? || i.zero?)
res.each { |e| e.sub!(/^\*?\s+/, '') }
res
end
## Get the current git branch
def branch?(path = Dir.pwd)
list_branch(path)[0]
end
## Grab a remote branch
def grab(branch, path = Dir.pwd, remote = 'origin')
exit_status = 1
error "no branch provided" if branch.nil?
#remotes = FalkorLib::Git.remotes(path)
branches = FalkorLib::Git.list_branch(path)
if branches.include? "remotes/#{remote}/#{branch}"
info "Grab the branch '#{remote}/#{branch}'"
exit_status = execute_in_dir(FalkorLib::Git.rootdir( path ), "git branch --track #{branch} #{remote}/#{branch}")
else
warning "the remote branch '#{remote}/#{branch}' cannot be found"
end
exit_status
end
## Publish a branch on the remote
def publish(branch, path = Dir.pwd, remote = 'origin')
exit_status = 1
error "no branch provided" if branch.nil?
#remotes = FalkorLib::Git.remotes(path)
branches = FalkorLib::Git.list_branch(path)
Dir.chdir(FalkorLib::Git.rootdir( path ) ) do
if branches.include? "remotes/#{remote}/#{branch}"
warning "the remote branch '#{remote}/#{branch}' already exists"
else
info "Publish the branch '#{branch}' on the remote '#{remote}'"
exit_status = run %(
git push #{remote} #{branch}:refs/heads/#{branch}
git fetch #{remote}
git branch -u #{remote}/#{branch} #{branch}
)
end
end
exit_status
end
## List the files currently under version control
def list_files(path = Dir.pwd)
g = MiniGit.new(path)
g.capturing.ls_files.split
end
## Add a file/whatever to Git and commit it
# Supported options:
# * :force [boolean]: force the add
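# Illustrative usage only (not part of the original source; the file name and
# commit message are hypothetical):
#   FalkorLib::Git.add('README.md', 'add README', { :force => true })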
def add(path, msg = "", options = {})
exit_status = 0
dir = File.realpath(File.dirname(path))
root = rootdir(path)
relative_path_to_root = Pathname.new( File.realpath(path) ).relative_path_from Pathname.new(root)
real_msg = ((msg.empty?) ? "add '#{relative_path_to_root}'" : msg)
opts = '-f' if options[:force]
Dir.chdir( dir ) do
exit_status = run %(
git add #{opts} #{path}
git commit -s -m "#{real_msg}" #{path}
)
end
exit_status.to_i
end
## Check if a git directory is in dirty mode
# git diff --shortstat 2> /dev/null | tail -n1
## Get a hash table of tags under the format
# { <tag> => <commit> }
def list_tag(path = Dir.pwd)
res = {}
cg = MiniGit::Capturing.new(path)
unless (cg.tag :list => true).empty?
# git show-ref --tags
a = (cg.show_ref :tags => true).split("\n")
res = Hash[ a.collect { |item| item.split(' refs/tags/') } ].invert
end
res
end # list_tag
## Get the last tag commit, or an empty string if no tag can be found
def last_tag_commit(path = Dir.pwd)
res = ""
g = MiniGit.new(path)
unless (g.capturing.tag :list => true).empty?
# git rev-list --tags --max-count=1
res = (g.capturing.rev_list :tags => true, :max_count => 1).chomp
end
res
end # last_tag_commit
## Create a new tag
# You can add extra options to the git tag command through the opts hash.
# Ex:
# FalkorLib::Git.tag('name', dir, { :delete => true } )
#
def tag(name, path = Dir.pwd, opts = {})
g = MiniGit.new(path)
g.tag opts, name
end # tag
## List of Git remotes
def remotes(path = Dir.pwd)
g = MiniGit.new(path)
g.capturing.remote.split
end
## Check existence of remotes
def remotes?(path = Dir.pwd)
!remotes(path).empty?
end
# Create a new remote <name> targeting url <url>
# You can pass additional options expected by git remote add in <opts>,
# for instance as follows:
#
# create_remote('origin', url, dir, { :fetch => true })
#
def create_remote(name, url, path = Dir.pwd, opts = {})
g = MiniGit.new(path)
g.remote :add, opts, name, url.to_s
end
# Delete a branch.
# def delete_branch(branch, path = Dir.pwd, opts = { :force => false })
# g = MiniGit.new(path)
# error "'#{branch}' is not a valid existing branch" unless list_branch(path).include?( branch )
# g.branch (opts[:force] ? :D : :d) => "#{branch}"
# end
###
# Initialize git submodule from the configuration
##
def submodule_init(path = Dir.pwd, submodules = FalkorLib.config.git[:submodules], _options = {})
exit_status = 1
git_root_dir = rootdir(path)
if File.exist?("#{git_root_dir}/.gitmodules")
unless submodules.empty?
# TODO: Check if it contains all submodules of the configuration
end
end
#ap FalkorLib.config.git
Dir.chdir(git_root_dir) do
exit_status = FalkorLib::Git.submodule_update( git_root_dir )
submodules.each do |subdir, conf|
next if conf[:url].nil?
url = conf[:url]
dir = "#{FalkorLib.config.git[:submodulesdir]}/#{subdir}"
branch = (conf[:branch].nil?) ? 'master' : conf[:branch]
if File.directory?( dir )
puts " ... the git submodule '#{subdir}' is already setup."
else
info "adding Git submodule '#{dir}' from '#{url}'"
exit_status = run %(
git submodule add -b #{branch} #{url} #{dir}
git commit -s -m "Add Git submodule '#{dir}' from '#{url}'" .gitmodules #{dir}
)
end
end
end
exit_status
end
## Update the Git submodules to the **local** registered version
def submodule_update(path = Dir.pwd)
execute_in_dir(rootdir(path),
%(
git submodule init
git submodule update
))
end
## Upgrade the Git submodules to the latest HEAD version from the remote
def submodule_upgrade(path = Dir.pwd)
execute_in_dir(rootdir(path),
%{
git submodule foreach 'git fetch origin; git checkout $(git rev-parse --abbrev-ref HEAD); git reset --hard origin/$(git rev-parse --abbrev-ref HEAD); git submodule update --recursive; git clean -dfx'
})
end
## Initialize git subtrees from the configuration
def subtree_init(path = Dir.pwd)
raise ArgumentError, "Git 'subtree' command is not available" unless FalkorLib::Git.command? "subtree"
if FalkorLib.config.git[:subtrees].empty?
FalkorLib::Git.config_warn(:subtrees)
return 1
end
exit_status = 0
git_root_dir = rootdir(path)
Dir.chdir(git_root_dir) do
FalkorLib.config.git[:subtrees].each do |dir, conf|
next if conf[:url].nil?
url = conf[:url]
remote = dir.gsub(/\//, '-')
branch = (conf[:branch].nil?) ? 'master' : conf[:branch]
remotes = FalkorLib::Git.remotes
unless remotes.include?( remote )
info "Initialize Git remote '#{remote}' from URL '#{url}'"
exit_status = execute "git remote add --no-tags -f #{remote} #{url}"
end
unless File.directory?( File.join(git_root_dir, dir) )
info "initialize Git subtree '#{dir}'"
exit_status = execute "git subtree add --prefix #{dir} --squash #{remote}/#{branch}"
end
end
end
exit_status
end
## Check if the subtrees have been initialized.
## Actually based on a naive check of sub-directory existence
def subtree_init?(path = Dir.pwd)
res = true
FalkorLib.config.git[:subtrees].keys.each do |dir|
res &&= File.directory?(File.join(path, dir))
end
res
end # subtree_init?
## Show the difference between local subtree(s) and their remotes
def subtree_diff(path = Dir.pwd)
raise ArgumentError, "Git 'subtree' command is not available" unless FalkorLib::Git.command? "subtree"
if FalkorLib.config.git[:subtrees].empty?
FalkorLib::Git.config_warn(:subtrees)
return 1
end
exit_status = 0
git_root_dir = rootdir(path)
Dir.chdir(git_root_dir) do
FalkorLib.config.git[:subtrees].each do |dir, conf|
next if conf[:url].nil?
#url = conf[:url]
remote = dir.gsub(/\//, '-')
branch = (conf[:branch].nil?) ? 'master' : conf[:branch]
remotes = FalkorLib::Git.remotes
raise IOError, "The git remote '#{remote}' is not configured" unless remotes.include?( remote )
raise IOError, "The git subtree directory '#{dir}' does not exists" unless File.directory?( File.join(git_root_dir, dir) )
info "Git diff on subtree '#{dir}' with remote '#{remote}/#{branch}'"
exit_status = execute "git diff #{remote}/#{branch} #{FalkorLib::Git.branch?( git_root_dir )}:#{dir}"
end
end
exit_status
end
# Pull the latest changes, assuming the git repository is not dirty
def subtree_up(path = Dir.pwd)
error "Unable to pull subtree(s): Dirty Git repository" if FalkorLib::Git.dirty?( path )
exit_status = 0
git_root_dir = rootdir(path)
Dir.chdir(git_root_dir) do
FalkorLib.config.git[:subtrees].each do |dir, conf|
next if conf[:url].nil?
#url = conf[:url]
remote = dir.gsub(/\//, '-')
branch = (conf[:branch].nil?) ? 'master' : conf[:branch]
remotes = FalkorLib::Git.remotes
info "Pulling changes into subtree '#{dir}' using remote '#{remote}/#{branch}'"
raise IOError, "The git remote '#{remote}' is not configured" unless remotes.include?( remote )
info "\t\\__ fetching remote '#{remotes.join(',')}'"
FalkorLib::Git.fetch( git_root_dir )
raise IOError, "The git subtree directory '#{dir}' does not exists" unless File.directory?( File.join(git_root_dir, dir) )
info "\t\\__ pulling changes"
exit_status = execute "git subtree pull --prefix #{dir} --squash #{remote} #{branch}"
#exit_status = puts "git subtree pull --prefix #{dir} --squash #{remote} #{branch}"
end
end
exit_status
end
alias_method :subtree_pull, :subtree_up
# Raise a warning message if subtree/submodule section is not present
def config_warn(type = :subtrees)
warn "You shall setup 'Falkorlib.config.git[#{type.to_sym}]' to configure #{type} as follows:"
warn " FalkorLib.config.git do |c|"
warn " c[#{type.to_sym}] = {"
warn " '<subdir>' => {"
warn " :url => '<giturl>',"
warn " :branch => 'develop' # if different from master"
warn " },"
warn " }"
warn " end"
if type == :submodules
warn "This will configure the Git submodule into FalkorLib.config.git.submodulesdir"
warn "i.e. '#{FalkorLib.config.git[:submodulesdir]}'" if FalkorLib.config.git[:submodulesdir]
end
end
end # module FalkorLib::Git
|
visoft/ruby_odata | lib/ruby_odata/service.rb | OData.Service.entry_to_class | ruby | def entry_to_class(entry)
# Retrieve the class name from the fully qualified name (the last string after the last dot)
klass_name = entry.xpath("./atom:category/@term", @ds_namespaces).to_s.split('.')[-1]
# Is the category missing? See if there is a title that we can use to build the class
if klass_name.nil?
title = entry.xpath("./atom:title", @ds_namespaces).first
return nil if title.nil?
klass_name = title.content.to_s
end
return nil if klass_name.nil?
properties = entry.xpath("./atom:content/m:properties/*", @ds_namespaces)
klass = @classes[qualify_class_name(klass_name)].new
# Fill metadata
meta_id = entry.xpath("./atom:id", @ds_namespaces)[0].content
klass.send :__metadata=, { :uri => meta_id }
# Fill properties
for prop in properties
prop_name = prop.name
klass.send "#{prop_name}=", parse_value_xml(prop)
end
# Fill properties represented outside of the properties collection
@class_metadata[qualify_class_name(klass_name)].select { |k,v| v.fc_keep_in_content == false }.each do |k, meta|
if meta.fc_target_path == "SyndicationTitle"
title = entry.xpath("./atom:title", @ds_namespaces).first
klass.send "#{meta.name}=", title.content
elsif meta.fc_target_path == "SyndicationSummary"
summary = entry.xpath("./atom:summary", @ds_namespaces).first
klass.send "#{meta.name}=", summary.content
end
end
inline_links = entry.xpath("./atom:link[m:inline]", @ds_namespaces)
for link in inline_links
# TODO: Use the metadata's associations to determine the multiplicity instead of this "hack"
property_name = link.attributes['title'].to_s
if singular?(property_name)
inline_entry = link.xpath("./m:inline/atom:entry", @ds_namespaces).first
inline_klass = build_inline_class(klass, inline_entry, property_name)
klass.send "#{property_name}=", inline_klass
else
inline_classes, inline_entries = [], link.xpath("./m:inline/atom:feed/atom:entry", @ds_namespaces)
for inline_entry in inline_entries
# Build the class
inline_klass = entry_to_class(inline_entry)
# Add the property to the temp collection
inline_classes << inline_klass
end
# Assign the array of classes to the property
property_name = link.xpath("@title", @ds_namespaces)
klass.send "#{property_name}=", inline_classes
end
end
klass
end | Converts an XML Entry into a class | train | https://github.com/visoft/ruby_odata/blob/ca3d441494aa2f745c7f7fb2cd90173956f73663/lib/ruby_odata/service.rb#L433-L497 | class Service
attr_reader :classes, :class_metadata, :options, :collections, :edmx, :function_imports, :response
# Creates a new instance of the Service class
#
# @param [String] service_uri the root URI of the OData service
# @param [Hash] options the options to pass to the service
# @option options [String] :username for http basic auth
# @option options [String] :password for http basic auth
# @option options [Object] :verify_ssl false if no verification, otherwise mode (OpenSSL::SSL::VERIFY_PEER is default)
# @option options [Hash] :rest_options a hash of rest-client options that will be passed to all OData::Resource.new calls
# @option options [Hash] :additional_params a hash of query string params that will be passed on all calls
# @option options [Boolean, true] :eager_partial true if queries should consume partial feeds until the feed is complete, false if explicit calls to next must be performed
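# A minimal, illustrative example (not part of the original source; the
# service URI and credentials are hypothetical):
#   svc = OData::Service.new "http://example.com/test.svc",
#     { :username => "user", :password => "secret", :verify_ssl => false }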
def initialize(service_uri, options = {})
@uri = service_uri.gsub!(/\/?$/, '')
set_options! options
default_instance_vars!
set_namespaces
build_collections_and_classes
end
# Handles the dynamic `AddTo<EntityName>` methods as well as the collections on the service
def method_missing(name, *args)
# Queries
if @collections.include?(name.to_s)
@query = build_collection_query_object(name,@additional_params, *args)
return @query
# Adds
elsif name.to_s =~ /^AddTo(.*)/
type = $1
if @collections.include?(type)
@save_operations << Operation.new("Add", $1, args[0])
else
super
end
elsif @function_imports.include?(name.to_s)
execute_import_function(name.to_s, args)
else
super
end
end
# Queues an object for deletion. To actually remove it from the server, you must call save_changes as well.
#
# @param [Object] obj the object to mark for deletion
#
# @raise [NotSupportedError] if the `obj` isn't a tracked entity
def delete_object(obj)
type = obj.class.to_s
if obj.respond_to?(:__metadata) && !obj.send(:__metadata).nil?
@save_operations << Operation.new("Delete", type, obj)
else
raise OData::NotSupportedError.new "You cannot delete a non-tracked entity"
end
end
# Queues an object for update. To actually update it on the server, you must call save_changes as well.
#
# @param [Object] obj the object to queue for update
#
# @raise [NotSupportedError] if the `obj` isn't a tracked entity
def update_object(obj)
type = obj.class.to_s
if obj.respond_to?(:__metadata) && !obj.send(:__metadata).nil?
@save_operations << Operation.new("Update", type, obj)
else
raise OData::NotSupportedError.new "You cannot update a non-tracked entity"
end
end
# Performs save operations (Create/Update/Delete) against the server
def save_changes
return nil if @save_operations.empty?
result = nil
begin
if @save_operations.length == 1
result = single_save(@save_operations[0])
else
result = batch_save(@save_operations)
end
# TODO: We should probably perform a check here
# to make sure everything worked before clearing it out
@save_operations.clear
return result
rescue Exception => e
handle_exception(e)
end
end
# Performs query operations (Read) against the server.
# Typically this returns an array of record instances, except in the case of count queries
# @raise [ServiceError] if there is an error when talking to the service
def execute
begin
@response = OData::Resource.new(build_query_uri, @rest_options).get
rescue Exception => e
handle_exception(e)
end
return Integer(@response.body) if @response.body =~ /\A\d+\z/
handle_collection_result(@response.body)
end
# Overridden to identify methods handled by method_missing
def respond_to?(method)
if @collections.include?(method.to_s)
return true
# Adds
elsif method.to_s =~ /^AddTo(.*)/
type = $1
if @collections.include?(type)
return true
else
super
end
# Function Imports
elsif @function_imports.include?(method.to_s)
return true
else
super
end
end
# Retrieves the next resultset of a partial result (if any). Does not honor the `:eager_partial` option.
def next
return if not partial?
handle_partial
end
# Does the most recent collection returned represent a partial collection? Will always be false if a query hasn't executed, even if the query would have a partial
def partial?
@has_partial
end
# Lazy loads a navigation property on a model
#
# @param [Object] obj the object to fill
# @param [String] nav_prop the navigation property to fill
#
# @raise [NotSupportedError] if the `obj` isn't a tracked entity
# @raise [ArgumentError] if the `nav_prop` isn't a valid navigation property
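# Illustrative usage only (not part of the original source; the collection and
# navigation property names are hypothetical):
#   svc.Products(1)
#   product = svc.execute.first
#   svc.load_property(product, "Category")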
def load_property(obj, nav_prop)
raise NotSupportedError, "You cannot load a property on an entity that isn't tracked" if obj.send(:__metadata).nil?
raise ArgumentError, "'#{nav_prop}' is not a valid navigation property" unless obj.respond_to?(nav_prop.to_sym)
raise ArgumentError, "'#{nav_prop}' is not a valid navigation property" unless @class_metadata[obj.class.to_s][nav_prop].nav_prop
results = OData::Resource.new(build_load_property_uri(obj, nav_prop), @rest_options).get
prop_results = build_classes_from_result(results.body)
obj.send "#{nav_prop}=", (singular?(nav_prop) ? prop_results.first : prop_results)
end
# Adds a child object to a parent object's collection
#
# @param [Object] parent the parent object
# @param [String] nav_prop the name of the navigation property to add the child to
# @param [Object] child the child object
# @raise [NotSupportedError] if the `parent` isn't a tracked entity
# @raise [ArgumentError] if the `nav_prop` isn't a valid navigation property
# @raise [NotSupportedError] if the `child` isn't a tracked entity
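# Illustrative usage only (not part of the original source; both entities are
# hypothetical and must already be tracked):
#   svc.add_link(category, 'Products', product)
#   svc.save_changes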
def add_link(parent, nav_prop, child)
raise NotSupportedError, "You cannot add a link on an entity that isn't tracked (#{parent.class})" if parent.send(:__metadata).nil?
raise ArgumentError, "'#{nav_prop}' is not a valid navigation property for #{parent.class}" unless parent.respond_to?(nav_prop.to_sym)
raise ArgumentError, "'#{nav_prop}' is not a valid navigation property for #{parent.class}" unless @class_metadata[parent.class.to_s][nav_prop].nav_prop
raise NotSupportedError, "You cannot add a link on a child entity that isn't tracked (#{child.class})" if child.send(:__metadata).nil?
@save_operations << Operation.new("AddLink", nav_prop, parent, child)
end
private
# Constructs a QueryBuilder instance for a collection using the arguments provided.
#
# @param [String] name the name of the collection
# @param [Hash] additional_parameters the additional parameters
# @param [Array] args the arguments to use for query
def build_collection_query_object(name, additional_parameters, *args)
root = "/#{name.to_s}"
if args.empty?
#nothing to add
elsif args.size == 1
if args.first.to_s =~ /\d+/
id_metadata = find_id_metadata(name.to_s)
root << build_id_path(args.first, id_metadata)
else
root << "(#{args.first})"
end
else
root << "(#{args.join(',')})"
end
QueryBuilder.new(root, additional_parameters)
end
# Finds the metadata associated with the given collection's first id property
# Remarks: This is used for single item lookup queries using the ID, e.g. Products(1), not complex primary keys
#
# @param [String] collection_name the name of the collection
def find_id_metadata(collection_name)
collection_data = @collections.fetch(collection_name)
class_metadata = @class_metadata.fetch(collection_data[:type].to_s)
key = class_metadata.select{|k,h| h.is_key }.collect{|k,h| h.name }[0]
class_metadata[key]
end
# Builds the ID expression of a given id for query
#
# @param [Object] id_value the actual value to be used
# @param [PropertyMetadata] id_metadata the property metadata object for the id
def build_id_path(id_value, id_metadata)
if id_metadata.type == "Edm.Int64"
"(#{id_value}L)"
else
"(#{id_value})"
end
end
def set_options!(options)
@options = options
if @options[:eager_partial].nil?
@options[:eager_partial] = true
end
@rest_options = { :verify_ssl => get_verify_mode, :user => @options[:username], :password => @options[:password] }
@rest_options.merge!(options[:rest_options] || {})
@additional_params = options[:additional_params] || {}
@namespace = options[:namespace]
@json_type = options[:json_type] || 'application/json'
end
def default_instance_vars!
@collections = {}
@function_imports = {}
@save_operations = []
@has_partial = false
@next_uri = nil
end
def set_namespaces
@edmx = Nokogiri::XML(OData::Resource.new(build_metadata_uri, @rest_options).get.body)
@ds_namespaces = {
"m" => "http://schemas.microsoft.com/ado/2007/08/dataservices/metadata",
"edmx" => "http://schemas.microsoft.com/ado/2007/06/edmx",
"ds" => "http://schemas.microsoft.com/ado/2007/08/dataservices",
"atom" => "http://www.w3.org/2005/Atom"
}
# Get the edm namespace from the edmx
edm_ns = @edmx.xpath("edmx:Edmx/edmx:DataServices/*", @namespaces).first.namespaces['xmlns'].to_s
@ds_namespaces.merge! "edm" => edm_ns
end
# Gets ssl certificate verification mode, or defaults to verify_peer
def get_verify_mode
if @options[:verify_ssl].nil?
return OpenSSL::SSL::VERIFY_PEER
else
return @options[:verify_ssl]
end
end
# Build the classes required by the metadata
def build_collections_and_classes
@classes = Hash.new
@class_metadata = Hash.new # This is used to store property information about a class
# Build complex types first, these will be used for entities
complex_types = @edmx.xpath("//edm:ComplexType", @ds_namespaces) || []
complex_types.each do |c|
name = qualify_class_name(c['Name'])
props = c.xpath(".//edm:Property", @ds_namespaces)
methods = props.collect { |p| p['Name'] } # Standard Properties
@classes[name] = ClassBuilder.new(name, methods, [], self, @namespace).build unless @classes.keys.include?(name)
end
entity_types = @edmx.xpath("//edm:EntityType", @ds_namespaces)
entity_types.each do |e|
next if e['Abstract'] == "true"
klass_name = qualify_class_name(e['Name'])
methods = collect_properties(klass_name, e, @edmx)
nav_props = collect_navigation_properties(klass_name, e, @edmx)
@classes[klass_name] = ClassBuilder.new(klass_name, methods, nav_props, self, @namespace).build unless @classes.keys.include?(klass_name)
end
# Fill in the collections instance variable
collections = @edmx.xpath("//edm:EntityContainer/edm:EntitySet", @ds_namespaces)
collections.each do |c|
entity_type = c["EntityType"]
@collections[c["Name"]] = { :edmx_type => entity_type, :type => convert_to_local_type(entity_type) }
end
build_function_imports
end
# Parses the function imports and fills the @function_imports collection
def build_function_imports
# Fill in the function imports
functions = @edmx.xpath("//edm:EntityContainer/edm:FunctionImport", @ds_namespaces)
functions.each do |f|
http_method_attribute = f.xpath("@m:HttpMethod", @ds_namespaces).first # HttpMethod is no longer required http://www.odata.org/2011/10/actions-in-odata/
is_side_effecting_attribute = f.xpath("@edm:IsSideEffecting", @ds_namespaces).first
http_method = 'POST' # default to POST
if http_method_attribute
http_method = http_method_attribute.content
elsif is_side_effecting_attribute
is_side_effecting = is_side_effecting_attribute.content
http_method = is_side_effecting ? 'POST' : 'GET'
end
return_type = f["ReturnType"]
inner_return_type = nil
unless return_type.nil?
return_type = (return_type =~ /^Collection/) ? Array : convert_to_local_type(return_type)
if f["ReturnType"] =~ /\((.*)\)/
inner_return_type = convert_to_local_type($~[1])
end
end
params = f.xpath("edm:Parameter", @ds_namespaces)
parameters = nil
if params.length > 0
parameters = {}
params.each do |p|
parameters[p["Name"]] = p["Type"]
end
end
@function_imports[f["Name"]] = {
:http_method => http_method,
:return_type => return_type,
:inner_return_type => inner_return_type,
:parameters => parameters }
end
end
# Converts the EDMX model type to the local model type
def convert_to_local_type(edmx_type)
return edm_to_ruby_type(edmx_type) if edmx_type =~ /^Edm/
klass_name = qualify_class_name(edmx_type.split('.').last)
klass_name.camelize.constantize
end
# Converts a class name to its fully qualified name (if applicable) and returns the new name
def qualify_class_name(klass_name)
unless @namespace.nil? || @namespace.blank? || klass_name.include?('::')
namespaces = @namespace.split(/\.|::/)
namespaces << klass_name
klass_name = namespaces.join '::'
end
klass_name.camelize
end
# Builds the metadata needed for each property for things like feed customizations and navigation properties
def build_property_metadata(props, keys=[])
metadata = {}
props.each do |property_element|
prop_meta = PropertyMetadata.new(property_element)
prop_meta.is_key = keys.include?(prop_meta.name)
# If this is a navigation property, we need to add the association to the property metadata
prop_meta.association = Association.new(property_element, @edmx) if prop_meta.nav_prop
metadata[prop_meta.name] = prop_meta
end
metadata
end
# Handle parsing of OData Atom result and return an array of Entry classes
def handle_collection_result(result)
results = build_classes_from_result(result)
while partial? && @options[:eager_partial]
results.concat handle_partial
end
results
end
# Handles errors from the OData service
def handle_exception(e)
raise e unless defined?(e.response) && e.response != nil
code = e.response[:status]
error = Nokogiri::XML(e.response[:body])
message = if error.xpath("m:error/m:message", @ds_namespaces).first
error.xpath("m:error/m:message", @ds_namespaces).first.content
else
"Server returned error but no message."
end
raise ServiceError.new(code), message
end
# Loops through the standard properties (non-navigation) for a given class and returns the appropriate list of methods
def collect_properties(klass_name, element, doc)
props = element.xpath(".//edm:Property", @ds_namespaces)
key_elements = element.xpath(".//edm:Key//edm:PropertyRef", @ds_namespaces)
keys = key_elements.collect { |k| k['Name'] }
@class_metadata[klass_name] = build_property_metadata(props, keys)
methods = props.collect { |p| p['Name'] }
unless element["BaseType"].nil?
base = element["BaseType"].split(".").last()
baseType = doc.xpath("//edm:EntityType[@Name=\"#{base}\"]", @ds_namespaces).first()
props = baseType.xpath(".//edm:Property", @ds_namespaces)
@class_metadata[klass_name].merge!(build_property_metadata(props))
methods = methods.concat(props.collect { |p| p['Name']})
end
methods
end
# Similar to +collect_properties+, but handles the navigation properties
def collect_navigation_properties(klass_name, element, doc)
nav_props = element.xpath(".//edm:NavigationProperty", @ds_namespaces)
@class_metadata[klass_name].merge!(build_property_metadata(nav_props))
nav_props.collect { |p| p['Name'] }
end
# Helper to loop through a result and create an instance for each entity in the results
def build_classes_from_result(result)
doc = Nokogiri::XML(result)
is_links = doc.at_xpath("/ds:links", @ds_namespaces)
return parse_link_results(doc) if is_links
entries = doc.xpath("//atom:entry[not(ancestor::atom:entry)]", @ds_namespaces)
extract_partial(doc)
results = []
entries.each do |entry|
results << entry_to_class(entry)
end
return results
end
# Converts an XML Entry into a class
# Tests for and extracts the next href of a partial
def extract_partial(doc)
next_links = doc.xpath('//atom:link[@rel="next"]', @ds_namespaces)
@has_partial = next_links.any?
if @has_partial
uri = Addressable::URI.parse(next_links[0]['href'])
uri.query_values = uri.query_values.merge @additional_params unless @additional_params.empty?
@next_uri = uri.to_s
end
end
def handle_partial
if @next_uri
result = OData::Resource.new(@next_uri, @rest_options).get
results = handle_collection_result(result.body)
end
results
end
# Handle link results
def parse_link_results(doc)
uris = doc.xpath("/ds:links/ds:uri", @ds_namespaces)
results = []
uris.each do |uri_el|
link = uri_el.content
results << URI.parse(link)
end
results
end
# Build URIs
def build_metadata_uri
uri = "#{@uri}/$metadata"
uri << "?#{@additional_params.to_query}" unless @additional_params.empty?
uri
end
def build_query_uri
"#{@uri}#{@query.query}"
end
def build_save_uri(operation)
uri = "#{@uri}/#{operation.klass_name}"
uri << "?#{@additional_params.to_query}" unless @additional_params.empty?
uri
end
def build_add_link_uri(operation)
uri = operation.klass.send(:__metadata)[:uri].dup
uri << "/$links/#{operation.klass_name}"
uri << "?#{@additional_params.to_query}" unless @additional_params.empty?
uri
end
def build_resource_uri(operation)
uri = operation.klass.send(:__metadata)[:uri].dup
uri << "?#{@additional_params.to_query}" unless @additional_params.empty?
uri
end
def build_batch_uri
uri = "#{@uri}/$batch"
uri << "?#{@additional_params.to_query}" unless @additional_params.empty?
uri
end
def build_load_property_uri(obj, property)
uri = obj.__metadata[:uri].dup
uri << "/#{property}"
uri
end
def build_function_import_uri(name, params)
uri = "#{@uri}/#{name}"
params.merge! @additional_params
uri << "?#{params.to_query}" unless params.empty?
uri
end
def build_inline_class(klass, entry, property_name)
# Build the class
inline_klass = entry_to_class(entry)
# Add the property
klass.send "#{property_name}=", inline_klass
end
# Used to link a child object to its parent and vice-versa after an add_link operation
def link_child_to_parent(operation)
child_collection = operation.klass.send("#{operation.klass_name}") || []
child_collection << operation.child_klass
operation.klass.send("#{operation.klass_name}=", child_collection)
# Attach the parent to the child
parent_meta = @class_metadata[operation.klass.class.to_s][operation.klass_name]
child_meta = @class_metadata[operation.child_klass.class.to_s]
# Find the matching relationship on the child object
child_properties = Helpers.normalize_to_hash(
child_meta.select { |k, prop|
prop.nav_prop &&
prop.association.relationship == parent_meta.association.relationship })
child_property_to_set = child_properties.keys.first # There should be only one match
# TODO: Handle many to many scenarios where the child property is an enumerable
operation.child_klass.send("#{child_property_to_set}=", operation.klass)
end
def single_save(operation)
if operation.kind == "Add"
save_uri = build_save_uri(operation)
json_klass = operation.klass.to_json(:type => :add)
post_result = OData::Resource.new(save_uri, @rest_options).post json_klass, {:content_type => @json_type}
return build_classes_from_result(post_result.body)
elsif operation.kind == "Update"
update_uri = build_resource_uri(operation)
json_klass = operation.klass.to_json
update_result = OData::Resource.new(update_uri, @rest_options).put json_klass, {:content_type => @json_type}
return (update_result.status == 204)
elsif operation.kind == "Delete"
delete_uri = build_resource_uri(operation)
delete_result = OData::Resource.new(delete_uri, @rest_options).delete
return (delete_result.status == 204)
elsif operation.kind == "AddLink"
save_uri = build_add_link_uri(operation)
json_klass = operation.child_klass.to_json(:type => :link)
post_result = OData::Resource.new(save_uri, @rest_options).post json_klass, {:content_type => @json_type}
# Attach the child to the parent
link_child_to_parent(operation) if (post_result.status == 204)
return(post_result.status == 204)
end
end
# Batch Saves
def generate_guid
rand(36**12).to_s(36).insert(4, "-").insert(9, "-")
end
def batch_save(operations)
batch_num = generate_guid
changeset_num = generate_guid
batch_uri = build_batch_uri
body = build_batch_body(operations, batch_num, changeset_num)
result = OData::Resource.new( batch_uri, @rest_options).post body, {:content_type => "multipart/mixed; boundary=batch_#{batch_num}"}
# TODO: More result validation needs to be done.
# The result returns HTTP 202 even if there is an error in the batch
return (result.status == 202)
end
def build_batch_body(operations, batch_num, changeset_num)
# Header
body = "--batch_#{batch_num}\n"
body << "Content-Type: multipart/mixed;boundary=changeset_#{changeset_num}\n\n"
# Operations
operations.each do |operation|
body << build_batch_operation(operation, changeset_num)
body << "\n"
end
# Footer
body << "\n\n--changeset_#{changeset_num}--\n"
body << "--batch_#{batch_num}--"
return body
end
def build_batch_operation(operation, changeset_num)
accept_headers = "Accept-Charset: utf-8\n"
accept_headers << "Content-Type: application/json;charset=utf-8\n" unless operation.kind == "Delete"
accept_headers << "\n"
content = "--changeset_#{changeset_num}\n"
content << "Content-Type: application/http\n"
content << "Content-Transfer-Encoding: binary\n\n"
if operation.kind == "Add"
save_uri = "#{@uri}/#{operation.klass_name}"
json_klass = operation.klass.to_json(:type => :add)
content << "POST #{save_uri} HTTP/1.1\n"
content << accept_headers
content << json_klass
elsif operation.kind == "Update"
update_uri = operation.klass.send(:__metadata)[:uri]
json_klass = operation.klass.to_json
content << "PUT #{update_uri} HTTP/1.1\n"
content << accept_headers
content << json_klass
elsif operation.kind == "Delete"
delete_uri = operation.klass.send(:__metadata)[:uri]
content << "DELETE #{delete_uri} HTTP/1.1\n"
content << accept_headers
elsif operation.kind == "AddLink"
save_uri = build_add_link_uri(operation)
json_klass = operation.child_klass.to_json(:type => :link)
content << "POST #{save_uri} HTTP/1.1\n"
content << accept_headers
content << json_klass
link_child_to_parent(operation)
end
return content
end
# Complex Types
def complex_type_to_class(complex_type_xml)
type = Helpers.get_namespaced_attribute(complex_type_xml, 'type', 'm')
is_collection = false
# Extract the class name in case this is a Collection
if type =~ /\(([^)]*)\)/m
type = $~[1]
is_collection = true
collection = []
end
klass_name = qualify_class_name(type.split('.')[-1])
if is_collection
# extract the elements from the collection
elements = complex_type_xml.xpath(".//d:element", @namespaces)
elements.each do |e|
if type.match(/^Edm/)
collection << parse_value(e.content, type)
else
element = @classes[klass_name].new
fill_complex_type_properties(e, element)
collection << element
end
end
return collection
else
klass = @classes[klass_name].new
# Fill in the properties
fill_complex_type_properties(complex_type_xml, klass)
return klass
end
end
# Helper method for complex_type_to_class
def fill_complex_type_properties(complex_type_xml, klass)
properties = complex_type_xml.xpath(".//*")
properties.each do |prop|
klass.send "#{prop.name}=", parse_value_xml(prop)
end
end
# Field Converters
# Handles parsing datetimes from a string
def parse_date(sdate)
# Assume this is UTC if no timezone is specified
sdate = sdate + "Z" unless sdate.match(/Z|([+|-]\d{2}:\d{2})$/)
# This is to handle older versions of Ruby (e.g. ruby 1.8.7 (2010-12-23 patchlevel 330) [i386-mingw32])
# See http://makandra.com/notes/1017-maximum-representable-value-for-a-ruby-time-object
# In recent versions of Ruby, Time has a much larger range
begin
result = Time.parse(sdate)
rescue ArgumentError
result = DateTime.parse(sdate)
end
return result
end
# Parses a value into the proper type based on an xml property element
def parse_value_xml(property_xml)
property_type = Helpers.get_namespaced_attribute(property_xml, 'type', 'm')
property_null = Helpers.get_namespaced_attribute(property_xml, 'null', 'm')
if property_type.nil? || (property_type && property_type.match(/^Edm/))
return parse_value(property_xml.content, property_type, property_null)
end
complex_type_to_class(property_xml)
end
def parse_value(content, property_type = nil, property_null = nil)
# Handle anything marked as null
return nil if !property_null.nil? && property_null == "true"
# Handle a nil property type, this is a string
return content if property_type.nil?
# Handle integers
return content.to_i if property_type.match(/^Edm.Int/)
# Handle decimals
return content.to_d if property_type.match(/Edm.Decimal/)
# Handle DateTimes
# return Time.parse(property_xml.content) if property_type.match(/Edm.DateTime/)
return parse_date(content) if property_type.match(/Edm.DateTime/)
# If we can't parse the value, just return the element's content
content
end
# Parses a value into the proper type based on a specified return type
def parse_primative_type(value, return_type)
return value.to_i if return_type == Fixnum
return value.to_d if return_type == Float
return parse_date(value.to_s) if return_type == Time
return value.to_s
end
# Converts an edm type (string) to a ruby type
def edm_to_ruby_type(edm_type)
return String if edm_type =~ /Edm.String/
return Fixnum if edm_type =~ /^Edm.Int/
return Float if edm_type =~ /Edm.Decimal/
return Time if edm_type =~ /Edm.DateTime/
return String
end
# Method Missing Handlers
# Executes an import function
def execute_import_function(name, *args)
func = @function_imports[name]
# Check the args making sure that more weren't passed in than the function needs
param_count = func[:parameters].nil? ? 0 : func[:parameters].count
arg_count = args.nil? ? 0 : args[0].count
if arg_count > param_count
raise ArgumentError, "wrong number of arguments (#{arg_count} for #{param_count})"
end
# Convert the parameters to a hash
params = {}
func[:parameters].keys.each_with_index { |key, i| params[key] = args[0][i] } unless func[:parameters].nil?
function_uri = build_function_import_uri(name, params)
result = OData::Resource.new(function_uri, @rest_options).send(func[:http_method].downcase, {})
# Is this a 204 (No content) result?
return true if result.status == 204
# No? Then we need to parse the results. There are 4 kinds...
if func[:return_type] == Array
# a collection of entites
return build_classes_from_result(result.body) if @classes.include?(func[:inner_return_type].to_s)
# a collection of native types
elements = Nokogiri::XML(result.body).xpath("//ds:element", @ds_namespaces)
results = []
elements.each do |e|
results << parse_primative_type(e.content, func[:inner_return_type])
end
return results
end
# a single entity
if @classes.include?(func[:return_type].to_s)
entry = Nokogiri::XML(result.body).xpath("atom:entry[not(ancestor::atom:entry)]", @ds_namespaces)
return entry_to_class(entry)
end
# or a single native type
unless func[:return_type].nil?
e = Nokogiri::XML(result.body).xpath("/*").first
return parse_primative_type(e.content, func[:return_type])
end
# Nothing could be parsed, so just return if we got a 200 or not
return (result.status == 200)
end
# Helpers
def singular?(value)
value.singularize == value
end
end
|
hashicorp/vagrant | lib/vagrant/guest.rb | Vagrant.Guest.detect! | ruby | def detect!
guest_name = @machine.config.vm.guest
initialize_capabilities!(guest_name, @guests, @capabilities, @machine)
rescue Errors::CapabilityHostExplicitNotDetected => e
raise Errors::GuestExplicitNotDetected, value: e.extra_data[:value]
rescue Errors::CapabilityHostNotDetected
raise Errors::GuestNotDetected
end | This will detect the proper guest OS for the machine and set up
the class to actually execute capabilities. | train | https://github.com/hashicorp/vagrant/blob/c22a145c59790c098f95d50141d9afb48e1ef55f/lib/vagrant/guest.rb#L32-L39 | class Guest
include CapabilityHost
def initialize(machine, guests, capabilities)
@capabilities = capabilities
@guests = guests
@machine = machine
end
# This will detect the proper guest OS for the machine and set up
# the class to actually execute capabilities.
# See {CapabilityHost#capability}
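# Illustrative usage only (not part of the original source; the capability
# name is an assumption and depends on the detected guest plugins):
#   machine.guest.capability(:halt)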
def capability(*args)
super
rescue Errors::CapabilityNotFound => e
raise Errors::GuestCapabilityNotFound,
cap: e.extra_data[:cap],
guest: name
rescue Errors::CapabilityInvalid => e
raise Errors::GuestCapabilityInvalid,
cap: e.extra_data[:cap],
guest: name
end
# Returns the specified or detected guest type name.
#
# @return [Symbol]
def name
capability_host_chain[0][0]
end
# This returns whether the guest is ready to work. If this returns
# `false`, then {#detect!} should be called in order to detect the
# guest OS.
#
# @return [Boolean]
def ready?
!!capability_host_chain
end
end
|
sugaryourcoffee/syclink | lib/syclink/link.rb | SycLink.Link.match? | ruby | def match?(args)
select_defined(args).reduce(true) do |sum, attribute|
sum = sum && (send(attribute[0]) == attribute[1])
end
end | Checks whether the link matches the values provided by args and returns
true if so, otherwise false
link.match?(name: "Example", tag: "Test") | train | https://github.com/sugaryourcoffee/syclink/blob/941ee2045c946daa1e0db394eb643aa82c1254cc/lib/syclink/link.rb#L55-L59 | class Link
include LinkChecker
# Attributes that are accessible
ATTRS = [:url, :name, :description, :tag]
# Attribute accessors generated from ATTRS
attr_accessor *ATTRS
# Create a new link with url and params. If params are not provided
# defaults are used for name the url is used, description is empty and
# tag is set to 'untagged'
#
# Usage
# =====
#
# Link.new("http://example.com", name: "example",
# description: "For testing purposes",
# tag: "Test,Example")
#
# Params
# ======
# url:: the URL of the link
# name:: the name of the link. If not given the URL is used
# description:: the description of the link (optional)
# tag:: if not given it is set to 'untagged'
def initialize(url, params = {})
@url = url
params = defaults(url).merge(select_defined(params))
@name = params[:name]
@description = params[:description]
@tag = params[:tag]
end
# Updates the attributes of the link specified by args and returns the
# updated link
# link.update(name: "Example website for testing purposes")
def update(args)
select_defined(args).each do |attribute, value|
send("#{attribute}=", value)
end
self
end
# Checks whether the link matches the values provided by args and returns
# true if so, otherwise false
# link.match?(name: "Example", tag: "Test")
# Checks whether the search string is contained in one or more of the
# attributes. If the search string is found, true is returned; otherwise
# false
# link.contains?("example.com")
def contains?(search)
search = search.delete(' ').downcase
target = instance_variables.map { |v| instance_variable_get v }.join
target.downcase.delete(' ').scan(search).size > 0
end
# Return the values of the link in an array
# link.row
def row
[ url, name, description, tag ]
end
private
# Specifies the default values
def defaults(url)
{ name: url, description: "", tag: "untagged" }
end
# Based on the ATTRS the args are returned that are included in the ATTRS.
# args with nil values are omitted
def select_defined(args)
args.select { |k, v| (ATTRS.include? k) && !v.nil? }
end
end
|
amatsuda/rfd | lib/rfd.rb | Rfd.Controller.grep | ruby | def grep(pattern = '.*')
regexp = Regexp.new(pattern)
fetch_items_from_filesystem_or_zip
@items = items.shift(2) + items.select {|i| i.name =~ regexp}
sort_items_according_to_current_direction
draw_items
draw_total_items
switch_page 0
move_cursor 0
end | Search files and directories from the current directory, and update the screen.
* +pattern+ - Search pattern against file names in Ruby Regexp string.
=== Example
a : Search files that contain the letter "a" in their file name
.*\.pdf$ : Search PDF files | train | https://github.com/amatsuda/rfd/blob/403c0bc0ff0a9da1d21220b479d5a42008512b78/lib/rfd.rb#L344-L353 | class Controller
include Rfd::Commands
attr_reader :header_l, :header_r, :main, :command_line, :items, :displayed_items, :current_row, :current_page, :current_dir, :current_zip
# :nodoc:
def initialize
@main = MainWindow.new
@header_l = HeaderLeftWindow.new
@header_r = HeaderRightWindow.new
@command_line = CommandLineWindow.new
@debug = DebugWindow.new if ENV['DEBUG']
@direction, @dir_history, @last_command, @times, @yanked_items = nil, [], nil, nil, nil
end
# The main loop.
def run
loop do
begin
number_pressed = false
ret = case (c = Curses.getch)
when 10, 13 # enter, return
enter
when 27 # ESC
q
when ' ' # space
space
when 127 # DEL
del
when Curses::KEY_DOWN
j
when Curses::KEY_UP
k
when Curses::KEY_LEFT
h
when Curses::KEY_RIGHT
l
when Curses::KEY_CTRL_A..Curses::KEY_CTRL_Z
chr = ((c - 1 + 65) ^ 0b0100000).chr
public_send "ctrl_#{chr}" if respond_to?("ctrl_#{chr}")
when ?0..?9
public_send c
number_pressed = true
when ?!..?~
if respond_to? c
public_send c
else
debug "key: #{c}" if ENV['DEBUG']
end
when Curses::KEY_MOUSE
if (mouse_event = Curses.getmouse)
case mouse_event.bstate
when Curses::BUTTON1_CLICKED
click y: mouse_event.y, x: mouse_event.x
when Curses::BUTTON1_DOUBLE_CLICKED
double_click y: mouse_event.y, x: mouse_event.x
end
end
else
debug "key: #{c}" if ENV['DEBUG']
end
Curses.doupdate if ret
@times = nil unless number_pressed
rescue StopIteration
raise
rescue => e
command_line.show_error e.to_s
raise if ENV['DEBUG']
end
end
ensure
Curses.close_screen
end
# Change the number of columns in the main window.
def spawn_panes(num)
main.number_of_panes = num
@current_row = @current_page = 0
end
# Number of times to repeat the next command.
def times
(@times || 1).to_i
end
# The file or directory on which the cursor is on.
def current_item
items[current_row]
end
# * marked files and directories.
def marked_items
items.select(&:marked?)
end
# Marked files and directories or Array(the current file or directory).
#
# . and .. will not be included.
def selected_items
((m = marked_items).any? ? m : Array(current_item)).reject {|i| %w(. ..).include? i.name}
end
# Move the cursor to specified row.
#
# The main window and the headers will be updated reflecting the displayed files and directories.
# The row number can be out of range of the current page.
def move_cursor(row = nil)
if row
if (prev_item = items[current_row])
main.draw_item prev_item
end
page = row / max_items
switch_page page if page != current_page
main.activate_pane row / maxy
@current_row = row
else
@current_row = 0
end
item = items[current_row]
main.draw_item item, current: true
main.display current_page
header_l.draw_current_file_info item
@current_row
end
# Change the current directory.
def cd(dir = '~', pushd: true)
dir = load_item path: expand_path(dir) unless dir.is_a? Item
unless dir.zip?
Dir.chdir dir
@current_zip = nil
else
@current_zip = dir
end
@dir_history << current_dir if current_dir && pushd
@current_dir, @current_page, @current_row = dir, 0, nil
main.activate_pane 0
ls
@current_dir
end
# cd to the previous directory.
def popd
cd @dir_history.pop, pushd: false if @dir_history.any?
end
# Fetch files from current directory.
# Then update each windows reflecting the newest information.
def ls
fetch_items_from_filesystem_or_zip
sort_items_according_to_current_direction
@current_page ||= 0
draw_items
move_cursor (current_row ? [current_row, items.size - 1].min : nil)
draw_marked_items
draw_total_items
true
end
# Sort the whole files and directories in the current directory, then refresh the screen.
#
# ==== Parameters
# * +direction+ - Sort order in a String.
# nil : order by name
# r : reverse order by name
# s, S : order by file size
# sr, Sr: reverse order by file size
# t : order by mtime
# tr : reverse order by mtime
# c : order by ctime
# cr : reverse order by ctime
# u : order by atime
# ur : reverse order by atime
# e : order by extname
# er : reverse order by extname
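# Illustrative usage only (not part of the original source): typically typed
# at the command line (see process_command_line below), e.g. ":sort t" to
# order by mtime or ":sort r" to reverse the name order.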
def sort(direction = nil)
@direction, @current_page = direction, 0
sort_items_according_to_current_direction
switch_page 0
move_cursor 0
end
# Change the file permission of the selected files and directories.
#
# ==== Parameters
# * +mode+ - Unix chmod string (e.g. +w, g-r, 755, 0644)
def chmod(mode = nil)
return unless mode
begin
Integer mode
mode = Integer mode.size == 3 ? "0#{mode}" : mode
rescue ArgumentError
end
FileUtils.chmod mode, selected_items.map(&:path)
ls
end
# Change the file owner of the selected files and directories.
#
# ==== Parameters
# * +user_and_group+ - user name and group name separated by : (e.g. alice, nobody:nobody, :admin)
def chown(user_and_group)
return unless user_and_group
user, group = user_and_group.split(':').map {|s| s == '' ? nil : s}
FileUtils.chown user, group, selected_items.map(&:path)
ls
end
# Fetch files from current directory or current .zip file.
def fetch_items_from_filesystem_or_zip
unless in_zip?
@items = Dir.foreach(current_dir).map {|fn|
load_item dir: current_dir, name: fn
}.to_a.partition {|i| %w(. ..).include? i.name}.flatten
else
@items = [load_item(dir: current_dir, name: '.', stat: File.stat(current_dir)),
load_item(dir: current_dir, name: '..', stat: File.stat(File.dirname(current_dir)))]
zf = Zip::File.new current_dir
zf.each {|entry|
next if entry.name_is_directory?
stat = zf.file.stat entry.name
@items << load_item(dir: current_dir, name: entry.name, stat: stat)
}
end
end
# Focus at the first file or directory whose name starts with the given String.
def find(str)
index = items.index {|i| i.index > current_row && i.name.start_with?(str)} || items.index {|i| i.name.start_with? str}
move_cursor index if index
end
# Focus at the last file or directory whose name starts with the given String.
def find_reverse(str)
index = items.reverse.index {|i| i.index < current_row && i.name.start_with?(str)} || items.reverse.index {|i| i.name.start_with? str}
move_cursor items.size - index - 1 if index
end
# Height of the currently active pane.
def maxy
main.maxy
end
# Number of files or directories that the current main window can show in a page.
def max_items
main.max_items
end
# Update the main window with the loaded files and directories. Also update the header.
def draw_items
main.newpad items
@displayed_items = items[current_page * max_items, max_items]
main.display current_page
header_l.draw_path_and_page_number path: current_dir.path, current: current_page + 1, total: total_pages
end
# Sort the loaded files and directories in already given sort order.
def sort_items_according_to_current_direction
case @direction
when nil
@items = items.shift(2) + items.partition(&:directory?).flat_map(&:sort)
when 'r'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort.reverse}
when 'S', 's'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort_by {|i| -i.size}}
when 'Sr', 'sr'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort_by(&:size)}
when 't'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort {|x, y| y.mtime <=> x.mtime}}
when 'tr'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort_by(&:mtime)}
when 'c'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort {|x, y| y.ctime <=> x.ctime}}
when 'cr'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort_by(&:ctime)}
when 'u'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort {|x, y| y.atime <=> x.atime}}
when 'ur'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort_by(&:atime)}
when 'e'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort {|x, y| y.extname <=> x.extname}}
when 'er'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort_by(&:extname)}
end
items.each.with_index {|item, index| item.index = index}
end
# Search files and directories from the current directory, and update the screen.
#
# * +pattern+ - Search pattern against file names in Ruby Regexp string.
#
# === Example
#
# a : Search files that contains the letter "a" in their file name
# .*\.pdf$ : Search PDF files
# Copy selected files and directories to the destination.
def cp(dest)
unless in_zip?
src = (m = marked_items).any? ? m.map(&:path) : current_item
FileUtils.cp_r src, expand_path(dest)
else
raise 'cping multiple items in .zip is not supported.' if selected_items.size > 1
Zip::File.open(current_zip) do |zip|
entry = zip.find_entry(selected_items.first.name).dup
entry.name, entry.name_length = dest, dest.size
zip.instance_variable_get(:@entry_set) << entry
end
end
ls
end
# Move selected files and directories to the destination.
def mv(dest)
unless in_zip?
src = (m = marked_items).any? ? m.map(&:path) : current_item
FileUtils.mv src, expand_path(dest)
else
raise 'mving multiple items in .zip is not supported.' if selected_items.size > 1
rename "#{selected_items.first.name}/#{dest}"
end
ls
end
# Rename selected files and directories.
#
# ==== Parameters
# * +pattern+ - new filename, or a slash-separated Regexp-like string
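# Illustrative usage only (not part of the original source; names are
# hypothetical): ":rename txt/md" rewrites "txt" to "md" in each selected
# name, while ":rename new_name.rb" renames the current item.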
def rename(pattern)
from, to = pattern.sub(/^\//, '').sub(/\/$/, '').split '/'
if to.nil?
from, to = current_item.name, from
else
from = Regexp.new from
end
unless in_zip?
selected_items.each do |item|
name = item.name.gsub from, to
FileUtils.mv item, current_dir.join(name) if item.name != name
end
else
Zip::File.open(current_zip) do |zip|
selected_items.each do |item|
name = item.name.gsub from, to
zip.rename item.name, name
end
end
end
ls
end
# Soft delete selected files and directories.
#
# If the OS is not OSX, performs the same as `delete` command.
def trash
unless in_zip?
if osx?
FileUtils.mv selected_items.map(&:path), File.expand_path('~/.Trash/')
else
#TODO support other OS
FileUtils.rm_rf selected_items.map(&:path)
end
else
return unless ask %Q[Trashing zip entries is not supported. Actually the files will be deleted. Are you sure you want to proceed? (y/n)]
delete
end
@current_row -= selected_items.count {|i| i.index <= current_row}
ls
end
# Delete selected files and directories.
def delete
unless in_zip?
FileUtils.rm_rf selected_items.map(&:path)
else
Zip::File.open(current_zip) do |zip|
zip.select {|e| selected_items.map(&:name).include? e.to_s}.each do |entry|
if entry.name_is_directory?
zip.dir.delete entry.to_s
else
zip.file.delete entry.to_s
end
end
end
end
@current_row -= selected_items.count {|i| i.index <= current_row}
ls
end
# Create a new directory.
def mkdir(dir)
unless in_zip?
FileUtils.mkdir_p current_dir.join(dir)
else
Zip::File.open(current_zip) do |zip|
zip.dir.mkdir dir
end
end
ls
end
# Create a new empty file.
def touch(filename)
unless in_zip?
FileUtils.touch current_dir.join(filename)
else
Zip::File.open(current_zip) do |zip|
# zip.file.open(filename, 'w') {|_f| } #HAXX this code creates an unneeded temporary file
zip.instance_variable_get(:@entry_set) << Zip::Entry.new(current_zip, filename)
end
end
ls
end
# Create a symlink to the current file or directory.
def symlink(name)
FileUtils.ln_s current_item, name
ls
end
# Yank selected file / directory names.
def yank
@yanked_items = selected_items
end
# Paste yanked files / directories here.
def paste
if @yanked_items
if current_item.directory?
FileUtils.cp_r @yanked_items.map(&:path), current_item
else
@yanked_items.each do |item|
if items.include? item
i = 1
while i += 1
new_item = load_item dir: current_dir, name: "#{item.basename}_#{i}#{item.extname}", stat: item.stat
break unless File.exist? new_item.path
end
FileUtils.cp_r item, new_item
else
FileUtils.cp_r item, current_dir
end
end
end
ls
end
end
# Copy selected files and directories' path into clipboard on OSX.
def clipboard
IO.popen('pbcopy', 'w') {|f| f << selected_items.map(&:path).join(' ')} if osx?
end
# Archive selected files and directories into a .zip file.
def zip(zipfile_name)
return unless zipfile_name
zipfile_name += '.zip' unless zipfile_name.end_with? '.zip'
Zip::File.open(zipfile_name, Zip::File::CREATE) do |zipfile|
selected_items.each do |item|
next if item.symlink?
if item.directory?
Dir[item.join('**/**')].each do |file|
zipfile.add file.sub("#{current_dir}/", ''), file
end
else
zipfile.add item.name, item
end
end
end
ls
end
# Unarchive .zip and .tar.gz files within selected files and directories into current_directory.
def unarchive
unless in_zip?
zips, gzs = selected_items.partition(&:zip?).tap {|z, others| break [z, *others.partition(&:gz?)]}
zips.each do |item|
FileUtils.mkdir_p current_dir.join(item.basename)
Zip::File.open(item) do |zip|
zip.each do |entry|
FileUtils.mkdir_p File.join(item.basename, File.dirname(entry.to_s))
zip.extract(entry, File.join(item.basename, entry.to_s)) { true }
end
end
end
gzs.each do |item|
Zlib::GzipReader.open(item) do |gz|
Gem::Package::TarReader.new(gz) do |tar|
dest_dir = current_dir.join (gz.orig_name || item.basename).sub(/\.tar$/, '')
tar.each do |entry|
dest = nil
if entry.full_name == '././@LongLink'
dest = File.join dest_dir, entry.read.strip
next
end
dest ||= File.join dest_dir, entry.full_name
if entry.directory?
FileUtils.mkdir_p dest, :mode => entry.header.mode
elsif entry.file?
FileUtils.mkdir_p dest_dir
File.open(dest, 'wb') {|f| f.print entry.read}
FileUtils.chmod entry.header.mode, dest
elsif entry.header.typeflag == '2' # symlink
File.symlink entry.header.linkname, dest
end
unless Dir.exist? dest_dir
FileUtils.mkdir_p dest_dir
File.open(File.join(dest_dir, gz.orig_name || item.basename), 'wb') {|f| f.print gz.read}
end
end
end
end
end
else
Zip::File.open(current_zip) do |zip|
zip.select {|e| selected_items.map(&:name).include? e.to_s}.each do |entry|
FileUtils.mkdir_p File.join(current_zip.dir, current_zip.basename, File.dirname(entry.to_s))
zip.extract(entry, File.join(current_zip.dir, current_zip.basename, entry.to_s)) { true }
end
end
end
ls
end
# Current page is the first page?
def first_page?
current_page == 0
end
# Do we have more pages?
def last_page?
current_page == total_pages - 1
end
# Number of pages in the current directory.
def total_pages
(items.size - 1) / max_items + 1
end
# Move to the given page number.
#
# ==== Parameters
# * +page+ - Target page number
def switch_page(page)
main.display (@current_page = page)
@displayed_items = items[current_page * max_items, max_items]
header_l.draw_path_and_page_number path: current_dir.path, current: current_page + 1, total: total_pages
end
# Update the header information concerning currently marked files or directories.
def draw_marked_items
items = marked_items
header_r.draw_marked_items count: items.size, size: items.inject(0) {|sum, i| sum += i.size}
end
# Update the header information concerning total files and directories in the current directory.
def draw_total_items
header_r.draw_total_items count: items.size, size: items.inject(0) {|sum, i| sum += i.size}
end
  # Switch on / off marking on the current file or directory.
def toggle_mark
main.toggle_mark current_item
end
# Get a char as a String from user input.
def get_char
c = Curses.getch
c if (0..255) === c.ord
end
def clear_command_line
command_line.writeln 0, ""
command_line.clear
command_line.noutrefresh
end
# Accept user input, and directly execute it as a Ruby method call to the controller.
#
# ==== Parameters
# * +preset_command+ - A command that would be displayed at the command line before user input.
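  #
  # Illustrative example (assumed dispatch behaviour, not from the original source):
  # entering "zip backup" is split into cmd == 'zip' and args == ['backup'] and runs
  # public_send('zip', 'backup'), i.e. the zip method above creates backup.zip.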
def process_command_line(preset_command: nil)
prompt = preset_command ? ":#{preset_command} " : ':'
command_line.set_prompt prompt
cmd, *args = command_line.get_command(prompt: prompt).split(' ')
if cmd && !cmd.empty? && respond_to?(cmd)
ret = self.public_send cmd, *args
clear_command_line
ret
end
rescue Interrupt
clear_command_line
end
# Accept user input, and directly execute it in an external shell.
def process_shell_command
command_line.set_prompt ':!'
cmd = command_line.get_command(prompt: ':!')[1..-1]
execute_external_command pause: true do
system cmd
end
rescue Interrupt
ensure
command_line.clear
command_line.noutrefresh
end
# Let the user answer y or n.
#
# ==== Parameters
# * +prompt+ - Prompt message
def ask(prompt = '(y/n)')
command_line.set_prompt prompt
command_line.refresh
while (c = Curses.getch)
next unless [?N, ?Y, ?n, ?y, 3, 27] .include? c # N, Y, n, y, ^c, esc
command_line.clear
command_line.noutrefresh
break (c == 'y') || (c == 'Y')
end
end
# Open current file or directory with the editor.
def edit
execute_external_command do
editor = ENV['EDITOR'] || 'vim'
unless in_zip?
system %Q[#{editor} "#{current_item.path}"]
else
begin
tmpdir, tmpfile_name = nil
Zip::File.open(current_zip) do |zip|
tmpdir = Dir.mktmpdir
FileUtils.mkdir_p File.join(tmpdir, File.dirname(current_item.name))
tmpfile_name = File.join(tmpdir, current_item.name)
File.open(tmpfile_name, 'w') {|f| f.puts zip.file.read(current_item.name)}
system %Q[#{editor} "#{tmpfile_name}"]
zip.add(current_item.name, tmpfile_name) { true }
end
ls
ensure
FileUtils.remove_entry_secure tmpdir if tmpdir
end
end
end
end
# Open current file or directory with the viewer.
def view
pager = ENV['PAGER'] || 'less'
execute_external_command do
unless in_zip?
system %Q[#{pager} "#{current_item.path}"]
else
begin
tmpdir, tmpfile_name = nil
Zip::File.open(current_zip) do |zip|
tmpdir = Dir.mktmpdir
FileUtils.mkdir_p File.join(tmpdir, File.dirname(current_item.name))
tmpfile_name = File.join(tmpdir, current_item.name)
File.open(tmpfile_name, 'w') {|f| f.puts zip.file.read(current_item.name)}
end
system %Q[#{pager} "#{tmpfile_name}"]
ensure
FileUtils.remove_entry_secure tmpdir if tmpdir
end
end
end
end
def move_cursor_by_click(y: nil, x: nil)
if (idx = main.pane_index_at(y: y, x: x))
row = current_page * max_items + main.maxy * idx + y - main.begy
move_cursor row if (row >= 0) && (row < items.size)
end
end
private
def execute_external_command(pause: false)
Curses.def_prog_mode
Curses.close_screen
yield
ensure
Curses.reset_prog_mode
Curses.getch if pause
#NOTE needs to draw borders and ls again here since the stdlib Curses.refresh fails to retrieve the previous screen
Rfd::Window.draw_borders
Curses.refresh
ls
end
def expand_path(path)
File.expand_path path.start_with?('/', '~') ? path : current_dir ? current_dir.join(path) : path
end
def load_item(path: nil, dir: nil, name: nil, stat: nil)
Item.new dir: dir || File.dirname(path), name: name || File.basename(path), stat: stat, window_width: main.width
end
def osx?
@_osx ||= RbConfig::CONFIG['host_os'] =~ /darwin/
end
def in_zip?
@current_zip
end
def debug(str)
@debug.debug str
end
end
|
sup-heliotrope/sup | lib/sup/maildir.rb | Redwood.Maildir.poll | ruby | def poll
added = []
deleted = []
updated = []
@ctimes.each do |d,prev_ctime|
subdir = File.join @dir, d
debug "polling maildir #{subdir}"
raise FatalSourceError, "#{subdir} not a directory" unless File.directory? subdir
ctime = File.ctime subdir
next if prev_ctime >= ctime
@ctimes[d] = ctime
old_ids = benchmark(:maildir_read_index) { Index.instance.enum_for(:each_source_info, self.id, "#{d}/").to_a }
new_ids = benchmark(:maildir_read_dir) {
Dir.open(subdir).select {
|f| !File.directory? f}.map {
|x| File.join(d,File.basename(x)) }.sort }
added += new_ids - old_ids
deleted += old_ids - new_ids
debug "#{old_ids.size} in index, #{new_ids.size} in filesystem"
end
## find updated mails by checking if an id is in both added and
## deleted arrays, meaning that its flags changed or that it has
## been moved, these ids need to be removed from added and deleted
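    ## illustrative example (not from the original source): if "cur/123:2,S"
    ## disappears and "cur/123:2,RS" appears, both share the base id "123", so
    ## the pair is reported as an update rather than a delete plus an add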
add_to_delete = del_to_delete = []
map = Hash.new { |hash, key| hash[key] = [] }
deleted.each do |id_del|
map[maildir_data(id_del)[0]].push id_del
end
added.each do |id_add|
map[maildir_data(id_add)[0]].each do |id_del|
updated.push [ id_del, id_add ]
add_to_delete.push id_add
del_to_delete.push id_del
end
end
added -= add_to_delete
deleted -= del_to_delete
debug "#{added.size} added, #{deleted.size} deleted, #{updated.size} updated"
total_size = added.size+deleted.size+updated.size
added.each_with_index do |id,i|
yield :add,
:info => id,
:labels => @labels + maildir_labels(id) + [:inbox],
:progress => i.to_f/total_size
end
deleted.each_with_index do |id,i|
yield :delete,
:info => id,
:progress => (i.to_f+added.size)/total_size
end
updated.each_with_index do |id,i|
yield :update,
:old_info => id[0],
:new_info => id[1],
:labels => @labels + maildir_labels(id[1]),
:progress => (i.to_f+added.size+deleted.size)/total_size
end
nil
end | XXX use less memory | train | https://github.com/sup-heliotrope/sup/blob/36f95462e3014c354c577d63a78ba030c4b84474/lib/sup/maildir.rb#L118-L181 | class Maildir < Source
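  ## Illustrative usage sketch (assumption, not part of the original source): poll
  ## yields (symbol, options-hash) pairs that the caller consumes, e.g.
  ##   source.poll do |action, args|
  ##     case action
  ##     when :add    then add_to_index args[:info], args[:labels]      # hypothetical helpers
  ##     when :delete then remove_from_index args[:info]
  ##     when :update then reindex args[:old_info], args[:new_info]
  ##     end
  ##   end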
include SerializeLabelsNicely
MYHOSTNAME = Socket.gethostname
## remind me never to use inheritance again.
yaml_properties :uri, :usual, :archived, :sync_back, :id, :labels
def initialize uri, usual=true, archived=false, sync_back=true, id=nil, labels=[]
super uri, usual, archived, id
@expanded_uri = Source.expand_filesystem_uri(uri)
parts = @expanded_uri.match /^([a-zA-Z0-9]*:(\/\/)?)(.*)/
if parts
prefix = parts[1]
@path = parts[3]
uri = URI(prefix + URI.encode(@path, URI_ENCODE_CHARS))
else
uri = URI(URI.encode @expanded_uri, URI_ENCODE_CHARS)
@path = uri.path
end
raise ArgumentError, "not a maildir URI" unless uri.scheme == "maildir"
raise ArgumentError, "maildir URI cannot have a host: #{uri.host}" if uri.host
raise ArgumentError, "maildir URI must have a path component" unless uri.path
@sync_back = sync_back
# sync by default if not specified
@sync_back = true if @sync_back.nil?
@dir = URI.decode uri.path
@labels = Set.new(labels || [])
@mutex = Mutex.new
@ctimes = { 'cur' => Time.at(0), 'new' => Time.at(0) }
end
def file_path; @dir end
def self.suggest_labels_for path; [] end
def is_source_for? uri; super || (uri == @expanded_uri); end
def supported_labels?
[:draft, :starred, :forwarded, :replied, :unread, :deleted]
end
def sync_back_enabled?
@sync_back
end
def store_message date, from_email, &block
stored = false
new_fn = new_maildir_basefn + ':2,S'
Dir.chdir(@dir) do |d|
tmp_path = File.join(@dir, 'tmp', new_fn)
new_path = File.join(@dir, 'new', new_fn)
begin
sleep 2 if File.stat(tmp_path)
File.stat(tmp_path)
rescue Errno::ENOENT #this is what we want.
begin
File.open(tmp_path, 'wb') do |f|
yield f #provide a writable interface for the caller
f.fsync
end
File.safe_link tmp_path, new_path
stored = true
ensure
File.unlink tmp_path if File.exist? tmp_path
end
end #rescue Errno...
end #Dir.chdir
stored
end
def each_raw_message_line id
with_file_for(id) do |f|
until f.eof?
yield f.gets
end
end
end
def load_header id
with_file_for(id) { |f| parse_raw_email_header f }
end
def load_message id
with_file_for(id) { |f| RMail::Parser.read f }
end
def sync_back id, labels
synchronize do
debug "syncing back maildir message #{id} with flags #{labels.to_a}"
flags = maildir_reconcile_flags id, labels
maildir_mark_file id, flags
end
end
def raw_header id
ret = ""
with_file_for(id) do |f|
until f.eof? || (l = f.gets) =~ /^$/
ret += l
end
end
ret
end
def raw_message id
with_file_for(id) { |f| f.read }
end
## XXX use less memory
def labels? id
maildir_labels id
end
def maildir_labels id
(seen?(id) ? [] : [:unread]) +
(trashed?(id) ? [:deleted] : []) +
(flagged?(id) ? [:starred] : []) +
(passed?(id) ? [:forwarded] : []) +
(replied?(id) ? [:replied] : []) +
(draft?(id) ? [:draft] : [])
end
def draft? id; maildir_data(id)[2].include? "D"; end
def flagged? id; maildir_data(id)[2].include? "F"; end
def passed? id; maildir_data(id)[2].include? "P"; end
def replied? id; maildir_data(id)[2].include? "R"; end
def seen? id; maildir_data(id)[2].include? "S"; end
def trashed? id; maildir_data(id)[2].include? "T"; end
def valid? id
File.exist? File.join(@dir, id)
end
private
def new_maildir_basefn
Kernel::srand()
"#{Time.now.to_i.to_s}.#{$$}#{Kernel.rand(1000000)}.#{MYHOSTNAME}"
end
def with_file_for id
fn = File.join(@dir, id)
begin
File.open(fn, 'rb') { |f| yield f }
rescue SystemCallError, IOError => e
raise FatalSourceError, "Problem reading file for id #{id.inspect}: #{fn.inspect}: #{e.message}."
end
end
def maildir_data id
id = File.basename id
# Flags we recognize are DFPRST
id =~ %r{^([^:]+):([12]),([A-Za-z]*)$}
[($1 || id), ($2 || "2"), ($3 || "")]
end
def maildir_reconcile_flags id, labels
new_flags = Set.new( maildir_data(id)[2].each_char )
# Set flags based on labels for the six flags we recognize
if labels.member? :draft then new_flags.add?( "D" ) else new_flags.delete?( "D" ) end
if labels.member? :starred then new_flags.add?( "F" ) else new_flags.delete?( "F" ) end
if labels.member? :forwarded then new_flags.add?( "P" ) else new_flags.delete?( "P" ) end
if labels.member? :replied then new_flags.add?( "R" ) else new_flags.delete?( "R" ) end
if not labels.member? :unread then new_flags.add?( "S" ) else new_flags.delete?( "S" ) end
if labels.member? :deleted or labels.member? :killed then new_flags.add?( "T" ) else new_flags.delete?( "T" ) end
## Flags must be stored in ASCII order according to Maildir
## documentation
new_flags.to_a.sort.join
end
def maildir_mark_file orig_path, flags
@mutex.synchronize do
new_base = (flags.include?("S")) ? "cur" : "new"
md_base, md_ver, md_flags = maildir_data orig_path
return if md_flags == flags
new_loc = File.join new_base, "#{md_base}:#{md_ver},#{flags}"
orig_path = File.join @dir, orig_path
new_path = File.join @dir, new_loc
tmp_path = File.join @dir, "tmp", "#{md_base}:#{md_ver},#{flags}"
File.safe_link orig_path, tmp_path
File.unlink orig_path
File.safe_link tmp_path, new_path
File.unlink tmp_path
new_loc
end
end
end
|
state-machines/state_machines | lib/state_machines/event_collection.rb | StateMachines.EventCollection.attribute_transition_for | ruby | def attribute_transition_for(object, invalidate = false)
return unless machine.action
# TODO, simplify
machine.read(object, :event_transition) || if event_name = machine.read(object, :event)
if event = self[event_name.to_sym, :name]
event.transition_for(object) || begin
# No valid transition: invalidate
machine.invalidate(object, :event, :invalid_event, [[:state, machine.states.match!(object).human_name(object.class)]]) if invalidate
false
end
else
# Event is unknown: invalidate
machine.invalidate(object, :event, :invalid) if invalidate
false
end
end
end | Gets the transition that should be performed for the event stored in the
given object's event attribute. This also takes an additional parameter
for automatically invalidating the object if the event or transition are
invalid. By default, this is turned off.
*Note* that if a transition has already been generated for the event, then
that transition will be used.
== Examples
class Vehicle < ActiveRecord::Base
state_machine :initial => :parked do
event :ignite do
transition :parked => :idling
end
end
end
vehicle = Vehicle.new # => #<Vehicle id: nil, state: "parked">
events = Vehicle.state_machine.events
vehicle.state_event = nil
events.attribute_transition_for(vehicle) # => nil # Event isn't defined
vehicle.state_event = 'invalid'
events.attribute_transition_for(vehicle) # => false # Event is invalid
vehicle.state_event = 'ignite'
events.attribute_transition_for(vehicle) # => #<StateMachines::Transition attribute=:state event=:ignite from="parked" from_name=:parked to="idling" to_name=:idling> | train | https://github.com/state-machines/state_machines/blob/10b03af5fc9245bcb09bbd9c40c58ffba9a85422/lib/state_machines/event_collection.rb#L114-L132 | class EventCollection < NodeCollection
def initialize(machine) #:nodoc:
super(machine, :index => [:name, :qualified_name])
end
# Gets the list of events that can be fired on the given object.
#
# Valid requirement options:
# * <tt>:from</tt> - One or more states being transitioned from. If none
# are specified, then this will be the object's current state.
# * <tt>:to</tt> - One or more states being transitioned to. If none are
# specified, then this will match any to state.
# * <tt>:on</tt> - One or more events that fire the transition. If none
# are specified, then this will match any event.
# * <tt>:guard</tt> - Whether to guard transitions with the if/unless
# conditionals defined for each one. Default is true.
#
# == Examples
#
# class Vehicle
# state_machine :initial => :parked do
# event :park do
# transition :idling => :parked
# end
#
# event :ignite do
# transition :parked => :idling
# end
# end
# end
#
# events = Vehicle.state_machine(:state).events
#
# vehicle = Vehicle.new # => #<Vehicle:0xb7c464b0 @state="parked">
# events.valid_for(vehicle) # => [#<StateMachines::Event name=:ignite transitions=[:parked => :idling]>]
#
# vehicle.state = 'idling'
# events.valid_for(vehicle) # => [#<StateMachines::Event name=:park transitions=[:idling => :parked]>]
def valid_for(object, requirements = {})
match(requirements).select { |event| event.can_fire?(object, requirements) }
end
# Gets the list of transitions that can be run on the given object.
#
# Valid requirement options:
# * <tt>:from</tt> - One or more states being transitioned from. If none
# are specified, then this will be the object's current state.
# * <tt>:to</tt> - One or more states being transitioned to. If none are
# specified, then this will match any to state.
# * <tt>:on</tt> - One or more events that fire the transition. If none
# are specified, then this will match any event.
# * <tt>:guard</tt> - Whether to guard transitions with the if/unless
# conditionals defined for each one. Default is true.
#
# == Examples
#
# class Vehicle
# state_machine :initial => :parked do
# event :park do
# transition :idling => :parked
# end
#
# event :ignite do
# transition :parked => :idling
# end
# end
# end
#
# events = Vehicle.state_machine.events
#
# vehicle = Vehicle.new # => #<Vehicle:0xb7c464b0 @state="parked">
# events.transitions_for(vehicle) # => [#<StateMachines::Transition attribute=:state event=:ignite from="parked" from_name=:parked to="idling" to_name=:idling>]
#
# vehicle.state = 'idling'
# events.transitions_for(vehicle) # => [#<StateMachines::Transition attribute=:state event=:park from="idling" from_name=:idling to="parked" to_name=:parked>]
#
# # Search for explicit transitions regardless of the current state
# events.transitions_for(vehicle, :from => :parked) # => [#<StateMachines::Transition attribute=:state event=:ignite from="parked" from_name=:parked to="idling" to_name=:idling>]
def transitions_for(object, requirements = {})
match(requirements).map { |event| event.transition_for(object, requirements) }.compact
end
# Gets the transition that should be performed for the event stored in the
# given object's event attribute. This also takes an additional parameter
# for automatically invalidating the object if the event or transition are
# invalid. By default, this is turned off.
#
# *Note* that if a transition has already been generated for the event, then
# that transition will be used.
#
# == Examples
#
# class Vehicle < ActiveRecord::Base
# state_machine :initial => :parked do
# event :ignite do
# transition :parked => :idling
# end
# end
# end
#
# vehicle = Vehicle.new # => #<Vehicle id: nil, state: "parked">
# events = Vehicle.state_machine.events
#
# vehicle.state_event = nil
# events.attribute_transition_for(vehicle) # => nil # Event isn't defined
#
# vehicle.state_event = 'invalid'
# events.attribute_transition_for(vehicle) # => false # Event is invalid
#
# vehicle.state_event = 'ignite'
# events.attribute_transition_for(vehicle) # => #<StateMachines::Transition attribute=:state event=:ignite from="parked" from_name=:parked to="idling" to_name=:idling>
private
def match(requirements) #:nodoc:
requirements && requirements[:on] ? [fetch(requirements.delete(:on))] : self
end
end
|
projectcypress/health-data-standards | lib/hqmf-parser/2.0/document_helpers/doc_utilities.rb | HQMF2.DocumentUtilities.complex_coverage | ruby | def complex_coverage(data_criteria, check_criteria)
same_value = data_criteria.value.nil? ||
data_criteria.value.try(:to_model).try(:to_json) == check_criteria.value.try(:to_model).try(:to_json)
same_field_values = same_field_values_check(data_criteria, check_criteria)
same_negation_values = data_criteria.negation_code_list_id.nil? ||
data_criteria.negation_code_list_id == check_criteria.negation_code_list_id
same_value && same_negation_values && same_field_values
end | Check elements that do not already exist; else, if they do, check if those elements are the same
in a different, potentially matching, data criteria | train | https://github.com/projectcypress/health-data-standards/blob/252d4f0927c513eacde6b9ea41b76faa1423c34b/lib/hqmf-parser/2.0/document_helpers/doc_utilities.rb#L107-L117 | module DocumentUtilities
# Create grouper data criteria for encapsulating variable data criteria
# and update document data criteria list and references map
def handle_variable(data_criteria, collapsed_source_data_criteria)
if data_criteria.is_derived_specific_occurrence_variable
data_criteria.handle_derived_specific_occurrence_variable
extract_source_data_criteria(data_criteria)
return
end
tmp_id = data_criteria.id
grouper_data_criteria = data_criteria.extract_variable_grouper
return unless grouper_data_criteria
@data_criteria_references[data_criteria.id] = data_criteria
@data_criteria_references[grouper_data_criteria.id] = grouper_data_criteria
# create a source data criteria for the grouping data critera we just created
sdc = SourceDataCriteriaHelper.strip_non_sc_elements(grouper_data_criteria)
@source_data_criteria << sdc
# check if the original source has been collapsed when generating the SDC list (we need to reference the collapsed version in the sdc list)
if collapsed_source_data_criteria[tmp_id]
data_criteria.instance_variable_set(:@source_data_criteria, collapsed_source_data_criteria[tmp_id])
else
# check if we need to add _source suffix (most source data criteria are segmented with '_source' suffixes)
data_criteria_sdc = find(@source_data_criteria, :id, "#{tmp_id}_source")
if data_criteria_sdc
data_criteria.instance_variable_set(:@source_data_criteria, data_criteria_sdc.id)
data_criteria_sdc.instance_variable_set(:@variable, false)
# if it's not a derived data criteria then we may need to strip off temporal references, fields, etc as a new source data criteria
elsif !['derived', 'satisfies_any', 'satisfies_all'].include?(data_criteria.definition)
extract_source_data_criteria(data_criteria)
end
end
@data_criteria << grouper_data_criteria
end
def extract_source_data_criteria (data_criteria)
# check if we have temporal references other non-SDC elements on this data criteria.
# if we do, we need to create a new SDC to reference
if !SourceDataCriteriaHelper.already_stripped?(data_criteria)
candidate_sdc = SourceDataCriteriaHelper.strip_non_sc_elements(data_criteria.clone)
candidate_sdc.instance_variable_set(:@id, "#{candidate_sdc.id}_source")
candidate_sdc.instance_variable_set(:@source_data_criteria, candidate_sdc.id)
existing_candidate = SourceDataCriteriaHelper.find_existing_source_data_criteria(@source_data_criteria, candidate_sdc)
if existing_candidate
candidate_sdc = existing_candidate
else
@source_data_criteria << candidate_sdc
# Specific occurrence variables need a copy of the source in the data criteria to display variable results
@data_criteria << candidate_sdc if data_criteria.is_derived_specific_occurrence_variable
end
data_criteria.instance_variable_set(:@source_data_criteria, candidate_sdc.id)
end
end
# Checks if one data criteria is covered by another (has all the appropriate elements of)
def criteria_covered_by_criteria?(dc)
@reference_ids.uniq
base_criteria_defs = %w(patient_characteristic_ethnicity patient_characteristic_gender patient_characteristic_payer patient_characteristic_race)
to_reject = true
# don't reject if anything refers directly to this criteria
to_reject &&= @reference_ids.index(dc.id).nil?
# don't reject if it is a "base" criteria (no references but must exist)
to_reject &&= !base_criteria_defs.include?(dc.definition)
# keep referral occurrence
to_reject &&= dc.specific_occurrence_const.nil? ||
dc.code_list_id != '2.16.840.1.113883.3.464.1003.101.12.1046'
to_reject && !@data_criteria.detect do |dc2|
similar_criteria = true
similar_criteria &&= dc != dc2 # Don't check against itself
similar_criteria &&= dc.code_list_id == dc2.code_list_id # Ensure code list ids are the same
similar_criteria && detect_criteria_covered_by_criteria(dc, dc2)
end.nil? # don't reject unless there is a similar element
end
# Check if one data criteria contains the others information by checking that one has everything the other has
# (or more)
def detect_criteria_covered_by_criteria(data_criteria, check_criteria)
base_checks = true
# Check whether basic features are the same
base_checks &&= data_criteria.definition == check_criteria.definition # same definition
base_checks &&= data_criteria.status == check_criteria.status # same status
# same children
base_checks &&= data_criteria.children_criteria.sort.join(',') == check_criteria.children_criteria.sort.join(',')
# Ensure it doesn't contain basic elements that should not be removed
base_checks &&= !data_criteria.variable # Ensure it's not a variable
base_checks &&= data_criteria.derivation_operator.nil? # Ensure it doesn't have a derivation operator
base_checks &&= data_criteria.subset_operators.empty? # Ensure it doesn't have a subset operator
# Ensure it doesn't have Temporal References
base_checks &&= data_criteria.temporal_references.nil? || data_criteria.temporal_references.empty?
base_checks && complex_coverage(data_criteria, check_criteria)
end
# Check elements that do not already exist; else, if they do, check if those elements are the same
# in a different, potentially matching, data criteria
def same_field_values_check(data_criteria, check_criteria)
empty = data_criteria.field_values.nil? || data_criteria.field_values.empty?
# Ignore STATUS (and ORDINAL for CMS172v5)
# The meaning of status has changed over time. Laboratory test and procedure now use status differently.
# This change is causing superficial discrepencies between the simplexml and hqmf regarding STATUS.
dc_filtered = data_criteria.field_values.except('STATUS').except('ORDINAL')
cc_filtered = check_criteria.field_values.except('STATUS').except('ORDINAL')
left = dc_filtered.nil? || dc_filtered.empty? ? nil : dc_filtered.try(:to_json)
right = cc_filtered.nil? || cc_filtered.empty? ? nil : cc_filtered.try(:to_json)
return empty || left == right
end
end
|
zeevex/zeevex_threadsafe | lib/zeevex_threadsafe/aliasing.rb | ZeevexThreadsafe.Aliasing.alias_method_chain | ruby | def alias_method_chain(target, feature)
# Strip out punctuation on predicates or bang methods since
# e.g. target?_without_feature is not a valid method name.
aliased_target, punctuation = target.to_s.sub(/([?!=])$/, ''), $1
yield(aliased_target, punctuation) if block_given?
with_method, without_method = "#{aliased_target}_with_#{feature}#{punctuation}", "#{aliased_target}_without_#{feature}#{punctuation}"
alias_method without_method, target
alias_method target, with_method
case
when public_method_defined?(without_method)
public target
when protected_method_defined?(without_method)
protected target
when private_method_defined?(without_method)
private target
end
end | Encapsulates the common pattern of:
alias_method :foo_without_feature, :foo
alias_method :foo, :foo_with_feature
With this, you simply do:
alias_method_chain :foo, :feature
And both aliases are set up for you.
Query and bang methods (foo?, foo!) keep the same punctuation:
alias_method_chain :foo?, :feature
is equivalent to
alias_method :foo_without_feature?, :foo?
alias_method :foo?, :foo_with_feature?
so you can safely chain foo, foo?, and foo! with the same feature. | train | https://github.com/zeevex/zeevex_threadsafe/blob/a486da9094204c8fb9007bf7a4668a17f97a1f22/lib/zeevex_threadsafe/aliasing.rb#L28-L47 | module Aliasing
# Encapsulates the common pattern of:
#
# alias_method :foo_without_feature, :foo
# alias_method :foo, :foo_with_feature
#
# With this, you simply do:
#
# alias_method_chain :foo, :feature
#
# And both aliases are set up for you.
#
# Query and bang methods (foo?, foo!) keep the same punctuation:
#
# alias_method_chain :foo?, :feature
#
# is equivalent to
#
# alias_method :foo_without_feature?, :foo?
# alias_method :foo?, :foo_with_feature?
#
# so you can safely chain foo, foo?, and foo! with the same feature.
# Allows you to make aliases for attributes, which includes
# getter, setter, and query methods.
#
# Example:
#
# class Content < ActiveRecord::Base
# # has a title attribute
# end
#
# class Email < Content
# alias_attribute :subject, :title
# end
#
# e = Email.find(1)
# e.title # => "Superstars"
# e.subject # => "Superstars"
# e.subject? # => true
# e.subject = "Megastars"
# e.title # => "Megastars"
def alias_attribute(new_name, old_name)
module_eval <<-STR, __FILE__, __LINE__ + 1
def #{new_name}; self.#{old_name}; end # def subject; self.title; end
def #{new_name}?; self.#{old_name}?; end # def subject?; self.title?; end
def #{new_name}=(v); self.#{old_name} = v; end # def subject=(v); self.title = v; end
STR
end
end
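# Illustrative usage sketch (assumption, not from the original source): a class
# that pulls in this module at the class level can wrap an existing method, e.g.
#
#   class Widget
#     extend ZeevexThreadsafe::Aliasing   # assumed integration point
#
#     def price; 42; end
#
#     def price_with_logging
#       puts 'price called'
#       price_without_logging
#     end
#     alias_method_chain :price, :logging
#   end
#
#   Widget.new.price  # logs, then returns 42 via price_without_logging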
|
mongodb/mongo-ruby-driver | lib/mongo/session.rb | Mongo.Session.add_txn_opts! | ruby | def add_txn_opts!(command, read)
command.tap do |c|
# The read preference should be added for all read operations.
if read && txn_read_pref = txn_read_preference
Mongo::Lint.validate_underscore_read_preference(txn_read_pref)
txn_read_pref = txn_read_pref.dup
txn_read_pref[:mode] = txn_read_pref[:mode].to_s.gsub(/(_\w)/) { |match| match[1].upcase }
Mongo::Lint.validate_camel_case_read_preference(txn_read_pref)
c['$readPreference'] = txn_read_pref
end
# The read concern should be added to any command that starts a transaction.
if starting_transaction?
# https://jira.mongodb.org/browse/SPEC-1161: transaction's
# read concern overrides collection/database/client read concerns,
# even if transaction's read concern is not set.
# Read concern here is the one sent to the server and may
# include afterClusterTime.
if rc = c[:readConcern]
rc = rc.dup
rc.delete(:level)
end
if txn_read_concern
if rc
rc.update(txn_read_concern)
else
rc = txn_read_concern.dup
end
end
if rc.nil? || rc.empty?
c.delete(:readConcern)
else
c[:readConcern ] = rc
end
end
# We need to send the read concern level as a string rather than a symbol.
if c[:readConcern] && c[:readConcern][:level]
c[:readConcern][:level] = c[:readConcern][:level].to_s
end
# The write concern should be added to any abortTransaction or commitTransaction command.
if (c[:abortTransaction] || c[:commitTransaction])
if @already_committed
wc = BSON::Document.new(c[:writeConcern] || txn_write_concern || {})
wc.merge!(w: :majority)
wc[:wtimeout] ||= 10000
c[:writeConcern] = wc
elsif txn_write_concern
c[:writeConcern] ||= txn_write_concern
end
end
# A non-numeric write concern w value needs to be sent as a string rather than a symbol.
if c[:writeConcern] && c[:writeConcern][:w] && c[:writeConcern][:w].is_a?(Symbol)
c[:writeConcern][:w] = c[:writeConcern][:w].to_s
end
end
end | Add the transactions options if applicable.
@example
session.add_txn_opts!(cmd)
@return [ Hash, BSON::Document ] The command document.
@since 2.6.0
@api private | train | https://github.com/mongodb/mongo-ruby-driver/blob/dca26d0870cb3386fad9ccc1d17228097c1fe1c8/lib/mongo/session.rb#L263-L321 | class Session
extend Forwardable
include Retryable
include Loggable
# Get the options for this session.
#
# @since 2.5.0
attr_reader :options
# Get the client through which this session was created.
#
# @since 2.5.1
attr_reader :client
# The cluster time for this session.
#
# @since 2.5.0
attr_reader :cluster_time
# The latest seen operation time for this session.
#
# @since 2.5.0
attr_reader :operation_time
# The options for the transaction currently being executed on the session.
#
# @since 2.6.0
attr_reader :txn_options
# Error message indicating that the session was retrieved from a client with a different cluster than that of the
# client through which it is currently being used.
#
# @since 2.5.0
MISMATCHED_CLUSTER_ERROR_MSG = 'The configuration of the client used to create this session does not match that ' +
'of the client owning this operation. Please only use this session for operations through its parent ' +
'client.'.freeze
# Error message describing that the session cannot be used because it has already been ended.
#
# @since 2.5.0
SESSION_ENDED_ERROR_MSG = 'This session has ended and cannot be used. Please create a new one.'.freeze
# Error message describing that sessions are not supported by the server version.
#
# @since 2.5.0
SESSIONS_NOT_SUPPORTED = 'Sessions are not supported by the connected servers.'.freeze
# The state of a session in which the last operation was not related to
# any transaction or no operations have yet occurred.
#
# @since 2.6.0
NO_TRANSACTION_STATE = :no_transaction
# The state of a session in which a user has initiated a transaction but
# no operations within the transactions have occurred yet.
#
# @since 2.6.0
STARTING_TRANSACTION_STATE = :starting_transaction
# The state of a session in which a transaction has been started and at
# least one operation has occurred, but the transaction has not yet been
# committed or aborted.
#
# @since 2.6.0
TRANSACTION_IN_PROGRESS_STATE = :transaction_in_progress
# The state of a session in which the last operation executed was a transaction commit.
#
# @since 2.6.0
TRANSACTION_COMMITTED_STATE = :transaction_committed
# The state of a session in which the last operation executed was a transaction abort.
#
# @since 2.6.0
TRANSACTION_ABORTED_STATE = :transaction_aborted
UNLABELED_WRITE_CONCERN_CODES = [
79, # UnknownReplWriteConcern
100, # CannotSatisfyWriteConcern,
].freeze
# Initialize a Session.
#
# @note Applications should use Client#start_session to begin a session.
#
# @example
# Session.new(server_session, client, options)
#
# @param [ ServerSession ] server_session The server session this session is associated with.
# @param [ Client ] client The client through which this session is created.
# @param [ Hash ] options The options for this session.
#
# @option options [ true|false ] :causal_consistency Whether to enable
# causal consistency for this session.
# @option options [ Hash ] :default_transaction_options Options to pass
# to start_transaction by default, can contain any of the options that
# start_transaction accepts.
# @option options [ true|false ] :implicit For internal driver use only -
# specifies whether the session is implicit.
# @option options [ Hash ] :read_preference The read preference options hash,
# with the following optional keys:
# - *:mode* -- the read preference as a string or symbol; valid values are
# *:primary*, *:primary_preferred*, *:secondary*, *:secondary_preferred*
# and *:nearest*.
#
# @since 2.5.0
# @api private
def initialize(server_session, client, options = {})
@server_session = server_session
options = options.dup
# Because the read preference will need to be inserted into a command as a string, we convert
# it from a symbol immediately upon receiving it.
if options[:read_preference] && options[:read_preference][:mode]
options[:read_preference][:mode] = options[:read_preference][:mode].to_s
end
@client = client.use(:admin)
@options = options.freeze
@cluster_time = nil
@state = NO_TRANSACTION_STATE
end
# Get a formatted string for use in inspection.
#
# @example Inspect the session object.
# session.inspect
#
# @return [ String ] The session inspection.
#
# @since 2.5.0
def inspect
"#<Mongo::Session:0x#{object_id} session_id=#{session_id} options=#{@options}>"
end
# End this session.
#
# @example
# session.end_session
#
# @return [ nil ] Always nil.
#
# @since 2.5.0
def end_session
if !ended? && @client
if within_states?(TRANSACTION_IN_PROGRESS_STATE)
begin
abort_transaction
rescue Mongo::Error
end
end
@client.cluster.session_pool.checkin(@server_session)
end
ensure
@server_session = nil
end
# Whether this session has ended.
#
# @example
# session.ended?
#
# @return [ true, false ] Whether the session has ended.
#
# @since 2.5.0
def ended?
@server_session.nil?
end
# Add the autocommit field to a command document if applicable.
#
# @example
# session.add_autocommit!(cmd)
#
# @return [ Hash, BSON::Document ] The command document.
#
# @since 2.6.0
# @api private
def add_autocommit!(command)
command.tap do |c|
c[:autocommit] = false if in_transaction?
end
end
# Add this session's id to a command document.
#
# @example
# session.add_id!(cmd)
#
# @return [ Hash, BSON::Document ] The command document.
#
# @since 2.5.0
# @api private
def add_id!(command)
command.merge!(lsid: session_id)
end
# Add the startTransaction field to a command document if applicable.
#
# @example
# session.add_start_transaction!(cmd)
#
# @return [ Hash, BSON::Document ] The command document.
#
# @since 2.6.0
# @api private
def add_start_transaction!(command)
command.tap do |c|
if starting_transaction?
c[:startTransaction] = true
end
end
end
# Add the transaction number to a command document if applicable.
#
# @example
# session.add_txn_num!(cmd)
#
# @return [ Hash, BSON::Document ] The command document.
#
# @since 2.6.0
# @api private
def add_txn_num!(command)
command.tap do |c|
c[:txnNumber] = BSON::Int64.new(@server_session.txn_num) if in_transaction?
end
end
# Add the transactions options if applicable.
#
# @example
# session.add_txn_opts!(cmd)
#
# @return [ Hash, BSON::Document ] The command document.
#
# @since 2.6.0
# @api private
# Remove the read concern and/or write concern from the command if not applicable.
#
# @example
# session.suppress_read_write_concern!(cmd)
#
# @return [ Hash, BSON::Document ] The command document.
#
# @since 2.6.0
# @api private
def suppress_read_write_concern!(command)
command.tap do |c|
next unless in_transaction?
c.delete(:readConcern) unless starting_transaction?
c.delete(:writeConcern) unless c[:commitTransaction] || c[:abortTransaction]
end
end
# Ensure that the read preference of a command primary.
#
# @example
# session.validate_read_preference!(command)
#
# @raise [ Mongo::Error::InvalidTransactionOperation ] If the read preference of the command is
# not primary.
#
# @since 2.6.0
# @api private
def validate_read_preference!(command)
return unless in_transaction? && non_primary_read_preference_mode?(command)
raise Mongo::Error::InvalidTransactionOperation.new(
Mongo::Error::InvalidTransactionOperation::INVALID_READ_PREFERENCE)
end
# Update the state of the session due to a (non-commit and non-abort) operation being run.
#
# @since 2.6.0
# @api private
def update_state!
case @state
when STARTING_TRANSACTION_STATE
@state = TRANSACTION_IN_PROGRESS_STATE
when TRANSACTION_COMMITTED_STATE, TRANSACTION_ABORTED_STATE
@state = NO_TRANSACTION_STATE
end
end
# Validate the session.
#
# @example
# session.validate!(cluster)
#
# @param [ Cluster ] cluster The cluster the session is attempted to be used with.
#
# @return [ nil ] nil if the session is valid.
#
# @raise [ Mongo::Error::InvalidSession ] Raise error if the session is not valid.
#
# @since 2.5.0
# @api private
def validate!(cluster)
check_matching_cluster!(cluster)
check_if_ended!
self
end
# Process a response from the server that used this session.
#
# @example Process a response from the server.
# session.process(result)
#
# @param [ Operation::Result ] result The result from the operation.
#
# @return [ Operation::Result ] The result.
#
# @since 2.5.0
# @api private
def process(result)
unless implicit?
set_operation_time(result)
set_cluster_time(result)
end
@server_session.set_last_use!
result
end
# Advance the cached cluster time document for this session.
#
# @example Advance the cluster time.
# session.advance_cluster_time(doc)
#
# @param [ BSON::Document, Hash ] new_cluster_time The new cluster time.
#
# @return [ BSON::Document, Hash ] The new cluster time.
#
# @since 2.5.0
def advance_cluster_time(new_cluster_time)
if @cluster_time
@cluster_time = [ @cluster_time, new_cluster_time ].max_by { |doc| doc[Cluster::CLUSTER_TIME] }
else
@cluster_time = new_cluster_time
end
end
# Advance the cached operation time for this session.
#
# @example Advance the operation time.
# session.advance_operation_time(timestamp)
#
# @param [ BSON::Timestamp ] new_operation_time The new operation time.
#
# @return [ BSON::Timestamp ] The max operation time, considering the current and new times.
#
# @since 2.5.0
def advance_operation_time(new_operation_time)
if @operation_time
@operation_time = [ @operation_time, new_operation_time ].max
else
@operation_time = new_operation_time
end
end
# Whether reads executed with this session can be retried according to
# the modern retryable reads specification.
#
# If this method returns true, the modern retryable reads have been
# requested by the application. If the server selected for a read operation
# supports modern retryable reads, they will be used for that particular
# operation. If the server selected for a read operation does not support
# modern retryable reads, the read will not be retried.
#
# If this method returns false, legacy retryable reads have been requested
# by the application. Legacy retryable read logic will be used regardless
# of server version of the server(s) that the client is connected to.
# The number of read retries is given by :max_read_retries client option,
# which is 1 by default and can be set to 0 to disable legacy read retries.
#
# @api private
def retry_reads?
client.options[:retry_reads] != false
end
# Will writes executed with this session be retried.
#
# @example Will writes be retried.
# session.retry_writes?
#
# @return [ true, false ] If writes will be retried.
#
# @note Retryable writes are only available on server versions at least 3.6
# and with sharded clusters or replica sets.
#
# @since 2.5.0
def retry_writes?
!!client.options[:retry_writes] && (cluster.replica_set? || cluster.sharded?)
end
# Get the server session id of this session, if the session was not ended.
# If the session was ended, returns nil.
#
# @example Get the session id.
# session.session_id
#
# @return [ BSON::Document ] The server session id.
#
# @since 2.5.0
def session_id
if ended?
raise Error::SessionEnded
end
@server_session.session_id
end
# Increment and return the next transaction number.
#
# @example Get the next transaction number.
# session.next_txn_num
#
# @return [ Integer ] The next transaction number.
#
# @since 2.5.0
# @api private
def next_txn_num
if ended?
raise Error::SessionEnded
end
@server_session.next_txn_num
end
# Get the current transaction number.
#
# @example Get the current transaction number.
# session.txn_num
#
# @return [ Integer ] The current transaction number.
#
# @since 2.6.0
def txn_num
if ended?
raise Error::SessionEnded
end
@server_session.txn_num
end
# Is this session an implicit one (not user-created).
#
# @example Is the session implicit?
# session.implicit?
#
# @return [ true, false ] Whether this session is implicit.
#
# @since 2.5.1
def implicit?
@implicit ||= !!(@options.key?(:implicit) && @options[:implicit] == true)
end
# Is this session an explicit one (i.e. user-created).
#
# @example Is the session explicit?
# session.explicit?
#
# @return [ true, false ] Whether this session is explicit.
#
# @since 2.5.2
def explicit?
@explicit ||= !implicit?
end
# Places subsequent operations in this session into a new transaction.
#
# Note that the transaction will not be started on the server until an
# operation is performed after start_transaction is called.
#
# @example Start a new transaction
# session.start_transaction(options)
#
# @param [ Hash ] options The options for the transaction being started.
#
# @option options [ Hash ] read_concern The read concern options hash,
# with the following optional keys:
# - *:level* -- the read preference level as a symbol; valid values
# are *:local*, *:majority*, and *:snapshot*
# @option options [ Hash ] :write_concern The write concern options. Can be :w =>
# Integer|String, :fsync => Boolean, :j => Boolean.
# @option options [ Hash ] :read The read preference options. The hash may have the following
# items:
# - *:mode* -- read preference specified as a symbol; the only valid value is
# *:primary*.
#
# @raise [ Error::InvalidTransactionOperation ] If a transaction is already in
# progress or if the write concern is unacknowledged.
#
# @since 2.6.0
def start_transaction(options = nil)
if options
Lint.validate_read_concern_option(options[:read_concern])
end
check_if_ended!
if within_states?(STARTING_TRANSACTION_STATE, TRANSACTION_IN_PROGRESS_STATE)
raise Mongo::Error::InvalidTransactionOperation.new(
Mongo::Error::InvalidTransactionOperation::TRANSACTION_ALREADY_IN_PROGRESS)
end
next_txn_num
@txn_options = options || @options[:default_transaction_options] || {}
if txn_write_concern && WriteConcern.send(:unacknowledged?, txn_write_concern)
raise Mongo::Error::InvalidTransactionOperation.new(
Mongo::Error::InvalidTransactionOperation::UNACKNOWLEDGED_WRITE_CONCERN)
end
@state = STARTING_TRANSACTION_STATE
@already_committed = false
end
# Commit the currently active transaction on the session.
#
# @example Commits the transaction.
# session.commit_transaction
#
# @option options :write_concern [ nil | WriteConcern::Base ] The write
# concern to use for this operation.
#
# @raise [ Error::InvalidTransactionOperation ] If there is no active transaction.
#
# @since 2.6.0
def commit_transaction(options=nil)
check_if_ended!
check_if_no_transaction!
if within_states?(TRANSACTION_ABORTED_STATE)
raise Mongo::Error::InvalidTransactionOperation.new(
Mongo::Error::InvalidTransactionOperation.cannot_call_after_msg(
:abortTransaction, :commitTransaction))
end
options ||= {}
begin
# If commitTransaction is called twice, we need to run the same commit
# operation again, so we revert the session to the previous state.
if within_states?(TRANSACTION_COMMITTED_STATE)
@state = @last_commit_skipped ? STARTING_TRANSACTION_STATE : TRANSACTION_IN_PROGRESS_STATE
@already_committed = true
end
if starting_transaction?
@last_commit_skipped = true
else
@last_commit_skipped = false
write_concern = options[:write_concern] || txn_options[:write_concern]
if write_concern && !write_concern.is_a?(WriteConcern::Base)
write_concern = WriteConcern.get(write_concern)
end
write_with_retry(self, write_concern, true) do |server, txn_num, is_retry|
if is_retry
if write_concern
wco = write_concern.options.merge(w: :majority)
wco[:wtimeout] ||= 10000
write_concern = WriteConcern.get(wco)
else
write_concern = WriteConcern.get(w: :majority, wtimeout: 10000)
end
end
Operation::Command.new(
selector: { commitTransaction: 1 },
db_name: 'admin',
session: self,
txn_num: txn_num,
write_concern: write_concern,
).execute(server)
end
end
rescue Mongo::Error::NoServerAvailable, Mongo::Error::SocketError => e
e.send(:add_label, Mongo::Error::UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)
raise e
rescue Mongo::Error::OperationFailure => e
err_doc = e.instance_variable_get(:@result).send(:first_document)
if e.write_retryable? || (err_doc['writeConcernError'] &&
!UNLABELED_WRITE_CONCERN_CODES.include?(err_doc['writeConcernError']['code']))
e.send(:add_label, Mongo::Error::UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)
end
raise e
ensure
@state = TRANSACTION_COMMITTED_STATE
end
end
# Abort the currently active transaction without making any changes to the database.
#
# @example Abort the transaction.
# session.abort_transaction
#
# @raise [ Error::InvalidTransactionOperation ] If there is no active transaction.
#
# @since 2.6.0
def abort_transaction
check_if_ended!
check_if_no_transaction!
if within_states?(TRANSACTION_COMMITTED_STATE)
raise Mongo::Error::InvalidTransactionOperation.new(
Mongo::Error::InvalidTransactionOperation.cannot_call_after_msg(
:commitTransaction, :abortTransaction))
end
if within_states?(TRANSACTION_ABORTED_STATE)
raise Mongo::Error::InvalidTransactionOperation.new(
Mongo::Error::InvalidTransactionOperation.cannot_call_twice_msg(:abortTransaction))
end
begin
unless starting_transaction?
write_with_retry(self, txn_options[:write_concern], true) do |server, txn_num|
Operation::Command.new(
selector: { abortTransaction: 1 },
db_name: 'admin',
session: self,
txn_num: txn_num
).execute(server)
end
end
@state = TRANSACTION_ABORTED_STATE
rescue Mongo::Error::InvalidTransactionOperation
raise
rescue Mongo::Error
@state = TRANSACTION_ABORTED_STATE
rescue Exception
@state = TRANSACTION_ABORTED_STATE
raise
end
end
# Whether or not the session is currently in a transaction.
#
# @example Is the session in a transaction?
# session.in_transaction?
#
# @return [ true | false ] Whether or not the session in a transaction.
#
# @since 2.6.0
def in_transaction?
within_states?(STARTING_TRANSACTION_STATE, TRANSACTION_IN_PROGRESS_STATE)
end
# Executes the provided block in a transaction, retrying as necessary.
#
# Returns the return value of the block.
#
# Exact number of retries and when they are performed are implementation
# details of the driver; the provided block should be idempotent, and
# should be prepared to be called more than once. The driver may retry
# the commit command within an active transaction or it may repeat the
# transaction and invoke the block again, depending on the error
# encountered if any. Note also that the retries may be executed against
# different servers.
#
# Transactions cannot be nested - InvalidTransactionOperation will be raised
# if this method is called when the session already has an active transaction.
#
# Exceptions raised by the block which are not derived from Mongo::Error
# stop processing, abort the transaction and are propagated out of
# with_transaction. Exceptions derived from Mongo::Error may be
# handled by with_transaction, resulting in retries of the process.
#
# Currently, with_transaction will retry commits and block invocations
# until at least 120 seconds have passed since with_transaction started
# executing. This timeout is not configurable and may change in a future
# driver version.
#
# @note with_transaction contains a loop, therefore the if with_transaction
# itself is placed in a loop, its block should not call next or break to
# control the outer loop because this will instead affect the loop in
# with_transaction. The driver will warn and abort the transaction
# if it detects this situation.
#
# @example Execute a statement in a transaction
# session.with_transaction(write_concern: {w: :majority}) do
# collection.update_one({ id: 3 }, { '$set' => { status: 'Inactive'} },
# session: session)
#
# end
#
# @example Execute a statement in a transaction, limiting total time consumed
# Timeout.timeout(5) do
# session.with_transaction(write_concern: {w: :majority}) do
# collection.update_one({ id: 3 }, { '$set' => { status: 'Inactive'} },
# session: session)
#
# end
# end
#
# @param [ Hash ] options The options for the transaction being started.
# These are the same options that start_transaction accepts.
#
# @raise [ Error::InvalidTransactionOperation ] If a transaction is already in
# progress or if the write concern is unacknowledged.
#
# @since 2.7.0
def with_transaction(options=nil)
# Non-configurable 120 second timeout for the entire operation
deadline = Time.now + 120
transaction_in_progress = false
loop do
commit_options = {}
if options
commit_options[:write_concern] = options[:write_concern]
end
start_transaction(options)
transaction_in_progress = true
begin
rv = yield self
rescue Exception => e
if within_states?(STARTING_TRANSACTION_STATE, TRANSACTION_IN_PROGRESS_STATE)
abort_transaction
transaction_in_progress = false
end
if Time.now >= deadline
transaction_in_progress = false
raise
end
if e.is_a?(Mongo::Error) && e.label?(Mongo::Error::TRANSIENT_TRANSACTION_ERROR_LABEL)
next
end
raise
else
if within_states?(TRANSACTION_ABORTED_STATE, NO_TRANSACTION_STATE, TRANSACTION_COMMITTED_STATE)
transaction_in_progress = false
return rv
end
begin
commit_transaction(commit_options)
transaction_in_progress = false
return rv
rescue Mongo::Error => e
if e.label?(Mongo::Error::UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)
# WriteConcernFailed
if e.is_a?(Mongo::Error::OperationFailure) && e.code == 64 && e.wtimeout?
transaction_in_progress = false
raise
end
if Time.now >= deadline
transaction_in_progress = false
raise
end
wc_options = case v = commit_options[:write_concern]
when WriteConcern::Base
v.options
when nil
{}
else
v
end
commit_options[:write_concern] = wc_options.merge(w: :majority)
retry
elsif e.label?(Mongo::Error::TRANSIENT_TRANSACTION_ERROR_LABEL)
if Time.now >= deadline
transaction_in_progress = false
raise
end
next
else
transaction_in_progress = false
raise
end
end
end
end
ensure
if transaction_in_progress
log_warn('with_transaction callback altered with_transaction loop, aborting transaction')
begin
abort_transaction
rescue Error::OperationFailure, Error::InvalidTransactionOperation
end
end
end
# Get the read preference the session will use in the currently
# active transaction.
#
# This is a driver style hash with underscore keys.
#
# @example Get the transaction's read preference
# session.txn_read_preference
#
# @return [ Hash ] The read preference of the transaction.
#
# @since 2.6.0
def txn_read_preference
rp = txn_options && txn_options[:read_preference] ||
@client.read_preference
Mongo::Lint.validate_underscore_read_preference(rp)
rp
end
def cluster
@client.cluster
end
protected
# Get the read concern the session will use when starting a transaction.
#
# This is a driver style hash with underscore keys.
#
# @example Get the session's transaction read concern.
# session.txn_read_concern
#
# @return [ Hash ] The read concern used for starting transactions.
#
# @since 2.9.0
def txn_read_concern
# Read concern is inherited from client but not db or collection.
txn_options && txn_options[:read_concern] || @client.read_concern
end
private
def within_states?(*states)
states.include?(@state)
end
def starting_transaction?
within_states?(STARTING_TRANSACTION_STATE)
end
def check_if_no_transaction!
return unless within_states?(NO_TRANSACTION_STATE)
raise Mongo::Error::InvalidTransactionOperation.new(
Mongo::Error::InvalidTransactionOperation::NO_TRANSACTION_STARTED)
end
def txn_write_concern
(txn_options && txn_options[:write_concern]) ||
(@client.write_concern && @client.write_concern.options)
end
def non_primary_read_preference_mode?(command)
return false unless command['$readPreference']
mode = command['$readPreference']['mode'] || command['$readPreference'][:mode]
mode && mode != 'primary'
end
# Returns causal consistency document if the last operation time is
# known and causal consistency is enabled, otherwise returns nil.
def causal_consistency_doc
if operation_time && causal_consistency?
{:afterClusterTime => operation_time}
else
nil
end
end
def causal_consistency?
@causal_consistency ||= (if @options.key?(:causal_consistency)
!!@options[:causal_consistency]
else
true
end)
end
def set_operation_time(result)
if result && result.operation_time
@operation_time = result.operation_time
end
end
def set_cluster_time(result)
if cluster_time_doc = result.cluster_time
if @cluster_time.nil?
@cluster_time = cluster_time_doc
elsif cluster_time_doc[Cluster::CLUSTER_TIME] > @cluster_time[Cluster::CLUSTER_TIME]
@cluster_time = cluster_time_doc
end
end
end
def check_if_ended!
raise Mongo::Error::InvalidSession.new(SESSION_ENDED_ERROR_MSG) if ended?
end
def check_matching_cluster!(cluster)
if @client.cluster != cluster
raise Mongo::Error::InvalidSession.new(MISMATCHED_CLUSTER_ERROR_MSG)
end
end
end
|
grpc/grpc | src/ruby/lib/grpc/errors.rb | GRPC.BadStatus.to_rpc_status | ruby | def to_rpc_status
status = to_status
return if status.nil?
GoogleRpcStatusUtils.extract_google_rpc_status(status)
rescue Google::Protobuf::ParseError => parse_error
GRPC.logger.warn('parse error: to_rpc_status failed')
GRPC.logger.warn(parse_error)
nil
end | Converts the exception to a deserialized {Google::Rpc::Status} object.
Returns `nil` if the `grpc-status-details-bin` trailer could not be
converted to a {Google::Rpc::Status} due to the server not providing
the necessary trailers.
@return [Google::Rpc::Status, nil] | train | https://github.com/grpc/grpc/blob/f3937f0e55227a4ef3a23f895d3b204a947610f8/src/ruby/lib/grpc/errors.rb#L60-L70 | class BadStatus < StandardError
attr_reader :code, :details, :metadata
include GRPC::Core::StatusCodes
# @param code [Numeric] the status code
# @param details [String] the details of the exception
# @param metadata [Hash] the error's metadata
def initialize(code, details = 'unknown cause', metadata = {})
super("#{code}:#{details}")
@code = code
@details = details
@metadata = metadata
end
# Converts the exception to a {Struct::Status} for use in the networking
# wrapper layer.
#
# @return [Struct::Status] with the same code and details
def to_status
Struct::Status.new(code, details, metadata)
end
# Converts the exception to a deserialized {Google::Rpc::Status} object.
# Returns `nil` if the `grpc-status-details-bin` trailer could not be
# converted to a {Google::Rpc::Status} due to the server not providing
# the necessary trailers.
#
# @return [Google::Rpc::Status, nil]
def self.new_status_exception(code, details = 'unknown cause',
metadata = {})
codes = {}
codes[OK] = Ok
codes[CANCELLED] = Cancelled
codes[UNKNOWN] = Unknown
codes[INVALID_ARGUMENT] = InvalidArgument
codes[DEADLINE_EXCEEDED] = DeadlineExceeded
codes[NOT_FOUND] = NotFound
codes[ALREADY_EXISTS] = AlreadyExists
codes[PERMISSION_DENIED] = PermissionDenied
codes[UNAUTHENTICATED] = Unauthenticated
codes[RESOURCE_EXHAUSTED] = ResourceExhausted
codes[FAILED_PRECONDITION] = FailedPrecondition
codes[ABORTED] = Aborted
codes[OUT_OF_RANGE] = OutOfRange
codes[UNIMPLEMENTED] = Unimplemented
codes[INTERNAL] = Internal
codes[UNAVAILABLE] = Unavailable
codes[DATA_LOSS] = DataLoss
if codes[code].nil?
BadStatus.new(code, details, metadata)
else
codes[code].new(details, metadata)
end
end
end
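# Illustrative usage sketch (assumption, not from the original source):
#
#   begin
#     stub.some_rpc(request)            # hypothetical client call
#   rescue GRPC::BadStatus => e
#     rpc_status = e.to_rpc_status      # Google::Rpc::Status or nil
#     warn rpc_status.message unless rpc_status.nil?
#   end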
|
algolia/algoliasearch-client-ruby | lib/algolia/client.rb | Algolia.Client.copy_synonyms! | ruby | def copy_synonyms!(src_index, dst_index, request_options = {})
res = copy_synonyms(src_index, dst_index, request_options)
wait_task(dst_index, res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end | Copy an existing index synonyms and wait until the copy has been processed.
@param src_index the name of index to copy.
@param dst_index the new index name that will contains a copy of srcIndexName synonyms (destination synonyms will be overriten if it already exist).
@param request_options contains extra parameters to send with your query | train | https://github.com/algolia/algoliasearch-client-ruby/blob/5292cd9b1029f879e4e0257a3e89d0dc9ad0df3b/lib/algolia/client.rb#L265-L269 | class Client
attr_reader :ssl, :ssl_version, :hosts, :search_hosts, :application_id, :api_key, :headers, :connect_timeout, :send_timeout, :receive_timeout, :search_timeout, :batch_timeout
DEFAULT_CONNECT_TIMEOUT = 2
DEFAULT_RECEIVE_TIMEOUT = 30
DEFAULT_SEND_TIMEOUT = 30
DEFAULT_BATCH_TIMEOUT = 120
DEFAULT_SEARCH_TIMEOUT = 5
DEFAULT_USER_AGENT = ["Algolia for Ruby (#{::Algolia::VERSION})", "Ruby (#{RUBY_VERSION})"]
def initialize(data = {})
raise ArgumentError.new('No APPLICATION_ID provided, please set :application_id') if data[:application_id].nil?
@ssl = data[:ssl].nil? ? true : data[:ssl]
@ssl_version = data[:ssl_version].nil? ? nil : data[:ssl_version]
@gzip = data[:gzip].nil? ? true : data[:gzip]
@application_id = data[:application_id]
@api_key = data[:api_key]
@hosts = data[:hosts] || (["#{@application_id}.algolia.net"] + 1.upto(3).map { |i| "#{@application_id}-#{i}.algolianet.com" }.shuffle)
@search_hosts = data[:search_hosts] || data[:hosts] || (["#{@application_id}-dsn.algolia.net"] + 1.upto(3).map { |i| "#{@application_id}-#{i}.algolianet.com" }.shuffle)
@connect_timeout = data[:connect_timeout] || DEFAULT_CONNECT_TIMEOUT
@send_timeout = data[:send_timeout] || DEFAULT_SEND_TIMEOUT
@batch_timeout = data[:batch_timeout] || DEFAULT_BATCH_TIMEOUT
@receive_timeout = data[:receive_timeout] || DEFAULT_RECEIVE_TIMEOUT
@search_timeout = data[:search_timeout] || DEFAULT_SEARCH_TIMEOUT
@headers = {
Protocol::HEADER_API_KEY => api_key,
Protocol::HEADER_APP_ID => application_id,
'Content-Type' => 'application/json; charset=utf-8',
'User-Agent' => DEFAULT_USER_AGENT.push(data[:user_agent]).compact.join('; ')
}
end
def destroy
Thread.current["algolia_search_hosts_#{application_id}"] = nil
Thread.current["algolia_hosts_#{application_id}"] = nil
Thread.current["algolia_host_index_#{application_id}"] = nil
Thread.current["algolia_search_host_index_#{application_id}"] = nil
end
#
# Initialize a new index
#
def init_index(name)
Index.new(name, self)
end
#
# Initialize analytics helper
#
def init_analytics()
Analytics.new(self, { :headers => @headers })
end
#
# Allow setting custom headers
#
def set_extra_header(key, value)
headers[key] = value
end
#
# Allow using the IP rate limit when you have a proxy between the end user and Algolia.
# This option will set the X-Forwarded-For HTTP header with the client IP and the X-Forwarded-API-Key with the API Key having rate limits.
#
# @param admin_api_key the admin API Key you can find in your dashboard
# @param end_user_ip the end user IP (you can use both IPV4 or IPV6 syntax)
# @param rate_limit_api_key the API key on which you have a rate limit
#
def enable_rate_limit_forward(admin_api_key, end_user_ip, rate_limit_api_key)
headers[Protocol::HEADER_API_KEY] = admin_api_key
headers[Protocol::HEADER_FORWARDED_IP] = end_user_ip
headers[Protocol::HEADER_FORWARDED_API_KEY] = rate_limit_api_key
end
#
# Disable IP rate limit enabled with enableRateLimitForward() function
#
def disable_rate_limit_forward
headers[Protocol::HEADER_API_KEY] = api_key
headers.delete(Protocol::HEADER_FORWARDED_IP)
headers.delete(Protocol::HEADER_FORWARDED_API_KEY)
end
#
# Convenience method that wraps enable_rate_limit_forward/disable_rate_limit_forward
#
def with_rate_limits(end_user_ip, rate_limit_api_key, &block)
enable_rate_limit_forward(api_key, end_user_ip, rate_limit_api_key)
begin
yield
ensure
disable_rate_limit_forward
end
end
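# Illustrative use (index name, IP and key values are assumptions):
#
#   index = client.init_index('contacts')
#   client.with_rate_limits('203.0.113.10', 'rate-limited-search-key') do
#     index.search('jimmy')
#   end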
#
# This method allows to query multiple indexes with one API call
#
# @param queries the array of hash representing the query and associated index name
# @param options - accepts those keys:
# - index_name_key the name of the key used to fetch the index_name (:index_name by default)
# - strategy define the strategy applied on the sequential searches (none by default)
# - request_options contains extra parameters to send with your query
#
def multiple_queries(queries, options = nil, strategy = nil)
if options.is_a?(Hash)
index_name_key = options.delete(:index_name_key) || options.delete('index_name_key')
strategy = options.delete(:strategy) || options.delete('strategy')
request_options = options.delete(:request_options) || options.delete('request_options')
else
# Deprecated def multiple_queries(queries, index_name_key, strategy)
index_name_key = options
end
index_name_key ||= :index_name
strategy ||= 'none'
request_options ||= {}
requests = {
:requests => queries.map do |query|
query = query.dup
index_name = query.delete(index_name_key) || query.delete(index_name_key.to_s)
raise ArgumentError.new("Missing '#{index_name_key}' option") if index_name.nil?
encoded_params = Hash[query.map { |k, v| [k.to_s, v.is_a?(Array) ? v.to_json : v] }]
{ :indexName => index_name, :params => Protocol.to_query(encoded_params) }
end
}
post(Protocol.multiple_queries_uri(strategy), requests.to_json, :search, request_options)
end
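# Illustrative call (index names are assumptions); each query hash carries its
# index name under :index_name (or the configured :index_name_key):
#
#   client.multiple_queries([
#     { index_name: 'products', query: 'phone', hitsPerPage: 5 },
#     { index_name: 'brands', query: 'phone' }
#   ], strategy: 'stopIfEnoughMatches')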
#
# Get objects by objectID across multiple indexes
#
# @param requests [
# { "indexName" => index_name_1, "objectID" => "obj1" },
# { "indexName" => index_name_2, "objectID" => "obj2" }
# ]
#
def multiple_get_objects(requests, request_options = {})
post(Protocol.objects_uri, {:requests => requests}.to_json, :search, request_options)
end
#
# List all existing indexes
# return an Answer object with answer in the form
# {"items": [{ "name": "contacts", "createdAt": "2013-01-18T15:33:13.556Z"},
# {"name": "notes", "createdAt": "2013-01-18T15:33:13.556Z"}]}
#
# @param request_options contains extra parameters to send with your query
#
def list_indexes(request_options = {})
get(Protocol.indexes_uri, :read, request_options)
end
#
# Move an existing index.
#
# @param src_index the name of the index to copy.
# @param dst_index the new index name that will contain a copy of srcIndexName (destination will be overwritten if it already exists).
# @param request_options contains extra parameters to send with your query
#
def move_index(src_index, dst_index, request_options = {})
request = { 'operation' => 'move', 'destination' => dst_index }
post(Protocol.index_operation_uri(src_index), request.to_json, :write, request_options)
end
#
# Move an existing index and wait until the move has been processed
#
# @param src_index the name of the index to copy.
# @param dst_index the new index name that will contain a copy of srcIndexName (destination will be overwritten if it already exists).
# @param request_options contains extra parameters to send with your query
#
def move_index!(src_index, dst_index, request_options = {})
res = move_index(src_index, dst_index, request_options)
wait_task(dst_index, res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Copy an existing index.
#
# @param src_index the name of the index to copy.
# @param dst_index the new index name that will contain a copy of srcIndexName (destination will be overwritten if it already exists).
# @param scope the optional list of scopes to copy (all if not specified).
# @param request_options contains extra parameters to send with your query
#
def copy_index(src_index, dst_index, scope = nil, request_options = {})
request = { 'operation' => 'copy', 'destination' => dst_index }
request['scope'] = scope unless scope.nil?
post(Protocol.index_operation_uri(src_index), request.to_json, :write, request_options)
end
#
# Copy an existing index and wait until the copy has been processed.
#
# @param src_index the name of the index to copy.
# @param dst_index the new index name that will contain a copy of srcIndexName (destination will be overwritten if it already exists).
# @param scope the optional list of scopes to copy (all if not specified).
# @param request_options contains extra parameters to send with your query
#
def copy_index!(src_index, dst_index, scope = nil, request_options = {})
res = copy_index(src_index, dst_index, scope, request_options)
wait_task(dst_index, res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Copy an existing index's settings.
#
# @param src_index the name of the index to copy.
# @param dst_index the new index name that will contain a copy of srcIndexName's settings (destination's settings will be overwritten if they already exist).
# @param request_options contains extra parameters to send with your query
#
def copy_settings(src_index, dst_index, request_options = {})
copy_index(src_index, dst_index, ['settings'], request_options)
end
#
# Copy an existing index's settings and wait until the copy has been processed.
#
# @param src_index the name of the index to copy.
# @param dst_index the new index name that will contain a copy of the srcIndexName settings (destination settings will be overwritten if they already exist).
# @param request_options contains extra parameters to send with your query
#
def copy_settings!(src_index, dst_index, request_options = {})
res = copy_settings(src_index, dst_index, request_options)
wait_task(dst_index, res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Copy an existing index's synonyms.
#
# @param src_index the name of the index to copy.
# @param dst_index the new index name that will contain a copy of srcIndexName's synonyms (destination's synonyms will be overwritten if they already exist).
# @param request_options contains extra parameters to send with your query
#
def copy_synonyms(src_index, dst_index, request_options = {})
copy_index(src_index, dst_index, ['synonyms'], request_options)
end
#
# Copy an existing index's synonyms and wait until the copy has been processed.
#
# @param src_index the name of the index to copy.
# @param dst_index the new index name that will contain a copy of the srcIndexName synonyms (destination synonyms will be overwritten if they already exist).
# @param request_options contains extra parameters to send with your query
#
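# Illustrative call (index names are assumptions):
#   client.copy_synonyms!('articles_staging', 'articles_production')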
#
# Copy an existing index's rules.
#
# @param src_index the name of the index to copy.
# @param dst_index the new index name that will contain a copy of srcIndexName's rules (destination's rules will be overwritten if they already exist).
# @param request_options contains extra parameters to send with your query
#
def copy_rules(src_index, dst_index, request_options = {})
copy_index(src_index, dst_index, ['rules'], request_options)
end
#
# Copy an existing index's rules and wait until the copy has been processed.
#
# @param src_index the name of the index to copy.
# @param dst_index the new index name that will contain a copy of the srcIndexName rules (destination rules will be overwritten if they already exist).
# @param request_options contains extra parameters to send with your query
#
def copy_rules!(src_index, dst_index, request_options = {})
res = copy_rules(src_index, dst_index, request_options)
wait_task(dst_index, res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Delete an index
# @param name the name of the index to delete
# @param request_options contains extra parameters to send with your query
#
def delete_index(name, request_options = {})
init_index(name).delete(request_options)
end
#
# Delete an index and wait until the deletion has been processed.
# @param name the name of the index to delete
# @param request_options contains extra parameters to send with your query
#
def delete_index!(name, request_options = {})
init_index(name).delete!(request_options)
end
#
# Return the last log entries.
#
# @param options - accepts those keys:
# - offset Specify the first entry to retrieve (0-based, 0 is the most recent log entry) - Default = 0
# - length Specify the maximum number of entries to retrieve starting at offset. Maximum allowed value: 1000 - Default = 10
# - type Type of log entries to retrieve ("all", "query", "build" or "error") - Default = 'all'
# - request_options contains extra parameters to send with your query
#
def get_logs(options = nil, length = nil, type = nil)
if options.is_a?(Hash)
offset = options.delete('offset') || options.delete(:offset)
length = options.delete('length') || options.delete(:length)
type = options.delete('type') || options.delete(:type)
request_options = options.delete('request_options') || options.delete(:request_options)
else
# Deprecated def get_logs(offset, length, type)
offset = options
end
length ||= 10
type = 'all' if type.nil?
type = type ? 'error' : 'all' if type.is_a?(true.class)
request_options ||= {}
get(Protocol.logs(offset, length, type), :write, request_options)
end
#
# List all existing user keys with their associated ACLs
#
# @param request_options contains extra parameters to send with your query
#
def list_api_keys(request_options = {})
get(Protocol.keys_uri, :read, request_options)
end
#
# Get ACL of a user key
#
# @param request_options contains extra parameters to send with your query
#
def get_api_key(key, request_options = {})
get(Protocol.key_uri(key), :read, request_options)
end
#
# Create a new user key
#
# Deprecated call was add_api_key(acl, validity, maxQueriesPerIPPerHour, maxHitsPerQuery, indexes)
#
# ACL can contain an array with those strings:
# - search: allow to search (https and http)
# - addObject: allows to add/update an object in the index (https only)
# - deleteObject : allows to delete an existing object (https only)
# - deleteIndex : allows to delete index content (https only)
# - settings : allows to get index settings (https only)
# - editSettings : allows to change index settings (https only)
#
# @param object The list of parameters for this key.
# Defined by a Hash that can contain the following values:
# - acl: array of string
# - indexes: array of string
# - validity: int
# - referers: array of string
# - description: string
# - maxHitsPerQuery: integer
# - queryParameters: string
# - maxQueriesPerIPPerHour: integer
# @param request_options contains extra parameters to send with your query - Default = {}
#
def add_api_key(object, request_options = {}, max_queries_per_IP_per_hour = 0, max_hits_per_query = 0, indexes = nil)
if object.instance_of?(Array)
params = { :acl => object }
else
params = object
end
validity = 0
unless request_options.is_a?(Hash)
validity = request_options
request_options = {}
end
params[:indexes] = indexes if indexes
params['validity'] = validity.to_i if validity != 0
params['maxHitsPerQuery'] = max_hits_per_query.to_i if max_hits_per_query != 0
params['maxQueriesPerIPPerHour'] = max_queries_per_IP_per_hour.to_i if max_queries_per_IP_per_hour != 0
post(Protocol.keys_uri, params.to_json, :write, request_options)
end
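# Illustrative key creation (ACL and values are assumptions):
#
#   client.add_api_key(acl: ['search'], description: 'search-only key', validity: 3600)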
#
# Update a user key
#
# Deprecated call was update_api_key(key, acl, validity, max_queries_per_IP_per_hour, max_hits_per_query, indexes)
#
# ACL can contain an array with those strings:
# - search: allow to search (https and http)
# - addObject: allows to add/update an object in the index (https only)
# - deleteObject : allows to delete an existing object (https only)
# - deleteIndex : allows to delete index content (https only)
# - settings : allows to get index settings (https only)
# - editSettings : allows to change index settings (https only)
#
# @param key API Key to update
# @param object The list of parameters for this key.
# Defined by a Hash that can contain the following values:
# - acl: array of string
# - indexes: array of string
# - validity: int
# - referers: array of string
# - description: string
# - maxHitsPerQuery: integer
# - queryParameters: string
# - maxQueriesPerIPPerHour: integer
# @param request_options contains extra parameters to send with your query - Default = {}
#
def update_api_key(key, object, request_options = {}, max_queries_per_IP_per_hour = 0, max_hits_per_query = 0, indexes = nil)
if object.instance_of?(Array)
params = { :acl => object }
else
params = object
end
validity = 0
unless request_options.is_a?(Hash)
validity = request_options
request_options = {}
end
params[:indexes] = indexes if indexes
params['validity'] = validity.to_i if validity != 0
params['maxQueriesPerIPPerHour'] = max_queries_per_IP_per_hour.to_i if max_queries_per_IP_per_hour != 0
params['maxHitsPerQuery'] = max_hits_per_query.to_i if max_hits_per_query != 0
put(Protocol.key_uri(key), params.to_json, :write, request_options)
end
#
# Delete an existing user key
#
def delete_api_key(key, request_options = {})
delete(Protocol.key_uri(key), :write, request_options)
end
#
# Restore a deleted api key
#
def restore_api_key(key, request_options = {})
post(Protocol.restore_key_uri(key), :write, request_options)
end
#
# Send a batch request targeting multiple indices
#
def batch(operations, request_options = {})
post(Protocol.batch_uri, { 'requests' => operations }.to_json, :batch, request_options)
end
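# Illustrative cross-index batch (index names and objects are assumptions):
#
#   client.batch([
#     { action: 'addObject', indexName: 'contacts', body: { name: 'Ada' } },
#     { action: 'updateObject', indexName: 'notes', body: { objectID: '42', text: 'hi' } }
#   ])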
#
# Send a batch request targeting multiple indices and wait for the end of the indexing
#
def batch!(operations, request_options = {})
res = batch(operations, request_options)
res['taskID'].each do |index, taskID|
wait_task(index, taskID, WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
end
end
#
# Check the status of a task on the server.
# All server tasks are asynchronous and you can check the status of a task with this method.
#
# @param index_name the index name owning the taskID
# @param taskID the id of the task returned by server
# @param request_options contains extra parameters to send with your query
#
def get_task_status(index_name, taskID, request_options = {})
get(Protocol.task_uri(index_name, taskID), :read, request_options)['status']
end
#
# Wait for the publication of a task on the server.
# All server tasks are asynchronous and you can use this method to check that a task has been published.
#
# @param index_name the index name owning the taskID
# @param taskID the id of the task returned by server
# @param time_before_retry the time in milliseconds before retry (default = 100ms)
# @param request_options contains extra parameters to send with your query
#
def wait_task(index_name, taskID, time_before_retry = WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options = {})
loop do
status = get_task_status(index_name, taskID, request_options)
if status == 'published'
return
end
sleep(time_before_retry.to_f / 1000)
end
end
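# Illustrative wait on an asynchronous operation (index names are assumptions):
#
#   res = client.copy_settings('articles_staging', 'articles_production')
#   client.wait_task('articles_production', res['taskID'])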
def get_personalization_strategy(request_options = {})
get(Protocol.personalization_strategy_uri, :read, request_options)
end
def set_personalization_strategy(strategy, request_options = {})
post(Protocol.personalization_strategy_uri, strategy.to_json, :write, request_options)
end
#
# Multicluster management
#
def list_clusters(request_options = {})
get(Protocol.clusters_uri, :read, request_options)
end
def list_user_ids(page = 0, hits_per_page = 20, request_options = {})
get(Protocol.list_ids_uri(page, hits_per_page), :read, request_options)
end
def get_top_user_ids(request_options = {})
get(Protocol.cluster_top_user_uri, :read, request_options)
end
def assign_user_id(user_id, cluster_name, request_options = {})
request_options = add_header_to_request_options(request_options, { :'X-Algolia-User-ID' => user_id})
body = { :cluster => cluster_name }
post(Protocol.cluster_mapping_uri, body.to_json, :write, request_options)
end
def get_user_id(user_id, request_options = {})
get(Protocol.cluster_mapping_uri(user_id), :read, request_options)
end
def remove_user_id(user_id, request_options = {})
request_options = add_header_to_request_options(request_options, { :'X-Algolia-User-ID' => user_id})
delete(Protocol.cluster_mapping_uri, :write, request_options)
end
def search_user_id(query, cluster_name = nil, page = nil, hits_per_page = nil, request_options = {})
body = { :query => query }
body[:clusterName] = cluster_name unless cluster_name.nil?
body[:page] = page unless page.nil?
body[:hitsPerPage] = hits_per_page unless hits_per_page.nil?
post(Protocol.search_user_id_uri, body.to_json, :read, request_options)
end
# Perform an HTTP request for the given uri and method
# with common basic response handling. Will raise a
# AlgoliaProtocolError if the response has an error status code,
# and will return the parsed JSON body on success, if there is one.
#
def request(uri, method, data = nil, type = :write, request_options = {})
exceptions = []
connect_timeout = @connect_timeout
send_timeout = if type == :search
@search_timeout
elsif type == :batch
type = :write
@batch_timeout
else
@send_timeout
end
receive_timeout = type == :search ? @search_timeout : @receive_timeout
thread_local_hosts(type != :write).each_with_index do |host, i|
connect_timeout += 2 if i == 2
send_timeout += 10 if i == 2
receive_timeout += 10 if i == 2
thread_index_key = type != :write ? "algolia_search_host_index_#{application_id}" : "algolia_host_index_#{application_id}"
Thread.current[thread_index_key] = host[:index]
host[:last_call] = Time.now.to_i
host[:session].connect_timeout = connect_timeout
host[:session].send_timeout = send_timeout
host[:session].receive_timeout = receive_timeout
begin
return perform_request(host[:session], host[:base_url] + uri, method, data, request_options)
rescue AlgoliaProtocolError => e
raise if e.code / 100 == 4
exceptions << e
rescue => e
exceptions << e
end
host[:session].reset_all
end
raise AlgoliaProtocolError.new(0, "Cannot reach any host: #{exceptions.map { |e| e.to_s }.join(', ')}")
end
def get(uri, type = :write, request_options = {})
request(uri, :GET, nil, type, request_options)
end
def post(uri, body = {}, type = :write, request_options = {})
request(uri, :POST, body, type, request_options)
end
def put(uri, body = {}, type = :write, request_options = {})
request(uri, :PUT, body, type, request_options)
end
def delete(uri, type = :write, request_options = {})
request(uri, :DELETE, nil, type, request_options)
end
private
#
# This method returns a thread-local array of sessions
#
def thread_local_hosts(read)
thread_hosts_key = read ? "algolia_search_hosts_#{application_id}" : "algolia_hosts_#{application_id}"
Thread.current[thread_hosts_key] ||= (read ? search_hosts : hosts).each_with_index.map do |host, i|
client = HTTPClient.new
client.ssl_config.ssl_version = @ssl_version if @ssl && @ssl_version
client.transparent_gzip_decompression = @gzip
client.ssl_config.add_trust_ca File.expand_path(File.join(File.dirname(__FILE__), '..', '..', 'resources', 'ca-bundle.crt'))
{
:index => i,
:base_url => "http#{@ssl ? 's' : ''}://#{host}",
:session => client,
:last_call => nil
}
end
hosts = Thread.current[thread_hosts_key]
thread_index_key = read ? "algolia_search_host_index_#{application_id}" : "algolia_host_index_#{application_id}"
current_host = Thread.current[thread_index_key].to_i # `to_i` to ensure first call is 0
# we want to always target host 0 first
# if the current host is not 0, then we want to use it first only if (we never used it OR we're using it since less than 1 minute)
if current_host != 0 && (hosts[current_host][:last_call].nil? || hosts[current_host][:last_call] > Time.now.to_i - 60)
# first host will be `current_host`
first = hosts[current_host]
[first] + hosts.reject { |h| h[:index] == 0 || h == first } + hosts.select { |h| h[:index] == 0 }
else
# first host will be `0`
hosts
end
end
def perform_request(session, url, method, data, request_options)
hs = {}
extra_headers = request_options[:headers] || request_options['headers'] || {}
@headers.each { |key, val| hs[key.to_s] = val }
extra_headers.each { |key, val| hs[key.to_s] = val }
response = case method
when :GET
session.get(url, { :header => hs })
when :POST
session.post(url, { :body => data, :header => hs })
when :PUT
session.put(url, { :body => data, :header => hs })
when :DELETE
session.delete(url, { :header => hs })
end
if response.code / 100 != 2
raise AlgoliaProtocolError.new(response.code, "Cannot #{method} to #{url}: #{response.content} (#{response.code})")
end
return JSON.parse(response.content)
end
def add_header_to_request_options(request_options, headers_to_add)
if !request_options['headers'].is_a?(Hash)
if request_options[:headers].is_a?(Hash)
request_options['headers'] = request_options[:headers]
request_options.delete(:headers)
else
request_options['headers'] = {}
end
end
request_options['headers'].merge!(headers_to_add)
request_options
end
# Deprecated
alias_method :list_user_keys, :list_api_keys
alias_method :get_user_key, :get_api_key
alias_method :add_user_key, :add_api_key
alias_method :update_user_key, :update_api_key
alias_method :delete_user_key, :delete_api_key
end
|
rmagick/rmagick | lib/rmagick_internal.rb | Magick.Draw.circle | ruby | def circle(origin_x, origin_y, perim_x, perim_y)
primitive 'circle ' + format('%g,%g %g,%g', origin_x, origin_y, perim_x, perim_y)
end | Draw a circle | train | https://github.com/rmagick/rmagick/blob/ef6688ed9d76bf123c2ea1a483eff8635051adb7/lib/rmagick_internal.rb#L249-L251 | class Draw
# These hashes are used to map Magick constant
# values to the strings used in the primitives.
ALIGN_TYPE_NAMES = {
LeftAlign.to_i => 'left',
RightAlign.to_i => 'right',
CenterAlign.to_i => 'center'
}.freeze
ANCHOR_TYPE_NAMES = {
StartAnchor.to_i => 'start',
MiddleAnchor.to_i => 'middle',
EndAnchor.to_i => 'end'
}.freeze
DECORATION_TYPE_NAMES = {
NoDecoration.to_i => 'none',
UnderlineDecoration.to_i => 'underline',
OverlineDecoration.to_i => 'overline',
LineThroughDecoration.to_i => 'line-through'
}.freeze
FONT_WEIGHT_NAMES = {
AnyWeight.to_i => 'all',
NormalWeight.to_i => 'normal',
BoldWeight.to_i => 'bold',
BolderWeight.to_i => 'bolder',
LighterWeight.to_i => 'lighter'
}.freeze
GRAVITY_NAMES = {
NorthWestGravity.to_i => 'northwest',
NorthGravity.to_i => 'north',
NorthEastGravity.to_i => 'northeast',
WestGravity.to_i => 'west',
CenterGravity.to_i => 'center',
EastGravity.to_i => 'east',
SouthWestGravity.to_i => 'southwest',
SouthGravity.to_i => 'south',
SouthEastGravity.to_i => 'southeast'
}.freeze
PAINT_METHOD_NAMES = {
PointMethod.to_i => 'point',
ReplaceMethod.to_i => 'replace',
FloodfillMethod.to_i => 'floodfill',
FillToBorderMethod.to_i => 'filltoborder',
ResetMethod.to_i => 'reset'
}.freeze
STRETCH_TYPE_NAMES = {
NormalStretch.to_i => 'normal',
UltraCondensedStretch.to_i => 'ultra-condensed',
ExtraCondensedStretch.to_i => 'extra-condensed',
CondensedStretch.to_i => 'condensed',
SemiCondensedStretch.to_i => 'semi-condensed',
SemiExpandedStretch.to_i => 'semi-expanded',
ExpandedStretch.to_i => 'expanded',
ExtraExpandedStretch.to_i => 'extra-expanded',
UltraExpandedStretch.to_i => 'ultra-expanded',
AnyStretch.to_i => 'all'
}.freeze
STYLE_TYPE_NAMES = {
NormalStyle.to_i => 'normal',
ItalicStyle.to_i => 'italic',
ObliqueStyle.to_i => 'oblique',
AnyStyle.to_i => 'all'
}.freeze
private
def enquote(str)
if str.length > 2 && /\A(?:\"[^\"]+\"|\'[^\']+\'|\{[^\}]+\})\z/.match(str)
str
else
'"' + str + '"'
end
end
public
# Apply coordinate transformations to support scaling (s), rotation (r),
# and translation (t). Angles are specified in radians.
def affine(sx, rx, ry, sy, tx, ty)
primitive 'affine ' + format('%g,%g,%g,%g,%g,%g', sx, rx, ry, sy, tx, ty)
end
# Draw an arc.
def arc(start_x, start_y, end_x, end_y, start_degrees, end_degrees)
primitive 'arc ' + format('%g,%g %g,%g %g,%g',
start_x, start_y, end_x, end_y, start_degrees, end_degrees)
end
# Draw a bezier curve.
def bezier(*points)
if points.length.zero?
Kernel.raise ArgumentError, 'no points specified'
elsif points.length.odd?
Kernel.raise ArgumentError, 'odd number of arguments specified'
end
primitive 'bezier ' + points.join(',')
end
# Draw a circle
# Invoke a clip-path defined by def_clip_path.
def clip_path(name)
primitive "clip-path #{name}"
end
# Define the clipping rule.
def clip_rule(rule)
Kernel.raise ArgumentError, "Unknown clipping rule #{rule}" unless %w[evenodd nonzero].include?(rule.downcase)
primitive "clip-rule #{rule}"
end
# Define the clip units
def clip_units(unit)
Kernel.raise ArgumentError, "Unknown clip unit #{unit}" unless %w[userspace userspaceonuse objectboundingbox].include?(unit.downcase)
primitive "clip-units #{unit}"
end
# Set color in image according to specified colorization rule. Rule is one of
# point, replace, floodfill, filltoborder, reset
def color(x, y, method)
Kernel.raise ArgumentError, "Unknown PaintMethod: #{method}" unless PAINT_METHOD_NAMES.key?(method.to_i)
primitive "color #{x},#{y},#{PAINT_METHOD_NAMES[method.to_i]}"
end
# Specify EITHER the text decoration (none, underline, overline,
# line-through) OR the text solid background color (any color name or spec)
def decorate(decoration)
if DECORATION_TYPE_NAMES.key?(decoration.to_i)
primitive "decorate #{DECORATION_TYPE_NAMES[decoration.to_i]}"
else
primitive "decorate #{enquote(decoration)}"
end
end
# Define a clip-path. A clip-path is a sequence of primitives
# bracketed by the "push clip-path <name>" and "pop clip-path"
# primitives. Upon advice from the IM guys, we also bracket
# the clip-path primitives with "push(pop) defs" and "push
# (pop) graphic-context".
def define_clip_path(name)
push('defs')
push("clip-path \"#{name}\"")
push('graphic-context')
yield
ensure
pop('graphic-context')
pop('clip-path')
pop('defs')
end
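# Illustrative clip-path definition and use (name and coordinates are arbitrary):
#
#   gc = Magick::Draw.new
#   gc.define_clip_path('rounded') { gc.roundrectangle(50, 50, 100, 60, 10, 10) }
#   gc.clip_path('rounded')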
# Draw an ellipse
def ellipse(origin_x, origin_y, width, height, arc_start, arc_end)
primitive 'ellipse ' + format('%g,%g %g,%g %g,%g',
origin_x, origin_y, width, height, arc_start, arc_end)
end
# Let anything through, but the only defined argument
# is "UTF-8". All others are apparently ignored.
def encoding(encoding)
primitive "encoding #{encoding}"
end
# Specify object fill, a color name or pattern name
def fill(colorspec)
primitive "fill #{enquote(colorspec)}"
end
alias fill_color fill
alias fill_pattern fill
# Specify fill opacity (use "xx%" to indicate percentage)
def fill_opacity(opacity)
primitive "fill-opacity #{opacity}"
end
def fill_rule(rule)
Kernel.raise ArgumentError, "Unknown fill rule #{rule}" unless %w[evenodd nonzero].include?(rule.downcase)
primitive "fill-rule #{rule}"
end
# Specify text drawing font
def font(name)
primitive "font \'#{name}\'"
end
def font_family(name)
primitive "font-family \'#{name}\'"
end
def font_stretch(stretch)
Kernel.raise ArgumentError, 'Unknown stretch type' unless STRETCH_TYPE_NAMES.key?(stretch.to_i)
primitive "font-stretch #{STRETCH_TYPE_NAMES[stretch.to_i]}"
end
def font_style(style)
Kernel.raise ArgumentError, 'Unknown style type' unless STYLE_TYPE_NAMES.key?(style.to_i)
primitive "font-style #{STYLE_TYPE_NAMES[style.to_i]}"
end
# The font weight argument can be either a font weight
# constant or [100,200,...,900]
def font_weight(weight)
if FONT_WEIGHT_NAMES.key?(weight.to_i)
primitive "font-weight #{FONT_WEIGHT_NAMES[weight.to_i]}"
else
primitive "font-weight #{weight}"
end
end
# Specify the text positioning gravity, one of:
# NorthWest, North, NorthEast, West, Center, East, SouthWest, South, SouthEast
def gravity(grav)
Kernel.raise ArgumentError, 'Unknown text positioning gravity' unless GRAVITY_NAMES.key?(grav.to_i)
primitive "gravity #{GRAVITY_NAMES[grav.to_i]}"
end
# IM 6.5.5-8 and later
def interline_spacing(space)
begin
Float(space)
rescue ArgumentError
Kernel.raise ArgumentError, 'invalid value for interline_spacing'
rescue TypeError
Kernel.raise TypeError, "can't convert #{space.class} into Float"
end
primitive "interline-spacing #{space}"
end
# IM 6.4.8-3 and later
def interword_spacing(space)
begin
Float(space)
rescue ArgumentError
Kernel.raise ArgumentError, 'invalid value for interword_spacing'
rescue TypeError
Kernel.raise TypeError, "can't convert #{space.class} into Float"
end
primitive "interword-spacing #{space}"
end
# IM 6.4.8-3 and later
def kerning(space)
begin
Float(space)
rescue ArgumentError
Kernel.raise ArgumentError, 'invalid value for kerning'
rescue TypeError
Kernel.raise TypeError, "can't convert #{space.class} into Float"
end
primitive "kerning #{space}"
end
# Draw a line
def line(start_x, start_y, end_x, end_y)
primitive 'line ' + format('%g,%g %g,%g', start_x, start_y, end_x, end_y)
end
# Set matte (make transparent) in image according to the specified
# colorization rule
def matte(x, y, method)
Kernel.raise ArgumentError, 'Unknown paint method' unless PAINT_METHOD_NAMES.key?(method.to_i)
primitive "matte #{x},#{y} #{PAINT_METHOD_NAMES[method.to_i]}"
end
# Specify drawing fill and stroke opacities. If the value is a string
# ending with a %, the number will be multiplied by 0.01.
def opacity(opacity)
if opacity.is_a?(Numeric)
Kernel.raise ArgumentError, 'opacity must be >= 0 and <= 1.0' if opacity < 0 || opacity > 1.0
end
primitive "opacity #{opacity}"
end
# Draw using SVG-compatible path drawing commands. Note that the
# primitive requires that the commands be surrounded by quotes or
# apostrophes. Here we simply use apostrophes.
def path(cmds)
primitive "path '" + cmds + "'"
end
# Define a pattern. In the block, call primitive methods to
# draw the pattern. Reference the pattern by using its name
# as the argument to the 'fill' or 'stroke' methods
def pattern(name, x, y, width, height)
push('defs')
push("pattern #{name} #{x} #{y} #{width} #{height}")
push('graphic-context')
yield
ensure
pop('graphic-context')
pop('pattern')
pop('defs')
end
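# Illustrative pattern definition and fill (name and coordinates are arbitrary):
#
#   gc.pattern('hatch', 0, 0, 8, 8) { gc.line(0, 0, 8, 8) }
#   gc.fill('hatch')
#   gc.rectangle(10, 10, 110, 110)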
# Set point to fill color.
def point(x, y)
primitive "point #{x},#{y}"
end
# Specify the font size in points. Yes, the primitive is "font-size" but
# in other places this value is called the "pointsize". Give it both names.
def pointsize(points)
primitive "font-size #{points}"
end
alias font_size pointsize
# Draw a polygon
def polygon(*points)
if points.length.zero?
Kernel.raise ArgumentError, 'no points specified'
elsif points.length.odd?
Kernel.raise ArgumentError, 'odd number of points specified'
end
primitive 'polygon ' + points.join(',')
end
# Draw a polyline
def polyline(*points)
if points.length.zero?
Kernel.raise ArgumentError, 'no points specified'
elsif points.length.odd?
Kernel.raise ArgumentError, 'odd number of points specified'
end
primitive 'polyline ' + points.join(',')
end
# Return to the previously-saved set of whatever
# pop('graphic-context') (the default if no arguments)
# pop('defs')
# pop('gradient')
# pop('pattern')
def pop(*what)
if what.length.zero?
primitive 'pop graphic-context'
else
# to_s allows a Symbol to be used instead of a String
primitive 'pop ' + what.map(&:to_s).join(' ')
end
end
# Push the current set of drawing options. Also you can use
# push('graphic-context') (the default if no arguments)
# push('defs')
# push('gradient')
# push('pattern')
def push(*what)
if what.length.zero?
primitive 'push graphic-context'
else
# to_s allows a Symbol to be used instead of a String
primitive 'push ' + what.map(&:to_s).join(' ')
end
end
# Draw a rectangle
def rectangle(upper_left_x, upper_left_y, lower_right_x, lower_right_y)
primitive 'rectangle ' + format('%g,%g %g,%g',
upper_left_x, upper_left_y, lower_right_x, lower_right_y)
end
# Specify coordinate space rotation. "angle" is measured in degrees
def rotate(angle)
primitive "rotate #{angle}"
end
# Draw a rectangle with rounded corners
def roundrectangle(center_x, center_y, width, height, corner_width, corner_height)
primitive 'roundrectangle ' + format('%g,%g,%g,%g,%g,%g',
center_x, center_y, width, height, corner_width, corner_height)
end
# Specify scaling to be applied to coordinate space on subsequent drawing commands.
def scale(x, y)
primitive "scale #{x},#{y}"
end
def skewx(angle)
primitive "skewX #{angle}"
end
def skewy(angle)
primitive "skewY #{angle}"
end
# Specify the object stroke, a color name or pattern name.
def stroke(colorspec)
primitive "stroke #{enquote(colorspec)}"
end
alias stroke_color stroke
alias stroke_pattern stroke
# Specify if stroke should be antialiased or not
def stroke_antialias(bool)
bool = bool ? '1' : '0'
primitive "stroke-antialias #{bool}"
end
# Specify a stroke dash pattern
def stroke_dasharray(*list)
if list.length.zero?
primitive 'stroke-dasharray none'
else
list.each do |x|
Kernel.raise ArgumentError, "dash array elements must be > 0 (#{x} given)" if x <= 0
end
primitive "stroke-dasharray #{list.join(',')}"
end
end
# Specify the initial offset in the dash pattern
def stroke_dashoffset(value = 0)
primitive "stroke-dashoffset #{value}"
end
def stroke_linecap(value)
Kernel.raise ArgumentError, "Unknown linecap type: #{value}" unless %w[butt round square].include?(value.downcase)
primitive "stroke-linecap #{value}"
end
def stroke_linejoin(value)
Kernel.raise ArgumentError, "Unknown linejoin type: #{value}" unless %w[round miter bevel].include?(value.downcase)
primitive "stroke-linejoin #{value}"
end
def stroke_miterlimit(value)
Kernel.raise ArgumentError, 'miterlimit must be >= 1' if value < 1
primitive "stroke-miterlimit #{value}"
end
# Specify opacity of stroke drawing color
# (use "xx%" to indicate percentage)
def stroke_opacity(value)
primitive "stroke-opacity #{value}"
end
# Specify stroke (outline) width in pixels.
def stroke_width(pixels)
primitive "stroke-width #{pixels}"
end
# Draw text at position x,y. Add quotes to text that is not already quoted.
def text(x, y, text)
Kernel.raise ArgumentError, 'missing text argument' if text.to_s.empty?
if text.length > 2 && /\A(?:\"[^\"]+\"|\'[^\']+\'|\{[^\}]+\})\z/.match(text)
# text already quoted
elsif !text['\'']
text = '\'' + text + '\''
elsif !text['"']
text = '"' + text + '"'
elsif !(text['{'] || text['}'])
text = '{' + text + '}'
else
# escape existing braces, surround with braces
text = '{' + text.gsub(/[}]/) { |b| '\\' + b } + '}'
end
primitive "text #{x},#{y} #{text}"
end
# Specify text alignment relative to a given point
def text_align(alignment)
Kernel.raise ArgumentError, "Unknown alignment constant: #{alignment}" unless ALIGN_TYPE_NAMES.key?(alignment.to_i)
primitive "text-align #{ALIGN_TYPE_NAMES[alignment.to_i]}"
end
# SVG-compatible version of text_align
def text_anchor(anchor)
Kernel.raise ArgumentError, "Unknown anchor constant: #{anchor}" unless ANCHOR_TYPE_NAMES.key?(anchor.to_i)
primitive "text-anchor #{ANCHOR_TYPE_NAMES[anchor.to_i]}"
end
# Specify if rendered text is to be antialiased.
def text_antialias(boolean)
boolean = boolean ? '1' : '0'
primitive "text-antialias #{boolean}"
end
# Specify color underneath text
def text_undercolor(color)
primitive "text-undercolor #{enquote(color)}"
end
# Specify center of coordinate space to use for subsequent drawing
# commands.
def translate(x, y)
primitive "translate #{x},#{y}"
end
end # class Magick::Draw
|
lambda2/rice_cooker | lib/rice_cooker/base/helpers.rb | RiceCooker.Helpers.parse_sorting_param | ruby | def parse_sorting_param(sorting_param, model)
return {} unless sorting_param.present?
sorting_params = CSV.parse_line(URI.unescape(sorting_param)).collect do |sort|
sorting_param = if sort.start_with?('-')
{ field: sort[1..-1].to_s.to_sym, direction: :desc }
else
{ field: sort.to_s.to_sym, direction: :asc }
end
check_sorting_param(model, sorting_param)
sorting_param
end
sorting_params.map { |par| [par[:field], par[:direction]] }.to_h
end | ------------------------ Sort helpers --------------------
model -> resource_class with inherited resources | train | https://github.com/lambda2/rice_cooker/blob/b7ce285d3bd76ae979111f0374c5a43815473332/lib/rice_cooker/base/helpers.rb#L81-L95 | module Helpers
extend ActiveSupport::Concern
# Overridable method for available sortable fields
def sortable_fields_for(model)
if model.respond_to?(:sortable_fields)
model.sortable_fields.map(&:to_sym)
elsif model.respond_to?(:column_names)
model.column_names.map(&:to_sym)
else
[]
end
end
# Overridable method for available filterable fields
def filterable_fields_for(model)
if model.respond_to?(:filterable_fields)
model.filterable_fields.map(&:to_sym)
elsif model.respond_to?(:column_names)
model.column_names.map(&:to_sym)
else
[]
end
end
# Overridable method for available searchable fields
def searchable_fields_for(model)
if model.respond_to?(:searchable_fields)
model.searchable_fields.map(&:to_sym)
else
filterable_fields_for(model)
end
end
# Overridable method for available fuzzy fields
def fuzzy_fields_for(model)
if model.respond_to?(:fuzzy_fields)
model.fuzzy_fields.map(&:to_sym)
else
searchable_fields_for(model)
end
end
# Overridable method for available rangeable fields
def rangeable_fields_for(model)
if model.respond_to?(:rangeable_fields)
model.rangeable_fields.map(&:to_sym)
else
filterable_fields_for(model)
end
end
# ------------------------ Sort helpers --------------------
# model -> resource_class with inherited resources
def check_sorting_param(model, sorting_param)
sort_field = sorting_param[:field]
sortable_fields = sortable_fields_for(model)
unless sortable_fields.include? sort_field.to_sym
raise InvalidSortException, "The #{sort_field} field is not sortable"
end
end
def param_from_defaults(sorting_params)
sorting_params.map { |k, v| "#{v == :desc ? '-' : ''}#{k}" }.join(',')
end
def apply_sort_to_collection(collection, sorting_params)
return collection unless collection.any?
# p "Before apply: #{sorting_params.inspect}"
collection.order(sorting_params)
end
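# Illustrative flow (the User model and its fields are assumptions):
#
#   sort = parse_sorting_param('-created_at,name', User)
#   # => { created_at: :desc, name: :asc }
#   sorted = apply_sort_to_collection(User.all, sort)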
# ------------------------ Filter helpers --------------------
# Turns the URL param into a usable hash
def parse_filtering_param(filtering_param, allowed_params)
return {} unless filtering_param.present?
fields = {}
# Extract the fields for each type from the fields parameters
if filtering_param.is_a?(Hash)
filtering_param.each do |field, value|
resource_fields = value.split(',') unless value.nil? || value.empty?
fields[field.to_sym] = resource_fields
end
else
raise InvalidFilterException, "Invalid filter format for #{filtering_param}"
end
check_filtering_param(fields, allowed_params)
fields
end
# Our little barrier <3
def check_filtering_param(filtering_param, allowed)
🔞 = filtering_param.keys.map(&:to_sym) - allowed.map(&:to_sym)
raise InvalidFilterException, "Attributes #{🔞.map(&:to_s).to_sentence} doesn't exists or aren't filterables. Available filters are: #{allowed.to_sentence}" if 🔞.any?
end
# We try to keep a common format, which is:
#
# ```
# filter: {
#   proc: -> (values) { * do something with the values * },
#   all: ['the', 'allowed', 'values'],
#   description: "The description shown in the doc"
# }
# ```
#
# So we transform `additional` into the format above
#
def format_additional_param(additional, context_format = 'filtering')
if additional.is_a? Hash
additional = additional.map do |field, value|
if value.is_a?(Hash)
value = {
proc: nil,
all: [],
description: ''
}.merge(value)
elsif value.is_a? Array
value = {
proc: value.try(:at, 0),
all: value.try(:at, 1) || [],
description: value.try(:at, 2) || ''
}
elsif value.is_a? Proc
value = {
proc: value,
all: [],
description: ''
}
else
raise "Unable to format addional #{context_format} params (got #{additional})"
end
[field, value]
end.to_h
end
additional
end
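# Illustrative normalization (the :status filter is an assumption):
#
#   format_additional_param(status: -> (values) { where(status: values) })
#   # => { status: { proc: #<Proc>, all: [], description: '' } }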
def apply_filter_to_collection(collection, filtering_params, additional = {})
return collection if collection.nil?
filtering_params.each do |field, value|
puts "Filtering param #{field} -> #{value}"
if additional.key?(field) && additional[field].key?(:proc)
# If values were provided, check that they match
if additional[field].key?(:all) && additional[field][:all].try(:any?)
allowed = additional[field][:all].map(&:to_s)
raise InvalidFilterValueException, "Value #{(value - allowed).to_sentence} is not allowed for filter #{field}, can be #{allowed.to_sentence}" if (value - allowed).any?
end
collection = collection.instance_exec(value, &(additional[field][:proc]))
elsif field =~ /_at$/ && (value.is_a?(String) || value.is_a?(Array))
collection = collection.where("DATE(#{collection.model.table_name}.#{field}) = ?", [*value])
elsif value.is_a?(String) || value.is_a?(Array)
collection = collection.where("#{collection.model.table_name}.#{field}" => value)
elsif value.is_a?(Hash) && value.key?(:proc)
collection
end
end
collection
end
# ------------------------ Search helpers --------------------
# Turns the URL param into a usable hash
def parse_searching_param(searching_param, allowed_params)
return {} unless searching_param.present?
fields = {}
# Extract the fields for each type from the fields parameters
if searching_param.is_a?(Hash)
searching_param.each do |field, value|
resource_fields = value.split(',') unless value.nil? || value.empty?
fields[field.to_sym] = resource_fields
end
else
raise InvalidSearchException, "Invalid search format for #{searching_param}"
end
check_searching_param(fields, allowed_params)
fields
end
# Our little barrier <3
def check_searching_param(searching_param, allowed)
🔞 = searching_param.keys.map(&:to_sym) - allowed.map(&:to_sym)
raise InvalidSearchException, "Attributes #{🔞.map(&:to_s).to_sentence} doesn't exists or aren't searchables. Available searchs are: #{allowed.to_sentence}" if 🔞.any?
end
# We try to keep a common format, which is:
#
# ```
# search: {
#   proc: -> (values) { * do something with the values * },
#   all: ['the', 'allowed', 'values'],
#   description: "The description shown in the doc"
# }
# ```
#
# So we transform `additional` into the format above
#
def format_additional_param(additional, context_format = 'searching')
if additional.is_a? Hash
additional = additional.map do |field, value|
if value.is_a?(Hash)
value = {
proc: nil,
all: [],
description: ''
}.merge(value)
elsif value.is_a? Array
value = {
proc: value.try(:at, 0),
all: value.try(:at, 1) || [],
description: value.try(:at, 2) || ''
}
elsif value.is_a? Proc
value = {
proc: value,
all: [],
description: ''
}
else
raise "Unable to format addional #{context_format} params (got #{additional})"
end
[field, value]
end.to_h
end
additional
end
def reduce_where(col, field, value)
reducer = nil
value.each do |v|
query = col.model.arel_table[field.to_sym].matches("%#{v.to_s}%")
reducer = (reducer ? reducer.or(query) : query)
end
col.where(reducer)
end
def reduce_fields_where(col, fields, value)
reducer = nil
fields.each do |f|
case col.model.columns.select{|e| e.name.to_sym == f.to_sym}.first.type
when :string
query = col.model.arel_table[f.to_sym].matches("%#{value.to_s}%")
when :integer
query = col.model.arel_table[f.to_sym].eq(value.to_i)
# when :boolean
# query = col.model.where(false)
else
query = false
end
reducer = (reducer ? reducer.or(query) : query) if query
end
col.where(reducer)
end
def apply_search_to_collection(col, searching_params, additional = {})
return col if col.nil?
searching_params.each do |field, value|
if additional.key?(field) && additional[field].key?(:proc)
col = col.instance_exec(value, &(additional[field][:proc]))
elsif value.is_a?(String)
col = (col.where(col.model.arel_table[field.to_sym].matches("%#{value.join(' ')}%")) rescue col)
elsif value.is_a?(Array)
col = reduce_where(col, field, value)
elsif value.is_a?(Hash) && value.key?(:proc)
col
end
end
col
end
# ------------------------ Range helpers --------------------
# Turns the URL param into a usable hash
def parse_ranged_param(ranged_param, allowed_params)
return {} unless ranged_param.present?
fields = {}
# Extract the fields for each type from the fields parameters
if ranged_param.is_a?(Hash)
ranged_param.each do |field, value|
resource_fields = value.split(',') unless value.nil? || value.empty?
raise InvalidRangeException, "Invalid range format for #{ranged_param}. Too many arguments for filter (#{resource_fields})." if resource_fields.length > 2
raise InvalidRangeException, "Invalid range format for #{ranged_param}. Begin and end must be separated by a comma (,)." if resource_fields.length < 2
fields[field.to_sym] = resource_fields
end
else
raise InvalidRangeException, "Invalid range format for #{ranged_param}"
end
check_ranged_param(fields, allowed_params)
fields
end
# Our little barrier <3
def check_ranged_param(ranged_param, allowed)
🔞 = ranged_param.keys.map(&:to_sym) - allowed.map(&:to_sym)
raise InvalidRangeException, "Attributes #{🔞.map(&:to_s).to_sentence} doesn't exists or aren't rangeables. Available ranges are: #{allowed.to_sentence}" if 🔞.any?
end
def apply_range_to_collection(collection, ranged_params, additional = {})
return collection if collection.nil?
ranged_params.each do |field, value|
if additional.key?(field) && additional[field].key?(:proc)
# If values were provided, check that they match
if additional[field].key?(:all) && additional[field][:all].try(:any?)
allowed = additional[field][:all].map(&:to_s)
raise InvalidRangeValueException, "
Value #{(value - allowed).to_sentence} is not allowed for range #{field}, can be #{allowed.to_sentence}
" if (value - allowed).any?
end
collection = collection.instance_exec(value.try(:first), value.try(:last), &(additional[field][:proc]))
elsif value.is_a? Array
from, to = value.slice(0, 2)
begin
collection = collection.where("#{collection.model.table_name}.#{field}" => from..to)
rescue ArgumentError
raise InvalidRangeValueException, "
Unable to create a range between values '#{from}' and '#{to}'
"
end
elsif value.is_a?(Hash) && value.key?(:proc)
collection
end
end
collection
end
end
|
state-machines/state_machines | lib/state_machines/machine.rb | StateMachines.Machine.add_events | ruby | def add_events(new_events)
new_events.map do |new_event|
# Check for other states that use a different class type for their name.
# This typically prevents string / symbol misuse.
if conflict = events.detect { |event| event.name.class != new_event.class }
raise ArgumentError, "#{new_event.inspect} event defined as #{new_event.class}, #{conflict.name.inspect} defined as #{conflict.name.class}; all events must be consistent"
end
unless event = events[new_event]
events << event = Event.new(self, new_event)
end
event
end
end | Tracks the given set of events in the list of all known events for
this machine | train | https://github.com/state-machines/state_machines/blob/10b03af5fc9245bcb09bbd9c40c58ffba9a85422/lib/state_machines/machine.rb#L2218-L2232 | class Machine
include EvalHelpers
include MatcherHelpers
class << self
# Attempts to find or create a state machine for the given class. For
# example,
#
# StateMachines::Machine.find_or_create(Vehicle)
# StateMachines::Machine.find_or_create(Vehicle, :initial => :parked)
# StateMachines::Machine.find_or_create(Vehicle, :status)
# StateMachines::Machine.find_or_create(Vehicle, :status, :initial => :parked)
#
# If a machine of the given name already exists in one of the class's
# superclasses, then a copy of that machine will be created and stored
# in the new owner class (the original will remain unchanged).
def find_or_create(owner_class, *args, &block)
options = args.last.is_a?(Hash) ? args.pop : {}
name = args.first || :state
# Find an existing machine
machine = owner_class.respond_to?(:state_machines) &&
(args.first && owner_class.state_machines[name] || !args.first &&
owner_class.state_machines.values.first) || nil
if machine
# Only create a new copy if changes are being made to the machine in
# a subclass
if machine.owner_class != owner_class && (options.any? || block_given?)
machine = machine.clone
machine.initial_state = options[:initial] if options.include?(:initial)
machine.owner_class = owner_class
end
# Evaluate DSL
machine.instance_eval(&block) if block_given?
else
# No existing machine: create a new one
machine = new(owner_class, name, options, &block)
end
machine
end
def draw(*)
fail NotImplementedError
end
# Default messages to use for validation errors in ORM integrations
attr_accessor :default_messages
attr_accessor :ignore_method_conflicts
end
@default_messages = {
:invalid => 'is invalid',
:invalid_event => 'cannot transition when %s',
:invalid_transition => 'cannot transition via "%1$s"'
}
# Whether to ignore any conflicts that are detected for helper methods that
# get generated for a machine's owner class. Default is false.
@ignore_method_conflicts = false
# The class that the machine is defined in
attr_reader :owner_class
# The name of the machine, used for scoping methods generated for the
# machine as a whole (not states or events)
attr_reader :name
# The events that trigger transitions. These are sorted, by default, in
# the order in which they were defined.
attr_reader :events
# A list of all of the states known to this state machine. This will pull
# states from the following sources:
# * Initial state
# * State behaviors
# * Event transitions (:to, :from, and :except_from options)
# * Transition callbacks (:to, :from, :except_to, and :except_from options)
# * Unreferenced states (using +other_states+ helper)
#
# These are sorted, by default, in the order in which they were referenced.
attr_reader :states
# The callbacks to invoke before/after a transition is performed
#
# Maps :before => callbacks and :after => callbacks
attr_reader :callbacks
# The action to invoke when an object transitions
attr_reader :action
# An identifier that forces all methods (including state predicates and
# event methods) to be generated with the value prefixed or suffixed,
# depending on the context.
attr_reader :namespace
# Whether the machine will use transactions when firing events
attr_reader :use_transactions
# Creates a new state machine for the given attribute
def initialize(owner_class, *args, &block)
options = args.last.is_a?(Hash) ? args.pop : {}
options.assert_valid_keys(:attribute, :initial, :initialize, :action, :plural, :namespace, :integration, :messages, :use_transactions)
# Find an integration that matches this machine's owner class
if options.include?(:integration)
@integration = options[:integration] && StateMachines::Integrations.find_by_name(options[:integration])
else
@integration = StateMachines::Integrations.match(owner_class)
end
if @integration
extend @integration
options = (@integration.defaults || {}).merge(options)
end
# Add machine-wide defaults
options = {:use_transactions => true, :initialize => true}.merge(options)
# Set machine configuration
@name = args.first || :state
@attribute = options[:attribute] || @name
@events = EventCollection.new(self)
@states = StateCollection.new(self)
@callbacks = {:before => [], :after => [], :failure => []}
@namespace = options[:namespace]
@messages = options[:messages] || {}
@action = options[:action]
@use_transactions = options[:use_transactions]
@initialize_state = options[:initialize]
@action_hook_defined = false
self.owner_class = owner_class
# Merge with sibling machine configurations
add_sibling_machine_configs
# Define class integration
define_helpers
define_scopes(options[:plural])
after_initialize
# Evaluate DSL
instance_eval(&block) if block_given?
self.initial_state = options[:initial] unless sibling_machines.any?
end
# Creates a copy of this machine in addition to copies of each associated
# event/states/callback, so that the modifications to those collections do
# not affect the original machine.
def initialize_copy(orig) #:nodoc:
super
@events = @events.dup
@events.machine = self
@states = @states.dup
@states.machine = self
@callbacks = {:before => @callbacks[:before].dup, :after => @callbacks[:after].dup, :failure => @callbacks[:failure].dup}
end
# Sets the class which is the owner of this state machine. Any methods
# generated by states, events, or other parts of the machine will be defined
# on the given owner class.
def owner_class=(klass)
@owner_class = klass
# Create modules for extending the class with state/event-specific methods
@helper_modules = helper_modules = {:instance => HelperModule.new(self, :instance), :class => HelperModule.new(self, :class)}
owner_class.class_eval do
extend helper_modules[:class]
include helper_modules[:instance]
end
# Add class-/instance-level methods to the owner class for state initialization
unless owner_class < StateMachines::InstanceMethods
owner_class.class_eval do
extend StateMachines::ClassMethods
include StateMachines::InstanceMethods
end
define_state_initializer if @initialize_state
end
# Record this machine as matched to the name in the current owner class.
# This will override any machines mapped to the same name in any superclasses.
owner_class.state_machines[name] = self
end
# Sets the initial state of the machine. This can be either the static name
# of a state or a lambda block which determines the initial state at
# creation time.
def initial_state=(new_initial_state)
@initial_state = new_initial_state
add_states([@initial_state]) unless dynamic_initial_state?
# Update all states to reflect the new initial state
states.each { |state| state.initial = (state.name == @initial_state) }
# Output a warning if there are conflicting initial states for the machine's
# attribute
initial_state = states.detect { |state| state.initial }
if !owner_class_attribute_default.nil? && (dynamic_initial_state? || !owner_class_attribute_default_matches?(initial_state))
warn(
"Both #{owner_class.name} and its #{name.inspect} machine have defined "\
"a different default for \"#{attribute}\". Use only one or the other for "\
"defining defaults to avoid unexpected behaviors."
)
end
end
# Gets the initial state of the machine for the given object. If a dynamic
# initial state was configured for this machine, then the object will be
# passed into the lambda block to help determine the actual state.
#
# == Examples
#
# With a static initial state:
#
# class Vehicle
# state_machine :initial => :parked do
# ...
# end
# end
#
# vehicle = Vehicle.new
# Vehicle.state_machine.initial_state(vehicle) # => #<StateMachines::State name=:parked value="parked" initial=true>
#
# With a dynamic initial state:
#
# class Vehicle
# attr_accessor :force_idle
#
# state_machine :initial => lambda {|vehicle| vehicle.force_idle ? :idling : :parked} do
# ...
# end
# end
#
# vehicle = Vehicle.new
#
# vehicle.force_idle = true
# Vehicle.state_machine.initial_state(vehicle) # => #<StateMachines::State name=:idling value="idling" initial=false>
#
# vehicle.force_idle = false
# Vehicle.state_machine.initial_state(vehicle) # => #<StateMachines::State name=:parked value="parked" initial=false>
def initial_state(object)
states.fetch(dynamic_initial_state? ? evaluate_method(object, @initial_state) : @initial_state) if instance_variable_defined?('@initial_state')
end
# Whether a dynamic initial state is being used in the machine
def dynamic_initial_state?
instance_variable_defined?('@initial_state') && @initial_state.is_a?(Proc)
end
# Initializes the state on the given object. Initial values are only set if
# the machine's attribute hasn't been previously initialized.
#
# Configuration options:
# * <tt>:force</tt> - Whether to initialize the state regardless of its
# current value
# * <tt>:to</tt> - A hash to set the initial value in instead of writing
# directly to the object
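#
# == Example
#
# A minimal usage sketch of the <tt>:to</tt> option, assuming the static
# Vehicle machine from the examples above (all names are illustrative):
#
#   machine = Vehicle.state_machine(:state)
#   vehicle = Vehicle.new
#
#   # Redirect the initial value into a hash instead of the object itself
#   attrs = {}
#   machine.initialize_state(vehicle, :force => true, :to => attrs)
#   attrs['state']  # => "parked"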
def initialize_state(object, options = {})
state = initial_state(object)
if state && (options[:force] || initialize_state?(object))
value = state.value
if hash = options[:to]
hash[attribute.to_s] = value
else
write(object, :state, value)
end
end
end
# Gets the actual name of the attribute on the machine's owner class that
# stores data with the given name.
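#
# For example, with the default machine named :state this resolves as:
#
#   machine.attribute          # => :state
#   machine.attribute(:event)  # => :state_event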
def attribute(name = :state)
name == :state ? @attribute : :"#{self.name}_#{name}"
end
# Defines a new helper method in an instance or class scope with the given
# name. If the method is already defined in the scope, then this will not
# override it.
#
# If passing in a block, there are two side effects to be aware of
# 1. The method cannot be chained, meaning that the block cannot call +super+
# 2. If the method is already defined in an ancestor, then it will not get
# overridden and a warning will be output.
#
# Example:
#
# # Instance helper
# machine.define_helper(:instance, :state_name) do |machine, object|
# machine.states.match(object).name
# end
#
# # Class helper
# machine.define_helper(:class, :state_machine_name) do |machine, klass|
# "State"
# end
#
# You can also define helpers using string evaluation like so:
#
# # Instance helper
# machine.define_helper :instance, <<-end_eval, __FILE__, __LINE__ + 1
# def state_name
# self.class.state_machine(:state).states.match(self).name
# end
# end_eval
#
# # Class helper
# machine.define_helper :class, <<-end_eval, __FILE__, __LINE__ + 1
# def state_machine_name
# "State"
# end
# end_eval
def define_helper(scope, method, *args, &block)
helper_module = @helper_modules.fetch(scope)
if block_given?
if !self.class.ignore_method_conflicts && conflicting_ancestor = owner_class_ancestor_has_method?(scope, method)
ancestor_name = conflicting_ancestor.name && !conflicting_ancestor.name.empty? ? conflicting_ancestor.name : conflicting_ancestor.to_s
warn "#{scope == :class ? 'Class' : 'Instance'} method \"#{method}\" is already defined in #{ancestor_name}, use generic helper instead or set StateMachines::Machine.ignore_method_conflicts = true."
else
name = self.name
helper_module.class_eval do
define_method(method) do |*block_args|
block.call((scope == :instance ? self.class : self).state_machine(name), self, *block_args)
end
end
end
else
helper_module.class_eval(method, *args)
end
end
# Customizes the definition of one or more states in the machine.
#
# Configuration options:
# * <tt>:value</tt> - The actual value to store when an object transitions
# to the state. Default is the name (stringified).
# * <tt>:cache</tt> - If a dynamic value (via a lambda block) is being used,
# then setting this to true will cache the evaluated result
# * <tt>:if</tt> - Determines whether an object's value matches the state
# (e.g. :value => lambda {Time.now}, :if => lambda {|state| !state.nil?}).
# By default, the configured value is matched.
# * <tt>:human_name</tt> - The human-readable version of this state's name.
# By default, this is either defined by the integration or stringifies the
# name and converts underscores to spaces.
#
# == Customizing the stored value
#
# Whenever a state is automatically discovered in the state machine, its
# default value is assumed to be the stringified version of the name. For
# example,
#
# class Vehicle
# state_machine :initial => :parked do
# event :ignite do
# transition :parked => :idling
# end
# end
# end
#
# In the above state machine, there are two states automatically discovered:
# :parked and :idling. These states, by default, will store their stringified
# equivalents when an object moves into that state (e.g. "parked" / "idling").
#
# For legacy systems or when tying state machines into existing frameworks,
# it's oftentimes necessary to store a different value for a state
# than the default. In order to continue taking advantage of an expressive
# state machine and helper methods, every defined state can be re-configured
# with a custom stored value. For example,
#
# class Vehicle
# state_machine :initial => :parked do
# event :ignite do
# transition :parked => :idling
# end
#
# state :idling, :value => 'IDLING'
# state :parked, :value => 'PARKED'
# end
# end
#
# This is also useful if being used in association with a database and,
# instead of storing the state name in a column, you want to store the
# state's foreign key:
#
# class VehicleState < ActiveRecord::Base
# end
#
# class Vehicle < ActiveRecord::Base
# state_machine :attribute => :state_id, :initial => :parked do
# event :ignite do
# transition :parked => :idling
# end
#
# states.each do |state|
# self.state(state.name, :value => lambda { VehicleState.find_by_name(state.name.to_s).id }, :cache => true)
# end
# end
# end
#
# In the above example, each known state is configured to store its
# associated database id in the +state_id+ attribute. Also, notice that a
# lambda block is used to define the state's value. This is required in
# situations (like testing) where the model is loaded without any existing
# data (i.e. no VehicleState records available).
#
# One caveat to the above example is to keep performance in mind. To avoid
# constant db hits for looking up the VehicleState ids, the value is cached
# by specifying the <tt>:cache</tt> option. Alternatively, a custom
# caching strategy can be used like so:
#
# class VehicleState < ActiveRecord::Base
# cattr_accessor :cache_store
# self.cache_store = ActiveSupport::Cache::MemoryStore.new
#
# def self.find_by_name(name)
# cache_store.fetch(name) { find(:first, :conditions => {:name => name}) }
# end
# end
#
# === Dynamic values
#
# In addition to customizing states with other value types, lambda blocks
# can also be specified to allow for a state's value to be determined
# dynamically at runtime. For example,
#
# class Vehicle
# state_machine :purchased_at, :initial => :available do
# event :purchase do
# transition all => :purchased
# end
#
# event :restock do
# transition all => :available
# end
#
# state :available, :value => nil
# state :purchased, :if => lambda {|value| !value.nil?}, :value => lambda {Time.now}
# end
# end
#
# In the above definition, the <tt>:purchased</tt> state is customized with
# both a dynamic value *and* a value matcher.
#
# When an object transitions to the purchased state, the value's lambda
# block will be called. This will get the current time and store it in the
# object's +purchased_at+ attribute.
#
# *Note* that the custom matcher is very important here. Since there's no
# way for the state machine to figure out an object's state when it's set to
# a runtime value, it must be explicitly defined. If the <tt>:if</tt> option
# were not configured for the state, then an ArgumentError exception would
# be raised at runtime, indicating that the state machine could not figure
# out what the current state of the object was.
#
# == Behaviors
#
# Behaviors define a series of methods to mixin with objects when the current
# state matches the given one(s). This allows instance methods to behave
# a specific way depending on what the value of the object's state is.
#
# For example,
#
# class Vehicle
# attr_accessor :driver
# attr_accessor :passenger
#
# state_machine :initial => :parked do
# event :ignite do
# transition :parked => :idling
# end
#
# state :parked do
# def speed
# 0
# end
#
# def rotate_driver
# driver = self.driver
# self.driver = passenger
# self.passenger = driver
# true
# end
# end
#
# state :idling, :first_gear do
# def speed
# 20
# end
#
# def rotate_driver
# self.state = 'parked'
# rotate_driver
# end
# end
#
# other_states :backing_up
# end
# end
#
# In the above example, there are two dynamic behaviors defined for the
# class:
# * +speed+
# * +rotate_driver+
#
# Each of these behaviors is an instance method on the Vehicle class. However,
# which method actually gets invoked is based on the current state of the
# object. Using the above class as the example:
#
# vehicle = Vehicle.new
# vehicle.driver = 'John'
# vehicle.passenger = 'Jane'
#
# # Behaviors in the "parked" state
# vehicle.state # => "parked"
# vehicle.speed # => 0
# vehicle.rotate_driver # => true
# vehicle.driver # => "Jane"
# vehicle.passenger # => "John"
#
# vehicle.ignite # => true
#
# # Behaviors in the "idling" state
# vehicle.state # => "idling"
# vehicle.speed # => 20
# vehicle.rotate_driver # => true
# vehicle.driver # => "John"
# vehicle.passenger # => "Jane"
#
# As can be seen, both the +speed+ and +rotate_driver+ instance method
# implementations changed how they behave based on what the current state
# of the vehicle was.
#
# === Invalid behaviors
#
# If a specific behavior has not been defined for a state, then a
# NoMethodError exception will be raised, indicating that that method would
# not normally exist for an object with that state.
#
# Using the example from before:
#
# vehicle = Vehicle.new
# vehicle.state = 'backing_up'
# vehicle.speed # => NoMethodError: undefined method 'speed' for #<Vehicle:0xb7d296ac> in state "backing_up"
#
# === Using matchers
#
# The +all+ / +any+ matchers can be used to easily define behaviors for a
# group of states. Note, however, that you cannot use these matchers to
# set configurations for states. Behaviors using these matchers can be
# defined at any point in the state machine and will always get applied to
# the proper states.
#
# For example:
#
# state_machine :initial => :parked do
# ...
#
# state all - [:parked, :idling, :stalled] do
# validates_presence_of :speed
#
# def speed
# gear * 10
# end
# end
# end
#
# == State-aware class methods
#
# In addition to defining scopes for instance methods that are state-aware,
# the same can be done for certain types of class methods.
#
# Some libraries have support for class-level methods that only run certain
# behaviors based on a conditions hash passed in. For example:
#
# class Vehicle < ActiveRecord::Base
# state_machine do
# ...
# state :first_gear, :second_gear, :third_gear do
# validates_presence_of :speed
# validates_inclusion_of :speed, :in => 0..25, :if => :in_school_zone?
# end
# end
# end
#
# In the above ActiveRecord model, two validations have been defined which
# will *only* run when the Vehicle object is in one of the three states:
# +first_gear+, +second_gear+, or +third_gear+. Notice, also, that if/unless
# conditions can continue to be used.
#
# This functionality is not library-specific and can work for any class-level
# method that is defined like so:
#
# def validates_presence_of(attribute, options = {})
# ...
# end
#
# The minimum requirement is that the last argument in the method be an
# options hash which contains at least <tt>:if</tt> condition support.
def state(*names, &block)
options = names.last.is_a?(Hash) ? names.pop : {}
options.assert_valid_keys(:value, :cache, :if, :human_name)
# Store the context so that it can be used for / matched against any state
# that gets added
@states.context(names, &block) if block_given?
if names.first.is_a?(Matcher)
# Add any states referenced in the matcher. When matchers are used,
# states are not allowed to be configured.
raise ArgumentError, "Cannot configure states when using matchers (using #{options.inspect})" if options.any?
states = add_states(names.first.values)
else
states = add_states(names)
# Update the configuration for the state(s)
states.each do |state|
if options.include?(:value)
state.value = options[:value]
self.states.update(state)
end
state.human_name = options[:human_name] if options.include?(:human_name)
state.cache = options[:cache] if options.include?(:cache)
state.matcher = options[:if] if options.include?(:if)
end
end
states.length == 1 ? states.first : states
end
alias_method :other_states, :state
# Gets the current value stored in the given object's attribute.
#
# For example,
#
# class Vehicle
# state_machine :initial => :parked do
# ...
# end
# end
#
# vehicle = Vehicle.new # => #<Vehicle:0xb7d94ab0 @state="parked">
# Vehicle.state_machine.read(vehicle, :state) # => "parked" # Equivalent to vehicle.state
# Vehicle.state_machine.read(vehicle, :event) # => nil # Equivalent to vehicle.state_event
def read(object, attribute, ivar = false)
attribute = self.attribute(attribute)
if ivar
object.instance_variable_defined?("@#{attribute}") ? object.instance_variable_get("@#{attribute}") : nil
else
object.send(attribute)
end
end
# Sets a new value in the given object's attribute.
#
# For example,
#
# class Vehicle
# state_machine :initial => :parked do
# ...
# end
# end
#
# vehicle = Vehicle.new # => #<Vehicle:0xb7d94ab0 @state="parked">
# Vehicle.state_machine.write(vehicle, :state, 'idling') # => Equivalent to vehicle.state = 'idling'
# Vehicle.state_machine.write(vehicle, :event, 'park') # => Equivalent to vehicle.state_event = 'park'
# vehicle.state # => "idling"
# vehicle.event # => "park"
def write(object, attribute, value, ivar = false)
attribute = self.attribute(attribute)
ivar ? object.instance_variable_set("@#{attribute}", value) : object.send("#{attribute}=", value)
end
# Defines one or more events for the machine and the transitions that can
# be performed when those events are run.
#
# This method is also aliased as +on+ for improved compatibility with
# using a domain-specific language.
#
# Configuration options:
# * <tt>:human_name</tt> - The human-readable version of this event's name.
# By default, this is either defined by the integration or stringifies the
# name and converts underscores to spaces.
#
# == Instance methods
#
# The following instance methods are generated when a new event is defined
# (the "park" event is used as an example):
# * <tt>park(..., run_action = true)</tt> - Fires the "park" event,
# transitioning from the current state to the next valid state. If the
# last argument is a boolean, it will control whether the machine's action
# gets run.
# * <tt>park!(..., run_action = true)</tt> - Fires the "park" event,
# transitioning from the current state to the next valid state. If the
# transition fails, then a StateMachines::InvalidTransition error will be
# raised. If the last argument is a boolean, it will control whether the
# machine's action gets run.
# * <tt>can_park?(requirements = {})</tt> - Checks whether the "park" event
# can be fired given the current state of the object. This will *not* run
# validations or callbacks in ORM integrations. It will only determine if
# the state machine defines a valid transition for the event. To check
# whether an event can fire *and* passes validations, use event attributes
# (e.g. state_event) as described in the "Events" documentation of each
# ORM integration.
# * <tt>park_transition(requirements = {})</tt> - Gets the next transition
# that would be performed if the "park" event were to be fired now on the
# object or nil if no transitions can be performed. Like <tt>can_park?</tt>
# this will also *not* run validations or callbacks. It will only
# determine if the state machine defines a valid transition for the event.
#
# With a namespace of "car", the above names map to the following methods:
# * <tt>can_park_car?</tt>
# * <tt>park_car_transition</tt>
# * <tt>park_car</tt>
# * <tt>park_car!</tt>
#
# The <tt>can_park?</tt> and <tt>park_transition</tt> helpers both take an
# optional set of requirements for determining what transitions are available
# for the current object. These requirements include:
# * <tt>:from</tt> - One or more states to transition from. If none are
# specified, then this will be the object's current state.
# * <tt>:to</tt> - One or more states to transition to. If none are
# specified, then this will match any to state.
# * <tt>:guard</tt> - Whether to guard transitions with the if/unless
# conditionals defined for each one. Default is true.
#
# == Defining transitions
#
# +event+ requires a block which allows you to define the possible
# transitions that can happen as a result of that event. For example,
#
# event :park, :stop do
# transition :idling => :parked
# end
#
# event :first_gear do
# transition :parked => :first_gear, :if => :seatbelt_on?
# transition :parked => same # Allow loopback if seatbelt is off
# end
#
# See StateMachines::Event#transition for more information on
# the possible options that can be passed in.
#
# *Note* that this block is executed within the context of the actual event
# object. As a result, you will not be able to reference any class methods
# on the model without referencing the class itself. For example,
#
# class Vehicle
# def self.safe_states
# [:parked, :idling, :stalled]
# end
#
# state_machine do
# event :park do
# transition Vehicle.safe_states => :parked
# end
# end
# end
#
# == Overriding the event method
#
# By default, this will define an instance method (with the same name as the
# event) that will fire the next possible transition for that event. Although the
# +before_transition+, +after_transition+, and +around_transition+ hooks
# allow you to define behavior that gets executed as a result of the event's
# transition, you can also override the event method in order to have a
# little more fine-grained control.
#
# For example:
#
# class Vehicle
# state_machine do
# event :park do
# ...
# end
# end
#
# def park(*)
# take_deep_breath # Executes before the transition (and before_transition hooks) even if no transition is possible
# if result = super # Runs the transition and all before/after/around hooks
# applaud # Executes after the transition (and after_transition hooks)
# end
# result
# end
# end
#
# There are a few important things to note here. First, the method
# signature is defined with an unlimited argument list in order to allow
# callers to continue passing arguments that are expected by state_machine.
# For example, it will still allow calls to +park+ with a single parameter
# for skipping the configured action.
#
# Second, the overridden event method must call +super+ in order to run the
# logic for running the next possible transition. In order to remain
# consistent with other events, the result of +super+ is returned.
#
# Third, any behavior defined in this method will *not* get executed if
# you're taking advantage of attribute-based event transitions. For example:
#
# vehicle = Vehicle.new
# vehicle.state_event = 'park'
# vehicle.save
#
# In this case, the +park+ event will run the before/after/around transition
# hooks and transition the state, but the behavior defined in the overridden
# +park+ method will *not* be executed.
#
# == Defining additional arguments
#
# Additional arguments can be passed into events and accessed by transition
# hooks like so:
#
# class Vehicle
# state_machine do
# after_transition :on => :park do |vehicle, transition|
# kind = *transition.args # :parallel
# ...
# end
# after_transition :on => :park, :do => :take_deep_breath
#
# event :park do
# ...
# end
#
# def take_deep_breath(transition)
# kind = *transition.args # :parallel
# ...
# end
# end
# end
#
# vehicle = Vehicle.new
# vehicle.park(:parallel)
#
# *Remember* that if the last argument is a boolean, it will be used as the
# +run_action+ parameter to the event action. Using the +park+ action
# example from above, you might call it like so:
#
# vehicle.park # => Uses default args and runs machine action
# vehicle.park(:parallel) # => Specifies the +kind+ argument and runs the machine action
# vehicle.park(:parallel, false) # => Specifies the +kind+ argument and *skips* the machine action
#
# If you decide to override the +park+ event method *and* define additional
# arguments, you can do so as shown below:
#
# class Vehicle
# state_machine do
# event :park do
# ...
# end
# end
#
# def park(kind = :parallel, *args)
# take_deep_breath if kind == :parallel
# super
# end
# end
#
# Note that +super+ is called instead of <tt>super(*args)</tt>. This allows
# the entire arguments list to be accessed by transition callbacks through
# StateMachines::Transition#args.
#
# === Using matchers
#
# The +all+ / +any+ matchers can be used to easily execute blocks for a
# group of events. Note, however, that you cannot use these matchers to
# set configurations for events. Blocks using these matchers can be
# defined at any point in the state machine and will always get applied to
# the proper events.
#
# For example:
#
# state_machine :initial => :parked do
# ...
#
# event all - [:crash] do
# transition :stalled => :parked
# end
# end
#
# == Example
#
# class Vehicle
# state_machine do
# # The park, stop, and halt events will all share the given transitions
# event :park, :stop, :halt do
# transition [:idling, :backing_up] => :parked
# end
#
# event :stop do
# transition :first_gear => :idling
# end
#
# event :ignite do
# transition :parked => :idling
# transition :idling => same # Allow ignite while still idling
# end
# end
# end
def event(*names, &block)
options = names.last.is_a?(Hash) ? names.pop : {}
options.assert_valid_keys(:human_name)
# Store the context so that it can be used for / matched against any event
# that gets added
@events.context(names, &block) if block_given?
if names.first.is_a?(Matcher)
# Add any events referenced in the matcher. When matchers are used,
# events are not allowed to be configured.
raise ArgumentError, "Cannot configure events when using matchers (using #{options.inspect})" if options.any?
events = add_events(names.first.values)
else
events = add_events(names)
# Update the configuration for the event(s)
events.each do |event|
event.human_name = options[:human_name] if options.include?(:human_name)
# Add any states that may have been referenced within the event
add_states(event.known_states)
end
end
events.length == 1 ? events.first : events
end
alias_method :on, :event
# Creates a new transition that determines what to change the current state
# to when an event fires.
#
# == Defining transitions
#
# The options for a new transition uses the Hash syntax to map beginning
# states to ending states. For example,
#
# transition :parked => :idling, :idling => :first_gear, :on => :ignite
#
# In this case, when the +ignite+ event is fired, this transition will cause
# the state to be +idling+ if its current state is +parked+ or +first_gear+
# if its current state is +idling+.
#
# To help define these implicit transitions, a set of helpers is available
# for slightly more complex matching:
# * <tt>all</tt> - Matches every state in the machine
# * <tt>all - [:parked, :idling, ...]</tt> - Matches every state except those specified
# * <tt>any</tt> - An alias for +all+ (matches every state in the machine)
# * <tt>same</tt> - Matches the same state being transitioned from
#
# See StateMachines::MatcherHelpers for more information.
#
# Examples:
#
# transition all => nil, :on => :ignite # Transitions to nil regardless of the current state
# transition all => :idling, :on => :ignite # Transitions to :idling regardless of the current state
# transition all - [:idling, :first_gear] => :idling, :on => :ignite # Transitions every state but :idling and :first_gear to :idling
# transition nil => :idling, :on => :ignite # Transitions to :idling from the nil state
# transition :parked => :idling, :on => :ignite # Transitions to :idling if :parked
# transition [:parked, :stalled] => :idling, :on => :ignite # Transitions to :idling if :parked or :stalled
#
# transition :parked => same, :on => :park # Loops :parked back to :parked
# transition [:parked, :stalled] => same, :on => [:park, :stall] # Loops either :parked or :stalled back to the same state on the park and stall events
# transition all - :parked => same, :on => :noop # Loops every state but :parked back to the same state
#
# # Transitions to :idling if :parked, :first_gear if :idling, or :second_gear if :first_gear
# transition :parked => :idling, :idling => :first_gear, :first_gear => :second_gear, :on => :shift_up
#
# == Verbose transitions
#
# Transitions can also be defined use an explicit set of configuration
# options:
# * <tt>:from</tt> - A state or array of states that can be transitioned from.
# If not specified, then the transition can occur for *any* state.
# * <tt>:to</tt> - The state that's being transitioned to. If not specified,
# then the transition will simply loop back (i.e. the state will not change).
# * <tt>:except_from</tt> - A state or array of states that *cannot* be
# transitioned from.
#
# These options must be used when defining transitions within the context
# of a state.
#
# Examples:
#
# transition :to => nil, :on => :park
# transition :to => :idling, :on => :ignite
# transition :except_from => [:idling, :first_gear], :to => :idling, :on => :ignite
# transition :from => nil, :to => :idling, :on => :ignite
# transition :from => [:parked, :stalled], :to => :idling, :on => :ignite
#
# == Conditions
#
# In addition to the state requirements for each transition, a condition
# can also be defined to help determine whether that transition is
# available. These options will work on both the normal and verbose syntax.
#
# Configuration options:
# * <tt>:if</tt> - A method, proc or string to call to determine if the
# transition should occur (e.g. :if => :moving?, or :if => lambda {|vehicle| vehicle.speed > 60}).
# The condition should return or evaluate to true or false.
# * <tt>:unless</tt> - A method, proc or string to call to determine if the
# transition should not occur (e.g. :unless => :stopped?, or :unless => lambda {|vehicle| vehicle.speed <= 60}).
# The condition should return or evaluate to true or false.
#
# Examples:
#
# transition :parked => :idling, :on => :ignite, :if => :moving?
# transition :parked => :idling, :on => :ignite, :unless => :stopped?
# transition :idling => :first_gear, :first_gear => :second_gear, :on => :shift_up, :if => :seatbelt_on?
#
# transition :from => :parked, :to => :idling, :on => :ignite, :if => :moving?
# transition :from => :parked, :to => :idling, :on => :ignite, :unless => :stopped?
#
# == Order of operations
#
# Transitions are evaluated in the order in which they're defined. As a
# result, if more than one transition applies to a given object, then the
# first transition that matches will be performed.
def transition(options)
raise ArgumentError, 'Must specify :on event' unless options[:on]
branches = []
options = options.dup
event(*Array(options.delete(:on))) { branches << transition(options) }
branches.length == 1 ? branches.first : branches
end
# Creates a callback that will be invoked *before* a transition is
# performed so long as the given requirements match the transition.
#
# == The callback
#
# Callbacks must be defined as either an argument, in the :do option, or
# as a block. For example,
#
# class Vehicle
# state_machine do
# before_transition :set_alarm
# before_transition :set_alarm, all => :parked
# before_transition all => :parked, :do => :set_alarm
# before_transition all => :parked do |vehicle, transition|
# vehicle.set_alarm
# end
# ...
# end
# end
#
# Notice that the first three callbacks are the same in terms of how the
# methods to invoke are defined. However, using the <tt>:do</tt> can
# provide for a more fluid DSL.
#
# In addition, multiple callbacks can be defined like so:
#
# class Vehicle
# state_machine do
# before_transition :set_alarm, :lock_doors, all => :parked
# before_transition all => :parked, :do => [:set_alarm, :lock_doors]
# before_transition :set_alarm do |vehicle, transition|
# vehicle.lock_doors
# end
# end
# end
#
# Notice that the different ways of configuring methods can be mixed.
#
# == State requirements
#
# Callbacks can require that the machine be transitioning from and to
# specific states. These requirements use a Hash syntax to map beginning
# states to ending states. For example,
#
# before_transition :parked => :idling, :idling => :first_gear, :do => :set_alarm
#
# In this case, the +set_alarm+ callback will only be called if the machine
# is transitioning from +parked+ to +idling+ or from +idling+ to +first_gear+.
#
# To help define state requirements, a set of helpers is available for
# slightly more complex matching:
# * <tt>all</tt> - Matches every state/event in the machine
# * <tt>all - [:parked, :idling, ...]</tt> - Matches every state/event except those specified
# * <tt>any</tt> - An alias for +all+ (matches every state/event in the machine)
# * <tt>same</tt> - Matches the same state being transitioned from
#
# See StateMachines::MatcherHelpers for more information.
#
# Examples:
#
# before_transition :parked => [:idling, :first_gear], :do => ... # Matches from parked to idling or first_gear
# before_transition all - [:parked, :idling] => :idling, :do => ... # Matches from every state except parked and idling to idling
# before_transition all => :parked, :do => ... # Matches all states to parked
# before_transition any => same, :do => ... # Matches every loopback
#
# == Event requirements
#
# In addition to state requirements, an event requirement can be defined so
# that the callback is only invoked on specific events using the +on+
# option. This can also use the same matcher helpers as the state
# requirements.
#
# Examples:
#
# before_transition :on => :ignite, :do => ... # Matches only on ignite
# before_transition :on => all - :ignite, :do => ... # Matches on every event except ignite
# before_transition :parked => :idling, :on => :ignite, :do => ... # Matches from parked to idling on ignite
#
# == Verbose Requirements
#
# Requirements can also be defined using verbose options rather than the
# implicit Hash syntax and helper methods described above.
#
# Configuration options:
# * <tt>:from</tt> - One or more states being transitioned from. If none
# are specified, then all states will match.
# * <tt>:to</tt> - One or more states being transitioned to. If none are
# specified, then all states will match.
# * <tt>:on</tt> - One or more events that fired the transition. If none
# are specified, then all events will match.
# * <tt>:except_from</tt> - One or more states *not* being transitioned from
# * <tt>:except_to</tt> - One more states *not* being transitioned to
# * <tt>:except_on</tt> - One or more events that *did not* fire the transition
#
# Examples:
#
# before_transition :from => :ignite, :to => :idling, :on => :park, :do => ...
# before_transition :except_from => :ignite, :except_to => :idling, :except_on => :park, :do => ...
#
# == Conditions
#
# In addition to the state/event requirements, a condition can also be
# defined to help determine whether the callback should be invoked.
#
# Configuration options:
# * <tt>:if</tt> - A method, proc or string to call to determine if the
# callback should occur (e.g. :if => :allow_callbacks, or
# :if => lambda {|user| user.signup_step > 2}). The method, proc or string
# should return or evaluate to a true or false value.
# * <tt>:unless</tt> - A method, proc or string to call to determine if the
# callback should not occur (e.g. :unless => :skip_callbacks, or
# :unless => lambda {|user| user.signup_step <= 2}). The method, proc or
# string should return or evaluate to a true or false value.
#
# Examples:
#
# before_transition :parked => :idling, :if => :moving?, :do => ...
# before_transition :on => :ignite, :unless => :seatbelt_on?, :do => ...
#
# == Accessing the transition
#
# In addition to passing the object being transitioned, the actual
# transition describing the context (e.g. event, from, to) can be accessed
# as well. This additional argument is only passed if the callback allows
# for it.
#
# For example,
#
# class Vehicle
# # Only specifies one parameter (the object being transitioned)
# before_transition all => :parked do |vehicle|
# vehicle.set_alarm
# end
#
# # Specifies 2 parameters (object being transitioned and actual transition)
# before_transition all => :parked do |vehicle, transition|
# vehicle.set_alarm(transition)
# end
# end
#
# *Note* that the object in the callback will only be passed in as an
# argument if callbacks are configured to *not* be bound to the object
# involved. This is the default and may change on a per-integration basis.
#
# See StateMachines::Transition for more information about the
# attributes available on the transition.
#
# == Usage with delegates
#
# As noted above, state_machine uses the callback method's argument list
# arity to determine whether to include the transition in the method call.
# If you're using delegates, such as those defined in ActiveSupport or
# Forwardable, the actual arity of the delegated method gets masked. This
# means that callbacks which reference delegates will always get passed the
# transition as an argument. For example:
#
# class Vehicle
# extend Forwardable
# delegate :refresh => :dashboard
#
# state_machine do
# before_transition :refresh
# ...
# end
#
# def dashboard
# @dashboard ||= Dashboard.new
# end
# end
#
# class Dashboard
# def refresh(transition)
# # ...
# end
# end
#
# In the above example, <tt>Dashboard#refresh</tt> *must* define a
# +transition+ argument. Otherwise, an +ArgumentError+ exception will get
# raised. The only way around this is to avoid the use of delegates and
# manually define the delegate method so that the correct arity is used.
#
# == Examples
#
# Below is an example of a class with one state machine and various types
# of +before+ transitions defined for it:
#
# class Vehicle
# state_machine do
# # Before all transitions
# before_transition :update_dashboard
#
# # Before specific transition:
# before_transition [:first_gear, :idling] => :parked, :on => :park, :do => :take_off_seatbelt
#
# # With conditional callback:
# before_transition all => :parked, :do => :take_off_seatbelt, :if => :seatbelt_on?
#
# # Using helpers:
# before_transition all - :stalled => same, :on => any - :crash, :do => :update_dashboard
# ...
# end
# end
#
# As can be seen, any number of transitions can be created using various
# combinations of configuration options.
def before_transition(*args, &block)
options = (args.last.is_a?(Hash) ? args.pop : {})
options[:do] = args if args.any?
add_callback(:before, options, &block)
end
# Creates a callback that will be invoked *after* a transition is
# performed so long as the given requirements match the transition.
#
# See +before_transition+ for a description of the possible configurations
# for defining callbacks.
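#
# == Example
#
# A short sketch mirroring the +before_transition+ examples (assuming the
# Vehicle methods +update_dashboard+ and +set_alarm+ exist):
#
#   class Vehicle
#     state_machine do
#       # After all transitions
#       after_transition :update_dashboard
#
#       # After a specific transition
#       after_transition all => :parked, :on => :park, :do => :set_alarm
#       ...
#     end
#   end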
def after_transition(*args, &block)
options = (args.last.is_a?(Hash) ? args.pop : {})
options[:do] = args if args.any?
add_callback(:after, options, &block)
end
# Creates a callback that will be invoked *around* a transition so long as
# the given requirements match the transition.
#
# == The callback
#
# Around callbacks wrap transitions, executing code both before and after.
# These callbacks are defined in the exact same manner as before / after
# callbacks with the exception that the transition must be yielded to in
# order to finish running it.
#
# If defining +around+ callbacks using blocks, you must yield within the
# transition by directly calling the block (since yielding is not allowed
# within blocks).
#
# For example,
#
# class Vehicle
# state_machine do
# around_transition do |block|
# Benchmark.measure { block.call }
# end
#
# around_transition do |vehicle, block|
# logger.info "vehicle was #{state}..."
# block.call
# logger.info "...and is now #{state}"
# end
#
# around_transition do |vehicle, transition, block|
# logger.info "before #{transition.event}: #{vehicle.state}"
# block.call
# logger.info "after #{transition.event}: #{vehicle.state}"
# end
# end
# end
#
# Notice that referencing the block is similar to doing so within an
# actual method definition in that it is always the last argument.
#
# On the other hand, if you're defining +around+ callbacks using method
# references, you can yield like normal:
#
# class Vehicle
# state_machine do
# around_transition :benchmark
# ...
# end
#
# def benchmark
# Benchmark.measure { yield }
# end
# end
#
# See +before_transition+ for a description of the possible configurations
# for defining callbacks.
def around_transition(*args, &block)
options = (args.last.is_a?(Hash) ? args.pop : {})
options[:do] = args if args.any?
add_callback(:around, options, &block)
end
# Creates a callback that will be invoked *after* a transition fails to
# be performed so long as the given requirements match the transition.
#
# See +before_transition+ for a description of the possible configurations
# for defining callbacks. *Note* however that you cannot define the state
# requirements in these callbacks. You may only define event requirements.
#
# = The callback
#
# Failure callbacks get invoked whenever an event fails to execute. This
# can happen when no transition is available, a +before+ callback halts
# execution, or the action associated with this machine fails to succeed.
# In any of these cases, any failure callback that matches the attempted
# transition will be run.
#
# For example,
#
# class Vehicle
# state_machine do
# after_failure do |vehicle, transition|
# logger.error "vehicle #{vehicle} failed to transition on #{transition.event}"
# end
#
# after_failure :on => :ignite, :do => :log_ignition_failure
#
# ...
# end
# end
def after_failure(*args, &block)
options = (args.last.is_a?(Hash) ? args.pop : {})
options[:do] = args if args.any?
options.assert_valid_keys(:on, :do, :if, :unless)
add_callback(:failure, options, &block)
end
# Generates a list of the possible transition sequences that can be run on
# the given object. These paths can reveal all of the possible states and
# events that can be encountered in the object's state machine based on the
# object's current state.
#
# Configuration options:
# * +from+ - The initial state to start all paths from. By default, this
# is the object's current state.
# * +to+ - The target state to end all paths on. By default, paths will
# end when they loop back to the first transition on the path.
# * +deep+ - Whether to allow the target state to be crossed more than once
# in a path. By default, paths will immediately stop when the target
# state (if specified) is reached. If this is enabled, then paths can
# continue even after reaching the target state; they will stop when
# reaching the target state a second time.
#
# *Note* that the object is never modified when the list of paths is
# generated.
#
# == Examples
#
# class Vehicle
# state_machine :initial => :parked do
# event :ignite do
# transition :parked => :idling
# end
#
# event :shift_up do
# transition :idling => :first_gear, :first_gear => :second_gear
# end
#
# event :shift_down do
# transition :second_gear => :first_gear, :first_gear => :idling
# end
# end
# end
#
# vehicle = Vehicle.new # => #<Vehicle:0xb7c27024 @state="parked">
# vehicle.state # => "parked"
#
# vehicle.state_paths
# # => [
# # [#<StateMachines::Transition attribute=:state event=:ignite from="parked" from_name=:parked to="idling" to_name=:idling>,
# # #<StateMachines::Transition attribute=:state event=:shift_up from="idling" from_name=:idling to="first_gear" to_name=:first_gear>,
# # #<StateMachines::Transition attribute=:state event=:shift_up from="first_gear" from_name=:first_gear to="second_gear" to_name=:second_gear>,
# # #<StateMachines::Transition attribute=:state event=:shift_down from="second_gear" from_name=:second_gear to="first_gear" to_name=:first_gear>,
# # #<StateMachines::Transition attribute=:state event=:shift_down from="first_gear" from_name=:first_gear to="idling" to_name=:idling>],
# #
# # [#<StateMachines::Transition attribute=:state event=:ignite from="parked" from_name=:parked to="idling" to_name=:idling>,
# # #<StateMachines::Transition attribute=:state event=:shift_up from="idling" from_name=:idling to="first_gear" to_name=:first_gear>,
# # #<StateMachines::Transition attribute=:state event=:shift_down from="first_gear" from_name=:first_gear to="idling" to_name=:idling>]
# # ]
#
# vehicle.state_paths(:from => :parked, :to => :second_gear)
# # => [
# # [#<StateMachines::Transition attribute=:state event=:ignite from="parked" from_name=:parked to="idling" to_name=:idling>,
# # #<StateMachines::Transition attribute=:state event=:shift_up from="idling" from_name=:idling to="first_gear" to_name=:first_gear>,
# # #<StateMachines::Transition attribute=:state event=:shift_up from="first_gear" from_name=:first_gear to="second_gear" to_name=:second_gear>]
# # ]
#
# In addition to getting the possible paths that can be accessed, you can
# also get summary information about the states / events that can be
# accessed at some point along one of the paths. For example:
#
# # Get the list of states that can be accessed from the current state
# vehicle.state_paths.to_states # => [:idling, :first_gear, :second_gear]
#
# # Get the list of events that can be accessed from the current state
# vehicle.state_paths.events # => [:ignite, :shift_up, :shift_down]
def paths_for(object, requirements = {})
PathCollection.new(object, self, requirements)
end
# Marks the given object as invalid with the given message.
#
# By default, this is a no-op.
def invalidate(_object, _attribute, _message, _values = [])
end
# Gets a description of the errors for the given object. This is used to
# provide more detailed information when an InvalidTransition exception is
# raised.
def errors_for(_object)
''
end
# Resets any errors previously added when invalidating the given object.
#
# By default, this is a no-op.
def reset(_object)
end
# Generates the message to use when invalidating the given object after
# failing to transition on a specific event
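#
# An illustrative sketch, assuming a message template registered for this
# machine such as :invalid_transition => "cannot transition via %s":
#
#   machine.generate_message(:invalid_transition, [[:event, :ignite]])
#   # => "cannot transition via ignite"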
def generate_message(name, values = [])
message = (@messages[name] || self.class.default_messages[name])
# Check whether there are actually any values to interpolate to avoid
# any warnings
if message.scan(/%./).any? { |match| match != '%%' }
message % values.map { |value| value.last }
else
message
end
end
# Runs a transaction, rolling back any changes if the yielded block fails.
#
# This is only applicable to integrations that involve databases. By
# default, this will not run any transactions since the changes aren't
# taking place within the context of a database.
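#
# A minimal sketch of how a transition might be wrapped (the base
# implementation below simply yields; integrations supply real transactions):
#
#   machine.within_transaction(vehicle) do
#     # perform the transition; a database-backed integration can roll back
#   end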
def within_transaction(object)
if use_transactions
transaction(object) { yield }
else
yield
end
end
def draw(*)
fail NotImplementedError
end
# Determines whether an action hook was defined for firing attribute-based
# event transitions when the configured action gets called.
def action_hook?(self_only = false)
@action_hook_defined || !self_only && owner_class.state_machines.any? { |name, machine| machine.action == action && machine != self && machine.action_hook?(true) }
end
protected
# Runs additional initialization hooks. By default, this is a no-op.
def after_initialize
end
# Looks up other machines that have been defined in the owner class and
# are targeting the same attribute as this machine. When accessing
# sibling machines, they will be automatically copied for the current
# class if they haven't been already. This ensures that any configuration
# changes made to the sibling machines only affect this class and not any
# base class that may have originally defined the machine.
def sibling_machines
owner_class.state_machines.inject([]) do |machines, (name, machine)|
if machine.attribute == attribute && machine != self
machines << (owner_class.state_machine(name) {})
end
machines
end
end
# Determines if the machine's attribute needs to be initialized. This
# will only be true if the machine's attribute is blank.
def initialize_state?(object)
value = read(object, :state)
(value.nil? || value.respond_to?(:empty?) && value.empty?) && !states[value, :value]
end
# Adds helper methods for interacting with the state machine, including
# for states, events, and transitions
def define_helpers
define_state_accessor
define_state_predicate
define_event_helpers
define_path_helpers
define_action_helpers if define_action_helpers?
define_name_helpers
end
# Defines the initial values for state machine attributes. Static values
# are set prior to the original initialize method and dynamic values are
# set *after* the initialize method in case it is dependent on it.
def define_state_initializer
define_helper :instance, <<-end_eval, __FILE__, __LINE__ + 1
def initialize(*)
self.class.state_machines.initialize_states(self) { super }
end
end_eval
end
# Adds reader/writer methods for accessing the state attribute
def define_state_accessor
attribute = self.attribute
@helper_modules[:instance].class_eval { attr_reader attribute } unless owner_class_ancestor_has_method?(:instance, attribute)
@helper_modules[:instance].class_eval { attr_writer attribute } unless owner_class_ancestor_has_method?(:instance, "#{attribute}=")
end
# Adds predicate method to the owner class for determining the name of the
# current state
def define_state_predicate
call_super = !!owner_class_ancestor_has_method?(:instance, "#{name}?")
define_helper :instance, <<-end_eval, __FILE__, __LINE__ + 1
def #{name}?(*args)
args.empty? && (#{call_super} || defined?(super)) ? super : self.class.state_machine(#{name.inspect}).states.matches?(self, *args)
end
end_eval
end
# Adds helper methods for getting information about this state machine's
# events
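#
# For the default :state machine, the generated helpers end up looking
# roughly like:
#
#   vehicle.state_events               # => [:ignite, ...]
#   vehicle.state_transitions          # => [#<StateMachines::Transition ...>]
#   vehicle.fire_state_event(:ignite)  # => true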
def define_event_helpers
# Gets the events that are allowed to fire on the current object
define_helper(:instance, attribute(:events)) do |machine, object, *args|
machine.events.valid_for(object, *args).map { |event| event.name }
end
# Gets the next possible transitions that can be run on the current
# object
define_helper(:instance, attribute(:transitions)) do |machine, object, *args|
machine.events.transitions_for(object, *args)
end
# Fire an arbitrary event for this machine
define_helper(:instance, "fire_#{attribute(:event)}") do |machine, object, event, *args|
machine.events.fetch(event).fire(object, *args)
end
# Add helpers for tracking the event / transition to invoke when the
# action is called
if action
event_attribute = attribute(:event)
define_helper(:instance, event_attribute) do |machine, object|
# Interpret non-blank events as present
event = machine.read(object, :event, true)
event && !(event.respond_to?(:empty?) && event.empty?) ? event.to_sym : nil
end
# A roundabout way of writing the attribute is used here so that
# integrations can hook into this modification
define_helper(:instance, "#{event_attribute}=") do |machine, object, value|
machine.write(object, :event, value, true)
end
event_transition_attribute = attribute(:event_transition)
define_helper :instance, <<-end_eval, __FILE__, __LINE__ + 1
protected; attr_accessor #{event_transition_attribute.inspect}
end_eval
end
end
# Adds helper methods for getting information about this state machine's
# available transition paths
def define_path_helpers
# Gets the paths of transitions available to the current object
define_helper(:instance, attribute(:paths)) do |machine, object, *args|
machine.paths_for(object, *args)
end
end
# Determines whether action helpers should be defined for this machine.
# This is only true if there is an action configured and no other machines
# have processed this same configuration already.
def define_action_helpers?
action && !owner_class.state_machines.any? { |name, machine| machine.action == action && machine != self }
end
# Adds helper methods for automatically firing events when an action
# is invoked
def define_action_helpers
if action_hook
@action_hook_defined = true
define_action_hook
end
end
# Hooks directly into actions by defining the same method in an included
# module. As a result, when the action gets invoked, any state events
# defined for the object will get run. Method visibility is preserved.
def define_action_hook
action_hook = self.action_hook
action = self.action
private_action_hook = owner_class.private_method_defined?(action_hook)
# Only define helper if it hasn't already been defined
define_helper :instance, <<-end_eval, __FILE__, __LINE__ + 1
def #{action_hook}(*)
self.class.state_machines.transitions(self, #{action.inspect}).perform { super }
end
private #{action_hook.inspect} if #{private_action_hook}
end_eval
end
# The method to hook into for triggering transitions when invoked. By
# default, this is the action configured for the machine.
#
# Since the default hook technique relies on module inheritance, the
# action must be defined in an ancestor of the owner class in order for
# it to be the action hook.
def action_hook
action && owner_class_ancestor_has_method?(:instance, action) ? action : nil
end
# Determines whether there's already a helper method defined within the
# given scope. This is true only if one of the owner's ancestors defines
# the method and is further along in the ancestor chain than this
# machine's helper module.
def owner_class_ancestor_has_method?(scope, method)
return false unless owner_class_has_method?(scope, method)
superclasses = owner_class.ancestors.select { |ancestor| ancestor.is_a?(Class) }[1..-1]
if scope == :class
current = owner_class.singleton_class
superclass = superclasses.first
else
current = owner_class
superclass = owner_class.superclass
end
# Generate the list of modules that *only* occur in the owner class, but
# were included *prior* to the helper modules, in addition to the
# superclasses
ancestors = current.ancestors - superclass.ancestors + superclasses
ancestors = ancestors[ancestors.index(@helper_modules[scope])..-1].reverse
# Search for the first ancestor that defined this method
ancestors.detect do |ancestor|
ancestor = ancestor.singleton_class if scope == :class && ancestor.is_a?(Class)
ancestor.method_defined?(method) || ancestor.private_method_defined?(method)
end
end
def owner_class_has_method?(scope, method)
target = scope == :class ? owner_class.singleton_class : owner_class
target.method_defined?(method) || target.private_method_defined?(method)
end
# Adds helper methods for accessing naming information about states and
# events on the owner class
def define_name_helpers
# Gets the humanized version of a state
define_helper(:class, "human_#{attribute(:name)}") do |machine, klass, state|
machine.states.fetch(state).human_name(klass)
end
# Gets the humanized version of an event
define_helper(:class, "human_#{attribute(:event_name)}") do |machine, klass, event|
machine.events.fetch(event).human_name(klass)
end
# Gets the state name for the current value
define_helper(:instance, attribute(:name)) do |machine, object|
machine.states.match!(object).name
end
# Gets the human state name for the current value
define_helper(:instance, "human_#{attribute(:name)}") do |machine, object|
machine.states.match!(object).human_name(object.class)
end
end
# Defines the with/without scope helpers for this attribute. Both the
# singular and plural versions of the attribute are defined for each
# scope helper. A custom plural can be specified if it cannot be
# automatically determined by either calling +pluralize+ on the attribute
# name or adding an "s" to the end of the name.
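#
# As an illustrative sketch, in an ORM integration that implements
# create_with_scope / create_without_scope, this exposes class-level helpers
# along the lines of:
#
#   Vehicle.with_state(:parked)
#   Vehicle.with_states(:parked, :idling)
#   Vehicle.without_states(:idling)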
def define_scopes(custom_plural = nil)
plural = custom_plural || pluralize(name)
[:with, :without].each do |kind|
[name, plural].map { |s| s.to_s }.uniq.each do |suffix|
method = "#{kind}_#{suffix}"
if scope = send("create_#{kind}_scope", method)
# Converts state names to their corresponding values so that they
# can be looked up properly
define_helper(:class, method) do |machine, klass, *states|
run_scope(scope, machine, klass, states)
end
end
end
end
end
# Generates the results for the given scope based on one or more states to
# filter by
def run_scope(scope, machine, klass, states)
values = states.flatten.map { |state| machine.states.fetch(state).value }
scope.call(klass, values)
end
# Pluralizes the given word using #pluralize (if available) or simply
# adding an "s" to the end of the word
def pluralize(word)
word = word.to_s
if word.respond_to?(:pluralize)
word.pluralize
else
"#{name}s"
end
end
# Creates a scope for finding objects *with* a particular value or values
# for the attribute.
#
# By default, this is a no-op.
def create_with_scope(name)
end
# Creates a scope for finding objects *without* a particular value or
# values for the attribute.
#
# By default, this is a no-op.
def create_without_scope(name)
end
# Always yields
def transaction(object)
yield
end
# Gets the initial attribute value defined by the owner class (outside of
# the machine's definition). By default, this is always nil.
def owner_class_attribute_default
nil
end
# Checks whether the given state matches the attribute default specified
# by the owner class
def owner_class_attribute_default_matches?(state)
state.matches?(owner_class_attribute_default)
end
# Updates this machine based on the configuration of other machines in the
# owner class that share the same target attribute.
def add_sibling_machine_configs
# Add existing states
sibling_machines.each do |machine|
machine.states.each { |state| states << state unless states[state.name] }
end
end
# Adds a new transition callback of the given type.
def add_callback(type, options, &block)
callbacks[type == :around ? :before : type] << callback = Callback.new(type, options, &block)
add_states(callback.known_states)
callback
end
# Tracks the given set of states in the list of all known states for
# this machine
def add_states(new_states)
new_states.map do |new_state|
# Check for other states that use a different class type for their name.
# This typically prevents string / symbol misuse.
if new_state && conflict = states.detect { |state| state.name && state.name.class != new_state.class }
raise ArgumentError, "#{new_state.inspect} state defined as #{new_state.class}, #{conflict.name.inspect} defined as #{conflict.name.class}; all states must be consistent"
end
unless state = states[new_state]
states << state = State.new(self, new_state)
# Copy states over to sibling machines
sibling_machines.each { |machine| machine.states << state }
end
state
end
end
# Tracks the given set of events in the list of all known events for
# this machine
end
|
weshatheleopard/rubyXL | lib/rubyXL/convenience_methods/worksheet.rb | RubyXL.WorksheetConvenienceMethods.insert_row | ruby | def insert_row(row_index = 0)
validate_workbook
ensure_cell_exists(row_index)
old_row = new_cells = nil
if row_index > 0 then
old_row = sheet_data.rows[row_index - 1]
if old_row then
new_cells = old_row.cells.collect { |c|
if c.nil? then nil
else nc = RubyXL::Cell.new(:style_index => c.style_index)
nc.worksheet = self
nc
end
}
end
end
row0 = sheet_data.rows[0]
new_cells ||= Array.new((row0 && row0.cells.size) || 0)
sheet_data.rows.insert(row_index, nil)
new_row = add_row(row_index, :cells => new_cells, :style_index => old_row && old_row.style_index)
# Update row values for all rows below
row_index.upto(sheet_data.rows.size - 1) { |r|
row = sheet_data.rows[r]
next if row.nil?
row.cells.each_with_index { |cell, c|
next if cell.nil?
cell.r = RubyXL::Reference.new(r, c)
}
}
return new_row
end | Inserts row at row_index, pushes down, copies style from the row above (that's what Excel 2013 does!)
NOTE: use of this method will break formulas which reference cells which are being "pushed down" | train | https://github.com/weshatheleopard/rubyXL/blob/e61d78de9486316cdee039d3590177dc05db0f0c/lib/rubyXL/convenience_methods/worksheet.rb#L74-L110 | module WorksheetConvenienceMethods
NAME = 0
SIZE = 1
COLOR = 2
ITALICS = 3
BOLD = 4
UNDERLINE = 5
STRIKETHROUGH = 6
def insert_cell(row = 0, col = 0, data = nil, formula = nil, shift = nil)
validate_workbook
ensure_cell_exists(row, col)
case shift
when nil then # No shifting at all
when :right then
sheet_data.rows[row].insert_cell_shift_right(nil, col)
when :down then
add_row(sheet_data.size, :cells => Array.new(sheet_data.rows[row].size))
(sheet_data.size - 1).downto(row + 1) { |index|
old_row = sheet_data.rows[index - 1]
if old_row.nil? then
sheet_data.rows[index] = nil
else
new_row = sheet_data.rows[index] || add_row(index)
new_row.cells[col] = old_row.cells[col]
end
}
else
raise 'invalid shift option'
end
return add_cell(row, col, data, formula)
end
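# An illustrative usage sketch for the method above (worksheet and data names
# are assumptions):
#
#   worksheet.insert_cell(0, 0, 'value')               # write A1, no shifting
#   worksheet.insert_cell(0, 0, 'value', nil, :right)  # shift row 0 contents right first
#   worksheet.insert_cell(0, 0, 'value', nil, :down)   # shift column 0 contents down first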
# by default, only sets cell to nil
# if :left is specified, method will shift row contents to the right of the deleted cell to the left
# if :up is specified, method will shift column contents below the deleted cell upward
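# An illustrative usage sketch:
#   worksheet.delete_cell(0, 0)         # blank out A1 only
#   worksheet.delete_cell(0, 0, :left)  # shift the rest of row 0 to the left
#   worksheet.delete_cell(0, 0, :up)    # shift the rest of column 0 upward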
def delete_cell(row_index = 0, column_index=0, shift=nil)
validate_workbook
validate_nonnegative(row_index)
validate_nonnegative(column_index)
row = sheet_data[row_index]
old_cell = row && row[column_index]
case shift
when nil then
row.cells[column_index] = nil if row
when :left then
row.delete_cell_shift_left(column_index) if row
when :up then
(row_index...(sheet_data.size - 1)).each { |index|
old_row = sheet_data.rows[index + 1]
if old_row.nil? then
sheet_data.rows[index] = nil
else
new_row = sheet_data.rows[index] || add_row(index)
c = new_row.cells[column_index] = old_row.cells[column_index]
c.row = (index + 1) if c.is_a?(Cell)
end
}
else
raise 'invalid shift option'
end
return old_cell
end
# Inserts row at row_index, pushes down, copies style from the row above (that's what Excel 2013 does!)
# NOTE: use of this method will break formulas which reference cells which are being "pushed down"
def delete_row(row_index=0)
validate_workbook
validate_nonnegative(row_index)
deleted = sheet_data.rows.delete_at(row_index)
# Update row number of each cell
row_index.upto(sheet_data.size - 1) { |index|
row = sheet_data[index]
row && row.cells.each{ |c| c.row -= 1 unless c.nil? }
}
return deleted
end
# Inserts column at +column_index+, pushes everything right, takes styles from column to left
# NOTE: use of this method will break formulas which reference cells which are being "pushed right"
def insert_column(column_index = 0)
validate_workbook
ensure_cell_exists(0, column_index)
old_range = cols.get_range(column_index)
# Go through each cell in column
sheet_data.rows.each_with_index { |row, row_index|
next if row.nil? # Do not process blank rows
old_cell = row[column_index]
c = nil
if old_cell && old_cell.style_index != 0 &&
old_range && old_range.style_index != old_cell.style_index then
c = RubyXL::Cell.new(:style_index => old_cell.style_index, :worksheet => self,
:row => row_index, :column => column_index,
:datatype => RubyXL::DataType::SHARED_STRING)
end
row.insert_cell_shift_right(c, column_index)
}
cols.insert_column(column_index)
# TODO: update column numbers
end
def delete_column(column_index = 0)
validate_workbook
validate_nonnegative(column_index)
# Delete column
sheet_data.rows.each { |row| row && row.cells.delete_at(column_index) }
# Update column numbers for cells to the right of the deleted column
sheet_data.rows.each_with_index { |row, row_index|
next if row.nil?
row.cells.each_with_index { |c, ci|
c.column = ci if c.is_a?(Cell)
}
}
cols.each { |range| range.delete_column(column_index) }
end
def get_row_style(row_index)
row = sheet_data.rows[row_index]
(row && row.style_index) || 0
end
def get_row_fill(row = 0)
(row = sheet_data.rows[row]) && row.get_fill_color
end
def get_row_font_name(row = 0)
(font = row_font(row)) && font.get_name
end
def get_row_font_size(row = 0)
(font = row_font(row)) && font.get_size
end
def get_row_font_color(row = 0)
font = row_font(row)
color = font && font.color
color && (color.rgb || '000000')
end
def is_row_italicized(row = 0)
(font = row_font(row)) && font.is_italic
end
def is_row_bolded(row = 0)
(font = row_font(row)) && font.is_bold
end
def is_row_underlined(row = 0)
(font = row_font(row)) && font.is_underlined
end
def is_row_struckthrough(row = 0)
(font = row_font(row)) && font.is_strikethrough
end
def get_row_height(row = 0)
validate_workbook
validate_nonnegative(row)
row = sheet_data.rows[row]
row && row.ht || RubyXL::Row::DEFAULT_HEIGHT
end
def get_row_border(row, border_direction)
validate_workbook
border = @workbook.borders[get_row_xf(row).border_id]
border && border.get_edge_style(border_direction)
end
def get_row_border_color(row, border_direction)
validate_workbook
border = @workbook.borders[get_row_xf(row).border_id]
border && border.get_edge_color(border_direction)
end
def row_font(row)
(row = sheet_data.rows[row]) && row.get_font
end
def get_row_alignment(row, is_horizontal)
validate_workbook
xf_obj = get_row_xf(row)
return nil if xf_obj.alignment.nil?
if is_horizontal then return xf_obj.alignment.horizontal
else return xf_obj.alignment.vertical
end
end
def get_cols_style_index(column_index)
validate_nonnegative(column_index)
range = cols.locate_range(column_index)
(range && range.style_index) || 0
end
def get_column_font_name(col = 0)
font = column_font(col)
font && font.get_name
end
def get_column_font_size(col = 0)
font = column_font(col)
font && font.get_size
end
def get_column_font_color(col = 0)
font = column_font(col)
font && (font.get_rgb_color || '000000')
end
def is_column_italicized(col = 0)
font = column_font(col)
font && font.is_italic
end
def is_column_bolded(col = 0)
font = column_font(col)
font && font.is_bold
end
def is_column_underlined(col = 0)
font = column_font(col)
font && font.is_underlined
end
def is_column_struckthrough(col = 0)
font = column_font(col)
font && font.is_strikethrough
end
# Get raw column width value as stored in the file
def get_column_width_raw(column_index = 0)
validate_workbook
validate_nonnegative(column_index)
range = cols.locate_range(column_index)
range && range.width
end
# Get column width measured in number of digits, as per
# http://msdn.microsoft.com/en-us/library/documentformat.openxml.spreadsheet.column%28v=office.14%29.aspx
def get_column_width(column_index = 0)
width = get_column_width_raw(column_index)
return RubyXL::ColumnRange::DEFAULT_WIDTH if width.nil?
(width - (5.0 / RubyXL::Font::MAX_DIGIT_WIDTH)).round
end
# Set raw column width value
def change_column_width_raw(column_index, width)
validate_workbook
ensure_cell_exists(0, column_index)
range = cols.get_range(column_index)
range.width = width
range.custom_width = true
end
# Get column width measured in number of digits, as per
# http://msdn.microsoft.com/en-us/library/documentformat.openxml.spreadsheet.column%28v=office.14%29.aspx
def change_column_width(column_index, width_in_chars = RubyXL::ColumnRange::DEFAULT_WIDTH)
change_column_width_raw(column_index, ((width_in_chars + (5.0 / RubyXL::Font::MAX_DIGIT_WIDTH)) * 256).to_i / 256.0)
end
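# A worked round-trip of the two width formulas above, assuming RubyXL::Font::MAX_DIGIT_WIDTH is 7:
#
#   sheet.change_column_width(0, 12)   # stored raw as ((12 + 5.0/7) * 256).to_i / 256.0 => 12.7109375
#   sheet.get_column_width_raw(0)      # => 12.7109375
#   sheet.get_column_width(0)          # => 12 (rounded back to whole digits)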
# Helper method to get the style index for a column
def get_col_style(column_index)
range = cols.locate_range(column_index)
(range && range.style_index) || 0
end
def get_column_fill(col=0)
validate_workbook
validate_nonnegative(col)
@workbook.get_fill_color(get_col_xf(col))
end
def change_column_fill(column_index, color_code = 'ffffff')
validate_workbook
RubyXL::Color.validate_color(color_code)
ensure_cell_exists(0, column_index)
cols.get_range(column_index).style_index = @workbook.modify_fill(get_col_style(column_index), color_code)
sheet_data.rows.each { |row|
next if row.nil?
c = row[column_index]
next if c.nil?
c.change_fill(color_code)
}
end
def get_column_border(col, border_direction)
validate_workbook
xf = @workbook.cell_xfs[get_cols_style_index(col)]
border = @workbook.borders[xf.border_id]
border && border.get_edge_style(border_direction)
end
def get_column_border_color(col, border_direction)
validate_workbook
xf = @workbook.cell_xfs[get_cols_style_index(col)]
border = @workbook.borders[xf.border_id]
border && border.get_edge_color(border_direction)
end
def column_font(col)
validate_workbook
@workbook.fonts[@workbook.cell_xfs[get_cols_style_index(col)].font_id]
end
def get_column_alignment(col, type)
validate_workbook
xf = @workbook.cell_xfs[get_cols_style_index(col)]
xf.alignment && xf.alignment.send(type)
end
def change_row_horizontal_alignment(row = 0, alignment = 'center')
validate_workbook
validate_nonnegative(row)
change_row_alignment(row) { |a| a.horizontal = alignment }
end
def change_row_vertical_alignment(row = 0, alignment = 'center')
validate_workbook
validate_nonnegative(row)
change_row_alignment(row) { |a| a.vertical = alignment }
end
def change_row_border(row, direction, weight)
validate_workbook
ensure_cell_exists(row)
sheet_data.rows[row].style_index = @workbook.modify_border(get_row_style(row), direction, weight)
sheet_data[row].cells.each { |c|
c.change_border(direction, weight) unless c.nil?
}
end
def change_row_border_color(row, direction, color = '000000')
validate_workbook
ensure_cell_exists(row)
Color.validate_color(color)
sheet_data.rows[row].style_index = @workbook.modify_border_color(get_row_style(row), direction, color)
sheet_data[row].cells.each { |c|
c.change_border_color(direction, color) unless c.nil?
}
end
def change_row_fill(row_index = 0, rgb = 'ffffff')
validate_workbook
ensure_cell_exists(row_index)
Color.validate_color(rgb)
sheet_data.rows[row_index].style_index = @workbook.modify_fill(get_row_style(row_index), rgb)
sheet_data[row_index].cells.each { |c| c.change_fill(rgb) unless c.nil? }
end
# Helper method to update the row styles array
# change_type - NAME or SIZE or COLOR etc
# main method to change font, called from each separate font mutator method
def change_row_font(row_index, change_type, arg, font)
validate_workbook
ensure_cell_exists(row_index)
xf = workbook.register_new_font(font, get_row_xf(row_index))
row = sheet_data[row_index]
row.style_index = workbook.register_new_xf(xf)
row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }
end
def change_row_font_name(row = 0, font_name = 'Verdana')
ensure_cell_exists(row)
font = row_font(row).dup
font.set_name(font_name)
change_row_font(row, Worksheet::NAME, font_name, font)
end
def change_row_font_size(row = 0, font_size=10)
ensure_cell_exists(row)
font = row_font(row).dup
font.set_size(font_size)
change_row_font(row, Worksheet::SIZE, font_size, font)
end
def change_row_font_color(row = 0, font_color = '000000')
ensure_cell_exists(row)
Color.validate_color(font_color)
font = row_font(row).dup
font.set_rgb_color(font_color)
change_row_font(row, Worksheet::COLOR, font_color, font)
end
def change_row_italics(row = 0, italicized = false)
ensure_cell_exists(row)
font = row_font(row).dup
font.set_italic(italicized)
change_row_font(row, Worksheet::ITALICS, italicized, font)
end
def change_row_bold(row = 0, bolded = false)
ensure_cell_exists(row)
font = row_font(row).dup
font.set_bold(bolded)
change_row_font(row, Worksheet::BOLD, bolded, font)
end
def change_row_underline(row = 0, underlined=false)
ensure_cell_exists(row)
font = row_font(row).dup
font.set_underline(underlined)
change_row_font(row, Worksheet::UNDERLINE, underlined, font)
end
def change_row_strikethrough(row = 0, struckthrough=false)
ensure_cell_exists(row)
font = row_font(row).dup
font.set_strikethrough(struckthrough)
change_row_font(row, Worksheet::STRIKETHROUGH, struckthrough, font)
end
def change_row_height(row = 0, height = 10)
validate_workbook
ensure_cell_exists(row)
c = sheet_data.rows[row]
c.ht = height
c.custom_height = true
end
# Helper method to update the fonts and cell styles array
# main method to change font, called from each separate font mutator method
def change_column_font(column_index, change_type, arg, font, xf)
validate_workbook
ensure_cell_exists(0, column_index)
xf = workbook.register_new_font(font, xf)
cols.get_range(column_index).style_index = workbook.register_new_xf(xf)
sheet_data.rows.each { |row|
c = row && row[column_index]
c.font_switch(change_type, arg) unless c.nil?
}
end
def change_column_font_name(column_index = 0, font_name = 'Verdana')
xf = get_col_xf(column_index)
font = @workbook.fonts[xf.font_id].dup
font.set_name(font_name)
change_column_font(column_index, Worksheet::NAME, font_name, font, xf)
end
def change_column_font_size(column_index, font_size=10)
xf = get_col_xf(column_index)
font = @workbook.fonts[xf.font_id].dup
font.set_size(font_size)
change_column_font(column_index, Worksheet::SIZE, font_size, font, xf)
end
def change_column_font_color(column_index, font_color='000000')
Color.validate_color(font_color)
xf = get_col_xf(column_index)
font = @workbook.fonts[xf.font_id].dup
font.set_rgb_color(font_color)
change_column_font(column_index, Worksheet::COLOR, font_color, font, xf)
end
def change_column_italics(column_index, italicized = false)
xf = get_col_xf(column_index)
font = @workbook.fonts[xf.font_id].dup
font.set_italic(italicized)
change_column_font(column_index, Worksheet::ITALICS, italicized, font, xf)
end
def change_column_bold(column_index, bolded = false)
xf = get_col_xf(column_index)
font = @workbook.fonts[xf.font_id].dup
font.set_bold(bolded)
change_column_font(column_index, Worksheet::BOLD, bolded, font, xf)
end
def change_column_underline(column_index, underlined = false)
xf = get_col_xf(column_index)
font = @workbook.fonts[xf.font_id].dup
font.set_underline(underlined)
change_column_font(column_index, Worksheet::UNDERLINE, underlined, font, xf)
end
def change_column_strikethrough(column_index, struckthrough=false)
xf = get_col_xf(column_index)
font = @workbook.fonts[xf.font_id].dup
font.set_strikethrough(struckthrough)
change_column_font(column_index, Worksheet::STRIKETHROUGH, struckthrough, font, xf)
end
def change_column_horizontal_alignment(column_index, alignment = 'center')
change_column_alignment(column_index) { |a| a.horizontal = alignment }
end
def change_column_vertical_alignment(column_index, alignment = 'center')
change_column_alignment(column_index) { |a| a.vertical = alignment }
end
def change_column_border(column_index, direction, weight)
validate_workbook
ensure_cell_exists(0, column_index)
cols.get_range(column_index).style_index = @workbook.modify_border(get_col_style(column_index), direction, weight)
sheet_data.rows.each { |row|
next if row.nil?
c = row.cells[column_index]
next if c.nil?
c.change_border(direction, weight)
}
end
def change_column_border_color(column_index, direction, color)
validate_workbook
ensure_cell_exists(0, column_index)
Color.validate_color(color)
cols.get_range(column_index).style_index = @workbook.modify_border_color(get_col_style(column_index), direction, color)
sheet_data.rows.each { |row|
c = row.cells[column_index]
c.change_border_color(direction, color) unless c.nil?
}
end
def change_row_alignment(row, &block)
validate_workbook
validate_nonnegative(row)
ensure_cell_exists(row)
sheet_data.rows[row].style_index = @workbook.modify_alignment(get_row_style(row), &block)
sheet_data[row].cells.each { |c|
next if c.nil?
c.style_index = @workbook.modify_alignment(c.style_index, &block)
}
end
def change_column_alignment(column_index, &block)
validate_workbook
ensure_cell_exists(0, column_index)
cols.get_range(column_index).style_index = @workbook.modify_alignment(get_col_style(column_index), &block)
# Excel gets confused if width is not explicitly set for a column that had alignment changes
change_column_width(column_index) if get_column_width_raw(column_index).nil?
sheet_data.rows.each { |row|
next if row.nil?
c = row[column_index]
next if c.nil?
c.style_index = @workbook.modify_alignment(c.style_index, &block)
}
end
# Merges cells within a rectangular area
def merge_cells(start_row, start_col, end_row, end_col)
validate_workbook
self.merged_cells ||= RubyXL::MergedCells.new
# TODO: add validation to make sure ranges are not intersecting with existing ones
merged_cells << RubyXL::MergedCell.new(:ref => RubyXL::Reference.new(start_row, end_row, start_col, end_col))
end
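# e.g. merging the A1:B2 area of a hypothetical sheet (rows and columns are zero-based):
#
#   sheet.merge_cells(0, 0, 1, 1)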
end
|
kmuto/review | lib/review/makerhelper.rb | ReVIEW.MakerHelper.copy_images_to_dir | ruby | def copy_images_to_dir(from_dir, to_dir, options = {})
  image_files = []
  Dir.open(from_dir) do |dir|
    dir.each do |fname|
      next if fname =~ /^\./
      if FileTest.directory?("#{from_dir}/#{fname}")
        image_files += copy_images_to_dir("#{from_dir}/#{fname}", "#{to_dir}/#{fname}", options)
      else
        FileUtils.mkdir_p(to_dir) unless File.exist?(to_dir)
        is_converted = false
        (options[:convert] || {}).each do |orig_type, conv_type|
          next unless /\.#{orig_type}$/ =~ fname
          is_converted = system("convert #{from_dir}/#{fname} #{to_dir}/#{fname}.#{conv_type}")
          image_files << "#{from_dir}/#{fname}.#{conv_type}"
        end
        exts = options[:exts] || %w[png gif jpg jpeg svg pdf eps ai tif psd]
        exts_str = exts.join('|')
        if !is_converted && fname =~ /\.(#{exts_str})$/i
          FileUtils.cp "#{from_dir}/#{fname}", to_dir
          image_files << "#{from_dir}/#{fname}"
        end
      end
    end
  end
  image_files
end | Copy image files under from_dir to to_dir recursively
==== Args
from_dir :: path to the directory which has image files to be copied
to_dir :: path to the directory to which the image files are copied
options :: used to specify optional operations during copy
==== Returns
list of image files
==== Options
:convert :: Conversion rule
==== Examples
copy_images_to_dir("/path/to/foo", "/path/to/bar", :convert => {:eps => :png})
Image files are copied recursively, and each '.eps' file is converted into '.eps.png' | train | https://github.com/kmuto/review/blob/77d1273e671663f05db2992281fd891b776badf0/lib/review/makerhelper.rb#L37-L66 | module MakerHelper
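# In addition to the :convert rule shown in the doc above, the copy loop honours an :exts option
# (the list of extensions that get copied). A hedged sketch with hypothetical directories:
#
#   ReVIEW::MakerHelper.copy_images_to_dir('images', 'built/images', :exts => %w[png jpg svg])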
# Return review/bin directory
def bindir
Pathname.new("#{Pathname.new(__FILE__).realpath.dirname}/../../bin").realpath
end
module_function :bindir
# Copy image files under from_dir to to_dir recursively
# ==== Args
# from_dir :: path to the directory which has image files to be copied
# to_dir :: path to the directory to which the image files are copied
# options :: used to specify optional operations during copy
# ==== Returns
# list of image files
# ==== Options
# :convert :: Conversion rule
# ==== Examples
#
# copy_images_to_dir("/path/to/foo", "/path/to/bar", :convert => {:eps => :png})
#
# Image files are copied recursively, and each '.eps' file is converted into '.eps.png'
#
module_function :copy_images_to_dir
def cleanup_mathimg
math_dir = "./#{@config['imagedir']}/_review_math"
if @config['imgmath'] && Dir.exist?(math_dir)
FileUtils.rm_rf(math_dir)
end
end
def default_imgmath_preamble
<<-EOB
\\documentclass[uplatex,a3paper,landscape]{jsarticle}
\\usepackage[deluxe,uplatex]{otf}
\\usepackage[T1]{fontenc}
\\usepackage{textcomp}
\\usepackage{lmodern}
\\usepackage[dvipdfmx]{graphicx}
\\usepackage[dvipdfmx,table]{xcolor}
\\usepackage[utf8]{inputenc}
\\usepackage{ascmac}
\\usepackage{float}
\\usepackage{alltt}
\\usepackage{amsmath}
\\usepackage{amssymb}
\\usepackage{amsfonts}
\\usepackage{anyfontsize}
\\usepackage{bm}
\\pagestyle{empty}
% \\setpaperwidth{1000mm}
EOB
end
def make_math_images(math_dir)
fontsize = @config['imgmath_options']['fontsize'].to_f
lineheight = @config['imgmath_options']['lineheight'].to_f
texsrc = default_imgmath_preamble
if @config['imgmath_options']['preamble_file'] && File.readable?(@config['imgmath_options']['preamble_file'])
texsrc = File.read(@config['imgmath_options']['preamble_file'])
end
texsrc << <<-EOB
\\begin{document}
\\fontsize{#{fontsize}}{#{lineheight}}\\selectfont
\\input{__IMGMATH_BODY__}
\\end{document}
EOB
math_dir = File.realpath(math_dir)
Dir.mktmpdir do |tmpdir|
FileUtils.cp([File.join(math_dir, '__IMGMATH_BODY__.tex'),
File.join(math_dir, '__IMGMATH_BODY__.map')],
tmpdir)
tex_path = File.join(tmpdir, '__IMGMATH__.tex')
File.write(tex_path, texsrc)
begin
case @config['imgmath_options']['converter']
when 'pdfcrop'
make_math_images_pdfcrop(tmpdir, tex_path, math_dir)
when 'dvipng'
make_math_images_dvipng(tmpdir, tex_path, math_dir)
else
error "unknown math converter error. imgmath_options/converter parameter should be 'pdfcrop' or 'dvipng'."
end
rescue CompileError
FileUtils.cp([tex_path,
File.join(File.dirname(tex_path), '__IMGMATH__.log')],
math_dir)
error "LaTeX math compile error. See #{math_dir}/__IMGMATH__.log for details."
end
end
FileUtils.rm_f([File.join(math_dir, '__IMGMATH_BODY__.tex'),
File.join(math_dir, '__IMGMATH_BODY__.map')])
end
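# The converter branch above is driven by the project configuration; a hypothetical set of values
# as read through @config would look like:
#
#   @config['imgmath']                          # => true (math images are generated at all)
#   @config['imgmath_options']['converter']     # => 'pdfcrop' or 'dvipng'
#   @config['imgmath_options']['format']        # => 'png'
#   @config['imgmath_options']['fontsize']      # => 10.0
#   @config['imgmath_options']['lineheight']    # => 12.0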
def make_math_images_pdfcrop(dir, tex_path, math_dir)
Dir.chdir(dir) do
dvi_path = '__IMGMATH__.dvi'
pdf_path = '__IMGMATH__.pdf'
out, status = Open3.capture2e(*[@config['texcommand'], @config['texoptions'].shellsplit, tex_path].flatten.compact)
if !status.success? || (!File.exist?(dvi_path) && !File.exist?(pdf_path))
raise CompileError
end
if File.exist?(dvi_path)
out, status = Open3.capture2e(*[@config['dvicommand'], @config['dvioptions'].shellsplit, dvi_path].flatten.compact)
if !status.success? || !File.exist?(pdf_path)
warn "error in #{@config['dvicommand']}. Error log:\n#{out}"
raise CompileError
end
end
args = @config['imgmath_options']['pdfcrop_cmd'].shellsplit
args.map! do |m|
m.sub('%i', pdf_path).
sub('%o', '__IMGMATH__pdfcrop.pdf')
end
out, status = Open3.capture2e(*args)
unless status.success?
warn "error in pdfcrop. Error log:\n#{out}"
raise CompileError
end
pdf_path = '__IMGMATH__pdfcrop.pdf'
pdf_path2 = pdf_path
File.open('__IMGMATH_BODY__.map') do |f|
page = 0
f.each_line do |key|
page += 1
key.chomp!
if File.exist?(File.join(math_dir, "_gen_#{key}.#{@config['imgmath_options']['format']}"))
# made already
next
end
if @config['imgmath_options']['extract_singlepage']
# if extract_singlepage = true, split each page
args = @config['imgmath_options']['pdfextract_cmd'].shellsplit
args.map! do |m|
m.sub('%i', pdf_path).
sub('%o', "__IMGMATH__pdfcrop_p#{page}.pdf").
sub('%O', "__IMGMATH__pdfcrop_p#{page}").
sub('%p', page.to_s)
end
out, status = Open3.capture2e(*args)
unless status.success?
warn "error in pdf extracting. Error log:\n#{out}"
raise CompileError
end
pdf_path2 = "__IMGMATH__pdfcrop_p#{page}.pdf"
end
args = @config['imgmath_options']['pdfcrop_pixelize_cmd'].shellsplit
args.map! do |m|
m.sub('%i', pdf_path2).
sub('%o', File.join(math_dir, "_gen_#{key}.#{@config['imgmath_options']['format']}")).
sub('%O', File.join(math_dir, "_gen_#{key}")).
sub('%p', page.to_s)
end
out, status = Open3.capture2e(*args)
unless status.success?
warn "error in pdf pixelizing. Error log:\n#{out}"
raise CompileError
end
end
end
end
end
def make_math_images_dvipng(dir, tex_path, math_dir)
Dir.chdir(dir) do
dvi_path = '__IMGMATH__.dvi'
out, status = Open3.capture2e(*[@config['texcommand'], @config['texoptions'].shellsplit, tex_path].flatten.compact)
if !status.success? || !File.exist?(dvi_path)
raise CompileError
end
File.open('__IMGMATH_BODY__.map') do |f|
page = 0
f.each_line do |key|
page += 1
key.chomp!
args = @config['imgmath_options']['dvipng_cmd'].shellsplit
args.map! do |m|
m.sub('%i', dvi_path).
sub('%o', File.join(math_dir, "_gen_#{key}.#{@config['imgmath_options']['format']}")).
sub('%O', File.join(math_dir, "_gen_#{key}")).
sub('%p', page.to_s)
end
out, status = Open3.capture2e(*args)
unless status.success?
warn "error in dvipng. Error log:\n#{out}"
raise CompileError
end
end
end
end
end
end
|
enkessler/cuke_modeler | lib/cuke_modeler/models/example.rb | CukeModeler.Example.to_s | ruby | def to_s
  text = ''
  text << tag_output_string + "\n" unless tags.empty?
  text << "#{@keyword}:#{name_output_string}"
  text << "\n" + description_output_string unless (description.nil? || description.empty?)
  text << "\n" unless (rows.empty? || description.nil? || description.empty?)
  text << "\n" + parameters_output_string if parameter_row
  text << "\n" + rows_output_string unless argument_rows.empty?
  text
end | Returns a string representation of this model. For an example model,
this will be Gherkin text that is equivalent to the example being modeled. | train | https://github.com/enkessler/cuke_modeler/blob/6c4c05a719741d7fdaad218432bfa76eaa47b0cb/lib/cuke_modeler/models/example.rb#L104-L115 | class Example < Model
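# A hedged round-trip sketch of the Gherkin output described above (table spacing is normalized
# by the output helpers further down):
#
#   example = CukeModeler::Example.new("Examples:\n|param|\n|value|")
#   puts example.to_s   # => "Examples:" followed by the aligned parameter and value rows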
include Parsing
include Parsed
include Named
include Described
include Sourceable
include Taggable
# The example's keyword
attr_accessor :keyword
# The row models in the example table
attr_accessor :rows
# Creates a new Example object and, if *source_text* is provided,
# populates the object.
def initialize(source_text = nil)
@tags = []
@rows = []
super(source_text)
if source_text
parsed_example_data = parse_source(source_text)
populate_example(self, parsed_example_data)
end
end
# Adds a row to the example table. The row can be given as a Hash of
# parameters and their corresponding values or as an Array of values which
# will be assigned in order.
def add_row(row)
raise('Cannot add a row. No parameters have been set.') if rows.empty?
# A quick 'deep clone' so that the input isn't modified
row = Marshal::load(Marshal.dump(row))
case
when row.is_a?(Array)
# 'stringify' input
row.collect! { |value| value.to_s }
@rows << Row.new("|#{row.join('|')}|")
when row.is_a?(Hash)
# 'stringify' input
row = row.inject({}) { |hash, (key, value)| hash[key.to_s] = value.to_s; hash }
@rows << Row.new("|#{ordered_row_values(row).join('|')}|")
else
raise(ArgumentError, "Can only add row from a Hash or an Array but received #{row.class}")
end
end
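# Both input styles accepted above, sketched against a hypothetical example model whose
# parameter row is "| param1 | param2 |":
#
#   example.add_row(['value1', 'value2'])
#   example.add_row('param2' => 'b', 'param1' => 'a')   # hash values are reordered to match the parameters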
# Removes a row from the example table. The row can be given as a Hash of
# parameters and their corresponding values or as an Array of values
# which will be assigned in order.
def remove_row(row_removed)
return unless argument_rows
case
when row_removed.is_a?(Array)
location = argument_rows.index { |row| row.cells.collect { |cell| cell.value } == row_removed.collect { |value| value.strip } }
when row_removed.is_a?(Hash)
# Note: the hash value order has to be manually calculated because Ruby 1.8.7 does not have ordered
# hash keys. Alternatively, the hash may have simply been built up 'willy nilly' by the user instead
# of being built up in order according to the parameter order.
location = argument_rows.index { |row| row.cells.collect { |cell| cell.value } == ordered_row_values(row_removed.each_value { |value| value.strip! }) }
else
raise(ArgumentError, "Can only remove row from a Hash or an Array but received #{row_removed.class}")
end
@rows.delete_at(location + 1) if location
end
# The argument rows in the example table
def argument_rows
rows[1..rows.count] || []
end
# The parameter row for the example table
def parameter_row
rows.first
end
# Returns the parameters of the example table
def parameters
parameter_row ? parameter_row.cells.collect { |cell| cell.value } : []
end
# Returns the model objects that belong to this model.
def children
rows + tags
end
# Returns a string representation of this model. For an example model,
# this will be Gherkin text that is equivalent to the example being modeled.
private
def parse_source(source_text)
base_file_string = "# language: #{Parsing.dialect}\n#{dialect_feature_keyword}: Fake feature to parse\n#{dialect_outline_keyword}:\n#{dialect_step_keyword} fake step\n"
source_text = base_file_string + source_text
parsed_file = Parsing::parse_text(source_text, 'cuke_modeler_stand_alone_example.feature')
parsed_file.first['feature']['elements'].first['examples'].first
end
def determine_buffer_size(index)
rows.collect { |row| row.cells[index].to_s.length }.max || 0
end
def parameters_output_string
text = ''
unless parameter_row.nil?
text << " |"
parameter_row.cells.count.times { |index| text << " #{string_for(parameter_row.cells, index)} |" }
end
text
end
def rows_output_string
text = ''
unless argument_rows.empty?
argument_rows.each do |row|
text << " |"
row.cells.count.times { |index| text << " #{string_for(row.cells, index)} |" }
text << "\n"
end
text.chomp!
end
text
end
def string_for(cells, index)
cells[index] ? cells[index].to_s.ljust(determine_buffer_size(index)) : ''
end
def ordered_row_values(row_hash)
parameter_row.cells.collect { |cell| cell.value }.collect { |parameter| row_hash[parameter] }
end
end
|
kontena/kontena | agent/lib/kontena/observable.rb | Kontena.Observable.set_and_notify | ruby | def set_and_notify(value)
  @mutex.synchronize do
    @value = value
    @observers.each do |observer, persistent|
      if !observer.alive?
        debug { "dead: #{observer}" }
        @observers.delete(observer)
      elsif !persistent
        debug { "notify and drop: #{observer} <- #{value}" }
        observer << Message.new(observer, self, value)
        @observers.delete(observer)
      else
        debug { "notify: #{observer} <- #{value}" }
        observer << Message.new(observer, self, value)
      end
    end
  end
end | Send Message with given value to each Kontena::Observer that is still alive.
Future calls to `add_observer` will also return the same value.
Drops any observers that are dead or non-persistent.
TODO: automatically clean out all observers when the observable crashes?
@param value [Object, nil, Exception] | train | https://github.com/kontena/kontena/blob/5cb5b4457895985231ac88e78c8cbc5a8ffb5ec7/agent/lib/kontena/observable.rb#L185-L209 | class Observable
require_relative './observable/registry'
# @return [Celluloid::Proxy::Cell<Kontena::Observable::Registry>] system registry actor
def self.registry
Celluloid::Actor[:observable_registry] || fail(Celluloid::DeadActorError, "Observable registry actor not running")
end
include Kontena::Logging
attr_reader :logging_prefix # customize Kontena::Logging#logging_prefix by instance
class Message
attr_reader :observer, :observable, :value
# @param observer [Kontena::Observer]
# @param observable [Kontena::Observable]
# @param value [Object, nil, Exception]
def initialize(observer, observable, value)
@observer = observer
@observable = observable
@value = value
end
end
# mixin for Celluloid actor classes
module Helper
# Create a new Observable using the including class name as the subject.
# Register the Observable with the Kontena::Observable::Registry.
# Links to the registry to crash the Observable if the owning actor crashes.
#
# @return [Kontena::Observable]
def observable
return @observable if @observable
# the register can suspend this task, so other calls might get the observable before it gets registered
# shouldn't be a problem, unless the register/linking somehow fails and crashes this actor without crashing the
# observable?
@observable = Kontena::Observable.new(self.class.name)
observable_registry = Kontena::Observable.registry
observable_registry.register(@observable, self.current_actor)
self.links << observable_registry # registry monitors owner
@observable
end
end
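# A minimal owner sketch (hypothetical actor class) built on the Helper mixin above:
#
#   class NodeInfoWorker
#     include Celluloid
#     include Kontena::Observable::Helper
#
#     def publish(info)
#       observable.update(info)   # propagates info to any subscribed Kontena::Observers
#     end
#   end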
# @param subject [Object] used to identify the Observable for logging purposes
def initialize(subject = nil)
@subject = subject
@mutex = Thread::Mutex.new
@observers = {}
@value = nil
# include the subject (owning actor class, other resource) in log messages
@logging_prefix = "#{self}"
end
# @return [String]
def to_s
"#{self.class.name}<#{@subject}>"
end
# @return [Object, nil] last updated value, or nil if not ready?
def get
@value
end
# Observable has been updated and has not been reset since. Note that a crashed observable
# also counts as ready, because its value is the stored exception.
#
# @return [Boolean]
def ready?
!!@value
end
# Observable has an exception set.
#
# Calls to `add_observer` will raise.
#
def crashed?
Exception === @value
end
# Observable has observers.
#
# NOTE: dead observers will only get cleaned out on the next update
#
# @return [Boolean]
def observed?
!@observers.empty?
end
# The Observable has a value. Propagate it to any observers.
#
# This will notify any Observers, causing them to yield/return if ready.
#
# The value must be immutable and threadsafe: it must remain valid for use by other threads
# both after this update, and after any other future updates. Do not send a mutable object
# that gets invalidated in between updates.
#
# TODO: automatically freeze the value?
#
# @param value [Object]
# @raise [RuntimeError] Observable crashed
# @raise [ArgumentError] Update with nil value
def update(value)
raise RuntimeError, "Observable crashed: #{@value}" if crashed?
raise ArgumentError, "Update with nil value" if value.nil?
debug { "update: #{value}" }
set_and_notify(value)
end
# Reset the observable value back into the initialized state.
# This will notify any Observers, causing them to wait until we update again.
#
def reset
debug { "reset" }
set_and_notify(nil)
end
# @param reason [Exception]
def crash(reason)
raise ArgumentError, "Crash with non-exception: #{reason.class.name}" unless Exception === reason
debug { "crash: #{reason}" }
set_and_notify(reason)
end
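# Lifecycle sketch for the three state changes above (values are illustrative):
#
#   observable.update(:ready)                    # observers receive a Message with :ready
#   observable.reset                             # observers receive a Message with nil; ready? is false again
#   observable.crash(RuntimeError.new('boom'))   # later add_observer calls raise the stored exception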
# Observer is observing this Observable's value.
# Raises if observable has crashed.
# Returns current value, or nil if not yet ready.
# Subscribes observer for updates if persistent, or if not yet ready (returning nil).
#
# The observer will be dropped once no longer alive?.
#
# @param observer [Kontena::Observer]
# @param persistent [Boolean] false => either return immediate value, or return nil and subscribe for a single notification
# @raise [Exception]
# @return [Object, nil] current value if ready
def add_observer(observer, persistent: true)
@mutex.synchronize do
if !@value
# subscribe for future updates, no value to return
@observers[observer] = persistent
elsif Exception === @value
# raise with immediate value, no future updates to subscribe to
raise @value
elsif persistent
# return with immediate value, also subscribe for future updates
@observers[observer] = persistent
else
# return with immediate value, do not subscribe for future updates
end
return @value
end
end
# Send Message with given value to each Kontena::Observer that is still alive.
# Future calls to `add_observer` will also return the same value.
# Drops any observers that are dead or non-persistent.
#
# TODO: automatically clean out all observers when the observable crashes?
#
# @param value [Object, nil, Exception]
end
|
sds/haml-lint | lib/haml_lint/cli.rb | HamlLint.CLI.configure_logger | ruby | def configure_logger(options)
  log.color_enabled = options.fetch(:color, log.tty?)
  log.summary_enabled = options.fetch(:summary, true)
end | Given the provided options, configure the logger.
@return [void] | train | https://github.com/sds/haml-lint/blob/024c773667e54cf88db938c2b368977005d70ee8/lib/haml_lint/cli.rb#L60-L63 | class CLI # rubocop:disable Metrics/ClassLength
# Create a CLI that outputs to the specified logger.
#
# @param logger [HamlLint::Logger]
def initialize(logger)
@log = logger
end
# Parses the given command-line arguments and executes appropriate logic
# based on those arguments.
#
# @param args [Array<String>] command line arguments
# @return [Integer] exit status code
def run(args)
options = HamlLint::Options.new.parse(args)
act_on_options(options)
rescue StandardError => e
handle_exception(e)
end
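# A hypothetical invocation mirroring a typical executable, assuming HamlLint::Logger wraps an
# output stream:
#
#   logger = HamlLint::Logger.new(STDOUT)
#   exit HamlLint::CLI.new(logger).run(ARGV)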
private
attr_reader :log
# Given the provided options, execute the appropriate command.
#
# @return [Integer] exit status code
def act_on_options(options)
configure_logger(options)
if options[:help]
print_help(options)
Sysexits::EX_OK
elsif options[:version] || options[:verbose_version]
print_version(options)
Sysexits::EX_OK
elsif options[:show_linters]
print_available_linters
Sysexits::EX_OK
elsif options[:show_reporters]
print_available_reporters
Sysexits::EX_OK
else
scan_for_lints(options)
end
end
# Given the provided options, configure the logger.
#
# @return [void]
# Outputs a message and returns an appropriate error code for the specified
# exception.
def handle_exception(exception)
case exception
when HamlLint::Exceptions::ConfigurationError
log.error exception.message
Sysexits::EX_CONFIG
when HamlLint::Exceptions::InvalidCLIOption
log.error exception.message
log.log "Run `#{APP_NAME}` --help for usage documentation"
Sysexits::EX_USAGE
when HamlLint::Exceptions::InvalidFilePath
log.error exception.message
Sysexits::EX_NOINPUT
when HamlLint::Exceptions::NoLintersError
log.error exception.message
Sysexits::EX_NOINPUT
else
print_unexpected_exception(exception)
Sysexits::EX_SOFTWARE
end
end
# Instantiates a new reporter based on the options.
#
# @param options [HamlLint::Configuration]
# @option options [true, nil] :auto_gen_config whether to use the config
# generating reporter
# @option options [Class] :reporter the class of reporter to use
# @return [HamlLint::Reporter]
def reporter_from_options(options)
if options[:auto_gen_config]
HamlLint::Reporter::DisabledConfigReporter.new(log, limit: options[:auto_gen_exclude_limit] || 15) # rubocop:disable Metrics/LineLength
else
options.fetch(:reporter, HamlLint::Reporter::DefaultReporter).new(log)
end
end
# Scans the files specified by the given options for lints.
#
# @return [Integer] exit status code
def scan_for_lints(options)
reporter = reporter_from_options(options)
report = Runner.new.run(options.merge(reporter: reporter))
report.display
report.failed? ? Sysexits::EX_DATAERR : Sysexits::EX_OK
end
# Outputs a list of all currently available linters.
def print_available_linters
log.info 'Available linters:'
linter_names = HamlLint::LinterRegistry.linters.map do |linter|
linter.name.split('::').last
end
linter_names.sort.each do |linter_name|
log.log " - #{linter_name}"
end
end
# Outputs a list of currently available reporters.
def print_available_reporters
log.info 'Available reporters:'
HamlLint::Reporter.available.map(&:cli_name).sort.each do |reporter_name|
log.log " - #{reporter_name}"
end
end
# Outputs help documentation.
def print_help(options)
log.log options[:help]
end
# Outputs the application name and version.
def print_version(options)
log.log "#{HamlLint::APP_NAME} #{HamlLint::VERSION}"
if options[:verbose_version]
log.log "haml #{Gem.loaded_specs['haml'].version}"
log.log "rubocop #{Gem.loaded_specs['rubocop'].version}"
log.log RUBY_DESCRIPTION
end
end
# Outputs the backtrace of an exception with instructions on how to report
# the issue.
def print_unexpected_exception(exception) # rubocop:disable Metrics/AbcSize
log.bold_error exception.message
log.error exception.backtrace.join("\n")
log.warning 'Report this bug at ', false
log.info HamlLint::BUG_REPORT_URL
log.newline
log.success 'To help fix this issue, please include:'
log.log '- The above stack trace'
log.log '- Haml-Lint version: ', false
log.info HamlLint::VERSION
log.log '- Haml version: ', false
log.info Gem.loaded_specs['haml'].version
log.log '- RuboCop version: ', false
log.info Gem.loaded_specs['rubocop'].version
log.log '- Ruby version: ', false
log.info RUBY_VERSION
end
end
|
caruby/core | lib/caruby/database.rb | CaRuby.Database.print_operations | ruby | def print_operations
  ops = @operations.reverse.map do |op|
    attr_s = " #{op.attribute}" if op.attribute
    "#{op.type.to_s.capitalize_first} #{op.subject.qp}#{attr_s}"
  end
  ops.qp
end | Returns the current database operation stack as a String. | train | https://github.com/caruby/core/blob/a682dc57c6fa31aef765cdd206ed3d4b4c289c60/lib/caruby/database.rb#L289-L295 | class Database
include Reader, Writer, Persistifier
# The application and database connection options.
ACCESS_OPTS = [
[:user, '-u USER', '--user USER', 'the application login user'],
[:password, '-p PSWD', '--password PSWD', 'the application login password'],
[:host, '--host HOST', 'the application host name'],
[:port, '--port PORT', 'the application port number'],
[:classpath, '--classpath PATH', 'the application client classpath']
]
attr_reader :operations
# @return [PersistenceService] the services used by this database
attr_reader :persistence_services
# Creates a new Database with the specified service name and options.
#
# @quirk caCORE obtaining a caCORE session instance mysteriously depends on referencing the
# application service first. Therefore, the default persistence service appService method must
# be called after it is instantiated and before the session is instantiated. However, when
# the appService method is called just before a session is acquired, then this call corrupts
# the object state of existing objects.
#
# Specifically, when a CaTissue::CollectionProtocol is created which references a
# CaTissue::CollectionProtocolRegistration which in turn references a CaTissue::Participant,
# then the call to PersistenceService.appService replaces the CaTissue::Participant
# reference with a difference CaTissue::Participant instance. The work-around for
# this extremely bizarre bug is to call appService immediately after instantiating
# the default persistence service.
#
# This bug might be a low-level JRuby-Java-caCORE-Hibernate confusion where something in
# caCORE stomps on an existing JRuby object graph. To reproduce, move the appService call
# to the start_session method and run the +PSBIN::MigrationTest+ biopsy save test case.
#
# @param [String] service_name the name of the default {PersistenceService}
# @param [{Symbol => String}, nil] opts the access options, or nil if specified as a block
# @option opts [String] :host application service host name
# @option opts [String] :login application service login user
# @option opts [String] :password application service login password
# @yield the access options defined by a block rather than a parameter
# @example
# Database.new(:user => 'perdita', :password => 'changeMe')
def initialize(service_name, opts=nil)
super()
# The options can be defined in a block.
opts ||= yield if block_given?
if opts.nil? then raise ArgumentError.new("Missing required database access properties") end
@user = Options.get(:user, opts)
@password = Options.get(:password, opts)
host = Options.get(:host, opts)
port = Options.get(:port, opts)
# The class => service hash is populated with the default service.
@def_persist_svc = PersistenceService.new(service_name, :host => host, :port => port)
@persistence_services = [@def_persist_svc].to_set
@cls_svc_hash = Hash.new(@def_persist_svc)
# the create/update nested operations
@operations = []
# the objects for which exists? is unsuccessful in the context of a nested operation
@transients = Set.new
end
# @return [Boolean] whether there is an active session
def open?
!!@session
end
# @return [Boolean] whether this database is not {#open?}
def closed?
not open?
end
# Calls the block given to this method with this database as an argument, and closes the
# database when done.
#
# @param [String, nil] user the application login user
# @param [String, nil] password the application login password
# @yield [database] the operation to perform on the database
# @yieldparam [Database] database self
def open(user=nil, password=nil)
raise ArgumentError.new("Database open requires an execution block") unless block_given?
raise DatabaseError.new("The caRuby application database is already in use.") if open?
# reset the execution timers
persistence_services.each { |svc| svc.timer.reset }
# Start the session.
start_session(user, password)
# Call the block and close when done.
yield(self) ensure close
end
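# Combined with the @example in the constructor documentation above, a hedged session sketch
# (the service name is hypothetical):
#
#   db = CaRuby::Database.new(:example_svc, :user => 'perdita', :password => 'changeMe')
#   db.open { |database| puts database.execution_time }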
# @return [Numeric] the execution time in seconds spent since the last open
def execution_time
persistence_services.inject(0) do |total, svc|
st = svc.timer.elapsed
total + st
end
end
# Returns the PersistanceService to use for the given {Jinx::Resource} class.
# This base method always returns the standard application service.
# Subclasses can override for specialized services. A session is started
# on demand if necessary.
#
# @param [Class] klass the domain object class
# @return [PersistanceService] the corresponding service
def persistence_service(klass)
unless Class === klass then
raise ArgumentError.new("#{self} persistence_service argument is not a Class: {#klass.qp}")
end
@def_persist_svc
end
# Adds the given service to this database.
#
# @param [PersistenceService] service the service to add
def add_persistence_service(service)
@persistence_services << service
end
# Imports the caCORE +ClientSession+ class on demand.
def self.const_missing(sym)
if sym == :ClientSession then
java_import Java::gov.nih.nci.system.comm.client.ClientSession
else
super
end
end
alias :to_s :print_class_and_id
alias :inspect :to_s
## Utility classes and methods, used by Query and Store mix-ins ##
private
# Releases database resources. This method should be called when database interaction
# is completed.
def close
return if @session.nil?
begin
@session.terminate_session
rescue Exception => e
logger.error("Session termination unsuccessful - #{e.message}")
end
# clear the cache
clear
logger.info("Disconnected from application server.")
@session = nil
end
# A mergeable autogenerated operation is recursively defined as:
# * a create of an object with auto-generated dependents
# * an update of an auto-generated dependent in the context of a mergeable autogenerated operation
#
# @return [Boolean] whether the innermost operation conforms to the above criterion
def mergeable_autogenerated_operation?
# the inner operation subject
inner = nil
@operations.reverse_each do |op|
if inner and op.subject != inner.owner then
# not a dependent
return false
end
if op.type == :create then
# innermost or owner create
return (not op.subject.class.autogenerated_dependent_attributes.empty?)
elsif op.type != :update then
# not a save
return false
end
# iterate to the scoping operation
inner = op.subject
end
false
end
# Performs the operation given by the given op symbol on obj by calling the block given to this method.
# Lazy loading is suspended during the operation.
#
# @param [:find, :query, :create, :update, :delete] op the database operation type
# @param [Resource] obj the domain object on which the operation is performed
# @param opts (#see Operation#initialize)
# @yield the database operation block
# @return the result of calling the operation block
def perform(op, obj, opts=nil, &block)
op_s = op.to_s.capitalize_first
pa = Options.get(:attribute, opts)
attr_s = " #{pa}" if pa
ag_s = " autogenerated" if Options.get(:autogenerated, opts)
ctxt_s = " in context #{print_operations}" unless @operations.empty?
logger.info(">> #{op_s}#{ag_s} #{obj.pp_s(:single_line)}#{attr_s}#{ctxt_s}...")
# Clear the error flag.
@error = nil
# Push the operation on the nested operation stack.
@operations.push(Operation.new(op, obj, opts))
begin
# perform the operation
result = perform_operation(&block)
rescue Exception => e
# If the current operation is the immediate cause, then print the
# error to the log.
if @error.nil? then
msg = "Error performing #{op} on #{obj}:\n#{e.message}\n#{obj.dump}\n#{e.backtrace.qp}"
logger.error(msg)
@error = e
end
raise e
ensure
# the operation is done
@operations.pop
# If this is a top-level operation, then clear the transient set.
if @operations.empty? then @transients.clear end
end
logger.info("<< Completed #{obj.qp}#{attr_s} #{op}.")
result
end
# Calls the given block with the lazy loader suspended.
# The database is opened, if necessary.
#
# @yield the database operation block
# @return the result of calling the operation block
def perform_operation(&block)
if closed? then
open { perform_operation(&block) }
else
@lazy_loader.suspend { yield }
end
end
def each_persistence_service(&block)
ObjectSpace.each_object(PersistenceService, &block)
end
# Initializes the default application service.
def start_session(user=nil, password=nil)
user ||= @user
password ||= @password
if user.nil? then raise DatabaseError.new('The caRuby application is missing the login user') end
if password.nil? then raise DatabaseError.new('The caRuby application is missing the login password') end
@session = ClientSession.instance
connect(user, password)
end
# Returns the current database operation stack as a String.
# Connects to the database.
def connect(user, password)
logger.debug { "Connecting to application server with login id #{user}..." }
begin
@session.start_session(user, password)
rescue Exception => e
logger.error("Login of #{user} with password #{password} was unsuccessful - #{e.message}")
raise e
end
logger.info("Connected to application server.")
end
end
|
dicom/rtp-connect | lib/rtp-connect/plan_to_dcm.rb | RTP.Plan.create_beam_limiting_device_positions | ruby | def create_beam_limiting_device_positions(cp_item, cp, options={})
  dp_seq = DICOM::Sequence.new('300A,011A', :parent => cp_item)
  # The ASYMX item ('backup jaws') doesn't exist on all models:
  if ['SYM', 'ASY'].include?(cp.parent.field_x_mode.upcase)
    dp_item_x = create_asym_item(cp, dp_seq, axis=:x, options)
  end
  # Always create one ASYMY item:
  dp_item_y = create_asym_item(cp, dp_seq, axis=:y, options)
  # MLCX:
  dp_item_mlcx = DICOM::Item.new(:parent => dp_seq)
  # RT Beam Limiting Device Type:
  DICOM::Element.new('300A,00B8', "MLCX", :parent => dp_item_mlcx)
  # Leaf/Jaw Positions:
  DICOM::Element.new('300A,011C', cp.dcm_mlc_positions(options[:scale]), :parent => dp_item_mlcx)
  dp_seq
end | Creates a beam limiting device positions sequence in the given DICOM object.
@param [DICOM::Item] cp_item the DICOM control point item in which to insert the sequence
@param [ControlPoint] cp the RTP control point to fetch device parameters from
@param [Hash] options the options to use (:scale is passed on when converting the jaw and MLC leaf positions)
@return [DICOM::Sequence] the constructed beam limiting device positions sequence | train | https://github.com/dicom/rtp-connect/blob/e23791970218a7087a0d798aa430acf36f79d758/lib/rtp-connect/plan_to_dcm.rb#L559-L574 | class Plan < Record
attr_accessor :current_gantry
attr_accessor :current_collimator
attr_accessor :current_couch_angle
attr_accessor :current_couch_pedestal
attr_accessor :current_couch_lateral
attr_accessor :current_couch_longitudinal
attr_accessor :current_couch_vertical
# Converts the Plan (and child) records to a
# DICOM::DObject of modality RTPLAN.
#
# @note Only photon plans have been tested.
# Electron beams may give an invalid DICOM file.
# Also note that, due to limitations in the RTP file format, some original
# values can not be recreated, like e.g. Study UID or Series UID.
# @param [Hash] options the options to use for creating the DICOM object
# @option options [Boolean] :dose_ref if set, Dose Reference & Referenced Dose Reference sequences will be included in the generated DICOM file
# @option options [String] :manufacturer the value used for the manufacturer tag (0008,0070) in the beam sequence
# @option options [String] :model the value used for the manufacturer's model name tag (0008,1090) in the beam sequence
# @option options [Symbol] :scale if set, relevant device parameters are converted from native readout format to IEC1217 (supported values are :elekta & :varian)
# @option options [String] :serial_number the value used for the device serial number tag (0018,1000) in the beam sequence
# @return [DICOM::DObject] the converted DICOM object
#
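# A hedged conversion sketch (file names are hypothetical):
#
#   plan = RTP::Plan.read('plan.rtp')
#   dcm = plan.to_dcm(:scale => :varian, :dose_ref => true)
#   dcm.write('rtplan.dcm')
#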
def to_dcm(options={})
#
# FIXME: This method is rather big, with a few sections of somewhat similar, repeating code.
# Refactoring and simplifying it at some stage might be a good idea.
#
require 'dicom'
original_level = DICOM.logger.level
DICOM.logger.level = Logger::FATAL
p = @prescriptions.first
# If no prescription is present, we are not going to be able to make a valid DICOM object:
logger.error("No Prescription Record present. Unable to build a valid RTPLAN DICOM object.") unless p
dcm = DICOM::DObject.new
#
# TOP LEVEL TAGS:
#
# Specific Character Set:
DICOM::Element.new('0008,0005', 'ISO_IR 100', :parent => dcm)
# Instance Creation Date
DICOM::Element.new('0008,0012', Time.now.strftime("%Y%m%d"), :parent => dcm)
# Instance Creation Time:
DICOM::Element.new('0008,0013', Time.now.strftime("%H%M%S"), :parent => dcm)
# SOP Class UID:
DICOM::Element.new('0008,0016', '1.2.840.10008.5.1.4.1.1.481.5', :parent => dcm)
# SOP Instance UID (if an original UID is not present, we make up a UID):
begin
sop_uid = p.fields.first.extended_field.original_plan_uid.empty? ? DICOM.generate_uid : p.fields.first.extended_field.original_plan_uid
rescue
sop_uid = DICOM.generate_uid
end
DICOM::Element.new('0008,0018', sop_uid, :parent => dcm)
# Study Date
DICOM::Element.new('0008,0020', Time.now.strftime("%Y%m%d"), :parent => dcm)
# Study Time:
DICOM::Element.new('0008,0030', Time.now.strftime("%H%M%S"), :parent => dcm)
# Accession Number:
DICOM::Element.new('0008,0050', '', :parent => dcm)
# Modality:
DICOM::Element.new('0008,0060', 'RTPLAN', :parent => dcm)
# Manufacturer:
DICOM::Element.new('0008,0070', 'rtp-connect', :parent => dcm)
# Referring Physician's Name:
DICOM::Element.new('0008,0090', "#{@md_last_name}^#{@md_first_name}^#{@md_middle_name}^^", :parent => dcm)
# Operator's Name:
DICOM::Element.new('0008,1070', "#{@author_last_name}^#{@author_first_name}^#{@author_middle_name}^^", :parent => dcm)
# Patient's Name:
DICOM::Element.new('0010,0010', "#{@patient_last_name}^#{@patient_first_name}^#{@patient_middle_name}^^", :parent => dcm)
# Patient ID:
DICOM::Element.new('0010,0020', @patient_id, :parent => dcm)
# Patient's Birth Date:
DICOM::Element.new('0010,0030', '', :parent => dcm)
# Patient's Sex:
DICOM::Element.new('0010,0040', '', :parent => dcm)
# Manufacturer's Model Name:
DICOM::Element.new('0008,1090', 'RTP-to-DICOM', :parent => dcm)
# Software Version(s):
DICOM::Element.new('0018,1020', "RubyRTP#{VERSION}", :parent => dcm)
# Study Instance UID:
DICOM::Element.new('0020,000D', DICOM.generate_uid, :parent => dcm)
# Series Instance UID:
DICOM::Element.new('0020,000E', DICOM.generate_uid, :parent => dcm)
# Study ID:
DICOM::Element.new('0020,0010', '1', :parent => dcm)
# Series Number:
DICOM::Element.new('0020,0011', '1', :parent => dcm)
# Frame of Reference UID (if an original UID is not present, we make up a UID):
begin
for_uid = p.site_setup.frame_of_ref_uid.empty? ? DICOM.generate_uid : p.site_setup.frame_of_ref_uid
rescue
for_uid = DICOM.generate_uid
end
DICOM::Element.new('0020,0052', for_uid, :parent => dcm)
# Position Reference Indicator:
DICOM::Element.new('0020,1040', '', :parent => dcm)
# RT Plan Label (max 16 characters):
plan_label = p ? p.rx_site_name[0..15] : @course_id
DICOM::Element.new('300A,0002', plan_label, :parent => dcm)
# RT Plan Name:
plan_name = p ? p.rx_site_name : @course_id
DICOM::Element.new('300A,0003', plan_name, :parent => dcm)
# RT Plan Description:
plan_desc = p ? p.technique : @diagnosis
DICOM::Element.new('300A,0004', plan_desc, :parent => dcm)
# RT Plan Date:
plan_date = @plan_date.empty? ? Time.now.strftime("%Y%m%d") : @plan_date
DICOM::Element.new('300A,0006', plan_date, :parent => dcm)
# RT Plan Time:
plan_time = @plan_time.empty? ? Time.now.strftime("%H%M%S") : @plan_time
DICOM::Element.new('300A,0007', plan_time, :parent => dcm)
# Approval Status:
DICOM::Element.new('300E,0002', 'UNAPPROVED', :parent => dcm)
#
# SEQUENCES:
#
# Tolerance Table Sequence:
if p && p.fields.first && !p.fields.first.tolerance_table.empty?
tt_seq = DICOM::Sequence.new('300A,0040', :parent => dcm)
tt_item = DICOM::Item.new(:parent => tt_seq)
# Tolerance Table Number:
DICOM::Element.new('300A,0042', p.fields.first.tolerance_table, :parent => tt_item)
end
# Structure set information:
if p && p.site_setup && !p.site_setup.structure_set_uid.empty?
#
# Referenced Structure Set Sequence:
#
ss_seq = DICOM::Sequence.new('300C,0060', :parent => dcm)
ss_item = DICOM::Item.new(:parent => ss_seq)
# Referenced SOP Class UID:
DICOM::Element.new('0008,1150', '1.2.840.10008.5.1.4.1.1.481.3', :parent => ss_item)
DICOM::Element.new('0008,1155', p.site_setup.structure_set_uid, :parent => ss_item)
# RT Plan Geometry:
DICOM::Element.new('300A,000C', 'PATIENT', :parent => dcm)
else
# RT Plan Geometry:
DICOM::Element.new('300A,000C', 'TREATMENT_DEVICE', :parent => dcm)
end
#
# Patient Setup Sequence:
#
ps_seq = DICOM::Sequence.new('300A,0180', :parent => dcm)
ps_item = DICOM::Item.new(:parent => ps_seq)
# Patient Position:
begin
pat_pos = p.site_setup.patient_orientation.empty? ? 'HFS' : p.site_setup.patient_orientation
rescue
pat_pos = 'HFS'
end
DICOM::Element.new('0018,5100', pat_pos, :parent => ps_item)
# Patient Setup Number:
DICOM::Element.new('300A,0182', '1', :parent => ps_item)
# Setup Technique (assume Isocentric):
DICOM::Element.new('300A,01B0', 'ISOCENTRIC', :parent => ps_item)
#
# Dose Reference Sequence:
#
create_dose_reference(dcm, plan_name) if options[:dose_ref]
#
# Fraction Group Sequence:
#
fg_seq = DICOM::Sequence.new('300A,0070', :parent => dcm)
fg_item = DICOM::Item.new(:parent => fg_seq)
# Fraction Group Number:
DICOM::Element.new('300A,0071', '1', :parent => fg_item)
# Number of Fractions Planned (try to derive from total dose/fraction dose, or use 1 as default):
begin
num_frac = p.dose_ttl.empty? || p.dose_tx.empty? ? '1' : (p.dose_ttl.to_i / p.dose_tx.to_f).round.to_s
rescue
num_frac = '0'
end
DICOM::Element.new('300A,0078', num_frac, :parent => fg_item)
# Number of Brachy Application Setups:
DICOM::Element.new('300A,00A0', '0', :parent => fg_item)
# Referenced Beam Sequence (items created for each beam below):
rb_seq = DICOM::Sequence.new('300C,0004', :parent => fg_item)
#
# Beam Sequence:
#
b_seq = DICOM::Sequence.new('300A,00B0', :parent => dcm)
if p
# If no fields are present, we are not going to be able to make a valid DICOM object:
logger.error("No Field Record present. Unable to build a valid RTPLAN DICOM object.") unless p.fields.length > 0
p.fields.each_with_index do |field, i|
# Fields with modality 'Unspecified' (e.g. CT or 2dkV) must be skipped:
unless field.modality == 'Unspecified'
# If this is an electron beam, a warning should be printed, as these are less reliably converted:
logger.warn("This is not a photon beam (#{field.modality}). Beware that DICOM conversion of Electron beams are experimental, and other modalities are unsupported.") if field.modality != 'Xrays'
# Reset control point 'current value' attributes:
reset_cp_current_attributes
# Beam number and name:
beam_number = field.extended_field ? field.extended_field.original_beam_number : (i + 1).to_s
beam_name = field.extended_field ? field.extended_field.original_beam_name : field.field_name
# Ref Beam Item:
rb_item = DICOM::Item.new(:parent => rb_seq)
# Beam Dose (convert from cGy to Gy):
field_dose = field.field_dose.empty? ? '' : (field.field_dose.to_f * 0.01).round(4).to_s
DICOM::Element.new('300A,0084', field_dose, :parent => rb_item)
# Beam Meterset:
DICOM::Element.new('300A,0086', field.field_monitor_units, :parent => rb_item)
# Referenced Beam Number:
DICOM::Element.new('300C,0006', beam_number, :parent => rb_item)
# Beam Item:
b_item = DICOM::Item.new(:parent => b_seq)
# Optional method values:
# Manufacturer:
DICOM::Element.new('0008,0070', options[:manufacturer], :parent => b_item) if options[:manufacturer]
# Manufacturer's Model Name:
DICOM::Element.new('0008,1090', options[:model], :parent => b_item) if options[:model]
# Device Serial Number:
DICOM::Element.new('0018,1000', options[:serial_number], :parent => b_item) if options[:serial_number]
# Treatment Machine Name (max 16 characters):
DICOM::Element.new('300A,00B2', field.treatment_machine[0..15], :parent => b_item)
# Primary Dosimeter Unit:
DICOM::Element.new('300A,00B3', 'MU', :parent => b_item)
# Source-Axis Distance (convert to mm):
DICOM::Element.new('300A,00B4', "#{field.sad.to_f * 10}", :parent => b_item)
# Beam Number:
DICOM::Element.new('300A,00C0', beam_number, :parent => b_item)
# Beam Name:
DICOM::Element.new('300A,00C2', beam_name, :parent => b_item)
# Beam Description:
DICOM::Element.new('300A,00C3', field.field_note, :parent => b_item)
# Beam Type:
beam_type = case field.treatment_type
when 'Static' then 'STATIC'
when 'StepNShoot' then 'STATIC'
when 'VMAT' then 'DYNAMIC'
else logger.error("The beam type (treatment type) #{field.treatment_type} is not yet supported.")
end
DICOM::Element.new('300A,00C4', beam_type, :parent => b_item)
# Radiation Type:
rad_type = case field.modality
when 'Elect' then 'ELECTRON'
when 'Xrays' then 'PHOTON'
else logger.error("The radiation type (modality) #{field.modality} is not yet supported.")
end
DICOM::Element.new('300A,00C6', rad_type, :parent => b_item)
# Treatment Delivery Type:
DICOM::Element.new('300A,00CE', 'TREATMENT', :parent => b_item)
# Number of Wedges:
DICOM::Element.new('300A,00D0', (field.wedge.empty? ? '0' : '1'), :parent => b_item)
# Number of Compensators:
DICOM::Element.new('300A,00E0', (field.compensator.empty? ? '0' : '1'), :parent => b_item)
# Number of Boli:
DICOM::Element.new('300A,00ED', (field.bolus.empty? ? '0' : '1'), :parent => b_item)
# Number of Blocks:
DICOM::Element.new('300A,00F0', (field.block.empty? ? '0' : '1'), :parent => b_item)
# Final Cumulative Meterset Weight:
DICOM::Element.new('300A,010E', 1, :parent => b_item)
# Referenced Patient Setup Number:
DICOM::Element.new('300C,006A', '1', :parent => b_item)
#
# Beam Limiting Device Sequence:
#
create_beam_limiting_devices(b_item, field)
#
# Block Sequence (if any):
# FIXME: It seems that the Block Sequence (300A,00F4) may be
# difficult (impossible?) to reconstruct based on the RTP file's
# information, and thus it is skipped altogether.
#
#
# Applicator Sequence (if any):
#
unless field.e_applicator.empty?
app_seq = DICOM::Sequence.new('300A,0107', :parent => b_item)
app_item = DICOM::Item.new(:parent => app_seq)
# Applicator ID:
DICOM::Element.new('300A,0108', field.e_field_def_aperture, :parent => app_item)
# Applicator Type:
DICOM::Element.new('300A,0109', "ELECTRON_#{field.e_applicator.upcase}", :parent => app_item)
# Applicator Description:
DICOM::Element.new('300A,010A', "Appl. #{field.e_field_def_aperture}", :parent => app_item)
end
#
# Control Point Sequence:
#
# A field may have 0 (no MLC), 1 (conventional beam with MLC) or 2n (IMRT) control points.
# The DICOM file shall always contain 2n control points (minimum 2).
#
cp_seq = DICOM::Sequence.new('300A,0111', :parent => b_item)
if field.control_points.length < 2
# When we have 0 or 1 control point, use settings from field, and insert MLC settings if present:
# First CP:
cp_item = DICOM::Item.new(:parent => cp_seq)
# Control Point Index:
DICOM::Element.new('300A,0112', "0", :parent => cp_item)
# Nominal Beam Energy:
DICOM::Element.new('300A,0114', "#{field.energy.to_f}", :parent => cp_item)
# Dose Rate Set:
DICOM::Element.new('300A,0115', field.doserate, :parent => cp_item)
# Gantry Angle:
DICOM::Element.new('300A,011E', field.gantry_angle, :parent => cp_item)
# Gantry Rotation Direction:
DICOM::Element.new('300A,011F', (field.arc_direction.empty? ? 'NONE' : field.arc_direction), :parent => cp_item)
# Beam Limiting Device Angle:
DICOM::Element.new('300A,0120', field.collimator_angle, :parent => cp_item)
# Beam Limiting Device Rotation Direction:
DICOM::Element.new('300A,0121', 'NONE', :parent => cp_item)
# Patient Support Angle:
DICOM::Element.new('300A,0122', field.couch_pedestal, :parent => cp_item)
# Patient Support Rotation Direction:
DICOM::Element.new('300A,0123', 'NONE', :parent => cp_item)
# Table Top Eccentric Angle:
DICOM::Element.new('300A,0125', field.couch_angle, :parent => cp_item)
# Table Top Eccentric Rotation Direction:
DICOM::Element.new('300A,0126', 'NONE', :parent => cp_item)
# Table Top Vertical Position:
couch_vert = field.couch_vertical.empty? ? '' : (field.couch_vertical.to_f * 10).to_s
DICOM::Element.new('300A,0128', couch_vert, :parent => cp_item)
# Table Top Longitudinal Position:
couch_long = field.couch_longitudinal.empty? ? '' : (field.couch_longitudinal.to_f * 10).to_s
DICOM::Element.new('300A,0129', couch_long, :parent => cp_item)
# Table Top Lateral Position:
couch_lat = field.couch_lateral.empty? ? '' : (field.couch_lateral.to_f * 10).to_s
DICOM::Element.new('300A,012A', couch_lat, :parent => cp_item)
# Isocenter Position (x\y\z):
if p.site_setup
DICOM::Element.new('300A,012C', "#{(p.site_setup.iso_pos_x.to_f * 10).round(2)}\\#{(p.site_setup.iso_pos_y.to_f * 10).round(2)}\\#{(p.site_setup.iso_pos_z.to_f * 10).round(2)}", :parent => cp_item)
else
logger.warn("No Site Setup record exists for this plan. Unable to provide an isosenter position.")
DICOM::Element.new('300A,012C', '', :parent => cp_item)
end
# Source to Surface Distance:
add_ssd(field.ssd, cp_item)
# Cumulative Meterset Weight:
DICOM::Element.new('300A,0134', '0', :parent => cp_item)
# Beam Limiting Device Position Sequence:
if field.control_points.length > 0
create_beam_limiting_device_positions(cp_item, field.control_points.first, options)
else
create_beam_limiting_device_positions_from_field(cp_item, field, options)
end
# Referenced Dose Reference Sequence:
create_referenced_dose_reference(cp_item) if options[:dose_ref]
# Second CP:
cp_item = DICOM::Item.new(:parent => cp_seq)
# Control Point Index:
DICOM::Element.new('300A,0112', "1", :parent => cp_item)
# Cumulative Meterset Weight:
DICOM::Element.new('300A,0134', '1', :parent => cp_item)
else
# When we have multiple (2 or more) control points, iterate each control point:
field.control_points.each { |cp| create_control_point(cp, cp_seq, options) }
# Make sure that the cumulative meterset weight of the last control
# point is '1' (exactly equal to final cumulative meterset weight):
cp_seq.items.last['300A,0134'].value = '1'
end
# Number of Control Points:
DICOM::Element.new('300A,0110', b_item['300A,0111'].items.length, :parent => b_item)
end
end
# Number of Beams:
DICOM::Element.new('300A,0080', fg_item['300C,0004'].items.length, :parent => fg_item)
end
# Restore the DICOM logger:
DICOM.logger.level = original_level
return dcm
end
private
# Adds an angular type value to a Control Point Item, by creating the
# necessary DICOM elements.
# Note that the element is only added if there is no 'current' attribute
# defined, or the given value is different from the current attribute.
#
# @param [DICOM::Item] item the DICOM control point item in which to create the elements
# @param [String] angle_tag the DICOM tag of the angle element
# @param [String] direction_tag the DICOM tag of the direction element
# @param [String, NilClass] angle the collimator angle attribute
# @param [String, NilClass] direction the collimator rotation direction attribute
# @param [Symbol] current_angle the instance variable that keeps track of the current value of this attribute
#
def add_angle(item, angle_tag, direction_tag, angle, direction, current_angle)
if !self.send(current_angle) || angle != self.send(current_angle)
self.send("#{current_angle}=", angle)
DICOM::Element.new(angle_tag, angle, :parent => item)
DICOM::Element.new(direction_tag, (direction.empty? ? 'NONE' : direction), :parent => item)
end
end
# Adds a Table Top Position element to a Control Point Item.
# Note that the element is only added if there is no 'current' attribute
# defined, or the given value is different from the current attribute.
#
# @param [DICOM::Item] item the DICOM control point item in which to create the element
# @param [String] tag the DICOM tag of the couch position element
# @param [String, NilClass] value the couch position
# @param [Symbol] current the instance variable that keeps track of the current value of this attribute
#
def add_couch_position(item, tag, value, current)
if !self.send(current) || value != self.send(current)
self.send("#{current}=", value)
DICOM::Element.new(tag, (value.empty? ? '' : value.to_f * 10), :parent => item)
end
end
# Adds a Dose Rate Set element to a Control Point Item.
# Note that the element is only added if there is no 'current' attribute
# defined, or the given value is different from the current attribute.
#
# @param [String, NilClass] value the doserate attribute
# @param [DICOM::Item] item the DICOM control point item in which to create an element
#
def add_doserate(value, item)
if !@current_doserate || value != @current_doserate
@current_doserate = value
DICOM::Element.new('300A,0115', value, :parent => item)
end
end
# Adds a Nominal Beam Energy element to a Control Point Item.
# Note that the element is only added if there is no 'current' attribute
# defined, or the given value is different from the current attribute.
#
# @param [String, NilClass] value the energy attribute
# @param [DICOM::Item] item the DICOM control point item in which to create an element
#
def add_energy(value, item)
if !@current_energy || value != @current_energy
@current_energy = value
DICOM::Element.new('300A,0114', "#{value.to_f}", :parent => item)
end
end
# Adds an Isocenter element to a Control Point Item.
# Note that the element is only added if there is a Site Setup record present,
# and it contains a real (non-empty) value. Also, the element is only added if there
# is no 'current' attribute defined, or the given value is different from the current attribute.
#
# @param [SiteSetup, NilClass] site_setup the associated site setup record
# @param [DICOM::Item] item the DICOM control point item in which to create an element
#
def add_isosenter(site_setup, item)
if site_setup
# Create an element if the value is new or unique:
if !@current_isosenter
iso = "#{(site_setup.iso_pos_x.to_f * 10).round(2)}\\#{(site_setup.iso_pos_y.to_f * 10).round(2)}\\#{(site_setup.iso_pos_z.to_f * 10).round(2)}"
if iso != @current_isosenter
@current_isosenter = iso
DICOM::Element.new('300A,012C', iso, :parent => item)
end
end
else
# Log a warning if this is the first control point:
unless @current_isosenter
logger.warn("No Site Setup record exists for this plan. Unable to provide an isosenter position.")
end
end
end
# Adds a Source to Surface Distance element to a Control Point Item.
# Note that the element is only added if the SSD attribute contains
# real (non-empty) value.
#
# @param [String, NilClass] value the SSD attribute
# @param [DICOM::Item] item the DICOM control point item in which to create an element
#
def add_ssd(value, item)
DICOM::Element.new('300A,0130', "#{value.to_f * 10}", :parent => item) if value && !value.empty?
end
# Creates a control point item in the given control point sequence, based
# on an RTP control point record.
#
# @param [ControlPoint] cp the RTP ControlPoint record to convert
# @param [DICOM::Sequence] sequence the DICOM parent sequence of the item to be created
# @param [Hash] options the options to use for creating the control point
# @option options [Boolean] :dose_ref if set, a Referenced Dose Reference sequence will be included in the generated control point item
# @return [DICOM::Item] the constructed control point DICOM item
#
def create_control_point(cp, sequence, options={})
cp_item = DICOM::Item.new(:parent => sequence)
# Some CP attributes will always be written (CP index, BLD positions & Cumulative meterset weight).
# The other attributes are only written if they are different from the previous control point.
# Control Point Index:
DICOM::Element.new('300A,0112', "#{cp.index}", :parent => cp_item)
# Beam Limiting Device Position Sequence:
create_beam_limiting_device_positions(cp_item, cp, options)
# Source to Surface Distance:
add_ssd(cp.ssd, cp_item)
# Cumulative Meterset Weight:
DICOM::Element.new('300A,0134', cp.monitor_units.to_f, :parent => cp_item)
# Referenced Dose Reference Sequence:
create_referenced_dose_reference(cp_item) if options[:dose_ref]
# Attributes that are only added if they carry an updated value:
# Nominal Beam Energy:
add_energy(cp.energy, cp_item)
# Dose Rate Set:
add_doserate(cp.doserate, cp_item)
# Gantry Angle & Rotation Direction:
add_angle(cp_item, '300A,011E', '300A,011F', cp.gantry_angle, cp.gantry_dir, :current_gantry)
# Beam Limiting Device Angle & Rotation Direction:
add_angle(cp_item, '300A,0120', '300A,0121', cp.collimator_angle, cp.collimator_dir, :current_collimator)
# Patient Support Angle & Rotation Direction:
add_angle(cp_item, '300A,0122', '300A,0123', cp.couch_pedestal, cp.couch_ped_dir, :current_couch_pedestal)
# Table Top Eccentric Angle & Rotation Direction:
add_angle(cp_item, '300A,0125', '300A,0126', cp.couch_angle, cp.couch_dir, :current_couch_angle)
# Table Top Vertical Position:
add_couch_position(cp_item, '300A,0128', cp.couch_vertical, :current_couch_vertical)
# Table Top Longitudinal Position:
add_couch_position(cp_item, '300A,0129', cp.couch_longitudinal, :current_couch_longitudinal)
# Table Top Lateral Position:
add_couch_position(cp_item, '300A,012A', cp.couch_lateral, :current_couch_lateral)
# Isocenter Position (x\y\z):
add_isosenter(cp.parent.parent.site_setup, cp_item)
cp_item
end
# Creates a beam limiting device sequence in the given DICOM object.
#
# @param [DICOM::Item] beam_item the DICOM beam item in which to insert the sequence
# @param [Field] field the RTP field to fetch device parameters from
# @return [DICOM::Sequence] the constructed beam limiting device sequence
#
def create_beam_limiting_devices(beam_item, field)
bl_seq = DICOM::Sequence.new('300A,00B6', :parent => beam_item)
# The ASYMX item ('backup jaws') doesn't exist on all models:
if ['SYM', 'ASY'].include?(field.field_x_mode.upcase)
bl_item_x = DICOM::Item.new(:parent => bl_seq)
DICOM::Element.new('300A,00B8', "ASYMX", :parent => bl_item_x)
DICOM::Element.new('300A,00BC', "1", :parent => bl_item_x)
end
# The ASYMY item is always created:
bl_item_y = DICOM::Item.new(:parent => bl_seq)
# RT Beam Limiting Device Type:
DICOM::Element.new('300A,00B8', "ASYMY", :parent => bl_item_y)
# Number of Leaf/Jaw Pairs:
DICOM::Element.new('300A,00BC', "1", :parent => bl_item_y)
# MLCX item is only created if leaves are defined:
# (NB: The RTP file doesn't specify leaf position boundaries, so we
# have to set these based on a set of known MLC types, their number
# of leaves, and their leaf boundary positions.)
if field.control_points.length > 0
bl_item_mlcx = DICOM::Item.new(:parent => bl_seq)
DICOM::Element.new('300A,00B8', "MLCX", :parent => bl_item_mlcx)
num_leaves = field.control_points.first.mlc_leaves.to_i
DICOM::Element.new('300A,00BC', num_leaves.to_s, :parent => bl_item_mlcx)
DICOM::Element.new('300A,00BE', "#{RTP.leaf_boundaries(num_leaves).join("\\")}", :parent => bl_item_mlcx)
end
bl_seq
end
# Creates a beam limiting device positions sequence in the given DICOM object.
#
# @param [DICOM::Item] cp_item the DICOM control point item in which to insert the sequence
# @param [ControlPoint] cp the RTP control point to fetch device parameters from
# @return [DICOM::Sequence] the constructed beam limiting device positions sequence
#
# Creates an ASYMX or ASYMY item.
#
# @param [ControlPoint] cp the RTP control point to fetch device parameters from
# @param [DICOM::Sequence] dcm_parent the DICOM sequence in which to insert the item
# @param [Symbol] axis the axis for the item (:x or :y)
# @return [DICOM::Item] the constructed ASYMX or ASYMY item
#
def create_asym_item(cp, dcm_parent, axis, options={})
val1 = cp.send("dcm_collimator_#{axis.to_s}1", options[:scale])
val2 = cp.send("dcm_collimator_#{axis.to_s}2", options[:scale])
item = DICOM::Item.new(:parent => dcm_parent)
# RT Beam Limiting Device Type:
DICOM::Element.new('300A,00B8', "ASYM#{axis.to_s.upcase}", :parent => item)
# Leaf/Jaw Positions:
DICOM::Element.new('300A,011C', "#{val1}\\#{val2}", :parent => item)
item
end
# Creates a beam limiting device positions sequence in the given DICOM object.
#
# @param [DICOM::Item] cp_item the DICOM control point item in which to insert the sequence
# @param [Field] field the RTP treatment field to fetch device parameters from
# @return [DICOM::Sequence] the constructed beam limiting device positions sequence
#
def create_beam_limiting_device_positions_from_field(cp_item, field, options={})
dp_seq = DICOM::Sequence.new('300A,011A', :parent => cp_item)
# ASYMX:
dp_item_x = DICOM::Item.new(:parent => dp_seq)
DICOM::Element.new('300A,00B8', "ASYMX", :parent => dp_item_x)
DICOM::Element.new('300A,011C', "#{field.dcm_collimator_x1}\\#{field.dcm_collimator_x2}", :parent => dp_item_x)
# ASYMY:
dp_item_y = DICOM::Item.new(:parent => dp_seq)
DICOM::Element.new('300A,00B8', "ASYMY", :parent => dp_item_y)
DICOM::Element.new('300A,011C', "#{field.dcm_collimator_y1}\\#{field.dcm_collimator_y2}", :parent => dp_item_y)
dp_seq
end
# Creates a dose reference sequence in the given DICOM object.
#
# @param [DICOM::DObject] dcm the DICOM object in which to insert the sequence
# @param [String] description the value to use for Dose Reference Description
# @return [DICOM::Sequence] the constructed dose reference sequence
#
def create_dose_reference(dcm, description)
dr_seq = DICOM::Sequence.new('300A,0010', :parent => dcm)
dr_item = DICOM::Item.new(:parent => dr_seq)
# Dose Reference Number:
DICOM::Element.new('300A,0012', '1', :parent => dr_item)
# Dose Reference Structure Type:
DICOM::Element.new('300A,0014', 'SITE', :parent => dr_item)
# Dose Reference Description:
DICOM::Element.new('300A,0016', description, :parent => dr_item)
# Dose Reference Type:
DICOM::Element.new('300A,0020', 'TARGET', :parent => dr_item)
dr_seq
end
# Creates a referenced dose reference sequence in the given DICOM object.
#
# @param [DICOM::Item] cp_item the DICOM item in which to insert the sequence
# @return [DICOM::Sequence] the constructed referenced dose reference sequence
#
def create_referenced_dose_reference(cp_item)
# Referenced Dose Reference Sequence:
rd_seq = DICOM::Sequence.new('300C,0050', :parent => cp_item)
rd_item = DICOM::Item.new(:parent => rd_seq)
# Cumulative Dose Reference Coefficient:
DICOM::Element.new('300A,010C', '', :parent => rd_item)
# Referenced Dose Reference Number:
DICOM::Element.new('300C,0051', '1', :parent => rd_item)
rd_seq
end
# Resets the types of control point attributes that are only written to the
# first control point item, and for following control point items only when
# they are different from the 'current' value. When a new field is reached,
# it is essential to reset these attributes, or else we risk starting
# the field with a control point with missing attributes, if one of its first
# attributes is equal to the last attribute of the previous field.
#
def reset_cp_current_attributes
@current_gantry = nil
@current_collimator = nil
@current_couch_pedestal = nil
@current_couch_angle = nil
@current_couch_vertical = nil
@current_couch_longitudinal = nil
@current_couch_lateral = nil
@current_isosenter = nil
end
end
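A standalone sketch (not taken from the gem) of the "write only when changed" strategy shared by the control point helpers above (add_angle, add_doserate, add_couch_position); it uses plain Ruby values instead of DICOM elements, so no dicom gem is required:
current_gantry = nil
written = []
[0.0, 0.0, 90.0, 90.0, 180.0].each do |gantry_angle|
  # Mirrors add_angle: emit a value only for the first control point,
  # or when the value differs from the cached 'current' one.
  if current_gantry.nil? || gantry_angle != current_gantry
    current_gantry = gantry_angle
    written << gantry_angle
  end
end
written #=> [0.0, 90.0, 180.0]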
|
anga/extend_at | lib/extend_at/configuration.rb | ExtendModelAt.Configuration.get_value_of | ruby | def get_value_of(value, model=nil)
if value.kind_of? Symbol
# If the function exist, we execute it
if model.respond_to? value
return model.send value
# if the the function not exist, whe set te symbol as a value
else
return value
end
elsif value.kind_of? Proc
return value.call
else
return value
end
end | Return the value of the execute a function inside the model, for example:
:column => :function
 this executes the model's _function_ method and assigns its return value to the column | train | https://github.com/anga/extend_at/blob/db77cf981108b401af0d92a8d7b1008317d9a17d/lib/extend_at/configuration.rb#L151-L165 | class Configuration
#
def run(env=nil,model=nil)
if env.kind_of? Hash
hash = expand_options env, { :not_call_symbol => [:boolean], :not_expand => [:validate, :default] }, model.clone
hash[:columns] = init_columns hash[:columns]
@config = hash
read_associations_configuration
return @config
end
if not env.kind_of? Proc
return {}
else
Environment.new.run env, model
end
end
protected
# Read all model relationships like belongs_to and has_many
def read_associations_configuration
[:has_one, :has_many, :belongs_to].each do |relation|
if @config.keys.include? :"#{relation}"
raise "Invalid #{relation} value" if not [Hash, Array, Symbol].include? @config[:"#{relation}"].class
# We change the user format (Hash, Array or only one element) to Array
if @config[:"#{relation}"].kind_of? Hash
list_models = @config[:"#{relation}"].keys
elsif @config[:"#{relation}"].kind_of? Array
list_models = @config[:"#{relation}"]
# Transform the array of model in a hash with his configuraion (empty, default values)
@config[:"#{relation}"] = {}
list_models.each do |model|
@config[:"#{relation}"][model.to_sym] = {}
end
else
list_models = [@config[:"#{relation}"]]
# Transform the array of model in a hash with his configuraion (empty, default values)
@config[:"#{relation}"] = {}
list_models.each do |model|
@config[:"#{relation}"][model.to_sym] = {}
end
end
# Iterate inside the array and get and create the configuration to that relationship
list_models.each do |model|
# If the user set some configuration (:class_name for example), we use it
if @config[:"#{relation}"][model.to_sym].kind_of? Hash
config = @config[:"#{relation}"][model.to_sym]
# If not, we create it
else
# Change symbol of the class name to hash configuration
@config[:"#{relation}"][model.to_sym] = {}
config = {}
end
# We set the default class_name if it is not set
if config[:class_name].nil?
@config[:"#{relation}"][model.to_sym][:class_name] = model.to_s.classify
else
@config[:"#{relation}"][model.to_sym][:class_name] = config[:class_name]
end
# If the association is belongs_to, we need to define the columns
if relation.to_s == "belongs_to"
if config[:polymorphic] == true
@config[:columns][ :"#{model}_id" ] = { :type => :integer }
@config[:columns][ :"#{model}_type" ] = { :type => :string }
else
@config[:columns][ config[:foreign_key] || :"#{model}_id" ] = { :type => :integer }
@config[:"#{relation}"][model.to_sym][:foreign_key] = config[:foreign_key] || :"#{model}_id" if @config[:"#{relation}"][model.to_sym][:foreign_key].nil?
end
end
# TODO: Continue adding rails features like:
# :autosave
# :class_name
# :conditions
# :counter_cache
# :dependent
# :foreign_key
# :include
# :polymorphic
# :readonly
# :select
# :touch
# :validate
end
end
end
end
def init_columns(columns={})
new = {}
columns.each do |column, config|
new[column] = config
# Stablish the type
if config[:type].class == Class
# If exist :type, is a static column
new[column][:type] = get_type_for_class config[:type]
end
end
new
end
def get_type_for_class(type)
type = type.name
return :any if type == 'NilClass'
return :float if type == 'Float'
return :integer if type == 'Fixnum'
return :text if type == 'String'
return :timestamp if type == 'Time'
return :datetime if type == 'Date'
return :any
end
# Transform the user configuration to a hash. For example, if the user uses a lambda to create the configuration, this function executes the lambda to get the result
# and re-parses it (and so on) to get a full hash configuration
def expand_options(options={}, opts={}, model=nil)
options = get_value_of options, model
config_opts = {
:not_expand => [],
:not_call_symbol => []
}.merge! opts
if options.kind_of? Hash
opts = {}
options.each do |column, config|
if not config_opts[:not_expand].include? column.to_sym
if not config_opts[:not_call_symbol].include? config
opts[column.to_sym] = expand_options(get_value_of(config, model), config_opts, model)
else
opts[column.to_sym] = expand_options(config, config_opts, model)
end
else
opts[column.to_sym] = config
end
end
return opts
else
return get_value_of options, model
end
end
# Returns the value of executing a function inside the model. For example:
# :column => :function
# this executes the model's _function_ method and assigns its return value to the column
end
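Hedged usage sketch for get_value_of (not from the gem): it assumes Configuration can be instantiated without arguments and uses send because the method is not public; the model below is a stand-in built with OpenStruct:
require 'ostruct'

config = ExtendModelAt::Configuration.new
model  = OpenStruct.new(age: 21)

config.send(:get_value_of, :age, model)       #=> 21 (the model responds to :age)
config.send(:get_value_of, :missing, model)   #=> :missing (no such method, the symbol itself is returned)
config.send(:get_value_of, lambda { 2 + 2 })  #=> 4 (the Proc is called)
config.send(:get_value_of, 'plain')           #=> 'plain' (any other value is returned unchanged)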
|
wvanbergen/request-log-analyzer | lib/request_log_analyzer/tracker/hourly_spread.rb | RequestLogAnalyzer::Tracker.HourlySpread.update | ruby | def update(request)
timestamp = request.first(options[:field])
@hour_frequencies[timestamp.to_s[8..9].to_i] += 1
@first = timestamp if timestamp < @first
@last = timestamp if timestamp > @last
end | Check if the timestamp in the request and store it.
<tt>request</tt> The request. | train | https://github.com/wvanbergen/request-log-analyzer/blob/b83865d440278583ac8e4901bb33878244fd7c75/lib/request_log_analyzer/tracker/hourly_spread.rb#L42-L47 | class HourlySpread < Base
attr_reader :hour_frequencies, :first, :last
# Check if timestamp field is set in the options and prepare the result time graph.
def prepare
options[:field] ||= :timestamp
@hour_frequencies = (0...24).map { 0 }
@first, @last = 99_999_999_999_999, 0
end
# Get the timestamp from the request and store it.
# <tt>request</tt> The request.
# Total amount of requests tracked
def total_requests
@hour_frequencies.reduce(0) { |sum, value| sum + value }
end
# First timestamp encountered
def first_timestamp
DateTime.parse(@first.to_s, '%Y%m%d%H%M%S') rescue nil
end
# Last timestamp encountered
def last_timestamp
DateTime.parse(@last.to_s, '%Y%m%d%H%M%S') rescue nil
end
# Difference between last and first timestamp.
def timespan
last_timestamp - first_timestamp
end
# Generate an hourly spread report to the given output object.
# Any options for the report should have been set during initialize.
# <tt>output</tt> The output object
def report(output)
output.title(title)
if total_requests == 0
output << "None found.\n"
return
end
days = [1, timespan].max
output.table({}, { align: :right }, { type: :ratio, width: :rest, treshold: 0.15 }) do |rows|
@hour_frequencies.each_with_index do |requests, index|
ratio = requests.to_f / total_requests.to_f
requests_per_day = (requests / days).ceil
rows << ["#{index.to_s.rjust(3)}:00", '%d hits/day' % requests_per_day, ratio]
end
end
end
# Returns the title of this tracker for reports
def title
options[:title] || 'Request distribution per hour'
end
# Returns the found frequencies per hour as a hash for YAML exporting
def to_yaml_object
yaml_object = {}
@hour_frequencies.each_with_index do |freq, hour|
yaml_object["#{hour}:00 - #{hour + 1}:00"] = freq
end
yaml_object
end
end
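Illustrative note (the sample value is made up): update expects the :timestamp field as a number in YYYYMMDDHHMMSS form, so the hour bucket comes from characters 8..9 of its string representation:
timestamp = 20240115143000  # 2024-01-15 14:30:00 encoded as YYYYMMDDHHMMSS
timestamp.to_s[8..9].to_i   #=> 14, the index of @hour_frequencies that gets incremented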
|
murb/workbook | lib/workbook/table.rb | Workbook.Table.push | ruby | def push(row)
row = Workbook::Row.new(row) if row.class == Array
super(row)
row.set_table(self)
end | Add row
@param [Workbook::Row, Array] row to add | train | https://github.com/murb/workbook/blob/2e12f43c882b7c235455192a2fc48183fe6ec965/lib/workbook/table.rb#L110-L114 | class Table < Array
include Workbook::Modules::TableDiffSort
include Workbook::Writers::CsvTableWriter
include Workbook::Writers::JsonTableWriter
include Workbook::Writers::HtmlTableWriter
attr_accessor :name
def initialize row_cel_values=[], sheet=nil, options={}
row_cel_values = [] if row_cel_values == nil
row_cel_values.each_with_index do |r,ri|
if r.is_a? Workbook::Row
r.table = self
else
r = Workbook::Row.new(r,self, options)
end
define_columns_with_row(r) if ri == 0
end
self.sheet = sheet
# Column data is considered as a 'row' with 'cells' that contain 'formatting'
end
# Quick assessor to the book's template, if it exists
#
# @return [Workbook::Template]
def template
sheet.book.template
end
# Returns the header of this table (typically the first row, but can be a different row).
# The header row is also used for finding values in an arbitrary row.
#
# @return [Workbook::Row] The header
def header
if defined?(@header) and @header == false
false
elsif defined?(@header) and @header
@header
else
first
end
end
# Set the header of this table (typically the first row, but can be a different row).
# The header row is also used for finding values in a aribrary row.
#
# @param [Workbook::Row, Integer] h should be the row or the index of this table's row
# @return [Workbook::Row] The header
def header= h
if h.is_a? Numeric
@header = self[h]
else
@header = h
end
end
# Returns the index of the header row
#
# @return [Integer] The index of the header row (typically 0)
def header_row_index
self.index(self.header)
end
def define_columns_with_row(r)
self.columns = r.collect do |column|
Workbook::Column.new self, {}
end
end
# Generates a new row, with optionally predefined cell-values, that is already connected to this table.
#
# @param [Array, Workbook::Row] cell_values is an array or row of cell values
# @return [Workbook::Row] the newly created row
def new_row cell_values=[]
r = Workbook::Row.new(cell_values,self)
return r
end
def create_or_open_row_at index
r = self[index]
if r == nil
r = Workbook::Row.new
r.table=(self)
end
r
end
# Removes all empty lines. This function is particularly useful if you typically add lines to the end of a template-table, which sometimes has unremovable empty lines.
#
# @return [Workbook::Table] self
def remove_empty_lines!
self.delete_if{|r| r.nil? or r.compact.empty?}
self
end
# Add row
# @param [Workbook::Row, Array] row to add
# Add row
# @param [Workbook::Row, Array] row to add
def <<(row)
row = Workbook::Row.new(row) if row.class == Array
super(row)
row.set_table(self)
end
def has_contents?
self.clone.remove_empty_lines!.count != 0
end
# Returns true if the row exists in this table
#
# @param [Workbook::Row] row to test for
# @return [Boolean] whether the row exist in this table
def contains_row? row
raise ArgumentError, "table should be a Workbook::Row (you passed a #{t.class})" unless row.is_a?(Workbook::Row)
self.collect{|r| r.object_id}.include? row.object_id
end
# Returns the sheet this table belongs to, creates a new sheet if none exists
#
# @return [Workbook::Sheet] The sheet this table belongs to
def sheet
return @sheet if defined?(@sheet) and !@sheet.nil?
self.sheet= Workbook::Sheet.new(self)
end
# Returns the sheet this table belongs to, creates a new sheet if none exists
#
# @param [Workbook::Sheet] sheet this table belongs to
# @return [Workbook::Sheet] The sheet this table belongs to
def sheet= sheet
@sheet = sheet
end
# Removes all lines from this table
#
# @return [Workbook::Table] (self)
def delete_all
self.delete_if{|b| true}
end
# clones itself *and* the rows it contains
#
# @return [Workbook::Table] The cloned table
def clone
t = self
c = super
c.delete_all
t.each{|r| c << r.clone}
c.header = c[header_row_index] if header_row_index
return c
end
# Overrides normal Array's []-function with support for symbols that identify a column based on the header-values
#
# @example Lookup using fixnum or header value encoded as symbol
# table[0] #=> <Row [a,2,3,4]> (first row)
# table["A1"] #=> <Cell value="a"> (first cell of first row)
#
# @param [Fixnum, String] index_or_string to reference to either the row, or the cell
# @return [Workbook::Row, Workbook::Cell, nil]
def [] index_or_string
if index_or_string.is_a? String
match = index_or_string.match(/([A-Z]+)([0-9]*)/i)
col_index = Workbook::Column.alpha_index_to_number_index(match[1])
row_index = match[2].to_i - 1
return self[row_index][col_index]
elsif index_or_string.is_a? Range
collection = to_a[index_or_string].collect{|a| a.clone}
return Workbook::Table.new collection
elsif index_or_string.is_a? Integer
return to_a[index_or_string]
end
end
# Overrides normal Row's []=-function; automatically converting to row and setting
# with the label correctly
#
# @example Lookup using fixnum or header value encoded as symbol
# `table[0] = <Row [a,2,3,4]>` (set first row)
# `table["A1"] = 2` (set first cell of first row to 2)
#
# @param [Fixnum, String] index_or_string to reference to either the row, or the cell
# @param [Workbook::Table, Array] new_value to set
# @return [Workbook::Cell, nil]
def []= index_or_string, new_value
if index_or_string.is_a? String
match = index_or_string.upcase.match(/([A-Z]*)([0-9]*)/)
cell_index = Workbook::Column.alpha_index_to_number_index(match[1])
row_index = match[2].to_i - 1
self[row_index][cell_index].value = new_value
else
row = new_value
row = Workbook::Row.new(row) unless row.is_a? Workbook::Row
super(index_or_string,row)
row.set_table(self)
end
end
# remove all the trailing empty-rows (returning a trimmed clone)
#
# @param [Integer] desired_row_length of the rows
# @return [Workbook::Row] a trimmed clone of the array
def trim desired_row_length=nil
self.clone.trim!(desired_row_length)
end
# remove all the trailing empty-rows (returning a trimmed self)
#
# @param [Integer] desired_row_length of the new row
# @return [Workbook::Row] self
def trim! desired_row_length=nil
max_length = self.collect{|a| a.trim.length }.max
self_count = self.count-1
self.count.times do |index|
index = self_count - index
if self[index].trim.empty?
self.delete_at(index)
else
break
end
end
self.each{|a| a.trim!(max_length)}
self
end
# Returns The dimensions of this sheet based on longest row
# @return [Array] x-width, y-height
def dimensions
height = self.count
width = self.collect{|a| a.length}.max
[width,height]
end
# Returns an array of Column-classes describing the columns of this table
# @return [Array<Column>] columns
def columns
@columns ||= header.collect do |header_cell|
Column.new(self)
end
end
# Returns an array of Column-classes describing the columns of this table
# @param [Array<Column>] columns
# @return [Array<Column>] columns
def columns= columns
columns.each{|c| c.table=self}
@columns = columns
end
end
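Hedged usage sketch for push (assumes the workbook gem is installed; the cell values are illustrative):
require 'workbook'

table = Workbook::Table.new([%w[name age]])
table.push(['alice', 42])        # the plain Array is wrapped in a Workbook::Row
table << ['bob', 43]             # << applies the same conversion
table.last.is_a?(Workbook::Row)  #=> true, and the row is linked back to the table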
|
moneta-rb/moneta | lib/moneta/builder.rb | Moneta.Builder.adapter | ruby | def adapter(adapter, options = {}, &block)
case adapter
when Symbol
use(Adapters.const_get(adapter), options, &block)
when Class
use(adapter, options, &block)
else
raise ArgumentError, 'Adapter must be a Moneta store' unless adapter.respond_to?(:load) && adapter.respond_to?(:store)
raise ArgumentError, 'No options allowed' unless options.empty?
@proxies.unshift adapter
nil
end
end | Add adapter to stack
@param [Symbol/Class/Moneta store] adapter Name of adapter class, adapter class or Moneta store
@param [Hash] options Options hash
@api public | train | https://github.com/moneta-rb/moneta/blob/26a118c8b2c93d11257f4a5fe9334a8157f4db47/lib/moneta/builder.rb#L48-L60 | class Builder
# @yieldparam Builder dsl code block
def initialize(&block)
raise ArgumentError, 'No block given' unless block_given?
@proxies = []
instance_eval(&block)
end
# Build proxy stack
#
# @return [Object] Generated Moneta proxy stack
# @api public
def build
adapter = @proxies.first
if Array === adapter
klass, options, block = adapter
adapter = new_proxy(klass, options, &block)
check_arity(klass, adapter, 1)
end
@proxies[1..-1].inject([adapter]) do |result, proxy|
klass, options, block = proxy
proxy = new_proxy(klass, result.last, options, &block)
check_arity(klass, proxy, 2)
result << proxy
end
end
# Add proxy to stack
#
# @param [Symbol/Class] proxy Name of proxy class or proxy class
# @param [Hash] options Options hash
# @api public
def use(proxy, options = {}, &block)
proxy = Moneta.const_get(proxy) if Symbol === proxy
raise ArgumentError, 'You must give a Class or a Symbol' unless Class === proxy
@proxies.unshift [proxy, options, block]
nil
end
# Add adapter to stack
#
# @param [Symbol/Class/Moneta store] adapter Name of adapter class, adapter class or Moneta store
# @param [Hash] options Options hash
# @api public
protected
def new_proxy(klass, *args, &block)
klass.new(*args, &block)
rescue ArgumentError
check_arity(klass, klass.allocate, args.size)
raise
end
def check_arity(klass, proxy, expected)
args = proxy.method(:initialize).arity.abs
raise(ArgumentError, %{#{klass.name}#new accepts wrong number of arguments (#{args} accepted, #{expected} expected)
Please check your Moneta builder block:
* Proxies must be used before the adapter
* Only one adapter is allowed
* The adapter must be used last
}) if args != expected
end
end
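Hedged usage sketch: the builder block is normally reached through Moneta.build; :Transformer and :Memory ship with the gem, but the option set shown is only an example:
require 'moneta'

store = Moneta.build do
  use :Transformer, value: :marshal  # a proxy, resolved via Moneta.const_get(:Transformer)
  adapter :Memory                    # a Symbol, resolved via Adapters.const_get(:Memory)
end
store['answer'] = 42
store['answer'] #=> 42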
|
DigitPaint/roger | lib/roger/release.rb | Roger.Release.scm | ruby | def scm(force = false)
return @_scm if @_scm && !force
case config[:scm]
when :git
@_scm = Release::Scm::Git.new(path: source_path)
when :fixed
@_scm = Release::Scm::Fixed.new
else
raise "Unknown SCM #{options[:scm].inspect}"
end
end | Get the current SCM object | train | https://github.com/DigitPaint/roger/blob/1153119f170d1b0289b659a52fcbf054df2d9633/lib/roger/release.rb#L84-L95 | class Release
include Roger::Helpers::Logging
include Roger::Helpers::GetFiles
attr_reader :config, :project
attr_reader :stack
class << self
include Roger::Helpers::GetCallable
end
# @option config [:git, :fixed] :scm The SCM to use (default = :git)
# @option config [String, Pathname] :target_path The path/directory to put the release into
# @option config [String, Pathname] :build_path Temporary path used to build the release
# @option config [Boolean] :cleanup_build Whether or not to remove the build_path after we're
# done (default = true)
# @option config [lambda] :cp Function to be called for copying
# @option config [Boolean] :blank Keeps the release clean, don't automatically add any
# processors or finalizers (default = false)
def initialize(project, config = {})
real_project_path = project.path.realpath
defaults = {
scm: :git,
source_path: project.html_path.realpath,
target_path: real_project_path + "releases",
build_path: real_project_path + "build",
cp: lambda do |source, dest|
if RUBY_PLATFORM.match("mswin") || RUBY_PLATFORM.match("mingw")
unless system(["echo d | xcopy", "/E", "/Y", source.to_s.gsub("/", "\\"),
dest.to_s.gsub("/", "\\")].join(" "))
raise "Could not copy build directory using xcopy"
end
else
unless system(Shellwords.join(["cp", "-LR", "#{source}/", dest.to_s]))
raise "Could not copy build directory using cp"
end
end
end,
blank: false,
cleanup_build: true
}
@config = {}.update(defaults).update(config)
@project = project
@stack = []
end
# Accessor for target_path
# The target_path is the path where the finalizers will put the release
#
# @return Pathname the target_path
def target_path
Pathname.new(config[:target_path])
end
# Accessor for build_path
# The build_path is a temporary directory where the release will be built
#
# @return Pathname the build_path
def build_path
Pathname.new(config[:build_path])
end
# Accessor for source_path
# The source path is the root of the project
#
# @return Pathname the source_path
def source_path
Pathname.new(config[:source_path])
end
# Get the current SCM object
# Inject variables into files with an optional filter
#
# @examples
# release.inject({"VERSION" => release.version, "DATE" => release.date},
# :into => %w{_doc/toc.html})
# release.inject({"CHANGELOG" => {:file => "", :filter => BlueCloth}},
# :into => %w{_doc/changelog.html})
def inject(variables, options)
@stack << Injector.new(variables, options)
end
# Use a certain pre-processor
#
# @examples
# release.use :sprockets, sprockets_config
def use(processor, options = {})
@stack << [self.class.get_callable(processor, Roger::Release::Processors.map), options]
end
# Write out the whole release into a directory, zip file or anything you can imagine
# #finalize can be called multiple times, it just will run all of them.
#
# The default finalizer is :dir
#
# @param [Symbol, Proc] Finalizer to use
#
# @examples
# release.finalize :zip
def finalize(finalizer, options = {})
@stack << [self.class.get_callable(finalizer, Roger::Release::Finalizers.map), options]
end
# Files to clean up in the build directory just before finalization happens
#
# @param [String] Pattern to glob within build directory
#
# @examples
# release.cleanup "**/.DS_Store"
def cleanup(pattern)
@stack << Cleaner.new(pattern)
end
# Generates a banner if a block is given, or returns the currently set banner.
# It automatically takes care of adding comment marks around the banner.
#
# The default banner looks like this:
#
# =======================
# = Version : v1.0.0 =
# = Date : 2012-06-20 =
# =======================
#
#
# @option options [:css,:js,:html,false] :comment Whether or not to comment the output and in
# what style. (default=js)
def banner(options = {}, &_block)
options = {
comment: :js
}.update(options)
if block_given?
@_banner = yield.to_s
elsif !@_banner
@_banner = default_banner.join("\n")
end
if options[:comment]
comment(@_banner, style: options[:comment])
else
@_banner
end
end
# Actually perform the release
def run!
project.mode = :release
# Validate paths
validate_paths!
# Extract mockup
copy_source_path_to_build_path!
validate_stack!
# Run stack
run_stack!
# Cleanup
cleanup! if config[:cleanup_build]
ensure
project.mode = nil
end
# @param [String] string The string to comment
#
# @option options [:html, :css, :js] :style The comment style to use
# (default=:js, which is the same as :css)
# @option options [Boolean] :per_line Comment per line or make one block? (default=true)
def comment(string, options = {})
options = {
style: :css,
per_line: true
}.update(options)
commenters = {
html: proc { |s| "<!-- #{s} -->" },
css: proc { |s| "/* #{s} */" },
js: proc { |s| "/* #{s} */" }
}
commenter = commenters[options[:style]] || commenters[:js]
if options[:per_line]
string = string.split(/\r?\n/)
string.map { |s| commenter.call(s) }.join("\n")
else
commenter.call(string)
end
end
protected
def get_files_default_path
build_path
end
def default_banner
banner = [
"Version : #{scm.version}",
"Date : #{scm.date.strftime('%Y-%m-%d')}"
]
# Find longest line
size = banner.map(&:size).max
# Pad all lines
banner.map! { |b| "= #{b.ljust(size)} =" }
div = "=" * banner.first.size
banner.unshift(div)
banner << div
end
# ==============
# = The runway =
# ==============
# Checks if build path exists (and cleans it up)
# Checks if target path exists (if not, creates it)
def validate_paths!
ensure_clean_build_path!
ensure_existing_target_path!
end
def ensure_clean_build_path!
return unless build_path.exist?
log self, "Cleaning up previous build \"#{build_path}\""
rm_rf(build_path)
end
def ensure_existing_target_path!
return if target_path.exist?
log self, "Creating target path \"#{target_path}\""
mkdir target_path
end
# Checks if the project will be runned
# If config[:blank] is true it will automatically add Mockup processor
def validate_stack!
return if config[:blank]
ensure_mockup_processor_in_stack!
ensure_dir_finalizer_in_stack!
end
def ensure_mockup_processor_in_stack!
return if find_in_stack(Roger::Release::Processors::Mockup)
@stack.unshift([Roger::Release::Processors::Mockup.new, {}])
end
def ensure_dir_finalizer_in_stack!
return if find_in_stack(Roger::Release::Finalizers::Dir)
@stack.push([Roger::Release::Finalizers::Dir.new, {}])
end
# Find a processor in the stack
def find_in_stack(klass)
@stack.find { |(processor, _options)| processor.class == klass }
end
def copy_source_path_to_build_path!
if config[:cp]
config[:cp].call(source_path, build_path)
else
mkdir(build_path)
cp_r(source_path.children, build_path)
end
end
def run_stack!
# call all objects in @stack
@stack.each do |task|
if task.is_a?(Array)
task[0].call(self, task[1])
else
task.call(self)
end
end
end
def cleanup!
log(self, "Cleaning up build path #{build_path}")
rm_rf(build_path)
end
end
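Hedged usage sketch for scm (assumes a Roger::Project instance is available as project):
release = Roger::Release.new(project, scm: :git)  # :git is also the default
release.scm        # instantiates and memoizes a Release::Scm::Git for the source path
release.scm(true)  # force = true rebuilds the SCM object instead of returning the memoized one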
|
documentcloud/jammit | lib/jammit/command_line.rb | Jammit.CommandLine.ensure_configuration_file | ruby | def ensure_configuration_file
config = @options[:config_paths]
return true if File.exists?(config) && File.readable?(config)
puts "Could not find the asset configuration file \"#{config}\""
exit(1)
end | Make sure that we have a readable configuration file. The @jammit@
command can't run without one. | train | https://github.com/documentcloud/jammit/blob/dc866f1ac3eb069d65215599c451db39d66119a7/lib/jammit/command_line.rb#L37-L42 | class CommandLine
BANNER = <<-EOS
Usage: jammit OPTIONS
Run jammit inside a Rails application to compresses all JS, CSS,
and JST according to config/assets.yml, saving the packaged
files and corresponding gzipped versions.
If you're using "embed_assets", and you wish to precompile the
MHTML stylesheet variants, you must specify the "base-url".
Options:
EOS
# The @Jammit::CommandLine@ runs from the contents of @ARGV@.
def initialize
parse_options
ensure_configuration_file
Jammit.package!(@options)
end
private
# Make sure that we have a readable configuration file. The @jammit@
# command can't run without one.
# Uses @OptionParser@ to grab the options: *--output*, *--config*, and
# *--base-url*...
def parse_options
@options = {
:config_paths => Jammit::DEFAULT_CONFIG_PATH,
:output_folder => nil,
:base_url => nil,
:force => false
}
@option_parser = OptionParser.new do |opts|
opts.on('-o', '--output PATH', 'output folder for packages (default: "public/assets")') do |output_folder|
@options[:output_folder] = output_folder
end
opts.on('-c', '--config PATH', 'path to assets.yml (default: "config/assets.yml")') do |config_path|
@options[:config_paths] = config_path
end
opts.on('-u', '--base-url URL', 'base URL for MHTML (ex: "http://example.com")') do |base_url|
@options[:base_url] = base_url
end
opts.on('-f', '--force', 'force a rebuild of all assets') do |force|
@options[:force] = force
end
opts.on('-p', '--packages LIST', 'list of packages to build (ex: "core,ui", default: all)') do |package_names|
@options[:package_names] = package_names.split(/,\s*/).map {|n| n.to_sym }
end
opts.on('-P', '--public-root PATH', 'path to public assets (default: "public")') do |public_root|
puts "Option for PUBLIC_ROOT"
@options[:public_root] = public_root
end
opts.on_tail('-v', '--version', 'display Jammit version') do
puts "Jammit version #{Jammit::VERSION}"
exit
end
end
@option_parser.banner = BANNER
@option_parser.parse!(ARGV)
end
end
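Illustrative note: the guard boils down to a readability check on the configured path (the original uses the deprecated File.exists? alias):
config = 'config/assets.yml'                   # Jammit::DEFAULT_CONFIG_PATH by default
File.exist?(config) && File.readable?(config)  # must be true, otherwise jammit prints an error and exits with status 1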
|
arbox/wlapi | lib/wlapi/api.rb | WLAPI.API.intersection | ruby | def intersection(word1, word2, limit = 10)
check_params(word1, word2, limit)
arg1 = ['Wort 1', word1]
arg2 = ['Wort 2', word2]
arg3 = ['Limit', limit]
answer = query(@cl_Kookurrenzschnitt, arg1, arg2, arg3)
get_answer(answer)
end | Returns the intersection of the co-occurrences of the two given words.
The result set is ordered according to the sum of the significances
in descending order. Note that due to the join involved,
this may take some time.
--
let's call it intersection, not kookurrenzschnitt
the service is internal (INTERN) and requires additional credentials | train | https://github.com/arbox/wlapi/blob/8a5b1b1bbfa58826107daeeae409e4e22b1c5236/lib/wlapi/api.rb#L345-L354 | class API
include REXML
# SOAP Services Endpoint.
ENDPOINT = 'http://wortschatz.uni-leipzig.de/axis/services'
# The list of accessible services, the MARSService is excluded due
# to its internal authorization.
SERVICES = [
:Baseform, :Cooccurrences, :CooccurrencesAll,
:ExperimentalSynonyms, :Frequencies,
:Kookurrenzschnitt, :Kreuzwortraetsel,
:LeftCollocationFinder, :LeftNeighbours, :NGrams,
:NGramReferences, :RightCollocationFinder,
:RightNeighbours, :Sachgebiet, :Sentences,
:Similarity, :Synonyms, :Thesaurus, :Wordforms
]
# At the creation point clients for all services are being instantiated.
# You can also set the login and the password (it defaults to 'anonymous').
# api = WLAPI::API.new
def initialize(login = 'anonymous', pass = 'anonymous')
# This hash contains the URLs to the single services.
services = {}
SERVICES.each { |service| services[service] = "#{ENDPOINT}/#{service}"}
# cl short for client.
# Dynamically create all the clients and set access credentials.
# It can be a very bad idea to instantiate all the clients at once,
# we should investigate the typical user behaviour.
# If only one service is used in the separate session => rewrite the class!
services.each do |key, val|
cl_name = "@cl_#{key}"
options = {:wsdl => val + '?wsdl',
:namespaces => {'xmlns:dat' => 'http://datatypes.webservice.wortschatz.uni_leipzig.de',
'xmlns:urn' => val},
:basic_auth => ['anonymous', 'anonymous'],
:log => $DEBUG
}
client = Savon.client(options)
eval("#{cl_name} = client")
end
# Savon creates very verbose logs, switching off.
HTTPI.log = false unless $DEBUG
end
# Main methods to access different services.
#
# You can define the limit for the result set, it defaults to 10.
# If you want to get all the results, you should provide a number,
# which would be greater than the result set since we cannot
# predict how many answers the server will give us.
############################################################################
## One parameter methods.
############################################################################
# Returns the frequency and frequency class of the input word.
# Frequency class is computed in relation to the most frequent word
# in the corpus. The higher the class, the rarer the word:
# api.frequencies("Autos") => ["40614", "9"]
# @return [Array] a list
def frequencies(word)
check_params(word)
arg1 = ['Wort', word]
answer = query(@cl_Frequencies, arg1)
get_answer(answer)
end
# Gets the baseform (whatever it is :), not lemma).
# Returns the lemmatized (base) form of the input word
# and the POS tag in an array:
# api.baseform("Auto") => ["Auto", "N"]
# @return [Array] a list
def baseform(word)
check_params(word)
arg1 = ['Wort', word]
answer = query(@cl_Baseform, arg1)
get_answer(answer)
end
# Returns categories for a given input word as an array:
# api.domain("Michael") => ["Vorname", "Nachname", "Männername"]
#--
# Is it a good name? all names are in English, but here..
# let's call it domain, not sachgebiet
def domain(word)
check_params(word)
arg1 = ['Wort', word]
answer = query(@cl_Sachgebiet, arg1)
get_answer(answer)
end
## Two parameter methods.
############################################################################
# Returns all other word forms of the same lemma for a given word form.
# api.wordforms("Auto") => ["Auto", "Autos"]
# @return [Array] a list
def wordforms(word, limit = 10)
check_params(word, limit)
# note, it is the only service which requires 'Word', not 'Wort'
arg1 = ['Word', word]
arg2 = ['Limit', limit]
answer = query(@cl_Wordforms, arg1, arg2)
get_answer(answer)
end
# As the Synonyms service returns synonyms of the given input word.
# However, this first builds a lemma of the input word
# and thus returns more synonyms:
# api.thesaurus("Auto") => ["Auto", "Bahn", "Wagen", "Zug", "Schiff", ...]
# @return [Array] a list
def thesaurus(word, limit = 10)
check_params(word, limit)
arg1 = ['Wort', word]
arg2 = ['Limit', limit]
answer = query(@cl_Thesaurus, arg1, arg2)
get_answer(answer)
end
# This method searches for synonyms.
# Returns synonyms of the input word. In other words, this is a thesaurus.
# api.synonyms("Auto") => ["Kraftwagen", "Automobil", "Benzinkutsche", ...]
# @return [Array] a list
def synonyms(word, limit = 10)
check_params(word, limit)
arg1 = ['Wort', word]
arg2 = ['Limit', limit]
answer = query(@cl_Synonyms, arg1, arg2)
# Synonym service provides multiple values, so we take only odd.
get_answer(answer, '[position() mod 2 = 1 ]')
end
# Returns sample sentences containing the input word.
# The return value is an array:
# api.sentences("Auto") => ["40808144", "Zweitens der freche,
# frische Klang der Hupe
# und drittens die hinreißend gestylten 16-Zoll-Felgen,
# die es leider nur für dieses Auto gibt.", ...]
#--
# @todo ok, but results should be filtered
def sentences(word, limit = 10)
check_params(word, limit)
arg1 = ['Wort', word]
arg2 = ['Limit', limit]
answer = query(@cl_Sentences, arg1, arg2)
get_answer(answer)
end
# For a given input word, returns statistically significant left neighbours
# (words co-occurring immediately to the left of the input word).
# api.left_neighbours("Auto") => ["geparktes", "Auto", "561", ...]
#--
# @todo ok, but results should be filtered
def left_neighbours(word, limit = 10)
check_params(word, limit)
arg1 = ['Wort', word]
arg2 = ['Limit', limit]
answer = query(@cl_LeftNeighbours, arg1, arg2)
get_answer(answer)
end
# For a given input word, returns statistically significant right neighbours
# (words co-occurring immediately to the right of the input word).
# api.right_neighbours("Auto") => ["Auto", "erfaßt", "575", ...]
#--
# @todo ok, but results should be filtered
def right_neighbours(word, limit = 10)
check_params(word, limit)
arg1 = ['Wort', word]
arg2 = ['Limit', limit]
answer = query(@cl_RightNeighbours, arg1, arg2)
get_answer(answer)
end
# Returns automatically computed contextually similar words
# of the input word.
# Such similar words may be antonyms, hyperonyms, synonyms,
# cohyponyms or other.
# Note that due to the huge amount of data any query to this services
# may take a long time.
# api.similarity("Auto") => ["Auto", "Wagen", "26", ...]
def similarity(word, limit = 10)
check_params(word, limit)
arg1 = ['Wort', word]
arg2 = ['Limit', limit]
answer = query(@cl_Similarity, arg1, arg2)
get_answer(answer)
end
# This service delivers an experimental synonyms request for internal tests.
#--
# don't know, if we have to include this service...
def experimental_synonyms(word, limit = 10)
check_params(word, limit)
arg1 = ['Wort', word]
arg2 = ['Limit', limit]
answer = query(@cl_ExperimentalSynonyms, arg1, arg2)
get_answer(answer)
end
# @todo Define the syntax for the pattern, fix the corpus.
def ngrams(pattern, limit = 10)
arg1 = ['Pattern', pattern]
arg2 = ['Limit', limit]
answer = query(@cl_NGrams, arg1, arg2)
# raise(NotImplementedError, 'This method will be implemented in the next release.')
end
# @todo Define the syntax for the pattern, fix the corpus.
def ngram_references(pattern, limit = 10)
arg1 = ['Pattern', pattern]
arg2 = ['Limit', limit]
answer = query(@cl_NGramReferences, arg1, arg2)
# raise(NotImplementedError, 'This method will be implemented in the next release.')
end
## Three parameter methods.
############################################################################
# Attempts to find linguistic collocations that occur to the right
# of the given input word.
# The parameter 'Wortart' accepts four values 'A, V, N, S'
# which stand for adjective, verb, noun and stopword respectively.
# The parameter restricts the type of words found.
# It returns an array:
# api.right_collocation_finder("Auto", "V", 10) =>
# ["Auto", "abfackeln", "V", ...]
def right_collocation_finder(word, pos, limit = 10)
check_params(word, pos, limit)
arg1 = ['Wort', word]
arg2 = ['Wortart', pos]
arg3 = ['Limit', limit]
answer = query(@cl_RightCollocationFinder, arg1, arg2, arg3)
get_answer(answer)
end
# Attempts to find linguistic collocations that occur to the left
# of the given input word.
# The parameter 'Wortart' accepts four values 'A, V, N, S'
# which stand for adjective, verb, noun and stopword respectively.
# The parameter restricts the type of words found.
# It returns an array:
# api.left_collocation_finder("Stuhl", "A", 10) =>
# ["apostolisch", "A", "Stuhl", ...]
def left_collocation_finder(word, pos, limit = 10)
check_params(word, pos, limit)
arg1 = ['Wort', word]
arg2 = ['Wortart', pos]
arg3 = ['Limit', limit]
answer = query(@cl_LeftCollocationFinder, arg1, arg2, arg3)
get_answer(answer)
end
# Returns statistically significant co-occurrences of the input word.
def cooccurrences(word, sign, limit = 10)
check_params(word, sign, limit)
arg1 = ['Wort', word]
arg2 = ['Mindestsignifikanz', sign]
arg3 = ['Limit', limit]
answer = query(@cl_Cooccurrences, arg1, arg2, arg3)
get_answer(answer)
end
# Returns statistically significant co-occurrences of the input word.
# However, it searches in the unrestricted version of the co-occurrences
# table as in the Cooccurrences services,
# which means significantly longer wait times.
def cooccurrences_all(word, sign, limit = 10)
check_params(word, sign, limit)
arg1 = ['Wort', word]
arg2 = ['Mindestsignifikanz', sign]
arg3 = ['Limit', limit]
answer = query(@cl_CooccurrencesAll, arg1, arg2, arg3)
get_answer(answer)
end
# Returns the intersection of the co-occurrences of the two given words.
# The result set is ordered according to the sum of the significances
# in descending order. Note that due to the join involved,
# this may take some time.
#--
# let's call it intersection, not kookurrenzschnitt
# the service is internal (INTERN) and requires additional credentials
# Attempts to find suitable words given a pattern as word parameter,
# a word length and the number of words to find at max (limit),
# e.g. <tt>API#crossword('%uto', 4)</tt> would find 24 results and
# return them as an array: <tt>[Auto, Auto, ...]</tt>:
# api.crossword('%uto') => ["Auto", "Auto", ...]
# SQL like syntax is used for pattern (<tt>%</tt> for an arbitrary string,
# <tt>_</tt> for a single character).
#
# Note: Umlaute will count as one character
#
#--
# Let's keep all public method names in English:
# kreuzwortraetsel => crossword.
def crossword(word, word_length, limit = 10)
check_params(word, word_length, limit)
arg1 = ['Wort', word ]
arg2 = ['Wortlaenge', word_length]
arg3 = ['Limit', limit]
answer = query(@cl_Kreuzwortraetsel, arg1, arg2, arg3)
get_answer(answer)
end
private
# Main query method, it invokes the soap engine.
# It combines all the data to one SOAP request and gets the answer.
# <args> contains an array [[key1, value1], [key2, value2], [key3, value3]]
# with keys and values for the soap query.
def query(cl, *args)
    # WSDL is disabled since calling the server for wsdl can take too long.
v = []
body = {
'urn:objRequestParameters' => {
'urn:corpus' => 'de',
'urn:parameters' => {
'urn:dataVectors' => v
}
}
}
# _args_ is an Array of arrays with keys and values
    # Each [key, value] pair becomes one data row of the request;
    # typical keys are 'Wort', 'Wortart'/'Mindestsignifikanz' and 'Limit'.
args.each do |key, val|
v << {'dat:dataRow' => [key, val]}
end
begin
resp = cl.call(:execute, {:message => body})
rescue => e
raise(WLAPI::ExternalError, e)
end
doc = Document.new(resp.to_xml)
warn(doc) if $DEBUG
doc
end
# This method extracts valuable data from the XML structure
# of the soap response. It returns an array with extracted xml text nodes
# or nil, if the service provided no answer.
def get_answer(doc, mod='')
result = []
# The path seems to be weird, because the namespaces change incrementally
# in the output, so I don't want to treat it here.
    # A modifier is needed because the synonyms service provides duplicate values.
XPath.each(doc, "//result/*/*#{mod}") do |el|
warn(el.text) if $DEBUG
result << el.text
end
result.any? ? result : nil
end
def check_params(*args)
m = caller(1).first.match(/^.+`(.*)'$/)[1]
num_of_args = self.method(m.to_sym).arity
messages = []
    # Arity can be negative => .abs.
case num_of_args.abs
when 1
messages << msg(args[0], m, 'String') unless args[0].is_a?(String)
when 2
messages << msg(args[0], m, 'String') unless args[0].is_a?(String)
messages << msg(args[1], m, 'Numeric') unless args[1].is_a?(Numeric)
when 3
messages << msg(args[0], m, 'String') unless args[0].is_a?(String)
unless args[1].is_a?(String) || args[1].is_a?(Numeric)
messages << msg(args[1], m, 'String or Numeric')
end
messages << msg(args[2], m, 'Numeric') unless args[2].is_a?(Numeric)
end
if messages.any?
fail WLAPI::UserError, messages.join("\n")
end
end
def msg(arg, meth, cls)
"Argument <#{arg}> for the method <#{meth}> should be a <#{cls}>, "\
"not <#{arg.class}>!"
end
end # class API
|
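A minimal usage sketch for the WLAPI services shown above. The constructor call is an assumption (client setup is handled elsewhere in the gem and is not part of this record); the words and limits are arbitrary examples.

  api = WLAPI::API.new                           # assumed entry point
  api.crossword('%uto', 4, 30)                   # SQL-like pattern, word length, limit
  api.right_collocation_finder('Auto', 'V', 10)  # verbs occurring to the right of 'Auto'
  api.cooccurrences('Haus', 10_000, 5)           # word, minimum significance, limit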
CocoaPods/Xcodeproj | lib/xcodeproj/scheme.rb | Xcodeproj.XCScheme.save_as | ruby | def save_as(project_path, name, shared = true)
scheme_folder_path = if shared
self.class.shared_data_dir(project_path)
else
self.class.user_data_dir(project_path)
end
scheme_folder_path.mkpath
scheme_path = scheme_folder_path + "#{name}.xcscheme"
@file_path = scheme_path
File.open(scheme_path, 'w') do |f|
f.write(to_s)
end
end | Serializes the current state of the object to a ".xcscheme" file.
@param [String, Pathname] project_path
The path where the ".xcscheme" file should be stored.
@param [String] name
The name of the scheme, to have ".xcscheme" appended.
@param [Boolean] shared
true => if the scheme must be a shared scheme (default value)
false => if the scheme must be a user scheme
@return [void]
@example Saving a scheme
scheme.save_as('path/to/Project.xcodeproj') #=> true | train | https://github.com/CocoaPods/Xcodeproj/blob/3be1684437a6f8e69c7836ad4c85a2b78663272f/lib/xcodeproj/scheme.rb#L310-L322 | class XCScheme
# @return [REXML::Document] the XML object that will be manipulated to save
# the scheme file after.
#
attr_reader :doc
# Create a XCScheme either from scratch or using an existing file
#
# @param [String] file_path
# The path of the existing .xcscheme file. If nil will create an empty scheme
#
def initialize(file_path = nil)
if file_path
@file_path = file_path
@doc = File.open(file_path, 'r') do |f|
REXML::Document.new(f)
end
@doc.context[:attribute_quote] = :quote
@scheme = @doc.elements['Scheme']
else
@doc = REXML::Document.new
@doc.context[:attribute_quote] = :quote
@doc << REXML::XMLDecl.new(REXML::XMLDecl::DEFAULT_VERSION, 'UTF-8')
@scheme = @doc.add_element 'Scheme'
@scheme.attributes['LastUpgradeVersion'] = Constants::LAST_UPGRADE_CHECK
@scheme.attributes['version'] = Xcodeproj::Constants::XCSCHEME_FORMAT_VERSION
self.build_action = BuildAction.new
self.test_action = TestAction.new
self.launch_action = LaunchAction.new
self.profile_action = ProfileAction.new
self.analyze_action = AnalyzeAction.new
self.archive_action = ArchiveAction.new
end
end
# Convenience method to quickly add app and test targets to a new scheme.
#
# It will add the runnable_target to the Build, Launch and Profile actions
# and the test_target to the Build and Test actions
#
# @param [Xcodeproj::Project::Object::PBXAbstractTarget] runnable_target
# The target to use for the 'Run', 'Profile' and 'Analyze' actions
#
# @param [Xcodeproj::Project::Object::PBXAbstractTarget] test_target
# The target to use for the 'Test' action
#
# @param [Boolean] launch_target
# Determines if the runnable_target is launchable.
#
def configure_with_targets(runnable_target, test_target, launch_target: false)
if runnable_target
add_build_target(runnable_target)
set_launch_target(runnable_target) if launch_target
end
if test_target
add_build_target(test_target, false) if test_target != runnable_target
add_test_target(test_target)
end
end
public
# @!group Access Action nodes
# @return [XCScheme::BuildAction]
# The Build Action associated with this scheme
#
def build_action
@build_action ||= BuildAction.new(@scheme.elements['BuildAction'])
end
# @param [XCScheme::BuildAction] action
# The Build Action to associate to this scheme
#
def build_action=(action)
@scheme.delete_element('BuildAction')
@scheme.add_element(action.xml_element)
@build_action = action
end
# @return [XCScheme::TestAction]
# The Test Action associated with this scheme
#
def test_action
@test_action ||= TestAction.new(@scheme.elements['TestAction'])
end
# @param [XCScheme::TestAction] action
# The Test Action to associate to this scheme
#
def test_action=(action)
@scheme.delete_element('TestAction')
@scheme.add_element(action.xml_element)
@test_action = action
end
# @return [XCScheme::LaunchAction]
# The Launch Action associated with this scheme
#
def launch_action
@launch_action ||= LaunchAction.new(@scheme.elements['LaunchAction'])
end
# @param [XCScheme::LaunchAction] action
# The Launch Action to associate to this scheme
#
def launch_action=(action)
@scheme.delete_element('LaunchAction')
@scheme.add_element(action.xml_element)
@launch_action = action
end
# @return [XCScheme::ProfileAction]
# The Profile Action associated with this scheme
#
def profile_action
@profile_action ||= ProfileAction.new(@scheme.elements['ProfileAction'])
end
# @param [XCScheme::ProfileAction] action
# The Profile Action to associate to this scheme
#
def profile_action=(action)
@scheme.delete_element('ProfileAction')
@scheme.add_element(action.xml_element)
@profile_action = action
end
# @return [XCScheme::AnalyzeAction]
# The Analyze Action associated with this scheme
#
def analyze_action
@analyze_action ||= AnalyzeAction.new(@scheme.elements['AnalyzeAction'])
end
# @param [XCScheme::AnalyzeAction] action
# The Analyze Action to associate to this scheme
#
def analyze_action=(action)
@scheme.delete_element('AnalyzeAction')
@scheme.add_element(action.xml_element)
@analyze_action = action
end
# @return [XCScheme::ArchiveAction]
# The Archive Action associated with this scheme
#
def archive_action
@archive_action ||= ArchiveAction.new(@scheme.elements['ArchiveAction'])
end
# @param [XCScheme::ArchiveAction] action
# The Archive Action to associate to this scheme
#
def archive_action=(action)
@scheme.delete_element('ArchiveAction')
@scheme.add_element(action.xml_element)
@archive_action = action
end
# @!group Target methods
# Add a target to the list of targets to build in the build action.
#
# @param [Xcodeproj::Project::Object::AbstractTarget] build_target
# A target used by scheme in the build step.
#
# @param [Bool] build_for_running
# Whether to build this target in the launch action. Often false for test targets.
#
def add_build_target(build_target, build_for_running = true)
entry = BuildAction::Entry.new(build_target)
entry.build_for_testing = true
entry.build_for_running = build_for_running
entry.build_for_profiling = build_for_running
entry.build_for_archiving = build_for_running
entry.build_for_analyzing = build_for_running
build_action.add_entry(entry)
end
# Add a target to the list of targets to build in the build action.
#
# @param [Xcodeproj::Project::Object::AbstractTarget] test_target
# A target used by scheme in the test step.
#
def add_test_target(test_target)
testable = TestAction::TestableReference.new(test_target)
test_action.add_testable(testable)
end
# Sets a runnable target to be the target of the launch action of the scheme.
#
# @param [Xcodeproj::Project::Object::AbstractTarget] build_target
# A target used by scheme in the launch step.
#
def set_launch_target(build_target)
launch_runnable = BuildableProductRunnable.new(build_target, 0)
launch_action.buildable_product_runnable = launch_runnable
profile_runnable = BuildableProductRunnable.new(build_target)
profile_action.buildable_product_runnable = profile_runnable
macro_exp = MacroExpansion.new(build_target)
test_action.add_macro_expansion(macro_exp)
end
# @!group Class methods
#-------------------------------------------------------------------------#
# Share a User Scheme. Basically this method move the xcscheme file from
# the xcuserdata folder to xcshareddata folder.
#
# @param [String] project_path
# Path of the .xcodeproj folder.
#
# @param [String] scheme_name
# The name of scheme that will be shared.
#
# @param [String] user
# The user name that have the scheme.
#
def self.share_scheme(project_path, scheme_name, user = nil)
to_folder = shared_data_dir(project_path)
to_folder.mkpath
to = to_folder + "#{scheme_name}.xcscheme"
from = user_data_dir(project_path, user) + "#{scheme_name}.xcscheme"
FileUtils.mv(from, to)
end
# @return [Pathname]
#
def self.shared_data_dir(project_path)
project_path = Pathname.new(project_path)
project_path + 'xcshareddata/xcschemes'
end
# @return [Pathname]
#
def self.user_data_dir(project_path, user = nil)
project_path = Pathname.new(project_path)
user ||= ENV['USER']
project_path + "xcuserdata/#{user}.xcuserdatad/xcschemes"
end
public
# @!group Serialization
#-------------------------------------------------------------------------#
# Serializes the current state of the object to a String.
#
# @note The goal of the string representation is to match Xcode output as
    #       closely as possible to aid comparison.
#
# @return [String] the XML string value of the current state of the object.
#
def to_s
formatter = XMLFormatter.new(2)
formatter.compact = false
out = ''
formatter.write(@doc, out)
out.gsub!("<?xml version='1.0' encoding='UTF-8'?>", '<?xml version="1.0" encoding="UTF-8"?>')
out << "\n"
out
end
# Serializes the current state of the object to a ".xcscheme" file.
#
# @param [String, Pathname] project_path
# The path where the ".xcscheme" file should be stored.
#
# @param [String] name
# The name of the scheme, to have ".xcscheme" appended.
#
# @param [Boolean] shared
# true => if the scheme must be a shared scheme (default value)
# false => if the scheme must be a user scheme
#
# @return [void]
#
# @example Saving a scheme
# scheme.save_as('path/to/Project.xcodeproj') #=> true
#
# Serializes the current state of the object to the original ".xcscheme"
    # file this XCScheme was created from, overwriting the original file.
#
# Requires that the XCScheme object was initialized using a file path.
#
def save!
raise Informative, 'This XCScheme object was not initialized ' \
'using a file path. Use save_as instead.' unless @file_path
File.open(@file_path, 'w') do |f|
f.write(to_s)
end
end
#-------------------------------------------------------------------------#
# XML formatter which closely mimics the output generated by Xcode.
#
class XMLFormatter < REXML::Formatters::Pretty
def write_element(node, output)
@indentation = 3
output << ' ' * @level
output << "<#{node.expanded_name}"
@level += @indentation
node.context = node.parent.context # HACK: to ensure strings are properly quoted
node.attributes.each_attribute do |attr|
output << "\n"
output << ' ' * @level
output << attr.to_string.sub(/=/, ' = ')
end unless node.attributes.empty?
output << '>'
output << "\n"
node.children.each do |child|
next if child.is_a?(REXML::Text) && child.to_s.strip.length == 0
write(child, output)
output << "\n"
end
@level -= @indentation
output << ' ' * @level
output << "</#{node.expanded_name}>"
end
end
#-------------------------------------------------------------------------#
end
|
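A short sketch of driving the scheme API above end to end; `project`, `app_target` and `test_target` are assumed to come from an Xcodeproj::Project and are not defined in this record.

  scheme = Xcodeproj::XCScheme.new
  scheme.configure_with_targets(app_target, test_target, launch_target: true)
  scheme.save_as(project.path, 'MyApp')   # shared by default: xcshareddata/xcschemes/MyApp.xcscheme
  scheme.save!                            # later saves go back to the same file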
xing/beetle | lib/beetle/client.rb | Beetle.Client.rpc | ruby | def rpc(message_name, data=nil, opts={})
message_name = validated_message_name(message_name)
publisher.rpc(message_name, data, opts)
end | sends the given message to one of the configured servers and returns the result of running the associated handler.
unexpected behavior can ensue if the message gets routed to more than one recipient, so be careful. | train | https://github.com/xing/beetle/blob/42322edc78e6e181b3b9ee284c3b00bddfc89108/lib/beetle/client.rb#L201-L204 | class Client
include Logging
# the AMQP servers available for publishing
attr_reader :servers
# additional AMQP servers available for subscribing. useful for migration scenarios.
attr_reader :additional_subscription_servers
# an options hash for the configured exchanges
attr_reader :exchanges
# an options hash for the configured queues
attr_reader :queues
# an options hash for the configured queue bindings
attr_reader :bindings
# an options hash for the configured messages
attr_reader :messages
# the deduplication store to use for this client
attr_reader :deduplication_store
# accessor for the beetle configuration
attr_reader :config
# create a fresh Client instance from a given configuration object
def initialize(config = Beetle.config)
@config = config
@exchanges = {}
@queues = {}
@messages = {}
@bindings = {}
@deduplication_store = DeduplicationStore.new(config)
load_brokers_from_config
end
# register an exchange with the given _name_ and a set of _options_:
# [<tt>:type</tt>]
# the type option will be overwritten and always be <tt>:topic</tt>, beetle does not allow fanout exchanges
# [<tt>:durable</tt>]
# the durable option will be overwritten and always be true. this is done to ensure that exchanges are never deleted
def register_exchange(name, options={})
name = name.to_s
raise ConfigurationError.new("exchange #{name} already configured") if exchanges.include?(name)
exchanges[name] = options.symbolize_keys.merge(:type => :topic, :durable => true, :queues => [])
end
# register a durable, non passive, non auto_deleted queue with the given _name_ and an _options_ hash:
# [<tt>:exchange</tt>]
# the name of the exchange this queue will be bound to (defaults to the name of the queue)
# [<tt>:key</tt>]
# the binding key (defaults to the name of the queue)
# [<tt>:lazy</tt>]
# whether the queue should use lazy mode (defaults to <tt>config.lazy_queues_enabled</tt>)
# [<tt>:dead_lettering</tt>]
# whether the queue should use dead lettering (defaults to <tt>config.dead_lettering_enabled</tt>)
# automatically registers the specified exchange if it hasn't been registered yet
def register_queue(name, options={})
name = name.to_s
raise ConfigurationError.new("queue #{name} already configured") if queues.include?(name)
opts = {
:exchange => name, :key => name, :auto_delete => false, :amqp_name => name,
:lazy => config.lazy_queues_enabled, :dead_lettering => config.dead_lettering_enabled
}.merge!(options.symbolize_keys)
opts.merge! :durable => true, :passive => false, :exclusive => false
exchange = opts.delete(:exchange).to_s
key = opts.delete(:key)
queues[name] = opts
register_binding(name, :exchange => exchange, :key => key)
end
# register an additional binding for an already configured queue _name_ and an _options_ hash:
# [<tt>:exchange</tt>]
# the name of the exchange this queue will be bound to (defaults to the name of the queue)
# [<tt>:key</tt>]
# the binding key (defaults to the name of the queue)
# automatically registers the specified exchange if it hasn't been registered yet
def register_binding(queue_name, options={})
name = queue_name.to_s
opts = options.symbolize_keys
exchange = (opts[:exchange] || name).to_s
key = (opts[:key] || name).to_s
(bindings[name] ||= []) << {:exchange => exchange, :key => key}
register_exchange(exchange) unless exchanges.include?(exchange)
queues = exchanges[exchange][:queues]
queues << name unless queues.include?(name)
end
# register a persistent message with a given _name_ and an _options_ hash:
# [<tt>:key</tt>]
# specifies the routing key for message publishing (defaults to the name of the message)
# [<tt>:ttl</tt>]
# specifies the time interval after which the message will be silently dropped (seconds).
# defaults to Message::DEFAULT_TTL.
# [<tt>:redundant</tt>]
# specifies whether the message should be published redundantly (defaults to false)
def register_message(message_name, options={})
name = message_name.to_s
raise ConfigurationError.new("message #{name} already configured") if messages.include?(name)
opts = {:exchange => name, :key => name}.merge!(options.symbolize_keys)
opts.merge! :persistent => true
exchange = opts[:exchange] = opts[:exchange].to_s
register_exchange(exchange) unless exchanges.include?(exchange)
messages[name] = opts
end
# registers a handler for a list of queues (which must have been registered
# previously). The handler will be invoked when any messages arrive on the queue.
#
# Examples:
# register_handler([:foo, :bar], :timeout => 10.seconds) { |message| puts "received #{message}" }
#
# on_error = lambda{ puts "something went wrong with baz" }
# on_failure = lambda{ puts "baz has finally failed" }
#
# register_handler(:baz, :exceptions => 1, :errback => on_error, :failback => on_failure) { puts "received baz" }
#
# register_handler(:bar, BarHandler)
#
# For details on handler classes see class Beetle::Handler
def register_handler(queues, *args, &block)
queues = determine_queue_names(Array(queues))
opts = args.last.is_a?(Hash) ? args.pop : {}
handler = args.shift
raise ArgumentError.new("too many arguments for handler registration") unless args.empty?
subscriber.register_handler(queues, opts, handler, &block)
end
# this is a convenience method to configure exchanges, queues, messages and handlers
# with a common set of options. allows one to call all register methods without the
# register_ prefix. returns self. if the passed in block has no parameters, the block
# will be evaluated in the context of the client configurator.
#
# Example: (block with config argument)
# client = Beetle.client.new.configure :exchange => :foobar do |config|
# config.queue :q1, :key => "foo"
# config.queue :q2, :key => "bar"
# config.message :foo
# config.message :bar
# config.handler :q1 { puts "got foo"}
# config.handler :q2 { puts "got bar"}
# end
#
# Example: (block without config argument)
# client = Beetle.client.new.configure :exchange => :foobar do
# queue :q1, :key => "foo"
# queue :q2, :key => "bar"
# message :foo
# message :bar
# handler :q1 { puts "got foo"}
# handler :q2 { puts "got bar"}
# end
#
def configure(options={}, &block)
configurator = Configurator.new(self, options)
if block.arity == 1
yield configurator
else
configurator.instance_eval(&block)
end
self
end
# publishes a message. the given options hash is merged with options given on message registration.
# WARNING: empty message bodies can lead to problems.
def publish(message_name, data=nil, opts={})
message_name = validated_message_name(message_name)
publisher.publish(message_name, data, opts)
end
# sends the given message to one of the configured servers and returns the result of running the associated handler.
#
# unexpected behavior can ensue if the message gets routed to more than one recipient, so be careful.
# purges the given queues on all configured servers
def purge(*queues)
queues = determine_queue_names(queues)
publisher.purge(queues)
end
# declares all queues, binds them and creates/updates all policies
def setup_queues_and_policies(queues)
publisher.setup_queues_and_policies(queues)
end
# start listening to all registered queues. Calls #listen_queues internally
# runs the given block before entering the eventmachine loop.
def listen(_deprecated_messages=nil, &block)
raise Error.new("Beetle::Client#listen no longer works with arguments. Please use #listen_queues(['queue1', 'queue2']) instead") if _deprecated_messages
listen_queues(&block)
end
# start listening to a list of queues (default to all registered queues).
# runs the given block before entering the eventmachine loop.
def listen_queues(*queues, &block)
queues = determine_queue_names(queues)
subscriber.listen_queues(queues, &block)
end
  # stops the subscriber by closing all channels and connections. note this is an
# asynchronous operation due to the underlying eventmachine mechanism.
def stop_listening
@subscriber.stop! if @subscriber
end
# disconnects the publisher from all servers it's currently connected to
def stop_publishing
@publisher.stop if @publisher
end
# pause listening on a list of queues
def pause_listening(*queues)
queues = determine_queue_names(queues)
subscriber.pause_listening(queues)
end
# resume listening on a list of queues
def resume_listening(*queues)
queues = determine_queue_names(queues)
subscriber.resume_listening(queues)
end
# traces queues without consuming them. useful for debugging message flow.
def trace(queue_names=self.queues.keys, tracer=nil, &block)
queues_to_trace = self.queues.slice(*queue_names)
queues_to_trace.each do |name, opts|
opts.merge! :durable => false, :auto_delete => true, :amqp_name => queue_name_for_tracing(opts[:amqp_name])
end
tracer ||=
lambda do |msg|
puts "-----===== new message =====-----"
puts "SERVER: #{msg.server}"
puts "HEADER: #{msg.header.attributes[:headers].inspect}"
puts "EXCHANGE: #{msg.header.method.exchange}"
puts "KEY: #{msg.header.method.routing_key}"
puts "MSGID: #{msg.msg_id}"
puts "DATA: #{msg.data}"
end
register_handler(queue_names){|msg| tracer.call msg }
listen_queues(queue_names, &block)
end
# evaluate the ruby files matching the given +glob+ pattern in the context of the client instance.
def load(glob)
b = binding
Dir[glob].each do |f|
eval(File.read(f), b, f)
end
end
def reset
stop_publishing
stop_listening
config.reload
load_brokers_from_config
rescue Exception => e
logger.warn("Error resetting client")
logger.warn(e)
ensure
@publisher = nil
@subscriber = nil
end
private
def determine_queue_names(queues)
if queues.empty?
self.queues.keys
else
queues.flatten.map{|q| validated_queue_name(q)}
end
end
def validated_queue_name(queue_name)
queue_name = queue_name.to_s
raise UnknownQueue.new("unknown queue #{queue_name}") unless queues.include?(queue_name)
queue_name
end
def validated_message_name(message_name)
message_name = message_name.to_s
raise UnknownMessage.new("unknown message #{message_name}") unless messages.include?(message_name)
message_name
end
class Configurator #:nodoc:all
def initialize(client, options={})
@client = client
@options = options
end
def method_missing(method, *args, &block)
super unless %w(exchange queue binding message handler).include?(method.to_s)
options = @options.merge(args.last.is_a?(Hash) ? args.pop : {})
@client.send("register_#{method}", *(args+[options]), &block)
end
    # need to override binding explicitly
def binding(*args, &block)
method_missing(:binding, *args, &block)
end
end
def publisher
@publisher ||= Publisher.new(self)
end
def subscriber
@subscriber ||= Subscriber.new(self)
end
def queue_name_for_tracing(queue)
"trace-#{queue}-#{Beetle.hostname}-#{$$}"
end
def load_brokers_from_config
@servers = config.servers.split(/ *, */)
@additional_subscription_servers = config.additional_subscription_servers.split(/ *, */)
end
end
|
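A minimal configuration sketch for the client above, assuming reachable RabbitMQ and Redis servers from Beetle.config; names are arbitrary.

  client = Beetle::Client.new
  client.configure do
    queue   :echo
    message :echo
    handler(:echo) { |msg| msg.data.upcase }
  end
  client.publish(:echo, 'hello')       # fire-and-forget publish
  reply = client.rpc(:echo, 'hello')   # waits for the handler result; return shape comes from Publisher#rpc (not shown here)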
hashicorp/vault-ruby | lib/vault/api/sys/audit.rb | Vault.Sys.audits | ruby | def audits
json = client.get("/v1/sys/audit")
json = json[:data] if json[:data]
return Hash[*json.map do |k,v|
[k.to_s.chomp("/").to_sym, Audit.decode(v)]
end.flatten]
end | List all audits for the vault.
@example
Vault.sys.audits #=> { :file => #<Audit> }
@return [Hash<Symbol, Audit>] | train | https://github.com/hashicorp/vault-ruby/blob/02f0532a802ba1a2a0d8703a4585dab76eb9d864/lib/vault/api/sys/audit.rb#L28-L34 | class Sys
# List all audits for the vault.
#
# @example
# Vault.sys.audits #=> { :file => #<Audit> }
#
# @return [Hash<Symbol, Audit>]
# Enable a particular audit. Note: the +options+ depend heavily on the
# type of audit being enabled. Please refer to audit-specific documentation
    # for which options need to be set.
#
# @example
# Vault.sys.enable_audit("/file-audit", "file", "File audit", path: "/path/on/disk") #=> true
#
# @param [String] path
# the path to mount the audit
# @param [String] type
# the type of audit to enable
# @param [String] description
# a human-friendly description of the audit backend
# @param [Hash] options
# audit-specific options
#
# @return [true]
def enable_audit(path, type, description, options = {})
client.put("/v1/sys/audit/#{encode_path(path)}", JSON.fast_generate(
type: type,
description: description,
options: options,
))
return true
end
    # Disable a particular audit. If an audit does not exist, an error will be
# raised.
#
# @param [String] path
# the path of the audit to disable
#
# @return [true]
def disable_audit(path)
client.delete("/v1/sys/audit/#{encode_path(path)}")
return true
end
# Generates a HMAC verifier for a given input.
#
# @example
# Vault.sys.audit_hash("file-audit", "my input") #=> "hmac-sha256:30aa7de18a5e90bbc1063db91e7c387b32b9fa895977eb8c177bbc91e7d7c542"
#
# @param [String] path
# the path of the audit backend
# @param [String] input
# the input to generate a HMAC for
#
# @return [String]
def audit_hash(path, input)
json = client.post("/v1/sys/audit-hash/#{encode_path(path)}", JSON.fast_generate(input: input))
json = json[:data] if json[:data]
json[:hash]
end
end
|
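A brief sketch against the Sys API above, mirroring the inline examples; the mount name and file path are placeholders.

  Vault.sys.enable_audit('file-audit', 'file', 'File audit', path: '/var/log/vault_audit.log')
  Vault.sys.audits                                # => { :"file-audit" => #<Vault::Audit ...> }
  Vault.sys.audit_hash('file-audit', 'my input')  # => "hmac-sha256:..."
  Vault.sys.disable_audit('file-audit')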
phallguy/scorpion | lib/scorpion/stinger.rb | Scorpion.Stinger.sting! | ruby | def sting!( object )
return object unless scorpion
if object
assign_scorpion object
assign_scorpion_to_enumerable object
end
object
end | Sting an object so that it will be injected with the scorpion and use it
to resolve all dependencies.
@param [#scorpion] object to sting.
@return [object] the object that was stung. | train | https://github.com/phallguy/scorpion/blob/0bc9c1111a37e35991d48543dec88a36f16d7aee/lib/scorpion/stinger.rb#L34-L43 | module Stinger
@wrappers ||= {}
def self.wrap( instance, stinger )
return instance unless instance
klass = @wrappers[instance.class] ||=
Class.new( instance.class ) do
def initialize( instance, stinger )
@__instance__ = instance
@__stinger__ = stinger
end
def respond_to?( *args )
@__instance__.respond_to?( *args )
end
private
def method_missing( *args, &block ) # rubocop:disable Style/MethodMissing
@__stinger__.sting! @__instance__.__send__( *args, &block )
end
end
klass.new instance, stinger
end
# Sting an object so that it will be injected with the scorpion and use it
# to resolve all dependencies.
# @param [#scorpion] object to sting.
# @return [object] the object that was stung.
private
def assign_scorpion( object )
return unless object.respond_to?( :scorpion=, true )
# Only set scorpion if it hasn't been set yet.
current_scorpion = object.send :scorpion
if current_scorpion
scorpion.logger.warn I18n.translate :mixed_scorpions, scope: [:scorpion, :warnings, :messages] if current_scorpion != scorpion # rubocop:disable Metrics/LineLength
else
object.send :scorpion=, scorpion
end
end
def assign_scorpion_to_enumerable( objects )
return unless objects.respond_to? :each
# Don't eager load relations that haven't been loaded yet.
return if objects.respond_to?( :loaded? ) && !objects.loaded?
objects.each { |v| sting! v }
end
end
|
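A hypothetical sketch of how the module above is used: the including object exposes a `scorpion`, stings values it hands out, and can wrap a collaborator so method results are stung lazily. All class and variable names here are illustrative, not from the gem.

  class StungRepository
    include Scorpion::Stinger
    attr_reader :scorpion
    def initialize(scorpion)
      @scorpion = scorpion
    end
  end

  repo  = StungRepository.new(scorpion)        # `scorpion` is an assumed Scorpion instance
  repo.sting!(record)                          # assigns the scorpion to `record` (and to each element if enumerable)
  proxy = Scorpion::Stinger.wrap(scope, repo)  # results of calls forwarded to `scope` are stung too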
xing/beetle | lib/beetle/client.rb | Beetle.Client.register_binding | ruby | def register_binding(queue_name, options={})
name = queue_name.to_s
opts = options.symbolize_keys
exchange = (opts[:exchange] || name).to_s
key = (opts[:key] || name).to_s
(bindings[name] ||= []) << {:exchange => exchange, :key => key}
register_exchange(exchange) unless exchanges.include?(exchange)
queues = exchanges[exchange][:queues]
queues << name unless queues.include?(name)
end | register an additional binding for an already configured queue _name_ and an _options_ hash:
[<tt>:exchange</tt>]
the name of the exchange this queue will be bound to (defaults to the name of the queue)
[<tt>:key</tt>]
the binding key (defaults to the name of the queue)
automatically registers the specified exchange if it hasn't been registered yet | train | https://github.com/xing/beetle/blob/42322edc78e6e181b3b9ee284c3b00bddfc89108/lib/beetle/client.rb#L103-L112 | class Client
include Logging
# the AMQP servers available for publishing
attr_reader :servers
# additional AMQP servers available for subscribing. useful for migration scenarios.
attr_reader :additional_subscription_servers
# an options hash for the configured exchanges
attr_reader :exchanges
# an options hash for the configured queues
attr_reader :queues
# an options hash for the configured queue bindings
attr_reader :bindings
# an options hash for the configured messages
attr_reader :messages
# the deduplication store to use for this client
attr_reader :deduplication_store
# accessor for the beetle configuration
attr_reader :config
# create a fresh Client instance from a given configuration object
def initialize(config = Beetle.config)
@config = config
@exchanges = {}
@queues = {}
@messages = {}
@bindings = {}
@deduplication_store = DeduplicationStore.new(config)
load_brokers_from_config
end
# register an exchange with the given _name_ and a set of _options_:
# [<tt>:type</tt>]
# the type option will be overwritten and always be <tt>:topic</tt>, beetle does not allow fanout exchanges
# [<tt>:durable</tt>]
# the durable option will be overwritten and always be true. this is done to ensure that exchanges are never deleted
def register_exchange(name, options={})
name = name.to_s
raise ConfigurationError.new("exchange #{name} already configured") if exchanges.include?(name)
exchanges[name] = options.symbolize_keys.merge(:type => :topic, :durable => true, :queues => [])
end
# register a durable, non passive, non auto_deleted queue with the given _name_ and an _options_ hash:
# [<tt>:exchange</tt>]
# the name of the exchange this queue will be bound to (defaults to the name of the queue)
# [<tt>:key</tt>]
# the binding key (defaults to the name of the queue)
# [<tt>:lazy</tt>]
# whether the queue should use lazy mode (defaults to <tt>config.lazy_queues_enabled</tt>)
# [<tt>:dead_lettering</tt>]
# whether the queue should use dead lettering (defaults to <tt>config.dead_lettering_enabled</tt>)
# automatically registers the specified exchange if it hasn't been registered yet
def register_queue(name, options={})
name = name.to_s
raise ConfigurationError.new("queue #{name} already configured") if queues.include?(name)
opts = {
:exchange => name, :key => name, :auto_delete => false, :amqp_name => name,
:lazy => config.lazy_queues_enabled, :dead_lettering => config.dead_lettering_enabled
}.merge!(options.symbolize_keys)
opts.merge! :durable => true, :passive => false, :exclusive => false
exchange = opts.delete(:exchange).to_s
key = opts.delete(:key)
queues[name] = opts
register_binding(name, :exchange => exchange, :key => key)
end
# register an additional binding for an already configured queue _name_ and an _options_ hash:
# [<tt>:exchange</tt>]
# the name of the exchange this queue will be bound to (defaults to the name of the queue)
# [<tt>:key</tt>]
# the binding key (defaults to the name of the queue)
# automatically registers the specified exchange if it hasn't been registered yet
# register a persistent message with a given _name_ and an _options_ hash:
# [<tt>:key</tt>]
# specifies the routing key for message publishing (defaults to the name of the message)
# [<tt>:ttl</tt>]
# specifies the time interval after which the message will be silently dropped (seconds).
# defaults to Message::DEFAULT_TTL.
# [<tt>:redundant</tt>]
# specifies whether the message should be published redundantly (defaults to false)
def register_message(message_name, options={})
name = message_name.to_s
raise ConfigurationError.new("message #{name} already configured") if messages.include?(name)
opts = {:exchange => name, :key => name}.merge!(options.symbolize_keys)
opts.merge! :persistent => true
exchange = opts[:exchange] = opts[:exchange].to_s
register_exchange(exchange) unless exchanges.include?(exchange)
messages[name] = opts
end
# registers a handler for a list of queues (which must have been registered
# previously). The handler will be invoked when any messages arrive on the queue.
#
# Examples:
# register_handler([:foo, :bar], :timeout => 10.seconds) { |message| puts "received #{message}" }
#
# on_error = lambda{ puts "something went wrong with baz" }
# on_failure = lambda{ puts "baz has finally failed" }
#
# register_handler(:baz, :exceptions => 1, :errback => on_error, :failback => on_failure) { puts "received baz" }
#
# register_handler(:bar, BarHandler)
#
# For details on handler classes see class Beetle::Handler
def register_handler(queues, *args, &block)
queues = determine_queue_names(Array(queues))
opts = args.last.is_a?(Hash) ? args.pop : {}
handler = args.shift
raise ArgumentError.new("too many arguments for handler registration") unless args.empty?
subscriber.register_handler(queues, opts, handler, &block)
end
# this is a convenience method to configure exchanges, queues, messages and handlers
# with a common set of options. allows one to call all register methods without the
# register_ prefix. returns self. if the passed in block has no parameters, the block
# will be evaluated in the context of the client configurator.
#
# Example: (block with config argument)
# client = Beetle.client.new.configure :exchange => :foobar do |config|
# config.queue :q1, :key => "foo"
# config.queue :q2, :key => "bar"
# config.message :foo
# config.message :bar
# config.handler :q1 { puts "got foo"}
# config.handler :q2 { puts "got bar"}
# end
#
# Example: (block without config argument)
# client = Beetle.client.new.configure :exchange => :foobar do
# queue :q1, :key => "foo"
# queue :q2, :key => "bar"
# message :foo
# message :bar
# handler :q1 { puts "got foo"}
# handler :q2 { puts "got bar"}
# end
#
def configure(options={}, &block)
configurator = Configurator.new(self, options)
if block.arity == 1
yield configurator
else
configurator.instance_eval(&block)
end
self
end
# publishes a message. the given options hash is merged with options given on message registration.
# WARNING: empty message bodies can lead to problems.
def publish(message_name, data=nil, opts={})
message_name = validated_message_name(message_name)
publisher.publish(message_name, data, opts)
end
# sends the given message to one of the configured servers and returns the result of running the associated handler.
#
# unexpected behavior can ensue if the message gets routed to more than one recipient, so be careful.
def rpc(message_name, data=nil, opts={})
message_name = validated_message_name(message_name)
publisher.rpc(message_name, data, opts)
end
# purges the given queues on all configured servers
def purge(*queues)
queues = determine_queue_names(queues)
publisher.purge(queues)
end
# declares all queues, binds them and creates/updates all policies
def setup_queues_and_policies(queues)
publisher.setup_queues_and_policies(queues)
end
# start listening to all registered queues. Calls #listen_queues internally
# runs the given block before entering the eventmachine loop.
def listen(_deprecated_messages=nil, &block)
raise Error.new("Beetle::Client#listen no longer works with arguments. Please use #listen_queues(['queue1', 'queue2']) instead") if _deprecated_messages
listen_queues(&block)
end
# start listening to a list of queues (default to all registered queues).
# runs the given block before entering the eventmachine loop.
def listen_queues(*queues, &block)
queues = determine_queue_names(queues)
subscriber.listen_queues(queues, &block)
end
  # stops the subscriber by closing all channels and connections. note this is an
# asynchronous operation due to the underlying eventmachine mechanism.
def stop_listening
@subscriber.stop! if @subscriber
end
# disconnects the publisher from all servers it's currently connected to
def stop_publishing
@publisher.stop if @publisher
end
# pause listening on a list of queues
def pause_listening(*queues)
queues = determine_queue_names(queues)
subscriber.pause_listening(queues)
end
# resume listening on a list of queues
def resume_listening(*queues)
queues = determine_queue_names(queues)
subscriber.resume_listening(queues)
end
# traces queues without consuming them. useful for debugging message flow.
def trace(queue_names=self.queues.keys, tracer=nil, &block)
queues_to_trace = self.queues.slice(*queue_names)
queues_to_trace.each do |name, opts|
opts.merge! :durable => false, :auto_delete => true, :amqp_name => queue_name_for_tracing(opts[:amqp_name])
end
tracer ||=
lambda do |msg|
puts "-----===== new message =====-----"
puts "SERVER: #{msg.server}"
puts "HEADER: #{msg.header.attributes[:headers].inspect}"
puts "EXCHANGE: #{msg.header.method.exchange}"
puts "KEY: #{msg.header.method.routing_key}"
puts "MSGID: #{msg.msg_id}"
puts "DATA: #{msg.data}"
end
register_handler(queue_names){|msg| tracer.call msg }
listen_queues(queue_names, &block)
end
# evaluate the ruby files matching the given +glob+ pattern in the context of the client instance.
def load(glob)
b = binding
Dir[glob].each do |f|
eval(File.read(f), b, f)
end
end
def reset
stop_publishing
stop_listening
config.reload
load_brokers_from_config
rescue Exception => e
logger.warn("Error resetting client")
logger.warn(e)
ensure
@publisher = nil
@subscriber = nil
end
private
def determine_queue_names(queues)
if queues.empty?
self.queues.keys
else
queues.flatten.map{|q| validated_queue_name(q)}
end
end
def validated_queue_name(queue_name)
queue_name = queue_name.to_s
raise UnknownQueue.new("unknown queue #{queue_name}") unless queues.include?(queue_name)
queue_name
end
def validated_message_name(message_name)
message_name = message_name.to_s
raise UnknownMessage.new("unknown message #{message_name}") unless messages.include?(message_name)
message_name
end
class Configurator #:nodoc:all
def initialize(client, options={})
@client = client
@options = options
end
def method_missing(method, *args, &block)
super unless %w(exchange queue binding message handler).include?(method.to_s)
options = @options.merge(args.last.is_a?(Hash) ? args.pop : {})
@client.send("register_#{method}", *(args+[options]), &block)
end
    # need to override binding explicitly
def binding(*args, &block)
method_missing(:binding, *args, &block)
end
end
def publisher
@publisher ||= Publisher.new(self)
end
def subscriber
@subscriber ||= Subscriber.new(self)
end
def queue_name_for_tracing(queue)
"trace-#{queue}-#{Beetle.hostname}-#{$$}"
end
def load_brokers_from_config
@servers = config.servers.split(/ *, */)
@additional_subscription_servers = config.additional_subscription_servers.split(/ *, */)
end
end
|
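A small sketch focused on the queue and binding helpers documented above; exchange, queue and routing key names are arbitrary.

  client = Beetle::Client.new
  client.register_queue(:invoices, key: 'invoice.created')                   # implicit binding on exchange 'invoices'
  client.register_binding(:invoices, exchange: :billing, key: 'invoice.#')   # additional binding on another exchange
  client.register_message(:invoice_created, exchange: :billing, key: 'invoice.created')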
dagrz/nba_stats | lib/nba_stats/stats/box_score_advanced.rb | NbaStats.BoxScoreAdvanced.box_score_advanced | ruby | def box_score_advanced(
game_id,
range_type=0,
start_period=0,
end_period=0,
start_range=0,
end_range=0
)
NbaStats::Resources::BoxScoreAdvanced.new(
get(BOX_SCORE_ADVANCED_PATH, {
:GameID => game_id,
:RangeType => range_type,
:StartPeriod => start_period,
:EndPeriod => end_period,
:StartRange => start_range,
:EndRange => end_range
})
)
end | Calls the boxscoreadvanced API and returns a BoxScoreAdvanced resource.
@param game_id [String]
@param range_type [Integer]
@param start_period [Integer]
@param end_period [Integer]
@param start_range [Integer]
@param end_range [Integer]
@return [NbaStats::Resources::BoxScoreAdvanced] | train | https://github.com/dagrz/nba_stats/blob/d6fe6cf81f74a2ce7a054aeec5e9db59a6ec42aa/lib/nba_stats/stats/box_score_advanced.rb#L19-L37 | module BoxScoreAdvanced
# The path of the boxscoreadvanced API
BOX_SCORE_ADVANCED_PATH = '/stats/boxscoreadvanced'
# Calls the boxscoreadvanced API and returns a BoxScoreAdvanced resource.
#
# @param game_id [String]
# @param range_type [Integer]
# @param start_period [Integer]
# @param end_period [Integer]
# @param start_range [Integer]
    # @param end_range [Integer]
# @return [NbaStats::Resources::BoxScoreAdvanced]
end # BoxScoreAdvanced
|
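A usage sketch, assuming the gem's client object mixes this module in and supplies #get (the client class is not part of this record); the game id is a placeholder.

  client = NbaStats::Client.new                    # assumed entry point
  box    = client.box_score_advanced('0021400001') # => NbaStats::Resources::BoxScoreAdvanced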
kmewhort/similarity_tree | lib/similarity_tree/similarity_tree.rb | SimilarityTree.SimilarityTree.prune | ruby | def prune(nodes)
nodes.each do |node|
node.parent.children.reject!{|n| n == node} if (node != @root) && (node.diff_score < @score_threshold)
end
end | prune away nodes that don't meet the configured score threshold | train | https://github.com/kmewhort/similarity_tree/blob/d688c6d86e2a5a81ff71e81ef805c9af6cb8c8e7/lib/similarity_tree/similarity_tree.rb#L75-L79 | class SimilarityTree
# initialize/build the tree hierarchy from an existing similarity matrix
def initialize(root_id, similarity_matrix, score_threshold = 0)
@nodes = similarity_matrix.map {|key, row| Node.new(key, 0)}
@root = @nodes.find {|n| n.id == root_id}
@root.diff_score = nil
@similarity_matrix = similarity_matrix
@score_threshold = score_threshold
end
# build the tree and return the root node
def build
build_tree
@root
end
private
def build_tree
tree = @root
flat = [@root]
# for each non-root node
@nodes.delete_if{|n| n == @root}.map do |n|
# find the best match to the nodes already in the tree
closest_diff_score = 0
closest = nil
flat.each do |m|
diff_score = @similarity_matrix[n.id][m.id]
if closest.nil? || (diff_score > closest_diff_score)
closest_diff_score = diff_score
closest = m
end
end
      # if the closest match is the root node, or if the closest match's diff score with its parent is stronger
# than between the present node and that parent, add as a child of the match
if (closest == @root) || (closest.diff_score >= @similarity_matrix[n.id][closest.parent.id])
n.parent = closest
closest.children << n
n.diff_score = @similarity_matrix[n.id][closest.id]
# else, if the new node is more similar to the parent, rotate so that the existing node becomes the child
else
# place children with the closest matching of the two
closest.children.dup.each do |child|
if @similarity_matrix[child.id][n.id] > child.diff_score
child.parent = n
closest.children.delete_if{|child_i| child_i == child }
n.children << child
child.diff_score = @similarity_matrix[child.id][n.id]
end
end
# connect the new node to the parent
n.parent = closest.parent
n.parent.children << n
n.diff_score = @similarity_matrix[n.id][n.parent.id]
# add the existing node as a child
closest.parent = n
n.parent.children.delete_if{|child_i| child_i == closest}
n.children << closest
closest.diff_score = @similarity_matrix[closest.id][n.id]
end
flat << n
end
prune(flat)
end
# prune away nodes that don't meet the configured score threshold
end
|
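A minimal sketch of building a tree from a similarity matrix; the matrix is a hash of hashes keyed by node id, which is the shape initialize and build_tree above index into.

  matrix = {
    1 => { 1 => 1.0, 2 => 0.8, 3 => 0.2 },
    2 => { 1 => 0.8, 2 => 1.0, 3 => 0.4 },
    3 => { 1 => 0.2, 2 => 0.4, 3 => 1.0 }
  }
  root = SimilarityTree::SimilarityTree.new(1, matrix, 0.1).build
  root.children.each { |child| puts "#{child.id} (score #{child.diff_score})" }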
chaintope/bitcoinrb | lib/bitcoin/tx.rb | Bitcoin.Tx.sighash_for_input | ruby | def sighash_for_input(input_index, output_script, hash_type: SIGHASH_TYPE[:all],
sig_version: :base, amount: nil, skip_separator_index: 0)
raise ArgumentError, 'input_index must be specified.' unless input_index
raise ArgumentError, 'does not exist input corresponding to input_index.' if input_index >= inputs.size
raise ArgumentError, 'script_pubkey must be specified.' unless output_script
raise ArgumentError, 'unsupported sig version specified.' unless SIG_VERSION.include?(sig_version)
if sig_version == :witness_v0 || Bitcoin.chain_params.fork_chain?
raise ArgumentError, 'amount must be specified.' unless amount
sighash_for_witness(input_index, output_script, hash_type, amount, skip_separator_index)
else
sighash_for_legacy(input_index, output_script, hash_type)
end
end | get signature hash
@param [Integer] input_index input index.
@param [Integer] hash_type signature hash type
@param [Bitcoin::Script] output_script script pubkey or script code. if script pubkey is P2WSH, set witness script to this.
@param [Integer] amount bitcoin amount locked in input. required for witness input only.
@param [Integer] skip_separator_index If output_script is P2WSH and output_script contains any OP_CODESEPARATOR,
the script code needed is the witnessScript with everything removed up to and including the last executed OP_CODESEPARATOR before the signature-checking opcode being executed.
MAX_STANDARD_VERSION = 2
# The maximum weight for transactions we're willing to relay/mine
MAX_STANDARD_TX_WEIGHT = 400000
MARKER = 0x00
FLAG = 0x01
attr_accessor :version
attr_accessor :marker
attr_accessor :flag
attr_reader :inputs
attr_reader :outputs
attr_accessor :lock_time
def initialize
@inputs = []
@outputs = []
@version = 1
@lock_time = 0
end
alias_method :in, :inputs
alias_method :out, :outputs
def self.parse_from_payload(payload, non_witness: false)
buf = payload.is_a?(String) ? StringIO.new(payload) : payload
tx = new
tx.version = buf.read(4).unpack('V').first
in_count = Bitcoin.unpack_var_int_from_io(buf)
witness = false
if in_count.zero? && !non_witness
tx.marker = 0
tx.flag = buf.read(1).unpack('c').first
if tx.flag.zero?
buf.pos -= 1
else
in_count = Bitcoin.unpack_var_int_from_io(buf)
witness = true
end
end
in_count.times do
tx.inputs << TxIn.parse_from_payload(buf)
end
out_count = Bitcoin.unpack_var_int_from_io(buf)
out_count.times do
tx.outputs << TxOut.parse_from_payload(buf)
end
if witness
in_count.times do |i|
tx.inputs[i].script_witness = Bitcoin::ScriptWitness.parse_from_payload(buf)
end
end
tx.lock_time = buf.read(4).unpack('V').first
tx
end
def hash
to_payload.bth.to_i(16)
end
def tx_hash
Bitcoin.double_sha256(serialize_old_format).bth
end
def txid
tx_hash.rhex
end
def witness_hash
Bitcoin.double_sha256(to_payload).bth
end
def wtxid
witness_hash.rhex
end
# get the witness commitment of coinbase tx.
# if this tx does not coinbase or not have commitment, return nil.
def witness_commitment
return nil unless coinbase_tx?
outputs.each do |output|
commitment = output.script_pubkey.witness_commitment
return commitment if commitment
end
nil
end
def to_payload
witness? ? serialize_witness_format : serialize_old_format
end
def coinbase_tx?
inputs.length == 1 && inputs.first.coinbase?
end
def witness?
!inputs.find { |i| !i.script_witness.empty? }.nil?
end
def ==(other)
to_payload == other.to_payload
end
# serialize tx with old tx format
def serialize_old_format
buf = [version].pack('V')
buf << Bitcoin.pack_var_int(inputs.length) << inputs.map(&:to_payload).join
buf << Bitcoin.pack_var_int(outputs.length) << outputs.map(&:to_payload).join
buf << [lock_time].pack('V')
buf
end
# serialize tx with segwit tx format
# https://github.com/bitcoin/bips/blob/master/bip-0144.mediawiki
def serialize_witness_format
buf = [version, MARKER, FLAG].pack('Vcc')
buf << Bitcoin.pack_var_int(inputs.length) << inputs.map(&:to_payload).join
buf << Bitcoin.pack_var_int(outputs.length) << outputs.map(&:to_payload).join
buf << witness_payload << [lock_time].pack('V')
buf
end
def witness_payload
inputs.map { |i| i.script_witness.to_payload }.join
end
# check this tx is standard.
def standard?
return false if version > MAX_STANDARD_VERSION
return false if weight > MAX_STANDARD_TX_WEIGHT
inputs.each do |i|
# Biggest 'standard' txin is a 15-of-15 P2SH multisig with compressed keys (remember the 520 byte limit on redeemScript size).
# That works out to a (15*(33+1))+3=513 byte redeemScript, 513+1+15*(73+1)+3=1627
# bytes of scriptSig, which we round off to 1650 bytes for some minor future-proofing.
# That's also enough to spend a 20-of-20 CHECKMULTISIG scriptPubKey, though such a scriptPubKey is not considered standard.
return false if i.script_sig.size > 1650
return false unless i.script_sig.push_only?
end
data_count = 0
outputs.each do |o|
return false unless o.script_pubkey.standard?
data_count += 1 if o.script_pubkey.op_return?
# TODO add non P2SH multisig relay(permitbaremultisig)
# TODO add dust relay check
end
return false if data_count > 1
true
end
# The serialized transaction size
def size
to_payload.bytesize
end
# The virtual transaction size (differs from size for witness transactions)
def vsize
(weight.to_f / 4).ceil
end
# calculate tx weight
# weight = (legacy tx payload) * 3 + (witness tx payload)
def weight
if witness?
serialize_old_format.bytesize * (WITNESS_SCALE_FACTOR - 1) + serialize_witness_format.bytesize
else
serialize_old_format.bytesize * WITNESS_SCALE_FACTOR
end
end
# get signature hash
# @param [Integer] input_index input index.
# @param [Integer] hash_type signature hash type
# @param [Bitcoin::Script] output_script script pubkey or script code. if script pubkey is P2WSH, set witness script to this.
# @param [Integer] amount bitcoin amount locked in input. required for witness input only.
# @param [Integer] skip_separator_index If output_script is P2WSH and output_script contains any OP_CODESEPARATOR,
  #   the script code needed is the witnessScript with everything removed up to and including the last executed OP_CODESEPARATOR before the signature-checking opcode being executed.
# verify input signature.
# @param [Integer] input_index
# @param [Bitcoin::Script] script_pubkey the script pubkey for target input.
# @param [Integer] amount the amount of bitcoin, require for witness program only.
# @param [Array] flags the flags used when execute script interpreter.
def verify_input_sig(input_index, script_pubkey, amount: nil, flags: STANDARD_SCRIPT_VERIFY_FLAGS)
script_sig = inputs[input_index].script_sig
has_witness = inputs[input_index].has_witness?
if script_pubkey.p2sh?
flags << SCRIPT_VERIFY_P2SH
redeem_script = Script.parse_from_payload(script_sig.chunks.last)
script_pubkey = redeem_script if redeem_script.p2wpkh?
end
if has_witness || Bitcoin.chain_params.fork_chain?
verify_input_sig_for_witness(input_index, script_pubkey, amount, flags)
else
verify_input_sig_for_legacy(input_index, script_pubkey, flags)
end
end
def to_h
{
txid: txid, hash: witness_hash.rhex, version: version, size: size, vsize: vsize, locktime: lock_time,
vin: inputs.map(&:to_h), vout: outputs.map.with_index{|tx_out, index| tx_out.to_h.merge({n: index})}
}
end
private
# generate sighash with legacy format
def sighash_for_legacy(index, script_code, hash_type)
ins = inputs.map.with_index do |i, idx|
if idx == index
i.to_payload(script_code.delete_opcode(Bitcoin::Opcodes::OP_CODESEPARATOR))
else
case hash_type & 0x1f
when SIGHASH_TYPE[:none], SIGHASH_TYPE[:single]
i.to_payload(Bitcoin::Script.new, 0)
else
i.to_payload(Bitcoin::Script.new)
end
end
end
outs = outputs.map(&:to_payload)
out_size = Bitcoin.pack_var_int(outputs.size)
case hash_type & 0x1f
when SIGHASH_TYPE[:none]
outs = ''
out_size = Bitcoin.pack_var_int(0)
when SIGHASH_TYPE[:single]
return "\x01".ljust(32, "\x00") if index >= outputs.size
outs = outputs[0...(index + 1)].map.with_index { |o, idx| (idx == index) ? o.to_payload : o.to_empty_payload }.join
out_size = Bitcoin.pack_var_int(index + 1)
end
if hash_type & SIGHASH_TYPE[:anyonecanpay] != 0
ins = [ins[index]]
end
buf = [[version].pack('V'), Bitcoin.pack_var_int(ins.size),
ins, out_size, outs, [lock_time, hash_type].pack('VV')].join
Bitcoin.double_sha256(buf)
end
# generate sighash with BIP-143 format
# https://github.com/bitcoin/bips/blob/master/bip-0143.mediawiki
def sighash_for_witness(index, script_pubkey_or_script_code, hash_type, amount, skip_separator_index)
hash_prevouts = Bitcoin.double_sha256(inputs.map{|i|i.out_point.to_payload}.join)
hash_sequence = Bitcoin.double_sha256(inputs.map{|i|[i.sequence].pack('V')}.join)
outpoint = inputs[index].out_point.to_payload
amount = [amount].pack('Q')
nsequence = [inputs[index].sequence].pack('V')
hash_outputs = Bitcoin.double_sha256(outputs.map{|o|o.to_payload}.join)
script_code = script_pubkey_or_script_code.to_script_code(skip_separator_index)
case (hash_type & 0x1f)
when SIGHASH_TYPE[:single]
hash_outputs = index >= outputs.size ? "\x00".ljust(32, "\x00") : Bitcoin.double_sha256(outputs[index].to_payload)
hash_sequence = "\x00".ljust(32, "\x00")
when SIGHASH_TYPE[:none]
hash_sequence = hash_outputs = "\x00".ljust(32, "\x00")
end
if (hash_type & SIGHASH_TYPE[:anyonecanpay]) != 0
hash_prevouts = hash_sequence ="\x00".ljust(32, "\x00")
end
hash_type |= (Bitcoin.chain_params.fork_id << 8) if Bitcoin.chain_params.fork_chain?
buf = [ [version].pack('V'), hash_prevouts, hash_sequence, outpoint,
script_code ,amount, nsequence, hash_outputs, [@lock_time, hash_type].pack('VV')].join
Bitcoin.double_sha256(buf)
end
# verify input signature for legacy tx.
def verify_input_sig_for_legacy(input_index, script_pubkey, flags)
script_sig = inputs[input_index].script_sig
checker = Bitcoin::TxChecker.new(tx: self, input_index: input_index)
interpreter = Bitcoin::ScriptInterpreter.new(checker: checker, flags: flags)
interpreter.verify_script(script_sig, script_pubkey)
end
# verify input signature for witness tx.
def verify_input_sig_for_witness(input_index, script_pubkey, amount, flags)
flags |= SCRIPT_VERIFY_WITNESS
flags |= SCRIPT_VERIFY_WITNESS_PUBKEYTYPE
checker = Bitcoin::TxChecker.new(tx: self, input_index: input_index, amount: amount)
interpreter = Bitcoin::ScriptInterpreter.new(checker: checker, flags: flags)
i = inputs[input_index]
script_sig = i.script_sig
witness = i.script_witness
interpreter.verify_script(script_sig, script_pubkey, witness)
end
end
|
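A hedged sketch of computing a BIP-143 digest with the method above; the raw transaction hex, previous scriptPubKey hex and amount are placeholders you would take from your wallet or chain data.

  tx  = Bitcoin::Tx.parse_from_payload([raw_tx_hex].pack('H*'))          # raw_tx_hex assumed to be defined
  spk = Bitcoin::Script.parse_from_payload([prev_script_hex].pack('H*')) # scriptPubKey of the spent output
  digest = tx.sighash_for_input(0, spk,
                                hash_type: Bitcoin::SIGHASH_TYPE[:all],
                                sig_version: :witness_v0,
                                amount: 600_000)                         # value of the spent output in satoshis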
davidbarral/sugarfree-config | lib/sugarfree-config/config.rb | SugarfreeConfig.Config.fetch_config | ruby | def fetch_config
Rails.logger.debug "Loading #{@file}::#{@env}" if Object.const_defined?('Rails') && Rails.logger.present?
YAML::load_file(@file)[@env.to_s]
end | Fetch the config from the file | train | https://github.com/davidbarral/sugarfree-config/blob/76b590627d50cd50b237c21fdf8ea3022ebbdf42/lib/sugarfree-config/config.rb#L47-L50 | class Config
#
# Creates a new config object and load the config file into memory
#
def initialize(options)
options = default_options.merge(options)
@file = options[:file]
@reload = options[:reload]
@env = options[:env]
end
#
# Returns all the config as a big hash
#
def to_hash
values
end
#
# Here is the magic. The first request to config returns a new
# ConfigIterator that will handle the first +symbol+
#
def method_missing(symbol, *args)
ConfigIterator.new(values, symbol).next
end
protected
def values
@config = fetch_config unless @config && !@reload
@config
end
#
# Fetch the config from the file
#
#
# Default configuration options for Rails and non Rails applications
#
def default_options
if Object.const_defined?('Rails')
{
:file => Rails.root.join('config', 'config.yml'),
:reload => Rails.env.development?,
:env => Rails.env
}
else
{
:file => File.expand_path("config.yml"),
:reload => false,
:env => "development"
}
end
end
end
|
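A short sketch of the intended call pattern; the YAML path is a placeholder, and nested lookups rely on the gem's ConfigIterator, which is not part of this record.

  config = SugarfreeConfig::Config.new(file: 'config/config.yml', env: 'development')
  config.to_hash          # the whole hash for the selected environment
  config.database.host    # method_missing walks nested YAML keys via ConfigIterator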
jdigger/git-process | lib/git-process/git_remote.rb | GitProc.GitRemote.repo_name | ruby | def repo_name
unless @repo_name
url = config["remote.#{name}.url"]
raise GitProcessError.new("There is no #{name} url set up.") if url.nil? or url.empty?
uri = Addressable::URI.parse(url)
@repo_name = uri.path.sub(/\.git/, '').sub(/^\//, '')
end
@repo_name
end | The name of the repository
@example
repo_name #=> "jdigger/git-process"
@return [String] the name of the repository | train | https://github.com/jdigger/git-process/blob/5853aa94258e724ce0dbc2f1e7407775e1630964/lib/git-process/git_remote.rb#L84-L92 | class GitRemote
# @param [GitProc::GitConfig] gitconfig
def initialize(gitconfig)
@gitconfig = gitconfig
end
# @return [#info, #warn, #debug, #error]
def logger
@logger ||= @gitconfig.logger
end
# @return [GitProc::GitConfig]
def config
@gitconfig
end
# @deprecated
# @todo Remove
def server_name
@server_name ||= self.remote_name
end
# @return [Boolean] does this have a remote defined?
def exists?
if @has_remote.nil?
@has_remote = (config.gitlib.command(:remote) != '')
end
logger.debug { "Does a remote exist? #{@has_remote}" }
@has_remote
end
#
# The name of the repository
#
# @example
# repo_name #=> "jdigger/git-process"
#
# @return [String] the name of the repository
#
# Returns the "remote name" to use. By convention the most common name is "origin".
#
# If the Git configuration "gitProcess.remoteName" is set, that will always be used. Otherwise this
# simple returns the first name it finds in the list of remotes.
#
# @return [String, nil] the remote name, or nil if there are none defined
def remote_name
unless @remote_name
@remote_name = config['gitProcess.remoteName']
if @remote_name.nil? or @remote_name.empty?
remotes = self.remote_names
if remotes.empty?
@remote_name = nil
else
@remote_name = remotes[0]
raise "remote name is not a String: #{@remote_name.inspect}" unless @remote_name.is_a? String
end
end
logger.debug { "Using remote name of '#{@remote_name}'" }
end
@remote_name
end
alias :name :remote_name
#
# Takes {#remote_name} and combines it with {GitConfig#master_branch}.
#
# @example
# master_branch_name #=> origin/master
#
# @return [String] the complete remote name of the integration branch
#
def master_branch_name
"#{self.name}/#{config.master_branch}"
end
alias :remote_integration_branch_name :master_branch_name
def remote_names
remote_str = config.gitlib.command(:remote, [:show])
if remote_str.nil? or remote_str.empty?
[]
else
remote_str.split(/\n/)
end
end
#
# Expands the git configuration server name to a url.
#
# Takes into account further expanding an SSH uri that uses SSH aliasing in .ssh/config
#
# @param [String] server_name the git configuration server name; defaults to 'origin'
#
# @option opts [String] :ssh_config_file the SSH config file to use; defaults to ~/.ssh/config
#
# @return the fully expanded URL; never nil
#
# @raise [GitHubService::NoRemoteRepository] there is not a URL set for the server name
# @raise [URI::InvalidURIError] the retrieved URL does not have a schema
# @raise [GitHubService::NoRemoteRepository] if could not figure out a host for the retrieved URL
# @raise [::ArgumentError] if a server name is not provided
#
# @todo use the netrc gem
def expanded_url(server_name = 'origin', raw_url = nil, opts = {})
if raw_url.nil?
raise ArgumentError.new('Need server_name') unless server_name
conf_key = "remote.#{server_name}.url"
url = config[conf_key]
raise GitHubService::NoRemoteRepository.new("There is no value set for '#{conf_key}'") if url.nil? or url.empty?
else
raise GitHubService::NoRemoteRepository.new("There is no value set for '#{raw_url}'") if raw_url.nil? or raw_url.empty?
url = raw_url
end
if /^\S+@/ =~ url
url.sub(/^(\S+@\S+?):(.*)$/, "ssh://\\1/\\2")
else
uri = URI.parse(url)
host = uri.host
scheme = uri.scheme
raise URI::InvalidURIError.new("Need a scheme in URI: '#{url}'") unless scheme
if scheme == 'file'
url
elsif host.nil?
# assume that the 'scheme' is the named configuration in ~/.ssh/config
rv = GitRemote.hostname_and_user_from_ssh_config(scheme, opts[:ssh_config_file] ||= "#{ENV['HOME']}/.ssh/config")
raise GitHubService::NoRemoteRepository.new("Could not determine a host from #{url}") if rv.nil?
host = rv[0]
user = rv[1]
url.sub(/^\S+:(\S+)$/, "ssh://#{user}@#{host}/\\1")
else
url
end
end
end
# @return [void]
def add_remote(remote_name, url)
config.gitlib.command(:remote, ['add', remote_name, url])
end
alias :add :add_remote
# @todo use the netrc gem
#noinspection RubyClassMethodNamingConvention
def self.hostname_and_user_from_ssh_config(host_alias, config_file)
if File.exists?(config_file)
config_lines = File.new(config_file).readlines
in_host_section = false
host_name = nil
user_name = nil
config_lines.each do |line|
line.chop!
if /^\s*Host\s+#{host_alias}\s*$/ =~ line
in_host_section = true
next
end
if in_host_section and (/^\s*HostName\s+\S+\s*$/ =~ line)
host_name = line.sub(/^\s*HostName\s+(\S+)\s*$/, '\1')
break unless user_name.nil?
elsif in_host_section and (/^\s*User\s+\S+\s*$/ =~ line)
user_name = line.sub(/^\s*User\s+(\S+)\s*$/, '\1')
break unless host_name.nil?
elsif in_host_section and (/^\s*Host\s+.*$/ =~ line)
break
end
end
host_name.nil? ? nil : [host_name, user_name]
else
nil
end
end
end
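A standalone sketch of the URL-to-name logic used by GitRemote#repo_name in the record above; it repeats the same Addressable parse and two substitutions so it can be run outside GitProc (the sample URLs are made up).

require 'addressable/uri'

# Same transformation as #repo_name: take the URL path, drop ".git", drop the leading "/".
def repo_name_from_url(url)
  Addressable::URI.parse(url).path.sub(/\.git/, '').sub(/^\//, '')
end

repo_name_from_url('https://github.com/jdigger/git-process.git')   #=> "jdigger/git-process"
repo_name_from_url('ssh://git@github.com/jdigger/git-process.git') #=> "jdigger/git-process"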
|
puppetlabs/beaker-aws | lib/beaker/hypervisor/aws_sdk.rb | Beaker.AwsSdk.ensure_group | ruby | def ensure_group(vpc, ports, sg_cidr_ips = ['0.0.0.0/0'])
@logger.notify("aws-sdk: Ensure security group exists for ports #{ports.to_s}, create if not")
name = group_id(ports)
group = client.describe_security_groups(
:filters => [
{ :name => 'group-name', :values => [name] },
{ :name => 'vpc-id', :values => [vpc.vpc_id] },
]
).security_groups.first
if group.nil?
group = create_group(vpc, ports, sg_cidr_ips)
end
group
end | Return an existing group, or create new one
Accepts a VPC as input for checking & creation.
@param vpc [Aws::EC2::VPC] the AWS vpc control object
@param ports [Array<Number>] an array of port numbers
@param sg_cidr_ips [Array<String>] CIDRs used for outbound security group rule
@return [Aws::EC2::SecurityGroup] created security group
@api private | train | https://github.com/puppetlabs/beaker-aws/blob/f2e448b4e7c7ccb17940b86afc25cee5eb5cbb39/lib/beaker/hypervisor/aws_sdk.rb#L1007-L1023 | class AwsSdk < Beaker::Hypervisor
ZOMBIE = 3 #anything older than 3 hours is considered a zombie
PING_SECURITY_GROUP_NAME = 'beaker-ping'
attr_reader :default_region
# Initialize AwsSdk hypervisor driver
#
# @param [Array<Beaker::Host>] hosts Array of Beaker::Host objects
# @param [Hash<String, String>] options Options hash
def initialize(hosts, options)
@hosts = hosts
@options = options
@logger = options[:logger]
@default_region = ENV['AWS_REGION'] || 'us-west-2'
# Get AWS credentials
creds = options[:use_fog_credentials] ? load_credentials() : nil
config = {
:credentials => creds,
:logger => Logger.new($stdout),
:log_level => :debug,
:log_formatter => Aws::Log::Formatter.colored,
:retry_limit => 12,
:region => ENV['AWS_REGION'] || 'us-west-2'
}.delete_if{ |k,v| v.nil? }
Aws.config.update(config)
@client = {}
@client.default_proc = proc do |hash, key|
hash[key] = Aws::EC2::Client.new(:region => key)
end
test_split_install()
end
def client(region = default_region)
@client[region]
end
# Provision all hosts on EC2 using the Aws::EC2 API
#
# @return [void]
def provision
start_time = Time.now
# Perform the main launch work
launch_all_nodes()
# Add metadata tags to each instance
# tagging early as some nodes take longer
# to initialize and terminate before it has
# a chance to provision
add_tags()
# adding the correct security groups to the
# network interface, as during the `launch_all_nodes()`
# step they never get assigned, although they get created
modify_network_interface()
wait_for_status_netdev()
# Grab the ip addresses and dns from EC2 for each instance to use for ssh
populate_dns()
#enable root if user is not root
enable_root_on_hosts()
# Set the hostname for each box
set_hostnames()
# Configure /etc/hosts on each host
configure_hosts()
@logger.notify("aws-sdk: Provisioning complete in #{Time.now - start_time} seconds")
nil #void
end
def regions
@regions ||= client.describe_regions.regions.map(&:region_name)
end
# Kill all instances.
#
# @param instances [Enumerable<Aws::EC2::Types::Instance>]
# @return [void]
def kill_instances(instances)
running_instances = instances.compact.select do |instance|
instance_by_id(instance.instance_id).state.name == 'running'
end
instance_ids = running_instances.map(&:instance_id)
return nil if instance_ids.empty?
@logger.notify("aws-sdk: killing EC2 instance(s) #{instance_ids.join(', ')}")
client.terminate_instances(:instance_ids => instance_ids)
nil
end
# Cleanup all earlier provisioned hosts on EC2 using the Aws::EC2 library
#
# It goes without saying, but a #cleanup does nothing without a #provision
# method call first.
#
# @return [void]
def cleanup
# Provisioning should have set the host 'instance' values.
kill_instances(@hosts.map{ |h| h['instance'] }.select{ |x| !x.nil? })
delete_key_pair_all_regions()
nil
end
# Print instances to the logger. Instances will be from all regions
# associated with provided key name and limited by regex compared to
# instance status. Defaults to running instances.
#
# @param [String] key The key_name to match for
# @param [Regex] status The regular expression to match against the instance's status
def log_instances(key = key_name, status = /running/)
instances = []
regions.each do |region|
@logger.debug "Reviewing: #{region}"
client(region).describe_instances.reservations.each do |reservation|
reservation.instances.each do |instance|
if (instance.key_name =~ /#{key}/) and (instance.state.name =~ status)
instances << instance
end
end
end
end
output = ""
instances.each do |instance|
dns_name = instance.public_dns_name || instance.private_dns_name
output << "#{instance.instance_id} keyname: #{instance.key_name}, dns name: #{dns_name}, private ip: #{instance.private_ip_address}, ip: #{instance.public_ip_address}, launch time #{instance.launch_time}, status: #{instance.state.name}\n"
end
@logger.notify("aws-sdk: List instances (keyname: #{key})")
@logger.notify("#{output}")
end
# Provided an id return an instance object.
# Instance object will respond to methods described here: {http://docs.aws.amazon.com/AWSRubySDK/latest/AWS/EC2/Instance.html AWS Instance Object}.
# @param [String] id The id of the instance to return
# @return [Aws::EC2::Types::Instance] An Aws::EC2 instance object
def instance_by_id(id)
client.describe_instances(:instance_ids => [id]).reservations.first.instances.first
end
# Return all instances currently on ec2.
# @see AwsSdk#instance_by_id
# @return [Array<Aws::Ec2::Types::Instance>] An array of Aws::EC2 instance objects
def instances
client.describe_instances.reservations.map(&:instances).flatten
end
# Provided an id return a VPC object.
# VPC object will respond to methods described here: {http://docs.aws.amazon.com/AWSRubySDK/latest/AWS/EC2/VPC.html AWS VPC Object}.
# @param [String] id The id of the VPC to return
# @return [Aws::EC2::Types::Vpc] An Aws::EC2 vpc object
def vpc_by_id(id)
client.describe_vpcs(:vpc_ids => [id]).vpcs.first
end
# Return all VPCs currently on ec2.
# @see AwsSdk#vpc_by_id
# @return [Array<Aws::EC2::Types::Vpc>] An array of Aws::EC2 vpc objects
def vpcs
client.describe_vpcs.vpcs
end
# Provided an id return a security group object
# Security object will respond to methods described here: {http://docs.aws.amazon.com/AWSRubySDK/latest/AWS/EC2/SecurityGroup.html AWS SecurityGroup Object}.
# @param [String] id The id of the security group to return
# @return [Aws::EC2::Types::SecurityGroup] An Aws::EC2 security group object
def security_group_by_id(id)
client.describe_security_groups(:group_ids => [id]).security_groups.first
end
# Return all security groups currently on ec2.
# @see AwsSdk#security_goup_by_id
# @return [Array<Aws::EC2::Types::SecurityGroup>] An array of Aws::EC2 security group objects
def security_groups
client.describe_security_groups.security_groups
end
# Shutdown and destroy ec2 instances idenfitied by key that have been alive
# longer than ZOMBIE hours.
#
# @param [Integer] max_age The age in hours that a machine needs to be older than to be considered a zombie
# @param [String] key The key_name to match for
def kill_zombies(max_age = ZOMBIE, key = key_name)
@logger.notify("aws-sdk: Kill Zombies! (keyname: #{key}, age: #{max_age} hrs)")
instances_to_kill = []
time_now = Time.now.getgm #ec2 uses GM time
#examine all available regions
regions.each do |region|
@logger.debug "Reviewing: #{region}"
client(region).describe_instances.reservations.each do |reservation|
reservation.instances.each do |instance|
if (instance.key_name =~ /#{key}/)
@logger.debug "Examining #{instance.instance_id} (keyname: #{instance.key_name}, launch time: #{instance.launch_time}, state: #{instance.state.name})"
if ((time_now - instance.launch_time) > max_age*60*60) and instance.state.name !~ /terminated/
@logger.debug "Kill! #{instance.instance_id}: #{instance.key_name} (Current status: #{instance.state.name})"
instances_to_kill << instance
end
end
end
end
end
kill_instances(instances_to_kill)
delete_key_pair_all_regions(key_name_prefix)
@logger.notify "#{key}: Killed #{instances_to_kill.length} instance(s)"
end
# Destroy any volumes marked 'available', INCLUDING THOSE YOU DON'T OWN! Use with care.
def kill_zombie_volumes
# Occasionally, tearing down ec2 instances leaves orphaned EBS volumes behind -- these stack up quickly.
# This simply looks for EBS volumes that are not in use
@logger.notify("aws-sdk: Kill Zombie Volumes!")
volume_count = 0
regions.each do |region|
@logger.debug "Reviewing: #{region}"
available_volumes = client(region).describe_volumes(
:filters => [
{ :name => 'status', :values => ['available'], }
]
).volumes
available_volumes.each do |volume|
begin
client(region).delete_volume(:volume_id => volume.id)
volume_count += 1
rescue Aws::EC2::Errors::InvalidVolume::NotFound => e
@logger.debug "Failed to remove volume: #{volume.id} #{e}"
end
end
end
@logger.notify "Freed #{volume_count} volume(s)"
end
# Create an EC2 instance for host, tag it, and return it.
#
# @return [void]
# @api private
def create_instance(host, ami_spec, subnet_id)
amitype = host['vmname'] || host['platform']
amisize = host['amisize'] || 'm1.small'
vpc_id = host['vpc_id'] || @options['vpc_id'] || nil
host['sg_cidr_ips'] = host['sg_cidr_ips'] || '0.0.0.0/0';
sg_cidr_ips = host['sg_cidr_ips'].split(',')
assoc_pub_ip_addr = host['associate_public_ip_address']
if vpc_id && !subnet_id
raise RuntimeError, "A subnet_id must be provided with a vpc_id"
end
if assoc_pub_ip_addr && !subnet_id
raise RuntimeError, "A subnet_id must be provided when configuring assoc_pub_ip_addr"
end
# Use snapshot provided for this host
image_type = host['snapshot']
raise RuntimeError, "No snapshot/image_type provided for EC2 provisioning" unless image_type
ami = ami_spec[amitype]
ami_region = ami[:region]
# Main region object for ec2 operations
region = ami_region
# If we haven't defined a vpc_id then we use the default vpc for the provided region
unless vpc_id
@logger.notify("aws-sdk: filtering available vpcs in region by 'isDefault'")
default_vpcs = client(region).describe_vpcs(:filters => [{:name => 'isDefault', :values => ['true']}])
vpc_id = if default_vpcs.vpcs.empty?
nil
else
default_vpcs.vpcs.first.vpc_id
end
end
# Grab the vpc object based upon provided id
vpc = vpc_id ? client(region).describe_vpcs(:vpc_ids => [vpc_id]).vpcs.first : nil
# Grab image object
image_id = ami[:image][image_type.to_sym]
@logger.notify("aws-sdk: Checking image #{image_id} exists and getting its root device")
image = client(region).describe_images(:image_ids => [image_id]).images.first
raise RuntimeError, "Image not found: #{image_id}" if image.nil?
@logger.notify("Image Storage Type: #{image.root_device_type}")
# Transform the images block_device_mappings output into a format
# ready for a create.
block_device_mappings = []
if image.root_device_type == :ebs
orig_bdm = image.block_device_mappings
@logger.notify("aws-sdk: Image block_device_mappings: #{orig_bdm}")
orig_bdm.each do |block_device|
block_device_mappings << {
:device_name => block_device.device_name,
:ebs => {
# Change the default size of the root volume.
:volume_size => host['volume_size'] || block_device.ebs.volume_size,
# This is required to override the images default for
# delete_on_termination, forcing all volumes to be deleted once the
# instance is terminated.
:delete_on_termination => true,
}
}
end
end
security_group = ensure_group(vpc || region, Beaker::EC2Helper.amiports(host), sg_cidr_ips)
#check if ping is enabled
ping_security_group = ensure_ping_group(vpc || region, sg_cidr_ips)
msg = "aws-sdk: launching %p on %p using %p/%p%s" %
[host.name, amitype, amisize, image_type,
subnet_id ? ("in %p" % subnet_id) : '']
@logger.notify(msg)
config = {
:max_count => 1,
:min_count => 1,
:image_id => image_id,
:monitoring => {
:enabled => true,
},
:key_name => ensure_key_pair(region).key_pairs.first.key_name,
:instance_type => amisize,
:disable_api_termination => false,
:instance_initiated_shutdown_behavior => "terminate",
}
if assoc_pub_ip_addr
# this never gets created, so they end up with
# default security group which only allows for
# ssh access from outside world which
# doesn't work well with remote devices etc.
config[:network_interfaces] = [{
:subnet_id => subnet_id,
:groups => [security_group.group_id, ping_security_group.group_id],
:device_index => 0,
:associate_public_ip_address => assoc_pub_ip_addr,
}]
else
config[:subnet_id] = subnet_id
end
config[:block_device_mappings] = block_device_mappings if image.root_device_type == :ebs
reservation = client(region).run_instances(config)
reservation.instances.first
end
# For each host, create an EC2 instance in one of the specified
# subnets and push it onto instances_created. Each subnet will be
# tried at most once for each host, and more than one subnet may
# be tried if capacity constraints are encountered. Each Hash in
# instances_created will contain an :instance and :host value.
#
# @param hosts [Enumerable<Host>]
# @param subnets [Enumerable<String>]
# @param ami_spec [Hash]
# @param instances_created Enumerable<Hash{Symbol=>EC2::Instance,Host}>
# @return [void]
# @api private
def launch_nodes_on_some_subnet(hosts, subnets, ami_spec, instances_created)
# Shuffle the subnets so we don't always hit the same one
# first, and cycle though the subnets independently of the
# host, so we stick with one that's working. Try each subnet
# once per-host.
if subnets.nil? or subnets.empty?
return
end
subnet_i = 0
shuffnets = subnets.shuffle
hosts.each do |host|
instance = nil
shuffnets.length.times do
begin
subnet_id = shuffnets[subnet_i]
instance = create_instance(host, ami_spec, subnet_id)
instances_created.push({:instance => instance, :host => host})
break
rescue Aws::EC2::Errors::InsufficientInstanceCapacity
@logger.notify("aws-sdk: hit #{subnet_id} capacity limit; moving on")
subnet_i = (subnet_i + 1) % shuffnets.length
end
end
if instance.nil?
raise RuntimeError, "unable to launch host in any requested subnet"
end
end
end
# Create EC2 instances for all hosts, tag them, and wait until
# they're running. When a host provides a subnet_id, create the
# instance in that subnet, otherwise prefer a CONFIG subnet_id.
# If neither are set but there is a CONFIG subnet_ids list,
# attempt to create the host in each specified subnet, which might
# fail due to capacity constraints, for example. Specifying both
# a CONFIG subnet_id and subnet_ids will provoke an error.
#
# @return [void]
# @api private
def launch_all_nodes
@logger.notify("aws-sdk: launch all hosts in configuration")
ami_spec = YAML.load_file(@options[:ec2_yaml])["AMI"]
global_subnet_id = @options['subnet_id']
global_subnets = @options['subnet_ids']
if global_subnet_id and global_subnets
raise RuntimeError, 'Config specifies both subnet_id and subnet_ids'
end
no_subnet_hosts = []
specific_subnet_hosts = []
some_subnet_hosts = []
@hosts.each do |host|
if global_subnet_id or host['subnet_id']
specific_subnet_hosts.push(host)
elsif global_subnets
some_subnet_hosts.push(host)
else
no_subnet_hosts.push(host)
end
end
instances = [] # Each element is {:instance => i, :host => h}
begin
@logger.notify("aws-sdk: launch instances not particular about subnet")
launch_nodes_on_some_subnet(some_subnet_hosts, global_subnets, ami_spec,
instances)
@logger.notify("aws-sdk: launch instances requiring a specific subnet")
specific_subnet_hosts.each do |host|
subnet_id = host['subnet_id'] || global_subnet_id
instance = create_instance(host, ami_spec, subnet_id)
instances.push({:instance => instance, :host => host})
end
@logger.notify("aws-sdk: launch instances requiring no subnet")
no_subnet_hosts.each do |host|
instance = create_instance(host, ami_spec, nil)
instances.push({:instance => instance, :host => host})
end
wait_for_status(:running, instances)
rescue Exception => ex
@logger.notify("aws-sdk: exception #{ex.class}: #{ex}")
kill_instances(instances.map{|x| x[:instance]})
raise ex
end
# At this point, all instances should be running since wait
# either returns on success or throws an exception.
if instances.empty?
raise RuntimeError, "Didn't manage to launch any EC2 instances"
end
# Assign the now known running instances to their hosts.
instances.each {|x| x[:host]['instance'] = x[:instance]}
nil
end
# Wait until all instances reach the desired state. Each Hash in
# instances must contain an :instance and :host value.
#
# @param state_name [String] EC2 state to wait for, 'running', 'stopped', etc.
# @param instances Enumerable<Hash{Symbol=>EC2::Instance,Host}>
# @param block [Proc] more complex checks can be made by passing a
# block in. This overrides the status parameter.
# EC2::Instance objects from the hosts will be
# yielded to the passed block
# @return [void]
# @api private
# FIXME: rename to #wait_for_state
def wait_for_status(state_name, instances, &block)
# Wait for each node to reach status :running
@logger.notify("aws-sdk: Waiting for all hosts to be #{state_name}")
instances.each do |x|
name = x[:host] ? x[:host].name : x[:name]
instance = x[:instance]
@logger.notify("aws-sdk: Wait for node #{name} to be #{state_name}")
# Here we keep waiting for the machine state to reach 'running' with an
# exponential backoff for each poll.
# TODO: should probably be a in a shared method somewhere
for tries in 1..10
refreshed_instance = instance_by_id(instance.instance_id)
if refreshed_instance.nil?
@logger.debug("Instance #{name} not yet available (#{e})")
else
if block_given?
test_result = yield refreshed_instance
else
test_result = refreshed_instance.state.name.to_s == state_name.to_s
end
if test_result
x[:instance] = refreshed_instance
# Always sleep, so the next command won't cause a throttle
backoff_sleep(tries)
break
elsif tries == 10
raise "Instance never reached state #{state_name}"
end
end
backoff_sleep(tries)
end
end
end
# Handles special checks needed for netdev platforms.
#
# @note if any host is an netdev one, these checks will happen once across all
# of the hosts, and then we'll exit
#
# @return [void]
# @api private
def wait_for_status_netdev()
@hosts.each do |host|
if host['platform'] =~ /f5-|netscaler/
wait_for_status(:running, @hosts)
wait_for_status(nil, @hosts) do |instance|
instance_status_collection = client.describe_instance_status({:instance_ids => [instance.instance_id]})
first_instance = instance_status_collection.first[:instance_statuses].first
first_instance[:instance_status][:status] == "ok" if first_instance
end
break
end
end
end
# Add metadata tags to all instances
#
# @return [void]
# @api private
def add_tags
@hosts.each do |host|
instance = host['instance']
# Define tags for the instance
@logger.notify("aws-sdk: Add tags for #{host.name}")
tags = [
{
:key => 'jenkins_build_url',
:value => @options[:jenkins_build_url],
},
{
:key => 'Name',
:value => host.name,
},
{
:key => 'department',
:value => @options[:department],
},
{
:key => 'project',
:value => @options[:project],
},
{
:key => 'created_by',
:value => @options[:created_by],
},
]
host[:host_tags].each do |name, val|
tags << { :key => name.to_s, :value => val }
end
client.create_tags(
:resources => [instance.instance_id],
:tags => tags.reject { |r| r[:value].nil? },
)
end
nil
end
# Add correct security groups to hosts network_interface
# as during the create_instance stage it is too early in process
# to configure
#
# @return [void]
# @api private
def modify_network_interface
@hosts.each do |host|
instance = host['instance']
host['sg_cidr_ips'] = host['sg_cidr_ips'] || '0.0.0.0/0';
sg_cidr_ips = host['sg_cidr_ips'].split(',')
# Define tags for the instance
@logger.notify("aws-sdk: Update network_interface for #{host.name}")
security_group = ensure_group(instance[:network_interfaces].first, Beaker::EC2Helper.amiports(host), sg_cidr_ips)
ping_security_group = ensure_ping_group(instance[:network_interfaces].first, sg_cidr_ips)
client.modify_network_interface_attribute(
:network_interface_id => "#{instance[:network_interfaces].first[:network_interface_id]}",
:groups => [security_group.group_id, ping_security_group.group_id],
)
end
nil
end
# Populate the hosts IP address from the EC2 dns_name
#
# @return [void]
# @api private
def populate_dns
# Obtain the IP addresses and dns_name for each host
@hosts.each do |host|
@logger.notify("aws-sdk: Populate DNS for #{host.name}")
instance = host['instance']
host['ip'] = instance.public_ip_address || instance.private_ip_address
host['private_ip'] = instance.private_ip_address
host['dns_name'] = instance.public_dns_name || instance.private_dns_name
@logger.notify("aws-sdk: name: #{host.name} ip: #{host['ip']} private_ip: #{host['private_ip']} dns_name: #{host['dns_name']}")
end
nil
end
# Return a valid /etc/hosts line for a given host
#
# @param [Beaker::Host] host Beaker::Host object for generating /etc/hosts entry
# @param [Symbol] interface Symbol identifies which ip should be used for host
# @return [String] formatted hosts entry for host
# @api private
def etc_hosts_entry(host, interface = :ip)
name = host.name
domain = get_domain_name(host)
ip = host[interface.to_s]
"#{ip}\t#{name} #{name}.#{domain} #{host['dns_name']}\n"
end
# Configure /etc/hosts for each node
#
# @note f5 hosts are skipped since this isn't a valid step there
#
# @return [void]
# @api private
def configure_hosts
non_netdev_windows_hosts = @hosts.select{ |h| !(h['platform'] =~ /f5-|netscaler|windows/) }
non_netdev_windows_hosts.each do |host|
host_entries = non_netdev_windows_hosts.map do |h|
h == host ? etc_hosts_entry(h, :private_ip) : etc_hosts_entry(h)
end
host_entries.unshift "127.0.0.1\tlocalhost localhost.localdomain\n"
set_etc_hosts(host, host_entries.join(''))
end
nil
end
# Enables root for instances with custom username like ubuntu-amis
#
# @return [void]
# @api private
def enable_root_on_hosts
@hosts.each do |host|
if host['disable_root_ssh'] == true
@logger.notify("aws-sdk: Not enabling root for instance as disable_root_ssh is set to 'true'.")
else
@logger.notify("aws-sdk: Enabling root ssh")
enable_root(host)
end
end
end
# Enables root access for a host when username is not root
#
# @return [void]
# @api private
def enable_root(host)
if host['user'] != 'root'
if host['platform'] =~ /f5-/
enable_root_f5(host)
elsif host['platform'] =~ /netscaler/
enable_root_netscaler(host)
else
copy_ssh_to_root(host, @options)
enable_root_login(host, @options)
host['user'] = 'root'
end
host.close
end
end
# Enables root access for a host on an f5 platform
# @note This method does not support other platforms
#
# @return nil
# @api private
def enable_root_f5(host)
for tries in 1..10
begin
#This command is problematic as the F5 is not always done loading
if host.exec(Command.new("modify sys db systemauth.disablerootlogin value false"), :acceptable_exit_codes => [0,1]).exit_code == 0 \
and host.exec(Command.new("modify sys global-settings gui-setup disabled"), :acceptable_exit_codes => [0,1]).exit_code == 0 \
and host.exec(Command.new("save sys config"), :acceptable_exit_codes => [0,1]).exit_code == 0
backoff_sleep(tries)
break
elsif tries == 10
raise "Instance was unable to be configured"
end
rescue Beaker::Host::CommandFailure => e
@logger.debug("Instance not yet configured (#{e})")
end
backoff_sleep(tries)
end
host['user'] = 'admin'
sha256 = Digest::SHA256.new
password = sha256.hexdigest((1..50).map{(rand(86)+40).chr}.join.gsub(/\\/,'\&\&')) + 'password!'
# disabling password policy to account for the enforcement level set
# and the generated password is sometimes too `01070366:3: Bad password (admin): BAD PASSWORD: \
# it is too simplistic/systematic`
host.exec(Command.new('modify auth password-policy policy-enforcement disabled'))
host.exec(Command.new("modify auth user admin password #{password}"))
@logger.notify("f5: Configured admin password to be #{password}")
host.close
host['ssh'] = {:password => password}
end
# Enables root access for a host on an netscaler platform
# @note This method does not support other platforms
#
# @return nil
# @api private
def enable_root_netscaler(host)
host['ssh'] = {:password => host['instance'].instance_id}
@logger.notify("netscaler: nsroot password is #{host['instance'].instance_id}")
end
# Set the :vmhostname for each host object to be the dns_name, which is accessible
# publicly. Then configure each ec2 machine to that dns_name, so that when facter
# is installed the facts for hostname and domain match the dns_name.
#
# if :use_beaker_hostnames: is true, set the :vmhostname and hostname of each ec2
# machine to the host[:name] from the beaker hosts file.
#
# @return [@hosts]
# @api private
def set_hostnames
if @options[:use_beaker_hostnames]
@hosts.each do |host|
host[:vmhostname] = host.name
if host['platform'] =~ /el-7/
# on el-7 hosts, the hostname command doesn't "stick" randomly
host.exec(Command.new("hostnamectl set-hostname #{host.name}"))
elsif host['platform'] =~ /windows/
@logger.notify('aws-sdk: Change hostname on windows is not supported.')
else
next if host['platform'] =~ /f5-|netscaler/
host.exec(Command.new("hostname #{host.name}"))
if host['vmname'] =~ /^amazon/
# Amazon Linux requires this to preserve host name changes across reboots.
# http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/set-hostname.html
# Also note that without an elastic ip set, while this will
# preserve the hostname across a full shutdown/startup of the vm
# (as opposed to a reboot) -- the ip address will have changed.
host.exec(Command.new("sed -ie '/^HOSTNAME/ s/=.*/=#{host.name}/' /etc/sysconfig/network"))
end
end
end
else
@hosts.each do |host|
host[:vmhostname] = host[:dns_name]
if host['platform'] =~ /el-7/
# on el-7 hosts, the hostname command doesn't "stick" randomly
host.exec(Command.new("hostnamectl set-hostname #{host.hostname}"))
elsif host['platform'] =~ /windows/
@logger.notify('aws-sdk: Change hostname on windows is not supported.')
else
next if host['platform'] =~ /f5-|netscaler/
host.exec(Command.new("hostname #{host.hostname}"))
if host['vmname'] =~ /^amazon/
# See note above
host.exec(Command.new("sed -ie '/^HOSTNAME/ s/=.*/=#{host.hostname}/' /etc/sysconfig/network"))
end
end
end
end
end
# Calculates and waits a back-off period based on the number of tries
#
# Logs each backoff time and retry value to the console.
#
# @param tries [Number] number of tries to calculate back-off period
# @return [void]
# @api private
def backoff_sleep(tries)
# Exponential with some randomization
sleep_time = 2 ** tries
@logger.notify("aws-sdk: Sleeping #{sleep_time} seconds for attempt #{tries}.")
sleep sleep_time
nil
end
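# For reference, the waits this produces for the 1..10 retry loops above are
# 2, 4, 8, ... 1024 seconds, i.e. at most 2 + 4 + ... + 1024 = 2046 seconds in total.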
# Retrieve the public key locally from the executing user's ~/.ssh directory
#
# @return [String] contents of public key
# @api private
def public_key
keys = Array(@options[:ssh][:keys])
keys << '~/.ssh/id_rsa'
keys << '~/.ssh/id_dsa'
key_file = keys.find do |key|
key_pub = key + '.pub'
File.exist?(File.expand_path(key_pub)) && File.exist?(File.expand_path(key))
end
if key_file
@logger.debug("Using public key: #{key_file}")
else
raise RuntimeError, "Expected to find a public key, but couldn't in #{keys}"
end
File.read(File.expand_path(key_file + '.pub'))
end
# Generate a key prefix for key pair names
#
# @note This is the part of the key that will stay static between Beaker
# runs on the same host.
#
# @return [String] Beaker key pair name based on sanitized hostname
def key_name_prefix
safe_hostname = Socket.gethostname.gsub('.', '-')
"Beaker-#{local_user}-#{safe_hostname}"
end
# Generate a reusable key name from the local hosts hostname
#
# @return [String] safe key name for current host
# @api private
def key_name
"#{key_name_prefix}-#{@options[:aws_keyname_modifier]}-#{@options[:timestamp].strftime("%F_%H_%M_%S_%N")}"
end
# Returns the local user running this tool
#
# @return [String] username of local user
# @api private
def local_user
ENV['USER']
end
# Creates the KeyPair for this test run
#
# @param region [Aws::EC2::Region] region to create the key pair in
# @return [Aws::EC2::KeyPair] created key_pair
# @api private
def ensure_key_pair(region)
pair_name = key_name()
delete_key_pair(region, pair_name)
create_new_key_pair(region, pair_name)
end
# Deletes key pairs from all regions
#
# @param [String] keypair_name_filter if given, will get all keypairs that match
# a simple {::String#start_with?} filter. If no filter is given, the basic key
# name returned by {#key_name} will be used.
#
# @return nil
# @api private
def delete_key_pair_all_regions(keypair_name_filter=nil)
region_keypairs_hash = my_key_pairs(keypair_name_filter)
region_keypairs_hash.each_pair do |region, keypair_name_array|
keypair_name_array.each do |keypair_name|
delete_key_pair(region, keypair_name)
end
end
end
# Gets the Beaker user's keypairs by region
#
# @param [String] name_filter if given, will get all keypairs that match
# a simple {::String#start_with?} filter. If no filter is given, the basic key
# name returned by {#key_name} will be used.
#
# @return [Hash{String=>Array[String]}] a hash of region name to
# an array of the keypair names that match for the filter
# @api private
def my_key_pairs(name_filter=nil)
keypairs_by_region = {}
key_name_filter = name_filter ? "#{name_filter}-*" : key_name
regions.each do |region|
keypairs_by_region[region] = client(region).describe_key_pairs(
:filters => [{ :name => 'key-name', :values => [key_name_filter] }]
).key_pairs.map(&:key_name)
end
keypairs_by_region
end
# Deletes a given key pair
#
# @param [Aws::EC2::Region] region the region the key belongs to
# @param [String] pair_name the name of the key to be deleted
#
# @api private
def delete_key_pair(region, pair_name)
kp = client(region).describe_key_pairs(:key_names => [pair_name]).key_pairs.first
unless kp.nil?
@logger.debug("aws-sdk: delete key pair in region: #{region}")
client(region).delete_key_pair(:key_name => pair_name)
end
rescue Aws::EC2::Errors::InvalidKeyPairNotFound
nil
end
# Create a new key pair for a given Beaker run
#
# @param [Aws::EC2::Region] region the region the key pair will be imported into
# @param [String] pair_name the name of the key to be created
#
# @return [Aws::EC2::KeyPair] key pair created
# @raise [RuntimeError] raised if AWS keypair not created
def create_new_key_pair(region, pair_name)
@logger.debug("aws-sdk: importing new key pair: #{pair_name}")
client(region).import_key_pair(:key_name => pair_name, :public_key_material => public_key)
begin
client(region).wait_until(:key_pair_exists, { :key_names => [pair_name] }, :max_attempts => 5, :delay => 2)
rescue Aws::Waiters::Errors::WaiterFailed
raise RuntimeError, "AWS key pair #{pair_name} can not be queried, even after import"
end
end
# Return a reproducable security group identifier based on input ports
#
# @param ports [Array<Number>] array of port numbers
# @return [String] group identifier
# @api private
def group_id(ports)
if ports.nil? or ports.empty?
raise ArgumentError, "Ports list cannot be nil or empty"
end
unless ports.is_a? Set
ports = Set.new(ports)
end
# Lolwut, #hash is inconsistent between ruby processes
"Beaker-#{Zlib.crc32(ports.inspect)}"
end
# Return an existing group, or create new one
#
# Accepts a VPC as input for checking & creation.
#
# @param vpc [Aws::EC2::VPC] the AWS vpc control object
# @param sg_cidr_ips [Array<String>] CIDRs used for outbound security group rule
# @return [Aws::EC2::SecurityGroup] created security group
# @api private
def ensure_ping_group(vpc, sg_cidr_ips = ['0.0.0.0/0'])
@logger.notify("aws-sdk: Ensure security group exists that enables ping, create if not")
group = client.describe_security_groups(
:filters => [
{ :name => 'group-name', :values => [PING_SECURITY_GROUP_NAME] },
{ :name => 'vpc-id', :values => [vpc.vpc_id] },
]
).security_groups.first
if group.nil?
group = create_ping_group(vpc, sg_cidr_ips)
end
group
end
# Return an existing group, or create new one
#
# Accepts a VPC as input for checking & creation.
#
# @param vpc [Aws::EC2::VPC] the AWS vpc control object
# @param ports [Array<Number>] an array of port numbers
# @param sg_cidr_ips [Array<String>] CIDRs used for outbound security group rule
# @return [Aws::EC2::SecurityGroup] created security group
# @api private
# Create a new ping enabled security group
#
# Accepts a region or VPC for group creation.
#
# @param region_or_vpc [Aws::EC2::Region, Aws::EC2::VPC] the AWS region or vpc control object
# @param sg_cidr_ips [Array<String>] CIDRs used for outbound security group rule
# @return [Aws::EC2::SecurityGroup] created security group
# @api private
def create_ping_group(region_or_vpc, sg_cidr_ips = ['0.0.0.0/0'])
@logger.notify("aws-sdk: Creating group #{PING_SECURITY_GROUP_NAME}")
cl = region_or_vpc.is_a?(String) ? client(region_or_vpc) : client
params = {
:description => 'Custom Beaker security group to enable ping',
:group_name => PING_SECURITY_GROUP_NAME,
}
params[:vpc_id] = region_or_vpc.vpc_id if region_or_vpc.is_a?(Aws::EC2::Types::Vpc)
group = cl.create_security_group(params)
sg_cidr_ips.each do |cidr_ip|
add_ingress_rule(
cl,
group,
cidr_ip,
'8', # 8 == ICMPv4 ECHO request
'-1', # -1 == All ICMP codes
'icmp',
)
end
group
end
# Create a new security group
#
# Accepts a region or VPC for group creation.
#
# @param region_or_vpc [Aws::EC2::Region, Aws::EC2::VPC] the AWS region or vpc control object
# @param ports [Array<Number>] an array of port numbers
# @param sg_cidr_ips [Array<String>] CIDRs used for outbound security group rule
# @return [Aws::EC2::SecurityGroup] created security group
# @api private
def create_group(region_or_vpc, ports, sg_cidr_ips = ['0.0.0.0/0'])
name = group_id(ports)
@logger.notify("aws-sdk: Creating group #{name} for ports #{ports.to_s}")
@logger.notify("aws-sdk: Creating group #{name} with CIDR IPs #{sg_cidr_ips.to_s}")
cl = region_or_vpc.is_a?(String) ? client(region_or_vpc) : client
params = {
:description => "Custom Beaker security group for #{ports.to_a}",
:group_name => name,
}
params[:vpc_id] = region_or_vpc.vpc_id if region_or_vpc.is_a?(Aws::EC2::Types::Vpc)
group = cl.create_security_group(params)
unless ports.is_a? Set
ports = Set.new(ports)
end
sg_cidr_ips.each do |cidr_ip|
ports.each do |port|
add_ingress_rule(cl, group, cidr_ip, port, port)
end
end
group
end
# Authorizes connections from certain CIDR to a range of ports
#
# @param cl [Aws::EC2::Client]
# @param sg_group [Aws::EC2::SecurityGroup] the AWS security group
# @param cidr_ip [String] CIDR used for outbound security group rule
# @param from_port [String] Starting Port number in the range
# @param to_port [String] Ending Port number in the range
# @return [void]
# @api private
def add_ingress_rule(cl, sg_group, cidr_ip, from_port, to_port, protocol = 'tcp')
cl.authorize_security_group_ingress(
:cidr_ip => cidr_ip,
:ip_protocol => protocol,
:from_port => from_port,
:to_port => to_port,
:group_id => sg_group.group_id,
)
end
# Return a hash containing AWS credentials
#
# @return [Hash<Symbol, String>] AWS credentials
# @api private
def load_credentials
return load_env_credentials if load_env_credentials.set?
load_fog_credentials(@options[:dot_fog])
end
# Return AWS credentials loaded from environment variables
#
# @param prefix [String] environment variable prefix
# @return [Aws::Credentials] ec2 credentials
# @api private
def load_env_credentials(prefix='AWS')
Aws::Credentials.new(
ENV["#{prefix}_ACCESS_KEY_ID"],
ENV["#{prefix}_SECRET_ACCESS_KEY"],
ENV["#{prefix}_SESSION_TOKEN"]
)
end
# Return a hash containing the fog credentials for EC2
#
# @param dot_fog [String] dot fog path
# @return [Aws::Credentials] ec2 credentials
# @api private
def load_fog_credentials(dot_fog = '.fog')
default = get_fog_credentials(dot_fog)
raise "You must specify an aws_access_key_id in your .fog file (#{dot_fog}) for ec2 instances!" unless default[:aws_access_key_id]
raise "You must specify an aws_secret_access_key in your .fog file (#{dot_fog}) for ec2 instances!" unless default[:aws_secret_access_key]
Aws::Credentials.new(
default[:aws_access_key_id],
default[:aws_secret_access_key],
default[:aws_session_token]
)
end
# Adds port 8143 to host[:additional_ports]
# if master, database and dashboard are not on same instance
def test_split_install
@hosts.each do |host|
mono_roles = ['master', 'database', 'dashboard']
roles_intersection = host[:roles] & mono_roles
if roles_intersection.size != 3 && roles_intersection.any?
host[:additional_ports] ? host[:additional_ports].push(8143) : host[:additional_ports] = [8143]
end
end
end
end
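A self-contained sketch of the deterministic group naming from AwsSdk#group_id in the record above; it reproduces the CRC32-over-Set scheme so the expected security-group name can be computed outside Beaker (the port list is an example).

require 'set'
require 'zlib'

# Mirrors #group_id: a stable name derived from the port set. CRC32 is used because
# Ruby's Object#hash is randomized per process and cannot produce reusable names.
def beaker_group_id(ports)
  raise ArgumentError, 'Ports list cannot be nil or empty' if ports.nil? || ports.empty?
  ports = Set.new(ports) unless ports.is_a?(Set)
  "Beaker-#{Zlib.crc32(ports.inspect)}"
end

beaker_group_id([22, 8139, 61613]) # same port set in the same order => same name on every run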
|
mongodb/mongo-ruby-driver | lib/mongo/server.rb | Mongo.Server.handle_auth_failure! | ruby | def handle_auth_failure!
yield
rescue Mongo::Error::SocketTimeoutError
# possibly cluster is slow, do not give up on it
raise
rescue Mongo::Error::SocketError
# non-timeout network error
unknown!
pool.disconnect!
raise
rescue Auth::Unauthorized
# auth error, keep server description and topology as they are
pool.disconnect!
raise
end | Handle authentication failure.
@example Handle possible authentication failure.
server.handle_auth_failure! do
Auth.get(user).login(self)
end
@raise [ Auth::Unauthorized ] If the authentication failed.
@return [ Object ] The result of the block execution.
@since 2.3.0 | train | https://github.com/mongodb/mongo-ruby-driver/blob/dca26d0870cb3386fad9ccc1d17228097c1fe1c8/lib/mongo/server.rb#L367-L381 | class Server
extend Forwardable
include Monitoring::Publishable
include Event::Publisher
# The default time in seconds to timeout a connection attempt.
#
# @since 2.4.3
CONNECT_TIMEOUT = 10.freeze
# Instantiate a new server object. Will start the background refresh and
# subscribe to the appropriate events.
#
# @api private
#
# @example Initialize the server.
# Mongo::Server.new('127.0.0.1:27017', cluster, monitoring, listeners)
#
# @note Server must never be directly instantiated outside of a Cluster.
#
# @param [ Address ] address The host:port address to connect to.
# @param [ Cluster ] cluster The cluster the server belongs to.
# @param [ Monitoring ] monitoring The monitoring.
# @param [ Event::Listeners ] event_listeners The event listeners.
# @param [ Hash ] options The server options.
#
# @option options [ Boolean ] :monitor For internal driver use only:
# whether to monitor the server after instantiating it.
# @option options [ true, false ] :monitoring_io For internal driver
# use only. Set to false to prevent SDAM-related I/O from being
# done by this server. Note: setting this option to false will make
# the server non-functional. It is intended for use in tests which
# manually invoke SDAM state transitions.
#
# @since 2.0.0
def initialize(address, cluster, monitoring, event_listeners, options = {})
@address = address
@cluster = cluster
@monitoring = monitoring
options = options.dup
monitor = options.delete(:monitor)
@options = options.freeze
@event_listeners = event_listeners
@connection_id_gen = Class.new do
include Id
end
@monitor = Monitor.new(address, event_listeners, monitoring,
options.merge(app_metadata: Monitor::AppMetadata.new(cluster.options)))
unless monitor == false
start_monitoring
end
@connected = true
@pool_lock = Mutex.new
end
# @return [ String ] The configured address for the server.
attr_reader :address
# @return [ Cluster ] cluster The server cluster.
attr_reader :cluster
# @return [ Monitor ] monitor The server monitor.
attr_reader :monitor
# @return [ Hash ] The options hash.
attr_reader :options
# @return [ Monitoring ] monitoring The monitoring.
attr_reader :monitoring
# Get the description from the monitor and scan on monitor.
def_delegators :monitor,
:description,
:scan!,
:heartbeat_frequency,
:last_scan,
:compressor
alias :heartbeat_frequency_seconds :heartbeat_frequency
# Delegate convenience methods to the monitor description.
def_delegators :description,
:arbiter?,
:features,
:ghost?,
:max_wire_version,
:max_write_batch_size,
:max_bson_object_size,
:max_message_size,
:tags,
:average_round_trip_time,
:mongos?,
:other?,
:primary?,
:replica_set_name,
:secondary?,
:standalone?,
:unknown?,
:last_write_date,
:logical_session_timeout
# Get the app metadata from the cluster.
def_delegators :cluster,
:app_metadata,
:cluster_time,
:update_cluster_time
def_delegators :features,
:check_driver_support!
# Is this server equal to another?
#
# @example Is the server equal to the other?
# server == other
#
# @param [ Object ] other The object to compare to.
#
# @return [ true, false ] If the servers are equal.
#
# @since 2.0.0
def ==(other)
return false unless other.is_a?(Server)
address == other.address
end
# Get a new context for this server in which to send messages.
#
# @example Get the server context.
# server.context
#
# @return [ Mongo::Server::Context ] context The server context.
#
# @since 2.0.0
#
# @deprecated Will be removed in version 3.0
def context
Context.new(self)
end
# Determine if a connection to the server is able to be established and
# messages can be sent to it.
#
# @example Is the server connectable?
# server.connectable?
#
# @return [ true, false ] If the server is connectable.
#
# @since 2.1.0
#
# @deprecated No longer necessary with Server Selection specification.
def connectable?; end
# Disconnect the server from the connection.
#
# @example Disconnect the server.
# server.disconnect!
#
# @param [ Boolean ] wait Whether to wait for background threads to
# finish running.
#
# @return [ true ] Always true with no exception.
#
# @since 2.0.0
def disconnect!(wait=false)
begin
# For backwards compatibility we disconnect/clear the pool rather
# than close it here.
pool.disconnect!
rescue Error::PoolClosedError
# If the pool was already closed, we don't need to do anything here.
end
monitor.stop!(wait)
@connected = false
true
end
# Whether the server is connected.
#
# @return [ true|false ] Whether the server is connected.
#
# @api private
# @since 2.7.0
def connected?
@connected
end
# When the server is flagged for garbage collection, stop the monitor
# thread.
#
# @example Finalize the object.
# Server.finalize(monitor)
#
# @param [ Server::Monitor ] monitor The server monitor.
#
# @since 2.2.0
def self.finalize(monitor)
proc { monitor.stop! }
end
# Start monitoring the server.
#
# Used internally by the driver to add a server to a cluster
# while delaying monitoring until the server is in the cluster.
#
# @api private
def start_monitoring
publish_sdam_event(
Monitoring::SERVER_OPENING,
Monitoring::Event::ServerOpening.new(address, cluster.topology)
)
if options[:monitoring_io] != false
monitor.run!
ObjectSpace.define_finalizer(self, self.class.finalize(monitor))
end
end
# Get a pretty printed server inspection.
#
# @example Get the server inspection.
# server.inspect
#
# @return [ String ] The nice inspection string.
#
# @since 2.0.0
def inspect
"#<Mongo::Server:0x#{object_id} address=#{address.host}:#{address.port}>"
end
# @note This method is experimental and subject to change.
#
# @api experimental
# @since 2.7.0
def summary
status = case
when primary?
'PRIMARY'
when secondary?
'SECONDARY'
when standalone?
'STANDALONE'
when arbiter?
'ARBITER'
when ghost?
'GHOST'
when other?
'OTHER'
when unknown?
'UNKNOWN'
else
# Since the summary method is often used for debugging, do not raise
# an exception in case none of the expected types matched
''
end
if replica_set_name
status += " replica_set=#{replica_set_name}"
end
address_bit = if address
"#{address.host}:#{address.port}"
else
'nil'
end
"#<Server address=#{address_bit} #{status}>"
end
# Get the connection pool for this server.
#
# @example Get the connection pool for the server.
# server.pool
#
# @return [ Mongo::Server::ConnectionPool ] The connection pool.
#
# @since 2.0.0
def pool
@pool_lock.synchronize do
@pool ||= ConnectionPool.new(self, options)
end
end
# Determine if the provided tags are a subset of the server's tags.
#
# @example Are the provided tags a subset of the server's tags.
# server.matches_tag_set?({ 'rack' => 'a', 'dc' => 'nyc' })
#
# @param [ Hash ] tag_set The tag set to compare to the server's tags.
#
# @return [ true, false ] If the provided tags are a subset of the server's tags.
#
# @since 2.0.0
def matches_tag_set?(tag_set)
tag_set.keys.all? do |k|
tags[k] && tags[k] == tag_set[k]
end
end
# Restart the server monitor.
#
# @example Restart the server monitor.
# server.reconnect!
#
# @return [ true ] Always true.
#
# @since 2.1.0
def reconnect!
if options[:monitoring_io] != false
monitor.restart!
end
@connected = true
end
# Execute a block of code with a connection, that is checked out of the
# server's pool and then checked back in.
#
# @example Send a message with the connection.
# server.with_connection do |connection|
# connection.dispatch([ command ])
# end
#
# @return [ Object ] The result of the block execution.
#
# @since 2.3.0
def with_connection(&block)
pool.with_connection(&block)
end
# Handle handshake failure.
#
# @since 2.7.0
# @api private
def handle_handshake_failure!
yield
rescue Mongo::Error::SocketError, Mongo::Error::SocketTimeoutError
unknown!
raise
end
# Handle authentication failure.
#
# @example Handle possible authentication failure.
# server.handle_auth_failure! do
# Auth.get(user).login(self)
# end
#
# @raise [ Auth::Unauthorized ] If the authentication failed.
#
# @return [ Object ] The result of the block execution.
#
# @since 2.3.0
# Whether the server supports modern read retries.
#
# @api private
def retry_reads?
!!(features.sessions_enabled? && logical_session_timeout)
end
# Will writes sent to this server be retried.
#
# @example Will writes be retried.
# server.retry_writes?
#
# @return [ true, false ] If writes will be retried.
#
# @note Retryable writes are only available on server versions 3.6+ and with
# sharded clusters or replica sets.
#
# @since 2.5.0
def retry_writes?
!!(features.sessions_enabled? && logical_session_timeout && !standalone?)
end
# Marks server unknown and publishes the associated SDAM event
# (server description changed).
#
# @since 2.4.0, SDAM events are sent as of version 2.7.0
def unknown!
# Just dispatch the description changed event here, SDAM flow
# will update description on the server without in-place mutations
# and invoke SDAM transitions as needed.
publish(Event::DESCRIPTION_CHANGED, description, Description.new(address))
end
# @api private
def update_description(description)
monitor.instance_variable_set('@description', description)
end
# @api private
def next_connection_id
@connection_id_gen.next_id
end
end
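A standalone restatement of the tag-subset check in Server#matches_tag_set? from the record above, runnable without a live deployment; the tag hashes are invented examples.

# A read preference tag set matches when every requested key/value pair is
# present in the server's tags; extra server tags are allowed.
def tag_set_matches?(server_tags, tag_set)
  tag_set.keys.all? { |k| server_tags[k] && server_tags[k] == tag_set[k] }
end

tag_set_matches?({ 'dc' => 'nyc', 'rack' => 'a' }, { 'dc' => 'nyc' })  #=> true
tag_set_matches?({ 'dc' => 'nyc' }, { 'dc' => 'nyc', 'rack' => 'a' })  #=> false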
|
kontena/kontena | cli/lib/kontena/cli/master/login_command.rb | Kontena::Cli::Master.LoginCommand.authentication_path | ruby | def authentication_path(local_port: nil, invite_code: nil, expires_in: nil, remote: false)
auth_url_params = {}
if remote
auth_url_params[:redirect_uri] = "/code"
elsif local_port
auth_url_params[:redirect_uri] = "http://localhost:#{local_port}/cb"
else
raise ArgumentError, "Local port not defined and not performing remote login"
end
auth_url_params[:invite_code] = invite_code if invite_code
auth_url_params[:expires_in] = expires_in if expires_in
"/authenticate?#{URI.encode_www_form(auth_url_params)}"
end | Build a path for master authentication
@param local_port [Fixnum] tcp port where localhost webserver is listening
@param invite_code [String] an invitation code generated when user was invited
@param expires_in [Fixnum] expiration time for the requested access token
@param remote [Boolean] true when performing a login where the code is displayed on the web page
@return [String] | train | https://github.com/kontena/kontena/blob/5cb5b4457895985231ac88e78c8cbc5a8ffb5ec7/cli/lib/kontena/cli/master/login_command.rb#L126-L138 | class LoginCommand < Kontena::Command
include Kontena::Cli::Common
parameter "[URL]", "Kontena Master URL or name"
option ['-j', '--join'], '[INVITE_CODE]', "Join master using an invitation code"
option ['-t', '--token'], '[TOKEN]', 'Use a pre-generated access token', environment_variable: 'KONTENA_TOKEN'
option ['-n', '--name'], '[NAME]', 'Set server name', environment_variable: 'KONTENA_MASTER'
option ['-c', '--code'], '[CODE]', 'Use authorization code generated during master install'
option ['-r', '--[no-]remote'], :flag, 'Login using a browser on another device', default: Kontena.browserless?
option ['-e', '--expires-in'], '[SECONDS]', 'Request token with expiration of X seconds. Use 0 to never expire', default: 7200
option ['-v', '--verbose'], :flag, 'Increase output verbosity'
option ['-f', '--force'], :flag, 'Force reauthentication'
option ['-s', '--silent'], :flag, 'Reduce output verbosity'
option ['--grid'], '[GRID]', 'Set grid'
option ['--no-login-info'], :flag, "Don't show login info", hidden: true
def execute
if self.code
exit_with_error "Can't use --token and --code together" if self.token
exit_with_error "Can't use --join and --code together" if self.join
end
if self.force?
exit_with_error "Can't use --code and --force together" if self.code
exit_with_error "Can't use --token and --force together" if self.token
end
server = select_a_server(self.name, self.url)
if self.token
# If a --token was given create a token with access_token set to --token value
server.token = Kontena::Cli::Config::Token.new(access_token: self.token, parent_type: :master, parent_name: server.name)
elsif server.token.nil? || self.force?
# Force reauth or no existing token, create a token with no access_token
server.token = Kontena::Cli::Config::Token.new(parent_type: :master, parent_name: server.name)
end
if self.grid
self.skip_grid_auto_select = true if self.respond_to?(:skip_grid_auto_select?)
server.grid = self.grid
end
# set server token by exchanging code if --code given
if self.code
use_authorization_code(server, self.code)
exit 0
end
# unless an invitation code was supplied, check auth and exit
# if existing auth works already.
unless self.join || self.force?
if auth_works?(server)
update_server_to_config(server)
display_login_info(only: :master) unless self.no_login_info?
exit 0
end
end
auth_params = {
remote: self.remote?,
invite_code: self.join,
expires_in: self.expires_in
}
if self.remote?
# no local browser? tell user to launch an external one
display_remote_message(server, auth_params)
auth_code = prompt.ask("Enter code displayed in browser:")
use_authorization_code(server, auth_code)
else
# local web flow
web_flow(server, auth_params)
end
display_login_info(only: :master) unless (running_silent? || self.no_login_info?)
end
def next_default_name
next_name('kontena-master')
end
def next_name(base)
if config.find_server(base)
new_name = base.dup
unless new_name =~ /\-\d+$/
new_name += "-2"
end
new_name.succ! until config.find_server(new_name).nil?
new_name
else
base
end
end
def master_account
@master_account ||= config.find_account('master')
end
def use_authorization_code(server, code)
response = vspinner "Exchanging authorization code for an access token from Kontena Master" do
Kontena::Client.new(server.url, server.token).exchange_code(code)
end
update_server(server, response)
update_server_to_config(server)
end
# Check if the existing (or --token) authentication works without reauthenticating
def auth_works?(server)
return false unless (server && server.token && server.token.access_token)
vspinner "Testing if authentication works using current access token" do
Kontena::Client.new(server.url, server.token).authentication_ok?(master_account.userinfo_endpoint)
end
end
# Build a path for master authentication
#
# @param local_port [Fixnum] tcp port where localhost webserver is listening
# @param invite_code [String] an invitation code generated when user was invited
# @param expires_in [Fixnum] expiration time for the requested access token
# @param remote [Boolean] true when performing a login where the code is displayed on the web page
# @return [String]
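# e.g. (values illustrative):
#   authentication_path(local_port: 5766, expires_in: 7200)
#   #=> "/authenticate?redirect_uri=http%3A%2F%2Flocalhost%3A5766%2Fcb&expires_in=7200"
#   authentication_path(remote: true, invite_code: 'abc123')
#   #=> "/authenticate?redirect_uri=%2Fcode&invite_code=abc123"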
# Request a redirect to the authentication url from master
#
# @param master_url [String] master root url
# @param auth_params [Hash] auth parameters (keyword arguments of #authentication_path)
# @return [String] url to begin authentication web flow
def authentication_url_from_master(master_url, auth_params)
client = Kontena::Client.new(master_url)
vspinner "Sending authentication request to receive an authorization URL" do
response = client.request(
http_method: :get,
path: authentication_path(auth_params),
expects: [501, 400, 302, 403],
auth: false
)
if client.last_response.status == 302
client.last_response.headers['Location']
elsif response.kind_of?(Hash)
exit_with_error [response['error'], response['error_description']].compact.join(' : ')
elsif response.kind_of?(String) && response.length > 1
exit_with_error response
else
exit_with_error "Invalid response to authentication request : HTTP#{client.last_response.status} #{client.last_response.body if debug?}"
end
end
end
def display_remote_message(server, auth_params)
url = authentication_url_from_master(server.url, auth_params.merge(remote: true))
if running_silent?
sputs url
else
puts "Visit this URL in a browser:"
puts "#{url}"
end
end
def web_flow(server, auth_params)
require_relative '../localhost_web_server'
require 'kontena/cli/browser_launcher'
web_server = Kontena::LocalhostWebServer.new
url = authentication_url_from_master(server.url, auth_params.merge(local_port: web_server.port))
uri = URI.parse(url)
puts "Opening a browser to #{uri.scheme}://#{uri.host}"
puts
puts "If you are running this command over an ssh connection or it's"
puts "otherwise not possible to open a browser from this terminal"
puts "then you must use the --remote flag or use a pregenerated"
puts "access token using the --token option."
puts
puts "Once the authentication is complete you can close the browser"
puts "window or tab and return to this window to continue."
puts
any_key_to_continue(10)
puts "If the browser does not open, try visiting this URL manually:"
puts "#{uri.to_s}"
puts
server_thread = Thread.new { Thread.main['response'] = web_server.serve_one }
Kontena::Cli::BrowserLauncher.open(uri.to_s)
spinner "Waiting for browser authorization response" do
server_thread.join
end
update_server(server, Thread.main['response'])
update_server_to_config(server)
end
def update_server(server, response)
update_server_token(server, response)
update_server_name(server, response)
update_server_username(server, response)
end
def update_server_name(server, response)
return nil unless server.name.nil?
if response.kind_of?(Hash) && response['server'] && response['server']['name']
server.name = next_name(response['server']['name'])
else
server.name = next_default_name
end
end
def update_server_username(server, response)
return nil unless response.kind_of?(Hash)
return nil unless response['user']
server.token.username = response['user']['name'] || response['user']['email']
server.username = server.token.username
end
def update_server_token(server, response)
if !response.kind_of?(Hash)
raise TypeError, "Response type mismatch - expected Hash, got #{response.class}"
elsif response['code']
use_authorization_code(server, response['code'])
elsif response['error']
exit_with_error "Authentication failed: #{response['error']} #{response['error_description']}"
else
server.token = Kontena::Cli::Config::Token.new
server.token.access_token = response['access_token']
server.token.refresh_token = response['refresh_token']
server.token.expires_at = response['expires_at']
end
end
def update_server_to_config(server)
server.name ||= next_default_name
config.servers << server unless config.servers.include?(server)
config.current_master = server.name
config.write
config.reset_instance
end
# Figure out or create a server based on url or name.
#
# No name or url provided: try to use current_master
# A name provided with --name but no url defined: try to find a server by name from config
# An URL starting with 'http' provided: try to find a server by url from config
# An URL not starting with 'http' provided: try to find a server by name
# An URL and a name provided
# - If a server is found by name: use entry and update URL to the provided url
# - Else create a new entry with the url and name
#
# @param name [String] master name
# @param url [String] master url or name
# @return [Kontena::Cli::Config::Server]
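    # @example Illustrative calls (the names and URL below are hypothetical, not from any real config)
    #   select_a_server(nil, nil)                           # use the current master from config
    #   select_a_server('staging', nil)                     # look up an existing server by name
    #   select_a_server(nil, 'https://master.example.com')  # look up or create a server by URL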
def select_a_server(name, url)
# no url, no name, try to use current master
if url.nil? && name.nil?
if config.current_master
return config.current_master
else
exit_with_error 'URL not specified and current master not selected'
end
end
if name && url
exact_match = config.find_server_by(url: url, name: name)
return exact_match if exact_match # found an exact match, going to use that one.
name_match = config.find_server(name)
if name_match
#found a server with the provided name, set the provided url to it and return
name_match.url = url
return name_match
else
# nothing found, create new.
return Kontena::Cli::Config::Server.new(name: name, url: url)
end
elsif name
# only --name provided, try to find a server with that name
name_match = config.find_server(name)
if name_match && name_match.url
return name_match
else
exit_with_error "Master #{name} was found from config, but it does not have an URL and no URL was provided on command line"
end
elsif url
# only url provided
if url =~ /^https?:\/\//
# url is actually an url
url_match = config.find_server_by(url: url)
if url_match
return url_match
else
return Kontena::Cli::Config::Server.new(url: url, name: nil)
end
else
name_match = config.find_server(url)
if name_match
unless name_match.url
exit_with_error "Master #{url} was found from config, but it does not have an URL and no URL was provided on command line"
end
return name_match
else
exit_with_error "Can't find a master with name #{name} from configuration"
end
end
end
end
end
|
oniram88/imdb-scan | lib/imdb/movie.rb | IMDB.Movie.title | ruby | def title
doc.at("//head/meta[@name='title']")["content"].split(/\(.*\)/)[0].strip! ||
doc.at("h1.header").children.first.text.strip
end | Get movie title
@return [String] | train | https://github.com/oniram88/imdb-scan/blob/e358adaba3db178df42c711c79c894f14d83c742/lib/imdb/movie.rb#L46-L50 | class Movie < IMDB::Skeleton
attr_accessor :link, :imdb_id
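  # @example Minimal usage sketch (the IMDb id and returned values are illustrative;
  #   actual results depend on the live imdb.com markup)
  #   movie = IMDB::Movie.new("0133093")
  #   movie.title        #=> "The Matrix"
  #   movie.genres       #=> ["Action", "Sci-Fi"]
  #   movie.release_date #=> "1999-03-31"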
def initialize(id_of)
    # !!! DON'T FORGET TO DEFINE NEW METHODS IN SUPER !!!
super("Movie", { :imdb_id => String,
:poster => String,
:title => String,
:release_date => String,
:cast => Array,
:photos => Array,
:director => String,
:director_person => Person,
:genres => Array,
:rating => Float,
:movielength => Integer,
:short_description => String,
:writers => Array }, [:imdb_id])
@imdb_id = id_of
@link = "http://www.imdb.com/title/tt#{@imdb_id}"
end
# Get movie poster address
# @return [String]
def poster
src = doc.at("#img_primary img")["src"] rescue nil
unless src.nil?
if src.match(/\._V1/)
return src.match(/(.*)\._V1.*(.jpg)/)[1, 2].join
else
return src
end
end
src
end
# Get movie title
# @return [String]
# Get movie cast listing
# @return [Cast[]]
def cast
doc.search("table.cast tr").map do |link|
#picture = link.children[0].search("img")[0]["src"] rescue nil
#name = link.children[1].content.strip rescue nil
id = link.children[1].search('a[@href^="/name/nm"]').first["href"].match(/\/name\/nm([0-9]+)/)[1] rescue nil
char = link.children[3].content.strip rescue nil
unless id.nil?
person = IMDB::Person.new(id)
IMDB::Cast.new(self, person, char)
end
end.compact
end
# Get movie photos
# @return [Array]
def photos
begin
doc.search('#main .media_index_thumb_list img').map { |i| i["src"] }
rescue
nil
end
end
# Get release date
# @return [String]
def release_date
if (node =doc.css('.infobar span.nobr meta[itemprop="datePublished"]')).length > 0
date = node.first['content']
if date.match /^\d{4}$/
"#{date}-01-01"
else
Date.parse(date).to_s
end
else
year = doc.at("h1.header .nobr").text[/\d{4}/]
"#{year}-01-01"
end
rescue
nil
end
# Get Director
# @return [String]
def director
self.director_person.name rescue nil
end
# Get Director Person class
# @return [Person]
def director_person
begin
link=doc.xpath("//h4[contains(., 'Director')]/..").at('a[@href^="/name/nm"]')
profile = link['href'].match(/\/name\/nm([0-9]+)/)[1] rescue nil
IMDB::Person.new(profile) unless profile.nil?
rescue
nil
end
end
# Genre List
# @return [Array]
def genres
doc.xpath("//h4[contains(., 'Genre')]/..").search("a").map { |g|
g.content.strip unless g.content =~ /See more/
}.compact
rescue
nil
end
    # Rating
# @return [Float]
def rating
@rating ||= doc.search(".star-box-giga-star").text.strip.to_f
rescue
nil
end
    # Get the length of the movie in minutes
# @return [Integer]
def movielength
doc.at("//h4[text()='Runtime:']/..").inner_html[/\d+ min/].to_i rescue nil
end
# Writer List
# @return [Array]
def writers
doc.css("h4:contains('Writing')").first.next_element.css('a[@href^="/name/nm"]').map { |w|
profile = w['href'].match(/\/name\/nm([0-9]+)/)[1] rescue nil
IMDB::Person.new(profile) unless profile.nil?
}
end
# @return [String]
def short_description
doc.at("#overview-top p[itemprop=description]").try(:text).try(:strip)
end
private
def doc
if caller[0] =~ /`([^']*)'/ and ($1 == "cast" or $1 == "writers")
@doc_full ||= Nokogiri::HTML(open("#{@link}/fullcredits"))
elsif caller[0] =~ /`([^']*)'/ and ($1 == "photos")
@doc_photo ||= Nokogiri::HTML(open("#{@link}/mediaindex"))
else
@doc ||= Nokogiri::HTML(open("#{@link}"))
end
end
end # Movie
|
medcat/brandish | lib/brandish/application.rb | Brandish.Application.directory_global_option | ruby | def directory_global_option
@directory = Pathname.new(Dir.pwd)
global_option("--directory PATH") do |path|
@directory = Pathname.new(path).expand_path(Dir.pwd)
end
end | Defines the directory global option. This sets {#directory} to its
default value, and defines an option that can set it.
@return [void] | train | https://github.com/medcat/brandish/blob/c63f91dbb356aa0958351ad9bcbdab0c57e7f649/lib/brandish/application.rb#L90-L95 | class Application
include Commander::Methods
# The name of the configure file. This should be `"brandish.config.rb"`,
# but may change in the future (maybe `Brandishfile`?).
#
# @return [::String]
attr_reader :config_file
# The executing directory for the application. This is provided with the
# global option `-d`. Using this should be the same as using `cd` and
# executing the command.
#
# @return [::String]
attr_reader :directory
# Defines and runs the command line interface.
#
# @see #call
# @return [void]
def self.call
new.call
end
# Defines and runs the command line interface.
#
# @see #program_information
# @see #configure_global_option
# @see #directory_global_option
# @see InitializeCommand.define
# @see BenchCommand.define
# @see BuildCommand.define
# @see ServeCommand.define
# @return [void]
def call
program_information
configure_global_option
directory_global_option
command(:initialize) { |c| InitializeCommand.define(self, c) }
command(:bench) { |c| BenchCommand.define(self, c) }
command(:build) { |c| BuildCommand.define(self, c) }
command(:serve) { |c| ServeCommand.define(self, c) }
alias_command(:init, :initialize)
default_command(:build)
run!
end
# The program information. This is for use with Commander.
#
# @return [void]
def program_information
program :name, "Brandish"
program :version, Brandish::VERSION
program :help_formatter, :compact
program :help_paging, false
program :description, "A multi-format document generator."
program :help, "Author", "Jeremy Rodi <jeremy.rodi@medcat.me>"
program :help, "License", "MIT License Copyright (c) 2017 Jeremy Rodi"
end
# Defines the config global option. This sets {#config_file} to its
# default file, and defines an option that can set it.
#
# @return [void]
def configure_global_option
@config_file = "brandish.config.rb"
global_option("--config FILE") { |f| @config_file = f }
end
# Defines the directory global option. This sets {#directory} to its
# default value, and defines an option that can set it.
#
# @return [void]
# Options that are passed to the `progress` method provided by Commander.
# This makes it look "nice."
#
# @return [{::Symbol => ::String}]
PROGRESS_OPTIONS = {
title: " Building...", progress_str: "#", incomplete_str: " ",
format: ":title <:progress_bar> :percent_complete%",
complete_message: " Build complete!"
}.freeze
# The width of all of the set text items in the progress bar. This is
# used to dynamically determine the with of the progress bar later on.
#
# @return [::Numeric]
PROGRESS_WIDTH = " Building... < > 000% ".length
# Creates a progress bar on the terminal based off of the given array.
# This mostly passes everything on to the `progress` method provided by
# Commander, but with a few options added.
#
# @param array [::Array] The array of items that are being processed.
# @yield [item] Once for every item in the array. Once the block ends,
# the progress bar increments.
# @yieldparam item [::Object] One of the items in the array.
# @return [void]
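    # @example Sketch only (the glob pattern and the +process+ helper are hypothetical)
    #   progress(Dir.glob("source/**/*")) { |file| process(file) }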
def progress(array, &block)
# rubocop:disable Style/GlobalVars
width = $terminal.terminal_size[0] - PROGRESS_WIDTH
# rubocop:enable Style/GlobalVars
options = PROGRESS_OPTIONS.merge(width: width)
super(array, options, &block)
end
# If the configuration isn't already loaded, load it; otherwise, just
# return the already loaded version of the configuration file.
#
# @return [Configure]
def load_configuration_file
Brandish.configuration || load_configuration_file!
end
# Forces the configuration file to be loaded, even if it already was;
# this first resets the configuration using {Brandish.reset_configuration},
# then it loads the {#config_file_path}. If no configuration was provided,
# it fails; if it didn't load properly, it fails.
#
# @raise [RuntimeError] If no configuration was provided, or if it didn't
# load properly.
# @return [Configure]
def load_configuration_file!
Brandish.reset_configuration
load load_paths.find(@config_file).to_s
fail "No configuration provided" unless Brandish.configuration
Brandish.configuration
end
private
def load_paths
@_load_paths ||= Brandish::PathSet.new.tap { |s| s << @directory }
end
end
|
chaintope/bitcoinrb | lib/bitcoin/gcs_filter.rb | Bitcoin.GCSFilter.golomb_rice_encode | ruby | def golomb_rice_encode(bit_writer, p, x)
q = x >> p
while q > 0
nbits = q <= 64 ? q : 64
    bit_writer.write(-1, nbits) # -1 writes nbits one-bits (2**64 - 1, i.e. ~0ULL in C++)
q -= nbits
end
bit_writer.write(0, 1)
bit_writer.write(x, p)
end | encode golomb rice | train | https://github.com/chaintope/bitcoinrb/blob/39396e4c9815214d6b0ab694fa8326978a7f5438/lib/bitcoin/gcs_filter.rb#L115-L124 | class GCSFilter
MAX_ELEMENTS_SIZE = 4294967296 # 2**32
attr_reader :p # Golomb-Rice coding parameter
attr_reader :m # Inverse false positive rate
attr_reader :n # Number of elements in the filter
attr_reader :key # SipHash key
attr_reader :encoded # encoded filter with hex format.
# initialize Filter object.
# @param [String] key the 128-bit key used to randomize the SipHash outputs.
# @param [Integer] p the bit parameter of the Golomb-Rice coding.
# @param [Integer] m which determines the false positive rate.
# @param [Array] elements the filter elements.
# @param [String] encoded_filter encoded filter with hex format.
# @return [Bitcoin::GCSFilter]
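    # @example Minimal construction sketch (key and elements are placeholder values;
    #   P=19, M=784_931 are the BIP 158 basic-filter parameters)
    #   key      = SecureRandom.bytes(16)           # random 128-bit SipHash key
    #   elements = ['element one', 'element two']   # arbitrary binary strings
    #   filter   = Bitcoin::GCSFilter.new(key, 19, 784_931, elements: elements)
    #   filter.match?('element one')    #=> true
    #   filter.match?('something else') #=> false (except with probability ~1/M)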
def initialize(key, p, m, elements: nil, encoded_filter: nil)
raise 'specify either elements or encoded_filter.' if elements.nil? && encoded_filter.nil?
raise 'p must be <= 32' if p > 32
@key = key
@p = p
@m = m
if elements
raise 'elements size must be < 2**32.' if elements.size >= MAX_ELEMENTS_SIZE
@n = elements.size
encoded = Bitcoin.pack_var_int(@n)
bit_writer = Bitcoin::BitStreamWriter.new
unless elements.empty?
last_value = 0
hashed_set = elements.map{|e| hash_to_range(e) }.sort
hashed_set.each do |v|
delta = v - last_value
golomb_rice_encode(bit_writer, p, delta)
last_value = v
end
end
bit_writer.flush
encoded << bit_writer.stream
@encoded = encoded.bth
else
@encoded = encoded_filter
@n, payload = Bitcoin.unpack_var_int(encoded_filter.htb)
end
end
# Range of element hashes, F = N * M
def f
n * m
end
# Hash a data element to an integer in the range [0, F).
# @param [String] element with binary format.
# @return [Integer]
def hash_to_range(element)
hash = SipHash.digest(key, element)
map_into_range(hash, f)
end
# Checks if the element may be in the set. False positives are possible with probability 1/M.
# @param [String] element with binary format
# @return [Boolean] whether element in set.
def match?(element)
query = hash_to_range(element)
match_internal?([query], 1)
end
# Checks if any of the given elements may be in the set. False positives are possible with probability 1/M per element checked.
# This is more efficient that checking Match on multiple elements separately.
# @param [Array] elements list of elements with binary format.
# @return [Boolean] whether element in set.
def match_any?(elements)
queries = elements.map{|e| hash_to_range(e) }.sort
match_internal?(queries, queries.size)
end
private
# hash are then mapped uniformly over the desired range by multiplying with F and taking the top 64 bits of the 128-bit result.
# https://lemire.me/blog/2016/06/27/a-fast-alternative-to-the-modulo-reduction/
# https://stackoverflow.com/a/26855440
def map_into_range(x, y)
(x * y) >> 64
end
# Checks if the elements may be in the set.
# @param [Array[Integer]] hashes the query hash list.
# @param [Integer] size query size.
# @return [Boolean] whether elements in set.
def match_internal?(hashes, size)
n, payload = Bitcoin.unpack_var_int(encoded.htb)
bit_reader = Bitcoin::BitStreamReader.new(payload)
value = 0
hashes_index = 0
n.times do
delta = golomb_rice_decode(bit_reader, p)
value += delta
loop do
return false if hashes_index == size
return true if hashes[hashes_index] == value
break if hashes[hashes_index] > value
hashes_index += 1
end
end
false
end
# encode golomb rice
# decode golomb rice
def golomb_rice_decode(bit_reader, p)
q = 0
while bit_reader.read(1) == 1
q +=1
end
r = bit_reader.read(p)
(q << p) + r
end
end
|
litaio/lita | lib/lita/user.rb | Lita.User.save | ruby | def save
mention_name = metadata[:mention_name] || metadata["mention_name"]
current_keys = metadata.keys
redis_keys = redis.hkeys("id:#{id}")
delete_keys = (redis_keys - current_keys)
redis.pipelined do
redis.hdel("id:#{id}", *delete_keys) if delete_keys.any?
redis.hmset("id:#{id}", *metadata.to_a.flatten)
redis.set("name:#{name}", id)
redis.set("mention_name:#{mention_name}", id) if mention_name
end
end | Saves the user record to Redis, overwriting any previous data for the
current ID and user name.
@return [void] | train | https://github.com/litaio/lita/blob/c1a1f85f791b74e40ee6a1e2d53f19b5f7cbe0ba/lib/lita/user.rb#L112-L125 | class User
class << self
# The +Redis::Namespace+ for user persistence.
# @return [Redis::Namespace] The Redis connection.
def redis
@redis ||= Redis::Namespace.new("users", redis: Lita.redis)
end
# Creates a new user with the given ID, or merges and saves supplied
# metadata to an existing user with the given ID.
# @param id [Integer, String] A unique identifier for the user.
# @param metadata [Hash] An optional hash of metadata about the user.
# @option metadata [String] name (id) The display name of the user.
# @return [User] The user.
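      # @example Creating and re-finding a user (the id and names are illustrative only)
      #   user = Lita::User.create(42, name: "Carl", mention_name: "carl")
      #   Lita::User.find_by_mention_name("carl") == user #=> true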
def create(id, metadata = {})
existing_user = find_by_id(id)
metadata = Util.stringify_keys(metadata)
metadata = existing_user.metadata.merge(metadata) if existing_user
user = new(id, metadata)
user.save
user
end
# Finds a user by ID.
# @param id [Integer, String] The user's unique ID.
# @return [User, nil] The user or +nil+ if no such user is known.
def find_by_id(id)
metadata = redis.hgetall("id:#{id}")
new(id, metadata) if metadata.key?("name")
end
# Finds a user by mention name.
# @param mention_name [String] The user's mention name.
# @return [User, nil] The user or +nil+ if no such user is known.
# @since 3.0.0
def find_by_mention_name(mention_name)
id = redis.get("mention_name:#{mention_name}")
find_by_id(id) if id
end
# Finds a user by display name.
# @param name [String] The user's name.
# @return [User, nil] The user or +nil+ if no such user is known.
def find_by_name(name)
id = redis.get("name:#{name}")
find_by_id(id) if id
end
# Attempts to find a user with a name starting with the provided string.
# @param name [String] The first characters in the user's name.
# @return [User, nil] The user, or +nil+ if zero or greater than 1 matches were found.
# @since 3.0.0
def find_by_partial_name(name)
keys = redis.keys("name:#{name}*")
if keys.length == 1
id = redis.get(keys.first)
find_by_id(id)
end
end
# Finds a user by ID, mention name, name, or partial name.
# @param identifier [String] The user's ID, name, partial name, or mention name.
# @return [User, nil] The user or +nil+ if no users were found.
# @since 3.0.0
def fuzzy_find(identifier)
find_by_id(identifier) || find_by_mention_name(identifier) ||
find_by_name(identifier) || find_by_partial_name(identifier)
end
end
# The user's unique ID.
# @return [String] The user's ID.
attr_reader :id
# A hash of arbitrary metadata about the user.
# @return [Hash] The user's metadata.
attr_reader :metadata
# The user's name as displayed in the chat.
# @return [String] The user's name.
attr_reader :name
# @param id [Integer, String] The user's unique ID.
# @param metadata [Hash] Arbitrary user metadata.
# @option metadata [String] name (id) The user's display name.
def initialize(id, metadata = {})
@id = id.to_s
@metadata = Util.stringify_keys(metadata)
@name = @metadata["name"] || @id
ensure_name_metadata_set
end
# The name used to "mention" the user in a group chat.
# @return [String] The user's mention name.
# @since 3.1.0
def mention_name
metadata["mention_name"] || name
end
# Saves the user record to Redis, overwriting any previous data for the
# current ID and user name.
# @return [void]
# Compares the user against another user object to determine equality. Users
# are considered equal if they have the same ID and name.
# @param other (User) The user to compare against.
# @return [Boolean] True if users are equal, false otherwise.
def ==(other)
other.respond_to?(:id) && id == other.id && other.respond_to?(:name) && name == other.name
end
alias eql? ==
# Generates a +Fixnum+ hash value for this user object. Implemented to support equality.
# @return [Fixnum] The hash value.
# @see Object#hash
def hash
id.hash ^ name.hash
end
private
# Ensure the user's metadata contains their name, to ensure their Redis hash contains at least
# one value. It's not possible to store an empty hash key in Redis.
def ensure_name_metadata_set
username = metadata.delete("name")
metadata["name"] = username || id
end
# The Redis connection for user persistence.
def redis
self.class.redis
end
end
|
alexreisner/geocoder | lib/geocoder/stores/active_record.rb | Geocoder::Store.ActiveRecord.nearbys | ruby | def nearbys(radius = 20, options = {})
return nil unless geocoded?
options.merge!(:exclude => self) unless send(self.class.primary_key).nil?
self.class.near(self, radius, options)
end | Get nearby geocoded objects.
Takes the same options hash as the near class method (scope).
Returns nil if the object is not geocoded. | train | https://github.com/alexreisner/geocoder/blob/e087dc2759264ee6f307b926bb2de4ec2406859e/lib/geocoder/stores/active_record.rb#L287-L291 | module ActiveRecord
include Base
##
# Implementation of 'included' hook method.
#
def self.included(base)
base.extend ClassMethods
base.class_eval do
# scope: geocoded objects
scope :geocoded, lambda {
where("#{table_name}.#{geocoder_options[:latitude]} IS NOT NULL " +
"AND #{table_name}.#{geocoder_options[:longitude]} IS NOT NULL")
}
# scope: not-geocoded objects
scope :not_geocoded, lambda {
where("#{table_name}.#{geocoder_options[:latitude]} IS NULL " +
"OR #{table_name}.#{geocoder_options[:longitude]} IS NULL")
}
# scope: not-reverse geocoded objects
scope :not_reverse_geocoded, lambda {
where("#{table_name}.#{geocoder_options[:fetched_address]} IS NULL")
}
##
# Find all objects within a radius of the given location.
# Location may be either a string to geocode or an array of
# coordinates (<tt>[lat,lon]</tt>). Also takes an options hash
# (see Geocoder::Store::ActiveRecord::ClassMethods.near_scope_options
# for details).
#
scope :near, lambda{ |location, *args|
latitude, longitude = Geocoder::Calculations.extract_coordinates(location)
if Geocoder::Calculations.coordinates_present?(latitude, longitude)
options = near_scope_options(latitude, longitude, *args)
select(options[:select]).where(options[:conditions]).
order(options[:order])
else
# If no lat/lon given we don't want any results, but we still
# need distance and bearing columns so you can add, for example:
# .order("distance")
select(select_clause(nil, null_value, null_value)).where(false_condition)
end
}
##
# Find all objects within the area of a given bounding box.
# Bounds must be an array of locations specifying the southwest
# corner followed by the northeast corner of the box
# (<tt>[[sw_lat, sw_lon], [ne_lat, ne_lon]]</tt>).
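      # Example (the Venue model and the coordinates are illustrative):
      #
      #   Venue.within_bounding_box([[40.5, -74.3], [40.9, -73.7]])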
#
scope :within_bounding_box, lambda{ |*bounds|
sw_lat, sw_lng, ne_lat, ne_lng = bounds.flatten if bounds
if sw_lat && sw_lng && ne_lat && ne_lng
where(Geocoder::Sql.within_bounding_box(
sw_lat, sw_lng, ne_lat, ne_lng,
full_column_name(geocoder_options[:latitude]),
full_column_name(geocoder_options[:longitude])
))
else
select(select_clause(nil, null_value, null_value)).where(false_condition)
end
}
end
end
##
# Methods which will be class methods of the including class.
#
module ClassMethods
def distance_from_sql(location, *args)
latitude, longitude = Geocoder::Calculations.extract_coordinates(location)
if Geocoder::Calculations.coordinates_present?(latitude, longitude)
distance_sql(latitude, longitude, *args)
end
end
##
# Get options hash suitable for passing to ActiveRecord.find to get
# records within a radius (in kilometers) of the given point.
# Options hash may include:
#
# * +:units+ - <tt>:mi</tt> or <tt>:km</tt>; to be used.
# for interpreting radius as well as the +distance+ attribute which
# is added to each found nearby object.
# Use Geocoder.configure[:units] to configure default units.
# * +:bearing+ - <tt>:linear</tt> or <tt>:spherical</tt>.
# the method to be used for calculating the bearing (direction)
# between the given point and each found nearby point;
# set to false for no bearing calculation. Use
# Geocoder.configure[:distances] to configure default calculation method.
# * +:select+ - string with the SELECT SQL fragment (e.g. “id, name”)
# * +:select_distance+ - whether to include the distance alias in the
# SELECT SQL fragment (e.g. <formula> AS distance)
# * +:select_bearing+ - like +:select_distance+ but for bearing.
# * +:order+ - column(s) for ORDER BY SQL clause; default is distance;
# set to false or nil to omit the ORDER BY clause
# * +:exclude+ - an object to exclude (used by the +nearbys+ method)
# * +:distance_column+ - used to set the column name of the calculated distance.
# * +:bearing_column+ - used to set the column name of the calculated bearing.
# * +:min_radius+ - the value to use as the minimum radius.
# ignored if database is sqlite.
# default is 0.0
#
def near_scope_options(latitude, longitude, radius = 20, options = {})
if options[:units]
options[:units] = options[:units].to_sym
end
latitude_attribute = options[:latitude] || geocoder_options[:latitude]
longitude_attribute = options[:longitude] || geocoder_options[:longitude]
options[:units] ||= (geocoder_options[:units] || Geocoder.config.units)
select_distance = options.fetch(:select_distance) { true }
options[:order] = "" if !select_distance && !options.include?(:order)
select_bearing = options.fetch(:select_bearing) { true }
bearing = bearing_sql(latitude, longitude, options)
distance = distance_sql(latitude, longitude, options)
distance_column = options.fetch(:distance_column) { 'distance' }
bearing_column = options.fetch(:bearing_column) { 'bearing' }
# If radius is a DB column name, bounding box should include
# all rows within the maximum radius appearing in that column.
# Note: performance is dependent on variability of radii.
bb_radius = radius.is_a?(Symbol) ? maximum(radius) : radius
b = Geocoder::Calculations.bounding_box([latitude, longitude], bb_radius, options)
args = b + [
full_column_name(latitude_attribute),
full_column_name(longitude_attribute)
]
bounding_box_conditions = Geocoder::Sql.within_bounding_box(*args)
if using_unextended_sqlite?
conditions = bounding_box_conditions
else
min_radius = options.fetch(:min_radius, 0).to_f
# if radius is a DB column name,
# find rows between min_radius and value in column
if radius.is_a?(Symbol)
c = "BETWEEN ? AND #{radius}"
a = [min_radius]
else
c = "BETWEEN ? AND ?"
a = [min_radius, radius]
end
conditions = [bounding_box_conditions + " AND (#{distance}) " + c] + a
end
{
:select => select_clause(options[:select],
select_distance ? distance : nil,
select_bearing ? bearing : nil,
distance_column,
bearing_column),
:conditions => add_exclude_condition(conditions, options[:exclude]),
:order => options.include?(:order) ? options[:order] : "#{distance_column} ASC"
}
end
##
# SQL for calculating distance based on the current database's
# capabilities (trig functions?).
#
def distance_sql(latitude, longitude, options = {})
method_prefix = using_unextended_sqlite? ? "approx" : "full"
Geocoder::Sql.send(
method_prefix + "_distance",
latitude, longitude,
full_column_name(options[:latitude] || geocoder_options[:latitude]),
full_column_name(options[:longitude]|| geocoder_options[:longitude]),
options
)
end
##
# SQL for calculating bearing based on the current database's
# capabilities (trig functions?).
#
def bearing_sql(latitude, longitude, options = {})
if !options.include?(:bearing)
options[:bearing] = Geocoder.config.distances
end
if options[:bearing]
method_prefix = using_unextended_sqlite? ? "approx" : "full"
Geocoder::Sql.send(
method_prefix + "_bearing",
latitude, longitude,
full_column_name(options[:latitude] || geocoder_options[:latitude]),
full_column_name(options[:longitude]|| geocoder_options[:longitude]),
options
)
end
end
##
# Generate the SELECT clause.
#
def select_clause(columns, distance = nil, bearing = nil, distance_column = 'distance', bearing_column = 'bearing')
if columns == :id_only
return full_column_name(primary_key)
elsif columns == :geo_only
clause = ""
else
clause = (columns || full_column_name("*"))
end
if distance
clause += ", " unless clause.empty?
clause += "#{distance} AS #{distance_column}"
end
if bearing
clause += ", " unless clause.empty?
clause += "#{bearing} AS #{bearing_column}"
end
clause
end
##
# Adds a condition to exclude a given object by ID.
# Expects conditions as an array or string. Returns array.
#
def add_exclude_condition(conditions, exclude)
conditions = [conditions] if conditions.is_a?(String)
if exclude
conditions[0] << " AND #{full_column_name(primary_key)} != ?"
conditions << exclude.id
end
conditions
end
def using_unextended_sqlite?
using_sqlite? && !using_sqlite_with_extensions?
end
def using_sqlite?
!!connection.adapter_name.match(/sqlite/i)
end
def using_sqlite_with_extensions?
connection.adapter_name.match(/sqlite/i) &&
defined?(::SqliteExt) &&
%W(MOD POWER SQRT PI SIN COS ASIN ATAN2).all?{ |fn_name|
connection.raw_connection.function_created?(fn_name)
}
end
def using_postgres?
connection.adapter_name.match(/postgres/i)
end
##
# Use OID type when running in PosgreSQL
#
def null_value
using_postgres? ? 'NULL::text' : 'NULL'
end
##
# Value which can be passed to where() to produce no results.
#
def false_condition
using_unextended_sqlite? ? 0 : "false"
end
##
# Prepend table name if column name doesn't already contain one.
#
def full_column_name(column)
column = column.to_s
column.include?(".") ? column : [table_name, column].join(".")
end
end
##
# Get nearby geocoded objects.
# Takes the same options hash as the near class method (scope).
# Returns nil if the object is not geocoded.
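    # Example (the model, its +name+ column and the units are illustrative):
    #
    #   venue.nearbys(10, units: :km).each { |v| puts "#{v.name}: #{v.distance} km" }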
#
##
# Look up coordinates and assign to +latitude+ and +longitude+ attributes
# (or other as specified in +geocoded_by+). Returns coordinates (array).
#
def geocode
do_lookup(false) do |o,rs|
if r = rs.first
unless r.latitude.nil? or r.longitude.nil?
o.__send__ "#{self.class.geocoder_options[:latitude]}=", r.latitude
o.__send__ "#{self.class.geocoder_options[:longitude]}=", r.longitude
end
r.coordinates
end
end
end
alias_method :fetch_coordinates, :geocode
##
# Look up address and assign to +address+ attribute (or other as specified
# in +reverse_geocoded_by+). Returns address (string).
#
def reverse_geocode
do_lookup(true) do |o,rs|
if r = rs.first
unless r.address.nil?
o.__send__ "#{self.class.geocoder_options[:fetched_address]}=", r.address
end
r.address
end
end
end
alias_method :fetch_address, :reverse_geocode
end
|
sup-heliotrope/sup | lib/sup/modes/thread_index_mode.rb | Redwood.ThreadIndexMode.actually_toggle_starred | ruby | def actually_toggle_starred t
if t.has_label? :starred # if ANY message has a star
t.remove_label :starred # remove from all
UpdateManager.relay self, :unstarred, t.first
lambda do
t.first.add_label :starred
UpdateManager.relay self, :starred, t.first
regen_text
end
else
t.first.add_label :starred # add only to first
UpdateManager.relay self, :starred, t.first
lambda do
t.remove_label :starred
UpdateManager.relay self, :unstarred, t.first
regen_text
end
end
end | returns an undo lambda | train | https://github.com/sup-heliotrope/sup/blob/36f95462e3014c354c577d63a78ba030c4b84474/lib/sup/modes/thread_index_mode.rb#L292-L310 | class ThreadIndexMode < LineCursorMode
DATE_WIDTH = Time::TO_NICE_S_MAX_LEN
MIN_FROM_WIDTH = 15
LOAD_MORE_THREAD_NUM = 20
HookManager.register "index-mode-size-widget", <<EOS
Generates the per-thread size widget for each thread.
Variables:
thread: The message thread to be formatted.
EOS
HookManager.register "index-mode-date-widget", <<EOS
Generates the per-thread date widget for each thread.
Variables:
thread: The message thread to be formatted.
EOS
HookManager.register "mark-as-spam", <<EOS
This hook is run when a thread is marked as spam
Variables:
thread: The message thread being marked as spam.
EOS
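  # A hypothetical ~/.sup/hooks/index-mode-size-widget.rb could simply return a
  # string built from the thread passed in, e.g.:
  #   thread.size > 1 ? "[#{thread.size} msgs]" : ""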
register_keymap do |k|
k.add :load_threads, "Load #{LOAD_MORE_THREAD_NUM} more threads", 'M'
k.add_multi "Load all threads (! to confirm) :", '!' do |kk|
kk.add :load_all_threads, "Load all threads (may list a _lot_ of threads)", '!'
end
k.add :read_and_archive, "Archive thread (remove from inbox) and mark read", 'A'
k.add :cancel_search, "Cancel current search", :ctrl_g
k.add :reload, "Refresh view", '@'
k.add :toggle_archived, "Toggle archived status", 'a'
k.add :toggle_starred, "Star or unstar all messages in thread", '*'
k.add :toggle_new, "Toggle new/read status of all messages in thread", 'N'
k.add :edit_labels, "Edit or add labels for a thread", 'l'
k.add :edit_message, "Edit message (drafts only)", 'e'
k.add :toggle_spam, "Mark/unmark thread as spam", 'S'
k.add :toggle_deleted, "Delete/undelete thread", 'd'
k.add :kill, "Kill thread (never to be seen in inbox again)", '&'
k.add :flush_index, "Flush all changes now", '$'
k.add :jump_to_next_new, "Jump to next new thread", :tab
k.add :reply, "Reply to latest message in a thread", 'r'
k.add :reply_all, "Reply to all participants of the latest message in a thread", 'G'
k.add :forward, "Forward latest message in a thread", 'f'
k.add :toggle_tagged, "Tag/untag selected thread", 't'
k.add :toggle_tagged_all, "Tag/untag all threads", 'T'
k.add :tag_matching, "Tag matching threads", 'g'
k.add :apply_to_tagged, "Apply next command to all tagged threads", '+', '='
k.add :join_threads, "Force tagged threads to be joined into the same thread", '#'
k.add :undo, "Undo the previous action", 'u'
end
def initialize hidden_labels=[], load_thread_opts={}
super()
@mutex = Mutex.new # covers the following variables:
@threads = []
@hidden_threads = {}
@size_widget_width = nil
@size_widgets = []
@date_widget_width = nil
@date_widgets = []
@tags = Tagger.new self
## these guys, and @text and @lines, are not covered
@load_thread = nil
@load_thread_opts = load_thread_opts
@hidden_labels = hidden_labels + LabelManager::HIDDEN_RESERVED_LABELS
@date_width = DATE_WIDTH
@interrupt_search = false
initialize_threads # defines @ts and @ts_mutex
update # defines @text and @lines
UpdateManager.register self
@save_thread_mutex = Mutex.new
@last_load_more_size = nil
to_load_more do |size|
next if @last_load_more_size == 0
load_threads :num => size,
:when_done => lambda { |num| @last_load_more_size = num }
end
end
def unsaved?; dirty? end
def lines; @text.length; end
def [] i; @text[i]; end
def contains_thread? t; @threads.include?(t) end
def reload
drop_all_threads
UndoManager.clear
BufferManager.draw_screen
load_threads :num => buffer.content_height
end
## open up a thread view window
def select t=nil, when_done=nil
t ||= cursor_thread or return
Redwood::reporting_thread("load messages for thread-view-mode") do
num = t.size
message = "Loading #{num.pluralize 'message body'}..."
BufferManager.say(message) do |sid|
t.each_with_index do |(m, *_), i|
next unless m
BufferManager.say "#{message} (#{i}/#{num})", sid if t.size > 1
m.load_from_source!
end
end
mode = ThreadViewMode.new t, @hidden_labels, self
BufferManager.spawn t.subj, mode
BufferManager.draw_screen
mode.jump_to_first_open if $config[:jump_to_open_message]
BufferManager.draw_screen # lame TODO: make this unnecessary
## the first draw_screen is needed before topline and botline
## are set, and the second to show the cursor having moved
t.remove_label :unread
Index.save_thread t
update_text_for_line curpos
UpdateManager.relay self, :read, t.first
when_done.call if when_done
end
end
def multi_select threads
threads.each { |t| select t }
end
## these two methods are called by thread-view-modes when the user
## wants to view the previous/next thread without going back to
## index-mode. we update the cursor as a convenience.
def launch_next_thread_after thread, &b
launch_another_thread thread, 1, &b
end
def launch_prev_thread_before thread, &b
launch_another_thread thread, -1, &b
end
def launch_another_thread thread, direction, &b
l = @lines[thread] or return
target_l = l + direction
t = @mutex.synchronize do
if target_l >= 0 && target_l < @threads.length
@threads[target_l]
end
end
if t # there's a next thread
set_cursor_pos target_l # move out of mutex?
select t, b
elsif b # no next thread. call the block anyways
b.call
end
end
def handle_single_message_labeled_update sender, m
## no need to do anything different here; we don't differentiate
## messages from their containing threads
handle_labeled_update sender, m
end
def handle_labeled_update sender, m
if(t = thread_containing(m))
l = @lines[t] or return
update_text_for_line l
elsif is_relevant?(m)
add_or_unhide m
end
end
def handle_simple_update sender, m
t = thread_containing(m) or return
l = @lines[t] or return
update_text_for_line l
end
%w(read unread archived starred unstarred).each do |state|
define_method "handle_#{state}_update" do |*a|
handle_simple_update(*a)
end
end
## overwrite me!
def is_relevant? m; false; end
def handle_added_update sender, m
add_or_unhide m
BufferManager.draw_screen
end
def handle_updated_update sender, m
t = thread_containing(m) or return
l = @lines[t] or return
@ts_mutex.synchronize do
@ts.delete_message m
@ts.add_message m
end
Index.save_thread t, sync_back = false
update_text_for_line l
end
def handle_location_deleted_update sender, m
t = thread_containing(m)
delete_thread t if t and t.first.id == m.id
@ts_mutex.synchronize do
@ts.delete_message m if t
end
update
end
def handle_single_message_deleted_update sender, m
@ts_mutex.synchronize do
return unless @ts.contains? m
@ts.remove_id m.id
end
update
end
def handle_deleted_update sender, m
t = @ts_mutex.synchronize { @ts.thread_for m }
return unless t
hide_thread t
update
end
def handle_killed_update sender, m
t = @ts_mutex.synchronize { @ts.thread_for m }
return unless t
hide_thread t
update
end
def handle_spammed_update sender, m
t = @ts_mutex.synchronize { @ts.thread_for m }
return unless t
hide_thread t
update
end
def handle_undeleted_update sender, m
add_or_unhide m
end
def handle_unkilled_update sender, m
add_or_unhide m
end
def undo
UndoManager.undo
end
def update
old_cursor_thread = cursor_thread
@mutex.synchronize do
## let's see you do THIS in python
@threads = @ts.threads.select { |t| !@hidden_threads.member?(t) }.select(&:has_message?).sort_by(&:sort_key)
@size_widgets = @threads.map { |t| size_widget_for_thread t }
@size_widget_width = @size_widgets.max_of { |w| w.display_length }
@date_widgets = @threads.map { |t| date_widget_for_thread t }
@date_widget_width = @date_widgets.max_of { |w| w.display_length }
end
set_cursor_pos @threads.index(old_cursor_thread)||curpos
regen_text
end
def edit_message
return unless(t = cursor_thread)
message, *_ = t.find { |m, *o| m.has_label? :draft }
if message
mode = ResumeMode.new message
BufferManager.spawn "Edit message", mode
else
BufferManager.flash "Not a draft message!"
end
end
## returns an undo lambda
def toggle_starred
t = cursor_thread or return
undo = actually_toggle_starred t
UndoManager.register "toggling thread starred status", undo, lambda { Index.save_thread t }
update_text_for_line curpos
cursor_down
Index.save_thread t
end
def multi_toggle_starred threads
UndoManager.register "toggling #{threads.size.pluralize 'thread'} starred status",
threads.map { |t| actually_toggle_starred t },
lambda { threads.each { |t| Index.save_thread t } }
regen_text
threads.each { |t| Index.save_thread t }
end
## returns an undo lambda
def actually_toggle_archived t
thread = t
pos = curpos
if t.has_label? :inbox
t.remove_label :inbox
UpdateManager.relay self, :archived, t.first
lambda do
thread.apply_label :inbox
update_text_for_line pos
UpdateManager.relay self,:unarchived, thread.first
end
else
t.apply_label :inbox
UpdateManager.relay self, :unarchived, t.first
lambda do
thread.remove_label :inbox
update_text_for_line pos
UpdateManager.relay self, :unarchived, thread.first
end
end
end
## returns an undo lambda
def actually_toggle_spammed t
thread = t
if t.has_label? :spam
t.remove_label :spam
add_or_unhide t.first
UpdateManager.relay self, :unspammed, t.first
lambda do
thread.apply_label :spam
self.hide_thread thread
UpdateManager.relay self,:spammed, thread.first
end
else
t.apply_label :spam
hide_thread t
UpdateManager.relay self, :spammed, t.first
lambda do
thread.remove_label :spam
add_or_unhide thread.first
UpdateManager.relay self,:unspammed, thread.first
end
end
end
## returns an undo lambda
def actually_toggle_deleted t
if t.has_label? :deleted
t.remove_label :deleted
add_or_unhide t.first
UpdateManager.relay self, :undeleted, t.first
lambda do
t.apply_label :deleted
hide_thread t
UpdateManager.relay self, :deleted, t.first
end
else
t.apply_label :deleted
hide_thread t
UpdateManager.relay self, :deleted, t.first
lambda do
t.remove_label :deleted
add_or_unhide t.first
UpdateManager.relay self, :undeleted, t.first
end
end
end
def toggle_archived
t = cursor_thread or return
undo = actually_toggle_archived t
UndoManager.register "deleting/undeleting thread #{t.first.id}", undo, lambda { update_text_for_line curpos },
lambda { Index.save_thread t }
update_text_for_line curpos
Index.save_thread t
end
def multi_toggle_archived threads
undos = threads.map { |t| actually_toggle_archived t }
UndoManager.register "deleting/undeleting #{threads.size.pluralize 'thread'}", undos, lambda { regen_text },
lambda { threads.each { |t| Index.save_thread t } }
regen_text
threads.each { |t| Index.save_thread t }
end
def toggle_new
t = cursor_thread or return
t.toggle_label :unread
update_text_for_line curpos
cursor_down
Index.save_thread t
end
def multi_toggle_new threads
threads.each { |t| t.toggle_label :unread }
regen_text
threads.each { |t| Index.save_thread t }
end
def multi_toggle_tagged threads
@mutex.synchronize { @tags.drop_all_tags }
regen_text
end
def join_threads
## this command has no non-tagged form. as a convenience, allow this
## command to be applied to tagged threads without hitting ';'.
@tags.apply_to_tagged :join_threads
end
def multi_join_threads threads
@ts.join_threads threads or return
threads.each { |t| Index.save_thread t }
@tags.drop_all_tags # otherwise we have tag pointers to invalid threads!
update
end
def jump_to_next_new
n = @mutex.synchronize do
((curpos + 1) ... lines).find { |i| @threads[i].has_label? :unread } ||
(0 ... curpos).find { |i| @threads[i].has_label? :unread }
end
if n
## jump there if necessary
jump_to_line n unless n >= topline && n < botline
set_cursor_pos n
else
BufferManager.flash "No new messages."
end
end
def toggle_spam
t = cursor_thread or return
multi_toggle_spam [t]
end
## both spam and deleted have the curious characteristic that you
## always want to hide the thread after either applying or removing
## that label. in all thread-index-views except for
## label-search-results-mode, when you mark a message as spam or
## deleted, you want it to disappear immediately; in LSRM, you only
## see deleted or spam emails, and when you undelete or unspam them
## you also want them to disappear immediately.
def multi_toggle_spam threads
undos = threads.map { |t| actually_toggle_spammed t }
threads.each { |t| HookManager.run("mark-as-spam", :thread => t) }
UndoManager.register "marking/unmarking #{threads.size.pluralize 'thread'} as spam",
undos, lambda { regen_text }, lambda { threads.each { |t| Index.save_thread t } }
regen_text
threads.each { |t| Index.save_thread t }
end
def toggle_deleted
t = cursor_thread or return
multi_toggle_deleted [t]
end
## see comment for multi_toggle_spam
def multi_toggle_deleted threads
undos = threads.map { |t| actually_toggle_deleted t }
UndoManager.register "deleting/undeleting #{threads.size.pluralize 'thread'}",
undos, lambda { regen_text }, lambda { threads.each { |t| Index.save_thread t } }
regen_text
threads.each { |t| Index.save_thread t }
end
def kill
t = cursor_thread or return
multi_kill [t]
end
def flush_index
@flush_id = BufferManager.say "Flushing index..."
Index.save_index
BufferManager.clear @flush_id
end
## m-m-m-m-MULTI-KILL
def multi_kill threads
UndoManager.register "killing/unkilling #{threads.size.pluralize 'threads'}" do
threads.each do |t|
if t.toggle_label :killed
add_or_unhide t.first
else
hide_thread t
end
end.each do |t|
UpdateManager.relay self, :labeled, t.first
Index.save_thread t
end
regen_text
end
threads.each do |t|
if t.toggle_label :killed
hide_thread t
else
add_or_unhide t.first
end
end.each do |t|
# send 'labeled'... this might be more specific
UpdateManager.relay self, :labeled, t.first
Index.save_thread t
end
killed, unkilled = threads.partition { |t| t.has_label? :killed }.map(&:size)
BufferManager.flash "#{killed.pluralize 'thread'} killed, #{unkilled} unkilled"
regen_text
end
def cleanup
UpdateManager.unregister self
if @load_thread
@load_thread.kill
BufferManager.clear @mbid if @mbid
sleep 0.1 # TODO: necessary?
BufferManager.erase_flash
end
dirty_threads = @mutex.synchronize { (@threads + @hidden_threads.keys).select { |t| t.dirty? } }
fail "dirty threads remain" unless dirty_threads.empty?
super
end
def toggle_tagged
t = cursor_thread or return
@mutex.synchronize { @tags.toggle_tag_for t }
update_text_for_line curpos
cursor_down
end
def toggle_tagged_all
@mutex.synchronize { @threads.each { |t| @tags.toggle_tag_for t } }
regen_text
end
def tag_matching
query = BufferManager.ask :search, "tag threads matching (regex): "
return if query.nil? || query.empty?
query = begin
/#{query}/i
rescue RegexpError => e
BufferManager.flash "error interpreting '#{query}': #{e.message}"
return
end
@mutex.synchronize { @threads.each { |t| @tags.tag t if thread_matches?(t, query) } }
regen_text
end
def apply_to_tagged; @tags.apply_to_tagged; end
def edit_labels
thread = cursor_thread or return
speciall = (@hidden_labels + LabelManager::RESERVED_LABELS).uniq
old_labels = thread.labels
pos = curpos
keepl, modifyl = thread.labels.partition { |t| speciall.member? t }
user_labels = BufferManager.ask_for_labels :label, "Labels for thread: ", modifyl.sort_by {|x| x.to_s}, @hidden_labels
return unless user_labels
thread.labels = Set.new(keepl) + user_labels
user_labels.each { |l| LabelManager << l }
update_text_for_line curpos
UndoManager.register "labeling thread" do
thread.labels = old_labels
update_text_for_line pos
UpdateManager.relay self, :labeled, thread.first
Index.save_thread thread
end
UpdateManager.relay self, :labeled, thread.first
Index.save_thread thread
end
def multi_edit_labels threads
user_labels = BufferManager.ask_for_labels :labels, "Add/remove labels (use -label to remove): ", [], @hidden_labels
return unless user_labels
user_labels.map! { |l| (l.to_s =~ /^-/)? [l.to_s.gsub(/^-?/, '').to_sym, true] : [l, false] }
hl = user_labels.select { |(l,_)| @hidden_labels.member? l }
unless hl.empty?
BufferManager.flash "'#{hl}' is a reserved label!"
return
end
old_labels = threads.map { |t| t.labels.dup }
threads.each do |t|
user_labels.each do |(l, to_remove)|
if to_remove
t.remove_label l
else
t.apply_label l
LabelManager << l
end
end
UpdateManager.relay self, :labeled, t.first
end
regen_text
UndoManager.register "labeling #{threads.size.pluralize 'thread'}" do
threads.zip(old_labels).map do |t, old_labels|
t.labels = old_labels
UpdateManager.relay self, :labeled, t.first
Index.save_thread t
end
regen_text
end
threads.each { |t| Index.save_thread t }
end
def reply type_arg=nil
t = cursor_thread or return
m = t.latest_message
return if m.nil? # probably won't happen
m.load_from_source!
mode = ReplyMode.new m, type_arg
BufferManager.spawn "Reply to #{m.subj}", mode
end
def reply_all; reply :all; end
def forward
t = cursor_thread or return
m = t.latest_message
return if m.nil? # probably won't happen
m.load_from_source!
ForwardMode.spawn_nicely :message => m
end
def load_n_threads_background n=LOAD_MORE_THREAD_NUM, opts={}
return if @load_thread # todo: wrap in mutex
@load_thread = Redwood::reporting_thread("load threads for thread-index-mode") do
num = load_n_threads n, opts
opts[:when_done].call(num) if opts[:when_done]
@load_thread = nil
end
end
## TODO: figure out @ts_mutex in this method
def load_n_threads n=LOAD_MORE_THREAD_NUM, opts={}
@interrupt_search = false
@mbid = BufferManager.say "Searching for threads..."
ts_to_load = n
ts_to_load = ts_to_load + @ts.size unless n == -1 # -1 means all threads
orig_size = @ts.size
last_update = Time.now
@ts.load_n_threads(ts_to_load, opts) do |i|
if (Time.now - last_update) >= 0.25
BufferManager.say "Loaded #{i.pluralize 'thread'}...", @mbid
update
BufferManager.draw_screen
last_update = Time.now
end
::Thread.pass
break if @interrupt_search
end
@ts.threads.each { |th| th.labels.each { |l| LabelManager << l } }
update
BufferManager.clear @mbid if @mbid
@mbid = nil
BufferManager.draw_screen
@ts.size - orig_size
end
ignore_concurrent_calls :load_n_threads
def status
if (l = lines) == 0
"line 0 of 0"
else
"line #{curpos + 1} of #{l}"
end
end
def cancel_search
@interrupt_search = true
end
def load_all_threads
load_threads :num => -1
end
def load_threads opts={}
if opts[:num].nil?
n = ThreadIndexMode::LOAD_MORE_THREAD_NUM
else
n = opts[:num]
end
myopts = @load_thread_opts.merge({ :when_done => (lambda do |num|
opts[:when_done].call(num) if opts[:when_done]
if num > 0
BufferManager.flash "Found #{num.pluralize 'thread'}."
else
BufferManager.flash "No matches."
end
end)})
if opts[:background] || opts[:background].nil?
load_n_threads_background n, myopts
else
load_n_threads n, myopts
end
end
ignore_concurrent_calls :load_threads
def read_and_archive
return unless cursor_thread
thread = cursor_thread # to make sure lambda only knows about 'old' cursor_thread
was_unread = thread.labels.member? :unread
UndoManager.register "reading and archiving thread" do
thread.apply_label :inbox
thread.apply_label :unread if was_unread
add_or_unhide thread.first
Index.save_thread thread
end
cursor_thread.remove_label :unread
cursor_thread.remove_label :inbox
hide_thread cursor_thread
regen_text
Index.save_thread thread
end
def multi_read_and_archive threads
old_labels = threads.map { |t| t.labels.dup }
threads.each do |t|
t.remove_label :unread
t.remove_label :inbox
hide_thread t
end
regen_text
UndoManager.register "reading and archiving #{threads.size.pluralize 'thread'}" do
threads.zip(old_labels).each do |t, l|
t.labels = l
add_or_unhide t.first
Index.save_thread t
end
regen_text
end
threads.each { |t| Index.save_thread t }
end
def resize rows, cols
regen_text
super
end
protected
def add_or_unhide m
@ts_mutex.synchronize do
if (is_relevant?(m) || @ts.is_relevant?(m)) && !@ts.contains?(m)
@ts.load_thread_for_message m, @load_thread_opts
end
@hidden_threads.delete @ts.thread_for(m)
end
update
end
def thread_containing m; @ts_mutex.synchronize { @ts.thread_for m } end
## used to tag threads by query. this can be made a lot more sophisticated,
  ## but for right now we'll do the obvious thing.
def thread_matches? t, query
t.subj =~ query || t.snippet =~ query || t.participants.any? { |x| x.longname =~ query }
end
def size_widget_for_thread t
HookManager.run("index-mode-size-widget", :thread => t) || default_size_widget_for(t)
end
def date_widget_for_thread t
HookManager.run("index-mode-date-widget", :thread => t) || default_date_widget_for(t)
end
def cursor_thread; @mutex.synchronize { @threads[curpos] }; end
def drop_all_threads
@tags.drop_all_tags
initialize_threads
update
end
def delete_thread t
@mutex.synchronize do
i = @threads.index(t) or return
@threads.delete_at i
@size_widgets.delete_at i
@date_widgets.delete_at i
@tags.drop_tag_for t
end
end
def hide_thread t
@mutex.synchronize do
i = @threads.index(t) or return
raise "already hidden" if @hidden_threads[t]
@hidden_threads[t] = true
@threads.delete_at i
@size_widgets.delete_at i
@date_widgets.delete_at i
@tags.drop_tag_for t
end
end
def update_text_for_line l
return unless l # not sure why this happens, but it does, occasionally
need_update = false
@mutex.synchronize do
# and certainly not sure why this happens..
#
# probably a race condition between thread modification and updating
# going on.
return if @threads[l].empty?
@size_widgets[l] = size_widget_for_thread @threads[l]
@date_widgets[l] = date_widget_for_thread @threads[l]
## if a widget size has increased, we need to redraw everyone
need_update =
(@size_widgets[l].size > @size_widget_width) or
(@date_widgets[l].size > @date_widget_width)
end
if need_update
update
else
@text[l] = text_for_thread_at l
buffer.mark_dirty if buffer
end
end
def regen_text
threads = @mutex.synchronize { @threads }
@text = threads.map_with_index { |t, i| text_for_thread_at i }
@lines = threads.map_with_index { |t, i| [t, i] }.to_h
buffer.mark_dirty if buffer
end
def authors; map { |m, *o| m.from if m }.compact.uniq; end
## preserve author order from the thread
def author_names_and_newness_for_thread t, limit=nil
new = {}
seen = {}
authors = t.map do |m, *o|
next unless m && m.from
new[m.from] ||= m.has_label?(:unread)
next if seen[m.from]
seen[m.from] = true
m.from
end.compact
result = []
authors.each do |a|
break if limit && result.size >= limit
name = if AccountManager.is_account?(a)
"me"
elsif t.authors.size == 1
a.mediumname
else
a.shortname
end
result << [name, new[a]]
end
if result.size == 1 && (author_and_newness = result.assoc("me"))
unless (recipients = t.participants - t.authors).empty?
result = recipients.collect do |r|
break if limit && result.size >= limit
name = (recipients.size == 1) ? r.mediumname : r.shortname
["(#{name})", author_and_newness[1]]
end
end
end
result
end
AUTHOR_LIMIT = 5
def text_for_thread_at line
t, size_widget, date_widget = @mutex.synchronize do
[@threads[line], @size_widgets[line], @date_widgets[line]]
end
starred = t.has_label? :starred
## format the from column
cur_width = 0
ann = author_names_and_newness_for_thread t, AUTHOR_LIMIT
from = []
ann.each_with_index do |(name, newness), i|
break if cur_width >= from_width
last = i == ann.length - 1
abbrev =
if cur_width + name.display_length > from_width
name.slice_by_display_length(from_width - cur_width - 1) + "."
elsif cur_width + name.display_length == from_width
name.slice_by_display_length(from_width - cur_width)
else
if last
name.slice_by_display_length(from_width - cur_width)
else
name.slice_by_display_length(from_width - cur_width - 1) + ","
end
end
cur_width += abbrev.display_length
if last && from_width > cur_width
abbrev += " " * (from_width - cur_width)
end
from << [(newness ? :index_new_color : (starred ? :index_starred_color : :index_old_color)), abbrev]
end
is_me = AccountManager.method(:is_account?)
directly_participated = t.direct_participants.any?(&is_me)
participated = directly_participated || t.participants.any?(&is_me)
subj_color =
if t.has_label?(:draft)
:index_draft_color
elsif t.has_label?(:unread)
:index_new_color
elsif starred
:index_starred_color
elsif Colormap.sym_is_defined(:index_subject_color)
:index_subject_color
else
:index_old_color
end
size_padding = @size_widget_width - size_widget.display_length
size_widget_text = sprintf "%#{size_padding}s%s", "", size_widget
date_padding = @date_widget_width - date_widget.display_length
date_widget_text = sprintf "%#{date_padding}s%s", "", date_widget
[
[:tagged_color, @tags.tagged?(t) ? ">" : " "],
[:date_color, date_widget_text],
[:starred_color, (starred ? "*" : " ")],
] +
from +
[
[:size_widget_color, size_widget_text],
[:with_attachment_color , t.labels.member?(:attachment) ? "@" : " "],
[:to_me_color, directly_participated ? ">" : (participated ? '+' : " ")],
] +
(t.labels - @hidden_labels).sort_by {|x| x.to_s}.map {
|label| [Colormap.sym_is_defined("label_#{label}_color".to_sym) || :label_color, "#{label} "]
} +
[
[subj_color, t.subj + (t.subj.empty? ? "" : " ")],
[:snippet_color, t.snippet],
]
end
def dirty?; @mutex.synchronize { (@hidden_threads.keys + @threads).any? { |t| t.dirty? } } end
private
def default_size_widget_for t
case t.size
when 1
""
else
"(#{t.size})"
end
end
def default_date_widget_for t
t.date.getlocal.to_nice_s
end
def from_width
if buffer
[(buffer.content_width.to_f * 0.2).to_i, MIN_FROM_WIDTH].max
else
MIN_FROM_WIDTH # not sure why the buffer is gone
end
end
def initialize_threads
@ts = ThreadSet.new Index.instance, $config[:thread_by_subject]
@ts_mutex = Mutex.new
@hidden_threads = {}
end
end
|
rossf7/elasticrawl | lib/elasticrawl/job.rb | Elasticrawl.Job.confirm_message | ruby | def confirm_message
cluster = Cluster.new
case self.type
when 'Elasticrawl::ParseJob'
message = segment_list
else
message = []
end
message.push('Job configuration')
message.push(self.job_desc)
message.push('')
message.push(cluster.cluster_desc)
message.join("\n")
end | Displays a confirmation message showing the configuration of the
Elastic MapReduce job flow and cluster. | train | https://github.com/rossf7/elasticrawl/blob/db70bb6819c86805869f389daf1920f3acc87cef/lib/elasticrawl/job.rb#L8-L24 | class Job < ActiveRecord::Base
has_many :job_steps
# Displays a confirmation message showing the configuration of the
# Elastic MapReduce job flow and cluster.
# Displays the Job Name and Elastic MapReduce Job Flow ID if the job was
# launched successfully.
def result_message
"\nJob: #{self.job_name} Job Flow ID: #{self.job_flow_id}"
end
# Displays the history of the current job. Called by the status command.
def history
launch_time = "Launched: #{self.created_at.strftime('%Y-%m-%d %H:%M:%S')}"
"#{self.job_name} #{launch_time} #{self.job_desc}"
end
protected
# Calls the Elastic MapReduce API to create a Job Flow. Returns the Job Flow ID.
def run_job_flow(emr_config)
cluster = Cluster.new
job_flow = cluster.create_job_flow(self, emr_config)
job_steps.each do |step|
job_flow.add_step(step.job_flow_step(job_config))
end
begin
job_flow.run
rescue StandardError => e
raise ElasticMapReduceAccessError, e.message
end
end
# Returns an S3 location for storing either data or logs.
def build_s3_uri(s3_path)
URI::Generic.build(:scheme => 's3',
:host => bucket_name,
:path => s3_path).to_s
end
# Returns the S3 bucket name configured by the user using the init command.
def bucket_name
config = Config.new
config.load_config('jobs')['s3_bucket_name']
end
# Sets the job name which is the current Unix timestamp in milliseconds.
# This is the same naming format used for Common Crawl segment names.
def set_job_name
(Time.now.to_f * 1000).to_i.to_s
end
end
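A hedged usage sketch, not from the original repository: how the confirmation and result messages above might be used when launching a job. The Elasticrawl::ParseJob subclass is taken from the case statement in confirm_message; the launch step itself is only assumed.
job = Elasticrawl::ParseJob.new
puts job.confirm_message   # segment list (for parse jobs), job description and cluster description
# job.run(emr_config)      # hypothetical launch; would call run_job_flow and set job_flow_id
puts job.result_message    # "\nJob: <job_name> Job Flow ID: <job_flow_id>" once launched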
|
plexus/analects | lib/analects/encoding.rb | Analects.Encoding.ratings | ruby | def ratings(str)
all_valid_cjk(str).map do |enc|
[
enc,
recode(enc, str).codepoints.map do |point|
Analects::Models::Zi.codepoint_ranges.map.with_index do |range, idx|
next 6 - idx if range.include?(point)
0
end.inject(:+)
end.inject(:+)
]
end.sort_by(&:last).reverse
end | Crude way to guess which encoding it is | train | https://github.com/plexus/analects/blob/3ef5c9b54b5d31fd1c3b7143f9e5e4ae40185dd9/lib/analects/encoding.rb#L34-L46 | module Encoding
extend self
GB = ::Encoding::GB18030
BIG5 = ::Encoding::BIG5_UAO
def recode(enc, str)
str.force_encoding(enc).encode('UTF-8')
end
def from_gb(str)
recode(GB, str)
end
def from_big5(str)
recode(BIG5, str)
end
def valid_cjk(str)
[GB, BIG5].map do |enc|
begin
recode(enc, str)
enc
rescue ::Encoding::UndefinedConversionError
rescue ::Encoding::InvalidByteSequenceError
end
end.compact
end
# Crude way to guess which encoding it is
end
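A brief usage sketch, not part of the original file: choosing the most plausible encoding with ratings. The byte string is an illustrative GB-encoded sample only.
raw = "\xC4\xE3\xBA\xC3".b                        # illustrative CJK bytes
scores = Analects::Encoding.ratings(raw)          # e.g. [[GB18030, 12], [Big5-UAO, 3]], best first
best_encoding, _score = scores.first
utf8 = Analects::Encoding.recode(best_encoding, raw) if best_encoding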
|
arvicco/win_gui | lib/win_gui/window.rb | WinGui.Window.click | ruby | def click(opts={})
control = child(opts)
if control
left, top, right, bottom = control.get_window_rect
where = opts[:point] || opts[:where] || opts[:position]
point = case where
when Array
where # Explicit screen coords
when :random
[left + rand(right - left), top + rand(bottom - top)] # Random point within control window
else
[(left + right) / 2, (top + bottom) / 2] # Center of a control window
end
WinGui.set_cursor_pos *point
button = opts[:mouse_button] || opts[:mouse] || opts[:which]
down, up = (button == :right) ?
[WinGui::MOUSEEVENTF_RIGHTDOWN, WinGui::MOUSEEVENTF_RIGHTUP] :
[WinGui::MOUSEEVENTF_LEFTDOWN, WinGui::MOUSEEVENTF_LEFTUP]
WinGui.mouse_event down, 0, 0, 0, 0
WinGui.mouse_event up, 0, 0, 0, 0
point
else
nil
end
end | Emulates click of the control identified by opts (:id, :title, :class).
Beware of keyboard shortcuts in button titles! So, use "&Yes" instead of just "Yes".
Returns screen coordinates of click point if successful, nil if control was not found
:id:: integer control id (such as IDOK, IDCANCEL, etc)
:title:: window title
:class:: window class
:raise:: raise this exception instead of returning nil if nothing found
:position/point/where:: location where the click is to be applied - default :center
:mouse_button/button/which:: which mouse button to click - default :left (pass :right for a right-click) | train | https://github.com/arvicco/win_gui/blob/a3a4c18db2391144fcb535e4be2f0fb47e9dcec7/lib/win_gui/window.rb#L114-L142 | class Window
def initialize(handle)
@handle = handle
end
attr_reader :handle
class << self
# Looks up window handle using code specified in attached block (either with or without :timeout).
# Returns either Window instance (for a found handle) or nil if nothing found.
# Private method to dry up other window lookup methods
#
def lookup_window opts # :yields: index, position
# Need this to avoid handle considered local var in begin..end block
handle = yield
if opts[:timeout]
begin
timeout(opts[:timeout]) do
sleep SLEEP_DELAY until handle = yield
end
rescue Timeout::Error
nil
end
end
raise opts[:raise] if opts[:raise] && !handle
Window.new(handle) if handle
end
def lookup_window_in_collection opts, &collection_proc
class_name = opts[:class]
title = opts[:title]
id = opts[:id]
class_regexp = class_name.is_a? Regexp
title_regexp = title.is_a? Regexp
lookup_window(opts) do
collection_proc.call.each do |handle|
win = Window.new handle
id_match = !id || win.id == id
title_match = !title || win.title == title ||
title_regexp && win.title =~ title
class_match = !class_name || win.class_name == class_name ||
class_regexp && win.class_name =~ class_name
return win if class_match && title_match && id_match
end
nil
end
end
# Finds top level window by title/class, returns wrapped Window object or nil (raises exception if asked to).
# If timeout option given, waits for window to appear within timeout, returns nil if it didn't.
# Options:
# :title:: window title (String or Regexp)
# :class:: window class (String or Regexp)
# :timeout:: timeout (seconds)
# :raise:: raise this exception instead of returning nil if nothing found
#
def top_level opts={}
if opts[:class].is_a?(Regexp) || opts[:title].is_a?(Regexp)
lookup_window_in_collection(opts) { WinGui.enum_windows }
else
lookup_window(opts) { WinGui.find_window opts[:class], opts[:title] }
end
end
alias find top_level
end
# Finds child window (control) by either control ID or window class/title.
# By default, only direct children are searched.
# Options:
# :id:: integer control id (such as IDOK, IDCANCEL, etc)
# :title:: window title (String or Regexp)
# :class:: window class (String or Regexp)
# :indirect:: search all descendants, not only direct children
# :timeout:: timeout (seconds)
# :raise:: raise this exception instead of returning nil if nothing found
#
def child(opts={})
if opts[:indirect]
self.class.lookup_window_in_collection(opts) { enum_child_windows }
elsif opts[:class].is_a?(Regexp) || opts[:title].is_a?(Regexp)
self.class.lookup_window_in_collection(opts) do
enum_child_windows.select { |handle| child? handle }
end
else
self.class.lookup_window opts do
opts[:id] ? get_dlg_item(opts[:id]) : find_window_ex(0, opts[:class], opts[:title])
end
end
end
# Returns array of Windows that are descendants (not only DIRECT children) of a given Window
#
def children
enum_child_windows.map { |child_handle| Window.new child_handle }
end
# Emulates click of the control identified by opts (:id, :title, :class).
# Beware of keyboard shortcuts in button titles! So, use "&Yes" instead of just "Yes".
# Returns screen coordinates of click point if successful, nil if control was not found
# :id:: integer control id (such as IDOK, IDCANCEL, etc)
# :title:: window title
# :class:: window class
# :raise:: raise this exception instead of returning nil if nothing found
# :position/point/where:: location where the click is to be applied - default :center
# :mouse_button/button/which:: which mouse button to click - default :left (pass :right for a right-click)
#
# Waits for this window to close with timeout (default CLOSE_TIMEOUT).
#
def wait_for_close(timeout=CLOSE_TIMEOUT)
timeout(timeout) do
sleep SLEEP_DELAY while window_visible?
end
end
# We alias convenience method shut_window (from Win::Gui::Window) with even more convenient
# window.close
# Please keep in mind that Win32 API has another function CloseWindow that merely MINIMIZES window.
# If you want to invoke this function, you can do it like this:
# window.close_window
#
def close
shut_window
end
# Alias for [get_]window_text
#
def title
get_window_text
end
def thread
get_window_thread_process_id.first
end
def process
get_window_thread_process_id.last
end
alias pid process
# Control ID associated with the window (only makes sense for controls)
def id
get_dlg_ctrl_id
end
# Since Window instances wrap actual window handles, they should directly support Win32 API functions
# manipulating these handles. Therefore, when unsupported instance method is invoked, we check if
# WinGui responds to such method, and if yes, call it with our window handle as a first argument.
# This gives us all handle-related WinGui functions as instance methods for Window instances, like so:
# window.visible?
# This API is much more Ruby-like compared to:
# visible?(window.handle)
# Of course, if we invoke WinGui function that DOESN'T accept handle as a first arg this way, we are screwed.
# Call such functions only like this:
# WinGui.function(*args)
# TODO: Such setup is problematic if WinGui is included into Window ancestor chain.
# TODO: In this case, all WinGui functions become available as instance methods, and method_missing never fires.
# TODO: It may be a better solution to explicitly define all needed instance methods,
# TODO: instead of showing off cool meta-programming skillz. ;-)
#
def method_missing(name, *args, &block)
if WinGui.respond_to? name
# puts "Window #{@handle} calling: #{name} #{@handle} #{args} &#{block}"
WinGui.send(name, @handle, *args, &block)
else
super
end
end
end
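A hedged example of driving the lookup and click helpers above; the window title, button caption and timeout are placeholders.
win = WinGui::Window.top_level(title: /Notepad/, timeout: 5)
if win
point = win.click(title: '&Save')    # left-clicks the control's center by default
puts point ? "clicked at #{point.inspect}" : 'button not found'
win.close
end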
|
Danieth/rb_maxima | lib/maxima/histogram.rb | Maxima.Histogram.to_percentage | ruby | def to_percentage()
@to_percentage ||=
begin
sum = points.sum(&:last)
Histogram.new(
points.map do |(x,y)|
[
x,
y.fdiv(sum)
]
end
)
end
end | PDF | train | https://github.com/Danieth/rb_maxima/blob/21ac2ecb2bd55a7f653ef23d7ff59f4067efdca2/lib/maxima/histogram.rb#L48-L61 | class Histogram < Unit
attr_accessor :points
def self.between(min, max, function = ->(x) { x }, steps = 100)
Histogram.new(
*[].tap do |points|
(min..max).step((max - min).fdiv(steps)).each do |x|
points.push([x, function.call(x)])
end
end
)
end
def polynomial_fit(degrees)
Polynomial.fit(self, degrees)[:function]
end
def self.from_csv(csv)
Histogram.new(
*CSV.read(csv).map { |array| array.map(&:to_f) }
)
end
def self.parse(s)
Histogram.new((eval s), maxima_output: s)
end
def initialize(*points, **options)
super(**options)
while points.is_a?(Array) && points.first.is_a?(Array) && points.first.first.is_a?(Array)
points = points.flatten(1)
end
unless points.is_a?(Array) && points.first.is_a?(Array) && points.first.length == 2
throw :invalid_histogram_points
end
@points = points
end
def to_a
@points
end
# PDF
# literal CDF
def integral()
begin
sum = 0
Histogram.new(
points.map do |(x, y)|
sum += y
[x, sum]
end
)
end
end
def to_gnu_plot()
[*points.map(&:to_a).transpose, w: "points"]
end
def <=>(other)
case other
when Array, Histogram
self.to_a <=> other.to_a
else
-1
end
end
end
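A small usage sketch, not in the original source, showing to_percentage normalizing the counts so they sum to 1 and integral building the running total; the input points are arbitrary.
h = Maxima::Histogram.new([0, 1], [1, 2], [2, 1])
pdf = h.to_percentage
pdf.points    # => [[0, 0.25], [1, 0.5], [2, 0.25]]
cdf = pdf.integral
cdf.points    # => [[0, 0.25], [1, 0.75], [2, 1.0]]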
|
ebfjunior/juno-report | lib/juno-report/pdf.rb | JunoReport.Pdf.initialize_footer_values | ruby | def initialize_footer_values
@sections[:body][:settings][:groups].each do |group|
current_footer = {}
@sections[:groups][group.to_sym][:footer].each { |field, settings| current_footer[field] = nil } unless @sections[:groups][group.to_sym][:footer].nil?
@footers[group.to_sym] = current_footer unless current_footer.empty?
end if has_groups?
raise "The report must have at least a footer on body section" if @sections[:body][:footer].nil?
current_footer = {}
@sections[:body][:footer].each { |field, settings| current_footer[field] = nil }
@footers[:body] = current_footer unless current_footer.empty?
end | Create a structure to calculate the footer values for all groups. Appends the footer body to total values too. | train | https://github.com/ebfjunior/juno-report/blob/139f2a1733e0d7a68160b338cc1a4645f05d5953/lib/juno-report/pdf.rb#L197-L207 | module Pdf
#Responsible for generating a report, based on the rules passed as a parameter to Juno::Report::generate.
#Juno Reports supports groups, simply by specifying them in the rules file.
#Receives a collection as a parameter, which should be an Array of records for the report.
def generate(collection)
@defaults = {
:style => :normal,
:size => 12,
:align => :left,
:format => false,
:font => 'Times-Roman',
:type => :text,
:color => '000000',
:fixed => false
}
get_sections
set_pos_y
@defaults.merge!(@sections[:defaults]) unless @sections[:defaults].nil?
collection = [collection] unless collection.is_a?(Array) or collection.is_a?(ActiveRecord::Relation)
print_section :page unless @sections[:page].nil?
set_pos_y (@sections[:body][:settings][:posY] || 0)
@current_groups = {}
@footers = {}
@count = 0
unless @sections[:groups].empty?
reset_groups_values
else
draw_columns
end
initialize_footer_values
can_print_footer = false
collection.each do |record|
@record = record.is_a?(Hash) ? ReportObject.new(record) : record #Convert the hash into an Object so a module can be extended onto it later
headers_to_print, headers_height = calculate_header
unless headers_to_print.empty?
@count = 0
draw_footer headers_to_print, @sections[:groups] if can_print_footer
if @posY - headers_height < 2*@sections[:body][:settings][:height]
new_page
else
headers_to_print.each { |group| print_section group, @record, true }
draw_columns
end
end
can_print_footer = true
update_footer_values
print_section :body, @record
@count += 1
end
draw_footer(@sections[:body][:settings][:groups].collect {|group| group.to_sym}, @sections[:groups]) if has_groups?
draw_footer [:body], @sections
@pdf
end
protected
#Creates a new page, restarting the vertical position of the pointer.
#Print the whole header for the current groups and the columns of the report.
def new_page
@pdf.start_new_page
set_pos_y
print_section :page unless @sections[:page].nil?
set_pos_y (@sections[:body][:settings][:posY] || 0)
@current_groups.each do |field, value|
print_section field.to_sym, @record, true
end
draw_columns
end
#Generic function to print a section like :body, :page or the group sections.
def print_section(section_name, values = nil, group = false)
section = !group ? @sections[section_name] : @sections[:groups][section_name]
set_pos_y(section[:settings][:posY] || 0) unless section_name.eql?(:body) || section[:settings].nil?
new_page if @posY < 30
if section_name.eql? :body and @count % 2 != 0
@pdf.fill_color "F7F7F7"
width = @options[:page_layout] == :portrait ? 530 : 770
@pdf.fill_rectangle [0, @posY+(section[:settings][:height]/2)], width, section[:settings][:height]
end
section[:fields].each do |field, settings|
symbolize! settings[1] unless settings[1].nil?
set_pos_y settings[1][:posY] unless settings[1].nil? || settings[1][:posY].nil?
settings = [settings[0], @posY, (@defaults.merge (settings[1] || { }))]
settings[2][:style] = settings[2][:style].to_sym
set_options settings[2]
value = set_value values, settings, section_name, field, group
draw_text value, settings
end
set_pos_y (section[:settings][:height]) unless section[:settings].nil? || section[:settings][:height].nil?
end
def set_value(values, settings, section_name, field, group)
if group and !values.class.reflect_on_association(section_name).nil?
resource = values.send(section_name.to_sym)
else
resource = values
end
field.to_s.split(".").each do |part|
resource = resource.send(part) if !resource.class.reflect_on_association(part).nil?
end if settings[2][:value].nil?
field = field.to_s.split(".").last
value = settings[2][:value].nil? ? (resource.respond_to?(field) ? resource.send(field) : "") : settings[2][:value]
unless settings[2][:format].blank?
value = JunoReport::Pdf::Formatters.send(settings[2][:format], value)
end
string_cut = settings[2][:cut].nil? ? value : value[0..settings[2][:cut]]
string_cut
end
#Print a horizontal line with the whole width of the page.
def draw_line(y)
width = @options[:page_layout] == :portrait ? 530 : 770
@pdf.stroke { @pdf.horizontal_line 0, width, :at => y }
end
#Update the pointer vertical position to the specified value or 'zero' if the parameter is nil.
#Note: Prawn's pointer decreases downward; in other words, the top-left corner position is (0, 750). For
#semantic purposes, we treat the same corner as (0, 0).
def set_pos_y(posY = nil)
height = @options[:page_layout] == :portrait ? 750 : 520
@posY = height if @posY.nil?
@posY = posY.nil? ? height : @posY - posY
end
#Convert to symbol all hash keys, recursively.
def symbolize! hash
hash.symbolize_keys!
hash.values.select{|v| v.is_a? Hash}.each{|h| symbolize!(h)}
end
#Converts the structure of the rules to facilitate the generating process.
def get_sections
symbolize! @rules
raise "[body] section on YAML file is needed to generate the report." if @rules[:body].nil?
@sections = {:page => @rules[:page], :body => @rules[:body], :defaults => @rules[:defaults], :groups => {}}
@sections[:body][:settings][:groups].each { |group| @sections[:groups][group.to_sym] = @rules[group.to_sym] } if has_groups?
end
#@current_groups stores the value for all groups. When a value changes, the header is printed.
#This function sets a nil value for every item in @current_groups if the parameter is not passed. Otherwise,
#only the later groups will be cleared, to avoid conflicts with other groups.
def reset_groups_values current_group = nil
groups = @sections[:body][:settings][:groups]
groups.each_with_index do |group, idx|
@current_groups[group] = nil if current_group.nil? || groups.index(current_group.to_s) <= idx
end
end
#Calculates the headers which must be printed before printing the current record.
#The function also returns the current header height, so a new page can be created if the
#remaining space on the page is smaller than (header + a record height)
def calculate_header
headers = []
height = 0
@current_groups.each do |field, current_value|
identifier_field = @sections[:groups][field.to_sym][:settings][:identifier_field] || nil
value = (!@record.class.reflect_on_association(field).nil? and !identifier_field.nil?) ? @record.send(field.to_sym).send(identifier_field) : @record.send(field)
if value != current_value
reset_groups_values field
headers << field.to_sym
height += @sections[:groups][field.to_sym][:settings][:height] + @sections[:groups][field.to_sym][:settings][:posY]
@current_groups[field] = value
end
end unless @current_groups.empty?
[headers, height]
end
#Create a structure to calculate the footer values for all groups. Appends the footer body to total values too.
#Call the function that calculates the footer values for all groups and the total body footer, with
#different source for each
def update_footer_values
@sections[:body][:settings][:groups].reverse_each do |group|
calculate_footer_values group, @sections[:groups][group.to_sym][:footer]
end if has_groups?
calculate_footer_values :body, @sections[:body][:footer]
end
#Calculates the footer values for the group passed as a parameter. If the :behavior setting is used, a
#function in [lib/pdf/behaviors.rb] calculates the value of the current field; otherwise the value is
#read from the record (or its association)
def calculate_footer_values group, source
@footers[group.to_sym].each do |field, value|
footer_rule = source[field]
symbolize! footer_rule[1]
unless footer_rule[1][:behavior].nil?
@footers[group.to_sym][field] = JunoReport::Pdf::Behaviors.send footer_rule[1][:behavior].to_sym, value, (@record.respond_to?(field) ? @record.send(field) : nil)
else
if footer_rule[1][:value].blank?
value = !@record.class.reflect_on_association(group.to_sym).nil? ? @record.send(group.to_sym).send(field.to_sym) : @record.send(field)
else
value = footer_rule[1][:value]
end
@footers[group.to_sym][field] = footer_rule[1][:label].to_s + value
end unless @footers[group.to_sym].nil? || footer_rule[1].nil?
end
end
#Print the footers according to the groups and source specified
def draw_footer footers_to_print, source
footers_to_print.reverse_each do |group|
draw_line(@posY + @sections[:body][:settings][:height]/2)
source[group][:footer].each do |field, settings|
settings = [settings[0], @posY, (@defaults.merge (settings[1] || { }).symbolize_keys!)]
settings[2][:style] = settings[2][:style].to_sym
set_options settings[2]
draw_text @footers[group][field], settings
end
draw_line(@posY - @sections[:body][:settings][:height]/4)
set_pos_y @sections[:body][:settings][:height]
reset_footer group
end
end
#Resets the footer values for the next groups
def reset_footer(group); @footers[group].each { |field, value| @footers[group][field] = nil }; end
#Based on the key names of the :body section in the rules, the function draws columns with
#baselines on the top and bottom of the header.
def draw_columns
@sections[:body][:fields].each do |field, settings|
settings = [settings[0], @posY, (@defaults.merge (settings[1] || { }).symbolize_keys!)]
settings[2][:style] = settings[2][:style].to_sym
set_options settings[2]
draw_line(@posY + @sections[:body][:settings][:height]/2)
field = settings[2][:column] || field.to_s.split('_').inject('') do |str, part|
str << part.camelize << " "
end
draw_text field, settings
end
draw_line(@posY - @sections[:body][:settings][:height]/2)
set_pos_y @sections[:body][:settings][:height]
end
def has_groups?
!@sections[:body][:settings][:groups].nil?
end
end
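A hedged sketch of the @sections shape that initialize_footer_values above appears to expect; the field names and the 'behavior' settings are guesses, and only the nesting (body/groups, settings, footer) is taken from the code.
sections = {
body: { settings: { groups: ['customer'] }, footer: { total: ['Total:', { 'behavior' => 'sum' }] } },
groups: { customer: { footer: { total: ['Subtotal:', { 'behavior' => 'sum' }] } } }
}
# After initialize_footer_values runs against such a structure, @footers would be
# { customer: { total: nil }, body: { total: nil } }, ready for update_footer_values.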
|
Plasmarobo/simpleoutput | lib/simpleoutput.rb | SimpleOutput.SimpleOutputPlugin.get_data_as_points | ruby | def get_data_as_points
series_data = {}
@x.each_pair do |(key, x_series)|
#For each series of data
y_series = @y[key]
series_data[key] = []
x_series.each_with_index do |x_line, index|
#For each line
series_data[key] << [] #Create an empty set
y_line = y_series[index]
x_line.each_with_index do |x_point, index|
y_point = y_line[index]
series_data[key].last << [x_point, y_point]
end
end
end
return series_data
end | Internal Helpers | train | https://github.com/Plasmarobo/simpleoutput/blob/bbb572355348239dea35eb364f1bf5fa7bd638fd/lib/simpleoutput.rb#L163-L181 | class SimpleOutputPlugin
def initialize()
@x = {}
@y = {}
@series_names = {}
@data_id = 0
@annotations = {}
@current_name = "NameError"
@series_id = 0
@metadata = {}
end
#Virtual Functions
def options_callback(options)
end
def set_x_callback(data, name, options)
end
def set_y_callback(data, name, options)
end
def append_callback(x,y,name,options)
end
def new_data_callback(name)
end
#CORE functions
def translate_name(name)
if name == nil
name = @current_name
end
return name
end
def advance_series(name=nil)
@series_id += 1
@current_name = name == nil ? "series-#{@series_id}" : name
self.new_data_callback(name)
if !@series_names.has_key?(@current_name)
@series_names[@current_name] = []
end
@annotations[@current_name] = []
@current_name
end
def append_series_name(name=nil, options={})
name = translate_name(name)
if !@series_names.has_key?(name)
@series_names[name] = []
end
if options.has_key?('series')
@series_names[name] << options['series']
else
@series_names[name] << "data-#{@data_id}"
@data_id += 1
end
end
def new_data_check(name=nil)
(!@x.has_key?(name)) || (!@y.has_key?(name))
end
def set_x_data(data, name, options={})
@x[name] = []
@x[name] << data
self.set_x_callback(data, name, options)
end
def set_y_data(data, name, options={})
@y[name] = []
@y[name] << data
self.set_y_callback(data, name, options)
end
def new_data( x=[], y=[],name=nil, options={})
name = self.advance_series(name)
self.set_x_data(x, name, options)
self.set_y_data(y, name, options)
self.append_series_name(name,options)
self.options_callback(options)
end
#Interface Functions ===================================
def append_xy( x=[], y=[],name=nil, options={})
name = translate_name(name)
if !self.new_data_check(name)
@x[name] << x
@y[name] << y
self.append_series_name(name, options)
self.options_callback(options)
self.append_callback(x,y,name,options)
else
self.new_data(x,y,name,options)
end
end
def set_xy(x=[], y=[], name=nil, options={})
self.new_data(x,y,name,options)
end
def append_points(points =[], name=nil, options={})
x = []
y = []
points.each do |point|
x << point[0]
y << point[1]
end
self.append_xy(x,y,name,options)
end
def set_points(points = [], name=nil, options={})
x = []
y = []
points.each do |point|
x << point[0]
y << point[1]
end
self.set_xy(x,y,name, options)
end
def append_hash(hash = {}, name=nil, options={})
name = translate_name(name)
x, y = self.hash_to_xy(hash)
self.append_xy(x,y,name,options)
end
def set_hash(hash ={}, name=nil, options={})
x, y = self.hash_to_xy(hash)
self.set_xy(x,y,name,options)
end
def annotate(annotation, name=nil, options = {})
name = translate_name(name)
@annotations[name] << annotation
self.options_callback(options)
end
def set_options(name=nil, options = {})
self.options_callback(options)
end
#Internal Helpers
def get_data_as_points
series_data = {}
@x.each_pair do |(key, x_series)|
#For each series of data
y_series = @y[key]
series_data[key] = []
x_series.each_with_index do |x_line, index|
#For each line
series_data[key] << [] #Create an empty set
y_line = y_series[index]
x_line.each_with_index do |x_point, index|
y_point = y_line[index]
series_data[key].last << [x_point, y_point]
end
end
end
return series_data
end
def get_data_as_xy
series_data = {}
@x.each_pair do |(key, x_series)|
y_series = @y[key]
series_data[key] = []
x_series.each_with_index do |x_line, index|
y_line = y_series[index]
series_data[key] << [x_line, y_line]
end
end
return series_data
end
def get_series_hashes
data_hash = {}
@x.each_pair do |(key, x_series)|
data_hash[key] = {}
y_series = @y[key]
x_series.each_with_index do |x_data, index|
y_data = y_series[index]
series_key = @series_names[key][index]
data_hash[key][series_key] = {}
x_data.each_with_index do |x_point, index|
y_point = y_data[index]
data_hash[key][series_key][x_point] = y_point
end
end
end
return data_hash
end
#Output
def save()
end
protected
def hash_to_xy(hash)
x = []
y = []
hash.each_with_index do |(key, value), index|
if key.is_a? Numeric
x << key
else
x << index
end
if value.is_a? Numeric
y << value
else
y << 0
end
end
return x, y
end
end
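A short usage sketch, not from the original file, of the append and accessor helpers above; the series name and values are arbitrary.
plugin = SimpleOutput::SimpleOutputPlugin.new
plugin.append_xy([1, 2, 3], [10, 20, 30], 'cpu')
plugin.append_points([[4, 40], [5, 50]], 'cpu')
plugin.get_data_as_points
# => { 'cpu' => [[[1, 10], [2, 20], [3, 30]], [[4, 40], [5, 50]]] }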
|
xi-livecode/xi | lib/xi/pattern.rb | Xi.Pattern.each | ruby | def each
return enum_for(__method__) unless block_given?
each_event { |v, _, _, i|
break if i > 0
yield v
}
end | Calls the given block once for each value in source
@example
Pattern.new([1, 2, 3]).each.to_a
# => [1, 2, 3]
@return [Enumerator]
@yield [Object] value | train | https://github.com/xi-livecode/xi/blob/215dfb84899b3dd00f11089ae3eab0febf498e95/lib/xi/pattern.rb#L241-L248 | class Pattern
extend Generators
include Transforms
# Array or Proc that produces values or events
attr_reader :source
# Event delta in terms of cycles (default: 1)
attr_reader :delta
# Hash that contains metadata related to pattern usage
attr_reader :metadata
# Size of pattern
attr_reader :size
# Duration of pattern
attr_reader :duration
# Creates a new Pattern given either a +source+ or a +block+ that yields
# events.
#
# If a block is given, +yielder+ parameter must yield +value+ and +start+
# (optional) for each event.
#
# @example Pattern from an Array
# Pattern.new(['a', 'b', 'c']).take(5)
# # => [['a', 0, 1, 0],
# # ['b', 1, 1, 0],
# # ['c', 2, 1, 0],
# # ['a', 3, 1, 1], # starts cycling...
# # ['b', 4, 1, 1]]
#
# @example Pattern from a block that yields only values.
# Pattern.new { |y| y << rand(100) }.take(5)
# # => [[52, 0, 1, 0],
# # [8, 1, 1, 0],
# # [83, 2, 1, 0],
# # [25, 3, 1, 0],
# # [3, 4, 1, 0]]
#
# @param source [Array]
# @param size [Integer] number of events per iteration
# @param delta [Numeric, Array<Numeric>, Pattern<Numeric>] event delta
# @param metadata [Hash]
# @yield [yielder, delta] yielder and event delta
# @yieldreturn [value, start, duration]
# @return [Pattern]
#
def initialize(source=nil, size: nil, delta: nil, **metadata, &block)
if source.nil? && block.nil?
fail ArgumentError, 'must provide source or block'
end
if delta && delta.respond_to?(:size) && !(delta.size < Float::INFINITY)
fail ArgumentError, 'delta cannot be infinite'
end
# If delta is an array of 1 or 0 values, flatten array
delta = delta.first if delta.is_a?(Array) && delta.size <= 1
# Block takes precedence as source, even though +source+ can be used to
# infer attributes
@source = block || source
# Infer attributes from +source+ if it is a pattern
if source.is_a?(Pattern)
@delta = source.delta
@size = source.size
@metadata = source.metadata
else
@delta = 1
@size = (source.respond_to?(:size) ? source.size : nil) ||
Float::INFINITY
@metadata = {}
end
# Flatten source if it is a pattern
@source = @source.source if @source.is_a?(Pattern)
# Override or merge custom attributes if they were specified
@size = size if size
@delta = delta if delta
@metadata.merge!(metadata)
# Flatten delta values to an array, if it is an enumerable or pattern
@delta = @delta.to_a if @delta.respond_to?(:to_a)
# Set duration based on delta values
@duration = delta_values.reduce(:+) || 0
end
# Create a new Pattern given an array of +args+
#
# @see Pattern#initialize
#
# @param args [Array]
# @param kwargs [Hash]
# @return [Pattern]
#
def self.[](*args, **kwargs)
new(args, **kwargs)
end
# Returns a new Pattern with the same +source+, but with +delta+ overriden
# and +metadata+ merged.
#
# @param delta [Array<Numeric>, Pattern<Numeric>, Numeric]
# @param metadata [Hash]
# @return [Pattern]
#
def p(*delta, **metadata)
delta = delta.compact.empty? ? @delta : delta
Pattern.new(@source, delta: delta, size: @size, **@metadata.merge(metadata))
end
# Returns true if pattern is infinite
#
# A Pattern is infinite if it was created from a Proc or another infinite
# pattern, and size was not specified.
#
# @return [Boolean]
# @see #finite?
#
def infinite?
@size == Float::INFINITY
end
# Returns true if pattern is finite
#
# A pattern is finite if it has a finite size.
#
# @return [Boolean]
# @see #infinite?
#
def finite?
!infinite?
end
# Calls the given block once for each event, passing its value, start
# position, duration and iteration as parameters.
#
# +cycle+ can be any number, even if there is no event that starts exactly
# at that moment. It will start from the next event.
#
# If no block is given, an enumerator is returned instead.
#
# Enumeration loops forever, and starts yielding events based on pattern's
# delta and from the +cycle+ position, which is by default 0.
#
# @example block yields value, start, duration and iteration
# Pattern.new([1, 2], delta: 0.25).each_event.take(4)
# # => [[1, 0.0, 0.25, 0],
# # [2, 0.25, 0.25, 0],
# # [1, 0.5, 0.25, 1],
# # [2, 0.75, 0.25, 1]]
#
# @example +cycle+ is used to start iterating from that moment in time
# Pattern.new([:a, :b, :c], delta: 1/2).each_event(42).take(4)
# # => [[:a, (42/1), (1/2), 28],
# # [:b, (85/2), (1/2), 28],
# # [:c, (43/1), (1/2), 28],
# # [:a, (87/2), (1/2), 29]]
#
# @example +cycle+ can also be a fractional number
# Pattern.new([:a, :b, :c]).each_event(0.97).take(3)
# # => [[:b, 1, 1, 0],
# # [:c, 2, 1, 0],
# # [:a, 3, 1, 1]]
#
# @param cycle [Numeric]
# @yield [v, s, d, i] value, start, duration and iteration
# @return [Enumerator]
#
def each_event(cycle=0)
return enum_for(__method__, cycle) unless block_given?
EventEnumerator.new(self, cycle).each { |v, s, d, i| yield v, s, d, i }
end
# Calls the given block passing the delta of each value in pattern
#
# This method is used internally by {#each_event} to calculate when each
# event in pattern occurs in time. If no block is given, an Enumerator is
# returned instead.
#
# @param index [Numeric]
# @yield [d] duration
# @return [Enumerator]
#
def each_delta(index=0)
return enum_for(__method__, index) unless block_given?
delta = @delta
if delta.is_a?(Array)
size = delta.size
return if size == 0
start = index.floor
i = start % size
loop do
yield delta[i]
i = (i + 1) % size
start += 1
end
elsif delta.is_a?(Pattern)
delta.each_event(index) { |v, _| yield v }
else
loop { yield delta }
end
end
# Calls the given block once for each value in source
#
# @example
# Pattern.new([1, 2, 3]).each.to_a
# # => [1, 2, 3]
#
# @return [Enumerator]
# @yield [Object] value
#
# Same as {#each} but in reverse order
#
# @example
# Pattern.new([1, 2, 3]).reverse_each.to_a
# # => [3, 2, 1]
#
# @return [Enumerator]
# @yield [Object] value
#
def reverse_each
return enum_for(__method__) unless block_given?
each.to_a.reverse.each { |v| yield v }
end
# Returns an array of values from a single iteration of pattern
#
# @return [Array] values
# @see #to_events
#
def to_a
fail StandardError, 'pattern is infinite' if infinite?
each.to_a
end
# Returns an array of events (i.e. a tuple [value, start, duration,
# iteration]) from the first iteration.
#
# Only applies to finite patterns.
#
# @return [Array] events
# @see #to_a
#
def to_events
fail StandardError, 'pattern is infinite' if infinite?
each_event.take(size)
end
# Returns a new Pattern with the results of running +block+ once for every
# value in +self+
#
# If no block is given, an Enumerator is returned.
#
# @yield [v, s, d, i] value, start, duration and iteration
# @yieldreturn [v, s, d] value, start (optional) and duration (optional)
# @return [Pattern]
#
def map
return enum_for(__method__) unless block_given?
Pattern.new(self) do |y, d|
each_event do |v, s, ed, i|
y << yield(v, s, ed, i)
end
end
end
alias_method :collect, :map
# Returns a Pattern containing all events of +self+ for which +block+ is
# true.
#
# If no block is given, an Enumerator is returned.
#
# @see Pattern#reject
#
# @yield [v, s, d, i] value, start, duration and iteration
# @yieldreturn [Boolean] whether value is selected
# @return [Pattern]
#
def select
return enum_for(__method__) unless block_given?
Pattern.new(self) do |y, d|
each_event do |v, s, ed, i|
y << v if yield(v, s, ed, i)
end
end
end
alias_method :find_all, :select
# Returns a Pattern containing all events of +self+ for which +block+
# is false.
#
# If no block is given, an Enumerator is returned.
#
# @see Pattern#select
#
# @yield [v, s, d, i] value, start, duration and iteration
# @yieldreturn [Boolean] whether event is rejected
# @return [Pattern]
#
def reject
return enum_for(__method__) unless block_given?
select { |v, s, d, i| !yield(v, s, d, i) }
end
# Returns the first +n+ events from the pattern, starting from +cycle+
#
# @param n [Integer]
# @param cycle [Numeric]
# @return [Array] values
#
def take(n, cycle=0)
each_event(cycle).take(n)
end
# Returns the first +n+ values from +self+, starting from +cycle+.
#
# Only values are returned, start position and duration are ignored.
#
# @see #take
#
def take_values(*args)
take(*args).map(&:first)
end
# @see #take_values
def peek(n=10, *args)
take_values(n, *args)
end
# @see #take
def peek_events(n=10, cycle=0)
take(n, cycle)
end
# Returns the first element, or the first +n+ elements, of the pattern.
#
# If the pattern is empty, the first form returns nil, and the second form
# returns an empty array.
#
# @see #take
#
# @param n [Integer]
# @param args same arguments as {#take}
# @return [Object, Array]
#
def first(n=nil, *args)
res = take(n || 1, *args)
n.nil? ? res.first : res
end
# Returns a string containing a human-readable representation
#
# When source is not a Proc, this string can be evaluated to construct the
# same instance.
#
# @return [String]
#
def inspect
ss = if @source.respond_to?(:join)
@source.map(&:inspect).join(', ')
elsif @source.is_a?(Proc)
"?proc"
else
@source.inspect
end
ms = @metadata.reject { |_, v| v.nil? }
ms.merge!(delta: delta) if delta != 1
ms = ms.map { |k, v| "#{k}: #{v.inspect}" }.join(', ')
"P[#{ss}#{", #{ms}" unless ms.empty?}]"
end
alias_method :to_s, :inspect
# Returns pattern interation size or length
#
# This is usually calculated from the least-common multiple between the sum
# of delta values and the size of the pattern. If pattern is infinite,
# pattern size is assumed to be 1, so iteration size depends on delta
# values.
#
# @return [Integer]
#
def iteration_size
finite? ? delta_size.lcm(@size) : delta_size
end
# @private
def ==(o)
self.class == o.class &&
delta == o.delta &&
size == o.size &&
duration == o.duration &&
metadata == o.metadata &&
(finite? && to_a == o.to_a)
end
private
class EventEnumerator
def initialize(pattern, cycle)
@cycle = cycle
@source = pattern.source
@size = pattern.size
@iter_size = pattern.iteration_size
@iter = pattern.duration > 0 ? (cycle / pattern.duration).floor : 0
@delta_enum = pattern.each_delta(@iter * @iter_size)
@start = @iter * pattern.duration
@prev_ev = nil
@i = 0
end
def each(&block)
return enum_for(__method__, @cycle) unless block_given?
return if @size == 0
if @source.respond_to?(:call)
loop do
yielder = ::Enumerator::Yielder.new do |value|
each_block(value, &block)
end
@source.call(yielder, @delta_enum.peek)
end
elsif @source.respond_to?(:each_event)
@source.each_event(@start) do |value, _|
each_block(value, &block)
end
elsif @source.respond_to?(:[])
loop do
each_block(@source[@i % @size], &block)
end
else
fail StandardError, 'invalid source'
end
end
private
def each_block(value)
delta = @delta_enum.peek
if @start >= @cycle
if @prev_ev
yield @prev_ev if @start > @cycle
@prev_ev = nil
end
yield value, @start, delta, @iter
else
@prev_ev = [value, @start, delta, @iter]
end
@iter += 1 if @i + 1 == @iter_size
@i = (@i + 1) % @iter_size
@start += delta
@delta_enum.next
end
end
def delta_values
each_delta.take(iteration_size)
end
def delta_size
@delta.respond_to?(:each) && @delta.respond_to?(:size) ? @delta.size : 1
end
end
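A hedged usage sketch drawn from the documentation examples above; the expected outputs follow the documented behaviour rather than a fresh run.
p1 = Xi::Pattern.new([:a, :b, :c])
p1.take_values(5)        # => [:a, :b, :c, :a, :b]
p2 = Xi::Pattern[1, 2, delta: 0.25]
p2.each_event.take(3)    # => [[1, 0.0, 0.25, 0], [2, 0.25, 0.25, 0], [1, 0.5, 0.25, 1]]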
|
forward3d/rbhive | lib/rbhive/t_c_l_i_connection.rb | RBHive.TCLIConnection.async_execute | ruby | def async_execute(query)
@logger.info("Executing query asynchronously: #{query}")
exec_result = @client.ExecuteStatement(
Hive2::Thrift::TExecuteStatementReq.new(
sessionHandle: @session.sessionHandle,
statement: query,
runAsync: true
)
)
raise_error_if_failed!(exec_result)
op_handle = exec_result.operationHandle
# Return handles to get hold of this query / session again
{
session: @session.sessionHandle,
guid: op_handle.operationId.guid,
secret: op_handle.operationId.secret
}
end | Async execute | train | https://github.com/forward3d/rbhive/blob/a630b57332f2face03501da3ecad2905c78056fa/lib/rbhive/t_c_l_i_connection.rb#L193-L211 | class TCLIConnection
attr_reader :client
def initialize(server, port = 10_000, options = {}, logger = StdOutLogger.new)
options ||= {} # backwards compatibility
raise "'options' parameter must be a hash" unless options.is_a?(Hash)
if options[:transport] == :sasl and options[:sasl_params].nil?
raise ":transport is set to :sasl, but no :sasl_params option was supplied"
end
# Defaults to buffered transport, Hive 0.10, 1800 second timeout
options[:transport] ||= :buffered
options[:hive_version] ||= 10
options[:timeout] ||= 1800
@options = options
# Look up the appropriate Thrift protocol version for the supplied Hive version
@thrift_protocol_version = thrift_hive_protocol(options[:hive_version])
@logger = logger
@transport = thrift_transport(server, port)
@protocol = Thrift::BinaryProtocol.new(@transport)
@client = Hive2::Thrift::TCLIService::Client.new(@protocol)
@session = nil
@logger.info("Connecting to HiveServer2 #{server} on port #{port}")
end
def thrift_hive_protocol(version)
HIVE_THRIFT_MAPPING[version] || raise("Invalid Hive version")
end
def thrift_transport(server, port)
@logger.info("Initializing transport #{@options[:transport]}")
case @options[:transport]
when :buffered
return Thrift::BufferedTransport.new(thrift_socket(server, port, @options[:timeout]))
when :sasl
return Thrift::SaslClientTransport.new(thrift_socket(server, port, @options[:timeout]),
parse_sasl_params(@options[:sasl_params]))
when :http
return Thrift::HTTPClientTransport.new("http://#{server}:#{port}/cliservice")
else
raise "Unrecognised transport type '#{transport}'"
end
end
def thrift_socket(server, port, timeout)
socket = Thrift::Socket.new(server, port)
socket.timeout = timeout
socket
end
# Processes SASL connection params and returns a hash with symbol keys or a nil
def parse_sasl_params(sasl_params)
# Symbolize keys in a hash
if sasl_params.kind_of?(Hash)
return sasl_params.inject({}) do |memo,(k,v)|
memo[k.to_sym] = v;
memo
end
end
return nil
end
def open
@transport.open
end
def close
@transport.close
end
def open_session
@session = @client.OpenSession(prepare_open_session(@thrift_protocol_version))
end
def close_session
@client.CloseSession prepare_close_session
@session = nil
end
def session
@session && @session.sessionHandle
end
def client
@client
end
def execute(query)
@logger.info("Executing Hive Query: #{query}")
req = prepare_execute_statement(query)
exec_result = client.ExecuteStatement(req)
raise_error_if_failed!(exec_result)
exec_result
end
def priority=(priority)
set("mapred.job.priority", priority)
end
def queue=(queue)
set("mapred.job.queue.name", queue)
end
def set(name,value)
@logger.info("Setting #{name}=#{value}")
self.execute("SET #{name}=#{value}")
end
# Async execute
# Is the query complete?
def async_is_complete?(handles)
async_state(handles) == :finished
end
# Is the query actually running?
def async_is_running?(handles)
async_state(handles) == :running
end
# Has the query failed?
def async_is_failed?(handles)
async_state(handles) == :error
end
def async_is_cancelled?(handles)
async_state(handles) == :cancelled
end
def async_cancel(handles)
@client.CancelOperation(prepare_cancel_request(handles))
end
# Map states to symbols
def async_state(handles)
response = @client.GetOperationStatus(
Hive2::Thrift::TGetOperationStatusReq.new(operationHandle: prepare_operation_handle(handles))
)
case response.operationState
when Hive2::Thrift::TOperationState::FINISHED_STATE
return :finished
when Hive2::Thrift::TOperationState::INITIALIZED_STATE
return :initialized
when Hive2::Thrift::TOperationState::RUNNING_STATE
return :running
when Hive2::Thrift::TOperationState::CANCELED_STATE
return :cancelled
when Hive2::Thrift::TOperationState::CLOSED_STATE
return :closed
when Hive2::Thrift::TOperationState::ERROR_STATE
return :error
when Hive2::Thrift::TOperationState::UKNOWN_STATE
return :unknown
when Hive2::Thrift::TOperationState::PENDING_STATE
return :pending
when nil
raise "No operation state found for handles - has the session been closed?"
else
return :state_not_in_protocol
end
end
# Async fetch results from an async execute
def async_fetch(handles, max_rows = 100)
# Can't get data from an unfinished query
unless async_is_complete?(handles)
raise "Can't perform fetch on a query in state: #{async_state(handles)}"
end
# Fetch and return the rows
fetch_rows(prepare_operation_handle(handles), :first, max_rows)
end
# Performs a query on the server, fetches the results in batches of *batch_size* rows
# and yields the result batches to a given block as arrays of rows.
def async_fetch_in_batch(handles, batch_size = 1000, &block)
raise "No block given for the batch fetch request!" unless block_given?
# Can't get data from an unfinished query
unless async_is_complete?(handles)
raise "Can't perform fetch on a query in state: #{async_state(handles)}"
end
# Now let's iterate over the results
loop do
rows = fetch_rows(prepare_operation_handle(handles), :next, batch_size)
break if rows.empty?
yield rows
end
end
def async_close_session(handles)
validate_handles!(handles)
@client.CloseSession(Hive2::Thrift::TCloseSessionReq.new( sessionHandle: handles[:session] ))
end
# Pull rows from the query result
def fetch_rows(op_handle, orientation = :first, max_rows = 1000)
fetch_req = prepare_fetch_results(op_handle, orientation, max_rows)
fetch_results = @client.FetchResults(fetch_req)
raise_error_if_failed!(fetch_results)
rows = fetch_results.results.rows
TCLIResultSet.new(rows, TCLISchemaDefinition.new(get_schema_for(op_handle), rows.first))
end
# Performs an explain on the supplied query on the server, returns it as an ExplainResult.
# (Only works on 0.12 if you have this patch - https://issues.apache.org/jira/browse/HIVE-5492)
def explain(query)
rows = []
fetch_in_batch("EXPLAIN " + query) do |batch|
rows << batch.map { |b| b[:Explain] }
end
ExplainResult.new(rows.flatten)
end
# Performs a query on the server, fetches up to *max_rows* rows and returns them as an array.
def fetch(query, max_rows = 100)
# Execute the query and check the result
exec_result = execute(query)
raise_error_if_failed!(exec_result)
# Get search operation handle to fetch the results
op_handle = exec_result.operationHandle
# Fetch the rows
fetch_rows(op_handle, :first, max_rows)
end
# Performs a query on the server, fetches the results in batches of *batch_size* rows
# and yields the result batches to a given block as arrays of rows.
def fetch_in_batch(query, batch_size = 1000, &block)
raise "No block given for the batch fetch request!" unless block_given?
# Execute the query and check the result
exec_result = execute(query)
raise_error_if_failed!(exec_result)
# Get search operation handle to fetch the results
op_handle = exec_result.operationHandle
# Prepare fetch results request
fetch_req = prepare_fetch_results(op_handle, :next, batch_size)
# Now let's iterate over the results
loop do
rows = fetch_rows(op_handle, :next, batch_size)
break if rows.empty?
yield rows
end
end
def create_table(schema)
execute(schema.create_table_statement)
end
def drop_table(name)
name = name.name if name.is_a?(TableSchema)
execute("DROP TABLE `#{name}`")
end
def replace_columns(schema)
execute(schema.replace_columns_statement)
end
def add_columns(schema)
execute(schema.add_columns_statement)
end
def method_missing(meth, *args)
client.send(meth, *args)
end
private
def prepare_open_session(client_protocol)
req = ::Hive2::Thrift::TOpenSessionReq.new( @options[:sasl_params].nil? ? [] : @options[:sasl_params] )
req.client_protocol = client_protocol
req
end
def prepare_close_session
::Hive2::Thrift::TCloseSessionReq.new( sessionHandle: self.session )
end
def prepare_execute_statement(query)
::Hive2::Thrift::TExecuteStatementReq.new( sessionHandle: self.session, statement: query.to_s, confOverlay: {} )
end
def prepare_fetch_results(handle, orientation=:first, rows=100)
orientation_value = "FETCH_#{orientation.to_s.upcase}"
valid_orientations = ::Hive2::Thrift::TFetchOrientation::VALUE_MAP.values
unless valid_orientations.include?(orientation_value)
raise ArgumentError, "Invalid orientation: #{orientation.inspect}"
end
orientation_const = eval("::Hive2::Thrift::TFetchOrientation::#{orientation_value}")
::Hive2::Thrift::TFetchResultsReq.new(
operationHandle: handle,
orientation: orientation_const,
maxRows: rows
)
end
def prepare_operation_handle(handles)
validate_handles!(handles)
Hive2::Thrift::TOperationHandle.new(
operationId: Hive2::Thrift::THandleIdentifier.new(guid: handles[:guid], secret: handles[:secret]),
operationType: Hive2::Thrift::TOperationType::EXECUTE_STATEMENT,
hasResultSet: false
)
end
def prepare_cancel_request(handles)
Hive2::Thrift::TCancelOperationReq.new(
operationHandle: prepare_operation_handle(handles)
)
end
def validate_handles!(handles)
unless handles.has_key?(:guid) and handles.has_key?(:secret) and handles.has_key?(:session)
raise "Invalid handles hash: #{handles.inspect}"
end
end
def get_schema_for(handle)
req = ::Hive2::Thrift::TGetResultSetMetadataReq.new( operationHandle: handle )
metadata = client.GetResultSetMetadata( req )
metadata.schema
end
# Raises an exception if given operation result is a failure
def raise_error_if_failed!(result)
return if result.status.statusCode == 0
error_message = result.status.errorMessage || 'Execution failed!'
raise RBHive::TCLIConnectionError.new(error_message)
end
end
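A hedged end-to-end sketch of the asynchronous API above; the host, port, query and polling interval are placeholders and error handling is omitted.
conn = RBHive::TCLIConnection.new('hive.example.com', 10_000, { transport: :buffered })
conn.open
conn.open_session
handles = conn.async_execute('SELECT COUNT(*) FROM pageviews')   # placeholder query
sleep 5 until conn.async_is_complete?(handles) || conn.async_is_failed?(handles)
results = conn.async_fetch(handles, 100) if conn.async_is_complete?(handles)
conn.close_session
conn.close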
|
state-machines/state_machines | lib/state_machines/machine.rb | StateMachines.Machine.transition | ruby | def transition(options)
raise ArgumentError, 'Must specify :on event' unless options[:on]
branches = []
options = options.dup
event(*Array(options.delete(:on))) { branches << transition(options) }
branches.length == 1 ? branches.first : branches
end | Creates a new transition that determines what to change the current state
to when an event fires.
== Defining transitions
The options for a new transition use the Hash syntax to map beginning
states to ending states. For example,
transition :parked => :idling, :idling => :first_gear, :on => :ignite
In this case, when the +ignite+ event is fired, this transition will cause
the state to be +idling+ if its current state is +parked+ or +first_gear+
if its current state is +idling+.
To help define these implicit transitions, a set of helpers are available
for slightly more complex matching:
* <tt>all</tt> - Matches every state in the machine
* <tt>all - [:parked, :idling, ...]</tt> - Matches every state except those specified
* <tt>any</tt> - An alias for +all+ (matches every state in the machine)
* <tt>same</tt> - Matches the same state being transitioned from
See StateMachines::MatcherHelpers for more information.
Examples:
transition all => nil, :on => :ignite # Transitions to nil regardless of the current state
transition all => :idling, :on => :ignite # Transitions to :idling regardless of the current state
transition all - [:idling, :first_gear] => :idling, :on => :ignite # Transitions every state but :idling and :first_gear to :idling
transition nil => :idling, :on => :ignite # Transitions to :idling from the nil state
transition :parked => :idling, :on => :ignite # Transitions to :idling if :parked
transition [:parked, :stalled] => :idling, :on => :ignite # Transitions to :idling if :parked or :stalled
transition :parked => same, :on => :park # Loops :parked back to :parked
transition [:parked, :stalled] => same, :on => [:park, :stall] # Loops either :parked or :stalled back to the same state on the park and stall events
transition all - :parked => same, :on => :noop # Loops every state but :parked back to the same state
# Transitions to :idling if :parked, :first_gear if :idling, or :second_gear if :first_gear
transition :parked => :idling, :idling => :first_gear, :first_gear => :second_gear, :on => :shift_up
== Verbose transitions
Transitions can also be defined using an explicit set of configuration
options:
* <tt>:from</tt> - A state or array of states that can be transitioned from.
If not specified, then the transition can occur for *any* state.
* <tt>:to</tt> - The state that's being transitioned to. If not specified,
then the transition will simply loop back (i.e. the state will not change).
* <tt>:except_from</tt> - A state or array of states that *cannot* be
transitioned from.
These options must be used when defining transitions within the context
of a state.
Examples:
transition :to => nil, :on => :park
transition :to => :idling, :on => :ignite
transition :except_from => [:idling, :first_gear], :to => :idling, :on => :ignite
transition :from => nil, :to => :idling, :on => :ignite
transition :from => [:parked, :stalled], :to => :idling, :on => :ignite
== Conditions
In addition to the state requirements for each transition, a condition
can also be defined to help determine whether that transition is
available. These options will work on both the normal and verbose syntax.
Configuration options:
* <tt>:if</tt> - A method, proc or string to call to determine if the
transition should occur (e.g. :if => :moving?, or :if => lambda {|vehicle| vehicle.speed > 60}).
The condition should return or evaluate to true or false.
* <tt>:unless</tt> - A method, proc or string to call to determine if the
transition should not occur (e.g. :unless => :stopped?, or :unless => lambda {|vehicle| vehicle.speed <= 60}).
The condition should return or evaluate to true or false.
Examples:
transition :parked => :idling, :on => :ignite, :if => :moving?
transition :parked => :idling, :on => :ignite, :unless => :stopped?
transition :idling => :first_gear, :first_gear => :second_gear, :on => :shift_up, :if => :seatbelt_on?
transition :from => :parked, :to => :idling, :on => :ignite, :if => :moving?
transition :from => :parked, :to => :idling, :on => :ignite, :unless => :stopped?
== Order of operations
Transitions are evaluated in the order in which they're defined. As a
result, if more than one transition applies to a given object, then the
first transition that matches will be performed. | train | https://github.com/state-machines/state_machines/blob/10b03af5fc9245bcb09bbd9c40c58ffba9a85422/lib/state_machines/machine.rb#L1426-L1434 | class Machine
include EvalHelpers
include MatcherHelpers
class << self
# Attempts to find or create a state machine for the given class. For
# example,
#
# StateMachines::Machine.find_or_create(Vehicle)
# StateMachines::Machine.find_or_create(Vehicle, :initial => :parked)
# StateMachines::Machine.find_or_create(Vehicle, :status)
# StateMachines::Machine.find_or_create(Vehicle, :status, :initial => :parked)
#
# If a machine of the given name already exists in one of the class's
# superclasses, then a copy of that machine will be created and stored
# in the new owner class (the original will remain unchanged).
def find_or_create(owner_class, *args, &block)
options = args.last.is_a?(Hash) ? args.pop : {}
name = args.first || :state
# Find an existing machine
machine = owner_class.respond_to?(:state_machines) &&
(args.first && owner_class.state_machines[name] || !args.first &&
owner_class.state_machines.values.first) || nil
if machine
# Only create a new copy if changes are being made to the machine in
# a subclass
if machine.owner_class != owner_class && (options.any? || block_given?)
machine = machine.clone
machine.initial_state = options[:initial] if options.include?(:initial)
machine.owner_class = owner_class
end
# Evaluate DSL
machine.instance_eval(&block) if block_given?
else
# No existing machine: create a new one
machine = new(owner_class, name, options, &block)
end
machine
end
def draw(*)
fail NotImplementedError
end
# Default messages to use for validation errors in ORM integrations
attr_accessor :default_messages
attr_accessor :ignore_method_conflicts
end
@default_messages = {
:invalid => 'is invalid',
:invalid_event => 'cannot transition when %s',
:invalid_transition => 'cannot transition via "%1$s"'
}
# Whether to ignore any conflicts that are detected for helper methods that
# get generated for a machine's owner class. Default is false.
@ignore_method_conflicts = false
# The class that the machine is defined in
attr_reader :owner_class
# The name of the machine, used for scoping methods generated for the
# machine as a whole (not states or events)
attr_reader :name
# The events that trigger transitions. These are sorted, by default, in
# the order in which they were defined.
attr_reader :events
# A list of all of the states known to this state machine. This will pull
# states from the following sources:
# * Initial state
# * State behaviors
# * Event transitions (:to, :from, and :except_from options)
# * Transition callbacks (:to, :from, :except_to, and :except_from options)
# * Unreferenced states (using +other_states+ helper)
#
# These are sorted, by default, in the order in which they were referenced.
attr_reader :states
# The callbacks to invoke before/after a transition is performed
#
# Maps :before => callbacks and :after => callbacks
attr_reader :callbacks
# The action to invoke when an object transitions
attr_reader :action
# An identifier that forces all methods (including state predicates and
# event methods) to be generated with the value prefixed or suffixed,
# depending on the context.
attr_reader :namespace
# Whether the machine will use transactions when firing events
attr_reader :use_transactions
# Creates a new state machine for the given attribute
def initialize(owner_class, *args, &block)
options = args.last.is_a?(Hash) ? args.pop : {}
options.assert_valid_keys(:attribute, :initial, :initialize, :action, :plural, :namespace, :integration, :messages, :use_transactions)
# Find an integration that matches this machine's owner class
if options.include?(:integration)
@integration = options[:integration] && StateMachines::Integrations.find_by_name(options[:integration])
else
@integration = StateMachines::Integrations.match(owner_class)
end
if @integration
extend @integration
options = (@integration.defaults || {}).merge(options)
end
# Add machine-wide defaults
options = {:use_transactions => true, :initialize => true}.merge(options)
# Set machine configuration
@name = args.first || :state
@attribute = options[:attribute] || @name
@events = EventCollection.new(self)
@states = StateCollection.new(self)
@callbacks = {:before => [], :after => [], :failure => []}
@namespace = options[:namespace]
@messages = options[:messages] || {}
@action = options[:action]
@use_transactions = options[:use_transactions]
@initialize_state = options[:initialize]
@action_hook_defined = false
self.owner_class = owner_class
# Merge with sibling machine configurations
add_sibling_machine_configs
# Define class integration
define_helpers
define_scopes(options[:plural])
after_initialize
# Evaluate DSL
instance_eval(&block) if block_given?
self.initial_state = options[:initial] unless sibling_machines.any?
end
# Creates a copy of this machine in addition to copies of each associated
# event/states/callback, so that the modifications to those collections do
# not affect the original machine.
def initialize_copy(orig) #:nodoc:
super
@events = @events.dup
@events.machine = self
@states = @states.dup
@states.machine = self
@callbacks = {:before => @callbacks[:before].dup, :after => @callbacks[:after].dup, :failure => @callbacks[:failure].dup}
end
# Sets the class which is the owner of this state machine. Any methods
# generated by states, events, or other parts of the machine will be defined
# on the given owner class.
def owner_class=(klass)
@owner_class = klass
# Create modules for extending the class with state/event-specific methods
@helper_modules = helper_modules = {:instance => HelperModule.new(self, :instance), :class => HelperModule.new(self, :class)}
owner_class.class_eval do
extend helper_modules[:class]
include helper_modules[:instance]
end
# Add class-/instance-level methods to the owner class for state initialization
unless owner_class < StateMachines::InstanceMethods
owner_class.class_eval do
extend StateMachines::ClassMethods
include StateMachines::InstanceMethods
end
define_state_initializer if @initialize_state
end
# Record this machine as matched to the name in the current owner class.
# This will override any machines mapped to the same name in any superclasses.
owner_class.state_machines[name] = self
end
# Sets the initial state of the machine. This can be either the static name
# of a state or a lambda block which determines the initial state at
# creation time.
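#
# For example (illustrative usage only; +force_idle+ mirrors the example below):
#
#   machine.initial_state = :parked
#   machine.initial_state = lambda {|vehicle| vehicle.force_idle ? :idling : :parked}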
def initial_state=(new_initial_state)
@initial_state = new_initial_state
add_states([@initial_state]) unless dynamic_initial_state?
# Update all states to reflect the new initial state
states.each { |state| state.initial = (state.name == @initial_state) }
# Output a warning if there are conflicting initial states for the machine's
# attribute
initial_state = states.detect { |state| state.initial }
if !owner_class_attribute_default.nil? && (dynamic_initial_state? || !owner_class_attribute_default_matches?(initial_state))
warn(
"Both #{owner_class.name} and its #{name.inspect} machine have defined "\
"a different default for \"#{attribute}\". Use only one or the other for "\
"defining defaults to avoid unexpected behaviors."
)
end
end
# Gets the initial state of the machine for the given object. If a dynamic
# initial state was configured for this machine, then the object will be
# passed into the lambda block to help determine the actual state.
#
# == Examples
#
# With a static initial state:
#
# class Vehicle
# state_machine :initial => :parked do
# ...
# end
# end
#
# vehicle = Vehicle.new
# Vehicle.state_machine.initial_state(vehicle) # => #<StateMachines::State name=:parked value="parked" initial=true>
#
# With a dynamic initial state:
#
# class Vehicle
# attr_accessor :force_idle
#
# state_machine :initial => lambda {|vehicle| vehicle.force_idle ? :idling : :parked} do
# ...
# end
# end
#
# vehicle = Vehicle.new
#
# vehicle.force_idle = true
# Vehicle.state_machine.initial_state(vehicle) # => #<StateMachines::State name=:idling value="idling" initial=false>
#
# vehicle.force_idle = false
# Vehicle.state_machine.initial_state(vehicle) # => #<StateMachines::State name=:parked value="parked" initial=false>
def initial_state(object)
states.fetch(dynamic_initial_state? ? evaluate_method(object, @initial_state) : @initial_state) if instance_variable_defined?('@initial_state')
end
# Whether a dynamic initial state is being used in the machine
def dynamic_initial_state?
instance_variable_defined?('@initial_state') && @initial_state.is_a?(Proc)
end
# Initializes the state on the given object. Initial values are only set if
# the machine's attribute hasn't been previously initialized.
#
# Configuration options:
# * <tt>:force</tt> - Whether to initialize the state regardless of its
# current value
# * <tt>:to</tt> - A hash to set the initial value in instead of writing
# directly to the object
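#
# For example (illustrative usage only, assuming a :state machine whose
# initial state is :parked):
#
#   machine.initialize_state(vehicle, :force => true)    # Writes "parked" to the object's attribute
#
#   hash = {}
#   machine.initialize_state(vehicle, :force => true, :to => hash)
#   hash                                                  # => {"state" => "parked"}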
def initialize_state(object, options = {})
state = initial_state(object)
if state && (options[:force] || initialize_state?(object))
value = state.value
if hash = options[:to]
hash[attribute.to_s] = value
else
write(object, :state, value)
end
end
end
# Gets the actual name of the attribute on the machine's owner class that
# stores data with the given name.
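#
# For example, for the default machine named +state+ (illustrative usage only):
#
#   machine.attribute           # => :state
#   machine.attribute(:event)   # => :state_event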
def attribute(name = :state)
name == :state ? @attribute : :"#{self.name}_#{name}"
end
# Defines a new helper method in an instance or class scope with the given
# name. If the method is already defined in the scope, then this will not
# override it.
#
# If passing in a block, there are two side effects to be aware of
# 1. The method cannot be chained, meaning that the block cannot call +super+
# 2. If the method is already defined in an ancestor, then it will not get
# overridden and a warning will be output.
#
# Example:
#
# # Instance helper
# machine.define_helper(:instance, :state_name) do |machine, object|
# machine.states.match(object).name
# end
#
# # Class helper
# machine.define_helper(:class, :state_machine_name) do |machine, klass|
# "State"
# end
#
# You can also define helpers using string evaluation like so:
#
# # Instance helper
# machine.define_helper :instance, <<-end_eval, __FILE__, __LINE__ + 1
# def state_name
# self.class.state_machine(:state).states.match(self).name
# end
# end_eval
#
# # Class helper
# machine.define_helper :class, <<-end_eval, __FILE__, __LINE__ + 1
# def state_machine_name
# "State"
# end
# end_eval
def define_helper(scope, method, *args, &block)
helper_module = @helper_modules.fetch(scope)
if block_given?
if !self.class.ignore_method_conflicts && conflicting_ancestor = owner_class_ancestor_has_method?(scope, method)
ancestor_name = conflicting_ancestor.name && !conflicting_ancestor.name.empty? ? conflicting_ancestor.name : conflicting_ancestor.to_s
warn "#{scope == :class ? 'Class' : 'Instance'} method \"#{method}\" is already defined in #{ancestor_name}, use generic helper instead or set StateMachines::Machine.ignore_method_conflicts = true."
else
name = self.name
helper_module.class_eval do
define_method(method) do |*block_args|
block.call((scope == :instance ? self.class : self).state_machine(name), self, *block_args)
end
end
end
else
helper_module.class_eval(method, *args)
end
end
# Customizes the definition of one or more states in the machine.
#
# Configuration options:
# * <tt>:value</tt> - The actual value to store when an object transitions
# to the state. Default is the name (stringified).
# * <tt>:cache</tt> - If a dynamic value (via a lambda block) is being used,
# then setting this to true will cache the evaluated result
# * <tt>:if</tt> - Determines whether an object's value matches the state
# (e.g. :value => lambda {Time.now}, :if => lambda {|state| !state.nil?}).
# By default, the configured value is matched.
# * <tt>:human_name</tt> - The human-readable version of this state's name.
# By default, this is either defined by the integration or stringifies the
# name and converts underscores to spaces.
#
# == Customizing the stored value
#
# Whenever a state is automatically discovered in the state machine, its
# default value is assumed to be the stringified version of the name. For
# example,
#
# class Vehicle
# state_machine :initial => :parked do
# event :ignite do
# transition :parked => :idling
# end
# end
# end
#
# In the above state machine, there are two states automatically discovered:
# :parked and :idling. These states, by default, will store their stringified
# equivalents when an object moves into that state (e.g. "parked" / "idling").
#
# For legacy systems or when tying state machines into existing frameworks,
# it's oftentimes necessary to store a different value for a state
# than the default. In order to continue taking advantage of an expressive
# state machine and helper methods, every defined state can be re-configured
# with a custom stored value. For example,
#
# class Vehicle
# state_machine :initial => :parked do
# event :ignite do
# transition :parked => :idling
# end
#
# state :idling, :value => 'IDLING'
# state :parked, :value => 'PARKED'
# end
# end
#
# This is also useful if being used in association with a database and,
# instead of storing the state name in a column, you want to store the
# state's foreign key:
#
# class VehicleState < ActiveRecord::Base
# end
#
# class Vehicle < ActiveRecord::Base
# state_machine :attribute => :state_id, :initial => :parked do
# event :ignite do
# transition :parked => :idling
# end
#
# states.each do |state|
# self.state(state.name, :value => lambda { VehicleState.find_by_name(state.name.to_s).id }, :cache => true)
# end
# end
# end
#
# In the above example, each known state is configured to store its
# associated database id in the +state_id+ attribute. Also, notice that a
# lambda block is used to define the state's value. This is required in
# situations (like testing) where the model is loaded without any existing
# data (i.e. no VehicleState records available).
#
# One caveat to the above example is to keep performance in mind. To avoid
# constant db hits for looking up the VehicleState ids, the value is cached
# by specifying the <tt>:cache</tt> option. Alternatively, a custom
# caching strategy can be used like so:
#
# class VehicleState < ActiveRecord::Base
# cattr_accessor :cache_store
# self.cache_store = ActiveSupport::Cache::MemoryStore.new
#
# def self.find_by_name(name)
# cache_store.fetch(name) { find(:first, :conditions => {:name => name}) }
# end
# end
#
# === Dynamic values
#
# In addition to customizing states with other value types, lambda blocks
# can also be specified to allow for a state's value to be determined
# dynamically at runtime. For example,
#
# class Vehicle
# state_machine :purchased_at, :initial => :available do
# event :purchase do
# transition all => :purchased
# end
#
# event :restock do
# transition all => :available
# end
#
# state :available, :value => nil
# state :purchased, :if => lambda {|value| !value.nil?}, :value => lambda {Time.now}
# end
# end
#
# In the above definition, the <tt>:purchased</tt> state is customized with
# both a dynamic value *and* a value matcher.
#
# When an object transitions to the purchased state, the value's lambda
# block will be called. This will get the current time and store it in the
# object's +purchased_at+ attribute.
#
# *Note* that the custom matcher is very important here. Since there's no
# way for the state machine to figure out an object's state when it's set to
# a runtime value, it must be explicitly defined. If the <tt>:if</tt> option
# were not configured for the state, then an ArgumentError exception would
# be raised at runtime, indicating that the state machine could not figure
# out what the current state of the object was.
#
# == Behaviors
#
# Behaviors define a series of methods to mixin with objects when the current
# state matches the given one(s). This allows instance methods to behave
# a specific way depending on what the value of the object's state is.
#
# For example,
#
# class Vehicle
# attr_accessor :driver
# attr_accessor :passenger
#
# state_machine :initial => :parked do
# event :ignite do
# transition :parked => :idling
# end
#
# state :parked do
# def speed
# 0
# end
#
# def rotate_driver
# driver = self.driver
# self.driver = passenger
# self.passenger = driver
# true
# end
# end
#
# state :idling, :first_gear do
# def speed
# 20
# end
#
# def rotate_driver
# self.state = 'parked'
# rotate_driver
# end
# end
#
# other_states :backing_up
# end
# end
#
# In the above example, there are two dynamic behaviors defined for the
# class:
# * +speed+
# * +rotate_driver+
#
# Each of these behaviors are instance methods on the Vehicle class. However,
# which method actually gets invoked is based on the current state of the
# object. Using the above class as the example:
#
# vehicle = Vehicle.new
# vehicle.driver = 'John'
# vehicle.passenger = 'Jane'
#
# # Behaviors in the "parked" state
# vehicle.state # => "parked"
# vehicle.speed # => 0
# vehicle.rotate_driver # => true
# vehicle.driver # => "Jane"
# vehicle.passenger # => "John"
#
# vehicle.ignite # => true
#
# # Behaviors in the "idling" state
# vehicle.state # => "idling"
# vehicle.speed # => 20
# vehicle.rotate_driver # => true
# vehicle.driver # => "John"
# vehicle.passenger # => "Jane"
#
# As can be seen, both the +speed+ and +rotate_driver+ instance method
# implementations changed how they behave based on what the current state
# of the vehicle was.
#
# === Invalid behaviors
#
# If a specific behavior has not been defined for a state, then a
# NoMethodError exception will be raised, indicating that that method would
# not normally exist for an object with that state.
#
# Using the example from before:
#
# vehicle = Vehicle.new
# vehicle.state = 'backing_up'
# vehicle.speed # => NoMethodError: undefined method 'speed' for #<Vehicle:0xb7d296ac> in state "backing_up"
#
# === Using matchers
#
# The +all+ / +any+ matchers can be used to easily define behaviors for a
# group of states. Note, however, that you cannot use these matchers to
# set configurations for states. Behaviors using these matchers can be
# defined at any point in the state machine and will always get applied to
# the proper states.
#
# For example:
#
# state_machine :initial => :parked do
# ...
#
# state all - [:parked, :idling, :stalled] do
# validates_presence_of :speed
#
# def speed
# gear * 10
# end
# end
# end
#
# == State-aware class methods
#
# In addition to defining scopes for instance methods that are state-aware,
# the same can be done for certain types of class methods.
#
# Some libraries have support for class-level methods that only run certain
# behaviors based on a conditions hash passed in. For example:
#
# class Vehicle < ActiveRecord::Base
# state_machine do
# ...
# state :first_gear, :second_gear, :third_gear do
# validates_presence_of :speed
# validates_inclusion_of :speed, :in => 0..25, :if => :in_school_zone?
# end
# end
# end
#
# In the above ActiveRecord model, two validations have been defined which
# will *only* run when the Vehicle object is in one of the three states:
# +first_gear+, +second_gear+, or +third_gear+. Notice, also, that if/unless
# conditions can continue to be used.
#
# This functionality is not library-specific and can work for any class-level
# method that is defined like so:
#
# def validates_presence_of(attribute, options = {})
# ...
# end
#
# The minimum requirement is that the last argument in the method be an
# options hash which contains at least <tt>:if</tt> condition support.
def state(*names, &block)
options = names.last.is_a?(Hash) ? names.pop : {}
options.assert_valid_keys(:value, :cache, :if, :human_name)
# Store the context so that it can be used for / matched against any state
# that gets added
@states.context(names, &block) if block_given?
if names.first.is_a?(Matcher)
# Add any states referenced in the matcher. When matchers are used,
# states are not allowed to be configured.
raise ArgumentError, "Cannot configure states when using matchers (using #{options.inspect})" if options.any?
states = add_states(names.first.values)
else
states = add_states(names)
# Update the configuration for the state(s)
states.each do |state|
if options.include?(:value)
state.value = options[:value]
self.states.update(state)
end
state.human_name = options[:human_name] if options.include?(:human_name)
state.cache = options[:cache] if options.include?(:cache)
state.matcher = options[:if] if options.include?(:if)
end
end
states.length == 1 ? states.first : states
end
alias_method :other_states, :state
# Gets the current value stored in the given object's attribute.
#
# For example,
#
# class Vehicle
# state_machine :initial => :parked do
# ...
# end
# end
#
# vehicle = Vehicle.new # => #<Vehicle:0xb7d94ab0 @state="parked">
# Vehicle.state_machine.read(vehicle, :state) # => "parked" # Equivalent to vehicle.state
# Vehicle.state_machine.read(vehicle, :event) # => nil # Equivalent to vehicle.state_event
def read(object, attribute, ivar = false)
attribute = self.attribute(attribute)
if ivar
object.instance_variable_defined?("@#{attribute}") ? object.instance_variable_get("@#{attribute}") : nil
else
object.send(attribute)
end
end
# Sets a new value in the given object's attribute.
#
# For example,
#
# class Vehicle
# state_machine :initial => :parked do
# ...
# end
# end
#
# vehicle = Vehicle.new # => #<Vehicle:0xb7d94ab0 @state="parked">
# Vehicle.state_machine.write(vehicle, :state, 'idling') # => Equivalent to vehicle.state = 'idling'
# Vehicle.state_machine.write(vehicle, :event, 'park') # => Equivalent to vehicle.state_event = 'park'
# vehicle.state # => "idling"
# vehicle.state_event # => "park"
def write(object, attribute, value, ivar = false)
attribute = self.attribute(attribute)
ivar ? object.instance_variable_set("@#{attribute}", value) : object.send("#{attribute}=", value)
end
# Defines one or more events for the machine and the transitions that can
# be performed when those events are run.
#
# This method is also aliased as +on+ for improved compatibility with
# using a domain-specific language.
#
# Configuration options:
# * <tt>:human_name</tt> - The human-readable version of this event's name.
# By default, this is either defined by the integration or stringifies the
# name and converts underscores to spaces.
#
# == Instance methods
#
# The following instance methods are generated when a new event is defined
# (the "park" event is used as an example):
# * <tt>park(..., run_action = true)</tt> - Fires the "park" event,
# transitioning from the current state to the next valid state. If the
# last argument is a boolean, it will control whether the machine's action
# gets run.
# * <tt>park!(..., run_action = true)</tt> - Fires the "park" event,
# transitioning from the current state to the next valid state. If the
# transition fails, then a StateMachines::InvalidTransition error will be
# raised. If the last argument is a boolean, it will control whether the
# machine's action gets run.
# * <tt>can_park?(requirements = {})</tt> - Checks whether the "park" event
# can be fired given the current state of the object. This will *not* run
# validations or callbacks in ORM integrations. It will only determine if
# the state machine defines a valid transition for the event. To check
# whether an event can fire *and* passes validations, use event attributes
# (e.g. state_event) as described in the "Events" documentation of each
# ORM integration.
# * <tt>park_transition(requirements = {})</tt> - Gets the next transition
# that would be performed if the "park" event were to be fired now on the
# object or nil if no transitions can be performed. Like <tt>can_park?</tt>
# this will also *not* run validations or callbacks. It will only
# determine if the state machine defines a valid transition for the event.
#
# With a namespace of "car", the above names map to the following methods:
# * <tt>can_park_car?</tt>
# * <tt>park_car_transition</tt>
# * <tt>park_car</tt>
# * <tt>park_car!</tt>
#
# The <tt>can_park?</tt> and <tt>park_transition</tt> helpers both take an
# optional set of requirements for determining what transitions are available
# for the current object. These requirements include:
# * <tt>:from</tt> - One or more states to transition from. If none are
# specified, then this will be the object's current state.
# * <tt>:to</tt> - One or more states to transition to. If none are
# specified, then this will match any to state.
# * <tt>:guard</tt> - Whether to guard transitions with the if/unless
# conditionals defined for each one. Default is true.
#
# == Defining transitions
#
# +event+ requires a block which allows you to define the possible
# transitions that can happen as a result of that event. For example,
#
# event :park, :stop do
# transition :idling => :parked
# end
#
# event :first_gear do
# transition :parked => :first_gear, :if => :seatbelt_on?
# transition :parked => same # Allow looping back if seatbelt is off
# end
#
# See StateMachines::Event#transition for more information on
# the possible options that can be passed in.
#
# *Note* that this block is executed within the context of the actual event
# object. As a result, you will not be able to reference any class methods
# on the model without referencing the class itself. For example,
#
# class Vehicle
# def self.safe_states
# [:parked, :idling, :stalled]
# end
#
# state_machine do
# event :park do
# transition Vehicle.safe_states => :parked
# end
# end
# end
#
# == Overriding the event method
#
# By default, this will define an instance method (with the same name as the
# event) that will fire the next possible transition for that event. Although the
# +before_transition+, +after_transition+, and +around_transition+ hooks
# allow you to define behavior that gets executed as a result of the event's
# transition, you can also override the event method in order to have a
# little more fine-grained control.
#
# For example:
#
# class Vehicle
# state_machine do
# event :park do
# ...
# end
# end
#
# def park(*)
# take_deep_breath # Executes before the transition (and before_transition hooks) even if no transition is possible
# if result = super # Runs the transition and all before/after/around hooks
# applaud # Executes after the transition (and after_transition hooks)
# end
# result
# end
# end
#
# There are a few important things to note here. First, the method
# signature is defined with an unlimited argument list in order to allow
# callers to continue passing arguments that are expected by state_machine.
# For example, it will still allow calls to +park+ with a single parameter
# for skipping the configured action.
#
# Second, the overridden event method must call +super+ in order to run the
# logic for running the next possible transition. In order to remain
# consistent with other events, the result of +super+ is returned.
#
# Third, any behavior defined in this method will *not* get executed if
# you're taking advantage of attribute-based event transitions. For example:
#
# vehicle = Vehicle.new
# vehicle.state_event = 'park'
# vehicle.save
#
# In this case, the +park+ event will run the before/after/around transition
# hooks and transition the state, but the behavior defined in the overridden
# +park+ method will *not* be executed.
#
# == Defining additional arguments
#
# Additional arguments can be passed into events and accessed by transition
# hooks like so:
#
# class Vehicle
# state_machine do
# after_transition :on => :park do |vehicle, transition|
# kind = *transition.args # :parallel
# ...
# end
# after_transition :on => :park, :do => :take_deep_breath
#
# event :park do
# ...
# end
#
# def take_deep_breath(transition)
# kind = *transition.args # :parallel
# ...
# end
# end
# end
#
# vehicle = Vehicle.new
# vehicle.park(:parallel)
#
# *Remember* that if the last argument is a boolean, it will be used as the
# +run_action+ parameter to the event action. Using the +park+ action
# example from above, you might call it like so:
#
# vehicle.park # => Uses default args and runs machine action
# vehicle.park(:parallel) # => Specifies the +kind+ argument and runs the machine action
# vehicle.park(:parallel, false) # => Specifies the +kind+ argument and *skips* the machine action
#
# If you decide to override the +park+ event method *and* define additional
# arguments, you can do so as shown below:
#
# class Vehicle
# state_machine do
# event :park do
# ...
# end
# end
#
# def park(kind = :parallel, *args)
# take_deep_breath if kind == :parallel
# super
# end
# end
#
# Note that +super+ is called instead of <tt>super(*args)</tt>. This allows
# the entire arguments list to be accessed by transition callbacks through
# StateMachines::Transition#args.
#
# === Using matchers
#
# The +all+ / +any+ matchers can be used to easily execute blocks for a
# group of events. Note, however, that you cannot use these matchers to
# set configurations for events. Blocks using these matchers can be
# defined at any point in the state machine and will always get applied to
# the proper events.
#
# For example:
#
# state_machine :initial => :parked do
# ...
#
# event all - [:crash] do
# transition :stalled => :parked
# end
# end
#
# == Example
#
# class Vehicle
# state_machine do
# # The park, stop, and halt events will all share the given transitions
# event :park, :stop, :halt do
# transition [:idling, :backing_up] => :parked
# end
#
# event :stop do
# transition :first_gear => :idling
# end
#
# event :ignite do
# transition :parked => :idling
# transition :idling => same # Allow ignite while still idling
# end
# end
# end
def event(*names, &block)
options = names.last.is_a?(Hash) ? names.pop : {}
options.assert_valid_keys(:human_name)
# Store the context so that it can be used for / matched against any event
# that gets added
@events.context(names, &block) if block_given?
if names.first.is_a?(Matcher)
# Add any events referenced in the matcher. When matchers are used,
# events are not allowed to be configured.
raise ArgumentError, "Cannot configure events when using matchers (using #{options.inspect})" if options.any?
events = add_events(names.first.values)
else
events = add_events(names)
# Update the configuration for the event(s)
events.each do |event|
event.human_name = options[:human_name] if options.include?(:human_name)
# Add any states that may have been referenced within the event
add_states(event.known_states)
end
end
events.length == 1 ? events.first : events
end
alias_method :on, :event
# Creates a new transition that determines what to change the current state
# to when an event fires.
#
# == Defining transitions
#
# The options for a new transition uses the Hash syntax to map beginning
# states to ending states. For example,
#
# transition :parked => :idling, :idling => :first_gear, :on => :ignite
#
# In this case, when the +ignite+ event is fired, this transition will cause
# the state to be +idling+ if its current state is +parked+ or +first_gear+
# if its current state is +idling+.
#
# To help define these implicit transitions, a set of helpers are available
# for slightly more complex matching:
# * <tt>all</tt> - Matches every state in the machine
# * <tt>all - [:parked, :idling, ...]</tt> - Matches every state except those specified
# * <tt>any</tt> - An alias for +all+ (matches every state in the machine)
# * <tt>same</tt> - Matches the same state being transitioned from
#
# See StateMachines::MatcherHelpers for more information.
#
# Examples:
#
# transition all => nil, :on => :ignite # Transitions to nil regardless of the current state
# transition all => :idling, :on => :ignite # Transitions to :idling regardless of the current state
# transition all - [:idling, :first_gear] => :idling, :on => :ignite # Transitions every state but :idling and :first_gear to :idling
# transition nil => :idling, :on => :ignite # Transitions to :idling from the nil state
# transition :parked => :idling, :on => :ignite # Transitions to :idling if :parked
# transition [:parked, :stalled] => :idling, :on => :ignite # Transitions to :idling if :parked or :stalled
#
# transition :parked => same, :on => :park # Loops :parked back to :parked
# transition [:parked, :stalled] => same, :on => [:park, :stall] # Loops either :parked or :stalled back to the same state on the park and stall events
# transition all - :parked => same, :on => :noop # Loops every state but :parked back to the same state
#
# # Transitions to :idling if :parked, :first_gear if :idling, or :second_gear if :first_gear
# transition :parked => :idling, :idling => :first_gear, :first_gear => :second_gear, :on => :shift_up
#
# == Verbose transitions
#
# Transitions can also be defined using an explicit set of configuration
# options:
# * <tt>:from</tt> - A state or array of states that can be transitioned from.
# If not specified, then the transition can occur for *any* state.
# * <tt>:to</tt> - The state that's being transitioned to. If not specified,
# then the transition will simply loop back (i.e. the state will not change).
# * <tt>:except_from</tt> - A state or array of states that *cannot* be
# transitioned from.
#
# These options must be used when defining transitions within the context
# of a state.
#
# Examples:
#
# transition :to => nil, :on => :park
# transition :to => :idling, :on => :ignite
# transition :except_from => [:idling, :first_gear], :to => :idling, :on => :ignite
# transition :from => nil, :to => :idling, :on => :ignite
# transition :from => [:parked, :stalled], :to => :idling, :on => :ignite
#
# == Conditions
#
# In addition to the state requirements for each transition, a condition
# can also be defined to help determine whether that transition is
# available. These options will work on both the normal and verbose syntax.
#
# Configuration options:
# * <tt>:if</tt> - A method, proc or string to call to determine if the
# transition should occur (e.g. :if => :moving?, or :if => lambda {|vehicle| vehicle.speed > 60}).
# The condition should return or evaluate to true or false.
# * <tt>:unless</tt> - A method, proc or string to call to determine if the
# transition should not occur (e.g. :unless => :stopped?, or :unless => lambda {|vehicle| vehicle.speed <= 60}).
# The condition should return or evaluate to true or false.
#
# Examples:
#
# transition :parked => :idling, :on => :ignite, :if => :moving?
# transition :parked => :idling, :on => :ignite, :unless => :stopped?
# transition :idling => :first_gear, :first_gear => :second_gear, :on => :shift_up, :if => :seatbelt_on?
#
# transition :from => :parked, :to => :idling, :on => :ignite, :if => :moving?
# transition :from => :parked, :to => :idling, :on => :ignite, :unless => :stopped?
#
# == Order of operations
#
# Transitions are evaluated in the order in which they're defined. As a
# result, if more than one transition applies to a given object, then the
# first transition that matches will be performed.
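#
# NOTE: the definition of +transition+ itself is not included in this excerpt.
# The sketch below is a reconstruction based on the documentation above and on
# the +event+ DSL used elsewhere in this class; it is not necessarily the gem's
# exact implementation.
def transition(options)
  # Machine-level transitions must name the event(s) they belong to
  raise ArgumentError, 'Must specify :on event' unless options[:on]
  branches = []
  options = options.dup
  # Delegate to the event DSL; inside the block, +transition+ resolves to
  # Event#transition, which records the branch for each referenced event
  event(*Array(options.delete(:on))) { branches << transition(options) }
  branches.length == 1 ? branches.first : branches
end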
# Creates a callback that will be invoked *before* a transition is
# performed so long as the given requirements match the transition.
#
# == The callback
#
# Callbacks must be defined as either an argument, in the :do option, or
# as a block. For example,
#
# class Vehicle
# state_machine do
# before_transition :set_alarm
# before_transition :set_alarm, all => :parked
# before_transition all => :parked, :do => :set_alarm
# before_transition all => :parked do |vehicle, transition|
# vehicle.set_alarm
# end
# ...
# end
# end
#
# Notice that the first three callbacks are the same in terms of how the
# methods to invoke are defined. However, using the <tt>:do</tt> can
# provide for a more fluid DSL.
#
# In addition, multiple callbacks can be defined like so:
#
# class Vehicle
# state_machine do
# before_transition :set_alarm, :lock_doors, all => :parked
# before_transition all => :parked, :do => [:set_alarm, :lock_doors]
# before_transition :set_alarm do |vehicle, transition|
# vehicle.lock_doors
# end
# end
# end
#
# Notice that the different ways of configuring methods can be mixed.
#
# == State requirements
#
# Callbacks can require that the machine be transitioning from and to
# specific states. These requirements use a Hash syntax to map beginning
# states to ending states. For example,
#
# before_transition :parked => :idling, :idling => :first_gear, :do => :set_alarm
#
# In this case, the +set_alarm+ callback will only be called if the machine
# is transitioning from +parked+ to +idling+ or from +idling+ to +first_gear+.
#
# To help define state requirements, a set of helpers are available for
# slightly more complex matching:
# * <tt>all</tt> - Matches every state/event in the machine
# * <tt>all - [:parked, :idling, ...]</tt> - Matches every state/event except those specified
# * <tt>any</tt> - An alias for +all+ (matches every state/event in the machine)
# * <tt>same</tt> - Matches the same state being transitioned from
#
# See StateMachines::MatcherHelpers for more information.
#
# Examples:
#
# before_transition :parked => [:idling, :first_gear], :do => ... # Matches from parked to idling or first_gear
# before_transition all - [:parked, :idling] => :idling, :do => ... # Matches from every state except parked and idling to idling
# before_transition all => :parked, :do => ... # Matches all states to parked
# before_transition any => same, :do => ... # Matches every loopback
#
# == Event requirements
#
# In addition to state requirements, an event requirement can be defined so
# that the callback is only invoked on specific events using the +on+
# option. This can also use the same matcher helpers as the state
# requirements.
#
# Examples:
#
# before_transition :on => :ignite, :do => ... # Matches only on ignite
# before_transition :on => all - :ignite, :do => ... # Matches on every event except ignite
# before_transition :parked => :idling, :on => :ignite, :do => ... # Matches from parked to idling on ignite
#
# == Verbose Requirements
#
# Requirements can also be defined using verbose options rather than the
# implicit Hash syntax and helper methods described above.
#
# Configuration options:
# * <tt>:from</tt> - One or more states being transitioned from. If none
# are specified, then all states will match.
# * <tt>:to</tt> - One or more states being transitioned to. If none are
# specified, then all states will match.
# * <tt>:on</tt> - One or more events that fired the transition. If none
# are specified, then all events will match.
# * <tt>:except_from</tt> - One or more states *not* being transitioned from
# * <tt>:except_to</tt> - One or more states *not* being transitioned to
# * <tt>:except_on</tt> - One or more events that *did not* fire the transition
#
# Examples:
#
# before_transition :from => :ignite, :to => :idling, :on => :park, :do => ...
# before_transition :except_from => :ignite, :except_to => :idling, :except_on => :park, :do => ...
#
# == Conditions
#
# In addition to the state/event requirements, a condition can also be
# defined to help determine whether the callback should be invoked.
#
# Configuration options:
# * <tt>:if</tt> - A method, proc or string to call to determine if the
# callback should occur (e.g. :if => :allow_callbacks, or
# :if => lambda {|user| user.signup_step > 2}). The method, proc or string
# should return or evaluate to a true or false value.
# * <tt>:unless</tt> - A method, proc or string to call to determine if the
# callback should not occur (e.g. :unless => :skip_callbacks, or
# :unless => lambda {|user| user.signup_step <= 2}). The method, proc or
# string should return or evaluate to a true or false value.
#
# Examples:
#
# before_transition :parked => :idling, :if => :moving?, :do => ...
# before_transition :on => :ignite, :unless => :seatbelt_on?, :do => ...
#
# == Accessing the transition
#
# In addition to passing the object being transitioned, the actual
# transition describing the context (e.g. event, from, to) can be accessed
# as well. This additional argument is only passed if the callback allows
# for it.
#
# For example,
#
# class Vehicle
# # Only specifies one parameter (the object being transitioned)
# before_transition all => :parked do |vehicle|
# vehicle.set_alarm
# end
#
# # Specifies 2 parameters (object being transitioned and actual transition)
# before_transition all => :parked do |vehicle, transition|
# vehicle.set_alarm(transition)
# end
# end
#
# *Note* that the object in the callback will only be passed in as an
# argument if callbacks are configured to *not* be bound to the object
# involved. This is the default and may change on a per-integration basis.
#
# See StateMachines::Transition for more information about the
# attributes available on the transition.
#
# == Usage with delegates
#
# As noted above, state_machine uses the callback method's argument list
# arity to determine whether to include the transition in the method call.
# If you're using delegates, such as those defined in ActiveSupport or
# Forwardable, the actual arity of the delegated method gets masked. This
# means that callbacks which reference delegates will always get passed the
# transition as an argument. For example:
#
# class Vehicle
# extend Forwardable
# delegate :refresh => :dashboard
#
# state_machine do
# before_transition :refresh
# ...
# end
#
# def dashboard
# @dashboard ||= Dashboard.new
# end
# end
#
# class Dashboard
# def refresh(transition)
# # ...
# end
# end
#
# In the above example, <tt>Dashboard#refresh</tt> *must* define a
# +transition+ argument. Otherwise, an +ArgumentError+ exception will get
# raised. The only way around this is to avoid the use of delegates and
# manually define the delegate method so that the correct arity is used.
#
# == Examples
#
# Below is an example of a class with one state machine and various types
# of +before+ transitions defined for it:
#
# class Vehicle
# state_machine do
# # Before all transitions
# before_transition :update_dashboard
#
# # Before specific transition:
# before_transition [:first_gear, :idling] => :parked, :on => :park, :do => :take_off_seatbelt
#
# # With conditional callback:
# before_transition all => :parked, :do => :take_off_seatbelt, :if => :seatbelt_on?
#
# # Using helpers:
# before_transition all - :stalled => same, :on => any - :crash, :do => :update_dashboard
# ...
# end
# end
#
# As can be seen, any number of transitions can be created using various
# combinations of configuration options.
def before_transition(*args, &block)
options = (args.last.is_a?(Hash) ? args.pop : {})
options[:do] = args if args.any?
add_callback(:before, options, &block)
end
# Creates a callback that will be invoked *after* a transition is
# performed so long as the given requirements match the transition.
#
# See +before_transition+ for a description of the possible configurations
# for defining callbacks.
def after_transition(*args, &block)
options = (args.last.is_a?(Hash) ? args.pop : {})
options[:do] = args if args.any?
add_callback(:after, options, &block)
end
# Creates a callback that will be invoked *around* a transition so long as
# the given requirements match the transition.
#
# == The callback
#
# Around callbacks wrap transitions, executing code both before and after.
# These callbacks are defined in the exact same manner as before / after
# callbacks with the exception that the transition must be yielded to in
# order to finish running it.
#
# If defining +around+ callbacks using blocks, you must yield within the
# transition by directly calling the block (since yielding is not allowed
# within blocks).
#
# For example,
#
# class Vehicle
# state_machine do
# around_transition do |block|
# Benchmark.measure { block.call }
# end
#
# around_transition do |vehicle, block|
# logger.info "vehicle was #{state}..."
# block.call
# logger.info "...and is now #{state}"
# end
#
# around_transition do |vehicle, transition, block|
# logger.info "before #{transition.event}: #{vehicle.state}"
# block.call
# logger.info "after #{transition.event}: #{vehicle.state}"
# end
# end
# end
#
# Notice that referencing the block is similar to doing so within an
# actual method definition in that it is always the last argument.
#
# On the other hand, if you're defining +around+ callbacks using method
# references, you can yield like normal:
#
# class Vehicle
# state_machine do
# around_transition :benchmark
# ...
# end
#
# def benchmark
# Benchmark.measure { yield }
# end
# end
#
# See +before_transition+ for a description of the possible configurations
# for defining callbacks.
def around_transition(*args, &block)
options = (args.last.is_a?(Hash) ? args.pop : {})
options[:do] = args if args.any?
add_callback(:around, options, &block)
end
# Creates a callback that will be invoked *after* a transition fails to
# be performed so long as the given requirements match the transition.
#
# See +before_transition+ for a description of the possible configurations
# for defining callbacks. *Note* however that you cannot define the state
# requirements in these callbacks. You may only define event requirements.
#
# == The callback
#
# Failure callbacks get invoked whenever an event fails to execute. This
# can happen when no transition is available, a +before+ callback halts
# execution, or the action associated with this machine fails to succeed.
# In any of these cases, any failure callback that matches the attempted
# transition will be run.
#
# For example,
#
# class Vehicle
# state_machine do
# after_failure do |vehicle, transition|
# logger.error "vehicle #{vehicle} failed to transition on #{transition.event}"
# end
#
# after_failure :on => :ignite, :do => :log_ignition_failure
#
# ...
# end
# end
def after_failure(*args, &block)
options = (args.last.is_a?(Hash) ? args.pop : {})
options[:do] = args if args.any?
options.assert_valid_keys(:on, :do, :if, :unless)
add_callback(:failure, options, &block)
end
# Generates a list of the possible transition sequences that can be run on
# the given object. These paths can reveal all of the possible states and
# events that can be encountered in the object's state machine based on the
# object's current state.
#
# Configuration options:
# * +from+ - The initial state to start all paths from. By default, this
# is the object's current state.
# * +to+ - The target state to end all paths on. By default, paths will
# end when they loop back to the first transition on the path.
# * +deep+ - Whether to allow the target state to be crossed more than once
# in a path. By default, paths will immediately stop when the target
# state (if specified) is reached. If this is enabled, then paths can
# continue even after reaching the target state; they will stop when
# reaching the target state a second time.
#
# *Note* that the object is never modified when the list of paths is
# generated.
#
# == Examples
#
# class Vehicle
# state_machine :initial => :parked do
# event :ignite do
# transition :parked => :idling
# end
#
# event :shift_up do
# transition :idling => :first_gear, :first_gear => :second_gear
# end
#
# event :shift_down do
# transition :second_gear => :first_gear, :first_gear => :idling
# end
# end
# end
#
# vehicle = Vehicle.new # => #<Vehicle:0xb7c27024 @state="parked">
# vehicle.state # => "parked"
#
# vehicle.state_paths
# # => [
# # [#<StateMachines::Transition attribute=:state event=:ignite from="parked" from_name=:parked to="idling" to_name=:idling>,
# # #<StateMachines::Transition attribute=:state event=:shift_up from="idling" from_name=:idling to="first_gear" to_name=:first_gear>,
# # #<StateMachines::Transition attribute=:state event=:shift_up from="first_gear" from_name=:first_gear to="second_gear" to_name=:second_gear>,
# # #<StateMachines::Transition attribute=:state event=:shift_down from="second_gear" from_name=:second_gear to="first_gear" to_name=:first_gear>,
# # #<StateMachines::Transition attribute=:state event=:shift_down from="first_gear" from_name=:first_gear to="idling" to_name=:idling>],
# #
# # [#<StateMachines::Transition attribute=:state event=:ignite from="parked" from_name=:parked to="idling" to_name=:idling>,
# # #<StateMachines::Transition attribute=:state event=:shift_up from="idling" from_name=:idling to="first_gear" to_name=:first_gear>,
# # #<StateMachines::Transition attribute=:state event=:shift_down from="first_gear" from_name=:first_gear to="idling" to_name=:idling>]
# # ]
#
# vehicle.state_paths(:from => :parked, :to => :second_gear)
# # => [
# # [#<StateMachines::Transition attribute=:state event=:ignite from="parked" from_name=:parked to="idling" to_name=:idling>,
# # #<StateMachines::Transition attribute=:state event=:shift_up from="idling" from_name=:idling to="first_gear" to_name=:first_gear>,
# # #<StateMachines::Transition attribute=:state event=:shift_up from="first_gear" from_name=:first_gear to="second_gear" to_name=:second_gear>]
# # ]
#
# In addition to getting the possible paths that can be accessed, you can
# also get summary information about the states / events that can be
# accessed at some point along one of the paths. For example:
#
# # Get the list of states that can be accessed from the current state
# vehicle.state_paths.to_states # => [:idling, :first_gear, :second_gear]
#
# # Get the list of events that can be accessed from the current state
# vehicle.state_paths.events # => [:ignite, :shift_up, :shift_down]
def paths_for(object, requirements = {})
PathCollection.new(object, self, requirements)
end
# Marks the given object as invalid with the given message.
#
# By default, this is a no-op.
def invalidate(_object, _attribute, _message, _values = [])
end
# Gets a description of the errors for the given object. This is used to
# provide more detailed information when an InvalidTransition exception is
# raised.
def errors_for(_object)
''
end
# Resets any errors previously added when invalidating the given object.
#
# By default, this is a no-op.
def reset(_object)
end
# Generates the message to use when invalidating the given object after
# failing to transition on a specific event
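#
# For example, with the default messages (illustrative usage only):
#
#   machine.generate_message(:invalid)                                  # => "is invalid"
#   machine.generate_message(:invalid_transition, [[:event, 'park']])   # => 'cannot transition via "park"'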
def generate_message(name, values = [])
message = (@messages[name] || self.class.default_messages[name])
# Check whether there are actually any values to interpolate to avoid
# any warnings
if message.scan(/%./).any? { |match| match != '%%' }
message % values.map { |value| value.last }
else
message
end
end
# Runs a transaction, rolling back any changes if the yielded block fails.
#
# This is only applicable to integrations that involve databases. By
# default, this will not run any transactions since the changes aren't
# taking place within the context of a database.
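#
# For example (illustrative usage only, assuming the default :state machine):
#
#   machine.within_transaction(vehicle) do
#     vehicle.fire_state_event(:ignite)
#   end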
def within_transaction(object)
if use_transactions
transaction(object) { yield }
else
yield
end
end
def draw(*)
fail NotImplementedError
end
# Determines whether an action hook was defined for firing attribute-based
# event transitions when the configured action gets called.
def action_hook?(self_only = false)
@action_hook_defined || !self_only && owner_class.state_machines.any? { |name, machine| machine.action == action && machine != self && machine.action_hook?(true) }
end
protected
# Runs additional initialization hooks. By default, this is a no-op.
def after_initialize
end
# Looks up other machines that have been defined in the owner class and
# are targeting the same attribute as this machine. When accessing
# sibling machines, they will be automatically copied for the current
# class if they haven't been already. This ensures that any configuration
# changes made to the sibling machines only affect this class and not any
# base class that may have originally defined the machine.
def sibling_machines
owner_class.state_machines.inject([]) do |machines, (name, machine)|
if machine.attribute == attribute && machine != self
machines << (owner_class.state_machine(name) {})
end
machines
end
end
# Determines if the machine's attribute needs to be initialized. This
# will only be true if the machine's attribute is blank.
def initialize_state?(object)
value = read(object, :state)
(value.nil? || value.respond_to?(:empty?) && value.empty?) && !states[value, :value]
end
# Adds helper methods for interacting with the state machine, including
# for states, events, and transitions
def define_helpers
define_state_accessor
define_state_predicate
define_event_helpers
define_path_helpers
define_action_helpers if define_action_helpers?
define_name_helpers
end
# Defines the initial values for state machine attributes. Static values
# are set prior to the original initialize method and dynamic values are
# set *after* the initialize method in case it is dependent on it.
def define_state_initializer
define_helper :instance, <<-end_eval, __FILE__, __LINE__ + 1
def initialize(*)
self.class.state_machines.initialize_states(self) { super }
end
end_eval
end
# Adds reader/writer methods for accessing the state attribute
def define_state_accessor
attribute = self.attribute
@helper_modules[:instance].class_eval { attr_reader attribute } unless owner_class_ancestor_has_method?(:instance, attribute)
@helper_modules[:instance].class_eval { attr_writer attribute } unless owner_class_ancestor_has_method?(:instance, "#{attribute}=")
end
# Adds predicate method to the owner class for determining the name of the
# current state
def define_state_predicate
call_super = !!owner_class_ancestor_has_method?(:instance, "#{name}?")
define_helper :instance, <<-end_eval, __FILE__, __LINE__ + 1
def #{name}?(*args)
args.empty? && (#{call_super} || defined?(super)) ? super : self.class.state_machine(#{name.inspect}).states.matches?(self, *args)
end
end_eval
end
# Adds helper methods for getting information about this state machine's
# events
def define_event_helpers
# Gets the events that are allowed to fire on the current object
define_helper(:instance, attribute(:events)) do |machine, object, *args|
machine.events.valid_for(object, *args).map { |event| event.name }
end
# Gets the next possible transitions that can be run on the current
# object
define_helper(:instance, attribute(:transitions)) do |machine, object, *args|
machine.events.transitions_for(object, *args)
end
# Fire an arbitrary event for this machine
define_helper(:instance, "fire_#{attribute(:event)}") do |machine, object, event, *args|
machine.events.fetch(event).fire(object, *args)
end
# Add helpers for tracking the event / transition to invoke when the
# action is called
if action
event_attribute = attribute(:event)
define_helper(:instance, event_attribute) do |machine, object|
# Interpret non-blank events as present
event = machine.read(object, :event, true)
event && !(event.respond_to?(:empty?) && event.empty?) ? event.to_sym : nil
end
# A roundabout way of writing the attribute is used here so that
# integrations can hook into this modification
define_helper(:instance, "#{event_attribute}=") do |machine, object, value|
machine.write(object, :event, value, true)
end
event_transition_attribute = attribute(:event_transition)
define_helper :instance, <<-end_eval, __FILE__, __LINE__ + 1
protected; attr_accessor #{event_transition_attribute.inspect}
end_eval
end
end
# Adds helper methods for getting information about this state machine's
# available transition paths
def define_path_helpers
# Gets the paths of transitions available to the current object
define_helper(:instance, attribute(:paths)) do |machine, object, *args|
machine.paths_for(object, *args)
end
end
# Determines whether action helpers should be defined for this machine.
# This is only true if there is an action configured and no other machines
# have processed this same configuration already.
def define_action_helpers?
action && !owner_class.state_machines.any? { |name, machine| machine.action == action && machine != self }
end
# Adds helper methods for automatically firing events when an action
# is invoked
def define_action_helpers
if action_hook
@action_hook_defined = true
define_action_hook
end
end
# Hooks directly into actions by defining the same method in an included
# module. As a result, when the action gets invoked, any state events
# defined for the object will get run. Method visibility is preserved.
def define_action_hook
action_hook = self.action_hook
action = self.action
private_action_hook = owner_class.private_method_defined?(action_hook)
# Only define the helper if it hasn't already been defined
define_helper :instance, <<-end_eval, __FILE__, __LINE__ + 1
def #{action_hook}(*)
self.class.state_machines.transitions(self, #{action.inspect}).perform { super }
end
private #{action_hook.inspect} if #{private_action_hook}
end_eval
end
# The method to hook into for triggering transitions when invoked. By
# default, this is the action configured for the machine.
#
# Since the default hook technique relies on module inheritance, the
# action must be defined in an ancestor of the owner class in order for
# it to be the action hook.
def action_hook
action && owner_class_ancestor_has_method?(:instance, action) ? action : nil
end
# Determines whether there's already a helper method defined within the
# given scope. This is true only if one of the owner's ancestors defines
# the method and is further along in the ancestor chain than this
# machine's helper module.
def owner_class_ancestor_has_method?(scope, method)
return false unless owner_class_has_method?(scope, method)
superclasses = owner_class.ancestors.select { |ancestor| ancestor.is_a?(Class) }[1..-1]
if scope == :class
current = owner_class.singleton_class
superclass = superclasses.first
else
current = owner_class
superclass = owner_class.superclass
end
# Generate the list of modules that *only* occur in the owner class, but
# were included *prior* to the helper modules, in addition to the
# superclasses
ancestors = current.ancestors - superclass.ancestors + superclasses
ancestors = ancestors[ancestors.index(@helper_modules[scope])..-1].reverse
# Search for the first ancestor that defined this method
ancestors.detect do |ancestor|
ancestor = ancestor.singleton_class if scope == :class && ancestor.is_a?(Class)
ancestor.method_defined?(method) || ancestor.private_method_defined?(method)
end
end
def owner_class_has_method?(scope, method)
target = scope == :class ? owner_class.singleton_class : owner_class
target.method_defined?(method) || target.private_method_defined?(method)
end
# Adds helper methods for accessing naming information about states and
# events on the owner class
def define_name_helpers
# Gets the humanized version of a state
define_helper(:class, "human_#{attribute(:name)}") do |machine, klass, state|
machine.states.fetch(state).human_name(klass)
end
# Gets the humanized version of an event
define_helper(:class, "human_#{attribute(:event_name)}") do |machine, klass, event|
machine.events.fetch(event).human_name(klass)
end
# Gets the state name for the current value
define_helper(:instance, attribute(:name)) do |machine, object|
machine.states.match!(object).name
end
# Gets the human state name for the current value
define_helper(:instance, "human_#{attribute(:name)}") do |machine, object|
machine.states.match!(object).human_name(object.class)
end
end
# Defines the with/without scope helpers for this attribute. Both the
# singular and plural versions of the attribute are defined for each
# scope helper. A custom plural can be specified if it cannot be
# automatically determined by either calling +pluralize+ on the attribute
# name or adding an "s" to the end of the name.
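# For example, an ORM integration that implements +create_with_scope+ and
# +create_without_scope+ could end up exposing class-level helpers such as
# (illustrative usage only):
#
#   Vehicle.with_state(:parked)              # Objects whose state is "parked"
#   Vehicle.with_states(:parked, :idling)    # Objects in either state
#   Vehicle.without_state(:parked)           # Objects in any other state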
def define_scopes(custom_plural = nil)
plural = custom_plural || pluralize(name)
[:with, :without].each do |kind|
[name, plural].map { |s| s.to_s }.uniq.each do |suffix|
method = "#{kind}_#{suffix}"
if scope = send("create_#{kind}_scope", method)
# Converts state names to their corresponding values so that they
# can be looked up properly
define_helper(:class, method) do |machine, klass, *states|
run_scope(scope, machine, klass, states)
end
end
end
end
end
# Generates the results for the given scope based on one or more states to
# filter by
def run_scope(scope, machine, klass, states)
values = states.flatten.map { |state| machine.states.fetch(state).value }
scope.call(klass, values)
end
# Pluralizes the given word using #pluralize (if available) or simply
# adding an "s" to the end of the word
def pluralize(word)
word = word.to_s
if word.respond_to?(:pluralize)
word.pluralize
else
"#{name}s"
end
end
# Creates a scope for finding objects *with* a particular value or values
# for the attribute.
#
# By default, this is a no-op.
def create_with_scope(name)
end
# Creates a scope for finding objects *without* a particular value or
# values for the attribute.
#
# By default, this is a no-op.
def create_without_scope(name)
end
# Always yields
def transaction(object)
yield
end
# Gets the initial attribute value defined by the owner class (outside of
# the machine's definition). By default, this is always nil.
def owner_class_attribute_default
nil
end
# Checks whether the given state matches the attribute default specified
# by the owner class
def owner_class_attribute_default_matches?(state)
state.matches?(owner_class_attribute_default)
end
# Updates this machine based on the configuration of other machines in the
# owner class that share the same target attribute.
def add_sibling_machine_configs
# Add existing states
sibling_machines.each do |machine|
machine.states.each { |state| states << state unless states[state.name] }
end
end
# Adds a new transition callback of the given type.
def add_callback(type, options, &block)
callbacks[type == :around ? :before : type] << callback = Callback.new(type, options, &block)
add_states(callback.known_states)
callback
end
# Tracks the given set of states in the list of all known states for
# this machine
def add_states(new_states)
new_states.map do |new_state|
# Check for other states that use a different class type for their name.
# This typically prevents string / symbol misuse.
if new_state && conflict = states.detect { |state| state.name && state.name.class != new_state.class }
raise ArgumentError, "#{new_state.inspect} state defined as #{new_state.class}, #{conflict.name.inspect} defined as #{conflict.name.class}; all states must be consistent"
end
unless state = states[new_state]
states << state = State.new(self, new_state)
# Copy states over to sibling machines
sibling_machines.each { |machine| machine.states << state }
end
state
end
end
# Tracks the given set of events in the list of all known events for
# this machine
def add_events(new_events)
new_events.map do |new_event|
# Check for other events that use a different class type for their name.
# This typically prevents string / symbol misuse.
if conflict = events.detect { |event| event.name.class != new_event.class }
raise ArgumentError, "#{new_event.inspect} event defined as #{new_event.class}, #{conflict.name.inspect} defined as #{conflict.name.class}; all events must be consistent"
end
unless event = events[new_event]
events << event = Event.new(self, new_event)
end
event
end
end
end
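# A minimal usage sketch (editorial, not from the library source): the with/without
# scope helpers defined above are normally reached through a model class that
# declares a state machine; the Vehicle class and its states below are hypothetical.
#
#   class Vehicle < ActiveRecord::Base
#     state_machine :state, initial: :parked do
#       event(:ignite) { transition :parked => :idling }
#     end
#   end
#
#   Vehicle.with_state(:parked)               # records currently parked
#   Vehicle.without_states(:parked, :idling)  # records in any other state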
|
sailthru/sailthru-ruby-client | lib/sailthru/client.rb | Sailthru.Client.save_alert | ruby | def save_alert(email, type, template, _when = nil, options = {})
data = options
data[:email] = email
data[:type] = type
data[:template] = template
if (type == 'weekly' || type == 'daily')
data[:when] = _when
end
api_post(:alert, data)
end | params
email, String
type, String
template, String
_when, String
options, Hash
Add a new alert to a user. You can add either a realtime or a summary alert (daily/weekly).
_when is required only when the alert type is weekly or daily. | train | https://github.com/sailthru/sailthru-ruby-client/blob/978deed2b25769a73de14107cb2a0c93143522e4/lib/sailthru/client.rb#L566-L575 | class Client
DEFAULT_API_URI = 'https://api.sailthru.com'
include Helpers
attr_accessor :verify_ssl
# params:
# api_key, String
# secret, String
# api_uri, String
#
# Instantiate a new client; constructor optionally takes overrides for key/secret/uri and proxy server settings.
def initialize(api_key=nil, secret=nil, api_uri=nil, proxy_host=nil, proxy_port=nil, opts={})
@api_key = api_key || Sailthru.api_key || raise(ArgumentError, "You must provide an API key or call Sailthru.credentials() first")
@secret = secret || Sailthru.secret || raise(ArgumentError, "You must provide your secret or call Sailthru.credentials() first")
@api_uri = api_uri.nil? ? DEFAULT_API_URI : api_uri
@proxy_host = proxy_host
@proxy_port = proxy_port
@verify_ssl = true
@opts = opts
@last_rate_limit_info = {}
end
# params:
# template_name, String
# email, String
# vars, Hash
# options, Hash
# replyto: override Reply-To header
# test: send as test email (subject line will be marked, will not count towards stats)
# returns:
# Hash, response data from server
def send_email(template_name, email, vars={}, options = {}, schedule_time = nil, limit = {})
post = {}
post[:template] = template_name
post[:email] = email
post[:vars] = vars if vars.length >= 1
post[:options] = options if options.length >= 1
post[:schedule_time] = schedule_time if !schedule_time.nil?
post[:limit] = limit if limit.length >= 1
api_post(:send, post)
end
def multi_send(template_name, emails, vars={}, options = {}, schedule_time = nil, evars = {})
post = {}
post[:template] = template_name
post[:email] = emails
post[:vars] = vars if vars.length >= 1
post[:options] = options if options.length >= 1
post[:schedule_time] = schedule_time if !schedule_time.nil?
post[:evars] = evars if evars.length >= 1
api_post(:send, post)
end
# params:
# send_id, Fixnum
# returns:
# Hash, response data from server
#
# Get the status of a send.
def get_send(send_id)
api_get(:send, {:send_id => send_id.to_s})
end
def cancel_send(send_id)
api_delete(:send, {:send_id => send_id.to_s})
end
# params:
# name, String
# list, String
# schedule_time, String
# from_name, String
# from_email, String
# subject, String
# content_html, String
# content_text, String
# options, Hash
# returns:
# Hash, response data from server
#
# Schedule a mass mail blast
def schedule_blast(name, list, schedule_time, from_name, from_email, subject, content_html, content_text, options = {})
post = options ? options : {}
post[:name] = name
post[:list] = list
post[:schedule_time] = schedule_time
post[:from_name] = from_name
post[:from_email] = from_email
post[:subject] = subject
post[:content_html] = content_html
post[:content_text] = content_text
api_post(:blast, post)
end
# Schedule a mass mail blast from template
def schedule_blast_from_template(template, list, schedule_time, options={})
post = options ? options : {}
post[:copy_template] = template
post[:list] = list
post[:schedule_time] = schedule_time
api_post(:blast, post)
end
# Schedule a mass mail blast from previous blast
def schedule_blast_from_blast(blast_id, schedule_time, options={})
post = options ? options : {}
post[:copy_blast] = blast_id
#post[:name] = name
post[:schedule_time] = schedule_time
api_post(:blast, post)
end
# params
# blast_id, Fixnum | String
# name, String
# list, String
# schedule_time, String
# from_name, String
# from_email, String
# subject, String
# content_html, String
# content_text, String
# options, hash
#
# updates existing blast
def update_blast(blast_id, name = nil, list = nil, schedule_time = nil, from_name = nil, from_email = nil, subject = nil, content_html = nil, content_text = nil, options = {})
data = options ? options : {}
data[:blast_id] = blast_id
if name != nil
data[:name] = name
end
if list != nil
data[:list] = list
end
if schedule_time != nil
data[:schedule_time] = schedule_time
end
if from_name != nil
data[:from_name] = from_name
end
if from_email != nil
data[:from_email] = from_email
end
if subject != nil
data[:subject] = subject
end
if content_html != nil
data[:content_html] = content_html
end
if content_text != nil
data[:content_text] = content_text
end
api_post(:blast, data)
end
# params:
# blast_id, Fixnum | String
# options, hash
# returns:
# Hash, response data from server
#
# Get information on a previously scheduled email blast
def get_blast(blast_id, options={})
options[:blast_id] = blast_id.to_s
api_get(:blast, options)
end
# params:
# blast_id, Fixnum | String
#
# Cancel a scheduled Blast
def cancel_blast(blast_id)
api_post(:blast, {:blast_id => blast_id, :schedule_time => ''})
end
# params:
# blast_id, Fixnum | String
#
# Delete a Blast
def delete_blast(blast_id)
api_delete(:blast, {:blast_id => blast_id})
end
# params:
# email, String
# returns:
# Hash, response data from server
#
# Return information about an email address, including replacement vars and lists.
def get_email(email)
api_get(:email, {:email => email})
end
# params:
# email, String
# vars, Hash
# lists, Hash mapping list name => 1 for subscribed, 0 for unsubscribed
# options, Hash mapping optional parameters
# returns:
# Hash, response data from server
#
# Set replacement vars and/or list subscriptions for an email address.
def set_email(email, vars = {}, lists = {}, templates = {}, options = {})
data = options
data[:email] = email
data[:vars] = vars unless vars.empty?
data[:lists] = lists unless lists.empty?
data[:templates] = templates unless templates.empty?
api_post(:email, data)
end
# params:
# new_email, String
# old_email, String
# options, Hash mapping optional parameters
# returns:
# Hash of response data.
#
# change a user's email address.
def change_email(new_email, old_email, options = {})
data = options
data[:email] = new_email
data[:change_email] = old_email
api_post(:email, data)
end
# returns:
# Hash of response data.
#
# Get all templates
def get_templates(templates = {})
api_get(:template, templates)
end
# params:
# template_name, String
# returns:
# Hash of response data.
#
# Get a template.
def get_template(template_name)
api_get(:template, {:template => template_name})
end
# params:
# template_name, String
# template_fields, Hash
# returns:
# Hash containg response from the server.
#
# Save a template.
def save_template(template_name, template_fields)
data = template_fields
data[:template] = template_name
api_post(:template, data)
end
# params:
# template_name, String
# returns:
# Hash of response data.
#
# Delete a template.
def delete_template(template_name)
api_delete(:template, {:template => template_name})
end
# params:
# params, Hash
# request, String
# returns:
# boolean, Returns true if the incoming request is an authenticated verify post.
def receive_verify_post(params, request)
if request.post?
[:action, :email, :send_id, :sig].each { |key| return false unless params.has_key?(key) }
return false unless params[:action] == :verify
sig = params.delete(:sig)
params.delete(:controller)
return false unless sig == get_signature_hash(params, @secret)
_send = get_send(params[:send_id])
return false unless _send.has_key?('email')
return false unless _send['email'] == params[:email]
return true
else
return false
end
end
# params:
# params, Hash
# request, String
# returns:
# TrueClass or FalseClass, Returns true if the incoming request is an authenticated optout post.
def receive_optout_post(params, request)
if request.post?
[:action, :email, :sig].each { |key| return false unless params.has_key?(key) }
return false unless params[:action] == 'optout'
sig = params.delete(:sig)
params.delete(:controller)
sig == get_signature_hash(params, @secret)
else
false
end
end
# List Postbacks must be enabled by Sailthru
# Contact your account manager or contact support to have this enabled
#
# params:
# params, Hash
# request, String
# returns:
# TrueClass or FalseClass, Returns true if the incoming request is an authenticated list post.
def receive_list_post(params, request)
if request.post?
[:action, :email, :sig].each { |key| return false unless params.has_key?(key) }
return false unless params[:action] == 'update'
sig = params.delete(:sig)
params.delete(:controller)
sig == get_signature_hash(params, @secret)
else
false
end
end
# params:
# params, Hash
# request, String
# returns:
# TrueClass or FalseClass, Returns true if the incoming request is an authenticated hardbounce post.
def receive_hardbounce_post(params, request)
if request.post?
[:action, :email, :sig].each { |key| return false unless params.has_key?(key) }
return false unless params[:action] == 'hardbounce'
sig = params.delete(:sig)
params.delete(:controller)
sig == get_signature_hash(params, @secret)
else
false
end
end
# params:
# email, String
# items, Array of Hashes
# incomplete, Integer
# message_id, String
# options, Hash
# returns:
# hash, response from server
#
# Record that a user has made a purchase, or has added items to their purchase total.
def purchase(email, items, incomplete = nil, message_id = nil, options = {})
data = options
data[:email] = email
data[:items] = items
if incomplete != nil
data[:incomplete] = incomplete.to_i
end
if message_id != nil
data[:message_id] = message_id
end
api_post(:purchase, data)
end
# <b>DEPRECATED:</b> Please use either stats_list or stats_blast
# params:
# stat, String
#
# returns:
# hash, response from server
# Request various stats from Sailthru.
def get_stats(stat)
warn "[DEPRECATION] `get_stats` is deprecated. Please use `stats_list` and `stats_blast` instead"
api_get(:stats, {:stat => stat})
end
# params
# list, String
# date, String
#
# returns:
# hash, response from server
# Retrieve information about your subscriber counts on a particular list, on a particular day.
def stats_list(list = nil, date = nil)
data = {}
if list != nil
data[:list] = list
end
if date != nil
data[:date] = date
end
data[:stat] = 'list'
api_get(:stats, data)
end
# params
# blast_id, String
# start_date, String
# end_date, String
# options, Hash
#
# returns:
# hash, response from server
# Retrieve information about a particular blast or aggregated information from all of blasts over a specified date range
def stats_blast(blast_id = nil, start_date = nil, end_date = nil, options = {})
data = options
if blast_id != nil
data[:blast_id] = blast_id
end
if start_date != nil
data[:start_date] = start_date
end
if end_date != nil
data[:end_date] = end_date
end
data[:stat] = 'blast'
api_get(:stats, data)
end
# params
# template, String
# start_date, String
# end_date, String
# options, Hash
#
# returns:
# hash, response from server
# Retrieve information about a particular blast or aggregated information from all of blasts over a specified date range
def stats_send(template = nil, start_date = nil, end_date = nil, options = {})
data = options
if template != nil
data[:template] = template
end
if start_date != nil
data[:start_date] = start_date
end
if end_date != nil
data[:end_date] = end_date
end
data[:stat] = 'send'
api_get(:stats, data)
end
# <b>DEPRECATED:</b> Please use save_content
# params
# title, String
# url, String
# date, String
# tags, Array or Comma separated string
# vars, Hash
# options, Hash
#
# Push a new piece of content to Sailthru, triggering any applicable alerts.
# http://docs.sailthru.com/api/content
def push_content(title, url, date = nil, tags = nil, vars = {}, options = {})
data = options
data[:title] = title
data[:url] = url
if date != nil
data[:date] = date
end
if tags != nil
if tags.class == Array
tags = tags.join(',')
end
data[:tags] = tags
end
if vars.length > 0
data[:vars] = vars
end
api_post(:content, data)
end
# params
# id, String – An identifier for the item (by default, the item’s URL).
# options, Hash - Containing any of the parameters described on
# https://getstarted.sailthru.com/developers/api/content/#POST_Mode
#
# Push a new piece of content to Sailthru, triggering any applicable alerts.
# http://docs.sailthru.com/api/content
def save_content(id, options)
data = options
data[:id] = id
data[:tags] = data[:tags].join(',') if data[:tags].respond_to?(:join)
api_post(:content, data)
end
# params
# list, String
#
# Get information about a list.
def get_list(list)
api_get(:list, {:list => list})
end
# params
#
# Get information about all lists
def get_lists
api_get(:list, {})
end
# params
# list, String
# options, Hash
# Create a list, or update a list.
def save_list(list, options = {})
data = options
data[:list] = list
api_post(:list, data)
end
# params
# list, String
#
# Deletes a list
def delete_list(list)
api_delete(:list, {:list => list})
end
# params
# email, String
#
# get user alert data
def get_alert(email)
api_get(:alert, {:email => email})
end
# params
# email, String
# type, String
# template, String
# _when, String
# options, Hash
#
# Add a new alert to a user. You can add either a realtime or a summary alert (daily/weekly).
# _when is required only when the alert type is weekly or daily.
# params
# email, String
# alert_id, String
#
# delete user alert
def delete_alert(email, alert_id)
data = {:email => email, :alert_id => alert_id}
api_delete(:alert, data)
end
# params
# job, String
# options, hash
# report_email, String
# postback_url, String
# binary_key, String
#
# interface for making request to job call
def process_job(job, options = {}, report_email = nil, postback_url = nil, binary_key = nil)
data = options
data['job'] = job
if !report_email.nil?
data['report_email'] = report_email
end
if !postback_url.nil?
data['postback_url'] = postback_url
end
api_post(:job, data, binary_key)
end
# params
# emails, String | Array
# implementation for import_job
def process_import_job(list, emails, report_email = nil, postback_url = nil, options = {})
data = options
data['list'] = list
data['emails'] = Array(emails).join(',')
process_job(:import, data, report_email, postback_url)
end
# implementation for import job using file upload
def process_import_job_from_file(list, file_path, report_email = nil, postback_url = nil, options = {})
data = options
data['list'] = list
data['file'] = file_path
process_job(:import, data, report_email, postback_url, 'file')
end
# implementation for update job using file upload
def process_update_job_from_file(file_path, report_email = nil, postback_url = nil, options = {})
data = options
data['file'] = file_path
process_job(:update, data, report_email, postback_url, 'file')
end
# implementation for purchase import job using file upload
def process_purchase_import_job_from_file(file_path, report_email = nil, postback_url = nil, options = {})
data = options
data['file'] = file_path
process_job(:purchase_import, data, report_email, postback_url, 'file')
end
# implementation for snapshot job
def process_snapshot_job(query = {}, report_email = nil, postback_url = nil, options = {})
data = options
data['query'] = query
process_job(:snapshot, data, report_email, postback_url)
end
# implementation for export list job
def process_export_list_job(list, report_email = nil, postback_url = nil, options = {})
data = options
data['list'] = list
process_job(:export_list_data, data, report_email, postback_url)
end
# get status of a job
def get_job_status(job_id)
api_get(:job, {'job_id' => job_id})
end
# Get user by Sailthru ID
def get_user_by_sid(id, fields = {})
api_get(:user, {'id' => id, 'fields' => fields})
end
# Get user by specified key
def get_user_by_key(id, key, fields = {})
data = {
'id' => id,
'key' => key,
'fields' => fields
}
api_get(:user, data)
end
# Create new user, or update existing user
def save_user(id, options = {})
data = options
data['id'] = id
api_post(:user, data)
end
# params
# Get an existing trigger
def get_triggers
api_get(:trigger, {})
end
# params
# template, String
# trigger_id, String
# Get an existing trigger
def get_trigger_by_template(template, trigger_id = nil)
data = {}
data['template'] = template
if trigger_id != nil then data['trigger_id'] = trigger_id end
api_get(:trigger, data)
end
# params
# event, String
# Get an existing trigger
def get_trigger_by_event(event)
data = {}
data['event'] = event
api_get(:trigger, data)
end
# params
# template, String
# time, String
# time_unit, String
# event, String
# zephyr, String
# Create or update a trigger
def post_template_trigger(template, time, time_unit, event, zephyr)
data = {}
data['template'] = template
data['time'] = time
data['time_unit'] = time_unit
data['event'] = event
data['zephyr'] = zephyr
api_post(:trigger, data)
end
# params
# template, String
# time, String
# time_unit, String
# zephyr, String
# Create or update a trigger
def post_event_trigger(event, time, time_unit, zephyr)
data = {}
data['time'] = time
data['time_unit'] = time_unit
data['event'] = event
data['zephyr'] = zephyr
api_post(:trigger, data)
end
# params
# id, String
# event, String
# options, Hash (Can contain vars, Hash and/or key)
# Notify Sailthru of an Event
def post_event(id, event, options = {})
data = options
data['id'] = id
data['event'] = event
api_post(:event, data)
end
# Perform API GET request
def api_get(action, data)
api_request(action, data, 'GET')
end
# Perform API POST request
def api_post(action, data, binary_key = nil)
api_request(action, data, 'POST', binary_key)
end
#Perform API DELETE request
def api_delete(action, data)
api_request(action, data, 'DELETE')
end
# params
# endpoint, String a e.g. "user" or "send"
# method, String "GET" or "POST"
# returns
# Hash rate info
# Get rate info for a particular endpoint/method, as of the last time a request was sent to the given endpoint/method
# Includes the following keys:
# limit: the per-minute limit for the given endpoint/method
# remaining: the number of allotted requests remaining in the current minute for the given endpoint/method
# reset: unix timestamp of the top of the next minute, when the rate limit will reset
def get_last_rate_limit_info(endpoint, method)
rate_info_key = get_rate_limit_info_key(endpoint, method)
@last_rate_limit_info[rate_info_key]
end
protected
# params:
# action, String
# data, Hash
# request, String "GET" or "POST"
# returns:
# Hash
#
# Perform an API request, using the shared-secret auth hash.
#
def api_request(action, data, request_type, binary_key = nil)
if !binary_key.nil?
binary_key_data = data[binary_key]
data.delete(binary_key)
end
if data[:format].nil? || data[:format] == 'json'
data = prepare_json_payload(data)
else
data[:api_key] = @api_key
data[:format] ||= 'json'
data[:sig] = get_signature_hash(data, @secret)
end
if !binary_key.nil?
data[binary_key] = binary_key_data
end
_result = http_request(action, data, request_type, binary_key)
# NOTE: don't do the unserialize here
if data[:format] == 'json'
begin
unserialized = JSON.parse(_result)
return unserialized ? unserialized : _result
rescue JSON::JSONError => e
return {'error' => e}
end
end
_result
end
# set up our post request
def set_up_post_request(uri, data, headers, binary_key = nil)
if !binary_key.nil?
binary_data = data[binary_key]
if binary_data.is_a?(StringIO)
data[binary_key] = UploadIO.new(
binary_data, "text/plain", "local.path"
)
else
data[binary_key] = UploadIO.new(
File.open(binary_data), "text/plain"
)
end
req = Net::HTTP::Post::Multipart.new(uri.path, data)
else
req = Net::HTTP::Post.new(uri.path, headers)
req.set_form_data(data)
end
req
end
# params:
# uri, String
# data, Hash
# method, String "GET" or "POST"
# returns:
# String, body of response
def http_request(action, data, method = 'POST', binary_key = nil)
data = flatten_nested_hash(data, false)
uri = "#{@api_uri}/#{action}"
if method != 'POST'
uri += "?" + data.map{ |key, value| "#{CGI::escape(key.to_s)}=#{CGI::escape(value.to_s)}" }.join("&")
end
req = nil
headers = {"User-Agent" => "Sailthru API Ruby Client #{Sailthru::VERSION}"}
_uri = URI.parse(uri)
if method == 'POST'
req = set_up_post_request(
_uri, data, headers, binary_key
)
else
request_uri = "#{_uri.path}?#{_uri.query}"
if method == 'DELETE'
req = Net::HTTP::Delete.new(request_uri, headers)
else
req = Net::HTTP::Get.new(request_uri, headers)
end
end
begin
http = Net::HTTP::Proxy(@proxy_host, @proxy_port).new(_uri.host, _uri.port)
if _uri.scheme == 'https'
http.ssl_version = :TLSv1
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_NONE if @verify_ssl != true # some openSSL client doesn't work without doing this
http.ssl_timeout = @opts[:http_ssl_timeout] || 5
end
http.open_timeout = @opts[:http_open_timeout] || 5
http.read_timeout = @opts[:http_read_timeout] || 10
http.close_on_empty_response = @opts[:http_close_on_empty_response] || true
response = http.start do
http.request(req)
end
rescue Timeout::Error, Errno::ETIMEDOUT => e
raise UnavailableError, "Timed out: #{_uri}"
rescue => e
raise ClientError, "Unable to open stream to #{_uri}: #{e.message}"
end
save_rate_limit_info(action, method, response)
response.body || raise(ClientError, "No response received from stream: #{_uri}")
end
def http_multipart_request(uri, data)
Net::HTTP::Post::Multipart.new url.path,
"file" => UploadIO.new(data['file'], "application/octet-stream")
end
def prepare_json_payload(data)
payload = {
:api_key => @api_key,
:format => 'json', #<3 XML
:json => data.to_json
}
payload[:sig] = get_signature_hash(payload, @secret)
payload
end
def save_rate_limit_info(action, method, response)
limit = response['x-rate-limit-limit'].to_i
remaining = response['x-rate-limit-remaining'].to_i
reset = response['x-rate-limit-reset'].to_i
if limit.nil? or remaining.nil? or reset.nil?
return
end
rate_info_key = get_rate_limit_info_key(action, method)
@last_rate_limit_info[rate_info_key] = {
limit: limit,
remaining: remaining,
reset: reset
}
end
def get_rate_limit_info_key(endpoint, method)
:"#{endpoint}_#{method.downcase}"
end
end
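# A minimal usage sketch for the save_alert call documented above (editorial, not
# from the library source); the key, secret, email, template name and "when"
# value are hypothetical placeholders.
require 'sailthru'

client = Sailthru::Client.new('api-key', 'api-secret')
# Realtime alert: the _when argument can be omitted.
client.save_alert('user@example.com', 'realtime', 'price_drop_template')
# Daily summary alert: _when supplies the delivery time.
client.save_alert('user@example.com', 'daily', 'daily_digest_template', '8am')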
|
baroquebobcat/sinatra-twitter-oauth | lib/sinatra-twitter-oauth/helpers.rb | Sinatra::TwitterOAuth.Helpers.redirect_to_twitter_auth_url | ruby | def redirect_to_twitter_auth_url
request_token = get_request_token
session[:request_token] = request_token.token
session[:request_token_secret]= request_token.secret
redirect request_token.authorize_url.gsub('authorize','authenticate')
end | gets the request token and redirects to twitter's OAuth endpoint | train | https://github.com/baroquebobcat/sinatra-twitter-oauth/blob/8e11cc2a223a45b7c09152a492c48387f86cca2f/lib/sinatra-twitter-oauth/helpers.rb#L57-L64 | module Helpers
# The current logged in user
def user
@user
end
# Redirects to login unless there is an authenticated user
def login_required
setup_client
@user = ::TwitterOAuth::User.new(@client, session[:user]) if session[:user]
@rate_limit_status = @client.rate_limit_status
redirect '/login' unless user
end
def setup_client # :nodoc:
@client ||= ::TwitterOAuth::Client.new(
:consumer_secret => options.twitter_oauth_config[:secret],
:consumer_key => options.twitter_oauth_config[:key],
:token => session[:access_token],
:secret => session[:secret_token]
)
end
def get_request_token # :nodoc:
setup_client
begin
@client.authentication_request_token(:oauth_callback=>options.twitter_oauth_config[:callback])
rescue StandardError => e
halt 500,'check your key & secret'
end
end
def get_access_token # :nodoc:
setup_client
begin
@client.authorize(
session[:request_token],
session[:request_token_secret],
:oauth_verifier => params[:oauth_verifier]
)
rescue OAuth::Unauthorized => e
nil
end
end
# gets the request token and redirects to twitter's OAuth endpoint
# attempts to get the access token (MUST be used after the user has been redirected back from Twitter)
def authenticate!
access_token = get_access_token
if @client.authorized?
session[:access_token] = access_token.token
session[:secret_token] = access_token.secret
session[:user] = @client.info
session[:user]
else
nil
end
end
#removes all the session data defined by the extension
def clear_oauth_session
session[:user] = nil
session[:request_token] = nil
session[:request_token_secret] = nil
session[:access_token] = nil
session[:secret_token] = nil
end
end
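# A minimal usage sketch (editorial, not from the gem source) of how the helpers
# above are typically wired into a Sinatra app; the require path, route names and
# redirect targets are assumptions based on the gem's conventions.
require 'sinatra'
require 'sinatra-twitter-oauth'

get '/login' do
  redirect_to_twitter_auth_url # send the visitor to Twitter's OAuth page
end

get '/auth' do # the OAuth callback registered for the app
  user = authenticate!
  user ? redirect('/') : halt(401, 'authentication failed')
end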
|
rmagick/rmagick | lib/rvg/rvg.rb | Magick.RVG.background_image= | ruby | def background_image=(bg_image)
warn 'background_image= has no effect in nested RVG objects' if @nested
raise ArgumentError, "background image must be an Image (got #{bg_image.class})" if bg_image && !bg_image.is_a?(Magick::Image)
@background_image = bg_image
end | Sets an image to use as the canvas background. See background_position= for layout options. | train | https://github.com/rmagick/rmagick/blob/ef6688ed9d76bf123c2ea1a483eff8635051adb7/lib/rvg/rvg.rb#L140-L145 | class RVG
include Stylable
include Transformable
include Stretchable
include Embellishable
include Describable
include Duplicatable
private
# background_fill defaults to 'none'. If background_fill has been set to something
# else, combine it with the background_fill_opacity.
def bgfill
if @background_fill.nil?
color = Magick::Pixel.new(0, 0, 0, Magick::TransparentOpacity)
else
color = @background_fill
color.opacity = (1.0 - @background_fill_opacity) * Magick::TransparentOpacity
end
color
end
def new_canvas
if @background_pattern
canvas = Magick::Image.new(@width, @height, @background_pattern)
elsif @background_image
canvas = if @width != @background_image.columns || @height != @background_image.rows
case @background_position
when :scaled
@background_image.resize(@width, @height)
when :tiled
Magick::Image.new(@width, @height, Magick::TextureFill.new(@background_image))
when :fit
width = @width
height = @height
bgcolor = bgfill
@background_image.change_geometry(Magick::Geometry.new(width, height)) do |new_cols, new_rows|
bg_image = @background_image.resize(new_cols, new_rows)
if bg_image.columns != width || bg_image.rows != height
bg = Magick::Image.new(width, height) { self.background_color = bgcolor }
bg_image = bg.composite!(bg_image, Magick::CenterGravity, Magick::OverCompositeOp)
end
bg_image
end
end
else
@background_image.copy
end
else
bgcolor = bgfill
canvas = Magick::Image.new(Integer(@width), Integer(@height)) { self.background_color = bgcolor }
end
canvas[:desc] = @desc if @desc
canvas[:title] = @title if @title
canvas[:metadata] = @metadata if @metadata
canvas
end
if ENV['debug_prim']
def print_gc(gc)
primitives = gc.inspect.split(/\n/)
indent = 0
primitives.each do |cmd|
indent -= 1 if cmd['pop ']
print((' ' * indent), cmd, "\n")
indent += 1 if cmd['push ']
end
end
end
public
WORD_SEP = / / # Regexp to separate words
# The background image specified by background_image=
attr_reader :background_image
# The background image layout specified by background_position=
attr_reader :background_position
# The background fill color specified by background_fill=
attr_reader :background_fill
# The background fill color opacity specified by background_fill_opacity=
attr_reader :background_fill_opacity
# The image after drawing has completed
attr_reader :canvas
# For embedded RVG objects, the x-axis coordinate of the upper-left corner
attr_reader :x
# For embedded RVG objects, the x-axis coordinate of the upper-left corner
attr_reader :y
attr_reader :width, :height
# Sets an image to use as the canvas background. See background_position= for layout options.
# Sets an object to use to fill the canvas background.
# The object must have a <tt>fill</tt> method. See the <b>Fill Classes</b>
# section in the RMagick doc for more information.
def background_pattern=(filler)
warn 'background_pattern= has no effect in nested RVG objects' if @nested
@background_pattern = filler
end
# How to position the background image on the canvas. One of the following symbols:
# [:scaled] Scale the image to the canvas width and height.
# [:tiled] Tile the image across the canvas.
# [:fit] Scale the image to fit within the canvas while retaining the
# image proportions. Center the image on the canvas. Color any part of
# the canvas not covered by the image with the background color.
def background_position=(pos)
warn 'background_position= has no effect in nested RVG objects' if @nested
bg_pos = pos.to_s.downcase
raise ArgumentError, "background position must be `scaled', `tiled', or `fit' (#{pos} given)" unless %w[scaled tiled fit].include?(bg_pos)
@background_position = bg_pos.to_sym
end
# Sets the canvas background color. Either a Magick::Pixel or a color name.
# The default fill is "none", that is, transparent black.
def background_fill=(color)
warn 'background_fill= has no effect in nested RVG objects' if @nested
if !color.is_a?(Magick::Pixel)
begin
@background_fill = Magick::Pixel.from_color(color)
rescue Magick::ImageMagickError
raise ArgumentError, "unknown color `#{color}'"
rescue TypeError
raise TypeError, "cannot convert #{color.class} into Pixel"
rescue StandardError
raise ArgumentError, "argument must be a color name or a Pixel (got #{color.class})"
end
else
@background_fill = color
end
end
# Opacity of the background fill color, a number between 0.0 (transparent) and
# 1.0 (opaque). The default is 1.0 when the background_fill= attribute has been set.
def background_fill_opacity=(opacity)
warn 'background_fill_opacity= has no effect in nested RVG objects' if @nested
begin
@background_fill_opacity = Float(opacity)
rescue ArgumentError
raise ArgumentError, "background_fill_opacity must be a number between 0 and 1 (#{opacity} given)"
end
end
# Draw a +width+ x +height+ image. The image is specified by calling
# one or more drawing methods on the RVG object.
# You can group the drawing method calls in the optional associated block.
# The +x+ and +y+ arguments have no meaning for the outermost RVG object.
# On nested RVG objects [+x+, +y+] is the coordinate of the upper-left
# corner in the containing canvas on which the nested RVG object is placed.
#
# Drawing occurs on a +canvas+ created by the #draw method. By default the
# canvas is transparent. You can specify a different canvas with the
# #background_fill= or #background_image= methods.
#
# RVG objects are _containers_. That is, styles and transforms defined
# on the object are used by contained objects such as shapes, text, and
# groups unless overridden by an inner container or the object itself.
def initialize(width = nil, height = nil)
super
@width = width
@height = height
@content = Content.new
@canvas = nil
@background_fill = nil
@background_fill_opacity = 1.0 # applies only if background_fill= is used
@background_position = :scaled
@background_pattern, @background_image, @desc, @title, @metadata = nil
@x = 0.0
@y = 0.0
@nested = false
yield(self) if block_given?
end
# Construct a canvas or reuse an existing canvas.
# Execute drawing commands. Return the canvas.
def draw
raise StandardError, 'draw not permitted in nested RVG objects' if @nested
@canvas ||= new_canvas # allow drawing over existing canvas
gc = Utility::GraphicContext.new
add_outermost_primitives(gc)
pp(self) if ENV['debug_rvg']
print_gc(gc) if ENV['debug_prim']
gc.draw(@canvas)
@canvas
end
# Accept #use arguments. Use (x,y) to generate an additional translate.
# Override @width and @height if new values are supplied.
def ref(x, y, rw, rh) #:nodoc:
translate(x, y) if x != 0 || y != 0
@width = rw if rw
@height = rh if rh
end
# Used by Magick::Embellishable.rvg to set non-0 x- and y-coordinates
def corner(x, y) #:nodoc:
@nested = true
@x = Float(x)
@y = Float(y)
translate(@x, @y) if @x != 0.0 || @y != 0.0
end
# Primitives for the outermost RVG object
def add_outermost_primitives(gc) #:nodoc:
add_transform_primitives(gc)
gc.push
add_viewbox_primitives(@width, @height, gc)
add_style_primitives(gc)
@content.each { |element| element.add_primitives(gc) }
gc.pop
self
end
# Primitives for nested RVG objects
def add_primitives(gc) #:nodoc:
raise ArgumentError, 'RVG width or height undefined' if @width.nil? || @height.nil?
return self if @width.zero? || @height.zero?
gc.push
add_outermost_primitives(gc)
gc.pop
end
end # end class RVG
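# A minimal usage sketch for background_image= (editorial, not from the library
# source); the image path, canvas size and drawing calls are hypothetical.
require 'rvg/rvg'

bg  = Magick::Image.read('background.png').first
rvg = Magick::RVG.new(300, 200) do |canvas|
  canvas.background_image    = bg   # must be a Magick::Image
  canvas.background_position = :fit # :scaled, :tiled or :fit
  canvas.circle(50, 150, 100).styles(fill: 'red', fill_opacity: 0.5)
end
rvg.draw.write('out.png')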
|
oleganza/btcruby | lib/btcruby/proof_of_work.rb | BTC.ProofOfWork.bits_from_target | ruby | def bits_from_target(target)
exponent = 3
signed = (target < 0)
target = -target if signed
while target > 0x7fffff
target >>= 8
exponent += 1
end
# The 0x00800000 bit denotes the sign.
# Thus, if it is already set, divide the mantissa by 256 and increase the exponent.
if (target & 0x00800000) > 0
target >>= 8
exponent += 1
end
result = (exponent << 24) + target
result = result | 0x00800000 if signed
result
end | Note on Satoshi Compact format (used for 'bits' value).
The "compact" format is a representation of a whole
number N using an unsigned 32bit number similar to a
floating point format.
The most significant 8 bits are the unsigned exponent of base 256.
This exponent can be thought of as "number of bytes of N".
The lower 23 bits are the mantissa.
Bit number 24 (0x800000) represents the sign of N.
N = (-1^sign) * mantissa * 256^(exponent-3)
Satoshi's original implementation used BN_bn2mpi() and BN_mpi2bn().
MPI uses the most significant bit of the first byte as sign.
Thus 0x1234560000 is compact (0x05123456)
and 0xc0de000000 is compact (0x0600c0de)
(0x05c0de00) would be -0x40de000000
Converts 256-bit integer to 32-bit compact representation. | train | https://github.com/oleganza/btcruby/blob/0aa0231a29dfc3c9f7fc54b39686aed10b6d9808/lib/btcruby/proof_of_work.rb#L32-L49 | module ProofOfWork
extend self
MAX_TARGET_MAINNET = 0x00000000ffff0000000000000000000000000000000000000000000000000000
MAX_TARGET_TESTNET = 0x00000007fff80000000000000000000000000000000000000000000000000000
# Note on Satoshi Compact format (used for 'bits' value).
#
# The "compact" format is a representation of a whole
# number N using an unsigned 32bit number similar to a
# floating point format.
# The most significant 8 bits are the unsigned exponent of base 256.
# This exponent can be thought of as "number of bytes of N".
# The lower 23 bits are the mantissa.
# Bit number 24 (0x800000) represents the sign of N.
# N = (-1^sign) * mantissa * 256^(exponent-3)
#
# Satoshi's original implementation used BN_bn2mpi() and BN_mpi2bn().
# MPI uses the most significant bit of the first byte as sign.
# Thus 0x1234560000 is compact (0x05123456)
# and 0xc0de000000 is compact (0x0600c0de)
# (0x05c0de00) would be -0x40de000000
# Converts 256-bit integer to 32-bit compact representation.
# Converts 32-bit compact representation to a 256-bit integer.
# int32 -> bigint
def target_from_bits(bits)
exponent = ((bits >> 24) & 0xff)
mantissa = bits & 0x7fffff
mantissa *= -1 if (bits & 0x800000) > 0
(mantissa * (256**(exponent-3))).to_i
end
# Computes bits from difficulty.
# Could be inaccurate since difficulty is a limited-precision floating-point number.
# Default max_target is for Bitcoin mainnet.
# float -> int32
def bits_from_difficulty(difficulty, max_target: MAX_TARGET_MAINNET)
bits_from_target(target_from_difficulty(difficulty, max_target: max_target))
end
# Computes difficulty from bits.
# Default max_target is for Bitcoin mainnet.
# int32 -> float
def difficulty_from_bits(bits, max_target: MAX_TARGET_MAINNET)
difficulty_from_target(target_from_bits(bits), max_target: max_target)
end
# Computes target from difficulty.
# Could be inaccurate since difficulty is a limited-precision floating-point number.
# Default max_target is for Bitcoin mainnet.
# float -> bigint
def target_from_difficulty(difficulty, max_target: MAX_TARGET_MAINNET)
(max_target / difficulty).round.to_i
end
# Compute relative difficulty from a given target.
# E.g. returns 2.5 if target is 2.5 times harder to reach than the max_target.
# Default max_target is for Bitcoin mainnet.
# bigint -> float
def difficulty_from_target(target, max_target: MAX_TARGET_MAINNET)
(max_target / target.to_f)
end
# Converts target integer to a binary 32-byte hash.
# bigint -> hash256
def hash_from_target(target)
bytes = []
while target > 0
bytes << (target % 256)
target /= 256
end
BTC::Data.data_from_bytes(bytes).ljust(32, "\x00".b)
end
# Converts 32-byte hash to target big integer (hash is treated as little-endian integer)
# hash256 -> bigint
def target_from_hash(hash)
target = 0
i = 0
hash.each_byte do |byte|
target += byte * (256**i)
i += 1
end
target
end
# TODO: add retargeting calculation routines
# Compute amount of work expressed as a target
# Based on `arith_uint256 GetBlockProof(const CBlockIndex& block)` from Bitcoin Core
# bigint -> bigint
def work_from_target(target)
# We need to compute 2**256 / (target+1), but we can't represent 2**256
# as it's too large for a arith_uint256. However, as 2**256 is at least as large
# as target+1, it is equal to ((2**256 - target - 1) / (target+1)) + 1,
# or ~target / (target+1) + 1.
# In Ruby bigint is signed, so we can't use '~', but we can use 2**256
return ((2**256 - target - 1) / (target + 1)) + 1
end
# hash256 -> bigint
def work_from_hash(hash)
work_from_target(target_from_hash(hash))
end
end # ProofOfWork
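# A worked example for bits_from_target (editorial sketch, not from the library
# source): the Bitcoin mainnet maximum target packs into the well-known compact
# value 0x1d00ffff (exponent 0x1d = 29, mantissa 0x00ffff).
require 'btcruby'

BTC::ProofOfWork.bits_from_target(BTC::ProofOfWork::MAX_TARGET_MAINNET)
# => 0x1d00ffff (486604799)
BTC::ProofOfWork.bits_from_target(0x1234560000) # => 0x05123456, per the note above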
|
tubedude/xirr | lib/xirr/bisection.rb | Xirr.Bisection.xirr | ruby | def xirr(midpoint, options)
# Initial values
left = [BigDecimal.new(-0.99999999, Xirr::PRECISION), cf.irr_guess].min
right = [BigDecimal.new(9.99999999, Xirr::PRECISION), cf.irr_guess + 1].max
@original_right = right
midpoint ||= cf.irr_guess
midpoint, runs = loop_rates(left, midpoint, right, options[:iteration_limit])
get_answer(midpoint, options, runs)
end | Calculates yearly Internal Rate of Return
@return [BigDecimal]
@param midpoint [Float]
An initial guess rate; when given, it overrides {Cashflow#irr_guess} | train | https://github.com/tubedude/xirr/blob/e8488a95b217c463d54a5d311ce02a9474f22f7e/lib/xirr/bisection.rb#L11-L23 | class Bisection
include Base
# Calculates yearly Internal Rate of Return
# @return [BigDecimal]
# @param midpoint [Float]
# An initial guess rate; when given, it overrides {Cashflow#irr_guess}
private
# @param midpoint [BigDecimal]
# @return [Boolean]
# Checks if result is the right limit.
def right_limit_reached?(midpoint)
(@original_right - midpoint).abs < Xirr::EPS
end
# @param left [BigDecimal]
# @param midpoint [BigDecimal]
# @param right [BigDecimal]
# @return [Array]
# Calculates the Bisections
def bisection(left, midpoint, right)
_left = xnpv(left).positive?
_mid = xnpv(midpoint).positive?
if _left && _mid
return left, left, left, true if xnpv(right).positive? # Not Enough Precision in the left to find the IRR
end
if _left == _mid
return midpoint, format_irr(midpoint, right), right, false # Result is to the Right
else
return left, format_irr(left, midpoint), midpoint, false # Result is to the Left
end
end
# @param left [Float]
# @param right [Float]
# @return [Float] IRR of the Cashflow
def format_irr(left, right)
irr = (right+left) / 2
end
def get_answer(midpoint, options, runs)
if runs >= options[:iteration_limit]
if options[:raise_exception]
raise ArgumentError, "Did not converge after #{runs} tries."
else
nil
end
else
midpoint.round Xirr::PRECISION
end
end
def loop_rates(left, midpoint, right, iteration_limit)
runs = 0
while (right - left).abs > Xirr::EPS && runs < iteration_limit do
runs += 1
left, midpoint, right, should_stop = bisection(left, midpoint, right)
break if should_stop
if right_limit_reached?(midpoint)
right *= 2
@original_right *= 2
end
end
return midpoint, runs
end
end
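# A minimal usage sketch (editorial, not from the gem source): Bisection#xirr is
# normally reached through the Cashflow API; the amounts and dates below are
# hypothetical.
require 'date'
require 'xirr'

cf = Xirr::Cashflow.new
cf << Xirr::Transaction.new(-1000, date: Date.new(2014, 1, 1))
cf << Xirr::Transaction.new( 1200, date: Date.new(2015, 1, 1))
cf.xirr # => yearly IRR as a BigDecimal (~0.2 for this flow)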
|
dagrz/nba_stats | lib/nba_stats/stats/draft_combine_spot_shooting.rb | NbaStats.DraftCombineSpotShooting.draft_combine_spot_shooting | ruby | def draft_combine_spot_shooting(
season_year,
league_id=NbaStats::Constants::LEAGUE_ID_NBA
)
NbaStats::Resources::DraftCombineSpotShooting.new(
get(DRAFT_COMBINE_SPOT_SHOOTING_PATH, {
:LeagueID => league_id,
:SeasonYear => season_year
})
)
end | Calls the draftcombinespotshooting API and returns a DraftCombineSpotShooting resource.
@param season_year [String]
@param league_id [String]
@return [NbaStats::Resources::DraftCombineSpotShooting] | train | https://github.com/dagrz/nba_stats/blob/d6fe6cf81f74a2ce7a054aeec5e9db59a6ec42aa/lib/nba_stats/stats/draft_combine_spot_shooting.rb#L15-L25 | module DraftCombineSpotShooting
# The path of the draftcombinespotshooting API
DRAFT_COMBINE_SPOT_SHOOTING_PATH = '/stats/draftcombinespotshooting'
# Calls the draftcombinespotshooting API and returns a DraftCombineSpotShooting resource.
#
# @param season_year [String]
# @param league_id [String]
# @return [NbaStats::Resources::DraftCombineSpotShooting]
end # DraftCombineSpotShooting
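# A minimal usage sketch (editorial, not from the gem source), assuming the gem
# exposes a client class that mixes in this module; the season year is a
# hypothetical placeholder.
require 'nba_stats'

client = NbaStats::Client.new
spot_shooting = client.draft_combine_spot_shooting('2014-15')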
|
oleganza/btcruby | lib/btcruby/proof_of_work.rb | BTC.ProofOfWork.target_from_bits | ruby | def target_from_bits(bits)
exponent = ((bits >> 24) & 0xff)
mantissa = bits & 0x7fffff
mantissa *= -1 if (bits & 0x800000) > 0
(mantissa * (256**(exponent-3))).to_i
end | Converts 32-bit compact representation to a 256-bit integer.
int32 -> bigint | train | https://github.com/oleganza/btcruby/blob/0aa0231a29dfc3c9f7fc54b39686aed10b6d9808/lib/btcruby/proof_of_work.rb#L53-L58 | module ProofOfWork
extend self
MAX_TARGET_MAINNET = 0x00000000ffff0000000000000000000000000000000000000000000000000000
MAX_TARGET_TESTNET = 0x00000007fff80000000000000000000000000000000000000000000000000000
# Note on Satoshi Compact format (used for 'bits' value).
#
# The "compact" format is a representation of a whole
# number N using an unsigned 32bit number similar to a
# floating point format.
# The most significant 8 bits are the unsigned exponent of base 256.
# This exponent can be thought of as "number of bytes of N".
# The lower 23 bits are the mantissa.
# Bit number 24 (0x800000) represents the sign of N.
# N = (-1^sign) * mantissa * 256^(exponent-3)
#
# Satoshi's original implementation used BN_bn2mpi() and BN_mpi2bn().
# MPI uses the most significant bit of the first byte as sign.
# Thus 0x1234560000 is compact (0x05123456)
# and 0xc0de000000 is compact (0x0600c0de)
# (0x05c0de00) would be -0x40de000000
# Converts 256-bit integer to 32-bit compact representation.
def bits_from_target(target)
exponent = 3
signed = (target < 0)
target = -target if signed
while target > 0x7fffff
target >>= 8
exponent += 1
end
# The 0x00800000 bit denotes the sign.
# Thus, if it is already set, divide the mantissa by 256 and increase the exponent.
if (target & 0x00800000) > 0
target >>= 8
exponent += 1
end
result = (exponent << 24) + target
result = result | 0x00800000 if signed
result
end
# Converts 32-bit compact representation to a 256-bit integer.
# int32 -> bigint
# Computes bits from difficulty.
# Could be inaccurate since difficulty is a limited-precision floating-point number.
# Default max_target is for Bitcoin mainnet.
# float -> int32
def bits_from_difficulty(difficulty, max_target: MAX_TARGET_MAINNET)
bits_from_target(target_from_difficulty(difficulty, max_target: max_target))
end
# Computes difficulty from bits.
# Default max_target is for Bitcoin mainnet.
# int32 -> float
def difficulty_from_bits(bits, max_target: MAX_TARGET_MAINNET)
difficulty_from_target(target_from_bits(bits), max_target: max_target)
end
# Computes target from difficulty.
# Could be inaccurate since difficulty is a limited-precision floating-point number.
# Default max_target is for Bitcoin mainnet.
# float -> bigint
def target_from_difficulty(difficulty, max_target: MAX_TARGET_MAINNET)
(max_target / difficulty).round.to_i
end
# Compute relative difficulty from a given target.
# E.g. returns 2.5 if target is 2.5 times harder to reach than the max_target.
# Default max_target is for Bitcoin mainnet.
# bigint -> float
def difficulty_from_target(target, max_target: MAX_TARGET_MAINNET)
(max_target / target.to_f)
end
# Converts target integer to a binary 32-byte hash.
# bigint -> hash256
def hash_from_target(target)
bytes = []
while target > 0
bytes << (target % 256)
target /= 256
end
BTC::Data.data_from_bytes(bytes).ljust(32, "\x00".b)
end
# Converts 32-byte hash to target big integer (hash is treated as little-endian integer)
# hash256 -> bigint
def target_from_hash(hash)
target = 0
i = 0
hash.each_byte do |byte|
target += byte * (256**i)
i += 1
end
target
end
# TODO: add retargeting calculation routines
# Compute amount of work expressed as a target
# Based on `arith_uint256 GetBlockProof(const CBlockIndex& block)` from Bitcoin Core
# bigint -> bigint
def work_from_target(target)
# We need to compute 2**256 / (target+1), but we can't represent 2**256
# as it's too large for a arith_uint256. However, as 2**256 is at least as large
# as target+1, it is equal to ((2**256 - target - 1) / (target+1)) + 1,
# or ~target / (target+1) + 1.
# In Ruby bigint is signed, so we can't use '~', but we can use 2**256
return ((2**256 - target - 1) / (target + 1)) + 1
end
# hash256 -> bigint
def work_from_hash(hash)
work_from_target(target_from_hash(hash))
end
end # ProofOfWork
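# A worked example for target_from_bits (editorial sketch, not from the library
# source): 0x1d00ffff, the genesis-block bits value, expands to the mainnet
# maximum target, i.e. 0xffff * 256**(0x1d - 3).
require 'btcruby'

target = BTC::ProofOfWork.target_from_bits(0x1d00ffff)
# => 0x00000000ffff0000000000000000000000000000000000000000000000000000
target == BTC::ProofOfWork::MAX_TARGET_MAINNET    # => true
BTC::ProofOfWork.difficulty_from_bits(0x1d00ffff) # => 1.0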
|
murb/workbook | lib/workbook/row.rb | Workbook.Row.[]= | ruby | def []= index_or_hash, value
index = index_or_hash
if index_or_hash.is_a? Symbol
index = table_header_keys.index(index_or_hash)
elsif index_or_hash.is_a? String and index_or_hash.match(/^[A-Z]*$/)
# it looks like a column indicator
index = Workbook::Column.alpha_index_to_number_index(index_or_hash)
elsif index_or_hash.is_a? String
symbolized = Workbook::Cell.new(index_or_hash, {row:self}).to_sym
index = table_header_keys.index(symbolized)
end
value_celled = Workbook::Cell.new
if value.is_a? Workbook::Cell
value_celled = value
else
current_cell = self[index]
if current_cell.is_a? Workbook::Cell
value_celled = current_cell
end
value_celled.value=(value)
end
value_celled.row = self
super(index,value_celled)
end | Overrides normal Array's []=-function with support for symbols that identify a column based on the header-values
@example Lookup using fixnum or header value encoded as symbol (strings are converted to symbols)
row[1] #=> <Cell value="a">
row[:a] #=> <Cell value="a">
@param [Fixnum, Symbol, String] index_or_hash that identifies the column
@param [String, Fixnum, NilClass, Date, DateTime, Time, Float] value
@return [Workbook::Cell, nil] | train | https://github.com/murb/workbook/blob/2e12f43c882b7c235455192a2fc48183fe6ec965/lib/workbook/row.rb#L136-L160 | class Row < Array
include Workbook::Modules::Cache
alias_method :compare_without_header, :<=>
attr_accessor :placeholder # The placeholder attribute is used in compares (corresponds to newly created or removed lines, depending which side you're on)
attr_accessor :format
# Initialize a new row
#
# @param [Workbook::Row, Array<Workbook::Cell>, Array] cells list of cells to initialize the row with, default is empty
# @param [Workbook::Table] table a row normally belongs to a table, reference it here
# @param [Hash] options Supported options: parse_cells_on_batch_creation (parse cell values during row-initialization, default: false), cell_parse_options (default {}, see Workbook::Modules::TypeParser)
def initialize cells=[], table=nil, options={}
options=options ? {:parse_cells_on_batch_creation=>false,:cell_parse_options=>{},:clone_cells=>false}.merge(options) : {}
cells = [] if cells==nil
self.table= table
cells.each do |c|
if c.is_a? Workbook::Cell
c = c.clone if options[:clone_cells]
else
c = Workbook::Cell.new(c, {row:self})
c.parse!(options[:cell_parse_options]) if options[:parse_cells_on_batch_creation]
end
push c
end
end
# An internal function used in diffs
#
# @return [Boolean] returns true when this row is not an actual row, but a placeholder row to 'compare' against
def placeholder?
placeholder ? true : false
end
# Returns the table this row belongs to
#
# @return [Workbook::Table] the table this row belongs to
def table
@table if defined?(@table)
end
# Set reference to the table this row belongs to without adding the row to the table
#
# @param [Workbook::Table] t the table this row belongs to
def set_table(t)
@table = t
end
# Set reference to the table this row belongs to and add the row to this table
#
# @param [Workbook::Table] t the table this row belongs to
def table= t
raise ArgumentError, "table should be a Workbook::Table (you passed a #{t.class})" unless t.is_a?(Workbook::Table) or t == nil
if t
@table = t
table.push(self) #unless table.index(self) and self.placeholder?
end
end
# Add cell
# @param [Workbook::Cell, Numeric,String,Time,Date,TrueClass,FalseClass,NilClass] cell or value to add
def push(cell)
cell = Workbook::Cell.new(cell, {row:self}) unless cell.class == Workbook::Cell
super(cell)
end
# Add cell
# @param [Workbook::Cell, Numeric,String,Time,Date,TrueClass,FalseClass,NilClass] cell or value to add
def <<(cell)
cell = Workbook::Cell.new(cell, {row:self}) unless cell.class == Workbook::Cell
super(cell)
end
# plus
# @param [Workbook::Row, Array] row to add
# @return [Workbook::Row] a new row, not linked to the table
def +(row)
rv = super(row)
rv = Workbook::Row.new(rv) unless rv.class == Workbook::Row
return rv
end
# concat
# @param [Workbook::Row, Array] row to add
# @return [self] self
def concat(row)
row = Workbook::Row.new(row) unless row.class == Workbook::Row
super(row)
end
# Overrides normal Array's []-function with support for symbols that identify a column based on the header-values and / or
#
# @example Lookup using fixnum or header value encoded as symbol
# row[1] #=> <Cell value="a">
# row["A"] #=> <Cell value="a">
# row[:a] #=> <Cell value="a">
#
# @param [Fixnum, Symbol, String] index_or_hash that identifies the column (strings are converted to symbols)
# @return [Workbook::Cell, nil]
def [](index_or_hash)
if index_or_hash.is_a? Symbol
rv = nil
begin
rv = to_hash[index_or_hash]
rescue NoMethodError
end
return rv
elsif index_or_hash.is_a? String and index_or_hash.match(/^[A-Z]*$/)
# it looks like a column indicator
return to_a[Workbook::Column.alpha_index_to_number_index(index_or_hash)]
elsif index_or_hash.is_a? String
symbolized = Workbook::Cell.new(index_or_hash, {row:self}).to_sym
self[symbolized]
else
if index_or_hash
return to_a[index_or_hash]
end
end
end
# Overrides normal Array's []=-function with support for symbols that identify a column based on the header-values
#
# @example Lookup using fixnum or header value encoded as symbol (strings are converted to symbols)
# row[1] #=> <Cell value="a">
# row[:a] #=> <Cell value="a">
#
# @param [Fixnum, Symbol, String] index_or_hash that identifies the column
# @param [String, Fixnum, NilClass, Date, DateTime, Time, Float] value
# @return [Workbook::Cell, nil]
# Returns an array of cells, allowing you to find cells by a given background color, normally a string containing a hex value
#
# @param [String] color a CSS-style hex-string
# @param [Hash] options Option :hash_keys (default true) returns row as an array of symbols
# @return [Array<Symbol>, Workbook::Row<Workbook::Cell>]
def find_cells_by_background_color color=:any, options={}
options = {:hash_keys=>true}.merge(options)
cells = self.collect {|c| c if c.format.has_background_color?(color) }.compact
r = Row.new cells
options[:hash_keys] ? r.to_symbols : r
end
# Returns true when the row belongs to a table and it is the header row (typically the first row)
#
# @return [Boolean]
def header?
table != nil and self.object_id == table_header.object_id
end
# Is this the first row in the table
#
# @return [Boolean, NilClass] returns nil if it doesn't belong to a table, false when it isn't the first row of a table and true when it is.
def first?
table != nil and self.object_id == table.first.object_id
end
# Returns true when all the cells in the row have values whose to_s value equals an empty string
#
# @return [Boolean]
def no_values?
all? {|c| c.value.to_s == ''}
end
# Converts a row to an array of symbol representations of the row content, see also: Workbook::Cell#to_sym
# @return [Array<Symbol>] returns row as an array of symbols
def to_symbols
fetch_cache(:to_symbols){
collect{|c| c.to_sym}
}
end
# Converts the row to an array of Workbook::Cell's
# @return [Array<Workbook::Cell>] returns the row as an array of cells
def to_a
self.collect{|c| c}
end
def table_header
table.header
end
def table_header_keys
table_header.to_symbols
end
# Returns a hash representation of this row
#
# @return [Hash]
def to_hash
keys = table_header_keys
values = self
hash = {}
keys.each_with_index {|k,i| hash[k]=values[i]}
return hash
end
# Quick accessor to the book's template, if it exists
#
# @return [Workbook::Template]
def template
table.template if table
end
# Returns a hash representation of this row
#
# it differs from #to_hash as it doesn't contain the Workbook's Workbook::Cell-objects,
# but the actual values contained in these cells
#
# @return [Hash]
def to_hash_with_values
keys = table_header_keys
values = self
@hash_with_values = {}
keys.each_with_index {|k,i| v=values[i]; v=v.value if v; @hash_with_values[k]=v}
return @hash_with_values
end
# Compares one row with another
#
# @param [Workbook::Row] other row to compare against
# @return [Workbook::Row] a row with the diff result.
def <=> other
a = self.header? ? 0 : 1
b = other.header? ? 0 : 1
return (a <=> b) if (a==0 or b==0)
compare_without_header other
end
# The first cell of the row is considered to be the key
#
# @return [Workbook::Cell] the key cell
def key
first
end
# Compact detaches the row from the table
def compact
r = self.clone
r = r.collect{|c| c unless c.nil?}.compact
end
# clone the row together with its cells
#
# @return [Workbook::Row] a cloned copy of self with cells
def clone
Workbook::Row.new(self, nil, {:clone_cells=>true})
end
# remove all the trailing nil-cells (returning a trimmed clone)
#
# @param [Integer] desired_length of the new row
# @return [Workbook::Row] a trimmed clone of the array
def trim(desired_length=nil)
self.clone.trim!(desired_length)
end
# remove all the trailing nil-cells (returning a trimmed self)
#
# @param [Integer] desired_length of the new row
# @return [Workbook::Row] self
def trim!(desired_length=nil)
self_count = self.count-1
self.count.times do |index|
index = self_count - index
if desired_length and index < desired_length
break
elsif desired_length and index >= desired_length
self.delete_at(index)
elsif self[index].nil?
self.delete_at(index)
else
break
end
end
(desired_length - self.count).times{|a| self << (Workbook::Cell.new(nil))} if desired_length and (desired_length - self.count) > 0
self
end
end
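# A minimal usage sketch for Row#[]= (editorial, not from the gem source),
# assuming a table built from nested arrays whose first row is the header;
# the values are hypothetical.
require 'workbook'

table = Workbook::Table.new([%w[name price], ['apple', 1]])
row   = table[1]
row[:price] = 2      # lookup by header symbol
row['A']    = 'pear' # lookup by spreadsheet-style column letter
row[0]      = 'plum' # plain integer index still works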
|
rmagick/rmagick | lib/rmagick_internal.rb | Magick.Draw.pattern | ruby | def pattern(name, x, y, width, height)
push('defs')
push("pattern #{name} #{x} #{y} #{width} #{height}")
push('graphic-context')
yield
ensure
pop('graphic-context')
pop('pattern')
pop('defs')
end | Define a pattern. In the block, call primitive methods to
draw the pattern. Reference the pattern by using its name
as the argument to the 'fill' or 'stroke' methods | train | https://github.com/rmagick/rmagick/blob/ef6688ed9d76bf123c2ea1a483eff8635051adb7/lib/rmagick_internal.rb#L435-L444 | class Draw
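# A minimal usage sketch for the pattern method documented above (editorial, not
# from the library source); sizes, colors and the image dimensions are
# hypothetical.
#
#   gc = Magick::Draw.new
#   gc.pattern('checker', 0, 0, 20, 20) do
#     gc.fill('gray')
#     gc.rectangle(0, 0, 9, 9)
#     gc.rectangle(10, 10, 19, 19)
#   end
#   gc.fill('checker')
#   gc.rectangle(0, 0, 199, 199)
#   img = Magick::Image.new(200, 200)
#   gc.draw(img)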
# These hashes are used to map Magick constant
# values to the strings used in the primitives.
ALIGN_TYPE_NAMES = {
LeftAlign.to_i => 'left',
RightAlign.to_i => 'right',
CenterAlign.to_i => 'center'
}.freeze
ANCHOR_TYPE_NAMES = {
StartAnchor.to_i => 'start',
MiddleAnchor.to_i => 'middle',
EndAnchor.to_i => 'end'
}.freeze
DECORATION_TYPE_NAMES = {
NoDecoration.to_i => 'none',
UnderlineDecoration.to_i => 'underline',
OverlineDecoration.to_i => 'overline',
LineThroughDecoration.to_i => 'line-through'
}.freeze
FONT_WEIGHT_NAMES = {
AnyWeight.to_i => 'all',
NormalWeight.to_i => 'normal',
BoldWeight.to_i => 'bold',
BolderWeight.to_i => 'bolder',
LighterWeight.to_i => 'lighter'
}.freeze
GRAVITY_NAMES = {
NorthWestGravity.to_i => 'northwest',
NorthGravity.to_i => 'north',
NorthEastGravity.to_i => 'northeast',
WestGravity.to_i => 'west',
CenterGravity.to_i => 'center',
EastGravity.to_i => 'east',
SouthWestGravity.to_i => 'southwest',
SouthGravity.to_i => 'south',
SouthEastGravity.to_i => 'southeast'
}.freeze
PAINT_METHOD_NAMES = {
PointMethod.to_i => 'point',
ReplaceMethod.to_i => 'replace',
FloodfillMethod.to_i => 'floodfill',
FillToBorderMethod.to_i => 'filltoborder',
ResetMethod.to_i => 'reset'
}.freeze
STRETCH_TYPE_NAMES = {
NormalStretch.to_i => 'normal',
UltraCondensedStretch.to_i => 'ultra-condensed',
ExtraCondensedStretch.to_i => 'extra-condensed',
CondensedStretch.to_i => 'condensed',
SemiCondensedStretch.to_i => 'semi-condensed',
SemiExpandedStretch.to_i => 'semi-expanded',
ExpandedStretch.to_i => 'expanded',
ExtraExpandedStretch.to_i => 'extra-expanded',
UltraExpandedStretch.to_i => 'ultra-expanded',
AnyStretch.to_i => 'all'
}.freeze
STYLE_TYPE_NAMES = {
NormalStyle.to_i => 'normal',
ItalicStyle.to_i => 'italic',
ObliqueStyle.to_i => 'oblique',
AnyStyle.to_i => 'all'
}.freeze
private
def enquote(str)
if str.length > 2 && /\A(?:\"[^\"]+\"|\'[^\']+\'|\{[^\}]+\})\z/.match(str)
str
else
'"' + str + '"'
end
end
public
# Apply coordinate transformations to support scaling (s), rotation (r),
# and translation (t). Angles are specified in radians.
def affine(sx, rx, ry, sy, tx, ty)
primitive 'affine ' + format('%g,%g,%g,%g,%g,%g', sx, rx, ry, sy, tx, ty)
end
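# Illustrative sketch (not from the original source): scale by 2 in both
# directions with no rotation, shifted 10 pixels right and down; gc is
# assumed to be a Magick::Draw instance:
#
#   gc.affine(2, 0, 0, 2, 10, 10)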
# Draw an arc.
def arc(start_x, start_y, end_x, end_y, start_degrees, end_degrees)
primitive 'arc ' + format('%g,%g %g,%g %g,%g',
start_x, start_y, end_x, end_y, start_degrees, end_degrees)
end
# Draw a bezier curve.
def bezier(*points)
if points.length.zero?
Kernel.raise ArgumentError, 'no points specified'
elsif points.length.odd?
Kernel.raise ArgumentError, 'odd number of arguments specified'
end
primitive 'bezier ' + points.join(',')
end
# Draw a circle
def circle(origin_x, origin_y, perim_x, perim_y)
primitive 'circle ' + format('%g,%g %g,%g', origin_x, origin_y, perim_x, perim_y)
end
# Invoke a clip-path defined by def_clip_path.
def clip_path(name)
primitive "clip-path #{name}"
end
# Define the clipping rule.
def clip_rule(rule)
Kernel.raise ArgumentError, "Unknown clipping rule #{rule}" unless %w[evenodd nonzero].include?(rule.downcase)
primitive "clip-rule #{rule}"
end
# Define the clip units
def clip_units(unit)
Kernel.raise ArgumentError, "Unknown clip unit #{unit}" unless %w[userspace userspaceonuse objectboundingbox].include?(unit.downcase)
primitive "clip-units #{unit}"
end
# Set color in image according to specified colorization rule. Rule is one of
# point, replace, floodfill, filltoborder,reset
def color(x, y, method)
Kernel.raise ArgumentError, "Unknown PaintMethod: #{method}" unless PAINT_METHOD_NAMES.key?(method.to_i)
primitive "color #{x},#{y},#{PAINT_METHOD_NAMES[method.to_i]}"
end
# Specify EITHER the text decoration (none, underline, overline,
# line-through) OR the text solid background color (any color name or spec)
def decorate(decoration)
if DECORATION_TYPE_NAMES.key?(decoration.to_i)
primitive "decorate #{DECORATION_TYPE_NAMES[decoration.to_i]}"
else
primitive "decorate #{enquote(decoration)}"
end
end
# Define a clip-path. A clip-path is a sequence of primitives
# bracketed by the "push clip-path <name>" and "pop clip-path"
# primitives. Upon advice from the IM guys, we also bracket
# the clip-path primitives with "push(pop) defs" and "push
# (pop) graphic-context".
def define_clip_path(name)
push('defs')
push("clip-path \"#{name}\"")
push('graphic-context')
yield
ensure
pop('graphic-context')
pop('clip-path')
pop('defs')
end
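# Illustrative sketch (not from the original source); gc is assumed to be a
# Magick::Draw instance:
#
#   gc.define_clip_path('rounded') { gc.roundrectangle(0, 0, 100, 100, 10, 10) }
#   gc.clip_path('rounded')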
# Draw an ellipse
def ellipse(origin_x, origin_y, width, height, arc_start, arc_end)
primitive 'ellipse ' + format('%g,%g %g,%g %g,%g',
origin_x, origin_y, width, height, arc_start, arc_end)
end
# Let anything through, but the only defined argument
# is "UTF-8". All others are apparently ignored.
def encoding(encoding)
primitive "encoding #{encoding}"
end
# Specify object fill, a color name or pattern name
def fill(colorspec)
primitive "fill #{enquote(colorspec)}"
end
alias fill_color fill
alias fill_pattern fill
# Specify fill opacity (use "xx%" to indicate percentage)
def fill_opacity(opacity)
primitive "fill-opacity #{opacity}"
end
def fill_rule(rule)
Kernel.raise ArgumentError, "Unknown fill rule #{rule}" unless %w[evenodd nonzero].include?(rule.downcase)
primitive "fill-rule #{rule}"
end
# Specify text drawing font
def font(name)
primitive "font \'#{name}\'"
end
def font_family(name)
primitive "font-family \'#{name}\'"
end
def font_stretch(stretch)
Kernel.raise ArgumentError, 'Unknown stretch type' unless STRETCH_TYPE_NAMES.key?(stretch.to_i)
primitive "font-stretch #{STRETCH_TYPE_NAMES[stretch.to_i]}"
end
def font_style(style)
Kernel.raise ArgumentError, 'Unknown style type' unless STYLE_TYPE_NAMES.key?(style.to_i)
primitive "font-style #{STYLE_TYPE_NAMES[style.to_i]}"
end
# The font weight argument can be either a font weight
# constant or [100,200,...,900]
def font_weight(weight)
if FONT_WEIGHT_NAMES.key?(weight.to_i)
primitive "font-weight #{FONT_WEIGHT_NAMES[weight.to_i]}"
else
primitive "font-weight #{weight}"
end
end
# Specify the text positioning gravity, one of:
# NorthWest, North, NorthEast, West, Center, East, SouthWest, South, SouthEast
def gravity(grav)
Kernel.raise ArgumentError, 'Unknown text positioning gravity' unless GRAVITY_NAMES.key?(grav.to_i)
primitive "gravity #{GRAVITY_NAMES[grav.to_i]}"
end
# IM 6.5.5-8 and later
def interline_spacing(space)
begin
Float(space)
rescue ArgumentError
Kernel.raise ArgumentError, 'invalid value for interline_spacing'
rescue TypeError
Kernel.raise TypeError, "can't convert #{space.class} into Float"
end
primitive "interline-spacing #{space}"
end
# IM 6.4.8-3 and later
def interword_spacing(space)
begin
Float(space)
rescue ArgumentError
Kernel.raise ArgumentError, 'invalid value for interword_spacing'
rescue TypeError
Kernel.raise TypeError, "can't convert #{space.class} into Float"
end
primitive "interword-spacing #{space}"
end
# IM 6.4.8-3 and later
def kerning(space)
begin
Float(space)
rescue ArgumentError
Kernel.raise ArgumentError, 'invalid value for kerning'
rescue TypeError
Kernel.raise TypeError, "can't convert #{space.class} into Float"
end
primitive "kerning #{space}"
end
# Draw a line
def line(start_x, start_y, end_x, end_y)
primitive 'line ' + format('%g,%g %g,%g', start_x, start_y, end_x, end_y)
end
# Set matte (make transparent) in image according to the specified
# colorization rule
def matte(x, y, method)
Kernel.raise ArgumentError, 'Unknown paint method' unless PAINT_METHOD_NAMES.key?(method.to_i)
primitive "matte #{x},#{y} #{PAINT_METHOD_NAMES[method.to_i]}"
end
# Specify drawing fill and stroke opacities. If the value is a string
# ending with a %, the number will be multiplied by 0.01.
def opacity(opacity)
if opacity.is_a?(Numeric)
Kernel.raise ArgumentError, 'opacity must be >= 0 and <= 1.0' if opacity < 0 || opacity > 1.0
end
primitive "opacity #{opacity}"
end
# Draw using SVG-compatible path drawing commands. Note that the
# primitive requires that the commands be surrounded by quotes or
# apostrophes. Here we simply use apostrophes.
def path(cmds)
primitive "path '" + cmds + "'"
end
# Define a pattern. In the block, call primitive methods to
# draw the pattern. Reference the pattern by using its name
# as the argument to the 'fill' or 'stroke' methods
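# Illustrative sketch (not from the original source); gc is assumed to be a
# Magick::Draw instance:
#
#   gc.pattern('checker', 0, 0, 16, 16) do
#     gc.fill('gray75')
#     gc.rectangle(0, 0, 8, 8)
#     gc.rectangle(8, 8, 16, 16)
#   end
#   gc.fill('checker') # reference the pattern by its name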
# Set point to fill color.
def point(x, y)
primitive "point #{x},#{y}"
end
# Specify the font size in points. Yes, the primitive is "font-size" but
# in other places this value is called the "pointsize". Give it both names.
def pointsize(points)
primitive "font-size #{points}"
end
alias font_size pointsize
# Draw a polygon
def polygon(*points)
if points.length.zero?
Kernel.raise ArgumentError, 'no points specified'
elsif points.length.odd?
Kernel.raise ArgumentError, 'odd number of points specified'
end
primitive 'polygon ' + points.join(',')
end
# Draw a polyline
def polyline(*points)
if points.length.zero?
Kernel.raise ArgumentError, 'no points specified'
elsif points.length.odd?
Kernel.raise ArgumentError, 'odd number of points specified'
end
primitive 'polyline ' + points.join(',')
end
# Return to the previously-saved set of whatever
# pop('graphic-context') (the default if no arguments)
# pop('defs')
# pop('gradient')
# pop('pattern')
def pop(*what)
if what.length.zero?
primitive 'pop graphic-context'
else
# to_s allows a Symbol to be used instead of a String
primitive 'pop ' + what.map(&:to_s).join(' ')
end
end
# Push the current set of drawing options. Also you can use
# push('graphic-context') (the default if no arguments)
# push('defs')
# push('gradient')
# push('pattern')
def push(*what)
if what.length.zero?
primitive 'push graphic-context'
else
# to_s allows a Symbol to be used instead of a String
primitive 'push ' + what.map(&:to_s).join(' ')
end
end
# Draw a rectangle
def rectangle(upper_left_x, upper_left_y, lower_right_x, lower_right_y)
primitive 'rectangle ' + format('%g,%g %g,%g',
upper_left_x, upper_left_y, lower_right_x, lower_right_y)
end
# Specify coordinate space rotation. "angle" is measured in degrees
def rotate(angle)
primitive "rotate #{angle}"
end
# Draw a rectangle with rounded corners
def roundrectangle(center_x, center_y, width, height, corner_width, corner_height)
primitive 'roundrectangle ' + format('%g,%g,%g,%g,%g,%g',
center_x, center_y, width, height, corner_width, corner_height)
end
# Specify scaling to be applied to coordinate space on subsequent drawing commands.
def scale(x, y)
primitive "scale #{x},#{y}"
end
def skewx(angle)
primitive "skewX #{angle}"
end
def skewy(angle)
primitive "skewY #{angle}"
end
# Specify the object stroke, a color name or pattern name.
def stroke(colorspec)
primitive "stroke #{enquote(colorspec)}"
end
alias stroke_color stroke
alias stroke_pattern stroke
# Specify if stroke should be antialiased or not
def stroke_antialias(bool)
bool = bool ? '1' : '0'
primitive "stroke-antialias #{bool}"
end
# Specify a stroke dash pattern
def stroke_dasharray(*list)
if list.length.zero?
primitive 'stroke-dasharray none'
else
list.each do |x|
Kernel.raise ArgumentError, "dash array elements must be > 0 (#{x} given)" if x <= 0
end
primitive "stroke-dasharray #{list.join(',')}"
end
end
# Specify the initial offset in the dash pattern
def stroke_dashoffset(value = 0)
primitive "stroke-dashoffset #{value}"
end
def stroke_linecap(value)
Kernel.raise ArgumentError, "Unknown linecap type: #{value}" unless %w[butt round square].include?(value.downcase)
primitive "stroke-linecap #{value}"
end
def stroke_linejoin(value)
Kernel.raise ArgumentError, "Unknown linejoin type: #{value}" unless %w[round miter bevel].include?(value.downcase)
primitive "stroke-linejoin #{value}"
end
def stroke_miterlimit(value)
Kernel.raise ArgumentError, 'miterlimit must be >= 1' if value < 1
primitive "stroke-miterlimit #{value}"
end
# Specify opacity of stroke drawing color
# (use "xx%" to indicate percentage)
def stroke_opacity(value)
primitive "stroke-opacity #{value}"
end
# Specify stroke (outline) width in pixels.
def stroke_width(pixels)
primitive "stroke-width #{pixels}"
end
# Draw text at position x,y. Add quotes to text that is not already quoted.
def text(x, y, text)
Kernel.raise ArgumentError, 'missing text argument' if text.to_s.empty?
if text.length > 2 && /\A(?:\"[^\"]+\"|\'[^\']+\'|\{[^\}]+\})\z/.match(text)
# text already quoted
elsif !text['\'']
text = '\'' + text + '\''
elsif !text['"']
text = '"' + text + '"'
elsif !(text['{'] || text['}'])
text = '{' + text + '}'
else
# escape existing braces, surround with braces
text = '{' + text.gsub(/[}]/) { |b| '\\' + b } + '}'
end
primitive "text #{x},#{y} #{text}"
end
# Specify text alignment relative to a given point
def text_align(alignment)
Kernel.raise ArgumentError, "Unknown alignment constant: #{alignment}" unless ALIGN_TYPE_NAMES.key?(alignment.to_i)
primitive "text-align #{ALIGN_TYPE_NAMES[alignment.to_i]}"
end
# SVG-compatible version of text_align
def text_anchor(anchor)
Kernel.raise ArgumentError, "Unknown anchor constant: #{anchor}" unless ANCHOR_TYPE_NAMES.key?(anchor.to_i)
primitive "text-anchor #{ANCHOR_TYPE_NAMES[anchor.to_i]}"
end
# Specify if rendered text is to be antialiased.
def text_antialias(boolean)
boolean = boolean ? '1' : '0'
primitive "text-antialias #{boolean}"
end
# Specify color underneath text
def text_undercolor(color)
primitive "text-undercolor #{enquote(color)}"
end
# Specify center of coordinate space to use for subsequent drawing
# commands.
def translate(x, y)
primitive "translate #{x},#{y}"
end
end # class Magick::Draw
|
sup-heliotrope/sup | lib/sup/interactive_lock.rb | Redwood.InteractiveLock.lock_interactively | ruby | def lock_interactively stream=$stderr
begin
Index.lock
rescue Index::LockError => e
begin
Process.kill 0, e.pid.to_i # 0 signal test the existence of PID
stream.puts <<EOS
Error: the index is locked by another process! User '#{e.user}' on
host '#{e.host}' is running #{e.pname} with pid #{e.pid}.
The process was alive as of at least #{time_ago_in_words e.mtime} ago.
EOS
stream.print "Should I ask that process to kill itself (y/n)? "
stream.flush
if $stdin.gets =~ /^\s*y(es)?\s*$/i
Process.kill "TERM", e.pid.to_i
sleep DELAY
stream.puts "Let's try that again."
begin
Index.lock
rescue Index::LockError => e
stream.puts "I couldn't lock the index. The lockfile might just be stale."
stream.print "Should I just remove it and continue? (y/n) "
stream.flush
if $stdin.gets =~ /^\s*y(es)?\s*$/i
begin
FileUtils.rm e.path
rescue Errno::ENOENT
stream.puts "The lockfile doesn't exists. We continue."
end
stream.puts "Let's try that one more time."
begin
Index.lock
rescue Index::LockError => e
stream.puts "I couldn't unlock the index."
return false
end
return true
end
end
end
rescue Errno::ESRCH # no such process
stream.puts "I couldn't lock the index. The lockfile might just be stale."
begin
FileUtils.rm e.path
rescue Errno::ENOENT
stream.puts "The lockfile doesn't exists. We continue."
end
stream.puts "Let's try that one more time."
begin
sleep DELAY
Index.lock
rescue Index::LockError => e
stream.puts "I couldn't unlock the index."
return false
end
return true
end
stream.puts "Sorry, couldn't unlock the index."
return false
end
return true
end | seconds | train | https://github.com/sup-heliotrope/sup/blob/36f95462e3014c354c577d63a78ba030c4b84474/lib/sup/interactive_lock.rb#L24-L86 | module InteractiveLock
def pluralize number_of, kind; "#{number_of} #{kind}" + (number_of == 1 ? "" : "s") end
def time_ago_in_words time
secs = (Time.now - time).to_i
mins = secs / 60
time = if mins == 0
pluralize secs, "second"
else
pluralize mins, "minute"
end
end
DELAY = 5 # seconds
end
|
amatsuda/rfd | lib/rfd.rb | Rfd.Controller.view | ruby | def view
pager = ENV['PAGER'] || 'less'
execute_external_command do
unless in_zip?
system %Q[#{pager} "#{current_item.path}"]
else
begin
tmpdir, tmpfile_name = nil
Zip::File.open(current_zip) do |zip|
tmpdir = Dir.mktmpdir
FileUtils.mkdir_p File.join(tmpdir, File.dirname(current_item.name))
tmpfile_name = File.join(tmpdir, current_item.name)
File.open(tmpfile_name, 'w') {|f| f.puts zip.file.read(current_item.name)}
end
system %Q[#{pager} "#{tmpfile_name}"]
ensure
FileUtils.remove_entry_secure tmpdir if tmpdir
end
end
end
end | Open current file or directory with the viewer. | train | https://github.com/amatsuda/rfd/blob/403c0bc0ff0a9da1d21220b479d5a42008512b78/lib/rfd.rb#L708-L728 | class Controller
include Rfd::Commands
attr_reader :header_l, :header_r, :main, :command_line, :items, :displayed_items, :current_row, :current_page, :current_dir, :current_zip
# :nodoc:
def initialize
@main = MainWindow.new
@header_l = HeaderLeftWindow.new
@header_r = HeaderRightWindow.new
@command_line = CommandLineWindow.new
@debug = DebugWindow.new if ENV['DEBUG']
@direction, @dir_history, @last_command, @times, @yanked_items = nil, [], nil, nil, nil
end
# The main loop.
def run
loop do
begin
number_pressed = false
ret = case (c = Curses.getch)
when 10, 13 # enter, return
enter
when 27 # ESC
q
when ' ' # space
space
when 127 # DEL
del
when Curses::KEY_DOWN
j
when Curses::KEY_UP
k
when Curses::KEY_LEFT
h
when Curses::KEY_RIGHT
l
when Curses::KEY_CTRL_A..Curses::KEY_CTRL_Z
chr = ((c - 1 + 65) ^ 0b0100000).chr
public_send "ctrl_#{chr}" if respond_to?("ctrl_#{chr}")
when ?0..?9
public_send c
number_pressed = true
when ?!..?~
if respond_to? c
public_send c
else
debug "key: #{c}" if ENV['DEBUG']
end
when Curses::KEY_MOUSE
if (mouse_event = Curses.getmouse)
case mouse_event.bstate
when Curses::BUTTON1_CLICKED
click y: mouse_event.y, x: mouse_event.x
when Curses::BUTTON1_DOUBLE_CLICKED
double_click y: mouse_event.y, x: mouse_event.x
end
end
else
debug "key: #{c}" if ENV['DEBUG']
end
Curses.doupdate if ret
@times = nil unless number_pressed
rescue StopIteration
raise
rescue => e
command_line.show_error e.to_s
raise if ENV['DEBUG']
end
end
ensure
Curses.close_screen
end
# Change the number of columns in the main window.
def spawn_panes(num)
main.number_of_panes = num
@current_row = @current_page = 0
end
# Number of times to repeat the next command.
def times
(@times || 1).to_i
end
# The file or directory on which the cursor is on.
def current_item
items[current_row]
end
# * marked files and directories.
def marked_items
items.select(&:marked?)
end
# Marked files and directories or Array(the current file or directory).
#
# . and .. will not be included.
def selected_items
((m = marked_items).any? ? m : Array(current_item)).reject {|i| %w(. ..).include? i.name}
end
# Move the cursor to specified row.
#
# The main window and the headers will be updated reflecting the displayed files and directories.
# The row number can be out of range of the current page.
def move_cursor(row = nil)
if row
if (prev_item = items[current_row])
main.draw_item prev_item
end
page = row / max_items
switch_page page if page != current_page
main.activate_pane row / maxy
@current_row = row
else
@current_row = 0
end
item = items[current_row]
main.draw_item item, current: true
main.display current_page
header_l.draw_current_file_info item
@current_row
end
# Change the current directory.
def cd(dir = '~', pushd: true)
dir = load_item path: expand_path(dir) unless dir.is_a? Item
unless dir.zip?
Dir.chdir dir
@current_zip = nil
else
@current_zip = dir
end
@dir_history << current_dir if current_dir && pushd
@current_dir, @current_page, @current_row = dir, 0, nil
main.activate_pane 0
ls
@current_dir
end
# cd to the previous directory.
def popd
cd @dir_history.pop, pushd: false if @dir_history.any?
end
# Fetch files from current directory.
# Then update each windows reflecting the newest information.
def ls
fetch_items_from_filesystem_or_zip
sort_items_according_to_current_direction
@current_page ||= 0
draw_items
move_cursor (current_row ? [current_row, items.size - 1].min : nil)
draw_marked_items
draw_total_items
true
end
# Sort the whole files and directories in the current directory, then refresh the screen.
#
# ==== Parameters
# * +direction+ - Sort order in a String.
# nil : order by name
# r : reverse order by name
# s, S : order by file size
# sr, Sr: reverse order by file size
# t : order by mtime
# tr : reverse order by mtime
# c : order by ctime
# cr : reverse order by ctime
# u : order by atime
# ur : reverse order by atime
# e : order by extname
# er : reverse order by extname
def sort(direction = nil)
@direction, @current_page = direction, 0
sort_items_according_to_current_direction
switch_page 0
move_cursor 0
end
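# Illustrative sketch (not from the original source): from rfd's command
# line the direction arrives as the argument string, e.g.
#
#   :sort S    # largest files first
#   :sort tr   # oldest files first (reverse mtime order)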
# Change the file permission of the selected files and directories.
#
# ==== Parameters
# * +mode+ - Unix chmod string (e.g. +w, g-r, 755, 0644)
def chmod(mode = nil)
return unless mode
begin
Integer mode
mode = Integer mode.size == 3 ? "0#{mode}" : mode
rescue ArgumentError
end
FileUtils.chmod mode, selected_items.map(&:path)
ls
end
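# Illustrative sketch (not from the original source):
#
#   :chmod 644    # numeric modes are normalized to an octal Integer (0644)
#   :chmod g+w    # other mode strings are handed to FileUtils.chmod as-is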
# Change the file owner of the selected files and directories.
#
# ==== Parameters
# * +user_and_group+ - user name and group name separated by : (e.g. alice, nobody:nobody, :admin)
def chown(user_and_group)
return unless user_and_group
user, group = user_and_group.split(':').map {|s| s == '' ? nil : s}
FileUtils.chown user, group, selected_items.map(&:path)
ls
end
# Fetch files from current directory or current .zip file.
def fetch_items_from_filesystem_or_zip
unless in_zip?
@items = Dir.foreach(current_dir).map {|fn|
load_item dir: current_dir, name: fn
}.to_a.partition {|i| %w(. ..).include? i.name}.flatten
else
@items = [load_item(dir: current_dir, name: '.', stat: File.stat(current_dir)),
load_item(dir: current_dir, name: '..', stat: File.stat(File.dirname(current_dir)))]
zf = Zip::File.new current_dir
zf.each {|entry|
next if entry.name_is_directory?
stat = zf.file.stat entry.name
@items << load_item(dir: current_dir, name: entry.name, stat: stat)
}
end
end
# Focus at the first file or directory of which name starts with the given String.
def find(str)
index = items.index {|i| i.index > current_row && i.name.start_with?(str)} || items.index {|i| i.name.start_with? str}
move_cursor index if index
end
# Focus at the last file or directory of which name starts with the given String.
def find_reverse(str)
index = items.reverse.index {|i| i.index < current_row && i.name.start_with?(str)} || items.reverse.index {|i| i.name.start_with? str}
move_cursor items.size - index - 1 if index
end
# Height of the currently active pane.
def maxy
main.maxy
end
# Number of files or directories that the current main window can show in a page.
def max_items
main.max_items
end
# Update the main window with the loaded files and directories. Also update the header.
def draw_items
main.newpad items
@displayed_items = items[current_page * max_items, max_items]
main.display current_page
header_l.draw_path_and_page_number path: current_dir.path, current: current_page + 1, total: total_pages
end
# Sort the loaded files and directories in already given sort order.
def sort_items_according_to_current_direction
case @direction
when nil
@items = items.shift(2) + items.partition(&:directory?).flat_map(&:sort)
when 'r'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort.reverse}
when 'S', 's'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort_by {|i| -i.size}}
when 'Sr', 'sr'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort_by(&:size)}
when 't'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort {|x, y| y.mtime <=> x.mtime}}
when 'tr'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort_by(&:mtime)}
when 'c'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort {|x, y| y.ctime <=> x.ctime}}
when 'cr'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort_by(&:ctime)}
when 'u'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort {|x, y| y.atime <=> x.atime}}
when 'ur'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort_by(&:atime)}
when 'e'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort {|x, y| y.extname <=> x.extname}}
when 'er'
@items = items.shift(2) + items.partition(&:directory?).flat_map {|arr| arr.sort_by(&:extname)}
end
items.each.with_index {|item, index| item.index = index}
end
# Search files and directories from the current directory, and update the screen.
#
# * +pattern+ - Search pattern against file names in Ruby Regexp string.
#
# === Example
#
# a : Search files that contains the letter "a" in their file name
# .*\.pdf$ : Search PDF files
def grep(pattern = '.*')
regexp = Regexp.new(pattern)
fetch_items_from_filesystem_or_zip
@items = items.shift(2) + items.select {|i| i.name =~ regexp}
sort_items_according_to_current_direction
draw_items
draw_total_items
switch_page 0
move_cursor 0
end
# Copy selected files and directories to the destination.
def cp(dest)
unless in_zip?
src = (m = marked_items).any? ? m.map(&:path) : current_item
FileUtils.cp_r src, expand_path(dest)
else
raise 'cping multiple items in .zip is not supported.' if selected_items.size > 1
Zip::File.open(current_zip) do |zip|
entry = zip.find_entry(selected_items.first.name).dup
entry.name, entry.name_length = dest, dest.size
zip.instance_variable_get(:@entry_set) << entry
end
end
ls
end
# Move selected files and directories to the destination.
def mv(dest)
unless in_zip?
src = (m = marked_items).any? ? m.map(&:path) : current_item
FileUtils.mv src, expand_path(dest)
else
raise 'mving multiple items in .zip is not supported.' if selected_items.size > 1
rename "#{selected_items.first.name}/#{dest}"
end
ls
end
# Rename selected files and directories.
#
# ==== Parameters
# * +pattern+ - new filename, or a shash separated Regexp like string
def rename(pattern)
from, to = pattern.sub(/^\//, '').sub(/\/$/, '').split '/'
if to.nil?
from, to = current_item.name, from
else
from = Regexp.new from
end
unless in_zip?
selected_items.each do |item|
name = item.name.gsub from, to
FileUtils.mv item, current_dir.join(name) if item.name != name
end
else
Zip::File.open(current_zip) do |zip|
selected_items.each do |item|
name = item.name.gsub from, to
zip.rename item.name, name
end
end
end
ls
end
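# Illustrative sketch (not from the original source):
#
#   :rename notes.txt   # single part: rename the current item to notes.txt
#   :rename /foo/bar/   # two parts: gsub(/foo/, 'bar') on each selected name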
# Soft delete selected files and directories.
#
# If the OS is not OSX, performs the same as `delete` command.
def trash
unless in_zip?
if osx?
FileUtils.mv selected_items.map(&:path), File.expand_path('~/.Trash/')
else
#TODO support other OS
FileUtils.rm_rf selected_items.map(&:path)
end
else
return unless ask %Q[Trashing zip entries is not supported. Actually the files will be deleted. Are you sure you want to proceed? (y/n)]
delete
end
@current_row -= selected_items.count {|i| i.index <= current_row}
ls
end
# Delete selected files and directories.
def delete
unless in_zip?
FileUtils.rm_rf selected_items.map(&:path)
else
Zip::File.open(current_zip) do |zip|
zip.select {|e| selected_items.map(&:name).include? e.to_s}.each do |entry|
if entry.name_is_directory?
zip.dir.delete entry.to_s
else
zip.file.delete entry.to_s
end
end
end
end
@current_row -= selected_items.count {|i| i.index <= current_row}
ls
end
# Create a new directory.
def mkdir(dir)
unless in_zip?
FileUtils.mkdir_p current_dir.join(dir)
else
Zip::File.open(current_zip) do |zip|
zip.dir.mkdir dir
end
end
ls
end
# Create a new empty file.
def touch(filename)
unless in_zip?
FileUtils.touch current_dir.join(filename)
else
Zip::File.open(current_zip) do |zip|
# zip.file.open(filename, 'w') {|_f| } #HAXX this code creates an unneeded temporary file
zip.instance_variable_get(:@entry_set) << Zip::Entry.new(current_zip, filename)
end
end
ls
end
# Create a symlink to the current file or directory.
def symlink(name)
FileUtils.ln_s current_item, name
ls
end
# Yank selected file / directory names.
def yank
@yanked_items = selected_items
end
# Paste yanked files / directories here.
def paste
if @yanked_items
if current_item.directory?
FileUtils.cp_r @yanked_items.map(&:path), current_item
else
@yanked_items.each do |item|
if items.include? item
i = 1
while i += 1
new_item = load_item dir: current_dir, name: "#{item.basename}_#{i}#{item.extname}", stat: item.stat
break unless File.exist? new_item.path
end
FileUtils.cp_r item, new_item
else
FileUtils.cp_r item, current_dir
end
end
end
ls
end
end
# Copy selected files and directories' path into clipboard on OSX.
def clipboard
IO.popen('pbcopy', 'w') {|f| f << selected_items.map(&:path).join(' ')} if osx?
end
# Archive selected files and directories into a .zip file.
def zip(zipfile_name)
return unless zipfile_name
zipfile_name += '.zip' unless zipfile_name.end_with? '.zip'
Zip::File.open(zipfile_name, Zip::File::CREATE) do |zipfile|
selected_items.each do |item|
next if item.symlink?
if item.directory?
Dir[item.join('**/**')].each do |file|
zipfile.add file.sub("#{current_dir}/", ''), file
end
else
zipfile.add item.name, item
end
end
end
ls
end
# Unarchive .zip and .tar.gz files within selected files and directories into current_directory.
def unarchive
unless in_zip?
zips, gzs = selected_items.partition(&:zip?).tap {|z, others| break [z, *others.partition(&:gz?)]}
zips.each do |item|
FileUtils.mkdir_p current_dir.join(item.basename)
Zip::File.open(item) do |zip|
zip.each do |entry|
FileUtils.mkdir_p File.join(item.basename, File.dirname(entry.to_s))
zip.extract(entry, File.join(item.basename, entry.to_s)) { true }
end
end
end
gzs.each do |item|
Zlib::GzipReader.open(item) do |gz|
Gem::Package::TarReader.new(gz) do |tar|
dest_dir = current_dir.join (gz.orig_name || item.basename).sub(/\.tar$/, '')
tar.each do |entry|
dest = nil
if entry.full_name == '././@LongLink'
dest = File.join dest_dir, entry.read.strip
next
end
dest ||= File.join dest_dir, entry.full_name
if entry.directory?
FileUtils.mkdir_p dest, :mode => entry.header.mode
elsif entry.file?
FileUtils.mkdir_p dest_dir
File.open(dest, 'wb') {|f| f.print entry.read}
FileUtils.chmod entry.header.mode, dest
elsif entry.header.typeflag == '2' # symlink
File.symlink entry.header.linkname, dest
end
unless Dir.exist? dest_dir
FileUtils.mkdir_p dest_dir
File.open(File.join(dest_dir, gz.orig_name || item.basename), 'wb') {|f| f.print gz.read}
end
end
end
end
end
else
Zip::File.open(current_zip) do |zip|
zip.select {|e| selected_items.map(&:name).include? e.to_s}.each do |entry|
FileUtils.mkdir_p File.join(current_zip.dir, current_zip.basename, File.dirname(entry.to_s))
zip.extract(entry, File.join(current_zip.dir, current_zip.basename, entry.to_s)) { true }
end
end
end
ls
end
# Current page is the first page?
def first_page?
current_page == 0
end
# Current page is the last page?
def last_page?
current_page == total_pages - 1
end
# Number of pages in the current directory.
def total_pages
(items.size - 1) / max_items + 1
end
# Move to the given page number.
#
# ==== Parameters
# * +page+ - Target page number
def switch_page(page)
main.display (@current_page = page)
@displayed_items = items[current_page * max_items, max_items]
header_l.draw_path_and_page_number path: current_dir.path, current: current_page + 1, total: total_pages
end
# Update the header information concerning currently marked files or directories.
def draw_marked_items
items = marked_items
header_r.draw_marked_items count: items.size, size: items.inject(0) {|sum, i| sum += i.size}
end
# Update the header information concerning total files and directories in the current directory.
def draw_total_items
header_r.draw_total_items count: items.size, size: items.inject(0) {|sum, i| sum += i.size}
end
# Switch marking on / off for the current file or directory.
def toggle_mark
main.toggle_mark current_item
end
# Get a char as a String from user input.
def get_char
c = Curses.getch
c if (0..255) === c.ord
end
def clear_command_line
command_line.writeln 0, ""
command_line.clear
command_line.noutrefresh
end
# Accept user input, and directly execute it as a Ruby method call to the controller.
#
# ==== Parameters
# * +preset_command+ - A command that would be displayed at the command line before user input.
def process_command_line(preset_command: nil)
prompt = preset_command ? ":#{preset_command} " : ':'
command_line.set_prompt prompt
cmd, *args = command_line.get_command(prompt: prompt).split(' ')
if cmd && !cmd.empty? && respond_to?(cmd)
ret = self.public_send cmd, *args
clear_command_line
ret
end
rescue Interrupt
clear_command_line
end
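# Illustrative sketch (not from the original source): entering "mkdir photos"
# at the ":" prompt is dispatched as self.mkdir("photos") on this controller.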
# Accept user input, and directly execute it in an external shell.
def process_shell_command
command_line.set_prompt ':!'
cmd = command_line.get_command(prompt: ':!')[1..-1]
execute_external_command pause: true do
system cmd
end
rescue Interrupt
ensure
command_line.clear
command_line.noutrefresh
end
# Let the user answer y or n.
#
# ==== Parameters
# * +prompt+ - Prompt message
def ask(prompt = '(y/n)')
command_line.set_prompt prompt
command_line.refresh
while (c = Curses.getch)
next unless [?N, ?Y, ?n, ?y, 3, 27].include? c # N, Y, n, y, ^c, esc
command_line.clear
command_line.noutrefresh
break (c == 'y') || (c == 'Y')
end
end
# Open current file or directory with the editor.
def edit
execute_external_command do
editor = ENV['EDITOR'] || 'vim'
unless in_zip?
system %Q[#{editor} "#{current_item.path}"]
else
begin
tmpdir, tmpfile_name = nil
Zip::File.open(current_zip) do |zip|
tmpdir = Dir.mktmpdir
FileUtils.mkdir_p File.join(tmpdir, File.dirname(current_item.name))
tmpfile_name = File.join(tmpdir, current_item.name)
File.open(tmpfile_name, 'w') {|f| f.puts zip.file.read(current_item.name)}
system %Q[#{editor} "#{tmpfile_name}"]
zip.add(current_item.name, tmpfile_name) { true }
end
ls
ensure
FileUtils.remove_entry_secure tmpdir if tmpdir
end
end
end
end
# Open current file or directory with the viewer.
def move_cursor_by_click(y: nil, x: nil)
if (idx = main.pane_index_at(y: y, x: x))
row = current_page * max_items + main.maxy * idx + y - main.begy
move_cursor row if (row >= 0) && (row < items.size)
end
end
private
def execute_external_command(pause: false)
Curses.def_prog_mode
Curses.close_screen
yield
ensure
Curses.reset_prog_mode
Curses.getch if pause
#NOTE needs to draw borders and ls again here since the stdlib Curses.refresh fails to retrieve the previous screen
Rfd::Window.draw_borders
Curses.refresh
ls
end
def expand_path(path)
File.expand_path path.start_with?('/', '~') ? path : current_dir ? current_dir.join(path) : path
end
def load_item(path: nil, dir: nil, name: nil, stat: nil)
Item.new dir: dir || File.dirname(path), name: name || File.basename(path), stat: stat, window_width: main.width
end
def osx?
@_osx ||= RbConfig::CONFIG['host_os'] =~ /darwin/
end
def in_zip?
@current_zip
end
def debug(str)
@debug.debug str
end
end
|
interagent/committee | lib/committee/drivers/open_api_2.rb | Committee::Drivers.OpenAPI2.parse | ruby | def parse(data)
REQUIRED_FIELDS.each do |field|
if !data[field]
raise ArgumentError, "Committee: no #{field} section in spec data."
end
end
if data['swagger'] != '2.0'
raise ArgumentError, "Committee: driver requires OpenAPI 2.0."
end
schema = Schema.new
schema.driver = self
schema.base_path = data['basePath'] || ''
# Arbitrarily choose the first media type found in these arrays. This
# approach could probably stand to be improved, but at least users will
# for now have the option of turning media type validation off if they so
# choose.
schema.consumes = data['consumes'].first
schema.produces = data['produces'].first
schema.definitions, store = parse_definitions!(data)
schema.routes = parse_routes!(data, schema, store)
schema
end | Parses an API schema and builds a set of route definitions for use with
Committee.
The expected input format is a data hash with keys as strings (as opposed
to symbols) like the kind produced by JSON.parse or YAML.load. | train | https://github.com/interagent/committee/blob/810fadcea1bc1c529627d47325c1008b5c33b0a4/lib/committee/drivers/open_api_2.rb#L41-L68 | class OpenAPI2 < Committee::Drivers::Driver
def default_coerce_date_times
false
end
# Whether parameters that were form-encoded will be coerced by default.
def default_coerce_form_params
true
end
def default_allow_get_body
true
end
# Whether parameters in a request's path will be considered and coerced by
# default.
def default_path_params
true
end
# Whether parameters in a request's query string will be considered and
# coerced by default.
def default_query_params
true
end
def default_validate_success_only
true
end
def name
:open_api_2
end
# Parses an API schema and builds a set of route definitions for use with
# Committee.
#
# The expected input format is a data hash with keys as strings (as opposed
# to symbols) like the kind produced by JSON.parse or YAML.load.
def schema_class
Committee::Drivers::OpenAPI2::Schema
end
# Link abstracts an API link specifically for OpenAPI 2.
class Link
# The link's input media type. i.e. How requests should be encoded.
attr_accessor :enc_type
attr_accessor :href
# The link's output media type. i.e. How responses should be encoded.
attr_accessor :media_type
attr_accessor :method
# The link's input schema. i.e. How we validate an endpoint's incoming
# parameters.
attr_accessor :schema
attr_accessor :status_success
# The link's output schema. i.e. How we validate an endpoint's response
# data.
attr_accessor :target_schema
attr_accessor :header_schema
def rel
raise "Committee: rel not implemented for OpenAPI"
end
end
class SchemaBuilder
def initialize(link_data)
self.link_data = link_data
end
private
LINK_REQUIRED_FIELDS = [
:name
].map(&:to_s).freeze
attr_accessor :link_data
def check_required_fields!(param_data)
LINK_REQUIRED_FIELDS.each do |field|
if !param_data[field]
raise ArgumentError,
"Committee: no #{field} section in link data."
end
end
end
end
class HeaderSchemaBuilder < SchemaBuilder
def call
if link_data["parameters"]
link_schema = JsonSchema::Schema.new
link_schema.properties = {}
link_schema.required = []
header_parameters = link_data["parameters"].select { |param_data| param_data["in"] == "header" }
header_parameters.each do |param_data|
check_required_fields!(param_data)
param_schema = JsonSchema::Schema.new
param_schema.type = [param_data["type"]]
link_schema.properties[param_data["name"]] = param_schema
if param_data["required"] == true
link_schema.required << param_data["name"]
end
end
link_schema
end
end
end
# ParameterSchemaBuilder converts OpenAPI 2 link parameters, which are not
# quite JSON schemas (but will be in OpenAPI 3) into synthetic schemas that
# we can use to do some basic request validation.
class ParameterSchemaBuilder < SchemaBuilder
# Returns a tuple of (schema, schema_data) where only one of the two
# values is present. This is either a full schema that's ready to go _or_
# a hash of unparsed schema data.
def call
if link_data["parameters"]
body_param = link_data["parameters"].detect { |p| p["in"] == "body" }
if body_param
check_required_fields!(body_param)
if link_data["parameters"].detect { |p| p["in"] == "form" } != nil
raise ArgumentError, "Committee: can't mix body parameter " \
"with form parameters."
end
schema_data = body_param["schema"]
[nil, schema_data]
else
link_schema = JsonSchema::Schema.new
link_schema.properties = {}
link_schema.required = []
parameters = link_data["parameters"].reject { |param_data| param_data["in"] == "header" }
parameters.each do |param_data|
check_required_fields!(param_data)
param_schema = JsonSchema::Schema.new
# We could probably use more validation here, but the formats of
# OpenAPI 2 are based off of what's available in JSON schema, and
# therefore this should map over quite well.
param_schema.type = [param_data["type"]]
param_schema.enum = param_data["enum"] unless param_data["enum"].nil?
# validation: string
param_schema.format = param_data["format"] unless param_data["format"].nil?
param_schema.pattern = Regexp.new(param_data["pattern"]) unless param_data["pattern"].nil?
param_schema.min_length = param_data["minLength"] unless param_data["minLength"].nil?
param_schema.max_length = param_data["maxLength"] unless param_data["maxLength"].nil?
# validation: array
param_schema.min_items = param_data["minItems"] unless param_data["minItems"].nil?
param_schema.max_items = param_data["maxItems"] unless param_data["maxItems"].nil?
param_schema.unique_items = param_data["uniqueItems"] unless param_data["uniqueItems"].nil?
# validation: number/integer
param_schema.min = param_data["minimum"] unless param_data["minimum"].nil?
param_schema.min_exclusive = param_data["exclusiveMinimum"] unless param_data["exclusiveMinimum"].nil?
param_schema.max = param_data["maximum"] unless param_data["maximum"].nil?
param_schema.max_exclusive = param_data["exclusiveMaximum"] unless param_data["exclusiveMaximum"].nil?
param_schema.multiple_of = param_data["multipleOf"] unless param_data["multipleOf"].nil?
# And same idea: despite parameters not being schemas, the items
# key (if preset) is actually a schema that defines each item of an
# array type, so we can just reflect that directly onto our
# artificial schema.
if param_data["type"] == "array" && param_data["items"]
param_schema.items = param_data["items"]
end
link_schema.properties[param_data["name"]] = param_schema
if param_data["required"] == true
link_schema.required << param_data["name"]
end
end
[link_schema, nil]
end
end
end
end
class Schema < Committee::Drivers::Schema
attr_accessor :base_path
attr_accessor :consumes
# A link back to the derivative instance of Committee::Drivers::Driver
# that created this schema.
attr_accessor :driver
attr_accessor :definitions
attr_accessor :produces
attr_accessor :routes
attr_reader :validator_option
def build_router(options)
@validator_option = Committee::SchemaValidator::Option.new(options, self, :hyper_schema)
Committee::SchemaValidator::HyperSchema::Router.new(self, @validator_option)
end
end
private
DEFINITIONS_PSEUDO_URI = "http://json-schema.org/committee-definitions"
# These are fields that the OpenAPI 2 spec considers mandatory to be
# included in the document's top level.
REQUIRED_FIELDS = [
:consumes,
:definitions,
:paths,
:produces,
:swagger,
].map(&:to_s).freeze
def find_best_fit_response(link_data)
if response_data = link_data["responses"]["200"] || response_data = link_data["responses"][200]
[200, response_data]
elsif response_data = link_data["responses"]["201"] || response_data = link_data["responses"][201]
[201, response_data]
else
# Sort responses so that we can try to prefer any 3-digit status code.
# If there are none, we'll just take anything from the list.
ordered_responses = link_data["responses"].
select { |k, v| k.to_s =~ /[0-9]{3}/ }
if first = ordered_responses.first
[first[0].to_i, first[1]]
else
[nil, nil]
end
end
end
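# Illustrative sketch (not from the original source): with responses keyed
# "201" and "default", [201, <the 201 data>] is returned; with only "default"
# present no 3-digit key matches and [nil, nil] comes back.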
def href_to_regex(href)
href.gsub(/\{(.*?)\}/, '(?<\1>[^/]+)')
end
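# e.g. (illustrative, not from the original source):
#
#   href_to_regex('/apps/{app_id}/dynos/{id}')
#   #=> "/apps/(?<app_id>[^/]+)/dynos/(?<id>[^/]+)"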
def parse_definitions!(data)
# The "definitions" section of an OpenAPI 2 spec is a valid JSON schema.
# We extract it from the spec and parse it as a schema in isolation so
# that all references to it will still have correct paths (i.e. we can
# still find a resource at '#/definitions/resource' instead of
# '#/resource').
schema = JsonSchema.parse!({
"definitions" => data['definitions'],
})
schema.expand_references!
schema.uri = DEFINITIONS_PSEUDO_URI
# So this is a little weird: an OpenAPI specification is _not_ a valid
# JSON schema and yet it self-references like it is a valid JSON schema.
# To work around this what we do is parse its "definitions" section as a
# JSON schema and then build a document store here containing that. When
# trying to resolve a reference from elsewhere in the spec, we build a
# synthetic schema with a JSON reference to the document created from
# "definitions" and then expand references against this store.
store = JsonSchema::DocumentStore.new
store.add_schema(schema)
[schema, store]
end
def parse_routes!(data, schema, store)
routes = {}
# This is a performance optimization: instead of going through each link
# and parsing out its JSON schema separately, instead we just aggregate
# all schemas into one big hash and then parse it all at the end. After
# we parse it, go through each link and assign a proper schema object. In
# practice this comes out to somewhere on the order of 50x faster.
schemas_data = { "properties" => {} }
# Exactly the same idea, but for response schemas.
target_schemas_data = { "properties" => {} }
data['paths'].each do |path, methods|
href = schema.base_path + path
schemas_data["properties"][href] = { "properties" => {} }
target_schemas_data["properties"][href] = { "properties" => {} }
methods.each do |method, link_data|
method = method.upcase
link = Link.new
link.enc_type = schema.consumes
link.href = href
link.media_type = schema.produces
link.method = method
# Convert the spec's parameter pseudo-schemas into JSON schemas that
# we can use for some basic request validation.
link.schema, schema_data = ParameterSchemaBuilder.new(link_data).call
link.header_schema = HeaderSchemaBuilder.new(link_data).call
# If data came back instead of a schema (this occurs when a route has
# a single `body` parameter instead of a collection of URL/query/form
# parameters), store it for later parsing.
if schema_data
schemas_data["properties"][href]["properties"][method] = schema_data
end
# Arbitrarily pick one response for the time being. Prefers in order:
# a 200, 201, any 3-digit numerical response, then anything at all.
status, response_data = find_best_fit_response(link_data)
if status
link.status_success = status
# A link need not necessarily specify a target schema.
if response_data["schema"]
target_schemas_data["properties"][href]["properties"][method] =
response_data["schema"]
end
end
rx = %r{^#{href_to_regex(link.href)}$}
Committee.log_debug "Created route: #{link.method} #{link.href} (regex #{rx})"
routes[method] ||= []
routes[method] << [rx, link]
end
end
# See the note on our DocumentStore's initialization in
# #parse_definitions!, but what we're doing here is prefixing references
# with a specialized internal URI so that they can reference definitions
# from another document in the store.
schemas =
rewrite_references_and_parse(schemas_data, store)
target_schemas =
rewrite_references_and_parse(target_schemas_data, store)
# As noted above, now that we've parsed our aggregate response schema, go
# back through each link and them their response schema.
routes.each do |method, method_routes|
method_routes.each do |(_, link)|
# request
#
# Differs slightly from responses in that the schema may already have
# been set for endpoints with non-body parameters, so check for nil
# before we set it.
if schema = schemas.properties[link.href].properties[method]
link.schema = schema
end
# response
link.target_schema =
target_schemas.properties[link.href].properties[method]
end
end
routes
end
def rewrite_references_and_parse(schemas_data, store)
schemas = rewrite_references(schemas_data)
schemas = JsonSchema.parse!(schemas_data)
schemas.expand_references!(:store => store)
schemas
end
def rewrite_references(schema)
if schema.is_a?(Hash)
ref = schema["$ref"]
if ref && ref.is_a?(String) && ref[0] == "#"
schema["$ref"] = DEFINITIONS_PSEUDO_URI + ref
else
schema.each do |_, v|
rewrite_references(v)
end
end
end
schema
end
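# e.g. (illustrative, not from the original source):
#
#   rewrite_references("$ref" => "#/definitions/app")
#   #=> {"$ref"=>"http://json-schema.org/committee-definitions#/definitions/app"}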
end
|