_id — string, length 2–6
title — string, length 9–130
partition — string, 3 distinct values
text — string, length 66–10.5k
language — string, 1 distinct value
meta_information — dict
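For orientation, a minimal sketch of how one row of this dataset might be assembled as a Ruby hash, using the field values of the first record below (the exact storage layout is an assumption of this sketch, not something the export specifies):

```ruby
# Hypothetical reconstruction of a single row, using values from record q26400.
record = {
  "_id"              => "q26400",
  "title"            => "Omnibus.GitCache.incremental",
  "partition"        => "test",
  "text"             => "def incremental ... end",   # full method source, as shown in the record body
  "language"         => "ruby",
  "meta_information" => { "resource" => "" },
}
```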
q26400
Omnibus.GitCache.incremental
test
def incremental
  log.internal(log_key) { "Performing incremental cache" }

  create_cache_path
  remove_git_dirs

  git_cmd("add -A -f")

  begin
    git_cmd(%Q{commit -q -m "Backup of #{tag}"})
  rescue CommandFailed => e
    raise unless e.message.include?("nothing to commit")
  end

  git_cmd(%Q{tag -f "#{tag}"})
end
ruby
{ "resource": "" }
q26401
Omnibus.GitCache.remove_git_dirs
test
def remove_git_dirs
  log.internal(log_key) { "Removing git directories" }

  Dir.glob("#{install_dir}/**/{,.*}/config").reject do |path|
    REQUIRED_GIT_FILES.any? do |required_file|
      !File.exist?(File.join(File.dirname(path), required_file))
    end
  end.each do |path|
    log.internal(log_key) { "Removing git dir `#{path}'" }
    FileUtils.rm_rf(File.dirname(path))
  end

  true
end
ruby
{ "resource": "" }
q26402
Omnibus.Packager::APPX.write_manifest_file
test
def write_manifest_file
  render_template(resource_path("AppxManifest.xml.erb"),
    destination: "#{windows_safe_path(project.install_dir)}/AppxManifest.xml",
    variables: {
      name: project.package_name,
      friendly_name: project.friendly_name,
      version: windows_package_version,
      maintainer: project.maintainer,
      certificate_subject: certificate_subject.gsub('"', "&quot;"),
    }
  )
end
ruby
{ "resource": "" }
q26403
Omnibus.Util.shellout
test
def shellout(*args)
  options = args.last.kind_of?(Hash) ? args.pop : {}
  options = SHELLOUT_OPTIONS.merge(options)

  command_string = args.join(" ")
  in_msys = options.delete(:in_msys_bash) && ENV["MSYSTEM"]
  # Mixlib will handle escaping characters for cmd but our command might
  # contain '. For now, assume that won't happen because I don't know
  # whether this command is going to be played via cmd or through
  # ProcessCreate.
  command_string = "bash -c \'#{command_string}\'" if in_msys

  # Grab the log_level
  log_level = options.delete(:log_level)

  # Set the live stream if one was not given
  options[:live_stream] ||= log.live_stream(:internal)

  # Since Mixlib::ShellOut supports :environment and :env, we want to
  # standardize here
  if options[:env]
    options[:environment] = options.fetch(:environment, {}).merge(options[:env])
  end

  # Log any environment options given
  unless options[:environment].empty?
    log.public_send(log_level, log_key) { "Environment:" }
    options[:environment].sort.each do |key, value|
      log.public_send(log_level, log_key) { " #{key}=#{value.inspect}" }
    end
  end

  # Log the actual command
  log.public_send(log_level, log_key) { "$ #{command_string}" }

  cmd = Mixlib::ShellOut.new(command_string, options)
  cmd.environment["HOME"] = "/tmp" unless ENV["HOME"]
  cmd.run_command
  cmd
end
ruby
{ "resource": "" }
q26404
Omnibus.Util.shellout!
test
def shellout!(*args)
  cmd = shellout(*args)
  cmd.error!
  cmd
rescue Mixlib::ShellOut::ShellCommandFailed
  raise CommandFailed.new(cmd)
rescue Mixlib::ShellOut::CommandTimeout
  raise CommandTimeout.new(cmd)
end
ruby
{ "resource": "" }
q26405
Omnibus.Util.retry_block
test
def retry_block(logstr, retried_exceptions = [], retries = Omnibus::Config.fetcher_retries, &block)
  yield
rescue Exception => e
  raise e unless retried_exceptions.any? { |eclass| e.is_a?(eclass) }

  if retries != 0
    log.info(log_key) { "Retrying failed #{logstr} due to #{e} (#{retries} retries left)..." }
    retries -= 1
    retry
  else
    log.error(log_key) { "#{logstr} failed - #{e.class}!" }
    raise
  end
end
ruby
{ "resource": "" }
q26406
Omnibus.Util.windows_safe_path
test
def windows_safe_path(*pieces)
  path = File.join(*pieces)

  if File::ALT_SEPARATOR
    path.gsub(File::SEPARATOR, File::ALT_SEPARATOR)
  else
    path
  end
end
ruby
{ "resource": "" }
q26407
Omnibus.Util.compiler_safe_path
test
def compiler_safe_path(*pieces)
  path = File.join(*pieces)
  path = path.sub(/^([A-Za-z]):\//, "/\\1/") if ENV["MSYSTEM"]
  path
end
ruby
{ "resource": "" }
q26408
Omnibus.Util.create_directory
test
def create_directory(*paths)
  path = File.join(*paths)
  log.debug(log_key) { "Creating directory `#{path}'" }
  FileUtils.mkdir_p(path)
  path
end
ruby
{ "resource": "" }
q26409
Omnibus.Util.remove_directory
test
def remove_directory(*paths)
  path = File.join(*paths)
  log.debug(log_key) { "Remove directory `#{path}'" }
  FileUtils.rm_rf(path)
  path
end
ruby
{ "resource": "" }
q26410
Omnibus.Util.copy_file
test
def copy_file(source, destination)
  log.debug(log_key) { "Copying `#{source}' to `#{destination}'" }
  FileUtils.cp(source, destination)
  destination
end
ruby
{ "resource": "" }
q26411
Omnibus.Util.remove_file
test
def remove_file(*paths)
  path = File.join(*paths)
  log.debug(log_key) { "Removing file `#{path}'" }
  FileUtils.rm_f(path)
  path
end
ruby
{ "resource": "" }
q26412
Omnibus.Util.create_file
test
def create_file(*paths, &block)
  path = File.join(*paths)
  log.debug(log_key) { "Creating file `#{path}'" }

  FileUtils.mkdir_p(File.dirname(path))

  if block
    File.open(path, "wb") { |f| f.write(yield) }
  else
    FileUtils.touch(path)
  end

  path
end
ruby
{ "resource": "" }
q26413
Omnibus.Util.create_link
test
def create_link(a, b)
  log.debug(log_key) { "Linking `#{a}' to `#{b}'" }
  FileUtils.ln_s(a, b)
end
ruby
{ "resource": "" }
q26414
Omnibus.Licensing.validate_license_info
test
def validate_license_info
  # First check the project licensing information

  # Check existence of licensing information
  if project.license == "Unspecified"
    licensing_warning("Project '#{project.name}' does not contain licensing information.")
  end

  # Check license file exists
  if project.license != "Unspecified" && project.license_file.nil?
    licensing_warning("Project '#{project.name}' does not point to a license file.")
  end

  # Check used license is a standard license
  if project.license != "Unspecified" && !STANDARD_LICENSES.include?(project.license)
    licensing_info("Project '#{project.name}' is using '#{project.license}' which is not one of the standard licenses identified in https://opensource.org/licenses/alphabetical. Consider using one of the standard licenses.")
  end

  # Now let's check the licensing info for software components
  license_map.each do |software_name, license_info|
    # First check if the software specified a license
    if license_info[:license] == "Unspecified"
      licensing_warning("Software '#{software_name}' does not contain licensing information.")
    end

    # Check if the software specifies any license files
    if license_info[:license] != "Unspecified" && license_info[:license_files].empty?
      licensing_warning("Software '#{software_name}' does not point to any license files.")
    end

    # Check if the software license is one of the standard licenses
    if license_info[:license] != "Unspecified" && !STANDARD_LICENSES.include?(license_info[:license])
      licensing_info("Software '#{software_name}' uses license '#{license_info[:license]}' which is not one of the standard licenses identified in https://opensource.org/licenses/alphabetical. Consider using one of the standard licenses.")
    end
  end
end
ruby
{ "resource": "" }
q26415
Omnibus.Licensing.project_license_content
test
def project_license_content
  project.license_file.nil? ? "" : IO.read(File.join(Config.project_root, project.license_file))
end
ruby
{ "resource": "" }
q26416
Omnibus.Licensing.license_map
test
def license_map
  @license_map ||= begin
    map = {}

    project.library.each do |component|
      # Some of the components do not bundle any software but contain
      # some logic that we use during the build. These components are
      # covered under the project's license and they do not need specific
      # license files.
      next if component.license == :project_license

      map[component.name] = {
        license: component.license,
        license_files: component.license_files,
        version: component.version,
        project_dir: component.project_dir,
      }
    end

    map
  end
end
ruby
{ "resource": "" }
q26417
Omnibus.Licensing.process_transitive_dependency_licensing_info
test
def process_transitive_dependency_licensing_info
  Dir.glob("#{cache_dir}/*/*-dependency-licenses.json").each do |license_manifest_path|
    license_manifest_data = FFI_Yajl::Parser.parse(File.read(license_manifest_path))
    project_name = license_manifest_data["project_name"]
    dependency_license_dir = File.dirname(license_manifest_path)

    license_manifest_data["dependency_managers"].each do |dep_mgr_name, dependencies|
      dep_license_map[dep_mgr_name] ||= {}

      dependencies.each do |dependency|
        # Copy dependency files
        dependency["license_files"].each do |f|
          license_path = File.join(dependency_license_dir, f)
          output_path = File.join(output_dir, f)
          FileUtils.cp(license_path, output_path)
        end

        dep_name = dependency["name"]
        dep_version = dependency["version"]

        # If we already have this dependency we do not need to add it again.
        if dep_license_map[dep_mgr_name][dep_name] && dep_license_map[dep_mgr_name][dep_name][dep_version]
          dep_license_map[dep_mgr_name][dep_name][dep_version]["dependency_of"] << project_name
        else
          dep_license_map[dep_mgr_name][dep_name] ||= {}
          dep_license_map[dep_mgr_name][dep_name][dep_version] = {
            "license" => dependency["license"],
            "license_files" => dependency["license_files"],
            "dependency_of" => [ project_name ],
          }
        end
      end
    end
  end

  FileUtils.rm_rf(cache_dir)
end
ruby
{ "resource": "" }
q26418
Omnibus.Licensing.collect_licenses_for
test
def collect_licenses_for(software)
  return nil if software.license == :project_license

  software_name = software.name
  license_data = license_map[software_name]
  license_files = license_data[:license_files]

  license_files.each do |license_file|
    if license_file
      output_file = license_package_location(software_name, license_file)

      if local?(license_file)
        input_file = File.expand_path(license_file, license_data[:project_dir])
        if File.exist?(input_file)
          FileUtils.cp(input_file, output_file)
          File.chmod 0644, output_file unless windows?
        else
          licensing_warning("License file '#{input_file}' does not exist for software '#{software_name}'.")
          # If we got here, we need to fail now so we don't take a git
          # cache snapshot, or else the software build could be restored
          # from cache without fixing the license issue.
          raise_if_warnings_fatal!
        end
      else
        begin
          download_file!(license_file, output_file, enable_progress_bar: false)
          File.chmod 0644, output_file unless windows?
        rescue SocketError,
               Errno::ECONNREFUSED,
               Errno::ECONNRESET,
               Errno::ENETUNREACH,
               Timeout::Error,
               OpenURI::HTTPError,
               OpenSSL::SSL::SSLError
          licensing_warning("Can not download license file '#{license_file}' for software '#{software_name}'.")
          # If we got here, we need to fail now so we don't take a git
          # cache snapshot, or else the software build could be restored
          # from cache without fixing the license issue.
          raise_if_warnings_fatal!
        end
      end
    end
  end
end
ruby
{ "resource": "" }
q26419
Omnibus.Packager::Solaris.write_prototype_file
test
def write_prototype_file
  shellout! "cd #{install_dirname} && find #{install_basename} -print > #{staging_dir_path('files')}"

  File.open staging_dir_path("files.clean"), "w+" do |fout|
    File.open staging_dir_path("files") do |fin|
      fin.each_line do |line|
        if line.chomp =~ /\s/
          log.warn(log_key) { "Skipping packaging '#{line}' file due to whitespace in filename" }
        else
          fout.write(line)
        end
      end
    end
  end

  # generate list of control files
  File.open staging_dir_path("Prototype"), "w+" do |f|
    f.write <<-EOF.gsub(/^ {10}/, "")
          i pkginfo
          i postinstall
          i postremove
    EOF
  end

  # generate the prototype's file list
  shellout! "cd #{install_dirname} && pkgproto < #{staging_dir_path('files.clean')} > #{staging_dir_path('Prototype.files')}"

  # fix up the user and group in the file list to root
  shellout! "awk '{ $5 = \"root\"; $6 = \"root\"; print }' < #{staging_dir_path('Prototype.files')} >> #{staging_dir_path('Prototype')}"
end
ruby
{ "resource": "" }
q26420
Omnibus.Package.content
test
def content
  @content ||= IO.read(path)
rescue Errno::ENOENT
  raise NoPackageFile.new(path)
end
ruby
{ "resource": "" }
q26421
Omnibus.Package.validate!
test
def validate!
  unless File.exist?(path)
    raise NoPackageFile.new(path)
  end

  unless File.exist?(metadata.path)
    raise NoPackageMetadataFile.new(metadata.path)
  end

  true
end
ruby
{ "resource": "" }
q26422
Omnibus.S3Publisher.key_for
test
def key_for(package, *stuff)
  File.join(
    Config.s3_publish_pattern % package.metadata,
    *stuff
  )
end
ruby
{ "resource": "" }
q26423
Omnibus.BuildVersion.semver
test
def semver
  build_tag = version_tag

  # PRERELEASE VERSION
  if prerelease_version?
    # ensure all dashes are dots per precedence rules (#12) in Semver
    # 2.0.0-rc.1
    prerelease = prerelease_tag.tr("-", ".")
    build_tag << "-" << prerelease
  end

  # BUILD VERSION
  # Follows SemVer conventions and the build version begins with a '+'.
  build_version_items = []

  # By default we will append a timestamp to every build. This behavior can
  # be overridden by setting the OMNIBUS_APPEND_TIMESTAMP environment
  # variable to a 'falsey' value (ie false, f, no, n or 0).
  #
  # format: YYYYMMDDHHMMSS example: 20130131123345
  if Config.append_timestamp
    build_version_items << build_start_time
  end

  # We'll append the git describe information unless we are sitting right
  # on an annotated tag.
  #
  # format: git.COMMITS_SINCE_TAG.GIT_SHA example: git.207.694b062
  unless commits_since_tag == 0
    build_version_items << ["git", commits_since_tag, git_sha_tag].join(".")
  end

  unless build_version_items.empty?
    build_tag << "+" << build_version_items.join(".")
  end

  build_tag
end
ruby
{ "resource": "" }
q26424
Omnibus.BuildVersion.build_start_time
test
def build_start_time
  @build_start_time ||= begin
    if ENV["BUILD_TIMESTAMP"]
      begin
        Time.strptime(ENV["BUILD_TIMESTAMP"], "%Y-%m-%d_%H-%M-%S")
      rescue ArgumentError
        error_message = "BUILD_TIMESTAMP environment variable "
        error_message << "should be in YYYY-MM-DD_hh-mm-ss "
        error_message << "format."
        raise ArgumentError, error_message
      end
    elsif ENV["BUILD_ID"]
      begin
        Time.strptime(ENV["BUILD_ID"], "%Y-%m-%d_%H-%M-%S")
      rescue ArgumentError
        error_message = "BUILD_ID environment variable "
        error_message << "should be in YYYY-MM-DD_hh-mm-ss "
        error_message << "format."
        raise ArgumentError, error_message
      end
    else
      Time.now.utc
    end
  end.strftime(TIMESTAMP_FORMAT)
end
ruby
{ "resource": "" }
q26425
Omnibus.Metadata.save
test
def save
  File.open(path, "w+") do |f|
    f.write(FFI_Yajl::Encoder.encode(to_hash, pretty: true))
  end

  true
end
ruby
{ "resource": "" }
q26426
Omnibus.Packager::RPM.vendor
test
def vendor(val = NULL)
  if null?(val)
    @vendor || "Omnibus <omnibus@getchef.com>"
  else
    unless val.is_a?(String)
      raise InvalidValue.new(:vendor, "be a String")
    end

    @vendor = val
  end
end
ruby
{ "resource": "" }
q26427
Omnibus.Packager::RPM.license
test
def license(val = NULL)
  if null?(val)
    @license || project.license
  else
    unless val.is_a?(String)
      raise InvalidValue.new(:license, "be a String")
    end

    @license = val
  end
end
ruby
{ "resource": "" }
q26428
Omnibus.Packager::RPM.build_filepath
test
def build_filepath(path)
  filepath = rpm_safe("/" + path.gsub("#{build_dir}/", ""))
  return if config_files.include?(filepath)

  full_path = build_dir + filepath.gsub("[%]", "%")
  # FileSyncer.glob quotes pathnames that contain spaces, which is a problem on el7
  full_path.delete!('"')
  # Mark directories with the %dir directive to prevent rpmbuild from counting their contents twice.
  return mark_filesystem_directories(filepath) if !File.symlink?(full_path) && File.directory?(full_path)

  filepath
end
ruby
{ "resource": "" }
q26429
Omnibus.Packager::RPM.with_rpm_signing
test
def with_rpm_signing(&block)
  directory = Dir.mktmpdir
  destination = "#{directory}/sign-rpm"

  render_template(resource_path("signing.erb"),
    destination: destination,
    mode: 0700,
    variables: {
      passphrase: signing_passphrase,
    }
  )

  # Yield the destination to the block
  yield(destination)
ensure
  remove_file(destination)
  remove_directory(directory)
end
ruby
{ "resource": "" }
q26430
Omnibus.Command::Publish.publish
test
def publish(klass, pattern, options)
  if options[:platform_mappings]
    options[:platform_mappings] = FFI_Yajl::Parser.parse(File.read(File.expand_path(options[:platform_mappings])))
  end

  klass.publish(pattern, options) do |package|
    say("Published '#{package.name}' for #{package.metadata[:platform]}-#{package.metadata[:platform_version]}", :green)
  end
end
ruby
{ "resource": "" }
q26431
Omnibus.Project.build_version
test
def build_version(val = NULL, &block)
  if block && !null?(val)
    raise Error, "You cannot specify additional parameters to " \
      "#build_version when a block is given!"
  end

  if block
    @build_version_dsl = BuildVersionDSL.new(&block)
  else
    if null?(val)
      @build_version_dsl.build_version
    else
      @build_version_dsl = BuildVersionDSL.new(val)
    end
  end
end
ruby
{ "resource": "" }
q26432
Omnibus.Project.package
test
def package(id, &block)
  unless block
    raise InvalidValue.new(:package, "have a block")
  end

  packagers[id] << block
end
ruby
{ "resource": "" }
q26433
Omnibus.Project.compress
test
def compress(id, &block)
  if block
    compressors[id] << block
  else
    compressors[id] << Proc.new {}
  end
end
ruby
{ "resource": "" }
q26434
Omnibus.Project.override
test
def override(name, val = NULL)
  if null?(val)
    overrides[name.to_sym]
  else
    overrides[name.to_sym] = val
  end
end
ruby
{ "resource": "" }
q26435
Omnibus.Project.license_file_path
test
def license_file_path(path = NULL)
  if null?(path)
    @license_file_path || File.join(install_dir, "LICENSE")
  else
    @license_file_path = File.join(install_dir, path)
  end
end
ruby
{ "resource": "" }
q26436
Omnibus.Project.dependency?
test
def dependency?(software)
  name = software.is_a?(Software) ? software.name : software
  dependencies.include?(name)
end
ruby
{ "resource": "" }
q26437
Omnibus.Project.built_manifest
test
def built_manifest
  log.info(log_key) { "Building version manifest" }
  m = Omnibus::Manifest.new(build_version, build_git_revision, license)
  softwares.each do |software|
    m.add(software.name, software.manifest_entry)
  end
  m
end
ruby
{ "resource": "" }
q26438
Omnibus.Project.write_text_manifest
test
def write_text_manifest
  File.open(text_manifest_path, "w") do |f|
    f.puts "#{name} #{build_version}"
    f.puts ""
    f.puts Omnibus::Reports.pretty_version_map(self)
  end
end
ruby
{ "resource": "" }
q26439
Omnibus.Packager::DEB.write_conffiles_file
test
def write_conffiles_file
  return if project.config_files.empty?

  render_template(resource_path("conffiles.erb"),
    destination: File.join(debian_dir, "conffiles"),
    variables: {
      config_files: project.config_files,
    }
  )
end
ruby
{ "resource": "" }
q26440
Omnibus.Packager::DEB.package_size
test
def package_size
  @package_size ||= begin
    path = "#{project.install_dir}/**/*"
    total = FileSyncer.glob(path).inject(0) do |size, path|
      unless File.directory?(path) || File.symlink?(path)
        size += File.size(path)
      end
      size
    end

    # Per http://www.debian.org/doc/debian-policy/ch-controlfields.html, the
    # disk space is given as the integer value of the estimated installed
    # size in bytes, divided by 1024 and rounded up.
    total / 1024
  end
end
ruby
{ "resource": "" }
q26441
Omnibus.GitFetcher.dir_empty?
test
def dir_empty?(dir)
  Dir.entries(dir).reject { |d| [".", ".."].include?(d) }.empty?
end
ruby
{ "resource": "" }
q26442
Omnibus.GitFetcher.force_recreate_project_dir!
test
def force_recreate_project_dir!
  log.warn(log_key) { "Removing existing directory #{project_dir} before cloning" }
  FileUtils.rm_rf(project_dir)
  Dir.mkdir(project_dir)
end
ruby
{ "resource": "" }
q26443
Omnibus.GitFetcher.current_revision
test
def current_revision
  cmd = git("rev-parse HEAD")
  cmd.stdout.strip
rescue CommandFailed
  log.debug(log_key) { "unable to determine current revision" }
  nil
end
ruby
{ "resource": "" }
q26444
Omnibus.GitFetcher.contains_revision?
test
def contains_revision?(rev)
  cmd = git("cat-file -t #{rev}")
  cmd.stdout.strip == "commit"
rescue CommandFailed
  log.debug(log_key) { "unable to determine presence of commit #{rev}" }
  false
end
ruby
{ "resource": "" }
q26445
OpenSSL.BN.to_ssh
test
def to_ssh
  if zero?
    return [0].pack("N")
  else
    buf = to_s(2)
    if buf.getbyte(0)[7] == 1
      return [buf.length + 1, 0, buf].pack("NCA*")
    else
      return [buf.length, buf].pack("NA*")
    end
  end
end
ruby
{ "resource": "" }
q26446
Net::SSH::Transport::Kex.DiffieHellmanGroupExchangeSHA1.compute_need_bits
test
def compute_need_bits
  # for Compatibility: OpenSSH requires (need_bits * 2 + 1) length of parameter
  need_bits = data[:need_bytes] * 8 * 2 + 1

  data[:minimum_dh_bits] ||= MINIMUM_BITS

  if need_bits < data[:minimum_dh_bits]
    need_bits = data[:minimum_dh_bits]
  elsif need_bits > MAXIMUM_BITS
    need_bits = MAXIMUM_BITS
  end

  data[:need_bits] = need_bits
  data[:need_bytes] = need_bits / 8
end
ruby
{ "resource": "" }
q26447
Net::SSH::Transport::Kex.DiffieHellmanGroupExchangeSHA1.get_parameters
test
def get_parameters
  compute_need_bits

  # request the DH key parameters for the given number of bits.
  buffer = Net::SSH::Buffer.from(:byte, KEXDH_GEX_REQUEST,
                                 :long, data[:minimum_dh_bits],
                                 :long, data[:need_bits],
                                 :long, MAXIMUM_BITS)
  connection.send_message(buffer)

  buffer = connection.next_message
  raise Net::SSH::Exception, "expected KEXDH_GEX_GROUP, got #{buffer.type}" unless buffer.type == KEXDH_GEX_GROUP

  p = buffer.read_bignum
  g = buffer.read_bignum

  [p, g]
end
ruby
{ "resource": "" }
q26448
Net::SSH::Transport::Kex.DiffieHellmanGroupExchangeSHA1.build_signature_buffer
test
def build_signature_buffer(result)
  response = Net::SSH::Buffer.new
  response.write_string data[:client_version_string],
                        data[:server_version_string],
                        data[:client_algorithm_packet],
                        data[:server_algorithm_packet],
                        result[:key_blob]
  response.write_long MINIMUM_BITS,
                      data[:need_bits],
                      MAXIMUM_BITS
  response.write_bignum dh.p, dh.g, dh.pub_key,
                        result[:server_dh_pubkey],
                        result[:shared_secret]
  response
end
ruby
{ "resource": "" }
q26449
SitePrism.Loadable.when_loaded
test
def when_loaded
  # Get original loaded value, in case we are nested
  # inside another when_loaded block.
  previously_loaded = loaded

  # Within the block, check (and cache) loaded?, to see whether the
  # page has indeed loaded according to the rules defined by the user.
  self.loaded = loaded?

  # If the page hasn't loaded. Then crash and return the error message.
  # If one isn't defined, just return the Error code.
  raise SitePrism::FailedLoadValidationError, load_error unless loaded

  # Return the yield value of the block if one was supplied.
  yield self if block_given?
ensure
  self.loaded = previously_loaded
end
ruby
{ "resource": "" }
q26450
SitePrism.Loadable.load_validations_pass?
test
def load_validations_pass?
  self.class.load_validations.all? do |validation|
    passed, message = instance_eval(&validation)
    self.load_error = message if message && !passed
    passed
  end
end
ruby
{ "resource": "" }
q26451
SitePrism.DSL.raise_if_block
test
def raise_if_block(obj, name, has_block, type)
  return unless has_block

  SitePrism.logger.debug("Type passed in: #{type}")
  SitePrism.logger.warn('section / iFrame can only accept blocks.')
  SitePrism.logger.error("#{obj.class}##{name} does not accept blocks")

  raise SitePrism::UnsupportedBlockError
end
ruby
{ "resource": "" }
q26452
SitePrism.DSL.merge_args
test
def merge_args(find_args, runtime_args, visibility_args = {})
  find_args = find_args.dup
  runtime_args = runtime_args.dup
  options = visibility_args.dup
  SitePrism.logger.debug("Initial args: #{find_args}, #{runtime_args}.")

  recombine_args(find_args, runtime_args, options)

  return [*find_args, *runtime_args] if options.empty?

  [*find_args, *runtime_args, options]
end
ruby
{ "resource": "" }
q26453
SitePrism.DSL.recombine_args
test
def recombine_args(find_args, runtime_args, options)
  options.merge!(find_args.pop) if find_args.last.is_a? Hash
  options.merge!(runtime_args.pop) if runtime_args.last.is_a? Hash
  options[:wait] = wait_time unless wait_key_present?(options)
end
ruby
{ "resource": "" }
q26454
SitePrism.ElementChecker.elements_to_check
test
def elements_to_check
  if _expected_items
    SitePrism.logger.debug('Expected Items has been set.')
    _mapped_items.select { |item_name| _expected_items.include?(item_name) }
  else
    _mapped_items
  end
end
ruby
{ "resource": "" }
q26455
SitePrism.AddressableUrlMatcher.matches?
test
def matches?(url, expected_mappings = {})
  actual_mappings = mappings(url)
  return false unless actual_mappings

  expected_mappings.empty? || all_expected_mappings_match?(expected_mappings, actual_mappings)
end
ruby
{ "resource": "" }
q26456
SitePrism.AddressableUrlMatcher.component_matches
test
def component_matches(component, uri)
  component_template = component_templates[component]
  return {} unless component_template

  component_url = uri.public_send(component).to_s
  mappings = component_template.extract(component_url)
  return mappings if mappings

  # to support Addressable's expansion of queries
  # ensure it's parsing the fragment as appropriate (e.g. {?params*})
  prefix = component_prefixes[component]
  return nil unless prefix

  component_template.extract(prefix + component_url)
end
ruby
{ "resource": "" }
q26457
SitePrism.AddressableUrlMatcher.to_substituted_uri
test
def to_substituted_uri
  url = pattern
  substitutions.each_pair { |slug, value| url = url.sub(slug, value) }
  begin
    Addressable::URI.parse(url)
  rescue Addressable::URI::InvalidURIError
    SitePrism.logger.warn("Ensure you don't use templated port numbers.")
    raise SitePrism::InvalidUrlMatcherError
  end
end
ruby
{ "resource": "" }
q26458
SitePrism.AddressableUrlMatcher.substitution_value
test
def substitution_value(index)
  sha = Digest::SHA1.digest(index.to_s)
  Base64.urlsafe_encode64(sha).gsub(/[^A-Za-z]/, '')[0..5]
end
ruby
{ "resource": "" }
q26459
JobIteration.EnumeratorBuilder.build_times_enumerator
test
def build_times_enumerator(number, cursor:)
  raise ArgumentError, "First argument must be an Integer" unless number.is_a?(Integer)

  wrap(self, build_array_enumerator(number.times.to_a, cursor: cursor))
end
ruby
{ "resource": "" }
q26460
JobIteration.EnumeratorBuilder.build_array_enumerator
test
def build_array_enumerator(enumerable, cursor:)
  unless enumerable.is_a?(Array)
    raise ArgumentError, "enumerable must be an Array"
  end
  if enumerable.any? { |i| defined?(ActiveRecord) && i.is_a?(ActiveRecord::Base) }
    raise ArgumentError, "array cannot contain ActiveRecord objects"
  end

  drop =
    if cursor.nil?
      0
    else
      cursor + 1
    end

  wrap(self, enumerable.each_with_index.drop(drop).to_enum { enumerable.size })
end
ruby
{ "resource": "" }
q26461
JobIteration.EnumeratorBuilder.build_lock_queue_enumerator
test
def build_lock_queue_enumerator(lock_queue, at_most_once:)
  unless lock_queue.is_a?(BackgroundQueue::LockQueue::RedisQueue) ||
      lock_queue.is_a?(BackgroundQueue::LockQueue::RolloutRedisQueue)
    raise ArgumentError, "an argument to #build_lock_queue_enumerator must be a LockQueue"
  end

  wrap(self, BackgroundQueue::LockQueueEnumerator.new(lock_queue, at_most_once: at_most_once).to_enum)
end
ruby
{ "resource": "" }
q26462
JobIteration.EnumeratorBuilder.build_active_record_enumerator_on_records
test
def build_active_record_enumerator_on_records(scope, cursor:, **args)
  enum = build_active_record_enumerator(
    scope,
    cursor: cursor,
    **args
  ).records
  wrap(self, enum)
end
ruby
{ "resource": "" }
q26463
JobIteration.EnumeratorBuilder.build_active_record_enumerator_on_batches
test
def build_active_record_enumerator_on_batches(scope, cursor:, **args)
  enum = build_active_record_enumerator(
    scope,
    cursor: cursor,
    **args
  ).batches
  wrap(self, enum)
end
ruby
{ "resource": "" }
q26464
JobIteration.CsvEnumerator.batches
test
def batches(batch_size:, cursor:)
  @csv.lazy
    .each_slice(batch_size)
    .each_with_index
    .drop(cursor.to_i)
    .to_enum { (count_rows_in_file.to_f / batch_size).ceil }
end
ruby
{ "resource": "" }
q26465
PaperTrail.VersionConcern.reify
test
def reify(options = {})
  unless self.class.column_names.include? "object"
    raise "reify can't be called without an object column"
  end
  return nil if object.nil?

  ::PaperTrail::Reifier.reify(self, options)
end
ruby
{ "resource": "" }
q26466
PaperTrail.VersionConcern.version_limit
test
def version_limit
  if self.class.item_subtype_column_present?
    klass = (item_subtype || item_type).constantize
    if klass&.paper_trail_options&.key?(:limit)
      return klass.paper_trail_options[:limit]
    end
  end
  PaperTrail.config.version_limit
end
ruby
{ "resource": "" }
q26467
PaperTrail.ModelConfig.on_create
test
def on_create
  @model_class.after_create { |r|
    r.paper_trail.record_create if r.paper_trail.save_version?
  }
  return if @model_class.paper_trail_options[:on].include?(:create)

  @model_class.paper_trail_options[:on] << :create
end
ruby
{ "resource": "" }
q26468
PaperTrail.ModelConfig.on_destroy
test
def on_destroy(recording_order = "before")
  unless %w[after before].include?(recording_order.to_s)
    raise ArgumentError, 'recording order can only be "after" or "before"'
  end

  if recording_order.to_s == "after" && cannot_record_after_destroy?
    raise E_CANNOT_RECORD_AFTER_DESTROY
  end

  @model_class.send(
    "#{recording_order}_destroy",
    lambda do |r|
      return unless r.paper_trail.save_version?

      r.paper_trail.record_destroy(recording_order)
    end
  )

  return if @model_class.paper_trail_options[:on].include?(:destroy)

  @model_class.paper_trail_options[:on] << :destroy
end
ruby
{ "resource": "" }
q26469
PaperTrail.ModelConfig.on_update
test
def on_update
  @model_class.before_save { |r|
    r.paper_trail.reset_timestamp_attrs_for_update_if_needed
  }
  @model_class.after_update { |r|
    if r.paper_trail.save_version?
      r.paper_trail.record_update(
        force: false,
        in_after_callback: true,
        is_touch: false
      )
    end
  }
  @model_class.after_update { |r|
    r.paper_trail.clear_version_instance
  }
  return if @model_class.paper_trail_options[:on].include?(:update)

  @model_class.paper_trail_options[:on] << :update
end
ruby
{ "resource": "" }
q26470
PaperTrail.ModelConfig.on_touch
test
def on_touch
  @model_class.after_touch { |r|
    r.paper_trail.record_update(
      force: true,
      in_after_callback: true,
      is_touch: true
    )
  }
end
ruby
{ "resource": "" }
q26471
PaperTrail.ModelConfig.check_presence_of_item_subtype_column
test
def check_presence_of_item_subtype_column(options)
  return unless options.key?(:limit)
  return if version_class.item_subtype_column_present?

  raise format(E_MODEL_LIMIT_REQUIRES_ITEM_SUBTYPE, @model_class.name)
end
ruby
{ "resource": "" }
q26472
PaperTrail.RecordTrail.save_version?
test
def save_version?
  if_condition = @record.paper_trail_options[:if]
  unless_condition = @record.paper_trail_options[:unless]
  (if_condition.blank? || if_condition.call(@record)) && !unless_condition.try(:call, @record)
end
ruby
{ "resource": "" }
q26473
Listen.QueueOptimizer._squash_changes
test
def _squash_changes(changes)
  # We combine here for backward compatibility
  # Newer clients should receive dir and path separately
  changes = changes.map { |change, dir, path| [change, dir + path] }

  actions = changes.group_by(&:last).map do |path, action_list|
    [_logical_action_for(path, action_list.map(&:first)), path.to_s]
  end

  config.debug("listen: raw changes: #{actions.inspect}")

  { modified: [], added: [], removed: [] }.tap do |squashed|
    actions.each do |type, path|
      squashed[type] << path unless type.nil?
    end
    config.debug("listen: final changes: #{squashed.inspect}")
  end
end
ruby
{ "resource": "" }
q26474
Ancestry.ClassMethods.to_node
test
def to_node object
  if object.is_a?(self.ancestry_base_class)
    object
  else
    unscoped_where { |scope| scope.find object }
  end
end
ruby
{ "resource": "" }
q26475
Ancestry.ClassMethods.scope_depth
test
def scope_depth depth_options, depth
  depth_options.inject(self.ancestry_base_class) do |scope, option|
    scope_name, relative_depth = option
    if [:before_depth, :to_depth, :at_depth, :from_depth, :after_depth].include? scope_name
      scope.send scope_name, depth + relative_depth
    else
      raise Ancestry::AncestryException.new("Unknown depth option: #{scope_name}.")
    end
  end
end
ruby
{ "resource": "" }
q26476
Ancestry.ClassMethods.orphan_strategy=
test
def orphan_strategy= orphan_strategy
  # Check value of orphan strategy, only rootify, adopt, restrict or destroy is allowed
  if [:rootify, :adopt, :restrict, :destroy].include? orphan_strategy
    class_variable_set :@@orphan_strategy, orphan_strategy
  else
    raise Ancestry::AncestryException.new("Invalid orphan strategy, valid ones are :rootify, :adopt, :restrict and :destroy.")
  end
end
ruby
{ "resource": "" }
q26477
Ancestry.ClassMethods.arrange
test
def arrange options = {}
  if (order = options.delete(:order))
    arrange_nodes self.ancestry_base_class.order(order).where(options)
  else
    arrange_nodes self.ancestry_base_class.where(options)
  end
end
ruby
{ "resource": "" }
q26478
Ancestry.ClassMethods.arrange_serializable
test
def arrange_serializable options={}, nodes=nil, &block
  nodes = arrange(options) if nodes.nil?
  nodes.map do |parent, children|
    if block_given?
      yield parent, arrange_serializable(options, children, &block)
    else
      parent.serializable_hash.merge 'children' => arrange_serializable(options, children)
    end
  end
end
ruby
{ "resource": "" }
q26479
Ancestry.ClassMethods.build_ancestry_from_parent_ids!
test
def build_ancestry_from_parent_ids! parent_id = nil, ancestry = nil
  unscoped_where do |scope|
    scope.where(:parent_id => parent_id).find_each do |node|
      node.without_ancestry_callbacks do
        node.update_attribute ancestry_column, ancestry
      end
      build_ancestry_from_parent_ids! node.id, if ancestry.nil? then "#{node.id}" else "#{ancestry}/#{node.id}" end
    end
  end
end
ruby
{ "resource": "" }
q26480
Ancestry.ClassMethods.rebuild_depth_cache!
test
def rebuild_depth_cache!
  raise Ancestry::AncestryException.new("Cannot rebuild depth cache for model without depth caching.") unless respond_to? :depth_cache_column

  self.ancestry_base_class.transaction do
    unscoped_where do |scope|
      scope.find_each do |node|
        node.update_attribute depth_cache_column, node.depth
      end
    end
  end
end
ruby
{ "resource": "" }
q26481
Ancestry.MaterializedPath.indirect_conditions
test
def indirect_conditions(object)
  t = arel_table
  node = to_node(object)
  # rails has case sensitive matching.
  if ActiveRecord::VERSION::MAJOR >= 5
    t[ancestry_column].matches("#{node.child_ancestry}/%", nil, true)
  else
    t[ancestry_column].matches("#{node.child_ancestry}/%")
  end
end
ruby
{ "resource": "" }
q26482
Gitlab.Request.request_defaults
test
def request_defaults(sudo = nil)
  self.class.default_params sudo: sudo
  raise Error::MissingCredentials, 'Please set an endpoint to API' unless @endpoint

  self.class.default_params.delete(:sudo) if sudo.nil?
end
ruby
{ "resource": "" }
q26483
Gitlab.Configuration.options
test
def options
  VALID_OPTIONS_KEYS.inject({}) do |option, key|
    option.merge!(key => send(key))
  end
end
ruby
{ "resource": "" }
q26484
Gitlab.Configuration.reset
test
def reset
  self.endpoint = ENV['GITLAB_API_ENDPOINT']
  self.private_token = ENV['GITLAB_API_PRIVATE_TOKEN'] || ENV['GITLAB_API_AUTH_TOKEN']
  self.httparty = get_httparty_config(ENV['GITLAB_API_HTTPARTY_OPTIONS'])
  self.sudo = nil
  self.user_agent = DEFAULT_USER_AGENT
end
ruby
{ "resource": "" }
q26485
Gitlab.Configuration.get_httparty_config
test
def get_httparty_config(options)
  return if options.nil?

  httparty = Gitlab::CLI::Helpers.yaml_load(options)
  raise ArgumentError, 'HTTParty config should be a Hash.' unless httparty.is_a? Hash

  Gitlab::CLI::Helpers.symbolize_keys httparty
end
ruby
{ "resource": "" }
q26486
Socketry.Timeout.start_timer
test
def start_timer(timer = DEFAULT_TIMER.new)
  raise Socketry::InternalError, "timer already started" if defined?(@timer)
  raise Socketry::InternalError, "deadline already set" if defined?(@deadline)

  @deadline = nil
  @timer = timer
  @timer.start
  true
end
ruby
{ "resource": "" }
q26487
Socketry.Timeout.set_timeout
test
def set_timeout(timeout)
  raise Socketry::InternalError, "deadline already set" if @deadline
  return unless timeout
  raise Socketry::TimeoutError, "time expired" if timeout < 0

  @deadline = lifetime + timeout
end
ruby
{ "resource": "" }
q26488
Socketry.Timeout.time_remaining
test
def time_remaining(timeout)
  return unless timeout
  raise Socketry::InternalError, "no deadline set" unless @deadline

  remaining = @deadline - lifetime
  raise Socketry::TimeoutError, "time expired" if remaining <= 0

  remaining
end
ruby
{ "resource": "" }
q26489
JSON.Validator.build_schemas
test
def build_schemas(parent_schema)
  schema = parent_schema.schema

  # Build ref schemas if they exist
  if schema["$ref"]
    load_ref_schema(parent_schema, schema["$ref"])
  end

  case schema["extends"]
  when String
    load_ref_schema(parent_schema, schema["extends"])
  when Array
    schema['extends'].each do |type|
      handle_schema(parent_schema, type)
    end
  end

  # Check for schemas in union types
  ["type", "disallow"].each do |key|
    if schema[key].is_a?(Array)
      schema[key].each do |type|
        if type.is_a?(Hash)
          handle_schema(parent_schema, type)
        end
      end
    end
  end

  # Schema properties whose values are objects, the values of which
  # are themselves schemas.
  %w[definitions properties patternProperties].each do |key|
    next unless value = schema[key]
    value.each do |k, inner_schema|
      handle_schema(parent_schema, inner_schema)
    end
  end

  # Schema properties whose values are themselves schemas.
  %w[additionalProperties additionalItems dependencies extends].each do |key|
    next unless schema[key].is_a?(Hash)
    handle_schema(parent_schema, schema[key])
  end

  # Schema properties whose values may be an array of schemas.
  %w[allOf anyOf oneOf not].each do |key|
    next unless value = schema[key]
    Array(value).each do |inner_schema|
      handle_schema(parent_schema, inner_schema)
    end
  end

  # Items are always schemas
  if schema["items"]
    items = schema["items"].clone
    items = [items] unless items.is_a?(Array)
    items.each do |item|
      handle_schema(parent_schema, item)
    end
  end

  # Convert enum to a ArraySet
  if schema["enum"].is_a?(Array)
    schema["enum"] = ArraySet.new(schema["enum"])
  end
end
ruby
{ "resource": "" }
q26490
JSON.Validator.handle_schema
test
def handle_schema(parent_schema, obj)
  if obj.is_a?(Hash)
    schema_uri = parent_schema.uri.dup
    schema = JSON::Schema.new(obj, schema_uri, parent_schema.validator)
    if obj['id']
      self.class.add_schema(schema)
    end
    build_schemas(schema)
  end
end
ruby
{ "resource": "" }
q26491
CanCan.Rule.matches_conditions?
test
def matches_conditions?(action, subject, extra_args)
  if @match_all
    call_block_with_all(action, subject, extra_args)
  elsif @block && !subject_class?(subject)
    @block.call(subject, *extra_args)
  elsif @conditions.kind_of?(Hash) && subject.class == Hash
    nested_subject_matches_conditions?(subject)
  elsif @conditions.kind_of?(Hash) && !subject_class?(subject)
    matches_conditions_hash?(subject)
  else
    # Don't stop at "cannot" definitions when there are conditions.
    @conditions.empty? ? true : @base_behavior
  end
end
ruby
{ "resource": "" }
q26492
CanCan.Ability.alias_action
test
def alias_action(*args)
  target = args.pop[:to]
  validate_target(target)
  aliased_actions[target] ||= []
  aliased_actions[target] += args
end
ruby
{ "resource": "" }
q26493
CanCan.Ability.expand_actions
test
def expand_actions(actions)
  actions.map do |action|
    aliased_actions[action] ? [action, *expand_actions(aliased_actions[action])] : action
  end.flatten
end
ruby
{ "resource": "" }
q26494
CanCan.Ability.aliases_for_action
test
def aliases_for_action(action)
  results = [action]
  aliased_actions.each do |aliased_action, actions|
    results += aliases_for_action(aliased_action) if actions.include? action
  end
  results
end
ruby
{ "resource": "" }
q26495
CanCan.Ability.relevant_rules
test
def relevant_rules(action, subject)
  rules.reverse.select do |rule|
    rule.expanded_actions = expand_actions(rule.actions)
    rule.relevant? action, subject
  end
end
ruby
{ "resource": "" }
q26496
FakeFS.File.create_missing_file
test
def create_missing_file
  raise Errno::EISDIR, path.to_s if File.directory?(@path)

  return if File.exist?(@path) # Unnecessary check, probably.

  dirname = RealFile.dirname @path

  unless dirname == '.'
    dir = FileSystem.find dirname
    raise Errno::ENOENT, path.to_s unless dir.is_a? FakeDir
  end

  @file = FileSystem.add(path, FakeFile.new)
end
ruby
{ "resource": "" }
q26497
FakeFS.Pathname.each_filename
test
def each_filename # :yield: filename
  return to_enum(__method__) unless block_given?
  _prefix, names = split_names(@path)
  names.each { |filename| yield filename }
  nil
end
ruby
{ "resource": "" }
q26498
FakeFS.Pathname.descend
test
def descend
  vs = []
  ascend { |v| vs << v }
  vs.reverse_each { |v| yield v }
  nil
end
ruby
{ "resource": "" }
q26499
FakeFS.Pathname.ascend
test
def ascend
  path = @path
  yield self
  while (r = chop_basename(path))
    path, _name = r
    break if path.empty?
    yield self.class.new(del_trailing_separator(path))
  end
end
ruby
{ "resource": "" }