_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q26300
Grenache.Base.announce
validation
# Announces a (key, port) service on the grape network and, when
# auto-announce is configured, re-announces on the configured interval.
def announce(key, port, opts = {}, &block)
  payload = [key, port]
  link.send 'announce', payload, opts, &block
  return unless config.auto_announce
  periodically(config.auto_announce_interval) do
    link.send 'announce', payload, opts, &block
  end
end
ruby
{ "resource": "" }
q26301
Validation.Condition._logical_operator
validation
# Builds a lambda that applies `delegated` (e.g. :all?, :any?) across the
# given conditions for a value. Raises TypeError on non-condition input.
def _logical_operator(delegated, *conditions)
  raise TypeError, 'wrong object for condition' unless conditions.all? { |c| conditionable? c }
  ->(v) { conditions.__send__(delegated) { |condition| _valid? condition, v } }
end
ruby
{ "resource": "" }
q26302
RightHook.SpecHelpers.post_with_signature
validation
# POSTs a payload to `path` with the signature header GitHub would send,
# computed over the form-encoded body using `secret`.
def post_with_signature(opts)
  path    = opts.fetch(:path)
  payload = opts.fetch(:payload)
  secret  = opts.fetch(:secret)
  encoded_body = URI.encode_www_form(payload: payload)
  post(path, { payload: payload }, generate_secret_header(secret, encoded_body))
end
ruby
{ "resource": "" }
q26303
Rothko.Drawing.get_height
validation
# Returns the image height scaled proportionally to this drawing's width,
# rounded up to a whole number of rows.
# Fix: removed the useless `new_height` local (it was assigned and then
# implicitly returned; the expression alone is equivalent).
def get_height(img)
  (img.height / (img.width.to_f / self.width.to_f)).ceil
end
ruby
{ "resource": "" }
q26304
Rothko.Drawing.create_color_string
validation
# Maps every pixel of the image to its closest terminal color name and
# joins the whole grid into a single string (row-major order).
def create_color_string
  rows = (0...img.height).map do |y|
    (0...img.width).map do |x|
      pixel = self.img[x, y]
      find_closest_term_color([r(pixel), g(pixel), b(pixel)])
    end
  end
  rows.join
end
ruby
{ "resource": "" }
q26305
Rothko.Drawing.find_closest_term_color
validation
# Returns the palette color name with the smallest distance to the given
# RGB triple. 195075 = 255**2 * 3, the maximum possible squared distance.
def find_closest_term_color(pixel_values)
  best_name = ""
  best_dist = 195075
  @@palette.each do |name, values|
    candidate = find_distance(values, pixel_values)
    if candidate < best_dist
      best_dist = candidate
      best_name = name
    end
  end
  best_name
end
ruby
{ "resource": "" }
q26306
Rothko.Drawing.draw_line
validation
# Prints one row of pixels as background-colored spaces.
def draw_line(pixels)
  rendered = pixels.map { |pixel| " ".colorize(:background => find_color(pixel)) }
  puts rendered.join
end
ruby
{ "resource": "" }
q26307
Jail.Cdnjs.tree
validation
# Returns a memoized, flat list of every file under the current version,
# expanding "dir" entries breadth-first via the GitHub contents API.
# NOTE(review): `tree << child` re-enters this method; since @tree has
# already been assigned ([] is truthy), the re-entry returns the memo and
# the append mutates it. Subtle but intentional-looking memo pattern.
def tree
  @tree and return @tree
  @tree = []
  file_set = version_files
  while child = file_set.shift
    tree << child
    #if child.dir?
    if child.type == "dir"
      # Prepend the directory's children, then flatten file_set in place
      # (unshift returns the receiver, so flatten! applies to file_set).
      file_set.unshift( github.where(child.path).contents ).flatten!
    end
  end
  @tree
end
ruby
{ "resource": "" }
q26308
NasaApod.Client.search
validation
# Queries the APOD API, persisting any date/hd overrides on the client
# before issuing the request.
def search(options = {})
  self.date = options[:date] || date
  self.hd   = options[:hd]   || hd
  handle_response(HTTParty.get(DEFAULT_URL, query: attributes))
end
ruby
{ "resource": "" }
q26309
QuackConcurrency.Sleeper.process_timeout
validation
# Validates a timeout value: must be nil or a non-negative Numeric.
# Float::INFINITY is normalized to nil (wait forever).
#
# @raise [TypeError] when non-nil and not Numeric
# @raise [ArgumentError] when negative
def process_timeout(timeout)
  if timeout != nil
    raise TypeError, "'timeout' must be nil or a Numeric" unless timeout.is_a?(Numeric)
    raise ArgumentError, "'timeout' must not be negative" if timeout.negative?
  end
  timeout == Float::INFINITY ? nil : timeout
end
ruby
{ "resource": "" }
q26310
RightHook.Authenticator.find_or_create_authorization_by_note
validation
# Returns the token of the existing authorization whose note matches, or
# creates a fresh authorization for that note.
def find_or_create_authorization_by_note(note)
  existing = list_authorizations.find { |auth| auth.note == note }
  existing ? existing.token : create_authorization(note)
end
ruby
{ "resource": "" }
q26311
Grenache.Link.send
validation
# Serializes the payload with a request id and sends it over HTTP.
# Yields the response to the optional block, then returns it.
# NOTE: `opts` is accepted for interface compatibility but not used here.
def send(type, payload, opts = {}, &block)
  message = Oj.dump({ "rid" => uuid, "data" => payload })
  res = http_send type, message
  block.call(res) if block
  res
end
ruby
{ "resource": "" }
q26312
Jquids.IncludesHelper.jquids_includes
validation
# Renders the full set of <link>/<script>/inline-JS tags that wire up the
# jQuery UI datepicker/timepicker helpers: stylesheet include, JS includes
# for the selected jQuery/jQuery UI/timepicker versions, opinionated
# datepicker defaults serialized to JSON, and the document-ready hookup for
# .jquids_dp / .jquids_tp / .jquids_dtp elements.
# NOTE(review): output is built by ordered string appends; statement order
# is load-bearing. Returns html_safe output when available (Rails).
def jquids_includes(options = {}) # Set the format for the datepickers Jquids.format = options[:format] if options.has_key?(:format) html_out = "" if options.has_key?(:style) html_out << stylesheet_link_tag(jq_ui_stylesheet(options[:style])) + "\n" unless options[:style] == nil or options[:style] == :none or options[:style] == false else html_out << stylesheet_link_tag(jq_ui_stylesheet) + "\n" end jq_vrs = options.has_key?(:jQuery) ? options[:jQuery] : Jquids::JQVersions.last ui_vrs = options.has_key?(:jQueryUI) ? options[:jQueryUI] : Jquids::UIVersions.last trtp_vrs = options.has_key?(:TRTimepicker) ? options[:TRTimepicker] : :none # A little bit of css of the timepicker, and it is not added if the # timepicker javascript file is not included unless trtp_vrs == :none or trtp_vrs == false or trtp_vrs == nil html_out << "<style type=\"text/css\">.ui-timepicker-div .ui-widget-header{margin-bottom:8px;}.ui-timepicker-div dl{text-align:left;}.ui-timepicker-div dl dt{height:25px;}.ui-timepicker-div dl dd{margin:-25px 0 10px 65px;}.ui-timepicker-div td{font-size:90%;}</style>\n" end html_out << javascript_include_tag(jq_ui_javascripts(jq_vrs, ui_vrs, trtp_vrs)) + "\n" options[:datepicker_options] ||= {} # Some opiniated defaults (basically an attempt to make the jQuery # datepicker similar to the calendar_date_select with out making # modifications or having local dependencies) options[:datepicker_options][:showOtherMonths] = true if options[:datepicker_options][:showOtherMonths].nil? options[:datepicker_options][:selectOtherMonths] = true if options[:datepicker_options][:selectOtherMonths].nil? options[:datepicker_options][:changeMonth] = true if options[:datepicker_options][:changeMonth].nil? options[:datepicker_options][:changeYear] = true if options[:datepicker_options][:changeYear].nil? 
options[:datepicker_options][:dateFormat] = Jquids.format[:js_date] Jquids.jquids_process_options(options) # Decides whether the 'to_json' method exists (part of rails 3) or if the # gem needs to us the json gem datepicker_options = if options[:datepicker_options].respond_to?(:to_json) options.delete(:datepicker_options).to_json else begin JSON.unparse(options.delete(:datepicker_options)) rescue "" end end html_out << '<script type="text/javascript">$.datepicker.setDefaults(' + datepicker_options + ');' unless trtp_vrs == :none or trtp_vrs == false or trtp_vrs == nil options[:timepicker_options] ||= {} # Some opiniated defaults (basically an attempt to make the jQuery # datepicker similar to the calendar_date_select with out making # modifications or having local dependencies) # Sets the time format based off of the current format options[:timepicker_options][:ampm] = Jquids.format[:ampm] options[:timepicker_options][:timeFormat] = Jquids.format[:tr_js_time] timepicker_options = if options[:timepicker_options].respond_to?(:to_json) options.delete(:timepicker_options).to_json else begin JSON.unparse(options.delete(:timepicker_options)) rescue "" end end html_out << '$.timepicker.setDefaults(' + timepicker_options + ');' end # A minified version of this javascript. # <script type="text/javascript"> # $(document).ready(function(){ # $(".jquids_dp").each(function(){ # var s = $(this).attr("data-jquipicker"); # $(this).attr("data-jquipicker") ? $(this).datepicker(JSON.parse(s)) : $(this).datepicker(); # }); # $(".jquids_tp").each(function(){ # var s = $(this).attr("data-jquipicker"); # $(this).attr("data-jquipicker") ? 
$(this).timepicker(JSON.parse(s)) : $(this).timepicker(); # }); # $(".jquids_dtp").each(function(){ # var s=$(this).attr("data-jquipicker"); # $(this).attr("data-jquipicker")?$(this).datetimepicker(JSON.parse(s)) : $(this).datetimepicker() # }) # }); # </script> # # Used to parse out options for each datepicker instance html_out << '$(document).ready(function(){$(".jquids_dp").each(function(){var s=$(this).attr("data-jquipicker");$(this).attr("data-jquipicker")?$(this).datepicker(JSON.parse(s)):$(this).datepicker()});$(".jquids_tp").each(function(){var s=$(this).attr("data-jquipicker");$(this).attr("data-jquipicker")?$(this).timepicker(JSON.parse(s)):$(this).timepicker()});$(".jquids_dtp").each(function(){var s=$(this).attr("data-jquipicker");$(this).attr("data-jquipicker")?$(this).datetimepicker(JSON.parse(s)):$(this).datetimepicker()})});</script>' if html_out.respond_to?(:html_safe) return html_out.html_safe else return html_out end end
ruby
{ "resource": "" }
q26313
Euler.ConfigOptions.method_missing
validation
# Delegates unknown messages to the wrapped @config object:
# no arguments reads `method`; with arguments writes via "#{method}=".
def method_missing method, *args, &block
  return @config.send(method) if args.empty?
  @config.send("#{method}=", args.first)
end
ruby
{ "resource": "" }
q26314
HijriUmmAlqura.Hijri.to_s
validation
# Formats the Hijri date as "<day> <month name> <year> هـ" with
# day and year digits converted to Hindi numerals.
def to_s
  "#{arabno_to_hindi(day)} #{HijriUmmAlqura::MONTHNAMES[month]} #{arabno_to_hindi(year)} هـ"
end
ruby
{ "resource": "" }
q26315
HijriUmmAlqura.Hijri.jd
validation
# Converts a Hijri date to a Julian day number using the Umm al-Qura
# lookup table (months since epoch index into UMMALQURA_DAT).
def jd(date = self)
  index = (12 * (date.year - 1)) + date.month - 16260
  HijriUmmAlqura::UMMALQURA_DAT[index - 1] + date.day - 1 + 2400000 - 0.5
end
ruby
{ "resource": "" }
q26316
HijriUmmAlqura.Hijri.gd
validation
# Converts a Hijri date to its Gregorian equivalent via the Julian day.
def gd(date = self)
  HijriUmmAlqura.jd_to_gd(jd(date))
end
ruby
{ "resource": "" }
q26317
HijriUmmAlqura.Hijri.add
validation
# Returns a new date shifted by `offset` units of `period`:
# 'd' days, 'w' weeks, 'm' months, 'y' years.
# NOTE(review): for 'd'/'w' this calls `jd` with NO argument, so the shift
# is computed from self rather than the `date` parameter — confirm intended.
def add(date = self, offset, period)
  y = period == 'y' ? (date.year + offset) : date.year
  m = period == 'm' ? (month_of_year(date.year, date.month) + offset) : month_of_year(date.year, date.month)
  d = date.day
  begin
    if (period == 'd' || period == 'w')
      week_days = period == 'w' ? 7 : 1
      j_date = jd
      j_date = j_date + offset * week_days
      result = HijriUmmAlqura.jd(j_date)
      return result
    elsif (period == 'm')
      # Re-normalize month overflow/underflow into a valid year/month pair.
      rys = resync_year_month(y, m)
      y = rys[0]
      m = rys[1]
      return HijriUmmAlqura.format_date([y, m, d])
    elsif (period == 'y')
      return HijriUmmAlqura.format_date([y, m, d])
    end
  rescue Exception => e
    # NOTE(review): rescuing Exception is overly broad (catches SystemExit,
    # SignalException); StandardError would be safer here.
    puts "Exception details: #{e.class} #{e.message}"
  end
end
ruby
{ "resource": "" }
q26318
HijriUmmAlqura.Hijri.+
validation
# Returns a new Hijri date advanced by `n` days.
#
# @raise [TypeError] when `n` is not Numeric
def + (n)
  raise TypeError, 'expected numeric' unless n.is_a?(Numeric)
  HijriUmmAlqura.jd(jd + n)
end
ruby
{ "resource": "" }
q26319
QuackConcurrency.Future.raise
validation
# Completes the future exceptionally and wakes all waiting threads.
# This method shadows Kernel#raise on the instance, hence the explicit
# Kernel.raise calls for actually raising.
#
# @param exception [nil, Exception, Class] nil -> StandardError.new;
#   an Exception instance is stored as-is; an Exception subclass is
#   instantiated (`Exception >= exception` is the Module ancestry test).
# @raise [TypeError] for any other argument
# @raise [Complete] when the future was already completed
def raise(exception = nil)
  exception = case
  when exception == nil then StandardError.new
  when exception.is_a?(Exception) then exception
  when Exception >= exception then exception.new
  else Kernel.raise(TypeError, "'exception' must be nil or an instance of or descendant of Exception")
  end
  @mutex.synchronize do
    Kernel.raise(Complete) if @complete
    @complete = true
    @exception = exception
    # Wake every thread currently blocked on this future.
    @waiter.resume_all_indefinitely
  end
  nil
end
ruby
{ "resource": "" }
q26320
Megam.ScmmAccount.to_hash
validation
# Serializes this account's attributes into a Hash for transport.
# Fix: `index_hahs["mail"]` was a typo for `index_hash` and raised
# NameError at runtime whenever this method was called.
def to_hash
  index_hash = Hash.new
  index_hash["json_claz"] = self.class.name
  index_hash["creationDate"] = creationDate
  index_hash["admin"] = admin
  index_hash["type"] = type
  index_hash["password"] = password
  index_hash["name"] = name
  index_hash["mail"] = mail
  index_hash["displayName"] = displayName
  index_hash["lastModified"] = lastModified
  index_hash["active"] = active
  index_hash["some_msg"] = some_msg
  index_hash
end
ruby
{ "resource": "" }
q26321
Mingle4r.CommonClassMethods.site=
validation
# Sets the Mingle site URL; when it changes, extracts and decodes any
# embedded user/password and rebuilds the resource class.
# NOTE(review): URI.decode was removed in Ruby 3.0 — verify supported rubies.
def site=(site)
  unless site == self.site
    @site = site
    parsed = URI.parse(site)
    @user = URI.decode(parsed.user) if parsed.user
    @password = URI.decode(parsed.password) if parsed.password
    @resource_class = self.send(:create_resource_class)
  end
  @site
end
ruby
{ "resource": "" }
q26322
Mingle4r.CommonClassMethods.find
validation
# Delegates to the resource class's find, treating the first argument as
# the scope and the optional second as the options hash.
def find(*args)
  scope, options = args
  @resource_class.find(scope, options || {})
end
ruby
{ "resource": "" }
q26323
Mingle4r.CommonClassMethods.create_resource_class
validation
# Builds an anonymous MingleResource subclass configured for XML and
# wired up via setup_class. Requires `site` to be set first.
def create_resource_class
  raise "Please set the site for #{self} class." unless self.site
  klass = Class.new(MingleResource)
  klass.format = :xml
  setup_class(klass)
  klass
end
ruby
{ "resource": "" }
q26324
Bixby.App.run!
validation
# Entry point: in debug mode runs the websocket client in the foreground
# with stdout logging; otherwise daemonizes via the Daemons gem.
def run!
  # load agent from config or cli opts
  # NOTE(review): the `agent` local is never used after assignment;
  # load_agent() is presumably kept for its side effects — confirm.
  agent = load_agent()
  fix_ownership()
  # debug mode, stay in front
  if @config[:debug] then
    Logging::Logger.root.add_appenders("stdout")
    return start_websocket_client()
  end
  # start daemon
  validate_argv()
  daemon_dir = Bixby.path("var")
  ensure_state_dir(daemon_dir)
  close_fds()
  daemon_opts = {
    :dir => daemon_dir,
    :dir_mode => :normal,
    :log_output => true,
    :stop_proc => lambda { logger.info "Agent shutdown on service stop command" }
  }
  Daemons.run_proc("bixby-agent", daemon_opts) do
    # Drop appenders inherited across the fork; the daemonized process
    # sets up its own logging in start_websocket_client.
    Logging.logger.root.clear_appenders
    start_websocket_client()
  end
end
ruby
{ "resource": "" }
q26325
Bixby.App.start_websocket_client
validation
# Connects the agent's websocket client to the manager and enters its
# run loop (blocks in @client.start).
def start_websocket_client
  # make sure log level is still set correctly here
  Bixby::Log.setup_logger(:level => Logging.appenders["file"].level)
  logger.info "Started Bixby Agent #{Bixby::Agent::VERSION}"
  @client = Bixby::WebSocket::Client.new(Bixby.agent.manager_ws_uri, AgentHandler)
  # Install signal handlers before blocking in the client loop.
  trap_signals()
  @client.start
end
ruby
{ "resource": "" }
q26326
Bixby.App.fix_ownership
validation
# When running as root, chowns the var/ and etc/ directories to the
# "bixby" user/group. Silently skips when that user/group is missing.
def fix_ownership
  return unless Process.uid == 0
  begin
    uid = Etc.getpwnam("bixby").uid
    gid = Etc.getgrnam("bixby").gid
    # user/group exists, chown
    File.chown(uid, gid, Bixby.path("var"), Bixby.path("etc"))
  rescue ArgumentError
    # "bixby" user or group not present; best-effort, nothing to do
  end
end
ruby
{ "resource": "" }
q26327
KubernetesDeploy.DeferredSummaryLogging.print_summary
test
# Prints the deploy result heading (colored by outcome) followed by the
# actions sentence and each summary paragraph at the matching log level.
def print_summary(status)
  status_string = status.to_s.humanize.upcase
  color, level =
    case status
    when :success   then [:green, :info]
    when :timed_out then [:yellow, :fatal]
    else                 [:red, :fatal]
    end
  heading("Result: ", status_string, color)
  if (actions_sentence = summary.actions_sentence.presence)
    public_send(level, actions_sentence)
    blank_line(level)
  end
  summary.paragraphs.each do |para|
    para.split("\n").each { |line| public_send(level, line) }
    # No trailing blank line after the final paragraph.
    blank_line(level) unless para == summary.paragraphs.last
  end
end
ruby
{ "resource": "" }
q26328
KubernetesDeploy.DeployTask.find_bad_files_from_kubectl_output
test
# stderr often contains one or more lines like the following, from which we can extract the file path(s):
# Error from server (TypeOfError): error when creating "/path/to/service-gqq5oh.yml": Service "web" is invalid:
# Returns an array of {filename:, err:, content:} hashes, one per quoted
# yaml path found in the line (content is nil when the file is absent).
def find_bad_files_from_kubectl_output(line)
  found = []
  line.scan(%r{"(/\S+\.ya?ml\S*)"}) do |matches|
    matches.each do |path|
      content = File.read(path) if File.file?(path)
      found << { filename: File.basename(path), err: line, content: content }
    end
  end
  found
end
ruby
{ "resource": "" }
q26329
KubernetesDeploy.DeployTask.confirm_ejson_keys_not_prunable
test
# Aborts the deploy when the protected ejson-keys Secret carries the
# last-applied annotation (which would make it eligible for pruning).
# A missing Secret is acceptable and only logged at debug level.
#
# @raise [EjsonPrunableError] when the Secret would be pruned
def confirm_ejson_keys_not_prunable
  secret = ejson_provisioner.ejson_keys_secret
  return unless secret.dig("metadata", "annotations", KubernetesResource::LAST_APPLIED_ANNOTATION)
  @logger.error("Deploy cannot proceed because protected resource " \
    "Secret/#{EjsonSecretProvisioner::EJSON_KEYS_SECRET} would be pruned.")
  raise EjsonPrunableError
rescue Kubectl::ResourceNotFoundError => e
  @logger.debug("Secret/#{EjsonSecretProvisioner::EJSON_KEYS_SECRET} does not exist: #{e}")
end
ruby
{ "resource": "" }
q26330
Omnibus.Compressor.for_current_system
test
# Picks the compressor class for the current platform: DMG is preferred
# on macOS, TGZ anywhere it is listed, otherwise the Null compressor.
def for_current_system(compressors)
  family = Ohai["platform_family"]
  if family == "mac_os_x"
    return DMG if compressors.include?(:dmg)
    return TGZ if compressors.include?(:tgz)
  end
  return TGZ if compressors.include?(:tgz)
  log.info(log_key) { "No compressor defined for `#{family}'." }
  Null
end
ruby
{ "resource": "" }
q26331
Omnibus.Packager::BFF.create_bff_file
test
# Builds the AIX .bff package: chowns the staging tree to root (mkinstallp
# requires it), runs mkinstallp, logs the generated inventory, copies the
# result to the package dir, and always restores the original ownership.
# NOTE(review): depends on passwordless sudo being available.
def create_bff_file # We are making the assumption that sudo exists. # Unforunately, the owner of the file in the staging directory is what # will be on the target machine, and mkinstallp can't tell you if that # is a bad thing (it usually is). # The match is so we only pick the lowest level of the project dir. # This implies that if we are in /tmp/staging/project/dir/things, # we will chown from 'project' on, rather than 'project/dir', which leaves # project owned by the build user (which is incorrect) # First - let's find out who we are. shellout!("sudo chown -Rh 0:0 #{File.join(staging_dir, project.install_dir.match(/^\/?(\w+)/).to_s)}") log.info(log_key) { "Creating .bff file" } # Since we want the owner to be root, we need to sudo the mkinstallp # command, otherwise it will not have access to the previously chowned # directory. shellout!("sudo /usr/sbin/mkinstallp -d #{staging_dir} -T #{File.join(staging_dir, 'gen.template')}") # Print the full contents of the inventory file generated by mkinstallp # from within the staging_dir's .info folder (where control files for the # packaging process are kept.) log.debug(log_key) do "With .inventory file of:\n" + File.read("#{File.join( staging_dir, '.info', "#{safe_base_package_name}.inventory" )}") end # Copy the resulting package up to the package_dir FileSyncer.glob(File.join(staging_dir, "tmp/*.bff")).each do |bff| copy_file(bff, File.join(Config.package_dir, create_bff_file_name)) end ensure # chown back to original user's uid/gid so cleanup works correctly original_uid = shellout!("id -u").stdout.chomp original_gid = shellout!("id -g").stdout.chomp shellout!("sudo chown -Rh #{original_uid}:#{original_gid} #{staging_dir}") end
ruby
{ "resource": "" }
q26332
Omnibus.FileSyncer.glob
test
# Globs the (cleaned) pattern including dotfiles, dropping any entries
# whose basename is in IGNORED_FILES; results are sorted.
def glob(pattern)
  normalized = Pathname.new(pattern).cleanpath.to_s
  Dir.glob(normalized, File::FNM_DOTMATCH).sort.reject do |path|
    IGNORED_FILES.include?(File.basename(path))
  end
end
ruby
{ "resource": "" }
q26333
Omnibus.FileSyncer.sync
test
# Mirrors `source` into `destination`: recreates directories, re-links
# symlinks, copies files (duplicating hard links with ln when their source
# was already copied), then removes destination entries absent from source.
# NOTE(review): statement order is load-bearing (hardlink bookkeeping,
# EACCES fallback via cp_r remove_destination); comments only, no restyle.
def sync(source, destination, options = {}) unless File.directory?(source) raise ArgumentError, "`source' must be a directory, but was a " \ "`#{File.ftype(source)}'! If you just want to sync a file, use " \ "the `copy' method instead." end source_files = all_files_under(source, options) # Ensure the destination directory exists FileUtils.mkdir_p(destination) unless File.directory?(destination) # Copy over the filtered source files source_files.each do |source_file| relative_path = relative_path_for(source_file, source) # Create the parent directory parent = File.join(destination, File.dirname(relative_path)) FileUtils.mkdir_p(parent) unless File.directory?(parent) case File.ftype(source_file).to_sym when :directory FileUtils.mkdir_p("#{destination}/#{relative_path}") when :link target = File.readlink(source_file) Dir.chdir(destination) do FileUtils.ln_sf(target, "#{destination}/#{relative_path}") end when :file source_stat = File.stat(source_file) # Detect 'files' which are hard links and use ln instead of cp to # duplicate them, provided their source is in place already if hardlink? source_stat if existing = hardlink_sources[[source_stat.dev, source_stat.ino]] FileUtils.ln(existing, "#{destination}/#{relative_path}", force: true) else begin FileUtils.cp(source_file, "#{destination}/#{relative_path}") rescue Errno::EACCES FileUtils.cp_r(source_file, "#{destination}/#{relative_path}", remove_destination: true) end hardlink_sources.store([source_stat.dev, source_stat.ino], "#{destination}/#{relative_path}") end else # First attempt a regular copy. If we don't have write # permission on the File, open will probably fail with # EACCES (making it hard to sync files with permission # r--r--r--). Rescue this error and use cp_r's # :remove_destination option. 
begin FileUtils.cp(source_file, "#{destination}/#{relative_path}") rescue Errno::EACCES FileUtils.cp_r(source_file, "#{destination}/#{relative_path}", remove_destination: true) end end else raise "Unknown file type: `File.ftype(source_file)' at `#{source_file}'!" end end # Remove any files in the destination that are not in the source files destination_files = glob("#{destination}/**/*") # Calculate the relative paths of files so we can compare to the # source. relative_source_files = source_files.map do |file| relative_path_for(file, source) end relative_destination_files = destination_files.map do |file| relative_path_for(file, destination) end # Remove any extra files that are present in the destination, but are # not in the source list extra_files = relative_destination_files - relative_source_files extra_files.each do |file| FileUtils.rm_rf(File.join(destination, file)) end true end
ruby
{ "resource": "" }
q26334
Omnibus.FileSyncer.relative_path_for
test
# Returns `path` expressed relative to `parent` as a String.
def relative_path_for(path, parent)
  child_path  = Pathname.new(path)
  parent_path = Pathname.new(parent)
  child_path.relative_path_from(parent_path).to_s
end
ruby
{ "resource": "" }
q26335
Omnibus.Compressor::DMG.clean_disks
test
# Detaches any disks still mounted under /Volumes/<volume_name> from a
# previous (possibly failed) dmg build.
def clean_disks
  log.info(log_key) { "Cleaning previously mounted disks" }
  mounted = shellout!("mount | grep \"/Volumes/#{volume_name}\" | awk '{print $1}'")
  mounted.stdout.lines.each do |raw|
    disk = raw.chomp
    Omnibus.logger.debug(log_key) do
      "Detaching disk `#{disk}' before starting dmg packaging."
    end
    shellout!("hdiutil detach '#{disk}'")
  end
end
ruby
{ "resource": "" }
q26336
Omnibus.Compressor::DMG.copy_assets_to_dmg
test
# Copies every file from the resources directory into the mounted dmg volume.
def copy_assets_to_dmg
  log.info(log_key) { "Copying assets into dmg" }
  volume_path = "/Volumes/#{volume_name}"
  FileSyncer.glob("#{resources_dir}/*").each do |asset|
    FileUtils.cp_r(asset, volume_path)
  end
end
ruby
{ "resource": "" }
q26337
Omnibus.Packager::IPS.write_transform_file
test
# Renders the IPS doc-transform file; pathdir is the first component of
# the install dir (e.g. "opt" for /opt/project).
def write_transform_file
  root_component = project.install_dir.split("/")[1]
  render_template(resource_path("doc-transform.erb"),
    destination: transform_file,
    variables: { pathdir: root_component }
  )
end
ruby
{ "resource": "" }
q26338
Omnibus.Packager::IPS.write_pkg_metadata
test
# Renders the IPS package manifest, appends rendered symlink actions when
# a symlinks file is configured, and logs the final manifest contents.
def write_pkg_metadata
  render_template(resource_path("gen.manifestfile.erb"),
    destination: pkg_metadata_file,
    variables: {
      name: safe_base_package_name,
      fmri_package_name: fmri_package_name,
      description: project.description,
      summary: project.friendly_name,
      arch: safe_architecture,
    }
  )
  # Append the contents of symlinks_file if it exists
  if symlinks_file
    File.open(pkg_metadata_file, "a") { |manifest| manifest.write(render_symlinks) }
  end
  # Print the full contents of the rendered template file to generate package contents
  log.debug(log_key) { "Rendered Template:\n" + File.read(pkg_metadata_file) }
end
ruby
{ "resource": "" }
q26339
Omnibus.HealthCheck.health_check_otool
test
# Scans otool -L output for .dylib/.bundle files under the install dir and
# collects linked libraries that fail the whitelist check.
def health_check_otool
  current_library = nil
  bad_libs = {}
  command = "find #{project.install_dir}/ -type f | egrep '\.(dylib|bundle)$' | xargs otool -L"
  read_shared_libs(command) do |line|
    if line =~ /^(.+):$/
      # A new object file's header line.
      current_library = Regexp.last_match[1]
    elsif line =~ /^\s+(.+) \(.+\)$/
      linked = Regexp.last_match[1]
      name = File.basename(linked)
      bad_libs = check_for_bad_library(bad_libs, current_library, name, linked)
    end
  end
  bad_libs
end
ruby
{ "resource": "" }
q26340
Omnibus.HealthCheck.health_check_aix
test
# Runs ldd over AIX XCOFF executables under the install dir and collects
# dependencies that fail the whitelist check, keyed by library path.
def health_check_aix
  current_library = nil
  bad_libs = {}
  read_shared_libs("find #{project.install_dir}/ -type f | xargs file | grep \"RISC System\" | awk -F: '{print $1}' | xargs -n 1 ldd") do |line|
    case line
    when /^(.+) needs:$/
      current_library = Regexp.last_match[1]
      log.debug(log_key) { "Analyzing dependencies for #{current_library}" }
    when /^\s+(.+)$/
      # AIX ldd prints only the path; it serves as both name and link target.
      name = Regexp.last_match[1]
      linked = Regexp.last_match[1]
      bad_libs = check_for_bad_library(bad_libs, current_library, name, linked)
    when /File is not an executable XCOFF file/
      # ignore non-executable files
    else
      log.warn(log_key) { "Line did not match for #{current_library}\n#{line}" }
    end
  end
  bad_libs
end
ruby
{ "resource": "" }
q26341
Omnibus.HealthCheck.health_check_ldd
test
# Runs ldd over files under the install dir (excluding ignored endings and
# path patterns, assembled into a posix-extended find regex) and collects
# dependencies that fail the whitelist check.
def health_check_ldd
  # Build an exclusion regex from the configured endings and path patterns.
  regexp_ends = ".*(" + IGNORED_ENDINGS.map { |e| e.gsub(/\./, '\.') }.join("|") + ")$"
  regexp_patterns = IGNORED_PATTERNS.map { |e| ".*" + e.gsub(/\//, '\/') + ".*" }.join("|")
  regexp = regexp_ends + "|" + regexp_patterns
  current_library = nil
  bad_libs = {}
  read_shared_libs("find #{project.install_dir}/ -type f -regextype posix-extended ! -regex '#{regexp}' | xargs ldd") do |line|
    case line
    when /^(.+):$/
      current_library = Regexp.last_match[1]
      log.debug(log_key) { "Analyzing dependencies for #{current_library}" }
    when /^\s+(.+) \=\>\s+(.+)( \(.+\))?$/
      name = Regexp.last_match[1]
      linked = Regexp.last_match[2]
      bad_libs = check_for_bad_library(bad_libs, current_library, name, linked)
    when /^\s+(.+) \(.+\)$/
      next
    when /^\s+statically linked$/
      next
    when /^\s+libjvm.so/
      next
    when /^\s+libjava.so/
      next
    when /^\s+libmawt.so/
      next
    when /^\s+not a dynamic executable$/
      # ignore non-executable files
    else
      log.warn(log_key) do
        "Line did not match for #{current_library}\n#{line}"
      end
    end
  end
  bad_libs
end
ruby
{ "resource": "" }
q26342
Omnibus.HealthCheck.read_shared_libs
test
# Runs `command` and yields each line of its stdout to the block.
def read_shared_libs(command)
  shellout(command).stdout.each_line { |line| yield line }
end
ruby
{ "resource": "" }
q26343
Omnibus.HealthCheck.check_for_bad_library
test
# Checks one linked dependency against the platform whitelist and the
# whitelist_files patterns; records a failure count in bad_libs when the
# dependency is neither whitelisted nor provided from the install dir.
# Returns the (possibly updated) bad_libs hash.
def check_for_bad_library(bad_libs, current_library, name, linked)
  safe = nil
  # Select the library whitelist for the running platform.
  whitelist_libs = case Ohai["platform"]
                   when "arch"
                     ARCH_WHITELIST_LIBS
                   when "mac_os_x"
                     MAC_WHITELIST_LIBS
                   when "solaris2"
                     SOLARIS_WHITELIST_LIBS
                   when "smartos"
                     SMARTOS_WHITELIST_LIBS
                   when "freebsd"
                     FREEBSD_WHITELIST_LIBS
                   when "aix"
                     AIX_WHITELIST_LIBS
                   else
                     WHITELIST_LIBS
                   end
  whitelist_libs.each do |reg|
    safe ||= true if reg.match(name)
  end
  whitelist_files.each do |reg|
    safe ||= true if reg.match(current_library)
  end
  log.debug(log_key) { " --> Dependency: #{name}" }
  log.debug(log_key) { " --> Provided by: #{linked}" }
  if !safe && linked !~ Regexp.new(project.install_dir)
    log.debug(log_key) { " -> FAILED: #{current_library} has unsafe dependencies" }
    bad_libs[current_library] ||= {}
    bad_libs[current_library][name] ||= {}
    if bad_libs[current_library][name].key?(linked)
      bad_libs[current_library][name][linked] += 1
    else
      bad_libs[current_library][name][linked] = 1
    end
  else
    log.debug(log_key) { " -> PASSED: #{name} is either whitelisted or safely provided." }
  end
  bad_libs
end
ruby
{ "resource": "" }
q26344
Omnibus.Digestable.digest
test
# Returns the hex digest of the file at `path` using the given algorithm.
def digest(path, type = :md5)
  hasher = digest_from_type(type)
  update_with_file_contents(hasher, path)
  hasher.hexdigest
end
ruby
{ "resource": "" }
q26345
Omnibus.Digestable.update_with_file_contents
test
# Streams the file into the digest in 8 KB chunks to bound memory use.
def update_with_file_contents(digest, filename)
  File.open(filename) do |io|
    until (chunk = io.read(1024 * 8)).nil?
      digest.update(chunk)
    end
  end
end
ruby
{ "resource": "" }
q26346
Omnibus.Publisher.packages
test
# Memoized list of packages to publish. Without platform mappings this is
# simply the glob matches; with @options[:platform_mappings] each build
# platform's packages are duplicated per publish platform with their
# metadata's platform/platform_version overridden.
# NOTE(review): `rpartition("-") - %w{ - }` splits "ubuntu-12.04" into
# ["ubuntu", "12.04"] by dropping the separator element.
def packages @packages ||= begin publish_packages = Array.new build_packages = FileSyncer.glob(@pattern).map { |path| Package.new(path) } if @options[:platform_mappings] # the platform map is a simple hash with publish to build platform mappings @options[:platform_mappings].each_pair do |build_platform, publish_platforms| # Splits `ubuntu-12.04` into `ubuntu` and `12.04` build_platform, build_platform_version = build_platform.rpartition("-") - %w{ - } # locate the package for the build platform packages = build_packages.select do |p| p.metadata[:platform] == build_platform && p.metadata[:platform_version] == build_platform_version end if packages.empty? log.warn(log_key) do "Could not locate a package for build platform #{build_platform}-#{build_platform_version}. " \ "Publishing will be skipped for: #{publish_platforms.join(', ')}" end end publish_platforms.each do |publish_platform| publish_platform, publish_platform_version = publish_platform.rpartition("-") - %w{ - } packages.each do |p| # create a copy of our package before mucking with its metadata publish_package = p.dup publish_metadata = p.metadata.dup.to_hash # override the platform and platform version in the metadata publish_metadata[:platform] = publish_platform publish_metadata[:platform_version] = publish_platform_version # Set the updated metadata on the package object publish_package.metadata = Metadata.new(publish_package, publish_metadata) publish_packages << publish_package end end end else publish_packages.concat(build_packages) end if publish_packages.empty? log.info(log_key) { "No packages found, skipping publish" } end publish_packages end end
ruby
{ "resource": "" }
q26347
Omnibus.Packager::PKG.write_distribution_file
test
# Renders the PKG Distribution XML into the staging directory (mode 0600).
def write_distribution_file
  template_vars = {
    friendly_name: project.friendly_name,
    identifier: safe_identifier,
    version: safe_version,
    component_pkg: component_pkg,
  }
  render_template(resource_path("distribution.xml.erb"),
    destination: "#{staging_dir}/Distribution",
    mode: 0600,
    variables: template_vars
  )
end
ruby
{ "resource": "" }
q26348
Omnibus.ArtifactoryPublisher.artifact_for
test
# Wraps a package (or metadata file) in an Artifactory Artifact resource,
# preferring precomputed checksums from metadata when available.
def artifact_for(artifact)
  has_metadata = artifact.respond_to?(:metadata)
  md5  = has_metadata ? artifact.metadata[:md5]  : digest(artifact.path, :md5)
  sha1 = has_metadata ? artifact.metadata[:sha1] : digest(artifact.path, :sha1)
  Artifactory::Resource::Artifact.new(
    local_path: artifact.path,
    client: client,
    checksums: {
      "md5" => md5,
      "sha1" => sha1,
    }
  )
end
ruby
{ "resource": "" }
q26349
Omnibus.ArtifactoryPublisher.build_for
test
# Builds the Artifactory Build resource for a set of packages: recovers the
# version manifest from the first package's metadata (falling back to a
# minimal Manifest when absent) and lists each package plus its metadata
# file as module artifacts.
def build_for(packages) metadata = packages.first.metadata name = metadata[:name] # Attempt to load the version manifest data from the packages metadata manifest = if version_manifest = metadata[:version_manifest] Manifest.from_hash(version_manifest) else Manifest.new( metadata[:version], # we already know the `version_manifest` entry is # missing so we can't pull in the `build_git_revision` nil, metadata[:license] ) end # Upload the actual package log.info(log_key) { "Saving build info for #{name}, Build ##{manifest.build_version}" } Artifactory::Resource::Build.new( client: client, name: name, number: manifest.build_version, vcs_revision: manifest.build_git_revision, build_agent: { name: "omnibus", version: Omnibus::VERSION, }, modules: [ { # com.getchef:chef-server:12.0.0 id: [ Config.artifactory_base_path.tr("/", "."), name, manifest.build_version, ].join(":"), artifacts: packages.map do |package| [ { type: File.extname(package.path).split(".").last, sha1: package.metadata[:sha1], md5: package.metadata[:md5], name: package.metadata[:basename], }, { type: File.extname(package.metadata.path).split(".").last, sha1: digest(package.metadata.path, :sha1), md5: digest(package.metadata.path, :md5), name: File.basename(package.metadata.path), }, ] end.flatten, }, ] ) end
ruby
{ "resource": "" }
q26350
Omnibus.ArtifactoryPublisher.client
test
# Lazily builds and memoizes the Artifactory HTTP client, configured
# entirely from Omnibus Config (endpoint, credentials, SSL, proxy).
def client
  @client ||= Artifactory::Client.new(
    endpoint: Config.artifactory_endpoint,
    username: Config.artifactory_username,
    password: Config.artifactory_password,
    ssl_pem_file: Config.artifactory_ssl_pem_file,
    ssl_verify: Config.artifactory_ssl_verify,
    proxy_username: Config.artifactory_proxy_username,
    proxy_password: Config.artifactory_proxy_password,
    proxy_address: Config.artifactory_proxy_address,
    proxy_port: Config.artifactory_proxy_port
  )
end
ruby
{ "resource": "" }
q26351
Omnibus.ArtifactoryPublisher.metadata_properties_for
test
# Builds the Artifactory property map for a package from its metadata,
# adding build coordinates when a build record is being published.
def metadata_properties_for(package)
  meta = package.metadata
  properties = {
    "omnibus.project" => meta[:name],
    "omnibus.platform" => meta[:platform],
    "omnibus.platform_version" => meta[:platform_version],
    "omnibus.architecture" => meta[:arch],
    "omnibus.version" => meta[:version],
    "omnibus.iteration" => meta[:iteration],
    "omnibus.license" => meta[:license],
    "omnibus.md5" => meta[:md5],
    "omnibus.sha1" => meta[:sha1],
    "omnibus.sha256" => meta[:sha256],
    "omnibus.sha512" => meta[:sha512],
    "md5" => meta[:md5],
    "sha1" => meta[:sha1],
    "sha256" => meta[:sha256],
    "sha512" => meta[:sha512],
  }
  if build_record?
    properties["build.name"] = meta[:name]
    properties["build.number"] = meta[:version]
  end
  properties
end
ruby
{ "resource": "" }
q26352
Omnibus.ArtifactoryPublisher.remote_path_for
test
# Computes the remote repository path by filling the publish pattern with
# the package's metadata and prefixing the configured base path.
def remote_path_for(package)
  relative = Config.artifactory_publish_pattern % package.metadata
  File.join(Config.artifactory_base_path, relative)
end
ruby
{ "resource": "" }
q26353
Omnibus.Packager::MSI.parameters
test
# DSL getter/setter for template parameters: with no argument returns the
# stored Hash (or {}); with a Hash argument stores it.
#
# @raise [InvalidValue] when the argument is not a Hash
def parameters(val = NULL)
  return @parameters || {} if null?(val)
  raise InvalidValue.new(:parameters, "be a Hash") unless val.is_a?(Hash)
  @parameters = val
end
ruby
{ "resource": "" }
q26354
Omnibus.Packager::MSI.wix_light_extension
test
# Registers a WiX light extension by name.
#
# @raise [InvalidValue] when the argument is not a String
def wix_light_extension(extension)
  raise InvalidValue.new(:wix_light_extension, "be an String") unless extension.is_a?(String)
  wix_light_extensions << extension
end
ruby
{ "resource": "" }
q26355
Omnibus.Packager::MSI.wix_light_delay_validation
test
def wix_light_delay_validation(val = false) unless val.is_a?(TrueClass) || val.is_a?(FalseClass) raise InvalidValue.new(:iwix_light_delay_validation, "be TrueClass or FalseClass") end @delay_validation ||= val unless @delay_validation return "" end "-sval" end
ruby
{ "resource": "" }
q26356
Omnibus.Packager::MSI.wix_candle_extension
test
def wix_candle_extension(extension) unless extension.is_a?(String) raise InvalidValue.new(:wix_candle_extension, "be an String") end wix_candle_extensions << extension end
ruby
{ "resource": "" }
q26357
Omnibus.Packager::MSI.write_localization_file
test
def write_localization_file render_template(resource_path("localization-#{localization}.wxl.erb"), destination: "#{staging_dir}/localization-#{localization}.wxl", variables: { name: project.package_name, friendly_name: project.friendly_name, maintainer: project.maintainer, } ) end
ruby
{ "resource": "" }
q26358
Omnibus.Packager::MSI.write_parameters_file
test
def write_parameters_file render_template(resource_path("parameters.wxi.erb"), destination: "#{staging_dir}/parameters.wxi", variables: { name: project.package_name, friendly_name: project.friendly_name, maintainer: project.maintainer, upgrade_code: upgrade_code, parameters: parameters, version: windows_package_version, display_version: msi_display_version, } ) end
ruby
{ "resource": "" }
q26359
Omnibus.Packager::MSI.write_source_file
test
def write_source_file paths = [] # Remove C:/ install_dir = project.install_dir.split("/")[1..-1].join("/") # Grab all parent paths Pathname.new(install_dir).ascend do |path| paths << path.to_s end # Create the hierarchy hierarchy = paths.reverse.inject({}) do |hash, path| hash[File.basename(path)] = path.gsub(/[^[:alnum:]]/, "").upcase + "LOCATION" hash end # The last item in the path MUST be named PROJECTLOCATION or else space # robots will cause permanent damage to you and your family. hierarchy[hierarchy.keys.last] = "PROJECTLOCATION" # If the path hierarchy is > 1, the customizable installation directory # should default to the second-to-last item in the hierarchy. If the # hierarchy is smaller than that, then just use the system drive. wix_install_dir = if hierarchy.size > 1 hierarchy.to_a[-2][1] else "WINDOWSVOLUME" end render_template(resource_path("source.wxs.erb"), destination: "#{staging_dir}/source.wxs", variables: { name: project.package_name, friendly_name: project.friendly_name, maintainer: project.maintainer, hierarchy: hierarchy, fastmsi: fast_msi, wix_install_dir: wix_install_dir, } ) end
ruby
{ "resource": "" }
q26360
Omnibus.Packager::MSI.write_bundle_file
test
def write_bundle_file render_template(resource_path("bundle.wxs.erb"), destination: "#{staging_dir}/bundle.wxs", variables: { name: project.package_name, friendly_name: project.friendly_name, maintainer: project.maintainer, upgrade_code: upgrade_code, parameters: parameters, version: windows_package_version, display_version: msi_display_version, msi: windows_safe_path(Config.package_dir, msi_name), } ) end
ruby
{ "resource": "" }
q26361
Omnibus.BuildVersionDSL.resolve
test
def resolve(dependency) if from_dependency? && version_dependency == dependency.name construct_build_version(dependency) log.info(log_key) { "Build Version is set to '#{build_version}'" } end end
ruby
{ "resource": "" }
q26362
Omnibus.BuildVersionDSL.maybe_append_timestamp
test
def maybe_append_timestamp(version) if Config.append_timestamp && !has_timestamp?(version) [version, Omnibus::BuildVersion.build_start_time].join("+") else version end end
ruby
{ "resource": "" }
q26363
Omnibus.BuildVersionDSL.has_timestamp?
test
def has_timestamp?(version) _ver, build_info = version.split("+") return false if build_info.nil? build_info.split(".").any? do |part| begin Time.strptime(part, Omnibus::BuildVersion::TIMESTAMP_FORMAT) true rescue ArgumentError false end end end
ruby
{ "resource": "" }
q26364
Omnibus.BuildVersionDSL.construct_build_version
test
def construct_build_version(version_source = nil) case source_type when :git version = if version_source Omnibus::BuildVersion.new(version_source.project_dir) else Omnibus::BuildVersion.new end output = output_method || :semver self.build_version = version.send(output) when :version if version_source self.build_version = version_source.version else raise "Please tell me the source to get the version from" end else raise "I don't know how to construct a build_version using source '#{source_type}'" end end
ruby
{ "resource": "" }
q26365
Omnibus.Templating.render_template_content
test
def render_template_content(source, variables = {}) template = ERB.new(File.read(source), nil, "-") struct = if variables.empty? Struct.new("Empty") else Struct.new(*variables.keys).new(*variables.values) end template.result(struct.instance_eval { binding }) end
ruby
{ "resource": "" }
q26366
Omnibus.Logger.deprecated
test
def deprecated(progname, &block) meta = Proc.new { "DEPRECATED: #{yield}" } add(LEVELS.index("WARN"), progname, &meta) end
ruby
{ "resource": "" }
q26367
Omnibus.Logger.add
test
def add(severity, progname, &block) return true if io.nil? || severity < level message = format_message(severity, progname, yield) MUTEX.synchronize { io.write(message) } true end
ruby
{ "resource": "" }
q26368
Omnibus.Builder.command
test
def command(command, options = {}) warn_for_shell_commands(command) build_commands << BuildCommand.new("Execute: `#{command}'") do shellout!(command, options) end end
ruby
{ "resource": "" }
q26369
Omnibus.Builder.make
test
def make(*args) options = args.last.is_a?(Hash) ? args.pop : {} make = options.delete(:bin) || # Prefer gmake on non-windows environments. if !windows? && Omnibus.which("gmake") env = options.delete(:env) || {} env = { "MAKE" => "gmake" }.merge(env) options[:env] = env "gmake" else "make" end options[:in_msys_bash] = true make_cmd = ([make] + args).join(" ").strip command(make_cmd, options) end
ruby
{ "resource": "" }
q26370
Omnibus.Builder.appbundle
test
def appbundle(software_name, lockdir: nil, gem: nil, without: nil, extra_bin_files: nil , **options) build_commands << BuildCommand.new("appbundle `#{software_name}'") do bin_dir = "#{install_dir}/bin" appbundler_bin = embedded_bin("appbundler") lockdir ||= begin app_software = project.softwares.find do |p| p.name == software_name end if app_software.nil? raise "could not find software definition for #{software_name}, add a dependency to it, or pass a lockdir argument to appbundle command." end app_software.project_dir end command = [ appbundler_bin, "'#{lockdir}'", "'#{bin_dir}'" ] # This option is almost entirely for support of ChefDK and enables transitive gemfile lock construction in order # to be able to decouple the dev gems for all the different components of ChefDK. AKA: don't use it outside of # ChefDK. You should also explicitly specify the lockdir when going down this road. command << [ "'#{gem}'" ] if gem # FIXME: appbundler lacks support for this argument when not also specifying the gem (2-arg appbundling lacks support) # (if you really need this bug fixed, though, fix it in appbundler, don't try using the 3-arg version to try to # get `--without` support, you will likely wind up going down a sad path). command << [ "--without", without.join(",") ] unless without.nil? command << [ "--extra-bin-files", extra_bin_files.join(",") ] unless extra_bin_files.nil? || extra_bin_files.empty? # Ensure the main bin dir exists FileUtils.mkdir_p(bin_dir) shellout!(command.join(" "), options) end end
ruby
{ "resource": "" }
q26371
Omnibus.Builder.rake
test
def rake(command, options = {}) build_commands << BuildCommand.new("rake `#{command}'") do bin = embedded_bin("rake") shellout!("#{bin} #{command}", options) end end
ruby
{ "resource": "" }
q26372
Omnibus.Builder.touch
test
def touch(file, options = {}) build_commands << BuildCommand.new("touch `#{file}'") do Dir.chdir(software.project_dir) do parent = File.dirname(file) FileUtils.mkdir_p(parent) unless File.directory?(parent) FileUtils.touch(file, options) end end end
ruby
{ "resource": "" }
q26373
Omnibus.Builder.delete
test
def delete(path, options = {}) build_commands << BuildCommand.new("delete `#{path}'") do Dir.chdir(software.project_dir) do FileSyncer.glob(path).each do |file| FileUtils.rm_rf(file, options) end end end end
ruby
{ "resource": "" }
q26374
Omnibus.Builder.copy
test
def copy(source, destination, options = {}) command = "copy `#{source}' to `#{destination}'" build_commands << BuildCommand.new(command) do Dir.chdir(software.project_dir) do files = FileSyncer.glob(source) if files.empty? log.warn(log_key) { "no matched files for glob #{command}" } else files.each do |file| FileUtils.cp_r(file, destination, options) end end end end end
ruby
{ "resource": "" }
q26375
Omnibus.Builder.update_config_guess
test
def update_config_guess(target: ".", install: [:config_guess, :config_sub]) build_commands << BuildCommand.new("update_config_guess `target: #{target} install: #{install.inspect}'") do config_guess_dir = "#{install_dir}/embedded/lib/config_guess" %w{config.guess config.sub}.each do |c| unless File.exist?(File.join(config_guess_dir, c)) raise "Can not find #{c}. Make sure you add a dependency on 'config_guess' in your software definition" end end destination = File.join(software.project_dir, target) FileUtils.mkdir_p(destination) FileUtils.cp_r("#{config_guess_dir}/config.guess", destination) if install.include? :config_guess FileUtils.cp_r("#{config_guess_dir}/config.sub", destination) if install.include? :config_sub end end
ruby
{ "resource": "" }
q26376
Omnibus.Compressor::TGZ.write_tgz
test
def write_tgz # Grab the contents of the gzipped tarball for reading contents = gzipped_tarball # Write the .tar.gz into the staging directory File.open("#{staging_dir}/#{package_name}", "wb") do |tgz| while chunk = contents.read(1024) tgz.write(chunk) end end # Copy the .tar.gz into the package directory FileSyncer.glob("#{staging_dir}/*.tar.gz").each do |tgz| copy_file(tgz, Config.package_dir) end end
ruby
{ "resource": "" }
q26377
Omnibus.Compressor::TGZ.tarball
test
def tarball tarfile = StringIO.new("") Gem::Package::TarWriter.new(tarfile) do |tar| path = "#{staging_dir}/#{packager.package_name}" name = packager.package_name mode = File.stat(path).mode tar.add_file(name, mode) do |tf| File.open(path, "rb") do |file| tf.write(file.read) end end end tarfile.rewind tarfile end
ruby
{ "resource": "" }
q26378
Omnibus.NetFetcher.clean
test
def clean needs_cleaning = File.exist?(project_dir) if needs_cleaning log.info(log_key) { "Cleaning project directory `#{project_dir}'" } FileUtils.rm_rf(project_dir) end create_required_directories deploy needs_cleaning end
ruby
{ "resource": "" }
q26379
Omnibus.NetFetcher.deploy
test
def deploy if downloaded_file.end_with?(*ALL_EXTENSIONS) log.info(log_key) { "Extracting `#{safe_downloaded_file}' to `#{safe_project_dir}'" } extract else log.info(log_key) { "`#{safe_downloaded_file}' is not an archive - copying to `#{safe_project_dir}'" } if File.directory?(downloaded_file) # If the file itself was a directory, copy the whole thing over. This # seems unlikely, because I do not think it is a possible to download # a folder, but better safe than sorry. FileUtils.cp_r("#{downloaded_file}/.", project_dir) else # In the more likely case that we got a "regular" file, we want that # file to live **inside** the project directory. project_dir should already # exist due to create_required_directories FileUtils.cp(downloaded_file, project_dir) end end end
ruby
{ "resource": "" }
q26380
Omnibus.NetFetcher.extract
test
def extract # Only used by tar compression_switch = "" compression_switch = "z" if downloaded_file.end_with?("gz") compression_switch = "--lzma -" if downloaded_file.end_with?("lzma") compression_switch = "j" if downloaded_file.end_with?("bz2") compression_switch = "J" if downloaded_file.end_with?("xz") if Ohai["platform"] == "windows" if downloaded_file.end_with?(*TAR_EXTENSIONS) && source[:extract] != :seven_zip returns = [0] returns << 1 if source[:extract] == :lax_tar shellout!("tar #{compression_switch}xf #{safe_downloaded_file} -C#{safe_project_dir}", returns: returns) elsif downloaded_file.end_with?(*COMPRESSED_TAR_EXTENSIONS) Dir.mktmpdir do |temp_dir| log.debug(log_key) { "Temporarily extracting `#{safe_downloaded_file}' to `#{temp_dir}'" } shellout!("7z.exe x #{safe_downloaded_file} -o#{windows_safe_path(temp_dir)} -r -y") fname = File.basename(downloaded_file, File.extname(downloaded_file)) fname << ".tar" if downloaded_file.end_with?("tgz", "txz") next_file = windows_safe_path(File.join(temp_dir, fname)) log.debug(log_key) { "Temporarily extracting `#{next_file}' to `#{safe_project_dir}'" } shellout!("7z.exe x #{next_file} -o#{safe_project_dir} -r -y") end else shellout!("7z.exe x #{safe_downloaded_file} -o#{safe_project_dir} -r -y") end elsif downloaded_file.end_with?(".7z") shellout!("7z x #{safe_downloaded_file} -o#{safe_project_dir} -r -y") elsif downloaded_file.end_with?(".zip") shellout!("unzip #{safe_downloaded_file} -d #{safe_project_dir}") else shellout!("#{tar} #{compression_switch}xf #{safe_downloaded_file} -C#{safe_project_dir}") end end
ruby
{ "resource": "" }
q26381
Omnibus.NetFetcher.digest_type
test
def digest_type DIGESTS.each do |digest| return digest if source.key? digest end raise ChecksumMissing.new(self) end
ruby
{ "resource": "" }
q26382
Omnibus.NetFetcher.verify_checksum!
test
def verify_checksum! log.info(log_key) { "Verifying checksum" } expected = checksum actual = digest(downloaded_file, digest_type) if expected != actual raise ChecksumMismatch.new(self, expected, actual) end end
ruby
{ "resource": "" }
q26383
Omnibus.Packager::WindowsBase.signing_identity
test
def signing_identity(thumbprint = NULL, params = NULL) unless null?(thumbprint) @signing_identity = {} unless thumbprint.is_a?(String) raise InvalidValue.new(:signing_identity, "be a String") end @signing_identity[:thumbprint] = thumbprint if !null?(params) unless params.is_a?(Hash) raise InvalidValue.new(:params, "be a Hash") end valid_keys = [:store, :timestamp_servers, :machine_store, :algorithm] invalid_keys = params.keys - valid_keys unless invalid_keys.empty? raise InvalidValue.new(:params, "contain keys from [#{valid_keys.join(', ')}]. "\ "Found invalid keys [#{invalid_keys.join(', ')}]") end if !params[:machine_store].nil? && !( params[:machine_store].is_a?(TrueClass) || params[:machine_store].is_a?(FalseClass)) raise InvalidValue.new(:params, "contain key :machine_store of type TrueClass or FalseClass") end else params = {} end @signing_identity[:store] = params[:store] || "My" @signing_identity[:algorithm] = params[:algorithm] || "SHA256" servers = params[:timestamp_servers] || DEFAULT_TIMESTAMP_SERVERS @signing_identity[:timestamp_servers] = [servers].flatten @signing_identity[:machine_store] = params[:machine_store] || false end @signing_identity end
ruby
{ "resource": "" }
q26384
Omnibus.Packager::WindowsBase.sign_package
test
def sign_package(package_file) success = false timestamp_servers.each do |ts| success = try_sign(package_file, ts) break if success end raise FailedToSignWindowsPackage.new if !success end
ruby
{ "resource": "" }
q26385
Omnibus.Packager::WindowsBase.certificate_subject
test
def certificate_subject return "CN=#{project.package_name}" unless signing_identity store = machine_store? ? "LocalMachine" : "CurrentUser" cmd = Array.new.tap do |arr| arr << "powershell.exe" arr << "-ExecutionPolicy Bypass" arr << "-NoProfile" arr << "-Command (Get-Item Cert:/#{store}/#{cert_store_name}/#{thumbprint}).Subject" end.join(" ") shellout!(cmd).stdout.strip end
ruby
{ "resource": "" }
q26386
Omnibus.Software.manifest_entry
test
def manifest_entry @manifest_entry ||= if manifest log.info(log_key) { "Using user-supplied manifest entry for #{name}" } manifest.entry_for(name) else log.info(log_key) { "Resolving manifest entry for #{name}" } to_manifest_entry end end
ruby
{ "resource": "" }
q26387
Omnibus.Software.source
test
def source(val = NULL) unless null?(val) unless val.is_a?(Hash) raise InvalidValue.new(:source, "be a kind of `Hash', but was `#{val.class.inspect}'") end val = canonicalize_source(val) extra_keys = val.keys - [ :git, :file, :path, :url, # fetcher types :md5, :sha1, :sha256, :sha512, # hash type - common to all fetchers :cookie, :warning, :unsafe, :extract, :cached_name, :authorization, # used by net_fetcher :options, # used by path_fetcher :submodules # used by git_fetcher ] unless extra_keys.empty? raise InvalidValue.new(:source, "only include valid keys. Invalid keys: #{extra_keys.inspect}") end duplicate_keys = val.keys & [:git, :file, :path, :url] unless duplicate_keys.size < 2 raise InvalidValue.new(:source, "not include duplicate keys. Duplicate keys: #{duplicate_keys.inspect}") end @source ||= {} @source.merge!(val) end override = canonicalize_source(overrides[:source]) apply_overrides(:source, override) end
ruby
{ "resource": "" }
q26388
Omnibus.Software.version
test
def version(val = NULL, &block) final_version = apply_overrides(:version) if block_given? if val.equal?(NULL) raise InvalidValue.new(:version, "pass a block when given a version argument") else if val == final_version # # Unfortunately we need to make a specific logic here for license files. # We support multiple calls `license_file` and we support overriding the # license files inside a version block. We can not differentiate whether # `license_file` is being called from a version block or not. So we need # to check if the license files are being overridden during the call to # block. # # If so we use the new set, otherwise we restore the old license files. # current_license_files = @license_files @license_files = [] yield new_license_files = @license_files if new_license_files.empty? @license_files = current_license_files end end end end return if final_version.nil? begin Chef::Sugar::Constraints::Version.new(final_version) rescue ArgumentError log.warn(log_key) do "Version #{final_version} for software #{name} was not parseable. " \ "Comparison methods such as #satisfies? will not be available for this version." end final_version end end
ruby
{ "resource": "" }
q26389
Omnibus.Software.whitelist_file
test
def whitelist_file(file) file = Regexp.new(file) unless file.kind_of?(Regexp) whitelist_files << file whitelist_files.dup end
ruby
{ "resource": "" }
q26390
Omnibus.Software.project_file
test
def project_file if fetcher && fetcher.is_a?(NetFetcher) log.deprecated(log_key) do "project_file (DSL). This is a property of the NetFetcher and will " \ "not be publically exposed in the next major release. In general, " \ "you should not be using this method in your software definitions " \ "as it is an internal implementation detail of the NetFetcher. If " \ "you disagree with this statement, you should open an issue on the " \ "Omnibus repository on GitHub an explain your use case. For now, " \ "I will return the path to the downloaded file on disk, but please " \ "rethink the problem you are trying to solve :)." end fetcher.downloaded_file else log.warn(log_key) do "Cannot retrieve a `project_file' for software `#{name}'. This " \ "attribute is actually an internal representation that is unique " \ "to the NetFetcher class and requires the use of a `source' " \ "attribute that is declared using a `:url' key. For backwards-" \ "compatability, I will return `nil', but this is most likely not " \ "your desired behavior." end nil end end
ruby
{ "resource": "" }
q26391
Omnibus.Software.prepend_path
test
def prepend_path(*paths) path_values = Array(paths) path_values << ENV[path_key] separator = File::PATH_SEPARATOR || ":" path_values.join(separator) end
ruby
{ "resource": "" }
q26392
Omnibus.Software.overrides
test
def overrides if null?(@overrides) # lazily initialized because we need the 'name' to be parsed first @overrides = {} @overrides = project.overrides[name.to_sym].dup if project.overrides[name.to_sym] end @overrides end
ruby
{ "resource": "" }
q26393
Omnibus.Software.version_for_cache
test
def version_for_cache @version_for_cache ||= if fetcher.version_for_cache fetcher.version_for_cache elsif version version else log.warn(log_key) do "No version given! This is probably a bad thing. I am going to " \ "assume the version `0.0.0', but that is most certainly not your " \ "desired behavior. If git caching seems off, this is probably why." end "0.0.0" end end
ruby
{ "resource": "" }
q26394
Omnibus.Software.fetcher
test
def fetcher @fetcher ||= if source_type == :url && File.basename(source[:url], "?*").end_with?(*NetFetcher::ALL_EXTENSIONS) Fetcher.fetcher_class_for_source(source).new(manifest_entry, fetch_dir, build_dir) else Fetcher.fetcher_class_for_source(source).new(manifest_entry, project_dir, build_dir) end end
ruby
{ "resource": "" }
q26395
Omnibus.Software.shasum
test
def shasum @shasum ||= begin digest = Digest::SHA256.new update_with_string(digest, project.shasum) update_with_string(digest, builder.shasum) update_with_string(digest, name) update_with_string(digest, version_for_cache) update_with_string(digest, FFI_Yajl::Encoder.encode(overrides)) if filepath && File.exist?(filepath) update_with_file_contents(digest, filepath) else update_with_string(digest, "<DYNAMIC>") end digest.hexdigest end end
ruby
{ "resource": "" }
q26396
Omnibus.Software.canonicalize_source
test
def canonicalize_source(source) if source.is_a?(Hash) && source[:github] source = source.dup source[:git] = "https://github.com/#{source[:github]}.git" source.delete(:github) end source end
ruby
{ "resource": "" }
q26397
Omnibus.Packager::Makeself.write_makeselfinst
test
def write_makeselfinst makeselfinst_staging_path = File.join(staging_dir, "makeselfinst") render_template(resource_path("makeselfinst.erb"), destination: makeselfinst_staging_path, variables: { install_dir: project.install_dir, } ) FileUtils.chmod(0755, makeselfinst_staging_path) end
ruby
{ "resource": "" }
q26398
Omnibus.GitCache.create_cache_path
test
def create_cache_path if File.directory?(cache_path) false else create_directory(File.dirname(cache_path)) git_cmd("init -q") # On windows, git is very picky about single vs double quotes git_cmd("config --local user.name \"Omnibus Git Cache\"") git_cmd("config --local user.email \"omnibus@localhost\"") true end end
ruby
{ "resource": "" }
q26399
Omnibus.GitCache.tag
test
def tag return @tag if @tag log.internal(log_key) { "Calculating tag" } # Accumulate an array of all the software projects that come before # the name and version we are tagging. So if you have # # build_order = [ 1, 2, 3, 4, 5 ] # # And we are tagging 3, you would get dep_list = [ 1, 2 ] dep_list = software.project.library.build_order.take_while do |dep| if dep.name == software.name && dep.version == software.version false else true end end log.internal(log_key) { "dep_list: #{dep_list.map(&:name).inspect}" } # This is the list of all the unqiue shasums of all the software build # dependencies, including the on currently being acted upon. shasums = [dep_list.map(&:shasum), software.shasum].flatten suffix = Digest::SHA256.hexdigest(shasums.join("|")) @tag = "#{software.name}-#{suffix}-#{SERIAL_NUMBER}" log.internal(log_key) { "tag: #{@tag}" } @tag end
ruby
{ "resource": "" }