_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q23300
Humidifier.Stack.add
train
# Registers +resource+ in the stack under the logical +name+, applying
# any extra +attributes+ to it. Returns the resource itself.
def add(name, resource, attributes = {})
  resources[name] = resource
  resource.update_attributes(attributes) unless attributes.empty?
  resource
end
ruby
{ "resource": "" }
q23301
Humidifier.Stack.to_cf
train
# Serializes the stack's resources to CloudFormation, as pretty JSON
# (default) or YAML. Returns nil for an unknown serializer.
def to_cf(serializer = :json)
  combined = static_resources.merge(enumerable_resources)
  case serializer
  when :json then JSON.pretty_generate(combined)
  when :yaml then YAML.dump(combined)
  end
end
ruby
{ "resource": "" }
q23302
CukeSlicer.Slicer.slice
train
# Slices +target+ (a feature file or directory path) into test cases,
# applying +filters+ and returning them in +format+. Raises
# ArgumentError for invalid input or unparseable Gherkin.
def slice(target, filters = {}, format, &block)
  validate_target(target)
  validate_filters(filters)
  validate_format(format)

  begin
    target = if File.directory?(target)
               CukeModeler::Directory.new(target)
             else
               CukeModeler::FeatureFile.new(target)
             end
  rescue => e
    # Only parser problems are wrapped; everything else propagates.
    raise e unless e.message =~ /lexing|parsing/i
    raise(ArgumentError, "A syntax or lexing problem was encountered while trying to parse #{target}")
  end

  extractor = target.is_a?(CukeModeler::Directory) ? DirectoryExtractor.new : FileExtractor.new
  extractor.extract(target, filters, format, &block)
end
ruby
{ "resource": "" }
q23303
IIIF.OrderedHash.insert
train
# Inserts key/value at +index+, preserving insertion order. Negative
# indices count from the end the same way Array#insert does (-1
# appends). Raises IndexError when a negative index is out of range.
# Returns self.
def insert(index, key, value)
  tmp = IIIF::OrderedHash.new
  # Translate a negative index exactly like Array#insert.
  index = self.length + 1 + index if index < 0
  if index < 0
    m = "Index #{index} is too small for current length (#{length})"
    raise IndexError, m
  end
  # Move the first `index` entries into tmp so the new pair lands
  # between them and the remainder.
  # NOTE(review): deletes from self while iterating it — Ruby's Hash
  # tolerates delete-during-each, but confirm OrderedHash does too.
  if index > 0
    i=0
    self.each do |k,v|
      tmp[k] = v
      self.delete(k)
      i+=1
      break if i == index
    end
  end
  tmp[key] = value
  tmp.merge!(self) # copy the remaining to tmp
  self.clear # start over...
  self.merge!(tmp) # now put them all back
  self
end
ruby
{ "resource": "" }
q23304
IIIF.OrderedHash.remove_empties
train
# Deletes every key whose value is nil or an empty Array.
def remove_empties
  keys.each do |key|
    value = self[key]
    delete(key) if value.nil? || (value.kind_of?(Array) && value.empty?)
  end
end
ruby
{ "resource": "" }
q23305
IIIF.OrderedHash.camelize_keys
train
# Renames every key to lowerCamelCase (ActiveSupport String#camelize),
# re-inserting each renamed key at its original position via #insert.
# Returns self.
# NOTE(review): iterates a snapshot of keys while inserting/deleting;
# after the first rename, positions of later keys may have shifted —
# confirm order is preserved when several keys change.
def camelize_keys
  self.keys.each_with_index do |key, i|
    if key != key.camelize(:lower)
      self.insert(i, key.camelize(:lower), self[key])
      self.delete(key)
    end
  end
  self
end
ruby
{ "resource": "" }
q23306
IIIF.OrderedHash.snakeize_keys
train
# Inverse of #camelize_keys: renames every key to snake_case
# (ActiveSupport String#underscore), keeping each key's position.
# Returns self.
# NOTE(review): same iterate-while-mutating caveat as #camelize_keys.
def snakeize_keys
  self.keys.each_with_index do |key, i|
    if key != key.underscore
      self.insert(i, key.underscore, self[key])
      self.delete(key)
    end
  end
  self
end
ruby
{ "resource": "" }
q23307
KingDta.Dtazv.add_z
train
# Appends the DTAZV "Z" trailer record (fixed 256 bytes) to dta_string:
# summed whole-currency amounts plus the booking count.
def add_z(bookings)
  data3 = '0256'   # record length field
  data3 += 'Z'     # record type
  sum = 0
  bookings.each do |b|
    # divmod(100)[0] keeps only whole currency units (cents dropped).
    sum += b.value.divmod(100)[0]
  end
  data3 += '%015i' % sum
  data3 += '%015i' % bookings.count
  # Filler to reach 256 bytes.
  # NOTE(review): Ruby ignores the 0 flag for %s, so this pads with
  # spaces, not zeros — confirm that matches the DTAZV spec.
  data3 += '%0221s' % ''
  raise "DTAUS: Längenfehler Z (#{data3.size} <> 256)\n" if data3.size != 256
  dta_string << data3
end
ruby
{ "resource": "" }
q23308
IIIF.HashBehaviours.select
train
# Returns a new instance containing only the entries for which the
# block yields true; without a block, returns an empty instance.
def select
  filtered = self.class.new
  if block_given?
    @data.each { |key, val| filtered.data[key] = val if yield(key, val) }
  end
  filtered
end
ruby
{ "resource": "" }
q23309
KingDta.Account.bank_account_number=
train
# Validating setter: strips whitespace, rejects nil, values over 10
# digits and the literal '0'. Stored as an Integer (leading zeros are
# therefore dropped).
def bank_account_number=(number)
  raise ArgumentError.new('Bank account number cannot be nil') if number.nil?
  digits = "#{number}".gsub(/\s/, '')
  raise ArgumentError.new('Bank account number too long, max 10 allowed') if digits.length > 10
  raise ArgumentError.new('Bank account number cannot be 0') if digits == '0'
  @bank_account_number = digits.to_i
end
ruby
{ "resource": "" }
q23310
Scheduler.Base.time
train
# Lazily specializes itself on first call: redefines #time on the class
# to use Time.zone (Rails/ActiveSupport) when available, else Time.now,
# then delegates to the new definition. Subsequent calls skip the check.
def time
  if Time.respond_to?(:zone) && Time.zone
    self.class.send(:define_method, :time) { Time.zone.now.to_s }
  else
    self.class.send(:define_method, :time) { Time.now.to_s }
  end
  # Re-dispatch: now resolves to the freshly defined method.
  time
end
ruby
{ "resource": "" }
q23311
Bandit.RedisStorage.init
train
# Initializes +key+ to +value+ in redis only when it has no value yet;
# redis failures are absorbed by with_failure_grace (falls back to value).
def init(key, value)
  with_failure_grace(value) {
    @redis.set(key, value) if get(key, nil).nil?
  }
end
ruby
{ "resource": "" }
q23312
Bandit.RedisStorage.get
train
# Reads +key+ from redis, returning +default+ when unset. Numeric-looking
# strings are coerced to Integer.
# NOTE(review): String#numeric? is a project extension, not stdlib.
def get(key, default=0)
  with_failure_grace(default) {
    val = @redis.get(key)
    # `return` here exits #get itself, not just the block.
    return default if val.nil?
    val.numeric? ? val.to_i : val
  }
end
ruby
{ "resource": "" }
q23313
Bandit.ViewConcerns.bandit_session_choose
train
# Chooses an alternative for experiment +exp+ for this session: a URL
# param named bandit_<exp> overrides the signed cookie; the chosen
# alternative is written back to the signed cookie and returned.
def bandit_session_choose(exp)
  name = "bandit_#{exp}".intern
  # choose url param with preference
  value = params[name].nil? ? cookies.signed[name] : params[name]
  # choose with default, and set cookie
  cookies.signed[name] = Bandit.get_experiment(exp).choose(value)
end
ruby
{ "resource": "" }
q23314
Bandit.ViewConcerns.bandit_sticky_choose
train
# Sticky variant of bandit_session_choose: once a visitor holds a value
# that is still a valid alternative it is kept permanently, even if the
# experiment would now choose differently.
def bandit_sticky_choose(exp)
  name = "bandit_#{exp}".intern
  # choose url param with preference
  value = params[name].nil? ? cookies.signed[name] : params[name]
  # sticky choice may outlast a given alternative
  alternative = if Bandit.get_experiment(exp).alternatives.include?(value)
    value
  else
    Bandit.get_experiment(exp).choose(value)
  end
  # re-set cookie (permanent so the choice survives the session)
  cookies.permanent.signed[name] = alternative
end
ruby
{ "resource": "" }
q23315
RubyAem.Aem.sanitise_conf
train
# Fills in default connection settings and normalises values that may
# arrive as Strings (e.g. when supplied via environment variables).
def sanitise_conf(conf)
  { username: 'admin',
    password: 'admin',
    protocol: 'http',
    host: 'localhost',
    port: 4502,
    timeout: 300 }.each do |key, default|
    conf[key] ||= default
  end
  # Numeric settings may be passed as Strings.
  conf[:port] = conf[:port].to_i
  conf[:timeout] = conf[:timeout].to_i
  # Boolean flags may arrive as 'true'/'false' Strings.
  conf[:verify_ssl] = conf[:verify_ssl] == 'true' if conf[:verify_ssl].is_a? String
  conf[:debug] = conf[:debug] == 'true' if conf[:debug].is_a? String
end
ruby
{ "resource": "" }
q23316
RubyAem.Aem.config_property
train
# Factory for a ConfigProperty resource bound to this Aem's client.
def config_property(name, type, value)
  RubyAem::Resources::ConfigProperty.new(@client, name, type, value)
end
ruby
{ "resource": "" }
q23317
RubyAem.Aem.package
train
# Factory for a Package resource bound to this Aem's client.
def package(group_name, package_name, package_version)
  RubyAem::Resources::Package.new(@client, group_name, package_name, package_version)
end
ruby
{ "resource": "" }
q23318
Bandit.BaseStorage.part_key
train
# Builds the storage key for participant counts of (experiment,
# alternative), optionally scoped to a date/hour bucket.
def part_key(exp, alt, date_hour=nil)
  segments = ["participants", exp.name, alt]
  segments.concat([date_hour.date, date_hour.hour]) unless date_hour.nil?
  make_key segments
end
ruby
{ "resource": "" }
q23319
OAuthenticator.RackAuthenticator.unauthenticated_response
train
# Builds the 401 Rack triplet: a WWW-Authenticate OAuth challenge plus
# a JSON body carrying +errors+ (a map of field => array of messages)
# and a flattened human-readable error_message.
def unauthenticated_response(errors)
  # default to a blank realm, I suppose
  realm = @options[:realm] || ''
  response_headers = {
    "WWW-Authenticate" => %Q(OAuth realm="#{realm}"),
    'Content-Type' => 'application/json'
  }
  body = {'errors' => errors}
  messages = errors.values.inject([], &:+)
  error_message =
    if messages.size <= 1
      messages.first
    else
      # sentencify with periods
      messages.map { |v| v =~ /\.\s*\z/ ? v : v + '.' }.join(' ')
    end
  body['error_message'] = error_message if error_message
  [401, response_headers, [JSON.pretty_generate(body)]]
end
ruby
{ "resource": "" }
q23320
Bandit.ControllerConcerns.bandit_simple_convert!
train
# Records +count+ conversions for the given alternative, with no
# once-per-visitor guard (compare the session/sticky variants).
def bandit_simple_convert!(exp, alt, count=1)
  Bandit.get_experiment(exp).convert!(alt, count)
end
ruby
{ "resource": "" }
q23321
Bandit.ControllerConcerns.bandit_session_convert!
train
# Records a conversion for the alternative stored in the signed session
# cookie (unless +alt+ is given), then deletes the choice cookie so the
# same session cannot convert again.
# NOTE(review): unlike bandit_sticky_convert!, this checks the
# _converted cookie but never sets it — confirm that is intentional.
def bandit_session_convert!(exp, alt=nil, count=1)
  cookiename = "bandit_#{exp}".intern
  cookiename_converted = "bandit_#{exp}_converted".intern
  alt ||= cookies.signed[cookiename]
  unless alt.nil? or cookies.signed[cookiename_converted]
    Bandit.get_experiment(exp).convert!(alt, count)
    cookies.delete(cookiename)
  end
end
ruby
{ "resource": "" }
q23322
Bandit.ControllerConcerns.bandit_sticky_convert!
train
# Records a conversion at most once per visitor: marks the visitor with
# a permanent _converted cookie and keeps the original choice cookie.
def bandit_sticky_convert!(exp, alt=nil, count=1)
  cookiename = "bandit_#{exp}".intern
  cookiename_converted = "bandit_#{exp}_converted".intern
  alt ||= cookies.signed[cookiename]
  unless alt.nil? or cookies.signed[cookiename_converted]
    # Permanent marker prevents repeat conversions from this browser.
    cookies.permanent.signed[cookiename_converted] = "true"
    Bandit.get_experiment(exp).convert!(alt, count)
  end
end
ruby
{ "resource": "" }
q23323
ELFTools.Dynamic.each_tags
train
# Yields every dynamic tag in order, stopping after (and including) the
# DT_NULL terminator. Returns an Enumerator when no block is given,
# otherwise the Array of tags.
def each_tags(&block)
  return enum_for(:each_tags) unless block_given?
  collected = []
  0.step do |idx|
    tag = tag_at(idx).tap(&block)
    collected << tag
    break if tag.header.d_tag == ELFTools::Constants::DT_NULL
  end
  collected
end
ruby
{ "resource": "" }
q23324
ELFTools.Dynamic.tag_at
train
# Returns the n-th dynamic tag (memoized per index); nil for negative
# indices. Seeks the stream to tag_start + n * sizeof(ELF_Dyn), reads
# one ELF_Dyn and wraps it in a Tag with a string-table resolver.
def tag_at(n)
  return if n < 0
  @tag_at_map ||= {}
  return @tag_at_map[n] if @tag_at_map[n]
  dyn = Structs::ELF_Dyn.new(endian: endian)
  dyn.elf_class = header.elf_class
  stream.pos = tag_start + n * dyn.num_bytes
  dyn.offset = stream.pos
  @tag_at_map[n] = Tag.new(dyn.read(stream), stream, method(:str_offset))
end
ruby
{ "resource": "" }
q23325
ELFTools.ELFFile.build_id
train
# Returns the GNU build id as a lowercase hex string, or nil when the
# .note.gnu.build-id section (or its first note) is absent.
def build_id
  section = section_by_name('.note.gnu.build-id')
  note = section && section.notes.first
  return nil if note.nil?
  note.desc.unpack('H*').first
end
ruby
{ "resource": "" }
q23326
ELFTools.ELFFile.each_sections
train
# Yields every section in index order. Returns an Enumerator when no
# block is given, otherwise the Array of sections.
def each_sections(&block)
  return enum_for(:each_sections) unless block_given?
  Array.new(num_sections) { |idx| section_at(idx).tap(&block) }
end
ruby
{ "resource": "" }
q23327
ELFTools.ELFFile.sections_by_type
train
# Returns all sections of the given type (symbol, integer or constant
# name, normalized via Util.to_constant); yields matches to the block.
def sections_by_type(type, &block)
  type = Util.to_constant(Constants::SHT, type)
  Util.select_by_type(each_sections, type, &block)
end
ruby
{ "resource": "" }
q23328
ELFTools.ELFFile.each_segments
train
# Yields every segment in index order. Returns an Enumerator when no
# block is given, otherwise the Array of segments.
def each_segments(&block)
  return enum_for(:each_segments) unless block_given?
  Array.new(num_segments) { |idx| segment_at(idx).tap(&block) }
end
ruby
{ "resource": "" }
q23329
ELFTools.ELFFile.segments_by_type
train
# Returns all segments of the given type (symbol, integer or constant
# name, normalized via Util.to_constant); yields matches to the block.
def segments_by_type(type, &block)
  type = Util.to_constant(Constants::PT, type)
  Util.select_by_type(each_segments, type, &block)
end
ruby
{ "resource": "" }
q23330
ELFTools.ELFFile.offset_from_vma
train
# Maps a virtual address (optionally a +size+-byte range) to its file
# offset using the LOAD segments.
# NOTE(review): when no LOAD segment covers vma this falls through and
# returns segments_by_type's result (an Array), not nil — confirm
# callers tolerate that.
def offset_from_vma(vma, size = 0)
  segments_by_type(:load) do |seg|
    return seg.vma_to_offset(vma) if seg.vma_in?(vma, size)
  end
end
ruby
{ "resource": "" }
q23331
ELFTools.ELFFile.patches
train
# Collects all pending patches from every loaded header into one Hash
# keyed by absolute file offset.
def patches
  loaded_headers.each_with_object({}) do |header, acc|
    header.patches.each { |key, val| acc[key + header.offset] = val }
  end
end
ruby
{ "resource": "" }
q23332
ELFTools.ELFFile.save
train
# Writes the ELF image, with all pending patches applied in-place, to
# +filename+ as binary.
def save(filename)
  stream.pos = 0
  image = stream.read.force_encoding('ascii-8bit')
  patches.each { |pos, val| image[pos, val.size] = val }
  IO.binwrite(filename, image)
end
ruby
{ "resource": "" }
q23333
ELFTools.ELFFile.loaded_headers
train
# Recursively walks instance variables collecting every ELFStruct that
# has been instantiated so far (lazily-loaded headers), flattened into
# one Array. Used by #patches to find pending edits.
def loaded_headers
  explore = lambda do |obj|
    # Lambda-local returns: a struct is a leaf, an Array is mapped.
    return obj if obj.is_a?(::ELFTools::Structs::ELFStruct)
    return obj.map(&explore) if obj.is_a?(Array)
    obj.instance_variables.map do |s|
      explore.call(obj.instance_variable_get(s))
    end
  end
  explore.call(self).flatten
end
ruby
{ "resource": "" }
q23334
HttpParser.Parser.parse
train
# Feeds +data+ into the native parser for +inst+.
# NOTE(review): returns inst.error? — i.e. truthy means the parse
# FAILED, not succeeded; confirm callers expect that polarity.
def parse(inst, data)
  ::HttpParser.http_parser_execute(inst, @settings, data, data.length)
  return inst.error?
end
ruby
{ "resource": "" }
q23335
RubyAem.Client.call
train
# Generic API entry point: resolves the swagger API, operation and
# parameters for (resource class, action) from the YAML spec, invokes
# the operation, and normalises success and error into a
# RubyAem::Response which is dispatched via #handle.
def call(clazz, action, call_params)
  resource_name = clazz.name.downcase.sub('rubyaem::resources::', '')
  resource = @spec[resource_name]
  action_spec = resource['actions'][action]
  api = @apis[action_spec['api'].to_sym]
  operation = action_spec['operation']
  params = []
  # Required params are %-interpolated from call_params, in spec order.
  required_params = action_spec['params']['required'] || {}
  required_params.each_value { |value| params.push(value % call_params) }
  # Trailing hash collects the optional params.
  params.push({})
  optional_params = action_spec['params']['optional'] || {}
  optional_params.each { |key, value| add_optional_param(key, value, params, call_params) }
  # Action-level response specs override resource-level ones.
  base_responses_spec = resource['responses'] || {}
  action_responses_spec = action_spec['responses'] || {}
  responses_spec = base_responses_spec.merge(action_responses_spec)
  begin
    method = RubyAem::Swagger.operation_to_method(operation)
    data, status_code, headers = api.send("#{method}_with_http_info", *params)
    response = RubyAem::Response.new(status_code, data, headers)
  rescue SwaggerAemClient::ApiError => err
    # API errors become Responses too, so handlers can map them.
    response = RubyAem::Response.new(err.code, err.response_body, err.response_headers)
  end
  handle(response, responses_spec, call_params)
end
ruby
{ "resource": "" }
q23336
RubyAem.Client.add_optional_param
train
# Adds one optional parameter to the trailing options hash in +params+:
# - spec value falsy: forward the caller's value only when present
# - spec value is a __FILE_*__ placeholder: open the referenced file
# - spec value is any other String: %-interpolate it with call_params
# - otherwise: use the spec value verbatim
# Fix: `value.class == String` replaced with the idiomatic (and
# subclass-safe) `value.is_a?(String)`; the placeholder case is now an
# expression so file_path is always explicitly assigned.
def add_optional_param(key, value, params, call_params)
  if !value
    # No value in the spec: only forward what the caller supplied.
    params[-1][key.to_sym] = call_params[key.to_sym] if call_params.key? key.to_sym
  elsif value.is_a?(String)
    file_path =
      case value
      when '__FILE_PACKAGE__'
        "#{call_params[:file_path]}/#{call_params[:package_name]}-#{call_params[:package_version]}.zip"
      when '__FILE_PLAIN__'
        call_params[:file_path]
      when '__FILE_CERTIFICATE__'
        call_params[:file_path_certificate]
      when '__FILE_PRIVATE_KEY__'
        call_params[:file_path_private_key]
      end
    if file_path.nil?
      # Plain string spec: interpolate against call_params.
      params[-1][key.to_sym] = value % call_params
    else
      # NOTE(review): the block form closes the handle when it returns,
      # so a closed File is stored — presumably the swagger client only
      # needs its path; confirm.
      File.open(file_path.to_s, 'r') { |file| params[-1][key.to_sym] = file }
    end
  else
    params[-1][key.to_sym] = value
  end
end
ruby
{ "resource": "" }
q23337
RubyAem.Client.handle
train
# Routes a RubyAem::Response to the handler configured for its status
# code; an unexpected status code raises RubyAem::Error with details.
def handle(response, responses_spec, call_params)
  unless responses_spec.key?(response.status_code)
    message = "Unexpected response\nstatus code: #{response.status_code}\nheaders: #{response.headers}\nbody: #{response.body}"
    raise RubyAem::Error.new(message, Result.new(message, response))
  end
  response_spec = responses_spec[response.status_code]
  Handlers.send(response_spec['handler'], response, response_spec, call_params)
end
ruby
{ "resource": "" }
q23338
Evercookie.EvercookieController.save
train
# Copies the cookie value named by the "get" session hash into the
# "saved" session hash, then renders an empty response.
def save
  if data = session[Evercookie.hash_name_for_get]
    if data[:key] && cookies[data[:key]]
      session[Evercookie.hash_name_for_saved] = {
        data[:key] => cookies[data[:key]]
      }
    end
  end
  render nothing: true
end
ruby
{ "resource": "" }
q23339
Evercookie.EvercookieController.ec_png
train
# Serves the evercookie PNG: 304 when the png cookie is absent,
# otherwise an aggressively cached image whose pixel data encodes the
# cookie value (read back later through the browser cache).
def ec_png
  if not cookies[Evercookie.cookie_png].present?
    render :nothing => true, :status => 304
    return true
  end
  # Far-future caching so the browser keeps the image long-term.
  response.headers["Content-Type"] = "image/png"
  response.headers["Last-Modified"] = "Wed, 30 Jun 2010 21:36:48 GMT"
  response.headers["Expires"] = "Tue, 31 Dec 2030 23:30:45 GMT"
  response.headers["Cache-Control"] = "private, max-age=630720000"
  render text: get_blob_png, status: 200, content_type: 'image/png'
end
ruby
{ "resource": "" }
q23340
Evercookie.EvercookieController.ec_etag
train
# Echoes the etag cookie back via the Etag header so the value can be
# resurrected through the browser's conditional-request cache; responds
# 304 with the incoming If-None-Match when the cookie is absent.
# FIXME: stray debug `puts` writes to stdout on every request.
def ec_etag
  if not cookies[Evercookie.cookie_etag].present?
    render :text => request.headers['If-None-Match'] || '', :status => 304
    return true
  end
  puts "cache value (#{Evercookie.cookie_etag}): #{cookies[Evercookie.cookie_etag]}"
  response.headers["Etag"] = cookies[Evercookie.cookie_etag]
  render text: cookies[Evercookie.cookie_etag]
end
ruby
{ "resource": "" }
q23341
Evercookie.EvercookieController.ec_cache
train
# Serves the cache-cookie value with far-future cache headers so the
# browser cache itself stores the value; 304 when the cookie is absent.
# FIXME: stray debug `puts` writes to stdout on every request.
def ec_cache
  if not cookies[Evercookie.cookie_cache].present?
    render :nothing => true, :status => 304
    return true
  end
  puts "cache value (#{Evercookie.cookie_cache}): #{cookies[Evercookie.cookie_cache]}"
  response.headers["Content-Type"] = "text/html"
  response.headers["Last-Modified"] = "Wed, 30 Jun 2010 21:36:48 GMT"
  response.headers["Expires"] = "Tue, 31 Dec 2030 23:30:45 GMT"
  response.headers["Cache-Control"] = "private, max-age=630720000"
  render text: cookies[Evercookie.cookie_cache]
end
ruby
{ "resource": "" }
q23342
ELFTools.Note.each_notes
train
# Iterates the ELF notes in this section/segment, memoizing parsed
# notes by file offset. Returns an Enumerator when no block is given,
# otherwise the Array of notes.
def each_notes
  return enum_for(:each_notes) unless block_given?
  @notes_offset_map ||= {}
  cur = note_start
  notes = []
  while cur < note_start + note_total_size
    stream.pos = cur
    @notes_offset_map[cur] ||= create_note(cur)
    note = @notes_offset_map[cur]
    # name and desc sizes are 4-byte aligned; the 2 is presumably the
    # power-of-two exponent (2**2 == 4) — confirm against Util.align.
    name_size = Util.align(note.header.n_namesz, 2)
    desc_size = Util.align(note.header.n_descsz, 2)
    cur += SIZE_OF_NHDR + name_size + desc_size
    notes << note
    yield note
  end
  notes
end
ruby
{ "resource": "" }
q23343
OAuthenticator.SignedRequest.config_method_not_implemented
train
# Raises NotImplementedError naming the missing oauth config method
# (taken from the immediate caller's frame) and explaining where it
# should be defined — differs when running under the middleware.
# NOTE(review): parses backtick-quoted method names out of `caller`;
# Ruby 3.4 changed backtrace quoting to 'name', which would break the
# match — confirm supported Ruby versions.
def config_method_not_implemented
  caller_name = caller[0].match(%r(in `(.*?)'))[1]
  using_middleware = caller.any? { |l| l =~ %r(oauthenticator/rack_authenticator.rb:.*`call') }
  message = "method \##{caller_name} must be implemented on a module of oauth config methods, which is " + begin
    if using_middleware
      "passed to OAuthenticator::RackAuthenticator using the option :config_methods."
    else
      "included in a subclass of OAuthenticator::SignedRequest, typically by passing it to OAuthenticator::SignedRequest.including_config(your_module)."
    end
  end + " Please consult the documentation."
  raise NotImplementedError, message
end
ruby
{ "resource": "" }
q23344
HttpParser.Instance.error
train
# Maps the native parser's error code to a Ruby exception instance, or
# nil when there is no error. The low 7 bits of :error_upgrade hold the
# error code.
def error
  error = (self[:error_upgrade] & 0b1111111)
  return nil if error == 0
  err = ::HttpParser.err_name(error)[4..-1] # strip the "HPE_" prefix all error names share
  klass = ERRORS[err.to_sym]
  err = "#{::HttpParser.err_desc(error)} (#{err})"
  # Unknown codes fall back to Error::UNKNOWN.
  return klass.nil? ? Error::UNKNOWN.new(err) : klass.new(err)
end
ruby
{ "resource": "" }
q23345
OAuthenticator.FaradaySigner.call
train
# Faraday middleware hook: signs the outgoing request by building an
# OAuthenticator::SignableRequest from the env and setting its
# Authorization header before passing the env down the stack.
def call(request_env)
  # Reuse Rack's media-type parsing on the outgoing Content-Type.
  media_type = Rack::Request.new('CONTENT_TYPE' => request_env[:request_headers]['Content-Type']).media_type
  request_attributes = {
    :request_method => request_env[:method],
    :uri => request_env[:url],
    :media_type => media_type,
    :body => request_env[:body]
  }
  # the adapter will set the media type to form-encoded when not otherwise specified on
  # requests it expects to have a body. see
  # Net::HTTPGenericRequest#supply_default_content_type called in #send_request_with_body.
  # other adapters do similarly, I think.
  if METHODS_WITH_BODIES.include?(request_env[:method].to_s.downcase) && !request_attributes[:media_type]
    request_attributes[:media_type] = 'application/x-www-form-urlencoded'
  end
  oauthenticator_signable_request = OAuthenticator::SignableRequest.new(@options.merge(request_attributes))
  request_env[:request_headers]['Authorization'] = oauthenticator_signable_request.authorization
  @app.call(request_env)
end
ruby
{ "resource": "" }
q23346
OAuthenticator.SignableRequest.signature_base
train
# The OAuth signature base string (RFC 5849 §3.4.1): request method,
# base string URI and normalized params, each percent-escaped and
# joined with '&'.
def signature_base
  parts = [normalized_request_method, base_string_uri, normalized_request_params_string]
  parts.map { |v| OAuthenticator.escape(v) }.join('&')
end
ruby
{ "resource": "" }
q23347
OAuthenticator.SignableRequest.base_string_uri
train
# Base string URI per RFC 5849 §3.4.1.2: scheme and host lowercased,
# URI normalized, query string and fragment stripped.
def base_string_uri
  Addressable::URI.parse(@attributes['uri'].to_s).tap do |uri|
    uri.scheme = uri.scheme.downcase if uri.scheme
    uri.host = uri.host.downcase if uri.host
    uri.normalize!
    uri.fragment = nil
    uri.query = nil
  end.to_s
end
ruby
{ "resource": "" }
q23348
OAuthenticator.SignableRequest.normalized_request_params_string
train
# Percent-escapes every key and value, sorts the pairs, and joins them
# as "k=v&k2=v2" (parameter normalization, RFC 5849 §3.4.1.3.2).
def normalized_request_params_string
  normalized_request_params.map { |kv| kv.map { |v| OAuthenticator.escape(v) } }.sort.map { |p| p.join('=') }.join('&')
end
ruby
{ "resource": "" }
q23349
OAuthenticator.SignableRequest.normalized_request_params
train
# All request parameters that participate in the signature: query
# params, protocol params (excluding realm and oauth_signature), and
# form-encoded entity body params.
def normalized_request_params
  query_params + protocol_params.reject { |k,v| %w(realm oauth_signature).include?(k) }.to_a + entity_params
end
ruby
{ "resource": "" }
q23350
OAuthenticator.SignableRequest.parse_form_encoded
train
# Parses a form-encoded string into [key, value] pairs, splitting on
# both '&' and ';'. Pairs with a missing or empty key are dropped; a
# key without '=' yields a nil value.
def parse_form_encoded(data)
  data.split(/[&;]/).each_with_object([]) do |pair, pairs|
    key, value = pair.split('=', 2).map { |part| CGI::unescape(part) }
    pairs << [key, value] unless [nil, ''].include?(key)
  end
end
ruby
{ "resource": "" }
q23351
OAuthenticator.SignableRequest.read_body
train
# Returns the request body as a String: '' for nil, the String itself,
# or the full contents of an IO-like object (rewound before and after
# reading so other consumers still see an unread stream). Raises
# TypeError for anything else.
def read_body
  body = @attributes['body']
  case body
  when nil
    ''
  when String
    body
  else
    unless body.respond_to?(:read) && body.respond_to?(:rewind)
      raise TypeError, "Body must be a String or something IO-like (responding to #read and #rewind). " +
        "got body = #{body.inspect}"
    end
    body.rewind
    contents = body.read
    # Leave the stream rewound for any later reader.
    body.rewind
    contents
  end
end
ruby
{ "resource": "" }
q23352
OAuthenticator.SignableRequest.rsa_sha1_signature
train
# RSA-SHA1 signature: consumer_secret is expected to be a PEM private
# key; signs the base string and base64-encodes without newlines.
def rsa_sha1_signature
  private_key = OpenSSL::PKey::RSA.new(@attributes['consumer_secret'])
  Base64.encode64(private_key.sign(OpenSSL::Digest::SHA1.new, signature_base)).gsub(/\n/, '')
end
ruby
{ "resource": "" }
q23353
OAuthenticator.SignableRequest.hmac_sha1_signature
train
# HMAC-SHA1 signature over the base string, base64-encoded without
# newlines.
def hmac_sha1_signature
  # hmac secret is same as plaintext signature
  secret = plaintext_signature
  Base64.encode64(OpenSSL::HMAC.digest(OpenSSL::Digest::SHA1.new, secret, signature_base)).gsub(/\n/, '')
end
ruby
{ "resource": "" }
q23354
Tetra.Bash.bash
train
# Opens a bash session wired up with project-specific ant/mvn aliases
# (resolved from the kit) via a temporary bashrc. With +command+, runs
# it non-interactively and returns [command]; otherwise runs an
# interactive shell and returns the stripped history lines the user
# typed (captured through a temporary history file).
def bash(command = nil)
  Tempfile.open("tetra-history") do |history_file|
    Tempfile.open("tetra-bashrc") do |bashrc_file|
      kit = Tetra::Kit.new(@project)
      ant_path = kit.find_executable("ant")
      ant_in_kit = ant_path != nil
      ant_commandline = Tetra::Ant.commandline(@project.full_path, ant_path)
      mvn_path = kit.find_executable("mvn")
      mvn_in_kit = mvn_path != nil
      mvn_commandline = Tetra::Mvn.commandline(@project.full_path, mvn_path)
      bashrc_content = Bashrc.new(history_file.path, ant_in_kit, ant_commandline, mvn_in_kit, mvn_commandline).to_s
      log.debug "writing bashrc file: #{bashrc_file.path}"
      log.debug bashrc_content
      bashrc_file.write(bashrc_content)
      # flush so the spawned bash sees the full rcfile
      bashrc_file.flush
      if command
        run("bash --rcfile #{bashrc_file.path} -i -c '#{command}'")
        [command]
      else
        run_interactive("bash --rcfile #{bashrc_file.path} -i")
        history = File.read(history_file)
        log.debug "history contents:"
        log.debug history
        history.split("\n").map(&:strip)
      end
    end
  end
end
ruby
{ "resource": "" }
q23355
Serverspec.Type.bitlbee
train
# Factory for the Bitlbee serverspec resource type.
def bitlbee(port, nick, password, use_ssl=false)
  Bitlbee.new(port, nick, password, use_ssl)
end
ruby
{ "resource": "" }
q23356
Tetra.Git.commit_directories
train
# Re-stages the given directories (unstaging deleted paths first) and
# commits them with +message+ (fed via stdin, so multi-line is safe).
# NOTE(review): directory names are interpolated into shell commands
# unquoted — paths with spaces or metacharacters will break.
def commit_directories(directories, message)
  log.debug "committing with message: #{message}"
  Dir.chdir(@directory) do
    directories.each do |directory|
      # Drop cached entries so deletions are picked up, then re-add.
      run("git rm -r --cached --ignore-unmatch #{directory}")
      run("git add #{directory}")
    end
    run("git commit --allow-empty -F -", false, message)
  end
end
ruby
{ "resource": "" }
q23357
Tetra.Git.commit_file
train
# Stages a single file and commits it with +message+ (fed via stdin).
# NOTE(review): path is interpolated into the shell command unquoted.
def commit_file(path, message)
  Dir.chdir(@directory) do
    log.debug "committing path #{path} with message: #{message}"
    run("git add #{path}")
    run("git commit --allow-empty -F -", false, message)
  end
end
ruby
{ "resource": "" }
q23358
Tetra.Git.revert_directories
train
# Restores the given directories to their state at commit +id+: checks
# out tracked content, then removes files that exist now (in HEAD or
# untracked) but did not exist in that commit.
def revert_directories(directories, id)
  Dir.chdir(@directory) do
    directories.each do |directory|
      # reverts added and modified files, both in index and working tree
      run("git checkout -f #{id} -- #{directory}")
      # compute the list of deleted files
      files_in_commit = run("git ls-tree --name-only -r #{id} -- #{directory}").split("\n")
      files_in_head = run("git ls-tree --name-only -r HEAD -- #{directory}").split("\n")
      files_added_after_head = run("git ls-files -o -- #{directory}").split("\n")
      files_to_delete = files_in_head - files_in_commit + files_added_after_head
      files_to_delete.each do |file|
        FileUtils.rm_rf(file)
      end
    end
  end
end
ruby
{ "resource": "" }
q23359
Tetra.Git.disable_special_files
train
# Renames .git and .gitignore entries under +path+ (suffix
# "_disabled_by_tetra") so git treats the tree as ordinary content.
def disable_special_files(path)
  Dir.chdir(File.join(@directory, path)) do
    Find.find(".") do |entry|
      FileUtils.mv(entry, "#{entry}_disabled_by_tetra") if entry =~ /\.git(ignore)?$/
    end
  end
end
ruby
{ "resource": "" }
q23360
Tetra.Git.merge_with_id
train
# Three-way merge of +path+: base is the version at commit +id+, ours
# is the current file, theirs is +new_path+. Returns the conflict count
# (git merge-file's positive exit status); 0 when clean.
def merge_with_id(path, new_path, id)
  Dir.chdir(@directory) do
    run("git show #{id}:#{path} > #{path}.old_version")
    conflict_count = 0
    begin
      run("git merge-file #{path} #{path}.old_version #{new_path} \
           -L \"newly generated\" \
           -L \"previously generated\" \
           -L \"user edited\"")
    rescue ExecutionFailed => e
      # merge-file exits with the number of conflicts, not an error.
      if e.status > 0
        conflict_count = e.status
      else
        raise e
      end
    end
    File.delete("#{path}.old_version")
    conflict_count
  end
end
ruby
{ "resource": "" }
q23361
Tetra.Git.changed_files
train
# Lists paths under +directory+ that differ from commit +id+ (tracked)
# plus untracked files. git diff-index exits 1 when differences exist;
# only other statuses are errors.
def changed_files(directory, id)
  Dir.chdir(@directory) do
    tracked_files = []
    begin
      tracked_files += run("git diff-index --name-only #{id} -- #{directory}").split
    rescue ExecutionFailed => e
      raise e if e.status != 1 # status 1 is normal
    end
    untracked_files = run("git ls-files --exclude-standard --others -- #{directory}").split
    tracked_files + untracked_files
  end
end
ruby
{ "resource": "" }
q23362
Tetra.Git.archive
train
# Writes an xz-compressed tar of +directory+ at commit +id+ to
# +destination_path+ (parent directories created as needed). Returns
# the destination path.
def archive(directory, id, destination_path)
  Dir.chdir(@directory) do
    FileUtils.mkdir_p(File.dirname(destination_path))
    # Highest xz compression; pipeline runs through the shell.
    run("git archive --format=tar #{id} -- #{directory} | xz -9e > #{destination_path}")
  end
  destination_path
end
ruby
{ "resource": "" }
q23363
Tetra.Subcommand.configure_log_level
train
# Maps the three verbosity flags to a logger level; the most verbose
# flag present wins, defaulting to ERROR.
def configure_log_level(v, vv, vvv)
  log.level =
    if vvv
      ::Logger::DEBUG
    elsif vv
      ::Logger::INFO
    elsif v
      ::Logger::WARN
    else
      ::Logger::ERROR
    end
end
ruby
{ "resource": "" }
q23364
Tetra.Subcommand.bypass_parsing
train
# Handles verbosity flags outside the normal option parser: removes any
# recognized flag from +args+ and raises the log level accordingly
# (later, more verbose flags win). Remaining args are kept in @options.
def bypass_parsing(args)
  { "--verbose" => ::Logger::WARN,
    "--very-verbose" => ::Logger::INFO,
    "--very-very-verbose" => ::Logger::DEBUG }.each do |flag, level|
    log.level = level if args.delete(flag)
  end
  @options = args
end
ruby
{ "resource": "" }
q23365
Tetra.Subcommand.format_path
train
# Returns +path+ expressed relative to the current working directory;
# relative inputs are first anchored at the project root.
def format_path(path, project)
  absolute =
    if Pathname.new(path).relative?
      File.join(project.full_path, path)
    else
      path
    end
  Pathname.new(absolute).relative_path_from(Pathname.new(Dir.pwd))
end
ruby
{ "resource": "" }
q23366
Tetra.Subcommand.checking_exceptions
train
# Runs the block, converting expected failures into concise stderr
# messages instead of stack traces. Unknown exceptions propagate.
# NOTE(review): rescue classes are evaluated only when an exception
# propagates, so the project error constants (NoProjectDirectoryError,
# GitAlreadyInitedError, ExecutionFailed) must be loaded by then.
def checking_exceptions
  yield
rescue Errno::EACCES => e
  $stderr.puts e
rescue Errno::ENOENT => e
  $stderr.puts e
rescue Errno::EEXIST => e
  $stderr.puts e
rescue NoProjectDirectoryError => e
  $stderr.puts "#{e.directory} is not a tetra project directory, see \"tetra init\""
rescue GitAlreadyInitedError
  $stderr.puts "This directory is already a tetra project"
rescue ExecutionFailed => e
  $stderr.puts "Failed to run `#{e.commandline}` (exit status #{e.status})"
rescue Interrupt
  $stderr.puts "Execution interrupted by the user"
end
ruby
{ "resource": "" }
q23367
Buildbox.Canceler.process_map
train
# Builds a Hash mapping each parent pid to an Array of its child pids,
# by parsing `ps -eo ppid,pid` output.
def process_map
  `ps -eo ppid,pid`.split("\n").each_with_object({}) do |line, map|
    next unless (match = line.match(/(\d+)\s(\d+)/))
    parent_pid = match[1].to_i
    (map[parent_pid] ||= []) << match[2].to_i
  end
end
ruby
{ "resource": "" }
q23368
Buildbox.Command.read_io
train
# Drains all currently-available output from +io+ without blocking
# indefinitely, returning it as a String. Stops on EOF/EIO or when the
# stream has no data ready; anything else re-raises.
def read_io(io)
  data = ""

  while true
    begin
      if Platform.windows?
        # Windows doesn't support non-blocking reads on
        # file descriptors or pipes so we have to get
        # a bit more creative.

        # Check if data is actually ready on this IO device.
        # We have to do this since `readpartial` will actually block
        # until data is available, which can cause blocking forever
        # in some cases.
        results = IO.select([io], nil, nil, 0.1)
        break if !results || results[0].empty?

        # Read!
        data << io.readpartial(READ_CHUNK_SIZE)
      else
        # Do a simple non-blocking read on the IO object
        data << io.read_nonblock(READ_CHUNK_SIZE)
      end
    rescue Exception => e
      # The catch-all rescue here is to support multiple Ruby versions,
      # since we use some Ruby 1.9 specific exceptions.
      # (Deliberately broad: anything not whitelisted below re-raises.)

      breakable = false

      # EOFError from OSX, EIO is raised by ubuntu
      if e.is_a?(EOFError) || e.is_a?(Errno::EIO)
        # An `EOFError` means this IO object is done!
        breakable = true
      elsif defined?(IO::WaitReadable) && e.is_a?(IO::WaitReadable)
        # IO::WaitReadable is only available on Ruby 1.9+
        # An IO::WaitReadable means there may be more IO but this
        # IO object is not ready to be read from yet. No problem,
        # we read as much as we can, so we break.
        breakable = true
      elsif e.is_a?(Errno::EAGAIN) || e.is_a?(Errno::EWOULDBLOCK)
        # Otherwise, we just look for the EAGAIN error which should be
        # all that IO::WaitReadable does in Ruby 1.9.
        breakable = true
      end

      # Break out if we're supposed to. Otherwise re-raise the error
      # because it is a real problem.
      break if breakable
      raise
    end
  end

  data
end
ruby
{ "resource": "" }
q23369
Serverspec::Type.Virtualenv.virtualenv?
train
# True when @name points at a python virtualenv: bin/pip and bin/python
# are owner-executable, and bin/activate is owner-readable and exports
# VIRTUAL_ENV.
def virtualenv?
  pip_path = ::File.join(@name, 'bin', 'pip')
  python_path = ::File.join(@name, 'bin', 'python')
  act_path = ::File.join(@name, 'bin', 'activate')
  cmd = "grep -q 'export VIRTUAL_ENV' #{act_path}"
  @runner.check_file_is_executable(pip_path, 'owner') and
    @runner.check_file_is_executable(python_path, 'owner') and
    @runner.check_file_is_readable(act_path, 'owner') and
    @runner.run_command(cmd).exit_status.to_i == 0
end
ruby
{ "resource": "" }
q23370
Tetra.ProcessRunner.run_interactive
train
# Runs +command+ attached to the user's terminal; raises
# ExecutionFailed on a non-zero exit.
# NOTE(review): $CHILD_STATUS requires `require "English"` somewhere in
# the project — it is the English-library alias of $?.
def run_interactive(command)
  log.debug "running `#{command}`"
  success = system({}, command)
  log.debug "`#{command}` exited with success #{success}"
  fail ExecutionFailed.new(command, $CHILD_STATUS, nil, nil) unless success
end
ruby
{ "resource": "" }
q23371
Tetra.Project.merge_new_content
train
# Writes +new_content+ to +path+, three-way merging it with any user
# edits made since the last generated version. Commits checkpoints to
# the project git repo tagged "tetra: generated-<kind>". Returns the
# number of merge conflicts left in the file (0 when clean or new).
def merge_new_content(new_content, path, comment, kind)
  from_directory do
    log.debug "merging new content to #{path} of kind #{kind}"
    already_existing = File.exist?(path)
    generated_comment = "tetra: generated-#{kind}"
    whole_comment = [comment, generated_comment].join("\n\n")
    if already_existing
      # Ensure a generated baseline commit exists before merging.
      unless @git.latest_id(generated_comment)
        log.debug "committing new file"
        @git.commit_file(path, whole_comment)
      end
      # Preserve the user's current version for the merge.
      log.debug "moving #{path} to #{path}.tetra_user_edited"
      File.rename(path, "#{path}.tetra_user_edited")
    end
    previous_id = @git.latest_id(generated_comment)
    File.open(path, "w") { |io| io.write(new_content) }
    log.debug "committing new content: #{comment}"
    @git.commit_file(path, whole_comment)
    if already_existing
      # 3-way merge: base = previous generated, theirs = user edits.
      conflict_count = @git.merge_with_id(path, "#{path}.tetra_user_edited", previous_id)
      File.delete("#{path}.tetra_user_edited")
      @git.commit_file(path, "User changes merged back") if conflict_count == 0
      return conflict_count
    end
    return 0
  end
end
ruby
{ "resource": "" }
q23372
Tetra.Project.src_archive
train
# Returns the basename of the first file under packages/<name> that is
# not a spec/script/patch file (i.e. the source archive), or nil when
# none exists.
# Fix: the original regex /\.(spec)|(sh)|(patch)$/ parsed as
# (\.spec) | (sh) | (patch$) due to alternation precedence, so any
# filename merely CONTAINING "sh" (e.g. "my-shell.tar.gz") was skipped.
# The extensions are now grouped and anchored at the end of the name.
def src_archive
  from_directory do
    Find.find(File.join("packages", name)) do |file|
      if File.file?(file) && file.match(/\.(spec|sh|patch)$/).nil?
        return File.basename(file)
      end
    end
    nil
  end
end
ruby
{ "resource": "" }
q23373
Docker.Swarm.authenticate!
train
# Logs in against the swarm /auth endpoint, caching the credentials
# JSON in @creds on success. Server or unauthorized errors are wrapped
# in Docker::Error::AuthenticationError. Returns true.
def authenticate!(options = {}, connection = self.connection)
  creds = options.to_json
  connection.post('/auth', {}, :body => creds)
  @creds = creds
  true
rescue Docker::Error::ServerError, Docker::Error::UnauthorizedError
  raise Docker::Error::AuthenticationError
end
ruby
{ "resource": "" }
q23374
Docker.Swarm.validate_version!
train
# Pings the daemon via Docker.info; any DockerError is reported as a
# VersionError naming the expected API version. Returns true.
def validate_version!
  Docker.info
  true
rescue Docker::Error::DockerError
  raise Docker::Error::VersionError, "Expected API Version: #{API_VERSION}"
end
ruby
{ "resource": "" }
q23375
Tetra.Mockers.create_mock_project
train
# Test helper: initializes a throwaway tetra project under spec/data
# and exposes it via @project_path / @project.
def create_mock_project
  @project_path = File.join("spec", "data", "test-project")
  Tetra::Project.init(@project_path, false)
  @project = Tetra::Project.new(@project_path)
end
ruby
{ "resource": "" }
q23376
Tetra.Mockers.create_mock_executable
train
# Test helper: creates a stub executable that records its invocation
# (argv) into a test_out file; returns its project-relative path.
def create_mock_executable(executable_name)
  Dir.chdir(@project_path) do
    dir = mock_executable_dir(executable_name)
    FileUtils.mkdir_p(dir)
    executable_path = mock_executable_path(executable_name)
    File.open(executable_path, "w") { |io| io.puts "echo $0 $*>test_out" }
    File.chmod(0777, executable_path)
    executable_path
  end
end
ruby
{ "resource": "" }
q23377
Tetra.PomGetter.get_pom_from_jar
train
# Scans a jar archive for an embedded pom.xml; returns
# [pom contents, :found_in_jar] on success, nil otherwise. Invalid or
# corrupt archives are logged and skipped.
def get_pom_from_jar(file)
  log.debug("Attempting unpack of #{file} to find a POM")
  begin
    Zip::File.foreach(file) do |entry|
      if entry.name =~ %r{/pom.xml$}
        log.info("pom.xml found in #{file}##{entry.name}")
        return entry.get_input_stream.read, :found_in_jar
      end
    end
  rescue Zip::Error
    log.warn("#{file} does not seem to be a valid jar archive, skipping")
  rescue TypeError
    log.warn("#{file} seems to be a valid jar archive but is corrupt, skipping")
  end
  nil
end
ruby
{ "resource": "" }
q23378
Tetra.PomGetter.get_pom_from_sha1
train
# Looks up the file's SHA1 on search.maven.org and downloads the
# matching POM; returns [pom contents, :found_via_sha1] on success,
# nil otherwise (404s are logged and swallowed).
def get_pom_from_sha1(file)
  log.debug("Attempting SHA1 POM lookup for #{file}")
  begin
    if File.file?(file)
      site = MavenWebsite.new
      sha1 = Digest::SHA1.hexdigest File.read(file)
      # Only results that actually ship a .pom are usable.
      results = site.search_by_sha1(sha1).select { |result| result["ec"].include?(".pom") }
      result = results.first
      unless result.nil?
        log.info("pom.xml for #{file} found on search.maven.org for sha1 #{sha1}\
 (#{result['g']}:#{result['a']}:#{result['v']})"
        )
        group_id, artifact_id, version = site.get_maven_id_from result
        return site.download_pom(group_id, artifact_id, version), :found_via_sha1
      end
    end
  rescue NotFoundOnMavenWebsiteError
    log.warn("Got a 404 error while looking for #{file}'s SHA1 in search.maven.org")
  end
  nil
end
ruby
{ "resource": "" }
q23379
Tetra.PomGetter.get_pom_from_heuristic
train
# Last-resort POM lookup: guesses an artifact id and version from the file
# name and searches search.maven.org for the closest matching artifact.
#
# filename - the jar file name to guess from
#
# Returns [pom_contents, :found_via_heuristic] on success, nil otherwise.
def get_pom_from_heuristic(filename)
  # Fix: log messages interpolated the file name; the literal "#(unknown)"
  # placeholders produced useless log lines (cf. get_pom_from_jar/sha1).
  log.debug("Attempting heuristic POM search for #{filename}")
  site = MavenWebsite.new
  filename = cleanup_name(filename)
  version_matcher = VersionMatcher.new
  my_artifact_id, my_version = version_matcher.split_version(filename)
  log.debug("Guessed artifact id: #{my_artifact_id}, version: #{my_version}")
  result = site.search_by_name(my_artifact_id).first
  log.debug("Artifact id search result: #{result}")
  unless result.nil?
    group_id, artifact_id, = site.get_maven_id_from result
    results = site.search_by_group_id_and_artifact_id(group_id, artifact_id)
    log.debug("All versions: #{results}")
    their_versions = results.map { |doc| doc["v"] }
    # Prefer the version closest to the one guessed from the file name;
    # with no guess, fall back to the highest available version.
    best_matched_version =
      if !my_version.nil?
        version_matcher.best_match(my_version, their_versions)
      else
        their_versions.max
      end
    best_matched_result = (results.select { |r| r["v"] == best_matched_version }).first
    group_id, artifact_id, version = site.get_maven_id_from(best_matched_result)
    log.warn("pom.xml for #{filename} found on search.maven.org with heuristic search (#{group_id}:#{artifact_id}:#{version})")
    return site.download_pom(group_id, artifact_id, version), :found_via_heuristic
  end
  nil
rescue NotFoundOnMavenWebsiteError
  log.warn("Got a 404 error while looking for #{filename} heuristically in search.maven.org")
  nil
end
ruby
{ "resource": "" }
q23380
Tetra.Scriptable._to_script
train
# Generates packages/<name>/build.sh from the project's latest dry-run.
# Returns [path_of_generated_script, merge_conflict_count].
def _to_script(project)
  project.from_directory do
    header = [
      "#!/bin/bash",
      "set -xe",
      "PROJECT_PREFIX=`readlink -e .`",
      "cd #{project.latest_dry_run_directory}"
    ]
    content = (header + aliases(project) + project.build_script_lines).join("\n") + "\n"
    destination_dir = File.join(project.packages_dir, project.name)
    FileUtils.mkdir_p(destination_dir)
    destination = File.join(destination_dir, "build.sh")
    conflicts = project.merge_new_content(content, destination, "Build script generated", "script")
    [destination, conflicts]
  end
end
ruby
{ "resource": "" }
q23381
Tetra.Scriptable.aliases
train
# Builds shell alias lines that point ant and mvn at the kit's own
# executables (mvn additionally forced offline with -o).
def aliases(project)
  kit = Tetra::Kit.new(project)
  ant_commandline = Tetra::Ant.commandline("$PROJECT_PREFIX", kit.find_executable("ant"))
  mvn_commandline = Tetra::Mvn.commandline("$PROJECT_PREFIX", kit.find_executable("mvn"))
  ["alias ant='#{ant_commandline}'", "alias mvn='#{mvn_commandline} -o'"]
end
ruby
{ "resource": "" }
q23382
Tetra.ProjectIniter.template_files
train
# Maps template entries to their destination inside a new project.
# When include_bundled_software is truthy, every entry under
# TEMPLATE_PATH/bundled is additionally mapped into "kit".
def template_files(include_bundled_software)
  mapping = { "kit" => ".", "packages" => ".", "src" => "." }
  return mapping unless include_bundled_software
  Dir.chdir(TEMPLATE_PATH) do
    Dir.glob(File.join("bundled", "*")).each { |entry| mapping[entry] = "kit" }
  end
  mapping
end
ruby
{ "resource": "" }
q23383
Tetra.ProjectIniter.commit_source_archive
train
# Copies a source archive into packages/<name> and commits it, then wipes
# src/ and repopulates it with the archive's contents, committing the
# unpacked sources with the given message.
def commit_source_archive(file, message)
  from_directory do
    destination_dir = File.join(packages_dir, name)
    FileUtils.mkdir_p(destination_dir)
    destination = File.join(destination_dir, File.basename(file))
    FileUtils.cp(file, destination)
    @git.commit_file(destination, "Source archive added")
    # Choose the decompressor from the file extension (zip vs. tarball)
    unarchiver = file =~ /\.zip$/ ? Tetra::Unzip.new : Tetra::Tar.new
    Dir.glob(File.join("src", "*")).each { |stale| FileUtils.rm_rf(stale) }
    unarchiver.decompress(file, "src")
    commit_sources(message, true)
  end
end
ruby
{ "resource": "" }
q23384
Serverspec.Type.http_get
train
# Serverspec DSL helper: builds an Http_Get resource from the given
# connection parameters.
#
# port              - TCP port to connect to
# host_header       - value used as the HTTP Host header
# path              - request path
# timeout_sec       - timeout in seconds (default 10)
# protocol          - 'http' or 'https' (default 'http')
# bypass_ssl_verify - when true, skip TLS certificate verification
#                     (presumably only meaningful for 'https' — confirm
#                     against Http_Get's implementation)
def http_get(port, host_header, path, timeout_sec=10, protocol='http', bypass_ssl_verify=false)
  Http_Get.new(port, host_header, path, timeout_sec, protocol, bypass_ssl_verify)
end
ruby
{ "resource": "" }
q23385
Tetra.Generatable.generate
train
# Renders the named ERB template (looked up under template_path) against
# the supplied binding and returns the resulting text.
def generate(template_name, object_binding)
  source = File.read(File.join(template_path, template_name))
  # nil safe level, "<>" trim mode: suppress the newline after lines that
  # consist solely of an ERB tag
  ERB.new(source, nil, "<>").result(object_binding)
end
ruby
{ "resource": "" }
q23386
SumologicCloudSyslog.Logger.log
train
# Emits one syslog message with the given severity and text.
# time defaults to Time.now; an optional block receives the message header
# for further customization before the message is serialized and written.
def log(severity, message, time: nil)
  msg = SumologicCloudSyslog::Message.new
  # Include authentication header
  msg.structured_data << @default_structured_data
  # Clone the default header, then stamp it with severity and timestamp
  msg.header = @default_header.dup
  msg.header.severity = severity
  msg.header.timestamp = time || Time.now
  yield msg.header if block_given?
  msg.msg = message
  transport.write(msg.to_s)
end
ruby
{ "resource": "" }
q23387
PoiseLanguages.Utils.shelljoin
train
# Like Shellwords.shelljoin, but words matching any whitelist pattern are
# passed through unescaped so shell constructs (e.g. $VARS) survive.
#
# cmd       - Array of command words
# whitelist - Array of Regexps marking words to leave unescaped
#
# Returns the joined command String.
def shelljoin(cmd, whitelist: SHELLJOIN_WHITELIST)
  words = cmd.map do |word|
    keep_raw = whitelist.any? { |pattern| word =~ pattern }
    keep_raw ? word : Shellwords.shellescape(word)
  end
  words.join(' ')
end
ruby
{ "resource": "" }
q23388
PoiseLanguages.Utils.absolute_command
train
# Rewrites the program name in cmd (Array or shell String) to an absolute
# path found via which(), returning the same representation it was given.
# The first word is left untouched when it already looks like a flag
# (starts with '-') or a path (contains a file separator).
def absolute_command(cmd, path: nil)
  given_array = cmd.is_a?(Array)
  words = given_array ? cmd.dup : Shellwords.split(cmd)
  first = words.first
  if first && !first.start_with?('-') && !first.include?(::File::SEPARATOR)
    # Keep the original word when which() can't resolve it.
    words[0] = which(first, path: path) || first
  end
  given_array ? words : shelljoin(words)
end
ruby
{ "resource": "" }
q23389
WorkflowRb.StepBuilder.then
train
# Appends a follow-up step to the workflow, wired to run after the current
# step. When body is a Class its name becomes the step name; the optional
# block receives the new step's builder for further configuration.
# Returns the StepBuilder for the newly added step.
def then(body, &setup)
  step = WorkflowStep.new
  step.body = body
  @workflow_builder.add_step(step)
  builder = StepBuilder.new(@workflow_builder, step)
  step.name = body.name if body.kind_of?(Class)
  setup.call(builder) if setup
  # Link the current step to the new one via an unconditional outcome
  outcome = StepOutcome.new
  outcome.next_step = step.id
  outcome.value = nil
  @step.outcomes << outcome
  builder
end
ruby
{ "resource": "" }
q23390
WorkflowRb.StepBuilder.input
train
# Declares an input mapping for the step: step_property will be populated
# from the value produced by the given block at execution time.
# Returns self so calls can be chained.
def input(step_property, &value)
  @step.inputs << IOMapping.new.tap do |mapping|
    mapping.property = step_property
    mapping.value = value
  end
  self
end
ruby
{ "resource": "" }
q23391
Rijndael.Base.decrypt
train
# Decrypts a cipher text previously produced by this class.
#
# encrypted - String matching CIPHER_PATTERN, carrying a Base64-encoded IV
#             (capture 1) and Base64-encoded cipher text (capture 2)
#
# Returns the decrypted plain text String.
# Raises ArgumentError when the input is nil/empty or does not match the
# expected format.
def decrypt(encrypted)
  fail ArgumentError, 'No cipher text supplied.' if encrypted.nil? || encrypted.empty?
  matches = CIPHER_PATTERN.match(encrypted)
  fail ArgumentError, 'Cipher text has an unsupported format.' if matches.nil?
  cipher = self.class.cipher
  cipher.decrypt
  # @key is stored Base64-encoded; the IV travels with the cipher text
  cipher.key = Base64.decode64(@key)
  cipher.iv = Base64.decode64(matches[1])
  decrypted = cipher.update(Base64.decode64(matches[2]))
  # final flushes the last block (and verifies padding for block ciphers)
  decrypted << cipher.final
end
ruby
{ "resource": "" }
q23392
Fluent.SumologicCloudSyslogOutput.logger
train
# Returns a per-tag logger, reusing the cached one when possible and
# transparently replacing it when the cached logger has been closed.
def logger(tag)
  cached = (@loggers[tag] ||= new_logger(tag))
  cached = @loggers[tag] = new_logger(tag) if cached.closed?
  cached
end
ruby
{ "resource": "" }
q23393
Royce.Methods.add_role
train
# Grants the named role to the record, unless the role is not in the
# allowed list or is already assigned.
def add_role(name)
  return unless allowed_role?(name)
  return if has_role?(name)
  roles << Role.find_by(name: name.to_s)
end
ruby
{ "resource": "" }
q23394
UTF8Utils.StringExt.tidy_bytes
train
# Repairs a byte string into valid UTF-8 by scanning the UTF-8 byte
# structure and passing every byte that breaks it through tidy_byte.
#
# force - when truthy, skip structural analysis and tidy every byte
#
# Returns a new (presumably UTF-8-clean — confirm tidy_byte's contract)
# String; the receiver is not modified.
#
# NOTE(review): is_ascii is computed but never used below.
def tidy_bytes(force = false)
  if force
    return unpack("C*").map do |b|
      tidy_byte(b)
    end.flatten.compact.pack("C*").unpack("U*").pack("U*")
  end
  bytes = unpack("C*")
  conts_expected = 0  # continuation bytes still owed by the last lead byte
  last_lead = 0       # index of the most recent lead byte
  bytes.each_index do |i|
    byte = bytes[i]
    # Classify the byte by its UTF-8 role
    is_ascii = byte < 128
    is_cont = byte > 127 && byte < 192
    is_lead = byte > 191 && byte < 245
    is_unused = byte > 240
    is_restricted = byte > 244
    # Impossible or highly unlikely byte? Clean it.
    if is_unused || is_restricted
      bytes[i] = tidy_byte(byte)
    elsif is_cont
      # Not expecting contination byte? Clean up. Otherwise, now expect one less.
      conts_expected == 0 ? bytes[i] = tidy_byte(byte) : conts_expected -= 1
    else
      if conts_expected > 0
        # Expected continuation, but got ASCII or leading? Clean backwards up to
        # the leading byte.
        (1..(i - last_lead)).each {|j| bytes[i - j] = tidy_byte(bytes[i - j])}
        conts_expected = 0
      end
      if is_lead
        # Final byte is leading? Clean it.
        if i == bytes.length - 1
          bytes[i] = tidy_byte(bytes.last)
        else
          # Valid leading byte? Expect continuations determined by position of
          # first zero bit, with max of 3.
          conts_expected = byte < 224 ? 1 : byte < 240 ? 2 : 3
          last_lead = i
        end
      end
    end
  end
  # Repack the cleaned bytes and round-trip through UTF-8 codepoints
  bytes.empty? ? "" : bytes.flatten.compact.pack("C*").unpack("U*").pack("U*")
end
ruby
{ "resource": "" }
q23395
SetupAndTeardownAdapter.ClassMethods.setup
train
# Test::Unit-style setup adapter: registers each named method as a before
# hook. The three Rails infrastructure setup methods are registered with
# prepend_before so they run ahead of user-defined hooks; an optional
# block becomes a plain before hook.
def setup(*methods, &block)
  methods.each do |method|
    rails_hook = method.to_s =~ /^setup_(with_controller|fixtures|controller_request_and_response)$/
    if rails_hook
      prepend_before { __send__ method }
    else
      before { __send__ method }
    end
  end
  before(&block) if block
end
ruby
{ "resource": "" }
q23396
FakedCSV.Config.parse
train
# Validates @config (parsed config file) and materializes it into
# @row_count plus one normalized Hash per field appended to the `fields`
# collection (presumably an accessor over @fields — confirm).
#
# Raises RuntimeError on any missing/invalid configuration entry.
def parse
  # Row count: non-negative integer, defaulting to 100
  if @config["rows"].nil? || @config["rows"].to_i < 0
    @row_count = 100 # default value
  else
    @row_count = @config["rows"].to_i
  end
  @fields = []
  if @config["fields"].nil? || @config["fields"].empty?
    raise "need 'fields' in the config file and at least 1 field in it"
  end
  @config["fields"].each do |cfg|
    field = {}
    # Every field must carry a name and a type
    if cfg["name"].nil?
      raise "field needs a name"
    end
    field[:name] = cfg["name"].to_s
    if cfg["type"].nil? || cfg["type"].empty?
      raise "field needs a type"
    end
    field[:type] = cfg["type"].to_s
    # Optional: values to inject, only honored as a non-empty Array
    unless cfg["inject"].nil? || cfg["inject"].empty? || !cfg["inject"].kind_of?(Array)
      field[:inject] = cfg["inject"].uniq # get rid of duplicates
    end
    # Optional: rotation setting, validated by the helper
    unless cfg["rotate"].nil?
      field[:rotate] = _validate_rotate cfg["rotate"]
    end
    # Normalize the type string into a symbol plus type-specific options
    case field[:type]
    when /inc:int/i
      field[:type] = :inc_int
      field[:start] = cfg["start"].nil? ? 1 : cfg["start"].to_i
      field[:step] = cfg["step"].nil? ? 1 : cfg["step"].to_i
    when /rand:int/i
      field[:type] = :rand_int
      if cfg["range"].nil?
        # no range specified? use the default range: [0, 100]
        field[:min], field[:max] = 0, 100
      else
        field[:min], field[:max] = _min_max cfg["range"]
      end
    when /rand:float/i
      field[:type] = :rand_float
      if cfg["range"].nil?
        # no range specified? use the default range: [0, 1]
        field[:min], field[:max] = 0, 1
      else
        field[:min], field[:max] = _min_max cfg["range"]
      end
      field[:precision] = cfg["precision"].nil? ? 1 : cfg["precision"].to_i
    when /rand:char/i
      field[:type] = :rand_char
      field[:length] = cfg["length"].nil? ? 10 : cfg["length"]
      field[:format] = cfg["format"]
    when /fixed/i
      field[:type] = :fixed
      raise "need values for fixed type" if cfg["values"].nil?
      field[:values] = cfg["values"]
    when /faker:\S+/i
      # faker types keep their raw string form for later dispatch
      field[:type] = cfg["type"]
    else
      raise "unsupported type: #{field[:type]}. supported types: #{_supported_types}"
    end
    fields << field
  end
end
ruby
{ "resource": "" }
q23397
HAProxy.Parser.parse_server_attributes
train
# Splits a server line's trailing options into an attribute hash: each
# recognized attribute name starts a new entry and the following words are
# collected as its values, then normalized by
# clean_parsed_server_attributes. Raises when a word appears before any
# recognized attribute name.
def parse_server_attributes(value)
  grouped = {}
  current_name = nil
  value.to_s.split(/\s/).each do |token|
    if SERVER_ATTRIBUTE_NAMES.include?(token)
      current_name = token
      grouped[current_name] = []
    elsif current_name.nil?
      raise "Invalid server attribute: #{token}"
    else
      grouped[current_name] << token
    end
  end
  clean_parsed_server_attributes(grouped)
end
ruby
{ "resource": "" }
q23398
HAProxy.Parser.clean_parsed_server_attributes
train
# Normalizes parsed attribute values in place: an attribute with no values
# becomes the boolean flag true, otherwise its words are joined into a
# single space-separated string. Returns the mutated hash.
def clean_parsed_server_attributes(pairs)
  pairs.each_key do |name|
    values = pairs[name]
    pairs[name] = values.empty? ? true : values.join(" ")
  end
end
ruby
{ "resource": "" }
q23399
JGrep.Parser.parse
train
# Validates a scanned token stream and compiles it onto @execution_stack.
#
# substatement - when given, an Array of [token, value] pairs from a
#                nested '[...]' block; tokens are then only validated,
#                not pushed onto the stack
# token_index  - starting position within substatement
#
# Walks tokens pairwise (current/next), enforcing which token kinds may
# follow which, tracking parenthesis balance, and raising a descriptive
# RuntimeError on any violation.
def parse(substatement = nil, token_index = 0)
  p_token = nil
  # Current token comes from the substatement array or from the scanner
  if substatement
    c_token, c_token_value = substatement[token_index]
  else
    c_token, c_token_value = @scanner.get_token
  end
  parenth = 0
  until c_token.nil?
    # Fetch the lookahead token
    if substatement
      token_index += 1
      n_token, n_token_value = substatement[token_index]
    else
      @scanner.token_index += 1
      n_token, n_token_value = @scanner.get_token
    end
    next if n_token == " "
    case c_token
    when "and"
      # 'and' must be followed by an operand (or end of input) and must
      # not start the expression or follow another conjunction
      unless (n_token =~ /not|statement|\(|\+|-/) || (scanner.token_index == scanner.arguments.size)
        raise "Error at column #{scanner.token_index}. \nExpected 'not', 'statement' or '('. Found '#{n_token_value}'"
      end
      raise "Error at column #{scanner.token_index}. \n Expression cannot start with 'and'" if p_token.nil?
      raise "Error at column #{scanner.token_index}. \n #{p_token} cannot be followed by 'and'" if %w[and or].include?(p_token)
    when "or"
      # Same constraints as 'and'
      unless (n_token =~ /not|statement|\(|\+|-/) || (scanner.token_index == scanner.arguments.size)
        raise "Error at column #{scanner.token_index}. \nExpected 'not', 'statement', '('. Found '#{n_token_value}'"
      end
      raise "Error at column #{scanner.token_index}. \n Expression cannot start with 'or'" if p_token.nil?
      raise "Error at column #{scanner.token_index}. \n #{p_token} cannot be followed by 'or'" if %w[and or].include?(p_token)
    when "not"
      unless n_token =~ /statement|\(|not|\+|-/
        raise "Error at column #{scanner.token_index}. \nExpected 'statement' or '('. Found '#{n_token_value}'"
      end
    when "statement"
      # An Array value is a nested '[...]' block: recurse to validate it
      # (nesting inside an existing block is forbidden)
      if c_token_value.is_a? Array
        raise "Error at column #{scanner.token_index}\nError, cannot define '[' in a '[...]' block." if substatement
        parse(c_token_value, 0)
      end
      # '!=' is rewritten to '=' plus an explicit negation on the stack
      if c_token_value =~ /!=/
        c_token_value = c_token_value.gsub("!=", "=")
        @execution_stack << {"not" => "not"}
      end
      if !n_token.nil? && !n_token.match(/and|or|\)/)
        raise "Error at column #{scanner.token_index}. \nExpected 'and', 'or', ')'. Found '#{n_token_value}'"
      end
    when "+"
      if !n_token.nil? && !n_token.match(/and|or|\)/)
        raise "Error at column #{scanner.token_index}. \nExpected 'and', 'or', ')'. Found '#{n_token_value}'"
      end
    when "-"
      if !n_token.nil? && !n_token.match(/and|or|\)/)
        raise "Error at column #{scanner.token_index}. \nExpected 'and', 'or', ')'. Found '#{n_token_value}'"
      end
    when ")"
      # NOTE(review): `!n_token =~ /.../ ` parses as `(!n_token) =~ ...`,
      # which is never truthy, so this raise is unreachable; the regex's
      # leading empty alternative `/|.../` would also match anything.
      # Preserved as-is — fixing it would change accepted inputs.
      if !n_token.nil? && !n_token =~ /|and|or|not|\(/
        raise "Error at column #{scanner.token_index}. \nExpected 'and', 'or', 'not' or '('. Found '#{n_token_value}'"
      end
      parenth += 1
    when "("
      unless n_token =~ /statement|not|\(|\+|-/
        raise "Error at column #{scanner.token_index}. \nExpected 'statement', '(', not. Found '#{n_token_value}'"
      end
      parenth -= 1
    else
      raise "Unexpected token found at column #{scanner.token_index}. '#{c_token_value}'"
    end
    # Only top-level parsing emits onto the execution stack
    unless n_token == " " || substatement
      @execution_stack << {c_token => c_token_value}
    end
    p_token = c_token
    c_token = n_token
    c_token_value = n_token_value
  end
  return if substatement
  # Final parenthesis balance check (')' increments, '(' decrements)
  raise "Error. Missing parentheses ')'." if parenth < 0
  raise "Error. Missing parentheses '('." if parenth > 0
end
ruby
{ "resource": "" }