_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q23600
Moodle2CC::Moodle2Converter.Migrator.convert_question_banks
train
def convert_question_banks(question_categories) bank_converter = Moodle2CC::Moodle2Converter::QuestionBankConverter.new question_categories.map { |category| bank_converter.convert(category) } end
ruby
{ "resource": "" }
q23601
Packer.Client.build
train
def build(template, options = {}) args = ['build', '-machine-readable'] args << '-force' if options.key?(:force) args << "-except=#{options[:except].join(',')}" if options.key?(:except) args << "-only=#{options[:only].join(',')}" if options.key?(:only) args << "-parallel=#{options[:parallel]}" if options.key?(:parallel) args << "-var-file=#{options[:var_file]}" if options.key?(:var_file) vars = options[:vars] || {} vars.each { |key, val| args << "-var '#{key}=#{val}'" } args << template Packer::Output::Build.new( command(args, options[:live_stream])) end
ruby
{ "resource": "" }
q23602
Packer.Client.inspect_template
train
def inspect_template(template) args = ['inspect', '-machine-readable', template] Packer::Output::Inspect.new(command(args)) end
ruby
{ "resource": "" }
q23603
Packer.Client.push
train
def push(template, options = {}) args = ['push'] args << "-message=#{options[:message]}" if options.key?(:message) args << "-name=#{options[:name]}" if options.key?(:name) args << "-token=#{options[:token]}" if options.key?(:token) args << "-var-file=#{options[:var_file]}" if options.key?(:var_file) vars = options[:vars] || {} vars.each { |key, val| args << "-var '#{key}=#{val}'" } args << template Packer::Output::Push.new(command(args, options[:live_stream])) end
ruby
{ "resource": "" }
q23604
Packer.Client.validate
train
def validate(template, options = {}) args = ['validate'] args << '-syntax-only' if options.key?(:syntax_only) args << "-except=#{options[:except].join(',')}" if options.key?(:except) args << "-only=#{options[:only].join(',')}" if options.key?(:only) args << "-var-file=#{options[:var_file]}" if options.key?(:var_file) vars = options[:vars] || {} vars.each { |key, val| args << "-var '#{key}=#{val}'" } args << template Packer::Output::Validate.new(command(args, options[:live_stream])) end
ruby
{ "resource": "" }
q23605
VCAP::Services::Base::SnapshotV2.SnapshotClient.update_name
train
def update_name(service_id, snapshot_id, name) return unless service_id && snapshot_id && name verify_input_name(name) key = self.class.redis_key(service_id) # NOTE: ideally should watch on combination of (service_id, snapshot_id) # but current design doesn't support such fine-grained watching. client.watch(key) snapshot = client.hget(redis_key(service_id), snapshot_id) return nil unless snapshot snapshot = Yajl::Parser.parse(snapshot) snapshot["name"] = name res = client.multi do save_snapshot(service_id, snapshot) end unless res raise ServiceError.new(ServiceError::REDIS_CONCURRENT_UPDATE) end true end
ruby
{ "resource": "" }
q23606
Calyx.Modifiers.transform
train
def transform(name, value) if respond_to?(name) send(name, value) elsif value.respond_to?(name) value.send(name) else value end end
ruby
{ "resource": "" }
q23607
Squirm.Procedure.load
train
def load query = (arguments or self).info_sql Squirm.exec(query, [name, schema]) do |result| validate result set_values_from result end self end
ruby
{ "resource": "" }
q23608
Squirm.Procedure.call
train
def call(*args, &block) Squirm.exec query, arguments.format(*args) do |result| if block_given? yield result elsif return_type =~ /\ASETOF/ result.to_a else result.getvalue(0,0) end end end
ruby
{ "resource": "" }
q23609
Squirm.Procedure.validate
train
def validate(result) if result.ntuples == 0 raise NotFound elsif result.ntuples > 1 raise TooManyChoices end end
ruby
{ "resource": "" }
q23610
Interpolate.Points.merge!
train
def merge!(points = {}) # points must be a Hash raise ArgumentError, "key points must be a Hash object" unless points.is_a? Hash # ensure the points are all keyed Numeric-ally points.each do |key, value| raise ArgumentError, "found a point key that is not a Numeric object: #{key.inspect}" unless key.is_a? Numeric end @points.merge!(points) normalize_data self end
ruby
{ "resource": "" }
q23611
Interpolate.Points.at
train
def at(point, &block) # obvious cases first if @sorted.empty? # no key points return nil elsif @sorted.size == 1 # one key point return @sorted.first.last end # out-of-bounds cases next if point <= @min_point # lower than lowest key point return @sorted.first.last elsif point >= @max_point # higher than highest key point return @sorted.last.last end # binary search to find the right interpolation key point/value interval left = 0 right = @sorted.length - 2 # highest point will be included low_point = nil low_value = nil high_point = nil high_value = nil while left <= right middle = (right - left) / 2 + left (low_point, low_value) = @sorted[middle] (high_point, high_value) = @sorted[middle + 1] break if low_point <= point and point <= high_point if point < low_point right = middle - 1 else left = middle + 1 end end # determine the balance ratio span = high_point - low_point balance = (point.to_f - low_point) / span # choose and call the blending function blend = block || @blend_with || DEFAULT_BLEND blend.call(low_value, high_value, balance) end
ruby
{ "resource": "" }
q23612
Moodle2CC::CanvasCC::Models.QuestionBank.find_children_banks
train
def find_children_banks(all_banks, visited_banks=[]) visited_banks << self children = [] sub_children = [] all_banks.each do |bank| children << bank if bank.parent_id && bank.parent_id == self.original_id && !visited_banks.include?(bank) end children.each do |child| sub_children += child.find_children_banks(all_banks, visited_banks) end return children + sub_children end
ruby
{ "resource": "" }
q23613
Ore.Naming.module_of
train
def module_of(word) if COMMON_NAMESPACES.has_key?(word) COMMON_NAMESPACES[word] elsif COMMON_ABBREVIATIONS.has_key?(word) COMMON_ABBREVIATIONS[word] else word.capitalize end end
ruby
{ "resource": "" }
q23614
Ore.Naming.modules_of
train
def modules_of(name) names_in(name).map do |words| words.split('_').map { |word| module_of(word) }.join end end
ruby
{ "resource": "" }
q23615
JAPR.Pipeline.convert
train
def convert @assets.each do |asset| # Convert asset multiple times if more than one converter is found finished = false while finished == false # Find a converter to use klass = JAPR::Converter.klass(asset.filename) # Convert asset if converter is found if klass.nil? finished = true else convert_asset(klass, asset) end end end end
ruby
{ "resource": "" }
q23616
JAPR.Pipeline.bundle
train
def bundle content = @assets.map(&:content).join("\n") hash = JAPR::Pipeline.hash(@source, @manifest, @options) @assets = [JAPR::Asset.new(content, "#{@prefix}-#{hash}#{@type}")] end
ruby
{ "resource": "" }
q23617
JAPR.Pipeline.write_asset_file
train
def write_asset_file(directory, asset) FileUtils.mkpath(directory) unless File.directory?(directory) begin # Save file to disk File.open(File.join(directory, asset.filename), 'w') do |file| file.write(asset.content) end rescue StandardError => se puts "Asset Pipeline: Failed to save '#{asset.filename}' to " \ "disk: #{se.message}" raise se end end
ruby
{ "resource": "" }
q23618
JAPR.Pipeline.markup
train
def markup # Use display_path if defined, otherwise use output_path in url display_path = @options['display_path'] || @options['output_path'] @html = @assets.map do |asset| klass = JAPR::Template.klass(asset.filename) html = klass.new(display_path, asset.filename).html unless klass.nil? html end.join end
ruby
{ "resource": "" }
q23619
SeventeenMon.IP.four_number
train
def four_number @four_number ||= begin fn = ip.split(".").map(&:to_i) raise "ip is no valid" if fn.length != 4 || fn.any?{ |d| d < 0 || d > 255} fn end end
ruby
{ "resource": "" }
q23620
Moodle2CC::CC.Assignment.build_meta_fields
train
def build_meta_fields(mod) fields = {} %w{mod_type assignment_type}.each do |key| fields[key] = mod.send(key) if mod.respond_to?(key) end if mod.grade_item Moodle2CC::Moodle::GradeItem::PROPERTIES.each do |key| fields[key] = mod.grade_item.send(key) if mod.grade_item.respond_to?(key) end end fields end
ruby
{ "resource": "" }
q23621
Squirm.Core.connect
train
def connect(options = {}) return @pool = options[:pool] if options[:pool] options = options.dup timeout = options.delete(:timeout) || 5 pool_size = options.delete(:pool_size) || 1 @pool = Squirm::Pool.new(timeout) pool_size.times do conn = PGconn.open(options) yield conn if block_given? @pool.checkin conn end end
ruby
{ "resource": "" }
q23622
Squirm.Core.exec
train
def exec(*args, &block) if current = Thread.current[:squirm_connection] current.exec(*args, &block) else use {|conn| conn.exec(*args, &block)} end end
ruby
{ "resource": "" }
q23623
Squirm.Core.use
train
def use(conn = nil) conn_given = !!conn conn = conn_given ? conn : @pool.checkout begin yield Thread.current[:squirm_connection] = conn ensure Thread.current[:squirm_connection] = nil @pool.checkin conn unless conn_given end end
ruby
{ "resource": "" }
q23624
VCAP::Services::Base::AsyncJob.Package.manifest=
train
def manifest=(hash) return unless hash raise "Input should be Hash" unless hash.is_a? Hash @manifest.merge! VCAP.symbolize_keys(hash) end
ruby
{ "resource": "" }
q23625
VCAP::Services::Base::AsyncJob.Package.pack
train
def pack(force=nil) if File.exists? @zipfile if force File.delete @zipfile else raise "File #{@zipfile} already exists." end end dirname = File.dirname(@zipfile) raise "Dir #{dirname} is not exists." unless File.exists? dirname raise "Dir #{dirname} is not writable." unless File.writable? dirname Zip::ZipFile.open(@zipfile, Zip::ZipFile::CREATE) do |zf| # manifest file zf.get_output_stream(MANIFEST_FILE) {|o| o << Yajl::Encoder.encode(@manifest)} @files.each do |f, path| zf.add("#{CONTENT_FOLDER}/#{f}", path) end end begin File.chmod(@filemode, @zipfile) rescue => e raise "Fail to change the mode of #{@zipfile} to #{@filemode.to_s(8)}: #{e}" end end
ruby
{ "resource": "" }
q23626
VCAP::Services::Base::AsyncJob.Package.unpack
train
def unpack path raise "File #{@zipfile} not exists." unless File.exists? @zipfile raise "unpack path: #{path} not found." unless Dir.exists? path raise "unpack path: #{path} is not writable." unless File.writable? path files = [] Zip::ZipFile.foreach(@zipfile) do |entry| next if entry.to_s == MANIFEST_FILE entry_name = File.basename entry.to_s dst_path = File.join(path, entry_name) dirname = File.dirname(dst_path) FileUtils.mkdir_p(dirname) unless File.exists? dirname files << dst_path entry.extract(dst_path) end files.freeze yield files if block_given? files rescue => e # auto cleanup if error raised. files.each{|f| File.delete f if File.exists? f} if files raise ServiceError.new(ServiceError::FILE_CORRUPTED) if e.is_a? Zlib::DataError raise e end
ruby
{ "resource": "" }
q23627
VCAP::Services::Base::AsyncJob.Package.load_manifest
train
def load_manifest zf = Zip::ZipFile.open(@zipfile) @manifest = VCAP.symbolize_keys(Yajl::Parser.parse(zf.read(MANIFEST_FILE))) rescue Errno::ENOENT => e raise ServiceError.new(ServiceError::BAD_SERIALIZED_DATAFILE, "request. Missing manifest.") end
ruby
{ "resource": "" }
q23628
Calyx.Registry.filter
train
def filter(name, callable=nil, &block) if block_given? transforms[name.to_sym] = block else transforms[name.to_sym] = callable end end
ruby
{ "resource": "" }
q23629
Calyx.Registry.define_rule
train
def define_rule(name, trace, productions) rules[name.to_sym] = Rule.new(name.to_sym, construct_rule(productions), trace) end
ruby
{ "resource": "" }
q23630
Calyx.Registry.define_context_rule
train
def define_context_rule(name, trace, productions) productions = [productions] unless productions.is_a?(Enumerable) context[name.to_sym] = Rule.new(name.to_sym, construct_rule(productions), trace) end
ruby
{ "resource": "" }
q23631
Calyx.Registry.expand
train
def expand(symbol) expansion = rules[symbol] || context[symbol] if expansion.nil? if @options.strict? raise Errors::UndefinedRule.new(@last_expansion, symbol) else expansion = Production::Terminal.new('') end end @last_expansion = expansion expansion end
ruby
{ "resource": "" }
q23632
Calyx.Registry.transform
train
def transform(name, value) if transforms.key?(name) transforms[name].call(value) else modifiers.transform(name, value) end end
ruby
{ "resource": "" }
q23633
Calyx.Registry.unique_expansion
train
def unique_expansion(symbol) pending = true uniques[symbol] = [] if uniques[symbol].nil? while pending if uniques[symbol].size == expand(symbol).size uniques[symbol] = [] pending = false end result = expand(symbol).evaluate(@options) unless uniques[symbol].include?(result) uniques[symbol] << result pending = false end end result end
ruby
{ "resource": "" }
q23634
Calyx.Registry.evaluate
train
def evaluate(start_symbol=:start, rules_map={}) reset_evaluation_context rules_map.each do |key, value| if rules.key?(key.to_sym) raise Errors::DuplicateRule.new(key) end define_context_rule(key, caller_locations.last, value) end [start_symbol, expand(start_symbol).evaluate(@options)] end
ruby
{ "resource": "" }
q23635
VCAP::Services::Base::AsyncJob.Lock.make_logger
train
def make_logger logger = Logger.new(STDOUT) logger.level = Logger::ERROR logger end
ruby
{ "resource": "" }
q23636
VCAP::Services.AsynchronousServiceGateway.validate_incoming_request
train
def validate_incoming_request unless request.media_type == Rack::Mime.mime_type('.json') error_msg = ServiceError.new(ServiceError::INVALID_CONTENT).to_hash logger.error("Validation failure: #{error_msg.inspect}, request media type: #{request.media_type} is not json") abort_request(error_msg) end unless auth_token && (auth_token == @token) error_msg = ServiceError.new(ServiceError::NOT_AUTHORIZED).to_hash logger.error("Validation failure: #{error_msg.inspect}, expected token: #{@token}, specified token: #{auth_token}") abort_request(error_msg) end unless @handle_fetched error_msg = ServiceError.new(ServiceError::SERVICE_UNAVAILABLE).to_hash logger.error("Validation failure: #{error_msg.inspect}, handles not fetched") abort_request(error_msg) end end
ruby
{ "resource": "" }
q23637
VCAP::Services::Base::SnapshotV2.BaseRollbackSnapshotJob.perform
train
def perform begin required_options :service_id, :snapshot_id @name = options["service_id"] @snapshot_id = options["snapshot_id"] @logger.info("Launch job: #{self.class} for #{name}") lock = create_lock @snapshot_files = [] lock.lock do # extract origin files from package dump_path = get_dump_path(name, snapshot_id) package_file = "#{snapshot_id}.zip" package = Package.load(File.join(dump_path, package_file)) @manifest = package.manifest @snapshot_files = package.unpack(dump_path) @logger.debug("Unpack files from #{package_file}: #{@snapshot_files}") raise "Package file doesn't contain snapshot file." if @snapshot_files.empty? result = execute @logger.info("Results of rollback snapshot: #{result}") completed(Yajl::Encoder.encode({:result => :ok})) @logger.info("Complete job: #{self.class} for #{name}") end rescue => e handle_error(e) ensure set_status({:complete_time => Time.now.to_s}) @snapshot_files.each{|f| File.delete(f) if File.exists? f} if @snapshot_files end end
ruby
{ "resource": "" }
q23638
Before.ClassMethods.enhance_method
train
def enhance_method(method, callbacks) _method = (PREFIX + method.to_s).to_sym alias_method _method, method self.send(:define_method, method) do |*args, &blk| [callbacks, _method].flatten.each do |callback| break unless self.send(callback, *args, &blk) end end end
ruby
{ "resource": "" }
q23639
Moodle2CC::Moodle2Converter.QuestionnaireConverter.convert_rating_question
train
def convert_rating_question(moodle_question, canvas_question) choices = create_rating_choices(moodle_question) canvas_question.responses = [] moodle_question.choices.each_with_index do |answer, answer_idx| response = {:id => "response#{answer_idx + 1}", :choices => []} # add dropdown to the question text canvas_question.material = canvas_question.material.to_s + "<p>#{answer[:content]} [#{response[:id]}]</p>" choices.each_with_index do |choice, choice_idx| response[:choices] << {:id => "#{moodle_question.id}_choice_#{answer_idx}_#{choice_idx}", :text => choice} end canvas_question.responses << response end end
ruby
{ "resource": "" }
q23640
SPOT.Paginator.enumerator
train
def enumerator Enumerator.new do |yielder| response = get_initial_response loop do items = response.records # If there are no records, we're done break if items.empty? # Otherwise, iterate through the records... items.each { |item| yielder << item } # ...and fetch the next page @params ||= {} @params[:page] ||= 1 @params[:page] += 1 response = @service.list(**@params) end end.lazy end
ruby
{ "resource": "" }
q23641
Troles::Common::Api.Write.static_role!
train
def static_role! role_name raise ArgumentError, "Take a single role name, was: #{role_name}" if !role_name || !role_name.kind_of_label? troles_config.add_valid_roles role_name if set_roles role_name define_method :static_roles? do true end end end
ruby
{ "resource": "" }
q23642
Troles::Common::Api.Write.set_roles
train
def set_roles *roles roles_to_set = make_valid_roles(*roles).flat_uniq return false if !roles_to_set || roles_to_set.empty? store.set_roles(roles_to_set) end
ruby
{ "resource": "" }
q23643
PoParser.Entry.untranslated?
train
def untranslated? return false if obsolete? || fuzzy? return @msgstr.map(&:str).join.empty? if @msgstr.is_a? Array @msgstr.nil? || @msgstr.str.empty? end
ruby
{ "resource": "" }
q23644
PoParser.Entry.to_h
train
def to_h instance_variables.each_with_object({}) do |label, hash| object = instance_variable_get(label) # If it's a plural msgstr if object.is_a?(Array) object.each do |entry| hash[entry.type] = entry.to_s unless entry.nil? end else hash[object.type] = object.to_s unless object.nil? end end end
ruby
{ "resource": "" }
q23645
PoParser.Entry.to_s
train
def to_s LABELS.each_with_object([]) do |label, arr| object = instance_variable_get("@#{label}".to_sym) # If it's a plural msgstr if object.is_a?(Array) arr.push(*object.map { |entry| entry.to_s(true) }.compact) else arr << object.to_s(true) unless object.nil? end end.join end
ruby
{ "resource": "" }
q23646
QiwiPay.PaymentOperation.params_hash
train
def params_hash %i[opcode].push(*ATTRIBUTES) .map { |a| [a, send(a).to_s] } .to_h .reject { |_k, v| v.nil? || v.empty? } end
ruby
{ "resource": "" }
q23647
QiwiPay.PaymentOperation.request_params
train
def request_params params_hash.tap do |params| params[:sign] = Signature.new(params, credentials.secret).sign end end
ruby
{ "resource": "" }
q23648
Trole::Storage.BaseOne.set_roles
train
def set_roles *roles raise ArgumentError, "A single role strategy can only allow setting a single role, was: #{roles}" if (roles.size > 1) set_role roles.flat_uniq.first end
ruby
{ "resource": "" }
q23649
Trole::Storage.BitOne.set_role
train
def set_role role num = bitmask.write role set_ds_field(num == 1 ? false : true) # boolean field in Data store end
ruby
{ "resource": "" }
q23650
QiwiPay.Confirmation.to_h
train
def to_h {}.tap do |h| ALLOWED_PARAMS.each { |p| h[p] = send(p) } h[:txn_status_message] = txn_status_message h[:txn_type_message] = txn_type_message end end
ruby
{ "resource": "" }
q23651
QiwiPay.Confirmation.calculated_sign
train
def calculated_sign params = SIGN_PARAMS.each_with_object({}) do |p, h| h[p] = send(p).tap { |v| v ? v.to_s : nil } end Signature.new(params, @secret).sign.upcase end
ruby
{ "resource": "" }
q23652
Trole::Api.Write.set_role
train
def set_role new_role value = make_valid_role new_role return false if !value store.set_role(value) end
ruby
{ "resource": "" }
q23653
Flt.Bytes.to_hex
train
def to_hex(sep_bytes=false) hx = @bytes.unpack('H*')[0].upcase if sep_bytes sep = "" (0...hx.size).step(2) do |i| sep << " " unless i==0 sep << hx[i,2] end hx = sep end hx end
ruby
{ "resource": "" }
q23654
Flt.Bytes.reverse_byte_nibbles!
train
def reverse_byte_nibbles! w = "" w.force_encoding("BINARY") if w.respond_to?(:force_encoding) @bytes.each_byte do |b| w << ((b >> 4)|((b&0xF)<<4)) end @bytes = w __setobj__ @bytes self end
ruby
{ "resource": "" }
q23655
Flt.Bytes.reverse_byte_pairs!
train
def reverse_byte_pairs! w = "" w.force_encoding("BINARY") if w.respond_to?(:force_encoding) (0...@bytes.size).step(2) do |i| w << @bytes[i+1] w << @bytes[i] end @bytes = w __setobj__ @bytes self end
ruby
{ "resource": "" }
q23656
Flt.Bytes.to_bitfields
train
def to_bitfields(lens,byte_endianness=:little_endian, bits_little_endian=false) fields = [] i = to_i(byte_endianness,bits_little_endian) for len in lens mask = (1<<len)-1 fields << (i&mask) i >>= len end fields end
ruby
{ "resource": "" }
q23657
ActiveRecord.PostgreSQLCursor.each
train
def each @model.transaction do begin declare_cursor if @join_dependency rows = Array.new last_id = nil while row = fetch_forward instantiated_row = @join_dependency.instantiate([row], @join_dependency.aliases).first current_id = instantiated_row[@join_dependency.join_root.primary_key] last_id ||= current_id if last_id == current_id rows << row last_id = current_id else yield @join_dependency.instantiate(rows, @join_dependency.aliases).first rows = [ row ] end last_id = current_id end if !rows.empty? yield @join_dependency.instantiate(rows, @join_dependency.aliases).first end else while row = fetch_forward yield @model.instantiate(row) end end ensure close_cursor end end nil end
ruby
{ "resource": "" }
q23658
QiwiPay.Signature.map_sorted
train
def map_sorted(hash) hash.keys .sort_by(&:to_sym) .map { |k| yield k, hash[k] } end
ruby
{ "resource": "" }
q23659
PoParser.Header.merge_to_previous_string
train
def merge_to_previous_string(array) array.each_with_index do |key, index| next unless key.length == 1 array[index - 1][1] += key[0] array.delete_at(index) end end
ruby
{ "resource": "" }
q23660
SPOT.ApiService.get
train
def get(path:, params: {}) params ||= {} if feed_password && !feed_password.empty? params = params.merge(feedPassword: feed_password) end response = make_request(:get, path, params) SPOT::ApiResponse.new(response) end
ruby
{ "resource": "" }
q23661
Troles::Common.Config.auto_config?
train
def auto_config? name return auto_config[name] if !auto_config[name].nil? Troles::Config.auto_config?(name) end
ruby
{ "resource": "" }
q23662
Troles::Common.Config.main_field=
train
def main_field= field_name name = field_name.to_s.alpha_numeric.to_sym raise ArgumentException, "Not a valid field name: #{field_name}" if !valid_field_name?(name) @main_field ||= name end
ruby
{ "resource": "" }
q23663
Troles::Mongoid.Config.configure_relation
train
def configure_relation case strategy when :ref_many has_many_for subject_class, :role, :through => join_key belongs_to_for join_model, subject_class belongs_to_for join_model, object_model has_many_for role, subject_class, :through => join_key when :embed_many embeds_many subject_class, object_model end end
ruby
{ "resource": "" }
q23664
TroleGroups::Api.Write.set_rolegroups
train
def set_rolegroups *rolegroups rolegroups_to_set = make_valid_rolegroups(*rolegroups).flat_uniq return false if !rolegroups_to_set || rolegroups_to_set.empty? group_store.set_rolegroups(rolegroups_to_set) end
ruby
{ "resource": "" }
q23665
PoParser.Po.add
train
def add(entry) return import_hash(entry) if entry.is_a?(Hash) return import_array(entry) if entry.is_a?(Array) raise ArgumentError, 'Must be a hash or an array of hashes' end
ruby
{ "resource": "" }
q23666
PoParser.Po.delete
train
def delete(entry) raise(ArgumentError, 'Must be an Entry') unless entry.is_a?(PoParser::Entry) @entries.delete(entry) end
ruby
{ "resource": "" }
q23667
PoParser.Po.stats
train
def stats untranslated_size = untranslated.size translated_size = translated.size fuzzy_size = fuzzy.size { translated: percentage(translated_size), untranslated: percentage(untranslated_size), fuzzy: percentage(fuzzy_size), } end
ruby
{ "resource": "" }
q23668
PoParser.Po.save_file
train
def save_file raise ArgumentError, 'Need a Path to save the file' if @path.nil? File.open(@path, 'w') { |file| file.write(to_s) } end
ruby
{ "resource": "" }
q23669
DineroMailIpn.NotificationParser.xsd_file
train
def xsd_file(filename) xsd_file_location = File.join(DineroMailIpn.resources_path, "/validation/xsd/#{filename}") xsd_file = File.read(xsd_file_location) xsd_file end
ruby
{ "resource": "" }
q23670
RightGit::Shell.Default.execute
train
def execute(cmd, options = {}) options = { :directory => nil, :outstream => nil, :raise_on_failure => true, :set_env_vars => nil, :clear_env_vars => nil, :logger => default_logger, :timeout => nil, :keep_alive_interval => nil, :keep_alive_timeout => nil }.merge(options) outstream = options[:outstream] logger = options[:logger] if keep_alive_interval = options[:keep_alive_interval] keep_alive_wake_time = ::Time.now + keep_alive_interval else keep_alive_wake_time = nil end if keep_alive_timeout = options[:keep_alive_timeout] unless keep_alive_interval raise ::ArgumentError, ':keep_alive_interval is required when using :keep_alive_timeout' end keep_alive_stop_time = ::Time.now + keep_alive_timeout else keep_alive_stop_time = nil end # build initial popener. exitstatus = nil popener = lambda do |output| output.sync = true loop do # note stdout remains selectable after process dies. if (::IO.select([output], nil, nil, 0.1) rescue nil) if data = output.gets if outstream outstream << data else data = data.strip unless data.empty? logger.info(data) # reset keep alive timer whenever we have normal output. if keep_alive_wake_time keep_alive_wake_time = ::Time.now + keep_alive_interval end end end else break end elsif keep_alive_wake_time now = ::Time.now if keep_alive_stop_time && now >= keep_alive_stop_time keep_alive_wake_time = nil elsif now >= keep_alive_wake_time # keep-alives go to logger, not the outstream, if any. logger.info('.') keep_alive_wake_time = now + keep_alive_interval end now = nil end end end # timeout optionally wraps popener. the timeout must happen inside of the # IO.popen block or else it has no good effect. if timeout = options[:timeout] popener = lambda do |p| lambda do |o| ::Timeout.timeout(timeout) { p.call(o) } end end.call(popener) end # build initial executioner in terms of popener. 
executioner = lambda do logger.info("+ #{cmd}") error_msg = nil ::IO.popen("#{cmd} 2>&1", 'r') do |output| begin popener.call(output) rescue ::EOFError # done rescue ::Timeout::Error # kill still-running process or else popen's ensure will hang. ::Process.kill('KILL', output.pid) # intentionally not reading last data as that could still block # due to a child of created process inheriting stdout. error_msg = "Execution timed out after #{options[:timeout]} seconds." end end # note that a killed process may exit 0 under Windows. exitstatus = $?.exitstatus if 0 == exitstatus && error_msg exitstatus = 1 end if (exitstatus != 0 && options[:raise_on_failure]) error_msg ||= "Execution failed with exitstatus #{exitstatus}" raise ShellError, error_msg end end # configure executioner (by options) and then invoke executioner. configure_executioner(executioner, options).call return exitstatus end
ruby
{ "resource": "" }
q23671
RightGit::Shell.Default.output_for
train
def output_for(cmd, options = {}) output = StringIO.new execute(cmd, options.merge(:outstream => output)) output.string end
ruby
{ "resource": "" }
q23672
RightGit::Shell.Default.configure_executioner
train
def configure_executioner(executioner, options) # set specific environment variables, if requested. sev = options[:set_env_vars] if (sev && !sev.empty?) executioner = lambda do |e| lambda { set_env_vars(sev) { e.call } } end.call(executioner) end # clear specific environment variables, if requested. cev = options[:clear_env_vars] if (cev && !cev.empty?) executioner = lambda do |e| lambda { clear_env_vars(cev) { e.call } } end.call(executioner) end # working directory. if directory = options[:directory] executioner = lambda do |e, d| lambda { ::Dir.chdir(d) { e.call } } end.call(executioner, directory) end executioner end
ruby
{ "resource": "" }
q23673
RightGit::Shell.Default.set_env_vars
train
def set_env_vars(variables) save_vars = {} variables.each do |k, v| k = k.to_s save_vars[k] = ENV[k] ENV[k] = v.nil? ? v : v.to_s end begin yield ensure variables.each_key do |k| k = k.to_s ENV[k] = save_vars[k] end end true end
ruby
{ "resource": "" }
q23674
Vic.Highlight.bg=
train
def bg=(color) color = Color.new(color) @ctermbg, @guibg = color.to_cterm, color.to_gui self end
ruby
{ "resource": "" }
q23675
Vic.Highlight.fg=
train
def fg=(color) color = Color.new(color) @ctermfg, @guifg = color.to_cterm, color.to_gui self end
ruby
{ "resource": "" }
q23676
Vic.Highlight.select_styles
train
def select_styles(*styles) styles.tap(&:compact).flatten! if styles.empty? or styles.length == 1 && /\Anone\z/io.match(styles[0]) return :NONE end styles.select { |s| FONT_STYLE.match(s) } end
ruby
{ "resource": "" }
q23677
BioTCM::Databases::HGNC.Parser.parse
train
def parse(fin) # Headline names = fin.gets.chomp.split("\t") index2identifier = {} index_hgncid = nil BioTCM::Databases::HGNC::IDENTIFIERS.each do |identifer, name| if identifer == :hgncid index_hgncid = names.index(name) elsif name.is_a?(String) index2identifier[names.index(name)] = identifer if names.index(name) else name.each_with_index do |n, i| next unless names.index(n) index2identifier[names.index(n)] = (i == 0 ? identifer : identifer.to_s) end end end # Dynamically bulid a line processor process_one_line = index2identifier.collect do |index, identifer| # Symbol will be mapped to single item if identifer.is_a?(Symbol) %( unless column[#{index}] == nil || column[#{index}] == "" || column[#{index}] == "-" @#{identifer}2hgncid[column[#{index}]] = column[#{index_hgncid}] @hgncid2#{identifer}[column[#{index_hgncid}]] = column[#{index}] end ) # Others will be mapped to list item else %{ unless column[#{index}] == nil column[#{index}].split(", ").each do |id| } + if identifer == 'symbol' %( if @ambiguous_symbol[id] @ambiguous_symbol[id] << @hgncid2symbol[column[#{index_hgncid}]] elsif @symbol2hgncid[id].nil? @symbol2hgncid[id] = column[#{index_hgncid}] else @ambiguous_symbol[id] = [@hgncid2symbol[column[#{index_hgncid}]]] unless @hgncid2symbol[@symbol2hgncid[id]] == id @ambiguous_symbol[id] << @hgncid2symbol[@symbol2hgncid[id]] @symbol2hgncid.delete(id) end end ) else %( @#{identifer}2hgncid[id] = column[#{index_hgncid}] if @#{identifer}2hgncid[id].nil? ) end + %( end end ) end end.join # Process the content eval %{fin.each do |line|\n column = line.chomp.split("\\t", -1)} + process_one_line + 'end' # rubocop:disable Lint/Eval end
ruby
{ "resource": "" }
q23678
RightScraper::Retrievers.Git.retrieve
train
def retrieve raise RetrieverError.new("git retriever is unavailable") unless available? private_key = @repository.first_credential private_key = nil if private_key && private_key.empty? if is_windows? if private_key with_private_key_windows(private_key) { super } else super end else # always start the ssh agent in Linux so we can disable strict host name # checking, regardless of credentials. ::RightScraper::Processes::SSHAgent.with do |agent| agent.add_key(private_key) if private_key super end end true end
ruby
{ "resource": "" }
q23679
RightScraper::Retrievers.Git.do_clean_all
train
# Scrubs the checkout back to a pristine state by running a forced git
# clean in the repository root and in every (recursive) submodule
# directory, then restores the shell's original working directory.
#
# @param git_repo [Object] repository wrapper exposing #repo_dir,
#   #submodule_paths, #shell and #clean_all
# @return [TrueClass, FalseClass] true on success, false when git errored
def do_clean_all(git_repo)
  saved_directory = git_repo.repo_dir
  clean_options = { :directories => true, :gitignored => true, :submodules => true }
  # clean the repo root ('.') plus every nested submodule directory.
  candidates = ['.', git_repo.submodule_paths(:recursive => true)].flatten
  candidates.each do |rel_path|
    abs_path = ::File.expand_path(::File.join(@repo_dir, rel_path))
    next unless ::File.directory?(abs_path)
    # reuse shell with any watch parameters already set but vary the
    # initial directory for each submodule.
    git_repo.shell.initial_directory = abs_path
    git_repo.clean_all(clean_options)
  end
  true
rescue ::RightGit::RightGitError => e
  # cleaning is best-effort; report and signal failure to the caller.
  @logger.note_warning(e.message)
  false
ensure
  git_repo.shell.initial_directory = saved_directory
end
ruby
{ "resource": "" }
q23680
RightScraper::Retrievers.Git.validate_revision
train
# Validates a revision (tag, branch name or SHA) prior to checkout and,
# when the revision names exactly one remote branch, resolves its full
# remote branch name.
#
# @param git_repo [Object] repository wrapper exposing #branches, #tags,
#   #checkout_to and #spit_output
# @param revision [String] tag, branch name or (possibly partial) SHA
# @return [String, nil] full remote branch name when revision names a
#   remote branch; nil for tags, SHAs or unknown revisions
# @raise [RetrieverError] when the revision is ambiguous or names a
#   local-only branch
def validate_revision(git_repo, revision)
  branches = git_repo.branches(:all => true)
  local_branches = branches.local
  remote_branches = branches.remote
  by_name = lambda { |item| item.name == revision }

  # determine if revision is a tag.
  remote_name = nil
  if git_repo.tags.any?(&by_name)
    if remote_branches.any?(&by_name)
      # note that git has some resolution scheme for ambiguous SHA, tag,
      # branch names but we do not support ambiguity.
      raise RetrieverError, "Ambiguous name is both a remote branch and a tag: #{revision.inspect}"
    elsif local_branches.any?(&by_name)
      # odd corner case of a name that once was a remote branch (now
      # deleted) that has become a tag instead. the user is not exactly
      # at fault here (aside from being indecisive) so let's attempt to
      # clean up after him. try switching to another local branch
      # (i.e. master) and then deleting the obsolete local branch.
      error_message = "Ambiguous name is both a local branch and a tag: #{revision.inspect}"
      if revision == DEFAULT_BRANCH_NAME
        # Darwin Awards winner; scraping with a tag named 'master' :@
        raise RetrieverError, error_message
      else
        begin
          # checkout master and delete obsolete local branch.
          git_repo.checkout_to(DEFAULT_BRANCH_NAME, :force => true)
          git_repo.spit_output("branch -D #{revision}")
        rescue ::RightGit::RightGitError
          # ignore failed attempt to recover; raise original error.
          raise RetrieverError, error_message
        end
      end
    end
  else
    # not a tag; SHA or branch.
    #
    # note that we could try to trivially determine if revision was a
    # SHA by matching the SHA1 pattern except that:
    # 1) git accepts partial SHAs so long as they uniquely distinguish
    #    a commit for checkout.
    # 2) a branch or tag could name could match the SHA pattern (i.e.
    #    40 hexadecimal characters) with no warnings from git. git will
    #    even allow a user to use a SHA as a tag name when that SHA
    #    exists (and may represent a different commit).
    # confusing tags with SHAs should be universally discouraged but we
    # need to be flexible here.
    #
    # a local branch may no longer exist remotely or may be behind or
    # have diverged from remote branch. handle all cases.
    remotes = remote_branches.select(&by_name)
    if remotes.size > 1
      # multiple remote branches exist (from different origins); branch
      # name is ambiguous.
      raise RetrieverError, "Ambiguous remote branch name: #{revision.inspect}"
    elsif remotes.size == 1
      # a remote branch exists.
      remote_name = remotes.first.fullname
    elsif local_branches.any?(&by_name)
      # local branch only; failure due to missing remote branch.
      #
      # note that obsolete local branches are not supported by retrieval
      # only because it would give the user a false positive.
      raise RetrieverError, "Missing remote branch: #{revision.inspect}."
    end
    # else a full or partial SHA or unknown revision
  end
  remote_name
end
ruby
{ "resource": "" }
q23681
RightScraper::Retrievers.Git.without_size_limit
train
# Temporarily lifts the shell's output-size limit (max_bytes) while the
# given block runs; the previous limit is restored afterward even when
# the block raises.
#
# @param git_repo [Object] repository whose #shell limit is suspended
# @return [Object] the block's result
def without_size_limit(git_repo)
  saved_limit = git_repo.shell.max_bytes
  begin
    # nil disables the byte cap entirely for the duration of the block.
    git_repo.shell.max_bytes = nil
    yield
  ensure
    git_repo.shell.max_bytes = saved_limit
  end
end
ruby
{ "resource": "" }
q23682
RightScraper::Retrievers.Git.without_host_key_checking_linux
train
# Writes a throwaway GIT_SSH wrapper script that invokes ssh with strict
# host-key checking disabled, points git at it for the duration of the
# block, then removes the script and restores the previous GIT_SSH value.
#
# Linux/Mac only: relies on bash and a POSIX chmod.
#
# @return [Object] the block's result
def without_host_key_checking_linux
  tmpdir = ::Dir.mktmpdir
  ssh_cmd = ::File.join(tmpdir, 'ssh')
  ::File.open(ssh_cmd, 'w') do |cmd|
    cmd.puts "#!/bin/bash"
    # forward all arguments to the real ssh with checking disabled.
    cmd.puts "exec ssh -o StrictHostKeyChecking=no ${@}"
  end
  # script must be executable for git to invoke it.
  ::FileUtils.chmod(0700, ssh_cmd)
  old_env = ::ENV['GIT_SSH']
  ::ENV['GIT_SSH'] = ssh_cmd
  yield
ensure
  ::FileUtils.rm_rf(tmpdir)
  # assigning nil removes the variable when it was previously unset.
  ::ENV['GIT_SSH'] = old_env
end
ruby
{ "resource": "" }
q23683
RightScraper::Retrievers.Git.with_replaced_file
train
# Temporarily replaces the file at filepath with the given contents for
# the duration of the block, then restores the original file (or removes
# the temporary replacement when no original existed).
#
# @param filepath [String] path of the file to replace
# @param contents [String] replacement contents to write
# @return [Object] the block's result
def with_replaced_file(filepath, contents)
  ::Dir.mktmpdir do |temp_dir|
    begin
      # stash any pre-existing file in a scratch directory.
      temp_path = ::File.join(temp_dir, ::File.basename(filepath))
      ::FileUtils.mkdir_p(::File.dirname(filepath))
      if ::File.file?(filepath)
        ::FileUtils.mv(filepath, temp_path, :force => true)
      end
      ::File.open(filepath, 'w') { |f| f.write(contents) }
      yield
    ensure
      begin
        # restore the stashed original, or remove our replacement when
        # there was no original to restore.
        if ::File.file?(temp_path)
          ::FileUtils.mv(temp_path, filepath, :force => true)
        elsif ::File.file?(filepath)
          ::File.unlink(filepath)
        end
      rescue ::Exception => e
        # restore is best-effort; warn instead of masking the block's
        # own error with a cleanup failure.
        @logger.note_warning("Failed to restore #{filepath.inspect}: #{e.message}")
      end
    end
  end
end
ruby
{ "resource": "" }
q23684
RightGit::Git.Repository.fetch_all
train
# Fetches from all remotes, then fetches all tags.
#
# @param options [Hash] :prune (default false) to drop stale remote refs
# @return [TrueClass] always true
def fetch_all(options = {})
  opts = { :prune => false }.merge(options)
  remote_args = opts[:prune] ? ['--all', '--prune'] : ['--all']
  fetch(remote_args)
  # tags require a separate fetch call or else not all tags arrive.
  fetch('--tags')
  true
end
ruby
{ "resource": "" }
q23685
RightGit::Git.Repository.log
train
# Produces the commit log for a revision as parsed Commit objects.
#
# @param revision [String, nil] revision to log, or nil for the current one
# @param options [Hash] :skip (offset), :tail (max count, default 10_000),
#   :merges / :no_merges (filtering), :full_hashes (long SHA format)
# @return [Array<Commit>] one Commit per emitted log line
def log(revision, options = {})
  opts = {
    :skip => nil,
    :tail => 10_000,
    :merges => false,
    :no_merges => false,
    :full_hashes => false,
  }.merge(options)
  fmt = opts[:full_hashes] ? Commit::LOG_FORMAT_LONG : Commit::LOG_FORMAT
  # double-quotes are Windows friendly
  args = ['log', "-n#{opts[:tail]}", "--format=\"#{fmt}\""]
  args << "--skip #{opts[:skip]}" if opts[:skip]
  args << "--merges" if opts[:merges]
  args << "--no-merges" if opts[:no_merges]
  args << revision if revision
  git_output(args).lines.map { |line| Commit.new(self, line.strip) }
end
ruby
{ "resource": "" }
q23686
RightGit::Git.Repository.clean_all
train
# Removes untracked files from the work tree via 'git clean'.
#
# @param options [Hash] :directories to also remove untracked dirs,
#   :gitignored to also remove ignored files, :submodules to force
#   removal of untracked submodules
# @return [TrueClass] always true
def clean_all(options = {})
  opts = {
    :directories => false,
    :gitignored => false,
    :submodules => false,
  }.merge(options)
  # a single -f is mandatory; without it -n would only list files.
  args = ['-f']
  # a second -f is git's switch for killing untracked submodules.
  args << '-f' if opts[:submodules]
  args << '-d' if opts[:directories]
  args << '-x' if opts[:gitignored]
  clean(args)
  true
end
ruby
{ "resource": "" }
q23687
RightGit::Git.Repository.submodule_paths
train
# Lists the paths of registered submodules by parsing
# 'git submodule status' output.
#
# @param options [Hash] :recursive to include nested submodules
# @return [Array<String>] submodule paths
# @raise [GitError] when a status line cannot be parsed
def submodule_paths(options = {})
  recursive = { :recursive => false }.merge(options)[:recursive]
  args = ['submodule', 'status']
  args << '--recursive' if recursive
  git_output(args).lines.map do |line|
    text = line.chomp
    match = SUBMODULE_STATUS_REGEX.match(text)
    unless match
      raise GitError, "Unexpected output from submodule status: #{text.inspect}"
    end
    # capture group 3 holds the submodule path.
    match[3]
  end
end
ruby
{ "resource": "" }
q23688
RightGit::Git.Repository.update_submodules
train
# Initializes and updates submodules ('git submodule update --init').
#
# @param options [Hash] :recursive to update nested submodules as well
# @return [TrueClass] always true
def update_submodules(options = {})
  recursive = { :recursive => false }.merge(options)[:recursive]
  args = ['submodule', 'update', '--init']
  args << '--recursive' if recursive
  spit_output(args)
  true
end
ruby
{ "resource": "" }
q23689
RightGit::Git.Repository.sha_for
train
# Resolves a revision to its full commit SHA by scanning 'git show'
# output for the commit line.
#
# note that 'git show-ref' produces easier-to-parse output but it matches
# both local and remote branch to a simple branch name whereas 'git show'
# matches at-most-one and requires origin/ for remote branches.
#
# @param revision [String, nil] revision to resolve (nil means HEAD)
# @return [String] full commit SHA
# @raise [GitError] when no commit line appears in the output
def sha_for(revision)
  sha = nil
  git_output(['show', revision].compact).lines.each do |line|
    if found = COMMIT_SHA1_REGEX.match(line.strip)
      sha = found[1]
      break
    end
  end
  raise GitError, 'Unable to locate commit in show output.' unless sha
  sha
end
ruby
{ "resource": "" }
q23690
RightGit::Git.Repository.vet_output
train
# Runs a git command and raises when the (zero-exit) output nevertheless
# reveals an error — git sometimes prints 'error:' or 'fatal:' lines
# while still exiting with status zero.
#
# @param args [Array] arguments forwarded to git_output
# @return [TrueClass] always true
# @raise [GitError] when an error/fatal line is detected in the output
def vet_output(*args)
  output = git_output(*args).strip
  logger.info(output) unless output.empty?
  # multi-line match: any line starting with error:/fatal: counts.
  if output.downcase =~ /^(error|fatal):/
    raise GitError, "Git exited zero but an error was detected in output."
  end
  true
end
ruby
{ "resource": "" }
q23691
RightScraper::Scanners.CookbookMetadata.end
train
# Completes scanning of a cookbook: parses the 'metadata.json' contents
# captured during the scan (via @read_blk) and attaches the result to
# the resource. Scanner state is always reset afterward.
#
# @param resource [Object] cookbook resource receiving #metadata=
# @return [TrueClass] always true
def end(resource)
  @logger.operation(:metadata_parsing) do
    if @read_blk
      metadata = ::JSON.parse(@read_blk.call)
      resource.metadata = metadata

      # check for undefined cookbook name.
      #
      # note that many specs in right_scraper use badly formed metadata
      # that is not even a hash so, to avoid having to fix all of them
      # (and also in case the user's metadata.json is not a hash) check
      # for the has_key? method.
      #
      # if real metadata is not a hash then that should cause failure
      # at a higher level. if the cookbook name is actually defined as
      # being 'undefined' then the user gets a warning anyway.
      if (metadata.respond_to?(:has_key?) &&
          metadata['name'] == UNDEFINED_COOKBOOK_NAME)
        message =
          'Cookbook name appears to be undefined and has been' +
          ' supplied automatically.'
        @logger.note_warning(message)
      end
    else
      # should not be scanning at all unless one of the metadata files was
      # detected before starting scan.
      fail 'Unexpected missing metadata'
    end
  end
  true
ensure
  @read_blk = nil
  @cookbook = nil
end
ruby
{ "resource": "" }
q23692
RightScraper::Scanners.CookbookMetadata.finish
train
# Finishes the scan: removes the temporary directory if one was created
# and clears thread-local storage. Cleanup failures are logged, never
# raised.
def finish
  tmpdir = tls[:tmpdir]
  ::FileUtils.remove_entry_secure(tmpdir) if tmpdir
rescue ::Exception => e
  # best-effort teardown; never let cleanup itself blow up the scan.
  @logger.note_warning(e.message)
ensure
  # Cleanup thread-local storage
  tls.clear
end
ruby
{ "resource": "" }
q23693
RightScraper::Scanners.CookbookMetadata.generate_metadata_json
train
# Generates 'metadata.json' for the current cookbook by running Chef's
# knife in a child process against a jailed (copied) view of the repo,
# then copies the generated JSON out of the jail and returns its
# contents.
#
# @return [String] contents of the freed metadata.json file
# @raise [MetadataError] when output already exists, knife fails, or the
#   generated file is missing/oversized (via do_copy_out)
def generate_metadata_json
  @logger.operation(:metadata_generation) do
    # note we will use the same tmpdir path inside and outside the
    # container only because it is non-trivial to invoke mktmpdir inside
    # the container.
    tmpdir, created = create_tmpdir

    # path constants
    src_knife_script_path = ::File.expand_path(
      ::File.join(__FILE__, '../../../../scripts', KNIFE_METADATA_SCRIPT_NAME))
    dst_knife_script_dir = tmpdir
    dst_knife_script_path = ::File.join(dst_knife_script_dir, KNIFE_METADATA_SCRIPT_NAME)
    jailed_repo_dir = ::File.join(tmpdir, UNDEFINED_COOKBOOK_NAME)
    # a cookbook position of '.' means the cookbook is the repo root.
    jailed_cookbook_dir = (@cookbook.pos == '.' && jailed_repo_dir) ||
                          ::File.join(jailed_repo_dir, @cookbook.pos)
    jailed_metadata_json_path = ::File.join(jailed_cookbook_dir, JSON_METADATA)
    freed_metadata_dir = (@cookbook.pos == '.' && freed_dir) ||
                         ::File.join(freed_dir, @cookbook.pos)
    freed_metadata_json_path = ::File.join(freed_metadata_dir, JSON_METADATA)

    # in the multi-pass case we will run this scanner only on the first pass
    # so the 'metadata.json' file should not exist. the read-only scanner,
    # which is safe outside of containment, should be used subsequently.
    # the entire 'freed' directory should have been removed upon the next
    # successful retrieval so that this scanner will succeed.
    if ::File.file?(freed_metadata_json_path)
      raise MetadataError,
            "Refused to overwrite already-generated metadata file: #{freed_metadata_json_path}"
    end

    # jail the repo using the legacy semantics for copying files in and out
    # of jail.
    copy_out = { jailed_metadata_json_path => freed_metadata_json_path }

    # copy files into the jail once per repository (i.e. not once per
    # cookbook within the repository).
    if created
      copy_in = generate_copy_in(@cookbook.repo_dir, jailed_repo_dir)
      copy_in[src_knife_script_path] = dst_knife_script_path

      # note that at this point we previously used Warden as a container
      # for the copied-in files but now we assume that the current process
      # is already in a container (i.e. Docker) and so this copying is
      # more about creating a writable directory for knife than about
      # containment. the checked-out repo should be read-only to this
      # contained process due to running with limited privileges.
      do_copy_in(copy_in)
    end

    # HACK: support ad-hoc testing in dev-mode by using the current version
    # for rbenv shell.
    if ::ENV['RBENV_VERSION'].to_s.empty?
      ruby = 'ruby'
    else
      ruby = `which ruby`.chomp
    end

    # execute knife as a child process. any constraints are assumed to be
    # imposed on the current process by a container (timeout, memory, etc.)
    shell = ::RightGit::Shell::Default
    output = StringIO.new
    begin
      shell.execute(
        "#{ruby} #{dst_knife_script_path.inspect} #{jailed_cookbook_dir.inspect} 2>&1",
        directory: dst_knife_script_dir,
        outstream: output,
        raise_on_failure: true,
        set_env_vars: { LC_ALL: 'en_US.UTF-8' }, # character encoding for emitted JSON
        clear_env_vars: %w{BUNDLE_BIN_PATH BUNDLE_GEMFILE},
        timeout: KNIFE_METADATA_TIMEOUT)
      output = output.string
    rescue ::RightGit::Shell::ShellError => e
      output = output.string
      raise MetadataError, "Failed to run chef knife: #{e.message}\n#{output[0, 1024]}"
    end

    # free files from jail.
    do_copy_out(copy_out)

    # load and return freed metadata.
    return ::File.read(freed_metadata_json_path)
  end
end
ruby
{ "resource": "" }
q23694
RightScraper::Scanners.CookbookMetadata.do_copy_in
train
# Copies each source file to its mapped destination path, creating
# parent directories as needed; entries whose source and destination
# are identical are skipped.
#
# @param path_map [Hash] source path => destination path
# @return [TrueClass] always true
def do_copy_in(path_map)
  path_map.each do |from, to|
    next if from == to
    ::FileUtils.mkdir_p(::File.dirname(to))
    ::FileUtils.cp(from, to)
  end
  true
end
ruby
{ "resource": "" }
q23695
RightScraper::Scanners.CookbookMetadata.do_copy_out
train
# Copies generated files out of the jail, verifying each source exists
# and stays within the allowed size before copying.
#
# @param path_map [Hash] jailed source path => freed destination path
# @return [TrueClass] always true
# @raise [MetadataError] when a source is missing or exceeds the limit
def do_copy_out(path_map)
  path_map.each do |from, to|
    # constraining the generated 'metadata.json' size is debatable, but
    # our UI attempts to load metadata JSON into memory far too often to
    # be blasé about generating multi-megabyte JSON files.
    unless ::File.file?(from)
      raise MetadataError, "Expected generated file was not found: #{from}"
    end
    size = ::File.stat(from).size
    if size > FREED_FILE_SIZE_CONSTRAINT
      raise MetadataError,
            "Generated file size of" +
            " #{size / 1024} KB" +
            " exceeded the allowed limit of" +
            " #{FREED_FILE_SIZE_CONSTRAINT / 1024} KB"
    end
    ::FileUtils.mkdir_p(::File.dirname(to))
    ::FileUtils.cp(from, to)
  end
  true
end
ruby
{ "resource": "" }
q23696
RightScraper::Scanners.CookbookMetadata.generate_copy_in
train
# Builds a source-to-destination map for every (size-limited) file found
# under src_base_path, rebasing each path under dst_base_path.
#
# @param src_base_path [String] directory tree to walk
# @param dst_base_path [String] directory to mirror the tree into
# @return [Hash] absolute source path => absolute destination path
def generate_copy_in(src_base_path, dst_base_path)
  src_root = ::File.expand_path(src_base_path)
  dst_root = ::File.expand_path(dst_base_path) + '/'
  sources = []
  recursive_generate_copy_in(sources, src_root)
  # strip "<src_root>/" from each source to get its repo-relative path.
  prefix_len = (src_root + '/').length
  sources.each_with_object({}) do |src, map|
    map[src] = ::File.join(dst_root, src[prefix_len..-1])
  end
end
ruby
{ "resource": "" }
q23697
RightScraper::Scanners.CookbookMetadata.recursive_generate_copy_in
train
# Depth-first walk that accumulates all size-limited files beneath
# current_path into the copy_in array.
#
# @param copy_in [Array] accumulator receiving discovered file paths
# @param current_path [String] directory to walk
# @return [TrueClass] always true
def recursive_generate_copy_in(copy_in, current_path)
  limited_files_of(current_path) do |file_path|
    copy_in << file_path
  end
  directories_of(current_path) do |dir_path|
    # recurse into each child directory.
    recursive_generate_copy_in(copy_in, ::File.join(dir_path))
  end
  true
end
ruby
{ "resource": "" }
q23698
RightScraper::Scanners.CookbookMetadata.limited_files_of
train
# Yields each regular file directly under parent whose size is within
# the jail limit. An oversized Ruby metadata source aborts the scan;
# any other oversized file is logged and skipped.
#
# @param parent [String] directory whose files are enumerated
# @raise [MetadataError] when the metadata source itself is too large
def limited_files_of(parent)
  ::Dir["#{parent}/*"].each do |entry|
    next unless ::File.file?(entry)
    if ::File.stat(entry).size <= JAILED_FILE_SIZE_CONSTRAINT
      yield entry
    elsif ::File.basename(entry) == RUBY_METADATA
      # an oversized metadata.rb cannot be silently skipped — fail loud.
      raise MetadataError,
            'Metadata source file' +
            " #{relative_to_repo_dir(entry).inspect}" +
            ' in repository exceeded size constraint of' +
            " #{JAILED_FILE_SIZE_CONSTRAINT / 1024} KB"
    else
      # other oversized files are merely noted and omitted from the jail.
      @logger.info(
        'Ignored a repository file during metadata' +
        ' generation due to exceeding size constraint of' +
        " #{JAILED_FILE_SIZE_CONSTRAINT / 1024} KB:" +
        " #{relative_to_repo_dir(entry).inspect}")
    end
  end
end
ruby
{ "resource": "" }
q23699
RightScraper::Scanners.CookbookMetadata.directories_of
train
# Yields each subdirectory found directly under parent.
#
# Note: a Dir glob with "/*" never returns '.' or '..' entries (and the
# yielded items are full paths, not bare names), so the previous
# special-case branch for '.'/'..' was unreachable and has been removed.
#
# @param parent [String] directory whose children are enumerated
def directories_of(parent)
  ::Dir["#{parent}/*"].each do |item|
    yield item if ::File.directory?(item)
  end
end
ruby
{ "resource": "" }