Dataset columns:

  _id               string   length 2 to 6 characters
  title             string   length 9 to 130 characters
  partition         string   3 classes
  text              string   length 66 to 10.5k characters
  language          string   1 class
  meta_information  dict
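For quick orientation, a minimal loading sketch using the Hugging Face `datasets` library follows; it is not part of the original card. The repository namespace is not shown on this page, so `<owner>` below is a placeholder, and the actual split names may differ from the values stored in the `partition` column.

```python
from datasets import load_dataset

# "<owner>" is a placeholder; substitute the actual repository namespace.
ds = load_dataset("<owner>/CodeSearchNet-ruby-queries-corpus")
print(ds)  # lists the available splits and the columns summarized above

split = list(ds.keys())[0]   # e.g. "train"; actual split names may vary
example = ds[split][0]
print(example["_id"], example["title"], example["partition"], example["language"])
print(example["text"])       # the raw Ruby source for this entry
```

The rows below (q0 through q99) are the viewer's preview of the corpus; each pairs a fully qualified Ruby method name (`title`) with its source code (`text`), plus the `partition`, `language`, and `meta_information` fields.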
q0
CelluloidPubsub.Reactor.handle_parsed_websocket_message
train
def handle_parsed_websocket_message(json_data) data = json_data.is_a?(Hash) ? json_data.stringify_keys : {} if CelluloidPubsub::Reactor::AVAILABLE_ACTIONS.include?(data['client_action'].to_s) log_debug "#{self.class} finds actions for #{json_data}" delegate_action(data) if data['client_action'].present? else handle_unknown_action(data['channel'], json_data) end end
ruby
{ "resource": "" }
q1
CelluloidPubsub.Reactor.unsubscribe
train
def unsubscribe(channel, _json_data) log_debug "#{self.class} runs 'unsubscribe' method with #{channel}" return unless channel.present? forget_channel(channel) delete_server_subscribers(channel) end
ruby
{ "resource": "" }
q2
CelluloidPubsub.Reactor.delete_server_subscribers
train
def delete_server_subscribers(channel) @server.mutex.synchronize do (@server.subscribers[channel] || []).delete_if do |hash| hash[:reactor] == Actor.current end end end
ruby
{ "resource": "" }
q3
CelluloidPubsub.Reactor.unsubscribe_clients
train
def unsubscribe_clients(channel, _json_data) log_debug "#{self.class} runs 'unsubscribe_clients' method with #{channel}" return if channel.blank? unsubscribe_from_channel(channel) @server.subscribers[channel] = [] end
ruby
{ "resource": "" }
q4
CelluloidPubsub.Reactor.add_subscriber_to_channel
train
def add_subscriber_to_channel(channel, message) registry_channels = CelluloidPubsub::Registry.channels @channels << channel registry_channels << channel unless registry_channels.include?(channel) @server.mutex.synchronize do @server.subscribers[channel] = channel_subscribers(channel).push(reactor: Actor.current, message: message) end end
ruby
{ "resource": "" }
q5
CelluloidPubsub.Reactor.publish
train
def publish(current_topic, json_data) message = json_data['data'].to_json return if current_topic.blank? || message.blank? server_pusblish_event(current_topic, message) rescue => exception log_debug("could not publish message #{message} into topic #{current_topic} because of #{exception.inspect}") end
ruby
{ "resource": "" }
q6
CelluloidPubsub.Reactor.server_pusblish_event
train
def server_pusblish_event(current_topic, message) @server.mutex.synchronize do (@server.subscribers[current_topic].dup || []).pmap do |hash| hash[:reactor].websocket << message end end end
ruby
{ "resource": "" }
q7
CelluloidPubsub.Reactor.unsubscribe_all
train
def unsubscribe_all(_channel, json_data) log_debug "#{self.class} runs 'unsubscribe_all' method" CelluloidPubsub::Registry.channels.dup.pmap do |channel| unsubscribe_clients(channel, json_data) end log_debug 'clearing connections' shutdown end
ruby
{ "resource": "" }
q8
CelluloidPubsub.Reactor.server_kill_reactors
train
def server_kill_reactors(channel) @server.mutex.synchronize do (@server.subscribers[channel].dup || []).pmap do |hash| reactor = hash[:reactor] reactor.websocket.close Celluloid::Actor.kill(reactor) end end end
ruby
{ "resource": "" }
q9
EmailDirect.RelaySend::Email.send
train
def send(options) response = EmailDirect.post "/RelaySends/#{category_id}", :body => options.to_json Hashie::Mash.new(response) end
ruby
{ "resource": "" }
q10
Aequitas.ContextualRuleSet.concat
train
def concat(other) other.rule_sets.each do |context_name, rule_set| add_rules_to_context(context_name, rule_set) end self end
ruby
{ "resource": "" }
q11
Aequitas.ContextualRuleSet.define_context
train
def define_context(context_name) rule_sets.fetch(context_name) do |context_name| rule_sets[context_name] = RuleSet.new end self end
ruby
{ "resource": "" }
q12
Gattica.Engine.do_http_get
train
def do_http_get(query_string) response, data = @http.get(query_string, @headers) # error checking if response.code != '200' case response.code when '400' raise GatticaError::AnalyticsError, response.body + " (status code: #{response.code})" when '401' raise GatticaError::InvalidToken, "Your authorization token is invalid or has expired (status code: #{response.code})" else # some other unknown error raise GatticaError::UnknownAnalyticsError, response.body + " (status code: #{response.code})" end end return data end
ruby
{ "resource": "" }
q13
Gattica.Engine.build_query_string
train
def build_query_string(args,profile) query_params = args.clone ga_start_date = query_params.delete(:start_date) ga_end_date = query_params.delete(:end_date) ga_dimensions = query_params.delete(:dimensions) ga_metrics = query_params.delete(:metrics) ga_sort = query_params.delete(:sort) ga_filters = query_params.delete(:filters) output = "ids=ga:#{profile}&start-date=#{ga_start_date}&end-date=#{ga_end_date}" unless ga_dimensions.nil? || ga_dimensions.empty? output += '&dimensions=' + ga_dimensions.collect do |dimension| "ga:#{dimension}" end.join(',') end unless ga_metrics.nil? || ga_metrics.empty? output += '&metrics=' + ga_metrics.collect do |metric| "ga:#{metric}" end.join(',') end unless ga_sort.nil? || ga_sort.empty? output += '&sort=' + Array(ga_sort).collect do |sort| sort[0..0] == '-' ? "-ga:#{sort[1..-1]}" : "ga:#{sort}" # if the first character is a dash, move it before the ga: end.join(',') end # TODO: update so that in regular expression filters (=~ and !~), any initial special characters in the regular expression aren't also picked up as part of the operator (doesn't cause a problem, but just feels dirty) unless args[:filters].empty? # filters are a little more complicated because they can have all kinds of modifiers output += '&filters=' + args[:filters].collect do |filter| match, name, operator, expression = *filter.match(/^(\w*)\s*([=!<>~@]*)\s*(.*)$/) # splat the resulting Match object to pull out the parts automatically unless name.empty? || operator.empty? || expression.empty? # make sure they all contain something "ga:#{name}#{CGI::escape(operator.gsub(/ /,''))}#{CGI::escape(expression)}" # remove any whitespace from the operator before output else raise GatticaError::InvalidFilter, "The filter '#{filter}' is invalid. Filters should look like 'browser == Firefox' or 'browser==Firefox'" end end.join(';') end query_params.inject(output) {|m,(key,value)| m << "&#{key}=#{value}"} return output end
ruby
{ "resource": "" }
q14
AllscriptsApi.Client.get_token
train
def get_token full_path = build_request_path("/GetToken") response = conn.post do |req| req.url(full_path) req.body = { Username: @username, Password: @password }.to_json end raise(GetTokenError, response.body) unless response.status == 200 @token = response.body end
ruby
{ "resource": "" }
q15
AllscriptsApi.Client.get_user_authentication
train
def get_user_authentication(username, password) @allscripts_username = username params = MagicParams.format(user_id: username, parameter1: password) response = magic("GetUserAuthentication", magic_params: params) response["getuserauthenticationinfo"][0] end
ruby
{ "resource": "" }
q16
AllscriptsApi.Client.validate_sso_token
train
def validate_sso_token(sso_token = nil) sso_token ||= @sso_token params = MagicParams.format(parameter1: sso_token) response = magic("GetTokenValidation", magic_params: params) response["Table"][0] end
ruby
{ "resource": "" }
q17
AllscriptsApi.Client.magic
train
def magic(action, magic_params: MagicParams.format) full_path = build_request_path("/MagicJson") body = build_magic_body(action, magic_params) response = conn.post do |req| req.url(full_path) req.body = body end read_magic_response(action, response) end
ruby
{ "resource": "" }
q18
SknUtils.NestedResult.initialize_for_speed
train
def initialize_for_speed(hash) hash.each_pair do |k,v| key = key_as_sym(k) case v when Array value = v.map { |element| translate_value(element) } container.store(key, value) when Hash container.store(key, NestedResult.new(v)) else container.store(key, v) end end end
ruby
{ "resource": "" }
q19
AirVideo.Client.set_proxy
train
def set_proxy(proxy_server_and_port = "") begin @proxy = URI.parse("http://"+((proxy_server_and_port.empty?) ? ENV['HTTP_PROXY'] : string_proxy)) @http = Net::HTTP::Proxy(@proxy.host, @proxy.port) rescue @proxy = nil @http = Net::HTTP end end
ruby
{ "resource": "" }
q20
Cloudster.Output.output_template
train
def output_template(outputs) resource_name = outputs.keys[0] outputs_array = outputs.values[0].collect each_output_join = outputs_array.collect {|output| {"Fn::Join" => ["|", output]}} return resource_name => { 'Value' => { "Fn::Join" => [ ",", each_output_join] } } end
ruby
{ "resource": "" }
q21
Folio.Ordinal.configure_pagination
train
def configure_pagination(page, options) page = super(page, options) raise ::Folio::InvalidPage unless page.current_page.is_a?(Integer) raise ::Folio::InvalidPage if page.out_of_bounds? page rescue ::WillPaginate::InvalidPage raise ::Folio::InvalidPage end
ruby
{ "resource": "" }
q22
Cloudster.ElasticIp.add_to
train
def add_to(ec2) ec2_template = ec2.template @instance_name = ec2.name elastic_ip_template = template ec2.template.inner_merge(elastic_ip_template) end
ruby
{ "resource": "" }
q23
RubyOnAcid.InputFactory.put
train
def put(key, value) value = value.to_f @input_values[key] = value @smallest_seen_values[key] ||= 0.0 if @largest_seen_values[key] == nil or @smallest_seen_values[key] > @largest_seen_values[key] @largest_seen_values[key] = @smallest_seen_values[key] + 1.0 end @smallest_seen_values[key] = value if value < @smallest_seen_values[key] @largest_seen_values[key] = value if value > @largest_seen_values[key] end
ruby
{ "resource": "" }
q24
RubyOnAcid.InputFactory.assigned_key
train
def assigned_key(key) return @key_assignments[key] if @key_assignments[key] available_keys = @input_values.keys - @key_assignments.values return nil if available_keys.empty? if available_keys.include?(key) @key_assignments[key] = key else @key_assignments[key] = available_keys[rand(available_keys.length)] end @key_assignments[key] end
ruby
{ "resource": "" }
q25
VSS.Engine.search
train
def search(query) # get ranks query_vector = make_query_vector(query) ranks = @documents.map do |document| document_vector = make_vector(document) cosine_rank(query_vector, document_vector) end # now annotate records and return them @records.each_with_index do |record, i| # TODO: do this in a sensible way... record.instance_eval %{def rank; #{ranks[i]}; end} end # exclude 0 rank (no match) and sort by rank @records.reject { |r| r.rank == 0 }.sort { |a,b| b.rank <=> a.rank } end
ruby
{ "resource": "" }
q26
Evnt.Event._init_event_data
train
def _init_event_data(params) # set state @state = { reloaded: !params[:evnt].nil?, saved: true } # set options initial_options = { exceptions: false, silent: false } default_options = _safe_default_options || {} params_options = params[:_options] || {} @options = initial_options.merge(default_options) .merge(params_options) # set name and attributes @name = _safe_name @attributes = _safe_attributes # set payload payload = params.reject { |k, _v| k[0] == '_' } @payload = @state[:reloaded] ? payload : _generate_payload(payload) # set extras @extras = {} extras = params.select { |k, _v| k[0] == '_' } extras.each { |k, v| @extras[k[1..-1].to_sym] = v } end
ruby
{ "resource": "" }
q27
OptionalLogger.Logger.add
train
def add(severity, message = nil, progname_or_message = nil, &block) @logger.add(severity, message, progname_or_message, &block) if @logger end
ruby
{ "resource": "" }
q28
Wbem.CimxmlClient._identify
train
def _identify begin product = nil { "sfcb" => [ "root/interop", "CIM_ObjectManager" ], "pegasus" => [ "root/PG_Internal", "PG_ConfigSetting" ] }.each do |cimom, op| obj = objectpath *op @client.instances(obj).each do |inst| product = inst.Description || cimom break end break if product end rescue Sfcc::Cim::ErrorInvalidClass, Sfcc::Cim::ErrorInvalidNamespace raise "Unknown CIMOM" end product end
ruby
{ "resource": "" }
q29
Wbem.CimxmlClient.each_instance
train
def each_instance( namespace_or_objectpath, classname = nil ) op = if namespace_or_objectpath.is_a? Sfcc::Cim::ObjectPath namespace_or_objectpath else objectpath namespace_or_objectpath, classname end begin @client.instances(op).each do |inst| yield inst end rescue Sfcc::Cim::ErrorInvalidClass, Sfcc::Cim::ErrorInvalidNamespace end end
ruby
{ "resource": "" }
q30
Wbem.CimxmlClient.class_names
train
def class_names op, deep_inheritance = false ret = [] unless op.is_a? Sfcc::Cim::ObjectPath op = Sfcc::Cim::ObjectPath.new(op.to_s, nil) # assume namespace end flags = deep_inheritance ? Sfcc::Flags::DeepInheritance : 0 begin @client.class_names(op, flags).each do |name| ret << name.to_s end rescue Sfcc::Cim::ErrorInvalidNamespace end ret end
ruby
{ "resource": "" }
q31
Wbem.CimxmlClient.each_association
train
def each_association( objectpath ) begin @client.associators(objectpath).each do |assoc| yield assoc end rescue Sfcc::Cim::ErrorInvalidClass, Sfcc::Cim::ErrorInvalidNamespace end end
ruby
{ "resource": "" }
q32
Weibo2.Client.get_token_from_hash
train
def get_token_from_hash(hash) access_token = hash.delete('access_token') || hash.delete(:access_token) || hash.delete('oauth_token') || hash.delete(:oauth_token) opts = {:expires_at => hash["expires"] || hash[:expires], :header_format => "OAuth2 %s", :param_name => "access_token"} @token = OAuth2::AccessToken.new(self, access_token, opts) end
ruby
{ "resource": "" }
q33
Wbem.WsmanClient.epr_uri_for
train
def epr_uri_for(namespace,classname) case @product when :winrm # winrm embeds namespace in resource URI Openwsman::epr_uri_for(namespace,classname) rescue "http://schema.suse.com/wbem/wscim/1/cim-schema/2/#{namespace}/#{classname}" else (Openwsman::epr_prefix_for(classname)+"/#{classname}") rescue "http://schema.suse.com/wbem/wscim/1/cim-schema/2/#{classname}" end end
ruby
{ "resource": "" }
q34
Wbem.WsmanClient._handle_fault
train
def _handle_fault client, result if result.nil? STDERR.puts "Client connection failed:\n\tResult code #{client.response_code}, Fault: #{client.fault_string}" if Wbem.debug return true end if result.fault? fault = Openwsman::Fault.new result if Wbem.debug STDERR.puts "Client protocol failed for (#{client})" STDERR.puts "\tFault code #{fault.code}, subcode #{fault.subcode}" STDERR.puts "\t\treason #{fault.reason}" STDERR.puts "\t\tdetail #{fault.detail}" end return true end false end
ruby
{ "resource": "" }
q35
Wbem.WsmanClient.namespaces
train
def namespaces ns = "root", cn = "__Namespace" result = [] each_instance( ns, cn ) do |inst| name = "#{ns}/#{inst.Name}" result << name result.concat namespaces name, cn end result.uniq end
ruby
{ "resource": "" }
q36
Wbem.WsmanClient.class_names
train
def class_names op, deep_inheritance = false @options.flags = Openwsman::FLAG_ENUMERATION_OPTIMIZATION @options.max_elements = 999 namespace = (op.is_a? Sfcc::Cim::ObjectPath) ? op.namespace : op classname = (op.is_a? Sfcc::Cim::ObjectPath) ? op.classname : nil case @product when :openwsman if @product_version < "2.2" STDERR.puts "ENUMERATE_CLASS_NAMES unsupported for #{@product_vendor} #{@product_version}, please upgrade" return [] end method = Openwsman::CIM_ACTION_ENUMERATE_CLASS_NAMES uri = Openwsman::XML_NS_CIM_INTRINSIC @options.cim_namespace = namespace @options.add_selector("DeepInheritance", "True") if deep_inheritance result = @client.invoke( @options, uri, method ) when :winrm # see https://github.com/kkaempf/openwsman/blob/master/bindings/ruby/tests/winenum.rb filter = Openwsman::Filter.new query = "select * from meta_class" query << " where __SuperClass is #{classname?classname:'null'}" unless deep_inheritance filter.wql query uri = "#{@prefix}#{namespace}/*" result = @client.enumerate( @options, filter, uri ) else raise "Unsupported for WSMAN product #{@product}" end if _handle_fault @client, result return [] end classes = [] case @product when :openwsman # extract invoke result output = result.body[method] output.each do |c| classes << c.to_s end when :winrm # extract enumerate/pull result loop do output = result.Items output.each do |node| classes << node.name.to_s end if output context = result.context break unless context # get the next chunk result = @client.pull( @options, nil, uri, context) break if _handle_fault @client, result end end return classes end
ruby
{ "resource": "" }
q37
HerokuS3Backups.Heroku.download
train
def download(output_filename, options = {target_backup: nil}) raise "Please specify a filename" if output_filename.length.eql?(0) HerokuCLI.cmd("pg:backups:download #{target_backup} --output #{output_filename}", @app_name) end
ruby
{ "resource": "" }
q38
HerokuS3Backups.Heroku.store_on_s3
train
def store_on_s3(backup_location, backup_filename) prod_backup_folder = AWS_S3().buckets.find(ENV["S3_PRODUCTION_BACKUP_BUCKET"]).objects(prefix: backup_location) backup_obj = prod_backup_folder.build("#{backup_location}/#{backup_filename}") # Need to do this to set content length for some reason backup_obj.content = open(@backup_filename) backup_obj.save end
ruby
{ "resource": "" }
q39
Wbem.ClassFactory.gen_method_parameters
train
def gen_method_parameters direction, parameters, file return if parameters.empty? file.print "#{direction.inspect} => [" first = true parameters.each do |p| if first first = false else file.print ", " end # [ <name>, <type>, <out>? ] file.print "#{p.name.inspect}, #{p.type.to_sym.inspect}" end file.print "]" return true end
ruby
{ "resource": "" }
q40
Wbem.ClassFactory.generate
train
def generate name, file require 'erb' template = File.read(File.join(File.dirname(__FILE__), "class_template.erb")) erb = ERB.new(template) code = erb.result(binding) Dir.mkdir(@basedir) unless File.directory?(@basedir) File.open(file, "w+") do |f| f.puts code end end
ruby
{ "resource": "" }
q41
Wbem.ClassFactory.classmap
train
def classmap return @classmap if @classmap # read SCHEMA and build class index to find .mof files quickly @classmap = Hash.new @includes = [ Pathname.new(".") ] SCHEMATA.each do |base, file| @includes << base allow_cim = (file =~ /^CIM_/) # allow CIM_ only for CIM_Schema.mof File.open(File.join(base, file)) do |f| f.each do |l| if l =~ /^\#pragma\sinclude\s?\(\"(([\w\/_]+)\.mof)\"\).*/ # $1 Foo/Bar.mof # $2 Foo/Bar path = $1 names = $2.split("/") name = names[1] || names[0] next unless name =~ /_/ # class name must have underscore (rules out 'qualifiers.mof') # puts "#{path}:#{name}" next if !allow_cim && name =~ /^CIM_/ # skip CIM_ mofs unless allowed if @classmap[name] raise "Dup #{name} : #{@classmap[name]}" else @classmap[name] = { :path => path } end end end end end STDERR.puts "Found MOFs for #{@classmap.size} classes" if Wbem.debug @classmap end
ruby
{ "resource": "" }
q42
Dawg.Finder.query
train
def query(word) node = @the_node results = [] word.split("").each do |letter| next_node = node[letter] if next_node != nil node = next_node next else return [''] end end results << Word.new(word, node.final) results += get_childs(node).map{|s| Word.new(word) + s} results.select{|r| r.final}.map{|r| r.to_s } end
ruby
{ "resource": "" }
q43
DeepDive.::Enumerable._add
train
def _add(v: nil, dupit: nil, oc: nil, patch: {}) unless _pairs? case when self.kind_of?(::Set) when self.kind_of?(::Array) self << _ob_maybe_repl(v: v, dupit: dupit, oc: oc, patch: patch) else raise DeepDiveException.new("Don't know how to add new elements for class #{self.class}") end else self[v.first] = _ob_maybe_repl(v: v.last, dupit: dupit, oc: oc, patch: patch) end end
ruby
{ "resource": "" }
q44
Chimp.ChimpQueue.start
train
def start self.sort_queues! for i in (1..max_threads) @threads << Thread.new(i) do worker = QueueWorker.new worker.delay = @delay worker.retry_count = @retry_count worker.run end end end
ruby
{ "resource": "" }
q45
Chimp.ChimpQueue.push
train
def push(g, w) raise "no group specified" unless g create_group(g) if not ChimpQueue[g] ChimpQueue[g].push(w) unless ChimpQueue[g].get_job(w.job_id) end
ruby
{ "resource": "" }
q46
Chimp.ChimpQueue.shift
train
def shift r = nil @semaphore.synchronize do @group.values.each do |group| if group.ready? r = group.shift Log.debug "Shifting job '#{r.job_id}' from group '#{group.group_id}'" unless r.nil? break end end end return(r) end
ruby
{ "resource": "" }
q47
Chimp.ChimpQueue.quit
train
def quit i = 0 @group.keys.each do |group| wait_until_done(group) do if i < 30 sleep 1 i += 1 print "." else break end end end @threads.each { |t| t.kill } puts " done." end
ruby
{ "resource": "" }
q48
Chimp.ChimpQueue.get_jobs_by_status
train
def get_jobs_by_status(status) r = [] @group.values.each do |group| v = group.get_jobs_by_status(status) if v != nil and v != [] r += v end end return r end
ruby
{ "resource": "" }
q49
YourMembership.Profile.clean
train
def clean(data_hash) clean_hash = {} # Remove Nils data_hash.each do |k, v| clean_hash[k] = v if v end clean_hash end
ruby
{ "resource": "" }
q50
Chimp.ExecutionGroup.push
train
def push(j) raise "invalid work" if j == nil j.job_id = IDManager.get if j.job_id == nil j.group = self @queue.push(j) @jobs_by_id[j.job_id] = j end
ruby
{ "resource": "" }
q51
Chimp.ExecutionGroup.shift
train
def shift updated_queue = [] found_job = nil @queue.each do |job| if found_job || job.status == Executor::STATUS_HOLDING updated_queue.push(job) elsif job.status == Executor::STATUS_NONE found_job = job end end @queue = updated_queue @time_start = Time.now if @time_start == nil return found_job end
ruby
{ "resource": "" }
q52
Chimp.ExecutionGroup.results
train
def results return self.get_jobs.map do |task| next if task == nil next if task.server == nil { :job_id => task.job_id, :name => task.info[0], :host => task.server.name, :status => task.status, :error => task.error, :total => self.get_total_execution_time(task.status, task.time_start, task.time_end), :start => task.time_start, :end => task.time_end, :worker => task } end end
ruby
{ "resource": "" }
q53
Chimp.ExecutionGroup.sort!
train
def sort! if @queue != nil @queue.sort! do |a,b| a.server.nickname <=> b.server.nickname end end end
ruby
{ "resource": "" }
q54
Chimp.ExecutionGroup.get_jobs_by_status
train
def get_jobs_by_status(status) r = [] @jobs_by_id.values.each do |i| r << i if i.status == status.to_sym || status.to_sym == :all end return r end
ruby
{ "resource": "" }
q55
Chimp.ExecutionGroup.done?
train
def done? return ( get_jobs_by_status(Executor::STATUS_NONE).size == 0 && get_jobs_by_status(Executor::STATUS_RUNNING).size == 0 && get_jobs_by_status(Executor::STATUS_DONE).size > 0 ) end
ruby
{ "resource": "" }
q56
Chimp.ExecutionGroup.running?
train
def running? total_jobs_running = get_jobs_by_status(Executor::STATUS_NONE).size + get_jobs_by_status(Executor::STATUS_RUNNING).size + get_jobs_by_status(Executor::STATUS_RETRYING).size (total_jobs_running > 0) end
ruby
{ "resource": "" }
q57
Chimp.ExecutionGroup.queue
train
def queue(id) Log.debug "Queuing held job id #{id}" job = @jobs_by_id[id] job.owner = nil job.time_start = Time.now job.time_end = nil job.status = Executor::STATUS_NONE self.push(job) end
ruby
{ "resource": "" }
q58
Chimp.ExecutionGroup.cancel
train
def cancel(id) Log.warn "Cancelling job id #{id}" job = @jobs_by_id[id] job.status = Executor::STATUS_ERROR job.owner = nil job.time_end = Time.now @queue.delete(job) end
ruby
{ "resource": "" }
q59
RayyanScrapers.EntrezScraper.parse_search_results
train
def parse_search_results(string, extraction_fields = nil) xml = Nokogiri::XML.parse(string, "file:///rawfile.xml") items = xml/"/#{@xml_element_root}/*" total = items.length @logger.debug("Found #{total} articles in input pubmed file") items.each do |item| begin mArticle = RayyanFormats::Target.new failed = false case item.node_name when @xml_element_root_article process_article_detail_page(item, mArticle, extraction_fields) when @xml_element_root_book process_book_detail_page(item, mArticle, extraction_fields) else @logger.warn "Unknown XML format for search result of type #{item.node_name}" failed = true end unless failed pmid = ScraperBase.node_text item, './/PMID' mArticle.sid = pmid mArticle.url = "#{@detail_friendly_url}#{pmid}" yield mArticle, total if block_given? end # unless failed rescue => exception @logger.error "Error processing item in search result of type #{item.node_name} [#{exception}] " + "caused by #{exception.backtrace.first}" end # process item rescue end # items.each total end
ruby
{ "resource": "" }
q60
Xcflushd.PriorityAuthRenewer.async_renew_and_publish_task
train
def async_renew_and_publish_task(channel_msg) Concurrent::Future.new(executor: thread_pool) do success = true begin combination = auth_channel_msg_2_combination(channel_msg) app_auths = app_authorizations(combination) renew(combination[:service_id], combination[:credentials], app_auths) metric_auth = app_auths[combination[:metric]] rescue StandardError # If we do not do rescue, we would not be able to process the same # message again. success = false ensure mark_auth_task_as_finished(channel_msg) end # We only publish a message when there aren't any errors. When # success is false, we could have renewed some auths, so this could # be more fine grained and ping the subscribers that are not interested # in the auths that failed. Also, as we do not publish anything when # there is an error, the subscriber waits until it timeouts. # This is good enough for now, but there is room for improvement. publish_auth(combination, metric_auth) if success end end
ruby
{ "resource": "" }
q61
Gametel.Accessors.text
train
def text(name, locator) define_method("#{name}") do platform.get_text(locator) end define_method("#{name}=") do |value| platform.enter_text(value, locator) end define_method("clear_#{name}") do platform.clear_text(locator) end define_method("#{name}_view") do Gametel::Views::Text.new(platform, locator) end end
ruby
{ "resource": "" }
q62
Gametel.Accessors.button
train
def button(name, locator) define_method(name) do platform.press_button(locator) end define_method("#{name}_view") do Gametel::Views::Button.new(platform, locator) end end
ruby
{ "resource": "" }
q63
Gametel.Accessors.list_item
train
def list_item(name, locator) define_method(name) do platform.press_list_item(locator) end define_method("#{name}_view") do Gametel::Views::ListItem.new(platform, locator) end end
ruby
{ "resource": "" }
q64
Gametel.Accessors.checkbox
train
def checkbox(name, locator) define_method(name) do platform.click_checkbox(locator) end define_method("#{name}_checked?") do Gametel::Views::CheckBox.new(platform, locator).checked? end define_method("#{name}_view") do Gametel::Views::CheckBox.new(platform, locator) end end
ruby
{ "resource": "" }
q65
Gametel.Accessors.radio_button
train
def radio_button(name, locator) define_method(name) do platform.click_radio_button(locator) end define_method("#{name}_view") do Gametel::Views::RadioButton.new(platform, locator) end end
ruby
{ "resource": "" }
q66
Gametel.Accessors.image
train
def image(name, locator) define_method("click_#{name}") do platform.click_image(locator) end define_method("wait_for_#{name}") do wait_until do platform.has_drawable?(locator) end end define_method("#{name}_view") do Gametel::Views::Image.new(platform, locator) end end
ruby
{ "resource": "" }
q67
Knowledge.Learner.gather!
train
def gather! ::Knowledge::Initializer.new( adapters: enabled_adapters, params: additionnal_params, setter: setter, variables: variables ).run end
ruby
{ "resource": "" }
q68
Knowledge.Learner.enable_adapter
train
def enable_adapter(name:, variables: nil) _key, klass = available_adapters.find { |key, _klass| key.to_sym == name.to_sym } raise Knowledge::AdapterNotFound, "Cannot find \"#{name}\" in available adapters" if klass.nil? @enabled_adapters[name.to_sym] = klass set_adapter_variables(name: name, variables: variables) end
ruby
{ "resource": "" }
q69
Knowledge.Learner.register_adapter
train
def register_adapter(name:, klass:, enable: false, variables: nil) @available_adapters[name.to_sym] = klass enable_adapter(name: name) if enable set_adapter_variables(name: name, variables: variables) end
ruby
{ "resource": "" }
q70
Knowledge.Learner.set_adapter_variables
train
def set_adapter_variables(name:, variables: nil) return unless variables case variables when Hash set_adapter_variables_by_hash(name: name, variables: variables) when String set_adapter_variables(name: name, variables: yaml_content(variables)) else raise "Unknown variables type #{variables.class}" end rescue StandardError => e raise ::Knowledge::LearnError, e.message end
ruby
{ "resource": "" }
q71
Knowledge.Learner.set_adapter_variables_by_hash
train
def set_adapter_variables_by_hash(name:, variables:) variables = variables[name.to_s] if variables.key?(name.to_s) variables = variables[name.to_sym] if variables.key?(name.to_sym) @variables[name.to_sym] = variables end
ruby
{ "resource": "" }
q72
Knowledge.Learner.use
train
def use(name:, enable: true) adapter = self.class.adapters[name.to_sym] raise ::Knowledge::RegisterError, "Unable to register following: #{name}" if adapter.nil? register_adapter(name: name.to_sym, klass: adapter, enable: enable) end
ruby
{ "resource": "" }
q73
Knowledge.Learner.fetch_variables_config
train
def fetch_variables_config(path) descriptor = yaml_content(path) @variables = descriptor[::Knowledge.config.environment.to_s] || descriptor end
ruby
{ "resource": "" }
q74
Boson.RepoIndex.update
train
def update(options={}) libraries_to_update = !exists? ? repo.all_libraries : options[:libraries] || changed_libraries read_and_transfer(libraries_to_update) if options[:verbose] puts !exists? ? "Generating index for all #{libraries_to_update.size} libraries. Patience ... is a bitch" : (libraries_to_update.empty? ? "No libraries indexed" : "Indexing the following libraries: #{libraries_to_update.join(', ')}") end Manager.instance.failed_libraries = [] unless libraries_to_update.empty? Manager.load(libraries_to_update, options.merge(:index=>true)) unless Manager.instance.failed_libraries.empty? $stderr.puts("Error: These libraries failed to load while indexing: #{Manager.instance.failed_libraries.join(', ')}") end end write(Manager.instance.failed_libraries) end
ruby
{ "resource": "" }
q75
Boson.RepoIndex.set_command_namespaces
train
def set_command_namespaces lib_commands = @commands.inject({}) {|t,e| (t[e.lib] ||= []) << e; t } namespace_libs = @libraries.select {|e| e.namespace(e.indexed_namespace) } namespace_libs.each {|lib| (lib_commands[lib.name] || []).each {|e| e.namespace = lib.namespace } } end
ruby
{ "resource": "" }
q76
Boson.CommentInspector.scrape
train
def scrape(file_string, line, mod, attribute=nil) hash = scrape_file(file_string, line) || {} options = (arr = hash.delete(:option)) ? parse_option_comments(arr, mod) : {} hash.select {|k,v| v && (attribute.nil? || attribute == k) }.each do |k,v| hash[k] = EVAL_ATTRIBUTES.include?(k) ? eval_comment(v.join(' '), mod, k) : v.join(' ') end (hash[:options] ||= {}).merge!(options) if !options.empty? attribute ? hash[attribute] : hash end
ruby
{ "resource": "" }
q77
Boson.CommentInspector.scrape_file
train
def scrape_file(file_string, line) lines = file_string.split("\n") saved = [] i = line -2 while lines[i] =~ /^\s*#\s*(\S+)/ && i >= 0 saved << lines[i] i -= 1 end saved.empty? ? {} : splitter(saved.reverse) end
ruby
{ "resource": "" }
q78
SettingsOnRails.KeyTreeBuilder.build_nodes
train
def build_nodes value = _target_column for key in _key_chain value[key] = {} unless value[key] value = value[key] end end
ruby
{ "resource": "" }
q79
SettingsOnRails.KeyTreeBuilder._key_chain
train
def _key_chain handler = self key_chain = [] begin key_chain = handler.keys + key_chain handler = handler.parent end while handler key_chain end
ruby
{ "resource": "" }
q80
Copyscape.Response.result_to_hash
train
def result_to_hash(result) result.children.inject({}) do |hash, node| hash[node.name] = node.text hash[node.name] = node.text.to_i if node.text && node.text =~ /^\d+$/ hash end end
ruby
{ "resource": "" }
q81
MiniReadline.History.append_history
train
def append_history(str) return if @options[:no_blanks] && str.strip.empty? if history.include?(str) if @options[:no_dups] return if @options[:no_move] history.delete(str) end end history << str end
ruby
{ "resource": "" }
q82
Chimp.ChimpDaemon.parse_command_line
train
def parse_command_line begin opts = GetoptLong.new( [ '--logfile', '-l', GetoptLong::REQUIRED_ARGUMENT ], [ '--verbose', '-v', GetoptLong::NO_ARGUMENT ], [ '--quiet', '-q', GetoptLong::NO_ARGUMENT ], [ '--concurrency', '-c', GetoptLong::REQUIRED_ARGUMENT ], [ '--delay', '-d', GetoptLong::REQUIRED_ARGUMENT ], [ '--retry', '-y', GetoptLong::REQUIRED_ARGUMENT ], [ '--port', '-p', GetoptLong::REQUIRED_ARGUMENT ], [ '--bind-address', '-b', GetoptLong::REQUIRED_ARGUMENT ], [ '--help', '-h', GetoptLong::NO_ARGUMENT ], [ '--exit', '-x', GetoptLong::NO_ARGUMENT ] ) opts.each do |opt, arg| case opt when '--logfile', '-l' @logfile = arg Log.logger = Logger.new(@logfile) when '--concurrency', '-c' @concurrency = arg.to_i when '--delay', '-d' @delay = arg.to_i when '--retry', '-y' @retry_count = arg.to_i when '--verbose', '-v' @verbose = true when '--quiet', '-q' @quiet = true when '--port', '-p' @port = arg when '--bind-address', '-b' @bind_address = arg.to_s when '--help', '-h' help when '--exit', '-x' uri = "http://localhost:#{@port}/admin" response = RestClient.post uri, { 'shutdown' => true }.to_yaml exit 0 end end rescue GetoptLong::InvalidOption => ex puts "Syntax: chimpd [--logfile=<name>] [--concurrency=<c>] [--delay=<d>] [--retry=<r>] [--port=<p>] [--bind-address=<addr> ] [--verbose]" exit 1 end # # Set up logging/verbosity # Chimp.set_verbose(@verbose, @quiet) if not @verbose ENV['REST_CONNECTION_LOG'] = "/dev/null" ENV['RESTCLIENT_LOG'] = "/dev/null" Log.threshold= Logger::INFO else Log.threshold= Logger::DEBUG end if @quiet Log.threshold = Logger::WARN end end
ruby
{ "resource": "" }
q83
Chimp.ChimpDaemon.spawn_webserver
train
def spawn_webserver opts = { :BindAddress => @bind_address, :Port => @port, :MaxClients => 500, :RequestTimeout => 120, :DoNotReverseLookup => true } if not @verbose opts[:Logger] = WEBrick::Log.new("/dev/null") opts[:AccessLog] = [nil, nil] end @server = ::WEBrick::HTTPServer.new(opts) @server.mount('/', DisplayServlet) @server.mount('/display', DisplayServlet) @server.mount('/job', JobServlet) @server.mount('/group', GroupServlet) @server.mount('/admin', AdminServlet) # # WEBrick threads # @threads << Thread.new(1001) do @server.start end end
ruby
{ "resource": "" }
q84
Chimp.ChimpDaemon.spawn_chimpd_submission_processor
train
def spawn_chimpd_submission_processor n = @concurrency/4 n = 10 if n < 10 Log.debug "Logging into API..." # # There is a race condition logging in with rest_connection. # As a workaround, do a tag query first thing when chimpd starts. # begin c = Chimp.new c.interactive = false c.quiet = true #c.tags = ["bogus:tag=true"] c.run rescue StandardError end puts "chimpd #{VERSION} launched with #{@concurrency} workers" Log.debug "Spawning #{n} submission processing threads" (1..n).each do |n| @threads ||=[] @threads << Thread.new { while true begin queued_request = @chimp_queue.pop group = queued_request.group queued_request.interactive = false tasks = queued_request.process tasks.each do |task| ChimpQueue.instance.push(group, task) end rescue StandardError => ex puts ex.backtrace Log.error " submission processor: group=\"#{group}\" script=\"#{queued_request.script}\": #{ex}" end end } end end
ruby
{ "resource": "" }
q85
Xcflushd.FlusherErrorHandler.log
train
def log(exception) msg = error_msg(exception) case exception when *NON_TEMP_ERRORS logger.error(msg) when *TEMP_ERRORS logger.warn(msg) else logger.error(msg) end end
ruby
{ "resource": "" }
q86
SmsCarrier.Base.sms
train
def sms(options = {}) return @_message if @_sms_was_called && options.blank? m = @_message # Call all the procs (if any) default_values = {} self.class.default.each do |k,v| default_values[k] = v.is_a?(Proc) ? instance_eval(&v) : v end # Handle defaults options = options.reverse_merge(default_values) # Set configure delivery behavior wrap_delivery_behavior!(options.delete(:delivery_method), options.delete(:delivery_method_options)) # Assign all options except body, template_name, and template_path assignable = options.except(:body, :template_name, :template_path) assignable.each { |k, v| m[k] = v } # Render the templates and blocks m.body = response(options) @_sms_was_called = true m end
ruby
{ "resource": "" }
q87
ACTV.Client.assets
train
def assets(q, params={}) response = get("/v2/search.json", params.merge(query: q)) ACTV::SearchResults.from_response(response) end
ruby
{ "resource": "" }
q88
ACTV.Client.organizer
train
def organizer(id, params={}) response = get("/v3/organizers/#{id}.json", params) ACTV::Organizer.from_response response end
ruby
{ "resource": "" }
q89
ACTV.Client.find_asset_by_url
train
def find_asset_by_url(url) url_md5 = Digest::MD5.hexdigest(url) response = get("/v2/seourls/#{url_md5}?load_asset=true") ACTV::Asset.from_response(response) end
ruby
{ "resource": "" }
q90
ACTV.Client.articles
train
def articles(q, params={}) response = get("/v2/search.json", params.merge({query: q, category: 'articles'})) ACTV::ArticleSearchResults.from_response(response) end
ruby
{ "resource": "" }
q91
ACTV.Client.article
train
def article id, params={} request_string = "/v2/assets/#{id}" is_preview, params = params_include_preview? params request_string += '/preview' if is_preview response = get "#{request_string}.json", params article = ACTV::Article.new response[:body] article.is_article? ? article : nil end
ruby
{ "resource": "" }
q92
ACTV.Client.popular_interests
train
def popular_interests(params={}, options={}) response = get("/interest/_search", params, options) ACTV::PopularInterestSearchResults.from_response(response) end
ruby
{ "resource": "" }
q93
ACTV.Client.event_results
train
def event_results(assetId, assetTypeId, options={}) begin response = get("/api/v1/events/#{assetId}/#{assetTypeId}.json", {}, options) ACTV::EventResult.from_response(response) rescue nil end end
ruby
{ "resource": "" }
q94
ACTV.Client.multi_search
train
def multi_search(*options) results = [] query_index = 0 options_hash = options.inject({}) do |hash, options| hash.merge! "query_#{query_index}" => "[#{URI.encode_www_form options}]" query_index += 1 hash end if options_hash.present? response = get("/v2/multisearch", options_hash) response[:body].each_value do |sub_query| sub_query[:results].each do |asset| results << ACTV::Asset.from_response(body: asset) end end end results end
ruby
{ "resource": "" }
q95
ACTV.Client.request
train
def request(method, path, params, options) uri = options[:endpoint] || @endpoint uri = URI(uri) unless uri.respond_to?(:host) uri += path request_headers = {} params[:api_key] = @api_key unless @api_key.nil? if self.credentials? # When posting a file, don't sign any params signature_params = if [:post, :put].include?(method.to_sym) && params.values.any?{|value| value.is_a?(File) || (value.is_a?(Hash) && (value[:io].is_a?(IO) || value[:io].is_a?(StringIO)))} {} else params end authorization = SimpleOAuth::Header.new(method, uri, signature_params, credentials) request_headers[:authorization] = authorization.to_s.sub('OAuth', "Bearer") end connection.url_prefix = options[:endpoint] || @endpoint connection.run_request(method.to_sym, path, nil, request_headers) do |request| unless params.empty? case request.method when :post, :put request.body = params else request.params.update(params) end end yield request if block_given? end.env rescue Faraday::Error::ClientError raise ACTV::Error::ClientError end
ruby
{ "resource": "" }
q96
Phaserunner.Modbus.bulk_log_data
train
def bulk_log_data(registers = register_list) registers.map do |reg| read_scaled_range(reg.start, reg.count) end.flatten end
ruby
{ "resource": "" }
q97
Phaserunner.Modbus.bulk_log_header
train
def bulk_log_header(registers = register_list) registers.map do |reg| range_address_header(reg.start, reg.count) end.flatten end
ruby
{ "resource": "" }
q98
SettingsOnRails.HasSettings.key
train
def key(*keys) options = keys.extract_options! raise ArgumentError.new("has_settings: Option :defaults expected, but got #{options.keys.join(', ')}") unless options.blank? || (options.keys == [:defaults]) keys.each do |key_name| unless key_name.is_a?(Symbol) || key_name.is_a?(String) raise ArgumentError.new("has_settings: symbol or string expected, but got a #{key_name.class}") end end options[:defaults].each do |k, v| has_settings(*keys).attr(k, default: v) end end
ruby
{ "resource": "" }
q99
SettingsOnRails.HasSettings.attr
train
def attr(value, options = {}) unless value.is_a?(Symbol) || value.is_a?(String) raise ArgumentError.new("has_settings: symbol expected, but got a #{value.class}") end raise ArgumentError.new("has_settings: Option :default expected, but got #{options.keys.join(', ')}") unless options.blank? || (options.keys == [:default]) default_value = options[:default] raise 'Error' unless value.to_s =~ REGEX_ATTR _set_value(value.to_s, default_value) end
ruby
{ "resource": "" }

Dataset Card for "CodeSearchNet-ruby-queries-corpus"

More Information needed
