Dataset columns:
  _id               string (length 2 to 6)
  title             string (length 9 to 130)
  partition         string (3 distinct values)
  text              string (length 66 to 10.5k)
  language          string (1 distinct value)
  meta_information  dict
q200
Filegen.ErbGenerator.compile
train
def compile(source, destination)
  erb = ERB.new(source.read, nil, '-')
  begin
    destination.puts erb.result(data.instance_binding)
  rescue SyntaxError => e
    raise Exceptions::ErbTemplateHasSyntaxErrors, e.message
  end
end
ruby
{ "resource": "" }
q201
ActsAsSolr.InstanceMethods.solr_save
train
def solr_save
  return true if indexing_disabled?
  if evaluate_condition(:if, self)
    debug "solr_save: #{self.class.name} : #{record_id(self)}"
    solr_add to_solr_doc
    solr_commit if configuration[:auto_commit]
    true
  else
    solr_destroy
  end
end
ruby
{ "resource": "" }
q202
ActsAsSolr.InstanceMethods.to_solr_doc
train
def to_solr_doc
  debug "to_solr_doc: creating doc for class: #{self.class.name}, id: #{record_id(self)}"
  doc = Solr::Document.new
  doc.boost = validate_boost(configuration[:boost]) if configuration[:boost]

  doc << {:id => solr_id,
          solr_configuration[:type_field] => self.class.name,
          solr_configuration[:primary_key_field] => record_id(self).to_s}

  # iterate through the fields and add them to the document,
  configuration[:solr_fields].each do |field_name, options|
    next if [self.class.primary_key, "type"].include?(field_name.to_s)

    field_boost = options[:boost] || solr_configuration[:default_boost]
    field_type = get_solr_field_type(options[:type])
    solr_name = options[:as] || field_name

    value = self.send("#{field_name}_for_solr") rescue nil
    next if value.nil?

    suffix = get_solr_field_type(field_type)
    value = Array(value).map{ |v| ERB::Util.html_escape(v) } # escape each value
    value = value.first if value.size == 1

    field = Solr::Field.new(:name => "#{solr_name}_#{suffix}", :value => value)
    processed_boost = validate_boost(field_boost)
    field.boost = processed_boost
    doc << field
  end

  add_dynamic_attributes(doc)
  add_includes(doc)
  add_tags(doc)
  add_space(doc)
  debug doc.to_json
  doc
end
ruby
{ "resource": "" }
q203
Typograf.Client.send_request
train
def send_request(text)
  params = {
    'text' => text.encode("cp1251"),
  }
  params['xml'] = @xml if @xml

  request = Net::HTTP::Post.new(@url.path)
  request.set_form_data(params)

  begin
    response = Net::HTTP.new(@url.host, @url.port).start do |http|
      http.request(request)
    end
  rescue StandardError => exception
    raise NetworkError.new(exception.message, exception.backtrace)
  end

  if !response.is_a?(Net::HTTPOK)
    raise NetworkError, "#{response.code}: #{response.message}"
  end

  body = response.body.force_encoding("cp1251").encode("utf-8")

  # error = "\xCE\xF8\xE8\xE1\xEA\xE0: \xE2\xFB \xE7\xE0\xE1\xFB\xEB\xE8 \xEF\xE5\xF0\xE5\xE4\xE0\xF2\xFC \xF2\xE5\xEA\xF1\xF2"
  # error.force_encoding("ASCII-8BIT") if error.respond_to?(:force_encoding)
  if body == "Ошибка: вы забыли передать текст"
    raise NetworkError, "Ошибка: вы забыли передать текст"
  end

  if @options[:symbols] == 2
    HTMLEntities.new.decode(body.chomp)
  else
    body.chomp
  end
end
ruby
{ "resource": "" }
q204
ActsAsSolr.ActsMethods.acts_as_solr
train
def acts_as_solr(options={}, solr_options={}, &deferred_solr_configuration)
  $solr_indexed_models << self

  extend ClassMethods
  include InstanceMethods
  include CommonMethods
  include ParserMethods

  define_solr_configuration_methods

  acts_as_taggable_on :tags if options[:taggable]
  has_many :dynamic_attributes, :as => "dynamicable" if options[:dynamic_attributes]
  has_one :local, :as => "localizable" if options[:spatial]

  after_save :solr_save
  after_destroy :solr_destroy

  if deferred_solr_configuration
    self.deferred_solr_configuration = deferred_solr_configuration
  else
    process_acts_as_solr(options, solr_options)
  end
end
ruby
{ "resource": "" }
q205
JsonApiClient.Mapper.build_linked_resources_map
train
def build_linked_resources_map(data)
  data["linked"].each_with_object({}) do |(type, resources), obj|
    obj[type] ||= {}
    resources.each do |linked_resource|
      obj[type][linked_resource["id"]] = linked_resource
    end
  end
end
ruby
{ "resource": "" }
q206
JsonApiClient.Mapper.build_link_type_map
train
def build_link_type_map(data)
  data["links"].each_with_object({}) do |(key, value), obj|
    association = key.split(".").last
    obj[association] = value["type"].pluralize
  end
end
ruby
{ "resource": "" }
q207
Dk.TreeRunner.build_and_run_task
train
def build_and_run_task(task_class, params = nil)
  task_run = TaskRun.new(task_class, params)
  @task_run_stack.last.runs << task_run

  @task_run_stack.push(task_run)
  task = super(task_class, params)
  @task_run_stack.pop

  task
end
ruby
{ "resource": "" }
q208
TinyCI.Runner.run!
train
def run!
  begin
    ensure_path target_path
    setup_log
    log_info "Commit: #{@commit}"

    log_info "Cleaning..."
    clean

    log_info "Exporting..."
    ensure_path export_path
    export

    begin
      load_config
    rescue ConfigMissingError => e
      log_error e.message
      log_error 'Removing export...'
      clean
      return false
    end

    @builder ||= instantiate_builder
    @tester ||= instantiate_tester
    @hooker ||= instantiate_hooker

    log_info "Building..."
    run_hook! :before_build
    begin
      @builder.build
    rescue => e
      run_hook! :after_build_failure
      raise e if ENV['TINYCI_ENV'] == 'test'
      log_error e
      log_debug e.backtrace
      return false
    else
      run_hook! :after_build_success
    ensure
      run_hook! :after_build
    end

    log_info "Testing..."
    run_hook! :before_test
    begin
      @tester.test
    rescue => e
      run_hook! :after_test_failure
      raise e if ENV['TINYCI_ENV'] == 'test'
      log_error e
      log_debug e.backtrace
      return false
    else
      run_hook! :after_test_success
    ensure
      run_hook! :after_test
    end

    log_info "Finished #{@commit}"
  rescue => e
    raise e if ENV['TINYCI_ENV'] == 'test'
    log_error e
    log_debug e.backtrace
    return false
  end

  true
end
ruby
{ "resource": "" }
q209
TinyCI.Runner.instantiate_builder
train
def instantiate_builder
  klass = TinyCI::Builders.const_get(@config[:builder][:class])
  klass.new(@config[:builder][:config].merge(target: export_path), logger: @logger)
end
ruby
{ "resource": "" }
q210
TinyCI.Runner.instantiate_hooker
train
def instantiate_hooker
  return nil unless @config[:hooker].is_a? Hash

  klass = TinyCI::Hookers.const_get(@config[:hooker][:class])
  klass.new(@config[:hooker][:config].merge(target: export_path), logger: @logger)
end
ruby
{ "resource": "" }
q211
Grooveshark.Playlist.load_songs
train
def load_songs
  @songs = []
  playlist = @client.request('getPlaylistByID', playlistID: @id)
  @songs = playlist['songs'].map! do |s|
    Song.new(s)
  end if playlist.key?('songs')
  @songs
end
ruby
{ "resource": "" }
q212
AudioMonster.Monster.create_wav_wrapped_mpeg
train
def create_wav_wrapped_mpeg(mpeg_path, result_path, options={})
  options.to_options!

  start_at = get_datetime_for_option(options[:start_at])
  end_at = get_datetime_for_option(options[:end_at])

  wav_wrapped_mpeg = NuWav::WaveFile.from_mpeg(mpeg_path)

  cart = wav_wrapped_mpeg.chunks[:cart]
  cart.title = options[:title] || File.basename(mpeg_path)
  cart.artist = options[:artist]
  cart.cut_id = options[:cut_id]
  cart.producer_app_id = options[:producer_app_id] if options[:producer_app_id]
  cart.start_date = start_at.strftime(PRSS_DATE_FORMAT)
  cart.start_time = start_at.strftime(AES46_2002_TIME_FORMAT)
  cart.end_date = end_at.strftime(PRSS_DATE_FORMAT)
  cart.end_time = end_at.strftime(AES46_2002_TIME_FORMAT)

  # pass in the options used by NuWav -
  # :no_pad_byte - when true, will not add the pad byte to the data chunk
  nu_wav_options = options.slice(:no_pad_byte)

  wav_wrapped_mpeg.to_file(result_path, nu_wav_options)
  check_local_file(result_path)
  return true
end
ruby
{ "resource": "" }
q213
AudioMonster.Monster.run_command
train
def run_command(command, options={})
  timeout = options[:timeout] || 7200

  # default to adding a nice 13 if nothing specified
  nice = if options.key?(:nice)
    (options[:nice] == 'n') ? '' : "nice -n #{options[:nice]} "
  else
    'nice -n 19 '
  end

  echo_return = (options.key?(:echo_return) && !options[:echo_return]) ? '' : '; echo $?'

  cmd = "#{nice}#{command}#{echo_return}"

  logger.info "run_command: #{cmd}"

  begin
    result = Timeout::timeout(timeout) {
      Open3::popen3(cmd) do |i, o, e|
        out_str = ""
        err_str = ""
        i.close # important!
        o.sync = true
        e.sync = true
        o.each { |line|
          out_str << line
          line.chomp!
          logger.debug "stdout: #{line}"
        }
        e.each { |line|
          err_str << line
          line.chomp!
          logger.debug "stderr: #{line}"
        }
        return out_str, err_str
      end
    }
  rescue Timeout::Error => toe
    logger.error "run_command:Timeout Error - running command, took longer than #{timeout} seconds to execute: '#{cmd}'"
    raise toe
  end
end
ruby
{ "resource": "" }
q214
Emerson.Responder.key_for_primary
train
def key_for_primary
  @_key_for_primary ||= if options[:as]
    options[:as]
  else
    controller_name = controller.controller_name
    (resource.respond_to?(:each) ? controller_name : controller_name.singularize).intern
  end
end
ruby
{ "resource": "" }
q215
Quora.Client.get
train
def get(field, filter = true)
  if field.nil? or !field.instance_of?(String)
    raise ArgumentError, "Field value must be a string"
  end
  resp = http.get("#{BASEPATH}?fields=#{field}", headers)
  data = resp.body[RESP_PREFIX.length..-1]
  data = JSON.parse(data)
  if filter && data.has_key?(field)
    data[field]
  else
    data
  end
end
ruby
{ "resource": "" }
q216
Quora.Client.method_missing
train
def method_missing(method_id, *arguments, &block)
  if method_id.to_s =~ /^get_[\w]+/
    self.class.send :define_method, method_id do
      field = method_id.to_s[4..-1]
      get(field)
    end
    self.send(method_id)
  else
    super
  end
end
ruby
{ "resource": "" }
q217
UnionStationHooks.RequestReporter.log_controller_action_block
train
def log_controller_action_block(options = {})
  if null?
    do_nothing_on_null(:log_controller_action_block)
    yield
  else
    build_full_controller_action_string(options)
    has_error = true
    begin_time = UnionStationHooks.now
    begin
      result = yield
      has_error = false
      result
    ensure
      log_controller_action(
        options.merge(
          :begin_time => begin_time,
          :end_time => UnionStationHooks.now,
          :has_error => has_error
        )
      )
    end
  end
end
ruby
{ "resource": "" }
q218
UnionStationHooks.RequestReporter.log_controller_action
train
def log_controller_action(options)
  return do_nothing_on_null(:log_controller_action) if null?
  Utils.require_key(options, :begin_time)
  Utils.require_key(options, :end_time)

  if options[:controller_name]
    build_full_controller_action_string(options)
    @transaction.message("Controller action: #{@controller_action}")
  end
  if options[:method]
    @transaction.message("Application request method: #{options[:method]}")
  end
  @transaction.log_activity('framework request processing',
    options[:begin_time], options[:end_time], nil, options[:has_error])
end
ruby
{ "resource": "" }
q219
Zadt.Graph.remove_vertex
train
def remove_vertex(vertex)
  # The vertex must exist
  raise "not a vertex" unless vertex.is_a?(Vertex)
  if !vertex
    raise "Vertex does not exist"
  # The vertex must not be connected to anything
  elsif !vertex.connections.empty?
    raise "Vertex has edges. Break them first."
  # If it exists and isn't connected, delete it
  else
    @vertices.delete(vertex)
  end
end
ruby
{ "resource": "" }
q220
Zadt.Graph.make_connection
train
def make_connection(v1, v2)
  raise "not a vertex" unless v1.is_a?(Vertex) && v2.is_a?(Vertex)
  raise "already connected" if is_connected?(v1, v2)
  # Make new edge
  edge = Edge.new(v1, v2)
  # Connect the two using the vertex method "connect"
  v1.connect(v2, edge)
  v2.connect(v1, edge)
  # Add to edge catalog
  @edges << edge
  edge
end
ruby
{ "resource": "" }
q221
Zadt.Graph.find_connection
train
def find_connection(v1, v2)
  raise "not a vertex" unless v1.is_a?(Vertex) && v2.is_a?(Vertex)
  raise "Vertices not connected" if !is_connected?(v1, v2)
  connection = v1.edges.select { |edge| edge.connection.include?(v2) }
  raise "Error finding connection" if connection.length > 1
  connection.first
end
ruby
{ "resource": "" }
q222
Zadt.Graph.is_connected?
train
def is_connected?(v1, v2)
  raise "not a vertex" unless v1.is_a?(Vertex) && v2.is_a?(Vertex)
  v1.connections.include?(v2)
end
ruby
{ "resource": "" }
q223
ActsAsSolr.ClassMethods.find_by_solr
train
def find_by_solr(query, options={})
  data = parse_query(query, options)
  return parse_results(data, options)
end
ruby
{ "resource": "" }
q224
ActsAsSolr.ClassMethods.rebuild_solr_index
train
def rebuild_solr_index(batch_size=300, options = {}, &finder)
  finder ||= lambda do |ar, sql_options|
    ar.all sql_options.merge!({:order => self.primary_key, :include => configuration[:solr_includes].keys})
  end

  start_time = Time.now
  options[:offset] ||= 0
  options[:threads] ||= 2
  options[:delayed_job] &= defined?(Delayed::Job)

  if batch_size > 0
    items_processed = 0
    offset = options[:offset]
    end_reached = false
    threads = []
    mutex = Mutex.new
    queue = Queue.new

    loop do
      items = finder.call(self, {:limit => batch_size, :offset => offset})
      add_batch = items.collect { |content| content.to_solr_doc }
      offset += items.size
      end_reached = items.size == 0
      break if end_reached

      if options[:threads] == threads.size
        threads.first.join
        threads.shift
      end

      queue << [items, add_batch]
      threads << Thread.new do
        iteration_start = Time.now
        iteration_items, iteration_add_batch = queue.pop(true)

        if options[:delayed_job]
          delay.solr_add iteration_add_batch
        else
          solr_add iteration_add_batch
          solr_commit
        end

        last_id = iteration_items.last.id
        time_so_far = Time.now - start_time
        iteration_time = Time.now - iteration_start

        mutex.synchronize do
          items_processed += iteration_items.size
          if options[:delayed_job]
            logger.info "#{Process.pid}: #{items_processed} items for #{self.name} have been sent to Delayed::Job in #{'%.3f' % time_so_far}s at #{'%.3f' % (items_processed / time_so_far)} items/sec. Last id: #{last_id}"
          else
            logger.info "#{Process.pid}: #{items_processed} items for #{self.name} have been batch added to index in #{'%.3f' % time_so_far}s at #{'%.3f' % (items_processed / time_so_far)} items/sec. Last id: #{last_id}"
          end
        end
      end
    end

    solr_commit if options[:delayed_job]
    threads.each{ |t| t.join }
  else
    items = finder.call(self, {})
    items.each { |content| content.solr_save }
    items_processed = items.size
  end

  if items_processed > 0
    solr_optimize
    time_elapsed = Time.now - start_time
    logger.info "Index for #{self.name} has been rebuilt (took #{'%.3f' % time_elapsed}s)"
  else
    "Nothing to index for #{self.name}"
  end
end
ruby
{ "resource": "" }
q225
S3MetaSync.Syncer.delete_old_temp_folders
train
def delete_old_temp_folders
  path = File.join(Dir.tmpdir, STAGING_AREA_PREFIX + '*')
  day = 24 * 60 * 60

  dirs = Dir.glob(path)
  dirs.select! { |dir| Time.now.utc - File.ctime(dir).utc > day } # only stale ones
  removed = dirs.each { |dir| FileUtils.rm_rf(dir) }

  log "Removed #{removed} old temp folder(s)" if removed.count > 0
end
ruby
{ "resource": "" }
q226
Linkage.FieldSet.fetch_key
train
def fetch_key(key)
  string_key = key.to_s
  keys.detect { |k| k.to_s.casecmp(string_key) == 0 }
end
ruby
{ "resource": "" }
q227
SimpleRecord.Translations.ruby_to_sdb
train
def ruby_to_sdb(name, value)
  return nil if value.nil?

  name = name.to_s

  # puts "Converting #{name} to sdb value=#{value}"
  # puts "atts_local=" + defined_attributes_local.inspect
  att_meta = get_att_meta(name)

  if value.is_a? Array
    ret = value.collect { |x| ruby_to_string_val(att_meta, x) }
  else
    ret = ruby_to_string_val(att_meta, value)
  end

  unless value.blank?
    if att_meta.options
      if att_meta.options[:encrypted]
        # puts "ENCRYPTING #{name} value #{value}"
        ret = Translations.encrypt(ret, att_meta.options[:encrypted])
        # puts 'encrypted value=' + ret.to_s
      end
      if att_meta.options[:hashed]
        # puts "hashing #{name}"
        ret = Translations.pass_hash(ret)
        # puts "hashed value=" + ret.inspect
      end
    end
  end

  return ret
end
ruby
{ "resource": "" }
q228
SimpleRecord.Translations.sdb_to_ruby
train
def sdb_to_ruby(name, value)
  # puts 'sdb_to_ruby arg=' + name.inspect + ' - ' + name.class.name + ' - value=' + value.to_s
  return nil if value.nil?

  att_meta = get_att_meta(name)
  if att_meta.options
    if att_meta.options[:encrypted]
      value = Translations.decrypt(value, att_meta.options[:encrypted])
    end
    if att_meta.options[:hashed]
      return PasswordHashed.new(value)
    end
  end

  if !has_id_on_end(name) && att_meta.type == :belongs_to
    class_name = att_meta.options[:class_name] || name.to_s[0...1].capitalize + name.to_s[1...name.to_s.length]
    # Camelize classnames with underscores (ie my_model.rb --> MyModel)
    class_name = class_name.camelize
    # puts "attr=" + @attributes[arg_id].inspect
    # puts 'val=' + @attributes[arg_id][0].inspect unless @attributes[arg_id].nil?
    ret = nil
    arg_id = name.to_s + '_id'
    arg_id_val = send("#{arg_id}")
    if arg_id_val
      if !cache_store.nil?
        # arg_id_val = @attributes[arg_id][0]
        cache_key = self.class.cache_key(class_name, arg_id_val)
        # puts 'cache_key=' + cache_key
        ret = cache_store.read(cache_key)
        # puts 'belongs_to incache=' + ret.inspect
      end
      if ret.nil?
        to_eval = "#{class_name}.find('#{arg_id_val}')"
        # puts 'to eval=' + to_eval
        begin
          ret = eval(to_eval) # (defined? #{arg}_id)
        rescue SimpleRecord::ActiveSdb::ActiveSdbError => ex
          if ex.message.include? "Couldn't find"
            ret = RemoteNil.new
          else
            raise ex
          end
        end
      end
    end
    value = ret
  else
    if value.is_a? Array
      value = value.collect { |x| string_val_to_ruby(att_meta, x) }
    else
      value = string_val_to_ruby(att_meta, value)
    end
  end
  value
end
ruby
{ "resource": "" }
q229
FamilySearch.URLTemplate.head
train
def head(template_values)
  raise FamilySearch::Error::MethodNotAllowed unless allow.include?('head')
  template_values = validate_values(template_values)
  t = Addressable::Template.new(@template)
  url = t.expand(template_values).to_s
  @client.head url
end
ruby
{ "resource": "" }
q230
PsUtilities.PreBuiltGet.get_one_student
train
def get_one_student(params)
  # api_path = "/ws/v1/district/student/{dcid}?expansions=school_enrollment,contact&q=student_username==xxxxxx237"
  ps_dcid = params[:dcid] || params[:dc_id] || params[:id]
  api_path = "/ws/v1/student/#{ps_dcid.to_i}"
  options = { query:
              { "extensions" => "s_stu_crdc_x,activities,c_studentlocator,u_students_extension,u_studentsuserfields,s_stu_ncea_x,s_stu_edfi_x,studentcorefields",
                "expansions" => "demographics,addresses,alerts,phones,school_enrollment,ethnicity_race,contact,contact_info,initial_enrollment,schedule_setup,fees,lunch" } }
  return {"errorMessage"=>{"message"=>"A valid dcid must be entered."}} if "#{ps_dcid.to_i}".eql? "0"
  answer = api(:get, api_path, options)
  return { student: (answer["student"] || []) } if answer.code.to_s.eql? "200"
  # return { student: (answer.parsed_response["student"] || []) } if answer.code.to_s.eql? "200"
  return {"errorMessage"=>"#{answer.response}"}
end
ruby
{ "resource": "" }
q231
PsUtilities.PreBuiltGet.build_query
train
def build_query(params)
  query = []
  query << "school_enrollment.enroll_status_code==#{params[:status_code]}" if params.has_key?(:status_code)
  query << "school_enrollment.enroll_status==#{params[:enroll_status]}" if params.has_key?(:enroll_status)
  query << "student_username==#{params[:username]}" if params.has_key?(:username)
  query << "name.last_name==#{params[:last_name]}" if params.has_key?(:last_name)
  query << "name.first_name==#{params[:first_name]}" if params.has_key?(:first_name)
  query << "local_id==#{params[:local_id]}" if params.has_key?(:local_id)
  query << "local_id==#{params[:student_id]}" if params.has_key?(:student_id)
  query << "id==#{params[:dcid]}" if params.has_key?(:dcid)
  query << "id==#{params[:id]}" if params.has_key?(:id)
  answer = query.join(";")
  answer
end
ruby
{ "resource": "" }
q232
TinyCI.Compactor.directories_to_compact
train
def directories_to_compact
  builds = Dir.entries builds_dir
  builds.select! { |e| File.directory? builds_dir(e) }
  builds.reject! { |e| %w{. ..}.include? e }
  builds.sort!
  builds = builds[0..-(@num_builds_to_leave+1)]
  builds.reject! { |e| @builds_to_leave.include?(e) || @builds_to_leave.include?(builds_dir(e, 'export')) }
  builds
end
ruby
{ "resource": "" }
q233
TinyCI.Compactor.compress_directory
train
def compress_directory(dir)
  File.open archive_path(dir), 'wb' do |oarchive_path|
    Zlib::GzipWriter.wrap oarchive_path do |gz|
      Gem::Package::TarWriter.new gz do |tar|
        Find.find "#{builds_dir}/"+dir do |f|
          relative_path = f.sub "#{builds_dir}/", ""
          mode = File.stat(f).mode
          size = File.stat(f).size

          if File.directory? f
            tar.mkdir relative_path, mode
          else
            tar.add_file_simple relative_path, mode, size do |tio|
              File.open f, 'rb' do |rio|
                while buffer = rio.read(BLOCKSIZE_TO_READ)
                  tio.write buffer
                end
              end
            end
          end
        end
      end
    end
  end
end
ruby
{ "resource": "" }
q234
TinyCI.Subprocesses.execute
train
def execute(*command, label: nil)
  output, status = Open3.capture2(*command.flatten)

  log_debug caller[0]
  log_debug "CMD: #{command.join(' ')}"
  log_debug "OUT: #{output}"

  unless status.success?
    log_error output
    raise SubprocessError.new(label, command.join(' '), status)
  end

  output.chomp
end
ruby
{ "resource": "" }
q235
TinyCI.Subprocesses.execute_pipe
train
def execute_pipe(*commands, label: nil)
  stdout, waiters = Open3.pipeline_r(*commands)
  output = stdout.read

  waiters.each_with_index do |waiter, i|
    status = waiter.value
    unless status.success?
      log_error output
      raise SubprocessError.new(label, commands[i].join(' '), status)
    end
  end

  output.chomp
end
ruby
{ "resource": "" }
q236
TinyCI.Subprocesses.execute_stream
train
def execute_stream(*command, label: nil, pwd: nil)
  opts = {}
  opts[:chdir] = pwd unless pwd.nil?

  Open3.popen2e(command.join(' '), opts) do |stdin, stdout_and_stderr, wait_thr|
    stdin.close

    until stdout_and_stderr.closed? || stdout_and_stderr.eof?
      line = stdout_and_stderr.gets
      log_info line.chomp
      $stdout.flush
    end

    unless wait_thr.value.success?
      raise SubprocessError.new(label, command.join(' '), wait_thr.value)
    end

    stdout_and_stderr.close
  end

  true
end
ruby
{ "resource": "" }
q237
Borderlands.PropertyManager.property
train
def property(contractid, groupid, propertyid)
  begin
    property_hash = @client.get_json_body(
      "/papi/v0/properties/#{propertyid}",
      {
        'contractId' => contractid,
        'groupId' => groupid,
      },
    )
  rescue => e
    puts "# unable to retrieve property for (group=#{groupid},contract=#{contractid},property=#{propertyid}): #{e.message}"
  end
  Property.new property_hash['properties']['items'].first
end
ruby
{ "resource": "" }
q238
Borderlands.PropertyManager.properties
train
def properties
  properties = []
  contract_group_pairs.each do |cg|
    begin
      properties_hash = @client.get_json_body(
        "/papi/v0/properties/",
        {
          'contractId' => cg[:contract],
          'groupId' => cg[:group],
        }
      )
      if properties_hash && properties_hash['properties']['items']
        properties_hash['properties']['items'].each do |prp|
          properties << Property.new(prp)
        end
      end
    rescue Exception => e
      # probably due to Akamai PM permissions, don't raise for caller to handle
      puts "# unable to retrieve properties for (group=#{cg[:group]},contract=#{cg[:contract]}): #{e.message}"
    end
  end
  properties
end
ruby
{ "resource": "" }
q239
Borderlands.PropertyManager.hostnames
train
def hostnames(property, skip_update_dns_status = false, version = nil)
  raise 'property must be a Borderlands::Property object' unless property.is_a? Property
  version ||= property.productionversion
  begin
    hostnames_hash = @client.get_json_body(
      "/papi/v0/properties/#{property.id}/versions/#{version}/hostnames/",
      { 'contractId' => property.contractid, 'groupId' => property.groupid },
    )
  rescue Exception => e
    raise "unable to retrieve hostnames for #{property.name}: #{e.message}"
  end
  if hostnames_hash && hostnames_hash['hostnames'] && hostnames_hash['hostnames']['items']
    hostnames = hostnames_hash['hostnames']['items'].map do |ehn|
      h = Hostname.new ehn
      h.update_status unless skip_update_dns_status
      h
    end
  else
    # no hostnames returned
    hostnames = nil
  end
  hostnames
end
ruby
{ "resource": "" }
q240
Borderlands.PropertyManager.ruletree
train
def ruletree(property, version = nil)
  raise 'property must be a Borderlands::Property object' unless property.is_a? Property
  version ||= property.productionversion
  tree = nil
  begin
    rt = @client.get_json_body(
      "/papi/v0/properties/#{property.id}/versions/#{version}/rules/",
      { 'contractId' => property.contractid, 'groupId' => property.groupid },
    )
    tree = Rule.new rt['rules']
  rescue Exception => e
    raise "unable to retrieve rule tree for #{property.name}: #{e.message}"
  end
  tree
end
ruby
{ "resource": "" }
q241
UnionStationHooks.SpecHelper.find_passenger_config
train
def find_passenger_config
  passenger_config = ENV['PASSENGER_CONFIG']
  if passenger_config.nil? || passenger_config.empty?
    passenger_config = find_passenger_config_vendor || find_passenger_config_in_path
  end
  if passenger_config.nil? || passenger_config.empty?
    abort 'ERROR: The unit tests are to be run against a specific ' \
      'Passenger version. However, the \'passenger-config\' command is ' \
      'not found. Please install Passenger, or (if you are sure ' \
      'Passenger is installed) set the PASSENGER_CONFIG environment ' \
      'variable to the \'passenger-config\' command.'
  end
  passenger_config
end
ruby
{ "resource": "" }
q242
UnionStationHooks.SpecHelper.undo_bundler
train
def undo_bundler
  clean_env = nil
  Bundler.with_clean_env do
    clean_env = ENV.to_hash
  end
  ENV.replace(clean_env)
end
ruby
{ "resource": "" }
q243
UnionStationHooks.SpecHelper.write_file
train
def write_file(path, content)
  dir = File.dirname(path)
  if !File.exist?(dir)
    FileUtils.mkdir_p(dir)
  end
  File.open(path, 'wb') do |f|
    f.write(content)
  end
end
ruby
{ "resource": "" }
q244
UnionStationHooks.SpecHelper.debug_shell
train
def debug_shell
  puts '------ Opening debug shell -----'
  @orig_dir = Dir.pwd
  begin
    if respond_to?(:prepare_debug_shell)
      prepare_debug_shell
    end
    system('bash')
  ensure
    Dir.chdir(@orig_dir)
  end
  puts '------ Exiting debug shell -----'
end
ruby
{ "resource": "" }
q245
UnionStationHooks.SpecHelper.eventually
train
def eventually(deadline_duration = 3, check_interval = 0.05)
  deadline = Time.now + deadline_duration
  while Time.now < deadline
    if yield
      return
    else
      sleep(check_interval)
    end
  end
  raise 'Time limit exceeded'
end
ruby
{ "resource": "" }
q246
UnionStationHooks.SpecHelper.should_never_happen
train
def should_never_happen(deadline_duration = 0.5, check_interval = 0.05)
  deadline = Time.now + deadline_duration
  while Time.now < deadline
    if yield
      raise "That which shouldn't happen happened anyway"
    else
      sleep(check_interval)
    end
  end
end
ruby
{ "resource": "" }
q247
Runnable.ClassMethods.define_command
train
def define_command( name, opts = {}, &block )
  blocking = opts[:blocking] || false
  log_path = opts[:log_path] || false

  commands[name] = { :blocking => blocking }

  define_method( name ) do |*args|
    if block
      run name, block.call(*args), log_path
    else
      run name, nil, log_path
    end

    join if blocking
  end
end
ruby
{ "resource": "" }
q248
Runnable.ClassMethods.method_missing
train
def method_missing( name, *opts )
  raise NoMethodError.new( name.to_s ) unless name.to_s =~ /([a-z]*)_([a-z]*)/

  # command_processors
  if $2 == "processors"
    commands[$1.to_sym][:outputs] = opts.first[:outputs]
    commands[$1.to_sym][:exceptions] = opts.first[:exceptions]
  end
end
ruby
{ "resource": "" }
q249
Grooveshark.User.library_remove
train
def library_remove(song)
  fail ArgumentError, 'Song object required' unless song.is_a?(Song)
  req = { userID: @id,
          songID: song.id,
          albumID: song.album_id,
          artistID: song.artist_id }
  @client.request('userRemoveSongFromLibrary', req)
end
ruby
{ "resource": "" }
q250
Grooveshark.User.get_playlist
train
def get_playlist(id)
  result = playlists.select { |p| p.id == id }
  result.nil? ? nil : result.first
end
ruby
{ "resource": "" }
q251
Grooveshark.User.create_playlist
train
def create_playlist(name, description = '', songs = [])
  @client.request('createPlaylist',
                  'playlistName' => name,
                  'playlistAbout' => description,
                  'songIDs' => songs.map do |s|
                    s.is_a?(Song) ? s.id : s.to_s
                  end)
end
ruby
{ "resource": "" }
q252
Grooveshark.User.add_favorite
train
def add_favorite(song)
  song_id = song.is_a?(Song) ? song.id : song
  @client.request('favorite', what: 'Song', ID: song_id)
end
ruby
{ "resource": "" }
q253
CrmFormatter.Phone.check_phone_status
train
def check_phone_status(hsh)
  phone = hsh[:phone]
  phone_f = hsh[:phone_f]
  status = 'invalid'
  status = phone != phone_f ? 'formatted' : 'unchanged' if phone && phone_f
  hsh[:phone_status] = status if status.present?
  hsh
end
ruby
{ "resource": "" }
q254
Eluka.Model.add
train
def add (data, label)
  raise "No meaningful label associated with data" unless ([:positive, :negative].include? label)

  #Create a data point in the vector space from the datum given
  data_point = Eluka::DataPoint.new(data, @analyzer)

  #Add the data point to the feature space
  #Expand the training feature space to include the data point
  @fv_train.add(data_point.vector, @labels[label])
end
ruby
{ "resource": "" }
q255
Rex::SSLScan.Result.add_cipher
train
def add_cipher(version, cipher, key_length, status)
  unless @supported_versions.include? version
    raise ArgumentError, "Must be a supported SSL Version"
  end
  unless OpenSSL::SSL::SSLContext.new(version).ciphers.flatten.include?(cipher) || @deprecated_weak_ciphers.include?(cipher)
    raise ArgumentError, "Must be a valid SSL Cipher for #{version}!"
  end
  unless key_length.kind_of? Integer
    raise ArgumentError, "Must supply a valid key length"
  end
  unless [:accepted, :rejected].include? status
    raise ArgumentError, "Status must be either :accepted or :rejected"
  end

  strong_cipher_ctx = OpenSSL::SSL::SSLContext.new(version)
  # OpenSSL Directive For Strong Ciphers
  # See: http://www.rapid7.com/vulndb/lookup/ssl-weak-ciphers
  strong_cipher_ctx.ciphers = "ALL:!aNULL:!eNULL:!LOW:!EXP:RC4+RSA:+HIGH:+MEDIUM"

  if strong_cipher_ctx.ciphers.flatten.include? cipher
    weak = false
  else
    weak = true
  end

  cipher_details = {:version => version, :cipher => cipher, :key_length => key_length,
                    :weak => weak, :status => status}
  @ciphers << cipher_details
end
ruby
{ "resource": "" }
q256
BlueprintClient.AssetTypeTemplatesApi.add
train
def add(namespace, asset_type, template_body, opts = {})
  data, _status_code, _headers = add_with_http_info(namespace, asset_type, template_body, opts)
  return data
end
ruby
{ "resource": "" }
q257
BlueprintClient.AssetTypeTemplatesApi.delete
train
def delete(namespace, asset_type, opts = {})
  data, _status_code, _headers = delete_with_http_info(namespace, asset_type, opts)
  return data
end
ruby
{ "resource": "" }
q258
BlueprintClient.AssetTypeTemplatesApi.put
train
def put(namespace, asset_type, template_body, opts = {})
  data, _status_code, _headers = put_with_http_info(namespace, asset_type, template_body, opts)
  return data
end
ruby
{ "resource": "" }
q259
Log.ProgressBar.thr_msg
train
def thr_msg
  if @history.nil?
    @history ||= [[@ticks, Time.now]]
  else
    @history << [@ticks, Time.now]

    max_history ||= case
                    when @ticks > 20
                      count = @ticks - @last_count
                      count = 1 if count == 0
                      if @max
                        times = @max / count
                        num = times / 20
                        num = 2 if num < 2
                      else
                        num = 10
                      end
                      count * num
                    else
                      20
                    end
    max_history = 30 if max_history > 30

    @history.shift if @history.length > max_history
  end

  @mean_max ||= 0
  if @history.length > 3
    sticks, stime = @history.first
    ssticks, sstime = @history[-3]
    lticks, ltime = @history.last

    mean = @mean = (lticks - sticks).to_f / (ltime - stime)
    short_mean = (lticks - ssticks).to_f / (ltime - sstime)

    @mean_max = mean if mean > @mean_max
  end

  if short_mean
    thr = short_mean
  else
    thr = begin
            (@ticks || 1) / (Time.now - @start)
          rescue
            1
          end
  end

  thr = 0.0000001 if thr == 0

  if mean.nil? or mean.to_i > 1
    str = "#{ Log.color :blue, thr.to_i.to_s } per sec."
    #str << " #{ Log.color :yellow, mean.to_i.to_s } avg. #{Log.color :yellow, @mean_max.to_i.to_s} max." if @mean_max > 0
  else
    str = "#{ Log.color :blue, (1/thr).ceil.to_s } secs each"
    #str << " #{ Log.color :yellow, (1/mean).ceil.to_s } avg. #{Log.color :yellow, (1/@mean_max).ceil.to_s} min." if @mean_max > 0
  end

  str
end
ruby
{ "resource": "" }
q260
Rex::SSLScan.Scanner.valid?
train
def valid?
  begin
    @host = Rex::Socket.getaddress(@host, true)
  rescue
    return false
  end
  @port.kind_of?(Integer) && @port >= 0 && @port <= 65535 && @timeout.kind_of?(Integer)
end
ruby
{ "resource": "" }
q261
Rex::SSLScan.Scanner.scan
train
def scan
  scan_result = Rex::SSLScan::Result.new
  scan_result.openssl_sslv2 = sslv2

  # If we can't get any SSL connection, then don't bother testing
  # individual ciphers.
  if test_ssl == :rejected and test_tls == :rejected
    return scan_result
  end

  threads = []
  ciphers = Queue.new
  @supported_versions.each do |ssl_version|
    sslctx = OpenSSL::SSL::SSLContext.new(ssl_version)
    sslctx.ciphers.each do |cipher_name, ssl_ver, key_length, alg_length|
      threads << Thread.new do
        begin
          status = test_cipher(ssl_version, cipher_name)
          ciphers << [ssl_version, cipher_name, key_length, status]
          if status == :accepted and scan_result.cert.nil?
            scan_result.cert = get_cert(ssl_version, cipher_name)
          end
        rescue Rex::SSLScan::Scanner::InvalidCipher
          next
        end
      end
    end
  end
  threads.each { |thr| thr.join }

  until ciphers.empty? do
    cipher = ciphers.pop
    scan_result.add_cipher(*cipher)
  end

  scan_result
end
ruby
{ "resource": "" }
q262
Rex::SSLScan.Scanner.get_cert
train
def get_cert(ssl_version, cipher)
  validate_params(ssl_version, cipher)
  begin
    scan_client = Rex::Socket::Tcp.create(
      'PeerHost'   => @host,
      'PeerPort'   => @port,
      'SSL'        => true,
      'SSLVersion' => ssl_version,
      'SSLCipher'  => cipher,
      'Timeout'    => @timeout
    )
    cert = scan_client.peer_cert
    if cert.kind_of? OpenSSL::X509::Certificate
      return cert
    else
      return nil
    end
  rescue ::Exception => e
    return nil
  ensure
    if scan_client
      scan_client.close
    end
  end
end
ruby
{ "resource": "" }
q263
Rex::SSLScan.Scanner.validate_params
train
def validate_params(ssl_version, cipher)
  raise StandardError, "The scanner configuration is invalid" unless valid?
  unless @supported_versions.include? ssl_version
    raise StandardError, "SSL Version must be one of: #{@supported_versions.to_s}"
  end
  if ssl_version == :SSLv2 and sslv2 == false
    raise StandardError, "Your OS hates freedom! Your OpenSSL libs are compiled without SSLv2 support!"
  else
    unless OpenSSL::SSL::SSLContext.new(ssl_version).ciphers.flatten.include? cipher
      raise InvalidCipher, "Must be a valid SSL Cipher for #{ssl_version}!"
    end
  end
end
ruby
{ "resource": "" }
q264
UnionStationHooks.RequestReporter.log_user_activity_begin
train
def log_user_activity_begin(name)
  return do_nothing_on_null(:log_user_activity_begin) if null?
  id = next_user_activity_name
  @transaction.log_activity_begin(id, UnionStationHooks.now, name)
  id
end
ruby
{ "resource": "" }
q265
UnionStationHooks.RequestReporter.log_user_activity_end
train
def log_user_activity_end(id, has_error = false)
  return do_nothing_on_null(:log_user_activity_end) if null?
  @transaction.log_activity_end(id, UnionStationHooks.now, has_error)
end
ruby
{ "resource": "" }
q266
UnionStationHooks.RequestReporter.log_user_activity
train
def log_user_activity(name, begin_time, end_time, has_error = false)
  return do_nothing_on_null(:log_user_activity) if null?
  @transaction.log_activity(next_user_activity_name,
    begin_time, end_time, name, has_error)
end
ruby
{ "resource": "" }
q267
UnionStationHooks.RequestReporter.log_exception
train
def log_exception(exception)
  transaction = @context.new_transaction(
    @app_group_name, :exceptions, @key)
  begin
    return do_nothing_on_null(:log_exception) if transaction.null?

    base64_message = exception.message
    base64_message = exception.to_s if base64_message.empty?
    base64_message = Utils.base64(base64_message)
    base64_backtrace = Utils.base64(exception.backtrace.join("\n"))

    if controller_action_logged?
      transaction.message("Controller action: #{@controller_action}")
    end
    transaction.message("Request transaction ID: #{@txn_id}")
    transaction.message("Message: #{base64_message}")
    transaction.message("Class: #{exception.class.name}")
    transaction.message("Backtrace: #{base64_backtrace}")
  ensure
    transaction.close
  end
end
ruby
{ "resource": "" }
q268
UnionStationHooks.RequestReporter.log_database_query
train
def log_database_query(options)
  return do_nothing_on_null(:log_database_query) if null?
  Utils.require_key(options, :begin_time)
  Utils.require_key(options, :end_time)
  Utils.require_non_empty_key(options, :query)

  name = options[:name] || 'SQL'
  begin_time = options[:begin_time]
  end_time = options[:end_time]
  query = options[:query]

  @transaction.log_activity(next_database_query_name,
    begin_time, end_time, "#{name}\n#{query}")
end
ruby
{ "resource": "" }
q269
Linkage.Matcher.mean
train
def mean
  w = @comparators.collect { |comparator| comparator.weight || 1 }

  @score_set.open_for_reading
  @score_set.each_pair do |id_1, id_2, scores|
    sum = 0
    scores.each do |key, value|
      sum += value * w[key-1]
    end
    mean = sum / @comparators.length.to_f
    if mean >= @threshold
      changed
      notify_observers(id_1, id_2, mean)
    end
  end
  @score_set.close
end
ruby
{ "resource": "" }
q270
Finitio.SubType.dress
train
def dress(value, handler = DressHelper.new)
  # Check that the supertype is able to dress the value.
  # Rewrite and set cause to any encountered TypeError.
  uped = handler.try(self, value) do
    super_type.dress(value, handler)
  end

  # Check each constraint in turn
  constraints.each do |constraint|
    next if constraint === uped
    msg = handler.default_error_message(self, value)
    if constraint.named? && constraints.size > 1
      msg << " (not #{constraint.name})"
    end
    handler.fail!(msg)
  end

  # seems good, return the uped value
  uped
end
ruby
{ "resource": "" }
q271
Quora.Auth.login
train
def login(user, password)
  endpoint = URI.parse(QUORA_URI)
  http = Net::HTTP.new(endpoint.host, endpoint.port)
  resp = http.get('/login/')
  cookie = resp["set-cookie"]

  # TODO: improve this rubbish
  # get formkey value
  start = resp.body.index("Q.formkey")
  formkey = resp.body[start..start+200].split("\"")[1]

  # get window value
  start = resp.body.index("webnode2.windowId")
  window = resp.body[start..start+200].split("\"")[1]

  # get __vcon_json value
  start = resp.body.index("InlineLogin")
  vcon_json = resp.body[start..start+200]
  start = vcon_json.index("live")
  vcon_json = vcon_json[start..-1]
  vcon_json = vcon_json.split("\"")[0]
  vcon_json = vcon_json.split(":")
  vcon_json.map! { |value| "\"#{value}\"" }
  vcon_json = "[#{vcon_json.join(",")}]"
  vcon_json = CGI::escape(vcon_json)

  user = CGI::escape(user)
  password = CGI::escape(password)

  body = "json=%7B%22args%22%3A%5B%22#{user}%22%2C%22#{password}%22%2Ctrue%5D%2C%22kwargs%22%3A%7B%7D%7D&formkey=#{formkey}&window_id=#{window}&__vcon_json=#{vcon_json}&__vcon_method=do_login"

  headers = {
    "Content-Type" => "application/x-www-form-urlencoded",
    "X-Requested-With" => "XMLHttpRequest",
    "Accept" => "application/json, text/javascript, */*",
    "Cookie" => cookie,
    "User-Agent" => "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.552.237 Safari/534.10",
    "Content-Length" => body.length.to_s,
    "Accept-Charset" => "ISO-8859-1,utf-8;q=0.7,*;q=0.3",
    "Accept-Language" => "es-ES,es;q=0.8",
    "Accept-Encoding" => "gzip,deflate,sdch",
    "Origin" => "http://www.quora.com",
    "Host" => "www.quora.com",
    "Referer" => "http://www.quora.com/login/"
  }

  resp = http.post("/webnode2/server_call_POST", body, headers)

  if resp.code == "200"
    cookie
  else
    ""
  end
end
ruby
{ "resource": "" }
q272
ColumnPack.BinPacker.empty_space
train
def empty_space
  pack_all if @needs_packing

  max = @sizes.each.max
  space = 0
  @sizes.each { |size| space += max - size }
  space
end
ruby
{ "resource": "" }
q273
ColumnPack.BinPacker.tall_to_middle
train
def tall_to_middle
  if (@total_bins > 1) && ((@total_bins % 2) != 0)
    _, max_col = @sizes.each_with_index.max
    mid_col = @total_bins / 2

    temp = @bins[mid_col].clone
    @bins[mid_col] = @bins[max_col]
    @bins[max_col] = temp
  end
end
ruby
{ "resource": "" }
q274
PsUtilities.Connection.api
train
def api(verb, api_path, options={})
  count = 0
  retries = 3
  ps_url = base_uri + api_path
  options = options.merge(headers)
  begin
    HTTParty.send(verb, ps_url, options)
  rescue Net::ReadTimeout, Net::OpenTimeout
    if count < retries
      count += 1
      retry
    else
      { error: "no response (timeout) from URL: #{ps_url}" }
    end
  end
end
ruby
{ "resource": "" }
q275
PsUtilities.Connection.authenticate
train
def authenticate
  ps_url = base_uri + auth_path
  response = HTTParty.post( ps_url,
                            { headers: auth_headers,
                              body: 'grant_type=client_credentials' } )
  if response.code.to_s.eql? "200"
    @auth_info = response.parsed_response
    @auth_info['token_expires'] = Time.now + response.parsed_response['expires_in'].to_i
    @headers[:headers].merge!('Authorization' => 'Bearer ' + auth_info['access_token'])
    return auth_info
  else
    # throw error if - error returned -- nothing else will work
    raise AuthError.new("No Auth Token Returned", ps_url, client)
  end
end
ruby
{ "resource": "" }
q276
Levelup.Api.apps
train
def apps(app_id = nil)
  if app_id
    Endpoints::SpecificApp.new(app_id)
  else
    Endpoints::Apps.new(app_access_token)
  end
end
ruby
{ "resource": "" }
q277
Levelup.Api.orders
train
def orders(order_uuid = nil)
  if order_uuid
    Endpoints::SpecificOrder.new(order_uuid)
  else
    Endpoints::Orders.new
  end
end
ruby
{ "resource": "" }
q278
Kril.Consumer.consume_all
train
def consume_all(topic)
  config = @config.clone
  config[:group_id] = SecureRandom.uuid
  consumer = build_consumer(topic, true, config)
  consumer.each_message do |message|
    yield decode(message), consumer
  end
ensure
  consumer.stop
end
ruby
{ "resource": "" }
q279
SudoAttributes.ClassMethods.sudo_create!
train
def sudo_create!(attributes = nil, &block)
  if attributes.is_a?(Array)
    attributes.collect { |attr| sudo_create!(attr, &block) }
  else
    object = sudo_new(attributes)
    yield(object) if block_given?
    object.save!
    object
  end
end
ruby
{ "resource": "" }
q280
Linkage.Field.ruby_type
train
def ruby_type
  unless @ruby_type
    hsh = case @schema[:db_type].downcase
    when /\A(medium|small)?int(?:eger)?(?:\((\d+)\))?( unsigned)?\z/o
      if !$1 && $2 && $2.to_i >= 10 && $3
        # Unsigned integer type with 10 digits can potentially contain values which
        # don't fit signed integer type, so use bigint type in target database.
        {:type=>Bignum}
      else
        {:type=>Integer}
      end
    when /\Atinyint(?:\((\d+)\))?(?: unsigned)?\z/o
      {:type =>schema[:type] == :boolean ? TrueClass : Integer}
    when /\Abigint(?:\((?:\d+)\))?(?: unsigned)?\z/o
      {:type=>Bignum}
    when /\A(?:real|float|double(?: precision)?|double\(\d+,\d+\)(?: unsigned)?)\z/o
      {:type=>Float}
    when 'boolean'
      {:type=>TrueClass}
    when /\A(?:(?:tiny|medium|long|n)?text|clob)\z/o
      {:type=>String, :text=>true}
    when 'date'
      {:type=>Date}
    when /\A(?:small)?datetime\z/o
      {:type=>DateTime}
    when /\Atimestamp(?:\((\d+)\))?(?: with(?:out)? time zone)?\z/o
      {:type=>DateTime, :size=>($1.to_i if $1)}
    when /\Atime(?: with(?:out)? time zone)?\z/o
      {:type=>Time, :only_time=>true}
    when /\An?char(?:acter)?(?:\((\d+)\))?\z/o
      {:type=>String, :size=>($1.to_i if $1), :fixed=>true}
    when /\A(?:n?varchar|character varying|bpchar|string)(?:\((\d+)\))?\z/o
      {:type=>String, :size=>($1.to_i if $1)}
    when /\A(?:small)?money\z/o
      {:type=>BigDecimal, :size=>[19,2]}
    when /\A(?:decimal|numeric|number)(?:\((\d+)(?:,\s*(\d+))?\))?\z/o
      s = [($1.to_i if $1), ($2.to_i if $2)].compact
      {:type=>BigDecimal, :size=>(s.empty? ? nil : s)}
    when /\A(?:bytea|(?:tiny|medium|long)?blob|(?:var)?binary)(?:\((\d+)\))?\z/o
      {:type=>File, :size=>($1.to_i if $1)}
    when /\A(?:year|(?:int )?identity)\z/o
      {:type=>Integer}
    else
      {:type=>String}
    end
    hsh.delete_if { |k, v| v.nil? }
    @ruby_type = {:type => hsh.delete(:type)}
    @ruby_type[:opts] = hsh if !hsh.empty?
  end
  @ruby_type
end
ruby
{ "resource": "" }
q281
Crawler.Observer.update
train
def update(response, url)
  @log.puts "Scanning: #{url}"
  if response.kind_of?(Net::HTTPClientError) or response.kind_of?(Net::HTTPServerError)
    @log.puts "#{response.code} encountered for #{url}"
  end
end
ruby
{ "resource": "" }
q282
Skyper.Skype.answer
train
def answer(call)
  cmd = "ALTER CALL #{call.call_id} ANSWER"
  r = Skype.send_command cmd
  raise RuntimeError, "Failed to answer call. Skype returned '#{r}'" unless r == cmd
end
ruby
{ "resource": "" }
q283
SimpleRecord.Attributes.get_attribute
train
def get_attribute(name)
  # puts "get_attribute #{name}"
  # Check if this arg is already converted
  name_s = name.to_s
  name = name.to_sym
  att_meta = get_att_meta(name)
  # puts "att_meta for #{name}: " + att_meta.inspect
  if att_meta && att_meta.type == :clob
    ret = @lobs[name]
    # puts 'get_attribute clob ' + ret.inspect
    if ret
      if ret.is_a? RemoteNil
        return nil
      else
        return ret
      end
    end
    # get it from s3
    unless new_record?
      if self.class.get_sr_config[:single_clob]
        begin
          single_clob = s3_bucket(false, :s3_bucket=>:new).get(single_clob_id)
          single_clob = JSON.parse(single_clob)
          # puts "single_clob=" + single_clob.inspect
          single_clob.each_pair do |name2, val|
            @lobs[name2.to_sym] = val
          end
          ret = @lobs[name]
          SimpleRecord.stats.s3_gets += 1
        rescue Aws::AwsError => ex
          if ex.include?(/NoSuchKey/) || ex.include?(/NoSuchBucket/)
            ret = nil
          else
            raise ex
          end
        end
      else
        begin
          ret = s3_bucket.get(s3_lob_id(name))
          # puts 'got from s3 ' + ret.inspect
          SimpleRecord.stats.s3_gets += 1
        rescue Aws::AwsError => ex
          if ex.include?(/NoSuchKey/) || ex.include?(/NoSuchBucket/)
            ret = nil
          else
            raise ex
          end
        end
      end
      if ret.nil?
        ret = RemoteNil.new
      end
    end
    @lobs[name] = ret
    return nil if ret.is_a? RemoteNil
    return ret
  else
    @attributes_rb = {} unless @attributes_rb # was getting errors after upgrade.
    ret = @attributes_rb[name_s] # instance_variable_get(instance_var)
    return ret unless ret.nil?
    return nil if ret.is_a? RemoteNil
    ret = get_attribute_sdb(name)
    # p ret
    ret = sdb_to_ruby(name, ret)
    # p ret
    @attributes_rb[name_s] = ret
    return ret
  end
end
ruby
{ "resource": "" }
q284
OodSupport.ACL.ordered_check
train
def ordered_check(**kwargs)
  entries.each do |entry|
    if entry.match(**kwargs)
      # Check if its an allow or deny acl entry (may not be both)
      return true if entry.is_allow?
      return false if entry.is_deny?
    end
  end
  return default # default allow or default deny
end
ruby
{ "resource": "" }
q285
UV.ScheduledEvent.inspect
train
def inspect
  insp = String.new("#<#{self.class}:#{"0x00%x" % (self.__id__ << 1)} ")
  insp << "trigger_count=#{@trigger_count} "
  insp << "config=#{info} " if self.respond_to?(:info, true)
  insp << "next_scheduled=#{to_time(@next_scheduled)} "
  insp << "last_scheduled=#{to_time(@last_scheduled)} created=#{to_time(@created)}>"
  insp
end
ruby
{ "resource": "" }
q286
UV.OneShot.update
train
def update(time)
  @last_scheduled = @reactor.now
  parsed_time = Scheduler.parse_in(time, :quiet)
  if parsed_time.nil?
    # Parse at will throw an error if time is invalid
    parsed_time = Scheduler.parse_at(time) - @scheduler.time_diff
  else
    parsed_time += @last_scheduled
  end
  @next_scheduled = parsed_time
  @scheduler.reschedule(self)
end
ruby
{ "resource": "" }
q287
UV.Repeat.update
train
def update(every, timezone: nil)
  time = Scheduler.parse_in(every, :quiet) || Scheduler.parse_cron(every, :quiet, timezone: timezone)
  raise ArgumentError.new("couldn't parse \"#{every}\"") if time.nil?
  @every = time
  reschedule
end
ruby
{ "resource": "" }
q288
UV.Scheduler.every
train
def every(time)
  ms = Scheduler.parse_in(time)
  event = Repeat.new(self, ms)
  event.progress &Proc.new if block_given?
  schedule(event)
  event
end
ruby
{ "resource": "" }
q289
UV.Scheduler.in
train
def in(time)
  ms = @reactor.now + Scheduler.parse_in(time)
  event = OneShot.new(self, ms)
  event.progress &Proc.new if block_given?
  schedule(event)
  event
end
ruby
{ "resource": "" }
q290
UV.Scheduler.at
train
def at(time)
  ms = Scheduler.parse_at(time) - @time_diff
  event = OneShot.new(self, ms)
  event.progress &Proc.new if block_given?
  schedule(event)
  event
end
ruby
{ "resource": "" }
q291
UV.Scheduler.cron
train
def cron(schedule, timezone: nil)
  ms = Scheduler.parse_cron(schedule, timezone: timezone)
  event = Repeat.new(self, ms)
  event.progress &Proc.new if block_given?
  schedule(event)
  event
end
ruby
{ "resource": "" }
q292
UV.Scheduler.reschedule
train
def reschedule(event)
  # Check promise is not resolved
  return if event.resolved?

  @critical.synchronize {
    # Remove the event from the scheduled list and ensure it is in the schedules set
    if @schedules.include?(event)
      remove(event)
    else
      @schedules << event
    end

    # optimal algorithm for inserting into an already sorted list
    Bisect.insort(@scheduled, event)

    # Update the timer
    check_timer
  }
end
ruby
{ "resource": "" }
q293
UV.Scheduler.unschedule
train
def unschedule(event)
  @critical.synchronize {
    # Only call delete and update the timer when required
    if @schedules.include?(event)
      @schedules.delete(event)
      remove(event)
      check_timer
    end
  }
end
ruby
{ "resource": "" }
q294
UV.Scheduler.remove
train
def remove(obj)
  position = nil

  @scheduled.each_index do |i|
    # object level comparison
    if obj.equal? @scheduled[i]
      position = i
      break
    end
  end

  @scheduled.slice!(position) unless position.nil?
end
ruby
{ "resource": "" }
q295
UV.Scheduler.check_timer
train
def check_timer
  @reactor.update_time

  existing = @next
  schedule = @scheduled.first
  @next = schedule.nil? ? nil : schedule.next_scheduled

  if existing != @next
    # lazy load the timer
    if @timer.nil?
      new_timer
    else
      @timer.stop
    end

    if not @next.nil?
      in_time = @next - @reactor.now

      # Ensure there are never negative start times
      if in_time > 3
        @timer.start(in_time)
      else
        # Effectively next tick
        @timer.start(0)
      end
    end
  end
end
ruby
{ "resource": "" }
q296
UV.Scheduler.on_timer
train
def on_timer
  @critical.synchronize {
    schedule = @scheduled.shift
    @schedules.delete(schedule)
    schedule.trigger

    # execute schedules that are within 3ms of this event
    # Basic timer coalescing..
    now = @reactor.now + 3
    while @scheduled.first && @scheduled.first.next_scheduled <= now
      schedule = @scheduled.shift
      @schedules.delete(schedule)
      schedule.trigger
    end
    check_timer
  }
end
ruby
{ "resource": "" }
q297
Stately.Core.stately
train
def stately(*opts, &block)
  options = opts.last.is_a?(Hash) ? opts.last : {}
  options[:attr] ||= :state

  @stately_machine = Stately::Machine.new(options[:attr], options[:start])
  @stately_machine.instance_eval(&block) if block_given?

  include Stately::InstanceMethods
end
ruby
{ "resource": "" }
q298
Dk.Runner.build_local_cmd
train
def build_local_cmd(task, cmd_str, input, given_opts)
  Local::Cmd.new(cmd_str, given_opts)
end
ruby
{ "resource": "" }
q299
Dk.Runner.build_remote_cmd
train
def build_remote_cmd(task, cmd_str, input, given_opts, ssh_opts)
  Remote::Cmd.new(cmd_str, ssh_opts)
end
ruby
{ "resource": "" }