idx int64 0 24.9k | question stringlengths 68 4.14k | target stringlengths 9 749 |
|---|---|---|
1,400 | def unique_temp ( name ) name = name . to_s if name && ! name . empty? name = name . to_s . gsub ( '<=>' , '$lt_eq_gt' ) . gsub ( '===' , '$eq_eq_eq' ) . gsub ( '==' , '$eq_eq' ) . gsub ( '=~' , '$eq_tilde' ) . gsub ( '!~' , '$excl_tilde' ) . gsub ( '!=' , '$not_eq' ) . gsub ( '<=' , '$lt_eq' ) . gsub ( '>=' , '$gt_eq' ) . gsub ( '=' , '$eq' ) . gsub ( '?' , '$ques' ) . gsub ( '!' , '$excl' ) . gsub ( '/' , '$slash' ) . gsub ( '%' , '$percent' ) . gsub ( '+' , '$plus' ) . gsub ( '-' , '$minus' ) . gsub ( '<' , '$lt' ) . gsub ( '>' , '$gt' ) . gsub ( / \w \$ / , '$' ) end unique = ( @unique += 1 ) "#{'$' unless name.start_with?('$')}#{name}$#{unique}" end | Used to generate a unique id name per file . These are used mainly to name method bodies for methods that use blocks . |
1,401 | def process ( sexp , level = :expr ) return fragment ( '' , scope ) if sexp . nil? if handler = handlers [ sexp . type ] return handler . new ( sexp , level , self ) . compile_to_fragments else error "Unsupported sexp: #{sexp.type}" end end | Process the given sexp by creating a node instance based on its type and compiling it to fragments . |
1,402 | def returns ( sexp ) return returns s ( :nil ) unless sexp case sexp . type when :undef returns s ( :begin , sexp , s ( :nil ) ) when :break , :next , :redo sexp when :yield sexp . updated ( :returnable_yield , nil ) when :when * when_sexp , then_sexp = * sexp sexp . updated ( nil , [ * when_sexp , returns ( then_sexp ) ] ) when :rescue body_sexp , * resbodies , else_sexp = * sexp resbodies = resbodies . map do | resbody | returns ( resbody ) end if else_sexp else_sexp = returns ( else_sexp ) end sexp . updated ( nil , [ returns ( body_sexp ) , * resbodies , else_sexp ] ) when :resbody klass , lvar , body = * sexp sexp . updated ( nil , [ klass , lvar , returns ( body ) ] ) when :ensure rescue_sexp , ensure_body = * sexp sexp = sexp . updated ( nil , [ returns ( rescue_sexp ) , ensure_body ] ) s ( :js_return , sexp ) when :begin , :kwbegin * rest , last = * sexp sexp . updated ( nil , [ * rest , returns ( last ) ] ) when :while , :until , :while_post , :until_post sexp when :return , :js_return , :returnable_yield sexp when :xstr sexp . updated ( nil , [ s ( :js_return , * sexp . children ) ] ) when :if cond , true_body , false_body = * sexp sexp . updated ( nil , [ cond , returns ( true_body ) , returns ( false_body ) ] ) else s ( :js_return , sexp ) . updated ( nil , nil , location : sexp . loc , ) end end | The last sexps in method bodies for example need to be returned in the compiled javascript . Due to syntax differences between javascript and ruby some sexps need to be handled specially . For example if statements cannot be returned in javascript so instead the truthy and falsy parts of the if statement both need to be returned instead . |
1,403 | def on_error ( t , val , vstack ) raise ParseError , sprintf ( "\nparse error on value %s (%s)" , val . inspect , token_to_str ( t ) || '?' ) end | This method is called when a parse error is found . |
1,404 | def racc_read_token ( t , tok , val ) @racc_debug_out . print 'read ' @racc_debug_out . print tok . inspect , '(' , racc_token2str ( t ) , ') ' @racc_debug_out . puts val . inspect @racc_debug_out . puts end | For debugging output |
1,405 | def memberwise ( op , x ) case x when Benchmark :: Tms Benchmark :: Tms . new ( utime . __send__ ( op , x . utime ) , stime . __send__ ( op , x . stime ) , cutime . __send__ ( op , x . cutime ) , cstime . __send__ ( op , x . cstime ) , real . __send__ ( op , x . real ) ) else Benchmark :: Tms . new ( utime . __send__ ( op , x ) , stime . __send__ ( op , x ) , cutime . __send__ ( op , x ) , cstime . __send__ ( op , x ) , real . __send__ ( op , x ) ) end end | Returns a new Tms object obtained by memberwise operation + op + of the individual times for this Tms object with those of the other Tms object . |
1,406 | def config_option ( name , default_value , options = { } ) compiler = options . fetch ( :compiler_option , nil ) valid_values = options . fetch ( :valid_values , [ true , false ] ) config_options [ name ] = { default : default_value , compiler : compiler } define_singleton_method ( name ) { config . fetch ( name , default_value ) } define_singleton_method ( "#{name}=" ) do | value | unless valid_values . any? { | valid_value | valid_value === value } raise ArgumentError , "Not a valid value for option #{self}.#{name}, provided #{value.inspect}. " "Must be #{valid_values.inspect} === #{value.inspect}" end config [ name ] = value end end | Defines a new configuration option |
1,407 | def read_float s = read_string ( cache : false ) result = if s == 'nan' 0.0 / 0 elsif s == 'inf' 1.0 / 0 elsif s == '-inf' - 1.0 / 0 else s . to_f end @object_cache << result result end | Reads and returns Float from an input stream |
1,408 | def read_bignum sign = read_char == '-' ? - 1 : 1 size = read_fixnum * 2 result = 0 ( 0 ... size ) . each do | exp | result += read_char . ord * 2 ** ( exp * 8 ) end result = result . to_i * sign @object_cache << result result end | Reads and returns Bignum from an input stream |
1,409 | def read_regexp string = read_string ( cache : false ) options = read_byte result = Regexp . new ( string , options ) @object_cache << result result end | Reads and returns Regexp from an input stream |
1,410 | def read_struct klass_name = read ( cache : false ) klass = safe_const_get ( klass_name ) attributes = read_hash ( cache : false ) args = attributes . values_at ( * klass . members ) result = klass . new ( * args ) @object_cache << result result end | Reads and returns a Struct from an input stream |
1,411 | def read_class klass_name = read_string ( cache : false ) result = safe_const_get ( klass_name ) unless result . class == Class raise ArgumentError , "#{klass_name} does not refer to a Class" end @object_cache << result result end | Reads and returns a Class from an input stream |
1,412 | def read_module mod_name = read_string ( cache : false ) result = safe_const_get ( mod_name ) unless result . class == Module raise ArgumentError , "#{mod_name} does not refer to a Module" end @object_cache << result result end | Reads and returns a Module from an input stream |
1,413 | def read_object klass_name = read ( cache : false ) klass = safe_const_get ( klass_name ) object = klass . allocate @object_cache << object ivars = read_hash ( cache : false ) ivars . each do | name , value | if name [ 0 ] == '@' object . instance_variable_set ( name , value ) else ` ` end end object end | Reads and returns an abstract object from an input stream |
1,414 | def read_extended_object mod = safe_const_get ( read ) object = read object . extend ( mod ) object end | Reads an object that was dynamically extended before marshaling like |
1,415 | def run ( & block ) start begin yield rescue Exception ObjectSpace . trace_object_allocations_stop GC . enable raise else stop end end | Collects object allocation and memory of ruby code inside of passed block . |
1,416 | def object_list ( generation ) rvalue_size = GC :: INTERNAL_CONSTANTS [ :RVALUE_SIZE ] helper = Helpers . new result = StatHash . new . compare_by_identity ObjectSpace . each_object do | obj | next unless ObjectSpace . allocation_generation ( obj ) == generation file = ObjectSpace . allocation_sourcefile ( obj ) || "(no name)" next if @ignore_files && @ignore_files =~ file next if @allow_files && ! ( @allow_files =~ file ) klass = obj . class rescue nil unless Class === klass klass = Kernel . instance_method ( :class ) . bind ( obj ) . call end next if @trace && ! trace . include? ( klass ) begin line = ObjectSpace . allocation_sourceline ( obj ) location = helper . lookup_location ( file , line ) class_name = helper . lookup_class_name ( klass ) gem = helper . guess_gem ( file ) memsize = ObjectSpace . memsize_of ( obj ) string = klass == String ? helper . lookup_string ( obj ) : nil memsize = rvalue_size if memsize > 100_000_000_000 result [ obj . __id__ ] = MemoryProfiler :: Stat . new ( class_name , gem , file , location , memsize , string ) rescue end end result end | Iterates through objects in memory of a given generation . Stores results along with meta data of objects collected . |
1,417 | def pretty_print ( io = $stdout , ** options ) return io . pp_object ( self ) if defined? ( PP ) && io . is_a? ( PP ) io = File . open ( options [ :to_file ] , "w" ) if options [ :to_file ] color_output = options . fetch ( :color_output ) { io . respond_to? ( :isatty ) && io . isatty } @colorize = color_output ? Polychrome . new : Monochrome . new if options [ :scale_bytes ] total_allocated_output = scale_bytes ( total_allocated_memsize ) total_retained_output = scale_bytes ( total_retained_memsize ) else total_allocated_output = "#{total_allocated_memsize} bytes" total_retained_output = "#{total_retained_memsize} bytes" end io . puts "Total allocated: #{total_allocated_output} (#{total_allocated} objects)" io . puts "Total retained: #{total_retained_output} (#{total_retained} objects)" if options [ :detailed_report ] != false io . puts TYPES . each do | type | METRICS . each do | metric | NAMES . each do | name | scale_data = metric == "memory" && options [ :scale_bytes ] dump "#{type} #{metric} by #{name}" , self . send ( "#{type}_#{metric}_by_#{name}" ) , io , scale_data end end end end io . puts dump_strings ( io , "Allocated" , strings_allocated , limit : options [ :allocated_strings ] ) io . puts dump_strings ( io , "Retained" , strings_retained , limit : options [ :retained_strings ] ) io . close if io . is_a? File end | Output the results of the report |
1,418 | def execute return unless @start && @limit @next_to_insert = @start while @next_to_insert <= @limit stride = @throttler . stride affected_rows = @connection . update ( copy ( bottom , top ( stride ) ) ) if @throttler && affected_rows > 0 @throttler . run end @printer . notify ( bottom , @limit ) @next_to_insert = top ( stride ) + 1 break if @start == @limit end @printer . end end | Copy from origin to destination in chunks of size stride . Use the throttler class to sleep between each stride . |
1,419 | def rename_column ( old , nu ) col = @origin . columns [ old . to_s ] definition = col [ :type ] definition += ' NOT NULL' unless col [ :is_nullable ] definition += " DEFAULT #{@connection.quote(col[:column_default])}" if col [ :column_default ] ddl ( 'alter table `%s` change column `%s` `%s` %s' % [ @name , old , nu , definition ] ) @renames [ old . to_s ] = nu . to_s end | Rename an existing column . |
1,420 | def remove_index ( columns , index_name = nil ) columns = [ columns ] . flatten . map ( & :to_sym ) from_origin = @origin . indices . find { | _ , cols | cols . map ( & :to_sym ) == columns } index_name ||= from_origin [ 0 ] unless from_origin . nil? index_name ||= idx_name ( @origin . name , columns ) ddl ( 'drop index `%s` on `%s`' % [ index_name , @name ] ) end | Remove an index from a table |
1,421 | def setup_queues logger . info 'setting up queues' vetted = @consumers . reject { | c | group_configured? && group_restricted? ( c ) } vetted . each do | c | setup_queue ( c ) end end | Set up the queues for each of the worker's consumers . |
1,422 | def setup_queue ( consumer ) logger . info "setting up queue: #{consumer.get_queue_name}" queue = @broker . queue ( consumer . get_queue_name , consumer . get_arguments ) @broker . bind_queue ( queue , consumer . routing_keys ) queue . subscribe ( consumer_tag : unique_consumer_tag , manual_ack : true ) do | * args | delivery_info , properties , payload = Hutch :: Adapter . decode_message ( * args ) handle_message ( consumer , delivery_info , properties , payload ) end end | Bind a consumer's routing keys to its queue and set up a subscription to receive messages sent to the queue . |
1,423 | def handle_message ( consumer , delivery_info , properties , payload ) serializer = consumer . get_serializer || Hutch :: Config [ :serializer ] logger . debug { spec = serializer . binary? ? "#{payload.bytesize} bytes" : "#{payload}" "message(#{properties.message_id || '-'}): " + "routing key: #{delivery_info.routing_key}, " + "consumer: #{consumer}, " + "payload: #{spec}" } message = Message . new ( delivery_info , properties , payload , serializer ) consumer_instance = consumer . new . tap { | c | c . broker , c . delivery_info = @broker , delivery_info } with_tracing ( consumer_instance ) . handle ( message ) @broker . ack ( delivery_info . delivery_tag ) rescue => ex acknowledge_error ( delivery_info , properties , @broker , ex ) handle_error ( properties , payload , consumer , ex ) end | Called internally when a new message comes in from RabbitMQ . Responsible for wrapping up the message and passing it to the consumer . |
1,424 | def set_up_api_connection logger . info "connecting to rabbitmq HTTP API (#{api_config.sanitized_uri})" with_authentication_error_handler do with_connection_error_handler do @api_client = CarrotTop . new ( host : api_config . host , port : api_config . port , user : api_config . username , password : api_config . password , ssl : api_config . ssl ) @api_client . exchanges end end end | Set up the connection to the RabbitMQ management API . Unfortunately this is necessary to do a few things that are impossible over AMQP . E . g . listing queues and bindings . |
1,425 | def bindings results = Hash . new { | hash , key | hash [ key ] = [ ] } api_client . bindings . each do | binding | next if binding [ 'destination' ] == binding [ 'routing_key' ] next unless binding [ 'source' ] == @config [ :mq_exchange ] next unless binding [ 'vhost' ] == @config [ :mq_vhost ] results [ binding [ 'destination' ] ] << binding [ 'routing_key' ] end results end | Return a mapping of queue names to the routing keys they're bound to . |
1,426 | def unbind_redundant_bindings ( queue , routing_keys ) return unless http_api_use_enabled? bindings . each do | dest , keys | next unless dest == queue . name keys . reject { | key | routing_keys . include? ( key ) } . each do | key | logger . debug "removing redundant binding #{queue.name} < queue . unbind ( exchange , routing_key : key ) end end end | Find the existing bindings and unbind any redundant bindings |
1,427 | def bind_queue ( queue , routing_keys ) unbind_redundant_bindings ( queue , routing_keys ) routing_keys . each do | routing_key | logger . debug "creating binding #{queue.name} < queue . bind ( exchange , routing_key : routing_key ) end end | Bind a queue to the broker's exchange on the routing keys provided . Any existing bindings on the queue that aren't present in the array of routing keys will be unbound . |
1,428 | def run ( argv = ARGV ) Hutch :: Config . initialize parse_options ( argv ) daemonise_process write_pid if Hutch :: Config . pidfile Hutch . logger . info "hutch booted with pid #{::Process.pid}" if load_app && start_work_loop == :success Hutch . logger . info 'hutch shut down gracefully' exit 0 else Hutch . logger . info 'hutch terminated due to an error' exit 1 end end | Run a Hutch worker with the command line interface . |
1,429 | def contains? ( point ) point = Geokit :: LatLng . normalize ( point ) res = point . lat > @sw . lat && point . lat < @ne . lat if crosses_meridian? res &= point . lng < @ne . lng || point . lng > @sw . lng else res &= point . lng < @ne . lng && point . lng > @sw . lng end res end | Returns true if the bounds contain the passed point . allows for bounds which cross the meridian |
1,430 | def to_geocodeable_s a = [ street_address , district , city , state , zip , country_code ] . compact a . delete_if { | e | ! e || e == '' } a . join ( ', ' ) end | Returns a comma - delimited string consisting of the street address city state zip and country code . Only includes those attributes that are non - blank . |
1,431 | def build_hash_tree ( tree_scope ) tree = ActiveSupport :: OrderedHash . new id_to_hash = { } tree_scope . each do | ea | h = id_to_hash [ ea . id ] = ActiveSupport :: OrderedHash . new ( id_to_hash [ ea . _ct_parent_id ] || tree ) [ ea ] = h end tree end | Builds nested hash structure using the scope returned from the passed in scope |
1,432 | def find_or_create_by_path ( path , attributes = { } ) subpath = _ct . build_ancestry_attr_path ( path , attributes ) return self if subpath . empty? found = find_by_path ( subpath , attributes ) return found if found attrs = subpath . shift _ct . with_advisory_lock do child = self . children . where ( attrs ) . first || begin _ct . create ( self . class , attrs ) . tap do | ea | ea . _ct_skip_cycle_detection! self . children << ea end end child . find_or_create_by_path ( subpath , attributes ) end end | Find or create a descendant node whose + ancestry_path + will be self . ancestry_path + path |
1,433 | def iconify_auto_link ( field , show_link = true ) if field . is_a? Hash options = field [ :config ] . separator_options || { } text = field [ :value ] . to_sentence ( options ) else text = field end auto_link ( html_escape ( text ) ) do | value | "<span class='glyphicon glyphicon-new-window'></span>#{(' ' + value) if show_link}" end end | Uses Rails auto_link to add links to fields |
1,434 | def sorted_users users = [ ] :: User . find_each do | user | users . push ( UserRecord . new ( user . id , user . user_key , date_since_last_cache ( user ) ) ) end users . sort_by ( & :last_stats_update ) end | Returns an array of users sorted by the date of their last stats update . Users that have not been recently updated will be at the top of the array . |
1,435 | def rescue_and_retry ( fail_message ) Retriable . retriable ( retry_options ) do return yield end rescue StandardError => exception log_message fail_message log_message "Last exception #{exception}" end | This method never fails . It tries multiple times and finally logs the exception |
1,436 | def destroy Hyrax :: Actors :: LeaseActor . new ( curation_concern ) . destroy flash [ :notice ] = curation_concern . lease_history . last if curation_concern . work? && curation_concern . file_sets . present? redirect_to confirm_permission_path else redirect_to edit_lease_path end end | Removes a single lease |
1,437 | def bytes return 0 if member_object_ids . empty? raise "Collection must be saved to query for bytes" if new_record? member_object_ids . collect { | work_id | size_for_work ( work_id ) } . sum end | Compute the sum of each file in the collection using Solr to avoid having to access Fedora |
1,438 | def size_for_work ( work_id ) argz = { fl : "id, #{file_size_field}" , fq : "{!join from=#{member_ids_field} to=id}id:#{work_id}" } files = :: FileSet . search_with_conditions ( { } , argz ) files . reduce ( 0 ) { | sum , f | sum + f [ file_size_field ] . to_i } end | Calculate the size of all the files in the work |
1,439 | def search_results_with_work_count ( access , join_field : "isPartOf_ssim" ) admin_sets = search_results ( access ) ids = admin_sets . map ( & :id ) . join ( ',' ) query = "{!terms f=#{join_field}}#{ids}" results = ActiveFedora :: SolrService . instance . conn . get ( ActiveFedora :: SolrService . select_path , params : { fq : query , rows : 0 , 'facet.field' => join_field } ) counts = results [ 'facet_counts' ] [ 'facet_fields' ] [ join_field ] . each_slice ( 2 ) . to_h file_counts = count_files ( admin_sets ) admin_sets . map do | admin_set | SearchResultForWorkCount . new ( admin_set , counts [ admin_set . id ] . to_i , file_counts [ admin_set . id ] . to_i ) end end | This performs a two pass query first getting the AdminSets and then getting the work and file counts |
1,440 | def count_files ( admin_sets ) file_counts = Hash . new ( 0 ) admin_sets . each do | admin_set | query = "{!join from=file_set_ids_ssim to=id}isPartOf_ssim:#{admin_set.id}" file_results = ActiveFedora :: SolrService . instance . conn . get ( ActiveFedora :: SolrService . select_path , params : { fq : [ query , "has_model_ssim:FileSet" ] , rows : 0 } ) file_counts [ admin_set . id ] = file_results [ 'response' ] [ 'numFound' ] end file_counts end | Count number of files from admin set works |
1,441 | def register_curation_concern ( * curation_concern_types ) Array . wrap ( curation_concern_types ) . flatten . compact . each do | cc_type | @registered_concerns << cc_type unless @registered_concerns . include? ( cc_type ) end end | Registers the given curation concern model in the configuration |
1,442 | def select_options ( access = :deposit ) @service . search_results ( access ) . map do | admin_set | [ admin_set . to_s , admin_set . id , data_attributes ( admin_set ) ] end end | Return AdminSet selectbox options based on access type |
1,443 | def data_attributes ( admin_set ) permission_template = PermissionTemplate . find_by ( source_id : admin_set . id ) return { } unless permission_template attributes_for ( permission_template : permission_template ) end | Create a hash of HTML5 data attributes . These attributes are added to select_options and later utilized by Javascript to limit new Work options based on AdminSet selected |
1,444 | def sharing? ( permission_template : ) wf = workflow ( permission_template : permission_template ) return false unless wf wf . allows_access_grant? end | Does the workflow for the currently selected permission template allow sharing? |
1,445 | def send_local_content response . headers [ 'Accept-Ranges' ] = 'bytes' if request . head? local_content_head elsif request . headers [ 'Range' ] send_range_for_local_file else send_local_file_contents end end | Handle the HTTP show request |
1,446 | def send_range_for_local_file _ , range = request . headers [ 'Range' ] . split ( 'bytes=' ) from , to = range . split ( '-' ) . map ( & :to_i ) to = local_file_size - 1 unless to length = to - from + 1 response . headers [ 'Content-Range' ] = "bytes #{from}-#{to}/#{local_file_size}" response . headers [ 'Content-Length' ] = length . to_s self . status = 206 prepare_local_file_headers send_data IO . binread ( file , length , from ) , local_derivative_download_options . merge ( status : status ) end | render an HTTP Range response |
1,447 | def create admin_set . creator = [ creating_user . user_key ] if creating_user admin_set . save . tap do | result | if result ActiveRecord :: Base . transaction do permission_template = create_permission_template workflow = create_workflows_for ( permission_template : permission_template ) create_default_access_for ( permission_template : permission_template , workflow : workflow ) if admin_set . default_set? end end end end | Creates an admin set setting the creator and the default access controls . |
1,448 | def create_default_access_for ( permission_template : , workflow : ) permission_template . access_grants . create ( agent_type : 'group' , agent_id : :: Ability . registered_group_name , access : Hyrax :: PermissionTemplateAccess :: DEPOSIT ) deposit = Sipity :: Role [ Hyrax :: RoleRegistry :: DEPOSITING ] workflow . update_responsibilities ( role : deposit , agents : Hyrax :: Group . new ( 'registered' ) ) end | Gives deposit access to registered users to default AdminSet |
1,449 | def flatten ( hash ) hash . each_with_object ( { } ) do | ( key , value ) , h | if value . instance_of? ( Hash ) value . map do | k , v | h [ "#{key}.#{k}" ] = v end else h [ key ] = value end end end | Given a deeply nested hash return a single hash |
1,450 | def file_set_ids @file_set_ids ||= begin ActiveFedora :: SolrService . query ( "{!field f=has_model_ssim}FileSet" , rows : 10_000 , fl : ActiveFedora . id_field , fq : "{!join from=ordered_targets_ssim to=id}id:\"#{id}/list_source\"" ) . flat_map { | x | x . fetch ( ActiveFedora . id_field , [ ] ) } end end | These are the file sets that belong to this work but not necessarily in order . Arbitrarily maxed at 10 thousand ; had to specify rows due to solr s default of 10 |
1,451 | def namespaced_resources ( target , opts = { } , & block ) if target . include? ( '/' ) the_namespace = target [ 0 .. target . index ( '/' ) - 1 ] new_target = target [ target . index ( '/' ) + 1 .. - 1 ] namespace the_namespace , ROUTE_OPTIONS . fetch ( the_namespace , { } ) do namespaced_resources ( new_target , opts , & block ) end else resources target , opts do yield if block_given? end end end | Namespaces routes appropriately |
1,452 | def manifest_metadata metadata = [ ] Hyrax . config . iiif_metadata_fields . each do | field | metadata << { 'label' => I18n . t ( "simple_form.labels.defaults.#{field}" ) , 'value' => Array . wrap ( send ( field ) . map { | f | Loofah . fragment ( f . to_s ) . scrub! ( :whitewash ) . to_s } ) } end metadata end | IIIF metadata for inclusion in the manifest Called by the iiif_manifest gem to add metadata |
1,453 | def authorized_item_ids @member_item_list_ids ||= begin items = ordered_ids items . delete_if { | m | ! current_ability . can? ( :read , m ) } if Flipflop . hide_private_items? items end end | list of item ids to display is based on ordered_ids |
1,454 | def paginated_item_list ( page_array : ) Kaminari . paginate_array ( page_array , total_count : page_array . size ) . page ( current_page ) . per ( rows_from_params ) end | Uses kaminari to paginate an array to avoid need for solr documents for items here |
1,455 | def single_item_action_form_fields ( form , document , action ) render 'hyrax/dashboard/collections/single_item_action_fields' , form : form , document : document , action : action end | add hidden fields to a form for performing an action on a single document on a collection |
1,456 | def show @user_collections = user_collections respond_to do | wants | wants . html { presenter && parent_presenter } wants . json do @curation_concern = _curation_concern_type . find ( params [ :id ] ) unless curation_concern authorize! :show , @curation_concern render :show , status : :ok end additional_response_formats ( wants ) wants . ttl do render body : presenter . export_as_ttl , content_type : 'text/turtle' end wants . jsonld do render body : presenter . export_as_jsonld , content_type : 'application/ld+json' end wants . nt do render body : presenter . export_as_nt , content_type : 'application/n-triples' end end end | Finds a solr document matching the id and sets |
1,457 | def search_result_document ( search_params ) _ , document_list = search_results ( search_params ) return document_list . first unless document_list . empty? document_not_found! end | Only returns unsuppressed documents the user has read access to |
1,458 | def attributes_for_actor raw_params = params [ hash_key_for_curation_concern ] attributes = if raw_params work_form_service . form_class ( curation_concern ) . model_attributes ( raw_params ) else { } end uploaded_files = params . fetch ( :uploaded_files , [ ] ) selected_files = params . fetch ( :selected_files , { } ) . values browse_everything_urls = uploaded_files & selected_files . map { | f | f [ :url ] } browse_everything_files = selected_files . select { | v | uploaded_files . include? ( v [ :url ] ) } attributes [ :remote_files ] = browse_everything_files attributes [ :uploaded_files ] = uploaded_files - browse_everything_urls attributes end | Add uploaded_files to the parameters received by the actor . |
1,459 | def build_rendering ( file_set_id ) file_set_document = query_for_rendering ( file_set_id ) label = file_set_document . label . present? ? ": #{file_set_document.label}" : '' mime = file_set_document . mime_type . present? ? file_set_document . mime_type : I18n . t ( "hyrax.manifest.unknown_mime_text" ) { '@id' => Hyrax :: Engine . routes . url_helpers . download_url ( file_set_document . id , host : @hostname ) , 'format' => mime , 'label' => I18n . t ( "hyrax.manifest.download_text" ) + label } end | Build a rendering hash |
1,460 | def select_user ( user , role = 'Depositor' ) first ( 'a.select2-choice' ) . click find ( '.select2-input' ) . set ( user . user_key ) sleep 1 first ( 'div.select2-result-label' ) . click within ( 'div.add-users' ) do select ( role ) find ( 'input.edit-collection-add-sharing-button' ) . click end end | For use with javascript user selector that allows for searching for an existing user and granting them permission to an object . |
1,461 | def select_collection ( collection ) first ( 'a.select2-choice' ) . click find ( '.select2-input' ) . set ( collection . title . first ) expect ( page ) . to have_css ( 'div.select2-result-label' ) first ( 'div.select2-result-label' ) . click first ( '[data-behavior~=add-relationship]' ) . click within ( '[data-behavior~=collection-relationships]' ) do within ( 'table.table.table-striped' ) do expect ( page ) . to have_content ( collection . title . first ) end end end | For use with javascript collection selector that allows for searching for an existing collection from works relationship tab . Adds the collection and validates that the collection is listed in the Collection Relationship table once added . |
1,462 | def select_member_of_collection ( collection ) find ( '#s2id_member_of_collection_ids' ) . click find ( '.select2-input' ) . set ( collection . title . first ) select2_results = [ ] time_elapsed = 0 while select2_results . empty? && time_elapsed < 30 begin_time = Time . now . to_f doc = Nokogiri :: XML . parse ( page . body ) select2_results = doc . xpath ( '//html:li[contains(@class,"select2-result")]' , html : 'http://www.w3.org/1999/xhtml' ) end_time = Time . now . to_f time_elapsed += end_time - begin_time end expect ( page ) . to have_css ( '.select2-result' ) within ".select2-result" do find ( "span" , text : collection . title . first ) . click end end | For use with javascript collection selector that allows for searching for an existing collection from add to collection modal . Does not save the selection . The calling test is expected to click Save and validate the collection membership was added to the work . |
1,463 | def display_image return nil unless :: FileSet . exists? ( id ) && solr_document . image? && current_ability . can? ( :read , id ) original_file = :: FileSet . find ( id ) . original_file url = Hyrax . config . iiif_image_url_builder . call ( original_file . id , request . base_url , Hyrax . config . iiif_image_size_default ) IIIFManifest :: DisplayImage . new ( url , width : 640 , height : 480 , iiif_endpoint : iiif_endpoint ( original_file . id ) ) end | Creates a display image only where FileSet is an image . |
1,464 | def fixity_check_file_version ( file_id , version_uri ) latest_fixity_check = ChecksumAuditLog . logs_for ( file_set . id , checked_uri : version_uri ) . first return latest_fixity_check unless needs_fixity_check? ( latest_fixity_check ) if async_jobs FixityCheckJob . perform_later ( version_uri . to_s , file_set_id : file_set . id , file_id : file_id ) else FixityCheckJob . perform_now ( version_uri . to_s , file_set_id : file_set . id , file_id : file_id ) end end | Retrieve or generate the fixity check for a specific version of a file |
1,465 | def needs_fixity_check? ( latest_fixity_check ) return true unless latest_fixity_check unless latest_fixity_check . updated_at logger . warn "***FIXITY*** problem with fixity check log! Latest Fixity check is not nil, but updated_at is not set #{latest_fixity_check}" return true end days_since_last_fixity_check ( latest_fixity_check ) >= max_days_between_fixity_checks end | Check if time since the last fixity check is greater than the maximum days allowed between fixity checks |
1,466 | def destroy Hyrax :: Actors :: EmbargoActor . new ( curation_concern ) . destroy flash [ :notice ] = curation_concern . embargo_history . last if curation_concern . work? && curation_concern . file_sets . present? redirect_to confirm_permission_path else redirect_to edit_embargo_path end end | Removes a single embargo |
1,467 | def update filter_docs_with_edit_access! copy_visibility = params [ :embargoes ] . values . map { | h | h [ :copy_visibility ] } ActiveFedora :: Base . find ( batch ) . each do | curation_concern | Hyrax :: Actors :: EmbargoActor . new ( curation_concern ) . destroy if curation_concern . file_set? curation_concern . visibility = curation_concern . to_solr [ "visibility_after_embargo_ssim" ] curation_concern . save! elsif copy_visibility . include? ( curation_concern . id ) curation_concern . copy_visibility_to_files end end redirect_to embargoes_path , notice : t ( '.embargo_deactivated' ) end | Updates a batch of embargos |
1,468 | def index_workflow_fields ( solr_document ) return unless object . persisted? entity = PowerConverter . convert_to_sipity_entity ( object ) return if entity . nil? solr_document [ workflow_role_field ] = workflow_roles ( entity ) . map { | role | "#{entity.workflow.permission_template.source_id}-#{entity.workflow.name}-#{role}" } solr_document [ workflow_state_name_field ] = entity . workflow_state . name if entity . workflow_state end | Write the workflow roles and state so one can see where the document moves to next |
1,469 | def add_workflow_responsibilities ( role , agents ) Hyrax :: Workflow :: PermissionGenerator . call ( roles : role , workflow : self , agents : agents ) end | Give workflow responsibilities to the provided agents for the given role
1,470 | def remove_workflow_responsibilities ( role , allowed_agents ) wf_role = Sipity :: WorkflowRole . find_by ( workflow : self , role_id : role ) wf_role . workflow_responsibilities . where . not ( agent : allowed_agents ) . destroy_all end | Find any workflow_responsibilities held by agents not in the allowed_agents and remove them |
1,471 | def attribute_to_html ( field , options = { } ) unless respond_to? ( field ) Rails . logger . warn ( "#{self.class} attempted to render #{field}, but no method exists with that name." ) return end if options [ :html_dl ] renderer_for ( field , options ) . new ( field , send ( field ) , options ) . render_dl_row else renderer_for ( field , options ) . new ( field , send ( field ) , options ) . render end end | Present the attribute as an HTML table row or dl row . |
1,472 | def append_to_solr_doc ( solr_doc , solr_field_key , field_info , val ) return super unless object . controlled_properties . include? ( solr_field_key . to_sym ) case val when ActiveTriples :: Resource append_label_and_uri ( solr_doc , solr_field_key , field_info , val ) when String append_label ( solr_doc , solr_field_key , field_info , val ) else raise ArgumentError , "Can't handle #{val.class}" end end | We're overriding the default indexer in order to index the RDF labels . In order for this to be called you must specify at least one default indexer on the property .
1,473 | def fetch_external object . controlled_properties . each do | property | object [ property ] . each do | value | resource = value . respond_to? ( :resource ) ? value . resource : value next unless resource . is_a? ( ActiveTriples :: Resource ) next if value . is_a? ( ActiveFedora :: Base ) fetch_with_persistence ( resource ) end end end | Grab the labels for controlled properties from the remote sources |
1,474 | def append_label ( solr_doc , solr_field_key , field_info , val ) ActiveFedora :: Indexing :: Inserter . create_and_insert_terms ( solr_field_key , val , field_info . behaviors , solr_doc ) ActiveFedora :: Indexing :: Inserter . create_and_insert_terms ( "#{solr_field_key}_label" , val , field_info . behaviors , solr_doc ) end | Use this method to append a string value from a controlled vocabulary field to the solr document . It just puts a copy into the corresponding label field |
1,475 | def include_depositor_facet ( solr_parameters ) solr_parameters [ :" " ] . concat ( [ DepositSearchBuilder . depositor_field ] ) solr_parameters [ :" " ] = :: User . count solr_parameters [ :rows ] = 0 end | includes the depositor_facet to get information on deposits . use caution when combining this with other searches as it sets the rows to zero to just get the facet information |
1,476 | def show user = :: User . from_url_component ( params [ :id ] ) return redirect_to root_path , alert : "User '#{params[:id]}' does not exist" if user . nil? @presenter = Hyrax :: UserProfilePresenter . new ( user , current_ability ) end | Display user profile |
1,477 | def query ( query , args = { } ) args [ :q ] = query args [ :qt ] = 'standard' conn = ActiveFedora :: SolrService . instance . conn result = conn . post ( 'select' , data : args ) result . fetch ( 'response' ) . fetch ( 'docs' ) end | Query solr using POST so that the query doesn't get too large for a URI
1,478 | def push ( value ) RedisEventStore . instance . lpush ( @key , value ) rescue Redis :: CommandError , Redis :: CannotConnectError RedisEventStore . logger . error ( "unable to push event: #{@key}" ) nil end | Adds a value to the end of a list identified by key |
1,479 | def rollup_messages [ ] . tap do | messages | messages << message if message . present? children &. pluck ( :message ) &. uniq &. each do | child_message | messages << child_message if child_message . present? end end end | Roll up messages for an operation and all of its children |
1,480 | def fail! ( message = nil ) run_callbacks :failure do update ( status : FAILURE , message : message ) parent &. rollup_status end end | Mark this operation as a FAILURE . If this is a child operation roll up to the parent any failures . |
1,481 | def pending_job ( job ) update ( job_class : job . class . to_s , job_id : job . job_id , status : Hyrax :: Operation :: PENDING ) end | Sets the operation status to PENDING |
1,482 | def files result = form . select_files . map { | label , id | { id : id , text : label } } render json : result end | Renders a JSON response with a list of files in this admin set . This is used by the edit form to populate the thumbnail_id dropdown |
1,483 | def id id_attr = resource [ :id ] return id_attr . to_s if id_attr . present? && id_attr . is_a? ( :: Valkyrie :: ID ) && ! id_attr . blank? return "" unless resource . respond_to? ( :alternate_ids ) resource . alternate_ids . first . to_s end | In the context of a Valkyrie resource prefer to use the id if it is provided and fallback to the first of the alternate_ids . If all else fails then the id hasn't been minted and shouldn't yet be set .
1,484 | def add_access_control_attributes ( af_object ) af_object . visibility = attributes [ :visibility ] unless attributes [ :visibility ] . blank? af_object . read_users = attributes [ :read_users ] unless attributes [ :read_users ] . blank? af_object . edit_users = attributes [ :edit_users ] unless attributes [ :edit_users ] . blank? af_object . read_groups = attributes [ :read_groups ] unless attributes [ :read_groups ] . blank? af_object . edit_groups = attributes [ :edit_groups ] unless attributes [ :edit_groups ] . blank? end | Add attributes from resource which aren't AF properties into af_object
1,485 | def managed_access return I18n . t ( 'hyrax.dashboard.my.collection_list.managed_access.manage' ) if current_ability . can? ( :edit , solr_document ) return I18n . t ( 'hyrax.dashboard.my.collection_list.managed_access.deposit' ) if current_ability . can? ( :deposit , solr_document ) return I18n . t ( 'hyrax.dashboard.my.collection_list.managed_access.view' ) if current_ability . can? ( :read , solr_document ) '' end | For the Managed Collections tab determine the label to use for the level of access the user has for this admin set . Checks from most permissive to most restrictive . |
1,486 | def gid URI :: GID . build ( app : GlobalID . app , model_name : model_name . name . parameterize . to_sym , model_id : id ) . to_s if id end | Return the Global Identifier for this collection type . |
1,487 | def release_date return Time . zone . today if release_no_delay? return self [ :release_date ] unless release_max_embargo? Time . zone . today + RELEASE_EMBARGO_PERIODS . fetch ( release_period ) . months end | Override release_date getter to return a dynamically calculated date of release based on release requirements . Returns embargo date when release_max_embargo? == true . Returns today's date when release_no_delay? == true .
1,488 | def download_users ( id ) doc = permissions_doc ( id ) return [ ] if doc . nil? users = Array ( doc [ self . class . read_user_field ] ) + Array ( doc [ self . class . edit_user_field ] ) Rails . logger . debug ( "[CANCAN] download_users: #{users.inspect}" ) users end | Grant all users with read or edit access permission to download |
1,489 | def trophy_abilities can [ :create , :destroy ] , Trophy do | t | doc = ActiveFedora :: Base . search_by_id ( t . work_id , fl : 'depositor_ssim' ) current_user . user_key == doc . fetch ( 'depositor_ssim' ) . first end end | We check based on the depositor because the depositor may not have edit access to the work if it went through a mediated deposit workflow that removes edit access for the depositor . |
1,490 | def user_is_depositor? ( document_id ) Hyrax :: WorkRelation . new . search_with_conditions ( id : document_id , DepositSearchBuilder . depositor_field => current_user . user_key ) . any? end | Returns true if the current user is the depositor of the specified work |
1,491 | def primary_characterization_values ( term ) values = values_for ( term ) values . slice! ( Hyrax . config . fits_message_length , ( values . length - Hyrax . config . fits_message_length ) ) truncate_all ( values ) end | Returns an array of characterization values truncated to 250 characters limited to the maximum number of configured values . |
1,492 | def secondary_characterization_values ( term ) values = values_for ( term ) additional_values = values . slice ( Hyrax . config . fits_message_length , values . length - Hyrax . config . fits_message_length ) return [ ] unless additional_values truncate_all ( additional_values ) end | Returns an array of characterization values truncated to 250 characters that are in excess of the maximum number of configured values . |
1,493 | def search ( options = { } , type = 'people' ) path = "/#{type.to_s}-search" if options . is_a? ( Hash ) fields = options . delete ( :fields ) path += field_selector ( fields ) if fields end options = { :keywords => options } if options . is_a? ( String ) options = format_options_for_query ( options ) result_json = get ( to_uri ( path , options ) ) Mash . from_json ( result_json ) end | Retrieve search results of the given object type |
1,494 | def timestamp value = self [ 'timestamp' ] if value . kind_of? Integer value = value / 1000 if value > 9999999999 Time . at ( value ) else value end end | Convert the timestamp field from a string to a Time object |
1,495 | def identify_manifests ( file_list ) ignored_dirs_with_slash = ignored_dirs . map { | d | if d . end_with? ( "/" ) then d else d + "/" end } allowed_file_list = file_list . reject do | f | ignored_dirs . include? ( f ) || f . start_with? ( * ignored_dirs_with_slash ) end allowed_file_list = allowed_file_list . reject { | f | ignored_files . include? ( f ) } package_managers . map do | pm | allowed_file_list . select do | file_path | pm . match? ( file_path ) end end . flatten . uniq . compact end | This skips manifests sometimes because it doesn't look at file contents and can't establish from only regexes that the thing is a manifest . We exclude rather than include ambiguous filenames because this API is used by libraries . io and we don't want to download all . xml files from GitHub .
1,496 | def to_hash hash = { } self . class . attribute_map . each_pair do | attr , param | value = self . send ( attr ) next if value . nil? hash [ param ] = _to_hash ( value ) end hash end | Returns the object in the form of hash |
1,497 | def _to_hash ( value ) if value . is_a? ( Array ) value . compact . map { | v | _to_hash ( v ) } elsif value . is_a? ( Hash ) { } . tap do | hash | value . each { | k , v | hash [ k ] = _to_hash ( v ) } end elsif value . respond_to? :to_hash value . to_hash else value end end | Outputs non - array value in the form of hash For object use to_hash . Otherwise just return the value |
1,498 | def get_v1_batch_token_from_headers ( headers ) if headers . is_a? ( Hash ) && headers . has_key? ( 'Link' ) match = / / . match ( headers [ 'Link' ] ) if match uri = URI . parse ( match [ 1 ] ) params = CGI . parse ( uri . query ) return params [ 'batch_token' ] [ 0 ] end end end | Extract batch_token from Link header if present |
1,499 | def list_cash_drawer_shifts ( location_id , opts = { } ) data , _status_code , _headers = list_cash_drawer_shifts_with_http_info ( location_id , opts ) return data end | ListCashDrawerShifts Provides the details for all of a location s cash drawer shifts during a date range . The date range you specify cannot exceed 90 days . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.