idx int64 0 24.9k | question stringlengths 68 4.14k | target stringlengths 9 749 |
|---|---|---|
1,200 | def at_exit ( & block ) return proc { } unless running || block_given? @at_exit = block if block_given? @at_exit ||= proc { SimpleCov . result . format! } end | Gets or sets the behavior to process coverage results . |
1,201 | def project_name ( new_name = nil ) return @project_name if defined? ( @project_name ) && @project_name && new_name . nil? @project_name = new_name if new_name . is_a? ( String ) @project_name ||= File . basename ( root . split ( "/" ) . last ) . capitalize . tr ( "_" , " " ) end | Returns the project name - currently assuming the last dirname in the SimpleCov . root is this . |
1,202 | def parse_filter ( filter_argument = nil , & filter_proc ) filter = filter_argument || filter_proc if filter SimpleCov :: Filter . build_filter ( filter ) else raise ArgumentError , "Please specify either a filter or a block to filter with" end end | The actual filter processor . Not meant for direct use |
1,203 | def merge_resultsets ( result1 , result2 ) ( result1 . keys | result2 . keys ) . each_with_object ( { } ) do | filename , merged | file1 = result1 [ filename ] file2 = result2 [ filename ] merged [ filename ] = merge_file_coverage ( file1 , file2 ) end end | Merges two Coverage . result hashes |
1,204 | def load ( name ) name = name . to_sym raise "Could not find SimpleCov Profile called '#{name}'" unless key? ( name ) SimpleCov . configure ( & self [ name ] ) end | Applies the profile of given name on SimpleCov . configure |
1,205 | def build ( subject ) Kernel . format ( HELP_LINK_TEMPLATE , version : Version :: STRING , item : name_to_param ( subject ) ) end | Build link to the documentation about the given subject for the current version of Reek . The subject can be either a smell type like FeatureEnvy or a general subject like Rake Task . |
1,206 | def load_cache begin file = File . join ( "inline" , File . basename ( so_name ) ) if require file then dir = Inline . directory warn "WAR\NING: #{dir} exists but is not being used" if test ?d , dir and $VERBOSE return true end rescue LoadError end return false end | Attempts to load pre - generated code returning true if it succeeds . |
1,207 | def add_type_converter ( type , r2c , c2r ) warn "WAR\NING: overridding #{type} on #{caller[0]}" if @@type_map . has_key? type @@type_map [ type ] = [ r2c , c2r ] end | Registers C type - casts + r2c + and + c2r + for + type + . |
1,208 | def c src , options = { } options = { :expand_types => true , } . merge options self . generate src , options end | Adds a C function to the source including performing automatic type conversion to arguments and the return value . The Ruby method name can be overridden by providing method_name . Unknown type conversions can be extended by using + add_type_converter + . |
1,209 | def c_singleton src , options = { } options = { :expand_types => true , :singleton => true , } . merge options self . generate src , options end | Same as + c + but adds a class function . |
1,210 | def c_raw_singleton src , options = { } options = { :singleton => true , } . merge options self . generate src , options end | Same as + c_raw + but adds a class function . |
1,211 | def reek_of ( smell_type , smell_details = { } , configuration = Configuration :: AppConfiguration . default ) ShouldReekOf . new ( smell_type , smell_details , configuration ) end | Checks the target source code for instances of smell type and returns true only if it can find one of them that matches . |
1,212 | def reek_only_of ( smell_type , configuration = Configuration :: AppConfiguration . default ) ShouldReekOnlyOf . new ( smell_type , configuration ) end | See the documentaton for reek_of . |
1,213 | def build ( exp , parent_exp = nil ) context_processor = "process_#{exp.type}" if context_processor_exists? ( context_processor ) send ( context_processor , exp , parent_exp ) else process exp end current_context end | Processes the given AST memoizes it and returns a tree of nested contexts . |
1,214 | def process ( exp ) exp . children . grep ( AST :: Node ) . each { | child | build ( child , exp ) } end | Handles every node for which we have no context_processor . |
1,215 | def process_def ( exp , parent ) inside_new_context ( current_context . method_context_class , exp , parent ) do increase_statement_count_by ( exp . body ) process ( exp ) end end | Handles def nodes . |
1,216 | def process_send ( exp , _parent ) process ( exp ) case current_context when Context :: ModuleContext handle_send_for_modules exp when Context :: MethodContext handle_send_for_methods exp end end | Handles send nodes a . k . a . method calls . |
1,217 | def process_if ( exp , _parent ) children = exp . children increase_statement_count_by ( children [ 1 ] ) increase_statement_count_by ( children [ 2 ] ) decrease_statement_count process ( exp ) end | Handles if nodes . |
1,218 | def process_rescue ( exp , _parent ) increase_statement_count_by ( exp . children . first ) decrease_statement_count process ( exp ) end | Handles rescue nodes . |
1,219 | def inside_new_context ( klass , * args ) new_context = append_new_context ( klass , * args ) orig , self . current_context = current_context , new_context yield self . current_context = orig end | Stores a reference to the current context creates a nested new one yields to the given block and then restores the previous context . |
1,220 | def append_new_context ( klass , * args ) klass . new ( * args ) . tap do | new_context | new_context . register_with_parent ( current_context ) end end | Appends a new child context to the current context but does not change the current context . |
1,221 | def value ( key , context ) overrides_for ( context ) . each { | conf | return conf [ key ] if conf . key? ( key ) } options . fetch ( key ) end | Retrieves the value if any for the given + key + in the given + context + . |
1,222 | def for_context ( context ) contexts = hash . keys . select { | ckey | context . matches? ( [ ckey ] ) } contexts . map { | exc | hash [ exc ] } end | Find any overrides that match the supplied context |
1,223 | def dress ( sexp , comment_map ) return sexp unless sexp . is_a? :: Parser :: AST :: Node type = sexp . type children = sexp . children . map { | child | dress ( child , comment_map ) } comments = comment_map [ sexp ] klass_map . klass_for ( type ) . new ( type , children , location : sexp . loc , comments : comments ) end | Recursively enhance an AST with type - dependent mixins and comments . |
1,224 | def append_record_to_messages ( op , meta , header , record , msgs ) case op when UPDATE_OP , UPSERT_OP if meta . has_key? ( ID_FIELD ) header [ UPDATE_OP ] = meta msgs << @dump_proc . call ( header ) << BODY_DELIMITER msgs << @dump_proc . call ( update_body ( record , op ) ) << BODY_DELIMITER return true end when CREATE_OP if meta . has_key? ( ID_FIELD ) header [ CREATE_OP ] = meta msgs << @dump_proc . call ( header ) << BODY_DELIMITER msgs << @dump_proc . call ( record ) << BODY_DELIMITER return true end when INDEX_OP header [ INDEX_OP ] = meta msgs << @dump_proc . call ( header ) << BODY_DELIMITER msgs << @dump_proc . call ( record ) << BODY_DELIMITER return true end return false end | append_record_to_messages adds a record to the bulk message payload to be submitted to Elasticsearch . Records that do not include _id field are skipped when write_operation is configured for create or update |
1,225 | def send_bulk ( data , tag , chunk , bulk_message_count , extracted_values , info ) begin log . on_trace { log . trace "bulk request: #{data}" } response = client ( info . host ) . bulk body : data , index : info . index log . on_trace { log . trace "bulk response: #{response}" } if response [ 'errors' ] error = Fluent :: Plugin :: ElasticsearchErrorHandler . new ( self ) error . handle_error ( response , tag , chunk , bulk_message_count , extracted_values ) end rescue RetryStreamError => e emit_tag = @retry_tag ? @retry_tag : tag router . emit_stream ( emit_tag , e . retry_stream ) rescue => e ignore = @ignore_exception_classes . any? { | clazz | e . class <= clazz } log . warn "Exception ignored in tag #{tag}: #{e.class.name} #{e.message}" if ignore @_es = nil if @reconnect_on_error @_es_info = nil if @reconnect_on_error raise UnrecoverableRequestFailure if ignore && @exception_backup raise RecoverableRequestFailure , "could not push logs to Elasticsearch cluster (#{connection_options_description(info.host)}): #{e.message}" unless ignore end end | send_bulk given a specific bulk request the original tag chunk and bulk_message_count |
1,226 | def responders ( * responders ) self . responder = responders . inject ( Class . new ( responder ) ) do | klass , responder | responder = case responder when Module responder when String , Symbol Responders . const_get ( "#{responder.to_s.camelize}Responder" ) else raise "responder has to be a string, a symbol or a module" end klass . send ( :include , responder ) klass end end | Adds the given responders to the current controller s responder allowing you to cherry - pick which responders you want per controller . |
1,227 | def respond_with ( * resources , & block ) if self . class . mimes_for_respond_to . empty? raise "In order to use respond_with, first you need to declare the " "formats your controller responds to in the class level." end mimes = collect_mimes_from_class_level collector = ActionController :: MimeResponds :: Collector . new ( mimes , request . variant ) block . call ( collector ) if block_given? if format = collector . negotiate_format ( request ) _process_format ( format ) options = resources . size == 1 ? { } : resources . extract_options! options = options . clone options [ :default_response ] = collector . response ( options . delete ( :responder ) || self . class . responder ) . call ( self , resources , options ) else raise ActionController :: UnknownFormat end end | For a given controller action respond_with generates an appropriate response based on the mime - type requested by the client . |
1,228 | def collect_mimes_from_class_level action = action_name . to_sym self . class . mimes_for_respond_to . keys . select do | mime | config = self . class . mimes_for_respond_to [ mime ] if config [ :except ] ! config [ :except ] . include? ( action ) elsif config [ :only ] config [ :only ] . include? ( action ) else true end end end | Collect mimes declared in the class method respond_to valid for the current action . |
1,229 | def algolia_reindex ( batch_size = AlgoliaSearch :: IndexSettings :: DEFAULT_BATCH_SIZE , synchronous = false ) return if algolia_without_auto_index_scope algolia_configurations . each do | options , settings | next if algolia_indexing_disabled? ( options ) next if options [ :slave ] || options [ :replica ] master_index = algolia_ensure_init ( options , settings ) master_settings = master_index . get_settings rescue { } master_settings . merge! ( JSON . parse ( settings . to_settings . to_json ) ) master_settings . delete :slaves master_settings . delete 'slaves' master_settings . delete :replicas master_settings . delete 'replicas' index_name = algolia_index_name ( options ) tmp_options = options . merge ( { :index_name => "#{index_name}.tmp" } ) tmp_options . delete ( :per_environment ) tmp_settings = settings . dup tmp_index = algolia_ensure_init ( tmp_options , tmp_settings , master_settings ) algolia_find_in_batches ( batch_size ) do | group | if algolia_conditional_index? ( tmp_options ) group = group . select { | o | algolia_indexable? ( o , tmp_options ) } end objects = group . map { | o | tmp_settings . get_attributes ( o ) . merge 'objectID' => algolia_object_id_of ( o , tmp_options ) } tmp_index . save_objects ( objects ) end move_task = SafeIndex . move_index ( tmp_index . name , index_name ) master_index . wait_task ( move_task [ "taskID" ] ) if synchronous || options [ :synchronous ] end nil end | reindex whole database using a extra temporary index + move operation |
1,230 | def get_settings ( * args ) SafeIndex . log_or_throw ( :get_settings , @raise_on_failure ) do begin @index . get_settings ( * args ) rescue Algolia :: AlgoliaError => e return { } if e . code == 404 raise e end end end | special handling of get_settings to avoid raising errors on 404 |
1,231 | def define_and_assign_attr_accessors_for_each_filter ( fp ) model_class . filterrific_available_filters . each do | filter_name | self . class . send ( :attr_accessor , filter_name ) v = fp [ filter_name ] self . send ( "#{ filter_name }=" , v ) if v . present? end end | Defines attr accessors for each available_filter on self and assigns values based on fp . |
1,232 | def compute_filterrific_params ( model_class , filterrific_params , opts , persistence_id ) opts = { "sanitize_params" => true } . merge ( opts . stringify_keys ) r = ( filterrific_params . presence || ( persistence_id && session [ persistence_id ] . presence ) || opts [ 'default_filter_params' ] || model_class . filterrific_default_filter_params ) . stringify_keys r . slice! ( * opts [ 'available_filters' ] . map ( & :to_s ) ) if opts [ 'available_filters' ] if opts [ "sanitize_params" ] r . each { | k , v | r [ k ] = sanitize_filterrific_param ( r [ k ] ) } end r end | Computes filterrific params using a number of strategies . Limits params to available_filters if given via opts . |
1,233 | def form_for_filterrific ( record , options = { } , & block ) options [ :as ] ||= :filterrific options [ :html ] ||= { } options [ :html ] [ :method ] ||= :get options [ :html ] [ :id ] ||= :filterrific_filter options [ :url ] ||= url_for ( :controller => controller . controller_name , :action => controller . action_name ) form_for ( record , options , & block ) end | Sets all options on form_for to defaults that work with Filterrific |
1,234 | def filterrific_sorting_link_reverse_order ( filterrific , new_sort_key , opts ) new_sort_direction = 'asc' == opts [ :current_sort_direction ] ? 'desc' : 'asc' new_sorting = safe_join ( [ new_sort_key , new_sort_direction ] , '_' ) css_classes = safe_join ( [ opts [ :active_column_class ] , opts [ :html_attrs ] . delete ( :class ) ] . compact , ' ' ) new_filterrific_params = filterrific . to_hash . with_indifferent_access . merge ( opts [ :sorting_scope_name ] => new_sorting ) url_for_attrs = opts [ :url_for_attrs ] . merge ( :filterrific => new_filterrific_params ) link_to ( safe_join ( [ opts [ :label ] , opts [ :current_sort_direction_indicator ] ] , ' ' ) , url_for ( url_for_attrs ) , opts [ :html_attrs ] . reverse_merge ( :class => css_classes , :method => :get , :remote => true ) ) end | Renders HTML to reverse sort order on currently sorted column . |
1,235 | def to_hash { :installer => installer , :installer_arguments => installer_arguments , :iso_path => iso_path , :iso_upload_path => iso_upload_path , :iso_mount_point => iso_mount_point , :auto_update => auto_update , :auto_reboot => auto_reboot , :no_install => no_install , :no_remote => no_remote , :yes => yes } end | explicit hash to get symbols in hash keys |
1,236 | def execute_on_vm ( vm , options ) check_runable_on ( vm ) options = options . clone _method = options . delete ( :_method ) _rebootable = options . delete ( :_rebootable ) options = vm . config . vbguest . to_hash . merge ( options ) machine = VagrantVbguest :: Machine . new ( vm , options ) status = machine . state vm . env . ui . send ( ( :ok == status ? :success : :warn ) , I18n . t ( "vagrant_vbguest.status.#{status}" , machine . info ) ) if _method != :status machine . send ( _method ) end reboot! ( vm , options ) if _rebootable && machine . reboot? rescue VagrantVbguest :: Installer :: NoInstallerFoundError => e vm . env . ui . error e . message end | Executes a task on a specific VM . |
1,237 | def link_to_add_association ( * args , & block ) if block_given? link_to_add_association ( capture ( & block ) , * args ) elsif args . first . respond_to? ( :object ) association = args . second name = I18n . translate ( "cocoon.#{association}.add" , default : I18n . translate ( 'cocoon.defaults.add' ) ) link_to_add_association ( name , * args ) else name , f , association , html_options = * args html_options ||= { } render_options = html_options . delete ( :render_options ) render_options ||= { } override_partial = html_options . delete ( :partial ) wrap_object = html_options . delete ( :wrap_object ) force_non_association_create = html_options . delete ( :force_non_association_create ) || false form_parameter_name = html_options . delete ( :form_name ) || 'f' count = html_options . delete ( :count ) . to_i html_options [ :class ] = [ html_options [ :class ] , "add_fields" ] . compact . join ( ' ' ) html_options [ :' ' ] = association . to_s . singularize html_options [ :' ' ] = association . to_s . pluralize new_object = create_object ( f , association , force_non_association_create ) new_object = wrap_object . call ( new_object ) if wrap_object . respond_to? ( :call ) html_options [ :' ' ] = CGI . escapeHTML ( render_association ( association , f , new_object , form_parameter_name , render_options , override_partial ) . to_str ) . html_safe html_options [ :' ' ] = count if count > 0 link_to ( name , '#' , html_options ) end end | shows a link that will allow to dynamically add a new associated object . |
1,238 | def capture ( options = { } ) if block_given? begin yield rescue Error raise rescue Exception => e capture_type ( e , options ) raise end else install_at_exit_hook ( options ) end end | Capture and process any exceptions from the given block . |
1,239 | def annotate_exception ( exc , options = { } ) notes = ( exc . instance_variable_defined? ( :@__raven_context ) && exc . instance_variable_get ( :@__raven_context ) ) || { } Raven :: Utils :: DeepMergeHash . deep_merge! ( notes , options ) exc . instance_variable_set ( :@__raven_context , notes ) exc end | Provides extra context to the exception prior to it being handled by Raven . An exception can have multiple annotations which are merged together . |
1,240 | def filter_context ( context ) case context when Array context . map { | arg | filter_context ( arg ) } when Hash Hash [ context . map { | key , value | filter_context_hash ( key , value ) } ] else format_globalid ( context ) end end | Once an ActiveJob is queued ActiveRecord references get serialized into some internal reserved keys such as _aj_globalid . |
1,241 | def run_plugins ( safe = false , attribute_filter = nil ) begin @provides_map . all_plugins ( attribute_filter ) . each do | plugin | @runner . run_plugin ( plugin ) end rescue Ohai :: Exceptions :: AttributeNotFound , Ohai :: Exceptions :: DependencyCycle => e logger . error ( "Encountered error while running plugins: #{e.inspect}" ) raise end critical_failed = Ohai :: Config . ohai [ :critical_plugins ] & @runner . failed_plugins unless critical_failed . empty? msg = "The following Ohai plugins marked as critical failed: #{critical_failed}" if @cli logger . error ( msg ) exit ( true ) else raise Ohai :: Exceptions :: CriticalPluginFailure , "#{msg}. Failing Chef run." end end freeze_strings! end | run all plugins or those that match the attribute filter is provided |
1,242 | def json_pretty_print ( item = nil ) FFI_Yajl :: Encoder . new ( pretty : true , validate_utf8 : false ) . encode ( item || @data ) end | Pretty Print this object as JSON |
1,243 | def find_providers_for ( attributes ) plugins = [ ] attributes . each do | attribute | attrs = select_subtree ( @map , attribute ) raise Ohai :: Exceptions :: AttributeNotFound , "No such attribute: \'#{attribute}\'" unless attrs raise Ohai :: Exceptions :: ProviderNotFound , "Cannot find plugin providing attribute: \'#{attribute}\'" unless attrs [ :_plugins ] plugins += attrs [ :_plugins ] end plugins . uniq end | gather plugins providing exactly the attributes listed |
1,244 | def deep_find_providers_for ( attributes ) plugins = [ ] attributes . each do | attribute | attrs = select_subtree ( @map , attribute ) unless attrs attrs = select_closest_subtree ( @map , attribute ) unless attrs raise Ohai :: Exceptions :: AttributeNotFound , "No such attribute: \'#{attribute}\'" end end collect_plugins_in ( attrs , plugins ) end plugins . uniq end | This function is used to fetch the plugins for the attributes specified in the CLI options to Ohai . It first attempts to find the plugins for the attributes or the sub attributes given . If it can t find any it looks for plugins that might provide the parents of a given attribute and returns the first parent found . |
1,245 | def find_closest_providers_for ( attributes ) plugins = [ ] attributes . each do | attribute | parts = normalize_and_validate ( attribute ) raise Ohai :: Exceptions :: AttributeNotFound , "No such attribute: \'#{attribute}\'" unless @map [ parts [ 0 ] ] attrs = select_closest_subtree ( @map , attribute ) raise Ohai :: Exceptions :: ProviderNotFound , "Cannot find plugin providing attribute: \'#{attribute}\'" unless attrs plugins += attrs [ :_plugins ] end plugins . uniq end | This function is used to fetch the plugins from depends languages statements in plugins . It gathers plugins providing each of the attributes listed or the plugins providing the closest parent attribute |
1,246 | def collect_plugins_in ( provides_map , collected ) provides_map . each_key do | plugin | if plugin . eql? ( "_plugins" ) collected . concat ( provides_map [ plugin ] ) else collect_plugins_in ( provides_map [ plugin ] , collected ) end end collected end | Takes a section of the map recursively searches for a _plugins key to find all the plugins in that section of the map . If given the whole map it will find all of the plugins that have at least one provided attribute . |
1,247 | def plugin_files_by_dir ( plugin_dir = Ohai . config [ :plugin_path ] ) Array ( plugin_dir ) . map do | path | if Dir . exist? ( path ) Ohai :: Log . trace ( "Searching for Ohai plugins in #{path}" ) escaped = ChefConfig :: PathHelper . escape_glob_dir ( path ) Dir [ File . join ( escaped , "**" , "*.rb" ) ] else Ohai :: Log . debug ( "The plugin path #{path} does not exist. Skipping..." ) [ ] end end . flatten end | Searches all plugin paths and returns an Array of file paths to plugins |
1,248 | def load_additional ( from ) from = [ Ohai . config [ :plugin_path ] , from ] . flatten plugin_files_by_dir ( from ) . collect do | plugin_file | logger . trace "Loading additional plugin: #{plugin_file}" plugin = load_plugin_class ( plugin_file ) load_v7_plugin ( plugin ) end end | load additional plugins classes from a given directory |
1,249 | def load_plugin ( plugin_path ) plugin_class = load_plugin_class ( plugin_path ) return nil unless plugin_class . kind_of? ( Class ) if plugin_class < Ohai :: DSL :: Plugin :: VersionVII load_v7_plugin ( plugin_class ) else raise Exceptions :: IllegalPluginDefinition , "cannot create plugin of type #{plugin_class}" end end | Load a specified file as an ohai plugin and creates an instance of it . Not used by ohai itself but is used in the specs to load plugins for testing |
1,250 | def get_cycle ( plugins , cycle_start ) cycle = plugins . drop_while { | plugin | ! plugin . eql? ( cycle_start ) } names = [ ] cycle . each { | plugin | names << plugin . name } names end | Given a list of plugins and the first plugin in the cycle returns the list of plugin source files responsible for the cycle . Does not include plugins that aren t a part of the cycle |
1,251 | def contains? ( x , y ) self_area = triangle_area ( @x1 , @y1 , @x2 , @y2 , @x3 , @y3 ) questioned_area = triangle_area ( @x1 , @y1 , @x2 , @y2 , x , y ) + triangle_area ( @x2 , @y2 , @x3 , @y3 , x , y ) + triangle_area ( @x3 , @y3 , @x1 , @y1 , x , y ) questioned_area <= self_area end | A point is inside a triangle if the area of 3 triangles constructed from triangle sides and the given point is equal to the area of triangle . |
1,252 | def play ( opts = { } , & done_proc ) animation = opts [ :animation ] loop = opts [ :loop ] flip = opts [ :flip ] if ! @playing || ( animation != @playing_animation && animation != nil ) || flip != @flip @playing = true @playing_animation = animation || :default frames = @animations [ @playing_animation ] flip_sprite ( flip ) @done_proc = done_proc case frames when Range @first_frame = frames . first || @defaults [ :frame ] @current_frame = frames . first || @defaults [ :frame ] @last_frame = frames . last when Array @first_frame = 0 @current_frame = 0 @last_frame = frames . length - 1 end @loop = loop == true || @defaults [ :loop ] ? true : false set_frame restart_time end end | Play an animation |
1,253 | def set_frame frames = @animations [ @playing_animation ] case frames when Range reset_clipping_rect @clip_x = @current_frame * @clip_width when Array f = frames [ @current_frame ] @clip_x = f [ :x ] || @defaults [ :clip_x ] @clip_y = f [ :y ] || @defaults [ :clip_y ] @clip_width = f [ :width ] || @defaults [ :clip_width ] @clip_height = f [ :height ] || @defaults [ :clip_height ] @frame_time = f [ :time ] || @defaults [ :frame_time ] end end | Set the position of the clipping retangle based on the current frame |
1,254 | def points_distance ( x1 , y1 , x2 , y2 ) Math . sqrt ( ( x1 - x2 ) ** 2 + ( y1 - y2 ) ** 2 ) end | Calculate the distance between two points |
1,255 | def set ( opts ) @title = opts [ :title ] || @title if Color . is_valid? opts [ :background ] @background = Color . new ( opts [ :background ] ) end @icon = opts [ :icon ] || @icon @width = opts [ :width ] || @width @height = opts [ :height ] || @height @fps_cap = opts [ :fps_cap ] || @fps_cap @viewport_width = opts [ :viewport_width ] || @viewport_width @viewport_height = opts [ :viewport_height ] || @viewport_height @resizable = opts [ :resizable ] || @resizable @borderless = opts [ :borderless ] || @borderless @fullscreen = opts [ :fullscreen ] || @fullscreen @highdpi = opts [ :highdpi ] || @highdpi unless opts [ :diagnostics ] . nil? @diagnostics = opts [ :diagnostics ] ext_diagnostics ( @diagnostics ) end end | Set a window attribute |
1,256 | def add ( o ) case o when nil raise Error , "Cannot add '#{o.class}' to window!" when Array o . each { | x | add_object ( x ) } else add_object ( o ) end end | Add an object to the window |
1,257 | def remove ( o ) if o == nil raise Error , "Cannot remove '#{o.class}' from window!" end if i = @objects . index ( o ) @objects . delete_at ( i ) true else false end end | Remove an object from the window |
1,258 | def on ( event , & proc ) unless @events . has_key? event raise Error , "`#{event}` is not a valid event type" end event_id = new_event_key @events [ event ] [ event_id ] = proc EventDescriptor . new ( event , event_id ) end | Set an event handler |
1,259 | def key_callback ( type , key ) key = key . downcase @events [ :key ] . each do | id , e | e . call ( KeyEvent . new ( type , key ) ) end case type when :down @events [ :key_down ] . each do | id , e | e . call ( KeyEvent . new ( type , key ) ) end when :held @events [ :key_held ] . each do | id , e | e . call ( KeyEvent . new ( type , key ) ) end when :up @events [ :key_up ] . each do | id , e | e . call ( KeyEvent . new ( type , key ) ) end end end | Key callback method called by the native and web extentions |
1,260 | def mouse_callback ( type , button , direction , x , y , delta_x , delta_y ) @events [ :mouse ] . each do | id , e | e . call ( MouseEvent . new ( type , button , direction , x , y , delta_x , delta_y ) ) end case type when :down @events [ :mouse_down ] . each do | id , e | e . call ( MouseEvent . new ( type , button , nil , x , y , nil , nil ) ) end when :up @events [ :mouse_up ] . each do | id , e | e . call ( MouseEvent . new ( type , button , nil , x , y , nil , nil ) ) end when :scroll @events [ :mouse_scroll ] . each do | id , e | e . call ( MouseEvent . new ( type , nil , direction , nil , nil , delta_x , delta_y ) ) end when :move @events [ :mouse_move ] . each do | id , e | e . call ( MouseEvent . new ( type , nil , nil , x , y , delta_x , delta_y ) ) end end end | Mouse callback method called by the native and web extentions |
1,261 | def controller_callback ( which , type , axis , value , button ) @events [ :controller ] . each do | id , e | e . call ( ControllerEvent . new ( which , type , axis , value , button ) ) end case type when :axis @events [ :controller_axis ] . each do | id , e | e . call ( ControllerAxisEvent . new ( which , axis , value ) ) end when :button_down @events [ :controller_button_down ] . each do | id , e | e . call ( ControllerButtonEvent . new ( which , button ) ) end when :button_up @events [ :controller_button_up ] . each do | id , e | e . call ( ControllerButtonEvent . new ( which , button ) ) end end end | Controller callback method called by the native and web extentions |
1,262 | def update_callback @update_proc . call if @console if STDIN . ready? cmd = STDIN . gets begin res = eval ( cmd , TOPLEVEL_BINDING ) STDOUT . puts "=> #{res.inspect}" STDOUT . flush rescue SyntaxError => se STDOUT . puts se STDOUT . flush rescue Exception => e STDOUT . puts e STDOUT . flush end end end end | Update callback method called by the native and web extentions |
1,263 | def add_object ( o ) if ! @objects . include? ( o ) index = @objects . index do | object | object . z > o . z end if index @objects . insert ( index , o ) else @objects . push ( o ) end true else false end end | An an object to the window used by the public add method |
1,264 | def create_view ( name , version : nil , sql_definition : nil , materialized : false ) if version . present? && sql_definition . present? raise ( ArgumentError , "sql_definition and version cannot both be set" , ) end if version . blank? && sql_definition . blank? version = 1 end sql_definition ||= definition ( name , version ) if materialized Scenic . database . create_materialized_view ( name , sql_definition , no_data : no_data ( materialized ) , ) else Scenic . database . create_view ( name , sql_definition ) end end | Create a new database view . |
1,265 | def drop_view ( name , revert_to_version : nil , materialized : false ) if materialized Scenic . database . drop_materialized_view ( name ) else Scenic . database . drop_view ( name ) end end | Drop a database view by name . |
1,266 | def update_view ( name , version : nil , sql_definition : nil , revert_to_version : nil , materialized : false ) if version . blank? && sql_definition . blank? raise ( ArgumentError , "sql_definition or version must be specified" , ) end if version . present? && sql_definition . present? raise ( ArgumentError , "sql_definition and version cannot both be set" , ) end sql_definition ||= definition ( name , version ) if materialized Scenic . database . update_materialized_view ( name , sql_definition , no_data : no_data ( materialized ) , ) else Scenic . database . update_view ( name , sql_definition ) end end | Update a database view to a new version . |
1,267 | def replace_view ( name , version : nil , revert_to_version : nil , materialized : false ) if version . blank? raise ArgumentError , "version is required" end if materialized raise ArgumentError , "Cannot replace materialized views" end sql_definition = definition ( name , version ) Scenic . database . replace_view ( name , sql_definition ) end | Update a database view to a new version using CREATE OR REPLACE VIEW . |
1,268 | def to_h { event_id : event_id , metadata : metadata . to_h , data : data , type : type , } end | Returns a hash representation of the event . |
1,269 | def publish ( events , stream_name : GLOBAL_STREAM , expected_version : :any ) enriched_events = enrich_events_metadata ( events ) serialized_events = serialize_events ( enriched_events ) append_to_stream_serialized_events ( serialized_events , stream_name : stream_name , expected_version : expected_version ) enriched_events . zip ( serialized_events ) do | event , serialized_event | with_metadata ( correlation_id : event . metadata [ :correlation_id ] || event . event_id , causation_id : event . event_id , ) do broker . ( event , serialized_event ) end end self end | Persists events and notifies subscribed handlers about them |
1,270 | def call ( env ) req = Rack :: Request . new ( env ) status , headers , response = @app . call ( env ) config = SecureHeaders . config_for ( req ) flag_cookies! ( headers , override_secure ( env , config . cookies ) ) unless config . cookies == OPT_OUT headers . merge! ( SecureHeaders . header_hash_for ( req ) ) [ status , headers , response ] end | merges the hash of headers into the current header set . |
1,271 | def override_secure ( env , config = { } ) if scheme ( env ) != "https" && config != OPT_OUT config [ :secure ] = OPT_OUT end config end | disable Secure cookies for non - https requests |
1,272 | def validate_samesite_boolean_config! if config [ :samesite ] . key? ( :lax ) && config [ :samesite ] [ :lax ] . is_a? ( TrueClass ) && config [ :samesite ] . key? ( :strict ) raise CookiesConfigError . new ( "samesite cookie config is invalid, combination use of booleans and Hash to configure lax and strict enforcement is not permitted." ) elsif config [ :samesite ] . key? ( :strict ) && config [ :samesite ] [ :strict ] . is_a? ( TrueClass ) && config [ :samesite ] . key? ( :lax ) raise CookiesConfigError . new ( "samesite cookie config is invalid, combination use of booleans and Hash to configure lax and strict enforcement is not permitted." ) end end | when configuring with booleans only one enforcement is permitted |
1,273 | def validate_exclusive_use_of_hash_constraints! ( conf , attribute ) return unless is_hash? ( conf ) if conf . key? ( :only ) && conf . key? ( :except ) raise CookiesConfigError . new ( "#{attribute} cookie config is invalid, simultaneous use of conditional arguments `only` and `except` is not permitted." ) end end | validate exclusive use of only or except but not both at the same time |
1,274 | def validate_exclusive_use_of_samesite_enforcement! ( attribute ) if ( intersection = ( config [ :samesite ] [ :lax ] . fetch ( attribute , [ ] ) & config [ :samesite ] [ :strict ] . fetch ( attribute , [ ] ) ) ) . any? raise CookiesConfigError . new ( "samesite cookie config is invalid, cookie(s) #{intersection.join(', ')} cannot be enforced as lax and strict" ) end end | validate exclusivity of only and except members within strict and lax |
1,275 | def reject_all_values_if_none ( source_list ) if source_list . length > 1 source_list . reject { | value | value == NONE } else source_list end end | Discard any none values if more directives are supplied since none may override values . |
1,276 | def dedup_source_list ( sources ) sources = sources . uniq wild_sources = sources . select { | source | source =~ STAR_REGEXP } if wild_sources . any? sources . reject do | source | ! wild_sources . include? ( source ) && wild_sources . any? { | pattern | File . fnmatch ( pattern , source ) } end else sources end end | Removes duplicates and sources that already match an existing wild card . |
1,277 | def invisible_recaptcha_tags ( options = { } ) options = { callback : 'invisibleRecaptchaSubmit' , ui : :button } . merge options text = options . delete ( :text ) html , tag_attributes = Recaptcha :: ClientHelper . recaptcha_components ( options ) html << recaptcha_default_callback ( options ) if recaptcha_default_callback_required? ( options ) case options [ :ui ] when :button html << %(<button type="submit" #{tag_attributes}>#{text}</button>\n) when :invisible html << %(<div data-size="invisible" #{tag_attributes}></div>\n) when :input html << %(<input type="submit" #{tag_attributes} value="#{text}"/>\n) else raise ( RecaptchaError , "ReCAPTCHA ui `#{options[:ui]}` is not valid." ) end html . respond_to? ( :html_safe ) ? html . html_safe : html end | Invisible reCAPTCHA implementation |
1,278 | def verify_recaptcha ( options = { } ) options = { model : options } unless options . is_a? Hash return true if Recaptcha :: Verify . skip? ( options [ :env ] ) model = options [ :model ] attribute = options [ :attribute ] || :base recaptcha_response = options [ :response ] || params [ 'g-recaptcha-response' ] . to_s begin verified = if recaptcha_response . empty? || recaptcha_response . length > G_RESPONSE_LIMIT false else recaptcha_verify_via_api_call ( request , recaptcha_response , options ) end if verified flash . delete ( :recaptcha_error ) if recaptcha_flash_supported? && ! model true else recaptcha_error ( model , attribute , options [ :message ] , "recaptcha.errors.verification_failed" , "reCAPTCHA verification failed, please try again." ) false end rescue Timeout :: Error if Recaptcha . configuration . handle_timeouts_gracefully recaptcha_error ( model , attribute , options [ :message ] , "recaptcha.errors.recaptcha_unreachable" , "Oops, we failed to validate your reCAPTCHA response. Please try again." ) false else raise RecaptchaError , "Recaptcha unreachable." end rescue StandardError => e raise RecaptchaError , e . message , e . backtrace end end | Your private API can be specified in the + options + hash or preferably using the Configuration . |
1,279 | def for_linter ( linter ) linter_name = case linter when Class linter . name . split ( '::' ) . last when HamlLint :: Linter linter . name end @hash [ 'linters' ] . fetch ( linter_name , { } ) . dup . freeze end | Compares this configuration with another . |
1,280 | def smart_merge ( parent , child ) parent . merge ( child ) do | _key , old , new | case old when Hash smart_merge ( old , new ) else new end end end | Merge two hashes such that nested hashes are merged rather than replaced . |
1,281 | def display_report ( report ) super File . write ( ConfigurationLoader :: AUTO_GENERATED_FILE , config_file_contents ) log . log "Created #{ConfigurationLoader::AUTO_GENERATED_FILE}." log . log "Run `haml-lint --config #{ConfigurationLoader::AUTO_GENERATED_FILE}`" ", or add `inherits_from: #{ConfigurationLoader::AUTO_GENERATED_FILE}` in a " '.haml-lint.yml file.' end | Prints the standard progress reporter output and writes the new config file . |
1,282 | def finished_file ( file , lints ) super if lints . any? lints . each do | lint | linters_with_lints [ lint . linter . name ] |= [ lint . filename ] linters_lint_count [ lint . linter . name ] += 1 end end end | Prints the standard progress report marks and tracks files with lint . |
1,283 | def config_file_contents output = [ ] output << HEADING output << 'linters:' if linters_with_lints . any? linters_with_lints . each do | linter , files | output << generate_config_for_linter ( linter , files ) end output . join ( "\n\n" ) end | The contents of the generated configuration file based on captured lint . |
1,284 | def generate_config_for_linter ( linter , files ) [ ] . tap do | output | output << " # Offense count: #{linters_lint_count[linter]}" output << " #{linter}:" if files . count > exclude_limit output << ' enabled: false' else output << ' exclude:' files . each do | filename | output << %{ - "#{filename}"} end end end . join ( "\n" ) end | Constructs the configuration for excluding a linter in some files . |
1,285 | def load_reporter_class ( reporter_name ) HamlLint :: Reporter . const_get ( "#{reporter_name}Reporter" ) rescue NameError raise HamlLint :: Exceptions :: InvalidCLIOption , "#{reporter_name}Reporter does not exist" end | Returns the class of the specified Reporter . |
1,286 | def visit_root ( node ) @enabled = matcher . match ( File . basename ( node . file ) ) ? true : false end | Enables the linter if the tree is for the right file type . |
1,287 | def visit_tag ( node ) return unless enabled? visit_script ( node ) || if node . parsed_attributes . contains_instance_variables? record_lint ( node , "Avoid using instance variables in #{file_types} views" ) end end | Checks for instance variables in tag nodes when the linter is enabled . |
1,288 | def run ( options = { } ) @config = load_applicable_config ( options ) @files = extract_applicable_files ( config , options ) @linter_selector = HamlLint :: LinterSelector . new ( config , options ) @fail_fast = options . fetch ( :fail_fast , false ) report ( options ) end | Runs the appropriate linters against the desired files given the specified options . |
1,289 | def collect_lints ( file , linter_selector , config ) begin document = HamlLint :: Document . new ( File . read ( file ) , file : file , config : config ) rescue HamlLint :: Exceptions :: ParseError => e return [ HamlLint :: Lint . new ( HamlLint :: Linter :: Syntax . new ( config ) , file , e . line , e . to_s , :error ) ] end linter_selector . linters_for_file ( file ) . map do | linter | linter . run ( document ) end . flatten end | Runs all provided linters using the specified config against the given file . |
1,290 | def extract_applicable_files ( config , options ) included_patterns = options [ :files ] excluded_patterns = config [ 'exclude' ] excluded_patterns += options . fetch ( :excluded_files , [ ] ) HamlLint :: FileFinder . new ( config ) . find ( included_patterns , excluded_patterns ) end | Returns the list of files that should be linted given the specified configuration and options . |
1,291 | def process_files ( report ) files . each do | file | process_file ( file , report ) break if report . failed? && fail_fast? end end | Process the files and add them to the given report . |
1,292 | def process_file ( file , report ) lints = collect_lints ( file , linter_selector , config ) lints . each { | lint | report . add_lint ( lint ) } report . finish_file ( file , lints ) end | Process a file and add it to the given report . |
1,293 | def report ( options ) report = HamlLint :: Report . new ( reporter : options [ :reporter ] , fail_level : options [ :fail_level ] ) report . start ( @files ) process_files ( report ) report end | Generates a report based on the given options . |
1,294 | def find ( patterns , excluded_patterns ) excluded_patterns = excluded_patterns . map { | pattern | normalize_path ( pattern ) } extract_files_from ( patterns ) . reject do | file | excluded_patterns . any? do | exclusion_glob | HamlLint :: Utils . any_glob_matches? ( exclusion_glob , file ) end end end | Create a file finder using the specified configuration . |
1,295 | def extract_files_from ( patterns ) files = [ ] patterns . each do | pattern | if File . file? ( pattern ) files << pattern else begin :: Find . find ( pattern ) do | file | files << file if haml_file? ( file ) end rescue :: Errno :: ENOENT matches = :: Dir . glob ( pattern ) if matches . any? files += matches else raise HamlLint :: Exceptions :: InvalidFilePath , "File path '#{pattern}' does not exist" end end end end files . uniq . sort . map { | file | normalize_path ( file ) } end | Extract the list of matching files given the list of glob patterns file paths and directories . |
1,296 | def haml_file? ( file ) return false unless :: FileTest . file? ( file ) VALID_EXTENSIONS . include? ( :: File . extname ( file ) ) end | Whether the given file should be treated as a Haml file . |
1,297 | def starts_with_reserved_character? ( stringish ) string = stringish . respond_to? ( :children ) ? stringish . children . first : stringish string =~ %r{ \A \s } end | Returns whether a string starts with a character that would otherwise be given special treatment thus making enclosing it in a string necessary . |
1,298 | def run ( document ) @document = document @lints = [ ] visit ( document . tree ) @lints rescue Parser :: SyntaxError => e location = e . diagnostic . location @lints << HamlLint :: Lint . new ( HamlLint :: Linter :: Syntax . new ( config ) , document . file , location . line , e . to_s , :error ) end | Initializes a linter with the specified configuration . |
1,299 | def inline_content_is_string? ( node ) tag_with_inline_content = tag_with_inline_text ( node ) inline_content = inline_node_content ( node ) index = tag_with_inline_content . rindex ( inline_content ) - 1 %w[ ' " ] . include? ( tag_with_inline_content [ index ] ) end | Returns whether the inline content for a node is a string . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.