idx
int64
0
24.9k
question
stringlengths
68
4.14k
target
stringlengths
9
749
20,900
# Normalizes heterogeneous edge selectors into an EdgeSet: an Integer is an
# index into @edges, an EdgeSet or Digraph::Edge is taken as-is, an Array is
# expanded recursively, and anything else is converted to a predicate and
# used to select matching edges. Duplicates are removed.
def to_edges ( * args ) selected = args . collect do | arg | case arg when Integer [ @edges [ arg ] ] when EdgeSet arg when Array arg . collect { | v | to_edges ( v ) } . flatten . uniq when Digraph :: Edge [ arg ] else pred = Predicate . to_predicate ( arg ) edges ( pred ) end end . flatten . uniq EdgeSet . new ( selected ) end
Applies argument conventions about selection of edges
20,901
# Applies argument conventions on _element_.
#
# Each entry of +args+ is dispatched by type: a Module tags the element,
# a Hash adds marks to it, and anything else raises ArgumentError.
# Returns the (mutated) element.
def apply_arg_conventions(element, args)
  args.each do |argument|
    if argument.is_a?(Module)
      element.tag(argument)
    elsif argument.is_a?(Hash)
      element.add_marks(argument)
    else
      raise ArgumentError, "Unable to apply argument conventions on #{argument.inspect}", caller
    end
  end
  element
end
Applies argument conventions on _element_
20,902
# Builds "<name attrs>content</name>". Passing a Hash as the second argument
# treats it as the attributes; a given block is captured and overrides the
# content argument. Returns an html_safe-tagged string (Rails helper).
# NOTE(review): when neither content nor a block is given, `tag << content`
# appends nil — presumably callers always supply one; confirm upstream.
def content_tag ( name , content = nil , attrs = { } , & body ) if content . is_a? ( Hash ) attrs = content content = nil end if block_given? content = capture ( & body ) end tag = tag_opening ( name , attrs ) tag << ">" . html_safe tag << content tag << "</#{name}>" . html_safe end
Generates a tag with opening and closing tags and potentially content.
20,903
# Renders a dismissible Bootstrap alert <div> for the given message.
#
# A type prefixed with "safe_" marks the message html_safe and has the
# prefix stripped. :notice maps to :info and :alert maps to :danger.
# Returns nil for any type outside info/success/danger/warning.
def render_alert(type, message)
  # Bug fix: the original used String#index, which matched "safe_"
  # appearing anywhere in the type yet always stripped the first five
  # characters. Only a true "safe_" prefix should trigger this path.
  if type.to_s.start_with?('safe_')
    type = type.to_s[5..-1]
    message = message.to_s.html_safe
  end
  type = type.to_sym
  type = :info if type == :notice
  type = :danger if type == :alert
  return nil unless [:info, :success, :danger, :warning].include?(type)
  "<div class=\"alert alert-#{type} alert-dismissible\"><button type=\"button\" class=\"close\" data-dismiss=\"alert\" aria-label=\"Close\"><span aria-hidden=\"true\">&times;</span></button>#{render_alert_message(message)}</div>".html_safe
end
Renders an alert message .
20,904
# Creates an ActionKit Event scoped to this campaign. Raises unless the
# campaign has been saved (needs an id). The :campaign_id is merged into the
# first argument hash. Returns the new Event (the local assignment is
# redundant; the constructor result is the method's value).
def create_event ( * args ) raise "EventCampaign needs to be saved before Event creation" if self . id . nil? ( args [ 0 ] ) . merge! ( :campaign_id => self . id ) event = ActionKitApi :: Event . new ( * args ) end
Requires at a minimum the creator_id
20,905
# Searches public events via the ActionKit API, scoped to this campaign.
#
# Merges :campaign_id into the first argument hash, invokes the
# Event.public_search API call, and wraps each raw result hash in an Event.
#
# Bug fix: the original built the mapped Event list but then returned the
# raw `results` array, discarding the Event objects entirely.
def public_search(*args)
  (args[0]).merge!(:campaign_id => self.id)
  results = ActionKitApi.connection.call("Event.public_search", *args)
  results.map do |r|
    Event.new(r)
  end
end
Will not return private events, events that are full, deleted, or in the past; doesn't return extra fields.
20,906
# Encrypts the hash (its JSON serialization) with AES-256-CBC using a key/iv
# derived from the data encryption key, salt, and iteration magnitude.
# Prepends the v2 header (salt + iter_mag) and appends a CRC32 trailer.
# Returns nil for an empty hash; raises if the salt exceeds 255 bytes.
def encrypt ( dek , salt , iter_mag = 10 ) return nil if empty? raise Errors :: InvalidSaltSize , 'too long' if salt . bytes . length > 255 key , iv = _keyiv_gen ( dek , salt , iter_mag ) encryptor = OpenSSL :: Cipher :: AES256 . new ( :CBC ) . encrypt encryptor . key = key encryptor . iv = iv data_packet = _encrypted_data_header_v2 ( salt , iter_mag ) + encryptor . update ( self . to_json ) + encryptor . final _append_crc32 ( data_packet ) end
Encrypts the hash using the data encryption key and salt .
20,907
# Authenticates a user by email/password. Invalid emails return nil. When a
# domain-specific auth engine is registered for the email, it handles the
# attempt; otherwise the user is looked up in the DB, disabled accounts are
# rejected, and success/failure is logged with the client IP.
def authenticate ( email , password , client_ip ) return nil unless Incline :: EmailValidator . valid? ( email ) email = email . downcase engine = get_auth_engine ( email ) if engine return engine . authenticate ( email , password , client_ip ) end user = User . find_by ( email : email ) if user unless user . enabled? add_failure_to user , '(DB) account disabled' , client_ip return nil end if user . authenticate ( password ) add_success_to user , '(DB)' , client_ip return user else add_failure_to user , '(DB) invalid password' , client_ip return nil end end add_failure_to email , 'invalid email' , client_ip nil end
Authenticates a user by email and password, delegating to a domain-specific auth engine when one is registered; otherwise falls back to database authentication.
20,908
# Asks each registered auth engine whether it wants to handle external
# authentication for +request+. Returns the first non-blank redirect URL an
# engine supplies, or nil when no engine claims the request.
def begin_external_authentication(request)
  auth_engines.each do |_domain, engine|
    next if engine.nil?
    url = engine.begin_external_authentication(request)
    return url unless url.blank?
  end
  nil
end
The begin_external_authentication method takes a request object to determine if it should process a login or return nil . If it decides to process authentication it should return a URL to redirect to .
20,909
# Registers an authentication engine for one or more domains. A Class is
# instantiated with @options; anything that is not an AuthEngineBase raises
# ArgumentError. Domains are downcased/stripped, validated, then mapped to
# the engine in auth_engines.
# NOTE(review): the domain validation regex appears garbled by extraction —
# as written it only matches a lone "." — TODO restore the original pattern.
def register_auth_engine ( engine , * domains ) unless engine . nil? unless engine . is_a? ( :: Incline :: AuthEngineBase ) raise ArgumentError , "The 'engine' parameter must be an instance of an auth engine or a class defining an auth engine." unless engine . is_a? ( :: Class ) engine = engine . new ( @options ) raise ArgumentError , "The 'engine' parameter must be an instance of an auth engine or a class defining an auth engine." unless engine . is_a? ( :: Incline :: AuthEngineBase ) end end domains . map do | dom | dom = dom . to_s . downcase . strip raise ArgumentError , "The domain #{dom.inspect} does not appear to be a valid domain." unless dom =~ / \A \. \Z / dom end . each do | dom | auth_engines [ dom ] = engine end end
Registers an authentication engine for one or more domains .
20,910
# Generator step: creates the view directory, resolves the action list
# (defaulting to the full CRUD set), computes column counts (plus one each
# for edit/destroy links), then renders a template per non-mutating action
# and per format into app/views/<class_path>/<file_name>.
def copy_view_files base_path = File . join ( "app/views" , class_path , file_name ) empty_directory base_path @actions = actions . nil? || actions . empty? ? %w( index new create edit update destroy ) : actions @attr_cols = GeneratorUtils :: attr_cols ( table_name ) @col_count = @attr_cols . count @col_count += 1 if @actions . include? ( "edit" ) @col_count += 1 if @actions . include? ( "destroy" ) @search_sort = options . search_sort? ( @actions - %w( create update destroy ) ) . each do | action | @action = action formats . each do | format | @path = File . join ( base_path , filename_with_extensions ( action , format ) ) set_template ( @action , @path ) end end end
This method seems to always get run first
20,911
# Returns a plain Ruby Hash for the wrapped hash: nested Footing::Hash
# values are converted recursively, Arrays are mapped element-wise (only
# converting Footing::Hash members), and everything else is copied as-is.
def to_h copied_object . each_with_object ( { } ) do | pair , memo | value = pair . last if value . is_a? ( Footing :: Hash ) memo [ pair . first ] = value . to_h elsif value . is_a? ( :: Array ) memo [ pair . first ] = value . map do | val | if val . is_a? ( Footing :: Hash ) val . to_h else val end end else memo [ pair . first ] = value end end end
Returns a standard ruby Hash representation of the wrapped Hash .
20,912
# Performs an API request: POSTs the XML body for +operation+/+data+ as the
# 'Package' payload via RestClient, wraps the raw response in Response, and
# raises Error on failure when fail_on_error is enabled.
def request ( operation , data ) opts = { method : :post , url : api_url , payload : { 'Package' => body ( operation , data ) } , content_type : :xml , verify_ssl : verify_ssl , ssl_ca_file : ssl_ca_file } response = RestClient :: Request . execute ( opts ) response = Response . new ( response ) if ! response . success? && fail_on_error fail Error . new ( response ) end response end
Performs an API request: POSTs the XML body for the given operation and returns a Response, raising Error on failure when fail_on_error is set.
20,913
# Builds the request-body parameter XML: each pair becomes <Key>value</Key>
# (key transformed by parameter_key). Hash values recurse, Array values are
# concatenated element-wise, nil becomes empty, and scalars are CDATA-wrapped.
def body_parameters ( parameters ) parameters_xml = '' parameters . each_pair do | k , v | key = parameter_key ( k ) val = case v when Hash body_parameters ( v ) when Array v . map { | i | body_parameters ( i ) } . join ( '' ) when nil '' else "<![CDATA[#{v}]]>" end parameters_xml << "<#{key}>#{val}</#{key}>" end parameters_xml end
Build body parameters XML.
20,914
# Prepares an XML parameter key from a symbol/string term via capitalization.
# NOTE(review): both regexes look garbled by extraction — the gsub! block
# references $1/$2 but the pattern defines no capture groups; presumably the
# original camelized underscored keys. TODO recover the intended patterns.
def parameter_key ( term ) string = term . to_s string = string . sub ( / \d / ) { $& . capitalize } string . gsub! ( / \/ \d /i ) { "#{$1}#{$2.capitalize}" } string end
Prepare parameter key
20,915
# Builds the primitive zinc-blende unit cell: cation at the origin, anion at
# (1/4,1/4,1/4)·a, with FCC lattice vectors scaled by the lattice constant,
# and Miller indices set to the Cartesian axes.
def get_bulk b = 0.25 * self . lattice_const a1 = Atom . new ( 0 , 0 , 0 , self . cation ) a2 = Atom . new ( b , b , b , self . anion ) v1 = Vector [ 0.5 , 0.5 , 0.0 ] * self . lattice_const v2 = Vector [ 0.5 , 0.0 , 0.5 ] * self . lattice_const v3 = Vector [ 0.0 , 0.5 , 0.5 ] * self . lattice_const zb = Geometry . new ( [ a1 , a2 ] , [ v1 , v2 , v3 ] ) millerx = [ 1 , 0 , 0 ] millery = [ 0 , 1 , 0 ] millerz = [ 0 , 0 , 1 ] zb . set_miller_indices ( millerx , millery , millerz ) return zb end
Initialize the zinc - blende Geometry cation and anion are the atomic species occupying the two different sub - lattices . lattice_const specifies the lattice constant Return the traditional unit cell of bulk zinc blende
20,916
# Fills +volume+ with bulk atoms: computes how many bulk-cell repetitions
# cover the volume's bounding box (via the repetition matrix m·v⁻¹, padded
# by one cell in each direction), then keeps only atoms inside the volume.
def fill_volume ( volume ) max = volume . max_point min = volume . min_point dx = max [ 0 ] - min [ 0 ] dy = max [ 1 ] - min [ 1 ] dz = max [ 2 ] - min [ 2 ] bulk = get_bulk m = Matrix [ [ dx , 0 , 0 ] , [ 0 , dy , 0 ] , [ 0 , 0 , dz ] ] v = Matrix [ bulk . lattice_vectors [ 0 ] . to_a , bulk . lattice_vectors [ 1 ] . to_a , bulk . lattice_vectors [ 2 ] . to_a ] rep_mat = m * ( v . inverse ) atoms = [ ] 3 . times do | i | n_repeat = rep_mat . row ( i ) nx = ( n_repeat [ 0 ] < 0 ) ? n_repeat [ 0 ] . floor - 1 : n_repeat [ 0 ] . ceil + 1 ny = ( n_repeat [ 1 ] < 0 ) ? n_repeat [ 1 ] . floor - 1 : n_repeat [ 1 ] . ceil + 1 nz = ( n_repeat [ 2 ] < 0 ) ? n_repeat [ 2 ] . floor - 1 : n_repeat [ 2 ] . ceil + 1 atoms += bulk . repeat ( nx , ny , nz ) . atoms . find_all { | a | volume . contains_point ( a . x , a . y , a . z ) } end Geometry . new ( atoms . uniq ) end
Fill the given volume with atoms
20,917
# Builds a (001) slab: repeats the 2-atom cell to the requested monolayer
# count, inserts vacuum along z when requested, trims atoms above the slab,
# and constrains the bottom +constrain_layers+ layers (a.constrain=".true.").
def get_001_surface ( monolayers , vacuum , constrain_layers = 0 ) anion = Atom . new ( 0 , 0 , 0 , self . cation ) cation = Atom . new ( 0.25 * self . lattice_const , 0.25 * self . lattice_const , 0.25 * self . lattice_const , self . anion ) v1 = Vector [ 0.5 , 0.5 , 0 ] * self . lattice_const v2 = Vector [ - 0.5 , 0.5 , 0 ] * self . lattice_const v3 = Vector [ 0.5 , 0 , 0.5 ] * self . lattice_const zb = Geometry . new ( [ anion , cation ] , [ v1 , v2 , v3 ] ) millerX = [ 1 , 0 , 0 ] millerY = [ 0 , 1 , 0 ] millerZ = [ 0 , 0 , 1 ] zb . set_miller_indices ( millerX , millerY , millerZ ) zb = zb . repeat ( 1 , 1 , ( monolayers / 2 ) . ceil ) if 0 < vacuum monolayerSep = v3 [ 2 ] / 2 zb . lattice_vectors [ 2 ] = Vector [ 0 , 0 , ( monolayers - 1 ) * monolayerSep . abs + vacuum . to_f ] zb = zb . correct end minZ = zb . atoms . min { | a , b | a . z <=> b . z } . z zb . atoms . reject! { | a | a . z >= ( minZ + monolayerSep . abs * monolayers ) } zb . atoms . each { | a | if ( a . z < minZ + monolayerSep . abs * constrain_layers ) a . constrain = ".true." end } return zb end
Return a unit cell for a slab of 001 Specify the number of atomic monolayers the vacuum thickness in angstrom and the number of layers to constrain at the base of the slab
20,918
# Builds a (111) slab terminated on the anion ("A") or cation ("B") face.
# Repeats the bilayer cell, adds vacuum, strips the extremal half-bilayers,
# and constrains the lowest +constrain_layers+ bilayers. Raises on any
# direction other than "A" or "B".
def get_111_surface ( dir , monolayers , vacuum , constrain_layers = 0 ) if dir == "A" top_atom = self . anion bot_atom = self . cation elsif dir == "B" top_atom = self . cation bot_atom = self . anion else raise "Direction must be either A or B" end as1 = Atom . new ( 0.0 , 0.0 , 0.0 , top_atom ) ga1 = Atom . new ( 0.0 , 0.0 , - sqrt ( 3 ) / 4 * self . lattice_const , bot_atom ) v1 = Vector [ 0.5 * sqrt ( 2 ) , 0.0 , 0.0 ] * self . lattice_const v2 = Vector [ sqrt ( 2 ) * 0.25 , sqrt ( 6 ) * 0.25 , 0.0 ] * self . lattice_const v3 = Vector [ sqrt ( 2 ) * 0.25 , sqrt ( 2.0 / 3.0 ) * 0.25 , - 1 * sqrt ( 4.0 / 3.0 ) * 0.5 ] * self . lattice_const zb = Geometry . new ( [ as1 , ga1 ] , [ v1 , v2 , v3 ] ) millerX = [ - 1 , 1 , 0 ] millerY = [ 1 , 1 , - 2 ] millerZ = [ - 1 , - 1 , - 1 ] zb . set_miller_indices ( millerX , millerY , millerZ ) if 0 < vacuum zb = zb . repeat ( 1 , 1 , monolayers + 1 ) bilayerSep = v3 [ 2 ] zb . lattice_vectors [ 2 ] = Vector [ 0 , 0 , ( monolayers - 1 ) * ( bilayerSep . abs ) + vacuum ] minZ = zb . atoms . min { | a , b | a . z <=> b . z } . z maxZ = zb . atoms . max { | a , b | a . z <=> b . z } . z zb . atoms . reject! { | a | a . z == maxZ } zb . atoms . reject! { | a | a . z == minZ } if 0 < constrain_layers minZ = zb . atoms . min { | a , b | a . z <=> b . z } . z constrain_below = minZ + bilayerSep . abs * constrain_layers zb . atoms . each { | a | if ( a . z < constrain_below ) a . constrain = ".true." end } end end zb end
Return a unit cell for a slab of 111 dir is either A or B for the cation or anion terminated slab specify the number of atomic monolayers and the vacuum thickness in angstrom
20,919
# Builds a (112) slab with the given number of monolayers; vacuum (if any)
# is inserted along the third lattice vector and the geometry re-corrected.
# NOTE(review): constrain_layers is accepted but never applied here —
# presumably intentional parity with the other surface builders; confirm.
def get_112_surface ( monolayers , vacuum = 0 , constrain_layers = 0 ) atom1 = Atom . new ( 0 , 0 , 0 , self . cation ) atom2 = Atom . new ( self . lattice_const * sqrt ( 3 ) / 2 , 0 , 0 , self . anion ) v1 = Vector [ sqrt ( 3 ) , 0 , 0 ] * self . lattice_const v2 = Vector [ 0 , sqrt ( 2 ) / 2 , 0 ] * self . lattice_const v3 = Vector [ 1 / sqrt ( 3 ) , 1 / ( sqrt ( 3 ) * 2 ) , - 1 / ( sqrt ( 3 ) * 2 ) ] * self . lattice_const millerX = Vector [ 1 , 1 , - 2 ] ; millerY = Vector [ - 1 , 1 , 0 ] ; millerZ = Vector [ - 1 , - 1 , - 1 ] zb = Geometry . new ( [ atom1 , atom2 ] , [ v1 , v2 , v3 ] ) zb . set_miller_indices ( millerX , millerY , millerZ ) zb = zb . repeat ( 1 , 1 , monolayers ) if 0 < vacuum monolayerSep = v3 [ 2 ] zb . lattice_vectors [ 2 ] = Vector [ 0 , 0 , ( monolayers * monolayerSep ) . abs + vacuum . to_f ] zb = zb . correct end return zb end
return a unit cell for a slab of 112 specify the number of atomic monolayers and the vacuum thickness in angstrom
20,920
# Builds a (110) slab with the given number of monolayers, optional vacuum
# along z, and constrains atoms within the lowest +constrain_layers+
# monolayer separations (a.constrain=".true.").
def get_110_surface ( monolayers , vacuum = 0 , constrain_layers = 0 ) atom1 = Atom . new ( 0 , 0 , 0 , self . cation ) atom2 = Atom . new ( self . lattice_const * 1 / ( 2 * sqrt ( 2 ) ) , self . lattice_const * 0.25 , 0.0 , self . anion ) v1 = Vector [ 1 / sqrt ( 2 ) , 0.0 , 0.0 ] * self . lattice_const v2 = Vector [ 0.0 , 1.0 , 0.0 ] * self . lattice_const v3 = Vector [ 1 / ( 2 * sqrt ( 2 ) ) , - 0.5 , 1 / ( 2 * sqrt ( 2 ) ) ] * self . lattice_const millerX = Vector [ 1 , - 1 , 0 ] millerY = Vector [ 0 , 0 , 1 ] millerZ = Vector [ 1 , 1 , 0 ] zb = Geometry . new ( [ atom1 , atom2 ] , [ v1 , v2 , v3 ] ) zb . set_miller_indices ( millerX , millerY , millerZ ) zb = zb . repeat ( 1 , 1 , monolayers ) monolayerSep = v3 [ 2 ] if 0 < vacuum zb . lattice_vectors [ 2 ] = Vector [ 0 , 0 , ( monolayers - 1 ) * monolayerSep . abs + vacuum . to_f ] zb = zb . correct end zb . atoms . each { | a | if ( a . z < monolayerSep * constrain_layers ) a . constrain = ".true." end } return zb end
Return a unit cell for a slab of 110 specify the number of atomic monolayers and the vacuum thickness in angstrom
20,921
# Maps each non-nil item (with its index) through the block into a Hash of
# item => value, skipping entries whose block value is nil or empty
# (nil_or_empty? is a project extension).
def to_compact_hash_with_index hash = { } self . each_with_index do | item , index | next if item . nil? value = yield ( item , index ) next if value . nil_or_empty? hash [ item ] = value end hash end
Returns a new Hash generated from this Collection with a block whose arguments include the enumerated item and its index . Every value which is nil or empty is excluded .
20,922
# Sorts this collection in-place with a partial-order comparator block.
# When the block returns nil for a pair, previously memoized comparisons
# (hash h of antisymmetric results) and transitive lookups via detect_value
# supply an ordering, defaulting to 1. Left byte-identical: the memoization
# is order-sensitive.
def partial_sort! unless block_given? then return partial_sort! { | item1 , item2 | item1 <=> item2 } end h = Hash . new { | h , k | h [ k ] = Hash . new } sort! do | a , b | yield ( a , b ) || h [ a ] [ b ] ||= - h [ b ] [ a ] ||= h [ a ] . detect_value { | c , v | v if v == yield ( c , b ) } || 1 end end
Sorts this collection in - place with a partial sort operator block
20,923
# Sets an attribute inverse: when +other+ is present, first calls the
# inverse writer on it with self, then always calls the attribute writer
# on self with +other+, keeping both sides of the association in sync.
def set_inverse(other, writer, inv_writer)
  if other
    other.send(inv_writer, self)
  end
  send(writer, other)
end
Sets an attribute inverse by calling the attribute writer method with the other argument . If other is non - nil then the inverse writer method is called on self .
20,924
# Sets a non-collection attribute while enforcing inverse integrity: no-op
# when newval is the current value; otherwise clears the old value's link
# (via the local writer when the old value points back at self, else via
# the inverse writer), sets the new value, and points it back at self.
def set_inversible_noncollection_attribute ( newval , accessors , inverse_writer ) rdr , wtr = accessors oldval = send ( rdr ) return newval if newval . equal? ( oldval ) logger . debug { "Moving #{qp} from #{oldval.qp} to #{newval.qp}..." } if oldval and newval if oldval then clr_wtr = self . class === oldval && oldval . send ( rdr ) . equal? ( self ) ? wtr : inverse_writer oldval . send ( clr_wtr , nil ) end send ( wtr , newval ) if newval then newval . send ( inverse_writer , self ) logger . debug { "Moved #{qp} from #{oldval.qp} to #{newval.qp}." } if oldval end newval end
Sets a non - collection attribute value in a way which enforces inverse integrity .
20,925
# Sets a collection-inverse attribute: removes self from the old value's
# inverse collection, writes the new value, lazily creates the new value's
# inverse collection (block result or Array) when missing, and appends self.
def add_to_inverse_collection ( newval , accessors , inverse ) rdr , wtr = accessors oldval = send ( rdr ) return newval if newval == oldval if oldval then coll = oldval . send ( inverse ) coll . delete ( self ) if coll end send ( wtr , newval ) if newval then coll = newval . send ( inverse ) if coll . nil? then coll = block_given? ? yield : Array . new newval . set_property_value ( inverse , coll ) end coll << self if oldval then logger . debug { "Moved #{qp} from #{rdr} #{oldval.qp} #{inverse} to #{newval.qp}." } else logger . debug { "Added #{qp} to #{rdr} #{newval.qp} #{inverse}." } end end newval end
Sets a collection attribute value in a way which enforces inverse integrity . The inverse of the attribute is a collection accessed by calling inverse on newval .
20,926
# Recursively expand a path handle.
#
# Non-symbol handles are returned untouched. A Symbol is looked up in
# ROOT_PATHS (raising ArgumentError when unknown) and every resulting
# segment is expanded in turn, producing a flat list of path segments.
def expand_pathseg(handle)
  return handle unless handle.is_a?(Symbol)
  pathsegs = ROOT_PATHS[handle]
  unless pathsegs
    raise ArgumentError, "Don't know how to expand path reference '#{handle.inspect}'."
  end
  pathsegs.map { |segment| expand_pathseg(segment) }.flatten
end
ATTENZIONE: AREA PROTETTA (protected area). Recursively expand a path handle.
20,927
# Blocks until the latch count reaches zero or the timeout expires, waiting
# on the condition variable under the mutex. Returns true when the count
# reached zero, false on timeout.
def wait ( timeout = nil ) @mutex . synchronize do remaining = Condition :: Result . new ( timeout ) while @count > 0 && remaining . can_wait? remaining = @condition . wait ( @mutex , remaining . remaining_time ) end @count == 0 end end
Blocks until the latch count reaches zero or the timeout expires; returns true if the count reached zero.
20,928
# Dumps all diffs for +field+ of TYPE_EXTRA as a formatted message string,
# or nil when there are none.
def dump_extra_diffs(field)
  extra_diffs = diffs_by_field_type(field, TYPE_EXTRA)
  return if extra_diffs.empty?
  lines = extra_diffs.map do |diff|
    dump_single(field, diff[:generator], diff[:elem])
  end
  "Table #{@table_name} has extra #{field}:\n\n\t#{lines.join("\n\t")}\n"
end
Dumps all diffs that have the given field and are of TYPE_EXTRA .
20,929
# Dumps all diffs for +field+ of TYPE_MISSING as a formatted message string
# (header plus one tab-indented line per diff), or nil when there are none.
def dump_missing_diffs ( field ) missing_diffs = diffs_by_field_type ( field , TYPE_MISSING ) if missing_diffs . length > 0 header = "Table #{@table_name} is missing #{field}:\n" diff_str = missing_diffs . map do | diff | dump_single ( field , diff [ :generator ] , diff [ :elem ] ) end . join ( "\n\t" ) "#{header}\n\t#{diff_str}\n" end end
Dumps all diffs that have the given field and are of TYPE_MISSING .
20,930
# Dumps all diffs for +field+ of TYPE_MISMATCH, rendering each as an
# actual/expected pair, or nil when there are none.
def dump_mismatch_diffs ( field ) mismatch_diffs = diffs_by_field_type ( field , TYPE_MISMATCH ) if mismatch_diffs . length > 0 header = "Table #{@table_name} has mismatched #{field}:\n" diff_str = mismatch_diffs . map do | diff | "actual: #{dump_single(field, diff[:db_generator], diff[:db_elem])}\n\t" + "expected: #{dump_single(field, diff[:exp_generator], diff[:exp_elem])}" end . join ( "\n\n\t" ) "#{header}\n\t#{diff_str}\n" end end
Dumps all diffs that have the given field and are of TYPE_MISMATCH .
20,931
# Combines the extra/missing/mismatch dumps for every field into one error
# message, ending with a hint on how to disable schema checks.
def to_s parts = FIELDS . flat_map do | field | [ dump_extra_diffs ( field ) , dump_missing_diffs ( field ) , dump_mismatch_diffs ( field ) ] end [ "Table #{@table_name} does not match the expected schema.\n\n" , parts . compact . join ( "\n" ) , "\nYou may disable schema checks by passing :disable => true to model_" , "schema or by setting the ENV variable #{DISABLE_MODEL_SCHEMA_KEY}=1.\n" ] . join end
Combines all dumps into one cohesive error message .
20,932
# Returns a new Visitor whose navigator applies the given filter block to
# each (node, children) pair of this visitor. Raises ArgumentError when no
# block is supplied.
def filter raise ArgumentError . new ( "A filter block is not given to the visitor filter method" ) unless block_given? self . class . new ( @options ) { | node | yield ( node , node_children ( node ) ) } end
Returns a new Visitor which determines which nodes to visit by applying the given block to this visitor . The filter block arguments consist of a parent node and an array of children nodes for the parent . The block can return nil a single node to visit or a collection of nodes to visit .
20,933
# Returns the children to visit for +node+ via the navigator: a frozen
# empty array for nil, a compacted array for enumerables, otherwise the
# single child wrapped in an array.
def node_children ( node ) children = @navigator . call ( node ) return Array :: EMPTY_ARRAY if children . nil? Enumerable === children ? children . to_a . compact : [ children ] end
Returns the children to visit for the given node .
20,934
# Visits the root node and all descendants. When cycle pruning is enabled,
# nodes inside cycles are excluded for the duration of this traversal.
def visit_root ( node , & operator ) clear @exclude . merge! ( cyclic_nodes ( node ) ) if @prune_cycle_flag result = visit_recursive ( node , & operator ) @exclude . clear if @prune_cycle_flag result end
Visits the root node and all descendants .
20,935
# Returns the nodes which occur within a cycle, excluding each cycle's
# entry point.
#
# A secondary Visitor (built without the :prune_cycle option) walks the
# graph from +root+; whenever a child already appears in the visit lineage,
# every lineage node after that entry point is recorded as cyclic.
#
# Bug fix: Ruby's Set has no #merge! method (Set#merge is already
# destructive), so the original raised NoMethodError on the first cycle
# encountered.
def cyclic_nodes(root)
  copts = @options.reject { |k, v| k == :prune_cycle }
  cyclic = Set.new
  cycler = Visitor.new(copts) do |parent|
    children = @navigator.call(parent)
    children.each do |child|
      index = cycler.lineage.index(child)
      if index then
        cyclic.merge(cycler.lineage[(index + 1)..-1])
      end
    end
    children
  end
  cycler.visit(root)
  cyclic
end
Returns the nodes which occur within a cycle excluding the cycle entry point .
20,936
# Builds a Duration (or other +klass+) from self.
#
# When +part+ names a duration unit, self is treated as a count of that
# unit; otherwise self is passed through as a plain value.
def duration(part = nil, klass = Duration)
  units = [:years, :months, :weeks, :days, :hours, :minutes, :seconds]
  if units.include?(part)
    klass.new(part => self)
  else
    klass.new(self)
  end
end
Create a Duration object using self where self could represent weeks days hours minutes and seconds .
20,937
# Returns the number of seconds contributed by the given +part+ of the
# duration (weeks/days/hours/minutes scaled by the matching constant), or
# the total number of seconds when no recognized part is given.
def seconds(part = nil)
  multipliers = { :weeks => WEEK, :days => DAY, :hours => HOUR, :minutes => MINUTE }
  if multipliers.key?(part)
    __send__(part) * multipliers[part]
  else
    @seconds
  end
end
Get the number of seconds of a given part or simply just get the number of seconds .
20,938
# Friendly human-readable string for the duration: joins non-zero parts as
# "N part, " (singularizing when N == 1), strips the trailing comma, and
# joins the final two parts with "and".
# NOTE(review): the final sub's pattern looks garbled by extraction — it
# references \1/\2 but / / defines no groups; presumably the original was
# something like sub(/(.+), (.+)/, '\1 and \2'). TODO recover it.
def to_s str = '' each do | part , time | next if time . zero? str << "#{time} #{time == 1 ? part[0..-2] : part}, " end str . chomp ( ', ' ) . sub ( / / , '\1 and \2' ) end
Friendly human - readable string representation of the duration .
20,939
def dump ( filename ) File . open ( filename , 'w' ) do | f | YAML . dump ( self . to_hash , f ) end end
saves configuration into a _yaml_ file named + filename +
20,940
# Merges rows from multiple sources keyed by @key, then yields each merged
# row. The first source seeds the row list; later sources are matched by
# key and merged wholesale, or column-by-column via @target_cols (falling
# back to ReindeerETL::Mods lookups for unmatched targets). Raises when a
# row lacks @key or, with @expect_full_match, when a key has no match.
# Left byte-identical: the merge logic is tightly order-dependent.
def each rows = [ ] all_keys = Set . new @sources . each_with_index do | source , source_idx | first_row = false source . each do | row | unless row . keys . include? @key raise ReindeerETL :: Errors :: RecordInvalid . new ( "Path#1 missing key: #{@key}" ) end if source_idx == 0 rows << row else source_targets = @target_cols [ source_idx - 1 ] unless @target_cols . nil? rindex = rows . index { | r | r [ @key ] == row [ @key ] } if rindex . nil? if @expect_full_match raise ReindeerETL :: Errors :: RecordInvalid . new ( "Expected full match" ) else next end end if source_targets . nil? or source_targets . empty? rows [ rindex ] = rows [ rindex ] . merge ( row ) else source_targets . each_with_index do | tar , sidx | underscored_tar = h_underscore_string tar if row . keys . map { | k | k [ h_regex , 1 ] } . include? underscored_tar k = row . keys . select { | k | k [ h_regex , 1 ] == underscored_tar } . first hash = h_hash_maker tar , row [ k ] rows [ rindex ] . merge! ( hash ) else val = Object . const_get ( "ReindeerETL::Mods::#{@namespace}::#{tar}" ) . get ( row ) rows [ rindex ] . merge! ( h_hash_maker ( tar , val ) ) end end end end end end rows . each { | r | yield r } end
helper methods have h_ prefix
20,941
# Finds a child module by +name+ (via safe_find), memoizing the result in
# the cache; returns the failure sentinel when nothing matches.
def find ( name ) cache ( name ) { @children . find { | x | safe_find ( x , name ) } || failure } end
Method for finding modules to load
20,942
# Adds an SGE user: renders the user template into a tempfile and feeds it
# to `qconf -Auser`. I/O only.
# NOTE(review): the hard-coded 5-second sleep presumably waits for qconf to
# settle — confirm whether it is still required.
def add Tempfile . open do | tmpfile | tmpfile . puts render ( Templates :: User :: ERB ) tmpfile . flush system ( "qconf -Auser #{tmpfile.path}" ) sleep 5 end end
Add an SGE user
20,943
# Evaluates a boolean rule expression against +values+: validates via the
# Evaluator (optionally strict), substitutes variables, and evaluates the
# result. Any exception is reported and routed to error.
# SECURITY NOTE: `eval` executes the substituted expression as Ruby code —
# do not pass untrusted rules or values through this method.
def decide ( rule , values , strict = true ) rule = rule . clone values = values . clone error unless Undecided :: Evaluator . valid? ( rule , values , strict ) final_expression = Converter . replacing_variables ( rule , values ) eval final_expression rescue => e puts e . message error end
Given a boolean expression and data to replace return result
20,944
# Builds the default property hash from a monitoring event.
#
# Pulls identifying fields from the event's 'client' and 'check' sections;
# subscriptions are joined with ';' and the timestamp is stringified via
# #inspect.
def get_default_properties(event)
  client = event['client']
  check = event['check']
  {
    server_name: client['name'],
    server_ip: client['address'],
    subscriptions: client['subscriptions'].join(';'),
    environment: client['environment'],
    check_name: check['name'],
    check_command: check['command'],
    check_output: check['output'],
    timestamp: event['timestamp'].inspect
  }
end
Gets a default set of properties from the event
20,945
# Returns credentials from the encrypted data bag item for +environment+:
# the entry keyed by +source+ when present, the whole item when +source+ is
# the default DATA_BAG name, otherwise an empty hash.
def data_bag_config_for(environment, source)
  item = encrypted_data_bag_for(environment, DATA_BAG)
  if item.has_key?(source)
    item[source]
  elsif DATA_BAG == source
    item
  else
    {}
  end
end
Loads the encrypted data bag item and returns credentials for the environment or for a default key .
20,946
# Returns the encrypted data bag item for +data_bag+, consulting the
# per-instance cache first; on a miss, loads the item named for the
# environment, falling back to the WILDCARD item, then an empty hash, and
# caches the result.
def encrypted_data_bag_for ( environment , data_bag ) @encrypted_data_bags = { } unless @encrypted_data_bags if encrypted_data_bags [ data_bag ] return get_from_data_bags_cache ( data_bag ) else data_bag_item = encrypted_data_bag_item ( data_bag , environment ) data_bag_item ||= encrypted_data_bag_item ( data_bag , WILDCARD ) data_bag_item ||= { } @encrypted_data_bags [ data_bag ] = data_bag_item return data_bag_item end end
Looks for the given data bag in the cache and if not found will load a data bag item named for the chef_environment or _wildcard value .
20,947
# Copies the file at +file+ into a new tempfile (created under the
# configured tempfile path), returning the closed tempfile.
def copy_to_temp_file(file, temp_base_name)
  Tempfile.new(temp_base_name, AttachmentMagic.tempfile_path).tap do |tempfile|
    tempfile.close
    FileUtils.cp(file, tempfile.path)
  end
end
Copies the given file path to a new tempfile returning the closed tempfile .
20,948
# Writes +data+ to a new binary-mode tempfile (created under the configured
# tempfile path), returning the closed tempfile.
def write_to_temp_file(data, temp_base_name)
  Tempfile.new(temp_base_name, AttachmentMagic.tempfile_path).tap do |tempfile|
    tempfile.binmode
    tempfile.write(data)
    tempfile.close
  end
end
Writes the given data to a new tempfile returning the closed tempfile .
20,949
# Handles an uploaded file object (Rack upload or params hash): records
# content_type and filename, then either reads StringIO data into temp data
# or queues the upload's tempfile path. Returns nil for empty uploads.
def uploaded_data = ( file_data ) if file_data . respond_to? ( :content_type ) return nil if file_data . size == 0 self . content_type = detect_mimetype ( file_data ) self . filename = file_data . original_filename if respond_to? ( :filename ) else return nil if file_data . blank? || file_data [ 'size' ] == 0 self . content_type = file_data [ 'content_type' ] self . filename = file_data [ 'filename' ] file_data = file_data [ 'tempfile' ] end if file_data . is_a? ( StringIO ) file_data . rewind set_temp_data file_data . read else self . temp_paths . unshift file_data . tempfile . path end end
This method handles the uploaded file object . If you set the field name to uploaded_data you don t need any special code in your controller .
20,950
# Validates the size and content_type attributes against the model's
# attachment_options inclusion sets, adding an I18n inclusion error for
# each attribute whose value falls outside its configured enumeration.
def attachment_attributes_valid? [ :size , :content_type ] . each do | attr_name | enum = attachment_options [ attr_name ] errors . add attr_name , I18n . translate ( "activerecord.errors.messages.inclusion" , attr_name => enum ) unless enum . nil? || enum . include? ( send ( attr_name ) ) end end
validates the size and content_type attributes according to the current model s options
20,951
# Streams log messages from @source, routing root command-output messages
# and all other messages into per-sequence buckets, then prints each
# collected sequence. The _result parameter is unused here.
def execute_internal ( _result ) item = LogMessage . new parser = LogParser . new ( @source , @sequence ) sequences = { } while parser . next ( item ) if item . kind? ( ActionCommand :: LOG_KIND_COMMAND_OUTPUT ) && item . root? process_output ( sequences , item ) else process_other ( sequences , item ) end item = LogMessage . new end print_sequences ( sequences ) end
Parses log messages from the source, grouping root command output by sequence, then prints each collected sequence.
20,952
# Loads styles from a YAML style-sheet (resolving bare names against the
# bundled styles directory and appending '.yml'), converting keys to
# symbols and values to {:match, :style} hashes.
# NOTE(review): the extension-check regex appears garbled by extraction
# (/ \. / would require surrounding spaces) — TODO confirm original intent.
def load styles styles += '.yml' unless styles =~ / \. / styles = File . join ( File . dirname ( __FILE__ ) , "styles" , styles ) unless File . exist? styles YAML . load_file ( styles ) . inject ( { } ) do | h , ( key , value ) | value = { :match => value [ 'match' ] , :style => value [ 'style' ] } h . merge key . to_sym => value end end
Loads styles from a YAML style - sheet and converts the keys to symbols
20,953
# Removes every style tag known to #styles from +msg+, returning the
# stripped string.
def unstyle(msg)
  all_tags = styles.map { |_, style| style[:match] }.flatten
  all_tags.reduce(msg) do |stripped, tag|
    stripped.gsub(tag, '')
  end
end
Remove all tags from string
20,954
# Writes +str+ to the class-level output stream and flushes it.
# Always returns nil.
def write(str)
  stream = self.class.stream
  stream.write(str)
  stream.flush
  nil
end
Write to the out stream and flush it
20,955
# Applies one or more styles to +string+: ANSI transforms wrap with their
# escape pair, ANSI colors wrap with color + reset, and any other symbol is
# resolved recursively through the loaded @styles sheet.
def stylize string , styles = [ ] [ styles ] . flatten . inject ( string ) do | str , style | style = style . to_sym if ANSI [ :transforms ] . include? style esc str , * ANSI [ :transforms ] [ style ] elsif ANSI [ :colors ] . include? style esc str , ANSI [ :colors ] [ style ] , ANSI [ :colors ] [ :reset ] else stylize ( str , @styles [ style ] [ :style ] ) end end end
Apply styles to a string
20,956
# Returns true when +child+ is a descendant of this page, walking up the
# child's parent chain; false otherwise, nil when no child is given.
def parent_of?(child)
  return unless child
  if id == child.parent_id
    true
  elsif child.parent.nil?
    false
  else
    parent_of?(child.parent)
  end
end
true if child is children of page
20,957
# Returns true when this page is +current_page+ itself or one of its
# ancestors; false otherwise, nil when no current page is given.
def active?(current_page)
  return unless current_page
  id == current_page.id || parent_of?(current_page)
end
true if page is equal current_page or parent of current_page
20,958
# Returns the SQL for a LOAD DATA INFILE statement, defaulting the :update
# strategy to :ignore or :replace based on the dataset's insert_ignore opt.
def load_infile_sql ( path , columns , options = { } ) replacement = opts [ :insert_ignore ] ? :ignore : :replace options = { :update => replacement } . merge ( options ) LoadDataInfileExpression . new ( path , opts [ :from ] . first , columns , options ) . to_sql ( db ) end
Returns the SQL for a LOAD DATA INFILE statement .
20,959
# Test helper: creates a user in +group+ and drives the Shibbolite login
# endpoint directly with the umbcusername header (bypassing real Shibboleth
# authentication for integration tests). I/O only.
def sign_in_as ( group ) FactoryGirl . create ( :user , umbcusername : 'test_user' , group : group ) page . driver . browser . process_and_follow_redirects ( :get , '/shibbolite/login' , { } , { 'umbcusername' => 'test_user' } ) end
hacked login but the alternative is not having integration tests when using a Shibboleth based auth
20,960
# Establishes a Faraday connection to the URI's scheme+host with multipart
# and url-encoded requests, cookie-jar middleware, response logging, and
# the default adapter.
def connect uri Faraday . new ( :url => "#{uri.scheme}://#{uri.host}" ) do | faraday | faraday . request :multipart faraday . request :url_encoded faraday . use :cookie_jar faraday . response :logger faraday . adapter Faraday . default_adapter end end
Establish Faraday connection
20,961
# Prints a 3-column summary of the user's holdings (asset, total value,
# weekly change), accumulating and appending a grand total row.
def pretty_print_user_list ( list ) total = 0 data = [ ] data . push ( 'Asset' , 'Total Value' , 'Change % (Week)' ) list . user_pairs . each do | user_pair | data . push ( user_pair . symbol ) data . push ( user_pair . valuation . format ) data . push ( user_pair . perchant_change_week . to_s ) total += user_pair . valuation end data . push ( '' , '' , '' ) data . push ( 'TOTAL:' , total . format , '' ) data . push ( '' , '' , '' ) print_arrays ( data , 3 ) end
Prints out a summary of the user s hodlings formatted nicely
20,962
# Formats +data+ into +cols+ evenly spaced columns via the CLI helper and
# prints the result to screen.
def print_arrays ( data , cols ) formatted_list = cli . list ( data , :uneven_columns_across , cols ) cli . say ( formatted_list ) end
Data should be an array of arays cols is the number of columns it has Prints the data to screen with equal spacing between them
20,963
# Returns the combined length of the string representations of the
# elements in +input_array+ (nil contributes 0 via nil.to_s).
def array_char_length(input_array)
  # Enumerable#sum with a block replaces the manual accumulator loop.
  input_array.sum { |element| element.to_s.length }
end
Returns the combined length of charaters in an array
20,964
# Selects matching filters for +options+; when options is a Hash (or nil),
# also merges the target into the criteria and includes filters whose
# stored options match exactly.
def select_filters ( target , options ) found = filters_store . fetch ( options , [ ] ) if Hash === options || options . nil? then options ||= { } options . merge! ( :target => target ) found += filters_store . select do | filter_options , filters | options == filter_options end . map ( & :last ) . flatten end found end
Select matching filters and filters including targets when options is a + Hash +
20,965
# Saves the Parse object asynchronously via Parse::Stack::Async, invoking
# the optional block with true/false for success/failure and logging
# failures; references are cleared in ensure to avoid retaining the object.
def save_eventually block = block_given? ? Proc . new : nil _self = self Parse :: Stack :: Async . run do begin result = true _self . save! rescue => e result = false puts "[SaveEventually] Failed for object #{_self.parse_class}##{_self.id}: #{e}" ensure block . call ( result ) if block block = nil _self = nil end end end
Adds support for saving a Parse object in the background .
20,966
# Destroys the Parse object asynchronously via Parse::Stack::Async,
# mirroring save_eventually: the optional block receives true/false and
# failures are logged; references are cleared in ensure.
def destroy_eventually block = block_given? ? Proc . new : nil _self = self Parse :: Stack :: Async . run do begin result = true _self . destroy rescue => e result = false puts "[DestroyEventually] Failed for object #{_self.parse_class}##{_self.id}: #{e}" ensure block . call ( result ) if block block = nil _self = nil end end end
Like save_eventually: adds support for deleting a Parse object in the background.
20,967
# Exports a table described by +export_obj+ under an ExportMetadata record:
# new/mysql targets take the initial (mysqldump) path, incremental/vertica
# targets take the outfile path, each with its own failure-state and
# success hooks.
def export_table export_obj @export_obj = export_obj ExportMetadata . record ( :table => @export_obj . table_name , :database => @export_obj . source_schema , :export_to => load_to , :export_id => @export_obj . id , :filepath => filepath , :store_in => @export_obj . s3_path , :incremental_col => @export_obj . incremental_column ) do | metadata | prepare metadata if ( @export_obj . export_type? == :new && load_to == "mysql" ) || load_to == "mysql" on_failure_state_trans ( metadata , "new" ) on_export_success ( metadata ) initial_export metadata elsif @export_obj . export_type? == :incremental || load_to == "vertica" on_failure_state_trans ( metadata , "failed" ) on_export_success ( metadata ) incremental_export_into_outfile metadata end end end
Gets an Export object and dumps the data — initially using mysqldump, incrementally using mysql -e afterwards.
20,968
# Performs the one-time full export of the table via mysqldump.
# The dump includes create options, so the target table does not need to
# be created manually. For incremental tables, the incremental high-water
# mark is captured *before* the dump and persisted only after the export
# command succeeds.
def initial_export(metadata)
  metadata.export_type = "initial"
  high_water_mark = @export_obj.max_value if @export_obj.incremental_export?
  dump_cmd = initial_mysqldump_cmd
  exporting_state_trans
  puts "Exporting..."
  check_result(execute_export(dump_cmd, metadata), 0)
  @export_obj.update_max_val(high_water_mark) if @export_obj.incremental_export?
end
Exports the table using mysqldump. This method is invoked only once. Dumps with create options, so there is no need to create the table manually.
20,969
# Exports the table incrementally into an outfile (used for incremental
# exports and all Vertica-bound "all" exports). Skips entirely when an
# export for this table is already running. If the source schema has
# changed, falls back to a full "initial" export; otherwise the incremental
# column/type/boundary values are passed to SqlCommands.mysql_export_outfile.
# NOTE(review): this method always returns false, even after a successful
# export — looks unintended; confirm callers don't rely on the return value.
# NOTE(review): max_value is only assigned in the "incremental" branch, but
# the trailing metadata.incremental_val assignment is guarded by the same
# condition, so it cannot hit an unassigned local.
def incremental_export_into_outfile metadata unless @export_obj . is_running? if @export_obj . export_type == "incremental" max_value = @export_obj . max_value metadata . export_type = "incremental" @export_obj . update_max_val if @export_obj . max_incremental_value . blank? end if ( @export_obj . export_type == "all" && @export_obj . export_to == "vertica" ) metadata . export_type = "incremental" end options = { :db => @export_obj . source_schema , :source_schema => @export_obj . source_schema , :table => @export_obj . table_name , :filepath => filepath , :destination_schema => @export_obj . destination_schema , :enclosed_by => Myreplicator . configs [ @export_obj . source_schema ] [ "enclosed_by" ] , :export_id => @export_obj . id } schema_status = Myreplicator :: MysqlExporter . schema_changed? ( options ) Kernel . p "===== schema_status =====" Kernel . p schema_status if schema_status [ :changed ] metadata . export_type = "initial" else options [ :incremental_col ] = @export_obj . incremental_column options [ :incremental_col_type ] = @export_obj . incremental_column_type options [ :export_type ] = @export_obj . export_type options [ :incremental_val ] = [ @export_obj . destination_max_incremental_value , @export_obj . max_incremental_value ] . min end cmd = SqlCommands . mysql_export_outfile ( options ) exporting_state_trans puts "Exporting..." result = execute_export ( cmd , metadata ) check_result ( result , 0 ) if @export_obj . export_type == "incremental" metadata . incremental_val = max_value @export_obj . update_max_val ( max_value ) end end return false end
Exports the table incrementally, similar to the incremental_export method. Dumps the file into the tmp directory specified in myreplicator.yml. Note that the directory needs 777 permissions for MySQL to be able to write the file. Uses \0 as the field delimiter and newline as the line terminator.
20,970
# Validates the output of a remote SSH command: any non-empty output is
# treated as an error and raised as Exceptions::ExportError.
# +size+ is part of the public interface but currently unused.
def check_result(result, size)
  return if result.nil?
  raise Exceptions::ExportError.new("Export Error\n#{result}") if result.length > 0
end
Checks the result returned from the SSH command. Size specifies whether there should be any returned results or not.
20,971
# Gzips the dumped file on the source DB server over SSH.
# Raises Exceptions::ExportError if gzip produces any output; on success
# marks the metadata as zipped and returns the (empty/nil) command output.
def zipfile(metadata)
  remote_dir = Myreplicator.configs[@export_obj.source_schema]["ssh_tmp_dir"]
  cmd = "cd #{remote_dir}; gzip #{@export_obj.filename}"
  puts cmd
  output = metadata.ssh.exec!(cmd)
  unless output.nil?
    raise Exceptions::ExportError.new("Export Error\n#{output}") if output.length > 0
  end
  metadata.zipped = true
  output
end
zips the file on the source DB server
20,972
# Loads the named time ranges from @config['time'] into @time_ranges.
# Each boundary string is converted to a Range via rangify_time_boundaries,
# keyed by symbol in an indifferent-access hash.
def load_time_ranges
  @time_ranges = ActiveSupport::HashWithIndifferentAccess.new
  @config['time'].each do |range_name, boundaries|
    parsed = ActiveSupport::HashWithIndifferentAccess.new
    boundaries.each do |key, spec|
      parsed[key.to_sym] = rangify_time_boundaries(spec)
    end
    @time_ranges[range_name.to_sym] = parsed
  end
end
Load ranges from config file
20,973
# Converts a boundary spec string such as "2.days..3.weeks" into a Range
# (2.days..3.weeks). Each side is evaluated by sending the dotted method
# chain to the leading integer.
def rangify_time_boundaries(src)
  lower, upper = src.split("..").map do |expr|
    expr.split(".").inject { |receiver, message| receiver.to_i.send(message) }
  end
  lower..upper
end
Takes a string like "2.days..3.weeks" and converts it to a Range object (2.days..3.weeks).
20,974
# Resolves the Class for a JSON:API type string.
# When +singular+ is false (default) the type is singularized first, then
# classified and constantized.
def klass_for_type(type, singular = false)
  name = singular ? type : type.singularize
  name.classify.constantize
end
Resolves the Class for a type .
20,975
# Builds a whitelisted params hash for the resource:
# - attributes filtered through permitted_attributes,
# - relationships filtered through permitted_relationships, flattened to
#   "<name>_id" (to-one) or "<singular name>_ids" (to-many) keys via
#   extract_id / extract_ids.
# Relationship entries are consumed from a clone of the payload, so the
# incoming params hash itself is not emptied.
def resource_params attributes = find_in_params ( :attributes ) . try ( :permit , permitted_attributes ) || { } relationships = { } relationships_in_payload = find_in_params ( :relationships ) if relationships_in_payload raw_relationships = relationships_in_payload . clone raw_relationships . each_key do | key | data = raw_relationships . delete ( key ) [ :data ] if permitted_relationships . include? ( key . to_sym ) && data if data . kind_of? ( Array ) relationships [ "#{key.singularize}_ids" ] = extract_ids data else relationships [ "#{key}_id" ] = extract_id data end end end end attributes . merge relationships end
Builds a whitelisted resource_params hash from the permitted_attributes and permitted_relationships arrays. Will automatically attempt to resolve string IDs to numerical IDs in case the model's slug was passed to the controller as the ID.
20,976
# Processes a single header line: appends it to the buffer, then dispatches
# on the *first* buffered line. The return value indicates whether further
# data is required to complete the header.
def process_line(line)
  @lines << line
  case @lines.first
  when 'OK'         then process_success
  when 'ERROR'      then process_error
  when 'END'        then process_end
  when 'SYNC ERROR' then process_sync_error
  else
    process_unrecognized
  end
end
Processes a single line of header information . The return value indicates whether further data is required in order to complete the header .
20,977
# Builds the TRANSACTIONQUERY request XML in place on @request_xml:
# validates required references, strips Operation elements unused by
# queries, and fills SiteReference / TransactionReference / OrderReference
# on the Operation plus Certificate on the document root.
# Raises AttributeMissing when neither transaction nor order reference is
# set, or when no site reference is available (attribute or existing XML).
def create_request raise AttributeMissing . new "(2500) TransactionReference or OrderReference need to be present." if ( transaction_reference . nil? && order_reference . nil? ) raise AttributeMissing . new "(2500) SiteReference must be present." if ( site_reference . nil? && ( REXML :: XPath . first ( @request_xml , "//SiteReference" ) . text . blank? rescue true ) ) REXML :: XPath . first ( @request_xml , "//Request" ) . attributes [ "Type" ] = "TRANSACTIONQUERY" ops = REXML :: XPath . first ( @request_xml , "//Operation" ) [ "TermUrl" , "MerchantName" , "Currency" , "SettlementDay" ] . each { | e | ops . delete_element e } ( ops . elements [ "SiteReference" ] || ops . add_element ( "SiteReference" ) ) . text = self . site_reference if self . site_reference ( ops . elements [ "TransactionReference" ] || ops . add_element ( "TransactionReference" ) ) . text = self . transaction_reference if self . transaction_reference order = REXML :: XPath . first ( @request_xml , "//Operation" ) ( order . elements [ "OrderReference" ] || order . add_element ( "OrderReference" ) ) . text = self . order_reference if self . order_reference root = @request_xml . root ( root . elements [ "Certificate" ] || root . add_element ( "Certificate" ) ) . text = self . site_alias if self . site_alias end
Write the xml document needed for processing fill in elements need and delete unused ones from the root_xml raises an error if any necessary elements are missing
20,978
# Starts or stops the timer depending on its current state (PUT to the
# toggle_timer link), then re-initializes this object from the response.
# Returns a Cashboard::Struct built from the stopped timer data, or nil.
# NOTE(review): `stopped_timer` below is a *local variable* that shadows the
# attribute of the same name — the raw attribute value is never returned,
# only the wrapped struct (or nil when no timer was stopped).
def toggle_timer options = self . class . merge_options ( ) options . merge! ( { :body => self . to_xml } ) response = self . class . put ( self . links [ :toggle_timer ] , options ) self . class . check_status_code ( response ) initialize ( response . parsed_response ) if self . stopped_timer stopped_timer = Cashboard :: Struct . new ( self . stopped_timer ) end stopped_timer || nil end
Starts or stops the timer depending on its current state.
20,979
# Dumps this node's state to +sink+: its name, the :input attribute, and
# the nested context (in that order).
def dump(sink)
  sink.name(self)
  sink.attribute(:input, input)
  sink.nest(:context, context)
end
Dump state to output
20,980
# Retrieves an individual story of a given project.
# Returns the response body, or nil when the payload cannot be decoded —
# the rescue is deliberately best-effort and swallows decode errors.
def project_story(project_id, story_id, options = {})
  body = nil
  begin
    response = connection.get do |req|
      req.url "/api/v1/projects/#{project_id}/story/#{story_id}", options
    end
    body = response.body
  rescue MultiJson::DecodeError
    # malformed JSON: fall through and return nil
  end
  body
end
Retrieve information for an individual story of a given project .
20,981
# For each attribute name, defines three cache helper methods:
#   invalidate_<attr>     — invalidates the cached value (ignores its arg)
#   only_<attr>_changed?  — true when only this attribute changed
#   cache_<attr>          — refreshes the cached value
def database_cached_attribute(*attrs)
  attrs.each do |name|
    key = name.to_sym
    define_method("invalidate_#{name}") { |_arg = nil| invalidate_cache key }
    define_method("only_#{name}_changed?") { only_change? key }
    define_method("cache_#{name}") { update_cache key }
  end
end
Sets up cache invalidation callbacks for the provided attributes
20,982
def to_s ( pattern = '%l, %f' ) if pattern . is_a? ( Symbol ) return conversational if pattern == :conversational return sortable if pattern == :sortable pattern = PREDEFINED_PATTERNS [ pattern ] end PATTERN_MAP . inject ( pattern ) do | name , mapping | name = name . gsub ( mapping . first , ( send ( mapping . last ) || '' ) ) end end
Creates a name based on pattern provided . Defaults to last first .
20,983
# Returns the name in a sortable "Last, Prefix First Middle Suffix" format,
# omitting any blank components on either side of the comma.
def sortable
  remainder = [prefix, first, middle, suffix].reject(&:blank?).join(' ')
  [last, remainder].reject(&:blank?).join(', ')
end
Returns the name in a sortable format .
20,984
# Runs "vcl.<op>" on the admin interface with the given params.
# For :list, parses each response line into [name, number, config].
# NOTE(review): the regex literal / \s / (with embedded spaces) looks like
# extraction damage — plain /\s/ was probably intended. Preserved as-is.
def vcl(op, *params)
  response = cmd("vcl.#{op}", *params)
  return response unless op == :list
  response.split("\n").map do |line|
    fields = line.split(/ \s /, 3)
    [fields[0], fields[1].to_i, fields[2]]
  end
end
Manipulate the VCL configuration
20,985
# Purges objects from the cache or shows the purge queue.
# A leading :url, :hash or :list argument selects the sub-command; the
# purge queue is parsed into [id, expression] pairs, every other command
# returns a boolean via #bool.
def purge(*args)
  command = 'purge'
  command << ".#{args.shift}" if [:url, :hash, :list].include?(args.first)
  response = cmd(command, *args)
  if command == 'purge.list'
    response.split("\n").map do |line|
      fields = line.split("\t")
      [fields[0].to_i, fields[1]]
    end
  else
    bool response
  end
end
Purge objects from the cache or show the purge queue .
20,986
# Returns a hash of status counters: { metric_name => Integer value }.
#
# Fix: the original used `line.strip!`, which returns nil when the line has
# no surrounding whitespace, crashing the subsequent split with
# NoMethodError. `strip` always returns the string.
# NOTE(review): the source regex read / \s / (spaces inside the literal),
# which cannot match a stripped stats line; normalized to /\s/.
def stats
  raw = cmd("stats")
  raw.split("\n").map { |line|
    value, name = line.strip.split(/\s/, 2)
    [name, value.to_i]
  }.to_h
end
Returns a hash of status information
20,987
# Returns the index of the last column in the table: one less than the
# width of the widest row (-1 when the table is empty).
def last_column
  widest = @data.reduce(0) { |best, row| row.length > best ? row.length : best }
  widest - 1
end
Gets the last column in the table .
20,988
# Builds data starting at the specified column: within the block, the
# column origin/offset jump to +start_column+ and the row offset resets to
# zero; the previous offsets are restored afterwards. Without a block the
# method is a no-op. Returns the (current) column origin.
# Raises when called inside a row block or without a numeric start column.
def build_column(start_column = nil)
  if block_given?
    raise StandardError.new('build_column block called within row block') if @in_row
    raise StandardError.new('build_column called without valid argument') unless start_column.is_a?(Numeric)
    saved = [@col_start, @col_offset, @row_offset]
    @col_start = start_column.to_i
    @col_offset = @col_start
    @row_offset = 0
    yield
    @col_start, @col_offset, @row_offset = saved
  end
  @col_start
end
Builds data starting at the specified column .
20,989
# Builds a row in the table: merges row options into the base cell options,
# backfills any gap cells, skips :span_placeholder cells left by earlier
# row spans, evaluates the block (the row's cells) inside the row, then
# advances the row offset and clears the per-row cell options.
# Raises when nested inside another row block.
def row ( options = { } , & block ) raise StandardError . new ( 'row called within row block' ) if @in_row @in_row = true @col_offset = @col_start options = change_row ( options || { } ) @row_cell_options = @base_cell_options . merge ( options ) fill_cells ( @row_offset , @col_offset ) if @data [ @row_offset ] while @data [ @row_offset ] [ @col_offset ] == :span_placeholder @col_offset += 1 end end yield if block_given? @in_row = false @row_offset += 1 @row_cell_options = nil end
Builds a row in the table .
20,990
# Builds a subtable within the current row: renders a nested
# PdfTableBuilder (driven by +block+) into a single cell.
# Raises when called outside of a row block.
def subtable(cell_options = {}, options = {}, &block)
  raise StandardError.new('subtable called outside of row block') unless @in_row
  cell(cell_options || {}) do
    PdfTableBuilder.new(@doc, options || {}, &block).to_table
  end
end
Builds a subtable within the current row .
20,991
# Creates multiple cells in the current row. Values come from the block or
# options[:values]; options matching cell_regex are routed to the matching
# cell (1-based index from the capture group), :column goes to the first
# cell, and all remaining options apply to every cell.
# NOTE(review): `cell_regex = / /` looks extraction-garbled — m[1] is used
# as a 1-based cell index, so the original was presumably something like
# /\Acell_(\d+)_/. Confirm against upstream before relying on per-cell
# option parsing.
def cells ( options = { } , & block ) cell_regex = / / options ||= { } result = block_given? ? yield : ( options [ :values ] || [ '' ] ) cell_options = result . map { { } } common_options = { } options . each do | k , v | if ( m = cell_regex . match ( k . to_s ) ) k = k . to_s [ m [ 0 ] . length .. - 1 ] . to_sym cell_options [ m [ 1 ] . to_i - 1 ] [ k ] = v elsif k == :column cell_options [ 0 ] [ k ] = v elsif k != :values common_options [ k ] = v end end cell_options . each_with_index do | opt , idx | cell common_options . merge ( opt ) . merge ( { value : result [ idx ] } ) end end
Creates multiple cells .
20,992
# Generates a single cell in the current row. The cell content comes from
# the block, options[:value], or ''. Remaining options (merged over the
# row's cell options and adjusted per-column) are passed to set_cell.
# Raises when called outside of a row block.
def cell(options = {}, &block)
  raise StandardError.new('cell called outside of row block') unless @in_row
  merged = @row_cell_options.merge(options || {})
  merged = change_col(merged)
  content = block_given? ? yield : (merged[:value] || '')
  merged.except!(:value)
  set_cell(content, nil, nil, merged)
end
Generates a cell in the current row .
20,993
# Ensures that all rows (2nd-level arrays) are the same size: after
# backfilling gap cells, every short row is padded with empty cells built
# from the base cell options up to the width of the widest row.
def fix_row_widths
  fill_cells(@row_offset - 1, 0)
  widest = @data.map(&:length).max || 0
  @data.each_with_index do |row, idx|
    next unless row.length < widest
    padding = [@base_cell_options.merge(content: '')] * (widest - row.length)
    @data[idx] = row + padding
  end
end
ensure that all 2nd level arrays are the same size .
20,994
# Returns the [row, column] positions of all empty cells, yielding each
# position to the block (when given) as it is found.
def empty_positions(&block)
  [].tap do |found|
    each_position do |row, column|
      next if get_cell(row, column)
      yield(row, column) if block_given?
      found << [row, column]
    end
  end
end
Returns the corners of the empty cells
20,995
# Checks the three winning patterns in order (across, up-and-down,
# diagonal) and returns the first truthy winner value; false when the
# board has no winning pattern.
def solved?
  [:won_across?, :won_up_and_down?, :won_diagonally?].each do |check|
    winner = send(check)
    return winner if winner
  end
  false
end
Returns the winning letter if the board has a winning pattern, false otherwise.
20,996
# Reads a value for each known default key from aSource into self.
# aBlock receives (key, default_value, source_value) and may supply a
# filtered value; a non-nil block result wins, otherwise the value is
# copied from aSource when present and non-nil. Returns self.
def read ( aSource , & aBlock ) default_values . each do | k , v | done = false if block_given? && ( ( newv = yield ( k , v , aSource && aSource [ k ] ) ) != nil ) self [ k ] = newv done = true end copy_item ( aSource , k ) if ! done && aSource && ! aSource [ k ] . nil? end self end
aBlock allows values to be filtered based on the key, the default value, and the new value.
20,997
# Resets all values back to defaults: clears the container, then restores
# each default (Class-valued defaults reset to nil).
def reset
  self.clear
  @default_values.each do |key, default|
    self[key] = default.is_a?(Class) ? nil : default
  end
end
reset values back to defaults
20,998
# Ensures that the directory containing full_filename exists, creating all
# intermediate directories when it does not.
def ensure_directory
  dir = File.dirname(full_filename)
  FileUtils.mkdir_p(dir) unless File.directory?(dir)
end
Ensure the directory for the file exists
20,999
# Returns the actual file name for the resource's URL path: directory-style
# paths (trailing slash or no extension in the last segment) resolve to an
# index.html inside them; otherwise the path itself is the file name.
def filename
  path = resource.url.uri.path
  if path.end_with?('/')
    path + 'index.html'
  elsif path.split('/').last.include?('.')
    path
  else
    path + '/index.html'
  end
end
The actual name of the file