idx int64 0 24.9k | question stringlengths 68 4.14k | target stringlengths 9 749 |
|---|---|---|
22,800 | def authorize_action YamledAcl . init ( current_user_group_name , params [ :controller ] ) allowed_to? ( params [ :action ] ) or raise ( YamledAcl :: AccessDenied ) end | This method should be be called by + before_filter + . |
22,801 | def load config_file = find_file @filenames config = Utils . load_auto config_file config . empty? ? @default_config : @default_config . merge ( config ) end | Find and load config file . |
22,802 | def load_json json_config_file = Utils . lookup_file @filenames [ :json ] config = Utils . load_json json_config_file config . empty? ? @default_config : @default_config . merge ( config ) end | Find and load JSON config file . |
22,803 | def load_yaml yaml_config_files = @filenames [ :yaml ] yaml_config_file = nil yaml_config_files . each do | file | yaml_config_file = Utils . lookup_file file unless yaml_config_file . nil? config = Utils . load_yaml ( yaml_config_file ) return config . empty? ? @default_config : @default_config . merge ( config ) end end @default_config end | Find and load YAML config file . |
22,804 | def new_content ( conflict_start = '<<<' , conflict_switch = '>>>' , conflict_end = '===' ) flat_map do | change | if change . conflict? version_one = change . diff_one . new_content ( conflict_start , conflict_switch , conflict_end ) version_two = change . diff_two . new_content ( conflict_start , conflict_switch , conflict_end ) [ conflict_start ] + version_one + [ conflict_switch ] + version_two + [ conflict_end ] elsif change . deleting? [ ] else [ change . new_element ] end end end | Reconstruct the new sequence from the diff |
22,805 | def populate ( content , ** options ) generator . create_file ( RubyEdit :: SOURCE_FILE_LOCATION , content , force : true , verbose : false , ** options ) end | Populates the sourcefile with the given content |
22,806 | def compute_n ( ic ) temp_ic = ImageCoordinate . cloneCoord ( ic ) n = - 1 x_off = [ - 1 , 0 , 1 ] y_off = [ - 1 , 0 , 1 ] x_off . each do | x | y_off . each do | y | temp_ic [ :x ] = ic [ :x ] + x temp_ic [ :y ] = ic [ :y ] + y if @im . inBounds ( temp_ic ) and @im [ temp_ic ] == @im [ ic ] then n += 1 end end end temp_ic . recycle n end | implementation of skeletonization algorithm presented in Gonzalez & Woods p 651 - 652 |
22,807 | def where ( hash ) res = hash . inject ( squares ) do | memo , ( attribute , value ) | memo . select { | square | square . attribute_match? ( attribute , value ) } end self . class . new ( squares : res ) end | Filter the squares with a hash of attribute and matching values . |
22,808 | def find_by_x_and_y ( x , y ) select { | square | square . x == x && square . y == y } . first end | Find the square with the matching x and y co - ordinates |
22,809 | def in_range ( origin , distance ) select { | square | Vector . new ( origin , square ) . magnitude <= distance } end | Find all squares within distance of square |
22,810 | def at_range ( origin , distance ) select { | square | Vector . new ( origin , square ) . magnitude == distance } end | Find all squares at distance of square |
22,811 | def unblocked ( origin , square_set ) select { | destination | square_set . between ( origin , destination ) . all? ( & :unoccupied? ) } end | Returns destination from the origin that have a clear path |
22,812 | def between ( origin , destination ) vector = Vector . new ( origin , destination ) if vector . diagonal? || vector . orthogonal? point_counter = origin . point direction = vector . direction _squares = [ ] while point_counter != destination . point point_counter = point_counter + direction square = find_by_x_and_y ( point_counter . x , point_counter . y ) if square && square . point != destination . point _squares . push ( square ) end end else _squares = [ ] end self . class . new ( squares : _squares ) end | Returns squares between a and b . Only squares that are in the same diagonal will return squares . |
22,813 | def version logger . debug "Parsing version from output using #{description}" matches = output . match PARSE_VERSION_FROM_OUTPUT raise InvalidFormat . new output if matches . nil? matches . captures [ 0 ] end | Determines the version of Vagrant based on the output |
22,814 | def full_changelog ( first_parent : false , range : nil , branch : nil , from_ancestor_with : nil ) changelog_list = changelog first_parent : first_parent , range : range , branch : branch , from_ancestor_with : from_ancestor_with if ! range . nil? header = full_changelog_header_range range elsif ! branch . nil? header = full_changelog_header_branch branch , from_ancestor_with else header = full_changelog_header_no_range end [ header , changelog_list ] . join "\n" end | Get the full changelog including metadata . |
22,815 | def log_messages ( first_parent = false , range = nil , branch = nil , from_ancestor_with = nil ) walker = Rugged :: Walker . new @repo if ! range . nil? begin walker . push_range range rescue Rugged :: ReferenceError raise Valr :: NotValidRangeError . new range end elsif ! branch . nil? b = @repo . references [ "refs/heads/#{branch}" ] raise Valr :: NotValidBranchError . new branch if b . nil? if ! from_ancestor_with . nil? a = @repo . references [ "refs/heads/#{from_ancestor_with}" ] raise Valr :: NotValidBranchError . new from_ancestor_with if a . nil? base = @repo . merge_base b . target_id , a . target_id walker . push_range "#{base}..#{b.target_id}" else walker . push b . target_id end else walker . push @repo . head . target_id end walker . simplify_first_parent if first_parent message_list = walker . inject ( [ ] ) { | messages , c | messages << c . message } walker . reset message_list end | Get log messages for a repository |
22,816 | def full_changelog_header_range ( range ) from , to = range . split '..' from_commit , to_commit = [ from , to ] . map { | ref | rev_parse ref } Koios :: Doc . write { [ pre ( [ "from: #{from} <#{from_commit.oid}>" , "to: #{to} <#{to_commit.oid}>" ] ) ] } end | Get the header when a range is defined |
22,817 | def full_changelog_header_branch ( branch , ancestor ) h = [ "branch: #{branch} <#{@repo.references["refs/heads/#{branch}"].target_id}>" ] h << "from ancestor with: #{ancestor} <#{@repo.references["refs/heads/#{ancestor}"].target_id}>" unless ancestor . nil? Koios :: Doc . write { [ pre ( h ) ] } end | Get the header when a branch is defined |
22,818 | def match_node ( node , expected , label ) if ! check_type ( node , expected [ 'type' ] , label , expected [ 'ok_empty' ] ) @errors . push ( 'Error: node ' + label + ' is not of an accepted type. Should be one of ' + expected [ 'accepts' ] . join ( ', ' ) ) return false end if ( node . kind_of? ( Hash ) || node . kind_of? ( Array ) ) if node . empty? && ! expected [ 'ok_empty' ] @errors . push ( 'Error: node ' + label + ' cannot be empty' ) return false elsif ! node . empty? && expected . has_key? ( 'accepts' ) valid_content = false if node . kind_of? ( Hash ) matched = [ ] unmatched = [ ] node . each_pair { | key , value | expected [ 'accepts' ] . each { | accepts | result = check_type ( value , accepts , key ) if result matched . push ( key ) if ! unmatched . find_index ( key ) . nil? unmatched . slice ( unmatched . find_index ( key ) ) end break else unmatched . push ( key ) end } } if ( matched . count == node . count ) valid_content = true else unmatched . each { | node | @errors . push ( 'Error: node ' + node + ' is not of an accepted type. Should be one of ' + expected [ 'accepts' ] . join ( ', ' ) ) } end elsif node . kind_of? ( Array ) matched = [ ] unmatched = [ ] node . each_index { | n | expected [ 'accepts' ] . each { | accepts | key = label + '[' + n . to_s + ']' result = check_type ( node [ n ] , accepts , key ) if result matched . push ( key ) if ! unmatched . find_index ( key ) . nil? unmatched . slice ( unmatched . find_index ( key ) ) end break else unmatched . push ( key ) end } } if ( matched . count == node . count ) valid_content = true else unmatched . each { | node | @errors . push ( 'Error: node ' + node + ' is not of an accepted type. Should be one of ' + expected [ 'accepts' ] . join ( ', ' ) ) } end end if ! valid_content @errors . push ( 'Error: node ' + label + ' contains an unaccepted type.' ) return false end end end return true end | Checks file node matches the schema . |
22,819 | def check_type ( node , expected_type , label , accept_nil = false ) valid_type = true ; if ( @types . has_key? ( expected_type ) ) valid_type = match_node ( node , @types [ expected_type ] , label ) elsif node . class . to_s != expected_type && ! ( node . kind_of? ( NilClass ) && ( expected_type == 'empty' || accept_nil ) ) valid_type = false end return valid_type end | Checks that the node is of the correct type |
22,820 | def cleanup ( sprocket_assets = [ :javascripts , :stylesheets ] ) FileUtils . rm_r build_path if File . exists? ( build_path ) Compass :: Exec :: SubCommandUI . new ( [ "clean" , project_root ] ) . run! sprocket_assets . each do | asset | FileUtils . mkdir_p File . join ( build_path , asset . to_s ) end if mustaches_config_file_exists? mustaches_yaml . each_key do | dir | FileUtils . mkdir_p File . join ( build_path , dir . to_s ) end end end | Public function for running cleanup of previous build |
22,821 | def sprockets_build ( sprocket_assets = [ :javascripts , :stylesheets ] ) sprocket_assets . each do | asset_type | load_path = File . join ( @project_root , asset_type . to_s ) next unless File . exists? ( load_path ) sprockets_env . append_path load_path Dir . new ( load_path ) . each do | filename | file = File . join ( load_path , filename ) if File . file? ( file ) asset = sprockets_env [ filename ] attributes = sprockets_env . find_asset ( asset . pathname ) build_file = File . join ( build_path , asset_type . to_s , attributes . logical_path ) File . open ( build_file , 'w' ) do | f | extension = attributes . logical_path . split ( "." ) . last f . write ( minify ( asset , extension ) ) end end end end end | Public function for building sprockets assets and minifying |
22,822 | def mustache_template_build ( dir , template_file , logic_file ) logic_class_name = underscore_to_camelcase ( logic_file ) output_file = logic_file logic_file = camelcase_to_underscore ( logic_file ) require File . join ( project_root , camelcase_to_underscore ( dir ) , logic_file ) FileUtils . mkdir_p File . join ( build_path , dir . to_s ) mustache = Kernel . const_get ( logic_class_name ) . new mustache . template_file = File . join ( project_root , camelcase_to_underscore ( dir ) , template_file ) + ".html.mustache" build_file = File . join ( build_path , dir , "#{output_file}.html" ) File . open ( build_file , 'w' ) do | f | f . write ( mustache . render ) end end | Render html from a mustache template |
22,823 | def underscore_to_camelcase ( underscore_string ) underscore_string = underscore_string . gsub ( / / , ' ' ) . split ( ' ' ) . each { | word | word . capitalize! } . join ( "" ) unless underscore_string . match ( / / ) . nil? underscore_string = underscore_string if underscore_string . match ( / / ) . nil? return underscore_string end | Conver underscore to camelcase |
22,824 | def sprockets_env @sprockets_env ||= Sprockets :: Environment . new ( project_root ) { | env | env . logger = Logger . new ( STDOUT ) } end | Initialize sprockets environment |
22,825 | def minify ( asset , format ) asset = asset . to_s return Uglifier . compile ( asset ) if format . eql? ( "js" ) return YUI :: CssCompressor . new . compress ( asset ) if format . eql? ( "css" ) return asset end | Minify assets in format |
22,826 | def launch_all @servers . each { | s | begin object_behavior ( s , :start ) rescue Exception => e raise e unless e . message =~ / / end } end | Launch all servers in the deployment . |
22,827 | def check_monitoring @servers . each do | server | server . settings response = nil count = 0 until response || count > 20 do begin response = server . monitoring rescue response = nil count += 1 sleep 10 end end raise "Fatal: Failed to verify that monitoring is operational" unless response sleep 60 monitor = server . get_sketchy_data ( { 'start' => - 60 , 'end' => - 20 , 'plugin_name' => "cpu-0" , 'plugin_type' => "cpu-idle" } ) idle_values = monitor [ 'data' ] [ 'value' ] raise "No cpu idle data" unless idle_values . length > 0 raise "No idle time" unless idle_values [ 0 ] > 0 puts "Monitoring is OK for #{server.nickname}" end end | Checks that monitoring is enabled on all servers in the deployment . Will raise an error if monitoring is not enabled . |
22,828 | def add_attributes ( hash ) hash . each_pair do | k , v | k = k . to_sym if k == :id or ! self . respond_to? ( k ) @cached_attrs [ k ] = v meta = class << self ; self ; end meta . send ( :define_method , k ) { @cached_attrs [ k ] } meta . send ( :define_method , "#{k}=" ) do | new_value | @cached_attrs [ k ] = new_value . is_a? ( RedisRecord :: Model ) ? new_value . id : new_value @stored_attrs . delete ( k ) end end end hash end | Add attributes to the instance cache and define the accessor methods |
22,829 | def add_foreign_keys_as_attributes @@reflections [ self . class . name . to_sym ] [ :belongs_to ] . each do | klass | add_attribute klass . to_s . foreign_key . to_sym end end | Add the foreign key for the belongs_to relationships |
22,830 | def image_tag ( image , version : current_version_for_image ( image ) ) org = settings . org project = settings . project tag = "#{org}/#{project}-#{env}-#{image}" if ! version . nil? version = version . to_i tag += if version . negative? current_version = current_version_for_image ( image ) ":#{current_version.to_i + version}" else ":#{version}" end end tag end | Generate the full tag for the given image concatenating the org project env image name and version . |
22,831 | def config_path path = File . expand_path ".dctl.yml" , Dir . pwd unless File . exist? path error = "Could not find config file at #{path}" puts Rainbow ( error ) . red exit 1 end path end | Returns the path to the . dctl . yml file for the current project |
22,832 | def define_custom_commands ( klass ) Array ( settings . custom_commands ) . each do | command , args | klass . send ( :desc , command , "[Custom Command] #{command}" ) concatenated = Array ( args ) . join ( " && " ) . strip klass . send ( :define_method , command , -> do stream_output ( concatenated , exec : true ) end ) end end | If there are user defined commands in . dctl . yml dynamically add them to the passed thor CLI so they may be executed . |
22,833 | def check_settings! required_keys = %w( org project ) required_keys . each do | key | unless Settings . send key error = "Config is missing required key '#{key}'. Please add it " "to #{config_path} and try again." error += "\n\nFor more info, see https://github.com/jutonz/dctl_rb#required-keys" puts Rainbow ( error ) . red exit 1 end end end | Ensure the current project s . dctl . yml contains all the requisite keys . |
22,834 | def select_options ( pairs , current = nil , prompt = nil ) pairs . unshift ( [ prompt , '' ] ) if prompt pairs . map { | label , value | tag ( :option , label , :value => value , :selected => ( current == value ) ) } . join ( "\n" ) end | Accepts a list of pairs and produces option tags . |
22,835 | def errors_on ( object , options = { :class => 'errors' } , & block ) return if object . errors . empty? lines = if object . errors . respond_to? ( :full_messages ) object . errors . full_messages else HamlErrorPresenter . new ( object . errors ) . present ( self , & block ) end haml_tag ( :div , options ) do haml_tag ( :ul ) do lines . each do | error | haml_tag ( :li , error ) end end end end | Presents errors on your form . Takes the explicit approach and assumes that for every form you have the copy for the errors are important instead of producing canned responses . |
22,836 | def percentage ( number , precision = 2 ) return if number . to_s . empty? ret = "%02.#{ precision }f%" % number ret . gsub ( / \. / , '%' ) end | Show the percentage representation of a numeric value . |
22,837 | def google_chart_url max = 120 response = get_all return unless response historical_data = [ ] response . each_with_index do | row , index | break if index >= max next if row . match ( / / ) row = row . split ( / / ) historical_data << row [ 5 ] end return if historical_data . blank? historical_data = historical_data . join ( ',' ) [ self . class . google_chart_base ( @buoy_id ) , '&chd=t:' , historical_data ] . join end | Get a graph of the historical data for the given buoy . |
22,838 | def send_email subject = 'Thanks for contacting us - Octo.ai' opts = { text : 'Hey we will get in touch with you shortly. Thanks :)' , name : self . firstname + ' ' + self . lastname } Octo :: Email . send ( self . email , subject , opts ) Octo . get_config ( :email_to ) . each { | x | opts1 = { text : self . email + ' \n\r ' + self . typeofrequest + '\n\r' + self . message , name : x . fetch ( 'name' ) } Octo :: Email . send ( x . fetch ( 'email' ) , subject , opts1 ) } end | Send Email after model save |
22,839 | def create ( server_name , flavor_name , image_name , files = { } ) request = { "server" => { "name" => server_name , "imageRef" => image_name_to_id ( image_name ) , "flavorRef" => flavor_name_to_id ( flavor_name ) , } } files . each do | path , blob | request [ 'server' ] [ 'personality' ] ||= [ ] request [ 'server' ] [ 'personality' ] << { 'path' => path , 'contents' => Base64 . encode64 ( blob ) } end response = servers_client . post ( servers_path , JSON . dump ( request ) , json_headers ) data = JSON . parse ( response . body ) [ 'server' ] Raca :: Server . new ( @account , @region , data [ 'id' ] ) end | create a new server on Rackspace . |
22,840 | def require ( file ) return if key? ( file ) self [ file ] = deps ( file ) self [ file ] . each do | dependency | self . require dependency end end | Add the given file to this graph . Creates a new entry in the graph the key of which is the relative path to this file and the value is the array of relative paths to its dependencies . Any dependent files will also be recursively added to this graph . |
22,841 | def scan ( glob ) Dir [ glob ] . flat_map { | f | deps ( f ) } . uniq . each do | dependency | self . require dependency end end | Parses all of the files in the given glob and adds their dependencies to the graph . A file in this glob is not added to the graph unless another file in the glob depends on it . |
22,842 | def deps ( file ) parser . parse ( file ) . map { | f | path_to path [ f ] } end | Parses the file and returns the relative paths to its dependencies . |
22,843 | def run running! @current_app = Application . create ( :debug => debug? ) while true sleep 1 focus_changed if @current_app . finished? || backup? end end | Run the loop |
22,844 | def has_access? ( permission , user ) raise UnknownUserException if user . nil? AccessProvider . action_permitted_for_user? ( permission , user ) end | Check the url for each link in view to show it |
22,845 | def run_puppet ( driver_instance , puppet_tests , logger : nil , reset_after_run : true ) logger = logger || @logger logger . debug ( "#{driver_instance.node_name} running #{puppet_tests.size} tests" ) if driver_instance . open logger . debug ( "#{driver_instance.node_name} started" ) if driver_instance . self_test logger . debug ( "#{driver_instance.node_name} self_test OK, running puppet" ) puppet_tests . each { | test_name , puppet_code | if @result_set . class_size ( driver_instance . node_name ) > 0 and reset_after_run driver_instance . reset end setup_test ( driver_instance , test_name ) logger . info ( "running test #{driver_instance.node_name} - #{test_name}" ) relative_puppet_file = commit_testcase ( puppet_tests , driver_instance . node_name , test_name ) driver_instance . sync_testcase ( driver_instance . node_name , test_name ) puppet_file_remote = File . join ( PUPPET_TESTCASE_DIR , relative_puppet_file ) driver_instance . run_puppet_x2 ( puppet_file_remote ) @logger . debug ( "Saved result #{driver_instance.node_name} #{test_name} #{driver_instance.result.passed?}" ) @result_set . save ( driver_instance . node_name , test_name , driver_instance . result ) Report :: log_test_result_or_errors ( @logger , driver_instance . node_name , test_name , driver_instance . result , ) } logger . debug ( "#{driver_instance.node_name} test completed, closing instance" ) else raise "#{driver_instance.node_name} self test failed, unable to continue" end else raise "#{driver_instance.node_name} failed to start, unable to continue" end driver_instance . close end | Run puppet using driver_instance to execute puppet_codes |
22,846 | def extract_parameters ( op_options , node ) logger . debug "Operation node: #{node.inspect}" r = [ ] op_options [ :parameters ] . each do | p | logger . debug " Looking for: tns:#{p.first.camelize_if_symbol(:lower)}" v = node . xpath ( "tns:#{p.first.camelize_if_symbol(:lower)}" , namespaces ) . first if v . nil? elsif p . last == Fixnum v = v . text . to_i elsif p . last == DateTime v = DateTime . parse ( v . text ) else v = v . text end logger . debug " Found: #{v.inspect}" r << v end r end | Extracts all parameters from the operation node and return as an array . |
22,847 | def serialize_soap_result ( op_options , r ) xb = Builder :: XmlMarkup . new xb . instruct! xb . Envelope :xmlns => soap_env_ns , 'xmlns:xsi' => namespaces [ 'xsi' ] do | xenv | xenv . Body do | xbody | xbody . tag! "#{op_options[:public_name]}Response" , :xmlns => namespaces [ 'tns' ] do | xresp | if r . nil? xresp . tag! "#{op_options[:public_name]}Result" , 'xsi:nil' => 'true' else ret = op_options [ :returns ] rescue nil case ret when NilClass , Class xresp . tag! "#{op_options[:public_name]}Result" , r when Hash xresp . tag! "#{op_options[:public_name]}Result" do | xres | ret . each do | k , v | extract_and_serialize_value ( xres , r , k , v ) end end else raise "Unsupported return type: #{ret.inspect}" end end end end end end | Serializes the result of an operation as a SOAP Envelope . |
22,848 | def extract_and_serialize_value ( builder , obj , field , type ) v = if obj . is_a? ( Hash ) obj [ field ] or obj [ field . to_sym ] or obj [ field . to_s . underscore ] or obj [ field . to_s . underscore . to_sym ] elsif obj . respond_to? ( field ) obj . send ( field ) elsif obj . respond_to? ( field . underscore ) obj . send ( field . underscore ) else raise "Could not extract #{field.inspect} from object: #{obj.inspect}" end if v . nil? builder . tag! field . camelize_if_symbol , 'xsi:nil' => 'true' else builder . tag! field . camelize_if_symbol , v end end | Extracts a field from an object casts it to the appropriate type and serializes as XML . |
22,849 | def serialize_soap_fault ( ex ) logger . debug "Serializing SOAP Fault: #{ex.inspect}" xb = Builder :: XmlMarkup . new xb . instruct! xb . tag! 'e:Envelope' , 'xmlns:e' => soap_env_ns do | xenv | xenv . tag! 'e:Body' do | xbody | xbody . tag! 'e:Fault' do | xf | case soap_version when :soap11 xf . faultcode "e:Server.#{ex.class.name}" xf . faultstring ex . message when :soap12 xf . tag! 'e:Code' do | xcode | xcode . tag! 'e:Value' , 'e:Receiver' xcode . tag! 'e:Subcode' do | xsubcode | xsubcode . tag! 'e:Value' , ex . class . name end end xf . tag! 'e:Reason' , ex . message else raise "Unsupported SOAP version: #{soap_version}" end end end end end | Serializes an exception as a SOAP Envelope containing a SOAP Fault . |
22,850 | def view_variables instance_variables . select { | var | additional_var? ( var ) } . map { | var | fetch_ivar_value ( var ) } end | All user defined instance variables for current request . |
22,851 | def index gricer_request = :: Gricer . config . request_model . first_by_id ( params [ :id ] ) gricer_session = :: Gricer . config . session_model . first_by_id ( session [ :gricer_session ] ) if gricer_session gricer_session . javascript = true gricer_session . java = params [ :j ] gricer_session . flash_version = params [ :f ] unless params [ :f ] == 'false' gricer_session . silverlight_version = params [ :sl ] unless params [ :sl ] == 'false' gricer_session . screen_width = params [ :sx ] gricer_session . screen_height = params [ :sy ] gricer_session . screen_size = "#{params[:sx]}x#{params[:sy]}" unless params [ :sx ] . blank? or params [ :sy ] . blank? gricer_session . screen_depth = params [ :sd ] gricer_session . save if gricer_request and gricer_request . session == gricer_session gricer_request . javascript = true gricer_request . window_width = params [ :wx ] gricer_request . window_height = params [ :wy ] if gricer_request . save render text : 'ok' else render text : 'session only' , status : 500 end return else render text : 'session only' return end end render text : 'failed' , status : 500 end | This action stores the data submitted by the Javascript . |
22,852 | def get ( key , default = nil , & block ) value = options [ key ] value = default if value . nil? value = block . call if value . nil? && block return value end | Lookup an option from our options set . |
22,853 | def get! ( key , default = nil , & block ) value = get key , default , & block raise "Nil value found for option: #{key}, #{default}" if value . nil? return value end | Use this the option is mandatory . |
22,854 | def connect if defined? ( Blacklight ) solr_config = Blacklight . solr_config elsif defined? ( Rails . root . to_s ) solr_config = load_rails_config else solr_config = load_fallback_config end if index_full_text == true && solr_config . has_key? ( :fulltext ) && solr_config [ :fulltext ] . has_key? ( 'url' ) solr_config [ :url ] = solr_config [ :fulltext ] [ 'url' ] elsif solr_config . has_key? ( :default ) && solr_config [ :default ] . has_key? ( 'url' ) solr_config [ :url ] = solr_config [ :default ] [ 'url' ] elsif ! solr_config . has_key? ( :url ) raise "Unable to find a solr url in the config file" end @solr = RSolr . connect solr_config rescue RuntimeError => e logger . debug "Unable to establish SOLR Connection with #{solr_config.inspect}. Failed with #{e.message}" raise URI :: InvalidURIError end | This method performs initialization tasks |
22,855 | def generate_dates ( solr_doc ) if solr_doc [ :date_t ] . nil? :: Solrizer :: Extractor . insert_solr_field_value ( solr_doc , :date_t , "9999-99-99" ) end date_value = solr_doc [ :date_t ] if date_value . kind_of? Array date_value = date_value . first end date_obj = Date . _parse ( date_value ) if date_obj [ :mon ] . nil? :: Solrizer :: Extractor . insert_solr_field_value ( solr_doc , :month_facet , "99" ) elsif 0 < date_obj [ :mon ] && date_obj [ :mon ] < 13 :: Solrizer :: Extractor . insert_solr_field_value ( solr_doc , :month_facet , date_obj [ :mon ] . to_s . rjust ( 2 , '0' ) ) else :: Solrizer :: Extractor . insert_solr_field_value ( solr_doc , :month_facet , "99" ) end if date_obj [ :mday ] . nil? :: Solrizer :: Extractor . insert_solr_field_value ( solr_doc , :day_facet , "99" ) elsif 0 < date_obj [ :mday ] && date_obj [ :mday ] < 32 :: Solrizer :: Extractor . insert_solr_field_value ( solr_doc , :day_facet , date_obj [ :mday ] . to_s . rjust ( 2 , '0' ) ) else :: Solrizer :: Extractor . insert_solr_field_value ( solr_doc , :day_facet , "99" ) end return solr_doc end | This method generates the month and day facets from the date_t in solr_doc |
22,856 | def create_document ( obj ) solr_doc = Hash . new model_klazz_array = ActiveFedora :: ContentModel . known_models_for ( obj ) model_klazz_array . delete ( ActiveFedora :: Base ) if obj . class == ActiveFedora :: Base solr_doc = obj . to_solr ( solr_doc ) logger . debug " added base fields from #{obj.class.to_s}" else solr_doc = obj . to_solr ( solr_doc ) model_klazz_array . delete ( obj . class ) logger . debug " added base fields from #{obj.class.to_s} and model fields from #{obj.class.to_s}" end model_klazz_array . each do | klazz | instance = obj . adapt_to ( klazz ) solr_doc = instance . to_solr ( solr_doc , :model_only => true ) logger . debug " added solr fields from #{klazz.to_s}" end :: Solrizer :: Extractor . insert_solr_field_value ( solr_doc , :id_t , "#{obj.pid}" ) :: Solrizer :: Extractor . insert_solr_field_value ( solr_doc , :id , "#{obj.pid}" ) unless solr_doc [ :id ] return solr_doc end | This method creates a Solr - formatted XML document |
22,857 | def post_accounts ( new_account ) @options = { path : '/accounts/content' , body : Megam :: JSONCompat . to_json ( new_account ) } . merge ( @options ) request ( :expects => 201 , :method => :post , :body => @options [ :body ] ) end | The body content needs to be a json . |
22,858 | def store_constraint_matrix ( constr , type ) row = [ ] colno = [ ] constr . expression . terms . each do | var , coeff | row << coeff * 1.0 colno << var . index end row_buffer = build_pointer_array row , :double colno_buffer = build_pointer_array colno , :int ret = LPSolve . add_constraintex ( @ptr , constr . expression . terms . length , row_buffer , colno_buffer , type , constr . rhs ) fail if ret != 1 end | Build the constraint matrix and add it to the model |
22,859 | def birthdate month_data_element = bio_document . at ( "td.label[text()*='Date of Birth']" ) . next_element . first_element_child date_month = month_data_element . inner_text . strip rescue "" year = month_data_element . next_element . inner_text . strip rescue "" Date . parse ( "#{date_month} #{year}" ) rescue nil end | Get The Birth Date |
22,860 | def deathdate date_month = bio_document . at ( "h5[text()*='Date of Death']" ) . next_element . inner_text . strip rescue "" year = bio_document . at ( "a[@href*='death_date']" ) . inner_text . strip rescue "" Date . parse ( "#{date_month} #{year}" ) rescue nil end | Get The death date else nil |
22,861 | def filmography films = main_document . css ( ".filmo-row b a" ) . map { | e | e . get_attribute ( 'href' ) [ / \d / , 1 ] } rescue [ ] films . map { | f | Movie . new ( f . to_i ) } end | Return the Filmography for the moment I can t make subdivision of this then i take all in an array |
22,862 | def all_javascript_paths all_paths = [ ] all_paths += @javascripts all_paths += @background_scripts all_paths += @content_scripts . map { | cs | cs . javascripts } . compact all_paths . flatten . uniq end | Return all the javascript paths in the spec included those nested inside other objects |
22,863 | def all_stylesheet_paths all_paths = [ ] all_paths += @stylesheets all_paths += @content_scripts . map { | cs | cs . stylesheets } . compact all_paths . flatten . uniq end | Return all the stylesheet paths in the spec included those nested inside other objects |
22,864 | def serialize serialized = { } serialized [ :id ] = id serialized [ :title ] = title serialized [ :meta_language ] = meta_language serialized [ :meta_permalink ] = meta_permalink serialized [ :category_father ] = category_father ? category_father . serialize_base : nil serialized [ :category_children ] = serialize_category_children serialized [ :other_informations ] = serialize_other_informations serialized end | This function serializes a complete version of the category . |
22,865 | def serialize_base serialized = { } serialized [ :id ] = id serialized [ :title ] = title serialized [ :meta_language ] = meta_language serialized [ :meta_permalink ] = meta_permalink serialized end | This function serializes a basic version of the category . |
22,866 | def from_criteria ( criteria ) queries = [ ] criteria . each do | k , v | name , operator , index_type , value = nil query_options = { } if ( v . is_a? ( Hash ) ) name = k . to_s query_options . merge! ( v . delete ( :options ) || { } ) sub_queries = [ ] v . each do | kk , vv | operator = kk . to_s . gsub ( '$' , '' ) . upcase || "EQ" if @operators . include? ( operator ) value = vv value = value . to_s if value . is_a? ( MarkLogic :: ObjectId ) sub_queries << build_query ( name , operator , value , query_options ) elsif value . is_a? ( Hash ) child_queries = value . map do | kk , vv | build_query ( kk , vv , query_options ) end sub_queries << Queries :: ContainerQuery . new ( name , Queries :: AndQuery . new ( child_queries ) ) end end if sub_queries . length > 1 queries << Queries :: AndQuery . new ( sub_queries ) elsif sub_queries . length == 1 queries << sub_queries [ 0 ] end else name = k . to_s value = v operator = "EQ" queries << build_query ( name , operator , value , query_options ) end end if queries . length > 1 MarkLogic :: Queries :: AndQuery . new ( * queries ) elsif queries . length == 1 queries [ 0 ] end end | Builds a MarkLogic Query from Mongo Style Criteria |
# Basic drawing primitive: iterate every coordinate of the image and,
# wherever the attached block returns true for that coordinate, overwrite
# the pixel with draw_value. The block receives the current coordinate.
def draw(im, draw_value = 255.0)
  im.each do |coord|
    im.setValue(coord, draw_value) if yield(coord)
  end
end
# Draws a named shape into an image by dispatching to the drawing method
# of that name (e.g. :circle), when such a method exists; otherwise nil.
def draw_shape(im, location, shape_name = :circle, shape_parameters = 10)
  return unless respond_to?(shape_name)
  send(shape_name, im, location, shape_parameters)
end
# Draws a circle outline of the given radius centered at `center`
# ([x, y]) into the image; pixels within sqrt(2) of the radius are set.
def circle(im, center, radius)
  cx, cy = center
  lower = ImageCoordinate[cx - radius - 1, cy - radius - 1, 0, 0, 0]
  upper = ImageCoordinate[cx + radius + 1, cy + radius + 1, 0, 0, 0]
  im.box_conservative(lower, upper, [:x, :y])
  draw(im) do |ic|
    (Math.hypot(ic[:x] - cx, ic[:y] - cy) - radius).abs <= Math.sqrt(2)
  end
  lower.recycle
  upper.recycle
end
# Draws an ellipse outline into the image, given its two foci ([x, y]
# pairs, in either order) and a radius increment added to the distance
# between them. Pixels where the summed focal distances are within
# sqrt(2) of the radius are set.
#
# Fix: unlike `circle`, the original never recycled its temporary
# bounding-box coordinates — recycle them for consistency.
def ellipse(im, foci, radius_inc)
  # Normalize the bounding box regardless of foci ordering.
  min_x, max_x = [foci[0][0], foci[1][0]].minmax
  min_y, max_y = [foci[0][1], foci[1][1]].minmax
  radius = radius_inc + Math.hypot(max_x - min_x, max_y - min_y)
  lower = ImageCoordinate[min_x - radius - 1, min_y - radius - 1, 0, 0, 0]
  upper = ImageCoordinate[max_x + radius + 1, max_y + radius + 1, 0, 0, 0]
  im.box_conservative(lower, upper, [:x, :y])
  draw(im) do |ic|
    d0 = Math.hypot(ic[:x] - foci[0][0], ic[:y] - foci[0][1])
    d1 = Math.hypot(ic[:x] - foci[1][0], ic[:y] - foci[1][1])
    (d0 + d1 - radius).abs <= Math.sqrt(2)
  end
  lower.recycle
  upper.recycle
end
# Annotate the named file: parse it with Sass (syntax guessed from the
# filename), perform the tree, resolve rules, then set and return @rows.
def annotate(filename)
  source = IO.read(filename)
  engine = Sass::Engine.new(source, options.merge(:syntax => guess_syntax(filename)))
  tree = engine.to_tree
  tree.perform!(Sass::Environment.new)
  resolve_rules tree
  @rows = to_rows(tree)
end
# Annotate the named file and render the result as an HTML document.
# On a Sass syntax error, render the error converted to CSS instead.
def to_html(filename)
  template_path = File.dirname(__FILE__) + "/annotate/template.erb"
  rows = annotate(filename)
  ERB.new(IO.read(template_path)).result(binding)
rescue Sass::SyntaxError => error
  error = Sass::SyntaxError.exception_to_css error, @options.merge(:full_exception => true)
  ERB.new(IO.read(template_path)).result(binding)
end
# Render our own stylesheet; the ERB template calls this to inline it.
def styles
  stylesheet = IO.read(File.dirname(__FILE__) + "/annotate/style.scss")
  Sass::Engine.new(stylesheet, :syntax => :scss).render
end
# Return true if the current page contains the given text or regular
# expression, false if it does not. Raises ArgumentError for any other
# argument type. (instance_of? keeps the original exact-class check.)
def page_contains?(query)
  if query.instance_of?(String)
    page.has_content?(query)
  elsif query.instance_of?(Regexp)
    page.has_xpath?('.//*', :text => query)
  else
    raise ArgumentError, "Expected String or Regexp, got #{query.class}"
  end
end
# Verify the presence of content on the page. Passes when every given
# item (String, Regexp, or an array of them) is found within the scope;
# raises Kelp::Unexpected listing the items that were not seen.
def should_see(texts, scope = {})
  in_scope(scope) do
    wanted = (texts.instance_of?(String) || texts.instance_of?(Regexp)) ? [texts] : texts
    missing = wanted.reject { |text| page_contains?(text) }
    unless missing.empty?
      raise Kelp::Unexpected,
            "Expected to see: #{wanted.inspect}\nDid not see: #{missing.inspect}"
    end
  end
end
# Verify that all the given texts appear within a single table row (in
# any order). Raises Kelp::Unexpected when no <tr> contains them all.
def should_see_in_same_row(texts, scope = {})
  in_scope(scope) do
    unless page.has_xpath?(xpath_row_containing(texts))
      raise Kelp::Unexpected,
            "Expected, but did not see: #{texts.inspect} in the same row"
    end
  end
end
# Poll the lineage's snapshots until none are in the "pending" state,
# sleeping between checks; raise if they do not settle within the timeout.
def wait_for_snapshots
  timeout = 1500
  step = 10
  while timeout > 0
    puts "Checking for snapshot completed"
    statuses = behavior(:find_snapshots).map { |snap| snap.aws_status }
    break unless statuses.include?("pending")
    sleep step
    timeout -= step
  end
  raise "FATAL: timed out waiting for all snapshots in lineage #{@lineage} to complete" if timeout == 0
end
# Returns the timestamp string embedded in the "name" tag of the latest
# snapshot (used to test OPT_DB_RESTORE_TIMESTAMP_OVERRIDE), or nil when
# no tag carries digits.
#
# Fix: the original read `$1` although the regex had no capture group,
# so it always returned nil; it also crashed-prone when nothing matched.
def find_snapshot_timestamp
  last_snap = behavior(:find_snapshots).last
  matching = last_snap.tags.detect { |t| t["name"] =~ /(\d+)/ }
  matching && Regexp.last_match(1)
end
# Requires the file with the given name, confined to the namespace root
# directory (the File.join('', ...) + expand_path dance strips any "../"
# escapes). Returns true on success, nil when the file does not exist;
# Gem::LoadError is deliberately re-raised.
def require_file(name)
  sanitized = File.expand_path(File.join('', name.to_s))
  path = File.join(namespace_root, sanitized)
  begin
    require path
  rescue Gem::LoadError => e
    raise(e)
  rescue ::LoadError
    return nil
  end
  true
end
# Checks whether a constant is defined; when it is not, attempts to load
# the file that should define it (via OpenNamespace path mapping) and
# checks again.
def const_defined?(name, *inherit)
  return true if super(name, *inherit)
  require_file(OpenNamespace.const_path(name))
  super(name, *inherit)
end
# Return this item's path expressed relative to the current working
# directory. (The dataset's original description was mislabeled.)
def relative_path
  current_dir = File.expand_path("./")
  Pathname.new(path).relative_path_from(Pathname.new(current_dir)).to_s
end
# A tag was referenced in the match — look up its due date in the config.
# Raises KeyError with a friendly message when the tag is not configured.
def lookup_tag_due_date
  config.fetch(:tags).fetch(match[:tag])
rescue KeyError
  raise KeyError, "#{match[:tag]} tag not defined in config file"
end
# Debug wrapper: when debug? is truthy, prints the calling location and
# the given message — Array (one line per element), Hash ("[key]" header
# then the value, expanding Array values), or String/Symbol — and then
# yields to an optional block.
#
# @param msg [Array, Hash, String, Symbol, nil] the payload to print
def debug(msg = nil, &block)
  return unless debug?
  puts "\n[DEBUG] - #{caller.first}"
  # Arrays are printed element-by-element here; the chain below only
  # handles Hash/String/Symbol.
  msg.each { |m| puts(m) } if msg.is_a?(Array)
  if msg.is_a?(Hash)
    msg.each do |k, v|
      puts "[#{k.to_s}]"
      if v.is_a?(Array)
        v.each { |m| puts(m) }
      else
        puts v
      end
    end
  elsif (msg.is_a?(String) || msg.is_a?(Symbol))
    puts msg.to_s
  end
  yield if block_given?
  puts "\n"
end
# Rateable flag setter. A result can only be rateable when there is an
# opponent; given one, any value except an explicit false (nil included)
# marks the result rateable.
def rateable=(rateable)
  if opponent.nil?
    @rateable = false
    return
  end
  # nil counts as rateable: only an explicit false turns rating off.
  @rateable = (rateable != false)
end
# Returns the Auth Settings hash for the API client: two query-string
# api_key schemes (appsid, signature) and an OAuth2 bearer header.
def auth_settings
  api_key_scheme = lambda do |key|
    { type: 'api_key', in: 'query', key: key, value: api_key_with_prefix(key) }
  end
  {
    'appsid' => api_key_scheme.call('appsid'),
    'oauth' => { type: 'oauth2', in: 'header', key: 'Authorization', value: "Bearer #{access_token}" },
    'signature' => api_key_scheme.call('signature')
  }
end
# Execute the JSON request over HTTP POST to @uri. When a block is given
# the response body is streamed to it chunk-by-chunk and a generic
# "success" JsonResponse is returned; otherwise the full response body
# is parsed into a JsonResponse.
def execute_internal(json_request, &block)
  json_request.headers["Content-Type"] = "application/json" if json_request.respond_to?(:headers)
  req = HTTPI::Request.new(:url => @uri, :body => json_request.to_wire)
  req.headers.merge!(json_request.headers) if json_request.respond_to?(:headers)
  if block
    # Streaming mode: hand each body chunk to the caller's block.
    req.on_body(&block)
    HTTPI.post(req)
    JsonResponse.new("success")
  else
    JsonResponse.from_json(HTTPI.post(req).body)
  end
end
# Write the last-update time (as an integer) to tmp/chronuscop.tmp,
# creating the tmp directory if needed.
#
# Fix: use File.open's block form so the handle is closed even if the
# write raises (the original open/printf/close leaked on error).
def write_last_update(last_update_at)
  Dir.mkdir("tmp") unless File.directory?("tmp")
  File.open("tmp/chronuscop.tmp", "w") do |f|
    f.printf("%d", last_update_at.to_i)
  end
end
# Convert an XML timestamp received from the server (e.g.
# "2012-03-04T05:06:07Z") to integer epoch seconds, interpreted as UTC.
#
# Fix: the original split the string with hand-rolled regexes (garbled
# in transit and fragile against separator variations); extracting the
# first six numeric fields tolerates any delimiter characters.
def xml_time_to_integer(str)
  year, month, day, hour, min, sec = str.scan(/\d+/).first(6)
  Time.utc(year, month, day, hour, min, sec).to_i
end
# Keeps the remote keys and the local (Redis-cached) keys synchronized:
# fetches translations updated since the last sync from the Chronuscop
# server, stores each in Redis, and records the newest update timestamp.
# Call only after the configuration object is initialized — every server
# parameter is read from it.
def sync_it_now
  puts "Attempt Sync"
  last_update_at = get_last_update_at
  # Ask the server only for translations changed since last_update_at.
  page = @mechanize_agent.get("#{ChronuscopClient.configuration_object.chronuscop_server_address}/projects/#{ChronuscopClient.configuration_object.project_number}/translations.xml/?auth_token=#{ChronuscopClient.configuration_object.api_token}&last_update_at=#{last_update_at}")
  words_hash = XmlSimple.xml_in(page.body)
  if (!words_hash) then
    puts "Nothing new added."
    return
  end
  all_translations = words_hash["translation"]
  if (!all_translations) then
    puts "Nothing new added."
    return
  end
  all_translations.each do |t|
    # Cache the translation and track the most recent update time so the
    # next sync only requests newer entries.
    @redis_agent.set "#{t["key"]}", "#{t["value"]}"
    str = t["updated-at"][0]["content"]
    key_updated_at = xml_time_to_integer(str)
    if (key_updated_at > last_update_at) then
      last_update_at = key_updated_at
    end
  end
  write_last_update(last_update_at.to_i)
  puts "Finished synchronizing !!!"
end
# Get the list of input files: when a :command option is present the
# files come from running it in :base_dir (default "."), otherwise
# CodeLister scans according to the remaining args.
def get_input_files(args = {})
  command = args.fetch(:command, nil)
  return CodeLister.files(args) if command.nil?
  CodeLister.files_from_shell(command, args.fetch(:base_dir, "."))
end
# Main entry point to export the code: collect the input files, convert
# each to XHTML, optionally build an index page, bundle everything into
# a single .tar.gz, and clean up the intermediate files.
def execute(options = {})
  input_files = get_input_files(options)
  # Drop binary files before conversion.
  # NOTE(review): gsub(/\./, options[:base_dir]) replaces EVERY dot in
  # the name (including the extension separator) — this looks like it
  # was meant to expand only a leading "." to the base dir; confirm.
  input_files.delete_if do |file|
    File.binary?(file.gsub(/\./, options[:base_dir]))
  end
  if input_files.empty?
    puts "No file found for your option: #{options}"
    return
  end
  to_htmls(input_files, options)
  generated_files = input_files.map { |f| "#{f}.xhtml" }
  index_file = "./index.html"
  IndexHtml.htmlify generated_files,
                    base_dir: options[:base_dir],
                    output: index_file,
                    drop_ext: true
  # The index page only ships inside the archive when requested.
  generated_files << index_file if options[:index]
  output_file = "vim_printer_#{File.basename(File.expand_path(options[:base_dir]))}.tar.gz"
  AgileUtils::FileUtil.tar_gzip_files(generated_files, output_file)
  # Remove the intermediates now that they are archived.
  AgileUtils::FileUtil.delete(generated_files)
  FileUtils.rm_rf(index_file) if options[:index]
  puts "Your output file is '#{File.absolute_path(output_file)}'"
end
# Convert multiple files to HTML: change into the base directory, then
# convert each file in turn, logging progress as "i of n".
def to_htmls(files, options = {})
  FileUtils.chdir(File.expand_path(options[:base_dir]))
  total = files.size
  files.each_with_index do |file, index|
    puts "FYI: process file #{index + 1} of #{total} : #{file}"
    to_html(file, options)
  end
end
# Render the Liquid template registered for this endpoint/request type
# (if any) and return [rendered_template, options]; rendered_template is
# nil when no template is registered, and options may have had the
# template's option keys extracted from it.
def substitute_template_values(endpoint, request_type, options = {})
  rendered_template = nil
  templates_for_type = @schemas.fetch(request_type.to_sym, nil)
  template = templates_for_type && templates_for_type.fetch(endpoint.to_sym, nil)
  if template
    extracted_options, options = extract_template_options(options.merge({ endpoint: endpoint }), template)
    rendered_template = Liquid::Template.parse(template).render(extracted_options)
  end
  return rendered_template, options
end
# Copy a source file directly to an output file, first backing up the
# existing output file when makeBackup is truthy.
def copySrcToOutputFile(srcFile, outFile, makeBackup)
  makeBackupFile(outFile) if makeBackup
  FileUtils.cp(srcFile, outFile, :verbose => true)
end
# Decompress the last response's body in place according to its
# content-encoding header (gzip or deflate); other encodings untouched.
def handle_deflation
  response = last_response
  case response["content-encoding"]
  when "gzip"
    inflated = Zlib::GzipReader.new(StringIO.new(response.body)).read
    response.body.replace(inflated)
  when "deflate"
    response.body.replace(Zlib::Inflate.inflate(response.body))
  end
end
# Retrieve a cached value from a Redis hash, converting the stored
# string back to Float (when it contains ".") or Integer. On a cache
# miss, compute the value, store it, and return it unconverted.
def value_from_redis
  cached = Timely.redis.hget(redis_hash_key, redis_value_key)
  if cached
    cached.include?(".") ? cached.to_f : cached.to_i
  else
    fresh = value_without_caching
    Timely.redis.hset(redis_hash_key, redis_value_key, fresh)
    fresh
  end
end
# Configure UsingYAML for a class: Symbol/String args name YAML files to
# load, while a single-entry Hash of the form { path: ... } sets the
# lookup path for this class. Also mixes in the instance methods.
def using_yaml(*args)
  include InstanceMethods
  args.each do |arg|
    case arg
    when Symbol, String
      using_yaml_file(arg.to_s)
    when Hash
      next unless arg.size == 1 && arg.keys.first == :path
      UsingYAML.path = [self.inspect, arg.values.first]
    end
  end
end
# Returns the URL for the route. Templated routes are expanded with the
# provided options hash; otherwise the bare base path is returned.
def url_for(options = {})
  return base_path unless templated?
  Addressable::Template.new(base_path).expand(options).to_s
end
# Decrypt the provided cipher text and return the plaintext in the
# configured encoding; returns nil for blank/whitespace-only input.
# The field carries an IV, an RSA-encrypted AES key, and the AES-encrypted
# payload; the AES key is recovered with our private key first.
def decrypt(field)
  return if field !~ /\S/
  iv, encrypted_aes_key, encrypted_text = parse(field)
  rsa = OpenSSL::PKey::RSA.new(File.read(private_key_filepath))
  aes_key = rsa.private_decrypt(Base64.decode64(encrypted_aes_key))
  decrypt_data(iv, aes_key, encrypted_text).force_encoding(encoding)
end
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.