idx
int64
0
24.9k
question
stringlengths
68
4.14k
target
stringlengths
9
749
100
# Runs all validations within the given +context+ and returns +true+ when
# no errors were added, +false+ otherwise. The previous validation context
# is restored even if validation raises.
def valid?(context = nil)
  previous_context = validation_context
  self.validation_context = context
  errors.clear
  run_validations!
ensure
  self.validation_context = previous_context
end
Runs all the specified validations and returns +true+ if no errors were added, otherwise +false+.
101
# Returns (and memoizes per +verifier_name+) an ActiveSupport::MessageVerifier
# whose secret is derived from the application's key_generator.
def message_verifier ( verifier_name ) @message_verifiers [ verifier_name ] ||= begin secret = key_generator . generate_key ( verifier_name . to_s ) ActiveSupport :: MessageVerifier . new ( secret ) end end
Returns a message verifier object .
102
# Stores some of the Rails initial environment parameters which will be
# used by middlewares and engines to configure themselves. Memoized in
# @app_env_config; merges on top of the superclass env_config.
def env_config
  @app_env_config ||= super.merge(
    "action_dispatch.parameter_filter" => config.filter_parameters,
    "action_dispatch.redirect_filter" => config.filter_redirect,
    "action_dispatch.secret_key_base" => secret_key_base,
    "action_dispatch.show_exceptions" => config.action_dispatch.show_exceptions,
    "action_dispatch.show_detailed_exceptions" => config.consider_all_requests_local,
    "action_dispatch.logger" => Rails.logger,
    "action_dispatch.backtrace_cleaner" => Rails.backtrace_cleaner,
    "action_dispatch.key_generator" => key_generator,
    "action_dispatch.http_auth_salt" => config.action_dispatch.http_auth_salt,
    "action_dispatch.signed_cookie_salt" => config.action_dispatch.signed_cookie_salt,
    "action_dispatch.encrypted_cookie_salt" => config.action_dispatch.encrypted_cookie_salt,
    "action_dispatch.encrypted_signed_cookie_salt" => config.action_dispatch.encrypted_signed_cookie_salt,
    "action_dispatch.authenticated_encrypted_cookie_salt" => config.action_dispatch.authenticated_encrypted_cookie_salt,
    "action_dispatch.use_authenticated_cookie_encryption" => config.action_dispatch.use_authenticated_cookie_encryption,
    "action_dispatch.encrypted_cookie_cipher" => config.action_dispatch.encrypted_cookie_cipher,
    "action_dispatch.signed_cookie_digest" => config.action_dispatch.signed_cookie_digest,
    "action_dispatch.cookies_serializer" => config.action_dispatch.cookies_serializer,
    "action_dispatch.cookies_digest" => config.action_dispatch.cookies_digest,
    "action_dispatch.cookies_rotations" => config.action_dispatch.cookies_rotations,
    "action_dispatch.use_cookies_with_metadata" => config.action_dispatch.use_cookies_with_metadata,
    "action_dispatch.content_security_policy" => config.content_security_policy,
    "action_dispatch.content_security_policy_report_only" => config.content_security_policy_report_only,
    "action_dispatch.content_security_policy_nonce_generator" => config.content_security_policy_nonce_generator
  )
end
Stores some of the Rails initial environment parameters which will be used by middlewares and engines to configure themselves .
103
# Shorthand to decrypt an encrypted configuration file. Both +path+ and
# +key_path+ are resolved against Rails.root; raise_if_missing_key follows
# config.require_master_key.
def encrypted ( path , key_path : "config/master.key" , env_key : "RAILS_MASTER_KEY" ) ActiveSupport :: EncryptedConfiguration . new ( config_path : Rails . root . join ( path ) , key_path : Rails . root . join ( key_path ) , env_key : env_key , raise_if_missing_key : config . require_master_key ) end
Shorthand to decrypt any encrypted configurations or files .
104
# Returns the railties ordered per config.railties_order. :main_app maps to
# self; the :all placeholder (appended if absent) is replaced in place by the
# railties not explicitly listed (plus self, if not already included). Memoized.
def ordered_railties @ordered_railties ||= begin order = config . railties_order . map do | railtie | if railtie == :main_app self elsif railtie . respond_to? ( :instance ) railtie . instance else railtie end end all = ( railties - order ) all . push ( self ) unless ( all + order ) . include? ( self ) order . push ( :all ) unless order . include? ( :all ) index = order . index ( :all ) order [ index ] = all order end end
Returns the ordered railties for this application considering railties_order .
105
# Reads the raw request body once and memoizes it in the RAW_POST_DATA
# header so repeated calls do not re-read the stream; the stream is rewound
# afterwards when it supports #rewind.
def raw_post
  unless has_header? "RAW_POST_DATA"
    stream = body
    set_header("RAW_POST_DATA", stream.read(content_length))
    stream.rewind if stream.respond_to?(:rewind)
  end
  get_header "RAW_POST_DATA"
end
Read the request body. This is useful for web services that need to work with raw requests directly.
106
# The request body as an IO stream. When RAW_POST_DATA is already set, a
# binary-encoded copy of it is wrapped in a StringIO; otherwise the
# underlying body stream is returned.
def body
  raw_post = get_header("RAW_POST_DATA")
  if raw_post
    StringIO.new(raw_post.dup.force_encoding(Encoding::BINARY))
  else
    body_stream
  end
end
The request body is an IO input stream . If the RAW_POST_DATA environment variable is already set wrap it in a StringIO .
107
# Parses, encoding-checks and normalizes the query parameters, memoizing the
# result in a request header. Rack parameter errors are re-raised as
# ActionController::BadRequest.
def GET fetch_header ( "action_dispatch.request.query_parameters" ) do | k | rack_query_params = super || { } Request :: Utils . check_param_encoding ( rack_query_params ) set_header k , Request :: Utils . normalize_encode_params ( rack_query_params ) end rescue Rack :: Utils :: ParameterTypeError , Rack :: Utils :: InvalidParameterError => e raise ActionController :: BadRequest . new ( "Invalid query parameters: #{e.message}" ) end
Override Rack's GET method to support indifferent access.
108
# Parses the formatted request body parameters, normalizes their encoding and
# memoizes them in a request header. Rack parameter errors are re-raised as
# ActionController::BadRequest.
def POST fetch_header ( "action_dispatch.request.request_parameters" ) do pr = parse_formatted_parameters ( params_parsers ) do | params | super || { } end self . request_parameters = Request :: Utils . normalize_encode_params ( pr ) end rescue Rack :: Utils :: ParameterTypeError , Rack :: Utils :: InvalidParameterError => e raise ActionController :: BadRequest . new ( "Invalid request parameters: #{e.message}" ) end
Override Rack's POST method to support indifferent access.
109
# Sets up the instance variables needed to render a partial. When
# options[:partial] is a String it is used as the path directly; otherwise the
# partial object itself is inspected: a collection maps each member to its
# partial path (collapsed to a single @path when all members share one),
# while a single object resolves via partial_path. Returns self.
def setup ( context , options , as , block ) @options = options @block = block @locals = options [ :locals ] || { } @details = extract_details ( options ) partial = options [ :partial ] if String === partial @has_object = options . key? ( :object ) @object = options [ :object ] @collection = collection_from_options @path = partial else @has_object = true @object = partial @collection = collection_from_object || collection_from_options if @collection paths = @collection_data = @collection . map { | o | partial_path ( o , context ) } if paths . uniq . length == 1 @path = paths . first else paths . map! { | path | retrieve_variable ( path , as ) . unshift ( path ) } @path = nil end else @path = partial_path ( @object , context ) end end self end
Sets up instance variables needed for rendering a partial . This method finds the options and details and extracts them . The method also contains logic that handles the type of object passed in as the partial .
110
# Obtains the partial path for +object+ (via #to_model when available).
# Raises ArgumentError unless the object implements #to_partial_path. The
# controller-namespace prefix is merged in (and memoized) when the view
# requests it.
def partial_path ( object , view ) object = object . to_model if object . respond_to? ( :to_model ) path = if object . respond_to? ( :to_partial_path ) object . to_partial_path else raise ArgumentError . new ( "'#{object.inspect}' is not an ActiveModel-compatible object. It must implement :to_partial_path." ) end if view . prefix_partial_path_with_controller_namespace prefixed_partial_names [ path ] ||= merge_prefix_into_object_path ( @context_prefix , path . dup ) else path end end
Obtains the path to where the object's partial is located. If the object responds to +to_partial_path+ then +to_partial_path+ will be called and will provide the path. If the object does not respond to +to_partial_path+ then an +ArgumentError+ is raised.
111
# Returns a serialized hash of the object's attributes, honoring the
# :only / :except (mutually exclusive, :only wins), :methods and :include
# options. Keys are strings.
def serializable_hash(options = nil)
  options ||= {}

  names = attributes.keys
  if (only = options[:only])
    names &= Array(only).map(&:to_s)
  elsif (except = options[:except])
    names -= Array(except).map(&:to_s)
  end

  hash = names.each_with_object({}) do |name, acc|
    acc[name] = read_attribute_for_serialization(name)
  end
  Array(options[:methods]).each { |meth| hash[meth.to_s] = send(meth) }

  serializable_add_includes(options) do |association, records, opts|
    hash[association.to_s] =
      if records.respond_to?(:to_ary)
        records.to_ary.map { |record| record.serializable_hash(opts) }
      else
        records.serializable_hash(opts)
      end
  end
  hash
end
Returns a serialized hash of your object .
112
# Returns (and memoizes) a module that includes every helper module found
# under helpers_paths.
def helpers
  @helpers ||= begin
    aggregate = Module.new
    helper_modules = ActionController::Base.modules_for_helpers(
      ActionController::Base.all_helpers_from_path(helpers_paths)
    )
    helper_modules.each { |mod| aggregate.include(mod) }
    aggregate
  end
end
Returns a module with all the helpers defined for the engine .
113
# Returns the underlying Rack application for this engine. Uses a
# double-checked build: the unsynchronized @app read fast-paths the common
# case, and @app ||= inside the lock prevents building twice.
def app @app || @app_build_lock . synchronize { @app ||= begin stack = default_middleware_stack config . middleware = build_middleware . merge_into ( stack ) config . middleware . build ( endpoint ) end } end
Returns the underlying Rack application for this engine .
114
# Defines the routes for this engine, appending the given block (if any) to
# the memoized route set.
def routes(&block)
  @routes ||= ActionDispatch::Routing::RouteSet.new_with_config(config)
  @routes.tap { |set| set.append(&block) if block }
end
Defines the routes for this engine . If a block is given to routes it is appended to the engine .
115
# Attaches an +attachable+ to the record: persisted, unchanged records are
# updated immediately; otherwise the attachment is assigned for a later save.
def attach(attachable)
  return record.update(name => attachable) if record.persisted? && !record.changed?
  record.public_send("#{name}=", attachable)
end
Attaches an + attachable + to the record .
116
# Reads the file for +key+ in 5 MB chunks, yielding each chunk to the block.
# Raises ActiveStorage::FileNotFoundError when the file is absent.
# NOTE(review): assumes file.download(range:) returns an object responding to
# #string — confirm against the storage client in use.
def stream ( key ) file = file_for ( key , skip_lookup : false ) chunk_size = 5 . megabytes offset = 0 raise ActiveStorage :: FileNotFoundError unless file . present? while offset < file . size yield file . download ( range : offset .. ( offset + chunk_size - 1 ) ) . string offset += chunk_size end end
Reads the file for the given key in chunks yielding each to the block .
117
# Returns true when the watched file set changed size or its newest mtime
# advanced past @last_update_at; caches the fresh snapshot in the pending
# @watched / @updated_at fields (distinct from the @last_* baselines, which
# are presumably committed elsewhere — confirm against the caller).
def updated? current_watched = watched if @last_watched . size != current_watched . size @watched = current_watched true else current_updated_at = updated_at ( current_watched ) if @last_update_at < current_updated_at @watched = current_watched @updated_at = current_updated_at true else false end end end
Checks whether any of the watched files were updated. Returns true (caching the new snapshot) when the watched set changed size or its newest mtime advanced; returns false otherwise.
118
# Returns the maximum mtime of the files in +paths+, ignoring mtimes in the
# future, or nil when no eligible file exists. Uses compare_without_coercion
# to avoid Time/DateTime coercion overhead.
def max_mtime(paths)
  now = Time.now
  newest = nil
  paths.each do |path|
    mtime = File.mtime(path)
    next if now.compare_without_coercion(mtime) < 0
    newest = mtime if newest.nil? || newest.compare_without_coercion(mtime) < 0
  end
  newest
end
This method returns the maximum mtime of the files in + paths + or + nil + if the array is empty .
119
# Measures the execution time of the block and logs "<message> (<ms>ms)" at
# options[:level] (default :info); options[:silence] suppresses nested log
# output during the measured block. Without a logger the block simply runs.
def benchmark(message = "Benchmarking", options = {})
  return yield unless logger

  options.assert_valid_keys(:level, :silence)
  options[:level] ||= :info

  result = nil
  ms = Benchmark.ms do
    result = options[:silence] ? logger.silence { yield } : yield
  end
  logger.send(options[:level], "%s (%.1fms)" % [message, ms])
  result
end
Allows you to measure the execution time of a block in a template and records the result to the log. Wrap this block around expensive operations or possible bottlenecks to get a time reading for the operation. For example, let's say you thought your file processing method was taking too long; you could wrap it in a benchmark block.
120
# Initializes a new record from the relation while maintaining the current
# scope; the given block is wrapped to warn about deprecated scope access.
def new(attributes = nil, &block)
  wrapped = _deprecated_scope_block("new", &block)
  scoping { klass.new(attributes, &wrapped) }
end
Initializes new record from relation while maintaining the current scope .
121
# Creates a record with the relation's scoped attributes. An Array of
# attribute hashes creates one record per element; the block is wrapped to
# warn about deprecated scope access.
def create(attributes = nil, &block)
  if attributes.is_a?(Array)
    attributes.map { |attrs| create(attrs, &block) }
  else
    wrapped = _deprecated_scope_block("create", &block)
    scoping { klass.create(attributes, &wrapped) }
  end
end
Tries to create a new record with the same scoped attributes defined in the relation . Returns the initialized object if validation fails .
122
# Returns (and memoizes) the SQL statement for the relation. Eager-loaded
# relations go through the join dependency to apply column aliases; otherwise
# the Arel AST is rendered via an unprepared statement.
def to_sql @to_sql ||= begin if eager_loading? apply_join_dependency do | relation , join_dependency | relation = join_dependency . apply_column_aliases ( relation ) relation . to_sql end else conn = klass . connection conn . unprepared_statement { conn . to_sql ( arel ) } end end end
Returns sql statement for the relation .
123
# Initializes a model object directly from an attributes object — no
# per-attribute assignment is performed. Runs find and initialize callbacks
# after the optional block; returns self.
def init_with_attributes(attributes, new_record = false)
  @new_record = new_record
  @attributes = attributes

  init_internals

  yield self if block_given?

  _run_find_callbacks
  _run_initialize_callbacks

  self
end
Initialize an empty model object from + attributes + . + attributes + should be an attributes object and unlike the initialize method no assignment calls are made per attribute .
124
# Returns the record as a nicely formatted string: each attribute the record
# has, filtered through inspection_filter (nil values shown as-is), or
# "not initialized" when @attributes is absent.
def inspect inspection = if defined? ( @attributes ) && @attributes self . class . attribute_names . collect do | name | if has_attribute? ( name ) attr = _read_attribute ( name ) value = if attr . nil? attr . inspect else attr = format_for_inspect ( attr ) inspection_filter . filter_param ( name , attr ) end "#{name}: #{value}" end end . compact . join ( ", " ) else "not initialized" end "#<#{self.class} #{inspection}>" end
Returns the contents of the record as a nicely formatted string .
125
# Creates the Mail message and renders the email templates. Returns the
# memoized message when called again with no headers and no block. The
# explicit :content_type from the caller takes precedence; charset is
# captured before and re-applied after parts are created, and multipart
# bodies are sorted per headers[:parts_order].
def mail ( headers = { } , & block ) return message if @_mail_was_called && headers . blank? && ! block content_type = headers [ :content_type ] headers = apply_defaults ( headers ) message . charset = charset = headers [ :charset ] wrap_delivery_behavior! ( headers [ :delivery_method ] , headers [ :delivery_method_options ] ) assign_headers_to_message ( message , headers ) responses = collect_responses ( headers , & block ) @_mail_was_called = true create_parts_from_responses ( message , responses ) message . content_type = set_content_type ( message , content_type , headers [ :content_type ] ) message . charset = charset if message . multipart? message . body . set_sort_order ( headers [ :parts_order ] ) message . body . sort_parts! end message end
The main method that creates the message and renders the email templates . There are two ways to call this method with a block or without a block .
126
# Attaches one or more +attachables+ to the record, appending them to the
# existing blobs. Persisted, unchanged records are updated immediately;
# otherwise the attachments are assigned for a later save.
def attach(*attachables)
  combined = blobs + attachables.flatten
  if record.persisted? && !record.changed?
    record.update(name => combined)
  else
    record.public_send("#{name}=", combined)
  end
end
Attaches one or more + attachables + to the record .
127
# Sets the HTTP character set, preserving the current mime type. +false+
# clears the charset entirely; +nil+ falls back to the class default.
def charset=(charset)
  mime_type = parsed_content_type_header.mime_type
  if charset == false
    set_content_type mime_type, nil
  else
    set_content_type mime_type, charset || self.class.default_charset
  end
end
Sets the HTTP character set . In case of + nil + parameter it sets the charset to + default_charset + .
128
# Returns the number of days back to the start of the week, where the week
# begins on +start_day+ (default Date.beginning_of_week). Raises KeyError for
# an unknown day name via DAYS_INTO_WEEK.fetch.
def days_to_week_start(start_day = Date.beginning_of_week)
  (wday - DAYS_INTO_WEEK.fetch(start_day)) % 7
end
Returns the number of days to the start of the week on the given day . Week is assumed to start on + start_day + default is + Date . beginning_of_week + or + config . beginning_of_week + when set .
129
# Sends a request to the remote endpoint, marshalling it first unless the
# caller indicates it is already marshalled.
def remote_send(req, marshalled = false)
  send_initial_metadata
  GRPC.logger.debug("sending #{req}, marshalled? #{marshalled}")
  msg = marshalled ? req : @marshal.call(req)
  @call.run_batch(SEND_MESSAGE => msg)
end
remote_send sends a request to the remote endpoint .
130
# Sends a status to the remote endpoint, optionally also waiting for the
# client's close (assert_finished). Marks the output stream done; returns nil.
def send_status ( code = OK , details = '' , assert_finished = false , metadata : { } ) send_initial_metadata ops = { SEND_STATUS_FROM_SERVER => Struct :: Status . new ( code , details , metadata ) } ops [ RECV_CLOSE_ON_SERVER ] = nil if assert_finished @call . run_batch ( ops ) set_output_stream_done nil end
send_status sends a status to the remote endpoint .
131
# Reads one response from the remote endpoint, receiving initial metadata on
# the first read and recording it on the call.
def remote_read ops = { RECV_MESSAGE => nil } ops [ RECV_INITIAL_METADATA ] = nil unless @metadata_received batch_result = @call . run_batch ( ops ) unless @metadata_received @call . metadata = batch_result . metadata @metadata_received = true end get_message_from_batch_result ( batch_result ) end
remote_read reads a response from the remote endpoint .
132
# Passes each remote response to the given block, or returns an enumerator
# when no block is given; marks the input stream done when reading ends.
def each_remote_read
  return enum_for(:each_remote_read) unless block_given?
  begin
    until (resp = remote_read).nil?
      yield resp
    end
  ensure
    set_input_stream_done
  end
end
each_remote_read passes each response to the given block, or returns an enumerator of the responses if no block is given. Used to generate the request enumerable for server-side client-streaming RPCs.
133
# Like each_remote_read, but treats CallError during a read as end-of-stream
# and checks the final status once reads are exhausted; the input stream is
# marked done in all cases.
def each_remote_read_then_finish return enum_for ( :each_remote_read_then_finish ) unless block_given? loop do resp = begin remote_read rescue GRPC :: Core :: CallError => e GRPC . logger . warn ( "In each_remote_read_then_finish: #{e}" ) nil end break if resp . nil? yield resp end receive_and_check_status ensure set_input_stream_done end
each_remote_read_then_finish passes each response to the given block or returns an enumerator of the responses if no block is given .
134
# Performs a unary call: sends the request (attaching initial metadata once,
# under the mutex), receives metadata, message and status in a single batch,
# and returns the unmarshalled response. Both stream-done flags are set even
# when the batch raises.
def request_response ( req , metadata : { } ) raise_error_if_already_executed ops = { SEND_MESSAGE => @marshal . call ( req ) , SEND_CLOSE_FROM_CLIENT => nil , RECV_INITIAL_METADATA => nil , RECV_MESSAGE => nil , RECV_STATUS_ON_CLIENT => nil } @send_initial_md_mutex . synchronize do unless @metadata_sent ops [ SEND_INITIAL_METADATA ] = @metadata_to_send . merge! ( metadata ) end @metadata_sent = true end begin batch_result = @call . run_batch ( ops ) ensure set_input_stream_done set_output_stream_done end @call . metadata = batch_result . metadata attach_status_results_and_complete_call ( batch_result ) get_message_from_batch_result ( batch_result ) end
request_response sends a request to a GRPC server and returns the response .
135
# Sends a stream of requests and returns the single response. CallError while
# sending triggers a status check before re-raising; other errors mark the
# input stream done; the output stream is marked done in all cases.
def client_streamer ( requests , metadata : { } ) raise_error_if_already_executed begin send_initial_metadata ( metadata ) requests . each { | r | @call . run_batch ( SEND_MESSAGE => @marshal . call ( r ) ) } rescue GRPC :: Core :: CallError => e receive_and_check_status raise e rescue => e set_input_stream_done raise e ensure set_output_stream_done end batch_result = @call . run_batch ( SEND_CLOSE_FROM_CLIENT => nil , RECV_INITIAL_METADATA => nil , RECV_MESSAGE => nil , RECV_STATUS_ON_CLIENT => nil ) set_input_stream_done @call . metadata = batch_result . metadata attach_status_results_and_complete_call ( batch_result ) get_message_from_batch_result ( batch_result ) end
client_streamer sends a stream of requests to a GRPC server and returns a single response .
136
# Sends one request and yields (or returns, when no block is given) the
# enumerator of streamed responses produced by each_remote_read_then_finish.
# Initial metadata is attached once under the mutex; error handling mirrors
# client_streamer.
def server_streamer ( req , metadata : { } ) raise_error_if_already_executed ops = { SEND_MESSAGE => @marshal . call ( req ) , SEND_CLOSE_FROM_CLIENT => nil } @send_initial_md_mutex . synchronize do unless @metadata_sent ops [ SEND_INITIAL_METADATA ] = @metadata_to_send . merge! ( metadata ) end @metadata_sent = true end begin @call . run_batch ( ops ) rescue GRPC :: Core :: CallError => e receive_and_check_status raise e rescue => e set_input_stream_done raise e ensure set_output_stream_done end replies = enum_for ( :each_remote_read_then_finish ) return replies unless block_given? replies . each { | r | yield r } end
server_streamer sends one request to the GRPC server which yields a stream of responses .
137
# Starts a bidirectional stream: sends initial metadata (collecting the final
# status and marking both streams done on CallError), then delegates the
# read/write loops to a BidiCall.
def bidi_streamer ( requests , metadata : { } , & blk ) raise_error_if_already_executed begin send_initial_metadata ( metadata ) rescue GRPC :: Core :: CallError => e batch_result = @call . run_batch ( RECV_STATUS_ON_CLIENT => nil ) set_input_stream_done set_output_stream_done attach_status_results_and_complete_call ( batch_result ) raise e rescue => e set_input_stream_done set_output_stream_done raise e end bd = BidiCall . new ( @call , @marshal , @unmarshal , metadata_received : @metadata_received ) bd . run_on_client ( requests , proc { set_input_stream_done } , proc { set_output_stream_done } , & blk ) end
bidi_streamer sends a stream of requests to the GRPC server and yields a stream of responses .
138
# Orchestrates server-side BiDi stream processing: builds a BidiCall over the
# multi-request view, obtains the lazy request enumerable, and runs the
# handler inside the interception context.
def run_server_bidi ( mth , interception_ctx ) view = multi_req_view bidi_call = BidiCall . new ( @call , @marshal , @unmarshal , metadata_received : @metadata_received , req_view : view ) requests = bidi_call . read_next_loop ( proc { set_input_stream_done } , false ) interception_ctx . intercept! ( :bidi_streamer , call : view , method : mth , requests : requests ) do bidi_call . run_on_server ( mth , requests ) end end
run_server_bidi orchestrates a BiDi stream processing on a server .
139
# Begins BiDi orchestration for a client: writes the requests on a background
# thread while reading responses on the caller's thread (yielding each to blk
# or returning an enumerator).
def run_on_client(requests, set_input_stream_done, set_output_stream_done, &blk)
  @enq_th = Thread.new do
    write_loop(requests, set_output_stream_done: set_output_stream_done)
  end
  read_loop(set_input_stream_done, &blk)
end
Begins orchestration of the BiDi stream for a client: writes requests on a background thread while reading responses.
140
# Begins orchestration of the BiDi stream for a server generating replies:
# invokes the reply generator with the requests (and the request view when
# its arity is 2), then writes the replies back.
def run_on_server(gen_each_reply, requests)
  replies =
    case gen_each_reply.arity
    when 1 then gen_each_reply.call(requests)
    when 2 then gen_each_reply.call(requests, @req_view)
    else fail 'Illegal arity of reply generator'
    end
  write_loop(replies, is_client: false)
end
Begins orchestration of the Bidi stream for a server generating replies .
141
# Performs a single read via @call.run_batch, receiving and recording initial
# metadata on the first read. Returns the batch result, or nil when the batch
# fails with CallError.
def read_using_run_batch ops = { RECV_MESSAGE => nil } ops [ RECV_INITIAL_METADATA ] = nil unless @metadata_received begin batch_result = @call . run_batch ( ops ) unless @metadata_received @call . metadata = batch_result . metadata @metadata_received = true end batch_result rescue GRPC :: Core :: CallError => e GRPC . logger . warn ( 'bidi call: read_using_run_batch failed' ) GRPC . logger . warn ( e ) nil end end
Performs a read using @call.run_batch, receiving initial metadata on the first read; returns nil when the batch fails.
142
# Marshals and sends each request; on the client side it then sends
# SEND_CLOSE_FROM_CLIENT. A CallError mid-stream ends the loop without
# raising; other errors cancel the call (client) or re-raise (server).
# set_output_stream_done is invoked from ensure, client-side only.
def write_loop ( requests , is_client : true , set_output_stream_done : nil ) GRPC . logger . debug ( 'bidi-write-loop: starting' ) count = 0 requests . each do | req | GRPC . logger . debug ( "bidi-write-loop: #{count}" ) count += 1 payload = @marshal . call ( req ) begin @req_view . send_initial_metadata unless @req_view . nil? @call . run_batch ( SEND_MESSAGE => payload ) rescue GRPC :: Core :: CallError => e GRPC . logger . warn ( 'bidi-write-loop: ended with error' ) GRPC . logger . warn ( e ) break end end GRPC . logger . debug ( "bidi-write-loop: #{count} writes done" ) if is_client GRPC . logger . debug ( "bidi-write-loop: client sent #{count}, waiting" ) begin @call . run_batch ( SEND_CLOSE_FROM_CLIENT => nil ) rescue GRPC :: Core :: CallError => e GRPC . logger . warn ( 'bidi-write-loop: send close failed' ) GRPC . logger . warn ( e ) end GRPC . logger . debug ( 'bidi-write-loop: done' ) end GRPC . logger . debug ( 'bidi-write-loop: finished' ) rescue StandardError => e GRPC . logger . warn ( 'bidi-write-loop: failed' ) GRPC . logger . warn ( e ) if is_client @call . cancel_with_status ( GRPC :: Core :: StatusCodes :: UNKNOWN , "GRPC bidi call error: #{e.inspect}" ) else raise e end ensure set_output_stream_done . call if is_client end
set_output_stream_done is relevant on client - side
143
# Yields each unmarshalled remote read (or returns an enumerator without a
# block). A nil batch or nil message ends the loop; on the client side the
# final status is then received and checked. set_input_stream_done is called
# from ensure, and the client joins the enqueue thread on exit.
def read_loop ( set_input_stream_done , is_client : true ) return enum_for ( :read_loop , set_input_stream_done , is_client : is_client ) unless block_given? GRPC . logger . debug ( 'bidi-read-loop: starting' ) begin count = 0 loop do GRPC . logger . debug ( "bidi-read-loop: #{count}" ) count += 1 batch_result = read_using_run_batch if batch_result . nil? || batch_result . message . nil? GRPC . logger . debug ( "bidi-read-loop: null batch #{batch_result}" ) if is_client batch_result = @call . run_batch ( RECV_STATUS_ON_CLIENT => nil ) @call . status = batch_result . status @call . trailing_metadata = @call . status . metadata if @call . status GRPC . logger . debug ( "bidi-read-loop: done status #{@call.status}" ) batch_result . check_status end GRPC . logger . debug ( 'bidi-read-loop: done reading!' ) break end res = @unmarshal . call ( batch_result . message ) yield res end rescue StandardError => e GRPC . logger . warn ( 'bidi: read-loop failed' ) GRPC . logger . warn ( e ) raise e ensure set_input_stream_done . call end GRPC . logger . debug ( 'bidi-read-loop: finished' ) @enq_th . join if is_client end
Provides an enumerator that yields results of remote reads
144
# Schedules the block with args onto an idle worker's queue. No-op without a
# block or when stopped; fails when no worker is ready or the popped worker
# queue unexpectedly already holds a task.
def schedule ( * args , & blk ) return if blk . nil? @stop_mutex . synchronize do if @stopped GRPC . logger . warn ( 'did not schedule job, already stopped' ) return end GRPC . logger . info ( 'schedule another job' ) fail 'No worker threads available' if @ready_workers . empty? worker_queue = @ready_workers . pop fail 'worker already has a task waiting' unless worker_queue . empty? worker_queue << [ blk , args ] end end
Runs the given block on the queue with the provided args .
145
# Starts the pool: spawns worker threads until @size is reached, each with its
# own job queue registered in @ready_workers; workers exit via catch(:exit)
# and deregister themselves. Fails if the pool was already stopped.
def start @stop_mutex . synchronize do fail 'already stopped' if @stopped end until @workers . size == @size . to_i new_worker_queue = Queue . new @ready_workers << new_worker_queue next_thread = Thread . new ( new_worker_queue ) do | jobs | catch ( :exit ) do loop_execute_jobs ( jobs ) end remove_current_thread end @workers << next_thread end end
Starts running the jobs in the thread pool .
146
# Stops the pool: marks it stopped, posts a throw(:exit) job to every ready
# worker, waits up to @keep_alive for workers to drain, then force-kills any
# survivors.
def stop GRPC . logger . info ( 'stopping, will wait for all the workers to exit' ) @stop_mutex . synchronize do @stopped = true loop do break unless ready_for_work? worker_queue = @ready_workers . pop worker_queue << [ proc { throw :exit } , [ ] ] end @stop_cond . wait ( @stop_mutex , @keep_alive ) if @workers . size > 0 end forcibly_stop_workers GRPC . logger . info ( 'stopped, all workers are shutdown' ) end
Stops the jobs in the pool
147
# Forcibly terminates any worker threads that are still alive, logging (but
# not propagating) errors raised while killing a thread.
def forcibly_stop_workers
  return if @workers.empty?
  GRPC.logger.info("forcibly terminating #{@workers.size} worker(s)")
  @workers.each do |worker|
    next unless worker.alive?
    begin
      worker.exit
    rescue StandardError => e
      GRPC.logger.warn('error while terminating a worker')
      GRPC.logger.warn(e)
    end
  end
end
Forcibly shutdown any threads that are still alive .
148
# Stops the server. When the signal-handling path armed @stop_server (set to
# false by run_till_terminated_or_interrupted), just flip the flag and wake
# that thread. Otherwise transition to :stopping, give in-flight calls
# @poll_period to finish, and stop the worker pool. Note nil (never armed)
# deliberately skips the flag path.
def stop
  if @stop_server == false
    @stop_server = true
    @stop_server_cv.broadcast
    return
  end
  @run_mutex.synchronize do
    fail 'Cannot stop before starting' if @running_state == :not_started
    return if @running_state != :running
    transition_running_state(:stopping)
    deadline = from_relative_time(@poll_period)
    @server.shutdown_and_notify(deadline)
  end
  @pool.stop
end
Stops a running RpcServer: signals the stop-server thread when one is armed, otherwise shuts the server and worker pool down.
149
# Advances @running_state along the only legal path
# (:not_started -> :running -> :stopping -> :stopped); any other transition
# fails. Can only be called while holding @run_mutex.
def transition_running_state(target_state)
  allowed = { not_started: :running, running: :stopping, stopping: :stopped }
  unless allowed[@running_state] == target_state
    fail "Bad server state transition: #{@running_state}->#{target_state}"
  end
  @running_state = target_state
end
Can only be called while holding @run_mutex.
150
# Registers a service (class or instance) with the server. Must be called
# before the server starts; validates the service class and records its RPC
# descriptions.
def handle(service)
  @run_mutex.synchronize do
    fail 'cannot add services if the server has been started' unless @running_state == :not_started
    cls = service.is_a?(Class) ? service : service.class
    assert_valid_service_class(cls)
    add_rpc_descs_for(service)
  end
end
handle registration of classes
151
# Runs the server: fails when no services are registered, starts the pool and
# core server, transitions to :running (broadcasting to waiters), then enters
# the call-handling loop.
def run @run_mutex . synchronize do fail 'cannot run without registering services' if rpc_descs . size . zero? @pool . start @server . start transition_running_state ( :running ) @run_cond . broadcast end loop_handle_server_calls end
runs the server
152
# Runs the server until one of +signals+ arrives or stop is requested. A
# helper thread polls @stop_server every wait_interval seconds and calls stop
# when it flips. Signal names are normalized (upcased, "SIG" prefix stripped)
# and validated against Signal.list before trapping; traps only set the flag
# and broadcast, since little else is legal inside a trap handler.
def run_till_terminated_or_interrupted ( signals , wait_interval = 60 ) @stop_server = false @stop_server_mu = Mutex . new @stop_server_cv = ConditionVariable . new @stop_server_thread = Thread . new do loop do break if @stop_server @stop_server_mu . synchronize do @stop_server_cv . wait ( @stop_server_mu , wait_interval ) end end stop end valid_signals = Signal . list signals . each do | sig | if sig . class == String sig . upcase! if sig . start_with? ( 'SIG' ) sig = sig [ 3 .. - 1 ] end end if valid_signals . value? ( sig ) || valid_signals . key? ( sig ) Signal . trap ( sig ) do @stop_server = true @stop_server_cv . broadcast end else fail "#{sig} not a valid signal" end end run @stop_server_thread . join end
runs the server with signal handlers
153
# Returns an_rpc when a worker thread is free; otherwise replies
# RESOURCE_EXHAUSTED to the client and returns nil.
def available? ( an_rpc ) return an_rpc if @pool . ready_for_work? GRPC . logger . warn ( 'no free worker threads currently' ) noop = proc { | x | x } c = ActiveCall . new ( an_rpc . call , noop , noop , an_rpc . deadline , metadata_received : true , started : false ) c . send_status ( GRPC :: Core :: StatusCodes :: RESOURCE_EXHAUSTED , 'No free threads in thread pool' ) nil end
Sends RESOURCE_EXHAUSTED if there are too many unprocessed jobs
154
# Returns an_rpc when this server implements the requested method; otherwise
# replies UNIMPLEMENTED to the client and returns nil.
def implemented? ( an_rpc ) mth = an_rpc . method . to_sym return an_rpc if rpc_descs . key? ( mth ) GRPC . logger . warn ( "UNIMPLEMENTED: #{an_rpc}" ) noop = proc { | x | x } c = ActiveCall . new ( an_rpc . call , noop , noop , an_rpc . deadline , metadata_received : true , started : false ) c . send_status ( GRPC :: Core :: StatusCodes :: UNIMPLEMENTED , '' ) nil end
Sends UNIMPLEMENTED if the method is not implemented by this server
155
# Main accept loop: while :running, requests calls from the core server,
# wraps each in an active call and schedules it on the pool; handler failures
# reply INTERNAL. CallError/RuntimeError during accept are logged and the
# loop continues. On exit the state becomes :stopped and the server closes.
def loop_handle_server_calls fail 'not started' if running_state == :not_started while running_state == :running begin an_rpc = @server . request_call break if ( ! an_rpc . nil? ) && an_rpc . call . nil? active_call = new_active_server_call ( an_rpc ) unless active_call . nil? @pool . schedule ( active_call ) do | ac | c , mth = ac begin rpc_descs [ mth ] . run_server_method ( c , rpc_handlers [ mth ] , @interceptors . build_context ) rescue StandardError c . send_status ( GRPC :: Core :: StatusCodes :: INTERNAL , 'Server handler failed' ) end end end rescue Core :: CallError , RuntimeError => e if running_state == :running GRPC . logger . warn ( "server call failed: #{e}" ) end next end end @run_mutex . synchronize do transition_running_state ( :stopped ) GRPC . logger . info ( "stopped: #{self}" ) @server . close end end
handles calls to the server
156
# Stub-level unary call: builds an active call and either returns an
# Operation whose #execute runs the intercepted call later (return_op: true,
# with metadata merged up front), or runs the intercepted call immediately
# and returns the response.
def request_response ( method , req , marshal , unmarshal , deadline : nil , return_op : false , parent : nil , credentials : nil , metadata : { } ) c = new_active_call ( method , marshal , unmarshal , deadline : deadline , parent : parent , credentials : credentials ) interception_context = @interceptors . build_context intercept_args = { method : method , request : req , call : c . interceptable , metadata : metadata } if return_op c . merge_metadata_to_send ( metadata ) op = c . operation op . define_singleton_method ( :execute ) do interception_context . intercept! ( :request_response , intercept_args ) do c . request_response ( req , metadata : metadata ) end end op else interception_context . intercept! ( :request_response , intercept_args ) do c . request_response ( req , metadata : metadata ) end end end
Creates a new ClientStub .
157
# Creates a new active (un-started) call on the channel, defaulting the
# deadline from @timeout and attaching credentials when given.
def new_active_call(method, marshal, unmarshal, deadline: nil, parent: nil, credentials: nil)
  deadline = from_relative_time(@timeout) if deadline.nil?
  call = @ch.create_call(parent, @propagate_mask, method, nil, deadline)
  call.set_credentials! credentials unless credentials.nil?
  ActiveCall.new(call, marshal, unmarshal, deadline, started: false)
end
Creates a new active stub
158
# Looks up a dotted Liquid variable path (e.g. "page.title") in +context+.
# Each dot-separated segment indexes into the previous value. If the path
# resolves to nil — including when an intermediate segment is missing — the
# literal +variable+ string is returned as a fallback instead of raising.
def lookup_variable(context, variable)
  lookup = context
  variable.split(".").each do |segment|
    # Stop early when a segment is missing so we never index into nil
    # (the original raised NoMethodError here for partial matches).
    break if lookup.nil?
    lookup = lookup[segment]
  end
  lookup || variable
end
Lookup a Liquid variable in the given context .
159
# Determines (and memoizes) which site converters apply to this document's
# extension, sorted by the converters' own ordering.
def converters
  @converters ||= begin
    matching = site.converters.select { |converter| converter.matches(document.extname) }
    matching.sort
  end
end
Determine which converters to use based on this document s extension .
160
# Prepares the payload (page, current document, highlighter options, layout
# data), fires the :pre_render hooks, then renders the document.
def run
  Jekyll.logger.debug "Rendering:", document.relative_path

  assign_pages!
  assign_current_document!
  assign_highlighter_options!
  assign_layout_data!

  Jekyll.logger.debug "Pre-Render Hooks:", document.relative_path
  document.trigger_hooks(:pre_render, payload)

  render_document
end
Prepare payload and render the document
161
# Renders the document: Liquid (when enabled), then markup conversion, then
# layout placement (when enabled). The converted output is written back to
# document.content before layouts are applied; returns the final output.
def render_document info = { :registers => { :site => site , :page => payload [ "page" ] } , :strict_filters => liquid_options [ "strict_filters" ] , :strict_variables => liquid_options [ "strict_variables" ] , } output = document . content if document . render_with_liquid? Jekyll . logger . debug "Rendering Liquid:" , document . relative_path output = render_liquid ( output , payload , info , document . path ) end Jekyll . logger . debug "Rendering Markup:" , document . relative_path output = convert ( output . to_s ) document . content = output if document . place_in_layout? Jekyll . logger . debug "Rendering Layout:" , document . relative_path output = place_in_layouts ( output , payload , info ) end output end
Render the document .
162
# Renders +content+ inside the document's layout and then each ancestor
# layout, innermost first. Layouts already rendered are tracked in a Set
# so a cycle in layout inheritance terminates instead of looping forever.
#
# Returns the fully layouted output String.
def place_in_layouts(content, payload, info)
  output = content.dup
  layout = layouts[document.data["layout"].to_s]
  validate_layout(layout)
  used = Set.new([layout])
  # The document's own "layout" key is cleared; each layout's front
  # matter is merged in by render_layout as the chain is walked.
  payload["layout"] = nil
  while layout
    output = render_layout(output, layout, info)
    add_regenerator_dependencies(layout)
    # Stop when there is no parent layout; `next` re-tests the loop
    # condition with layout == nil.
    next unless (layout = site.layouts[layout.data["layout"]])
    break if used.include?(layout)
    used << layout
  end
  output
end
Render layouts and place document content inside .
163
# Warns when the layout requested by the document does not exist;
# otherwise (for a present layout) logs whether it came from the site
# source or from the theme.
def validate_layout(layout)
  if invalid_layout?(layout)
    message = "Layout '#{document.data["layout"]}' requested " \
              "in #{document.relative_path} does not exist."
    Jekyll.logger.warn("Build Warning:", message)
  elsif !layout.nil?
    origin = layout.path.start_with?(site.source) ? :site : :theme
    Jekyll.logger.debug "Layout source:", origin
  end
end
Checks if the layout specified in the document actually exists
164
# Renders a single layout around the already-rendered +output+.
# The layout's front matter is deep-merged over any accumulated layout
# data before its content is run through Liquid.
def render_layout(output, layout, info)
  merged_layout_data = Utils.deep_merge_hashes(layout.data, payload["layout"] || {})
  payload["content"] = output
  payload["layout"] = merged_layout_data
  render_liquid(layout.content, payload, info, layout.relative_path)
end
Render layout content into document.output.
165
# Exposes the document (and its pager, when present) to Liquid via the
# payload's "page" and "paginator" keys.
def assign_pages!
  payload["page"] = document.to_liquid
  payload["paginator"] = document.respond_to?(:pager) ? document.pager.to_liquid : nil
end
Set page content to payload and assign pager if document has one .
166
# Reads all site content from disk into the Site's internal structures.
# Layouts are read first, then the source tree, then explicit includes/
# excludes; files are sorted before data, collections and theme assets
# are loaded.
def read
  @site.layouts = LayoutReader.new(site).read
  read_directories
  read_included_excludes
  sort_files!
  @site.data = DataReader.new(site).read(site.config["data_dir"])
  CollectionReader.new(site).read
  ThemeAssetsReader.new(site).read
end
Read Site data from disk and load it into internal data structures .
167
# Recursively descends into each dot-directory under +dir+, skipping the
# site destination directory so generated output is never re-read.
def retrieve_dirs(_base, dir, dot_dirs)
  dot_dirs.each do |entry|
    entry_path = site.in_source_dir(dir, entry)
    next if @site.dest.chomp("/") == entry_path
    @site.reader.read_directories(File.join(dir, entry))
  end
end
Recursively traverse directories with the read_directories function .
168
# Lists the (filtered) file entries under dir/subfolder, relative to that
# base. Directories themselves are dropped; only files remain.
# Returns [] when the folder does not exist.
def get_entries(dir, subfolder)
  base = site.in_source_dir(dir, subfolder)
  return [] unless File.exist?(base)

  all_entries = Dir.chdir(base) { filter_entries(Dir["**/*"], base) }
  all_entries.reject { |entry| File.directory?(site.in_source_dir(base, entry)) }
end
Read the entries from a particular directory for processing
169
# Writes every site file that the regenerator deems stale, persists the
# regenerator metadata, then fires the :post_write site hook.
def write
  each_site_file do |item|
    next unless regenerator.regenerate?(item)
    item.write(dest)
  end
  regenerator.write_metadata
  Jekyll::Hooks.trigger :site, :post_write, self
end
Write static files, pages, and posts.
170
# Memoized index of posts keyed by the values of +post_attr+
# (e.g. :categories or :tags). Each value maps to its posts, newest
# first.
def post_attr_hash(post_attr)
  @post_attr_hash[post_attr] ||= begin
    # Default block gives each key its own array (never a shared one).
    grouped = Hash.new { |hash, key| hash[key] = [] }
    posts.docs.each do |doc|
      doc.data[post_attr]&.each { |value| grouped[value] << doc }
    end
    grouped.each_value { |docs| docs.sort!.reverse! }
    grouped
  end
end
Construct a Hash of Posts indexed by the specified Post attribute .
171
# Returns the converter instance that is exactly of class +klass+,
# caching the lookup per class. Raises when no such converter exists.
def find_converter_instance(klass)
  @find_converter_instance ||= {}
  @find_converter_instance[klass] ||=
    converters.find { |candidate| candidate.instance_of?(klass) } ||
    raise("No Converters found for #{klass}")
end
Get the implementation class for the given Converter . Returns the Converter instance implementing the given Converter . klass - The Class of the Converter to fetch .
172
# Instantiates every descendant of +klass+ with the site config.
# In safe mode only subclasses marked safe are instantiated.
# Returns the instances sorted by their class ordering.
def instantiate_subclasses(klass)
  klass.descendants
       .select { |subclass| !safe || subclass.safe }
       .sort
       .map { |subclass| subclass.new(config) }
end
klass - class or module containing the subclasses . Returns array of instances of subclasses of parameter . Create array of instances of the subclasses of the class or module passed in as argument .
173
# Memoized array of every document and file across all collections,
# de-duplicated by accumulating through a Set.
def documents
  @documents ||= begin
    unique = collections.reduce(Set.new) do |accumulated, (_, collection)|
      accumulated + collection.docs + collection.files
    end
    unique.to_a
  end
end
Get all the documents
174
# Points the Jekyll cache at the configured cache_dir; in safe mode the
# on-disk cache is disabled entirely (memory-only caching).
def configure_cache
  Jekyll::Cache.cache_dir = in_source_dir(config["cache_dir"], "Jekyll/Cache")
  return unless safe
  Jekyll::Cache.disable_disk_cache!
end
Disable Marshaling cache to disk in Safe Mode
175
# Finds the default value for +setting+ among the sets matching +path+
# and +type+, letting later, higher-precedence scopes win.
# Returns nil when no matching set defines the setting.
def find(path, type, setting)
  winning_value = nil
  winning_scope = nil
  matching_sets(path, type).each do |set|
    next unless set["values"].key?(setting)
    next unless has_precedence?(winning_scope, set["scope"])
    winning_value = set["values"][setting]
    winning_scope = set["scope"]
  end
  winning_value
end
Finds a default value for a given setting filtered by path and type
176
# Collects every default value applying to +path+/+type+ into one hash.
# Higher-precedence scopes are merged over the accumulator; lower-
# precedence sets are merged underneath it, so precedence is preserved
# regardless of iteration order.
def all(path, type)
  merged = {}
  highest_scope = nil
  matching_sets(path, type).each do |set|
    if has_precedence?(highest_scope, set["scope"])
      merged = Utils.deep_merge_hashes(merged, set["values"])
      highest_scope = set["scope"]
    else
      merged = Utils.deep_merge_hashes(set["values"], merged)
    end
  end
  merged
end
Collects a hash with all default values for a page or post
177
# Returns the site's front-matter default sets that pass validation,
# with deprecated types updated and date values coerced to Time.
# Invalid sets are logged and dropped.
def valid_sets
  sets = @site.config["defaults"]
  return [] unless sets.is_a?(Array)

  sets.each_with_object([]) do |set, valid|
    if valid?(set)
      valid << ensure_time!(update_deprecated_types(set))
    else
      Jekyll.logger.warn "Defaults:", "An invalid front-matter default set was found:"
      Jekyll.logger.warn set.to_s
    end
  end
end
Returns a list of valid sets
178
# Fetches +key+ from the in-memory cache, falling back to the disk cache
# (and populating memory) when disk caching is enabled.
# Raises (RuntimeError via bare raise) when the key exists nowhere.
def [](key)
  return @cache[key] if @cache.key?(key)

  disk_path = path_to(hash(key))
  unless disk_cache_enabled? && File.file?(disk_path) && File.readable?(disk_path)
    raise
  end
  @cache[key] = load(disk_path)
end
Retrieve a cached item Raises if key does not exist in cache
179
# Stores +value+ under +key+ in the in-memory cache and, when disk
# caching is enabled, persists it to disk as well.
# Objects that cannot be serialized stay memory-only: the TypeError from
# dump is logged at debug level and swallowed.
def []=(key, value)
  @cache[key] = value
  return unless disk_cache_enabled?
  path = path_to(hash(key))
  # NOTE(review): `new Hash(value)` looks like it rebuilds hashes that
  # carry a default before serialization — confirm this parses/behaves as
  # intended; it reads as though it could be a transcription artifact.
  value = new Hash(value) if value.is_a?(Hash) && !value.default.nil?
  dump(path, value)
rescue TypeError
  Jekyll.logger.debug "Cache:", "Cannot dump object #{key}"
end
Add an item to cache
180
# Removes +key+ from the in-memory cache and, when disk caching is
# enabled, deletes its backing file as well.
def delete(key)
  @cache.delete(key)
  return unless disk_cache_enabled?
  File.delete(path_to(hash(key)))
end
Remove one particular item from the cache
181
# True when +key+ is present in memory, or (with disk caching enabled)
# when a readable cache file exists for it on disk.
def key?(key)
  return true if @cache.key?(key)
  return false unless disk_cache_enabled?

  disk_path = path_to(hash(key))
  File.file?(disk_path) && File.readable?(disk_path)
end
Check if key already exists in this cache
182
# Maps a hashed key to its on-disk location: <base>/<first 2 chars>/<rest>.
# With no hash given, returns (and memoizes) this cache's base directory.
def path_to(hash = nil)
  @base_dir ||= File.join(Jekyll::Cache.cache_dir, @name)
  return @base_dir if hash.nil?

  File.join(@base_dir, hash[0..1], hash[2..-1]).freeze
end
Given a hashed key return the path to where this item would be saved on disk .
183
# Renders Liquid in +content+ by delegating to the shared renderer.
#
# content - the String to render
# payload - the Liquid payload hash
# info    - render options (registers, strictness flags)
# path    - path reported in Liquid error messages
def render_liquid(content, payload, info, path)
  _renderer.render_liquid(content, payload, info, path)
end
Render Liquid in the content
184
# Builds the Hash handed to Liquid for this object: front-matter
# defaults, overlaid by the object's data, overlaid by the values of the
# requested attributes (ATTRIBUTES_FOR_LIQUID unless +attrs+ is given).
def to_liquid(attrs = nil)
  attribute_names = attrs || self.class::ATTRIBUTES_FOR_LIQUID
  further_data = attribute_names.map { |attribute| [attribute, send(attribute)] }.to_h
  defaults = site.frontmatter_defaults.all(relative_path, type)
  Utils.deep_merge_hashes defaults, Utils.deep_merge_hashes(data, further_data)
end
Convert this Convertible's data to a Hash suitable for use by Liquid.
185
# Renders this convertible's output through the full layout chain.
# The shared renderer is discarded afterwards even if rendering raises.
def render_all_layouts(layouts, payload, info)
  renderer = _renderer
  renderer.layouts = layouts
  self.output = renderer.place_in_layouts(output, payload, info)
ensure
  @_renderer = nil
end
Recursively render layouts
186
# Runs the full render for this convertible (Liquid, markup, layouts),
# stores the result in +output+, then fires the :post_render hooks.
# The shared renderer is discarded afterwards even if rendering raises.
def do_layout(payload, layouts)
  renderer = _renderer
  renderer.layouts = layouts
  renderer.payload = payload
  self.output = renderer.run
  Jekyll.logger.debug "Post-Render Hooks:", relative_path
  Jekyll::Hooks.trigger hook_owner, :post_render, self
ensure
  @_renderer = nil
end
Add any necessary layouts to this convertible document .
187
# Requires each runtime dependency declared by the theme's gemspec,
# skipping jekyll itself and anything the plugin whitelist rejects.
# Returns false when the theme declares no runtime dependencies.
def require_theme_deps
  return false unless site.theme.runtime_dependencies

  site.theme.runtime_dependencies.each do |dependency|
    next if dependency.name == "jekyll"
    External.require_with_graceful_fail(dependency.name) if plugin_allowed?(dependency.name)
  end
end
Require each of the runtime_dependencies specified by the theme's gemspec.
188
# Requires every .rb file found under the plugin search paths.
# Does nothing in safe mode (no arbitrary plugin code is loaded).
def require_plugin_files
  return if site.safe

  plugins_path.each do |search_path|
    ruby_files = Utils.safe_glob(search_path, File.join("**", "*.rb"))
    Jekyll::External.require_with_graceful_fail(ruby_files)
  end
end
Require all . rb files if safe mode is off
189
# Full source path of this static file. When the file belongs to a
# collection and a collections_dir is configured, that directory is
# spliced in between the base and the file's own dir/name.
def path
  segments = [@base]
  segments << @site.config["collections_dir"] unless
    @collection.nil? || @site.config["collections_dir"].empty?
  segments << @dir
  segments << @name
  File.join(*segments.compact)
end
Returns the full source path of this static file.
190
# Merges +other+ into this document's data in place.
# Categories are merged first, then the hashes are deep-merged, then the
# date is (re)derived from the merged result.
#
# other  - Hash of data to merge in
# source - description of where +other+ came from, used when reporting
#          date-parsing problems
#
# Returns the merged data Hash.
def merge_data!(other, source: "YAML front matter")
  merge_categories!(other)
  Utils.deep_merge_hashes!(data, other)
  merge_date!(source)
  data
end
Merge some data in with this document s data .
191
# Recursively merges +overwrite+ into +target+ in place.
# Values are merged first, then the default proc is carried over, then
# any frozen values are duplicated so the result is safely mutable.
# Returns the mutated +target+.
def deep_merge_hashes!(target, overwrite)
  merge_values(target, overwrite)
  merge_default_proc(target, overwrite)
  duplicate_frozen_values(target)
  target
end
Merges a master hash with another hash recursively .
192
# Reads a value from +hash+, preferring the singular key over the plural
# one, then normalizes it to a flat array with nils removed.
def pluralized_array_from_hash(hash, singular_key, plural_key)
  value = value_from_singular_key(hash, singular_key)
  value ||= value_from_plural_key(hash, plural_key)
  [value].flatten.compact
end
Read array from the supplied hash favouring the singular key and then the plural key and handling any nil entries .
193
# True when +content+ contains a Liquid tag ("{%") or variable ("{{")
# opener; nil or empty content never does.
def has_liquid_construct?(content)
  return false if content.nil? || content.empty?

  ["{%", "{{"].any? { |token| content.include?(token) }
end
Determine whether the given content string contains Liquid Tags or Variables.
194
# Appends the suffix that matches +permalink_style+ to a copy of
# +template+: a trailing slash for :pretty, ":output_ext" for the
# date-like builtin styles, and for custom styles whichever of the two
# the style string itself ends with.
def add_permalink_suffix(template, permalink_style)
  result = template.dup
  style = permalink_style.to_s
  case permalink_style
  when :pretty
    result << "/"
  when :date, :ordinal, :none
    result << ":output_ext"
  else
    result << "/" if style.end_with?("/")
    result << ":output_ext" if style.end_with?(":output_ext")
  end
  result
end
Add an appropriate suffix to template so that it matches the specified permalink style .
195
# Replaces every run of characters matched by the slugify regexp for
# +mode+ ("raw", "pretty", "ascii", anything else => default) with a
# single hyphen. Returns a new string.
def replace_character_sequence_with_hyphen(string, mode: "default")
  pattern =
    case mode
    when "raw"    then SLUGIFY_RAW_REGEXP
    when "pretty" then SLUGIFY_PRETTY_REGEXP
    when "ascii"  then SLUGIFY_ASCII_REGEXP
    else SLUGIFY_DEFAULT_REGEXP
    end
  string.gsub(pattern, "-")
end
Replace each character sequence with a hyphen .
196
# Splits a filename into its extension and basename, stripping any
# trailing dot left on the basename, and stores both on self.
def process(name)
  extension = File.extname(name)
  self.ext = extension
  self.basename = name[0..-extension.length - 1].gsub(%r!\.\z!, "")
end
Extract information from the page filename .
197
# Renders this post into its layouts.
# The post and its pager are exposed to Liquid via the site payload's
# "page" and "paginator" keys before layouting begins.
def render(layouts, site_payload)
  site_payload["page"] = to_liquid
  site_payload["paginator"] = pager.to_liquid
  do_layout(site_payload, layouts)
end
Add any necessary layouts to this post
198
# Filters an array (or the values of a Hash) by evaluating a Liquid
# expression against each element.
#
# input      - the collection to filter; returned untouched when it does
#              not respond to #select
# variable   - name bound to each element while the expression runs
# expression - Liquid expression String, e.g. "item.price > 3"
#
# Returns the elements for which the expression evaluates truthy.
def where_exp(input, variable, expression)
  return input unless input.respond_to?(:select)
  input = input.values if input.is_a?(Hash)
  condition = parse_condition(expression)
  # Evaluate in a fresh context scope so the loop variable neither leaks
  # into nor clobbers the surrounding render context.
  @context.stack do
    input.select do |object|
      @context[variable] = object
      condition.evaluate(@context)
    end
  end || []
end
Filters an array of objects against an expression
199
# Sorts an array of objects.
#
# input    - the array to sort; nil raises ArgumentError
# property - when given, sort by this property via sort_input; when nil,
#            use the elements' natural ordering
# nils     - "first" (default) or "last": where nil property values land;
#            any other value raises ArgumentError
def sort(input, property = nil, nils = "first")
  raise ArgumentError, "Cannot sort a null object." if input.nil?
  return input.sort if property.nil?

  order =
    case nils
    when "first" then -1
    when "last"  then +1
    else
      raise ArgumentError, "Invalid nils order: " \
        "'#{nils}' is not a valid nils order. It must be 'first' or 'last'."
    end
  sort_input(input, property, order)
end
Sort an array of objects