idx
int64
0
24.9k
question
stringlengths
68
4.14k
target
stringlengths
9
749
2,600
# Bulk-assigns the given attributes via write_attribute, then persists the
# record with a single save call.
#
# @param attributes [Hash] attribute-name => value pairs to write
# @return the result of +save+
def update_attributes(attributes)
  attributes.each do |name, value|
    write_attribute(name, value)
  end
  save
end
Updates multiple attributes at once saving the object once the updates are complete .
2,601
# Deletes this record from DynamoDB. When the model declares a
# :lock_version attribute, the delete is made conditional on the expected
# version (optimistic locking); a conditional-check failure is surfaced as
# a StaleObjectError.
#
# @raise [Dynamoid::Errors::StaleObjectError] when the stored lock_version
#   no longer matches
def delete
  # Include the (dumped) range key in the options when the model has one.
  options =
    if range_key
      { range_key: Dumping.dump_field(read_attribute(range_key), self.class.attributes[range_key]) }
    else
      {}
    end
  if self.class.attributes[:lock_version]
    # Use the pre-change version when a change is pending, otherwise the
    # current value.
    expected_version = changes[:lock_version].nil? ? lock_version : changes[:lock_version][0]
    options[:conditions] = { if: { lock_version: expected_version } }
  end
  Dynamoid.adapter.delete(self.class.table_name, hash_key, options)
rescue Dynamoid::Errors::ConditionalCheckFailedException
  raise Dynamoid::Errors::StaleObjectError.new(self, 'delete')
end
Delete this object from the datastore .
2,602
# Persists the record inside run_callbacks(:save):
# - assigns a SecureRandom.uuid hash key when blank
# - for new records, adds an :unless_exists condition on the hash key so a
#   duplicate write raises RecordNotUnique
# - when a :lock_version attribute is declared, bumps it and (if there was
#   a previous value) adds an :if condition on it (optimistic locking); a
#   conditional failure on an existing record raises StaleObjectError
# Attributes are serialized with Dumping.dump_attributes before the write.
#
# @param conditions [Hash, nil] extra write conditions merged with the above
# @return [true] on a successful write (@new_record is cleared)
def persist ( conditions = nil ) run_callbacks ( :save ) do self . hash_key = SecureRandom . uuid if hash_key . blank? if new_record? conditions ||= { } ( conditions [ :unless_exists ] ||= [ ] ) << self . class . hash_key end if self . class . attributes [ :lock_version ] conditions ||= { } self . lock_version = ( lock_version || 0 ) + 1 ( conditions [ :if ] ||= { } ) [ :lock_version ] = changes [ :lock_version ] [ 0 ] if changes [ :lock_version ] [ 0 ] end attributes_dumped = Dumping . dump_attributes ( attributes , self . class . attributes ) begin Dynamoid . adapter . write ( self . class . table_name , attributes_dumped , conditions ) @new_record = false true rescue Dynamoid :: Errors :: ConditionalCheckFailedException => e if new_record? raise Dynamoid :: Errors :: RecordNotUnique . new ( e , self ) else raise Dynamoid :: Errors :: StaleObjectError . new ( self , 'persist' ) end end end end
Persist the object into the datastore. Assign it an id first if it doesn't have one.
2,603
# Lazily builds and memoizes the adapter behind an atomic reference
# (@adapter_): constructs the plugin adapter, calls #connect! when the
# adapter supports it, then installs it with compare_and_set(nil, adapter)
# so concurrent callers converge on a single instance; the cache is
# cleared after installation.
# NOTE(review): if the CAS loses a race, the locally-built (and possibly
# connected) adapter is discarded — presumably harmless; confirm connect!
# has no lasting side effects.
def adapter unless @adapter_ . value adapter = self . class . adapter_plugin_class . new adapter . connect! if adapter . respond_to? ( :connect! ) @adapter_ . compare_and_set ( nil , adapter ) clear_cache! end @adapter_ . value end
The actual adapter currently in use .
2,604
# Times a single adapter call and logs it at debug level, e.g.
# "(12.34 ms) BATCH GET ITEM - [...]"; the args suffix is appended only
# when args are present.
#
# @param method [Symbol, String] adapter method name, used for the label
# @param args extra values echoed into the log line via #inspect
# @return the block's result (passed through unchanged)
def benchmark ( method , * args ) start = Time . now result = yield Dynamoid . logger . debug "(#{((Time.now - start) * 1000.0).round(2)} ms) #{method.to_s.split('_').collect(&:upcase).join(' ')}#{" - #{args.inspect}" unless args.nil? || args.empty?}" result end
Shows how long it takes a method to run on the adapter . Useful for generating logged output .
2,605
# Reads one or many keys from a table: an enumerable ids argument is
# dispatched to batch_get_item, a single key to get_item.
#
# @param table [String] table name
# @param ids a single key or an enumerable of keys
# @return the item(s) read from the underlying adapter
def read(table, ids, options = {}, &blk)
  return get_item(table, ids, options) unless ids.respond_to?(:each)

  batch_get_item({ table => ids }, options, &blk)
end
Read one or many keys from the selected table . This method intelligently calls batch_get or get on the underlying adapter depending on whether ids is a range or a single key . If a range key is present it will also interpolate that into the ids so that the batch get will acquire the correct record .
2,606
# Deletes one or many items from a table. For enumerable ids the keys are
# paired with the matching range key(s) — element-wise when range_key is
# itself enumerable — and sent through batch_delete_item; a single id goes
# through delete_item.
#
# @param table [String] table name
# @param ids a single key or an enumerable of keys
# @param options [Hash] may contain :range_key (scalar or enumerable)
def delete(table, ids, options = {})
  unless ids.respond_to?(:each)
    return delete_item(table, ids, options)
  end

  range_key = options[:range_key]
  keys =
    if range_key.respond_to?(:each)
      # Pair each id with its positional range key.
      ids.each_with_index.map { |id, i| [id, range_key[i]] }
    elsif range_key
      ids.map { |id| [id, range_key] }
    else
      ids
    end
  batch_delete_item(table => keys)
end
Delete an item from a table .
2,607
# Scans a table through the adapter, timing the call via #benchmark.
# Scans are slow in DynamoDB — avoid where a query will do.
def scan(table, query = {}, opts = {})
  benchmark('Scan', table, query) do
    adapter.scan(table, query, opts)
  end
end
Scans a table . Generally quite slow ; try to avoid using scan if at all possible .
2,608
# Delegates any method the adapter responds to, wrapping the call in
# #benchmark for timing/logging; everything else falls through to super.
def method_missing(method, *args, &block)
  if adapter.respond_to?(method)
    benchmark(method, *args) { adapter.send(method, *args, &block) }
  else
    super
  end
end
Delegate all methods that aren't defined here to the underlying adapter.
2,609
# Installs a logger: false/nil disables logging via a NullLogger, true
# selects the default logger, and any object responding to #info is used
# as-is (anything else is silently ignored, leaving the current logger).
def logger=(logger)
  if logger == false || logger.nil?
    @logger = NullLogger.new
  elsif logger == true
    @logger = default_logger
  elsif logger.respond_to?(:info)
    @logger = logger
  end
end
Sets the logger manually to any output you'd like; pass false or nil to disable logging entirely.
2,610
# Serializes a Ruby object into its PDF textual representation, dispatching
# on type: nil -> 'null', String/Symbol/Array/Hash -> dedicated formatters,
# Integer and booleans -> to_s, other Numerics -> fixed-point '%f', and
# anything else -> empty string.
#
# @param object the value to serialize
# @return [String] the PDF representation
def object_to_pdf(object)
  case object
  when nil
    'null'
  when String
    format_string_to_pdf object
  when Symbol
    format_name_to_pdf object
  when Array
    format_array_to_pdf object
  when Integer, TrueClass, FalseClass
    object.to_s
  when Numeric
    # Integer is matched above, so this handles floats/rationals.
    sprintf('%f', object)
  when Hash
    format_hash_to_pdf object
  else
    ''
  end
end
Formats an object into PDF format. This is used by the PDF object to format the PDF file, and it is used in the secure injection which is still being developed.
2,611
# Appends a fresh PDFWriter page to the end of the document.
# _location is accepted for interface compatibility but ignored — the page
# is always inserted at the end (-1).
#
# @param mediabox [Array<Numeric>] page box, defaults to US Letter
# @return [PDFWriter] the newly created page
def new_page(mediabox = [0, 0, 612.0, 792.0], _location = -1)
  page = PDFWriter.new(mediabox)
  insert(-1, page)
  page
end
adds a new page to the end of the PDF object .
2,612
# Serializes the document to a PDF byte string (ASCII-8BIT): writes the
# %PDF header with a binary-marker comment, emits every object while
# recording its byte offset for the cross-reference table, then appends
# the xref section, the trailer (/Root, /Size, /Info) and
# startxref/%%EOF. ModDate/CreationDate are set to now, and optional
# :subject/:producer options populate the info dictionary. Object ids are
# renumbered before emission and temporary ids removed at the end.
#
# @param options [Hash] :subject, :producer
# @return [String] the binary PDF content
def to_pdf ( options = { } ) @version = 1.5 if @version . to_f == 0.0 @info [ :ModDate ] = @info [ :CreationDate ] = Time . now . strftime "D:%Y%m%d%H%M%S%:::z'00" @info [ :Subject ] = options [ :subject ] if options [ :subject ] @info [ :Producer ] = options [ :producer ] if options [ :producer ] catalog = rebuild_catalog_and_objects renumber_object_ids out = [ ] xref = [ ] indirect_object_count = 1 out << "%PDF-#{@version}\n%\xFF\xFF\xFF\xFF\xFF\x00\x00\x00\x00" . force_encoding ( Encoding :: ASCII_8BIT ) loc = 0 out . each { | line | loc += line . bytesize + 1 } @objects . each do | o | indirect_object_count += 1 xref << loc out << object_to_pdf ( o ) loc += out . last . bytesize + 1 end xref_location = loc out << "xref\n0 #{indirect_object_count}\n0000000000 65535 f \n" xref . each { | offset | out << ( out . pop + ( "%010d 00000 n \n" % offset ) ) } out << out . pop + 'trailer' out << "<<\n/Root #{false || "#{catalog[:indirect_reference_id]} #{catalog[:indirect_generation_number]} R"}" out << "/Size #{indirect_object_count}" out << "/Info #{@info[:indirect_reference_id]} #{@info[:indirect_generation_number]} R" out << ">>\nstartxref\n#{xref_location}\n%%EOF" remove_old_ids out . join ( "\n" . force_encoding ( Encoding :: ASCII_8BIT ) ) . force_encoding ( Encoding :: ASCII_8BIT ) end
Formats the data to PDF formats and returns a binary string that represents the PDF file content .
2,613
# Recursively collects every :Page dictionary reachable from the given
# catalog(s) (defaulting to get_existing_catalogs), following
# :is_reference_only links and descending through
# :Catalog -> :Pages -> :Kids. Unresolvable references produce a warning
# and are skipped.
#
# @param catalogs [Array, Hash, nil] catalog object(s) to walk
# @return [Array<Hash>] page dictionaries in catalog order
def pages ( catalogs = nil ) page_list = [ ] catalogs ||= get_existing_catalogs if catalogs . is_a? ( Array ) catalogs . each { | c | page_list . concat pages ( c ) unless c . nil? } elsif catalogs . is_a? ( Hash ) if catalogs [ :is_reference_only ] if catalogs [ :referenced_object ] page_list . concat pages ( catalogs [ :referenced_object ] ) else warn "couldn't follow reference!!! #{catalogs} not found!" end else case catalogs [ :Type ] when :Page page_list << catalogs when :Pages page_list . concat pages ( catalogs [ :Kids ] ) unless catalogs [ :Kids ] . nil? when :Catalog page_list . concat pages ( catalogs [ :Pages ] ) unless catalogs [ :Pages ] . nil? end end end page_list end
this method returns all the pages cataloged in the catalog .
2,614
# Returns the distinct font dictionaries used across all pages of the file.
#
# @param limit_to_type0 [Boolean] when true, only CID (:Type0) fonts are
#   returned; when false, every font is included.
# @return [Array<Hash>] unique font objects found in the pages' resources
def fonts(limit_to_type0 = false)
  fonts_array = []
  pages.each do |pg|
    next unless pg[:Resources][:Font]
    pg[:Resources][:Font].values.each do |f|
      # Follow indirect references to the actual font dictionary.
      f = f[:referenced_object] if f[:referenced_object]
      # BUGFIX: the filter was inverted — `limit_to_type0 || ...` returned
      # every font when limiting was requested and only :Type0 fonts when
      # it was not. `!limit_to_type0 || ...` applies the limit correctly.
      if (!limit_to_type0 || f[:Subtype] == :Type0) && f[:Type] == :Font && !fonts_array.include?(f)
        fonts_array << f
      end
    end
  end
  fonts_array
end
returns an array with the different fonts used in the file .
2,615
# Removes the page at page_index from the catalog's Kids array, keeping
# the page count in sync.
#
# @param page_index [Integer] zero-based page position
# @return the removed page object, or nil when the index was out of range
def remove(page_index)
  catalog = rebuild_catalog
  kids = catalog[:Pages][:referenced_object][:Kids]
  removed = kids.delete_at(page_index)
  catalog[:Pages][:referenced_object][:Count] = kids.length
  removed
end
removes a PDF page from the file and the catalog
2,616
# Registers a font taken from an existing PDF font dictionary with the
# font library: parses a CMap from :ToUnicode (returns false with a
# warning when it is missing), derives per-glyph metrics either from a CID
# :W widths array (possibly via :DescendantFonts) or from a simple-font
# :Widths/:FirstChar pair (returns false when neither exists), and finally
# hands everything to CombinePDF::Fonts.register_font.
# NOTE(review): glyph bounding boxes are approximated from
# XHeight/CapHeight/AvgWidth averages, not from real glyph boxes.
def register_font_from_pdf_object ( font_name , font_object ) cmap = { } if font_object [ :ToUnicode ] to_unicode = font_object [ :ToUnicode ] to_unicode = to_unicode [ :referenced_object ] if to_unicode [ :is_reference_only ] to_unicode = create_deep_copy to_unicode CombinePDF :: PDFFilter . inflate_object to_unicode cmap = parse_cmap to_unicode [ :raw_stream_content ] else warn "didn't find ToUnicode object for #{font_object}" return false end metrics = { } old_widths = font_object if font_object [ :DescendantFonts ] old_widths = font_object [ :DescendantFonts ] old_widths = old_widths [ :referenced_object ] [ :indirect_without_dictionary ] if old_widths . is_a? ( Hash ) && old_widths [ :is_reference_only ] old_widths = old_widths [ 0 ] [ :referenced_object ] avrg_height = 360 avrg_height = old_widths [ :XHeight ] if old_widths [ :XHeight ] avrg_height = ( avrg_height + old_widths [ :CapHeight ] ) / 2 if old_widths [ :CapHeight ] avrg_width = old_widths [ :AvgWidth ] || 0 avarage_bbox = [ 0 , 0 , avrg_width , avrg_height ] end cmap_inverted = { } cmap . each { | k , v | cmap_inverted [ v . hex ] = k } if old_widths [ :W ] old_widths = old_widths [ :W ] if old_widths . is_a? ( Hash ) && old_widths [ :is_reference_only ] old_widths = old_widths [ :referenced_object ] [ :indirect_without_dictionary ] end old_widths = create_deep_copy old_widths while old_widths [ 0 ] a = old_widths . shift b = old_widths . shift if b . is_a? ( Array ) b . each_index { | i | metrics [ cmap_inverted [ ( a + i ) ] || ( a + i ) ] = { wx : b [ i ] , boundingbox : avarage_bbox } } else c = old_widths . shift ( b - a ) . times { | i | metrics [ cmap_inverted [ ( a + i ) ] || ( a + i ) ] = { wx : c [ 0 ] , boundingbox : avarage_bbox } } end end elsif old_widths [ :Widths ] first_char = old_widths [ :FirstChar ] old_widths = old_widths [ :Widths ] if old_widths . is_a? 
( Hash ) && old_widths [ :is_reference_only ] old_widths = old_widths [ :referenced_object ] [ :indirect_without_dictionary ] end old_widths . each_index { | i | metrics [ cmap_inverted [ ( i + first_char ) ] || ( i + first_char ) ] = { wx : old_widths [ i ] , boundingbox : avarage_bbox } } else warn "didn't find widths object for #{old_widths}" return false end cmap = nil if cmap . empty? CombinePDF :: Fonts . register_font font_name , metrics , font_object , cmap end
Register a font that already exists in a pdf object into the font library .
2,617
# Normalizes self[:Contents] into a flat, compact Array of content
# streams: unwraps an indirect-without-dictionary reference, wraps a
# single stream in an Array, drops empty placeholder references, then
# brackets the existing content with 'q' ... 'Q' to isolate its graphics
# state and appends a fresh empty stream (@contents) ready for new
# drawing operations.
#
# @return [String] the newly added, empty content string
def init_contents self [ :Contents ] = self [ :Contents ] [ :referenced_object ] [ :indirect_without_dictionary ] if self [ :Contents ] . is_a? ( Hash ) && self [ :Contents ] [ :referenced_object ] && self [ :Contents ] [ :referenced_object ] . is_a? ( Hash ) && self [ :Contents ] [ :referenced_object ] [ :indirect_without_dictionary ] self [ :Contents ] = [ self [ :Contents ] ] unless self [ :Contents ] . is_a? ( Array ) self [ :Contents ] . delete ( is_reference_only : true , referenced_object : { indirect_reference_id : 0 , raw_stream_content : '' } ) self [ :Contents ] . map! { | s | actual_value ( s ) . is_a? ( Array ) ? actual_value ( s ) : s } self [ :Contents ] . flatten! self [ :Contents ] . compact! insert_content 'q' , 0 insert_content 'Q' @contents = '' insert_content @contents @contents end
initializes the content stream in case it was not initialized before
2,618
# Inserts a string or stream object into the page's content array at the
# given location. Strings are first wrapped in a raw-stream placeholder
# hash; anything that is not a Hash at that point raises TypeError.
#
# @param object [String, Hash] content to insert
# @param location [Integer] index in the contents array (default: append)
# @return [self]
def insert_content(object, location = -1)
  if object.is_a?(String)
    object = { is_reference_only: true, referenced_object: { indirect_reference_id: 0, raw_stream_content: object } }
  end
  raise TypeError, 'expected a String or Hash object.' unless object.is_a?(Hash)

  prep_content_array
  self[:Contents].insert location, object
  self[:Contents].flatten!
  self
end
adds a string or an object to the content stream at the location indicated
2,619
# Registers (or finds) a graphic state dictionary in the page's ExtGState
# resources and returns its name for use in a content stream. An existing
# identical dictionary is reused; otherwise the dictionary is tagged with
# :Type => :ExtGState and stored under a fresh random name.
#
# @param graphic_state_dictionary [Hash] the ExtGState parameters
# @return [Symbol] the resource name of the graphic state
def graphic_state(graphic_state_dictionary = {})
  resources[:ExtGState] ||= {}
  registry = resources[:ExtGState][:referenced_object] || resources[:ExtGState]
  registry.each do |name, dict|
    return name if dict.is_a?(Hash) && dict == graphic_state_dictionary
  end
  graphic_state_dictionary[:Type] = :ExtGState
  new_name = SecureRandom.hex(9).to_sym
  registry[new_name] = graphic_state_dictionary
  new_name
end
Register or get a registered graphic state dictionary. The method returns the name of the graphic state for use in a content stream.
2,620
# Runs the given block while preserving point, mark and goal column:
# records the current point as a temporary mark, a dup of @mark and
# @goal_column, then restores all three in an ensure clause (so even a
# raising block is cleaned up) and deletes the temporary marks.
def save_excursion old_point = new_mark old_mark = @mark &. dup old_column = @goal_column begin yield ensure point_to_mark ( old_point ) old_point . delete if old_mark @mark . location = old_mark . location old_mark . delete end @goal_column = old_column end end
Don't save Buffer.current.
2,621
# Sets a new mark at pos and pushes it onto the mark ring. Outside the
# minibuffer, a global mark is also pushed when the global ring is empty
# or its current entry belongs to a different buffer.
#
# @param pos mark location (defaults to point)
def push_mark(pos = @point)
  @mark = new_mark
  @mark.location = pos
  @mark_ring.push(@mark)
  return if self == Buffer.minibuffer

  global_mark_ring = Buffer.global_mark_ring
  if global_mark_ring.empty? || global_mark_ring.current.buffer != self
    push_global_mark(pos)
  end
end
Set mark at pos and push the mark on the mark ring . Unlike Emacs the new mark is pushed on the mark ring instead of the old one .
2,622
# Re-indents the current line to the level computed by
# calculate_indentation, applying the change as one composite edit:
# existing leading whitespace is replaced only when it differs from the
# target (the `break` exits the composite_edit block early when it is
# already correct, leaving result false). Afterwards point is moved past
# the indentation if it was left inside it.
#
# @return [Boolean] true when the buffer was modified
def indent_line result = false level = calculate_indentation return result if level . nil? @buffer . save_excursion do @buffer . beginning_of_line @buffer . composite_edit do if @buffer . looking_at? ( / \t / ) s = @buffer . match_string ( 0 ) break if / \t / !~ s && s . size == level @buffer . delete_region ( @buffer . match_beginning ( 0 ) , @buffer . match_end ( 0 ) ) else break if level == 0 end @buffer . indent_to ( level ) end result = true end pos = @buffer . point @buffer . beginning_of_line @buffer . forward_char while / \t / =~ @buffer . char_after if @buffer . point < pos @buffer . goto_char ( pos ) end result end
Return true if modified .
2,623
# Builds the plaintext block to encrypt: 16 random hex chars, the 4-byte
# big-endian length of the (binary-forced) content, the content itself,
# then the app/corp id — the concatenation is padded via encode_padding.
# NOTE(review): String#length counts bytes here only because the content
# has been forced to ASCII-8BIT first.
#
# @param content [String] message body
# @param app_id [String] app id or corp id appended as the suffix
def pack ( content , app_id ) random = SecureRandom . hex ( 8 ) text = content . force_encoding ( 'ASCII-8BIT' ) msg_len = [ text . length ] . pack ( 'N' ) encode_padding ( "#{random}#{msg_len}#{text}#{app_id}" ) end
app_id or corp_id
2,624
# Difference between the locked unit (taken from the schedule start time)
# and the same unit of the candidate time, wrapped into the unit's
# interval when the candidate is already past the locked value.
#
# @return [Integer] units to add to reach the locked value
def validate_interval_lock(time, start_time)
  locked = starting_unit(start_time)
  current = time.send(type)
  if locked >= current
    locked - current
  else
    INTERVALS[type] - current + locked
  end
end
Validate if the current time unit matches the same unit from the schedule start time returning the difference to the interval
2,625
# Hours to add so the candidate time lands on the locked hour. When the
# locked hour is already behind, wrap to the next day — shortened or
# lengthened by the DST offset when a DST change happens on that day, so
# the interval stays correct across transitions.
#
# @return [Integer] hour offset
def validate_hour_lock(time, start_time)
  locked_hour = starting_unit(start_time)
  current_hour = time.hour
  return locked_hour - current_hour if locked_hour >= current_hour

  dst_offset = TimeUtil.dst_change(time)
  if dst_offset
    locked_hour - current_hour + dst_offset
  else
    24 - current_hour + locked_hour
  end
end
Lock the hour if explicitly set by hour_of_day but allow for the nearest hour during DST start to keep the correct interval .
2,626
# Returns how many days to sleep so `time` lands on the locked day of
# month. Negative values count from the end of the month (via
# TimeUtil.day_of_month), with month_overflow compensating when the next
# month is shorter. When no explicit value exists, the schedule start
# day is used, and start days >= 28 jump via days_to_next_month so months
# shorter than the start day are still included.
def validate_day_lock ( time , start_time ) days_in_month = TimeUtil . days_in_month ( time ) date = Date . new ( time . year , time . month , time . day ) if value && value < 0 start = TimeUtil . day_of_month ( value , date ) month_overflow = days_in_month - TimeUtil . days_in_next_month ( time ) elsif value && value > 0 start = value month_overflow = 0 else start = TimeUtil . day_of_month ( start_time . day , date ) month_overflow = 0 end sleeps = start - date . day if value && value > 0 until_next_month = days_in_month + sleeps else until_next_month = start < 28 ? days_in_month : TimeUtil . days_to_next_month ( date ) until_next_month += sleeps - month_overflow end sleeps >= 0 ? sleeps : until_next_month end
For monthly rules that have no specified day value the validation relies on the schedule start time and jumps to include every month even if it has fewer days than the schedule s start day .
2,627
def to_s arr = [ ] if freq = @hash . delete ( 'FREQ' ) arr << "FREQ=#{freq.join(',')}" end arr . concat ( @hash . map do | key , value | if value . is_a? ( Array ) "#{key}=#{value.join(',')}" end end . compact ) arr . join ( ';' ) end
Build for a single rule entry
2,628
# Adds hour-of-day validations. Each hour must be an Integer and is
# checked against the schedule's interval alignment before a Validation
# is appended; the base hourly validation is then clobbered.
#
# @param hours [Array<Integer>] hours (0-23), possibly nested
# @return [self]
# @raise [ArgumentError] on non-Integer hours or misaligned values
def hour_of_day(*hours)
  hours.flatten.each do |hour|
    raise ArgumentError, "expecting Integer value for hour, got #{hour.inspect}" unless hour.is_a?(Integer)

    verify_alignment(hour, :hour, :hour_of_day) { |error| raise error }
    validations_for(:hour_of_day) << Validation.new(hour)
  end
  clobber_base_validations(:hour)
  self
end
Add hour of day validations
2,629
# Adds a single concrete occurrence to the schedule (nil is ignored).
#
# @param time [Time, nil]
# @return [Time, nil] the time that was added, or nil
def add_recurrence_time(time)
  return if time.nil?

  add_recurrence_rule SingleOccurrenceRule.new(time)
  time
end
Add a recurrence time to the schedule
2,630
# Adds a single concrete exception time to the schedule (nil is ignored).
#
# @param time [Time, nil]
# @return [Time, nil] the time that was added, or nil
def add_exception_time(time)
  return if time.nil?

  add_exception_rule SingleOccurrenceRule.new(time)
  time
end
Add an exception time to the schedule
2,631
# Removes any single-occurrence rule matching the given time.
#
# @param time [Time]
# @return [Time, nil] the time when something was removed, else nil
def remove_recurrence_time(time)
  removed = false
  @all_recurrence_rules.delete_if do |rule|
    match = rule.is_a?(SingleOccurrenceRule) && rule.time == time
    removed = true if match
    match
  end
  time if removed
end
Remove a recurrence time
2,632
# Returns the next num occurrences strictly after `from` (defaulting to
# now, in the schedule's zone).
#
# @param num [Integer] number of occurrences to return
# @param from [Time, nil] starting point (exclusive)
# @return [Array<Time>]
def next_occurrences(num, from = nil, options = {})
  anchor = TimeUtil.match_zone(from, start_time) || TimeUtil.now(start_time)
  enumerate_occurrences(anchor + 1, nil, options).take(num)
end
The next n occurrences after now
2,633
# Returns the last occurrence strictly before `from`, or nil when `from`
# is at or before the schedule start.
#
# @param from [Time]
# @raise [ArgumentError] when from cannot be coerced to a time
def previous_occurrence(from)
  from = TimeUtil.match_zone(from, start_time)
  raise ArgumentError, "Time required, got #{from.inspect}" unless from
  return nil if from <= start_time

  enumerate_occurrences(start_time, from - 1).to_a.last
end
The previous occurrence from a given time
2,634
# Returns up to num occurrences strictly before `from` (empty when `from`
# is at or before the schedule start).
#
# @param num [Integer]
# @param from [Time]
# @raise [ArgumentError] when from cannot be coerced to a time
def previous_occurrences(num, from)
  from = TimeUtil.match_zone(from, start_time)
  raise ArgumentError, "Time required, got #{from.inspect}" unless from
  return [] if from <= start_time

  all = enumerate_occurrences(start_time, from - 1).to_a
  all.size > num ? all[-1 * num, all.size] : all
end
The previous n occurrences before a given time
2,635
# True when at least one occurrence falls between the two times; detected
# by asking the enumerator for its first element and treating
# StopIteration as "none".
def occurs_between?(begin_time, closing_time, options = {})
  iterator = enumerate_occurrences(begin_time, closing_time, options)
  begin
    iterator.next
    true
  rescue StopIteration
    false
  end
end
Return a boolean indicating if an occurrence falls between two times
2,636
# True when an occurrence falls anywhere on the given calendar date
# (evaluated in the schedule's zone).
#
# @param date [Date, Time]
def occurs_on?(date)
  date = TimeUtil.ensure_date(date)
  day_start = TimeUtil.beginning_of_date(date, start_time)
  day_end = TimeUtil.end_of_date(date, start_time)
  occurs_between?(day_start, day_end)
end
Return a boolean indicating if an occurrence falls on a certain date
2,637
# True when the schedule is in progress at the given moment. With a
# duration, any occurrence whose window covers `time` counts (unless the
# time is an exception); without one, only exact occurrences match.
#
# @param time [Time]
# @raise [ArgumentError] when time cannot be coerced
def occurring_at?(time)
  time = TimeUtil.match_zone(time, start_time)
  raise ArgumentError, "Time required, got #{time.inspect}" unless time
  return occurs_at?(time) unless duration > 0
  return false if exception_time?(time)

  occurs_between?(time - duration + 1, time)
end
Determine if the schedule is occurring at a given time
2,638
# Returns true when this schedule and other_schedule ever occur at the
# same moment. At least one of the schedules must terminate (or a
# closing_time must be given) or ArgumentError is raised; when self does
# not terminate, the roles are swapped so iteration always walks the
# terminating schedule. After the main walk, the tail window (last time
# plus duration) is re-checked from the other schedule's side so
# durations overlapping past the last occurrence are caught.
def conflicts_with? ( other_schedule , closing_time = nil ) closing_time = TimeUtil . ensure_time ( closing_time ) unless terminating? || other_schedule . terminating? || closing_time raise ArgumentError , "One or both schedules must be terminating to use #conflicts_with?" end terminating_schedule = self unless terminating? || closing_time terminating_schedule , other_schedule = other_schedule , terminating_schedule end last_time = nil terminating_schedule . each_occurrence do | time | if closing_time && time > closing_time last_time = closing_time break end last_time = time return true if other_schedule . occurring_at? ( time ) end if last_time last_time += terminating_schedule . duration other_schedule . each_occurrence do | time | break if time > last_time return true if terminating_schedule . occurring_at? ( time ) end end false end
Determine if this schedule conflicts with another schedule
2,639
# Returns the first n occurrences, or just the first occurrence when n is
# omitted.
#
# @param n [Integer, nil]
# @return [Array<Time>, Time, nil]
def first(n = nil)
  taken = enumerate_occurrences(start_time).take(n || 1)
  n.nil? ? taken.first : taken
end
Get the first n occurrences or the first occurrence if n is skipped
2,640
# Returns the final n occurrences of a terminating schedule, or the very
# last one when n is omitted.
#
# @param n [Integer, nil]
# @raise when the schedule does not terminate (require_terminating_rules)
def last(n = nil)
  require_terminating_rules
  all = enumerate_occurrences(start_time).to_a
  n.nil? ? all.last : all[-n..-1]
end
Get the final n occurrences of a terminating schedule or the final one if no n is given
2,641
# Serializes the schedule to an iCalendar fragment: DTSTART, then RRULE/
# EXRULE lines, RDATE/EXDATE lines, and a trailing DTEND when an end time
# exists, joined with newlines.
#
# @param force_utc [Boolean] render times in UTC
# @return [String]
def to_ical(force_utc = false)
  pieces = ["DTSTART#{IcalBuilder.ical_format(start_time, force_utc)}"]
  pieces += recurrence_rules.map { |r| "RRULE:#{r.to_ical}" }
  pieces += exception_rules.map { |r| "EXRULE:#{r.to_ical}" }
  pieces += recurrence_times_without_start_time.map { |t| "RDATE#{IcalBuilder.ical_format(t, force_utc)}" }
  pieces += exception_times.map { |t| "EXDATE#{IcalBuilder.ical_format(t, force_utc)}" }
  pieces << "DTEND#{IcalBuilder.ical_format(end_time, force_utc)}" if end_time
  pieces.join("\n")
end
Serialize this schedule to_ical
2,642
# Serializes the schedule to a plain hash: start/end times, rule hashes
# and serialized recurrence/exception times. The legacy :start_date key
# and :exrules entry are emitted only in compatibility mode (<= 11).
#
# @return [Hash]
def to_hash
  data = {}
  data[:start_time] = TimeUtil.serialize_time(start_time)
  data[:start_date] = data[:start_time] if IceCube.compatibility <= 11
  data[:end_time] = TimeUtil.serialize_time(end_time) if end_time
  data[:rrules] = recurrence_rules.map(&:to_hash)
  if IceCube.compatibility <= 11 && exception_rules.any?
    data[:exrules] = exception_rules.map(&:to_hash)
  end
  data[:rtimes] = recurrence_times.map { |rt| TimeUtil.serialize_time(rt) }
  data[:extimes] = exception_times.map { |et| TimeUtil.serialize_time(et) }
  data
end
Convert the schedule to a hash
2,643
# Returns an Enumerator over occurrences between opening_time and
# closing_time. Both bounds are normalized into the schedule's zone and
# the opening time is aligned to the start time's sub-second offset; with
# options[:spans] and a non-zero duration, occurrences that merely
# overlap the window are yielded as well. Iteration advances via
# next_time from t1 = t0 + 1, which skips duplicate wall-clock times at
# DST fall-back.
def enumerate_occurrences ( opening_time , closing_time = nil , options = { } ) opening_time = TimeUtil . match_zone ( opening_time , start_time ) closing_time = TimeUtil . match_zone ( closing_time , start_time ) opening_time += TimeUtil . subsec ( start_time ) - TimeUtil . subsec ( opening_time ) opening_time = start_time if opening_time < start_time spans = options [ :spans ] == true && duration != 0 Enumerator . new do | yielder | reset t1 = full_required? ? start_time : opening_time t1 -= duration if spans t1 = start_time if t1 < start_time loop do break unless ( t0 = next_time ( t1 , closing_time ) ) break if closing_time && t0 > closing_time if ( spans ? ( t0 . end_time > opening_time ) : ( t0 >= opening_time ) ) yielder << ( block_given? ? yield ( t0 ) : t0 ) end t1 = t0 + 1 end end end
Find all of the occurrences for the schedule between opening_time and closing_time Iteration is unrolled in pairs to skip duplicate times in end of DST
2,644
# Days from step_time to the first :wday validation in the correct
# interval week: 0 when step_time equals the start or there are no wday
# validations; otherwise the day distance plus the base interval
# validation's offset, shifted to the earliest normalized weekday. May
# move backwards within the week when starting in an interval week with
# earlier validations.
def wday_offset ( step_time , start_time ) return 0 if step_time == start_time wday_validations = other_interval_validations . select { | v | v . type == :wday } return 0 if wday_validations . none? days = step_time . to_date - start_time . to_date interval = base_interval_validation . validate ( step_time , start_time ) . to_i min_wday = wday_validations . map { | v | TimeUtil . normalize_wday ( v . day , week_start ) } . min step_wday = TimeUtil . normalize_wday ( step_time . wday , week_start ) days + interval - step_wday + min_wday end
Calculate how many days to the first wday validation in the correct interval week . This may move backwards within the week if starting in an interval week with earlier validations .
2,645
# Fetches a single resource by id from the pluralized path and parses it
# into the matching Trello model class, wiring this client into the
# resulting object.
#
# @param path [Symbol, String] singular resource name
# @param id [String] resource identifier
def find(path, id, params = {})
  response = get("/#{path.to_s.pluralize}/#{id}", params)
  class_from_path(path).parse(response) { |data| data.client = self }
end
Finds given resource by id
2,646
# Fetches a collection from the given path and parses it into instances
# of trello_class, wiring this client into each result.
def find_many(trello_class, path, params = {})
  trello_class.parse_many(get(path, params)) { |data| data.client = self }
end
Finds given resource by path with params
2,647
# Merges incoming label-color fields into the attributes hash, keeping
# the existing value for any color the payload omits.
#
# @param fields [Hash] string-keyed color => label-name data
# @return [self]
def update_fields(fields)
  %w[yellow red orange green purple blue sky pink lime black].each do |color|
    key = color.to_sym
    attributes[key] = fields[color] || attributes[key]
  end
  self
end
Update the fields of a label .
2,648
# Persists the webhook: existing records (with an id) are updated in
# place, otherwise a new webhook is created via POST and the response is
# loaded back into this instance.
def save
  return update! if id

  payload = { description: description, idModel: id_model, callbackURL: callback_url }
  from_response client.post("/webhooks", payload)
end
Save the webhook .
2,649
# Pushes the webhook's current state (including active flag) to Trello.
def update!
  payload = {
    description: description,
    idModel: id_model,
    callbackURL: callback_url,
    active: active
  }
  client.put("/webhooks/#{id}", payload)
end
Update the webhook .
2,650
# Merges an incoming plugin-datum API hash into the attributes, keeping
# existing values where the payload omits a field. The 'value' field is
# JSON-decoded (and blank values dropped via #presence) only when the key
# is actually present.
#
# @param fields [Hash] string-keyed API data
# @return [self]
def update_fields(fields)
  %w[id idPlugin scope].each do |field|
    key = field.to_sym
    attributes[key] = fields[field] || attributes[key]
  end
  attributes[:value] = JSON.parse(fields['value']).presence if fields.has_key?('value')
  %w[idModel access].each do |field|
    key = field.to_sym
    attributes[key] = fields[field] || attributes[key]
  end
  self
end
Supply a hash of string-keyed data retrieved from the Trello API representing an attachment.
2,651
# Merges a Trello card API hash (string keys via SYMBOL_TO_STRING, with
# symbol-key fallbacks for locally-built hashes) into the card's
# attributes, keeping existing values when a field is absent. :due and
# :last_activity_date are parsed with Time.iso8601 and silently nil on
# parse failure (inline rescue); :due_complete defaults to false; labels
# are wrapped in Trello::Label instances.
# NOTE(review): the inline `rescue nil` swallows all StandardErrors from
# Time.iso8601 — presumably intentional for malformed dates.
def update_fields ( fields ) attributes [ :id ] = fields [ SYMBOL_TO_STRING [ :id ] ] || attributes [ :id ] attributes [ :short_id ] = fields [ SYMBOL_TO_STRING [ :short_id ] ] || attributes [ :short_id ] attributes [ :name ] = fields [ SYMBOL_TO_STRING [ :name ] ] || fields [ :name ] || attributes [ :name ] attributes [ :desc ] = fields [ SYMBOL_TO_STRING [ :desc ] ] || fields [ :desc ] || attributes [ :desc ] attributes [ :due ] = Time . iso8601 ( fields [ SYMBOL_TO_STRING [ :due ] ] ) rescue nil if fields . has_key? ( SYMBOL_TO_STRING [ :due ] ) attributes [ :due ] = fields [ :due ] if fields . has_key? ( :due ) attributes [ :due_complete ] = fields [ SYMBOL_TO_STRING [ :due_complete ] ] if fields . has_key? ( SYMBOL_TO_STRING [ :due_complete ] ) attributes [ :due_complete ] ||= false attributes [ :closed ] = fields [ SYMBOL_TO_STRING [ :closed ] ] if fields . has_key? ( SYMBOL_TO_STRING [ :closed ] ) attributes [ :url ] = fields [ SYMBOL_TO_STRING [ :url ] ] || attributes [ :url ] attributes [ :short_url ] = fields [ SYMBOL_TO_STRING [ :short_url ] ] || attributes [ :short_url ] attributes [ :board_id ] = fields [ SYMBOL_TO_STRING [ :board_id ] ] || attributes [ :board_id ] attributes [ :member_ids ] = fields [ SYMBOL_TO_STRING [ :member_ids ] ] || fields [ :member_ids ] || attributes [ :member_ids ] attributes [ :list_id ] = fields [ SYMBOL_TO_STRING [ :list_id ] ] || fields [ :list_id ] || attributes [ :list_id ] attributes [ :pos ] = fields [ SYMBOL_TO_STRING [ :pos ] ] || fields [ :pos ] || attributes [ :pos ] attributes [ :labels ] = ( fields [ SYMBOL_TO_STRING [ :labels ] ] || [ ] ) . map { | lbl | Trello :: Label . new ( lbl ) } . presence || attributes [ :labels ] . presence || [ ] attributes [ :card_labels ] = fields [ SYMBOL_TO_STRING [ :card_labels ] ] || fields [ :card_labels ] || attributes [ :card_labels ] attributes [ :last_activity_date ] = Time . iso8601 ( fields [ SYMBOL_TO_STRING [ :last_activity_date ] ] ) rescue nil if fields . has_key? 
( SYMBOL_TO_STRING [ :last_activity_date ] ) attributes [ :cover_image_id ] = fields [ SYMBOL_TO_STRING [ :cover_image_id ] ] || attributes [ :cover_image_id ] attributes [ :badges ] = fields [ SYMBOL_TO_STRING [ :badges ] ] || attributes [ :badges ] attributes [ :card_members ] = fields [ SYMBOL_TO_STRING [ :card_members ] ] || attributes [ :card_members ] attributes [ :source_card_id ] = fields [ SYMBOL_TO_STRING [ :source_card_id ] ] || fields [ :source_card_id ] || attributes [ :source_card_id ] attributes [ :source_card_properties ] = fields [ SYMBOL_TO_STRING [ :source_card_properties ] ] || fields [ :source_card_properties ] || attributes [ :source_card_properties ] self end
Update the fields of a card .
2,652
# Pushes only the dirty attributes to Trello: records them as
# previously_changed, maps each dirty key to its API (camelCase) name via
# SYMBOL_TO_STRING with the new value, clears the dirty tracker, then
# PUTs the payload.
def update!
  @previously_changed = changes
  payload = changes.each_with_object({}) do |(key, values), acc|
    acc[SYMBOL_TO_STRING[key.to_sym].to_sym] = values[1]
  end
  @changed_attributes.clear
  client.put("/cards/#{id}", payload)
end
Update an existing record .
2,653
# Moves this card to the given list (accepts a List object or a list id
# string); a no-op when the card is already on that list.
def move_to_list(list)
  target_id = list.is_a?(String) ? list : list.id
  return if list_id == target_id

  client.put("/cards/#{id}/idList", { value: target_id })
end
Move this card to the given list
2,654
# Moves this card to the given list regardless of which board the list
# lives on: same-board moves go through move_to_list, cross-board moves
# through move_to_board with the target list.
def move_to_list_on_any_board(list_id)
  target = List.find(list_id)
  return move_to_list(list_id) if board.id == target.board_id

  move_to_board(Board.find(target.board_id), target)
end
Moves this card to the given list no matter which board it is on
2,655
# Casts the authenticated member's vote on this card; errors whose
# message matches the rescue pattern are swallowed so re-voting is a
# no-op, all other Trello errors are re-raised.
# NOTE(review): the rescue regex / /i matches any message containing a
# space — it looks garbled; upstream presumably matches something like
# /already voted/i. Confirm before relying on error propagation here.
# @return [Card] self
def upvote begin client . post ( "/cards/#{id}/membersVoted" , { value : me . id } ) rescue Trello :: Error => e fail e unless e . message =~ / /i end self end
Current authenticated user upvotes a card
2,656
# Removes the authenticated member's vote from this card; errors whose
# message matches the rescue pattern are swallowed so un-voting without a
# prior vote is a no-op, all other Trello errors are re-raised.
# NOTE(review): the rescue regex / /i matches any message containing a
# space — it looks garbled; confirm the intended pattern upstream.
# @return [Card] self
def remove_upvote begin client . delete ( "/cards/#{id}/membersVoted/#{me.id}" ) rescue Trello :: Error => e fail e unless e . message =~ / /i end self end
Rescind upvote. No-op if the authenticated user hasn't previously voted.
2,657
# Attaches a label to this card. An invalid label is rejected up front:
# the error is recorded on the model and a warning logged instead of
# hitting the API.
#
# @param label [Label]
# @return the API response, or the logger result when the label is invalid
def add_label(label)
  unless label.valid?
    errors.add(:label, "is not valid.")
    # The original repeated `unless label.valid?` here, which is always
    # true inside this branch — the redundant guard has been removed.
    return Trello.logger.warn "Label is not valid."
  end
  client.post("/cards/#{id}/idLabels", { value: label.id })
end
Add a label
2,658
# Detaches a label from this card. An invalid label is rejected up front:
# the error is recorded on the model and a warning logged instead of
# hitting the API.
#
# @param label [Label]
# @return the API response, or the logger result when the label is invalid
def remove_label(label)
  unless label.valid?
    errors.add(:label, "is not valid.")
    # The original repeated `unless label.valid?` here, which is always
    # true inside this branch — the redundant guard has been removed.
    return Trello.logger.warn "Label is not valid."
  end
  client.delete("/cards/#{id}/idLabels/#{label.id}")
end
Remove a label
2,659
# Attaches a file or URL to this card: objects that quack like a file
# (respond to #path and #read) are uploaded, anything else is sent as a
# URL attachment.
#
# @param attachment [#path & #read, String] file object or URL
# @param name [String] optional display name
def add_attachment(attachment, name = '')
  file_like = attachment.respond_to?(:path) && attachment.respond_to?(:read)
  payload = file_like ? { file: attachment, name: name } : { url: attachment, name: name }
  client.post("/cards/#{id}/attachments", payload)
end
Add an attachment to this card
2,660
# Fetches this card's attachments and wraps them in an association proxy.
def attachments
  response = client.get("/cards/#{id}/attachments")
  MultiAssociation.new(self, Attachment.from_response(response)).proxy
end
Retrieve a list of attachments
2,661
# Fetches all boards belonging to this organization as an association
# proxy.
def boards
  response = client.get("/organizations/#{id}/boards/all")
  MultiAssociation.new(self, Board.from_response(response)).proxy
end
Returns a list of boards under this organization .
2,662
# Fetches all members of this organization as an association proxy;
# params are forwarded to the Trello API.
def members(params = {})
  response = client.get("/organizations/#{id}/members/all", params)
  MultiAssociation.new(self, Member.from_response(response)).proxy
end
Returns an array of members associated with the organization .
2,663
# Merges a Trello comment-action API hash into attributes: the action id,
# the nested data.text, idMemberCreator, and an ISO-8601 'date' when the
# key is present.
# NOTE(review): fields['data'] is assumed present — a payload without it
# raises NoMethodError; confirm callers always pass full action hashes.
# @return [Comment] self
def update_fields ( fields ) attributes [ :action_id ] = fields [ 'id' ] || attributes [ :action_id ] attributes [ :text ] = fields [ 'data' ] [ 'text' ] || attributes [ :text ] attributes [ :date ] = Time . iso8601 ( fields [ 'date' ] ) if fields . has_key? ( 'date' ) attributes [ :member_creator_id ] = fields [ 'idMemberCreator' ] || attributes [ :member_creator_id ] self end
Update the attributes of a Comment
2,664
# Merges a Trello attachment API hash into the attributes, keeping
# existing values when a field is absent from the payload.
#
# @param fields [Hash] string-keyed API data
# @return [Attachment] self
def update_fields(fields)
  attributes[:name] = fields['name'] || attributes[:name]
  attributes[:id] = fields['id'] || attributes[:id]
  attributes[:pos] = fields['pos'] || attributes[:pos]
  attributes[:url] = fields['url'] || attributes[:url]
  # BUGFIX: `fields['bytes'].to_i || attributes[:bytes]` could never fall
  # back (to_i never returns nil) and coerced a missing value to 0,
  # clobbering the stored size. Only convert when the field is present.
  attributes[:bytes] = fields['bytes'] ? fields['bytes'].to_i : attributes[:bytes]
  attributes[:member_id] = fields['idMember'] || attributes[:member_id]
  # BUGFIX: Time.parse raised TypeError when 'date' was absent; keep the
  # previous value instead, consistent with the other fields. (.presence
  # was dropped — a Time object is never blank, so it was a no-op.)
  attributes[:date] = fields['date'] ? Time.parse(fields['date']) : attributes[:date]
  attributes[:is_upload] = fields['isUpload'] if fields.has_key?('isUpload')
  attributes[:mime_type] = fields['mimeType'] || attributes[:mime_type]
  attributes[:previews] = fields['previews'] if fields.has_key?('previews')
  self
end
Update the fields of an attachment .
2,665
def option_value if option_id option_endpoint = "/customFields/#{custom_field_id}/options/#{option_id}" option = CustomFieldOption . from_response client . get ( option_endpoint ) option . value end end
Need to make another call to get the actual value if the custom field type == list
2,666
def update_fields ( fields ) attributes [ :id ] = fields [ 'id' ] || attributes [ :id ] attributes [ :name ] = fields [ 'name' ] || fields [ :name ] || attributes [ :name ] attributes [ :description ] = fields [ 'desc' ] || attributes [ :description ] attributes [ :closed ] = fields [ 'closed' ] if fields . has_key? ( 'closed' ) attributes [ :url ] = fields [ 'url' ] || attributes [ :url ] attributes [ :check_items ] = fields [ 'checkItems' ] if fields . has_key? ( 'checkItems' ) attributes [ :position ] = fields [ 'pos' ] || attributes [ :position ] attributes [ :board_id ] = fields [ 'idBoard' ] || attributes [ :board_id ] attributes [ :card_id ] = fields [ 'idCard' ] || fields [ :card_id ] || attributes [ :card_id ] attributes [ :list_id ] = fields [ 'idList' ] || attributes [ :list_id ] attributes [ :member_ids ] = fields [ 'idMembers' ] || attributes [ :member_ids ] self end
Update the fields of a checklist .
2,667
def save return update! if id from_response ( client . post ( "/checklists" , { name : name , idCard : card_id } ) ) end
Save a record .
2,668
def members members = member_ids . map do | member_id | Member . find ( member_id ) end MultiAssociation . new ( self , members ) . proxy end
Return a list of members active in this checklist .
2,669
def add_item ( name , checked = false , position = 'bottom' ) client . post ( "/checklists/#{id}/checkItems" , { name : name , checked : checked , pos : position } ) end
Add an item to the checklist
2,670
def actions ( options = { } ) actions = Action . from_response client . get ( "#{request_prefix}/actions" , { filter : :all } . merge ( options ) ) MultiAssociation . new ( self , actions ) . proxy end
Returns a list of the actions associated with this object .
2,671
def update_fields ( fields ) attributes [ :id ] = fields [ 'id' ] || attributes [ :id ] attributes [ :state ] = fields [ 'state' ] || attributes [ :state ] attributes [ :item_id ] = fields [ 'idCheckItem' ] || attributes [ :item_id ] self end
Update the fields of an item state .
2,672
def clean return unless text remove_all_newlines replace_double_newlines replace_newlines replace_escaped_newlines @text . apply ( HTML :: All ) replace_punctuation_in_brackets @text . apply ( InlineFormattingRule ) clean_quotations clean_table_of_contents check_for_no_space_in_between_sentences clean_consecutive_characters end
Clean text of unwanted formatting
2,673
def subscribe ( channel , last_message_id : nil , & callback ) raise InvalidChannel unless channel . to_s . start_with? ( "/" ) raise MissingBlock unless block_given? last_message_id = - 1 if last_message_id && ! last_message_id . is_a? ( Integer ) @channels [ channel ] ||= Channel . new channel = @channels [ channel ] channel . last_message_id = last_message_id if last_message_id channel . callbacks . push ( callback ) start if stopped? end
Subscribes to a channel which executes the given callback when a message is published to the channel
2,674
def unsubscribe ( channel , & callback ) if callback @channels [ channel ] . callbacks . delete ( callback ) remove_channel ( channel ) if @channels [ channel ] . callbacks . empty? else remove_channel ( channel ) end stop if @channels . empty? @status end
unsubscribes from a channel
2,675
def connect ( request , _unused_call ) logger . debug ( "RPC Connect: #{request.inspect}" ) socket = build_socket ( env : rack_env ( request ) ) connection = factory . call ( socket ) connection . handle_open if socket . closed? AnyCable :: ConnectionResponse . new ( status : AnyCable :: Status :: FAILURE ) else AnyCable :: ConnectionResponse . new ( status : AnyCable :: Status :: SUCCESS , identifiers : connection . identifiers_json , transmissions : socket . transmissions ) end rescue StandardError => exp notify_exception ( exp , :connect , request ) AnyCable :: ConnectionResponse . new ( status : AnyCable :: Status :: ERROR , error_msg : exp . message ) end
Handle connection request from WebSocket server
2,676
def rack_env ( request ) uri = URI . parse ( request . path ) env = { "REQUEST_METHOD" => "GET" , "SCRIPT_NAME" => "" , "PATH_INFO" => uri . path , "QUERY_STRING" => uri . query , "SERVER_NAME" => uri . host , "SERVER_PORT" => uri . port . to_s , "HTTP_HOST" => uri . host , "REMOTE_ADDR" => request . headers . delete ( "REMOTE_ADDR" ) , "rack.url_scheme" => uri . scheme , "rack.input" => "" } env . merge! ( build_headers ( request . headers ) ) end
Build Rack env from request
2,677
def to_grpc_params { pool_size : rpc_pool_size , max_waiting_requests : rpc_max_waiting_requests , poll_period : rpc_poll_period , pool_keep_alive : rpc_pool_keep_alive , server_args : rpc_server_args } end
Build gRPC server parameters
2,678
def to_redis_params { url : redis_url } . tap do | params | next if redis_sentinels . nil? raise ArgumentError , "redis_sentinels must be an array; got #{redis_sentinels}" unless redis_sentinels . is_a? ( Array ) next if redis_sentinels . empty? params [ :sentinels ] = redis_sentinels . map ( & method ( :parse_sentinel ) ) end end
Build Redis parameters
2,679
def start return if running? raise "Cannot re-start stopped server" if stopped? check_default_host logger . info "RPC server is starting..." @start_thread = Thread . new { grpc_server . run } grpc_server . wait_till_running logger . info "RPC server is listening on #{host}" end
Start gRPC server in background and wait untill it ready to accept connections
2,680
def toc_only ( html ) Jekyll . logger . warn 'Deprecation: toc_only filter is deprecated and will be remove in jekyll-toc v1.0.' , 'Use `{% toc %}` instead of `{{ content | toc_only }}`.' return '' unless toc_enabled? TableOfContents :: Parser . new ( html , toc_config ) . build_toc end
Deprecated method . Removed in v1 . 0 .
2,681
def generate_random_id ( method ) data = File . open ( "/dev/urandom" , "rb" ) do | f | f . read ( 64 ) end case method when :base64 data = base64 ( data ) data . gsub! ( "+" , '' ) data . gsub! ( "/" , '' ) data . gsub! ( / / , '' ) return data when :hex return data . unpack ( 'H*' ) [ 0 ] else raise ArgumentError , "Invalid method #{method.inspect}" end end
Generate a long cryptographically secure random ID string which is also a valid filename .
2,682
def print_exception ( current_location , exception , destination = nil ) if ! exception . is_a? ( SystemExit ) data = exception . backtrace_string ( current_location ) if defined? ( DebugLogging ) && self . is_a? ( DebugLogging ) error ( data ) else destination ||= STDERR destination . puts ( data ) destination . flush if destination . respond_to? ( :flush ) end end end
Print the given exception including the stack trace to STDERR .
2,683
def create_thread_and_abort_on_exception ( * args ) Thread . new do Thread . current . abort_on_exception = true begin yield ( * args ) rescue SystemExit raise rescue Exception => e print_exception ( nil , e ) exit ( 1 ) end end end
A wrapper around Thread . new that installs a default exception handler . If an uncaught exception is encountered it will immediately log the exception and abort the entire program .
2,684
def process_is_alive? ( pid ) begin Process . kill ( 0 , pid ) return true rescue Errno :: ESRCH return false rescue SystemCallError => e return true end end
Checks whether the given process exists .
2,685
def global_backtrace_report if Kernel . respond_to? ( :caller_for_all_threads ) all_thread_stacks = caller_for_all_threads elsif Thread . respond_to? ( :list ) && Thread . public_method_defined? ( :backtrace ) all_thread_stacks = { } Thread . list . each do | thread | all_thread_stacks [ thread ] = thread . backtrace end end output = "========== Process #{Process.pid}: backtrace dump ==========\n" if all_thread_stacks all_thread_stacks . each_pair do | thread , stack | if thread_name = thread [ :name ] thread_name = "(#{thread_name})" end stack ||= [ "(empty)" ] output << ( "-" * 60 ) << "\n" output << "# Thread: #{thread.inspect}#{thread_name}, " if thread == Thread . main output << "[main thread], " end if thread == Thread . current output << "[current thread], " end output << "alive = #{thread.alive?}\n" output << ( "-" * 60 ) << "\n" output << " " << stack . join ( "\n " ) output << "\n\n" end else output << ( "-" * 60 ) << "\n" output << "# Current thread: #{Thread.current.inspect}\n" output << ( "-" * 60 ) << "\n" output << " " << caller . join ( "\n " ) end return output end
Returns a string which reports the backtraces for all threads or if that s not supported the backtrace for the current thread .
2,686
def current_user_name_or_id require 'etc' if ! defined? ( Etc ) begin user = Etc . getpwuid ( Process . uid ) rescue ArgumentError user = nil end if user return user . name else return "##{Process.uid}" end end
Name of the user under which we are executing or the id as fallback N . B . loader_shared_helpers . rb has the same method
2,687
def maybe_make_path_relative_to_app_root ( app_root , abs_path ) if Dir . logical_pwd == app_root && File . dirname ( abs_path ) == app_root File . basename ( abs_path ) else abs_path end end
If the current working directory equals app_root and abs_path is a file inside app_root then returns its basename . Otherwise returns abs_path .
2,688
def before_handling_requests ( forked , options ) if forked srand end if options [ "process_title" ] && ! options [ "process_title" ] . empty? $0 = options [ "process_title" ] + ": " + options [ "app_group_name" ] end if forked && defined? ( ActiveRecord :: Base ) if ActiveRecord :: Base . respond_to? ( :clear_all_connections! ) ActiveRecord :: Base . clear_all_connections! elsif ActiveRecord :: Base . respond_to? ( :clear_active_connections! ) ActiveRecord :: Base . clear_active_connections! elsif ActiveRecord :: Base . respond_to? ( :connected? ) && ActiveRecord :: Base . connected? ActiveRecord :: Base . establish_connection end end PhusionPassenger . call_event ( :starting_worker_process , forked ) if options [ "pool_account_username" ] && options [ "pool_account_password_base64" ] password = options [ "pool_account_password_base64" ] . unpack ( 'm' ) . first PhusionPassenger . call_event ( :credentials , options [ "pool_account_username" ] , password ) else PhusionPassenger . call_event ( :credentials , nil , nil ) end end
To be called before the request handler main loop is entered but after the app startup file has been loaded . This function will fire off necessary events and perform necessary preparation tasks .
2,689
def read_hash buffer = new_buffer if ! @io . read ( HEADER_SIZE , buffer ) return nil end while buffer . size < HEADER_SIZE tmp = @io . read ( HEADER_SIZE - buffer . size ) if tmp . empty? return nil else buffer << tmp end end chunk_size = buffer . unpack ( UINT16_PACK_FORMAT ) [ 0 ] if ! @io . read ( chunk_size , buffer ) return nil end while buffer . size < chunk_size tmp = @io . read ( chunk_size - buffer . size ) if tmp . empty? return nil else buffer << tmp end end result = { } offset = 0 delimiter_pos = buffer . index ( DELIMITER , offset ) while ! delimiter_pos . nil? if delimiter_pos == 0 name = "" else name = buffer [ offset .. delimiter_pos - 1 ] end offset = delimiter_pos + 1 delimiter_pos = buffer . index ( DELIMITER , offset ) if delimiter_pos . nil? raise InvalidHashError elsif delimiter_pos == 0 value = "" else value = buffer [ offset .. delimiter_pos - 1 ] end result [ name ] = value offset = delimiter_pos + 1 delimiter_pos = buffer . index ( DELIMITER , offset ) end return result rescue Errno :: ECONNRESET return nil end
Read an array message from the underlying file descriptor and return the result as a hash instead of an array . This assumes that the array message has an even number of elements . Returns nil when end - of - stream has been reached .
2,690
def read_scalar ( buffer = new_buffer , max_size = nil ) if ! @io . read ( 4 , buffer ) return nil end while buffer . size < 4 tmp = @io . read ( 4 - buffer . size ) if tmp . empty? return nil else buffer << tmp end end size = buffer . unpack ( UINT32_PACK_FORMAT ) [ 0 ] if size == 0 buffer . replace ( '' ) return buffer else if ! max_size . nil? && size > max_size raise SecurityError , "Scalar message size (#{size}) " << "exceeds maximum allowed size (#{max_size})." end if ! @io . read ( size , buffer ) return nil end if buffer . size < size tmp = '' while buffer . size < size if ! @io . read ( size - buffer . size , tmp ) return nil else buffer << tmp end end end return buffer end rescue Errno :: ECONNRESET return nil end
Read a scalar message from the underlying IO object . Returns the read message or nil on end - of - stream .
2,691
def cleanup if @main_loop_thread @main_loop_thread_lock . synchronize do @graceful_termination_pipe [ 1 ] . close rescue nil end @main_loop_thread . join end @server_sockets . each_value do | info | socket = info [ :socket ] type = get_socket_address_type ( info [ :address ] ) begin socket . close if ! socket . closed? rescue Exception => e if e . to_s !~ / / && e . message . to_s !~ / / raise e end end if type == :unix filename = info [ :address ] . sub ( / / , '' ) File . unlink ( filename ) rescue nil end end @owner_pipe . close rescue nil end
Create a new RequestHandler with the given owner pipe . + owner_pipe + must be the readable part of a pipe IO object .
2,692
def main_loop debug ( "Entering request handler main loop" ) reset_signal_handlers begin @graceful_termination_pipe = IO . pipe @graceful_termination_pipe [ 0 ] . close_on_exec! @graceful_termination_pipe [ 1 ] . close_on_exec! @main_loop_thread_lock . synchronize do @main_loop_generation += 1 @main_loop_running = true @main_loop_thread_cond . broadcast @select_timeout = nil @selectable_sockets = [ ] @server_sockets . each_value do | value | socket = value [ 2 ] @selectable_sockets << socket if socket end @selectable_sockets << @owner_pipe @selectable_sockets << @graceful_termination_pipe [ 0 ] end install_useful_signal_handlers start_threads wait_until_termination_requested wait_until_all_threads_are_idle terminate_threads debug ( "Request handler main loop exited normally" ) rescue EOFError trace ( 2 , "Request handler main loop interrupted by EOFError exception" ) rescue Interrupt trace ( 2 , "Request handler main loop interrupted by Interrupt exception" ) rescue SignalException => signal trace ( 2 , "Request handler main loop interrupted by SignalException" ) if signal . message != HARD_TERMINATION_SIGNAL raise end rescue Exception => e trace ( 2 , "Request handler main loop interrupted by #{e.class} exception" ) raise ensure debug ( "Exiting request handler main loop" ) revert_signal_handlers @main_loop_thread_lock . synchronize do @graceful_termination_pipe [ 1 ] . close rescue nil @graceful_termination_pipe [ 0 ] . close rescue nil @selectable_sockets = [ ] @main_loop_generation += 1 @main_loop_running = false @main_loop_thread_cond . broadcast end end end
Enter the request handler s main loop .
2,693
def reset_signal_handlers Signal . list_trappable . each_key do | signal | begin prev_handler = trap ( signal , DEFAULT ) if prev_handler != DEFAULT @previous_signal_handlers [ signal ] = prev_handler end rescue ArgumentError end end trap ( 'HUP' , IGNORE ) PhusionPassenger . call_event ( :after_installing_signal_handlers ) end
Reset signal handlers to their default handler and install some special handlers for a few signals . The previous signal handlers will be put back by calling revert_signal_handlers .
2,694
def default ( context = nil ) raise NoDefaultError , name unless default? value = raw_default ( context ) raise InvalidValueError if value . is_a? ( GroupedInput ) cast ( value , context ) rescue InvalidNestedValueError => error raise InvalidDefaultError , "#{name}: #{value.inspect} (#{error})" rescue InvalidValueError , MissingValueError raise InvalidDefaultError , "#{name}: #{value.inspect}" end
Get the default value .
2,695
def column_for_attribute ( name ) filter = self . class . filters [ name ] FilterColumn . intern ( filter . database_column_type ) if filter end
Returns the column object for the named filter .
2,696
def add_business_days ( date , delta ) date = roll_forward ( date ) delta . times do begin date += day_interval_for ( date ) end until business_day? ( date ) end date end
Add a number of business days to a date . If a non - business day is given counting will start from the next business day . So monday + 1 = tuesday friday + 1 = monday sunday + 1 = tuesday
2,697
def subtract_business_days ( date , delta ) date = roll_backward ( date ) delta . times do begin date -= day_interval_for ( date ) end until business_day? ( date ) end date end
Subtract a number of business days to a date . If a non - business day is given counting will start from the previous business day . So friday - 1 = thursday monday - 1 = friday sunday - 1 = thursday
2,698
def set_working_days ( working_days ) @working_days = ( working_days || default_working_days ) . map do | day | day . downcase . strip [ 0 .. 2 ] . tap do | normalised_day | raise "Invalid day #{day}" unless DAY_NAMES . include? ( normalised_day ) end end extra_working_dates_names = @extra_working_dates . map { | d | d . strftime ( "%a" ) . downcase } return if ( extra_working_dates_names & @working_days ) . none? raise ArgumentError , 'Extra working dates cannot be on working days' end
Internal method for assigning working days from a calendar config .
2,699
def dispatch ( message , channel ) push ( Oj . dump ( message , mode : :compat ) ) unless channel =~ / \A / perform_client_webhook! ( message ) end
Send an event received from Redis to the EventMachine channel which will send it to subscribed clients .