idx
int64
0
24.9k
question
stringlengths
68
4.14k
target
stringlengths
9
749
19,000
# Return the IDListPage that backs the given record. The page is taken from
# the page cache when present, otherwise loaded from the backing store.
# Raises a RuntimeError when the loaded page's uid does not match the
# record's page index (a corrupted cross reference).
def page(record)
  idx = record.page_idx
  found = @pages.get(idx, record) || load(idx, record)
  unless found.uid == idx
    raise RuntimeError, "Page reference mismatch. Record " +
      "#{record.page_idx} points to page #{found.uid}"
  end
  found
end
Return the IDListPage object with the given index .
19,001
def save_page ( p ) if p . record . page_entries != p . values . length raise RuntimeError , "page_entries mismatch for node #{p.uid}" end begin @f . seek ( p . uid * @page_size * 8 ) @f . write ( p . values . pack ( 'Q*' ) ) rescue IOError => e PEROBS . log . fatal "Cannot write cache file #{@file_name}: #{e.message}"...
Save the given IDListPage into the cache file .
19,002
def send ( method , params = [ ] , id = nil ) check_auth ( ) json = { :id => id . nil? || id . is_a? ( Integer ) == false ? rand ( 999 - 100 ) + 100 : id , :jsonrpc => "2.0" , :method => method , :params => params . is_a? ( Array ) ? params : [ ] } . to_json uri = URI . parse ( API_URL ) http = http_or_http_proxy ( uri...
Send a request to CALLR webservice
19,003
def write_object ( id , raw ) if @entries . length > @btreedb . max_blob_size split_blob @btreedb . put_raw_object ( raw , id ) else bytes = raw . bytesize crc32 = Zlib . crc32 ( raw , 0 ) start_address = reserve_bytes ( id , bytes , crc32 ) if write_to_blobs_file ( raw , start_address ) != bytes PEROBS . log . fatal '...
Create a new BTreeBlob object .
19,004
# Set the mark flag on the index entry with the given ID and persist the
# index afterwards. Logs a fatal error when no entry with that ID exists.
def mark(id)
  entry = @entries.find { |e| e[ID] == id }
  if entry
    entry[MARKED] = 1
  else
    PEROBS.log.fatal "Cannot find an entry for ID #{'%016X' % id} " +
      "#{id} to mark"
  end
  write_index
end
Set a mark on the entry with the given ID .
19,005
# Return true when the entry with the given ID carries a non-zero mark.
# When the ID is unknown: return false if ignore_errors is set, otherwise
# log a fatal error.
def is_marked?(id, ignore_errors = false)
  entry = @entries.find { |e| e[ID] == id }
  return entry[MARKED] != 0 if entry
  return false if ignore_errors
  PEROBS.log.fatal "Cannot find an entry for ID #{'%016X' % id} to check"
end
Check if the entry for a given ID is marked .
19,006
# Drop every index entry whose mark flag is unset, persist the index and
# return the list of IDs that were removed.
def delete_unmarked_entries
  deleted_ids = []
  @entries_by_id.delete_if do |id, entry|
    next false unless entry[MARKED] == 0
    deleted_ids << id
    true
  end
  @entries.delete_if { |entry| entry[MARKED] == 0 }
  write_index
  deleted_ids
end
Remove all entries from the index that have not been marked .
19,007
def check ( repair = false ) data_file_size = File . exist? ( @blobs_file_name ) ? File . size ( @blobs_file_name ) : 0 next_start = 0 prev_entry = nil @entries . each do | entry | if prev_entry && next_start > entry [ START ] PEROBS . log . fatal "#{@dir}: Index entries are overlapping\n" + "ID: #{'%016X' % prev_entry...
Run a basic consistency check .
19,008
# Write the raw byte string into the blobs file at the given byte address.
# Returns the number of bytes written; IO errors are reported as fatal.
def write_to_blobs_file(raw, address)
  File.write(@blobs_file_name, raw, address)
rescue IOError => e
  PEROBS.log.fatal "Cannot write blobs file #{@blobs_file_name}: " +
    e.message
end
Write a string of bytes into the file at the given address .
19,009
def read_from_blobs_file ( entry ) begin raw = File . read ( @blobs_file_name , entry [ BYTES ] , entry [ START ] ) rescue => e PEROBS . log . fatal "Cannot read blobs file #{@blobs_file_name}: " + e . message end if Zlib . crc32 ( raw , 0 ) != entry [ CRC ] PEROBS . log . fatal "BTreeBlob for object #{entry[ID]} has b...
Read _bytes_ bytes from the file starting at offset _address_ .
19,010
def reserve_bytes ( id , bytes , crc32 ) end_of_last_entry = 0 best_fit_start = nil best_fit_bytes = nil best_fit_index = - 1 entry_to_delete = nil @entries . each . with_index do | entry , i | if entry [ ID ] == id entry_to_delete = entry next end gap = entry [ START ] - end_of_last_entry if gap >= bytes && ( best_fit...
Reserve the bytes needed for the specified number of bytes with the given ID .
19,011
# Look up an object reference by uid: first in the modified-objects map,
# then in the fixed-size unmodified cache slot, finally falling back to
# loading it through the collection's class.
def get(uid, ref = nil)
  hit = @modified_entries[uid]
  return hit if hit
  cached = @unmodified_entries[uid % @size]
  return cached if cached && cached.uid == uid
  @klass::load(@collection, uid, ref)
end
Retrieve a object reference from the cache .
19,012
# Evict the object with the given uid from both cache tiers. The
# unmodified slot is only cleared when it actually holds that uid.
def delete(uid)
  @modified_entries.delete(uid)
  slot = uid % @size
  cached = @unmodified_entries[slot]
  @unmodified_entries[slot] = nil if cached && cached.uid == uid
end
Remove a object from the cache .
19,013
# Save all modified objects once the flush countdown expires, or
# immediately when now is true (in which case the counter is untouched
# thanks to short-circuit evaluation). Always counts the call.
def flush(now = false)
  if now || (@flush_counter -= 1) <= 0
    @modified_entries.each_value(&:save)
    @modified_entries = {}
    @flush_counter = @flush_delay
  end
  @flush_times += 1
end
Write all excess modified objects into the backing store . If now is true all modified objects will be written .
19,014
# Recursively escape control characters inside object, dispatching on its
# type; anything other than String/Hash/Array is returned unchanged.
def escape_control_chars_in_object!(object)
  if object.is_a?(String)
    escape_control_chars!(object)
  elsif object.is_a?(Hash)
    escape_control_chars_in_hash!(object)
  elsif object.is_a?(Array)
    escape_control_chars_in_array!(object)
  else
    object
  end
end
Recursively escape any control characters in object .
19,015
# Escape, in place, every control character in string using the
# UTF8Encoding replacement table, then return the (mutated) string.
def escape_control_chars!(string)
  string.gsub!(CONTROL_CHARACTERS) { |ch| UTF8Encoding::REPLACEMENT_SCHEME[ch] }
  string
end
Escapes, in place, any control characters in string before returning it.
19,016
def find_states ( path ) state = self parts = path . split ( '.' ) ret = [ ] parts . each do | name | state = state . states [ name . to_sym ] ret << state return unless state end ret end
Find all the states along the path
19,017
# Resolve this state's initial state: use the one explicitly named in the
# class specification when present; otherwise a leaf state is its own
# initial state, and a composite state delegates to its first child.
def initial_state
  explicit = self.class.specification.initial_state
  if explicit
    find_state(explicit.to_s)
  elsif leaf?
    self
  else
    states.values.first.initial_state
  end
end
If an initial state is not explicitly specified we choose the first leaf state
19,018
def open ( file_must_exist = false ) if @dirty_flag . is_locked? PEROBS . log . fatal "Index file #{@nodes.file_name} is already " + "locked" end if file_must_exist && ! @nodes . file_exist? PEROBS . log . fatal "Index file #{@nodes.file_name} does not exist" end @node_cache . clear @nodes . open if @nodes . total_entr...
Create a new BTree object .
19,019
def check ( & block ) sync return false unless @nodes . check entries = 0 res = true @progressmeter . start ( 'Checking index structure' , @size ) do | pm | res = @root . check do | k , v | pm . update ( entries += 1 ) block_given? ? yield ( k , v ) : true end end unless entries == @size PEROBS . log . error "The BTree...
Check if the tree file contains any errors .
19,020
# Remove the value stored under key from the tree. Decrements the size
# only when something was actually removed, collapses degenerate roots
# (non-leaf roots with a single child), flushes the node cache and
# returns the removed value or nil.
def remove(key)
  removed_value = @root.remove(key)
  @size -= 1 unless removed_value.nil?
  # Shrink the tree while the root is an internal node with one child.
  until @root.is_leaf || @root.children.size != 1
    old_root = @root
    set_root(@root.children.first)
    @root.parent = nil
    delete_node(old_root.node_address)
  end
  @node_cache.flush
  removed_value
end
Find and remove the value associated with the given key . If no entry was found return nil otherwise the found value .
19,021
# Fold one game outcome against other_rating into the pre-rating
# accumulators (Glicko-style variance and delta terms).
def add(other_rating, score)
  gravity, expected = other_rating.gravity_expected_score(rating.mean)
  @v_pre += gravity**2 * expected * (1 - expected)
  @delta_pre += gravity * (score - expected)
end
Add an outcome against the rating
19,022
def rate ( tau ) v = @v_pre ** - 1 delta2 = ( @delta_pre * v ) ** 2 sd2 = rating . sd ** 2 a = Math . log ( rating . volatility ** 2 ) if v . finite? f = lambda do | x | expX = Math . exp ( x ) ( expX * ( delta2 - sd2 - v - expX ) ) / ( 2 * ( sd2 + v + expX ) ** 2 ) - ( x - a ) / tau ** 2 end if delta2 > sd2 + v b = Ma...
Rate calculates Rating as at the start of the following period based on game outcomes
19,023
# Configure each participating model in turn; without a join model we
# fall back to the quick-join setup instead.
def configure
  return quick_join unless join_model
  [subject_model, object_model, join_model].compact.each(&:configure)
end
configure each model in turn
19,024
# Resolve the argument to a Class: classes pass through, model wrappers
# yield their wrapped class, and strings/symbols are either dispatched to
# the matching *_model accessor (:subject/:object/:join) or constantized.
# Anything else raises.
def get_class(type)
  case type
  when Class
    type
  when BaseModel
    type.my_class
  when String, Symbol
    if [:subject, :object, :join].include?(type.to_sym)
      get_class(send("#{type}_model"))
    else
      type.to_s.constantize
    end
  else
    raise "Can't determine a class from: #{type}"
  end
end
retrieves a given Class ie . a type of model
19,025
def load_config_file ( config_file = nil ) curated_file = nil if config_file f = File . expand_path ( config_file ) if File . exists? ( f ) curated_file = f else raise "Supplied config file (#{config_file}) doesn't seem to exist" end else locations_to_try . each do | possible_conf_file | f = File . expand_path ( possib...
This is responsible to return a hash with the contents of a YAML file
19,026
# Build an <li> tag for the sidebar navigation, appending the "active"
# CSS class to the given attributes when condition is truthy.
def li_page_active_if(condition, attributes = {}, &block)
  if condition
    attributes[:class] ||= ''
    attributes[:class] += ' active'
  end
  content_tag('li', attributes, &block)
end
page sidebar navigation
19,027
# Register a callback to run after matched tests. Matchers default to
# [:*] (all); re-registering the same block (by source location) for the
# same matchers is a no-op. Raises ArgumentError without a block.
def after(*matchers, &proc)
  raise(ArgumentError, 'block is missing') unless proc
  matchers.flatten!
  matchers = [:*] if matchers.empty?
  duplicate = after?.find do |entry|
    entry[0] == matchers && entry[1].source_location == proc.source_location
  end
  return if duplicate
  after?.push([matchers, proc])
end
Same as before, except it will run after matched tests.
19,028
def _prompt if self . class . bombshell_prompt . is_a? String self . class . bombshell_prompt elsif self . class . bombshell_prompt . is_a? Proc and self . class . bombshell_prompt . arity == 1 self . class . bombshell_prompt . call self elsif self . class . bombshell_prompt . is_a? Proc self . class . bombshell_prompt...
Render and return your shell's prompt.
19,029
def audit res = :: Bundler :: Audit :: Scanner . new . scan . to_a . map do | vuln | case vuln when :: Bundler :: Audit :: Scanner :: InsecureSource insecure_source_message vuln when :: Bundler :: Audit :: Scanner :: UnpatchedGem insecure_gem_message vuln else insecure_message vuln end end if res . any? message = "Vuln...
Scans for vulnerabilities and reports them .
19,030
# Handle one raw IRC line received from the client: parse it into a
# command and dispatch it. Lines arriving while the connection is not
# active are silently dropped; commands without a registered handler are
# logged at info level and ignored.
#
# NOTE(review): if parse() itself raises HandlerMissing, `cmd` is still
# nil inside the rescue clause, so the log line reports "nil".
def receive_line(line)
  if @active
    cmd = parse(line)
    handle cmd
  end
rescue HandlerMissing
  logger.info "ignoring irc command #{cmd.inspect}: no handler"
end
Handler for when a client sends an IRC command
19,031
def check_campfire_authentication ( & callback ) campfire . user ( "me" ) do | user | if user . name yield else command_reply :notice , "AUTH" , "could not connect to campfire: invalid API key" shutdown end end rescue Firering :: Connection :: HTTPError => e command_reply :notice , "AUTH" , "could not connect to campfi...
Checks that the campfire authentication is successful .
19,032
# Compare the nick chosen during IRC registration with the authenticated
# Campfire user's name; when they differ, push a nick change to the
# client and remember the corrected nick.
def check_nick_matches_authenticated_user
  campfire.user("me") do |user|
    expected = irc_name(user.name)
    if expected != nick
      user_reply :nick, expected
      @nick = expected
    end
  end
end
Check that the nick provided during the registration process matches the authenticated Campfire user. If the nicks don't match, send a nick change to the connected client.
19,033
# Split the supplied options between the client's control-options hash
# (those rejected by the member filter) and its new-options hash (those
# selected by it), merging each partition in place.
def rc_setup(client, options = {})
  rc_options_ctl(client)
    .merge!(rc_options_extract(client.members, options, :reject))
  rc_options_new(client)
    .merge!(rc_options_extract(client.members, options, :select))
end
to mark below private in controllers
19,034
def to_s string = "" @props . sort_by do | key , val | key . to_s end . each do | key , val | string << Encoding . encode ( key . to_s ) << "=" << Encoding . encode ( val ) << "\n" end string end
Converts the properties contained in this object into a string that can be saved directly as a Java properties file .
19,035
# Build the sidebar page list: one link per (answer sheet, page) pair,
# optionally followed by any custom pages supplied by the caller.
def page_list(answer_sheets, a = nil, custom_pages = nil)
  links = answer_sheets.flat_map do |answer_sheet|
    pages.map { |page| new_page_link(answer_sheet, page, a) }
  end
  links += custom_pages unless custom_pages.nil?
  links
end
for pages_list sidebar
19,036
# Render the named partial; when the template is missing, retry with the
# controller's default views folder as the partial's location.
def render_partial_or_default(name, options = {})
  render options.merge(partial: name)
rescue ActionView::MissingTemplate
  fallback = "#{controller.class.default_views}/#{name}"
  render options.merge(partial: fallback)
end
Tries to render the partial; if it doesn't exist, we will try to find the partial in the default views folder for this controller.
19,037
# Recursively convert client objects found in value into plain hashes,
# descending into Hash values and Array elements; scalars pass through.
#
# NOTE(review): the first branch's regex is literally `/ /` (a single
# space), which no ordinary class name matches — the pattern body appears
# to have been lost in extraction (it presumably matched the client's
# namespace, e.g. /MmJsonClient/). Confirm against the upstream source.
def client_objects_to_h(value)
  case value.class.to_s
  when / /
    # Client object: convert via its #to_h and recurse into the result.
    client_objects_to_h(value.to_h)
  when 'Hash'
    Hash[value.map { |k, v| [k, client_objects_to_h(v)] }]
  when 'Array'
    value.map { |v| client_objects_to_h(v) }
  else
    value
  end
end
Recursively converts found MmJsonClient objects to hashes.
19,038
# Merge the given options over the current ones (new keys win), remember
# the result in @options and dump it as YAML to @path.
def store(options)
  @options = self.options.merge(options)
  FileUtils.touch(@path)
  File.open(@path, 'w') { |file| YAML.dump(@options, file) }
end
Store the given options into the file . Existing options with the same keys will be overwritten .
19,039
def retrieve_page return nil unless url spurl = url spurl << ( spurl . include? ( "?" ) ? "&" : "?" ) spurl << "pagewanted=all" p = super ( spurl ) unless retreive_successful? ( p ) self . class . retrieve_session ||= WebPageParser :: HTTP :: Session . new p = super ( spurl ) end if retreive_successful? ( p ) p else ra...
We want to modify the url to request multi - page articles all in one request
19,040
# Normalize the argument to something exposing read_one_char: objects that
# already respond pass through, readable IO-likes are wrapped in R, and
# strings are wrapped in R via a StringIO.
def wrap_io_or_string(io_or_str)
  if io_or_str.respond_to?(:read_one_char)
    io_or_str
  elsif io_or_str.respond_to?(:read)
    R.new(io_or_str)
  else
    R.new(StringIO.new(io_or_str))
  end
end
Returns the given String or IO object wrapped in an object that has one method read_one_char - that gets used by all the subsequent parsing steps
19,041
# Finish a parse: flush any buffered text onto the stack, then either
# return the stack alone (single-expression mode) or append it to the
# expressions list and compact every expression one level deeper.
def wrap_up(expressions, stack, buf, stack_depth, multiple_expressions)
  stack << buf unless buf.empty?
  return stack unless multiple_expressions
  expressions << stack if stack.any?
  expressions.map { |expr| compact_subexpr(expr, stack_depth + 1) }
end
Package the expressions, stack and buffer. We use a special flag to tell us whether we need multiple expressions. If we do, the expressions will be returned; if not, just the stack. Also, anything that remains on the stack will be put on the expressions list if multiple_expressions is true.
19,042
# Push a copy of any leftover buffer contents onto the stack and clear
# the buffer in place; does nothing for an empty buffer.
def consume_remaining_buffer(stack, buf)
  return if buf.empty?
  stack << buf.dup
  buf.replace('')
end
If the passed buf contains any bytes put them on the stack and empty the buffer
19,043
def parse_expr ( io , stop_char = nil , stack_depth = 0 , multiple_expressions = false ) expressions = [ ] stack = [ ] buf = '' loop do char = io . read_one_char next if char == "\r" if stop_char && char . nil? raise Error , "IO ran out when parsing a subexpression (expected to end on #{stop_char.inspect})" elsif char ...
Parse from a passed IO object either until an unescaped stop_char is reached or until the IO is exhausted . The last argument is the class used to compose the subexpression being parsed . The subparser is reentrant and not destructive for the object containing it .
19,044
# Read a quoted string literal from io, one character at a time, until
# the unescaped stop_quote is seen. A trailing ESC in the buffer means
# the next character (including the quote) is taken verbatim. Raises
# Error when the stream ends inside the literal.
def parse_str(io, stop_quote)
  result = ''
  loop do
    char = io.read_one_char
    raise Error, "The IO ran out before the end of a literal string" if char.nil?
    if result.length > 0 && result[-1..-1] == ESC
      # Drop the escape marker and keep this character literally.
      result = result[0..-2]
      result << char
    elsif char == stop_quote
      return result
    else
      result << char
    end
  end
end
Parse a string literal in single or double quotes .
19,045
def get_attributes ( klass ) return @attribute_store [ klass ] if @attribute_store . key? ( klass ) attributes = [ ] klass . ancestors . map do | k | hash = k . instance_variable_get ( :@class_kit_attributes ) if hash != nil hash . values . each do | a | attributes . push ( a ) end end end attributes . compact! @attrib...
Get attributes for a given class
19,046
# Fetch the attribute definition with the given name from the class's
# attribute list, raising AttributeNotFoundError when absent.
def get_attribute(klass:, name:)
  attribute = get_attributes(klass).detect { |a| a[:name] == name }
  attribute || raise(ClassKit::Exceptions::AttributeNotFoundError,
                     "Attribute: #{name}, could not be found.")
end
Get attribute for a given class and name
19,047
# Recursively remove all constants (classes/modules) registered under the
# project namespace, depth-first so every child constant is unlinked
# before the current module is removed from its parent.
#
# @param parent [Module] namespace the current module is removed from
# @param current [Module] module/class whose subtree is being torn down
def cleanup(parent = Object, current = @top)
  # Only touch constants that belong to this project's lookup table.
  return unless all_project_objects_lookup[current]
  current.constants.each { |const| cleanup current, current.const_get(const) }
  # remove_const is private on Module, hence the send.
  parent.send(:remove_const, current.to_s.split('::').last.to_sym)
end
Recursively removes all constant entries of modules and classes under the MyGemName namespace
19,048
def to_hash ( object , use_alias = false ) return object . map { | i | to_hash ( i , use_alias ) } if object . is_a? ( Array ) validate_class_kit ( object . class ) hash = { } attributes = @attribute_helper . get_attributes ( object . class ) attributes . each do | attribute | key = use_alias ? ( attribute [ :alias ] |...
This method is called to convert a ClassKit object into a Hash .
19,049
# Serialize a ClassKit object to JSON via its hash representation.
def to_json(object, use_alias = false)
  JSON.dump(to_hash(object, use_alias))
end
This method is called to convert a ClassKit object into JSON .
19,050
# Deserialize JSON into a ClassKit object of the given class.
# NOTE(review): JSON.load can enable create_additions on some json gem
# versions; consider JSON.parse for untrusted input — left as-is to
# preserve behavior.
def from_json(json:, klass:, use_alias: false)
  from_hash(hash: JSON.load(json), klass: klass, use_alias: use_alias)
end
This method is called to convert JSON into a ClassKit object .
19,051
# Resolve self.path through the Rails routing table and store the
# recognized controller, action and any remaining route parameters into
# the corresponding fields of the including object.
def set_destination_data!
  route_info = Rails.application.routes.recognize_path(self.path)
  self.controller = route_info[:controller]
  self.action = route_info[:action]
  self.defaults = route_info.reject { |key, _| [:controller, :action].include?(key) }
end
This method tries to identify the route contained at self.path to extract the destination's controller, action and other arguments, and save them into the corresponding controller, action and defaults fields of the including objects.
19,052
# Returns a collection of Corpora (response data only; status and
# headers are discarded).
def corpora(project_id, opts = {})
  data, = corpora_with_http_info(project_id, opts)
  data
end
Returns a collection of Corpora.
19,053
# Returns the named Corpus (response data only).
def corpus(project_id, name, opts = {})
  data, = corpus_with_http_info(project_id, name, opts)
  data
end
Returns the Corpus.
19,054
# Creates a new Corpus; the endpoint returns no payload, so nil is
# returned after the request completes.
def create_corpus(project_id, name, body, opts = {})
  create_corpus_with_http_info(project_id, name, body, opts)
  nil
end
Creates a new Corpus.
19,055
# Creates a new Project and returns the response data.
def create_project(project, opts = {})
  data, = create_project_with_http_info(project, opts)
  data
end
Creates a new Project.
19,056
# Returns the Job identified by project and job id (response data only).
def job(project_id, job_id, opts = {})
  data, = job_with_http_info(project_id, job_id, opts)
  data
end
Returns the Job.
19,057
# Returns the result of the given Job (response data only).
def job_result(project_id, job_id, opts = {})
  data, = job_result_with_http_info(project_id, job_id, opts)
  data
end
Returns the Job Result.
19,058
# Returns a collection of Jobs for the project (response data only).
def jobs(project_id, opts = {})
  data, = jobs_with_http_info(project_id, opts)
  data
end
Returns a collection of Jobs.
19,059
# Returns the Project identified by project_id (response data only).
def project(project_id, opts = {})
  data, = project_with_http_info(project_id, opts)
  data
end
Returns the Project.
19,060
# Updates an existing Project and returns the response data.
def update_project(project, opts = {})
  data, = update_project_with_http_info(project, opts)
  data
end
Updates an existing Project.
19,061
# Creates a video upload session and returns the response data.
def upload_video(project_id, video_upload_body, opts = {})
  data, = upload_video_with_http_info(project_id, video_upload_body, opts)
  data
end
Creates an upload session
19,062
def search_on ( column_source , methods ) Array ( methods ) . each do | method_name | join column_source self . searches += [ { column_source : column_source . to_s , method_name : method_name . to_s } ] end end
Allow user defined fields to sort on in addition to introspected fields
19,063
# Decode the voxel data section of the NIFTI stream into @image_rubyarray.
#
# Seeks the stream to the voxel offset recorded in the header, then decodes
# the remainder of the stream using the type that corresponds to the
# header's datatype code.
#
# Fix: removed the unused `raw_image = []` local from the original.
#
# @return [Array] the decoded image data (also stored in @image_rubyarray)
def read_image
  @stream.index = @hdr['vox_offset']
  type = NIFTI_DATATYPES[@hdr['datatype']]
  # NOTE(review): this lookup's result was never used in the original
  # either; kept in case it is meant to validate the datatype — confirm.
  format = @stream.format[type]
  @image_rubyarray = @stream.decode(@stream.rest_length, type)
end
Create a NRead object to parse a nifti file or binary string and set header and image info instance variables .
19,064
# Build an NArray view of the image data when the optional narray gem is
# loaded; otherwise record a message explaining how to enable it.
#
# Fix: removed the dead `pixel_data` alias local from the original.
#
# @param image_array [Array] flat voxel values
# @param dim [Array] NIFTI dim header field; dim[0] holds the number of
#   dimensions, dim[1..dim[0]] the extent of each dimension
# @return [Object, nil] the reshaped narray, or nil when NArray is absent
def get_image_narray(image_array, dim)
  if Object.const_defined?('NArray')
    # Reshape the flat array to the volume dimensions from the header.
    @image_narray = NArray.to_na(image_array).reshape!(*dim[1..dim[0]])
  else
    add_msg "Can't find NArray, no image_narray created. Please `gem install narray`"
  end
end
Create an NArray from the image data if the NArray library is available.
19,065
# Parse the NIFTI header: validate it, read the basic and extended
# sections, and optionally decode the image data (:image) and build an
# narray view of it (:narray). Marks the parse as successful at the end.
def parse_header(options = {})
  check_header
  @hdr = parse_basic_header
  @extended_header = parse_extended_header
  if options[:image]
    read_image
  end
  if options[:narray]
    get_image_narray(@image_rubyarray, @hdr['dim'])
  end
  @success = true
end
Parse the NIFTI Header .
19,066
def parse_basic_header header = { } HEADER_SIGNATURE . each do | header_item | name , length , type = * header_item header [ name ] = @stream . decode ( length , type ) end if header [ 'dim_info' ] header [ 'freq_dim' ] = dim_info_to_freq_dim ( header [ 'dim_info' ] ) header [ 'phase_dim' ] = dim_info_to_phase_dim ( he...
Read the nifti header according to its byte signature . The file stream will be left open and should be positioned at the end of the 348 byte header .
19,067
def parse_extended_header extended = [ ] extension = @stream . decode ( 4 , "BY" ) if extension [ 0 ] != 0 while @stream . index < @hdr [ 'vox_offset' ] esize , ecode = * @stream . decode ( 8 , "UL" ) data = @stream . decode ( esize - 8 , "STR" ) extended << { :esize => esize , :ecode => ecode , :data => data } end end...
Read any extended header information . The file stream will be left at imaging data itself taking vox_offset into account for NIFTI Header Extended Attributes . Pass in the voxel offset so the extended header knows when to stop reading .
19,068
def open_file ( file ) if File . exist? ( file ) if File . readable? ( file ) if not File . directory? ( file ) if File . size ( file ) > 8 begin @file = Zlib :: GzipReader . new ( File . new ( file , "rb" ) ) rescue Zlib :: GzipFile :: Error @file = File . new ( file , "rb" ) end else @msg << "Error! File is too small...
Tests if a file is readable and if so opens it .
19,069
# Compose the CSS class string used by the JavaScript validator:
# ' required' when the question is required for this answer sheet, plus
# ' validate-<style>' when the style is one of the validatable kinds.
def validation_class(answer_sheet)
  classes = ''
  classes += ' required' if self.required?(answer_sheet)
  style_match = %w[number currency-dollar email url phone].find { |v| v == self.style }
  classes += ' validate-' + style_match if style_match
  classes
end
css class names for javascript - based validation
19,070
# Write the NIFTI object to disk: open the target file, set up a
# little-endian stream over it, emit the basic and extended headers and
# the image data, then close and flag success. Does nothing when the
# file could not be opened.
def write
  open_file(@file_name)
  return unless @file
  init_variables
  @file_endian = false
  @stream = Stream.new(nil, @file_endian)
  @stream.set_file(@file)
  write_basic_header
  write_extended_header
  write_image
  @file.close
  @success = true
end
Creates an NWrite instance .
19,071
def write_basic_header HEADER_SIGNATURE . each do | header_item | begin name , length , type = * header_item str = @stream . encode ( @obj . header [ name ] , type ) padded_str = @stream . encode_string_to_length ( str , length ) @stream . write padded_str @stream . skip length rescue StandardError => e puts name , len...
Write Basic Header
19,072
def write_extended_header unless @obj . extended_header . empty? @stream . write @stream . encode ( [ 1 , 0 , 0 , 0 ] , "BY" ) @obj . extended_header . each do | extension | @stream . write @stream . encode extension [ :esize ] , "UL" @stream . write @stream . encode extension [ :ecode ] , "UL" @stream . write @stream ...
Write Extended Header
19,073
def power_on target = entity_xml class_name = self . class . name . split ( "::" ) . last Config . logger . debug "#{class_name} status: #{target[:status]}" if is_status? ( target , :POWERED_ON ) Config . logger . info "#{class_name} #{target.name} is already powered-on." return end power_on_link = target . power_on_li...
Power on VApp or VM
19,074
def power_off target = entity_xml class_name = self . class . name . split ( "::" ) . last Config . logger . debug "#{class_name} status: #{target[:status]}" if is_status? ( target , :SUSPENDED ) error_msg = "#{class_name} #{target.name} suspended, discard state before powering off." fail class_name == "VApp" ? VappSus...
Power off VApp or VM
19,075
# Cancels an Encoding and returns the response data.
def cancel_encoding(id, factory_id, opts = {})
  data, = cancel_encoding_with_http_info(id, factory_id, opts)
  data
end
Cancels an Encoding .
19,076
# Copies the given Profile and returns the response data.
def copy_profile(id, factory_id, copy_profile_body, opts = {})
  data, = copy_profile_with_http_info(id, factory_id, copy_profile_body, opts)
  data
end
Copies a given Profile
19,077
# Creates an Encoding and returns the response data.
def create_encoding(factory_id, create_encoding_body, opts = {})
  data, = create_encoding_with_http_info(factory_id, create_encoding_body, opts)
  data
end
Creates an Encoding
19,078
# Creates a new factory and returns the response data.
def create_factory(create_factory_body, opts = {})
  data, = create_factory_with_http_info(create_factory_body, opts)
  data
end
Creates a new factory
19,079
# Creates a Profile and returns the response data.
def create_profile(factory_id, create_profile_body, opts = {})
  data, = create_profile_with_http_info(factory_id, create_profile_body, opts)
  data
end
Creates a Profile
19,080
# Creates a Video from a provided source_url and returns the response data.
def create_video(factory_id, create_video_body, opts = {})
  data, = create_video_with_http_info(factory_id, create_video_body, opts)
  data
end
Creates a Video from a provided source_url .
19,081
# Deletes an Encoding and returns the response data (an indication of
# whether the operation succeeded).
def delete_encoding(id, factory_id, opts = {})
  data, = delete_encoding_with_http_info(id, factory_id, opts)
  data
end
Deletes an Encoding from both Telestream Cloud and your storage . Returns an information whether the operation was successful .
19,082
# Deletes the given Profile and returns the response data.
def delete_profile(id, factory_id, opts = {})
  data, = delete_profile_with_http_info(id, factory_id, opts)
  data
end
Deletes a given Profile
19,083
# Deletes a Video object and returns the response data.
def delete_video(id, factory_id, opts = {})
  data, = delete_video_with_http_info(id, factory_id, opts)
  data
end
Deletes a Video object .
19,084
# Deletes a video's source file and returns the response data.
def delete_video_source(id, factory_id, opts = {})
  data, = delete_video_source_with_http_info(id, factory_id, opts)
  data
end
Delete a video's source file.
19,085
# Returns an Encoding object (response data only).
def encoding(id, factory_id, opts = {})
  data, = encoding_with_http_info(id, factory_id, opts)
  data
end
Returns an Encoding object .
19,086
# Returns a list of Encoding objects (response data only).
def encodings(factory_id, opts = {})
  data, = encodings_with_http_info(factory_id, opts)
  data
end
Returns a list of Encoding objects
19,087
# Returns the number of Encodings created with the given factory.
def encodings_count(factory_id, opts = {})
  data, = encodings_count_with_http_info(factory_id, opts)
  data
end
Returns a number of Encoding objects created using a given factory .
19,088
# Returns a Factory object (response data only).
def factory(id, opts = {})
  data, = factory_with_http_info(id, opts)
  data
end
Returns a Factory object.
19,089
# Returns a Factory's notification settings (response data only).
def notifications(factory_id, opts = {})
  data, = notifications_with_http_info(factory_id, opts)
  data
end
Returns a Factory's notification settings.
19,090
# Returns a Profile object looked up by id or name (response data only).
def profile(id_or_name, factory_id, opts = {})
  data, = profile_with_http_info(id_or_name, factory_id, opts)
  data
end
Returns a Profile object .
19,091
# Returns the Encodings belonging to a Profile (response data only).
def profile_encodings(id_or_name, factory_id, opts = {})
  data, = profile_encodings_with_http_info(id_or_name, factory_id, opts)
  data
end
Returns a list of Encodings that belong to a Profile .
19,092
# Returns a collection of Profile objects (response data only).
def profiles(factory_id, opts = {})
  data, = profiles_with_http_info(factory_id, opts)
  data
end
Returns a collection of Profile objects .
19,093
# Returns the Videos currently queued for encoding (response data only).
def queued_videos(factory_id, opts = {})
  data, = queued_videos_with_http_info(factory_id, opts)
  data
end
Returns a collection of Video objects queued for encoding .
19,094
# Retries a failed encoding and returns the response data.
def retry_encoding(id, factory_id, opts = {})
  data, = retry_encoding_with_http_info(id, factory_id, opts)
  data
end
Retries a failed encoding .
19,095
# Returns a signed URL pointing to an Encoding (response data only).
def signed_encoding_url(id, factory_id, opts = {})
  data, = signed_encoding_url_with_http_info(id, factory_id, opts)
  data
end
Returns a signed url pointing to an Encoding .
19,096
# Returns signed URLs for an Encoding's outputs (response data only).
def signed_encoding_urls(id, factory_id, opts = {})
  data, = signed_encoding_urls_with_http_info(id, factory_id, opts)
  data
end
Returns a list of signed urls pointing to an Encoding s outputs .
19,097
# Returns a signed URL pointing to a Video (response data only).
def signed_video_url(id, factory_id, opts = {})
  data, = signed_video_url_with_http_info(id, factory_id, opts)
  data
end
Returns a signed url pointing to a Video .
19,098
# Toggles a factory's synchronisation settings and returns the response data.
def toggle_factory_sync(id, factory_sync_body, opts = {})
  data, = toggle_factory_sync_with_http_info(id, factory_sync_body, opts)
  data
end
Toggles synchronisation settings .
19,099
# Updates an Encoding and returns the response data.
def update_encoding(id, factory_id, update_encoding_body, opts = {})
  data, = update_encoding_with_http_info(id, factory_id, update_encoding_body, opts)
  data
end
Updates an Encoding