idx
int64
0
24.9k
question
stringlengths
68
4.14k
target
stringlengths
9
749
1,100
# Extract the sheet entries from an ODS zip archive given either a file
# path or an IO stream, then hand them off for processing.
#
# Fix: the original used the `unless ... else` construct, which is a
# well-known Ruby anti-idiom; rewritten as a plain if/else with the
# positive (stream) branch first.
#
# @param zipfilename_or_stream [String, IO] path to the file or a stream
# @return [void]
def process_zipfile(zipfilename_or_stream)
  @sheet_files = []
  if is_stream?(zipfilename_or_stream)
    zip_file = Zip::CentralDirectory.new
    zip_file.read_from_stream zipfilename_or_stream
  else
    zip_file = Zip::File.open(zipfilename_or_stream)
  end
  # Sort entries by name so processing order is deterministic.
  process_zipfile_entries zip_file.to_a.sort_by(&:name)
end
Extracts all needed files from the zip file
1,101
# If the ODS manifest declares encryption-data for content.xml, decrypt it
# into roo_content_xml_path; otherwise leave the extracted content as-is.
#
# @param zip_file [Zip::File] the opened ODS archive
# @param content_entry [Zip::Entry] the content.xml zip entry
# @param roo_content_xml_path [String] destination path for content.xml
# @param options [Hash] expects :password when the file is encrypted
# @raise [ArgumentError] if the manifest is missing, or the file is
#   encrypted and no password was supplied
def decrypt_if_necessary(zip_file, content_entry, roo_content_xml_path, options)
  if (manifest_entry = zip_file.glob('META-INF/manifest.xml').first)
    # Extract the manifest to a temp file so it can be parsed as XML.
    roo_manifest_xml_path = File.join(@tmpdir, 'roo_manifest.xml')
    manifest_entry.extract(roo_manifest_xml_path)
    manifest = ::Roo::Utils.load_xml(roo_manifest_xml_path)
    # An encryption-data element on content.xml's file-entry marks the
    # document as password protected. (Adjacent string literals below are
    # concatenated by Ruby.)
    encryption_data = manifest.xpath("//manifest:file-entry[@manifest:full-path='content.xml']" "/manifest:encryption-data").first
    unless encryption_data.nil?
      password = options[:password]
      if !password.nil?
        perform_decryption(encryption_data, password, content_entry, roo_content_xml_path)
      else
        fail ArgumentError, 'file is encrypted but password was not supplied'
      end
    end
  else
    fail ArgumentError, 'file missing required META-INF/manifest.xml'
  end
end
If the ODS file has an encryption-data element, then try to decrypt. If successful, the temporary content.xml will be overwritten with the decrypted contents.
1,102
# Process the ODS encryption manifest elements and decrypt content.xml,
# writing the inflated plaintext over roo_content_xml_path.
#
# @param encryption_data [Nokogiri::XML::Node] the encryption-data element
# @param password [String] user-supplied password
# @param content_entry [Zip::Entry] the encrypted content.xml entry
# @param roo_content_xml_path [String] output path for decrypted XML
# @raise [ArgumentError] on unknown algorithms, missing manifest elements,
#   or a failed decryption (usually a wrong password)
def perform_decryption(encryption_data, password, content_entry, roo_content_xml_path)
  algorithm_node = encryption_data.xpath('manifest:algorithm').first
  key_derivation_node = encryption_data.xpath('manifest:key-derivation').first
  start_key_generation_node = encryption_data.xpath('manifest:start-key-generation').first
  if !algorithm_node.nil? && !key_derivation_node.nil? && !start_key_generation_node.nil?
    # Pull the cipher parameters out of the manifest attributes.
    algorithm = algorithm_node['manifest:algorithm-name']
    iv = Base64.decode64(algorithm_node['manifest:initialisation-vector'])
    key_derivation_name = key_derivation_node['manifest:key-derivation-name']
    iteration_count = key_derivation_node['manifest:iteration-count'].to_i
    salt = Base64.decode64(key_derivation_node['manifest:salt'])
    key_generation_name = start_key_generation_node['manifest:start-key-generation-name']
    # The password is pre-hashed before key derivation; only SHA-256 is
    # supported here.
    hashed_password = password
    if key_generation_name == 'http://www.w3.org/2000/09/xmldsig#sha256'
      hashed_password = Digest::SHA256.digest(password)
    else
      fail ArgumentError, "Unknown key generation algorithm #{key_generation_name}"
    end
    cipher = find_cipher(algorithm, key_derivation_name, hashed_password, salt, iteration_count, iv)
    begin
      decrypted = decrypt(content_entry, cipher)
      # The decrypted payload is raw-deflate compressed; -MAX_WBITS tells
      # zlib there is no header.
      IO.binwrite(roo_content_xml_path, Zlib::Inflate.new(-Zlib::MAX_WBITS).inflate(decrypted))
    rescue StandardError => error
      # A bad password typically surfaces as a padding/inflate error.
      raise ArgumentError, "Invalid password or other data error: #{error}"
    end
  else
    fail ArgumentError, 'manifest.xml missing encryption-data elements'
  end
end
Process the ODS encryption manifest and perform the decryption
1,103
# Derive a cipher key from an ODS key-derivation description. Only PBKDF2
# (with HMAC-SHA1) is supported.
#
# Fix: the original called `fail ArgumentError, 'Unknown key derivation
# name ', args[1]` — the trailing comma made args[1] the *backtrace*
# argument of Kernel#raise, so the offending name never appeared in the
# message. It is now interpolated into the message.
#
# @param args [Array] [cipher, key_derivation_name, password, salt, iteration_count]
# @return [String] the derived key, sized to the cipher's key length
# @raise [ArgumentError] for any derivation name other than 'PBKDF2'
def find_cipher_key(*args)
  fail ArgumentError, "Unknown key derivation name #{args[1]}" unless args[1] == 'PBKDF2'
  ::OpenSSL::PKCS5.pbkdf2_hmac_sha1(args[2], args[3], args[4], args[0].key_len)
end
Create a cipher key based on an ODS algorithm string from manifest . xml
1,104
# Block-decrypt the raw compressed bytes of a zip entry using the given
# cipher, reading directly from the archive file on disk.
#
# @param content_entry [Zip::Entry] entry whose compressed payload to decrypt
# @param cipher [OpenSSL::Cipher] initialized decryption cipher
# @return [String] the decrypted (still deflate-compressed) bytes
def decrypt(content_entry, cipher)
  decrypted = ''
  File.open(@filename, 'rb') do |zipfile|
    # Seek past the entry's local header straight to its data.
    zipfile.seek(content_entry.local_header_offset + content_entry.calculate_local_header_size)
    total_to_read = content_entry.compressed_size
    block_size = 4096
    # Never read past the end of this entry's compressed payload.
    block_size = total_to_read if block_size > total_to_read
    while (buffer = zipfile.read(block_size))
      decrypted += cipher.update(buffer)
      total_to_read -= buffer.length
      break if total_to_read == 0
      # Shrink the final read to exactly the remaining byte count.
      block_size = total_to_read if block_size > total_to_read
    end
  end
  # cipher.final flushes the last block (and validates padding).
  decrypted + cipher.final
end
Block decrypt raw bytes from the zip file based on the cipher
1,105
# Store a parsed table cell into the internal per-sheet caches (@cell,
# @cell_type, @formula, @style), coercing the raw value according to the
# declared ODS value type.
#
# @param sheet [Object] sheet identifier used as cache key
# @param x [Integer] column coordinate
# @param y [Integer] row coordinate
# @param i [Integer] column offset (for repeated cells)
# @param v [String] raw cell value text
# @param value_type [String, nil] declared ODS value type
# @param formula [String, nil] raw formula text, if any
# @param table_cell [Object] the XML cell node
# @param str_v [String] the string representation of the value
# @param style_name [String] name of the cell's style
def set_cell_values(sheet, x, y, i, v, value_type, formula, table_cell, str_v, style_name)
  key = [y, x + i]
  @cell_type[sheet] ||= {}
  @cell_type[sheet][key] = value_type.to_sym if value_type
  @formula[sheet] ||= {}
  if formula
    # Strip the formula namespace prefix ("of:" or "oooc:") if present.
    ['of:', 'oooc:'].each do |prefix|
      if formula[0, prefix.length] == prefix
        formula = formula[prefix.length..-1]
      end
    end
    @formula[sheet][key] = formula
  end
  @cell[sheet] ||= {}
  @style[sheet] ||= {}
  @style[sheet][key] = style_name
  case @cell_type[sheet][key]
  when :float
    # Keep integral values as Integers unless the raw text contains a dot.
    @cell[sheet][key] = (table_cell.attributes['value'].to_s.include?(".") || table_cell.children.first.text.include?(".")) ? v.to_f : v.to_i
  when :percentage
    @cell[sheet][key] = v.to_f
  when :string
    @cell[sheet][key] = str_v
  when :date
    # A date-value longer than YYYY-MM-DD carries a time component, so
    # promote the cell to :datetime.
    if attribute(table_cell, 'date-value').size != 'XXXX-XX-XX'.size
      @cell[sheet][key] = DateTime.parse(attribute(table_cell, 'date-value').to_s)
      @cell_type[sheet][key] = :datetime
    else
      @cell[sheet][key] = table_cell.attributes['date-value']
    end
  when :time
    # Convert "HH:MM:SS" to seconds since midnight.
    hms = v.split(':')
    @cell[sheet][key] = hms[0].to_i * 3600 + hms[1].to_i * 60 + hms[2].to_i
  else
    @cell[sheet][key] = v
  end
end
helper function to set the internal representation of cells
1,106
# Compute an upper bound for the number of cells covered by a range
# string such as "A1:B2". A single cell reference counts as 1.
#
# @param str [String] range in "A1:B2" form (or a single cell like "A1")
# @return [Integer] number of cells in the rectangle
# @raise [ArgumentError] when the string is not a one- or two-part range
def num_cells_in_range(str)
  parts = str.split(':')
  return 1 if parts.count == 1
  unless parts.count == 2
    raise ArgumentError.new("invalid range string: #{str}. Supported range format 'A1:B2'")
  end
  left_x, left_y = extract_coordinate(parts[0])
  right_x, right_y = extract_coordinate(parts[1])
  # Inclusive rectangle: (width) * (height).
  (right_x - left_x + 1) * (right_y - left_y + 1)
end
Compute upper bound for cells in a given cell range .
1,107
# Catalog a bundle: rebuild the source map hash, requires, and store from
# the bundle's sources, reusing unchanged maps wherever possible.
#
# @param bundle [Bundle]
# @return [self]
def catalog bundle
  new_map_hash = {}
  # Assume everything is already merged when the bundle and the existing
  # map hash are the same size; any change below flips this to false.
  merged = (bundle.sources.length == source_map_hash.values.length)
  bundle.sources.each do |source|
    if source_map_hash.key?(source.filename)
      if source_map_hash[source.filename].code == source.code && source_map_hash[source.filename].source.synchronized? && source.synchronized?
        # Identical, synchronized code: reuse the existing map untouched.
        new_map_hash[source.filename] = source_map_hash[source.filename]
      elsif !source.synchronized?
        # Source not synchronized yet: keep the old map but point it at
        # the new source object.
        new_map_hash[source.filename] = source_map_hash[source.filename]
        new_map_hash[source.filename].instance_variable_set(:@source, source)
      else
        # Code changed: remap and try an incremental merge first.
        map = Solargraph::SourceMap.map(source)
        if source_map_hash[source.filename].try_merge!(map)
          new_map_hash[source.filename] = source_map_hash[source.filename]
        else
          new_map_hash[source.filename] = map
          merged = false
        end
      end
    else
      # New file: build a fresh map.
      map = Solargraph::SourceMap.map(source)
      new_map_hash[source.filename] = map
      merged = false
    end
  end
  # Nothing changed; keep the current store and caches.
  return self if merged
  pins = []
  reqs = []
  new_map_hash.values.each do |map|
    pins.concat map.pins
    reqs.concat map.requires.map(&:name)
  end
  reqs.concat bundle.workspace.config.required
  unless bundle.workspace.require_paths.empty?
    # Drop requires that resolve to files inside the workspace's own
    # require paths; those are already mapped locally.
    reqs.delete_if do |r|
      result = false
      bundle.workspace.require_paths.each do |l|
        pn = Pathname.new(bundle.workspace.directory).join(l, "#{r}.rb")
        if new_map_hash.keys.include?(pn.to_s)
          result = true
          break
        end
      end
      result
    end
  end
  yard_map.change(reqs)
  new_store = Store.new(pins + yard_map.pins)
  # Swap in the new state atomically.
  @mutex.synchronize {
    @cache.clear
    @source_map_hash = new_map_hash
    @store = new_store
    @unresolved_requires = yard_map.unresolved_requires
  }
  self
end
Catalog a bundle .
1,108
# Get a clip (completion/definition context) for a file position.
#
# @param filename [String]
# @param position [Position, Array(Integer, Integer)]
# @return [SourceMap::Clip]
def clip_at filename, position
  pos = Position.normalize(position)
  cursor = cursor_at(filename, pos)
  SourceMap::Clip.new(self, cursor)
end
Get a clip by filename and position .
1,109
# Get suggestions for constants in the specified namespace. The result
# may contain both constant and namespace pins.
#
# @param namespace [String, nil] the namespace to search (nil means root)
# @param context [String] the context of the search, e.g. "Foo::Bar"
# @return [Array<Pin::Base>]
def get_constants namespace, context = ''
  namespace ||= ''
  cached = cache.get_constants(namespace, context)
  return cached.clone unless cached.nil?
  skip = []
  result = []
  bases = context.split('::')
  # Walk the context outward (Foo::Bar, then Foo), collecting constants
  # visible from each enclosing namespace.
  while bases.length > 0
    built = bases.join('::')
    fqns = qualify(namespace, built)
    visibility = [:public]
    # Private constants are visible only from the context itself.
    visibility.push :private if fqns == context
    result.concat inner_get_constants(fqns, visibility, skip)
    bases.pop
  end
  # Finally include constants visible from the root namespace.
  fqns = qualify(namespace, '')
  visibility = [:public]
  visibility.push :private if fqns == context
  result.concat inner_get_constants(fqns, visibility, skip)
  cache.set_constants(namespace, context, result)
  result
end
Get suggestions for constants in the specified namespace . The result may contain both constant and namespace pins .
1,110
# Get a fully qualified namespace name, searching from the given context
# outward until the name resolves.
#
# @param namespace [String, nil] the name to qualify
# @param context [String] the namespace to start the search in
# @return [String, nil] the fully qualified name, or nil if unresolvable
def qualify namespace, context = ''
  return nil if namespace.nil?
  # 'self' resolves to the context itself.
  return qualify(context) if namespace == 'self'
  hit = cache.get_qualified_namespace(namespace, context)
  return hit.clone unless hit.nil?
  # A leading '::' forces resolution from the root namespace.
  fqns = if namespace.start_with?('::')
    inner_qualify(namespace[2..-1], '', [])
  else
    inner_qualify(namespace, context, [])
  end
  cache.set_qualified_namespace(namespace, context, fqns)
  fqns
end
Get a fully qualified namespace name . This method will start the search in the specified context until it finds a match for the name .
1,111
# Get instance variable pins defined in a namespace and all of its
# superclasses.
#
# @param namespace [String]
# @param scope [Symbol] :instance or :class
# @return [Array<Pin::Base>]
def get_instance_variable_pins(namespace, scope = :instance)
  pins = []
  pins.concat store.get_instance_variables(namespace, scope)
  # Climb the superclass chain, qualifying each name in its own context.
  ancestor = qualify(store.get_superclass(namespace), namespace)
  while ancestor
    pins.concat store.get_instance_variables(ancestor, scope)
    ancestor = qualify(store.get_superclass(ancestor), ancestor)
  end
  pins
end
Get an array of instance variable pins defined in specified namespace and scope .
1,112
# Get an array of method pins available in a particular context.
#
# @param fqns [String] fully qualified namespace ('' means the root)
# @param scope [Symbol] :instance or :class
# @param visibility [Array<Symbol>] visibilities to include
# @param deep [Boolean] include inherited methods
# @return [Array<Pin::Base>]
def get_methods fqns, scope: :instance, visibility: [:public], deep: true
  cached = cache.get_methods(fqns, scope, visibility, deep)
  return cached.clone unless cached.nil?
  skip = []
  raw = []
  if fqns == ''
    # The root namespace sees class and instance methods plus Kernel's
    # instance methods.
    [[fqns, :class], [fqns, :instance], ['Kernel', :instance]].each do |ns, sc|
      raw.concat inner_get_methods(ns, sc, visibility, deep, skip)
    end
  else
    raw.concat inner_get_methods(fqns, scope, visibility, deep, skip)
  end
  resolved = resolve_method_aliases(raw)
  cache.set_methods(fqns, scope, visibility, deep, resolved)
  resolved
end
Get an array of methods available in a particular context .
1,113
# Get method pins for a complex type, widening visibility when the type's
# namespace matches or relates to the calling context.
#
# @param type [ComplexType] the type to look up
# @param context [String] the calling namespace
# @param internal [Boolean] whether private methods should be included
# @return [Array<Pin::Base>]
def get_complex_type_methods type, context = '', internal = false
  return [] if type.undefined? || type.void?
  result = []
  if type.duck_type?
    # Duck types contribute synthetic method pins plus Object's methods.
    type.select(&:duck_type?).each do |t|
      result.push Pin::DuckMethod.new(nil, t.tag[1..-1])
    end
    result.concat get_methods('Object')
  elsif !(type.nil? || type.name == 'void')
    visibility = [:public]
    # Same namespace (or a sub/superclass relation) unlocks protected —
    # and, for internal lookups, private — methods.
    if type.namespace == context || super_and_sub?(type.namespace, context)
      visibility.push :protected
      visibility.push :private if internal
    end
    result.concat get_methods(type.namespace, scope: type.scope, visibility: visibility)
  end
  result
end
Get an array of method pins for a complex type .
1,114
# Get the stack of method pins matching a name in a namespace, ordered by
# the ancestry chain with highest precedence first.
#
# @param fqns [String] fully qualified namespace
# @param name [String] method name
# @param scope [Symbol] :instance or :class
# @return [Array<Pin::Base>]
def get_method_stack fqns, name, scope: :instance
  all_pins = get_methods(fqns, scope: scope, visibility: [:private, :protected, :public])
  all_pins.select { |pin| pin.name == name }
end
Get a stack of method pins for a method name in a namespace . The order of the pins corresponds to the ancestry chain with highest precedence first .
1,115
# Get all suggestions that match the specified path.
#
# @param path [String, nil]
# @return [Array<Pin::Base>]
def get_path_suggestions path
  return [] if path.nil?
  pins = []
  pins.concat store.get_path_pins(path)
  resolve_method_aliases(pins)
end
Get an array of all suggestions that match the specified path .
1,116
# Get a list of documented paths that match the query string.
#
# @param query [String]
# @return [Array<String>]
def search query
  rake_yard(store)
  matches = []
  code_object_paths.each do |path|
    # Once a match exists, skip method paths unless the query itself is a
    # method-style query ('.'/'#') or the candidate isn't a method path.
    next unless matches.empty? ||
                query.include?('.') || query.include?('#') ||
                !(path.include?('.') || path.include?('#'))
    matches.push path if path.downcase.include?(query.downcase)
  end
  matches
end
Get a list of documented paths that match the query .
1,117
# Get YARD documentation for the specified path.
#
# Fix: the original looked the object up twice (once for the nil check,
# once for the push); the lookup is now performed a single time.
#
# @param path [String]
# @return [Array] an array with the code object, or empty if not found
def document path
  rake_yard(store)
  obj = code_object_at(path)
  obj.nil? ? [] : [obj]
end
Get YARD documentation for the specified path .
1,118
# Get all workspace symbols that match the query.
#
# @param query [String]
# @return [Array<Pin::Base>]
def query_symbols query
  source_map_hash.values.flat_map { |map| map.query_symbols(query) }
end
Get an array of all symbols in the workspace that match the query .
1,119
# Require extensions for the experimental plugin architecture by scanning
# installed gem names and requiring the ones that match.
#
# NOTE(review): both regexes below (/ \- \- \- \- \. /) appear garbled —
# as written they match a literal " - - - - . " sequence, which no gem
# name contains, and the second match has no capture group so [1] would
# be nil. Presumably the originals matched "solargraph-...-ext" style gem
# names; verify against the upstream source before relying on this.
def require_extensions
  Gem::Specification.all_names.select { |n| n.match(/ \- \- \- \- \. /) }.each do |n|
    Solargraph::Logging.logger.info "Loading extension #{n}"
    require n.match(/ \- \- \- \- \. /)[1]
  end
end
Require extensions for the experimental plugin architecture. Any installed gem with a name that starts with solargraph- is considered an extension.
1,120
# Sort pins so that variables with nil assignments come last, preserving
# the relative order within each group.
#
# @param pins [Array] pins responding to #variable? and #nil_assignment?
# @return [Array] non-nil pins first, nil-assigned variable pins last
def prefer_non_nil_variables pins
  nil_assigned, preferred = pins.partition { |pin| pin.variable? && pin.nil_assignment? }
  preferred + nil_assigned
end
Sort an array of pins to put nil or undefined variables last .
1,121
# Check whether one class is a superclass of another by walking the
# subclass's ancestry chain.
#
# @param sup [String] candidate superclass name
# @param sub [String] candidate subclass name
# @return [Boolean]
def super_and_sub?(sup, sub)
  target = qualify(sup)
  ancestor = qualify(store.get_superclass(sub), sub)
  while ancestor
    return true if ancestor == target
    ancestor = qualify(store.get_superclass(ancestor), ancestor)
  end
  false
end
Check if a class is a superclass of another class .
1,122
# True if the specified position lies inside this range (inclusive of the
# endpoints).
#
# @param position [Position, Array(Integer, Integer)]
# @return [Boolean]
def contain? position
  pos = Position.normalize(position)
  # Outside the line span entirely.
  return false if pos.line < start.line || pos.line > ending.line
  # On the first line but before the start column.
  return false if pos.line == start.line && pos.character < start.character
  # On the last line but past the end column.
  return false if pos.line == ending.line && pos.character > ending.character
  true
end
True if the specified position is inside the range .
1,123
# True if the range contains the position and the position is not the
# range's exact starting point.
#
# @param position [Position, Array(Integer, Integer)]
# @return [Boolean]
def include? position
  pos = Position.normalize(position)
  at_start = pos.line == start.line && pos.character == start.character
  contain?(pos) && !at_start
end
True if the range contains the specified position and the position is not at the exact starting point of the range.
1,124
# Get the nodes containing the given location, nearest node first and the
# root node last.
#
# @param line [Integer]
# @param column [Integer]
# @return [Array] stack of AST nodes
def tree_at(line, column)
  stack = []
  inner_tree_at @node, Position.new(line, column), stack
  stack
end
Get an array of nodes containing the specified index starting with the nearest node and ending with the root .
1,125
# Synchronize the Source with an update: apply the changes, reparse, and
# return the resulting Source.
#
# @param updater [Source::Updater]
# @return [Source] self when the code is unchanged, otherwise a new Source
# @raise [RuntimeError] if the updater targets a different file
def synchronize updater
  raise 'Invalid synchronization' unless updater.filename == filename
  real_code = updater.write(@code)
  if real_code == @code
    # No textual change; just bump the version.
    @version = updater.version
    return self
  end
  synced = Source.new(real_code, filename)
  if synced.parsed?
    synced.version = updater.version
    return synced
  end
  # The updated code does not parse. Build a "repaired" variant so an AST
  # is still available, record the changed ranges as errors, and keep the
  # real (unparsable) text as the authoritative code.
  incr_code = updater.repair(@repaired)
  synced = Source.new(incr_code, filename)
  synced.error_ranges.concat(error_ranges + updater.changes.map(&:range))
  synced.code = real_code
  synced.version = updater.version
  synced
end
Synchronize the Source with an update. This method applies the changes to the code, parses the new code's AST, and returns the resulting Source object.
1,126
# A location spanning the entire file.
#
# @return [Location]
def location
  top = Position.new(0, 0)
  bottom = Position.from_offset(code, code.length)
  Location.new(filename, Range.new(top, bottom))
end
A location representing the file in its entirety .
1,127
# Get a hash of comments grouped by the line numbers of the code they are
# associated with. Memoized.
#
# @return [Hash{Integer => Array}]
def associated_comments
  @associated_comments ||= begin
    grouped = {}
    Parser::Source::Comment.associate_locations(node, comments).each_pair do |loc, all|
      next if all.empty?
      (grouped[loc.line] ||= []).concat all
    end
    grouped
  end
end
Get a hash of comments grouped by the line numbers of the associated code .
1,128
# Get a string representation of an array of comments, trimming the
# common leading column and preserving blank lines between comments.
#
# NOTE(review): the three regexes below are all / / (a single space) and
# look garbled in this copy — presumably the gsub once stripped comment
# markers and the index calls found the first non-space column. Verify
# against the upstream source.
#
# @param comments [Array<Parser::Source::Comment>]
# @return [String]
def stringify_comment_array comments
  ctxt = ''
  num = nil
  started = false
  last_line = nil
  comments.each { |l|
    p = l.text.gsub(/ /, '')
    if num.nil? and !p.strip.empty?
      # First non-empty line establishes the trim column.
      num = p.index(/ /)
      started = true
    elsif started and !p.strip.empty?
      # Later lines may be indented less; track the minimum column.
      cur = p.index(/ /)
      num = cur if cur < num
    end
    # Reinsert blank lines for gaps between consecutive comments.
    ctxt += ("\n" * (l.loc.first_line - last_line - 1)) unless last_line.nil? || l.loc.first_line - last_line <= 0
    ctxt += "#{p[num..-1]}\n" if started
    last_line = l.loc.last_line if last_line.nil? || l.loc.last_line > last_line
  }
  ctxt
end
Get a string representation of an array of comments .
1,129
# Get an array of foldable comment block ranges. Runs of consecutive
# comment lines shorter than 3 lines are excluded.
#
# @return [Array<Range>]
def foldable_comment_block_ranges
  return [] unless synchronized?
  result = []
  grouped = []
  @comments.each do |cmnt|
    if cmnt.document?
      # Documentation comments (=begin/=end) fold as their own range.
      result.push Range.from_expr(cmnt.loc.expression)
    elsif code.lines[cmnt.loc.expression.line].strip.start_with?('#')
      # Comment-only line: extend the current group if contiguous,
      # otherwise flush the group (if big enough) and start a new one.
      if grouped.empty? || cmnt.loc.expression.line == grouped.last.loc.expression.line + 1
        grouped.push cmnt
      else
        result.push Range.from_to(grouped.first.loc.expression.line, 0, grouped.last.loc.expression.line, 0) unless grouped.length < 3
        grouped = [cmnt]
      end
    else
      # Trailing comment on a code line breaks the group.
      unless grouped.length < 3
        result.push Range.from_to(grouped.first.loc.expression.line, 0, grouped.last.loc.expression.line, 0)
      end
      grouped.clear
    end
  end
  # Flush any group still open at the end of the file.
  result.push Range.from_to(grouped.first.loc.expression.line, 0, grouped.last.loc.expression.line, 0) unless grouped.length < 3
  result
end
Get an array of foldable comment block ranges . Blocks are excluded if they are less than 3 lines long .
1,130
# Merge a source into the workspace: update an existing entry or add the
# file if the workspace configuration includes it.
#
# @param source [Solargraph::Source]
# @return [Boolean] true if merged, false if excluded by the config
def merge source
  known = directory == '*' || source_hash.key?(source.filename)
  unless known
    # Re-read the config in case the file set changed on disk.
    @config = Solargraph::Workspace::Config.new(directory)
    return false unless config.calculated.include?(source.filename)
  end
  source_hash[source.filename] = source
  true
end
Merge the source . A merge will update the existing source for the file or add it to the sources if the workspace is configured to include it . The source is ignored if the configuration excludes it .
1,131
# Determine whether a file would be merged into the workspace.
#
# @param filename [String]
# @return [Boolean]
def would_merge? filename
  already = directory == '*' || source_hash.include?(filename)
  return true if already
  # Recalculate the config so newly created files are considered.
  @config = Solargraph::Workspace::Config.new(directory)
  config.calculated.include?(filename)
end
Determine whether a file would be merged into the workspace .
1,132
# Remove a source from the workspace.
#
# @param filename [String]
# @return [Boolean] true if the file was present and removed
def remove filename
  if source_hash.key?(filename)
    source_hash.delete filename
    true
  else
    false
  end
end
Remove a source from the workspace . The source will not be removed if its file exists and the workspace is configured to include it .
1,133
# True if the path resolves to a file in one of the workspace's require
# paths.
#
# @param path [String] a require path like "foo/bar"
# @return [Boolean]
def would_require? path
  require_paths.any? { |rp| File.exist?(File.join(rp, "#{path}.rb")) }
end
True if the path resolves to a file in the workspace's require paths.
1,134
# Synchronize the workspace's copy of a file with the provided updater.
#
# @param updater [Source::Updater]
# @return [Solargraph::Source] the synchronized source
def synchronize! updater
  file = updater.filename
  source_hash[file] = source_hash[file].synchronize(updater)
end
Synchronize the workspace from the provided updater .
1,135
# Generate require paths from gemspecs if they exist, or assume the
# default lib directory.
#
# @return [Array<String>]
def generate_require_paths
  return configured_require_paths unless gemspec?
  result = []
  gemspecs.each do |file|
    base = File.dirname(file)
    begin
      # NOTE(review): eval'ing a gemspec executes arbitrary code from the
      # workspace; acceptable only because the workspace is trusted.
      spec = eval(File.read(file), binding, file)
      next unless Gem::Specification === spec
      result.concat(spec.require_paths.map { |path| File.join(base, path) })
    rescue Exception => e
      # Rescues Exception (not just StandardError) because an eval'd
      # gemspec can raise almost anything; fall back to base/lib.
      Solargraph.logger.warn "Error reading #{file}: [#{e.class}] #{e.message}"
      result.push File.join(base, 'lib')
    end
  end
  result.concat config.require_paths
  # With no gemspec-derived or configured paths, default to ./lib.
  result.push File.join(directory, 'lib') if result.empty?
  result
end
Generate require paths from gemspecs if they exist or assume the default lib directory .
1,136
# Get the require paths defined in the configuration, made absolute
# against the workspace directory.
#
# @return [Array<String>]
def configured_require_paths
  return ['lib'] if directory.empty?
  paths = config.require_paths
  return [File.join(directory, 'lib')] if paths.empty?
  paths.map { |p| File.join(directory, p) }
end
Get additional require paths defined in the configuration .
1,137
# Create a source from text and add it to the workspace. Ignored unless
# the file is open in the library or included in the workspace.
#
# @param filename [String]
# @param text [String] file contents
# @return [Boolean] true if the source was merged
def create filename, text
  result = false
  mutex.synchronize do
    if contain?(filename) || open?(filename) || workspace.would_merge?(filename)
      @synchronized = false
      workspace.merge Solargraph::Source.load_string(text, filename)
      result = true
    end
  end
  result
end
Create a source to be added to the workspace . The file is ignored if it is neither open in the library nor included in the workspace .
1,138
# Create a source from a file on disk. Ignored unless the file is open in
# the library or included in the workspace.
#
# @param filename [String]
# @return [Boolean] true if the source was merged
def create_from_disk filename
  result = false
  mutex.synchronize do
    # Must be a readable regular file that the library cares about.
    if !File.directory?(filename) && File.exist?(filename) &&
       (contain?(filename) || open?(filename) || workspace.would_merge?(filename))
      @synchronized = false
      workspace.merge Solargraph::Source.load_string(File.read(filename), filename)
      result = true
    end
  end
  result
end
Create a file source from a file on disk . The file is ignored if it is neither open in the library nor included in the workspace .
1,139
# Delete a file from the library: detach it and remove it from the
# workspace (unless the workspace config keeps it).
#
# @param filename [String]
# @return [Boolean] true if the workspace removed the file
def delete filename
  detach filename
  removed = false
  mutex.synchronize do
    removed = workspace.remove(filename)
    # A successful removal invalidates the current catalog.
    @synchronized = !removed if synchronized?
  end
  removed
end
Delete a file from the library . Deleting a file will make it unavailable for checkout and optionally remove it from the workspace unless the workspace configuration determines that it should still exist .
1,140
# Get completion suggestions at the specified file location.
#
# @param filename [String]
# @param line [Integer]
# @param column [Integer]
# @return [SourceMap::Completion]
def completions_at filename, line, column
  cursor = Source::Cursor.new(checkout(filename), Position.new(line, column))
  api_map.clip(cursor).complete
end
Get completion suggestions at the specified file and location .
1,141
# Get definition suggestions for the expression at the specified file
# location.
#
# @param filename [String]
# @param line [Integer]
# @param column [Integer]
# @return [Array<Pin::Base>]
def definitions_at filename, line, column
  cursor = Source::Cursor.new(checkout(filename), Position.new(line, column))
  api_map.clip(cursor).define
end
Get definition suggestions for the expression at the specified file and location .
1,142
# Get signature suggestions for the method at the specified file location.
#
# @param filename [String]
# @param line [Integer]
# @param column [Integer]
# @return [Array<Pin::Base>]
def signatures_at filename, line, column
  cursor = Source::Cursor.new(checkout(filename), Position.new(line, column))
  api_map.clip(cursor).signify
end
Get signature suggestions for the method at the specified file and location .
1,143
# Get diagnostics for a file by running every configured reporter.
#
# @param filename [String]
# @return [Array<Hash>] diagnostic results, empty unless the file is open
# @raise [DiagnosticsError] if a configured reporter does not exist
def diagnose filename
  return [] unless open?(filename)
  catalog
  source = read(filename)
  workspace.config.reporters.each_with_object([]) do |name, results|
    reporter = Diagnostics.reporter(name)
    raise DiagnosticsError, "Diagnostics reporter #{name} does not exist" if reporter.nil?
    results.concat reporter.new.diagnose(source, api_map)
  end
end
Get diagnostics about a file .
1,144
# Update the ApiMap from the library's workspace and open files. No-op if
# the library is already synchronized.
def catalog
  @catalog_mutex.synchronize do
    # break exits the synchronize block early without recataloging.
    break if synchronized?
    logger.info "Cataloging #{workspace.directory.empty? ? 'generic workspace' : workspace.directory}"
    api_map.catalog bundle
    @synchronized = true
    logger.info "Catalog complete (#{api_map.pins.length} pins)"
  end
end
Update the ApiMap from the library's workspace and open files.
1,145
# Try to merge a source into the library's workspace. Sources excluded by
# the workspace configuration are ignored.
#
# @param source [Solargraph::Source]
# @return [Boolean] whether the workspace accepted the source
def merge source
  merged = nil
  mutex.synchronize do
    merged = workspace.merge(source)
    # A successful merge invalidates the current catalog.
    @synchronized = !merged if synchronized?
  end
  merged
end
Try to merge a source into the library's workspace. If the workspace is not configured to include the source, it gets ignored.
1,146
# Get the source for an open or workspace file.
#
# Fix: the error message previously contained the literal placeholder
# text "#(unknown)" instead of interpolating the missing filename; it now
# reports which file was not found.
#
# @param filename [String]
# @return [Solargraph::Source]
# @raise [FileNotFoundError] if the workspace does not contain the file
def read filename
  # Prefer the currently checked-out source when it matches.
  return @current if @current && @current.filename == filename
  raise FileNotFoundError, "File not found: #{filename}" unless workspace.has_file?(filename)
  workspace.source(filename)
end
Get the source for an open file or create a new source if the file exists on disk. Sources created from disk are not added to the open workspace files, i.e. the version on disk remains the authoritative version.
1,147
# Remove known header-carrying options from the given hash and return
# them keyed by their HTTP header names. Mutates options in place.
#
# @param options [Hash]
# @return [Hash] extracted headers (header name => value)
def extract_headers!(options = {})
  mapping = {
    wrap_ttl: Vault::Client::WRAP_TTL_HEADER,
  }
  headers = {}
  mapping.each do |key, header|
    # Only truthy values are extracted (and removed from options).
    headers[header] = options.delete(key) if options[key]
  end
  headers
end
Removes the given header fields from options and returns the result . This modifies the given options in place .
1,148
# Unseal the vault with the given key shard.
#
# @param shard [String] one unseal key
# @return [SealStatus]
def unseal(shard)
  body = JSON.fast_generate(key: shard)
  SealStatus.decode(client.put("/v1/sys/unseal", body))
end
Unseal the vault with the given shard .
1,149
# List the secret keys at the given path, if the path supports listing.
#
# @param path [String]
# @param options [Hash] header options consumed by extract_headers!
# @return [Array<String>] the keys, or [] when the path does not exist
def list(path, options = {})
  headers = extract_headers!(options)
  json = client.list("/v1/#{encode_path(path)}", {}, headers)
  json[:data][:keys] || []
rescue HTTPError => e
  # A 404 simply means there is nothing at this path.
  raise unless e.code == 404
  []
end
List the secrets at the given path, if the path supports listing. If the path does not exist, an empty list is returned.
1,150
# Read the secret at the given path.
#
# @param path [String]
# @param options [Hash] header options consumed by extract_headers!
# @return [Secret, nil] nil when the secret does not exist (404)
def read(path, options = {})
  headers = extract_headers!(options)
  response = client.get("/v1/#{encode_path(path)}", {}, headers)
  Secret.decode(response)
rescue HTTPError => e
  raise unless e.code == 404
  nil
end
Read the secret at the given path . If the secret does not exist + nil + will be returned .
1,151
# Unwrap the data stored against the given wrapping token by reading the
# cubbyhole response and decoding the nested secret.
#
# @param wrapper [String] the temporary wrapping token
# @return [Secret, nil] nil when nothing is wrapped or the path is 404
def unwrap(wrapper)
  client.with_token(wrapper) do |scoped|
    outer = Secret.decode(scoped.get("/v1/cubbyhole/response"))
    return nil if outer.nil? || outer.data.nil? || outer.data[:response].nil?
    # The real secret is JSON-encoded inside the :response field.
    inner = JSON.parse(outer.data[:response], symbolize_names: true)
    return Secret.decode(inner)
  end
rescue HTTPError => e
  raise unless e.code == 404
  nil
end
Unwrap the data stored against the given token . If the secret does not exist nil will be returned .
1,152
# Unwrap a token from a wrapped response, given either the temporary
# token string or the wrapped Secret itself.
#
# Fix: removed the no-op `rescue HTTPError => e; raise` clause — it
# re-raised the exception unchanged and left `e` unused, so it had no
# effect on behavior.
#
# @param wrapper [String, Secret] the wrapping token or wrapped secret
# @return [String, nil] the unwrapped client token, or nil if absent
def unwrap_token(wrapper)
  # Accept a wrapped Secret and pull out its wrapping token.
  wrapper = wrapper.wrap_info.token if wrapper.is_a?(Secret)
  response = unwrap(wrapper)
  return nil if response.nil? || response.auth.nil?
  response.auth.client_token
end
Unwrap a token in a wrapped response given the temporary token .
1,153
# List all enabled auth backends in Vault.
#
# @return [Hash{Symbol => Auth}] keyed by mount point without trailing '/'
def auths
  json = client.get("/v1/sys/auth")
  # Newer Vault versions nest the payload under :data.
  json = json[:data] if json[:data]
  json.each_with_object({}) do |(k, v), acc|
    acc[k.to_s.chomp("/").to_sym] = Auth.decode(v)
  end
end
List all auths in Vault .
1,154
# Enable an authentication backend of the given type at the given path.
#
# @param path [String] mount point
# @param type [String] backend type (e.g. "github")
# @param description [String, nil] optional description
# @return [true]
def enable_auth(path, type, description = nil)
  payload = { type: type }
  payload[:description] = description unless description.nil?
  client.post("/v1/sys/auth/#{encode_path(path)}", JSON.fast_generate(payload))
  true
end
Enable a particular authentication at the given path .
1,155
# Read the tuning configuration for the given auth path.
#
# @param path [String]
# @return [AuthConfig, nil] nil when the path does not exist (404)
def auth_tune(path)
  AuthConfig.decode(client.get("/v1/sys/auth/#{encode_path(path)}/tune"))
rescue HTTPError => e
  raise unless e.code == 404
  nil
end
Read the given auth path's configuration.
1,156
# Write the tuning configuration for the given auth path.
#
# @param path [String]
# @param config [Hash] tuning parameters
# @return [true, Secret] true when the server returns no body
def put_auth_tune(path, config = {})
  json = client.put("/v1/sys/auth/#{encode_path(path)}/tune", JSON.fast_generate(config))
  json.nil? ? true : Secret.decode(json)
end
Write the given auth path's configuration.
1,157
# True if the connection should be recycled: it has served the maximum
# request count, or it has been idle longer than the idle timeout.
#
# @param connection [Object] responds to #requests and #last_use
# @return [Boolean]
def expired? connection
  return true if @max_requests && connection.requests >= @max_requests
  return false unless @idle_timeout
  # A zero idle timeout expires every connection immediately.
  return true if @idle_timeout.zero?
  Time.now - connection.last_use > @idle_timeout
end
Returns true if the connection should be reset due to an idle timeout or maximum request count false otherwise .
1,158
# Is +req+ idempotent according to RFC 2616? Returns true for the safe
# request classes, nil otherwise (mirroring the case/when fall-through).
#
# @param req [Net::HTTPRequest]
# @return [true, nil]
def idempotent? req
  safe = [Net::HTTP::Delete, Net::HTTP::Get, Net::HTTP::Head,
          Net::HTTP::Options, Net::HTTP::Put, Net::HTTP::Trace]
  true if safe.any? { |klass| klass === req }
end
Is + req + idempotent according to RFC 2616?
1,159
# Pipeline +requests+ to the HTTP server at +uri+, yielding responses if
# a block is given.
#
# @param uri [URI]
# @param requests [Array<Net::HTTPRequest>]
# @return responses from the pipelined requests
def pipeline uri, requests, &block
  connection_for uri do |conn|
    conn.http.pipeline requests, &block
  end
end
Pipelines +requests+ to the HTTP server at +uri+, yielding responses if a block is given. Returns all responses received.
1,160
# Build a proxy URI from the http_proxy/no_proxy environment variables.
#
# @return [URI, nil] nil when no proxy is configured or no_proxy is '*'
def proxy_from_env
  env_proxy = ENV['http_proxy'] || ENV['HTTP_PROXY']
  return nil if env_proxy.nil? || env_proxy.empty?

  uri = URI normalize_uri env_proxy

  env_no_proxy = ENV['no_proxy'] || ENV['NO_PROXY']
  # '*' disables proxying entirely.
  return nil if env_no_proxy == '*'
  # Carry the no_proxy list along in the query string.
  uri.query = "no_proxy=#{escape(env_no_proxy)}" if env_no_proxy

  unless uri.user || uri.password
    uri.user = escape ENV['http_proxy_user'] || ENV['HTTP_PROXY_USER']
    uri.password = escape ENV['http_proxy_pass'] || ENV['HTTP_PROXY_PASS']
  end

  uri
end
Creates a URI for an HTTP proxy server from ENV variables .
1,161
# Returns true when the proxy should be bypassed for the given host, i.e.
# when a no_proxy entry is a suffix of "host" or "host:port".
#
# @param host [String]
# @param port [Integer, String]
# @return [Boolean]
def proxy_bypass? host, port
  host = host.downcase
  host_port = [host, port].join ':'
  @no_proxy.any? do |name|
    host[-name.length, name.length] == name ||
      host_port[-name.length, name.length] == name
  end
end
Returns true when the proxy should be bypassed for the given host.
1,162
# Raise an Error for +exception+ which resulted from attempting +req+ on
# +connection+, finishing the connection first.
#
# @param exception [Exception] the underlying failure
# @param req [Net::HTTPRequest] the request that was being attempted
# @param connection [Object] the connection the failure occurred on
# @raise [Error] always
def request_failed exception, req, connection
  message = "too many connection resets " \
            "(due to #{exception.message} - #{exception.class}) " \
            "#{error_message connection}"
  finish connection
  # Preserve the original backtrace on the new error.
  raise Error, message, exception.backtrace
end
Raises an Error for + exception + which resulted from attempting the request + req + on the + connection + .
1,163
# Create a GET request when given a URI (otherwise use the request
# as-is), then apply default, override, and keep-alive headers.
#
# @param req_or_uri [URI, Net::HTTPRequest]
# @return [Net::HTTPRequest] the prepared request
def request_setup req_or_uri
  req = if URI === req_or_uri
          Net::HTTP::Get.new req_or_uri.request_uri
        else
          req_or_uri
        end

  @headers.each { |pair| req.add_field(*pair) }
  @override_headers.each { |name, value| req[name] = value }

  # Ask the server to keep the connection open unless the caller already
  # set a Connection header.
  unless req['Connection']
    req.add_field 'Connection', 'keep-alive'
    req.add_field 'Keep-Alive', @keep_alive
  end

  req
end
Creates a GET request if + req_or_uri + is a URI and adds headers to the request .
1,164
# Authenticate via the token auth method by looking up the token itself.
# On failure the client's previous token is restored.
#
# @param new_token [String]
# @return [Secret]
def token(new_token)
  old_token = client.token
  client.token = new_token
  Secret.decode(client.get("/v1/auth/token/lookup-self"))
rescue
  # Roll back to the previous token before propagating the error.
  client.token = old_token
  raise
end
Authenticate via the token authentication method . This authentication method is a bit bizarre because you already have a token but hey whatever floats your boat .
1,165
# Authenticate via the app-id auth method. On success the resulting token
# is stored on the client for future requests.
#
# @param app_id [String]
# @param user_id [String]
# @param options [Hash] extra login payload fields
# @return [Secret]
def app_id(app_id, user_id, options = {})
  payload = { app_id: app_id, user_id: user_id }.merge(options)
  secret = Secret.decode(client.post("/v1/auth/app-id/login", JSON.fast_generate(payload)))
  client.token = secret.auth.client_token
  secret
end
Authenticate via the app - id authentication method . If authentication is successful the resulting token will be stored on the client and used for future requests .
1,166
# Authenticate via the approle auth method. On success the resulting
# token is stored on the client for future requests.
#
# @param role_id [String]
# @param secret_id [String, nil] optional secret ID for the role
# @return [Secret]
def approle(role_id, secret_id = nil)
  payload = { role_id: role_id }
  payload[:secret_id] = secret_id if secret_id
  secret = Secret.decode(client.post("/v1/auth/approle/login", JSON.fast_generate(payload)))
  client.token = secret.auth.client_token
  secret
end
Authenticate via the approle authentication method . If authentication is successful the resulting token will be stored on the client and used for future requests .
1,167
# Authenticate via the userpass auth method. On success the resulting
# token is stored on the client for future requests.
#
# @param username [String]
# @param password [String]
# @param options [Hash] extra login payload fields
# @return [Secret]
def userpass(username, password, options = {})
  payload = { password: password }.merge(options)
  secret = Secret.decode(client.post("/v1/auth/userpass/login/#{encode_path(username)}", JSON.fast_generate(payload)))
  client.token = secret.auth.client_token
  secret
end
Authenticate via the userpass authentication method . If authentication is successful the resulting token will be stored on the client and used for future requests .
1,168
# Authenticate via the GitHub auth method. On success the resulting token
# is stored on the client for future requests.
#
# @param github_token [String] a GitHub personal access token
# @param path [String] login endpoint path
# @return [Secret]
def github(github_token, path = "/v1/auth/github/login")
  payload = { token: github_token }
  secret = Secret.decode(client.post(path, JSON.fast_generate(payload)))
  client.token = secret.auth.client_token
  secret
end
Authenticate via the GitHub authentication method . If authentication is successful the resulting token will be stored on the client and used for future requests .
1,169
# Authenticate via the AWS EC2 auth method. On success the resulting
# token is stored on the client for future requests.
#
# @param role [String]
# @param pkcs7 [String] the instance identity document signature
# @param nonce [String, nil] optional reauthentication nonce
# @param route [String, nil] custom login route
# @return [Secret]
def aws_ec2(role, pkcs7, nonce = nil, route = nil)
  route ||= '/v1/auth/aws-ec2/login'
  payload = { role: role, pkcs7: pkcs7 }
  payload[:nonce] = nonce if nonce
  secret = Secret.decode(client.post(route, JSON.fast_generate(payload)))
  client.token = secret.auth.client_token
  secret
end
Authenticate via the AWS EC2 authentication method . If authentication is successful the resulting token will be stored on the client and used for future requests .
1,170
# Authenticate via the GCP auth method. On success the issued client token
# is stored on the client and used for all future requests.
#
# @param role [String] the Vault role to authenticate against
# @param jwt [String] a signed JWT proving identity
# @param path [String] mount path of the GCP auth method
# @return [Secret] the authentication secret
def gcp(role, jwt, path = "gcp")
  body = JSON.fast_generate(role: role, jwt: jwt)
  response = client.post("/v1/auth/#{CGI.escape(path)}/login", body)
  Secret.decode(response).tap { |secret| client.token = secret.auth.client_token }
end
Authenticate via the GCP authentication method . If authentication is successful the resulting token will be stored on the client and used for future requests .
1,171
# Authenticate via the TLS certificate auth method. A duplicate client is
# used for the login call so the optional PEM only applies to that request;
# on success the issued token is stored on the original client.
#
# @param pem [String, nil] optional PEM-encoded cert+key contents
# @param path [String] mount path of the cert auth method
# @return [Secret] the authentication secret
def tls(pem = nil, path = "cert")
  authenticator = client.dup
  authenticator.ssl_pem_contents = pem unless pem.nil?
  response = authenticator.post("/v1/auth/#{CGI.escape(path)}/login")
  Secret.decode(response).tap { |secret| client.token = secret.auth.client_token }
end
Authenticate via a TLS authentication method . If authentication is successful the resulting token will be stored on the client and used for future requests .
1,172
# Fetch the policy with the given name.
#
# @param name [String] the policy name
# @return [Policy, nil] the policy, or +nil+ if it does not exist (404)
def policy(name)
  Policy.decode(client.get("/v1/sys/policy/#{encode_path(name)}"))
rescue HTTPError => e
  # A 404 means "no such policy" — surface that as nil, re-raise the rest.
  raise unless e.code == 404
  nil
end
Get the policy by the given name . If a policy does not exist by that name + nil + is returned .
1,173
# Create or overwrite a policy with the given name and rules.
#
# @param name [String] the policy name
# @param rules [String] the policy document
# @return [true]
def put_policy(name, rules)
  client.put("/v1/sys/policy/#{encode_path(name)}", JSON.fast_generate(rules: rules))
  true
end
Create a new policy with the given name and rules .
1,174
# List all audit devices enabled on the Vault.
#
# @return [Hash<Symbol, Audit>] mount name (trailing "/" stripped) => Audit
def audits
  response = client.get("/v1/sys/audit")
  # Newer Vault versions nest the listing under a :data key.
  response = response[:data] if response[:data]
  response.each_with_object({}) do |(key, value), result|
    result[key.to_s.chomp("/").to_sym] = Audit.decode(value)
  end
end
List all audits for the vault .
1,175
# Compute the HMAC an audit device at +path+ would produce for +input+,
# allowing audit-log entries to be matched against known values.
#
# @param path [String] mount path of the audit device
# @param input [String] the value to hash
# @return [String] the HMAC digest
def audit_hash(path, input)
  response = client.post("/v1/sys/audit-hash/#{encode_path(path)}", JSON.fast_generate(input: input))
  # Newer Vault versions nest the result under a :data key.
  response = response[:data] if response[:data]
  response[:hash]
end
Generates a HMAC verifier for a given input .
1,176
# List the accessors of all tokens issued by the Vault.
#
# @param options [Hash] request options; Vault-specific headers are
#   extracted by extract_headers!
# @return [Secret] secret whose data contains the accessor keys
def accessors(options = {})
  headers = extract_headers!(options)
  response = client.list("/v1/auth/token/accessors", options, headers)
  Secret.decode(response)
end
Lists all token accessors .
1,177
# Create an authentication token. The options are forwarded to the Vault
# server unvalidated, so available parameters depend on the server version.
#
# @param options [Hash] token creation parameters (ttl, policies, meta, ...)
# @return [Secret] the secret containing the new token
def create(options = {})
  headers = extract_headers!(options)
  response = client.post("/v1/auth/token/create", JSON.fast_generate(options), headers)
  Secret.decode(response)
end
Create an authentication token . Note that the parameters specified below are not validated and passed directly to the Vault server . Depending on the version of Vault in operation some of these options may not work and newer options may be available that are not listed here .
1,178
# Create an authentication token against the token role with the given
# name; the role constrains which options are permitted.
#
# @param name [String] the token role name
# @param options [Hash] token creation parameters
# @return [Secret] the secret containing the new token
def create_with_role(name, options = {})
  headers = extract_headers!(options)
  response = client.post("/v1/auth/token/create/#{encode_path(name)}", JSON.fast_generate(options), headers)
  Secret.decode(response)
end
Create an authentication token against the token role with the given name .
1,179
# Look up information about the given token.
#
# @param token [String] the token to inspect
# @param options [Hash] request options (headers extracted)
# @return [Secret] token metadata
def lookup(token, options = {})
  headers = extract_headers!(options)
  body = JSON.fast_generate(token: token)
  Secret.decode(client.post("/v1/auth/token/lookup", body, headers))
end
Lookup information about the given token .
1,180
# Look up information about the token behind the given accessor.
#
# @param accessor [String] the token accessor
# @param options [Hash] request options (headers extracted)
# @return [Secret] token metadata
def lookup_accessor(accessor, options = {})
  headers = extract_headers!(options)
  body = JSON.fast_generate(accessor: accessor)
  Secret.decode(client.post("/v1/auth/token/lookup-accessor", body, headers))
end
Lookup information about the given token accessor .
1,181
# Renew the given authentication token.
#
# @param token [String] the token to renew
# @param increment [Integer] requested lease extension in seconds
# @param options [Hash] request options (headers extracted)
# @return [Secret] the renewed token's secret
def renew(token, increment = 0, options = {})
  headers = extract_headers!(options)
  body = JSON.fast_generate(token: token, increment: increment)
  Secret.decode(client.put("/v1/auth/token/renew", body, headers))
end
Renew the given authentication token .
1,182
# Renew the lease associated with the calling token.
#
# @param increment [Integer] requested lease extension in seconds
# @param options [Hash] request options (headers extracted)
# @return [Secret] the renewed token's secret
def renew_self(increment = 0, options = {})
  headers = extract_headers!(options)
  body = JSON.fast_generate(increment: increment)
  Secret.decode(client.put("/v1/auth/token/renew-self", body, headers))
end
Renews a lease associated with the calling token .
1,183
# Lazily build (and memoize) the persistent HTTP connection pool used for
# all requests. Construction is guarded by @lock so concurrent callers
# share a single pool. Configures proxy, timeouts, and TLS from the
# client's settings.
#
# @return [PersistentHTTP] the shared connection pool
def pool
  @lock.synchronize do
    # Fast path: pool already built by a previous caller.
    return @nhp if @nhp

    @nhp = PersistentHTTP.new("vault-ruby", nil, pool_size)

    if proxy_address
      # proxy_address is host-only here; scheme is forced to http.
      proxy_uri = URI.parse "http://#{proxy_address}"
      proxy_uri.port = proxy_port if proxy_port
      if proxy_username
        proxy_uri.user = proxy_username
        proxy_uri.password = proxy_password
      end
      @nhp.proxy = proxy_uri
    end

    # Specific timeouts win; the generic `timeout` is the fallback.
    if open_timeout || timeout
      @nhp.open_timeout = (open_timeout || timeout).to_i
    end

    if read_timeout || timeout
      @nhp.read_timeout = (read_timeout || timeout).to_i
    end

    # Secure defaults: verify peers and pin TLS 1.2.
    @nhp.verify_mode = OpenSSL::SSL::VERIFY_PEER
    @nhp.ssl_version = "TLSv1_2"

    @nhp.ciphers = ssl_ciphers

    # Client certificate: inline PEM contents take precedence over a file
    # path. The same PEM blob is expected to hold both cert and key.
    pem = ssl_pem_contents || (ssl_pem_file ? File.read(ssl_pem_file) : nil)

    if pem
      @nhp.cert = OpenSSL::X509::Certificate.new(pem)
      @nhp.key = OpenSSL::PKey::RSA.new(pem, ssl_pem_passphrase)
    end

    if ssl_ca_cert
      @nhp.ca_file = ssl_ca_cert
    end

    if ssl_ca_path
      @nhp.ca_path = ssl_ca_path
    end

    if ssl_cert_store
      @nhp.cert_store = ssl_cert_store
    end

    # Explicit opt-out of verification overrides the VERIFY_PEER default.
    if !ssl_verify
      @nhp.verify_mode = OpenSSL::SSL::VERIFY_NONE
    end

    if ssl_timeout || timeout
      @nhp.ssl_timeout = (ssl_timeout || timeout).to_i
    end

    @nhp
  end
end
Create a new Client with the given options . Any options given take precedence over the default options .
1,184
# Perform a LIST request (implemented as GET with list=true).
#
# @param path [String] request path
# @param params [Hash] query parameters
# @param headers [Hash] request headers
def list(path, params = {}, headers = {})
  request(:get, path, params.merge(list: true), headers)
end
Perform a LIST request .
1,185
# Renew the lease with the given ID.
#
# @param id [String] the lease ID (interpolated raw; lease IDs may contain
#   slashes)
# @param increment [Integer] requested extension in seconds
# @return [Secret]
def renew(id, increment = 0)
  body = JSON.fast_generate(increment: increment)
  Secret.decode(client.put("/v1/sys/renew/#{id}", body))
end
Renew a lease with the given ID .
1,186
# Build a hash representation of this response from its declared fields.
# A field's :as option, when present, names the reader method to call.
# Non-array values responding to #to_h are recursively converted.
#
# @return [Hash]
def to_h
  self.class.fields.each_with_object({}) do |(key, opts), hash|
    value = opts[:as].nil? ? public_send(key) : public_send(opts[:as])
    if !value.nil? && !value.is_a?(Array) && value.respond_to?(:to_h)
      value = value.to_h
    end
    hash[key] = value
  end
end
Create a hash - based representation of this response .
1,187
# Initialize a new Vault. Legacy option names (:shares, :threshold) are
# honored as fallbacks for :secret_shares / :secret_threshold.
#
# @param options [Hash] initialization parameters
# @return [InitResponse] unseal keys and initial root token
def init(options = {})
  body = {
    root_token_pgp_key: options.fetch(:root_token_pgp_key, nil),
    secret_shares:      options.fetch(:secret_shares, options.fetch(:shares, 5)),
    secret_threshold:   options.fetch(:secret_threshold, options.fetch(:threshold, 3)),
    pgp_keys:           options.fetch(:pgp_keys, nil),
    stored_shares:      options.fetch(:stored_shares, nil),
    recovery_shares:    options.fetch(:recovery_shares, nil),
    recovery_threshold: options.fetch(:recovery_threshold, nil),
    recovery_pgp_keys:  options.fetch(:recovery_pgp_keys, nil),
  }
  InitResponse.decode(client.put("/v1/sys/init", JSON.fast_generate(body)))
end
Initialize a new vault .
1,188
# Create a new AppRole, or update an existing one, with the given name
# and attributes.
#
# @param name [String] the AppRole name
# @param options [Hash] AppRole attributes (headers extracted)
# @return [true]
def set_role(name, options = {})
  headers = extract_headers!(options)
  client.post("/v1/auth/approle/role/#{encode_path(name)}", JSON.fast_generate(options), headers)
  true
end
Creates a new AppRole or update an existing AppRole with the given name and attributes .
1,189
# Fetch the AppRole with the given name.
#
# @param name [String] the AppRole name
# @return [Secret, nil] the AppRole, or +nil+ if it does not exist (404)
def role(name)
  Secret.decode(client.get("/v1/auth/approle/role/#{encode_path(name)}"))
rescue HTTPError => e
  raise unless e.code == 404
  nil
end
Gets the AppRole by the given name . If an AppRole does not exist by that name + nil + is returned .
1,190
# Read the RoleID of an existing AppRole.
#
# @param name [String] the AppRole name
# @return [String, nil] the RoleID, or +nil+ if the AppRole does not
#   exist (404)
def role_id(name)
  response = client.get("/v1/auth/approle/role/#{encode_path(name)}/role-id")
  Secret.decode(response).data[:role_id]
rescue HTTPError => e
  raise unless e.code == 404
  nil
end
Reads the RoleID of an existing AppRole . If an AppRole does not exist by that name + nil + is returned .
1,191
# Update the RoleID of an existing AppRole to a custom value.
#
# @param name [String] the AppRole name
# @param role_id [String] the custom RoleID
# @return [true]
def set_role_id(name, role_id)
  body = JSON.fast_generate(role_id: role_id)
  client.post("/v1/auth/approle/role/#{encode_path(name)}/role-id", body)
  true
end
Updates the RoleID of an existing AppRole to a custom value .
1,192
# Generate and issue a new SecretID on an existing AppRole. When a
# :secret_id option is supplied, the custom-secret-id endpoint is used so
# the caller-provided value is registered instead of a generated one.
#
# @param role_name [String] the AppRole name
# @param options [Hash] SecretID attributes (headers extracted)
# @return [Secret] the issued SecretID
def create_secret_id(role_name, options = {})
  headers = extract_headers!(options)
  endpoint = options[:secret_id] ? "custom-secret-id" : "secret-id"
  response = client.post("/v1/auth/approle/role/#{encode_path(role_name)}/#{endpoint}", JSON.fast_generate(options), headers)
  Secret.decode(response)
end
Generates and issues a new SecretID on an existing AppRole .
1,193
# Read the properties of a SecretID issued against an AppRole. Returns
# +nil+ when the SecretID does not exist.
#
# @param role_name [String] the AppRole name
# @param secret_id [String] the SecretID to look up
# @return [Secret, nil]
def secret_id(role_name, secret_id)
  opts = { secret_id: secret_id }
  # Preferred: POST to the lookup endpoint.
  json = client.post("/v1/auth/approle/role/#{encode_path(role_name)}/secret-id/lookup", JSON.fast_generate(opts), {})
  return nil unless json
  return Secret.decode(json)
rescue HTTPError => e
  # 404/405 from the lookup endpoint: fall back to the GET form —
  # presumably for older Vault servers that lack .../secret-id/lookup;
  # TODO(review): confirm the minimum Vault version for the POST endpoint.
  if e.code == 404 || e.code == 405
    begin
      json = client.get("/v1/auth/approle/role/#{encode_path(role_name)}/secret-id/#{encode_path(secret_id)}")
      return Secret.decode(json)
    rescue HTTPError => e
      # 404 on the fallback means the SecretID truly does not exist.
      return nil if e.code == 404
      raise e
    end
  end
  raise
end
Reads out the properties of a SecretID assigned to an AppRole . If the specified SecretID does not exist + nil + is returned .
1,194
# List the accessors of all SecretIDs (including custom ones) issued
# against the given AppRole.
#
# @param role_name [String] the AppRole name
# @param options [Hash] request options (headers extracted)
# @return [Array<String>] accessor keys; empty when the role has none or
#   does not exist (404)
def secret_id_accessors(role_name, options = {})
  headers = extract_headers!(options)
  response = client.list("/v1/auth/approle/role/#{encode_path(role_name)}/secret-id", options, headers)
  Secret.decode(response).data[:keys] || []
rescue HTTPError => e
  raise unless e.code == 404
  []
end
Lists the accessors of all the SecretIDs issued against the AppRole . This includes the accessors for custom SecretIDs as well . If there are no SecretIDs against this role an empty array will be returned .
1,195
# Percent-encode a string for use in a URL path. Used instead of
# CGI.escape because in a path a space must become "%20", not "+".
#
# The regex matches runs of bytes outside the URL-path-safe set
# (alphanumerics plus "_", ".", "-"); each matched run is expanded
# byte-by-byte ("H2" per byte) into uppercase %XX escapes. The original
# pattern matched only a literal space, leaving every other unsafe byte
# unescaped, while the replacement logic was already written for
# multi-byte runs.
#
# @param path [String] the raw path segment
# @return [String] the percent-encoded segment
def encode_path(path)
  path.b.gsub(%r!([^a-zA-Z0-9_.-]+)!) do |m|
    '%' + m.unpack('H2' * m.bytesize).join('%').upcase
  end
end
Encodes a string according to the rules for URL paths . This is used as opposed to CGI . escape because in a URL path space needs to be escaped as %20 and CGI . escapes a space as + .
1,196
# List all secret mounts in the Vault.
#
# @return [Hash<Symbol, Mount>] mount path (trailing "/" stripped) => Mount
def mounts
  response = client.get("/v1/sys/mounts")
  # Newer Vault versions nest the listing under a :data key.
  response = response[:data] if response[:data]
  response.each_with_object({}) do |(key, value), result|
    result[key.to_s.chomp("/").to_sym] = Mount.decode(value)
  end
end
List all mounts in the vault .
1,197
# Tune configuration of the mount at the given path.
#
# @param path [String] the mount path
# @param data [Hash] tuning parameters (default_lease_ttl, max_lease_ttl, ...)
# @return [true]
def mount_tune(path, data = {})
  client.post("/v1/sys/mounts/#{encode_path(path)}/tune", JSON.fast_generate(data))
  true
end
Tune a mount at the given path .
1,198
# Move a mount from one path to another.
#
# @param from [String] current mount path
# @param to [String] new mount path
# @return [true]
def remount(from, to)
  body = JSON.fast_generate(from: from, to: to)
  client.post("/v1/sys/remount", body)
  true
end
Change the name of the mount
1,199
# Full path to the coverage output directory, derived from the configured
# root and coverage_dir. The directory is created on first access and the
# result memoized.
#
# @return [String] absolute path to the (existing) output directory
def coverage_path
  @coverage_path ||= begin
    path = File.expand_path(coverage_dir, root)
    FileUtils.mkdir_p path
    path
  end
end
Returns the full path to the output directory using SimpleCov . root and SimpleCov . coverage_dir so you can adjust this by configuring those values . Will create the directory if it s missing