idx
int64
0
252k
question
stringlengths
48
5.28k
target
stringlengths
5
1.23k
243,600
def get_suffixes():
    """Return every filename suffix supported by this libvips build.

    Requires libvips >= 8.8; on older versions an empty list is returned.
    """
    suffixes = []
    if at_least_libvips(8, 8):
        raw = vips_lib.vips_foreign_get_suffixes()
        index = 0
        # The C array is NULL-terminated; each entry must be g_free()d.
        while raw[index] != ffi.NULL:
            suffix = _to_string(raw[index])
            if suffix not in suffixes:
                suffixes.append(suffix)
            glib_lib.g_free(raw[index])
            index += 1
        glib_lib.g_free(raw)
    return suffixes
Get a list of all the filename suffixes supported by libvips .
243,601
def at_least_libvips(x, y):
    """Return True when the linked libvips is at least version ``x.y``."""
    major = version(0)
    minor = version(1)
    if major != x:
        return major > x
    return minor >= y
Is this at least libvips x . y?
243,602
def type_map(gtype, fn):
    """Invoke ``fn`` once for every child type of ``gtype``."""
    # Wrap the Python callable so the GObject machinery can call back into it.
    callback = ffi.callback('VipsTypeMap2Fn', fn)
    return vips_lib.vips_type_map(gtype, callback, ffi.NULL, ffi.NULL)
Map fn over all child types of gtype .
243,603
def basicConfig(**kwargs):
    """Call ``logging.basicConfig`` and replace the formatter it creates.

    The handler installed by ``basicConfig`` keeps its stream and level;
    only its formatter is swapped for a ``ColoredFormatter`` built from the
    same ``format``/``datefmt`` values the caller passed (or the defaults).
    """
    logging.basicConfig(**kwargs)
    # Mutating root handlers must happen under the logging module lock.
    # NOTE(review): _acquireLock/_releaseLock are private logging APIs —
    # confirm they exist on the targeted Python versions.
    logging._acquireLock()
    try:
        handler = logging.root.handlers[0]
        formatter = ColoredFormatter(
            fmt=kwargs.get('format', BASIC_FORMAT),
            datefmt=kwargs.get('datefmt'),
        )
        handler.setFormatter(formatter)
    finally:
        logging._releaseLock()
Call logging . basicConfig and override the formatter it creates .
243,604
def ensure_configured(func):
    """Decorator: run ``basicConfig()`` first when logging has no handlers."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if not logging.root.handlers:
            basicConfig()
        return func(*args, **kwargs)
    return wrapper
Modify a function to call basicConfig first if no handlers exist .
243,605
def color(self, log_colors, level_name):
    """Return escape codes only when the output stream is a TTY."""
    # Emptying the color table suppresses colors for pipes and files.
    effective = log_colors if self.stream.isatty() else {}
    return ColoredFormatter.color(self, effective, level_name)
Only returns colors if STDOUT is a TTY .
243,606
def setup_logger ( ) : formatter = ColoredFormatter ( "%(log_color)s%(levelname)-8s%(reset)s %(blue)s%(message)s" , datefmt = None , reset = True , log_colors = { 'DEBUG' : 'cyan' , 'INFO' : 'green' , 'WARNING' : 'yellow' , 'ERROR' : 'red' , 'CRITICAL' : 'red' , } ) logger = logging . getLogger ( 'example' ) handler = ...
Return a logger with a default ColoredFormatter .
243,607
def _extract_annotations_from_task ( self , task ) : annotations = list ( ) if 'annotations' in task : existing_annotations = task . pop ( 'annotations' ) for v in existing_annotations : if isinstance ( v , dict ) : annotations . append ( v [ 'description' ] ) else : annotations . append ( v ) for key in list ( task . ...
Removes annotations from a task and returns a list of annotations
243,608
def task_done(self, **kw):
    """Mark a pending task as completed.

    An ``end`` keyword may be supplied to set the completion date.
    Raises ValueError when the task is not pending.
    """
    def check(task):
        if not Status.is_pending(task['status']):
            raise ValueError("Task is not pending.")
    return self._task_change_status(Status.COMPLETED, check, **kw)
Marks a pending task as done optionally specifying a completion date with the end argument .
243,609
def task_delete(self, **kw):
    """Mark a task as deleted (optionally with an ``end`` date).

    Raises ValueError when the task is already deleted.
    """
    def check(task):
        if task['status'] == Status.DELETED:
            raise ValueError("Task is already deleted.")
    return self._task_change_status(Status.DELETED, check, **kw)
Marks a task as deleted optionally specifying a completion date with the end argument .
243,610
def _execute ( self , * args ) : command = ( [ 'task' , 'rc:%s' % self . config_filename , ] + self . get_configuration_override_args ( ) + [ six . text_type ( arg ) for arg in args ] ) for i in range ( len ( command ) ) : if isinstance ( command [ i ] , six . text_type ) : command [ i ] = ( taskw . utils . clean_ctrl_...
Execute a given taskwarrior command with arguments
243,611
def load_tasks(self, command='all'):
    """Return a dict mapping each status database to its list of tasks.

    Waiting tasks are folded into the 'pending' bucket when present.
    """
    results = {
        db: self._get_task_objects('status:%s' % db, 'export')
        for db in Command.files(command)
    }
    if 'pending' in results:
        waiting = self._get_task_objects('status:waiting', 'export')
        results['pending'].extend(waiting)
    return results
Returns a dictionary of task lists, keyed by status, for the given command .
243,612
def filter_tasks(self, filter_dict):
    """Return the tasks matching ``filter_dict`` from taskwarrior."""
    query = taskw.utils.encode_query(filter_dict, self.get_version())
    return self._get_task_objects('export', *query)
Return a filtered list of tasks from taskwarrior .
243,613
def task_annotate(self, task, annotation):
    """Attach ``annotation`` to ``task`` and return the refreshed task."""
    self._execute(task['uuid'], 'annotate', '--', annotation)
    _, refreshed = self.get_task(uuid=task[six.u('uuid')])
    return refreshed
Annotates a task .
243,614
def task_denotate(self, task, annotation):
    """Remove ``annotation`` from ``task`` and return the refreshed task."""
    self._execute(task['uuid'], 'denotate', '--', annotation)
    _, refreshed = self.get_task(uuid=task[six.u('uuid')])
    return refreshed
Removes an annotation from a task .
243,615
def task_delete(self, **kw):
    """Mark a task as deleted via the task CLI and return its new state.

    Raises ValueError when the task is already deleted.
    """
    task_id, task = self.get_task(**kw)
    if task['status'] == Status.DELETED:
        raise ValueError("Task is already deleted.")
    self._execute(task_id, 'delete')
    return self.get_task(uuid=task['uuid'])[1]
Marks a task as deleted .
243,616
def task_start(self, **kw):
    """Start a task and return its refreshed representation."""
    task_id, task = self.get_task(**kw)
    self._execute(task_id, 'start')
    return self.get_task(uuid=task['uuid'])[1]
Marks a task as started .
243,617
def task_stop(self, **kw):
    """Stop a task and return its refreshed representation."""
    task_id, task = self.get_task(**kw)
    self._execute(task_id, 'stop')
    return self.get_task(uuid=task['uuid'])[1]
Marks a task as stopped .
243,618
def to_file(cls, status):
    """Return the data file that stores tasks with the given ``status``."""
    mapping = {
        Status.PENDING: DataFile.PENDING,
        Status.WAITING: DataFile.PENDING,
        Status.COMPLETED: DataFile.COMPLETED,
        Status.DELETED: DataFile.COMPLETED,
    }
    return mapping[status]
Returns the file in which this task is stored .
243,619
def from_stub(cls, data, udas=None):
    """Build a Task from an already-deserialized dict.

    Every value is serialized through the field map (standard fields
    merged with any user-defined attributes in ``udas``).
    """
    udas = udas or {}
    fields = cls.FIELDS.copy()
    fields.update(udas)
    processed = {
        key: cls._serialize(key, value, fields)
        for key, value in six.iteritems(data)
    }
    return cls(processed, udas)
Create a Task from an already deserialized dict .
243,620
def from_input ( cls , input_file = sys . stdin , modify = False , udas = None ) : original_task = input_file . readline ( ) . strip ( ) if modify : modified_task = input_file . readline ( ) . strip ( ) return cls ( json . loads ( modified_task ) , udas = udas ) return cls ( json . loads ( original_task ) , udas = udas...
Create a Task directly from stdin by reading one line . If modify = True two lines are expected which is consistent with the Taskwarrior hook system . The first line is interpreted as the original state of the Task and the second one as the new modified state .
243,621
def _deserialize(cls, key, value, fields):
    """Convert raw Taskwarrior data for ``key`` into a Python object."""
    converter = cls._get_converter_for_field(key, None, fields)
    return converter.deserialize(value)
Marshal incoming data into Python objects .
243,622
def _serialize(cls, key, value, fields):
    """Convert a Python value for ``key`` into Taskwarrior's JSON form."""
    converter = cls._get_converter_for_field(key, None, fields)
    return converter.serialize(value)
Marshal outgoing data into Taskwarrior's JSON format .
243,623
def get_changes ( self , serialized = False , keep = False ) : results = { } for k , f , t in self . _changes : if k not in results : results [ k ] = [ f , None ] results [ k ] [ 1 ] = ( self . _serialize ( k , t , self . _fields ) if serialized else t ) for k , v in six . iteritems ( self ) : if isinstance ( v , Dirty...
Get a journal of changes that have occurred
243,624
def update(self, values, force=False):
    """Assign every key/value pair from ``values`` onto this task dict.

    Returns a dict mapping each key to the result of its assignment.
    """
    outcome = {}
    for key, value in six.iteritems(values):
        outcome[key] = self.__setitem__(key, value, force=force)
    return outcome
Update this task dictionary
243,625
def set(self, key, value):
    """Assign ``value`` to ``key``, recording the change unconditionally."""
    # force=True bypasses the "only record when the value changed" check.
    return self.__setitem__(key, value, force=True)
Set a key's value regardless of whether a change is seen .
243,626
def serialized(self):
    """Return this task in Taskwarrior's serialized (string) form."""
    return {
        key: self._serialize(key, value, self._fields)
        for key, value in six.iteritems(self)
    }
Returns a serialized representation of this task .
243,627
def encode_task_experimental ( task ) : task = task . copy ( ) if 'tags' in task : task [ 'tags' ] = ',' . join ( task [ 'tags' ] ) for k in task : task [ k ] = encode_task_value ( k , task [ k ] ) return [ "%s:\"%s\"" % ( k , v ) if v else "%s:" % ( k , ) for k , v in sorted ( task . items ( ) , key = itemgetter ( 0 )...
Convert a dict - like task to its string representation Used for adding a task via task add
243,628
def encode_task ( task ) : task = task . copy ( ) if 'tags' in task : task [ 'tags' ] = ',' . join ( task [ 'tags' ] ) for k in task : for unsafe , safe in six . iteritems ( encode_replacements ) : if isinstance ( task [ k ] , six . string_types ) : task [ k ] = task [ k ] . replace ( unsafe , safe ) if isinstance ( ta...
Convert a dict - like task to its string representation
243,629
def convert_dict_to_override_args ( config , prefix = '' ) : args = [ ] for k , v in six . iteritems ( config ) : if isinstance ( v , dict ) : args . extend ( convert_dict_to_override_args ( v , prefix = '.' . join ( [ prefix , k , ] ) if prefix else k ) ) else : v = six . text_type ( v ) left = 'rc' + ( ( '.' + prefix...
Converts a dictionary of override arguments into CLI arguments .
243,630
def stats_per_chunk(chunk, counts=None):
    """Tally the block types found in ``chunk``, one increment per block.

    Historically this mutated the module-level ``block_counts`` dict (the
    default when ``counts`` is omitted, preserving backward compatibility);
    pass ``counts`` to accumulate into a caller-owned dict instead, which
    also matches the sibling ``bounded_stats_per_chunk`` signature.
    """
    target = block_counts if counts is None else counts
    for block_id in chunk.iter_block():
        target[block_id] = target.get(block_id, 0) + 1
Given a chunk increment the block types with the number of blocks found
243,631
def bounded_stats_per_chunk ( chunk , block_counts , start , stop ) : chunk_z , chunk_x = chunk . get_coords ( ) for z in range ( 16 ) : world_z = z + chunk_z * 16 if ( ( start != None and world_z < int ( start [ 2 ] ) ) or ( stop != None and world_z > int ( stop [ 2 ] ) ) ) : break for x in range ( 16 ) : world_x = x ...
Given a chunk return the number of blocks types within the specified selection
243,632
def process_region_file ( region , start , stop ) : rx = region . loc . x rz = region . loc . z if ( start != None ) : if ( ( rx + 1 ) * 512 - 1 < int ( start [ 0 ] ) or ( rz + 1 ) * 512 - 1 < int ( start [ 2 ] ) ) : return elif ( stop != None ) : if ( rx * 512 - 1 > int ( stop [ 0 ] ) or rz * 512 - 1 > int ( stop [ 2 ...
Given a region return the number of blocks of each ID in that region
243,633
def get_region ( self , x , z ) : if ( x , z ) not in self . regions : if ( x , z ) in self . regionfiles : self . regions [ ( x , z ) ] = region . RegionFile ( self . regionfiles [ ( x , z ) ] ) else : self . regions [ ( x , z ) ] = region . RegionFile ( ) self . regions [ ( x , z ) ] . loc = Location ( x = x , z = z ...
Get a region using x z coordinates of a region . Cache results .
243,634
def iter_regions ( self ) : for x , z in self . regionfiles . keys ( ) : close_after_use = False if ( x , z ) in self . regions : regionfile = self . regions [ ( x , z ) ] else : regionfile = region . RegionFile ( self . regionfiles [ ( x , z ) ] , chunkclass = self . chunkclass ) regionfile . loc = Location ( x = x , ...
Return an iterable list of all region files . Use this function if you only want to loop through each region files once and do not want to cache the results .
243,635
def get_nbt ( self , x , z ) : rx , cx = divmod ( x , 32 ) rz , cz = divmod ( z , 32 ) if ( rx , rz ) not in self . regions and ( rx , rz ) not in self . regionfiles : raise InconceivedChunk ( "Chunk %s,%s is not present in world" % ( x , z ) ) nbt = self . get_region ( rx , rz ) . get_nbt ( cx , cz ) assert nbt != Non...
Return a NBT specified by the chunk coordinates x z . Raise InconceivedChunk if the NBT file is not yet generated . To get a Chunk object use get_chunk .
243,636
def get_chunk(self, x, z):
    """Return the chunk at chunk coordinates ``x``, ``z``.

    Raises InconceivedChunk (via get_nbt) when the chunk does not exist.
    """
    nbt = self.get_nbt(x, z)
    return self.chunkclass(nbt)
Return a chunk specified by the chunk coordinates x z . Raise InconceivedChunk if the chunk is not yet generated . To get the raw NBT data use get_nbt .
243,637
def chunk_count(self):
    """Return the total number of chunks across all region files."""
    return sum(region.chunk_count() for region in self.iter_regions())
Return a count of the chunks in this world folder .
243,638
def get_boundingbox(self):
    """Return the bounding box of every chunk in this world save.

    Coordinates are chunk coordinates; the y axis is left unbounded.
    """
    box = BoundingBox()
    for region_x, region_z in self.regionfiles.keys():
        region = self.get_region(region_x, region_z)
        base_x, base_z = 32 * region_x, 32 * region_z
        for cc in region.get_chunk_coords():
            box.expand(base_x + cc['x'], None, base_z + cc['z'])
    return box
Return minimum and maximum x and z coordinates of the chunks that make up this world save
243,639
def expand ( self , x , y , z ) : if x != None : if self . minx is None or x < self . minx : self . minx = x if self . maxx is None or x > self . maxx : self . maxx = x if y != None : if self . miny is None or y < self . miny : self . miny = y if self . maxy is None or y > self . maxy : self . maxy = y if z != None : i...
Expands the bounding box to include the given x , y and z coordinates .
243,640
def unpack_nbt(tag):
    """Recursively convert an NBT tag tree into native Python data."""
    if isinstance(tag, TAG_List):
        return [unpack_nbt(child) for child in tag.tags]
    if isinstance(tag, TAG_Compound):
        return {child.name: unpack_nbt(child) for child in tag.tags}
    return tag.value
Unpack an NBT tag into a native Python data structure .
243,641
def _init_file(self):
    """(Re)write a blank two-sector region header, erasing prior data."""
    header_length = 2 * SECTOR_LENGTH
    # Drop any payload beyond the header before zeroing it out.
    if self.size > header_length:
        self.file.truncate(header_length)
    self.file.seek(0)
    self.file.write(b'\x00' * header_length)
    self.size = header_length
Initialise the file header . This will erase any data previously in the file .
243,642
def _sectors ( self , ignore_chunk = None ) : sectorsize = self . _bytes_to_sector ( self . size ) sectors = [ [ ] for s in range ( sectorsize ) ] sectors [ 0 ] = True sectors [ 1 ] = True for m in self . metadata . values ( ) : if not m . is_created ( ) : continue if ignore_chunk == m : continue if m . blocklength and...
Return a list of all sectors each sector is a list of chunks occupying the block .
243,643
def _locate_free_sectors ( self , ignore_chunk = None ) : sectors = self . _sectors ( ignore_chunk = ignore_chunk ) return [ not i for i in sectors ]
Return a list of booleans indicating the free sectors .
243,644
def get_nbt ( self , x , z ) : data = self . get_blockdata ( x , z ) data = BytesIO ( data ) err = None try : nbt = NBTFile ( buffer = data ) if self . loc . x != None : x += self . loc . x * 32 if self . loc . z != None : z += self . loc . z * 32 nbt . loc = Location ( x = x , z = z ) return nbt except MalformedFileEr...
Return a NBTFile of the specified chunk . Raise InconceivedChunk if the chunk is not included in the file .
243,645
def write_chunk(self, x, z, nbt_file):
    """Serialize ``nbt_file`` and store it as chunk ``x``, ``z``."""
    payload = BytesIO()
    nbt_file.write_file(buffer=payload)
    self.write_blockdata(x, z, payload.getvalue())
Pack the NBT file as binary data and write to file in a compressed format .
243,646
def unlink_chunk ( self , x , z ) : if self . size < 2 * SECTOR_LENGTH : return self . file . seek ( 4 * ( x + 32 * z ) ) self . file . write ( pack ( ">IB" , 0 , 0 ) [ 1 : ] ) self . file . seek ( SECTOR_LENGTH + 4 * ( x + 32 * z ) ) self . file . write ( pack ( ">I" , 0 ) ) current = self . metadata [ x , z ] free_se...
Remove a chunk from the header of the region file . Fragmentation is not a problem chunks are written to free sectors when possible .
243,647
def _classname ( self ) : if self . __class__ . __module__ in ( None , ) : return self . __class__ . __name__ else : return "%s.%s" % ( self . __class__ . __module__ , self . __class__ . __name__ )
Return the fully qualified class name .
243,648
def chests_per_chunk ( chunk ) : chests = [ ] for entity in chunk [ 'Entities' ] : eid = entity [ "id" ] . value if eid == "Minecart" and entity [ "type" ] . value == 1 or eid == "minecraft:chest_minecart" : x , y , z = entity [ "Pos" ] x , y , z = x . value , y . value , z . value try : items = items_from_nbt ( entity...
Find chests and get contents in a given chunk .
243,649
def get_block(self, x, y, z):
    """Return the block at chunk-relative ``x``, ``y``, ``z``.

    Returns None when the 16-block-high section containing ``y`` is absent.
    """
    section_y, block_y = divmod(y, 16)
    section = self.get_section(section_y)
    # `is None` instead of `== None` (PEP 8 E711): identity check is the
    # correct None test and avoids surprises from overridden __eq__.
    if section is None:
        return None
    return section.get_block(x, block_y, z)
Get a block from relative x y z .
243,650
def get_blocks_byte_array(self, buffer=False):
    """Return the chunk's block ids as raw bytes.

    With ``buffer=True``, return a BytesIO containing a big-endian int32
    length prefix followed by the byte data.
    """
    if buffer:
        length = len(self.blocksList)
        return BytesIO(pack(">i", length) + self.get_blocks_byte_array())
    # array.tostring() was removed in Python 3.9; tobytes() is the
    # identical replacement.
    return array.array('B', self.blocksList).tobytes()
Return a list of all blocks in this chunk .
243,651
def get_data_byte_array(self, buffer=False):
    """Return the chunk's per-block data nibbles as raw bytes.

    With ``buffer=True``, return a BytesIO containing a big-endian int32
    length prefix followed by the byte data.
    """
    if buffer:
        length = len(self.dataList)
        return BytesIO(pack(">i", length) + self.get_data_byte_array())
    # array.tostring() was removed in Python 3.9; tobytes() is the
    # identical replacement.
    return array.array('B', self.dataList).tobytes()
Return a list of data for all blocks in this chunk .
243,652
def generate_heightmap ( self , buffer = False , as_array = False ) : non_solids = [ 0 , 8 , 9 , 10 , 11 , 38 , 37 , 32 , 31 ] if buffer : return BytesIO ( pack ( ">i" , 256 ) + self . generate_heightmap ( ) ) else : bytes = [ ] for z in range ( 16 ) : for x in range ( 16 ) : for y in range ( 127 , - 1 , - 1 ) : offset...
Return a heightmap representing the highest solid blocks in this chunk .
243,653
def set_blocks ( self , list = None , dict = None , fill_air = False ) : if list : self . blocksList = list elif dict : list = [ ] for x in range ( 16 ) : for z in range ( 16 ) : for y in range ( 128 ) : coord = x , y , z offset = y + z * 128 + x * 128 * 16 if ( coord in dict ) : list . append ( dict [ coord ] ) else :...
Sets all blocks in this chunk using either a list or dictionary . Blocks not explicitly set can be filled to air by setting fill_air to True .
243,654
def set_block ( self , x , y , z , id , data = 0 ) : offset = y + z * 128 + x * 128 * 16 self . blocksList [ offset ] = id if ( offset % 2 == 1 ) : index = ( offset - 1 ) // 2 b = self . dataList [ index ] self . dataList [ index ] = ( b & 240 ) + ( data & 15 ) else : index = offset // 2 b = self . dataList [ index ] s...
Sets the block a x y z to the specified id and optionally data .
243,655
def get_block(self, x, y, z, coord=False):
    """Return the block id at ``x``, ``y``, ``z``.

    When a truthy ``coord`` triple is given it overrides ``x``/``y``/``z``.
    """
    if coord == False:  # noqa: E712 -- preserves original truthiness test
        offset = y + z * 128 + x * 128 * 16
    else:
        offset = coord[1] + coord[2] * 128 + coord[0] * 128 * 16
    return self.blocksList[offset]
Return the id of the block at x y z .
243,656
def tag_info(self):
    """Return "ClassName('name'): value" (name part omitted when empty)."""
    label = self.__class__.__name__
    if self.name:
        label += '(%r)' % self.name
    return label + ": " + self.valuestr()
Return Unicode string with class name and unnested value .
243,657
def parse_file ( self , filename = None , buffer = None , fileobj = None ) : if filename : self . file = GzipFile ( filename , 'rb' ) elif buffer : if hasattr ( buffer , 'name' ) : self . filename = buffer . name self . file = buffer elif fileobj : if hasattr ( fileobj , 'name' ) : self . filename = fileobj . name self...
Completely parse a file extracting all tags .
243,658
def write_file ( self , filename = None , buffer = None , fileobj = None ) : closefile = True if buffer : self . filename = None self . file = buffer closefile = False elif filename : self . filename = filename self . file = GzipFile ( filename , "wb" ) elif fileobj : self . filename = None self . file = GzipFile ( fil...
Write this NBT file to a file .
243,659
def loads(self, value):
    """Deserialize ``value`` with ``msgpack.loads``; None passes through."""
    # Only utf-8 payloads are decoded to str; any other encoding stays raw.
    raw = self.encoding != "utf-8"
    if value is None:
        return None
    return msgpack.loads(value, raw=raw, use_list=self.use_list)
Deserialize value using msgpack . loads .
243,660
def aiocache_enabled(cls, fake_return=None):
    """Decorator factory: fake the wrapped coroutine's return when disabled.

    When the AIOCACHE_DISABLE environment variable equals "1" the wrapped
    coroutine is skipped entirely and ``fake_return`` is returned instead.
    """
    def enabled(func):
        @functools.wraps(func)
        async def _enabled(*args, **kwargs):
            if os.getenv("AIOCACHE_DISABLE") == "1":
                return fake_return
            return await func(*args, **kwargs)
        return _enabled
    return enabled
Use this decorator to be able to fake the return of the function by setting the AIOCACHE_DISABLE environment variable
243,661
async def add ( self , key , value , ttl = SENTINEL , dumps_fn = None , namespace = None , _conn = None ) : start = time . monotonic ( ) dumps = dumps_fn or self . _serializer . dumps ns_key = self . build_key ( key , namespace = namespace ) await self . _add ( ns_key , dumps ( value ) , ttl = self . _get_ttl ( ttl ) ,...
Stores the value in the given key with ttl if specified . Raises an error if the key already exists .
243,662
async def get ( self , key , default = None , loads_fn = None , namespace = None , _conn = None ) : start = time . monotonic ( ) loads = loads_fn or self . _serializer . loads ns_key = self . build_key ( key , namespace = namespace ) value = loads ( await self . _get ( ns_key , encoding = self . serializer . encoding ,...
Get a value from the cache . Returns default if not found .
243,663
async def multi_get ( self , keys , loads_fn = None , namespace = None , _conn = None ) : start = time . monotonic ( ) loads = loads_fn or self . _serializer . loads ns_keys = [ self . build_key ( key , namespace = namespace ) for key in keys ] values = [ loads ( value ) for value in await self . _multi_get ( ns_keys ,...
Get multiple values from the cache ; values not found are None .
243,664
async def set ( self , key , value , ttl = SENTINEL , dumps_fn = None , namespace = None , _cas_token = None , _conn = None ) : start = time . monotonic ( ) dumps = dumps_fn or self . _serializer . dumps ns_key = self . build_key ( key , namespace = namespace ) res = await self . _set ( ns_key , dumps ( value ) , ttl =...
Stores the value in the given key with ttl if specified
243,665
async def multi_set ( self , pairs , ttl = SENTINEL , dumps_fn = None , namespace = None , _conn = None ) : start = time . monotonic ( ) dumps = dumps_fn or self . _serializer . dumps tmp_pairs = [ ] for key , value in pairs : tmp_pairs . append ( ( self . build_key ( key , namespace = namespace ) , dumps ( value ) ) )...
Stores multiple values in the given keys .
243,666
async def delete(self, key, namespace=None, _conn=None):
    """Delete ``key`` from the cache and return the backend's result."""
    began = time.monotonic()
    full_key = self.build_key(key, namespace=namespace)
    result = await self._delete(full_key, _conn=_conn)
    logger.debug("DELETE %s %d (%.4f)s", full_key, result, time.monotonic() - began)
    return result
Deletes the given key .
243,667
async def exists(self, key, namespace=None, _conn=None):
    """Return whether ``key`` is present in the cache."""
    began = time.monotonic()
    full_key = self.build_key(key, namespace=namespace)
    result = await self._exists(full_key, _conn=_conn)
    logger.debug("EXISTS %s %d (%.4f)s", full_key, result, time.monotonic() - began)
    return result
Check key exists in the cache .
243,668
async def expire(self, key, ttl, namespace=None, _conn=None):
    """Set ``key``'s ttl; a ttl of 0 disables expiration."""
    began = time.monotonic()
    full_key = self.build_key(key, namespace=namespace)
    result = await self._expire(full_key, ttl, _conn=_conn)
    logger.debug("EXPIRE %s %d (%.4f)s", full_key, result, time.monotonic() - began)
    return result
Set the ttl to the given key . By setting it to 0 it will disable it
243,669
async def clear(self, namespace=None, _conn=None):
    """Clear the cache, or only the given ``namespace`` when provided."""
    began = time.monotonic()
    result = await self._clear(namespace, _conn=_conn)
    logger.debug("CLEAR %s %d (%.4f)s", namespace, result, time.monotonic() - began)
    return result
Clears the cache in the cache namespace . If an alternative namespace is given it will clear those ones instead .
243,670
async def raw(self, command, *args, _conn=None, **kwargs):
    """Forward ``command`` verbatim to the underlying backend client.

    Using this bypasses the backend-agnostic API, so compatibility with
    other cache backends is lost.
    """
    began = time.monotonic()
    result = await self._raw(
        command, *args,
        encoding=self.serializer.encoding, _conn=_conn, **kwargs)
    logger.debug("%s (%.4f)s", command, time.monotonic() - began)
    return result
Send the raw command to the underlying client . Note that by using this CMD you will lose compatibility with other backends .
243,671
async def close(self, *args, _conn=None, **kwargs):
    """Release backend resources for a safe exit.

    Commands still work after closing, but ``close`` must be called
    again before the program exits.
    """
    began = time.monotonic()
    result = await self._close(*args, _conn=_conn, **kwargs)
    logger.debug("CLOSE (%.4f)s", time.monotonic() - began)
    return result
Perform any resource clean up necessary to exit the program safely . After closing cmd execution is still possible but you will have to close again before exiting .
243,672
def get(self, alias: str):
    """Return the cache registered under ``alias``, creating it on first use.

    Later calls with the same alias always return the same instance.
    """
    try:
        return self._caches[alias]
    except KeyError:
        pass
    config = self.get_alias_config(alias)
    # deepcopy so the stored alias config can't be mutated by the factory.
    instance = _create_cache(**deepcopy(config))
    self._caches[alias] = instance
    return instance
Retrieve cache identified by alias . Will return always the same instance
243,673
def create ( self , alias = None , cache = None , ** kwargs ) : if alias : config = self . get_alias_config ( alias ) elif cache : warnings . warn ( "Creating a cache with an explicit config is deprecated, use 'aiocache.Cache'" , DeprecationWarning , ) config = { "cache" : cache } else : raise TypeError ( "create call ...
Create a new cache . Either alias or cache params are required . You can use kwargs to pass extra parameters to configure the cache .
243,674
async def async_poller ( client , initial_response , deserialization_callback , polling_method ) : try : client = client if isinstance ( client , ServiceClientAsync ) else client . _client except AttributeError : raise ValueError ( "Poller client parameter must be a low-level msrest Service Client or a SDK client." ) r...
Async Poller for long running operations .
243,675
def send ( self , request , ** kwargs ) : session = request . context . session old_max_redirects = None if 'max_redirects' in kwargs : warnings . warn ( "max_redirects in operation kwargs is deprecated, use config.redirect_policy instead" , DeprecationWarning ) old_max_redirects = session . max_redirects session . max...
Patch the current session with Request level operation config .
243,676
def _request ( self , method , url , params , headers , content , form_content ) : request = ClientRequest ( method , self . format_url ( url ) ) if params : request . format_parameters ( params ) if headers : request . headers . update ( headers ) if "Accept" not in request . headers : _LOGGER . debug ( "Accept header...
Create ClientRequest object .
243,677
def format_url(self, url, **kwargs):
    """Expand ``url`` with ``kwargs`` and join it onto the client base URL.

    URLs that are already absolute (scheme and host present) are returned
    unchanged after formatting; relative ones are joined to
    ``config.base_url``.
    """
    url = url.format(**kwargs)
    parts = urlparse(url)
    if not parts.scheme or not parts.netloc:
        url = url.lstrip('/')
        root = self.config.base_url.format(**kwargs).rstrip('/')
        url = urljoin(root + '/', url)
    return url
Format request URL with the client base URL unless the supplied URL is already absolute .
243,678
def get(self, url, params=None, headers=None, content=None, form_content=None):
    """Build a ClientRequest configured for an HTTP GET."""
    request = self._request('GET', url, params, headers, content, form_content)
    # Re-assert the verb; _request already received it, kept for parity.
    request.method = 'GET'
    return request
Create a GET request object .
243,679
def put(self, url, params=None, headers=None, content=None, form_content=None):
    """Build a ClientRequest configured for an HTTP PUT."""
    return self._request('PUT', url, params, headers, content, form_content)
Create a PUT request object .
243,680
def send_formdata(self, request, headers=None, content=None, **config):
    """Send ``request`` as a multipart form-data request (not streamed).

    Only file-like objects and strings are supported as field values.
    """
    request.headers = headers
    request.add_formdata(content)
    return self.send(request, **config)
Send data as a multipart form - data request . We only deal with file - like objects or strings at this point . The requests is not yet streamed .
243,681
def add_header(self, header, value):
    """Set a persistent header applied to all requests in this session.

    Deprecated: use ``config.headers`` directly instead.
    """
    warnings.warn(
        "Private attribute _client.add_header is deprecated. Use config.headers instead.",
        DeprecationWarning)
    self.config.headers[header] = value
Add a persistent header - this header will be applied to all requests sent during the current client session .
243,682
def signed_session ( self , session = None ) : session = super ( ApiKeyCredentials , self ) . signed_session ( session ) session . headers . update ( self . in_headers ) try : session . params . update ( self . in_query ) except AttributeError : raise ValueError ( "session.params must be a dict to be used in ApiKeyCred...
Create requests session with ApiKey .
243,683
def deserialize_from_text ( cls , data , content_type = None ) : if hasattr ( data , 'read' ) : data = cast ( IO , data ) . read ( ) if isinstance ( data , bytes ) : data_as_str = data . decode ( encoding = 'utf-8-sig' ) else : data_as_str = cast ( str , data ) data_as_str = data_as_str . lstrip ( _BOM ) if content_typ...
Decode data according to content - type .
243,684
def deserialize_from_http_generics ( cls , body_bytes , headers ) : content_type = None if 'content-type' in headers : content_type = headers [ 'content-type' ] . split ( ";" ) [ 0 ] . strip ( ) . lower ( ) else : content_type = "application/json" if body_bytes : return cls . deserialize_from_text ( body_bytes , conten...
Deserialize from HTTP response .
243,685
def on_response(self, request, response, **kwargs):
    """Deserialize a (non-streamed) response body into the pipeline context.

    Streamed responses (the default) are left untouched.
    """
    if kwargs.get("stream", True):
        return
    http_response = response.http_response
    parsed = self.deserialize_from_http_generics(
        http_response.text(), http_response.headers)
    response.context[self.CONTEXT_NAME] = parsed
Extract data from the body of a REST response object .
243,686
def add_headers(self, header_dict):
    """Deserialize selected response headers into ``self.headers``.

    Silently a no-op when no response is attached; headers absent from
    the response deserialize from None.
    """
    if not self.response:
        return
    for name, data_type in header_dict.items():
        raw = self.response.headers.get(name)
        self.headers[name] = self._deserialize(data_type, raw)
Deserialize a specific header .
243,687
def log_request ( _ , request , * _args , ** _kwargs ) : if not _LOGGER . isEnabledFor ( logging . DEBUG ) : return try : _LOGGER . debug ( "Request URL: %r" , request . url ) _LOGGER . debug ( "Request method: %r" , request . method ) _LOGGER . debug ( "Request headers:" ) for header , value in request . headers . ite...
Log a client request .
243,688
def log_response ( _ , _request , response , * _args , ** kwargs ) : if not _LOGGER . isEnabledFor ( logging . DEBUG ) : return None try : _LOGGER . debug ( "Response status: %r" , response . status_code ) _LOGGER . debug ( "Response headers:" ) for res_header , value in response . headers . items ( ) : _LOGGER . debug...
Log a server response .
243,689
def _clear_config ( self ) : for section in self . _config . sections ( ) : self . _config . remove_section ( section )
Clearout config object in memory .
243,690
def format_parameters ( self , params ) : query = urlparse ( self . url ) . query if query : self . url = self . url . partition ( '?' ) [ 0 ] existing_params = { p [ 0 ] : p [ - 1 ] for p in [ p . partition ( '=' ) for p in query . split ( '&' ) ] } params . update ( existing_params ) query_params = [ "{}={}" . format...
Format parameters into a valid query string . It's assumed all parameters have already been quoted as valid URL strings .
243,691
def _format_data ( data ) : if hasattr ( data , 'read' ) : data = cast ( IO , data ) data_name = None try : if data . name [ 0 ] != '<' and data . name [ - 1 ] != '>' : data_name = os . path . basename ( data . name ) except ( AttributeError , TypeError ) : pass return ( data_name , data , "application/octet-stream" ) ...
Format field data according to whether it is a stream or a string for a form - data request .
243,692
def add_formdata ( self , content = None ) : if content is None : content = { } content_type = self . headers . pop ( 'Content-Type' , None ) if self . headers else None if content_type and content_type . lower ( ) == 'application/x-www-form-urlencoded' : self . data = { f : d for f , d in content . items ( ) if d is n...
Add data as a multipart form - data request to the request .
243,693
def raise_with_traceback ( exception , message = "" , * args , ** kwargs ) : exc_type , exc_value , exc_traceback = sys . exc_info ( ) exc_msg = "{}, {}: {}" . format ( message , exc_type . __name__ , exc_value ) error = exception ( exc_msg , * args , ** kwargs ) try : raise error . with_traceback ( exc_traceback ) exc...
Raise exception with a specified traceback .
243,694
def _patch_redirect ( session ) : def enforce_http_spec ( resp , request ) : if resp . status_code in ( 301 , 302 ) and request . method not in [ 'GET' , 'HEAD' ] : return False return True redirect_logic = session . resolve_redirects def wrapped_redirect ( resp , req , ** kwargs ) : attempt = enforce_http_spec ( resp ...
Whether redirect policy should be applied based on status code .
243,695
def _init_session(self, session):
    """Apply session-level configuration: redirect patch and retry policy."""
    _patch_redirect(session)
    retries = self.config.retry_policy()
    for protocol in self._protocols:
        session.adapters[protocol].max_retries = retries
Init session level configuration of requests .
243,696
def _configure_send ( self , request , ** kwargs ) : requests_kwargs = { } session = kwargs . pop ( 'session' , self . session ) if session is not self . session : self . _init_session ( session ) session . max_redirects = int ( self . config . redirect_policy ( ) ) session . trust_env = bool ( self . config . proxies ...
Configure the kwargs to use with requests .
243,697
def full_restapi_key_transformer(key, attr_desc, value):
    """Return (full RestAPI key path as a list of segments, value)."""
    segments = _FLATTEN.split(attr_desc['key'])
    return [_decode_attribute_map_key(segment) for segment in segments], value
A key transformer that returns the full RestAPI key path .
243,698
def last_restapi_key_transformer(key, attr_desc, value):
    """Return (last segment of the RestAPI key path, value)."""
    path, new_value = full_restapi_key_transformer(key, attr_desc, value)
    return path[-1], new_value
A key transformer that returns the last RestAPI key .
243,699
def _create_xml_node ( tag , prefix = None , ns = None ) : if prefix and ns : ET . register_namespace ( prefix , ns ) if ns : return ET . Element ( "{" + ns + "}" + tag ) else : return ET . Element ( tag )
Create a XML node .