idx int64 0 252k | question stringlengths 48 5.28k | target stringlengths 5 1.23k |
|---|---|---|
249,500 | def formMarkup ( self , realm , return_to = None , immediate = False , form_tag_attrs = None ) : message = self . getMessage ( realm , return_to , immediate ) return message . toFormMarkup ( self . endpoint . server_url , form_tag_attrs ) | Get html for a form to submit this request to the IDP . |
249,501 | def htmlMarkup ( self , realm , return_to = None , immediate = False , form_tag_attrs = None ) : return oidutil . autoSubmitHTML ( self . formMarkup ( realm , return_to , immediate , form_tag_attrs ) ) | Get an autosubmitting HTML page that submits this request to the IDP . This is just a wrapper for formMarkup . |
249,502 | def isSigned ( self , ns_uri , ns_key ) : return self . message . getKey ( ns_uri , ns_key ) in self . signed_fields | Return whether a particular key is signed regardless of its namespace alias |
249,503 | def getSigned ( self , ns_uri , ns_key , default = None ) : if self . isSigned ( ns_uri , ns_key ) : return self . message . getArg ( ns_uri , ns_key , default ) else : return default | Return the specified signed field if available otherwise return default |
249,504 | def getSignedNS ( self , ns_uri ) : msg_args = self . message . getArgs ( ns_uri ) for key in msg_args . iterkeys ( ) : if not self . isSigned ( ns_uri , key ) : logging . info ( "SuccessResponse.getSignedNS: (%s, %s) not signed." % ( ns_uri , key ) ) return None return msg_args | Get signed arguments from the response message . Return a dict of all arguments in the specified namespace . If any of the arguments are not signed return None . |
249,505 | def extensionResponse ( self , namespace_uri , require_signed ) : if require_signed : return self . getSignedNS ( namespace_uri ) else : return self . message . getArgs ( namespace_uri ) | Return response arguments in the specified namespace . |
249,506 | def mkFilter ( parts ) : if parts is None : parts = [ BasicServiceEndpoint ] try : parts = list ( parts ) except TypeError : return mkCompoundFilter ( [ parts ] ) else : return mkCompoundFilter ( parts ) | Convert a filter - convertable thing into a filter |
249,507 | def getServiceEndpoints ( self , yadis_url , service_element ) : endpoints = [ ] for type_uris , uri , _ in expandService ( service_element ) : endpoint = BasicServiceEndpoint ( yadis_url , type_uris , uri , service_element ) e = self . applyFilters ( endpoint ) if e is not None : endpoints . append ( e ) return endpoints | Returns an iterator of endpoint objects produced by the filter functions . |
249,508 | def applyFilters ( self , endpoint ) : for filter_function in self . filter_functions : e = filter_function ( endpoint ) if e is not None : return e return None | Apply filter functions to an endpoint until one of them returns non - None . |
249,509 | def getServiceEndpoints ( self , yadis_url , service_element ) : endpoints = [ ] for subfilter in self . subfilters : endpoints . extend ( subfilter . getServiceEndpoints ( yadis_url , service_element ) ) return endpoints | Generate all endpoint objects for all of the subfilters of this filter and return their concatenation . |
249,510 | def randomString ( length , chrs = None ) : if chrs is None : return getBytes ( length ) else : n = len ( chrs ) return '' . join ( [ chrs [ randrange ( n ) ] for _ in xrange ( length ) ] ) | Produce a string of length random bytes chosen from chrs . |
249,511 | def _hasher_first_run ( self , preimage ) : new_hasher = self . _backend . keccak256 assert new_hasher ( b'' ) == b"\xc5\xd2F\x01\x86\xf7#<\x92~}\xb2\xdc\xc7\x03\xc0\xe5\x00\xb6S\xca\x82';\x7b\xfa\xd8\x04]\x85\xa4p" self . hasher = new_hasher return new_hasher ( preimage ) | Invoke the backend on - demand and check an expected hash result then replace this first run with the new hasher method . This is a bit of a hacky way to minimize overhead on hash calls after this first one . |
249,512 | def dirname ( path : Optional [ str ] ) -> Optional [ str ] : if path is not None : return os . path . dirname ( path ) | Returns the directory component of a pathname and None if the argument is None |
249,513 | def basename ( path : Optional [ str ] ) -> Optional [ str ] : if path is not None : return os . path . basename ( path ) | Returns the final component of a pathname and None if the argument is None |
249,514 | def normpath ( path : Optional [ str ] ) -> Optional [ str ] : if path is not None : return os . path . normpath ( path ) | Normalizes the path returns None if the argument is None |
249,515 | def join_paths ( path1 : Optional [ str ] , path2 : Optional [ str ] ) -> Optional [ str ] : if path1 is not None and path2 is not None : return os . path . join ( path1 , path2 ) | Joins two paths if neither of them is None |
249,516 | def stasher ( self ) : stashed = [ False ] clean = [ False ] def stash ( ) : if clean [ 0 ] or not self . repo . is_dirty ( submodules = False ) : clean [ 0 ] = True return if stashed [ 0 ] : return if self . change_count > 1 : message = 'stashing {0} changes' else : message = 'stashing {0} change' print ( colored ( message . format ( self . change_count ) , 'magenta' ) ) try : self . _run ( 'stash' ) except GitError as e : raise StashError ( stderr = e . stderr , stdout = e . stdout ) stashed [ 0 ] = True yield stash if stashed [ 0 ] : print ( colored ( 'unstashing' , 'magenta' ) ) try : self . _run ( 'stash' , 'pop' ) except GitError as e : raise UnstashError ( stderr = e . stderr , stdout = e . stdout ) | A stashing contextmanager . |
249,517 | def checkout ( self , branch_name ) : try : find ( self . repo . branches , lambda b : b . name == branch_name ) . checkout ( ) except OrigCheckoutError as e : raise CheckoutError ( branch_name , details = e ) | Checkout a branch by name . |
249,518 | def rebase ( self , target_branch ) : current_branch = self . repo . active_branch arguments = ( ( [ self . config ( 'git-up.rebase.arguments' ) ] or [ ] ) + [ target_branch . name ] ) try : self . _run ( 'rebase' , * arguments ) except GitError as e : raise RebaseError ( current_branch . name , target_branch . name , ** e . __dict__ ) | Rebase to target branch . |
249,519 | def push ( self , * args , ** kwargs ) : stdout = six . b ( '' ) cmd = self . git . push ( as_process = True , * args , ** kwargs ) while True : output = cmd . stdout . read ( 1 ) sys . stdout . write ( output . decode ( 'utf-8' ) ) sys . stdout . flush ( ) stdout += output if output == six . b ( "" ) : break try : cmd . wait ( ) except GitCommandError as error : message = "'{0}' returned exit status {1}" . format ( ' ' . join ( str ( c ) for c in error . command ) , error . status ) raise GitError ( message , stderr = error . stderr , stdout = stdout ) return stdout . strip ( ) | Push commits to remote |
249,520 | def change_count ( self ) : status = self . git . status ( porcelain = True , untracked_files = 'no' ) . strip ( ) if not status : return 0 else : return len ( status . split ( '\n' ) ) | The number of changes in the working directory . |
249,521 | def uniq ( seq ) : seen = set ( ) return [ x for x in seq if str ( x ) not in seen and not seen . add ( str ( x ) ) ] | Return a copy of seq without duplicates . |
249,522 | def current_version ( ) : import setuptools version = [ None ] def monkey_setup ( ** settings ) : version [ 0 ] = settings [ 'version' ] old_setup = setuptools . setup setuptools . setup = monkey_setup import setup reload ( setup ) setuptools . setup = old_setup return version [ 0 ] | Get the current version number from setup . py |
249,523 | def run ( version , quiet , no_fetch , push , ** kwargs ) : if version : if NO_DISTRIBUTE : print ( colored ( 'Please install \'git-up\' via pip in order to ' 'get version information.' , 'yellow' ) ) else : GitUp ( sparse = True ) . version_info ( ) return if quiet : sys . stdout = StringIO ( ) try : gitup = GitUp ( ) if push is not None : gitup . settings [ 'push.auto' ] = push if no_fetch : gitup . should_fetch = False except GitError : sys . exit ( 1 ) else : gitup . run ( ) | A nicer git pull . |
249,524 | def run ( self ) : try : if self . should_fetch : self . fetch ( ) self . rebase_all_branches ( ) if self . with_bundler ( ) : self . check_bundler ( ) if self . settings [ 'push.auto' ] : self . push ( ) except GitError as error : self . print_error ( error ) if self . testing : raise else : sys . exit ( 1 ) | Run all the git - up stuff . |
249,525 | def fetch ( self ) : fetch_kwargs = { 'multiple' : True } fetch_args = [ ] if self . is_prune ( ) : fetch_kwargs [ 'prune' ] = True if self . settings [ 'fetch.all' ] : fetch_kwargs [ 'all' ] = True else : if '.' in self . remotes : self . remotes . remove ( '.' ) if not self . remotes : return fetch_args . append ( self . remotes ) try : self . git . fetch ( * fetch_args , ** fetch_kwargs ) except GitError as error : error . message = "`git fetch` failed" raise error | Fetch the recent refs from the remotes . Unless git - up . fetch . all is set to true all remotes with locally existent branches will be fetched . |
249,526 | def log ( self , branch , remote ) : log_hook = self . settings [ 'rebase.log-hook' ] if log_hook : if ON_WINDOWS : log_hook = re . sub ( r'\$(\d+)' , r'%\1' , log_hook ) log_hook = re . sub ( r'%(?!\d)' , '%%' , log_hook ) log_hook = re . sub ( r'; ?' , r'\n' , log_hook ) with NamedTemporaryFile ( prefix = 'PyGitUp.' , suffix = '.bat' , delete = False ) as bat_file : bat_file . file . write ( b'@echo off\n' ) bat_file . file . write ( log_hook . encode ( 'utf-8' ) ) state = subprocess . call ( [ bat_file . name , branch . name , remote . name ] ) os . remove ( bat_file . name ) else : state = subprocess . call ( [ log_hook , 'git-up' , branch . name , remote . name ] , shell = True ) if self . testing : assert state == 0 , 'log_hook returned != 0' | Call a log - command if set by git - up . fetch . all . |
249,527 | def version_info ( self ) : package = pkg . get_distribution ( 'git-up' ) local_version_str = package . version local_version = package . parsed_version print ( 'GitUp version is: ' + colored ( 'v' + local_version_str , 'green' ) ) if not self . settings [ 'updates.check' ] : return print ( 'Checking for updates...' , end = '' ) try : reader = codecs . getreader ( 'utf-8' ) details = json . load ( reader ( urlopen ( PYPI_URL ) ) ) online_version = details [ 'info' ] [ 'version' ] except ( HTTPError , URLError , ValueError ) : recent = True else : recent = local_version >= pkg . parse_version ( online_version ) if not recent : print ( '\rRecent version is: ' + colored ( 'v' + online_version , color = 'yellow' , attrs = [ 'bold' ] ) ) print ( 'Run \'pip install -U git-up\' to get the update.' ) else : sys . stdout . write ( '\r' + ' ' * 80 + '\n' ) | Tell what version we re running at and if it s up to date . |
249,528 | def load_config ( self ) : for key in self . settings : value = self . config ( key ) if value == '' or value is None : continue if value . lower ( ) == 'true' : value = True elif value . lower ( ) == 'false' : value = False elif value : pass self . settings [ key ] = value | Load the configuration from git config . |
249,529 | def check_bundler ( self ) : def get_config ( name ) : return name if self . config ( 'bundler.' + name ) else '' from pkg_resources import Requirement , resource_filename relative_path = os . path . join ( 'PyGitUp' , 'check-bundler.rb' ) bundler_script = resource_filename ( Requirement . parse ( 'git-up' ) , relative_path ) assert os . path . exists ( bundler_script ) , 'check-bundler.rb doesn\'t ' 'exist!' return_value = subprocess . call ( [ 'ruby' , bundler_script , get_config ( 'autoinstall' ) , get_config ( 'local' ) , get_config ( 'rbenv' ) ] ) if self . testing : assert return_value == 0 , 'Errors while executing check-bundler.rb' | Run the bundler check . |
249,530 | def opendocx ( file ) : mydoc = zipfile . ZipFile ( file ) xmlcontent = mydoc . read ( 'word/document.xml' ) document = etree . fromstring ( xmlcontent ) return document | Open a docx file return a document XML tree |
249,531 | def makeelement ( tagname , tagtext = None , nsprefix = 'w' , attributes = None , attrnsprefix = None ) : namespacemap = None if isinstance ( nsprefix , list ) : namespacemap = { } for prefix in nsprefix : namespacemap [ prefix ] = nsprefixes [ prefix ] nsprefix = nsprefix [ 0 ] if nsprefix : namespace = '{%s}' % nsprefixes [ nsprefix ] else : namespace = '' newelement = etree . Element ( namespace + tagname , nsmap = namespacemap ) if attributes : if not attrnsprefix : if nsprefix == 'w' : attributenamespace = namespace else : attributenamespace = '' else : attributenamespace = '{' + nsprefixes [ attrnsprefix ] + '}' for tagattribute in attributes : newelement . set ( attributenamespace + tagattribute , attributes [ tagattribute ] ) if tagtext : newelement . text = tagtext return newelement | Create an element & return it |
249,532 | def heading ( headingtext , headinglevel , lang = 'en' ) : lmap = { 'en' : 'Heading' , 'it' : 'Titolo' } paragraph = makeelement ( 'p' ) pr = makeelement ( 'pPr' ) pStyle = makeelement ( 'pStyle' , attributes = { 'val' : lmap [ lang ] + str ( headinglevel ) } ) run = makeelement ( 'r' ) text = makeelement ( 't' , tagtext = headingtext ) pr . append ( pStyle ) run . append ( text ) paragraph . append ( pr ) paragraph . append ( run ) return paragraph | Make a new heading return the heading element |
249,533 | def clean ( document ) : newdocument = document for t in ( 't' , 'r' ) : rmlist = [ ] for element in newdocument . iter ( ) : if element . tag == '{%s}%s' % ( nsprefixes [ 'w' ] , t ) : if not element . text and not len ( element ) : rmlist . append ( element ) for element in rmlist : element . getparent ( ) . remove ( element ) return newdocument | Perform misc cleaning operations on documents . Returns cleaned document . |
249,534 | def findTypeParent ( element , tag ) : p = element while True : p = p . getparent ( ) if p . tag == tag : return p return None | Finds fist parent of element of the given type |
249,535 | def AdvSearch ( document , search , bs = 3 ) : searchre = re . compile ( search ) matches = [ ] searchels = [ ] for element in document . iter ( ) : if element . tag == '{%s}t' % nsprefixes [ 'w' ] : if element . text : searchels . append ( element ) if len ( searchels ) > bs : searchels . pop ( 0 ) found = False for l in range ( 1 , len ( searchels ) + 1 ) : if found : break for s in range ( len ( searchels ) ) : if found : break if s + l <= len ( searchels ) : e = range ( s , s + l ) txtsearch = '' for k in e : txtsearch += searchels [ k ] . text match = searchre . search ( txtsearch ) if match : matches . append ( match . group ( ) ) found = True return set ( matches ) | Return set of all regex matches |
249,536 | def getdocumenttext ( document ) : paratextlist = [ ] paralist = [ ] for element in document . iter ( ) : if element . tag == '{' + nsprefixes [ 'w' ] + '}p' : paralist . append ( element ) for para in paralist : paratext = u'' for element in para . iter ( ) : if element . tag == '{' + nsprefixes [ 'w' ] + '}t' : if element . text : paratext = paratext + element . text elif element . tag == '{' + nsprefixes [ 'w' ] + '}tab' : paratext = paratext + '\t' if not len ( paratext ) == 0 : paratextlist . append ( paratext ) return paratextlist | Return the raw text of a document as a list of paragraphs . |
249,537 | def wordrelationships ( relationshiplist ) : relationships = etree . fromstring ( '<Relationships xmlns="http://schemas.openxmlformats.org/package/2006' '/relationships"></Relationships>' ) count = 0 for relationship in relationshiplist : rel_elm = makeelement ( 'Relationship' , nsprefix = None , attributes = { 'Id' : 'rId' + str ( count + 1 ) , 'Type' : relationship [ 0 ] , 'Target' : relationship [ 1 ] } ) relationships . append ( rel_elm ) count += 1 return relationships | Generate a Word relationships file |
249,538 | def savedocx ( document , coreprops , appprops , contenttypes , websettings , wordrelationships , output , imagefiledict = None ) : if imagefiledict is None : warn ( 'Using savedocx() without imagefiledict parameter will be deprec' 'ated in the future.' , PendingDeprecationWarning ) assert os . path . isdir ( template_dir ) docxfile = zipfile . ZipFile ( output , mode = 'w' , compression = zipfile . ZIP_DEFLATED ) prev_dir = os . path . abspath ( '.' ) os . chdir ( template_dir ) treesandfiles = { document : 'word/document.xml' , coreprops : 'docProps/core.xml' , appprops : 'docProps/app.xml' , contenttypes : '[Content_Types].xml' , websettings : 'word/webSettings.xml' , wordrelationships : 'word/_rels/document.xml.rels' } for tree in treesandfiles : log . info ( 'Saving: %s' % treesandfiles [ tree ] ) treestring = etree . tostring ( tree , pretty_print = True ) docxfile . writestr ( treesandfiles [ tree ] , treestring ) if imagefiledict is not None : for imagepath , picrelid in imagefiledict . items ( ) : archivename = 'word/media/%s_%s' % ( picrelid , basename ( imagepath ) ) log . info ( 'Saving: %s' , archivename ) docxfile . write ( imagepath , archivename ) files_to_ignore = [ '.DS_Store' ] for dirpath , dirnames , filenames in os . walk ( '.' ) : for filename in filenames : if filename in files_to_ignore : continue templatefile = join ( dirpath , filename ) archivename = templatefile [ 2 : ] log . info ( 'Saving: %s' , archivename ) docxfile . write ( templatefile , archivename ) log . info ( 'Saved new file to: %r' , output ) docxfile . close ( ) os . chdir ( prev_dir ) return | Save a modified document |
249,539 | def _depr ( fn , usage , stacklevel = 3 ) : warn ( '{0} is deprecated. Use {1} instead' . format ( fn , usage ) , stacklevel = stacklevel , category = DeprecationWarning ) | Internal convenience function for deprecation warnings |
249,540 | def upsert ( self , key , value , cas = 0 , ttl = 0 , format = None , persist_to = 0 , replicate_to = 0 ) : return _Base . upsert ( self , key , value , cas = cas , ttl = ttl , format = format , persist_to = persist_to , replicate_to = replicate_to ) | Unconditionally store the object in Couchbase . |
249,541 | def insert ( self , key , value , ttl = 0 , format = None , persist_to = 0 , replicate_to = 0 ) : return _Base . insert ( self , key , value , ttl = ttl , format = format , persist_to = persist_to , replicate_to = replicate_to ) | Store an object in Couchbase unless it already exists . |
249,542 | def prepend ( self , key , value , cas = 0 , format = None , persist_to = 0 , replicate_to = 0 ) : return _Base . prepend ( self , key , value , cas = cas , format = format , persist_to = persist_to , replicate_to = replicate_to ) | Prepend a string to an existing value in Couchbase . |
249,543 | def get ( self , key , ttl = 0 , quiet = None , replica = False , no_format = False ) : return _Base . get ( self , key , ttl = ttl , quiet = quiet , replica = replica , no_format = no_format ) | Obtain an object stored in Couchbase by given key . |
249,544 | def touch ( self , key , ttl = 0 ) : return _Base . touch ( self , key , ttl = ttl ) | Update a key s expiration time |
249,545 | def lock ( self , key , ttl = 0 ) : return _Base . lock ( self , key , ttl = ttl ) | Lock and retrieve a key - value entry in Couchbase . |
249,546 | def unlock ( self , key , cas ) : return _Base . unlock ( self , key , cas = cas ) | Unlock a Locked Key in Couchbase . |
249,547 | def remove ( self , key , cas = 0 , quiet = None , persist_to = 0 , replicate_to = 0 ) : return _Base . remove ( self , key , cas = cas , quiet = quiet , persist_to = persist_to , replicate_to = replicate_to ) | Remove the key - value entry for a given key in Couchbase . |
249,548 | def counter ( self , key , delta = 1 , initial = None , ttl = 0 ) : return _Base . counter ( self , key , delta = delta , initial = initial , ttl = ttl ) | Increment or decrement the numeric value of an item . |
249,549 | def mutate_in ( self , key , * specs , ** kwargs ) : sdflags = kwargs . pop ( '_sd_doc_flags' , 0 ) if kwargs . pop ( 'insert_doc' , False ) : sdflags |= _P . CMDSUBDOC_F_INSERT_DOC if kwargs . pop ( 'upsert_doc' , False ) : sdflags |= _P . CMDSUBDOC_F_UPSERT_DOC kwargs [ '_sd_doc_flags' ] = sdflags return super ( Bucket , self ) . mutate_in ( key , specs , ** kwargs ) | Perform multiple atomic modifications within a document . |
249,550 | def lookup_in ( self , key , * specs , ** kwargs ) : return super ( Bucket , self ) . lookup_in ( { key : specs } , ** kwargs ) | Atomically retrieve one or more paths from a document . |
249,551 | def retrieve_in ( self , key , * paths , ** kwargs ) : import couchbase . subdocument as SD return self . lookup_in ( key , * tuple ( SD . get ( x ) for x in paths ) , ** kwargs ) | Atomically fetch one or more paths from a document . |
249,552 | def stats ( self , keys = None , keystats = False ) : if keys and not isinstance ( keys , ( tuple , list ) ) : keys = ( keys , ) return self . _stats ( keys , keystats = keystats ) | Request server statistics . |
249,553 | def observe ( self , key , master_only = False ) : return _Base . observe ( self , key , master_only = master_only ) | Return storage information for a key . |
249,554 | def endure ( self , key , persist_to = - 1 , replicate_to = - 1 , cas = 0 , check_removed = False , timeout = 5.0 , interval = 0.010 ) : kv = { key : cas } rvs = self . endure_multi ( keys = kv , persist_to = persist_to , replicate_to = replicate_to , check_removed = check_removed , timeout = timeout , interval = interval ) return rvs [ key ] | Wait until a key has been distributed to one or more nodes |
249,555 | def endure_multi ( self , keys , persist_to = - 1 , replicate_to = - 1 , timeout = 5.0 , interval = 0.010 , check_removed = False ) : return _Base . endure_multi ( self , keys , persist_to = persist_to , replicate_to = replicate_to , timeout = timeout , interval = interval , check_removed = check_removed ) | Check durability requirements for multiple keys |
249,556 | def remove_multi ( self , kvs , quiet = None ) : return _Base . remove_multi ( self , kvs , quiet = quiet ) | Remove multiple items from the cluster |
249,557 | def counter_multi ( self , kvs , initial = None , delta = 1 , ttl = 0 ) : return _Base . counter_multi ( self , kvs , initial = initial , delta = delta , ttl = ttl ) | Perform counter operations on multiple items |
249,558 | def rget ( self , key , replica_index = None , quiet = None ) : if replica_index is not None : return _Base . _rgetix ( self , key , replica = replica_index , quiet = quiet ) else : return _Base . _rget ( self , key , quiet = quiet ) | Get an item from a replica node |
249,559 | def query ( self , design , view , use_devmode = False , ** kwargs ) : design = self . _mk_devmode ( design , use_devmode ) itercls = kwargs . pop ( 'itercls' , View ) return itercls ( self , design , view , ** kwargs ) | Query a pre - defined MapReduce view passing parameters . |
249,560 | def n1ql_query ( self , query , * args , ** kwargs ) : if not isinstance ( query , N1QLQuery ) : query = N1QLQuery ( query ) itercls = kwargs . pop ( 'itercls' , N1QLRequest ) return itercls ( query , self , * args , ** kwargs ) | Execute a N1QL query . |
249,561 | def analytics_query ( self , query , host , * args , ** kwargs ) : if not isinstance ( query , AnalyticsQuery ) : query = AnalyticsQuery ( query , * args , ** kwargs ) else : query . update ( * args , ** kwargs ) return couchbase . analytics . gen_request ( query , host , self ) | Execute an Analytics query . |
249,562 | def search ( self , index , query , ** kwargs ) : itercls = kwargs . pop ( 'itercls' , _FTS . SearchRequest ) iterargs = itercls . mk_kwargs ( kwargs ) params = kwargs . pop ( 'params' , _FTS . Params ( ** kwargs ) ) body = _FTS . make_search_body ( index , query , params ) return itercls ( body , self , ** iterargs ) | Perform full - text searches |
249,563 | def is_ssl ( self ) : mode = self . _cntl ( op = _LCB . LCB_CNTL_SSL_MODE , value_type = 'int' ) return mode & _LCB . LCB_SSL_ENABLED != 0 | Read - only boolean property indicating whether SSL is used for this connection . |
249,564 | def flush ( self ) : path = '/pools/default/buckets/{0}/controller/doFlush' path = path . format ( self . bucket ) return self . _http_request ( type = _LCB . LCB_HTTP_TYPE_MANAGEMENT , path = path , method = _LCB . LCB_HTTP_METHOD_POST ) | Clears the bucket s contents . |
249,565 | def map_add ( self , key , mapkey , value , create = False , ** kwargs ) : op = SD . upsert ( mapkey , value ) sdres = self . mutate_in ( key , op , ** kwargs ) return self . _wrap_dsop ( sdres ) | Set a value for a key in a map . |
249,566 | def map_get ( self , key , mapkey ) : op = SD . get ( mapkey ) sdres = self . lookup_in ( key , op ) return self . _wrap_dsop ( sdres , True ) | Retrieve a value from a map . |
249,567 | def map_remove ( self , key , mapkey , ** kwargs ) : op = SD . remove ( mapkey ) sdres = self . mutate_in ( key , op , ** kwargs ) return self . _wrap_dsop ( sdres ) | Remove an item from a map . |
249,568 | def map_size ( self , key ) : rv = self . get ( key ) return len ( rv . value ) | Get the number of items in the map . |
249,569 | def list_append ( self , key , value , create = False , ** kwargs ) : op = SD . array_append ( '' , value ) sdres = self . mutate_in ( key , op , ** kwargs ) return self . _wrap_dsop ( sdres ) | Add an item to the end of a list . |
249,570 | def list_prepend ( self , key , value , create = False , ** kwargs ) : op = SD . array_prepend ( '' , value ) sdres = self . mutate_in ( key , op , ** kwargs ) return self . _wrap_dsop ( sdres ) | Add an item to the beginning of a list . |
249,571 | def list_set ( self , key , index , value , ** kwargs ) : op = SD . replace ( '[{0}]' . format ( index ) , value ) sdres = self . mutate_in ( key , op , ** kwargs ) return self . _wrap_dsop ( sdres ) | Sets an item within a list at a given position . |
249,572 | def set_add ( self , key , value , create = False , ** kwargs ) : op = SD . array_addunique ( '' , value ) try : sdres = self . mutate_in ( key , op , ** kwargs ) return self . _wrap_dsop ( sdres ) except E . SubdocPathExistsError : pass | Add an item to a set if the item does not yet exist . |
249,573 | def set_remove ( self , key , value , ** kwargs ) : while True : rv = self . get ( key ) try : ix = rv . value . index ( value ) kwargs [ 'cas' ] = rv . cas return self . list_remove ( key , ix , ** kwargs ) except E . KeyExistsError : pass except ValueError : return | Remove an item from a set . |
249,574 | def list_remove ( self , key , index , ** kwargs ) : return self . map_remove ( key , '[{0}]' . format ( index ) , ** kwargs ) | Remove the element at a specific index from a list . |
249,575 | def queue_push ( self , key , value , create = False , ** kwargs ) : return self . list_prepend ( key , value , ** kwargs ) | Add an item to the end of a queue . |
249,576 | def queue_pop ( self , key , ** kwargs ) : while True : try : itm = self . list_get ( key , - 1 ) except IndexError : raise E . QueueEmpty kwargs [ 'cas' ] = itm . cas try : self . list_remove ( key , - 1 , ** kwargs ) return itm except E . KeyExistsError : pass except IndexError : raise E . QueueEmpty | Remove and return the first item queue . |
249,577 | def _callback ( self , mres ) : try : rows = self . _process_payload ( self . raw . rows ) if rows : self . on_rows ( rows ) if self . raw . done : self . on_done ( ) finally : if self . raw . done : self . _clear ( ) | This is invoked as the row callback . If rows is true then we are a row callback otherwise the request has ended and it s time to collect the other data |
249,578 | def create ( cls , name , email , cb ) : it = cls ( name , create_structure = True ) it . value [ 'email' ] = email cb . upsert_multi ( ItemSequence ( [ it ] ) ) return it | Create the basic structure of a player |
249,579 | def _doc_rev ( self , res ) : jstr = res . headers [ 'X-Couchbase-Meta' ] jobj = json . loads ( jstr ) return jobj [ 'rev' ] | Returns the rev id from the header |
249,580 | def design_create ( self , name , ddoc , use_devmode = True , syncwait = 0 ) : name = self . _cb . _mk_devmode ( name , use_devmode ) fqname = "_design/{0}" . format ( name ) if not isinstance ( ddoc , dict ) : ddoc = json . loads ( ddoc ) ddoc = ddoc . copy ( ) ddoc [ '_id' ] = fqname ddoc = json . dumps ( ddoc ) existing = None if syncwait : try : existing = self . design_get ( name , use_devmode = False ) except CouchbaseError : pass ret = self . _cb . _http_request ( type = _LCB . LCB_HTTP_TYPE_VIEW , path = fqname , method = _LCB . LCB_HTTP_METHOD_PUT , post_data = ddoc , content_type = "application/json" ) self . _design_poll ( name , 'add' , existing , syncwait , use_devmode = use_devmode ) return ret | Store a design document |
249,581 | def design_get ( self , name , use_devmode = True ) : name = self . _mk_devmode ( name , use_devmode ) existing = self . _http_request ( type = _LCB . LCB_HTTP_TYPE_VIEW , path = "_design/" + name , method = _LCB . LCB_HTTP_METHOD_GET , content_type = "application/json" ) return existing | Retrieve a design document |
249,582 | def design_delete ( self , name , use_devmode = True , syncwait = 0 ) : name = self . _mk_devmode ( name , use_devmode ) existing = None if syncwait : try : existing = self . design_get ( name , use_devmode = False ) except CouchbaseError : pass ret = self . _http_request ( type = _LCB . LCB_HTTP_TYPE_VIEW , path = "_design/" + name , method = _LCB . LCB_HTTP_METHOD_DELETE ) self . _design_poll ( name , 'del' , existing , syncwait ) return ret | Delete a design document |
249,583 | def design_list ( self ) : ret = self . _http_request ( type = _LCB . LCB_HTTP_TYPE_MANAGEMENT , path = "/pools/default/buckets/{0}/ddocs" . format ( self . _cb . bucket ) , method = _LCB . LCB_HTTP_METHOD_GET ) real_rows = { } for r in ret . value [ 'rows' ] : real_rows [ r [ 'doc' ] [ 'meta' ] [ 'id' ] ] = r [ 'doc' ] [ 'json' ] ret . value . clear ( ) ret . value . update ( real_rows ) return ret | List all design documents for the current bucket . |
249,584 | def n1ql_index_create ( self , ix , ** kwargs ) : defer = kwargs . pop ( 'defer' , False ) ignore_exists = kwargs . pop ( 'ignore_exists' , False ) primary = kwargs . pop ( 'primary' , False ) fields = kwargs . pop ( 'fields' , [ ] ) cond = kwargs . pop ( 'condition' , None ) if kwargs : raise TypeError ( 'Unknown keyword arguments' , kwargs ) info = self . _mk_index_def ( ix , primary ) if primary and fields : raise TypeError ( 'Cannot create primary index with explicit fields' ) elif not primary and not fields : raise ValueError ( 'Fields required for non-primary index' ) if fields : info . fields = fields if primary and info . name is N1QL_PRIMARY_INDEX : del info . name if cond : if primary : raise ValueError ( 'cannot specify condition for primary index' ) info . condition = cond options = { 'ignore_exists' : ignore_exists , 'defer' : defer } return IxmgmtRequest ( self . _cb , 'create' , info , ** options ) . execute ( ) | Create an index for use with N1QL . |
249,585 | def n1ql_index_create_primary ( self , defer = False , ignore_exists = False ) : return self . n1ql_index_create ( '' , defer = defer , primary = True , ignore_exists = ignore_exists ) | Create the primary index on the bucket . |
249,586 | def n1ql_index_drop ( self , ix , primary = False , ** kwargs ) : info = self . _mk_index_def ( ix , primary ) return IxmgmtRequest ( self . _cb , 'drop' , info , ** kwargs ) . execute ( ) | Delete an index from the cluster . |
249,587 | def n1ql_index_build_deferred ( self , other_buckets = False ) : info = N1qlIndex ( ) if not other_buckets : info . keyspace = self . _cb . bucket return IxmgmtRequest ( self . _cb , 'build' , info ) . execute ( ) | Instruct the server to begin building any previously deferred index definitions . |
249,588 | def n1ql_index_watch ( self , indexes , timeout = 30 , interval = 0.2 , watch_primary = False ) : kwargs = { 'timeout_us' : int ( timeout * 1000000 ) , 'interval_us' : int ( interval * 1000000 ) } ixlist = [ N1qlIndex . from_any ( x , self . _cb . bucket ) for x in indexes ] if watch_primary : ixlist . append ( N1qlIndex . from_any ( N1QL_PRIMARY_INDEX , self . _cb . bucket ) ) return IxmgmtRequest ( self . _cb , 'watch' , ixlist , ** kwargs ) . execute ( ) | Await completion of index building |
def _set_range_common(self, k_sugar, k_start, k_end, value):
    """Checks to see if the client-side convenience key is present, and if
    so converts the sugar convenience key into its real server-side
    equivalents.

    :param k_sugar: the client-side convenience key (combined range)
    :param k_start: the server-side key holding the range start
    :param k_end: the server-side key holding the range end
    :param value: a list/tuple of one or two endpoints, or UNSPEC/empty
        to clear the range
    :raise: ArgumentError on a bad type, a conflicting raw start/end key,
        or a wrong number of elements
    """
    # Only list/tuple (or the UNSPEC sentinel type) are valid range specs.
    if not isinstance(value, (list, tuple, _Unspec)):
        raise ArgumentError.pyexc(
            "Range specification for {0} must be a list, tuple or UNSPEC"
            .format(k_sugar))

    # The sugar key is mutually exclusive with the raw start/end keys; the
    # user must not have set either of them directly.
    if self._user_options.get(k_start, UNSPEC) is not UNSPEC or (
            self._user_options.get(k_end, UNSPEC) is not UNSPEC):
        raise ArgumentError.pyexc(
            "Cannot specify {0} with either {1} or {2}".format(
                k_sugar, k_start, k_end))

    if not value:
        # Empty (or UNSPEC) value: clear both server-side endpoints and
        # record the sugar key itself as unspecified.
        self._set_common(k_start, UNSPEC, set_user=False)
        self._set_common(k_end, UNSPEC, set_user=False)
        self._user_options[k_sugar] = UNSPEC
        return

    if len(value) not in (1, 2):
        raise ArgumentError.pyexc("Range specification "
                                  "must have one or two elements", value)

    # Copy before mutating so the caller's sequence is left untouched.
    value = value[::]
    if len(value) == 1:
        # Single endpoint given: leave the end of the range open.
        value.append(UNSPEC)

    # Write the real server-side keys; only the sugar key is recorded as
    # a user-visible option.
    for p, ix in ((k_start, 0), (k_end, 1)):
        self._set_common(p, value[ix], set_user=False)

    self._user_options[k_sugar] = value
def update(self, copy=False, **params):
    """Chained assignment operator.

    :param copy: if True, operate on (and return) a deep copy of this
        object rather than mutating it in place
    :param params: option names mapped to their new values
    :return: the updated object (``self``, or its copy when ``copy``)
    :raise: ArgumentError if an option is unknown and unrecognized
        options are not permitted
    """
    target = deepcopy(self) if copy else self
    for name, val in params.items():
        if hasattr(target, name):
            setattr(target, name, val)
        else:
            # Unknown option: either reject it outright or stash it via
            # the generic option setter when unrecognized keys are OK.
            if not target.unrecognized_ok:
                raise ArgumentError.pyexc("Unknown option", name)
            target._set_common(name, val)
    return target
def from_any(cls, params, **ctor_opts):
    """Creates a new Query object from input.

    :param params: an existing query instance (deep-copied), a dict of
        constructor options, or a pre-encoded query string
    :param ctor_opts: extra constructor options, merged under ``params``
        when it is a dict
    :return: a new query object
    :raise: ArgumentError for any other input type
    """
    if isinstance(params, cls):
        # Already the right type; hand back an independent copy.
        return deepcopy(params)
    elif isinstance(params, dict):
        ctor_opts.update(**params)
        if cls is QueryBase:
            # Infer the concrete query type from spatial-only range keys.
            if ('bbox' in params or 'start_range' in params or
                    'end_range' in params):
                return SpatialQuery(**ctor_opts)
            else:
                return ViewQuery(**ctor_opts)
        # BUG FIX: previously a dict passed to a concrete subclass fell
        # out of this branch and the function silently returned None.
        return cls(**ctor_opts)
    elif isinstance(params, basestring):
        ret = cls()
        ret._base_str = params
        return ret
    else:
        raise ArgumentError.pyexc("Params must be Query, dict, or string")
def encoded(self):
    """Returns an encoded form of the query.

    The encoding is computed lazily on first access and cached; any
    pre-set base string is prepended to the cached encoding.
    """
    if not self._encoded:
        # Lazily materialize and cache the encoded representation.
        self._encoded = self._encode()

    if self._base_str:
        return '&'.join((self._base_str, self._encoded))
    return self._encoded
def registerDeferred(self, event, d):
    """Register a Deferred to be fired at the firing of a specific event.

    :param event: the name of the event type to hook
    :param d: the Deferred to schedule
    :raise: ValueError if the event type is unknown
    """
    try:
        evq = self._evq[event]
    except KeyError:
        # Keep the try body to the lookup only: a KeyError raised inside
        # schedule() must not be misreported as an unknown event type.
        raise ValueError("No such event type", event)
    evq.schedule(d)
def queryEx(self, viewcls, *args, **kwargs):
    """Query a view, with the *viewcls* instance receiving events of the
    query as they arrive.

    :param viewcls: the view iterator class to instantiate
    :return: the view iterator; it is started once the connection is up
    """
    kwargs['itercls'] = viewcls
    view_iter = super(AsyncBucket, self).query(*args, **kwargs)
    if self.connected:
        view_iter.start()
    else:
        # Delay starting the query until the bucket connection completes.
        self.connect().addCallback(lambda _: view_iter.start())
    return view_iter
def n1qlQueryEx(self, cls, *args, **kwargs):
    """Execute a N1QL statement, providing a custom handler for rows.

    :param cls: the iterator class receiving rows as they arrive
    :return: the query request; it is started once the connection is up
    """
    kwargs['itercls'] = cls
    req = super(AsyncBucket, self).n1ql_query(*args, **kwargs)
    if self.connected:
        req.start()
    else:
        # Delay starting the query until the bucket connection completes.
        self.connect().addCallback(lambda _: req.start())
    return req
def n1qlQueryAll(self, *args, **kwargs):
    """Execute a N1QL query, retrieving all rows.

    :return: a Deferred firing with the batched result set
    """
    if not self.connected:
        # Not connected yet: reissue this same call once connected.
        retry = lambda _: self.n1qlQueryAll(*args, **kwargs)
        return self.connect().addCallback(retry)

    kwargs['itercls'] = BatchedN1QLRequest
    req = super(RawBucket, self).n1ql_query(*args, **kwargs)
    req.start()
    return req._getDeferred()
def _wrap(self, meth, *args, **kwargs):
    """Calls a given method with the appropriate arguments, or defers
    such a call until the instance has been connected.

    :param meth: the unbound method to invoke with this instance
    :return: a deferred wrapping the operation result, or the deferred
        returned by the connect-time scheduler
    """
    if self.connected:
        return self.defer(meth(self, *args, **kwargs))
    # Not yet connected: schedule this same wrapper to run post-connect.
    return self._connectSchedule(self._wrap, meth, *args, **kwargs)
def get_decode_format(flags):
    """Returns a tuple of (format, recognized) for the given item flags.

    Common (cross-SDK) flag bits take precedence over legacy flag bits;
    if neither maps to a known format, fall back to raw bytes and report
    the format as unrecognized.

    :param flags: the raw flags value stored with the item
    :return: tuple of (unified format constant, bool recognized)
    """
    c_flags = flags & FMT_COMMON_MASK
    l_flags = flags & FMT_LEGACY_MASK

    if c_flags:
        if c_flags in COMMON_FORMATS:
            return COMMON2UNIFIED[c_flags], True
    # Idiom fix: was `if not l_flags in LEGACY_FORMATS`.
    elif l_flags in LEGACY_FORMATS:
        return LEGACY2UNIFIED[l_flags], True
    # Unknown flag bits: treat the value as opaque bytes.
    return FMT_BYTES, False
def bucket_create(self, name, bucket_type='couchbase', bucket_password='',
                  replicas=0, ram_quota=1024, flush_enabled=False):
    """Create a new bucket.

    :param name: the name of the new bucket
    :param bucket_type: one of 'couchbase', 'membase', 'ephemeral', or
        'memcached'
    :param bucket_password: SASL password for the bucket ('' for none)
    :param replicas: number of replicas (ignored for memcached buckets)
    :param ram_quota: RAM quota for the bucket, in megabytes
    :param flush_enabled: whether the flush command is enabled
    :return: the result of the management HTTP request
    """
    form = {
        'name': name,
        'bucketType': bucket_type,
        'authType': 'sasl',
        'saslPassword': bucket_password if bucket_password else '',
        'flushEnabled': int(flush_enabled),
        'ramQuotaMB': ram_quota,
    }
    # Replica counts only apply to persistent/ephemeral bucket types.
    if bucket_type in ('couchbase', 'membase', 'ephemeral'):
        form['replicaNumber'] = replicas
    return self.http_request(
        path='/pools/default/buckets',
        method='POST',
        content=self._mk_formstr(form),
        content_type='application/x-www-form-urlencoded')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.