def _fix_up_fields(cls):
    cls._arguments = dict()
    if cls.__module__ == __name__:
        return
    for name in set(dir(cls)):
        attr = getattr(cls, name, None)
        if isinstance(attr, BaseArgument):
            if name.startswith('_'):
                raise TypeError(
                    "Endpoint argument %s cannot begin with an underscore, "
                    "as these attributes are reserved for instance variables "
                    "of the endpoint object, rather than for arguments to "
                    "your HTTP Endpoint." % name)
            attr._fix_up(cls, name)
            cls._arguments[attr.name] = attr
def _execute(self, request, **kwargs):
    try:
        self._create_context(request)
        self._authenticate()
        context = get_current_context()
        self._parse_args()
        if hasattr(self, '_before_handlers') and isinstance(self._before_handlers, (list, tuple)):
            for handler in self._before_handlers:
                handler(context)
        context.handler_result = self._handle(context)
        if hasattr(self, '_after_handlers') and isinstance(self._after_handlers, (list, tuple)):
            for handler in self._after_handlers:
                handler(context)
        self._render()
        response = context.response
    except AuthenticationError as e:
        if hasattr(e, 'message') and e.message is not None:
            message = e.message
        else:
            message = "You don't have permission to do that."
        err = APIError.Forbidden(message)
        response = self._response_class(*err.response)
        response.headers["Content-Type"] = 'application/json'
    except ArgumentError as e:
        err = APIError.UnprocessableEntity(e.message)
        response = self._response_class(*err.response)
        response.headers["Content-Type"] = 'application/json'
    except APIError as e:
        response = self._response_class(*e.response)
        response.headers["Content-Type"] = 'application/json'
    except PaleRaisedResponse as r:
        response = self._response_class(*r.response)
        response.headers["Content-Type"] = 'application/json'
    except Exception as e:
        logging.exception("Failed to handle Pale Endpoint %s: %r",
                          self.__class__.__name__, e)
        err = APIError.Exception(repr(e))
        response = self._response_class(*err.response)
        response.headers["Content-Type"] = 'application/json'
    allow_cors = getattr(self, "_allow_cors", None)
    if allow_cors is True:
        response.headers['Access-Control-Allow-Origin'] = '*'
    elif isinstance(allow_cors, basestring):
        response.headers['Access-Control-Allow-Origin'] = allow_cors
    context.response = response
    try:
        if hasattr(self, '_after_response_handlers') and isinstance(self._after_response_handlers, (list, tuple)):
            for handler in self._after_response_handlers:
                handler(context, response)
    except Exception as e:
        logging.exception("Failed to process _after_response_handlers for Endpoint %s",
                          self.__class__.__name__)
        raise
    return response
def construct_concierge_header(self, url):
    concierge_request_header = etree.Element(
        etree.QName(XHTML_NAMESPACE, "ConciergeRequestHeader"),
        nsmap={'sch': XHTML_NAMESPACE})
    if self.session_id:
        session = etree.SubElement(
            concierge_request_header,
            etree.QName(XHTML_NAMESPACE, "SessionId"))
        session.text = self.session_id
    access_key = etree.SubElement(
        concierge_request_header,
        etree.QName(XHTML_NAMESPACE, "AccessKeyId"))
    access_key.text = self.access_key
    association_id = etree.SubElement(
        concierge_request_header,
        etree.QName(XHTML_NAMESPACE, "AssociationId"))
    association_id.text = self.association_id
    signature = etree.SubElement(
        concierge_request_header,
        etree.QName(XHTML_NAMESPACE, "Signature"))
    signature.text = self.get_hashed_signature(url=url)
    return concierge_request_header

def options_string_builder(option_mapping, args):
    options_string = ""
    for option, flag in option_mapping.items():
        if option in args:
            options_string += str(" %s %s" % (flag, str(args[option])))
    return options_string
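# Usage sketch (illustrative values, not from the original source); flags are
# appended in the mapping's iteration order:
# options_string_builder({"gain": "-g", "device": "-d"}, {"gain": 40, "device": 0})
# -> " -g 40 -d 0"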
def build_kal_scan_band_string(kal_bin, band, args):
    option_mapping = {"gain": "-g", "device": "-d", "error": "-e"}
    if not sanity.scan_band_is_valid(band):
        # The original format string was missing the "s" conversion ("%" % band).
        err_txt = "Unsupported band designation: %s" % band
        raise ValueError(err_txt)
    base_string = "%s -v -s %s" % (kal_bin, band)
    base_string += options_string_builder(option_mapping, args)
    return base_string
def build_kal_scan_channel_string(kal_bin, channel, args):
    option_mapping = {"gain": "-g", "device": "-d", "error": "-e"}
    base_string = "%s -v -c %s" % (kal_bin, channel)
    base_string += options_string_builder(option_mapping, args)
    return base_string

def determine_final_freq(base, direction, modifier):
    result = 0
    if direction == "+":
        result = base + modifier
    elif direction == "-":
        result = base - modifier
    return result

def to_eng(num_in):
    x = decimal.Decimal(str(num_in))
    eng_not = x.normalize().to_eng_string()
    return eng_not
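# Usage sketch: engineering notation via the standard decimal module
# (exponent rendered as a multiple of three):
# to_eng(1000000) -> '1E+6'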
def determine_device(kal_out):
    device = ""
    while device == "":
        for line in kal_out.splitlines():
            if "Using device " in line:
                device = str(line.split(' ', 2)[-1])
        if device == "":
            device = None
    return device

def extract_value_from_output(canary, split_offset, kal_out):
    retval = ""
    while retval == "":
        for line in kal_out.splitlines():
            if canary in line:
                retval = str(line.split()[split_offset])
        if retval == "":
            retval = None
    return retval
def determine_chan_detect_threshold(kal_out):
    channel_detect_threshold = ""
    while channel_detect_threshold == "":
        for line in kal_out.splitlines():
            if "channel detect threshold: " in line:
                channel_detect_threshold = str(line.split()[-1])
        if channel_detect_threshold == "":
            # The original message said "sample rate", a copy-paste slip.
            print("Unable to parse channel detect threshold")
            channel_detect_threshold = None
    return channel_detect_threshold
def determine_band_channel(kal_out):
    band = ""
    channel = ""
    tgt_freq = ""
    while band == "":
        for line in kal_out.splitlines():
            if "Using " in line and " channel " in line:
                band = str(line.split()[1])
                channel = str(line.split()[3])
                tgt_freq = str(line.split()[4]).replace("(", "").replace(")", "")
        if band == "":
            band = None
    return (band, channel, tgt_freq)

def parse_kal_scan(kal_out):
    kal_data = []
    scan_band = determine_scan_band(kal_out)
    scan_gain = determine_scan_gain(kal_out)
    scan_device = determine_device(kal_out)
    sample_rate = determine_sample_rate(kal_out)
    chan_detect_threshold = determine_chan_detect_threshold(kal_out)
    for line in kal_out.splitlines():
        if "chan:" in line:
            p_line = line.split(' ')
            chan = str(p_line[1])
            modifier = str(p_line[3])
            power = str(p_line[5])
            mod_raw = str(p_line[4]).replace(')\tpower:', '')
            base_raw = str((p_line[2]).replace('(', ''))
            mod_freq = herz_me(mod_raw)
            base_freq = herz_me(base_raw)
            final_freq = to_eng(determine_final_freq(base_freq, modifier, mod_freq))
            kal_run = {"channel": chan,
                       "base_freq": base_freq,
                       "mod_freq": mod_freq,
                       "modifier": modifier,
                       "final_freq": final_freq,
                       "power": power,
                       "band": scan_band,
                       "gain": scan_gain,
                       "device": scan_device,
                       "sample_rate": sample_rate,
                       "channel_detect_threshold": chan_detect_threshold}
            kal_data.append(kal_run.copy())
    return kal_data

def parse_kal_channel(kal_out):
    scan_band, scan_channel, tgt_freq = determine_band_channel(kal_out)
    kal_data = {"device": determine_device(kal_out),
                "sample_rate": determine_sample_rate(kal_out),
                "gain": determine_scan_gain(kal_out),
                "band": scan_band,
                "channel": scan_channel,
                "frequency": tgt_freq,
                "avg_absolute_error": determine_avg_absolute_error(kal_out),
                "measurements": get_measurements_from_kal_scan(kal_out),
                "raw_scan_result": kal_out}
    return kal_data

def get_measurements_from_kal_scan(kal_out):
    result = []
    for line in kal_out.splitlines():
        if "offset " in line:
            p_line = line.split(' ')
            result.append(p_line[-1])
    return result
def render(self, obj, name, context):
    if self.value_lambda is not None:
        val = self.value_lambda(obj)
    else:
        attr_name = name
        if self.property_name is not None:
            attr_name = self.property_name
        if isinstance(obj, dict):
            val = obj.get(attr_name, None)
        else:
            val = getattr(obj, attr_name, None)
        if callable(val):
            try:
                val = val()
            except:
                logging.exception("Attempted to call `%s` on obj of type %s.",
                                  attr_name, type(obj))
                raise
    return val

def doc_dict(self):
    doc = {'type': self.value_type,
           'description': self.description,
           'extended_description': self.details}
    return doc

def capability(self, cap_name):
    if cap_name in self.__class_capabilities__:
        function_name = self.__class_capabilities__[cap_name]
        return getattr(self, function_name)

def has_capabilities(self, *cap_names):
    for name in cap_names:
        if name not in self.__class_capabilities__:
            return False
    return True
def add_entity_errors(self, property_name, direct_errors=None, schema_errors=None):
    if direct_errors is None and schema_errors is None:
        return self
    if direct_errors is not None:
        if property_name not in self.errors:
            self.errors[property_name] = dict()
        if 'direct' not in self.errors[property_name]:
            self.errors[property_name]['direct'] = []
        if type(direct_errors) is not list:
            direct_errors = [direct_errors]
        for error in direct_errors:
            if not isinstance(error, Error):
                err = 'Error must be of type {}'
                raise x.InvalidErrorType(err.format(Error))
            self.errors[property_name]['direct'].append(error)
    if schema_errors is not None:
        if isinstance(schema_errors, Result):
            schema_errors = schema_errors.errors
        if not schema_errors:
            return self
        if property_name not in self.errors:
            self.errors[property_name] = dict()
        if 'schema' not in self.errors[property_name]:
            self.errors[property_name]['schema'] = schema_errors
        else:
            self.errors[property_name]['schema'] = self.merge_errors(
                self.errors[property_name]['schema'], schema_errors)
    return self

def add_collection_errors(self, property_name, direct_errors=None, collection_errors=None):
    if direct_errors is None and collection_errors is None:
        return self
    if direct_errors is not None:
        if type(direct_errors) is not list:
            direct_errors = [direct_errors]
        if property_name not in self.errors:
            self.errors[property_name] = dict()
        if 'direct' not in self.errors[property_name]:
            self.errors[property_name]['direct'] = []
        for error in direct_errors:
            if not isinstance(error, Error):
                err = 'Error must be of type {}'
                raise x.InvalidErrorType(err.format(Error))
            self.errors[property_name]['direct'].append(error)
    if collection_errors:
        enum = enumerate(collection_errors)
        errors_dict = {i: e for i, e in enum if not bool(e)}
        if not errors_dict:
            return self
        if property_name not in self.errors:
            self.errors[property_name] = dict()
        if 'collection' not in self.errors[property_name]:
            self.errors[property_name]['collection'] = errors_dict
        else:
            local = self.errors[property_name]['collection']
            remote = errors_dict
            for index, result in remote.items():
                if index not in local:
                    self.errors[property_name]['collection'][index] = result
                else:
                    merged = self.merge_errors(local[index].errors, remote[index].errors)
                    self.errors[property_name]['collection'][index] = merged
    return self

def merge_errors(self, errors_local, errors_remote):
    for prop in errors_remote:
        if prop not in errors_local:
            errors_local[prop] = errors_remote[prop]
            continue
        local = errors_local[prop]
        local = local.errors if isinstance(local, Result) else local
        remote = errors_remote[prop]
        remote = remote.errors if isinstance(remote, Result) else remote
        if not isinstance(local, type(remote)):
            msg = 'Type mismatch on property [{}] when merging errors. '
            msg += 'Unable to merge [{}] into [{}]'
            raise x.UnableToMergeResultsType(msg.format(
                prop, type(errors_remote[prop]), type(self.errors[prop])))
        mismatch = 'Unable to merge nested entity errors with nested '
        mismatch += 'collection errors on property [{}]'
        if 'schema' in local and 'collection' in remote:
            raise x.UnableToMergeResultsType(mismatch.format(prop))
        if 'collection' in local and 'schema' in remote:
            raise x.UnableToMergeResultsType(mismatch.format(prop))
        if type(remote) is list:
            errors_local[prop].extend(remote)
            continue
        if 'direct' in remote and 'direct' in local:
            errors_local[prop]['direct'].extend(remote['direct'])
        if 'schema' in remote and 'schema' in local:
            errors_local[prop]['schema'] = self.merge_errors(
                errors_local[prop]['schema'], remote['schema'])
        if 'collection' in remote and 'collection' in local:
            for index, result in remote['collection'].items():
                if index not in local['collection']:
                    errors_local[prop]['collection'][index] = result
                else:
                    merged = self.merge_errors(
                        errors_local[prop]['collection'][index].errors,
                        errors_remote[prop]['collection'][index].errors,
                    )
                    errors_local[prop]['collection'][index] = merged
    return errors_local

def merge(self, another):
    if isinstance(another, Result):
        another = another.errors
    self.errors = self.merge_errors(self.errors, another)

def get_messages(self, locale=None):
    if locale is None:
        locale = self.locale
    if self.translator:
        def translate(error):
            return self.translator.translate(error, locale)
    else:
        def translate(error):
            return error
    errors = deepcopy(self.errors)
    errors = self._translate_errors(errors, translate)
    return errors

def _translate_errors(self, errors, translate):
    for prop in errors:
        prop_errors = errors[prop]
        if type(prop_errors) is list:
            for index, error in enumerate(prop_errors):
                message = translate(error.message)
                message = self.format_error(message, error.kwargs)
                errors[prop][index] = message
        if type(prop_errors) is dict and 'direct' in prop_errors:
            for index, error in enumerate(prop_errors['direct']):
                message = translate(error.message)
                message = self.format_error(message, error.kwargs)
                errors[prop]['direct'][index] = message
        if type(prop_errors) is dict and 'schema' in prop_errors:
            errors[prop]['schema'] = self._translate_errors(prop_errors['schema'], translate)
        if type(prop_errors) is dict and 'collection' in prop_errors:
            translated = dict()
            for index, result in prop_errors['collection'].items():
                translated[index] = self._translate_errors(result.errors, translate)
            errors[prop]['collection'] = translated
    return errors
def make_url(self, path, api_root=u'/v2/'):
    return urljoin(urljoin(self.url, api_root), path)
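# Usage sketch of the double urljoin (the base URL is illustrative):
# urljoin(urljoin('http://127.0.0.1:2379', '/v2/'), 'keys/foo')
# -> 'http://127.0.0.1:2379/v2/keys/foo'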
def make_key_url(self, key):
    if type(key) is bytes:
        key = key.decode('utf-8')
    buf = io.StringIO()
    buf.write(u'keys')
    if not key.startswith(u'/'):
        buf.write(u'/')
    buf.write(key)
    return self.make_url(buf.getvalue())

def get(self, key, recursive=False, sorted=False, quorum=False,
        wait=False, wait_index=None, timeout=None):
    url = self.make_key_url(key)
    params = self.build_args({
        'recursive': (bool, recursive or None),
        'sorted': (bool, sorted or None),
        'quorum': (bool, quorum or None),
        'wait': (bool, wait or None),
        'waitIndex': (int, wait_index),
    })
    if timeout is None:
        while True:
            try:
                try:
                    res = self.session.get(url, params=params)
                except:
                    self.erred()
            except (TimedOut, ChunkedEncodingError):
                continue
            else:
                break
    else:
        try:
            res = self.session.get(url, params=params, timeout=timeout)
        except ChunkedEncodingError:
            raise TimedOut
        except:
            self.erred()
    return self.wrap_response(res)

def delete(self, key, dir=False, recursive=False, prev_value=None,
           prev_index=None, timeout=None):
    url = self.make_key_url(key)
    params = self.build_args({
        'dir': (bool, dir or None),
        'recursive': (bool, recursive or None),
        'prevValue': (six.text_type, prev_value),
        'prevIndex': (int, prev_index),
    })
    try:
        res = self.session.delete(url, params=params, timeout=timeout)
    except:
        self.erred()
    return self.wrap_response(res)
def login_to_portal(username, password, client, retries=2, delay=0):
    if not client.session_id:
        client.request_session()
    concierge_request_header = client.construct_concierge_header(
        url=("http://membersuite.com/contracts/IConciergeAPIService/"
             "LoginToPortal"))
    attempts = 0
    while attempts < retries:
        if attempts:
            time.sleep(delay)
        result = client.client.service.LoginToPortal(
            _soapheaders=[concierge_request_header],
            portalUserName=username,
            portalPassword=password)
        login_to_portal_result = result["body"]["LoginToPortalResult"]
        if login_to_portal_result["Success"]:
            portal_user = login_to_portal_result["ResultValue"]["PortalUser"]
            session_id = get_session_id(result=result)
            return PortalUser(membersuite_object_data=portal_user,
                              session_id=session_id)
        else:
            attempts += 1
            try:
                error_code = login_to_portal_result["Errors"]["ConciergeError"][0]["Code"]
            except IndexError:
                continue
            else:
                if attempts < retries and error_code == "GeneralException":
                    continue
            raise LoginToPortalError(result=result)
def logout(client):
    if not client.session_id:
        client.request_session()
    concierge_request_header = client.construct_concierge_header(
        url=("http://membersuite.com/contracts/IConciergeAPIService/"
             "Logout"))
    logout_result = client.client.service.Logout(
        _soapheaders=[concierge_request_header])
    result = logout_result["body"]["LogoutResult"]
    if result["SessionID"] is None:
        client.session_id = None
    else:
        raise LogoutError(result=result)

def get_user_for_membersuite_entity(membersuite_entity):
    user = None
    user_created = False
    user_username = generate_username(membersuite_entity)
    try:
        user = User.objects.get(username=user_username)
    except User.DoesNotExist:
        pass
    if not user:
        try:
            user = User.objects.filter(email=membersuite_entity.email_address)[0]
        except IndexError:
            pass
    if not user:
        user = User.objects.create(
            username=user_username,
            email=membersuite_entity.email_address,
            first_name=membersuite_entity.first_name,
            last_name=membersuite_entity.last_name)
        user_created = True
    return user, user_created
def add_validator(self, validator):
    if not isinstance(validator, AbstractValidator):
        err = 'Validator must be of type {}'.format(AbstractValidator)
        raise InvalidValidator(err)
    self.validators.append(validator)
    return self

def filter(self, value=None, model=None, context=None):
    if value is None:
        return value
    for filter_obj in self.filters:
        value = filter_obj.filter(
            value=value,
            model=model,
            context=context if self.use_context else None)
    return value

def validate(self, value=None, model=None, context=None):
    errors = []
    for validator in self.validators:
        if value is None and not isinstance(validator, Required):
            continue
        error = validator.run(
            value=value,
            model=model,
            context=context if self.use_context else None)
        if error:
            errors.append(error)
    return errors

def filter_with_schema(self, model=None, context=None):
    if model is None or self.schema is None:
        return
    self._schema.filter(
        model=model,
        context=context if self.use_context else None)

def validate_with_schema(self, model=None, context=None):
    if self._schema is None or model is None:
        return
    result = self._schema.validate(
        model=model,
        context=context if self.use_context else None)
    return result

def filter_with_schema(self, collection=None, context=None):
    if collection is None or self.schema is None:
        return
    try:
        for item in collection:
            self._schema.filter(
                model=item,
                context=context if self.use_context else None)
    except TypeError:
        pass

def validate_with_schema(self, collection=None, context=None):
    if self._schema is None or not collection:
        return
    result = []
    try:
        for index, item in enumerate(collection):
            item_result = self._schema.validate(
                model=item,
                context=context if self.use_context else None)
            result.append(item_result)
    except TypeError:
        pass
    return result

def json_based_stable_hash(obj):
    encoded_str = json.dumps(
        obj=obj,
        skipkeys=False,
        ensure_ascii=False,
        check_circular=True,
        allow_nan=True,
        cls=None,
        indent=0,
        separators=(',', ':'),
        default=None,
        sort_keys=True,
    ).encode('utf-8')
    return hashlib.sha256(encoded_str).hexdigest()
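# Usage sketch: sort_keys=True makes the digest independent of dict insertion order.
# json_based_stable_hash({'a': 1, 'b': 2}) == json_based_stable_hash({'b': 2, 'a': 1})  # True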
def read_request_line(self, request_line):
    request = self.__request_cls.parse_request_line(self, request_line)
    protocol_version = self.protocol_version()
    if protocol_version == '0.9':
        if request.method() != 'GET':
            raise Exception('HTTP/0.9 standard violation')
    elif protocol_version == '1.0' or protocol_version == '1.1':
        pass
    elif protocol_version == '2':
        pass
    else:
        raise RuntimeError('Unsupported HTTP-protocol')

def metaclass(*metaclasses):
    def _inner(cls):
        metabases = tuple(collections.OrderedDict(
            (c, None) for c in (metaclasses + (type(cls),))).keys())
        _Meta = metabases[0]
        for base in metabases[1:]:
            class _Meta(base, _Meta):
                pass
        return six.add_metaclass(_Meta)(cls)
    return _inner
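# Usage sketch (MetaA and MetaB are hypothetical metaclasses, not from the
# original source): combine several metaclasses on a single class.
# @metaclass(MetaA, MetaB)
# class Widget(object):
#     pass
# type(Widget) is then a dynamically built subclass of MetaA, MetaB and the
# original metaclass of Widget.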
def get_attrition_in_years(self):
    attrition_of_nets = self.itn.find("attritionOfNets")
    function = attrition_of_nets.attrib["function"]
    if function != "step":
        return None
    L = attrition_of_nets.attrib["L"]
    return L

def add(self, intervention, name=None):
    if self.et is None:
        return
    assert isinstance(intervention, six.string_types)
    et = ElementTree.fromstring(intervention)
    vector_pop = VectorPopIntervention(et)
    assert isinstance(vector_pop.name, six.string_types)
    if name is not None:
        assert isinstance(name, six.string_types)
        et.attrib["name"] = name
    index = len(self.et.findall("intervention"))
    self.et.insert(index, et)

def add(self, value):
    index = len(self.__history)
    self.__history.append(value)
    return index

def start_session(self):
    self.__current_row = ''
    self.__history_mode = False
    self.__editable_history = deepcopy(self.__history)
    self.__prompt_show = True
    self.refresh_window()

def fin_session(self):
    self.__prompt_show = False
    self.__history.add(self.row())
    self.exec(self.row())

def data(self, previous_data=False, prompt=False, console_row=False,
         console_row_to_cursor=False, console_row_from_cursor=False):
    result = ''
    if previous_data:
        result += self.__previous_data
    if prompt or console_row or console_row_to_cursor:
        result += self.console().prompt()
    if console_row or (console_row_from_cursor and console_row_to_cursor):
        result += self.console().row()
    elif console_row_to_cursor:
        result += self.console().row()[:self.cursor()]
    elif console_row_from_cursor:
        result += self.console().row()[self.cursor():]
    return result

def write_data(self, data, start_position=0):
    if len(data) > self.height():
        raise ValueError('Data too long (too many strings)')
    for i in range(len(data)):
        self.write_line(start_position + i, data[i])

def write_feedback(self, feedback, cr=True):
    self.__previous_data += feedback
    if cr is True:
        self.__previous_data += '\n'

def refresh(self, prompt_show=True):
    self.clear()
    for drawer in self.__drawers:
        if drawer.suitable(self, prompt_show=prompt_show):
            drawer.draw(self, prompt_show=prompt_show)
            return
    raise RuntimeError('No suitable drawer was found')

def is_dir(path):
    try:
        return path.expanduser().absolute().is_dir()
    except AttributeError:
        return os.path.isdir(os.path.abspath(os.path.expanduser(str(path))))

def is_file(path):
    try:
        return path.expanduser().absolute().is_file()
    except AttributeError:
        return os.path.isfile(os.path.abspath(os.path.expanduser(str(path))))

def exists(path):
    try:
        return path.expanduser().absolute().exists()
    except AttributeError:
        return os.path.exists(os.path.abspath(os.path.expanduser(str(path))))
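# Usage sketch: the three path helpers above accept either a pathlib.Path or a
# plain string (the path values are illustrative).
# exists(pathlib.Path('~'))  # takes the pathlib branch
# exists('~')                # str has no .expanduser(), so the os.path fallback runs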
def enableHook(self, msgObj):
    self.killListIdx = len(qte_global.kill_list) - 2
    self.qteMain.qtesigKeyseqComplete.connect(self.disableHook)

def cursorPositionChangedEvent(self):
    qteWidget = self.sender()
    tc = qteWidget.textCursor()
    origin = tc.position()
    qteWidget.cursorPositionChanged.disconnect(self.cursorPositionChangedEvent)
    self.qteRemoveHighlighting(qteWidget)
    qteWidget.cursorPositionChanged.connect(self.cursorPositionChangedEvent)
    if origin >= len(qteWidget.toPlainText()):
        return
    else:
        char = qteWidget.toPlainText()[origin]
        if char not in self.charToHighlight:
            return
    qteWidget.cursorPositionChanged.disconnect(self.cursorPositionChangedEvent)
    if char == self.charToHighlight[0]:
        start = origin
        stop = qteWidget.toPlainText().find(self.charToHighlight[1], start + 1)
    else:
        stop = origin
        start = qteWidget.toPlainText().rfind(self.charToHighlight[0], 0, stop)
    oldCharFormats = self.highlightCharacters(qteWidget, (start, stop),
                                              QtCore.Qt.blue, 100)
    data = self.qteMacroData(qteWidget)
    data.matchingPositions = (start, stop)
    data.oldCharFormats = oldCharFormats
    self.qteSaveMacroData(data, qteWidget)
    qteWidget.cursorPositionChanged.connect(self.cursorPositionChangedEvent)

def qteRemoveHighlighting(self, widgetObj):
    data = self.qteMacroData(widgetObj)
    if not data:
        return
    if not data.matchingPositions:
        return
    self.highlightCharacters(widgetObj, data.matchingPositions,
                             QtCore.Qt.black, 50, data.oldCharFormats)
    data.matchingPositions = None
    data.oldCharFormats = None
    self.qteSaveMacroData(data, widgetObj)
def highlightCharacters(self, widgetObj, setPos, colorCode, fontWeight, charFormat=None):
    textCursor = widgetObj.textCursor()
    oldPos = textCursor.position()
    retVal = []
    for ii, pos in enumerate(setPos):
        pos = setPos[ii]
        if pos < 0:
            retVal.append(None)
            continue
        textCursor.setPosition(pos)
        retVal.append(textCursor.charFormat())
        if charFormat:
            fmt = charFormat[ii]
        else:
            fmt = textCursor.charFormat()
            myBrush = fmt.foreground()
            myBrush.setColor(colorCode)
            myBrush.setStyle(QtCore.Qt.SolidPattern)
            fmt.setForeground(myBrush)
            fmt.setFontWeight(fontWeight)
        textCursor.movePosition(QtGui.QTextCursor.NextCharacter,
                                QtGui.QTextCursor.KeepAnchor)
        textCursor.setCharFormat(fmt)
    textCursor.setPosition(oldPos)
    widgetObj.setTextCursor(textCursor)
    return retVal
def scenarios(self, generate_seed=False):
    seed = prime_numbers(1000)
    sweeps_all = self.experiment["sweeps"].keys()
    if "combinations" in self.experiment:
        if isinstance(self.experiment["combinations"], list):
            combinations_in_experiment = {" ": self.experiment["combinations"]}
        else:
            combinations_in_experiment = self.experiment["combinations"]
    else:
        combinations_in_experiment = dict()
    all_combinations_sweeps = []
    all_combinations = []
    for key, combinations_ in combinations_in_experiment.items():
        if not combinations_:
            combinations_sweeps = []
            combinations = [[]]
        else:
            combinations_sweeps = combinations_[0]
            combinations = combinations_[1:]
        for item in combinations_sweeps:
            all_combinations_sweeps.append(item)
        all_combinations.append((combinations_sweeps, combinations))
    sweeps_fully_factorial = list(set(sweeps_all) - set(all_combinations_sweeps))
    for sweep in sweeps_fully_factorial:
        all_combinations.append(
            ([sweep], [[x] for x in self.experiment["sweeps"][sweep].keys()]))
    red_iter = 0
    while len(all_combinations) > 1:
        comb1 = all_combinations[0]
        comb2 = all_combinations[1]
        new_sweeps = comb1[0] + comb2[0]
        new_combinations = [x + y for x in comb1[1] for y in comb2[1]]
        all_combinations = [(new_sweeps, new_combinations)] + all_combinations[2:]
        red_iter += 1
    sweep_names = all_combinations[0][0]
    combinations = all_combinations[0][1]
    for combination in combinations:
        scenario = Scenario(self._apply_combination(
            self.experiment["base"], sweep_names, combination))
        scenario.parameters = dict(zip(sweep_names, combination))
        if generate_seed:
            if "@seed@" in scenario.xml:
                scenario.xml = scenario.xml.replace("@seed@", str(next(seed)))
            else:
                raise RuntimeError("@seed@ placeholder is not found")
        yield scenario
def lvm_info(self, name=None):
    cmd = [] if self.sudo() is False else ['sudo']
    cmd.extend([self.command(), '-c'])
    if name is not None:
        cmd.append(name)
    output = subprocess.check_output(cmd, timeout=self.cmd_timeout())
    output = output.decode()
    result = []
    fields_count = self.fields_count()
    for line in output.split('\n'):
        line = line.strip()
        fields = line.split(':')
        if len(fields) == fields_count:
            result.append(fields)
    if name is not None and len(result) != 1:
        raise RuntimeError('Unable to parse command result')
    return tuple(result)

def uuid(self):
    uuid_file = '/sys/block/%s/dm/uuid' % os.path.basename(
        os.path.realpath(self.volume_path()))
    lv_uuid = open(uuid_file).read().strip()
    if lv_uuid.startswith('LVM-') is True:
        return lv_uuid[4:]
    return lv_uuid

def create_snapshot(self, snapshot_size, snapshot_suffix):
    size_extent = math.ceil(self.extents_count() * snapshot_size)
    size_kb = self.volume_group().extent_size() * size_extent
    snapshot_name = self.volume_name() + snapshot_suffix
    lvcreate_cmd = ['sudo'] if self.lvm_command().sudo() is True else []
    lvcreate_cmd.extend(['lvcreate', '-L', '%iK' % size_kb, '-s', '-n',
                         snapshot_name, '-p', 'r', self.volume_path()])
    subprocess.check_output(
        lvcreate_cmd, timeout=self.__class__.__lvm_snapshot_create_cmd_timeout__)
    return WLogicalVolume(self.volume_path() + snapshot_suffix,
                          sudo=self.lvm_command().sudo())

def remove_volume(self):
    lvremove_cmd = ['sudo'] if self.lvm_command().sudo() is True else []
    lvremove_cmd.extend(['lvremove', '-f', self.volume_path()])
    subprocess.check_output(
        lvremove_cmd, timeout=self.__class__.__lvm_snapshot_remove_cmd_timeout__)

def logical_volume(cls, file_path, sudo=False):
    mp = WMountPoint.mount_point(file_path)
    if mp is not None:
        name_file = '/sys/block/%s/dm/name' % mp.device_name()
        if os.path.exists(name_file):
            lv_path = '/dev/mapper/%s' % open(name_file).read().strip()
            return WLogicalVolume(lv_path, sudo=sudo)

def write(self, b):
    self.__buffer += bytes(b)
    bytes_written = 0
    while len(self.__buffer) >= self.__cipher_block_size:
        io.BufferedWriter.write(
            self, self.__cipher.encrypt_block(self.__buffer[:self.__cipher_block_size]))
        self.__buffer = self.__buffer[self.__cipher_block_size:]
        bytes_written += self.__cipher_block_size
    return len(b)

def reset_component(self, component):
    if isinstance(component, str) is True:
        component = WURI.Component(component)
    self.__components[component] = None
def parse(cls, uri):
    uri_components = urlsplit(uri)
    # Treat empty string components the same as missing ones. The original had the
    # "> 0" check outside the parentheses; the comparison belongs to len(x).
    adapter_fn = lambda x: x if x is not None and (isinstance(x, str) is False or len(x) > 0) else None
    return cls(
        scheme=adapter_fn(uri_components.scheme),
        username=adapter_fn(uri_components.username),
        password=adapter_fn(uri_components.password),
        hostname=adapter_fn(uri_components.hostname),
        port=adapter_fn(uri_components.port),
        path=adapter_fn(uri_components.path),
        query=adapter_fn(uri_components.query),
        fragment=adapter_fn(uri_components.fragment),
    )
def add_parameter(self, name, value=None):
    if name not in self.__query:
        self.__query[name] = [value]
    else:
        self.__query[name].append(value)

def remove_parameter(self, name):
    if name in self.__query:
        self.__query.pop(name)

def parse(cls, query_str):
    parsed_query = parse_qs(query_str, keep_blank_values=True, strict_parsing=True)
    result = cls()
    for parameter_name in parsed_query.keys():
        for parameter_value in parsed_query[parameter_name]:
            result.add_parameter(parameter_name,
                                 parameter_value if len(parameter_value) > 0 else None)
    return result

def add_specification(self, specification):
    name = specification.name()
    if name in self.__specs:
        raise ValueError(
            'WStrictURIQuery object already has specification for parameter "%s"' % name)
    self.__specs[name] = specification

def remove_specification(self, name):
    if name in self.__specs:
        self.__specs.pop(name)
def replace_parameter(self, name, value=None):
    spec = self.__specs[name] if name in self.__specs else None
    if self.extra_parameters() is False and spec is None:
        raise ValueError('Extra parameters are forbidden for this WStrictURIQuery object')
    if spec is not None and spec.nullable() is False and value is None:
        raise ValueError('Null values are forbidden for parameter "%s"' % name)
    if spec is not None and value is not None:
        re_obj = spec.re_obj()
        if re_obj is not None and re_obj.match(value) is None:
            raise ValueError('Value does not match regular expression')
    WURIQuery.replace_parameter(self, name, value=value)
def remove_parameter(self, name):
    spec = self.__specs[name] if name in self.__specs else None
    if spec is not None and spec.optional() is False:
        raise ValueError('Unable to remove a required parameter "%s"' % name)
    WURIQuery.remove_parameter(self, name)

def validate(self, uri):
    requirement = self.requirement()
    uri_component = uri.component(self.component())
    if uri_component is None:
        return requirement != WURIComponentVerifier.Requirement.required
    if requirement == WURIComponentVerifier.Requirement.unsupported:
        return False
    re_obj = self.re_obj()
    if re_obj is not None:
        return re_obj.match(uri_component) is not None
    return True

def validate(self, uri):
    if WURIComponentVerifier.validate(self, uri) is False:
        return False
    try:
        WStrictURIQuery(WURIQuery.parse(uri.component(self.component())),
                        *self.__specs,
                        extra_parameters=self.__extra_parameters)
    except ValueError:
        return False
    return True

def is_compatible(self, uri):
    for component, component_value in uri:
        if self.verifier(component).validate(uri) is False:
            return False
    return True

def handler(self, scheme_name=None):
    if scheme_name is None:
        return self.__default_handler_cls
    for handler in self.__handlers_cls:
        if handler.scheme_specification().scheme_name() == scheme_name:
            return handler

def open(self, uri, **kwargs):
    handler = self.handler(uri.scheme())
    if handler is None:
        raise WSchemeCollection.NoHandlerFound(uri)
    if uri.scheme() is None:
        uri.component('scheme', handler.scheme_specification().scheme_name())
    if handler.scheme_specification().is_compatible(uri) is False:
        raise WSchemeCollection.SchemeIncompatible(uri)
    return handler.create_handler(uri, **kwargs)

def loadFile(self, fileName):
    self.fileName = fileName
    self.qteWeb.load(QtCore.QUrl(fileName))

def render_to_response(self, context, **response_kwargs):
    context["ajax_form_id"] = self.ajax_form_id
    return self.response_class(
        request=self.request,
        template=self.get_template_names(),
        context=context,
        **response_kwargs)

def to_python(self, value, resource):
    if value is None:
        return self._transform(value)
    if isinstance(value, six.text_type):
        return self._transform(value)
    if self.encoding is None and isinstance(value, (six.text_type, six.binary_type)):
        return self._transform(value)
    if self.encoding is not None and isinstance(value, six.binary_type):
        return self._transform(value.decode(self.encoding))
    return self._transform(six.text_type(value))

def to_python(self, value, resource):
    if isinstance(value, dict):
        d = {self.aliases.get(k, k):
             (self.to_python(v, resource) if isinstance(v, (dict, list)) else v)
             for k, v in six.iteritems(value)}
        return type(self.class_name, (), d)
    elif isinstance(value, list):
        return [self.to_python(x, resource) if isinstance(x, (dict, list)) else x
                for x in value]
    else:
        return value

def to_value(self, obj, resource, visited=set()):
    if id(obj) in visited:
        raise ValueError('Circular reference detected when attempting to serialize object')
    if isinstance(obj, (list, tuple, set)):
        return [self.to_value(x, resource) if hasattr(x, '__dict__') else x for x in obj]
    elif hasattr(obj, '__dict__'):
        attrs = obj.__dict__.copy()
        for key in six.iterkeys(obj.__dict__):
            if key.startswith('_'):
                del attrs[key]
        return {self.reverse_aliases.get(k, k):
                (self.to_value(v, resource)
                 if hasattr(v, '__dict__') or isinstance(v, (list, tuple, set)) else v)
                for k, v in six.iteritems(attrs)}
    else:
        return obj

def hello_message(self, invert_hello=False):
    if invert_hello is False:
        return self.__gouverneur_message
    hello_message = []
    for i in range(len(self.__gouverneur_message) - 1, -1, -1):
        hello_message.append(self.__gouverneur_message[i])
    return bytes(hello_message)

def _message_address_parse(self, message, invert_hello=False):
    message_header = self.hello_message(invert_hello=invert_hello)
    if message[:len(message_header)] != message_header:
        raise ValueError('Invalid message header')
    message = message[len(message_header):]
    message_parts = message.split(WBeaconGouverneurMessenger.__message_splitter__)
    address = None
    port = None
    if len(message_parts) > 3:
        raise ValueError('Invalid message. Too many separators')
    elif len(message_parts) == 3:
        address = WIPV4SocketInfo.parse_address(message_parts[1].decode('ascii'))
        port = WIPPort(int(message_parts[2]))
    elif len(message_parts) == 2 and len(message_parts[1]) > 0:
        address = WIPV4SocketInfo.parse_address(message_parts[1].decode('ascii'))
    return WIPV4SocketInfo(address, port)

def cipher(self):
    cipher = Cipher(*self.mode().aes_args(), **self.mode().aes_kwargs())
    return WAES.WAESCipher(cipher)

def encrypt(self, data):
    padding = self.mode().padding()
    if padding is not None:
        data = padding.pad(data, WAESMode.__data_padding_length__)
    return self.cipher().encrypt_block(data)

def decrypt(self, data, decode=False):
    result = self.cipher().decrypt_block(data)
    padding = self.mode().padding()
    if padding is not None:
        result = padding.reverse_pad(result, WAESMode.__data_padding_length__)
    return result.decode() if decode else result

def thread_tracker_exception(self, raised_exception):
    print('Thread tracker execution was stopped by the exception. Exception: %s'
          % str(raised_exception))
    print('Traceback:')
    print(traceback.format_exc())
def __store_record(self, record):
    if isinstance(record, WSimpleTrackerStorage.Record) is False:
        # The original message was truncated ("Invalid record type was").
        raise TypeError('Invalid record type')
    limit = self.record_limit()
    if limit is not None and len(self.__registry) >= limit:
        self.__registry.pop(0)
    self.__registry.append(record)
def put_nested_val(dict_obj, key_tuple, value):
    current_dict = dict_obj
    for key in key_tuple[:-1]:
        try:
            current_dict = current_dict[key]
        except KeyError:
            current_dict[key] = {}
            current_dict = current_dict[key]
    current_dict[key_tuple[-1]] = value
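# Usage sketch: missing intermediate dicts are created along the key path.
# d = {}
# put_nested_val(d, ('a', 'b', 'c'), 7)
# d -> {'a': {'b': {'c': 7}}}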
def get_alternative_nested_val(key_tuple, dict_obj):
    top_keys = key_tuple[0] if isinstance(key_tuple[0], (list, tuple)) else [key_tuple[0]]
    for key in top_keys:
        try:
            if len(key_tuple) < 2:
                return dict_obj[key]
            return get_alternative_nested_val(key_tuple[1:], dict_obj[key])
        except (KeyError, TypeError, IndexError):
            pass
    raise KeyError
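# Usage sketch: the first level of the key tuple may list alternative keys;
# the first alternative that resolves wins.
# get_alternative_nested_val((('x', 'y'), 'z'), {'y': {'z': 3}}) -> 3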
def subdict_by_keys(dict_obj, keys):
    return {k: dict_obj[k] for k in set(keys).intersection(dict_obj.keys())}
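# Usage sketch: keys missing from the source dict are silently ignored.
# subdict_by_keys({'a': 1, 'b': 2, 'c': 3}, ['a', 'c', 'z']) -> {'a': 1, 'c': 3}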
def add_to_dict_val_set(dict_obj, key, val):
    try:
        dict_obj[key].add(val)
    except KeyError:
        dict_obj[key] = set([val])

def add_many_to_dict_val_set(dict_obj, key, val_list):
    try:
        dict_obj[key].update(val_list)
    except KeyError:
        dict_obj[key] = set(val_list)

def add_many_to_dict_val_list(dict_obj, key, val_list):
    try:
        dict_obj[key].extend(val_list)
    except KeyError:
        dict_obj[key] = list(val_list)
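# Usage sketch for the three dict-of-collections helpers above (illustrative values):
# d = {}
# add_to_dict_val_set(d, 'k', 1)             # d -> {'k': {1}}
# add_many_to_dict_val_set(d, 'k', [1, 2])   # d -> {'k': {1, 2}}
# add_many_to_dict_val_list(d, 'm', [3, 4])  # d -> {'k': {1, 2}, 'm': [3, 4]}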
def get_keys_of_max_n(dict_obj, n):
    return sorted([item[0] for item in sorted(
        dict_obj.items(), key=lambda item: item[1], reverse=True)[:n]])
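# Usage sketch: keys of the n largest values, returned in sorted key order.
# get_keys_of_max_n({'a': 1, 'b': 3, 'c': 2}, 2) -> ['b', 'c']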
def deep_merge_dict(base, priority):
    if not isinstance(base, dict) or not isinstance(priority, dict):
        return priority
    result = copy.deepcopy(base)
    for key in priority.keys():
        if key in base:
            result[key] = deep_merge_dict(base[key], priority[key])
        else:
            result[key] = priority[key]
    return result
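# Usage sketch: values from `priority` win on conflicts; nested dicts merge recursively.
# deep_merge_dict({'a': {'x': 1, 'y': 2}, 'b': 1}, {'a': {'y': 3}, 'c': 4})
# -> {'a': {'x': 1, 'y': 3}, 'b': 1, 'c': 4}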
def norm_int_dict(int_dict):
    norm_dict = int_dict.copy()
    val_sum = sum(norm_dict.values())
    for key in norm_dict:
        norm_dict[key] = norm_dict[key] / val_sum
    return norm_dict
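# Usage sketch: values are scaled to sum to 1 (Python 3 true division assumed).
# norm_int_dict({'a': 1, 'b': 3}) -> {'a': 0.25, 'b': 0.75}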