idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
2,400 | def query ( self , constraint , sortby = None , typenames = None , maxrecords = 10 , startposition = 0 ) : if 'where' in constraint : query = self . _get_repo_filter ( Layer . objects ) . filter ( is_valid = True ) . extra ( where = [ constraint [ 'where' ] ] , params = constraint [ 'values' ] ) else : query = self . _... | Query records from underlying repository |
2,401 | def insert ( self , resourcetype , source , insert_date = None ) : caller = inspect . stack ( ) [ 1 ] [ 3 ] if caller == 'transaction' : hhclass = 'Layer' source = resourcetype resourcetype = resourcetype . csw_schema else : hhclass = 'Service' if resourcetype not in HYPERMAP_SERVICE_TYPES . keys ( ) : raise RuntimeErr... | Insert a record into the repository |
2,402 | def _insert_or_update ( self , resourcetype , source , mode = 'insert' , hhclass = 'Service' ) : keywords = [ ] if self . filter is not None : catalog = Catalog . objects . get ( id = int ( self . filter . split ( ) [ - 1 ] ) ) try : if hhclass == 'Layer' : match = Layer . objects . filter ( name = source . name , titl... | Insert or update a record in the repository |
2,403 | def delete ( self , constraint ) : results = self . _get_repo_filter ( Service . objects ) . extra ( where = [ constraint [ 'where' ] ] , params = constraint [ 'values' ] ) . all ( ) deleted = len ( results ) results . delete ( ) return deleted | Delete a record from the repository |
2,404 | def check ( func ) : def iCheck ( request , * args , ** kwargs ) : if not request . method == "POST" : return HttpResponseBadRequest ( "Must be POST request." ) follow = func ( request , * args , ** kwargs ) if request . is_ajax ( ) : return HttpResponse ( 'ok' ) try : if 'next' in request . GET : return HttpResponseRe... | Check the permissions http method and login state . |
2,405 | def register ( model , field_name = None , related_name = None , lookup_method_name = 'get_follows' ) : if model in registry : return registry . append ( model ) if not field_name : field_name = 'target_%s' % model . _meta . module_name if not related_name : related_name = 'follow_%s' % model . _meta . module_name fiel... | This registers any model class to be follow - able . |
2,406 | def follow ( user , obj ) : follow , created = Follow . objects . get_or_create ( user , obj ) return follow | Make a user follow an object |
2,407 | def unfollow ( user , obj ) : try : follow = Follow . objects . get_follows ( obj ) . get ( user = user ) follow . delete ( ) return follow except Follow . DoesNotExist : pass | Make a user unfollow an object |
2,408 | def create ( self , user , obj , ** kwargs ) : follow = Follow ( user = user ) follow . target = obj follow . save ( ) return follow | Create a new follow link between a user and an object of a registered model type . |
2,409 | def get_or_create ( self , user , obj , ** kwargs ) : if not self . is_following ( user , obj ) : return self . create ( user , obj , ** kwargs ) , True return self . get_follows ( obj ) . get ( user = user ) , False | Almost the same as FollowManager . objects . create - behaves the same as the normal get_or_create methods in django though . |
2,410 | def is_following ( self , user , obj ) : if isinstance ( user , AnonymousUser ) : return False return 0 < self . get_follows ( obj ) . filter ( user = user ) . count ( ) | Returns True or False |
2,411 | def get_follows ( self , model_or_obj_or_qs ) : fname = self . fname ( model_or_obj_or_qs ) if isinstance ( model_or_obj_or_qs , QuerySet ) : return self . filter ( ** { '%s__in' % fname : model_or_obj_or_qs } ) if inspect . isclass ( model_or_obj_or_qs ) : return self . exclude ( ** { fname : None } ) return self . fi... | Returns all the followers of a model an object or a queryset . |
2,412 | def create_event_regressors ( self , event_times_indices , covariates = None , durations = None ) : if covariates is None : covariates = np . ones ( self . event_times_indices . shape ) if durations is None : durations = np . ones ( self . event_times_indices . shape ) else : durations = np . round ( durations * self .... | create_event_regressors creates the part of the design matrix corresponding to one event type . |
2,413 | def regress ( self , method = 'lstsq' ) : if method is 'lstsq' : self . betas , residuals_sum , rank , s = LA . lstsq ( self . design_matrix . T , self . resampled_signal . T ) self . residuals = self . resampled_signal - self . predict_from_design_matrix ( self . design_matrix ) elif method is 'sm_ols' : import statsm... | regress performs linear least squares regression of the designmatrix on the data . |
2,414 | def predict_from_design_matrix ( self , design_matrix ) : assert hasattr ( self , 'betas' ) , 'no betas found, please run regression before prediction' assert design_matrix . shape [ 0 ] == self . betas . shape [ 0 ] , 'designmatrix needs to have the same number of regressors as the betas already calculated' prediction... | predict_from_design_matrix predicts signals given a design matrix . |
2,415 | def resource_urls ( request ) : url_parsed = urlparse ( settings . SEARCH_URL ) defaults = dict ( APP_NAME = __description__ , APP_VERSION = __version__ , SITE_URL = settings . SITE_URL . rstrip ( '/' ) , SEARCH_TYPE = settings . SEARCH_TYPE , SEARCH_URL = settings . SEARCH_URL , SEARCH_IP = '%s://%s:%s' % ( url_parsed... | Global values to pass to templates |
2,416 | def remove_service_checks ( self , service_id ) : from hypermap . aggregator . models import Service service = Service . objects . get ( id = service_id ) service . check_set . all ( ) . delete ( ) layer_to_process = service . layer_set . all ( ) for layer in layer_to_process : layer . check_set . all ( ) . delete ( ) | Remove all checks from a service . |
2,417 | def index_service ( self , service_id ) : from hypermap . aggregator . models import Service service = Service . objects . get ( id = service_id ) if not service . is_valid : LOGGER . debug ( 'Not indexing service with id %s in search engine as it is not valid' % service . id ) return LOGGER . debug ( 'Indexing service... | Index a service in search engine . |
2,418 | def index_layer ( self , layer_id , use_cache = False ) : from hypermap . aggregator . models import Layer layer = Layer . objects . get ( id = layer_id ) if not layer . is_valid : LOGGER . debug ( 'Not indexing or removing layer with id %s in search engine as it is not valid' % layer . id ) unindex_layer ( layer . id ... | Index a layer in the search backend . If cache is set append it to the list if it isn t send the transaction right away . cache needs memcached to be available . |
2,419 | def unindex_layers_with_issues ( self , use_cache = False ) : from hypermap . aggregator . models import Issue , Layer , Service from django . contrib . contenttypes . models import ContentType layer_type = ContentType . objects . get_for_model ( Layer ) service_type = ContentType . objects . get_for_model ( Service ) ... | Remove the index for layers in search backend which are linked to an issue . |
2,420 | def unindex_layer ( self , layer_id , use_cache = False ) : from hypermap . aggregator . models import Layer layer = Layer . objects . get ( id = layer_id ) if use_cache : LOGGER . debug ( 'Caching layer with id %s for being removed from search engine' % layer . id ) deleted_layers = cache . get ( 'deleted_layers' ) if... | Remove the index for a layer in the search backend . If cache is set append it to the list of removed layers if it isn t send the transaction right away . |
2,421 | def index_all_layers ( self ) : from hypermap . aggregator . models import Layer if not settings . REGISTRY_SKIP_CELERY : layers_cache = set ( Layer . objects . filter ( is_valid = True ) . values_list ( 'id' , flat = True ) ) deleted_layers_cache = set ( Layer . objects . filter ( is_valid = False ) . values_list ( 'i... | Index all layers in search engine . |
2,422 | def bbox2wktpolygon ( bbox ) : try : minx = float ( bbox [ 0 ] ) miny = float ( bbox [ 1 ] ) maxx = float ( bbox [ 2 ] ) maxy = float ( bbox [ 3 ] ) except : LOGGER . debug ( "Invalid bbox, setting it to a zero POLYGON" ) minx = 0 miny = 0 maxx = 0 maxy = 0 return 'POLYGON((%.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f, %... | Return OGC WKT Polygon of a simple bbox list |
2,423 | def gen_anytext ( * args ) : bag = [ ] for term in args : if term is not None : if isinstance ( term , list ) : for term2 in term : if term2 is not None : bag . append ( term2 ) else : bag . append ( term ) return ' ' . join ( bag ) | Convenience function to create bag of words for anytext property |
2,424 | def endpointlist_post_save ( instance , * args , ** kwargs ) : with open ( instance . upload . file . name , mode = 'rb' ) as f : lines = f . readlines ( ) for url in lines : if len ( url ) > 255 : LOGGER . debug ( 'Skipping this endpoint, as it is more than 255 characters: %s' % url ) else : if Endpoint . objects . fi... | Used to process the lines of the endpoint list . |
2,425 | def layer_pre_save ( instance , * args , ** kwargs ) : is_valid = True if not instance . service . type == 'Hypermap:WorldMap' : if not instance . service . is_valid : is_valid = False LOGGER . debug ( 'Layer with id %s is marked invalid because its service is invalid' % instance . id ) if instance . bbox_x0 > - 2 and ... | Used to check layer validity . |
2,426 | def layer_post_save ( instance , * args , ** kwargs ) : if instance . is_monitored and instance . service . is_monitored : if not settings . REGISTRY_SKIP_CELERY : check_layer . delay ( instance . id ) else : check_layer ( instance . id ) else : index_layer ( instance . id ) | Used to do a layer full check when saving it . |
2,427 | def update_layers ( self ) : signals . post_save . disconnect ( layer_post_save , sender = Layer ) try : LOGGER . debug ( 'Updating layers for service id %s' % self . id ) if self . type == 'OGC:WMS' : update_layers_wms ( self ) elif self . type == 'OGC:WMTS' : update_layers_wmts ( self ) elif self . type == 'ESRI:ArcG... | Update layers for a service . |
2,428 | def update_validity ( self ) : if self . type == 'Hypermap:WorldMap' : return signals . post_save . disconnect ( service_post_save , sender = Service ) try : is_valid = True if self . srs . filter ( code__in = SUPPORTED_SRS ) . count ( ) == 0 : LOGGER . debug ( 'Service with id %s is marked invalid because in not expos... | Update validity of a service . |
2,429 | def get_url_endpoint ( self ) : endpoint = self . url if self . type not in ( 'Hypermap:WorldMap' , ) : endpoint = 'registry/%s/layer/%s/map/wmts/1.0.0/WMTSCapabilities.xml' % ( self . catalog . slug , self . id ) return endpoint | Returns the Hypermap endpoint for a layer . This endpoint will be the WMTS MapProxy endpoint only for WM we use the original endpoint . |
2,430 | def check_available ( self ) : success = True start_time = datetime . datetime . utcnow ( ) message = '' LOGGER . debug ( 'Checking layer id %s' % self . id ) signals . post_save . disconnect ( layer_post_save , sender = Layer ) try : self . update_thumbnail ( ) except ValueError , err : if str ( err ) . startswith ( "... | Check for availability of a layer and provide run metrics . |
2,431 | def _input_github_repo ( url = None ) : if url is None : url = user_input ( 'Input the URL of the GitHub repository ' 'to use as a `trytravis` repository: ' ) url = url . strip ( ) http_match = _HTTPS_REGEX . match ( url ) ssh_match = _SSH_REGEX . match ( url ) if not http_match and not ssh_match : raise RuntimeError (... | Grabs input from the user and saves it as their trytravis target repo |
2,432 | def _load_github_repo ( ) : if 'TRAVIS' in os . environ : raise RuntimeError ( 'Detected that we are running in Travis. ' 'Stopping to prevent infinite loops.' ) try : with open ( os . path . join ( config_dir , 'repo' ) , 'r' ) as f : return f . read ( ) except ( OSError , IOError ) : raise RuntimeError ( 'Could not f... | Loads the GitHub repository from the users config . |
2,433 | def _submit_changes_to_github_repo ( path , url ) : try : repo = git . Repo ( path ) except Exception : raise RuntimeError ( 'Couldn\'t locate a repository at `%s`.' % path ) commited = False try : try : repo . delete_remote ( 'trytravis' ) except Exception : pass print ( 'Adding a temporary remote to ' '`%s`...' % url... | Temporarily commits local changes and submits them to the GitHub repository that the user has specified . Then reverts the changes to the git repository if a commit was necessary . |
2,434 | def _wait_for_travis_build ( url , commit , committed_at ) : print ( 'Waiting for a Travis build to appear ' 'for `%s` after `%s`...' % ( commit , committed_at ) ) import requests slug = _slug_from_url ( url ) start_time = time . time ( ) build_id = None while time . time ( ) - start_time < 60 : with requests . get ( '... | Waits for a Travis build to appear with the given commit SHA |
2,435 | def _watch_travis_build ( build_id ) : import requests try : build_size = None running = True while running : with requests . get ( 'https://api.travis-ci.org/builds/%d' % build_id , headers = _travis_headers ( ) ) as r : json = r . json ( ) if build_size is not None : if build_size > 1 : sys . stdout . write ( '\r\x1b... | Watches and progressively outputs information about a given Travis build |
2,436 | def _travis_job_state ( state ) : if state in [ None , 'queued' , 'created' , 'received' ] : return colorama . Fore . YELLOW , '*' , True elif state in [ 'started' , 'running' ] : return colorama . Fore . LIGHTYELLOW_EX , '*' , True elif state == 'passed' : return colorama . Fore . LIGHTGREEN_EX , 'P' , False elif stat... | Converts a Travis state into a state character color and whether it s still running or a stopped state . |
2,437 | def _slug_from_url ( url ) : http_match = _HTTPS_REGEX . match ( url ) ssh_match = _SSH_REGEX . match ( url ) if not http_match and not ssh_match : raise RuntimeError ( 'Could not parse the URL (`%s`) ' 'for your repository.' % url ) if http_match : return '/' . join ( http_match . groups ( ) ) else : return '/' . join... | Parses a project slug out of either an HTTPS or SSH URL . |
2,438 | def main ( argv = None ) : try : colorama . init ( ) if argv is None : argv = sys . argv [ 1 : ] _main ( argv ) except RuntimeError as e : print ( colorama . Fore . RED + 'ERROR: ' + str ( e ) + colorama . Style . RESET_ALL ) sys . exit ( 1 ) else : sys . exit ( 0 ) | Main entry point when the user runs the trytravis command . |
2,439 | def csw_global_dispatch_by_catalog ( request , catalog_slug ) : catalog = get_object_or_404 ( Catalog , slug = catalog_slug ) if catalog : url = settings . SITE_URL . rstrip ( '/' ) + request . path . rstrip ( '/' ) return csw_global_dispatch ( request , url = url , catalog_id = catalog . id ) | pycsw wrapper for catalogs |
2,440 | def good_coords ( coords ) : if ( len ( coords ) != 4 ) : return False for coord in coords [ 0 : 3 ] : try : num = float ( coord ) if ( math . isnan ( num ) ) : return False if ( math . isinf ( num ) ) : return False except ValueError : return False return True | passed a string array |
2,441 | def clear_es ( ) : ESHypermap . es . indices . delete ( ESHypermap . index_name , ignore = [ 400 , 404 ] ) LOGGER . debug ( 'Elasticsearch: Index cleared' ) | Clear all indexes in the es core |
2,442 | def create_indices ( catalog_slug ) : mapping = { "mappings" : { "layer" : { "properties" : { "layer_geoshape" : { "type" : "geo_shape" , "tree" : "quadtree" , "precision" : REGISTRY_MAPPING_PRECISION } } } } } ESHypermap . es . indices . create ( catalog_slug , ignore = [ 400 , 404 ] , body = mapping ) | Create ES core indices |
2,443 | def kill_process ( procname , scriptname ) : import signal import subprocess p = subprocess . Popen ( [ 'ps' , 'aux' ] , stdout = subprocess . PIPE ) out , err = p . communicate ( ) for line in out . decode ( ) . splitlines ( ) : if procname in line and scriptname in line : pid = int ( line . split ( ) [ 1 ] ) info ( '... | kill WSGI processes that may be running in development |
2,444 | def populate_initial_services ( ) : services_list = ( ( 'Harvard WorldMap' , 'Harvard WorldMap open source web geospatial platform' , 'Hypermap:WorldMap' , 'http://worldmap.harvard.edu' ) , ( 'NYPL MapWarper' , 'The New York Public Library (NYPL) MapWarper web site' , 'Hypermap:WARPER' , 'http://maps.nypl.org/warper/ma... | Populate a fresh installed Hypermap instances with basic services . |
2,445 | def main ( ) : tcp_adapter = TcpAdapter ( "192.168.1.3" , name = "HASS" , activate_source = False ) hdmi_network = HDMINetwork ( tcp_adapter ) hdmi_network . start ( ) while True : for d in hdmi_network . devices : _LOGGER . info ( "Device: %s" , d ) time . sleep ( 7 ) | For testing purpose |
2,446 | def compare_hexdigests ( digest1 , digest2 ) : digest1 = tuple ( [ int ( digest1 [ i : i + 2 ] , 16 ) for i in range ( 0 , 63 , 2 ) ] ) digest2 = tuple ( [ int ( digest2 [ i : i + 2 ] , 16 ) for i in range ( 0 , 63 , 2 ) ] ) bits = 0 for i in range ( 32 ) : bits += POPC [ 255 & digest1 [ i ] ^ digest2 [ i ] ] return 12... | Compute difference in bits between digest1 and digest2 . Returns - 127 to 128 ; 128 is the same , - 127 is different |
2,447 | def tran3 ( self , a , b , c , n ) : return ( ( ( TRAN [ ( a + n ) & 255 ] ^ TRAN [ b ] * ( n + n + 1 ) ) + TRAN [ ( c ) ^ TRAN [ n ] ] ) & 255 ) | Get accumulator for a transition n between chars a b c . |
2,448 | def update ( self , data ) : for character in data : if PY3 : ch = character else : ch = ord ( character ) self . count += 1 if self . lastch [ 1 ] > - 1 : self . acc [ self . tran3 ( ch , self . lastch [ 0 ] , self . lastch [ 1 ] , 0 ) ] += 1 if self . lastch [ 2 ] > - 1 : self . acc [ self . tran3 ( ch , self . lastc... | Add data to running digest increasing the accumulators for 0 - 8 triplets formed by this char and the previous 0 - 3 chars . |
2,449 | def digest ( self ) : total = 0 if self . count == 3 : total = 1 elif self . count == 4 : total = 4 elif self . count > 4 : total = 8 * self . count - 28 threshold = total / 256 code = [ 0 ] * 32 for i in range ( 256 ) : if self . acc [ i ] > threshold : code [ i >> 3 ] += 1 << ( i & 7 ) return code [ : : - 1 ] | Get digest of data seen thus far as a list of bytes . |
2,450 | def from_file ( self , filename ) : f = open ( filename , 'rb' ) while True : data = f . read ( 10480 ) if not data : break self . update ( data ) f . close ( ) | Update running digest with content of named file . |
2,451 | def compare ( self , otherdigest , ishex = False ) : bits = 0 myd = self . digest ( ) if ishex : otherdigest = tuple ( [ int ( otherdigest [ i : i + 2 ] , 16 ) for i in range ( 0 , 63 , 2 ) ] ) for i in range ( 32 ) : bits += POPC [ 255 & myd [ i ] ^ otherdigest [ i ] ] return 128 - bits | Compute difference in bits between own digest and another . Returns - 127 to 128 ; 128 is the same , - 127 is different |
2,452 | def jdout ( api_response ) : try : output = json . dumps ( api_response . cgx_content , indent = 4 ) except ( TypeError , ValueError , AttributeError ) : try : output = json . dumps ( api_response , indent = 4 ) except ( TypeError , ValueError , AttributeError ) : output = api_response return output | JD Output function . Does quick pretty printing of a CloudGenix Response body . This function returns a string instead of directly printing content . |
2,453 | def jdout_detailed ( api_response , sensitive = False ) : try : output = "REQUEST: {0} {1}\n" . format ( api_response . request . method , api_response . request . path_url ) output += "REQUEST HEADERS:\n" for key , value in api_response . request . headers . items ( ) : if key . lower ( ) in [ 'cookie' ] and not sensi... | JD Output Detailed function . Meant for quick DETAILED pretty - printing of CloudGenix Request and Response objects for troubleshooting . This function returns a string instead of directly printing content . |
2,454 | def notify_for_new_version ( self ) : try : recommend_update = False update_check_resp = requests . get ( self . update_info_url , timeout = 3 ) web_version = update_check_resp . json ( ) [ "info" ] [ "version" ] api_logger . debug ( "RETRIEVED_VERSION: %s" , web_version ) available_version = SDK_BUILD_REGEX . search (... | Check for a new version of the SDK on API constructor instantiation . If new version found print Notification to STDERR . |
2,455 | def ssl_verify ( self , ssl_verify ) : self . verify = ssl_verify if isinstance ( self . verify , bool ) : if self . verify : if os . name == 'nt' : self . _ca_verify_file_handle = temp_ca_bundle ( delete = False ) self . _ca_verify_file_handle . write ( BYTE_CA_BUNDLE ) self . _ca_verify_file_handle . flush ( ) self .... | Modify ssl verification settings |
2,456 | def modify_rest_retry ( self , total = 8 , connect = None , read = None , redirect = None , status = None , method_whitelist = urllib3 . util . retry . Retry . DEFAULT_METHOD_WHITELIST , status_forcelist = None , backoff_factor = 0.705883 , raise_on_redirect = True , raise_on_status = True , respect_retry_after_header ... | Modify retry parameters for the SDK s rest call object . |
2,457 | def set_debug ( self , debuglevel ) : if isinstance ( debuglevel , int ) : self . _debuglevel = debuglevel if self . _debuglevel == 1 : logging . basicConfig ( level = logging . INFO , format = "%(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s" ) api_logger . setLevel ( logging . INFO ) elif self . _debugle... | Change the debug level of the API |
2,458 | def _subclass_container ( self ) : _parent_class = self class GetWrapper ( Get ) : def __init__ ( self ) : self . _parent_class = _parent_class class PostWrapper ( Post ) : def __init__ ( self ) : self . _parent_class = _parent_class class PutWrapper ( Put ) : def __init__ ( self ) : self . _parent_class = _parent_clas... | Call subclasses via function to allow passing parent namespace to subclasses . |
2,459 | def _cleanup_ca_temp_file ( self ) : if os . name == 'nt' : if isinstance ( self . ca_verify_filename , ( binary_type , text_type ) ) : os . unlink ( self . ca_verify_filename ) else : self . _ca_verify_file_handle . close ( ) | Function to clean up ca temp file for requests . |
2,460 | def parse_auth_token ( self , auth_token ) : auth_token_cleaned = auth_token . split ( '-' , 1 ) [ 1 ] auth_token_decoded = self . url_decode ( auth_token_cleaned ) auth_dict = { } for key_value in auth_token_decoded . split ( "&" ) : key_value_list = key_value . split ( "=" ) if len ( key_value_list ) == 2 and type ( ... | Break auth_token up into its constituent values . |
2,461 | def parse_region ( self , login_response ) : auth_token = login_response . cgx_content [ 'x_auth_token' ] auth_token_dict = self . parse_auth_token ( auth_token ) auth_region = auth_token_dict . get ( 'region' ) return auth_region | Return region from a successful login response . |
2,462 | def _catch_nonjson_streamresponse ( rawresponse ) : try : response = json . loads ( rawresponse ) except ( ValueError , TypeError ) : if rawresponse : response = { '_error' : [ { 'message' : 'Response not in JSON format.' , 'data' : rawresponse , } ] } else : response = { } return response | Validate a streamed response is JSON . Return a Python dictionary either way . |
2,463 | def url_decode ( url ) : return re . compile ( '%([0-9a-fA-F]{2})' , re . M ) . sub ( lambda m : chr ( int ( m . group ( 1 ) , 16 ) ) , url ) | URL Decode function using REGEX |
2,464 | def jcrop_css ( css_url = None ) : if css_url is None : if current_app . config [ 'AVATARS_SERVE_LOCAL' ] : css_url = url_for ( 'avatars.static' , filename = 'jcrop/css/jquery.Jcrop.min.css' ) else : css_url = 'https://cdn.jsdelivr.net/npm/jcrop-0.9.12@0.9.12/css/jquery.Jcrop.min.css' return Markup ( '<link rel="styles... | Load jcrop css file . |
2,465 | def crop_box ( endpoint = None , filename = None ) : crop_size = current_app . config [ 'AVATARS_CROP_BASE_WIDTH' ] if endpoint is None or filename is None : url = url_for ( 'avatars.static' , filename = 'default/default_l.jpg' ) else : url = url_for ( endpoint , filename = filename ) return Markup ( '<img src="%s" id=... | Create a crop box . |
2,466 | def resize_avatar ( self , img , base_width ) : w_percent = ( base_width / float ( img . size [ 0 ] ) ) h_size = int ( ( float ( img . size [ 1 ] ) * float ( w_percent ) ) ) img = img . resize ( ( base_width , h_size ) , PIL . Image . ANTIALIAS ) return img | Resize an avatar . |
2,467 | def save_avatar ( self , image ) : path = current_app . config [ 'AVATARS_SAVE_PATH' ] filename = uuid4 ( ) . hex + '_raw.png' image . save ( os . path . join ( path , filename ) ) return filename | Save an avatar as raw image return new filename . |
2,468 | def get_image ( self , string , width , height , pad = 0 ) : hex_digest_byte_list = self . _string_to_byte_list ( string ) matrix = self . _create_matrix ( hex_digest_byte_list ) return self . _create_image ( matrix , width , height , pad ) | Byte representation of a PNG image |
2,469 | def _get_pastel_colour ( self , lighten = 127 ) : def r ( ) : return random . randint ( 0 , 128 ) + lighten return r ( ) , r ( ) , r ( ) | Create a pastel colour hex colour string |
2,470 | def _luminance ( self , rgb ) : a = [ ] for v in rgb : v = v / float ( 255 ) if v < 0.03928 : result = v / 12.92 else : result = math . pow ( ( ( v + 0.055 ) / 1.055 ) , 2.4 ) a . append ( result ) return a [ 0 ] * 0.2126 + a [ 1 ] * 0.7152 + a [ 2 ] * 0.0722 | Determine the luminance of an RGB colour |
2,471 | def _string_to_byte_list ( self , data ) : bytes_length = 16 m = self . digest ( ) m . update ( str . encode ( data ) ) hex_digest = m . hexdigest ( ) return list ( int ( hex_digest [ num * 2 : num * 2 + 2 ] , bytes_length ) for num in range ( bytes_length ) ) | Creates a hex digest of the input string given to create the image if it s not already hexadecimal |
2,472 | def _create_image ( self , matrix , width , height , pad ) : image = Image . new ( "RGB" , ( width + ( pad * 2 ) , height + ( pad * 2 ) ) , self . bg_colour ) image_draw = ImageDraw . Draw ( image ) block_width = float ( width ) / self . cols block_height = float ( height ) / self . rows for row , cols in enumerate ( m... | Generates a PNG byte list |
2,473 | def city ( self , value = None ) : if value is not None : try : value = str ( value ) except ValueError : raise ValueError ( 'value {} need to be of type str ' 'for field `city`' . format ( value ) ) if ',' in value : raise ValueError ( 'value should not contain a comma ' 'for field `city`' ) self . _city = value | Corresponds to IDD Field city |
2,474 | def state_province_region ( self , value = None ) : if value is not None : try : value = str ( value ) except ValueError : raise ValueError ( 'value {} need to be of type str ' 'for field `state_province_region`' . format ( value ) ) if ',' in value : raise ValueError ( 'value should not contain a comma ' 'for field `s... | Corresponds to IDD Field state_province_region |
2,475 | def country ( self , value = None ) : if value is not None : try : value = str ( value ) except ValueError : raise ValueError ( 'value {} need to be of type str ' 'for field `country`' . format ( value ) ) if ',' in value : raise ValueError ( 'value should not contain a comma ' 'for field `country`' ) self . _country =... | Corresponds to IDD Field country |
2,476 | def source ( self , value = None ) : if value is not None : try : value = str ( value ) except ValueError : raise ValueError ( 'value {} need to be of type str ' 'for field `source`' . format ( value ) ) if ',' in value : raise ValueError ( 'value should not contain a comma ' 'for field `source`' ) self . _source = val... | Corresponds to IDD Field source |
2,477 | def wmo ( self , value = None ) : if value is not None : try : value = str ( value ) except ValueError : raise ValueError ( 'value {} need to be of type str ' 'for field `wmo`' . format ( value ) ) if ',' in value : raise ValueError ( 'value should not contain a comma ' 'for field `wmo`' ) self . _wmo = value | Corresponds to IDD Field wmo usually a 6 digit field . Used as alpha in EnergyPlus . |
2,478 | def latitude ( self , value = 0.0 ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `latitude`' . format ( value ) ) if value < - 90.0 : raise ValueError ( 'value need to be greater or equal -90.0 ' 'for field `latitude`' ) if... | Corresponds to IDD Field latitude |
2,479 | def longitude ( self , value = 0.0 ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `longitude`' . format ( value ) ) if value < - 180.0 : raise ValueError ( 'value need to be greater or equal -180.0 ' 'for field `longitude`'... | Corresponds to IDD Field longitude |
2,480 | def timezone ( self , value = 0.0 ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `timezone`' . format ( value ) ) if value < - 12.0 : raise ValueError ( 'value need to be greater or equal -12.0 ' 'for field `timezone`' ) if... | Corresponds to IDD Field timezone Time relative to GMT . |
2,481 | def elevation ( self , value = 0.0 ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `elevation`' . format ( value ) ) if value < - 1000.0 : raise ValueError ( 'value need to be greater or equal -1000.0 ' 'for field `elevation... | Corresponds to IDD Field elevation |
2,482 | def title_of_design_condition ( self , value = None ) : if value is not None : try : value = str ( value ) except ValueError : raise ValueError ( 'value {} need to be of type str ' 'for field `title_of_design_condition`' . format ( value ) ) if ',' in value : raise ValueError ( 'value should not contain a comma ' 'for ... | Corresponds to IDD Field title_of_design_condition |
2,483 | def unkown_field ( self , value = None ) : if value is not None : try : value = str ( value ) except ValueError : raise ValueError ( 'value {} need to be of type str ' 'for field `unkown_field`' . format ( value ) ) if ',' in value : raise ValueError ( 'value should not contain a comma ' 'for field `unkown_field`' ) se... | Corresponds to IDD Field unkown_field Empty field in data . |
2,484 | def design_stat_heating ( self , value = "Heating" ) : if value is not None : try : value = str ( value ) except ValueError : raise ValueError ( 'value {} need to be of type str ' 'for field `design_stat_heating`' . format ( value ) ) if ',' in value : raise ValueError ( 'value should not contain a comma ' 'for field `... | Corresponds to IDD Field design_stat_heating |
2,485 | def coldestmonth ( self , value = None ) : if value is not None : try : value = int ( value ) except ValueError : raise ValueError ( 'value {} need to be of type int ' 'for field `coldestmonth`' . format ( value ) ) if value < 1 : raise ValueError ( 'value need to be greater or equal 1 ' 'for field `coldestmonth`' ) if... | Corresponds to IDD Field coldestmonth |
2,486 | def ws004c ( self , value = None ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `ws004c`' . format ( value ) ) self . _ws004c = value | Corresponds to IDD Field ws004c |
2,487 | def db_ws004c ( self , value = None ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `db_ws004c`' . format ( value ) ) self . _db_ws004c = value | Corresponds to IDD Field db_ws004c Mean coincident dry - bulb temperature to wind speed corresponding to 0 . 40% cumulative frequency for coldest month |
2,488 | def ws010c ( self , value = None ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `ws010c`' . format ( value ) ) self . _ws010c = value | Corresponds to IDD Field ws010c Wind speed corresponding to 1 . 0% cumulative frequency of occurrence for coldest month ; |
2,489 | def db_ws010c ( self , value = None ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `db_ws010c`' . format ( value ) ) self . _db_ws010c = value | Corresponds to IDD Field db_ws010c Mean coincident dry - bulb temperature to wind speed corresponding to 1 . 0% cumulative frequency for coldest month |
2,490 | def ws_db996 ( self , value = None ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `ws_db996`' . format ( value ) ) self . _ws_db996 = value | Corresponds to IDD Field ws_db996 Mean wind speed coincident with 99 . 6% dry - bulb temperature |
2,491 | def design_stat_cooling ( self , value = "Cooling" ) : if value is not None : try : value = str ( value ) except ValueError : raise ValueError ( 'value {} need to be of type str ' 'for field `design_stat_cooling`' . format ( value ) ) if ',' in value : raise ValueError ( 'value should not contain a comma ' 'for field `... | Corresponds to IDD Field design_stat_cooling |
2,492 | def dbr ( self , value = None ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `dbr`' . format ( value ) ) self . _dbr = value | Corresponds to IDD Field dbr Daily temperature range for hottest month . |
2,493 | def wb004 ( self , value = None ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `wb004`' . format ( value ) ) self . _wb004 = value | Corresponds to IDD Field wb004 Wet - bulb temperature corresponding to 0 . 4% annual cumulative frequency of occurrence |
2,494 | def db_wb004 ( self , value = None ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `db_wb004`' . format ( value ) ) self . _db_wb004 = value | Corresponds to IDD Field db_wb004 mean coincident dry - bulb temperature to Wet - bulb temperature corresponding to 0 . 4% annual cumulative frequency of occurrence |
2,495 | def wb010 ( self , value = None ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `wb010`' . format ( value ) ) self . _wb010 = value | Corresponds to IDD Field wb010 Wet - bulb temperature corresponding to 1 . 0% annual cumulative frequency of occurrence |
2,496 | def db_wb010 ( self , value = None ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `db_wb010`' . format ( value ) ) self . _db_wb010 = value | Corresponds to IDD Field db_wb010 mean coincident dry - bulb temperature to Wet - bulb temperature corresponding to 1 . 0% annual cumulative frequency of occurrence |
2,497 | def wb020 ( self , value = None ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `wb020`' . format ( value ) ) self . _wb020 = value | Corresponds to IDD Field wb020 Wet - bulb temperature corresponding to 2 . 0% annual cumulative frequency of occurrence
2,498 | def db_wb020 ( self , value = None ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `db_wb020`' . format ( value ) ) self . _db_wb020 = value | Corresponds to IDD Field db_wb020 mean coincident dry - bulb temperature to Wet - bulb temperature corresponding to 2 . 0% annual cumulative frequency of occurrence |
2,499 | def ws_db004 ( self , value = None ) : if value is not None : try : value = float ( value ) except ValueError : raise ValueError ( 'value {} need to be of type float ' 'for field `ws_db004`' . format ( value ) ) self . _ws_db004 = value | Corresponds to IDD Field ws_db004 Mean wind speed coincident with 0 . 4% dry - bulb temperature |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.