idx int64 0 252k | question stringlengths 48 5.28k | target stringlengths 5 1.23k |
|---|---|---|
246,300 | def generate_model_file ( filename , project , model , fields ) : for field in fields : field . type = field . __class__ . __name__ content = open ( os . path . join ( os . path . dirname ( __file__ ) , 'templates/model_page.html' ) , 'r' ) . read ( ) engine = StatikTemplateEngine ( project ) template = engine . create_template ( content ) context = { 'model' : model , 'fields' : fields } context . update ( dict ( project . config . context_static ) ) string = template . render ( context ) with open ( filename , 'w' ) as file : file . write ( string ) | Creates a webpage for a given instance of a model . |
246,301 | def build_dynamic ( self , db , extra = None , safe_mode = False ) : result = dict ( ) for var , query in iteritems ( self . dynamic ) : result [ var ] = db . query ( query , safe_mode = safe_mode , additional_locals = extra ) return result | Builds the dynamic context based on our current dynamic context entity and the given database . |
246,302 | def build_for_each ( self , db , safe_mode = False , extra = None ) : result = dict ( ) for var , query in iteritems ( self . for_each ) : result [ var ] = db . query ( query , additional_locals = extra , safe_mode = safe_mode ) return result | Builds the for - each context . |
246,303 | def build ( self , db = None , safe_mode = False , for_each_inst = None , extra = None ) : result = copy ( self . initial ) result . update ( self . static ) if self . dynamic : result . update ( self . build_dynamic ( db , extra = extra , safe_mode = safe_mode ) ) if self . for_each and for_each_inst : result . update ( self . build_for_each ( db , safe_mode = safe_mode , extra = extra ) ) if isinstance ( extra , dict ) : result . update ( extra ) return result | Builds a dictionary that can be used as context for template rendering . |
246,304 | def template_exception_handler ( fn , error_context , filename = None ) : error_message = None if filename : error_context . update ( filename = filename ) try : return fn ( ) except jinja2 . TemplateSyntaxError as exc : error_context . update ( filename = exc . filename , line_no = exc . lineno ) error_message = exc . message except jinja2 . TemplateError as exc : error_message = exc . message except Exception as exc : error_message = "%s" % exc raise TemplateError ( message = error_message , context = error_context ) | Calls the given function attempting to catch any template - related errors and converts the error to a Statik TemplateError instance . Returns the result returned by the function itself . |
246,305 | def create_template ( self , s , provider_name = None ) : if provider_name is None : provider_name = self . supported_providers [ 0 ] return template_exception_handler ( lambda : self . get_provider ( provider_name ) . create_template ( s ) , self . error_context ) | Creates a template from the given string based on the specified provider or the provider with highest precedence . |
246,306 | def construct_field ( model_name , field_name , field_type , all_models , ** kwargs ) : field_type_parts = field_type . split ( '->' ) _field_type = field_type_parts [ 0 ] . strip ( ) . split ( '[]' ) [ 0 ] . strip ( ) back_populates = field_type_parts [ 1 ] . strip ( ) if len ( field_type_parts ) > 1 else None error_context = kwargs . pop ( 'error_context' , StatikErrorContext ( ) ) _kwargs = copy ( kwargs ) _kwargs [ 'back_populates' ] = back_populates if _field_type not in FIELD_TYPES and _field_type not in all_models : raise InvalidFieldTypeError ( model_name , field_name , context = error_context ) if _field_type in FIELD_TYPES : return FIELD_TYPES [ _field_type ] ( field_name , ** _kwargs ) if field_type_parts [ 0 ] . strip ( ) . endswith ( '[]' ) : return StatikManyToManyField ( field_name , _field_type , ** _kwargs ) return StatikForeignKeyField ( field_name , _field_type , ** _kwargs ) | Helper function to build a field from the given field name and type . |
246,307 | def paginate ( db_query , items_per_page , offset = 0 , start_page = 1 ) : return Paginator ( db_query , items_per_page , offset = offset , start_page = start_page ) | Instantiates a Paginator instance for database queries . |
246,308 | def render_reverse ( self , inst = None , context = None ) : rendered = self . render ( inst = inst , context = context ) parts = rendered . split ( '/' ) if parts [ - 1 ] in [ 'index.html' , 'index.htm' ] : return ( '/' . join ( parts [ : - 1 ] ) ) + '/' return rendered | Renders the reverse URL for this path . |
246,309 | def create ( cls , path , template_engine = None , output_filename = None , output_ext = None , view_name = None ) : if isinstance ( path , dict ) : return StatikViewComplexPath ( path , template_engine , output_filename = output_filename , output_ext = output_ext , view_name = view_name ) elif isinstance ( path , basestring ) : return StatikViewSimplePath ( path , output_filename = output_filename , output_ext = output_ext , view_name = view_name ) else : raise ValueError ( "Unrecognised structure for \"path\" configuration in view: %s" % view_name ) | Create the relevant subclass of StatikView based on the given path variable and parameters . |
246,310 | def render ( self , context , db = None , safe_mode = False , extra_context = None ) : if not db : raise MissingParameterError ( "db" , context = self . error_context ) rendered_views = dict ( ) path_instances = db . query ( self . path . query , safe_mode = safe_mode ) extra_ctx = copy ( extra_context ) if extra_context else dict ( ) for inst in path_instances : extra_ctx . update ( { self . path . variable : inst } ) ctx = context . build ( db = db , safe_mode = safe_mode , for_each_inst = inst , extra = extra_ctx ) inst_path = self . path . render ( inst = inst , context = ctx ) rendered_view = self . template . render ( ctx ) rendered_views = deep_merge_dict ( rendered_views , dict_from_path ( inst_path , final_value = rendered_view ) ) return rendered_views | Renders the given context using the specified database returning a dictionary containing path segments and rendered view contents . |
246,311 | def render ( self , db , safe_mode = False , extra_context = None ) : return self . renderer . render ( self . context , db , safe_mode = safe_mode , extra_context = extra_context ) | Renders this view given the specified StatikDatabase instance . |
246,312 | def _validate_number_of_layers ( self , number_of_layers ) : if number_of_layers <= 0 : raise SquashError ( "Number of layers to squash cannot be less or equal 0, provided: %s" % number_of_layers ) if number_of_layers > len ( self . old_image_layers ) : raise SquashError ( "Cannot squash %s layers, the %s image contains only %s layers" % ( number_of_layers , self . image , len ( self . old_image_layers ) ) ) | Makes sure that the specified number of layers to squash is a valid number |
246,313 | def _files_in_layers ( self , layers , directory ) : files = { } for layer in layers : self . log . debug ( "Generating list of files in layer '%s'..." % layer ) tar_file = os . path . join ( directory , layer , "layer.tar" ) with tarfile . open ( tar_file , 'r' , format = tarfile . PAX_FORMAT ) as tar : files [ layer ] = [ self . _normalize_path ( x ) for x in tar . getnames ( ) ] self . log . debug ( "Done, found %s files" % len ( files [ layer ] ) ) return files | Prepare a list of files in all layers |
246,314 | def _prepare_tmp_directory ( self , tmp_dir ) : if tmp_dir : if os . path . exists ( tmp_dir ) : raise SquashError ( "The '%s' directory already exists, please remove it before you proceed" % tmp_dir ) os . makedirs ( tmp_dir ) else : tmp_dir = tempfile . mkdtemp ( prefix = "docker-squash-" ) self . log . debug ( "Using %s as the temporary directory" % tmp_dir ) return tmp_dir | Creates temporary directory that is used to work on layers |
246,315 | def _layers_to_squash ( self , layers , from_layer ) : to_squash = [ ] to_leave = [ ] should_squash = True for l in reversed ( layers ) : if l == from_layer : should_squash = False if should_squash : to_squash . append ( l ) else : to_leave . append ( l ) to_squash . reverse ( ) to_leave . reverse ( ) return to_squash , to_leave | Prepares a list of layer IDs that should be squashed |
246,316 | def _save_image ( self , image_id , directory ) : for x in [ 0 , 1 , 2 ] : self . log . info ( "Saving image %s to %s directory..." % ( image_id , directory ) ) self . log . debug ( "Try #%s..." % ( x + 1 ) ) try : image = self . docker . get_image ( image_id ) if docker . version_info [ 0 ] < 3 : self . log . debug ( "Extracting image using HTTPResponse object directly" ) self . _extract_tar ( image , directory ) else : self . log . debug ( "Extracting image using iterator over raw data" ) fd_r , fd_w = os . pipe ( ) r = os . fdopen ( fd_r , 'rb' ) w = os . fdopen ( fd_w , 'wb' ) extracter = threading . Thread ( target = self . _extract_tar , args = ( r , directory ) ) extracter . start ( ) for chunk in image : w . write ( chunk ) w . flush ( ) w . close ( ) extracter . join ( ) r . close ( ) self . log . info ( "Image saved!" ) return True except Exception as e : self . log . exception ( e ) self . log . warn ( "An error occured while saving the %s image, retrying..." % image_id ) raise SquashError ( "Couldn't save %s image!" % image_id ) | Saves the image as a tar archive under specified name |
246,317 | def _unpack ( self , tar_file , directory ) : self . log . info ( "Unpacking %s tar file to %s directory" % ( tar_file , directory ) ) with tarfile . open ( tar_file , 'r' ) as tar : tar . extractall ( path = directory ) self . log . info ( "Archive unpacked!" ) | Unpacks tar archive to selected directory |
246,318 | def _parse_image_name ( self , image ) : if ':' in image and '/' not in image . split ( ':' ) [ - 1 ] : image_tag = image . split ( ':' ) [ - 1 ] image_name = image [ : - ( len ( image_tag ) + 1 ) ] else : image_tag = "latest" image_name = image return ( image_name , image_tag ) | Parses the provided image name and splits it in the name and tag part if possible . If no tag is provided latest is used . |
246,319 | def _dump_json ( self , data , new_line = False ) : json_data = json . dumps ( data , separators = ( ',' , ':' ) ) if new_line : json_data = "%s\n" % json_data sha = hashlib . sha256 ( json_data . encode ( 'utf-8' ) ) . hexdigest ( ) return json_data , sha | Helper function to marshal object into JSON string . Additionally a sha256sum of the created JSON string is generated . |
246,320 | def _move_layers ( self , layers , src , dest ) : for layer in layers : layer_id = layer . replace ( 'sha256:' , '' ) self . log . debug ( "Moving unmodified layer '%s'..." % layer_id ) shutil . move ( os . path . join ( src , layer_id ) , dest ) | This moves all the layers that should be copied as - is . In other words - all layers that are not meant to be squashed will be moved from the old image to the new image untouched . |
246,321 | def _marker_files ( self , tar , members ) : marker_files = { } self . log . debug ( "Searching for marker files in '%s' archive..." % tar . name ) for member in members : if '.wh.' in member . name : self . log . debug ( "Found '%s' marker file" % member . name ) marker_files [ member ] = tar . extractfile ( member ) self . log . debug ( "Done, found %s files" % len ( marker_files ) ) return marker_files | Searches for marker files in the specified archive . |
246,322 | def _add_markers ( self , markers , tar , files_in_layers , added_symlinks ) : if markers : self . log . debug ( "Marker files to add: %s" % [ o . name for o in markers . keys ( ) ] ) else : return tar_files = [ self . _normalize_path ( x ) for x in tar . getnames ( ) ] for marker , marker_file in six . iteritems ( markers ) : actual_file = marker . name . replace ( '.wh.' , '' ) normalized_file = self . _normalize_path ( actual_file ) should_be_added_back = False if self . _file_should_be_skipped ( normalized_file , added_symlinks ) : self . log . debug ( "Skipping '%s' marker file, this file is on a symlink path" % normalized_file ) continue if normalized_file in tar_files : self . log . debug ( "Skipping '%s' marker file, this file was added earlier for some reason..." % normalized_file ) continue if files_in_layers : for files in files_in_layers . values ( ) : if normalized_file in files : should_be_added_back = True break else : should_be_added_back = True if should_be_added_back : self . log . debug ( "Adding '%s' marker file back..." % marker . name ) tar . addfile ( tarfile . TarInfo ( name = marker . name ) , marker_file ) tar_files . append ( normalized_file ) else : self . log . debug ( "Skipping '%s' marker file..." % marker . name ) | This method is responsible for adding back all markers that were not added to the squashed layer AND files they refer to can be found in layers we do not squash . |
246,323 | def _proc_pax ( self , filetar ) : buf = filetar . fileobj . read ( self . _block ( self . size ) ) if self . type == tarfile . XGLTYPE : pax_headers = filetar . pax_headers else : pax_headers = filetar . pax_headers . copy ( ) regex = re . compile ( r"(\d+) ([^=]+)=" , re . U ) pos = 0 while True : match = regex . match ( buf , pos ) if not match : break length , keyword = match . groups ( ) length = int ( length ) value = buf [ match . end ( 2 ) + 1 : match . start ( 1 ) + length - 1 ] try : keyword = keyword . decode ( "utf8" ) except Exception : pass try : value = value . decode ( "utf8" ) except Exception : pass pax_headers [ keyword ] = value pos += length try : next = self . fromtarfile ( filetar ) except tarfile . HeaderError : raise tarfile . SubsequentHeaderError ( "missing or bad subsequent header" ) if self . type in ( tarfile . XHDTYPE , tarfile . SOLARIS_XHDTYPE ) : next . _apply_pax_info ( pax_headers , filetar . encoding , filetar . errors ) next . offset = self . offset if "size" in pax_headers : offset = next . offset_data if next . isreg ( ) or next . type not in tarfile . SUPPORTED_TYPES : offset += next . _block ( next . size ) filetar . offset = offset return next | Process an extended or global header as described in POSIX . 1 - 2001 . |
246,324 | def _create_pax_generic_header ( cls , pax_headers , type = tarfile . XHDTYPE ) : records = [ ] for keyword , value in pax_headers . iteritems ( ) : try : keyword = keyword . encode ( "utf8" ) except Exception : pass try : value = value . encode ( "utf8" ) except Exception : pass l = len ( keyword ) + len ( value ) + 3 n = p = 0 while True : n = l + len ( str ( p ) ) if n == p : break p = n records . append ( "%d %s=%s\n" % ( p , keyword , value ) ) records = "" . join ( records ) info = { } info [ "name" ] = "././@PaxHeader" info [ "type" ] = type info [ "size" ] = len ( records ) info [ "magic" ] = tarfile . POSIX_MAGIC return cls . _create_header ( info , tarfile . USTAR_FORMAT ) + cls . _create_payload ( records ) | Return a POSIX . 1 - 2001 extended or global header sequence that contains a list of keyword value pairs . The values must be unicode objects . |
246,325 | def _read_json_file ( self , json_file ) : self . log . debug ( "Reading '%s' JSON file..." % json_file ) with open ( json_file , 'r' ) as f : return json . load ( f , object_pairs_hook = OrderedDict ) | Helper function to read JSON file as OrderedDict |
246,326 | def _read_layer_paths ( self , old_image_config , old_image_manifest , layers_to_move ) : current_manifest_layer = 0 layer_paths_to_move = [ ] layer_paths_to_squash = [ ] for i , layer in enumerate ( old_image_config [ 'history' ] ) : if not layer . get ( 'empty_layer' , False ) : layer_id = old_image_manifest [ 'Layers' ] [ current_manifest_layer ] . rsplit ( '/' ) [ 0 ] if len ( layers_to_move ) > i : layer_paths_to_move . append ( layer_id ) else : layer_paths_to_squash . append ( layer_id ) current_manifest_layer += 1 return layer_paths_to_squash , layer_paths_to_move | In case of v2 format layer id s are not the same as the id s used in the exported tar archive to name directories for layers . These id s can be found in the configuration files saved with the image - we need to read them . |
246,327 | def _generate_squashed_layer_path_id ( self ) : v1_metadata = OrderedDict ( self . old_image_config ) v1_metadata [ 'created' ] = self . date for key in 'history' , 'rootfs' , 'container' : v1_metadata . pop ( key , None ) operating_system = v1_metadata . pop ( 'os' , None ) v1_metadata [ 'layer_id' ] = "sha256:%s" % self . chain_ids [ - 1 ] if operating_system : v1_metadata [ 'os' ] = operating_system if self . layer_paths_to_move : if self . layer_paths_to_squash : parent = self . layer_paths_to_move [ - 1 ] else : parent = self . layer_paths_to_move [ 0 ] v1_metadata [ 'parent' ] = "sha256:%s" % parent if self . squash_id : v1_metadata [ 'config' ] [ 'Image' ] = self . squash_id else : v1_metadata [ 'config' ] [ 'Image' ] = "" sha = self . _dump_json ( v1_metadata ) [ 1 ] return sha | This function generates the id used to name the directory to store the squashed layer content in the archive . |
246,328 | def write_local_file ( self , outputfile , path ) : self . logger . info ( "Writing file to %s" , path ) outputfile . seek ( 0 ) with open ( path , 'wb' ) as fd : copyfileobj ( outputfile , fd ) | Write file to the desired path . |
246,329 | def _cleanup_old_backups ( self , database = None , servername = None ) : self . storage . clean_old_backups ( encrypted = self . encrypt , compressed = self . compress , content_type = self . content_type , database = database , servername = servername ) | Cleanup old backups keeping the number of backups specified by DBBACKUP_CLEANUP_KEEP and any backups that occur on first of the month . |
246,330 | def _save_new_backup ( self , database ) : self . logger . info ( "Backing Up Database: %s" , database [ 'NAME' ] ) filename = self . connector . generate_filename ( self . servername ) outputfile = self . connector . create_dump ( ) if self . compress : compressed_file , filename = utils . compress_file ( outputfile , filename ) outputfile = compressed_file if self . encrypt : encrypted_file , filename = utils . encrypt_file ( outputfile , filename ) outputfile = encrypted_file filename = self . filename if self . filename else filename self . logger . debug ( "Backup size: %s" , utils . handle_size ( outputfile ) ) outputfile . seek ( 0 ) if self . path is None : self . write_to_storage ( outputfile , filename ) else : self . write_local_file ( outputfile , self . path ) | Save a new backup file . |
246,331 | def _explore_storage ( self ) : path = '' dirs = [ path ] while dirs : path = dirs . pop ( ) subdirs , files = self . media_storage . listdir ( path ) for media_filename in files : yield os . path . join ( path , media_filename ) dirs . extend ( [ os . path . join ( path , subdir ) for subdir in subdirs ] ) | Generator of all files contained in media storage . |
246,332 | def _create_tar ( self , name ) : fileobj = utils . create_spooled_temporary_file ( ) mode = 'w:gz' if self . compress else 'w' tar_file = tarfile . open ( name = name , fileobj = fileobj , mode = mode ) for media_filename in self . _explore_storage ( ) : tarinfo = tarfile . TarInfo ( media_filename ) media_file = self . media_storage . open ( media_filename ) tarinfo . size = len ( media_file ) tar_file . addfile ( tarinfo , media_file ) tar_file . close ( ) return fileobj | Create TAR file . |
246,333 | def backup_mediafiles ( self ) : extension = "tar%s" % ( '.gz' if self . compress else '' ) filename = utils . filename_generate ( extension , servername = self . servername , content_type = self . content_type ) tarball = self . _create_tar ( filename ) if self . encrypt : encrypted_file = utils . encrypt_file ( tarball , filename ) tarball , filename = encrypted_file self . logger . debug ( "Backup size: %s" , utils . handle_size ( tarball ) ) tarball . seek ( 0 ) if self . path is None : self . write_to_storage ( tarball , filename ) else : self . write_local_file ( tarball , self . path ) | Create backup file and write it to storage . |
246,334 | def bytes_to_str ( byteVal , decimals = 1 ) : for unit , byte in BYTES : if ( byteVal >= byte ) : if decimals == 0 : return '%s %s' % ( int ( round ( byteVal / byte , 0 ) ) , unit ) return '%s %s' % ( round ( byteVal / byte , decimals ) , unit ) return '%s B' % byteVal | Convert bytes to a human readable string . |
246,335 | def mail_admins ( subject , message , fail_silently = False , connection = None , html_message = None ) : if not settings . ADMINS : return mail = EmailMultiAlternatives ( '%s%s' % ( settings . EMAIL_SUBJECT_PREFIX , subject ) , message , settings . SERVER_EMAIL , [ a [ 1 ] for a in settings . ADMINS ] , connection = connection ) if html_message : mail . attach_alternative ( html_message , 'text/html' ) mail . send ( fail_silently = fail_silently ) | Sends a message to the admins as defined by the DBBACKUP_ADMINS setting . |
246,336 | def create_spooled_temporary_file ( filepath = None , fileobj = None ) : spooled_file = tempfile . SpooledTemporaryFile ( max_size = settings . TMP_FILE_MAX_SIZE , dir = settings . TMP_DIR ) if filepath : fileobj = open ( filepath , 'r+b' ) if fileobj is not None : fileobj . seek ( 0 ) copyfileobj ( fileobj , spooled_file , settings . TMP_FILE_READ_SIZE ) return spooled_file | Create a spooled temporary file . if filepath or fileobj is defined its content will be copied into temporary file . |
246,337 | def compress_file ( inputfile , filename ) : outputfile = create_spooled_temporary_file ( ) new_filename = filename + '.gz' zipfile = gzip . GzipFile ( filename = filename , fileobj = outputfile , mode = "wb" ) try : inputfile . seek ( 0 ) copyfileobj ( inputfile , zipfile , settings . TMP_FILE_READ_SIZE ) finally : zipfile . close ( ) return outputfile , new_filename | Compress input file using gzip and change its name . |
246,338 | def uncompress_file ( inputfile , filename ) : zipfile = gzip . GzipFile ( fileobj = inputfile , mode = "rb" ) try : outputfile = create_spooled_temporary_file ( fileobj = zipfile ) finally : zipfile . close ( ) new_basename = os . path . basename ( filename ) . replace ( '.gz' , '' ) return outputfile , new_basename | Uncompress this file using gzip and change its name . |
246,339 | def timestamp ( value ) : value = value if timezone . is_naive ( value ) else timezone . localtime ( value ) return value . strftime ( settings . DATE_FORMAT ) | Return the timestamp of a datetime . datetime object . |
246,340 | def datefmt_to_regex ( datefmt ) : new_string = datefmt for pat , reg in PATTERN_MATCHNG : new_string = new_string . replace ( pat , reg ) return re . compile ( r'(%s)' % new_string ) | Convert a strftime format string to a regex . |
246,341 | def filename_to_date ( filename , datefmt = None ) : datefmt = datefmt or settings . DATE_FORMAT datestring = filename_to_datestring ( filename , datefmt ) if datestring is not None : return datetime . strptime ( datestring , datefmt ) | Return a datetime from a file name . |
246,342 | def filename_generate ( extension , database_name = '' , servername = None , content_type = 'db' , wildcard = None ) : if content_type == 'db' : if '/' in database_name : database_name = os . path . basename ( database_name ) if '.' in database_name : database_name = database_name . split ( '.' ) [ 0 ] template = settings . FILENAME_TEMPLATE elif content_type == 'media' : template = settings . MEDIA_FILENAME_TEMPLATE else : template = settings . FILENAME_TEMPLATE params = { 'servername' : servername or settings . HOSTNAME , 'datetime' : wildcard or datetime . now ( ) . strftime ( settings . DATE_FORMAT ) , 'databasename' : database_name , 'extension' : extension , 'content_type' : content_type } if callable ( template ) : filename = template ( ** params ) else : filename = template . format ( ** params ) filename = REG_FILENAME_CLEAN . sub ( '-' , filename ) filename = filename [ 1 : ] if filename . startswith ( '-' ) else filename return filename | Create a new backup filename . |
246,343 | def get_storage ( path = None , options = None ) : path = path or settings . STORAGE options = options or settings . STORAGE_OPTIONS if not path : raise ImproperlyConfigured ( 'You must specify a storage class using ' 'DBBACKUP_STORAGE settings.' ) return Storage ( path , ** options ) | Get the specified storage configured with options . |
246,344 | def list_backups ( self , encrypted = None , compressed = None , content_type = None , database = None , servername = None ) : if content_type not in ( 'db' , 'media' , None ) : msg = "Bad content_type %s, must be 'db', 'media', or None" % ( content_type ) raise TypeError ( msg ) files = [ f for f in self . list_directory ( ) if utils . filename_to_datestring ( f ) ] if encrypted is not None : files = [ f for f in files if ( '.gpg' in f ) == encrypted ] if compressed is not None : files = [ f for f in files if ( '.gz' in f ) == compressed ] if content_type == 'media' : files = [ f for f in files if '.tar' in f ] elif content_type == 'db' : files = [ f for f in files if '.tar' not in f ] if database : files = [ f for f in files if database in f ] if servername : files = [ f for f in files if servername in f ] return files | List stored files except given filter . If filter is None it won t be used . content_type must be db for database backups or media for media backups . |
246,345 | def get_older_backup ( self , encrypted = None , compressed = None , content_type = None , database = None , servername = None ) : files = self . list_backups ( encrypted = encrypted , compressed = compressed , content_type = content_type , database = database , servername = servername ) if not files : raise FileNotFound ( "There's no backup file available." ) return min ( files , key = utils . filename_to_date ) | Return the older backup s file name . |
246,346 | def clean_old_backups ( self , encrypted = None , compressed = None , content_type = None , database = None , servername = None , keep_number = None ) : if keep_number is None : keep_number = settings . CLEANUP_KEEP if content_type == 'db' else settings . CLEANUP_KEEP_MEDIA keep_filter = settings . CLEANUP_KEEP_FILTER files = self . list_backups ( encrypted = encrypted , compressed = compressed , content_type = content_type , database = database , servername = servername ) files = sorted ( files , key = utils . filename_to_date , reverse = True ) files_to_delete = [ fi for i , fi in enumerate ( files ) if i >= keep_number ] for filename in files_to_delete : if keep_filter ( filename ) : continue self . delete_file ( filename ) | Delete olders backups and hold the number defined . |
246,347 | def _get_database ( self , options ) : database_name = options . get ( 'database' ) if not database_name : if len ( settings . DATABASES ) > 1 : errmsg = "Because this project contains more than one database, you" " must specify the --database option." raise CommandError ( errmsg ) database_name = list ( settings . DATABASES . keys ( ) ) [ 0 ] if database_name not in settings . DATABASES : raise CommandError ( "Database %s does not exist." % database_name ) return database_name , settings . DATABASES [ database_name ] | Get the database to restore . |
246,348 | def _restore_backup ( self ) : input_filename , input_file = self . _get_backup_file ( database = self . database_name , servername = self . servername ) self . logger . info ( "Restoring backup for database '%s' and server '%s'" , self . database_name , self . servername ) self . logger . info ( "Restoring: %s" % input_filename ) if self . decrypt : unencrypted_file , input_filename = utils . unencrypt_file ( input_file , input_filename , self . passphrase ) input_file . close ( ) input_file = unencrypted_file if self . uncompress : uncompressed_file , input_filename = utils . uncompress_file ( input_file , input_filename ) input_file . close ( ) input_file = uncompressed_file self . logger . info ( "Restore tempfile created: %s" , utils . handle_size ( input_file ) ) if self . interactive : self . _ask_confirmation ( ) input_file . seek ( 0 ) self . connector = get_connector ( self . database_name ) self . connector . restore_dump ( input_file ) | Restore the specified database . |
246,349 | def get_connector ( database_name = None ) : from django . db import connections , DEFAULT_DB_ALIAS database_name = database_name or DEFAULT_DB_ALIAS connection = connections [ database_name ] engine = connection . settings_dict [ 'ENGINE' ] connector_settings = settings . CONNECTORS . get ( database_name , { } ) connector_path = connector_settings . get ( 'CONNECTOR' , CONNECTOR_MAPPING [ engine ] ) connector_module_path = '.' . join ( connector_path . split ( '.' ) [ : - 1 ] ) module = import_module ( connector_module_path ) connector_name = connector_path . split ( '.' ) [ - 1 ] connector = getattr ( module , connector_name ) return connector ( database_name , ** connector_settings ) | Get a connector from its database key in setttings . |
246,350 | def settings ( self ) : if not hasattr ( self , '_settings' ) : sett = self . connection . settings_dict . copy ( ) sett . update ( settings . CONNECTORS . get ( self . database_name , { } ) ) self . _settings = sett return self . _settings | Mix of database and connector settings . |
246,351 | def run_command ( self , command , stdin = None , env = None ) : cmd = shlex . split ( command ) stdout = SpooledTemporaryFile ( max_size = settings . TMP_FILE_MAX_SIZE , dir = settings . TMP_DIR ) stderr = SpooledTemporaryFile ( max_size = settings . TMP_FILE_MAX_SIZE , dir = settings . TMP_DIR ) full_env = os . environ . copy ( ) if self . use_parent_env else { } full_env . update ( self . env ) full_env . update ( env or { } ) try : if isinstance ( stdin , ( ContentFile , SFTPStorageFile ) ) : process = Popen ( cmd , stdin = PIPE , stdout = stdout , stderr = stderr , env = full_env ) process . communicate ( input = stdin . read ( ) ) else : process = Popen ( cmd , stdin = stdin , stdout = stdout , stderr = stderr , env = full_env ) process . wait ( ) if process . poll ( ) : stderr . seek ( 0 ) raise exceptions . CommandConnectorError ( "Error running: {}\n{}" . format ( command , stderr . read ( ) . decode ( 'utf-8' ) ) ) stdout . seek ( 0 ) stderr . seek ( 0 ) return stdout , stderr except OSError as err : raise exceptions . CommandConnectorError ( "Error running: {}\n{}" . format ( command , str ( err ) ) ) | Launch a shell command line . |
246,352 | def _assign_zones ( self ) : for zone_id in range ( 1 , 5 ) : zone = RainCloudyFaucetZone ( parent = self . _parent , controller = self . _controller , faucet = self , zone_id = zone_id ) if zone not in self . zones : self . zones . append ( zone ) | Assign all RainCloudyFaucetZone managed by faucet . |
246,353 | def _find_zone_by_id ( self , zone_id ) : if not self . zones : return None zone = list ( filter ( lambda zone : zone . id == zone_id , self . zones ) ) return zone [ 0 ] if zone else None | Return zone by id . |
246,354 | def _set_zone_name ( self , zoneid , name ) : zoneid -= 1 data = { '_set_zone_name' : 'Set Name' , 'select_zone' : str ( zoneid ) , 'zone_name' : name , } self . _controller . post ( data ) | Private method to override zone name . |
246,355 | def _set_watering_time ( self , zoneid , value ) : if value not in MANUAL_WATERING_ALLOWED : raise ValueError ( 'Valid options are: {}' . format ( ', ' . join ( map ( str , MANUAL_WATERING_ALLOWED ) ) ) ) if isinstance ( value , int ) and value == 0 : value = 'OFF' elif isinstance ( value , str ) : value = value . upper ( ) if value == 'ON' : value = MAX_WATERING_MINUTES ddata = self . preupdate ( ) attr = 'zone{}_select_manual_mode' . format ( zoneid ) ddata [ attr ] = value self . submit_action ( ddata ) | Private method to set watering_time per zone . |
246,356 | def watering_time ( self ) : index = self . id - 1 auto_watering_time = self . _attributes [ 'rain_delay_mode' ] [ index ] [ 'auto_watering_time' ] manual_watering_time = self . _attributes [ 'rain_delay_mode' ] [ index ] [ 'manual_watering_time' ] if auto_watering_time > manual_watering_time : watering_time = auto_watering_time else : watering_time = manual_watering_time return watering_time | Return watering_time from zone . |
246,357 | def _set_rain_delay ( self , zoneid , value ) : zoneid -= 1 if isinstance ( value , int ) : if value > MAX_RAIN_DELAY_DAYS or value < 0 : return None elif value == 0 : value = 'off' elif value == 1 : value = '1day' elif value >= 2 : value = str ( value ) + 'days' elif isinstance ( value , str ) : if value . lower ( ) != 'off' : return None ddata = self . preupdate ( ) attr = 'zone{}_rain_delay_select' . format ( zoneid ) ddata [ attr ] = value self . submit_action ( ddata ) return True | Generic method to set auto_watering program . |
246,358 | def _set_auto_watering ( self , zoneid , value ) : if not isinstance ( value , bool ) : return None ddata = self . preupdate ( ) attr = 'zone{}_program_toggle' . format ( zoneid ) try : if not value : ddata . pop ( attr ) else : ddata [ attr ] = 'on' except KeyError : pass self . submit_action ( ddata ) return True | Private method to set auto_watering program . |
246,359 | def auto_watering ( self ) : value = "zone{}" . format ( self . id ) return find_program_status ( self . _parent . html [ 'home' ] , value ) | Return if zone is configured to automatic watering . |
246,360 | def _to_dict ( self ) : return { 'auto_watering' : getattr ( self , "auto_watering" ) , 'droplet' : getattr ( self , "droplet" ) , 'is_watering' : getattr ( self , "is_watering" ) , 'name' : getattr ( self , "name" ) , 'next_cycle' : getattr ( self , "next_cycle" ) , 'rain_delay' : getattr ( self , "rain_delay" ) , 'watering_time' : getattr ( self , "watering_time" ) , } | Method to build zone dict . |
246,361 | def preupdate ( self , force_refresh = True ) : ddata = MANUAL_OP_DATA . copy ( ) if force_refresh : self . update ( ) ddata [ 'select_controller' ] = self . _parent . controllers . index ( self . _controller ) ddata [ 'select_faucet' ] = self . _controller . faucets . index ( self . _faucet ) for zone in self . _faucet . zones : attr = 'zone{}_program_toggle' . format ( zone . id ) if zone . auto_watering : ddata [ attr ] = 'on' for zone in self . _faucet . zones : attr = 'zone{}_select_manual_mode' . format ( zone . id ) if zone . watering_time and attr in ddata . keys ( ) : ddata [ attr ] = zone . watering_time for zone in self . _faucet . zones : attr = 'zone{}_rain_delay_select' . format ( zone . id - 1 ) value = zone . rain_delay if value and attr in ddata . keys ( ) : if int ( value ) >= 2 and int ( value ) <= 7 : value = str ( value ) + 'days' else : value = str ( value ) + 'day' ddata [ attr ] = value return ddata | Return a dict with all current options prior submitting request . |
246,362 | def submit_action ( self , ddata ) : self . _controller . post ( ddata , url = HOME_ENDPOINT , referer = HOME_ENDPOINT ) | Post data . |
246,363 | def controller ( self ) : if hasattr ( self , 'controllers' ) : if len ( self . controllers ) > 1 : raise TypeError ( "Only one controller per account." ) return self . controllers [ 0 ] raise AttributeError ( "There is no controller assigned." ) | Show current linked controllers . |
246,364 | def _assign_faucets ( self , faucets ) : if not faucets : raise TypeError ( "Controller does not have a faucet assigned." ) for faucet_id in faucets : self . faucets . append ( RainCloudyFaucet ( self . _parent , self , faucet_id ) ) | Assign RainCloudyFaucet objects to self . faucets . |
246,365 | def post ( self , ddata , url = SETUP_ENDPOINT , referer = SETUP_ENDPOINT ) : headers = HEADERS . copy ( ) if referer is None : headers . pop ( 'Referer' ) else : headers [ 'Referer' ] = referer if 'csrfmiddlewaretoken' not in ddata . keys ( ) : ddata [ 'csrfmiddlewaretoken' ] = self . _parent . csrftoken req = self . _parent . client . post ( url , headers = headers , data = ddata ) if req . status_code == 200 : self . update ( ) | Method to update some attributes on namespace . |
246,366 | def _get_cu_and_fu_status ( self ) : headers = HEADERS . copy ( ) headers [ 'Accept' ] = '*/*' headers [ 'X-Requested-With' ] = 'XMLHttpRequest' headers [ 'X-CSRFToken' ] = self . _parent . csrftoken args = '?controller_serial=' + self . serial + '&faucet_serial=' + self . faucet . serial req = self . _parent . client . get ( STATUS_ENDPOINT + args , headers = headers ) if req . status_code == 403 : self . _parent . login ( ) self . update ( ) elif req . status_code == 200 : self . attributes = req . json ( ) else : req . raise_for_status ( ) | Submit GET request to update information . |
246,367 | def name ( self , value ) : data = { '_set_controller_name' : 'Set Name' , 'controller_name' : value , } self . post ( data , url = SETUP_ENDPOINT , referer = SETUP_ENDPOINT ) | Set a new name to controller . |
246,368 | def faucet ( self ) : if hasattr ( self , 'faucets' ) : if len ( self . faucets ) > 1 : raise TypeError ( "Only one faucet per account." ) return self . faucets [ 0 ] raise AttributeError ( "There is no faucet assigned." ) | Show current linked faucet . |
246,369 | def serial_finder ( data ) : if not isinstance ( data , BeautifulSoup ) : raise TypeError ( "Function requires BeautifulSoup HTML element." ) try : controllersElement = data . find_all ( 'select' , { 'id' : 'id_select_controller2' } ) faucetsElement = data . find_all ( 'select' , { 'id' : 'id_select_faucet2' } ) controllerSerial = controllersElement [ 0 ] . text . split ( '-' ) [ 1 ] . strip ( ) faucetSerial = faucetsElement [ 0 ] . text . split ( '-' ) [ 1 ] . strip ( ) parsed_dict = { } parsed_dict [ 'controller_serial' ] = controllerSerial parsed_dict [ 'faucet_serial' ] = [ faucetSerial ] return parsed_dict except ( AttributeError , IndexError , ValueError ) : raise RainCloudyException ( 'Could not find any valid controller or faucet' ) | Find controller serial and faucet_serial from the setup page . |
246,370 | def find_controller_or_faucet_name ( data , p_type ) : if not isinstance ( data , BeautifulSoup ) : raise TypeError ( "Function requires BeautilSoup HTML element." ) if not ( p_type == 'controller' or p_type == 'faucet' ) : raise TypeError ( "Function p_type must be controller or faucet" ) try : search_field = 'id_select_{0}' . format ( p_type ) child = data . find ( 'select' , { 'id' : search_field } ) return child . get_text ( ) . strip ( ) except AttributeError : return None | Find on the HTML document the controller name . |
246,371 | def find_zone_name ( data , zone_id ) : if not isinstance ( data , BeautifulSoup ) : raise TypeError ( "Function requires BeautilSoup HTML element." ) table = data . find ( 'table' , { 'class' : 'zone_table' } ) table_body = table . find ( 'tbody' ) rows = table_body . find_all ( 'span' , { 'class' : 'more_info' } ) for row in rows : if row . get_text ( ) . startswith ( str ( zone_id ) ) : return row . get_text ( ) [ 4 : ] . strip ( ) return None | Find on the HTML document the zone name . |
246,372 | def new_payment_query_listener ( sender , order = None , payment = None , ** kwargs ) : payment . amount = order . total payment . currency = order . currency logger . debug ( "new_payment_query_listener, amount=%s, currency=%s" , payment . amount , payment . currency ) | Here we fill only two obligatory fields of payment and leave signal handler |
246,373 | def payment_status_changed_listener ( sender , instance , old_status , new_status , ** kwargs ) : logger . debug ( "payment_status_changed_listener, old=%s, new=%s" , old_status , new_status ) if old_status != 'paid' and new_status == 'paid' : instance . order . status = 'P' instance . order . save ( ) | Here we will actually do something when payment is accepted . E . g . lets change an order status . |
246,374 | def register_to_payment ( order_class , ** kwargs ) : global Payment global Order class Payment ( PaymentFactory . construct ( order = order_class , ** kwargs ) ) : objects = PaymentManager ( ) class Meta : ordering = ( '-created_on' , ) verbose_name = _ ( "Payment" ) verbose_name_plural = _ ( "Payments" ) Order = order_class backend_models_modules = import_backend_modules ( 'models' ) for backend_name , models_module in backend_models_modules . items ( ) : for model in models_module . build_models ( Payment ) : apps . register_model ( backend_name , model ) return Payment | A function for registering unaware order class to getpaid . This will generate a Payment model class that will store payments with ForeignKey to original order class |
246,375 | def get_backend_choices ( currency = None ) : choices = [ ] backends_names = getattr ( settings , 'GETPAID_BACKENDS' , [ ] ) for backend_name in backends_names : backend = import_module ( backend_name ) if currency : if currency in backend . PaymentProcessor . BACKEND_ACCEPTED_CURRENCY : choices . append ( ( backend_name , backend . PaymentProcessor . BACKEND_NAME ) ) else : choices . append ( ( backend_name , backend . PaymentProcessor . BACKEND_NAME ) ) return choices | Get active backends modules . Backend list can be filtered by supporting given currency . |
246,376 | def online ( cls , payload , ip , req_sig ) : from getpaid . models import Payment params = json . loads ( payload ) order_data = params . get ( 'order' , { } ) pos_id = order_data . get ( 'merchantPosId' ) payment_id = order_data . get ( 'extOrderId' ) key2 = cls . get_backend_setting ( 'key2' ) if pos_id != cls . get_backend_setting ( 'pos_id' ) : logger . warning ( 'Received message for different pos: {}' . format ( pos_id ) ) return 'ERROR' req_sig_dict = cls . parse_payu_sig ( req_sig ) sig = cls . compute_sig ( payload , key2 , algorithm = req_sig_dict . get ( 'algorithm' , 'md5' ) ) if sig != req_sig_dict [ 'signature' ] : logger . warning ( 'Received message with malformed signature. Payload: {}' . format ( payload ) ) return 'ERROR' try : payment = Payment . objects . get ( id = payment_id ) except Payment . DoesNotExist : logger . warning ( 'Received message for nonexistent payment: {}.\nPayload: {}' . format ( payment_id , payload ) ) return 'ERROR' status = order_data [ 'status' ] if payment . status != 'paid' : if status == 'COMPLETED' : payment . external_id = order_data [ 'orderId' ] payment . amount = Decimal ( order_data [ 'totalAmount' ] ) / Decimal ( 100 ) payment . amount_paid = payment . amount payment . currenct = order_data [ 'currencyCode' ] payment . paid_on = pendulum . parse ( params [ 'localReceiptDateTime' ] ) . in_tz ( 'utc' ) payment . description = order_data [ 'description' ] payment . change_status ( 'paid' ) elif status == 'PENDING' : payment . change_status ( 'in_progress' ) elif status in [ 'CANCELED' , 'REJECTED' ] : payment . change_status ( 'cancelled' ) return 'OK' | Receive and analyze request from payment service with information on payment status change . |
246,377 | def get_order_description ( self , payment , order ) : template = getattr ( settings , 'GETPAID_ORDER_DESCRIPTION' , None ) if template : return Template ( template ) . render ( Context ( { "payment" : payment , "order" : order } ) ) else : return six . text_type ( order ) | Renders order description using django template provided in settings . GETPAID_ORDER_DESCRIPTION or if not provided return unicode representation of Order object . |
246,378 | def get_backend_setting ( cls , name , default = None ) : backend_settings = get_backend_settings ( cls . BACKEND ) if default is not None : return backend_settings . get ( name , default ) else : try : return backend_settings [ name ] except KeyError : raise ImproperlyConfigured ( "getpaid '%s' requires backend '%s' setting" % ( cls . BACKEND , name ) ) | Reads name setting from backend settings dictionary . |
246,379 | def get_gateway_url ( self , request ) : params = { 'id' : self . get_backend_setting ( 'id' ) , 'description' : self . get_order_description ( self . payment , self . payment . order ) , 'amount' : self . payment . amount , 'currency' : self . payment . currency , 'type' : 0 , 'control' : self . payment . pk , 'URL' : self . get_URL ( self . payment . pk ) , 'URLC' : self . get_URLC ( ) , 'api_version' : 'dev' , } user_data = { 'email' : None , 'lang' : None , } signals . user_data_query . send ( sender = None , order = self . payment . order , user_data = user_data ) if user_data [ 'email' ] : params [ 'email' ] = user_data [ 'email' ] if user_data [ 'lang' ] and user_data [ 'lang' ] . lower ( ) in self . _ACCEPTED_LANGS : params [ 'lang' ] = user_data [ 'lang' ] . lower ( ) elif self . get_backend_setting ( 'lang' , False ) and self . get_backend_setting ( 'lang' ) . lower ( ) in self . _ACCEPTED_LANGS : params [ 'lang' ] = self . get_backend_setting ( 'lang' ) . lower ( ) if self . get_backend_setting ( 'onlinetransfer' , False ) : params [ 'onlinetransfer' ] = 1 if self . get_backend_setting ( 'p_email' , False ) : params [ 'p_email' ] = self . get_backend_setting ( 'p_email' ) if self . get_backend_setting ( 'p_info' , False ) : params [ 'p_info' ] = self . get_backend_setting ( 'p_info' ) if self . get_backend_setting ( 'tax' , False ) : params [ 'tax' ] = 1 gateway_url = self . get_backend_setting ( 'gateway_url' , self . _GATEWAY_URL ) if self . get_backend_setting ( 'method' , 'get' ) . lower ( ) == 'post' : return gateway_url , 'POST' , params elif self . get_backend_setting ( 'method' , 'get' ) . lower ( ) == 'get' : for key in params . keys ( ) : params [ key ] = six . text_type ( params [ key ] ) . encode ( 'utf-8' ) return gateway_url + '?' + urlencode ( params ) , "GET" , { } else : raise ImproperlyConfigured ( 'Dotpay payment backend accepts only GET or POST' ) | Routes a payment to Gateway should return URL for redirection . |
246,380 | def channel_ready_future ( channel ) : fut = channel . _loop . create_future ( ) def _set_result ( state ) : if not fut . done ( ) and state is _grpc . ChannelConnectivity . READY : fut . set_result ( None ) fut . add_done_callback ( lambda f : channel . unsubscribe ( _set_result ) ) channel . subscribe ( _set_result , try_to_connect = True ) return fut | Creates a Future that tracks when a Channel is ready . |
246,381 | def insecure_channel ( target , options = None , * , loop = None , executor = None , standalone_pool_for_streaming = False ) : return Channel ( _grpc . insecure_channel ( target , options ) , loop , executor , standalone_pool_for_streaming ) | Creates an insecure Channel to a server . |
246,382 | def secure_channel ( target , credentials , options = None , * , loop = None , executor = None , standalone_pool_for_streaming = False ) : return Channel ( _grpc . secure_channel ( target , credentials , options ) , loop , executor , standalone_pool_for_streaming ) | Creates a secure Channel to a server . |
246,383 | def future ( self , request , timeout = None , metadata = None , credentials = None ) : return _utils . wrap_future_call ( self . _inner . future ( request , timeout , metadata , credentials ) , self . _loop , self . _executor ) | Asynchronously invokes the underlying RPC . |
246,384 | async def with_call ( self , request_iterator , timeout = None , metadata = None , credentials = None ) : fut = self . future ( request_iterator , timeout , metadata , credentials ) try : result = await fut return ( result , fut ) finally : if not fut . done ( ) : fut . cancel ( ) | Synchronously invokes the underlying RPC on the client . |
246,385 | def future ( self , request_iterator , timeout = None , metadata = None , credentials = None ) : return _utils . wrap_future_call ( self . _inner . future ( _utils . WrappedAsyncIterator ( request_iterator , self . _loop ) , timeout , metadata , credentials ) , self . _loop , self . _executor ) | Asynchronously invokes the underlying RPC on the client . |
246,386 | def config_field_type ( field , cls ) : return defs . ConfigField ( lambda _ : isinstance ( _ , cls ) , lambda : CONFIG_FIELD_TYPE_ERROR . format ( field , cls . __name__ ) ) | Validate a config field against a type . |
246,387 | def get_config_parameters ( plugin_path ) : json_config_path = os . path . join ( plugin_path , defs . CONFIG_FILE_NAME ) with open ( json_config_path , "r" ) as f : config = json . load ( f ) return config . get ( defs . PARAMETERS , [ ] ) | Return the parameters section from config . json . |
246,388 | def validate_config_parameters ( config_json , allowed_keys , allowed_types ) : custom_fields = config_json . get ( defs . PARAMETERS , [ ] ) for field in custom_fields : validate_field ( field , allowed_keys , allowed_types ) default = field . get ( defs . DEFAULT ) field_type = field . get ( defs . TYPE ) if default : validate_field_matches_type ( field [ defs . VALUE ] , default , field_type ) | Validate parameters in config file . |
246,389 | def validate_field_matches_type ( field , value , field_type , select_items = None , _min = None , _max = None ) : if ( field_type == defs . TEXT_TYPE and not isinstance ( value , six . string_types ) ) or ( field_type == defs . STRING_TYPE and not isinstance ( value , six . string_types ) ) or ( field_type == defs . BOOLEAN_TYPE and not isinstance ( value , bool ) ) or ( field_type == defs . INTEGER_TYPE and not isinstance ( value , int ) ) : raise exceptions . ConfigFieldTypeMismatch ( field , value , field_type ) if field_type == defs . INTEGER_TYPE : if _min and value < _min : raise exceptions . ConfigFieldTypeMismatch ( field , value , "must be higher than {}" . format ( _min ) ) if _max and value > _max : raise exceptions . ConfigFieldTypeMismatch ( field , value , "must be lower than {}" . format ( _max ) ) if field_type == defs . SELECT_TYPE : from honeycomb . utils . plugin_utils import get_select_items items = get_select_items ( select_items ) if value not in items : raise exceptions . ConfigFieldTypeMismatch ( field , value , "one of: {}" . format ( ", " . join ( items ) ) ) | Validate a config field against a specific type . |
246,390 | def get_truetype ( value ) : if value in [ "true" , "True" , "y" , "Y" , "yes" ] : return True if value in [ "false" , "False" , "n" , "N" , "no" ] : return False if value . isdigit ( ) : return int ( value ) return str ( value ) | Convert a string to a pythonized parameter . |
246,391 | def validate_field ( field , allowed_keys , allowed_types ) : for key , value in field . items ( ) : if key not in allowed_keys : raise exceptions . ParametersFieldError ( key , "property" ) if key == defs . TYPE : if value not in allowed_types : raise exceptions . ParametersFieldError ( value , key ) if key == defs . VALUE : if not is_valid_field_name ( value ) : raise exceptions . ParametersFieldError ( value , "field name" ) | Validate field is allowed and valid . |
246,392 | def is_valid_field_name ( value ) : leftovers = re . sub ( r"\w" , "" , value ) leftovers = re . sub ( r"-" , "" , leftovers ) if leftovers != "" or value [ 0 ] . isdigit ( ) or value [ 0 ] in [ "-" , "_" ] or " " in value : return False return True | Ensure field name is valid . |
246,393 | def process_config ( ctx , configfile ) : from honeycomb . commands . service . run import run as service_run from honeycomb . commands . service . install import install as service_install from honeycomb . commands . integration . install import install as integration_install from honeycomb . commands . integration . configure import configure as integration_configure VERSION = "version" SERVICES = defs . SERVICES INTEGRATIONS = defs . INTEGRATIONS required_top_keys = [ VERSION , SERVICES ] supported_versions = [ 1 ] def validate_yml ( config ) : for key in required_top_keys : if key not in config : raise exceptions . ConfigFieldMissing ( key ) version = config . get ( VERSION ) if version not in supported_versions : raise exceptions . ConfigFieldTypeMismatch ( VERSION , version , "one of: {}" . format ( repr ( supported_versions ) ) ) def install_plugins ( services , integrations ) : for cmd , kwargs in [ ( service_install , { SERVICES : services } ) , ( integration_install , { INTEGRATIONS : integrations } ) ] : try : ctx . invoke ( cmd , ** kwargs ) except SystemExit : pass def parameters_to_string ( parameters_dict ) : return [ "{}={}" . format ( k , v ) for k , v in parameters_dict . items ( ) ] def configure_integrations ( integrations ) : for integration in integrations : args_list = parameters_to_string ( config [ INTEGRATIONS ] [ integration ] . get ( defs . PARAMETERS , dict ( ) ) ) ctx . invoke ( integration_configure , integration = integration , args = args_list ) def run_services ( services , integrations ) : for service in services : args_list = parameters_to_string ( config [ SERVICES ] [ service ] . get ( defs . PARAMETERS , dict ( ) ) ) ctx . invoke ( service_run , service = service , integration = integrations , args = args_list ) with open ( configfile , "rb" ) as fh : config = yaml . load ( fh . read ( ) ) validate_yml ( config ) services = config . get ( SERVICES ) . keys ( ) integrations = config . get ( INTEGRATIONS ) . keys ( ) if config . get ( INTEGRATIONS ) else [ ] install_plugins ( services , integrations ) configure_integrations ( integrations ) run_services ( services , integrations ) | Process a yaml config with instructions . |
246,394 | def get_plugin_path ( home , plugin_type , plugin_name , editable = False ) : if editable : plugin_path = plugin_name else : plugin_path = os . path . join ( home , plugin_type , plugin_name ) return os . path . realpath ( plugin_path ) | Return path to plugin . |
246,395 | def install_plugin ( pkgpath , plugin_type , install_path , register_func ) : service_name = os . path . basename ( pkgpath ) if os . path . exists ( os . path . join ( install_path , service_name ) ) : raise exceptions . PluginAlreadyInstalled ( pkgpath ) if os . path . exists ( pkgpath ) : logger . debug ( "%s exists in filesystem" , pkgpath ) if os . path . isdir ( pkgpath ) : pip_status = install_dir ( pkgpath , install_path , register_func ) else : pip_status = install_from_zip ( pkgpath , install_path , register_func ) else : logger . debug ( "cannot find %s locally, checking github repo" , pkgpath ) click . secho ( "Collecting {}.." . format ( pkgpath ) ) pip_status = install_from_repo ( pkgpath , plugin_type , install_path , register_func ) if pip_status == 0 : click . secho ( "[+] Great success!" ) else : click . secho ( "[-] Service installed but something was odd with dependency install, please review debug logs" ) | Install specified plugin . |
246,396 | def install_deps ( pkgpath ) : if os . path . exists ( os . path . join ( pkgpath , "requirements.txt" ) ) : logger . debug ( "installing dependencies" ) click . secho ( "[*] Installing dependencies" ) pipargs = [ "install" , "--target" , os . path . join ( pkgpath , defs . DEPS_DIR ) , "--ignore-installed" , "-r" , os . path . join ( pkgpath , "requirements.txt" ) ] logger . debug ( "running pip %s" , pipargs ) return subprocess . check_call ( [ sys . executable , "-m" , "pip" ] + pipargs ) return 0 | Install plugin dependencies using pip . |
246,397 | def copy_file ( src , dst ) : try : fin = os . open ( src , READ_FLAGS ) stat = os . fstat ( fin ) fout = os . open ( dst , WRITE_FLAGS , stat . st_mode ) for x in iter ( lambda : os . read ( fin , BUFFER_SIZE ) , b"" ) : os . write ( fout , x ) finally : try : os . close ( fin ) except Exception as exc : logger . debug ( "Failed to close file handle when copying: {}" . format ( exc ) ) try : os . close ( fout ) except Exception as exc : logger . debug ( "Failed to close file handle when copying: {}" . format ( exc ) ) | Copy a single file . |
246,398 | def copy_tree ( src , dst , symlinks = False , ignore = [ ] ) : names = os . listdir ( src ) if not os . path . exists ( dst ) : os . makedirs ( dst ) errors = [ ] for name in names : if name in ignore : continue srcname = os . path . join ( src , name ) dstname = os . path . join ( dst , name ) try : if symlinks and os . path . islink ( srcname ) : linkto = os . readlink ( srcname ) os . symlink ( linkto , dstname ) elif os . path . isdir ( srcname ) : copy_tree ( srcname , dstname , symlinks , ignore ) else : copy_file ( srcname , dstname ) except ( IOError , os . error ) as exc : errors . append ( ( srcname , dstname , str ( exc ) ) ) except CTError as exc : errors . extend ( exc . errors ) if errors : raise CTError ( errors ) | Copy a full directory structure . |
246,399 | def install_dir ( pkgpath , install_path , register_func , delete_after_install = False ) : logger . debug ( "%s is a directory, attempting to validate" , pkgpath ) plugin = register_func ( pkgpath ) logger . debug ( "%s looks good, copying to %s" , pkgpath , install_path ) try : copy_tree ( pkgpath , os . path . join ( install_path , plugin . name ) ) if delete_after_install : logger . debug ( "deleting %s" , pkgpath ) shutil . rmtree ( pkgpath ) pkgpath = os . path . join ( install_path , plugin . name ) except ( OSError , CTError ) as exc : logger . debug ( str ( exc ) , exc_info = True ) raise exceptions . PluginAlreadyInstalled ( plugin . name ) return install_deps ( pkgpath ) | Install plugin from specified directory . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.