input
stringlengths
49
3.53k
target
stringclasses
19 values
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def __init__(self, allow=None, disallow=None, secure=True, *args, **kwargs): super(TemplateField, self).__init__(*args, **kwargs) self.validators.append(TemplateValidator(allow, disallow, secure))
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def load_source(module_name, module_path): """Loads a python module from the path of the corresponding file.""" if sys.version_info[0] == 3 and sys.version_info[1] >= 5: import importlib.util spec = importlib.util.spec_from_file_location(module_name, module_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) elif sys.version_info[0] == 3 and sys.version_info[1] < 5: import importlib.machinery loader = importlib.machinery.SourceFileLoader(module_name, module_path) module = loader.load_module() return module
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def __init__(self, field): self.field = field
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def __get__(self, instance, owner): if instance is None: raise AttributeError # ?
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def __set__(self, instance, value): instance.__dict__[self.field.name] = value setattr(instance, self.field.attname, json.dumps(value))
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def __delete__(self, instance): del(instance.__dict__[self.field.name]) setattr(instance, self.field.attname, json.dumps(None))
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def get_attname(self): return "%s_json" % self.name
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def contribute_to_class(self, cls, name): super(JSONField, self).contribute_to_class(cls, name) setattr(cls, name, JSONDescriptor(self)) models.signals.pre_init.connect(self.fix_init_kwarg, sender=cls)
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def init(src, minimal=False): """Copies template files to a given directory. :param str src: The path to output the template lambda project files. :param bool minimal: Minimal possible template files (excludes event.json). """ templates_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), "project_templates", ) for filename in os.listdir(templates_path): if (minimal and filename == "event.json") or filename.endswith(".pyc"): continue dest_path = os.path.join(templates_path, filename) if not os.path.isdir(dest_path): copy(dest_path, src)
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def cleanup_old_versions( src, keep_last_versions, config_file="config.yaml", profile_name=None, ): """Deletes old deployed versions of the function in AWS Lambda. Won't delete $Latest and any aliased version :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param int keep_last_versions: The number of recent versions to keep and not delete """ if keep_last_versions <= 0: print("Won't delete all versions. Please do this manually") else: path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) profile_name = cfg.get("profile") aws_access_key_id = cfg.get("aws_access_key_id") aws_secret_access_key = cfg.get("aws_secret_access_key") client = get_client( "lambda", profile_name, aws_access_key_id, aws_secret_access_key, cfg.get("region"), ) response = client.list_versions_by_function( FunctionName=cfg.get("function_name"), ) versions = response.get("Versions") if len(response.get("Versions")) < keep_last_versions: print("Nothing to delete. (Too few versions published)") else: version_numbers = [ elem.get("Version") for elem in versions[1:-keep_last_versions] ] for version_number in version_numbers: try: client.delete_function( FunctionName=cfg.get("function_name"), Qualifier=version_number, ) except botocore.exceptions.ClientError as e: print(f"Skipping Version {version_number}: {e}")
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def deploy( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, preserve_vpc=False, ): """Deploys a new function to AWS Lambda. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) existing_config = get_function_config(cfg) if existing_config: update_function( cfg, path_to_zip_file, existing_config, preserve_vpc=preserve_vpc ) else: create_function(cfg, path_to_zip_file)
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def deploy_s3( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, preserve_vpc=False, ): """Deploys a new function via AWS S3. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) use_s3 = True s3_file = upload_s3(cfg, path_to_zip_file, use_s3) existing_config = get_function_config(cfg) if existing_config: update_function( cfg, path_to_zip_file, existing_config, use_s3=use_s3, s3_file=s3_file, preserve_vpc=preserve_vpc, ) else: create_function(cfg, path_to_zip_file, use_s3=use_s3, s3_file=s3_file)
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def upload( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, ): """Uploads a new function to AWS S3. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) upload_s3(cfg, path_to_zip_file)
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def invoke( src, event_file="event.json", config_file="config.yaml", profile_name=None, verbose=False, ): """Simulates a call to your function. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str alt_event: An optional argument to override which event file to use. :param bool verbose: Whether to print out verbose details. """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Set AWS_PROFILE environment variable based on `--profile` option. if profile_name: os.environ["AWS_PROFILE"] = profile_name # Load environment variables from the config file into the actual # environment. env_vars = cfg.get("environment_variables") if env_vars: for key, value in env_vars.items(): os.environ[key] = get_environment_variable_value(value) # Load and parse event file. path_to_event_file = os.path.join(src, event_file) event = read(path_to_event_file, loader=json.loads) # Tweak to allow module to import local modules try: sys.path.index(src) except ValueError: sys.path.append(src) handler = cfg.get("handler") # Inspect the handler string (<module>.<function name>) and translate it # into a function we can execute. fn = get_callable_handler_function(src, handler) timeout = cfg.get("timeout") if timeout: context = LambdaContext(cfg.get("function_name"), timeout) else: context = LambdaContext(cfg.get("function_name")) start = time.time() results = fn(event, context) end = time.time() print("{0}".format(results)) if verbose: print( "\nexecution time: {:.8f}s\nfunction execution " "timeout: {:2}s".format(end - start, cfg.get("timeout", 15)) )
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def to_python(self, value): if not value: return []
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def get_prep_value(self, value): return ','.join(value)
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def get_concurrency(cfg): """Return the Reserved Concurrent Executions if present in the config""" concurrency = int(cfg.get("concurrency", 0)) return max(0, concurrency)
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def test_iterators_are_a_type(self): it = iter(range(1,6)) total = 0 for num in it: total += num self.assertEqual(15 , total)
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs): # Anything passed in as self.name is assumed to come from a serializer and # will be treated as a json string. if self.name in kwargs: value = kwargs.pop(self.name)
def test_iterating_with_next(self): stages = iter(['alpha','beta','gamma']) try: self.assertEqual('alpha', next(stages)) next(stages) self.assertEqual('gamma', next(stages)) next(stages) except StopIteration as ex: err_msg = 'Ran out of iterations' self.assertRegex(err_msg, 'Ran out')
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def __init__(self, allow=None, disallow=None, secure=True, *args, **kwargs): super(TemplateField, self).__init__(*args, **kwargs) self.validators.append(TemplateValidator(allow, disallow, secure))
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def load_source(module_name, module_path): """Loads a python module from the path of the corresponding file.""" if sys.version_info[0] == 3 and sys.version_info[1] >= 5: import importlib.util spec = importlib.util.spec_from_file_location(module_name, module_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) elif sys.version_info[0] == 3 and sys.version_info[1] < 5: import importlib.machinery loader = importlib.machinery.SourceFileLoader(module_name, module_path) module = loader.load_module() return module
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def __init__(self, field): self.field = field
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def __get__(self, instance, owner): if instance is None: raise AttributeError # ?
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def __set__(self, instance, value): instance.__dict__[self.field.name] = value setattr(instance, self.field.attname, json.dumps(value))
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def __delete__(self, instance): del(instance.__dict__[self.field.name]) setattr(instance, self.field.attname, json.dumps(None))
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def get_attname(self): return "%s_json" % self.name
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def contribute_to_class(self, cls, name): super(JSONField, self).contribute_to_class(cls, name) setattr(cls, name, JSONDescriptor(self)) models.signals.pre_init.connect(self.fix_init_kwarg, sender=cls)
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def init(src, minimal=False): """Copies template files to a given directory. :param str src: The path to output the template lambda project files. :param bool minimal: Minimal possible template files (excludes event.json). """ templates_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), "project_templates", ) for filename in os.listdir(templates_path): if (minimal and filename == "event.json") or filename.endswith(".pyc"): continue dest_path = os.path.join(templates_path, filename) if not os.path.isdir(dest_path): copy(dest_path, src)
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def cleanup_old_versions( src, keep_last_versions, config_file="config.yaml", profile_name=None, ): """Deletes old deployed versions of the function in AWS Lambda. Won't delete $Latest and any aliased version :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param int keep_last_versions: The number of recent versions to keep and not delete """ if keep_last_versions <= 0: print("Won't delete all versions. Please do this manually") else: path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) profile_name = cfg.get("profile") aws_access_key_id = cfg.get("aws_access_key_id") aws_secret_access_key = cfg.get("aws_secret_access_key") client = get_client( "lambda", profile_name, aws_access_key_id, aws_secret_access_key, cfg.get("region"), ) response = client.list_versions_by_function( FunctionName=cfg.get("function_name"), ) versions = response.get("Versions") if len(response.get("Versions")) < keep_last_versions: print("Nothing to delete. (Too few versions published)") else: version_numbers = [ elem.get("Version") for elem in versions[1:-keep_last_versions] ] for version_number in version_numbers: try: client.delete_function( FunctionName=cfg.get("function_name"), Qualifier=version_number, ) except botocore.exceptions.ClientError as e: print(f"Skipping Version {version_number}: {e}")
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def deploy( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, preserve_vpc=False, ): """Deploys a new function to AWS Lambda. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) existing_config = get_function_config(cfg) if existing_config: update_function( cfg, path_to_zip_file, existing_config, preserve_vpc=preserve_vpc ) else: create_function(cfg, path_to_zip_file)
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def deploy_s3( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, preserve_vpc=False, ): """Deploys a new function via AWS S3. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) use_s3 = True s3_file = upload_s3(cfg, path_to_zip_file, use_s3) existing_config = get_function_config(cfg) if existing_config: update_function( cfg, path_to_zip_file, existing_config, use_s3=use_s3, s3_file=s3_file, preserve_vpc=preserve_vpc, ) else: create_function(cfg, path_to_zip_file, use_s3=use_s3, s3_file=s3_file)
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def upload( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, ): """Uploads a new function to AWS S3. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) upload_s3(cfg, path_to_zip_file)
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def invoke( src, event_file="event.json", config_file="config.yaml", profile_name=None, verbose=False, ): """Simulates a call to your function. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str alt_event: An optional argument to override which event file to use. :param bool verbose: Whether to print out verbose details. """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Set AWS_PROFILE environment variable based on `--profile` option. if profile_name: os.environ["AWS_PROFILE"] = profile_name # Load environment variables from the config file into the actual # environment. env_vars = cfg.get("environment_variables") if env_vars: for key, value in env_vars.items(): os.environ[key] = get_environment_variable_value(value) # Load and parse event file. path_to_event_file = os.path.join(src, event_file) event = read(path_to_event_file, loader=json.loads) # Tweak to allow module to import local modules try: sys.path.index(src) except ValueError: sys.path.append(src) handler = cfg.get("handler") # Inspect the handler string (<module>.<function name>) and translate it # into a function we can execute. fn = get_callable_handler_function(src, handler) timeout = cfg.get("timeout") if timeout: context = LambdaContext(cfg.get("function_name"), timeout) else: context = LambdaContext(cfg.get("function_name")) start = time.time() results = fn(event, context) end = time.time() print("{0}".format(results)) if verbose: print( "\nexecution time: {:.8f}s\nfunction execution " "timeout: {:2}s".format(end - start, cfg.get("timeout", 15)) )
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def to_python(self, value): if not value: return []
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def get_prep_value(self, value): return ','.join(value)
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def get_concurrency(cfg): """Return the Reserved Concurrent Executions if present in the config""" concurrency = int(cfg.get("concurrency", 0)) return max(0, concurrency)
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def test_iterators_are_a_type(self): it = iter(range(1,6)) total = 0 for num in it: total += num self.assertEqual(15 , total)
def formfield(self, *args, **kwargs): kwargs["form_class"] = JSONFormField return super(JSONField, self).formfield(*args, **kwargs)
def test_iterating_with_next(self): stages = iter(['alpha','beta','gamma']) try: self.assertEqual('alpha', next(stages)) next(stages) self.assertEqual('gamma', next(stages)) next(stages) except StopIteration as ex: err_msg = 'Ran out of iterations' self.assertRegex(err_msg, 'Ran out')
def get_internal_type(self): return "TextField"
def __init__(self, allow=None, disallow=None, secure=True, *args, **kwargs): super(TemplateField, self).__init__(*args, **kwargs) self.validators.append(TemplateValidator(allow, disallow, secure))
def get_internal_type(self): return "TextField"
def load_source(module_name, module_path): """Loads a python module from the path of the corresponding file.""" if sys.version_info[0] == 3 and sys.version_info[1] >= 5: import importlib.util spec = importlib.util.spec_from_file_location(module_name, module_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) elif sys.version_info[0] == 3 and sys.version_info[1] < 5: import importlib.machinery loader = importlib.machinery.SourceFileLoader(module_name, module_path) module = loader.load_module() return module
def get_internal_type(self): return "TextField"
def __init__(self, field): self.field = field
def get_internal_type(self): return "TextField"
def __get__(self, instance, owner): if instance is None: raise AttributeError # ?
def get_internal_type(self): return "TextField"
def __set__(self, instance, value): instance.__dict__[self.field.name] = value setattr(instance, self.field.attname, json.dumps(value))
def get_internal_type(self): return "TextField"
def __delete__(self, instance): del(instance.__dict__[self.field.name]) setattr(instance, self.field.attname, json.dumps(None))
def get_internal_type(self): return "TextField"
def get_attname(self): return "%s_json" % self.name
def get_internal_type(self): return "TextField"
def contribute_to_class(self, cls, name): super(JSONField, self).contribute_to_class(cls, name) setattr(cls, name, JSONDescriptor(self)) models.signals.pre_init.connect(self.fix_init_kwarg, sender=cls)
def get_internal_type(self): return "TextField"
def init(src, minimal=False): """Copies template files to a given directory. :param str src: The path to output the template lambda project files. :param bool minimal: Minimal possible template files (excludes event.json). """ templates_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), "project_templates", ) for filename in os.listdir(templates_path): if (minimal and filename == "event.json") or filename.endswith(".pyc"): continue dest_path = os.path.join(templates_path, filename) if not os.path.isdir(dest_path): copy(dest_path, src)
def get_internal_type(self): return "TextField"
def cleanup_old_versions( src, keep_last_versions, config_file="config.yaml", profile_name=None, ): """Deletes old deployed versions of the function in AWS Lambda. Won't delete $Latest and any aliased version :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param int keep_last_versions: The number of recent versions to keep and not delete """ if keep_last_versions <= 0: print("Won't delete all versions. Please do this manually") else: path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) profile_name = cfg.get("profile") aws_access_key_id = cfg.get("aws_access_key_id") aws_secret_access_key = cfg.get("aws_secret_access_key") client = get_client( "lambda", profile_name, aws_access_key_id, aws_secret_access_key, cfg.get("region"), ) response = client.list_versions_by_function( FunctionName=cfg.get("function_name"), ) versions = response.get("Versions") if len(response.get("Versions")) < keep_last_versions: print("Nothing to delete. (Too few versions published)") else: version_numbers = [ elem.get("Version") for elem in versions[1:-keep_last_versions] ] for version_number in version_numbers: try: client.delete_function( FunctionName=cfg.get("function_name"), Qualifier=version_number, ) except botocore.exceptions.ClientError as e: print(f"Skipping Version {version_number}: {e}")
def get_internal_type(self): return "TextField"
def deploy( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, preserve_vpc=False, ): """Deploys a new function to AWS Lambda. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) existing_config = get_function_config(cfg) if existing_config: update_function( cfg, path_to_zip_file, existing_config, preserve_vpc=preserve_vpc ) else: create_function(cfg, path_to_zip_file)
def get_internal_type(self): return "TextField"
def deploy_s3( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, preserve_vpc=False, ): """Deploys a new function via AWS S3. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) use_s3 = True s3_file = upload_s3(cfg, path_to_zip_file, use_s3) existing_config = get_function_config(cfg) if existing_config: update_function( cfg, path_to_zip_file, existing_config, use_s3=use_s3, s3_file=s3_file, preserve_vpc=preserve_vpc, ) else: create_function(cfg, path_to_zip_file, use_s3=use_s3, s3_file=s3_file)
def get_internal_type(self): return "TextField"
def upload( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, ): """Uploads a new function to AWS S3. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) upload_s3(cfg, path_to_zip_file)
def get_internal_type(self): return "TextField"
def invoke( src, event_file="event.json", config_file="config.yaml", profile_name=None, verbose=False, ): """Simulates a call to your function. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str alt_event: An optional argument to override which event file to use. :param bool verbose: Whether to print out verbose details. """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Set AWS_PROFILE environment variable based on `--profile` option. if profile_name: os.environ["AWS_PROFILE"] = profile_name # Load environment variables from the config file into the actual # environment. env_vars = cfg.get("environment_variables") if env_vars: for key, value in env_vars.items(): os.environ[key] = get_environment_variable_value(value) # Load and parse event file. path_to_event_file = os.path.join(src, event_file) event = read(path_to_event_file, loader=json.loads) # Tweak to allow module to import local modules try: sys.path.index(src) except ValueError: sys.path.append(src) handler = cfg.get("handler") # Inspect the handler string (<module>.<function name>) and translate it # into a function we can execute. fn = get_callable_handler_function(src, handler) timeout = cfg.get("timeout") if timeout: context = LambdaContext(cfg.get("function_name"), timeout) else: context = LambdaContext(cfg.get("function_name")) start = time.time() results = fn(event, context) end = time.time() print("{0}".format(results)) if verbose: print( "\nexecution time: {:.8f}s\nfunction execution " "timeout: {:2}s".format(end - start, cfg.get("timeout", 15)) )
def get_internal_type(self): return "TextField"
def to_python(self, value): if not value: return []
def get_internal_type(self): return "TextField"
def get_prep_value(self, value): return ','.join(value)
def get_internal_type(self): return "TextField"
def get_concurrency(cfg): """Return the Reserved Concurrent Executions if present in the config""" concurrency = int(cfg.get("concurrency", 0)) return max(0, concurrency)
def get_internal_type(self): return "TextField"
def test_iterators_are_a_type(self): it = iter(range(1,6)) total = 0 for num in it: total += num self.assertEqual(15 , total)
def get_internal_type(self): return "TextField"
def test_iterating_with_next(self): stages = iter(['alpha','beta','gamma']) try: self.assertEqual('alpha', next(stages)) next(stages) self.assertEqual('gamma', next(stages)) next(stages) except StopIteration as ex: err_msg = 'Ran out of iterations' self.assertRegex(err_msg, 'Ran out')
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def __init__(self, allow=None, disallow=None, secure=True, *args, **kwargs): super(TemplateField, self).__init__(*args, **kwargs) self.validators.append(TemplateValidator(allow, disallow, secure))
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def load_source(module_name, module_path): """Loads a python module from the path of the corresponding file.""" if sys.version_info[0] == 3 and sys.version_info[1] >= 5: import importlib.util spec = importlib.util.spec_from_file_location(module_name, module_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) elif sys.version_info[0] == 3 and sys.version_info[1] < 5: import importlib.machinery loader = importlib.machinery.SourceFileLoader(module_name, module_path) module = loader.load_module() return module
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def __init__(self, field): self.field = field
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def __get__(self, instance, owner): if instance is None: raise AttributeError # ?
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def __set__(self, instance, value): instance.__dict__[self.field.name] = value setattr(instance, self.field.attname, json.dumps(value))
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def __delete__(self, instance): del(instance.__dict__[self.field.name]) setattr(instance, self.field.attname, json.dumps(None))
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def get_attname(self): return "%s_json" % self.name
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def contribute_to_class(self, cls, name): super(JSONField, self).contribute_to_class(cls, name) setattr(cls, name, JSONDescriptor(self)) models.signals.pre_init.connect(self.fix_init_kwarg, sender=cls)
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def init(src, minimal=False): """Copies template files to a given directory. :param str src: The path to output the template lambda project files. :param bool minimal: Minimal possible template files (excludes event.json). """ templates_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), "project_templates", ) for filename in os.listdir(templates_path): if (minimal and filename == "event.json") or filename.endswith(".pyc"): continue dest_path = os.path.join(templates_path, filename) if not os.path.isdir(dest_path): copy(dest_path, src)
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def cleanup_old_versions( src, keep_last_versions, config_file="config.yaml", profile_name=None, ): """Deletes old deployed versions of the function in AWS Lambda. Won't delete $Latest and any aliased version :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param int keep_last_versions: The number of recent versions to keep and not delete """ if keep_last_versions <= 0: print("Won't delete all versions. Please do this manually") else: path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) profile_name = cfg.get("profile") aws_access_key_id = cfg.get("aws_access_key_id") aws_secret_access_key = cfg.get("aws_secret_access_key") client = get_client( "lambda", profile_name, aws_access_key_id, aws_secret_access_key, cfg.get("region"), ) response = client.list_versions_by_function( FunctionName=cfg.get("function_name"), ) versions = response.get("Versions") if len(response.get("Versions")) < keep_last_versions: print("Nothing to delete. (Too few versions published)") else: version_numbers = [ elem.get("Version") for elem in versions[1:-keep_last_versions] ] for version_number in version_numbers: try: client.delete_function( FunctionName=cfg.get("function_name"), Qualifier=version_number, ) except botocore.exceptions.ClientError as e: print(f"Skipping Version {version_number}: {e}")
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def deploy( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, preserve_vpc=False, ): """Deploys a new function to AWS Lambda. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) existing_config = get_function_config(cfg) if existing_config: update_function( cfg, path_to_zip_file, existing_config, preserve_vpc=preserve_vpc ) else: create_function(cfg, path_to_zip_file)
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def deploy_s3( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, preserve_vpc=False, ): """Deploys a new function via AWS S3. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) use_s3 = True s3_file = upload_s3(cfg, path_to_zip_file, use_s3) existing_config = get_function_config(cfg) if existing_config: update_function( cfg, path_to_zip_file, existing_config, use_s3=use_s3, s3_file=s3_file, preserve_vpc=preserve_vpc, ) else: create_function(cfg, path_to_zip_file, use_s3=use_s3, s3_file=s3_file)
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def upload( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, ): """Uploads a new function to AWS S3. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) upload_s3(cfg, path_to_zip_file)
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def invoke( src, event_file="event.json", config_file="config.yaml", profile_name=None, verbose=False, ): """Simulates a call to your function. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str alt_event: An optional argument to override which event file to use. :param bool verbose: Whether to print out verbose details. """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Set AWS_PROFILE environment variable based on `--profile` option. if profile_name: os.environ["AWS_PROFILE"] = profile_name # Load environment variables from the config file into the actual # environment. env_vars = cfg.get("environment_variables") if env_vars: for key, value in env_vars.items(): os.environ[key] = get_environment_variable_value(value) # Load and parse event file. path_to_event_file = os.path.join(src, event_file) event = read(path_to_event_file, loader=json.loads) # Tweak to allow module to import local modules try: sys.path.index(src) except ValueError: sys.path.append(src) handler = cfg.get("handler") # Inspect the handler string (<module>.<function name>) and translate it # into a function we can execute. fn = get_callable_handler_function(src, handler) timeout = cfg.get("timeout") if timeout: context = LambdaContext(cfg.get("function_name"), timeout) else: context = LambdaContext(cfg.get("function_name")) start = time.time() results = fn(event, context) end = time.time() print("{0}".format(results)) if verbose: print( "\nexecution time: {:.8f}s\nfunction execution " "timeout: {:2}s".format(end - start, cfg.get("timeout", 15)) )
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def to_python(self, value): if not value: return []
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def get_prep_value(self, value): return ','.join(value)
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def get_concurrency(cfg): """Return the Reserved Concurrent Executions if present in the config""" concurrency = int(cfg.get("concurrency", 0)) return max(0, concurrency)
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def test_iterators_are_a_type(self): it = iter(range(1,6)) total = 0 for num in it: total += num self.assertEqual(15 , total)
def get_handler_filename(handler): """Shortcut to get the filename from the handler string. :param str handler: A dot delimited string representing the `<module>.<function name>`. """ module_name, _ = handler.split(".") return "{0}.py".format(module_name)
def test_iterating_with_next(self): stages = iter(['alpha','beta','gamma']) try: self.assertEqual('alpha', next(stages)) next(stages) self.assertEqual('gamma', next(stages)) next(stages) except StopIteration as ex: err_msg = 'Ran out of iterations' self.assertRegex(err_msg, 'Ran out')
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def __init__(self, allow=None, disallow=None, secure=True, *args, **kwargs): super(TemplateField, self).__init__(*args, **kwargs) self.validators.append(TemplateValidator(allow, disallow, secure))
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def load_source(module_name, module_path): """Loads a python module from the path of the corresponding file.""" if sys.version_info[0] == 3 and sys.version_info[1] >= 5: import importlib.util spec = importlib.util.spec_from_file_location(module_name, module_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) elif sys.version_info[0] == 3 and sys.version_info[1] < 5: import importlib.machinery loader = importlib.machinery.SourceFileLoader(module_name, module_path) module = loader.load_module() return module
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def __init__(self, field): self.field = field
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def __get__(self, instance, owner): if instance is None: raise AttributeError # ?
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def __set__(self, instance, value): instance.__dict__[self.field.name] = value setattr(instance, self.field.attname, json.dumps(value))
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def __delete__(self, instance): del(instance.__dict__[self.field.name]) setattr(instance, self.field.attname, json.dumps(None))
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def get_attname(self): return "%s_json" % self.name
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def contribute_to_class(self, cls, name): super(JSONField, self).contribute_to_class(cls, name) setattr(cls, name, JSONDescriptor(self)) models.signals.pre_init.connect(self.fix_init_kwarg, sender=cls)
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def init(src, minimal=False): """Copies template files to a given directory. :param str src: The path to output the template lambda project files. :param bool minimal: Minimal possible template files (excludes event.json). """ templates_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), "project_templates", ) for filename in os.listdir(templates_path): if (minimal and filename == "event.json") or filename.endswith(".pyc"): continue dest_path = os.path.join(templates_path, filename) if not os.path.isdir(dest_path): copy(dest_path, src)
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def cleanup_old_versions( src, keep_last_versions, config_file="config.yaml", profile_name=None, ): """Deletes old deployed versions of the function in AWS Lambda. Won't delete $Latest and any aliased version :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param int keep_last_versions: The number of recent versions to keep and not delete """ if keep_last_versions <= 0: print("Won't delete all versions. Please do this manually") else: path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) profile_name = cfg.get("profile") aws_access_key_id = cfg.get("aws_access_key_id") aws_secret_access_key = cfg.get("aws_secret_access_key") client = get_client( "lambda", profile_name, aws_access_key_id, aws_secret_access_key, cfg.get("region"), ) response = client.list_versions_by_function( FunctionName=cfg.get("function_name"), ) versions = response.get("Versions") if len(response.get("Versions")) < keep_last_versions: print("Nothing to delete. (Too few versions published)") else: version_numbers = [ elem.get("Version") for elem in versions[1:-keep_last_versions] ] for version_number in version_numbers: try: client.delete_function( FunctionName=cfg.get("function_name"), Qualifier=version_number, ) except botocore.exceptions.ClientError as e: print(f"Skipping Version {version_number}: {e}")
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def deploy( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, preserve_vpc=False, ): """Deploys a new function to AWS Lambda. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) existing_config = get_function_config(cfg) if existing_config: update_function( cfg, path_to_zip_file, existing_config, preserve_vpc=preserve_vpc ) else: create_function(cfg, path_to_zip_file)
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def deploy_s3( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, preserve_vpc=False, ): """Deploys a new function via AWS S3. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) use_s3 = True s3_file = upload_s3(cfg, path_to_zip_file, use_s3) existing_config = get_function_config(cfg) if existing_config: update_function( cfg, path_to_zip_file, existing_config, use_s3=use_s3, s3_file=s3_file, preserve_vpc=preserve_vpc, ) else: create_function(cfg, path_to_zip_file, use_s3=use_s3, s3_file=s3_file)
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def upload( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, ): """Uploads a new function to AWS S3. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) upload_s3(cfg, path_to_zip_file)
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def invoke( src, event_file="event.json", config_file="config.yaml", profile_name=None, verbose=False, ): """Simulates a call to your function. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str alt_event: An optional argument to override which event file to use. :param bool verbose: Whether to print out verbose details. """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Set AWS_PROFILE environment variable based on `--profile` option. if profile_name: os.environ["AWS_PROFILE"] = profile_name # Load environment variables from the config file into the actual # environment. env_vars = cfg.get("environment_variables") if env_vars: for key, value in env_vars.items(): os.environ[key] = get_environment_variable_value(value) # Load and parse event file. path_to_event_file = os.path.join(src, event_file) event = read(path_to_event_file, loader=json.loads) # Tweak to allow module to import local modules try: sys.path.index(src) except ValueError: sys.path.append(src) handler = cfg.get("handler") # Inspect the handler string (<module>.<function name>) and translate it # into a function we can execute. fn = get_callable_handler_function(src, handler) timeout = cfg.get("timeout") if timeout: context = LambdaContext(cfg.get("function_name"), timeout) else: context = LambdaContext(cfg.get("function_name")) start = time.time() results = fn(event, context) end = time.time() print("{0}".format(results)) if verbose: print( "\nexecution time: {:.8f}s\nfunction execution " "timeout: {:2}s".format(end - start, cfg.get("timeout", 15)) )
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def to_python(self, value): if not value: return []
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def get_prep_value(self, value): return ','.join(value)
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def get_concurrency(cfg): """Return the Reserved Concurrent Executions if present in the config""" concurrency = int(cfg.get("concurrency", 0)) return max(0, concurrency)
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def test_iterators_are_a_type(self): it = iter(range(1,6)) total = 0 for num in it: total += num self.assertEqual(15 , total)
def _filter_blacklist(package): blacklist = ["-i", "#", "Python==", "python-lambda=="] return all(package.startswith(entry) is False for entry in blacklist)
def test_iterating_with_next(self): stages = iter(['alpha','beta','gamma']) try: self.assertEqual('alpha', next(stages)) next(stages) self.assertEqual('gamma', next(stages)) next(stages) except StopIteration as ex: err_msg = 'Ran out of iterations' self.assertRegex(err_msg, 'Ran out')
def pip_install_to_target(path, requirements=None, local_package=None): """For a given active virtualenv, gather all installed pip packages then copy (re-install) them to the path provided. :param str path: Path to copy installed pip packages to. :param str requirements: If set, only the packages in the supplied requirements file are installed. If not set then installs all packages found via pip freeze. :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ packages = [] if not requirements: print("Gathering pip packages") pkgStr = subprocess.check_output( [sys.executable, "-m", "pip", "freeze"] ) packages.extend(pkgStr.decode("utf-8").splitlines()) else: if os.path.exists(requirements): print("Gathering requirement packages") data = read(requirements) packages.extend(data.splitlines()) if not packages: print("No dependency packages installed!") if local_package is not None: if not isinstance(local_package, (list, tuple)): local_package = [local_package] for l_package in local_package: packages.append(l_package) _install_packages(path, packages)
def __init__(self, allow=None, disallow=None, secure=True, *args, **kwargs): super(TemplateField, self).__init__(*args, **kwargs) self.validators.append(TemplateValidator(allow, disallow, secure))
def pip_install_to_target(path, requirements=None, local_package=None): """For a given active virtualenv, gather all installed pip packages then copy (re-install) them to the path provided. :param str path: Path to copy installed pip packages to. :param str requirements: If set, only the packages in the supplied requirements file are installed. If not set then installs all packages found via pip freeze. :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ packages = [] if not requirements: print("Gathering pip packages") pkgStr = subprocess.check_output( [sys.executable, "-m", "pip", "freeze"] ) packages.extend(pkgStr.decode("utf-8").splitlines()) else: if os.path.exists(requirements): print("Gathering requirement packages") data = read(requirements) packages.extend(data.splitlines()) if not packages: print("No dependency packages installed!") if local_package is not None: if not isinstance(local_package, (list, tuple)): local_package = [local_package] for l_package in local_package: packages.append(l_package) _install_packages(path, packages)
def load_source(module_name, module_path): """Loads a python module from the path of the corresponding file.""" if sys.version_info[0] == 3 and sys.version_info[1] >= 5: import importlib.util spec = importlib.util.spec_from_file_location(module_name, module_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) elif sys.version_info[0] == 3 and sys.version_info[1] < 5: import importlib.machinery loader = importlib.machinery.SourceFileLoader(module_name, module_path) module = loader.load_module() return module
def pip_install_to_target(path, requirements=None, local_package=None): """For a given active virtualenv, gather all installed pip packages then copy (re-install) them to the path provided. :param str path: Path to copy installed pip packages to. :param str requirements: If set, only the packages in the supplied requirements file are installed. If not set then installs all packages found via pip freeze. :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ packages = [] if not requirements: print("Gathering pip packages") pkgStr = subprocess.check_output( [sys.executable, "-m", "pip", "freeze"] ) packages.extend(pkgStr.decode("utf-8").splitlines()) else: if os.path.exists(requirements): print("Gathering requirement packages") data = read(requirements) packages.extend(data.splitlines()) if not packages: print("No dependency packages installed!") if local_package is not None: if not isinstance(local_package, (list, tuple)): local_package = [local_package] for l_package in local_package: packages.append(l_package) _install_packages(path, packages)
def __init__(self, field): self.field = field
def pip_install_to_target(path, requirements=None, local_package=None): """For a given active virtualenv, gather all installed pip packages then copy (re-install) them to the path provided. :param str path: Path to copy installed pip packages to. :param str requirements: If set, only the packages in the supplied requirements file are installed. If not set then installs all packages found via pip freeze. :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ packages = [] if not requirements: print("Gathering pip packages") pkgStr = subprocess.check_output( [sys.executable, "-m", "pip", "freeze"] ) packages.extend(pkgStr.decode("utf-8").splitlines()) else: if os.path.exists(requirements): print("Gathering requirement packages") data = read(requirements) packages.extend(data.splitlines()) if not packages: print("No dependency packages installed!") if local_package is not None: if not isinstance(local_package, (list, tuple)): local_package = [local_package] for l_package in local_package: packages.append(l_package) _install_packages(path, packages)
def __get__(self, instance, owner): if instance is None: raise AttributeError # ?
def pip_install_to_target(path, requirements=None, local_package=None): """For a given active virtualenv, gather all installed pip packages then copy (re-install) them to the path provided. :param str path: Path to copy installed pip packages to. :param str requirements: If set, only the packages in the supplied requirements file are installed. If not set then installs all packages found via pip freeze. :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ packages = [] if not requirements: print("Gathering pip packages") pkgStr = subprocess.check_output( [sys.executable, "-m", "pip", "freeze"] ) packages.extend(pkgStr.decode("utf-8").splitlines()) else: if os.path.exists(requirements): print("Gathering requirement packages") data = read(requirements) packages.extend(data.splitlines()) if not packages: print("No dependency packages installed!") if local_package is not None: if not isinstance(local_package, (list, tuple)): local_package = [local_package] for l_package in local_package: packages.append(l_package) _install_packages(path, packages)
def __set__(self, instance, value): instance.__dict__[self.field.name] = value setattr(instance, self.field.attname, json.dumps(value))

Dataset Card for "bad_code_to_good_code_dataset"

More Information needed

Downloads last month
0
Edit dataset card