train_new_deep_neural_network
Train a new deep neural network and store the results as new
``MLJob`` and ``MLJobResult`` database records.
def train_new_deep_neural_network():
"""train_new_deep_neural_network
Train a new deep neural network and store the results as new
``MLJob`` and ``MLJobResult`` database records.
"""
parser = argparse.ArgumentParser(
description=(
"Python client to Train a Deep Neural Network "
"with AntiNex Django Rest Framework"))
parser.add_argument(
"-u",
help="username",
required=False,
dest="user")
parser.add_argument(
"-p",
help="user password",
required=False,
dest="password")
parser.add_argument(
"-e",
help="user email",
required=False,
dest="email")
parser.add_argument(
"-a",
help="url endpoint with default http://localhost:8010",
required=False,
dest="url")
parser.add_argument(
"-f",
help="file to use default ./examples/test-keras-dnn.json",
required=False,
dest="datafile")
parser.add_argument(
"-b",
help=(
"optional - path to CA bundle directory for "
"client encryption over HTTP"),
required=False,
dest="ca_dir")
parser.add_argument(
"-c",
help=(
"optional - path to x509 certificate for "
"client encryption over HTTP"),
required=False,
dest="cert_file")
parser.add_argument(
"-k",
help=(
"optional - path to x509 key file for "
"client encryption over HTTP"),
required=False,
dest="key_file")
parser.add_argument(
"-s",
help="silent",
required=False,
dest="silent",
action="store_true")
parser.add_argument(
"-d",
help="debug",
required=False,
dest="debug",
action="store_true")
args = parser.parse_args()
user = ev(
"API_USER",
"user-not-set")
password = ev(
"API_PASSWORD",
"password-not-set")
email = ev(
"API_EMAIL",
"email-not-set")
url = ev(
"API_URL",
"http://localhost:8010")
datafile = ev(
"DATAFILE",
"datafile-not-set")
ca_dir = os.getenv(
"API_CA_BUNDLE_DIR",
None)
cert_file = os.getenv(
"API_CERT_FILE",
None)
key_file = os.getenv(
"API_KEY_FILE",
None)
verbose = bool(str(ev(
"API_VERBOSE",
"true")).lower() == "true")
debug = bool(str(ev(
"API_DEBUG",
"false")).lower() == "true")
if args.user:
user = args.user
if args.password:
password = args.password
if args.email:
email = args.email
if args.url:
url = args.url
if args.datafile:
datafile = args.datafile
if args.ca_dir:
ca_dir = args.ca_dir
if args.cert_file:
cert_file = args.cert_file
if args.key_file:
key_file = args.key_file
if args.silent:
verbose = False
if args.debug:
debug = True
usage = (
"Please run with "
"-u <username> "
"-p <password> "
"-a <AntiNex URL http://localhost:8010> "
"-f <path to json file> "
"-b <optional - path to CA bundle directory> "
"-c <optional - path to x509 ssl certificate file> "
"-k <optional - path to x509 ssl key file>")
valid = True
if not user or user == "user-not-set":
log.error("missing user")
valid = False
if not password or password == "password-not-set":
log.error("missing password")
valid = False
if not datafile or datafile == "datafile-not-set":
log.error("missing datafile")
valid = False
else:
if not os.path.exists(datafile):
log.error(("did not find datafile={} on disk")
.format(
datafile))
valid = False
if not valid:
log.error(usage)
sys.exit(1)
if verbose:
log.info((
"creating client user={} url={} file={} "
"ca_dir={} cert_file={} key_file={}").format(
user,
url,
datafile,
ca_dir,
cert_file,
key_file))
client = AIClient(
user=user,
email=email,
password=password,
url=url,
ca_dir=ca_dir,
cert_file=cert_file,
key_file=key_file,
verbose=verbose,
debug=debug)
if verbose:
log.info(("loading request in datafile={}")
.format(
datafile))
req_body = None
with open(datafile, "r") as f:
req_body = json.loads(f.read())
if verbose:
log.info("running job")
job_was_started = False
response = client.run_job(
body=req_body)
if response["status"] == SUCCESS:
log.info(("job started with response={}")
.format(
response["data"]))
job_was_started = True
elif response["status"] == FAILED:
log.error(("job failed with error='{}' with response={}")
.format(
response["error"],
response["data"]))
elif response["status"] == ERROR:
log.error(("job had an error='{}' with response={}")
.format(
response["error"],
response["data"]))
elif response["status"] == LOGIN_FAILED:
log.error(("job reported user was not able to log in "
"with an error='{}' with response={}")
.format(
response["error"],
response["data"]))
if not job_was_started:
sys.exit(1)
if debug:
log.info(("parsing response data={}")
.format(
response["data"]))
else:
if verbose:
log.info("parsing data")
res_data = response["data"]
job_data = res_data.get(
"job",
None)
result_data = res_data.get(
"results",
None)
if not job_data:
log.error(("missing job dictionary in response data={}")
.format(
response["data"]))
sys.exit(1)
if not result_data:
log.error(("missing results dictionary in response data={}")
.format(
response["data"]))
sys.exit(1)
job_id = job_data.get("id", None)
job_status = job_data.get("status", None)
result_id = result_data.get("id", None)
result_status = result_data.get("status", None)
log.info(("started job.id={} job.status={} with "
"result.id={} result.status={}")
.format(
job_id,
job_status,
result_id,
result_status))
job_results = client.wait_for_job_to_finish(
job_id=job_id)
if job_results["status"] != SUCCESS:
log.error(("failed waiting for job.id={} to finish error={} data={}")
.format(
job_id,
job_results["error"],
job_results["data"]))
sys.exit(1)
final_job = job_results["data"]["job"]
final_result = job_results["data"]["result"]
if verbose:
log.info(("job={}")
.format(
ppj(final_job)))
else:
log.info(("job={}")
.format(
str(final_job)[0:10]))
if verbose:
log.info(("result={}")
.format(
ppj(final_result)))
else:
log.info(("result={}")
.format(
str(final_result)[0:10]))
log.info(("job.id={} is done")
.format(
job_id))
predictions = final_result["predictions_json"].get(
"predictions",
[])
log.info(("loading predictions={} into pandas dataframe")
.format(
len(predictions)))
df = pd.DataFrame(predictions)
if verbose:
log.info(("dataframe={}")
.format(
df))
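A minimal invocation sketch (assumptions: this module is importable and an AntiNex REST API is reachable at the default URL; the credentials and file path below are placeholders):
import os
# placeholder credentials for a local AntiNex API
os.environ["API_USER"] = "demo-user"
os.environ["API_PASSWORD"] = "demo-password"
os.environ["DATAFILE"] = "./examples/test-keras-dnn.json"
train_new_deep_neural_network()  # reads env vars/argv, trains, waits, prints predictions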
|
prepare_new_dataset
Prepare a new ``MLPrepare`` record and dataset files on disk.
def prepare_new_dataset():
"""prepare_new_dataset
Prepare a new ``MLPrepare`` record and dataset files on disk.
"""
parser = argparse.ArgumentParser(
description=(
"Python client to Prepare a dataset"))
parser.add_argument(
"-u",
help="username",
required=False,
dest="user")
parser.add_argument(
"-p",
help="user password",
required=False,
dest="password")
parser.add_argument(
"-e",
help="user email",
required=False,
dest="email")
parser.add_argument(
"-a",
help="url endpoint with default http://localhost:8010",
required=False,
dest="url")
parser.add_argument(
"-f",
help="file to use default ./examples/test-keras-dnn.json",
required=False,
dest="prepare_file")
parser.add_argument(
"-b",
help=(
"optional - path to CA bundle directory for "
"client encryption over HTTP"),
required=False,
dest="ca_dir")
parser.add_argument(
"-c",
help=(
"optional - path to x509 certificate for "
"client encryption over HTTP"),
required=False,
dest="cert_file")
parser.add_argument(
"-k",
help=(
"optional - path to x509 key file for "
"client encryption over HTTP"),
required=False,
dest="key_file")
parser.add_argument(
"-s",
help="silent",
required=False,
dest="silent",
action="store_true")
parser.add_argument(
"-d",
help="debug",
required=False,
dest="debug",
action="store_true")
args = parser.parse_args()
user = ev(
"API_USER",
"user-not-set")
password = ev(
"API_PASSWORD",
"password-not-set")
email = ev(
"API_EMAIL",
"email-not-set")
url = ev(
"API_URL",
"http://localhost:8010")
prepare_file = ev(
"DATAFILE",
"prepare_file-not-set")
ca_dir = os.getenv(
"API_CA_BUNDLE_DIR",
None)
cert_file = os.getenv(
"API_CERT_FILE",
None)
key_file = os.getenv(
"API_KEY_FILE",
None)
verbose = bool(str(ev(
"API_VERBOSE",
"true")).lower() == "true")
debug = bool(str(ev(
"API_DEBUG",
"false")).lower() == "true")
if args.user:
user = args.user
if args.password:
password = args.password
if args.email:
email = args.email
if args.url:
url = args.url
if args.prepare_file:
prepare_file = args.prepare_file
if args.ca_dir:
ca_dir = args.ca_dir
if args.cert_file:
cert_file = args.cert_file
if args.key_file:
key_file = args.key_file
if args.silent:
verbose = False
if args.debug:
debug = True
usage = (
"Please run with "
"-u <username> "
"-p <password> "
"-a <AntiNex URL http://localhost:8010> "
"-f <path to prepare file> "
"-b <optional - path to CA bundle directory> "
"-c <optional - path to x509 ssl certificate file> "
"-k <optional - path to x509 ssl key file>")
valid = True
if not user or user == "user-not-set":
log.error("missing user")
valid = False
if not password or password == "password-not-set":
log.error("missing password")
valid = False
if not prepare_file or prepare_file == "prepare_file-not-set":
log.error("missing prepare_file")
valid = False
else:
if not os.path.exists(prepare_file):
log.error(("did not find prepare_file={} on disk")
.format(
prepare_file))
valid = False
if not valid:
log.error(usage)
sys.exit(1)
if verbose:
log.info(("creating client user={} url={} file={}")
.format(
user,
url,
prepare_file))
client = AIClient(
user=user,
email=email,
password=password,
url=url,
ca_dir=ca_dir,
cert_file=cert_file,
key_file=key_file,
verbose=verbose,
debug=debug)
if verbose:
log.info(("loading request in prepare_file={}")
.format(
prepare_file))
req_body = None
with open(prepare_file, "r") as f:
req_body = json.loads(f.read())
if verbose:
log.info("running prepare")
prepare_was_started = False
response = client.run_prepare(
body=req_body)
if response["status"] == SUCCESS:
log.info(("prepare started with response={}")
.format(
response["data"]))
prepare_was_started = True
elif response["status"] == FAILED:
log.error(("prepare failed with error='{}' with response={}")
.format(
response["error"],
response["data"]))
elif response["status"] == ERROR:
log.error(("prepare had an error='{}' with response={}")
.format(
response["error"],
response["data"]))
elif response["status"] == LOGIN_FAILED:
log.error(("prepare reported user was not able to log in "
"with an error='{}' with response={}")
.format(
response["error"],
response["data"]))
if not prepare_was_started:
sys.exit(1)
if debug:
log.info(("parsing response data={}")
.format(
response["data"]))
else:
if verbose:
log.info("parsing data")
prepare_data = response["data"]
if not prepare_data:
log.error(("missing prepare dictionary in response data={}")
.format(
response["data"]))
sys.exit(1)
prepare_id = prepare_data.get("id", None)
prepare_status = prepare_data.get("status", None)
log.info(("started prepare.id={} prepare.status={}")
.format(
prepare_id,
prepare_status))
prepare_results = client.wait_for_prepare_to_finish(
prepare_id=prepare_id)
if prepare_results["status"] != SUCCESS:
log.error(("failed waiting for prepare.id={} to finish "
"error={} data={}")
.format(
prepare_id,
prepare_results["error"],
prepare_results["data"]))
sys.exit(1)
final_prepare = prepare_results["data"]
log.info(("prepare={}")
.format(
ppj(final_prepare)))
log.info(("prepare.id={} is done")
.format(
prepare_id))
|
Convert a 2D feature to a 3D feature by sampling a raster
Parameters:
raster (rasterio): raster to provide the z coordinate
feature (dict): fiona feature record to convert
Returns:
result (Point or Linestring): shapely Point or LineString of xyz coordinate triples
def drape(raster, feature):
"""Convert a 2D feature to a 3D feature by sampling a raster
Parameters:
raster (rasterio): raster to provide the z coordinate
feature (dict): fiona feature record to convert
Returns:
result (Point or Linestring): shapely Point or LineString of xyz coordinate triples
"""
coords = feature['geometry']['coordinates']
geom_type = feature['geometry']['type']
if geom_type == 'Point':
xyz = sample(raster, [coords])
result = Point(xyz[0])
elif geom_type == 'LineString':
xyz = sample(raster, coords)
points = [Point(x, y, z) for x, y, z in xyz]
result = LineString(points)
else:
raise NotImplementedError(
'drape not implemented for {}'.format(geom_type))
return result
|
Sample a raster at given coordinates
Given a list of coordinates, return a list of x,y,z triples with z coordinates sampled from an input raster
Parameters:
raster (rasterio): raster dataset to sample
coords: array of tuples containing coordinate pairs (x,y) or triples (x,y,z)
Returns:
result: array of tuples containing coordinate triples (x,y,z)
def sample(raster, coords):
"""Sample a raster at given coordinates
Given a list of coordinates, return a list of x,y,z triples with z coordinates sampled from an input raster
Parameters:
raster (rasterio): raster dataset to sample
coords: array of tuples containing coordinate pairs (x,y) or triples (x,y,z)
Returns:
result: array of tuples containing coordinate triples (x,y,z)
"""
if len(coords[0]) == 3:
logging.info('Input is a 3D geometry, z coordinate will be updated.')
z = raster.sample([(x, y) for x, y, z in coords], indexes=raster.indexes)
else:
z = raster.sample(coords, indexes=raster.indexes)
result = [(vert[0], vert[1], vert_z) for vert, vert_z in zip(coords, z)]
return result
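A short usage sketch combining drape and sample (assumptions: rasterio, fiona and shapely are installed; the file paths are placeholders):
import fiona
import rasterio

# placeholder inputs: a 2D point shapefile and an elevation raster
with fiona.open("points.shp") as src, rasterio.open("dem.tif") as dem:
    for feature in src:
        geom_3d = drape(dem, feature)  # shapely geometry with z sampled from dem
        print(geom_3d.wkt)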
|
setup_logging
Setup logging configuration
:param default_level: level to log
:param default_path: path to config (optional)
:param env_key: path to config in this env var
:param config_name: filename for config
def setup_logging(
default_level=logging.INFO,
default_path="{}/logging.json".format(
os.getenv(
"LOG_DIR",
os.path.dirname(os.path.realpath(__file__)))),
env_key="LOG_CFG",
config_name=None):
"""setup_logging
Setup logging configuration
:param default_level: level to log
:param default_path: path to config (optional)
:param env_key: path to config in this env var
:param config_name: filename for config
"""
path = default_path
file_name = default_path.split("/")[-1]
if config_name:
file_name = config_name
path = "{}/{}".format(
"/".join(default_path.split("/")[:-1]),
file_name)
value = os.getenv(env_key, None)
if value:
path = value
if os.path.exists(path):
with open(path, "rt") as f:
config = json.load(f)
logging.config.dictConfig(config)
return
else:
cwd_path = os.getcwd() + "/antinex_client/log/{}".format(
file_name)
if os.path.exists(cwd_path):
with open(cwd_path, "rt") as f:
config = json.load(f)
logging.config.dictConfig(config)
return
rels_path = os.getcwd() + "/../log/{}".format(
file_name)
if os.path.exists(rels_path):
with open(rels_path, "rt") as f:
config = json.load(f)
logging.config.dictConfig(config)
return
else:
logging.basicConfig(level=default_level)
return
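A usage sketch (assumption: a JSON dictConfig file exists at the placeholder path below; when no config file is found, setup_logging falls back to logging.basicConfig):
import logging
import os

os.environ["LOG_CFG"] = "/tmp/logging.json"  # placeholder config path
setup_logging(default_level=logging.DEBUG)
logging.getLogger("client").info("logging configured")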
|
build_logger
:param name: name that shows in the logger
:param config: name of the config file
:param log_level: level to log
:param log_config_path: path to log config file
def build_logger(
name=os.getenv(
"LOG_NAME",
"client"),
config="logging.json",
log_level=logging.INFO,
log_config_path="{}/logging.json".format(
os.getenv(
"LOG_CFG",
os.path.dirname(os.path.realpath(__file__))))):
"""build_logger
:param name: name that shows in the logger
:param config: name of the config file
:param log_level: level to log
:param log_config_path: path to log config file
"""
use_config = ("./log/{}").format(
"{}".format(
config))
if not os.path.exists(use_config):
use_config = log_config_path
if not os.path.exists(use_config):
use_config = ("./antinex_client/log/{}").format(
"logging.json")
# find the log processing
setup_logging(
default_level=log_level,
default_path=use_config)
return logging.getLogger(name)
|
build_colorized_logger
:param name: name that shows in the logger
:param config: name of the config file
:param log_level: level to log
:param log_config_path: path to log config file
def build_colorized_logger(
name=os.getenv(
"LOG_NAME",
"client"),
config="colors-logging.json",
log_level=logging.INFO,
log_config_path="{}/logging.json".format(
os.getenv(
"LOG_CFG",
os.path.dirname(os.path.realpath(__file__))))):
"""build_colorized_logger
:param name: name that shows in the logger
:param config: name of the config file
:param log_level: level to log
:param log_config_path: path to log config file
"""
override_config = os.getenv(
"SHARED_LOG_CFG",
None)
debug_log_config = bool(os.getenv(
"DEBUG_SHARED_LOG_CFG",
"0") == "1")
if override_config:
if debug_log_config:
print((
"creating logger config env var: "
"SHARED_LOG_CFG={}".format(
override_config)))
if os.path.exists(override_config):
setup_logging(
default_level=log_level,
default_path=override_config)
return logging.getLogger(name)
if debug_log_config:
print((
"Failed to find log config using env var: "
"SHARED_LOG_CFG={}".format(
override_config)))
else:
if debug_log_config:
print((
"Not using shared logging env var: "
"SHARED_LOG_CFG={}".format(
override_config)))
# allow a shared log config across all components
use_config = ("{}").format(
config)
if not os.path.exists(use_config):
use_config = ("./antinex_client/log/{}").format(
config)
if not os.path.exists(use_config):
use_config = log_config_path
if not os.path.exists(use_config):
use_config = ("./log/{}").format(
config)
if not os.path.exists(use_config):
use_config = ("./antinex_client/log/{}").format(
"logging.json")
# find the last log config backup from the base of the repo
# find the log config from the defaults with the env LOG_CFG
# find the log config from the base of the repo
# find the log config by the given path
setup_logging(
default_level=log_level,
default_path=use_config)
return logging.getLogger(name)
|
Your AuthService should override this method for request authentication; otherwise no authentication is performed.
:param request: HttpRequest Django's HttpRequest object
:param auth_route: str User's requested route
:param actual_params: User's url parameters
:return: bool
def authenticate(self, request: HttpRequest, auth_route: str, actual_params: dict) -> bool:
"""
Your AuthService should override this method for request authentication; otherwise no authentication is performed.
:param request: HttpRequest Django's HttpRequest object
:param auth_route: str User's requested route
:param actual_params: User's url parameters
:return: bool
"""
if auth_route and actual_params:
self.auth_data = {}
return True
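A sketch of an overriding subclass (assumptions: BaseAuthService is importable from rinzler.auth as the set_auth_service docs suggest; the bearer-token check is a placeholder):
from django.http import HttpRequest
from rinzler.auth import BaseAuthService  # assumed import path

class TokenAuthService(BaseAuthService):
    def authenticate(self, request: HttpRequest, auth_route: str, actual_params: dict) -> bool:
        token = request.META.get("HTTP_AUTHORIZATION", "")
        if token == "Bearer demo-token":  # placeholder credential check
            self.auth_data = {"user": "demo"}
            return True
        return False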
|
Converts 2D geometries to 3D using GEOS sample through fiona.
\b
Example:
drape point.shp elevation.tif -o point_z.shp
def cli(source_f, raster_f, output, verbose):
"""
Converts 2D geometries to 3D using GEOS sample through fiona.
\b
Example:
drape point.shp elevation.tif -o point_z.shp
"""
with fiona.open(source_f, 'r') as source:
source_driver = source.driver
source_crs = source.crs
sink_schema = source.schema.copy()
source_geom = source.schema['geometry']
if source_geom == 'Point':
sink_schema['geometry'] = '3D Point'
elif source_geom == 'LineString':
sink_schema['geometry'] = '3D LineString'
elif source_geom == '3D Point' or source_geom == '3D LineString':
pass
else:
raise click.BadParameter("Source geometry type {} not implemented".format(source_geom))
with rasterio.open(raster_f) as raster:
if source_crs != raster.crs:
raise click.BadParameter("Features and raster have different CRS.")
if raster.count > 1:
warnings.warn("Found {0} bands in {1}, expected a single band raster".format(raster.count, raster_f))
supported = ['int16', 'int32', 'float32', 'float64']
if raster.dtypes[0] not in supported:
warnings.warn("Found {0} type in {1}, expected one of {2}".format(raster.dtypes[0], raster_f, supported))
with fiona.open(
output, 'w',
driver=source_driver,
crs=source_crs,
schema=sink_schema) as sink:
for feature in source:
try:
feature_z = drapery.drape(raster, feature)
sink.write({
'geometry': mapping(feature_z),
'properties': feature['properties'],
})
except Exception:
logging.exception("Error processing feature %s:", feature['id'])
|
QuerySet for all comments for a particular model (either an instance or
a class).
def for_model(self, model):
"""
QuerySet for all comments for a particular model (either an instance or
a class).
"""
ct = ContentType.objects.get_for_model(model)
qs = self.get_queryset().filter(content_type=ct)
if isinstance(model, models.Model):
qs = qs.filter(object_pk=force_text(model._get_pk_val()))
return qs
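A usage sketch (assumptions: this method lives on a Comment model's custom manager; Post is a placeholder model):
# all comments attached to any Post instance
Comment.objects.for_model(Post)
# comments attached to one specific instance
Comment.objects.for_model(Post.objects.get(pk=1))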
|
Blocking call, returns the value of the execution in JS
def eval(self, command):
'Blocking call, returns the value of the execution in JS'
event = threading.Event()
# TODO: Add event to server
#job_id = str(id(command))
import random
job_id = str(random.random())
server.EVALUATIONS[job_id] = event
message = '?' + job_id + '=' + command
logging.info(('message:', [message]))
for listener in server.LISTENERS.get(self.path, []):
logging.debug(('listener:', listener))
listener.write_message(message)
success = event.wait(timeout=30)
if success:
value_parser = server.RESULTS[job_id]
del server.EVALUATIONS[job_id]
del server.RESULTS[job_id]
return value_parser()
else:
del server.EVALUATIONS[job_id]
if job_id in server.RESULTS:
del server.RESULTS[job_id]
raise IOError('Evaluation failed.')
|
Register a callback on server and on connected clients.
def register(self, callback, name):
'Register a callback on server and on connected clients.'
server.CALLBACKS[name] = callback
self.run('''
window.skink.%s = function(args=[]) {
window.skink.call("%s", args);
}''' % (name, name))
|
Launch a Python exception from an error that took place in the browser.
message format:
- name: str
- description: str
def launch_exception(message):
"""
Launch a Python exception from an error that took place in the browser.
message format:
- name: str
- description: str
"""
error_name = message['name']
error_descr = message['description']
mapping = {
'ReferenceError': NameError,
}
if message['name'] in mapping:
raise mapping[error_name](error_descr)
else:
raise Exception('{}: {}'.format(error_name, error_descr))
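A quick illustration: a browser-side ReferenceError is re-raised as a Python NameError; unmapped names fall back to a plain Exception.
launch_exception({"name": "ReferenceError", "description": "foo is not defined"})
# raises NameError: foo is not defined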
|
start_predictions
Using environment variables, create an AntiNex AI Client.
You can also use command line args if you want.
This can train a new deep neural network if it does not
exist or it can use an existing pre-trained deep neural
network within the AntiNex Core to make new predictions.
def start_predictions():
"""start_predictions
Using environment variables, create an AntiNex AI Client.
You can also use command line args if you want.
This can train a new deep neural network if it does not
exist or it can use an existing pre-trained deep neural
network within the AntiNex Core to make new predictions.
"""
parser = argparse.ArgumentParser(
description=(
"Python client to make Predictions "
"using a Pre-trained Deep Neural Network "
"with AntiNex Django Rest Framework"))
parser.add_argument(
"-f",
help=(
"file to use default ./examples/"
"predict-rows-scaler-full-django.json"),
required=False,
dest="datafile")
parser.add_argument(
"-m",
help="send mock data",
required=False,
dest="use_fake_rows",
action="store_true")
parser.add_argument(
"-b",
help=(
"optional - path to CA bundle directory for "
"client encryption over HTTP"),
required=False,
dest="ca_dir")
parser.add_argument(
"-c",
help=(
"optional - path to x509 certificate for "
"client encryption over HTTP"),
required=False,
dest="cert_file")
parser.add_argument(
"-k",
help=(
"optional - path to x509 key file for "
"client encryption over HTTP"),
required=False,
dest="key_file")
parser.add_argument(
"-s",
help="silent",
required=False,
dest="silent",
action="store_true")
parser.add_argument(
"-d",
help="debug",
required=False,
dest="debug",
action="store_true")
args = parser.parse_args()
datafile = ev(
"DATAFILE",
"./examples/predict-rows-scaler-full-django.json")
ca_dir = os.getenv(
"API_CA_BUNDLE_DIR",
None)
cert_file = os.getenv(
"API_CERT_FILE",
None)
key_file = os.getenv(
"API_KEY_FILE",
None)
verbose = bool(str(ev(
"API_CLIENT_VERBOSE",
"1")).lower() == "1")
debug = bool(str(ev(
"API_CLIENT_DEBUG",
"0")).lower() == "1")
use_fake_rows = False
if args.use_fake_rows:
use_fake_rows = True
if args.datafile:
datafile = args.datafile
if args.ca_dir:
ca_dir = args.ca_dir
if args.cert_file:
cert_file = args.cert_file
if args.key_file:
key_file = args.key_file
if args.silent:
verbose = False
if args.debug:
debug = True
if verbose:
log.info("creating client")
client = build_ai_client_from_env(
ca_dir=ca_dir,
cert_file=cert_file,
key_file=key_file,
verbose=verbose,
debug=debug)
if verbose:
log.info(("loading request in datafile={}")
.format(
datafile))
# pass in full or partial prediction record dictionaries
# the generate_ai_request will fill in gaps with defaults
fake_rows_for_predicting = [
{
"tcp_seq": 1
},
{
"tcp_seq": 2
},
{
"tcp_seq": 3
},
{
"tcp_seq": 4
}
]
res_gen = None
if use_fake_rows:
res_gen = generate_ai_request(
predict_rows=fake_rows_for_predicting)
else:
req_with_org_rows = None
with open(datafile, "r") as f:
req_with_org_rows = json.loads(f.read())
res_gen = generate_ai_request(
predict_rows=req_with_org_rows["predict_rows"])
# end of sending mock data from this file or a file on disk
if res_gen["status"] != SUCCESS:
log.error(("failed generate_ai_request with error={}")
.format(
res_gen["error"]))
sys.exit(1)
req_body = res_gen["data"]
if verbose:
log.info("running job")
job_was_started = False
response = client.run_job(
body=req_body)
if response["status"] == SUCCESS:
log.info(("job started with response={}")
.format(
response["data"]))
job_was_started = True
elif response["status"] == FAILED:
log.error(("job failed with error='{}' with response={}")
.format(
response["error"],
response["data"]))
elif response["status"] == ERROR:
log.error(("job had an error='{}' with response={}")
.format(
response["error"],
response["data"]))
elif response["status"] == LOGIN_FAILED:
log.error(("job reported user was not able to log in "
"with an error='{}' with response={}")
.format(
response["error"],
response["data"]))
if not job_was_started:
sys.exit(1)
if debug:
log.info(("parsing response data={}")
.format(
response["data"]))
else:
if verbose:
log.info("parsing data")
res_data = response["data"]
job_data = res_data.get(
"job",
None)
result_data = res_data.get(
"results",
None)
if not job_data:
log.error(("missing job dictionary in response data={}")
.format(
response["data"]))
sys.exit(1)
if not result_data:
log.error(("missing results dictionary in response data={}")
.format(
response["data"]))
sys.exit(1)
job_id = job_data.get("id", None)
job_status = job_data.get("status", None)
result_id = result_data.get("id", None)
result_status = result_data.get("status", None)
log.info(("started job.id={} job.status={} with "
"result.id={} result.status={}")
.format(
job_id,
job_status,
result_id,
result_status))
job_results = client.wait_for_job_to_finish(
job_id=job_id)
if job_results["status"] != SUCCESS:
log.error(("failed waiting for job.id={} to finish error={} data={}")
.format(
job_id,
job_results["error"],
job_results["data"]))
sys.exit(1)
final_job = job_results["data"]["job"]
final_result = job_results["data"]["result"]
log.info(("job={}")
.format(
ppj(final_job)))
log.info(("result={}")
.format(
ppj(final_result)))
log.info(("job.id={} is done")
.format(
job_id))
predictions = final_result["predictions_json"].get(
"predictions",
[])
log.info(("loading predictions={} into pandas dataframe")
.format(
len(predictions)))
df = pd.DataFrame(predictions)
log.info(("dataframe={}")
.format(
df))
|
login
def login(
self):
"""login"""
auth_url = self.api_urls["login"]
if self.verbose:
log.info(("log in user={} url={} ca_dir={} cert={}")
.format(
self.user,
auth_url,
self.ca_dir,
self.cert))
use_headers = {
"Content-type": "application/json"
}
login_data = {
"username": self.user,
"password": self.password
}
if self.debug:
log.info((
"LOGIN with body={} headers={} url={} "
"verify={} cert={}").format(
login_data,
use_headers,
auth_url,
self.use_verify,
self.cert))
response = requests.post(
auth_url,
verify=self.use_verify,
cert=self.cert,
data=json.dumps(login_data),
headers=use_headers)
if self.debug:
log.info(("LOGIN response status_code={} text={} reason={}")
.format(
response.status_code,
response.text,
response.reason))
user_token = ""
if response.status_code == 200:
user_token = json.loads(response.text)["token"]
if user_token != "":
self.token = user_token
self.login_status = LOGIN_SUCCESS
if self.verbose:
log.debug("login success")
else:
log.error(("failed to login user={} to url={} text={}")
.format(
self.user,
auth_url,
response.text))
self.login_status = LOGIN_FAILED
# if the user token exists
return self.login_status
|
build_response
:param status: status code
:param error: error message
:param data: dictionary to send back
def build_response(
self,
status=NOT_SET,
error="",
data=None):
"""build_response
:param status: status code
:param error: error message
:param data: dictionary to send back
"""
res_node = {
"status": status,
"error": error,
"data": data
}
return res_node
|
retry_login
def retry_login(
self):
"""retry_login"""
if not self.user or not self.password:
return self.build_response(
status=ERROR,
error="please set the user and password")
retry = 0
not_done = True
while not_done:
if self.is_logged_in():
return self.build_response(
status=SUCCESS)
else:
if self.verbose:
log.debug(("login attempt={} max={}")
.format(
retry,
self.max_retries))
if self.login() == LOGIN_SUCCESS:
return self.build_response(
status=SUCCESS)
else:
time.sleep(
self.login_retry_wait_time)
# if able to login or not
retry += 1
if retry > self.max_retries:
return self.build_response(
status=ERROR,
error="failed logging in user={} retries={}".format(
self.user,
self.max_retries))
# if login worked or not
return self.build_response(
status=FAILED,
error="user={} not able to login attempts={}".format(
self.user,
retry))
|
get_prepare_by_id
:param prepare_id: MLJob.id in the database
def get_prepare_by_id(
self,
prepare_id=None):
"""get_prepare_by_id
:param prepare_id: MLJob.id in the database
"""
if not prepare_id:
log.error("missing prepare_id for get_prepare_by_id")
return self.build_response(
status=ERROR,
error="missing prepare_id for get_prepare_by_id")
if self.debug:
log.info(("user={} getting prepare={}")
.format(
self.user,
prepare_id))
url = "{}{}".format(
self.api_urls["prepare"],
prepare_id)
not_done = True
while not_done:
if self.debug:
log.info((
"JOB attempting to get={} to url={} "
"verify={} cert={}").format(
prepare_id,
url,
self.use_verify,
self.cert))
response = requests.get(
url,
verify=self.use_verify,
cert=self.cert,
headers=self.get_auth_header())
if self.debug:
log.info(("JOB response status_code={} text={} reason={}")
.format(
response.status_code,
response.text,
response.reason))
if response.status_code == 401:
login_res = self.retry_login()
if login_res["status"] != SUCCESS:
if self.verbose:
log.error(
"retry login attempts failed")
return self.build_response(
status=login_res["status"],
error=login_res["error"])
# if able to log back in just retry the call
elif response.status_code == 200:
if self.verbose:
log.debug("deserializing")
prepare_data = json.loads(
response.text)
prepare_id = prepare_data.get(
"id",
None)
if not prepare_id:
return self.build_response(
status=ERROR,
error="missing prepare.id",
data="text={} reason={}".format(
response.reason,
response.text))
self.all_prepares[str(prepare_id)] = prepare_data
if self.debug:
log.info(("added prepare={} all_prepares={}")
.format(
prepare_id,
len(self.all_prepares)))
return self.build_response(
status=SUCCESS,
error="",
data=prepare_data)
else:
err_msg = ("failed with "
"status_code={} text={} reason={}").format(
response.status_code,
response.text,
response.reason)
if self.verbose:
log.error(err_msg)
return self.build_response(
status=ERROR,
error=err_msg)
|
wait_for_job_to_finish
:param job_id: MLJob.id to wait on
:param sec_to_sleep: seconds to sleep during polling
:param max_retries: max retries until stopping
def wait_for_job_to_finish(
self,
job_id,
sec_to_sleep=5.0,
max_retries=100000):
"""wait_for_job_to_finish
:param job_id: MLJob.id to wait on
:param sec_to_sleep: seconds to sleep during polling
:param max_retries: max retries until stopping
"""
not_done = True
retry_attempt = 1
while not_done:
if self.debug:
log.info(("JOBSTATUS getting job.id={} details")
.format(
job_id))
response = self.get_job_by_id(job_id)
if self.debug:
log.info(("JOBSTATUS got job.id={} response={}")
.format(
job_id,
response))
if response["status"] != SUCCESS:
log.error(("JOBSTATUS failed to get job.id={} with error={}")
.format(
job_id,
response["error"]))
return self.build_response(
status=ERROR,
error=response["error"],
data=response["data"])
# stop if this failed getting the job details
job_data = response.get(
"data",
None)
if not job_data:
return self.build_response(
status=ERROR,
error="failed to find job dictionary in response",
data=response["data"])
job_status = job_data["status"]
if job_status == "finished" \
or job_status == "completed" \
or job_status == "launched":
if self.debug:
log.info(("job.id={} is done with status={}")
.format(
job_id,
job_status))
result_id = job_data["predict_manifest"]["result_id"]
if self.debug:
log.info(("JOBRESULT getting result.id={} details")
.format(
result_id))
response = self.get_result_by_id(result_id)
if self.debug:
log.info(("JOBRESULT got result.id={} response={}")
.format(
result_id,
response))
if response["status"] != SUCCESS:
log.error(("JOBRESULT failed to get "
"result.id={} with error={}")
.format(
result_id,
response["error"]))
return self.build_response(
status=ERROR,
error=response["error"],
data=response["data"])
# stop if this failed getting the result details
result_data = response.get(
"data",
None)
if result_data["status"] == "finished":
full_response = {
"job": job_data,
"result": result_data
}
not_done = False
return self.build_response(
status=SUCCESS,
error="",
data=full_response)
else:
if retry_attempt % 100 == 0:
if self.verbose:
log.info(("result_id={} are not done retry={}")
.format(
result_id,
retry_attempt))
retry_attempt += 1
if retry_attempt > max_retries:
err_msg = ("failed waiting "
"for job.id={} result.id={} "
"to finish").format(
job_id,
result_id)
log.error(err_msg)
return self.build_response(
status=ERROR,
error=err_msg)
else:
time.sleep(sec_to_sleep)
# wait while results are written to the db
else:
retry_attempt += 1
if retry_attempt > max_retries:
err_msg = ("failed waiting "
"for job.id={} to finish").format(
job_id)
log.error(err_msg)
return self.build_response(
status=ERROR,
error=err_msg)
else:
if self.verbose:
if retry_attempt % 100 == 0:
log.info(("waiting on job.id={} retry={}")
.format(
job_id,
retry_attempt))
# if logging just to show this is running
time.sleep(sec_to_sleep)
|
wait_for_prepare_to_finish
:param prepare_id: MLPrepare.id to wait on
:param sec_to_sleep: seconds to sleep during polling
:param max_retries: max retries until stopping
def wait_for_prepare_to_finish(
self,
prepare_id,
sec_to_sleep=5.0,
max_retries=100000):
"""wait_for_prepare_to_finish
:param prepare_id: MLPrepare.id to wait on
:param sec_to_sleep: seconds to sleep during polling
:param max_retries: max retries until stopping
"""
not_done = True
retry_attempt = 1
while not_done:
if self.debug:
log.info(("PREPSTATUS getting prepare.id={} details")
.format(
prepare_id))
response = self.get_prepare_by_id(prepare_id)
if self.debug:
log.info(("PREPSTATUS got prepare.id={} response={}")
.format(
prepare_id,
response))
if response["status"] != SUCCESS:
log.error(("PREPSTATUS failed to get prepare.id={} "
"with error={}")
.format(
prepare_id,
response["error"]))
return self.build_response(
status=ERROR,
error=response["error"],
data=response["data"])
# stop if this failed getting the prepare details
prepare_data = response.get(
"data",
None)
if not prepare_data:
return self.build_response(
status=ERROR,
error="failed to find prepare dictionary in response",
data=response["data"])
prepare_status = prepare_data["status"]
if prepare_status == "finished" \
or prepare_status == "completed":
not_done = False
return self.build_response(
status=SUCCESS,
error="",
data=prepare_data)
else:
retry_attempt += 1
if retry_attempt > max_retries:
err_msg = ("failed waiting "
"for prepare.id={} to finish").format(
prepare_id)
log.error(err_msg)
return self.build_response(
status=ERROR,
error=err_msg)
else:
if self.verbose:
if retry_attempt % 100 == 0:
log.info(("waiting on prepare.id={} retry={}")
.format(
prepare_id,
retry_attempt))
# if logging just to show this is running
time.sleep(sec_to_sleep)
|
NB: Overridden to remove dupe comment check for admins (necessary for
canned responses)
Return a new (unsaved) comment object based on the information in this
form. Assumes that the form is already validated and will throw a
ValueError if not.
Does not set any of the fields that would come from a Request object
(i.e. ``user`` or ``ip_address``).
def get_comment_object(self):
"""
NB: Overridden to remove dupe comment check for admins (necessary for
canned responses)
Return a new (unsaved) comment object based on the information in this
form. Assumes that the form is already validated and will throw a
ValueError if not.
Does not set any of the fields that would come from a Request object
(i.e. ``user`` or ``ip_address``).
"""
if not self.is_valid():
raise ValueError(
"get_comment_object may only be called on valid forms")
CommentModel = self.get_comment_model()
new = CommentModel(**self.get_comment_create_data())
user_model = get_user_model()
try:
user = user_model.objects.get(username=new.user_name)
if not user.is_staff:
new = self.check_for_duplicate_comment(new)
except user_model.DoesNotExist:
# post_molo_comment may have set the username to 'Anonymous'
new = self.check_for_duplicate_comment(new)
return new
|
Start Rinzler App
:param app_name: str Application's identifier
:return: dict
def boot(app_name) -> Rinzler:
"""
Start Rinzler App
:param app_name: str Application's identifier
:return: dict
"""
app = Rinzler(app_name)
app.log.info("App booted =)")
return app
|
Maps a route namespace with the given params and points its requests to the specified controller.
:param route: str Namespace route to be mapped
:param controller: callback Controller callable to map end-points
:rtype: url
def mount(self, route: str, controller: callable) -> url:
"""
Maps a route namespace with the given params and points its requests to the specified controller.
:param route: str Namespace route to be mapped
:param controller: callback Controller callable to map end-points
:rtype: url
"""
if issubclass(controller, TemplateView):
return url(
r"%s" % route,
Router(self, route, controller).handle
)
else:
raise TypeError("The controller %s must be a subclass of %s" % (
controller, TemplateView
)
)
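A wiring sketch (assumptions: a Django urls module; UsersController is a placeholder TemplateView subclass exposing the connect() hook the router calls):
app = boot("my-api")
urlpatterns = [
    app.mount(r"^v1/users", UsersController),
]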
|
Sets the authentication service
:param auth_service: BaseAuthService Authentication service
:raises: TypeError If the auth_service object is not a subclass of rinzler.auth.BaseAuthService
:rtype: Rinzler
def set_auth_service(self, auth_service: BaseAuthService):
"""
Sets the authentication service
:param auth_service: BaseAuthService Authentication service
:raises: TypeError If the auth_service object is not a subclass of rinzler.auth.BaseAuthService
:rtype: Rinzler
"""
if issubclass(auth_service.__class__, BaseAuthService):
self.auth_service = auth_service
return self
else:
raise TypeError("Your auth service object must be a subclass of rinzler.auth.BaseAuthService.")
|
Prepares for the CallBackResolver and handles the response and exceptions
:param request HttpRequest
:rtype: HttpResponse
def handle(self, request: HttpRequest) -> HttpResponse:
"""
Prepares for the CallBackResolver and handles the response and exceptions
:param request HttpRequest
:rtype: HttpResponse
"""
self.__request_start = datetime.now()
self.__request = request
self.__uri = request.path[1:]
self.__method = request.method
# Initializes the callable controller and calls its connect method to get the mapped end-points.
controller: RouteMapping = self.__controller().connect(self.app)
self.__end_points = controller.get_routes()
indent = self.get_json_ident(request.META)
if self.set_end_point_uri() is False:
return self.set_response_headers(self.no_route_found(self.__request).render(indent))
response = HttpResponse(None)
try:
response = self.exec_route_callback()
except RinzlerHttpException as e:
client.captureException()
self.app.log.error(f"< {e.status_code}", exc_info=True)
response = Response(None, status=e.status_code)
except RequestDataTooBig:
client.captureException()
self.app.log.error("< 413", exc_info=True)
response = Response(None, status=413)
except BaseException:
client.captureException()
self.app.log.error("< 500", exc_info=True)
response = Response(None, status=500)
finally:
if type(response) == Response:
return self.set_response_headers(response.render(indent))
else:
return self.set_response_headers(response)
|
Executes the resolved end-point callback, or its fallback
:rtype: Response or object
def exec_route_callback(self) -> Response or object:
"""
Executes the resolved end-point callback, or its fallback
:rtype: Response or object
"""
if self.__method.lower() in self.__end_points:
for bound in self.__end_points[self.__method.lower()]:
route = list(bound)[0]
expected_params = self.get_url_params(route)
actual_params = self.get_url_params(self.get_end_point_uri())
if self.request_matches_route(self.get_end_point_uri(), route):
self.app.log.info("> {0} {1}".format(self.__method, self.__uri))
if self.authenticate(route, actual_params):
self.app.log.debug(
"%s(%d) %s" % ("body ", len(self.__request.body), self.__request.body.decode('utf-8'))
)
pattern_params = self.get_callback_pattern(expected_params, actual_params)
self.app.request_handle_time = (
lambda d: int((d.days * 24 * 60 * 60 * 1000) + (d.seconds * 1000) + (d.microseconds / 1000))
)(datetime.now() - self.__request_start)
return bound[route](self.__request, self.app, **pattern_params)
else:
raise AuthException("Authentication failed.")
if self.__method == "OPTIONS":
self.app.log.info("Route matched: {0} {1}".format(self.__method, self.__uri))
return self.default_route_options()
if self.__route == '' and self.__uri == '':
return self.welcome_page()
else:
return self.no_route_found(self.__request)
|
Determines whether a route matches the actual requested route or not
:param actual_route str
:param expected_route
:rtype: Boolean
def request_matches_route(self, actual_route: str, expected_route: str):
"""
Determines whether a route matches the actual requested route or not
:param actual_route str
:param expected_route
:rtype: Boolean
"""
expected_params = self.get_url_params(expected_route)
actual_params = self.get_url_params(actual_route)
i = 0
if len(expected_params) == len(actual_params):
for param in actual_params:
if expected_params[i][0] != "{":
if param != expected_params[i]:
return False
i += 1
else:
return False
return True
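An illustration of the matching rule: "{...}" segments match any value, literal segments must be equal, and segment counts must match (router stands for a Router instance):
router.request_matches_route("users/42", "users/{id}")        # True
router.request_matches_route("users/42", "accounts/{id}")     # False: literal mismatch
router.request_matches_route("users/42/posts", "users/{id}")  # False: length differs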
|
Runs the pre-defined authentication service
:param bound_route str route matched
:param actual_params dict actual url parameters
:rtype: bool
def authenticate(self, bound_route, actual_params) -> bool:
"""
Runs the pre-defined authentication service
:param bound_route str route matched
:param actual_params dict actual url parameters
:rtype: bool
"""
if self.__auth_service is not None:
auth_route = "{0}_{1}{2}".format(self.__method, self.__route, bound_route)
auth_data = self.__auth_service.authenticate(self.__request, auth_route, actual_params)
if auth_data is True:
self.app.auth_data = self.__auth_service.auth_data
else:
return False
return True
|
Assembles a dictionary with the parameters schema defined for this route
:param expected_params dict parameters schema defined for this route
:param actual_params dict actual url parameters
:rtype: dict
def get_callback_pattern(expected_params, actual_params):
"""
Assembles a dictionary with the parameters schema defined for this route
:param expected_params dict parameters schema defined for this route
:param actual_params dict actual url parameters
:rtype: dict
"""
pattern = dict()
key = 0
for exp_param in expected_params:
if exp_param[0] == '{' and exp_param[-1:] == '}':
pattern[exp_param[1:-1]] = actual_params[key]
key = key + 1
return pattern
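A worked example (assuming the per-iteration key increment shown above, so positions line up):
expected = ["users", "{id}", "posts", "{post_id}"]
actual = ["users", "42", "posts", "7"]
get_callback_pattern(expected, actual)  # {'id': '42', 'post_id': '7'}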
|
Gets route parameters as a list
:param end_point str target route
:rtype: list
def get_url_params(end_point: str) -> list:
"""
Gets route parameters as a list
:param end_point str target route
:rtype: list
"""
var_params = end_point.split('/')
if len(var_params) == 1 and var_params[0] == '':
return []
elif len(var_params) == 1 and var_params[0] != '':
return [var_params[0]]
else:
params = list()
for param in var_params:
if len(param) > 0:
params.append(param)
return params
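Sample calls covering the three branches:
get_url_params("")                  # []
get_url_params("users")             # ['users']
get_url_params("users/{id}/posts")  # ['users', '{id}', 'posts']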
|
Extracts the route from the accessed URL and sets it to __end_point_uri
:rtype: bool
def set_end_point_uri(self) -> bool:
"""
Extracts the route from the accessed URL and sets it to __end_point_uri
:rtype: bool
"""
expected_parts = self.__route.split("/")
actual_parts = self.__uri.split("/")
i = 0
for part in expected_parts:
if part != actual_parts[i]:
return False
i = i + 1
uri_prefix = len(self.__route)
self.__end_point_uri = self.__uri[uri_prefix:]
return True
|
Default callback for route not found
:param request HttpRequest
:rtype: Response
def no_route_found(self, request):
"""
Default callback for route not found
:param request HttpRequest
:rtype: Response
"""
response_obj = OrderedDict()
response_obj["status"] = False
response_obj["exceptions"] = {
"message": "No route found for {0} {1}".format(self.__method, self.__uri),
}
response_obj["request"] = {
"method": self.__method,
"path_info": self.__uri,
"content": request.body.decode("utf-8")
}
response_obj["message"] = "We are sorry, but something went terribly wrong."
return Response(response_obj, content_type="application/json", status=404, charset="utf-8")
|
Default welcome page when the route / is not mapped yet
:rtype: HttpResponse
def welcome_page(self):
"""
Default welcome page when the route / is not mapped yet
:rtype: HttpResponse
"""
message = "HTTP/1.1 200 OK RINZLER FRAMEWORK"
return HttpResponse(
"<center><h1>{0}({1})</h1></center>".format(message, self.app.app_name),
content_type="text/html", charset="utf-8"
)
|
Default callback for OPTIONS request
:rtype: Response
def default_route_options():
"""
Default callback for OPTIONS request
:rtype: Response
"""
response_obj = OrderedDict()
response_obj["status"] = True
response_obj["data"] = "Ok"
return Response(response_obj, content_type="application/json", charset="utf-8")
|
Appends default headers to every response returned by the API
:param response HttpResponse
:rtype: HttpResponse
def set_response_headers(self, response: HttpResponse) -> HttpResponse:
"""
Appends default headers to every response returned by the API
:param response HttpResponse
:rtype: HttpResponse
"""
public_name = os.environ.get('SERVER_PUBLIC_NAME')
response_headers = {
'access-control-allow-headers': self.app.allowed_headers,
'access-control-allow-methods': self.app.allowed_methods,
'access-control-allow-origin': self.app.allowed_origins,
'access-control-allow-credentials': True,
'www-authenticate': "Bearer",
'server-public-name': public_name if public_name else "No one",
'user-info': "Rinzler Framework rulez!"
}
response_headers.update(self.app.default_headers)
for key in response_headers:
response[key] = response_headers[key]
status = response.status_code
if status != 404:
self.app.log.info("< {0}".format(status))
return response
|
Defines whether the JSON response will be indented or not
:param request_headers: dict
:return: int
def get_json_ident(request_headers: dict) -> int:
"""
Defines whether the JSON response will be indented or not
:param request_headers: dict
:return: int
"""
if 'HTTP_USER_AGENT' in request_headers:
indent = 2 if re.match("Mozilla", request_headers['HTTP_USER_AGENT']) else 0
else:
indent = 0
return indent
|
Implementation of prop (get_item) that also supports object attributes
:param key:
:param dct_or_obj:
:return:
def prop(key, dct_or_obj):
"""
Implementation of prop (get_item) that also supports object attributes
:param key:
:param dct_or_obj:
:return:
"""
# Note that hasattr is a builtin and getattr is a ramda function, hence the different arg position
if isinstance(dict, dct_or_obj):
if has(key, dct_or_obj):
return dct_or_obj[key]
else:
raise Exception("No key %s found for dict %s" % (key, dct_or_obj))
elif isinstance(list, dct_or_obj):
if isint(key):
return dct_or_obj[key]
else:
raise Exception("Key %s not expected for list type: %s" % (key, dct_or_obj))
elif isinstance(object, dct_or_obj):
if hasattr(dct_or_obj, key):
return getattr(key, dct_or_obj)
else:
raise Exception("No key %s found for objects %s" % (key, dct_or_obj))
else:
raise Exception("%s is neither a dict nor objects" % dct_or_obj)
|
Returns true if all dct values pass f
:param f: binary lambda predicate
:param dct:
:return: True or false
def all_pass_dict(f, dct):
"""
Returns true if all dct values pass f
:param f: binary lambda predicate
:param dct:
:return: True or false
"""
return all(map_with_obj_to_values(
lambda key, value: f(key, value),
dct
))
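A quick example (assumption: map_with_obj_to_values applies the lambda over the dict's key/value pairs):
all_pass_dict(lambda k, v: v > 0, {'a': 1, 'b': 2})  # True
all_pass_dict(lambda k, v: v > 0, {'a': 1, 'b': 0})  # False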
|
Ramda propOr implementation. This also resolves object attributes, so key
can be a dict prop or an attribute of dct_or_obj
:param default: Value if dct_or_obj doesn't have key_or_prop or the resolved value is null
:param key:
:param dct_or_obj:
:return:
def prop_or(default, key, dct_or_obj):
"""
Ramda propOr implementation. This also resolves object attributes, so key
can be a dict prop or an attribute of dct_or_obj
:param default: Value if dct_or_obj doesn't have key_or_prop or the resolved value is null
:param key:
:param dct_or_obj:
:return:
"""
# Note that hasattr is a builtin and getattr is a ramda function, hence the different arg position
if isinstance(dict, dct_or_obj):
value = dct_or_obj[key] if has(key, dct_or_obj) else default
elif isinstance(object, dct_or_obj):
value = getattr(key, dct_or_obj) if hasattr(dct_or_obj, key) else default
else:
value = default
# 0 and False are ok, None defaults
if value is None:
return default
return value
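Example calls (assuming the library's curried isinstance/getattr helpers noted in the comment above):
prop_or(0, 'a', {'a': 1})        # 1
prop_or(0, 'missing', {'a': 1})  # 0
prop_or(0, 'a', {'a': None})     # 0: a resolved None falls back to the default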
|
Ramda propEq plus propOr implementation
:param default:
:param key:
:param value:
:param dct:
:return:
def prop_eq_or(default, key, value, dct):
"""
Ramda propEq plus propOr implementation
:param default:
:param key:
:param value:
:param dct:
:return:
"""
return dct[key] and dct[key] == value if key in dct else default
|
Ramda propEq/propIn plus propOr
:param default:
:param key:
:param value:
:param dct:
:return:
def prop_eq_or_in_or(default, key, value, dct):
"""
Ramda propEq/propIn plus propOr
:param default:
:param key:
:param value:
:param dct:
:return:
"""
return has(key, dct) and \
(dct[key] == value if key in dct else (
dct[key] in value if isinstance((list, tuple), value) and not isinstance(str, value)
else default
))
|
Optional version of item_path with a default value. keys can be dict keys or object attributes, or a combination
:param default:
:param keys: List of keys or dot-separated string
:param dict_or_obj: A dict or obj
:return:
def item_path_or(default, keys, dict_or_obj):
"""
Optional version of item_path with a default value. keys can be dict keys or object attributes, or a combination
:param default:
:param keys: List of keys or dot-separated string
:param dict_or_obj: A dict or obj
:return:
"""
if not keys:
raise ValueError("Expected at least one key, got {0}".format(keys))
resolved_keys = keys.split('.') if isinstance(str, keys) else keys
current_value = dict_or_obj
for key in resolved_keys:
current_value = prop_or(default, key, default_to({}, current_value))
return current_value
|
Given a string of path segments separated by ., splits them into an array. Int strings are converted
to numbers to serve as an array index
:param keys: e.g. 'foo.bar.1.goo'
:param dct: e.g. dict(foo=dict(bar=[dict(goo='a'), dict(goo='b')]))
:return: The resolved value or an error. E.g. for above the result would be b
def item_str_path(keys, dct):
"""
Given a string of path segments separated by ., splits them into an array. Int strings are converted
to numbers to serve as an array index
:param keys: e.g. 'foo.bar.1.goo'
:param dct: e.g. dict(foo=dict(bar=[dict(goo='a'), dict(goo='b')]))
:return: The resolved value or an error. E.g. for above the result would be b
"""
return item_path(map(lambda segment: int(segment) if isint(segment) else segment, keys.split('.')), dct)
|
Given a string of path segments separated by ., splits them into an array. Int strings are converted
to numbers to serve as an array index
:param default: Value if any part yields None or undefined
:param keys: e.g. 'foo.bar.1.goo'
:param dct: e.g. dict(foo=dict(bar=[dict(goo='a'), dict(goo='b')]))
:return: The resolved value or an error. E.g. for above the result would be b
def item_str_path_or(default, keys, dct):
"""
Given a string of path segments separated by ., splits them into an array. Int strings are converted
to numbers to serve as an array index
:param default: Value if any part yields None or undefined
:param keys: e.g. 'foo.bar.1.goo'
:param dct: e.g. dict(foo=dict(bar=[dict(goo='a'), dict(goo='b')]))
:return: The resolved value or an error. E.g. for above the result would be b
"""
return item_path_or(default, map(lambda segment: int(segment) if isint(segment) else segment, keys.split('.')), dct)
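Examples matching the docstrings above:
dct = dict(foo=dict(bar=[dict(goo='a'), dict(goo='b')]))
item_str_path('foo.bar.1.goo', dct)          # 'b'
item_str_path_or('missing', 'foo.baz', dct)  # 'missing'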
|
Implementation of ramda has
:param prop:
:param object_or_dct:
:return:
def has(prop, object_or_dct):
"""
Implementation of ramda has
:param prop:
:param object_or_dct:
:return:
"""
return prop in object_or_dct if isinstance(dict, object_or_dct) else hasattr(object_or_dct, prop)
|
Implementation of omit that recurses. This tests the same keys at every level of dict and in lists
:param omit_props:
:param dct:
:return:
def omit_deep(omit_props, dct):
"""
Implementation of omit that recurses. This tests the same keys at every level of dict and in lists
:param omit_props:
:param dct:
:return:
"""
omit_partial = omit_deep(omit_props)
if isinstance(dict, dct):
# Filter out keys and then recurse on each value that wasn't filtered out
return map_dict(omit_partial, compact_dict(omit(omit_props, dct)))
if isinstance((list, tuple), dct):
# run omit_deep on each value
return map(omit_partial, dct)
# scalar
return dct
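A sketch of the expected behavior (assumption: omit_deep is curried and omit/map_dict/compact_dict behave like their ramda counterparts):
omit_deep(['secret'], {'a': 1, 'secret': 2, 'b': {'c': 3, 'secret': 4}})
# {'a': 1, 'b': {'c': 3}}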
|
Implementation of pick that recurses. This tests the same keys at every level of dict and in lists
:param pick_dct: Deep dict matching some portion of dct.
:param dct: Dct to filter. Any key matching pick_dct passes through. It doesn't matter what the pick_dct value
is as long as the key exists. Arrays also pass through if they have matching values in pick_dct
:return:
def pick_deep(pick_dct, dct):
"""
Implementation of pick that recurses. This tests the same keys at every level of dict and in lists
:param pick_dct: Deep dict matching some portion of dct.
:param dct: Dct to filter. Any key matching pick_dct passes through. It doesn't matter what the pick_dct value
is as long as the key exists. Arrays also pass through if they have matching values in pick_dct
:return:
"""
if isinstance(dict, dct):
# Filter out keys and then recurse on each value that wasn't filtered out
return map_with_obj(
lambda k, v: pick_deep(prop(k, pick_dct), v),
pick(keys(pick_dct), dct)
)
if isinstance((list, tuple), dct):
# run pick_deep on each value
return map(
lambda tup: pick_deep(*tup),
list(zip(pick_dct or [], dct))
)
# scalar
return dct
|
Implementation of map that recurses. This tests the same keys at every level of dict and in lists
:param f: 2-ary function expecting a key and value and returns a modified value
:param dct: Dict for deep processing
:return: Modified dct with matching props mapped
def map_with_obj_deep(f, dct):
"""
Implementation of map that recurses. This tests the same keys at every level of dict and in lists
:param f: 2-ary function expecting a key and value and returns a modified value
:param dct: Dict for deep processing
:return: Modified dct with matching props mapped
"""
return _map_deep(lambda k, v: [k, f(k, v)], dct)
|
Implementation of map that recurses. This tests the same keys at every level of dict and in lists
:param f: 2-ary function expecting a key and value and returns a modified key
:param dct: Dict for deep processing
:return: Modified dct with matching props mapped
def map_keys_deep(f, dct):
"""
Implementation of map that recurses. This tests the same keys at every level of dict and in lists
:param f: 2-ary function expecting a key and value and returns a modified key
:param dct: Dict for deep processing
:return: Modified dct with matching props mapped
"""
return _map_deep(lambda k, v: [f(k, v), v], dct)
|
Used by map_with_obj_deep and map_keys_deep
:param f: Expects a key and value and returns a pair
:param dct:
:return:
def _map_deep(f, dct):
"""
Used by map_with_obj_deep and map_keys_deep
:param f: Expects a key and value and returns a pair
:param dct:
:return:
"""
if isinstance(dict, dct):
return map_key_values(lambda k, v: f(k, _map_deep(f, v)), dct)
elif isinstance((list, tuple), dct):
# Call each value with the index as the key. Since f returns a key value discard the key that it returns
# Even if this is called with map_keys_deep we can't manipulate index values here
return map(lambda iv: f(iv[0], _map_deep(f, iv[1]))[1], enumerate(dct))
# scalar
return dct
|
Filters deeply by comparing dct to params_dct's value at each depth. Whenever a mismatch occurs the whole
thing returns False
:param params_dct: dict matching any portion of dct. E.g. params_dct = {foo: {bar: 1}} would allow
{foo: {bar: 1, car: 2}} to pass, {foo: {bar: 2}} would fail, {goo: ...} would fail
:param dct: Dict for deep processing
:return: True if all pass, else False
def dict_matches_params_deep(params_dct, dct):
"""
    Filters deeply by comparing dct to params_dct's value at each depth. Whenever a mismatch occurs the whole
    thing returns False
    :param params_dct: dict matching any portion of dct. E.g. params_dct = {foo: {bar: 1}} would allow
    {foo: {bar: 1, car: 2}} to pass, {foo: {bar: 2}} would fail, {goo: ...} would fail
    :param dct: Dict for deep processing
    :return: True if all pass, else False
"""
def recurse_if_param_exists(params, key, value):
"""
If a param[key] exists, recurse. Otherwise return True since there is no param to contest value
:param params:
:param key:
:param value:
:return:
"""
return dict_matches_params_deep(
prop(key, params),
value
) if has(key, params) else True
def recurse_if_array_param_exists(params, index, value):
"""
        If a param[index] exists, recurse. Otherwise return True since there is no param to contest value
:param params:
:param index:
:param value:
:return:
"""
return dict_matches_params_deep(
params[index],
value
) if isinstance((list, tuple), params_dct) and index < length(params_dct) else True
if isinstance(dict, dct):
# Filter out keys and then recurse on each value
return all_pass_dict(
# Recurse on each value if there is a corresponding filter_dct[key]. If not we pass
lambda key, value: recurse_if_param_exists(params_dct, key, value),
# We shallow merge, giving dct priority with (hopefully) unmatchable values
merge(map_with_obj(lambda k, v: 1 / (-e * pi), params_dct), dct)
)
if isinstance((list, tuple), dct):
if isinstance((list, tuple), params_dct) and length(dct) < length(params_dct):
        # if there are more param items than dct items, fail
return False
# run map_deep on each value
return all(map(
lambda ivalue: recurse_if_array_param_exists(params_dct, *ivalue),
enumerate(dct)
))
    # scalar. Note that non-truthy values (False, None, 0) are considered equal
return params_dct == dct
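The docstring's own cases, as a quick sketch:
dict_matches_params_deep({'foo': {'bar': 1}}, {'foo': {'bar': 1, 'car': 2}})  # -> True
dict_matches_params_deep({'foo': {'bar': 2}}, {'foo': {'bar': 1, 'car': 2}})  # -> False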
|
Ramda implementation of join
:param strin:
:param items:
:return:
def join(strin, items):
"""
Ramda implementation of join
:param strin:
:param items:
:return:
"""
return strin.join(map(lambda item: str(item), items))
|
Implementation of Ramda's mapObjIndexed without the final argument.
This returns the original key with the mapped value. Use map_key_values to modify the keys too
:param f: Called with a key and value
:param dct:
:return {dict}: Keyed by the original key, valued by the mapped value
def map_with_obj(f, dct):
"""
Implementation of Ramda's mapObjIndexed without the final argument.
This returns the original key with the mapped value. Use map_key_values to modify the keys too
:param f: Called with a key and value
:param dct:
:return {dict}: Keyed by the original key, valued by the mapped value
"""
f_dict = {}
for k, v in dct.items():
f_dict[k] = f(k, v)
return f_dict
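For example:
map_with_obj(lambda k, v: v * 2, {'a': 1, 'b': 2})
# -> {'a': 2, 'b': 4}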
|
Calls f with each key of dct, possibly returning a modified key. Values are unchanged
:param f: Called with each key and returns the same key or a modified key
:param dct:
:return: A dct with keys possibly modified but values unchanged
def map_keys(f, dct):
"""
Calls f with each key of dct, possibly returning a modified key. Values are unchanged
:param f: Called with each key and returns the same key or a modified key
:param dct:
    :return: A dct with keys possibly modified but values unchanged
"""
f_dict = {}
for k, v in dct.items():
f_dict[f(k)] = v
return f_dict
|
Calls f with each key and value of dct, possibly returning a modified key. Values are unchanged
:param f: Called with each key and value and returns the same key or a modified key
:param dct:
:return: A dct with keys possibly modified but values unchanged
def map_keys_with_obj(f, dct):
"""
Calls f with each key and value of dct, possibly returning a modified key. Values are unchanged
:param f: Called with each key and value and returns the same key or a modified key
:param dct:
    :return: A dct with keys possibly modified but values unchanged
"""
f_dict = {}
for k, v in dct.items():
f_dict[f(k, v)] = v
return f_dict
|
Deep merge by this spec below
:param dct1:
:param dct2:
:param merger: Optional deepmerge Merger to use instead of the default
:return:
def merge_deep(dct1, dct2, merger=None):
"""
Deep merge by this spec below
:param dct1:
:param dct2:
    :param merger: Optional deepmerge Merger to use instead of the default
:return:
"""
my_merger = merger or Merger(
        # pass in a list of tuples, with the
# strategies you are looking to apply
# to each type.
[
(list, ["append"]),
(dict, ["merge"])
],
# next, choose the fallback strategies,
# applied to all other types:
["override"],
# finally, choose the strategies in
# the case where the types conflict:
["override"]
)
return my_merger.merge(dct1, dct2)
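A usage sketch, assuming Merger comes from the deepmerge package (from deepmerge import Merger):
merge_deep({'a': {'b': [1]}, 'x': 1}, {'a': {'b': [2], 'c': 3}})
# -> {'a': {'b': [1, 2], 'c': 3}, 'x': 1}  (lists append, dicts merge, other types override)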
|
Shallow merge all the dcts
:param dcts:
:return:
def merge_all(dcts):
"""
Shallow merge all the dcts
:param dcts:
:return:
"""
return reduce(
lambda accum, dct: merge(accum, dct),
dict(),
dcts
)
|
Like from_pairs but combines the values of duplicate keys into arrays
:param pairs:
:return:
def from_pairs_to_array_values(pairs):
"""
    Like from_pairs but combines the values of duplicate keys into arrays
:param pairs:
:return:
"""
result = {}
for pair in pairs:
result[pair[0]] = concat(prop_or([], pair[0], result), [pair[1]])
return result
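For example:
from_pairs_to_array_values([('a', 1), ('a', 2), ('b', 3)])
# -> {'a': [1, 2], 'b': [3]}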
|
Returns a dict keyed by the given prop of each item in the list, valued by the item
:param prp:
:param lst:
:return:
def map_prop_value_as_index(prp, lst):
"""
    Returns a dict keyed by the given prop of each item in the list, valued by the item
:param prp:
:param lst:
:return:
"""
return from_pairs(map(lambda item: (prop(prp, item), item), lst))
|
Converts a key string like 'foo.bar.0.wopper' to ['foo', 'bar', 0, 'wopper']
:param {String} keyString The dot-separated key string
:return {[String]} The lens array containing string or integers
def key_string_to_lens_path(key_string):
"""
Converts a key string like 'foo.bar.0.wopper' to ['foo', 'bar', 0, 'wopper']
:param {String} keyString The dot-separated key string
:return {[String]} The lens array containing string or integers
"""
    return map(
        if_else(
            # numeric segments (e.g. '0') are array indexes
            lambda s: s.isnumeric(),
            # convert to int
            lambda s: int(s),
            # Leave the string alone
            identity
        ),
        key_string.split('.')
    )
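For example:
key_string_to_lens_path('foo.bar.0.wopper')
# -> ['foo', 'bar', 0, 'wopper']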
|
Simulates R.view with a lens_path since we don't have lens functions
:param lens_path: Array of string paths
:param obj: Object containing the given path
:return: The value at the path or None
def fake_lens_path_view(lens_path, obj):
"""
Simulates R.view with a lens_path since we don't have lens functions
:param lens_path: Array of string paths
:param obj: Object containing the given path
:return: The value at the path or None
"""
segment = head(lens_path)
return if_else(
both(lambda _: identity(segment), has(segment)),
# Recurse on the rest of the path
compose(fake_lens_path_view(tail(lens_path)), getitem(segment)),
# Give up
lambda _: None
)(obj)
|
Simulates R.set with a lens_path since we don't have lens functions
:param lens_path: Array of string paths
:param value: The value to set at the lens path
:param obj: Object containing the given path
:return: A copy of obj with the value set at the lens path
def fake_lens_path_set(lens_path, value, obj):
"""
Simulates R.set with a lens_path since we don't have lens functions
:param lens_path: Array of string paths
:param value: The value to set at the lens path
:param obj: Object containing the given path
    :return: A copy of obj with the value set at the lens path
"""
segment = head(lens_path)
obj_copy = copy.copy(obj)
def set_array_index(i, v, l):
# Fill the array with None up to the given index and set the index to v
try:
l[i] = v
except IndexError:
for _ in range(i - len(l) + 1):
l.append(None)
l[i] = v
if not (length(lens_path) - 1):
# Done
new_value = value
else:
# Find the value at the path or create a {} or [] at obj[segment]
found_or_created = item_path_or(
            if_else(
                # str() so both int and string segments can be tested
                lambda segment: str(segment).isnumeric(),
                always([]),
                always({})
            )(head(tail(lens_path))),
segment,
obj
)
# Recurse on the rest of the path
new_value = fake_lens_path_set(tail(lens_path), value, found_or_created)
# Set or replace
    if str(segment).isnumeric():
set_array_index(int(segment), new_value, obj_copy)
else:
obj_copy[segment] = new_value
return obj_copy
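A usage sketch with hypothetical data; intermediate containers are created as needed:
fake_lens_path_set(['foo', '0', 'bar'], 99, {})
# -> {'foo': [{'bar': 99}]}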
|
Undoes the work of flatten_dict
@param {Object} obj 1-D object in the form returned by flattenObj
@returns {Object} The original
:param obj:
:return:
def unflatten_dct(obj):
"""
Undoes the work of flatten_dict
@param {Object} obj 1-D object in the form returned by flattenObj
@returns {Object} The original
:param obj:
:return:
"""
def reduce_func(accum, key_string_and_value):
key_string = key_string_and_value[0]
value = key_string_and_value[1]
item_key_path = key_string_to_lens_path(key_string)
        # All but the last segment gives us the item container lens path
container_key_path = init(item_key_path)
container = unless(
# If the path has any length (not []) and the value is set, don't do anything
both(always(length(container_key_path)), fake_lens_path_view(container_key_path)),
# Else we are at the top level, so use the existing accum or create a [] or {}
# depending on if our item key is a number or not
lambda x: default_to(
if_else(
                    lambda segment: str(segment).isnumeric(),
always([]),
always({})
)(head(item_key_path))
)(x)
)(accum)
# Finally set the container at the itemLensPath
return fake_lens_path_set(
item_key_path,
value,
container
)
return compose(
reduce(
reduce_func,
# null initial value
None
),
to_pairs
)(obj)
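A usage sketch with hypothetical data, assuming flatten_dict produced the dot-separated keys:
unflatten_dct({'a.b.0': 1, 'a.c': 2})
# -> {'a': {'b': [1], 'c': 2}}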
|
ppj
:param json_data: dictionary to print
def ppj(json_data):
"""ppj
:param json_data: dictionary to print
"""
return str(json.dumps(
json_data,
sort_keys=True,
indent=4,
separators=(',', ': ')))
|
Override change view to add extra context enabling moderate tool.
def change_view(self, request, object_id, form_url='', extra_context=None):
"""
Override change view to add extra context enabling moderate tool.
"""
context = {
'has_moderate_tool': True
}
if extra_context:
context.update(extra_context)
return super(AdminModeratorMixin, self).change_view(
request=request,
object_id=object_id,
form_url=form_url,
extra_context=context
)
|
Add additional moderate url.
def get_urls(self):
"""
    Add additional moderate url.
"""
from django.conf.urls import url
urls = super(AdminModeratorMixin, self).get_urls()
info = self.model._meta.app_label, self.model._meta.model_name
return [
url(r'^(.+)/moderate/$',
self.admin_site.admin_view(self.moderate_view),
name='%s_%s_moderate' % info),
] + urls
|
Renders a HttpResponse for the ongoing request
:param indent: int
:rtype: HttpResponse
def render(self, indent=0):
"""
Renders a HttpResponse for the ongoing request
    :param indent: int
:rtype: HttpResponse
"""
self.__indent = indent
return HttpResponse(
str(self), content_type=self.__content_type, charset=self.__charset, **self.__kwargs
)
|
Setup logging configuration
def setup_logging(default_path='logging.yaml', env_key='LOG_CFG'):
"""
Setup logging configuration
"""
path = default_path
value = os.getenv(env_key, None)
if value:
path = value
if os.path.exists(path):
        with open(path, 'rt') as f:
            config = yaml.safe_load(f.read())
        logging.config.dictConfig(config)
    else:
        # no YAML config found; fall back to a basic root logger configuration
        logging.basicConfig(level=logging.INFO)
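A minimal logging.yaml this loader would accept (illustrative; the formatter and handler names are assumptions):
version: 1
formatters:
  simple:
    format: '%(asctime)s %(name)s %(levelname)s %(message)s'
handlers:
  console:
    class: logging.StreamHandler
    formatter: simple
root:
  level: INFO
  handlers: [console]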
|
Binds a GET route with the given callback
:rtype: object
def get(self, route: str, callback: object):
"""
Binds a GET route with the given callback
:rtype: object
"""
self.__set_route('get', {route: callback})
return RouteMapping
|
Binds a POST route with the given callback
:rtype: object
def post(self, route: str, callback: object):
"""
Binds a POST route with the given callback
:rtype: object
"""
self.__set_route('post', {route: callback})
return RouteMapping
|
Binds a PUT route with the given callback
:rtype: object
def put(self, route: str, callback: object):
"""
Binds a PUT route with the given callback
:rtype: object
"""
self.__set_route('put', {route: callback})
return RouteMapping
|
Binds a PATCH route with the given callback
:rtype: object
def patch(self, route: str, callback: object):
"""
Binds a PATCH route with the given callback
:rtype: object
"""
self.__set_route('patch', {route: callback})
return RouteMapping
|
Binds a DELETE route with the given callback
:rtype: object
def delete(self, route: str, callback: object):
    """
    Binds a DELETE route with the given callback
:rtype: object
"""
self.__set_route('delete', {route: callback})
return RouteMapping
|
Binds a HEAD route with the given callback
:rtype: object
def head(self, route: str, callback: object):
"""
Binds a HEAD route with the given callback
:rtype: object
"""
self.__set_route('head', {route: callback})
return RouteMapping
|
Binds an OPTIONS route with the given callback
:rtype: object
def options(self, route: str, callback: object):
    """
    Binds an OPTIONS route with the given callback
:rtype: object
"""
self.__set_route('options', {route: callback})
return RouteMapping
|
Sets the given type_route and route to the route mapping
:rtype: object
def __set_route(self, type_route, route):
"""
Sets the given type_route and route to the route mapping
:rtype: object
"""
if type_route in self.__routes:
if not self.verify_route_already_bound(type_route, route):
self.__routes[type_route].append(route)
else:
self.__routes[type_route] = [route]
return RouteMapping
|
Return a string identifying the operating system the application
is running on.
:rtype: str
def operating_system():
"""Return a string identifying the operating system the application
is running on.
:rtype: str
"""
if platform.system() == 'Darwin':
return 'OS X Version %s' % platform.mac_ver()[0]
    # platform.linux_distribution() was removed in Python 3.8; newer Pythons
    # typically use the third-party 'distro' package instead
    distribution = ' '.join(platform.linux_distribution()).strip()
os_platform = platform.platform(True, True)
if distribution:
os_platform += ' (%s)' % distribution
return os_platform
|
Daemonize if the process is not already running.
def start(self):
"""Daemonize if the process is not already running."""
if self._is_already_running():
LOGGER.error('Is already running')
sys.exit(1)
try:
self._daemonize()
self.controller.start()
except Exception as error:
            sys.stderr.write('\nERROR: Startup of %s failed.\n' %
                             sys.argv[0].split('/')[-1])
exception_log = self._get_exception_log_path()
if exception_log:
with open(exception_log, 'a') as handle:
timestamp = datetime.datetime.now().isoformat()
handle.write('{:->80}\n'.format(' [START]'))
handle.write('%s Exception [%s]\n' % (sys.argv[0],
timestamp))
handle.write('{:->80}\n'.format(' [INFO]'))
handle.write('Interpreter: %s\n' % sys.executable)
handle.write('CLI arguments: %s\n' % ' '.join(sys.argv))
handle.write('Exception: %s\n' % error)
handle.write('Traceback:\n')
output = traceback.format_exception(*sys.exc_info())
                    for line in output:
                        handle.write(line)
                        sys.stdout.write(line)
handle.write('{:->80}\n'.format(' [END]'))
handle.flush()
sys.stderr.write('\nException log: %s\n\n' % exception_log)
sys.exit(1)
|
Return the group id that the daemon will run with
:rtype: int
def gid(self):
"""Return the group id that the daemon will run with
:rtype: int
"""
if not self._gid:
        if self.config.daemon.group:
self._gid = grp.getgrnam(self.config.daemon.group).gr_gid
else:
self._gid = os.getgid()
return self._gid
|
Return the user id that the process will run as
:rtype: int
def uid(self):
"""Return the user id that the process will run as
:rtype: int
"""
if not self._uid:
if self.config.daemon.user:
self._uid = pwd.getpwnam(self.config.daemon.user).pw_uid
else:
self._uid = os.getuid()
return self._uid
|
Fork into a background process and setup the process, copied in part
from http://www.jejik.com/files/examples/daemon3x.py
def _daemonize(self):
"""Fork into a background process and setup the process, copied in part
from http://www.jejik.com/files/examples/daemon3x.py
"""
LOGGER.info('Forking %s into the background', sys.argv[0])
# Write the pidfile if current uid != final uid
if os.getuid() != self.uid:
fd = open(self.pidfile_path, 'w')
os.fchmod(fd.fileno(), 0o644)
os.fchown(fd.fileno(), self.uid, self.gid)
fd.close()
try:
pid = os.fork()
if pid > 0:
sys.exit(0)
except OSError as error:
        raise OSError('Could not fork off parent: %s' % error)
# Set the user id
if self.uid != os.getuid():
os.setuid(self.uid)
# Set the group id
if self.gid != os.getgid():
try:
os.setgid(self.gid)
except OSError as error:
LOGGER.error('Could not set group: %s', error)
# Decouple from parent environment
os.chdir('/')
os.setsid()
os.umask(0o022)
# Fork again
try:
pid = os.fork()
if pid > 0:
sys.exit(0)
except OSError as error:
        raise OSError('Could not fork child: %s' % error)
# redirect standard file descriptors
sys.stdout.flush()
sys.stderr.flush()
si = open(os.devnull, 'r')
so = open(os.devnull, 'a+')
se = open(os.devnull, 'a+')
os.dup2(si.fileno(), sys.stdin.fileno())
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
# Automatically call self._remove_pidfile when the app exits
atexit.register(self._remove_pidfile)
self._write_pidfile()
|
Return the normalized path for the exception log, raising an
exception if it cannot be written to.
:return: str
def _get_exception_log_path():
"""Return the normalized path for the connection log, raising an
exception if it can not written to.
:return: str
"""
app = sys.argv[0].split('/')[-1]
for exception_log in ['/var/log/%s.errors' % app,
'/var/tmp/%s.errors' % app,
'/tmp/%s.errors' % app]:
if os.access(path.dirname(exception_log), os.W_OK):
return exception_log
return None
|
Return the normalized path for the pidfile, raising an
exception if it cannot be written to.
:return: str
:raises: ValueError
:raises: OSError
def _get_pidfile_path(self):
"""Return the normalized path for the pidfile, raising an
    exception if it cannot be written to.
:return: str
:raises: ValueError
:raises: OSError
"""
if self.config.daemon.pidfile:
pidfile = path.abspath(self.config.daemon.pidfile)
if not os.access(path.dirname(pidfile), os.W_OK):
raise ValueError('Cannot write to specified pid file path'
' %s' % pidfile)
return pidfile
app = sys.argv[0].split('/')[-1]
for pidfile in ['%s/pids/%s.pid' % (os.getcwd(), app),
'/var/run/%s.pid' % app,
'/var/run/%s/%s.pid' % (app, app),
'/var/tmp/%s.pid' % app,
'/tmp/%s.pid' % app,
'%s.pid' % app]:
if os.access(path.dirname(pidfile), os.W_OK):
return pidfile
raise OSError('Could not find an appropriate place for a pid file')
|
Check to see if the process is running, first looking for a pidfile,
then shelling out in either case, removing a pidfile if it exists but
the process is not running.
def _is_already_running(self):
"""Check to see if the process is running, first looking for a pidfile,
then shelling out in either case, removing a pidfile if it exists but
the process is not running.
"""
# Look for the pidfile, if exists determine if the process is alive
pidfile = self._get_pidfile_path()
if os.path.exists(pidfile):
        with open(pidfile) as handle:
            pid = handle.read().strip()
try:
os.kill(int(pid), 0)
sys.stderr.write('Process already running as pid # %s\n' % pid)
return True
except OSError as error:
LOGGER.debug('Found pidfile, no process # %s', error)
os.unlink(pidfile)
# Check the os for a process that is not this one that looks the same
pattern = ' '.join(sys.argv)
pattern = '[%s]%s' % (pattern[0], pattern[1:])
try:
output = subprocess.check_output('ps a | grep "%s"' % pattern,
shell=True)
except AttributeError:
# Python 2.6
stdin, stdout, stderr = os.popen3('ps a | grep "%s"' % pattern)
output = stdout.read()
except subprocess.CalledProcessError:
return False
pids = [int(pid) for pid in (re.findall(r'^([0-9]+)\s',
output.decode('latin-1')))]
if os.getpid() in pids:
pids.remove(os.getpid())
if not pids:
return False
if len(pids) == 1:
pids = pids[0]
sys.stderr.write('Process already running as pid # %s\n' % pids)
return True
|
Remove the pid file from the filesystem
def _remove_pidfile(self):
"""Remove the pid file from the filesystem"""
LOGGER.debug('Removing pidfile: %s', self.pidfile_path)
try:
os.unlink(self.pidfile_path)
except OSError:
pass
|
Write the pid file out with the process number in the pid file
def _write_pidfile(self):
"""Write the pid file out with the process number in the pid file"""
LOGGER.debug('Writing pidfile: %s', self.pidfile_path)
with open(self.pidfile_path, "w") as handle:
handle.write(str(os.getpid()))
|
Convert a string from snake case to camel case. For example, "some_var" would become "someVar".
:param snake_case_string: Snake-cased string to convert to camel case.
:returns: Camel-cased version of snake_case_string.
def to_camel_case(snake_case_string):
"""
Convert a string from snake case to camel case. For example, "some_var" would become "someVar".
:param snake_case_string: Snake-cased string to convert to camel case.
:returns: Camel-cased version of snake_case_string.
"""
parts = snake_case_string.lstrip('_').split('_')
return parts[0] + ''.join([i.title() for i in parts[1:]])
|
Convert a string from snake case to camel case with the first letter capitalized. For example, "some_var"
would become "SomeVar".
:param snake_case_string: Snake-cased string to convert to camel case.
:returns: Camel-cased version of snake_case_string.
def to_capitalized_camel_case(snake_case_string):
"""
Convert a string from snake case to camel case with the first letter capitalized. For example, "some_var"
would become "SomeVar".
:param snake_case_string: Snake-cased string to convert to camel case.
:returns: Camel-cased version of snake_case_string.
"""
parts = snake_case_string.split('_')
return ''.join([i.title() for i in parts])
|
Convert a string from camel case to snake case. For example, "someVar" would become "some_var".
:param camel_case_string: Camel-cased string to convert to snake case.
:return: Snake-cased version of camel_case_string.
def to_snake_case(camel_case_string):
"""
    Convert a string from camel case to snake case. For example, "someVar" would become "some_var".
:param camel_case_string: Camel-cased string to convert to snake case.
:return: Snake-cased version of camel_case_string.
"""
first_pass = _first_camel_case_regex.sub(r'\1_\2', camel_case_string)
return _second_camel_case_regex.sub(r'\1_\2', first_pass).lower()
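The two module-level patterns are not shown in this snippet; the usual two-pass definitions (an assumption here, not taken from the source) are:
import re
_first_camel_case_regex = re.compile(r'(.)([A-Z][a-z]+)')
_second_camel_case_regex = re.compile(r'([a-z0-9])([A-Z])')
# to_snake_case('someVar')  # -> 'some_var'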
|
Make a copy of a dictionary with all keys converted to snake case. This just calls to_snake_case on
each of the keys in the dictionary and returns a new dictionary.
:param camel_case_dict: Dictionary with the keys to convert.
:type camel_case_dict: Dictionary.
:return: Dictionary with the keys converted to snake case.
def keys_to_snake_case(camel_case_dict):
"""
    Make a copy of a dictionary with all keys converted to snake case. This just calls to_snake_case on
each of the keys in the dictionary and returns a new dictionary.
:param camel_case_dict: Dictionary with the keys to convert.
:type camel_case_dict: Dictionary.
:return: Dictionary with the keys converted to snake case.
"""
return dict((to_snake_case(key), value) for (key, value) in camel_case_dict.items())
|
List the deployed lambda functions and print configuration.
:return: exit_code
def list_functions(awsclient):
"""List the deployed lambda functions and print configuration.
:return: exit_code
"""
client_lambda = awsclient.get_client('lambda')
response = client_lambda.list_functions()
for function in response['Functions']:
log.info(function['FunctionName'])
log.info('\t' 'Memory: ' + str(function['MemorySize']))
log.info('\t' 'Timeout: ' + str(function['Timeout']))
log.info('\t' 'Role: ' + str(function['Role']))
log.info('\t' 'Current Version: ' + str(function['Version']))
log.info('\t' 'Last Modified: ' + str(function['LastModified']))
log.info('\t' 'CodeSha256: ' + str(function['CodeSha256']))
log.info('\n')
return 0
|
Create or update a lambda function.
:param awsclient:
:param function_name:
:param role:
:param handler_filename:
:param handler_function:
:param folders:
:param description:
:param timeout:
:param memory:
:param subnet_ids:
:param security_groups:
:param artifact_bucket:
:param zipfile:
:param environment: environment variables
:param retention_in_days: retention time of the cloudwatch logs
:return: exit_code
def deploy_lambda(awsclient, function_name, role, handler_filename,
handler_function,
folders, description, timeout, memory, subnet_ids=None,
security_groups=None, artifact_bucket=None,
zipfile=None,
fail_deployment_on_unsuccessful_ping=False,
runtime='python2.7', settings=None, environment=None,
retention_in_days=None
):
"""Create or update a lambda function.
:param awsclient:
:param function_name:
:param role:
:param handler_filename:
:param handler_function:
:param folders:
:param description:
:param timeout:
:param memory:
:param subnet_ids:
:param security_groups:
:param artifact_bucket:
:param zipfile:
:param environment: environment variables
:param retention_in_days: retention time of the cloudwatch logs
:return: exit_code
"""
# TODO: the signature of this function is too big, clean this up
# also consolidate create, update, config and add waiters!
if lambda_exists(awsclient, function_name):
function_version = _update_lambda(awsclient, function_name,
handler_filename,
handler_function, folders, role,
description, timeout, memory,
subnet_ids, security_groups,
artifact_bucket=artifact_bucket,
zipfile=zipfile,
environment=environment
)
else:
if not zipfile:
return 1
log.info('buffer size: %0.2f MB' % float(len(zipfile) / 1000000.0))
function_version = _create_lambda(awsclient, function_name, role,
handler_filename, handler_function,
folders, description, timeout,
memory, subnet_ids, security_groups,
artifact_bucket, zipfile,
runtime=runtime,
environment=environment)
# configure cloudwatch logs
if retention_in_days:
log_group_name = '/aws/lambda/%s' % function_name
put_retention_policy(awsclient, log_group_name, retention_in_days)
pong = ping(awsclient, function_name, version=function_version)
if 'alive' in str(pong):
log.info(colored.green('Great you\'re already accepting a ping ' +
'in your Lambda function'))
    elif fail_deployment_on_unsuccessful_ping and 'alive' not in str(pong):
log.info(colored.red('Pinging your lambda function failed'))
# we do not deploy alias and fail command
return 1
else:
log.info(colored.red('Please consider adding a reaction to a ' +
'ping event to your lambda function'))
_deploy_alias(awsclient, function_name, function_version)
return 0
|
Write zipfile contents to file.
:param zipfile:
:return: exit_code
def bundle_lambda(zipfile):
"""Write zipfile contents to file.
:param zipfile:
:return: exit_code
"""
# TODO have 'bundle.zip' as default config
if not zipfile:
return 1
with open('bundle.zip', 'wb') as zfile:
zfile.write(zipfile)
log.info('Finished - a bundle.zip is waiting for you...')
return 0
|
Print out cloudformation metrics for a lambda function.
:param awsclient
:param name: name of the lambda function
:return: exit_code
def get_metrics(awsclient, name):
"""Print out cloudformation metrics for a lambda function.
:param awsclient
:param name: name of the lambda function
:return: exit_code
"""
metrics = ['Duration', 'Errors', 'Invocations', 'Throttles']
client_cw = awsclient.get_client('cloudwatch')
for metric in metrics:
response = client_cw.get_metric_statistics(
Namespace='AWS/Lambda',
MetricName=metric,
Dimensions=[
{
'Name': 'FunctionName',
'Value': name
},
],
# StartTime=datetime.now() + timedelta(days=-1),
# EndTime=datetime.now(),
StartTime=maya.now().subtract(days=1).datetime(),
EndTime=maya.now().datetime(),
Period=3600,
Statistics=[
'Sum',
],
Unit=unit(metric)
)
log.info('\t%s %s' % (metric,
repr(aggregate_datapoints(response['Datapoints']))))
return 0
|
Rollback a lambda function to a given version.
:param awsclient:
:param function_name:
:param alias_name:
:param version:
:return: exit_code
def rollback(awsclient, function_name, alias_name=ALIAS_NAME, version=None):
"""Rollback a lambda function to a given version.
:param awsclient:
:param function_name:
:param alias_name:
:param version:
:return: exit_code
"""
if version:
log.info('rolling back to version {}'.format(version))
else:
log.info('rolling back to previous version')
version = _get_previous_version(awsclient, function_name, alias_name)
if version == '0':
log.error('unable to find previous version of lambda function')
return 1
log.info('new version is %s' % str(version))
_update_alias(awsclient, function_name, version, alias_name)
return 0
|
Delete a lambda function.
:param awsclient:
:param function_name:
:param events: list of events
:param delete_logs:
:return: exit_code
def delete_lambda(awsclient, function_name, events=None, delete_logs=False):
"""Delete a lambda function.
:param awsclient:
:param function_name:
:param events: list of events
:param delete_logs:
:return: exit_code
"""
if events is not None:
unwire(awsclient, events, function_name, alias_name=ALIAS_NAME)
client_lambda = awsclient.get_client('lambda')
response = client_lambda.delete_function(FunctionName=function_name)
if delete_logs:
log_group_name = '/aws/lambda/%s' % function_name
delete_log_group(awsclient, log_group_name)
# TODO remove event source first and maybe also needed for permissions
log.info(json2table(response))
return 0
|
Deprecated: please use delete_lambda!
:param awsclient:
:param function_name:
:param s3_event_sources:
:param time_event_sources:
:param delete_logs:
:return: exit_code
def delete_lambda_deprecated(awsclient, function_name, s3_event_sources=None,
                             time_event_sources=None, delete_logs=False):
    """Deprecated: please use delete_lambda!
    :param awsclient:
    :param function_name:
    :param s3_event_sources:
    :param time_event_sources:
    :param delete_logs:
    :return: exit_code
    """
    # None defaults avoid the shared-mutable-default-argument pitfall
    s3_event_sources = s3_event_sources or []
    time_event_sources = time_event_sources or []
    unwire_deprecated(awsclient, function_name, s3_event_sources=s3_event_sources,
time_event_sources=time_event_sources,
alias_name=ALIAS_NAME)
client_lambda = awsclient.get_client('lambda')
response = client_lambda.delete_function(FunctionName=function_name)
if delete_logs:
log_group_name = '/aws/lambda/%s' % function_name
delete_log_group(awsclient, log_group_name)
# TODO remove event source first and maybe also needed for permissions
log.info(json2table(response))
return 0
|
Deletes files used for creating bundle.
* vendored/*
* bundle.zip
def cleanup_bundle():
"""Deletes files used for creating bundle.
* vendored/*
* bundle.zip
"""
paths = ['./vendored', './bundle.zip']
for path in paths:
if os.path.exists(path):
log.debug("Deleting %s..." % path)
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
|
Send a ping request to a lambda function.
:param awsclient:
:param function_name:
:param alias_name:
:param version:
:return: ping response payload
def ping(awsclient, function_name, alias_name=ALIAS_NAME, version=None):
"""Send a ping request to a lambda function.
:param awsclient:
:param function_name:
:param alias_name:
:param version:
:return: ping response payload
"""
log.debug('sending ping to lambda function: %s', function_name)
payload = '{"ramuda_action": "ping"}' # default to ping event
# reuse invoke
return invoke(awsclient, function_name, payload, invocation_type=None,
alias_name=alias_name, version=version)
|
Invoke a lambda function.
:param awsclient:
:param function_name:
:param payload:
:param invocation_type:
:param alias_name:
:param version:
:param outfile: write response to file
:return: response payload
def invoke(awsclient, function_name, payload, invocation_type=None,
alias_name=ALIAS_NAME, version=None, outfile=None):
"""Send a ping request to a lambda function.
:param awsclient:
:param function_name:
:param payload:
:param invocation_type:
:param alias_name:
:param version:
:param outfile: write response to file
    :return: response payload
"""
log.debug('invoking lambda function: %s', function_name)
client_lambda = awsclient.get_client('lambda')
if invocation_type is None:
invocation_type = 'RequestResponse'
if payload.startswith('file://'):
log.debug('reading payload from file: %s' % payload)
with open(payload[7:], 'r') as pfile:
payload = pfile.read()
if version:
response = client_lambda.invoke(
FunctionName=function_name,
InvocationType=invocation_type,
Payload=payload,
Qualifier=version
)
else:
response = client_lambda.invoke(
FunctionName=function_name,
InvocationType=invocation_type,
Payload=payload,
Qualifier=alias_name
)
results = response['Payload'].read() # payload is a 'StreamingBody'
log.debug('invoke completed')
# write to file
if outfile:
with open(outfile, 'w') as ofile:
ofile.write(str(results))
ofile.flush()
return
else:
return results
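A usage sketch (the function name and payload file are hypothetical):
invoke(awsclient, 'my-function', '{"ramuda_action": "ping"}')
invoke(awsclient, 'my-function', 'file://./payload.json', outfile='response.txt')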
|