text
stringlengths 81
112k
|
---|
REV16 Ra, Rb
Reverse the byte order of the half words in register Rb and store the result in Ra
def REV16(self, params):
    """
    REV16 Ra, Rb

    Reverse the byte order of each half word in register Rb and store
    the result in Ra. Ra and Rb must be low registers.
    """
    Ra, Rb = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
    self.check_arguments(low_registers=(Ra, Rb))

    def REV16_func():
        value = self.register[Rb]
        # swap the two bytes inside each 16-bit half word
        self.register[Ra] = ((value >> 8) & 0x00FF00FF) | ((value << 8) & 0xFF00FF00)
    return REV16_func
|
REVSH Ra, Rb
Reverse the byte order in the lower half word of Rb and store the result in Ra.
If the result is signed (bit 15 set), then sign extend it.
def REVSH(self, params):
    """
    REVSH Ra, Rb

    Reverse the byte order of the lower half word of Rb, sign extend the
    16-bit result and store it in Ra. Ra and Rb must be low registers.
    """
    Ra, Rb = self.get_two_parameters(r'\s*([^\s,]*),\s*([^\s,]*)(,\s*[^\s,]*)*\s*', params)
    self.check_arguments(low_registers=(Ra, Rb))

    def REVSH_func():
        value = self.register[Rb]
        # swap the two bytes of the low half word
        swapped = ((value >> 8) & 0x000000FF) | ((value & 0x000000FF) << 8)
        self.register[Ra] = swapped
        # bit 15 is the sign bit of the 16-bit result; extend it to 32 bits
        if self.register[Ra] & (1 << 15):
            self.register[Ra] |= 0xFFFF0000
    return REVSH_func
|
SXTB Ra, Rb
Sign extend the byte in Rb and store the result in Ra
def SXTB(self, params):
    """
    SXTB Ra, Rb

    Sign extend the byte in Rb and store the result in Ra.
    Ra and Rb must be low registers.
    """
    # FIX: docstring previously read "STXB"; the instruction is SXTB
    Ra, Rb = self.get_two_parameters(r'\s*([^\s,]*),\s*([^\s,]*)(,\s*[^\s,]*)*\s*', params)
    self.check_arguments(low_registers=(Ra, Rb))

    def SXTB_func():
        # bit 7 is the sign bit of the byte
        if self.register[Rb] & (1 << 7):
            self.register[Ra] = 0xFFFFFF00 + (self.register[Rb] & 0xFF)
        else:
            self.register[Ra] = (self.register[Rb] & 0xFF)
    return SXTB_func
|
SXTH Ra, Rb
Sign extend the half word in Rb and store the result in Ra
def SXTH(self, params):
    """
    SXTH Ra, Rb

    Sign extend the half word in Rb and store the result in Ra.
    Ra and Rb must be low registers.
    """
    # FIX: docstring previously read "STXH"; the instruction is SXTH
    Ra, Rb = self.get_two_parameters(r'\s*([^\s,]*),\s*([^\s,]*)(,\s*[^\s,]*)*\s*', params)
    self.check_arguments(low_registers=(Ra, Rb))

    def SXTH_func():
        # bit 15 is the sign bit of the half word
        if self.register[Rb] & (1 << 15):
            self.register[Ra] = 0xFFFF0000 + (self.register[Rb] & 0xFFFF)
        else:
            self.register[Ra] = (self.register[Rb] & 0xFFFF)
    return SXTH_func
|
UXTB Ra, Rb
Zero extend the byte in Rb and store the result in Ra
def UXTB(self, params):
    """
    UXTB Ra, Rb

    Zero extend the byte in Rb and store the result in Ra.
    Ra and Rb must be low registers.
    """
    # FIX: docstring previously read "UTXB"; the instruction is UXTB
    Ra, Rb = self.get_two_parameters(r'\s*([^\s,]*),\s*([^\s,]*)(,\s*[^\s,]*)*\s*', params)
    self.check_arguments(low_registers=(Ra, Rb))

    def UXTB_func():
        # keep only the low byte; upper 24 bits become zero
        self.register[Ra] = (self.register[Rb] & 0xFF)
    return UXTB_func
|
UXTH Ra, Rb
Zero extend the half word in Rb and store the result in Ra
def UXTH(self, params):
    """
    UXTH Ra, Rb

    Zero extend the half word in Rb and store the result in Ra.
    Ra and Rb must be low registers.
    """
    # FIX: docstring previously read "UTXH"; the instruction is UXTH
    Ra, Rb = self.get_two_parameters(r'\s*([^\s,]*),\s*([^\s,]*)(,\s*[^\s,]*)*\s*', params)
    self.check_arguments(low_registers=(Ra, Rb))

    def UXTH_func():
        # keep only the low half word; upper 16 bits become zero
        self.register[Ra] = (self.register[Rb] & 0xFFFF)
    return UXTH_func
|
Get type of event e.g. 's3', 'events', 'kinesis',...
:param evt_source:
:return:
def _get_event_type(evt_source):
"""Get type of event e.g. 's3', 'events', 'kinesis',...
:param evt_source:
:return:
"""
if 'schedule' in evt_source:
return 'events'
elif 'pattern' in evt_source:
return 'events'
elif 'log_group_name_prefix' in evt_source:
return 'cloudwatch_logs'
else:
arn = evt_source['arn']
_, _, svc, _ = arn.split(':', 3)
return svc
|
Given awsclient, event_source dictionary item
create an event_source object of the appropriate event type
to schedule this event, and return the object.
def _get_event_source_obj(awsclient, evt_source):
    """
    Given awsclient, event_source dictionary item
    create an event_source object of the appropriate event type
    to schedule this event, and return the object.

    :param awsclient: client wrapper used to talk to AWS
    :param evt_source: event source dictionary
    :raises ValueError: if the event type is not supported
    """
    event_source_map = {
        'dynamodb': event_source.dynamodb_stream.DynamoDBStreamEventSource,
        'kinesis': event_source.kinesis.KinesisEventSource,
        's3': event_source.s3.S3EventSource,
        'sns': event_source.sns.SNSEventSource,
        'events': event_source.cloudwatch.CloudWatchEventSource,
        'cloudfront': event_source.cloudfront.CloudFrontEventSource,
        'cloudwatch_logs': event_source.cloudwatch_logs.CloudWatchLogsEventSource,
    }
    evt_type = _get_event_type(evt_source)
    event_source_func = event_source_map.get(evt_type, None)
    # BUG FIX: previously this tested `event_source` (the imported module,
    # always truthy) so unknown event types crashed with a TypeError
    # instead of raising the intended ValueError.
    if event_source_func is None:
        raise ValueError('Unknown event source: {0}'.format(
            evt_source['arn']))
    return event_source_func(awsclient, evt_source)
|
Given an event_source dictionary, create the object and add the event source.
def _add_event_source(awsclient, evt_source, lambda_arn):
    """
    Given an event_source dictionary, create the object and add the event source.

    Unlike zappa (which removes then re-adds) we update in place when the
    event source already exists, otherwise we add it.
    """
    source_obj = _get_event_source_obj(awsclient, evt_source)
    if source_obj.exists(lambda_arn):
        source_obj.update(lambda_arn)
    else:
        source_obj.add(lambda_arn)
|
Given an event_source dictionary, create the object and remove the event source.
def _remove_event_source(awsclient, evt_source, lambda_arn):
    """
    Given an event_source dictionary, create the object and remove the event source.
    """
    source_obj = _get_event_source_obj(awsclient, evt_source)
    # only remove what is actually wired up
    if source_obj.exists(lambda_arn):
        source_obj.remove(lambda_arn)
|
Given an event_source dictionary, create the object and get the event source status.
def _get_event_source_status(awsclient, evt_source, lambda_arn):
    """
    Given an event_source dictionary, create the object and get the event source status.
    """
    source_obj = _get_event_source_obj(awsclient, evt_source)
    return source_obj.status(lambda_arn)
|
Unwire a list of events from an AWS Lambda function.
'events' is a list of dictionaries, where the dict must contains the
'schedule' of the event as string, and an optional 'name' and 'description'.
:param awsclient:
:param events: list of events
:param lambda_name:
:param alias_name:
:return: exit_code
def unwire(awsclient, events, lambda_name, alias_name=ALIAS_NAME):
    """Unwire a list of events from an AWS Lambda function.

    'events' is a list of dictionaries; each dict must contain an
    'event_source' entry (see the loop below).

    :param awsclient: client wrapper used to obtain the boto3 'lambda' client
    :param events: list of events
    :param lambda_name: name of the lambda function
    :param alias_name: lambda alias whose ARN the events are detached from
    :return: exit_code (0 on success, 1 if the function does not exist)
    """
    if not lambda_exists(awsclient, lambda_name):
        log.error(colored.red('The function you try to wire up doesn\'t ' +
                              'exist... Bailing out...'))
        return 1
    client_lambda = awsclient.get_client('lambda')
    lambda_function = client_lambda.get_function(FunctionName=lambda_name)
    # the event sources are attached to the alias ARN, not the function ARN
    lambda_arn = client_lambda.get_alias(FunctionName=lambda_name,
                                         Name=alias_name)['AliasArn']
    log.info('UN-wiring lambda_arn %s ' % lambda_arn)
    # TODO why load the policies here?
    '''
    policies = None
    try:
        result = client_lambda.get_policy(FunctionName=lambda_name,
                                          Qualifier=alias_name)
        policies = json.loads(result['Policy'])
    except ClientError as e:
        if e.response['Error']['Code'] == 'ResourceNotFoundException':
            log.warn("Permission policies not found")
        else:
            raise e
    '''
    if lambda_function is not None:
        #_unschedule_events(awsclient, events, lambda_arn)
        # remove each event source individually
        for event in events:
            evt_source = event['event_source']
            _remove_event_source(awsclient, evt_source, lambda_arn)
    return 0
|
Deprecated! Please use wire!
:param awsclient:
:param function_name:
:param s3_event_sources: dictionary
:param time_event_sources:
:param alias_name:
:return: exit_code
def wire_deprecated(awsclient, function_name, s3_event_sources=None,
                    time_event_sources=None,
                    alias_name=ALIAS_NAME):
    """Deprecated! Please use wire!

    Wires S3 and CloudWatch time event sources to a lambda function alias.

    :param awsclient: client wrapper used to obtain the boto3 'lambda' client
    :param function_name: name of the lambda function
    :param s3_event_sources: dictionary
    :param time_event_sources: list of time (CloudWatch) event sources
    :param alias_name: lambda alias whose ARN the events are attached to
    :return: exit_code (0 on success, 1 if the function does not exist)
    """
    if not lambda_exists(awsclient, function_name):
        log.error(colored.red('The function you try to wire up doesn\'t ' +
                              'exist... Bailing out...'))
        return 1
    client_lambda = awsclient.get_client('lambda')
    lambda_function = client_lambda.get_function(FunctionName=function_name)
    # the event sources are attached to the alias ARN, not the function ARN
    lambda_arn = client_lambda.get_alias(FunctionName=function_name,
                                         Name=alias_name)['AliasArn']
    log.info('wiring lambda_arn %s ...' % lambda_arn)
    if lambda_function is not None:
        # split event sources by their 'ensure' state (exists / absent)
        s3_events_ensure_exists, s3_events_ensure_absent = filter_events_ensure(
            s3_event_sources)
        cloudwatch_events_ensure_exists, cloudwatch_events_ensure_absent = \
            filter_events_ensure(time_event_sources)
        # process the 'absent' entries first, then the 'exists' entries
        for s3_event_source in s3_events_ensure_absent:
            _ensure_s3_event(awsclient, s3_event_source, function_name,
                             alias_name, lambda_arn, s3_event_source['ensure'])
        for s3_event_source in s3_events_ensure_exists:
            _ensure_s3_event(awsclient, s3_event_source, function_name,
                             alias_name, lambda_arn, s3_event_source['ensure'])
        for time_event in cloudwatch_events_ensure_absent:
            _ensure_cloudwatch_event(awsclient, time_event, function_name,
                                     alias_name, lambda_arn,
                                     time_event['ensure'])
        for time_event in cloudwatch_events_ensure_exists:
            _ensure_cloudwatch_event(awsclient, time_event, function_name,
                                     alias_name, lambda_arn,
                                     time_event['ensure'])
    return 0
|
Deprecated! Please use unwire!
:param awsclient:
:param function_name:
:param s3_event_sources: dictionary
:param time_event_sources:
:param alias_name:
:return: exit_code
def unwire_deprecated(awsclient, function_name, s3_event_sources=None,
                      time_event_sources=None, alias_name=ALIAS_NAME):
    """Deprecated! Please use unwire!

    Removes S3 notifications and CloudWatch rules (plus the related lambda
    invoke permissions) attached to the lambda alias.

    :param awsclient: client wrapper used to obtain the boto3 'lambda' client
    :param function_name: name of the lambda function
    :param s3_event_sources: dictionary
    :param time_event_sources: list of time (CloudWatch) event sources
    :param alias_name: lambda alias whose ARN the events are detached from
    :return: exit_code (0 on success, 1 if the function does not exist)
    """
    if not lambda_exists(awsclient, function_name):
        log.error(colored.red('The function you try to wire up doesn\'t ' +
                              'exist... Bailing out...'))
        return 1
    client_lambda = awsclient.get_client('lambda')
    lambda_function = client_lambda.get_function(FunctionName=function_name)
    lambda_arn = client_lambda.get_alias(FunctionName=function_name,
                                         Name=alias_name)['AliasArn']
    log.info('UN-wiring lambda_arn %s ' % lambda_arn)
    # the alias permission policy tells us which S3 buckets / CloudWatch
    # rules are currently allowed to invoke the function
    policies = None
    try:
        result = client_lambda.get_policy(FunctionName=function_name,
                                          Qualifier=alias_name)
        policies = json.loads(result['Policy'])
    except ClientError as e:
        if e.response['Error']['Code'] == 'ResourceNotFoundException':
            log.warn("Permission policies not found")
        else:
            raise e
    if lambda_function is not None:
        #### S3 Events
        # for every permission - delete it and corresponding rule (if exists)
        if policies:
            for statement in policies['Statement']:
                if statement['Principal']['Service'] == 's3.amazonaws.com':
                    source_bucket = get_bucket_from_s3_arn(
                        statement['Condition']['ArnLike']['AWS:SourceArn'])
                    log.info('\tRemoving S3 permission {} invoking {}'.format(
                        source_bucket, lambda_arn))
                    _remove_permission(awsclient, function_name,
                                       statement['Sid'], alias_name)
                    log.info('\tRemoving All S3 events {} invoking {}'.format(
                        source_bucket, lambda_arn))
                    _remove_events_from_s3_bucket(awsclient, source_bucket,
                                                  lambda_arn)
        # Case: s3 events without permissions active "safety measure"
        for s3_event_source in s3_event_sources:
            bucket_name = s3_event_source.get('bucket')
            _remove_events_from_s3_bucket(awsclient, bucket_name, lambda_arn)
        #### CloudWatch Events
        # for every permission - delete it and corresponding rule (if exists)
        if policies:
            for statement in policies['Statement']:
                if statement['Principal']['Service'] == 'events.amazonaws.com':
                    rule_name = get_rule_name_from_event_arn(
                        statement['Condition']['ArnLike']['AWS:SourceArn'])
                    log.info(
                        '\tRemoving Cloudwatch permission {} invoking {}'.format(
                            rule_name, lambda_arn))
                    _remove_permission(awsclient, function_name,
                                       statement['Sid'], alias_name)
                    log.info('\tRemoving Cloudwatch rule {} invoking {}'.format(
                        rule_name, lambda_arn))
                    _remove_cloudwatch_rule_event(awsclient, rule_name,
                                                  lambda_arn)
        # Case: rules without permissions active, "safety measure"
        for time_event in time_event_sources:
            rule_name = time_event.get('ruleName')
            _remove_cloudwatch_rule_event(awsclient, rule_name, lambda_arn)
    return 0
|
Use only prefix OR suffix
:param arn:
:param event:
:param bucket:
:param prefix:
:param suffix:
:return:
def _lambda_add_s3_event_source(awsclient, arn, event, bucket, prefix,
                                suffix):
    """Register an S3 bucket notification that invokes the lambda *arn*.

    Use only prefix OR suffix

    :param arn: lambda function ARN to invoke
    :param event: S3 event name, e.g. 's3:ObjectCreated:*'
    :param bucket: bucket the notification is attached to
    :param prefix: key prefix filter
    :param suffix: key suffix filter
    :return: response rendered as table
    """
    lambda_config = {
        'LambdaFunctionArn': arn,
        'Id': str(uuid.uuid1()),
        'Events': [event],
        'Filter': {
            'Key': {
                'FilterRules': build_filter_rules(prefix, suffix)
            }
        }
    }
    # http://docs.aws.amazon.com/cli/latest/reference/s3api/put-bucket-notification-configuration.html
    # http://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html
    client_s3 = awsclient.get_client('s3')
    bucket_configurations = client_s3.get_bucket_notification_configuration(
        Bucket=bucket)
    # ResponseMetadata must not be echoed back in the put call; use a
    # default so a missing key cannot raise KeyError
    bucket_configurations.pop('ResponseMetadata', None)
    # append to the existing lambda configurations (create the list if absent)
    bucket_configurations.setdefault(
        'LambdaFunctionConfigurations', []).append(lambda_config)
    response = client_s3.put_bucket_notification_configuration(
        Bucket=bucket,
        NotificationConfiguration=bucket_configurations
    )
    # TODO don't return a table, but success state
    return json2table(response)
|
r'''
Try to find the Eigen library. If successful the include directory is returned.
def find_eigen(hint=None):
    r'''
    Try to find the Eigen library. If successful the include directory is
    returned, otherwise None.

    :param hint: optional list of extra directories to search first
                 (the list is not modified)
    '''
    # search with pkgconfig
    # ---------------------
    try:
        import pkgconfig
        if pkgconfig.installed('eigen3', '>3.0.0'):
            return pkgconfig.parse('eigen3')['include_dirs'][0]
    except Exception:
        # FIX: was a bare `except:` which also swallowed SystemExit /
        # KeyboardInterrupt; pkgconfig may be missing or broken, fall
        # through to the manual search
        pass
    # manual search
    # -------------
    # FIX: copy `hint` so `+=` below does not mutate the caller's list
    search_dirs = [] if hint is None else list(hint)
    search_dirs += [
        "/usr/local/include/eigen3",
        "/usr/local/homebrew/include/eigen3",
        "/opt/local/var/macports/software/eigen3",
        "/opt/local/include/eigen3",
        "/usr/include/eigen3",
        "/usr/include/local",
        "/usr/include",
    ]
    for d in search_dirs:
        path = os.path.join(d, "Eigen", "Dense")
        if not os.path.exists(path):
            continue
        vf = os.path.join(d, "Eigen", "src", "Core", "util", "Macros.h")
        if not os.path.exists(vf):
            continue
        # FIX: close the file handle instead of leaking it
        with open(vf, "r") as fh:
            src = fh.read()
        v1 = re.findall("#define EIGEN_WORLD_VERSION (.+)", src)
        v2 = re.findall("#define EIGEN_MAJOR_VERSION (.+)", src)
        v3 = re.findall("#define EIGEN_MINOR_VERSION (.+)", src)
        if not len(v1) or not len(v2) or not len(v3):
            continue
        v = "{0}.{1}.{2}".format(v1[0], v2[0], v3[0])
        print("Found Eigen version {0} in: {1}".format(v, d))
        return d
    return None
|
Helper to read the params for the logs command
def check_and_format_logs_params(start, end, tail):
    """Helper to read the params for the logs command.

    `start` / `end` accept a duration shorthand such as "15m", "2h",
    "3d", "1w" (relative to now) or anything `maya.parse` understands.
    Returns a (start_dt, end_dt) tuple of naive datetimes; end_dt is
    None when no end was given.
    """
    duration_names = {'m': 'minutes', 'h': 'hours', 'd': 'days', 'w': 'weeks'}

    def _relative(spec):
        # e.g. "15m" -> now minus 15 minutes, as naive datetime
        amount = int(spec[:-1])
        unit = duration_names[spec[-1]]
        return maya.now().subtract(**{unit: amount}).datetime(naive=True)

    if not start:
        # default window: 5 minutes when tailing, otherwise one day
        if tail:
            start_dt = maya.now().subtract(seconds=300).datetime(naive=True)
        else:
            start_dt = maya.now().subtract(days=1).datetime(naive=True)
    elif start[-1] in duration_names:
        start_dt = _relative(start)
    else:
        start_dt = maya.parse(start).datetime(naive=True)

    if end and end[-1] in duration_names:
        end_dt = _relative(end)
    elif end:
        end_dt = maya.parse(end).datetime(naive=True)
    else:
        end_dt = None
    return start_dt, end_dt
|
Upload a file to AWS S3 bucket.
:param awsclient:
:param bucket:
:param key:
:param filename:
:return:
def upload_file_to_s3(awsclient, bucket, key, filename):
    """Upload a file to AWS S3 bucket.

    :param awsclient: client wrapper used to obtain the boto3 's3' client
    :param bucket: target bucket
    :param key: target key
    :param filename: local file to upload
    :return: (etag, version_id) of the uploaded object
    """
    client_s3 = awsclient.get_client('s3')
    # S3Transfer handles multipart uploads for us
    S3Transfer(client_s3).upload_file(filename, bucket, key)
    head = client_s3.head_object(Bucket=bucket, Key=key)
    return head.get('ETag'), head.get('VersionId', None)
|
Remove a file from an AWS S3 bucket.
:param awsclient:
:param bucket:
:param key:
:return:
def remove_file_from_s3(awsclient, bucket, key):
    """Remove a file from an AWS S3 bucket.

    :param awsclient: client wrapper used to obtain the boto3 's3' client
    :param bucket: bucket name
    :param key: key to delete
    :return: None
    """
    awsclient.get_client('s3').delete_object(Bucket=bucket, Key=key)
|
List bucket contents
:param awsclient:
:param bucket:
:param prefix:
:return:
def ls(awsclient, bucket, prefix=None):
    """List bucket contents.

    :param awsclient: client wrapper used to obtain the boto3 's3' client
    :param bucket: bucket name
    :param prefix: optional key prefix
    :return: list of keys, or None when the bucket/prefix is empty
    """
    # this works until 1000 keys!
    request = {'Bucket': bucket}
    if prefix:
        request['Prefix'] = prefix
    listing = awsclient.get_client('s3').list_objects_v2(**request)
    if listing['KeyCount'] > 0:
        return [entry['Key'] for entry in listing['Contents']]
|
ORRS [Ra,] Ra, Rb
OR Ra and Rb together and store the result in Ra
The equivalent of `Ra = Ra | Rb`
Updates NZ flags
Ra and Rb must be low registers
The first register is optional
def ORRS(self, params):
    """
    ORRS [Ra,] Ra, Rb

    OR Ra and Rb together and store the result in Ra
    The equivalent of `Ra = Ra | Rb`
    Updates NZ flags
    Ra and Rb must be low registers
    The first register is optional
    """
    # The destination register is optional; when omitted it is assumed to
    # be the first source register, as defined in
    # http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
    try:
        Ra, Rb, Rc = self.get_three_parameters(
            self.THREE_PARAMETER_COMMA_SEPARATED, params)
    except iarm.exceptions.ParsingError:
        # two-parameter form: ORRS Ra, Rb -> ORRS Ra, Ra, Rb
        Rb, Rc = self.get_two_parameters(
            self.TWO_PARAMETER_COMMA_SEPARATED, params)
        Ra = Rb
    self.check_arguments(low_registers=(Ra, Rc))
    self.match_first_two_parameters(Ra, Rb)

    def ORRS_func():
        # ORRS Ra, Ra, Rb
        self.register[Ra] = self.register[Ra] | self.register[Rc]
        self.set_NZ_flags(self.register[Ra])
    return ORRS_func
|
TST Ra, Rb
AND Ra and Rb together and update the NZ flag. The result is not set
The equivalent of `Ra & Rb`
Ra and Rb must be low registers
def TST(self, params):
    """
    TST Ra, Rb

    AND Ra and Rb together and update the NZ flags; the result is discarded
    The equivalent of `Ra & Rb`
    Ra and Rb must be low registers
    """
    Ra, Rb = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
    self.check_arguments(low_registers=(Ra, Rb))

    def TST_func():
        # the AND result only feeds the flags, no register is written
        self.set_NZ_flags(self.register[Ra] & self.register[Rb])
    return TST_func
|
Renders a mail and returns the resulting ``EmailMultiAlternatives``
instance
* ``template``: The base name of the text and HTML (optional) version of
the mail.
* ``context``: The context used to render the mail. This context instance
should contain everything required.
* Additional keyword arguments are passed to the ``EmailMultiAlternatives``
instantiation. Use those to specify the ``to``, ``headers`` etc.
arguments.
Usage example::
# Render the template myproject/hello_mail.txt (first non-empty line
# contains the subject, third to last the body) and optionally the
# template myproject/hello_mail.html containing the alternative HTML
# representation.
message = render_to_mail('myproject/hello_mail', {}, to=[email])
message.send()
def render_to_mail(template, context, **kwargs):
    """
    Renders a mail and returns the resulting ``EmailMultiAlternatives``
    instance

    * ``template``: The base name of the text and HTML (optional) version
      of the mail. In the text template the first non-empty line is the
      subject and everything after it is the body.
    * ``context``: The context used to render the mail. This context
      instance should contain everything required.
    * Additional keyword arguments are passed to the
      ``EmailMultiAlternatives`` instantiation. Use those to specify the
      ``to``, ``headers`` etc. arguments.

    Usage example::

        message = render_to_mail('myproject/hello_mail', {}, to=[email])
        message.send()
    """
    rendered = render_to_string("%s.txt" % template, context)
    stripped_lines = [line.rstrip() for line in rendered.splitlines()]

    # the subject is the first non-empty line, the body everything below it
    subject = ""
    body_start = len(stripped_lines)
    for index, line in enumerate(stripped_lines):
        if line:
            subject = line
            body_start = index + 1
            break
    body = "\n".join(stripped_lines[body_start:]).strip("\n")

    message = EmailMultiAlternatives(subject=subject, body=body, **kwargs)
    try:
        # the HTML alternative is optional
        message.attach_alternative(
            render_to_string("%s.html" % template, context), "text/html"
        )
    except TemplateDoesNotExist:
        pass
    return message
|
Returns the confirmation URL
def get_confirmation_url(email, request, name="email_registration_confirm", **kwargs):
    """
    Returns the absolute confirmation URL for *email*.
    """
    code = get_confirmation_code(email, request, **kwargs)
    return request.build_absolute_uri(reverse(name, kwargs={"code": code}))
|
send_registration_mail(email, *, request, **kwargs)
Sends the registration mail
* ``email``: The email address where the registration link should be
sent to.
* ``request``: A HTTP request instance, used to construct the complete
URL (including protocol and domain) for the registration link.
* Additional keyword arguments for ``get_confirmation_url`` respectively
``get_confirmation_code``.
The mail is rendered using the following two templates:
* ``registration/email_registration_email.txt``: The first line of this
template will be the subject, the third to the last line the body of the
email.
* ``registration/email_registration_email.html``: The body of the HTML
version of the mail. This template is **NOT** available by default and
is not required either.
def send_registration_mail(email, *, request, **kwargs):
    """send_registration_mail(email, *, request, **kwargs)

    Sends the registration mail

    * ``email``: The email address where the registration link should be
      sent to.
    * ``request``: A HTTP request instance, used to construct the complete
      URL (including protocol and domain) for the registration link.
    * Additional keyword arguments for ``get_confirmation_url`` respectively
      ``get_confirmation_code``.

    The mail is rendered using the following two templates:

    * ``registration/email_registration_email.txt``: The first line of this
      template will be the subject, the third to the last line the body of
      the email.
    * ``registration/email_registration_email.html``: The body of the HTML
      version of the mail. This template is **NOT** available by default
      and is not required either.
    """
    url = get_confirmation_url(email, request, **kwargs)
    message = render_to_mail(
        "registration/email_registration_email",
        {"url": url},
        to=[email],
    )
    message.send()
|
decode(code, *, max_age)
Decodes the code from the registration link and returns a tuple consisting
of the verified email address and the payload which was passed through to
``get_confirmation_code``.
The maximum age in seconds of the link has to be specified as ``max_age``.
This method raises ``ValidationError`` exceptions when anything goes wrong
when verifying the signature or the expiry timeout.
def decode(code, *, max_age):
    """decode(code, *, max_age)

    Decodes the code from the registration link and returns a tuple
    consisting of the verified email address and the payload which was
    passed through to ``get_confirmation_code``.

    The maximum age in seconds of the link has to be specified as
    ``max_age``. Raises ``ValidationError`` when the signature cannot be
    verified or the link is expired.
    """
    signer = get_signer()
    try:
        signed_payload = signer.unsign(code, max_age=max_age)
    except signing.SignatureExpired:
        raise ValidationError(
            _("The link is expired. Please request another registration link."),
            code="email_registration_expired",
        )
    except signing.BadSignature:
        raise ValidationError(
            _(
                "Unable to verify the signature. Please request a new"
                " registration link."
            ),
            code="email_registration_signature",
        )
    return signed_payload.split(":", 1)
|
do_some_work
:param work_dict: dictionary for key/values
def do_some_work(
        self,
        work_dict):
    """do_some_work

    :param work_dict: dictionary for key/values
    :return: dict with a "job_results" message containing a fresh uuid
    """
    label = "do_some_work"
    log.info(("task - {} - start "
              "work_dict={}")
             .format(label,
                     work_dict))
    ret_data = {
        "job_results": ("some response key={}").format(
            str(uuid.uuid4()))
    }
    # BUG FIX: label and ret_data were passed in the wrong order, which
    # logged "task - <result> - result=<label> done"
    log.info(("task - {} - result={} done")
             .format(label,
                     ret_data))
    return ret_data
|
Builds route53 record entries enabling DNS names for services
Note: gcdt.route53 create_record(awsclient, ...)
is used in dataplatform cloudformation.py templates!
:param name_prefix: The sub domain prefix to use
:param instance_reference: The EC2 troposphere reference which's private IP should be linked to
:param type: The type of the record A or CNAME (default: A)
:param host_zone_name: The host zone name to use (like preprod.ds.glomex.cloud. - DO NOT FORGET THE DOT!)
:return: RecordSetType
def create_record(awsclient, name_prefix, instance_reference, type="A", host_zone_name=None):
    """
    Builds route53 record entries enabling DNS names for services

    Note: gcdt.route53 create_record(awsclient, ...)
    is used in dataplatform cloudformation.py templates!

    :param name_prefix: The sub domain prefix to use
    :param instance_reference: The EC2 troposphere reference which's private
        IP should be linked to, or any other resource record value
    :param type: The type of the record A or CNAME (default: A)
    :param host_zone_name: The host zone name to use
        (like preprod.ds.glomex.cloud. - DO NOT FORGET THE DOT!)
    :return: RecordSetType
    """
    # Only fetch the host zone from the COPS stack if necessary
    if host_zone_name is None:
        host_zone_name = _retrieve_stack_host_zone_name(awsclient)
    if type not in ("A", "CNAME"):
        raise Exception("Record set type is not supported!")
    record_name = name_prefix \
        .replace('.', '') \
        .replace('-', '') \
        .title() + "HostRecord"
    # Reference EC2 instances automatically via their private IP
    if isinstance(instance_reference, Instance):
        record_value = troposphere.GetAtt(instance_reference, "PrivateIp")
    else:
        record_value = instance_reference
    return RecordSetType(
        record_name,
        HostedZoneName=host_zone_name,
        Name=troposphere.Join("", [
            name_prefix + ".",
            host_zone_name,
        ]),
        Type=type,
        TTL=TTL_DEFAULT,
        ResourceRecords=[record_value],
    )
|
Use service discovery to get the host zone name from the default stack
:return: Host zone name as string
def _retrieve_stack_host_zone_name(awsclient, default_stack_name=None):
    """
    Use service discovery to get the host zone name from the default stack.

    The value is cached in the module-level ``_host_zone_name`` global, so
    the stack lookup happens at most once per process.

    :param awsclient: client wrapper used to query the stack outputs
    :param default_stack_name: stack to query; defaults to 'dp-<env>'
    :return: Host zone name as string (with a trailing dot)
    """
    global _host_zone_name
    # already resolved by a previous call?
    if _host_zone_name is not None:
        return _host_zone_name
    env = get_env()
    if env is None:
        print("Please set environment...")
        # TODO: why is there a sys.exit in library code used by cloudformation!!!
        sys.exit()
    if default_stack_name is None:
        # TODO why 'dp-<env>'? - this should not be hardcoded!
        default_stack_name = 'dp-%s' % env
    default_stack_output = get_outputs_for_stack(awsclient, default_stack_name)
    if HOST_ZONE_NAME__STACK_OUTPUT_NAME not in default_stack_output:
        print("Please debug why default stack '{}' does not contain '{}'...".format(
            default_stack_name,
            HOST_ZONE_NAME__STACK_OUTPUT_NAME,
        ))
        # TODO: why is there a sys.exit in library code used by cloudformation!!!
        sys.exit()
    # append the trailing dot required for route53 host zone names
    _host_zone_name = default_stack_output[HOST_ZONE_NAME__STACK_OUTPUT_NAME] + "."
    return _host_zone_name
|
Load and register installed gcdt plugins.
def load_plugins(group='gcdt10'):
    """Load and register installed gcdt plugins.
    """
    # on using entrypoints:
    # http://stackoverflow.com/questions/774824/explain-python-entry-points
    # TODO: make sure we do not have conflicting generators installed!
    for entry_point in pkg_resources.iter_entry_points(group, name=None):
        plugin = entry_point.load()  # load the plugin
        if not check_hook_mechanism_is_intact(plugin):
            log.warning('No valid hook configuration: %s. Not using hooks!', plugin)
            continue
        if check_register_present(plugin):
            # register the plugin so it listens to gcdt_signals
            plugin.register()
|
Load and register installed gcdt plugins.
def get_plugin_versions(group='gcdt10'):
    """Collect the versions of installed gcdt plugins.

    FIX: the docstring was a copy-paste from load_plugins; this function
    does not register anything.

    :param group: entry point group to inspect
    :return: dict mapping project name to version
    """
    return {
        ep.dist.project_name: ep.dist.version
        for ep in pkg_resources.iter_entry_points(group, name=None)
    }
|
Delete the specified log group
:param log_group_name: log group name
:return:
def delete_log_group(awsclient, log_group_name):
    """Delete the specified log group.

    :param awsclient: client wrapper used to obtain the boto3 'logs' client
    :param log_group_name: log group name
    :return: None
    """
    awsclient.get_client('logs').delete_log_group(logGroupName=log_group_name)
|
Sets the retention of the specified log group
if the log group does not yet exist than it will be created first.
:param log_group_name: log group name
:param retention_in_days: log group name
:return:
def put_retention_policy(awsclient, log_group_name, retention_in_days):
    """Sets the retention of the specified log group.

    If the log group does not yet exist then it will be created first.

    :param awsclient: client wrapper used to obtain the boto3 'logs' client
    :param log_group_name: log group name
    :param retention_in_days: retention time in days
    :return: None
    """
    try:
        # Note: for AWS Lambda the log_group is created once the first
        # log event occurs. So if the log_group does not exist we create it
        create_log_group(awsclient, log_group_name)
    except GracefulExit:
        # never swallow a user-requested shutdown
        raise
    except Exception:
        # TODO check that it is really a ResourceAlreadyExistsException
        pass
    client_logs = awsclient.get_client('logs')
    response = client_logs.put_retention_policy(
        logGroupName=log_group_name,
        retentionInDays=retention_in_days
    )
|
Note: this is used to retrieve logs in ramuda.
:param log_group_name: log group name
:param start_ts: timestamp
:param end_ts: timestamp
:return: list of log entries
def filter_log_events(awsclient, log_group_name, start_ts, end_ts=None):
    """
    Note: this is used to retrieve logs in ramuda.

    :param awsclient: client wrapper used to obtain the boto3 'logs' client
    :param log_group_name: log group name
    :param start_ts: timestamp
    :param end_ts: timestamp (optional)
    :return: list of log entries ({'timestamp': ..., 'message': ...})
    """
    client_logs = awsclient.get_client('logs')
    # TODO use all_pages instead!
    collected = []
    token = None
    while True:
        request = {
            'logGroupName': log_group_name,
            'startTime': start_ts
        }
        if end_ts:
            request['endTime'] = end_ts
        if token:
            request['nextToken'] = token
        page = client_logs.filter_log_events(**request)
        collected.extend(
            {'timestamp': entry['timestamp'], 'message': entry['message']}
            for entry in page['events']
        )
        # keep paging until the service stops handing out tokens
        if 'nextToken' not in page:
            break
        token = page['nextToken']
    return collected
|
Get info on the specified log group
:param log_group_name: log group name
:return:
def describe_log_group(awsclient, log_group_name):
    """Get info on the specified log group.

    :param awsclient: client wrapper used to obtain the boto3 'logs' client
    :param log_group_name: log group name
    :return: first matching log group dict, or None
    """
    client_logs = awsclient.get_client('logs')
    response = client_logs.describe_log_groups(
        logGroupNamePrefix=log_group_name,
        limit=1
    )
    groups = response['logGroups']
    return groups[0] if groups else None
|
Get info on the specified log stream
:param log_group_name: log group name
:param log_stream_name: log stream
:return:
def describe_log_stream(awsclient, log_group_name, log_stream_name):
    """Get info on the specified log stream.

    :param awsclient: client wrapper used to obtain the boto3 'logs' client
    :param log_group_name: log group name
    :param log_stream_name: log stream
    :return: first matching log stream dict, or None
    """
    client_logs = awsclient.get_client('logs')
    response = client_logs.describe_log_streams(
        logGroupName=log_group_name,
        logStreamNamePrefix=log_stream_name,
        limit=1
    )
    streams = response['logStreams']
    return streams[0] if streams else None
|
Creates a log group with the specified name.
:param log_group_name: log group name
:return:
def create_log_group(awsclient, log_group_name):
    """Creates a log group with the specified name.

    :param awsclient: client wrapper used to obtain the boto3 'logs' client
    :param log_group_name: log group name
    :return: None
    """
    awsclient.get_client('logs').create_log_group(logGroupName=log_group_name)
|
Creates a log stream for the specified log group.
:param log_group_name: log group name
:param log_stream_name: log stream name
:return:
def create_log_stream(awsclient, log_group_name, log_stream_name):
    """Creates a log stream for the specified log group.

    :param awsclient: client wrapper used to obtain the boto3 'logs' client
    :param log_group_name: log group name
    :param log_stream_name: log stream name
    :return: None
    """
    awsclient.get_client('logs').create_log_stream(
        logGroupName=log_group_name,
        logStreamName=log_stream_name
    )
|
Put log events for the specified log group and stream.
:param log_group_name: log group name
:param log_stream_name: log stream name
:param log_events: [{'timestamp': 123, 'message': 'string'}, ...]
:param sequence_token: the sequence token
:return: next_token
def put_log_events(awsclient, log_group_name, log_stream_name, log_events,
                   sequence_token=None):
    """Put log events for the specified log group and stream.

    :param awsclient: wrapper exposing ``get_client('logs')``
    :param log_group_name: log group name
    :param log_stream_name: log stream name
    :param log_events: [{'timestamp': 123, 'message': 'string'}, ...]
    :param sequence_token: the sequence token from the previous call
    :return: next sequence token, or None when the response carries none
    """
    client_logs = awsclient.get_client('logs')
    request = {
        'logGroupName': log_group_name,
        'logStreamName': log_stream_name,
        'logEvents': log_events
    }
    if sequence_token:
        request['sequenceToken'] = sequence_token
    response = client_logs.put_log_events(**request)
    if 'rejectedLogEventsInfo' in response:
        # `Logger.warn` is a deprecated alias; use `warning`
        log.warning(response['rejectedLogEventsInfo'])
    if 'nextSequenceToken' in response:
        return response['nextSequenceToken']
|
Get log events for the specified log group and stream.
this is used in tenkai output instance diagnostics
:param log_group_name: log group name
:param log_stream_name: log stream name
:param start_ts: timestamp
:return:
def get_log_events(awsclient, log_group_name, log_stream_name, start_ts=None):
    """Get log events for the specified log group and stream.

    This is used in tenkai output instance diagnostics.

    :param awsclient: wrapper exposing ``get_client('logs')``
    :param log_group_name: log group name
    :param log_stream_name: log stream name
    :param start_ts: timestamp (NOTE: a falsy value such as 0 is treated
        as "not set")
    :return: list of {'timestamp', 'message'} dicts, or None when there
        are no events
    """
    logs = awsclient.get_client('logs')
    request = {
        'logGroupName': log_group_name,
        'logStreamName': log_stream_name
    }
    if start_ts:
        request['startTime'] = start_ts
    # TODO exhaust the events!
    # TODO use all_pages !
    events = logs.get_log_events(**request).get('events')
    if events:
        return [{'timestamp': entry['timestamp'], 'message': entry['message']}
                for entry in events]
|
Check
:param log_group_name: log group name
:param log_stream_name: log stream name
:return: True / False
def check_log_stream_exists(awsclient, log_group_name, log_stream_name):
    """Check whether both the log group and the log stream exist.

    :param awsclient: wrapper exposing ``get_client('logs')``
    :param log_group_name: log group name
    :param log_stream_name: log stream name
    :return: True / False
    """
    group = describe_log_group(awsclient, log_group_name)
    if not group or group['logGroupName'] != log_group_name:
        return False
    stream = describe_log_stream(awsclient, log_group_name, log_stream_name)
    return bool(stream and stream['logStreamName'] == log_stream_name)
|
Convert unix timestamp (millis) into date & time we use in logs output.
:param timestamp: unix timestamp in millis
:return: date, time in UTC
def decode_format_timestamp(timestamp):
    """Convert unix timestamp (millis) into date & time we use in logs output.

    :param timestamp: unix timestamp in millis
    :return: (date, time) strings in UTC
    """
    from datetime import datetime, timezone
    # stdlib replacement for the former third-party `maya` dependency;
    # maya.MayaDT produced the same UTC wall-clock values
    dt = datetime.fromtimestamp(timestamp / 1000, tz=timezone.utc)
    return dt.strftime('%Y-%m-%d'), dt.strftime('%H:%M:%S')
|
Reload the configuration from disk returning True if the
configuration has changed from the previous values.
def reload(self):
    """Reload the configuration from disk.

    :return: True when the loaded configuration differs from the previous
        values (which are then replaced), otherwise False
    """
    fresh = self._default_configuration()
    if self._file_path:
        fresh.update(self._load_config_file())
    if fresh == self._values:
        return False
    self._values = fresh
    return True
|
Load the configuration file into memory, returning the content.
def _load_config_file(self):
    """Load the configuration file into memory, returning the content."""
    LOGGER.info('Loading configuration from %s', self._file_path)
    if self._file_path.endswith('json'):
        config = self._load_json_config()
    else:
        config = self._load_yaml_config()
    # Title-case the top-level keys in place; iterate over a snapshot
    # because the dict is mutated while looping.
    for key, value in list(config.items()):
        titled = key.title()
        if titled != key:
            config[titled] = value
            del config[key]
    return flatdict.FlatDict(config)
|
Load the configuration file in JSON format
:rtype: dict
def _load_json_config(self):
"""Load the configuration file in JSON format
:rtype: dict
"""
try:
return json.loads(self._read_config())
except ValueError as error:
raise ValueError(
'Could not read configuration file: {}'.format(error))
|
Loads the configuration file from a .yaml or .yml file
:type: dict
def _load_yaml_config(self):
    """Loads the configuration file from a .yaml or .yml file

    :type: dict
    """
    try:
        raw = self._read_config()
    except OSError as error:
        raise ValueError('Could not read configuration file: %s' % error)
    try:
        return yaml.safe_load(raw)
    except yaml.YAMLError as error:
        # indent every line of the YAML error for readable stderr output
        message = '\n'.join(' > %s' % line for line in str(error).split('\n'))
        sys.stderr.write('\n\n Error in the configuration file:\n\n'
                         '{}\n\n'.format(message))
        sys.stderr.write(' Configuration should be a valid YAML file.\n')
        sys.stderr.write(' YAML format validation available at '
                         'http://yamllint.com\n')
        raise ValueError(error)
|
Normalize the file path value.
:param str file_path: The file path as passed in
:rtype: str
def _normalize_file_path(file_path):
"""Normalize the file path value.
:param str file_path: The file path as passed in
:rtype: str
"""
if not file_path:
return None
elif file_path.startswith('s3://') or \
file_path.startswith('http://') or \
file_path.startswith('https://'):
return file_path
return path.abspath(file_path)
|
Read the configuration from the various places it may be read from.
:rtype: str
:raises: ValueError
def _read_config(self):
"""Read the configuration from the various places it may be read from.
:rtype: str
:raises: ValueError
"""
if not self._file_path:
return None
elif self._file_path.startswith('s3://'):
return self._read_s3_config()
elif self._file_path.startswith('http://') or \
self._file_path.startswith('https://'):
return self._read_remote_config()
elif not path.exists(self._file_path):
raise ValueError(
'Configuration file not found: {}'.format(self._file_path))
with open(self._file_path, 'r') as handle:
return handle.read()
|
Read a remote config via URL.
:rtype: str
:raises: ValueError
def _read_remote_config(self):
    """Read a remote config via URL.

    :rtype: str
    :raises: ValueError
    """
    # requests is an optional dependency; degrade to a clear error
    try:
        import requests
    except ImportError:
        requests = None
    if not requests:
        raise ValueError(
            'Remote config URL specified but requests not installed')
    response = requests.get(self._file_path)
    if not response.ok:
        raise ValueError(
            'Failed to retrieve remote config: {}'.format(
                response.status_code))
    return response.text
|
Read in the value of the configuration file in Amazon S3.
:rtype: str
:raises: ValueError
def _read_s3_config(self):
    """Read in the value of the configuration file in Amazon S3.

    :rtype: str
    :raises: ValueError
    """
    # boto3 is an optional dependency; degrade to a clear error
    try:
        import boto3
        import botocore.exceptions
    except ImportError:
        boto3, botocore = None, None
    if not boto3:
        raise ValueError(
            's3 URL specified for configuration but boto3 not installed')
    parsed = parse.urlparse(self._file_path)
    try:
        # S3_ENDPOINT allows pointing at a non-AWS/S3-compatible endpoint
        response = boto3.client(
            's3',
            endpoint_url=os.environ.get('S3_ENDPOINT'),
        ).get_object(Bucket=parsed.netloc, Key=parsed.path.lstrip('/'))
    except botocore.exceptions.ClientError as e:
        raise ValueError(
            'Failed to download configuration from S3: {}'.format(e))
    return response['Body'].read().decode('utf-8')
|
Update the internal configuration values, removing debug_only
handlers if debug is False. Returns True if the configuration has
changed from previous configuration values.
:param dict configuration: The logging configuration
:param bool debug: Toggles use of debug_only loggers
:rtype: bool
def update(self, configuration, debug=None):
    """Update the internal configuration values, removing debug_only
    handlers if debug is False. Returns True if the configuration has
    changed from previous configuration values.

    :param dict configuration: The logging configuration
    :param bool debug: Toggles use of debug_only loggers
    :rtype: bool
    """
    # Reconfigure when EITHER the configuration or the debug flag changed.
    # The previous `and` required both to change at once, so a
    # config-only (or debug-only) change was silently ignored.
    if self.config != dict(configuration) or debug != self.debug:
        self.config = dict(configuration)
        self.debug = debug
        self.configure()
        return True
    return False
|
Configure the Python stdlib logger
def configure(self):
    """Configure the Python stdlib logger"""
    if self.debug is not None and not self.debug:
        # debug explicitly disabled: drop debug-only handlers
        # (note: _remove_debug_handlers itself ends with a
        # _remove_debug_only call, so the second call below is redundant
        # but harmless — kept to preserve the original behavior)
        self._remove_debug_handlers()
        self._remove_debug_only()
    logging.config.dictConfig(self.config)
    try:
        logging.captureWarnings(True)
    except AttributeError:
        # very old Python versions lack logging.captureWarnings
        pass
|
Remove any handlers with an attribute of debug_only that is True and
remove the references to said handlers from any loggers that are
referencing them.
def _remove_debug_handlers(self):
"""Remove any handlers with an attribute of debug_only that is True and
remove the references to said handlers from any loggers that are
referencing them.
"""
remove = list()
for handler in self.config[self.HANDLERS]:
if self.config[self.HANDLERS][handler].get('debug_only'):
remove.append(handler)
for handler in remove:
del self.config[self.HANDLERS][handler]
for logger in self.config[self.LOGGERS].keys():
logger = self.config[self.LOGGERS][logger]
if handler in logger[self.HANDLERS]:
logger[self.HANDLERS].remove(handler)
self._remove_debug_only()
|
Iterate through each handler removing the invalid dictConfig key of
debug_only.
def _remove_debug_only(self):
    """Iterate through each handler removing the invalid dictConfig key of
    debug_only.
    """
    LOGGER.debug('Removing debug only from handlers')
    handlers = self.config[self.HANDLERS]
    for name in handlers:
        # debug_only is our own marker, not a valid dictConfig key
        handlers[name].pop(self.DEBUG_ONLY, None)
|
Convert this object to a dictionary with formatting appropriate for a PIF.
:returns: Dictionary with the content of this object formatted for a PIF.
def as_dictionary(self):
    """
    Convert this object to a dictionary with formatting appropriate for a PIF.

    :returns: Dictionary with the content of this object formatted for a PIF.
    """
    result = {}
    for name, value in self.__dict__.items():
        if value is not None:
            # keys go camelCase to match the PIF conventions
            result[to_camel_case(name)] = Serializable._convert_to_dictionary(value)
    return result
|
Convert obj to a dictionary with formatting appropriate for a PIF. This function attempts to treat obj as
a Pio object and otherwise returns obj.
:param obj: Object to convert to a dictionary.
:returns: Input object as a dictionary or the original object.
def _convert_to_dictionary(obj):
"""
Convert obj to a dictionary with formatting appropriate for a PIF. This function attempts to treat obj as
a Pio object and otherwise returns obj.
:param obj: Object to convert to a dictionary.
:returns: Input object as a dictionary or the original object.
"""
if isinstance(obj, list):
return [Serializable._convert_to_dictionary(i) for i in obj]
elif hasattr(obj, 'as_dictionary'):
return obj.as_dictionary()
else:
return obj
|
Helper function that returns an object, or if it is a dictionary, initializes it from class_.
:param class_: Class to use to instantiate object.
:param obj: Object to process.
:return: One or more objects.
def _get_object(class_, obj):
"""
Helper function that returns an object, or if it is a dictionary, initializes it from class_.
:param class_: Class to use to instantiate object.
:param obj: Object to process.
:return: One or more objects.
"""
if isinstance(obj, list):
return [Serializable._get_object(class_, i) for i in obj]
elif isinstance(obj, dict):
return class_(**keys_to_snake_case(obj))
else:
return obj
|
Calculates the damping factor for sound in dB/m
depending on temperature, humidity and sound frequency.
Source: http://www.sengpielaudio.com/LuftdaempfungFormel.htm
temp: Temperature in degrees celsius
relhum: Relative humidity as percentage, e.g. 50
freq: Sound frequency in herz
pres: Atmospheric pressure in kilopascal
def damping(temp, relhum, freq, pres=101325):
    """
    Calculates the damping factor for sound in dB/m
    depending on temperature, humidity and sound frequency.
    Source: http://www.sengpielaudio.com/LuftdaempfungFormel.htm

    temp: Temperature in degrees celsius
    relhum: Relative humidity as percentage, e.g. 50
    freq: Sound frequency in herz
    pres: Atmospheric pressure in pascal (default 101325 Pa = 1 atm;
        the code normalizes by 101325)
    """
    temp += 273.15  # convert to kelvin
    pres = pres / 101325.0  # convert to relative pressure
    # molar concentration of water vapour
    c_humid = 4.6151 - 6.8346 * pow((273.15 / temp), 1.261)
    hum = relhum * pow(10.0, c_humid) * pres
    tempr = temp / 293.15  # convert to relative air temp (re 20 deg C)
    # relaxation frequencies of oxygen and nitrogen
    frO = pres * (24.0 + 4.04e4 * hum * (0.02 + hum) / (0.391 + hum))
    frN = (pres * pow(tempr, -0.5) * (9.0 + 280.0 * hum * math.exp(-4.17 *
                                      (pow(tempr, (-1.0 / 3.0)) - 1.0))))
    # classical (viscous/thermal) absorption term
    classical = 1.84e-11 * (1.0 / pres) * math.sqrt(tempr)
    # molecular relaxation contributions
    relax_oxygen = 0.01275 * (math.exp(-2239.1 / temp) * 1.0 /
                              (frO + freq * freq / frO))
    relax_nitrogen = 0.1068 * (
        math.exp(-3352 / temp) * 1.0 /
        (frN + freq * freq / frN)
    )
    return 8.686 * freq * freq * (
        classical + pow(tempr, -2.5) * (relax_oxygen + relax_nitrogen))
|
Calculates the total sound pressure level based on multiple source levels
def total_level(source_levels):
    """
    Calculates the total sound pressure level based on multiple source levels

    :param source_levels: iterable of levels in dB; None and 0 entries
        are ignored
    :return: combined level in dB; 0.0 when nothing contributes
        (previously an empty sum raised a math domain error on log10(0))
    """
    sums = 0.0
    for level in source_levels:
        if level is None:
            continue
        if level == 0:
            continue
        sums += pow(10.0, float(level) / 10.0)
    if sums == 0.0:
        # nothing to add up; mirror leq3's zero handling
        return 0.0
    return 10.0 * math.log10(sums)
|
Calculates the A-rated total sound pressure level
based on octave band frequencies
def total_rated_level(octave_frequencies):
    """
    Calculates the A-rated total sound pressure level
    based on octave band frequencies

    :param octave_frequencies: mapping of octave band -> level in dB;
        missing, None and 0 entries are skipped
    :return: A-weighted total level in dB; 0.0 when nothing contributes
        (previously an empty sum raised a math domain error on log10(0))
    """
    sums = 0.0
    for band in OCTAVE_BANDS.keys():
        if band not in octave_frequencies:
            continue
        if octave_frequencies[band] is None:
            continue
        if octave_frequencies[band] == 0:
            continue
        # apply the band's A-weighting (second tuple element) before summing
        sums += pow(10.0, ((float(octave_frequencies[band]) + OCTAVE_BANDS[band][1]) / 10.0))
    if sums == 0.0:
        return 0.0
    return 10.0 * math.log10(sums)
|
Calculates the energy-equivalent (Leq3) value
given a regular measurement interval.
def leq3(levels):
    """
    Calculates the energy-equivalent (Leq3) value
    given a regular measurement interval.
    """
    # all-zero (or empty) input yields 0.0 instead of a log10 domain error
    if sum(levels) == 0.0:
        return 0.0
    count = float(len(levels))
    acc = 0.0
    for level in levels:
        if level == 0:
            continue
        acc += pow(10.0, float(level) / 10.0)
    # clamp at zero: Leq below 0 dB is reported as silence
    return max(0.0, 10.0 * math.log10((1.0 / count) * acc))
|
Calculates the sound pressure level
in dependence of a distance
where a perfect ball-shaped source and spread is assumed.
reference_level: Sound pressure level in reference distance in dB
distance: Distance to calculate sound pressure level for, in meters
reference_distance: reference distance in meters (defaults to 1)
def distant_level(reference_level, distance, reference_distance=1.0):
    """
    Calculates the sound pressure level at a given distance, assuming a
    perfect ball-shaped source and spread.

    reference_level: Sound pressure level in reference distance in dB
    distance: Distance to calculate sound pressure level for, in meters
    reference_distance: reference distance in meters (defaults to 1)
    """
    ratio = float(reference_distance) / float(distance)
    # 20*log10(ratio): spherical spreading, ~6 dB per doubling of distance
    return float(reference_level) + 20.0 * (math.log(ratio) / math.log(10))
|
Calculates the damped, A-rated total sound pressure level
in a given distance, temperature and relative humidity
from octave frequency sound pressure levels in a reference distance
def distant_total_damped_rated_level(
        octave_frequencies,
        distance,
        temp,
        relhum,
        reference_distance=1.0):
    """
    Calculates the damped, A-rated total sound pressure level
    in a given distance, temperature and relative humidity
    from octave frequency sound pressure levels in a reference distance
    """
    # span over which atmospheric absorption acts
    travel = distance - reference_distance
    sums = 0.0
    for band in OCTAVE_BANDS.keys():
        if band not in octave_frequencies:
            continue
        if octave_frequencies[band] is None:
            continue
        # per-band level after spherical spreading
        band_level = distant_level(
            reference_level=float(octave_frequencies[band]),
            distance=distance,
            reference_distance=reference_distance
        )
        # atmospheric absorption at the band's center frequency
        per_meter = damping(
            temp=temp,
            relhum=relhum,
            freq=OCTAVE_BANDS[band][0])
        band_level = band_level - (travel * per_meter)
        # apply the band's A-weighting
        band_level += OCTAVE_BANDS[band][1]
        sums += pow(10.0, (band_level / 10.0))
    return 10.0 * math.log10(sums)
|
ASRS [Ra,] Ra, Rc
ASRS [Ra,] Rb, #imm5_counting
Arithmetic shift right Rb by Rc or imm5_counting and store the result in Ra
imm5 counting is [1, 32]
In the register shift, the first two operands must be the same register
Ra, Rb, and Rc must be low registers
If Ra is omitted, then it is assumed to be Rb
def ASRS(self, params):
    """
    ASRS [Ra,] Ra, Rc
    ASRS [Ra,] Rb, #imm5_counting

    Arithmetic shift right Rb by Rc or imm5_counting and store the result in Ra
    imm5 counting is [1, 32]

    In the register shift, the first two operands must be the same register
    Ra, Rb, and Rc must be low registers
    If Ra is omitted, then it is assumed to be Rb
    """
    # This instruction allows for an optional destination register
    # If it is omitted, then it is assumed to be Rb
    # As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
    try:
        Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
    except iarm.exceptions.ParsingError:
        Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
        Ra = Rb

    if self.is_register(Rc):
        # ASRS Ra, Ra, Rb
        self.check_arguments(low_registers=(Ra, Rc))
        self.match_first_two_parameters(Ra, Rb)

        def ASRS_func():
            shift = self.register[Rc]
            # C flag receives the last bit shifted out (cleared for shift 0)
            if (shift > 0) and (self.register[Rb] & (1 << (shift - 1))):
                self.set_APSR_flag_to_value('C', 1)
            else:
                self.set_APSR_flag_to_value('C', 0)
            if shift > 0:
                if self.register[Ra] & (1 << (self._bit_width - 1)):
                    # negative value: shift in copies of the sign bit.
                    # Mask is built arithmetically — the previous
                    # int('1' * shift, 2) crashed for shift == 0.
                    self.register[Ra] = (self.register[Ra] >> shift) | (
                        ((1 << shift) - 1) << (self._bit_width - shift))
                else:
                    self.register[Ra] = self.register[Ra] >> shift
            self.set_NZ_flags(self.register[Ra])
    else:
        # ASRS Ra, Rb, #imm5_counting
        self.check_arguments(low_registers=(Ra, Rb), imm5_counting=(Rc,))
        shift_amount = self.check_immediate(Rc)

        def ASRS_func():
            # C flag receives the last bit shifted out of the source Rb
            if self.register[Rb] & (1 << (shift_amount - 1)):
                self.set_APSR_flag_to_value('C', 1)
            else:
                self.set_APSR_flag_to_value('C', 0)
            # Sign test and shift must use the SOURCE register Rb; the old
            # code tested and shifted the destination Ra in the negative
            # path, which is wrong whenever Ra != Rb.
            if self.register[Rb] & (1 << (self._bit_width - 1)):
                self.register[Ra] = (self.register[Rb] >> shift_amount) | (
                    ((1 << shift_amount) - 1) << (self._bit_width - shift_amount))
            else:
                self.register[Ra] = self.register[Rb] >> shift_amount
            self.set_NZ_flags(self.register[Ra])
    return ASRS_func
|
LSLS [Ra,] Ra, Rc
LSLS [Ra,] Rb, #imm5
Logical shift left Rb by Rc or imm5 and store the result in Ra
imm5 is [0, 31]
In the register shift, the first two operands must be the same register
Ra, Rb, and Rc must be low registers
If Ra is omitted, then it is assumed to be Rb
def LSLS(self, params):
    """
    LSLS [Ra,] Ra, Rc
    LSLS [Ra,] Rb, #imm5

    Logical shift left Rb by Rc or imm5 and store the result in Ra
    imm5 is [0, 31]

    In the register shift, the first two operands must be the same register
    Ra, Rb, and Rc must be low registers
    If Ra is omitted, then it is assumed to be Rb
    """
    # This instruction allows for an optional destination register
    # If it is omitted, then it is assumed to be Rb
    # As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
    try:
        Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
    except iarm.exceptions.ParsingError:
        Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
        Ra = Rb

    if self.is_register(Rc):
        # LSLS Ra, Ra, Rb
        self.check_arguments(low_registers=(Ra, Rc))
        self.match_first_two_parameters(Ra, Rb)

        def LSLS_func():
            shift = self.register[Rc]
            # C flag gets the last bit shifted past the top of the word
            carry = (shift < self._bit_width) and \
                bool(self.register[Ra] & (1 << (self._bit_width - shift)))
            self.set_APSR_flag_to_value('C', 1 if carry else 0)
            self.register[Ra] = self.register[Ra] << shift
            self.set_NZ_flags(self.register[Ra])
    else:
        # LSLS Ra, Rb, #imm5
        self.check_arguments(low_registers=(Ra, Rb), imm5=(Rc,))
        shift_amount = self.check_immediate(Rc)

        def LSLS_func():
            # C flag gets the last bit shifted past the top of the word
            carry = (shift_amount < self._bit_width) and \
                bool(self.register[Rb] & (1 << (self._bit_width - shift_amount)))
            self.set_APSR_flag_to_value('C', 1 if carry else 0)
            self.register[Ra] = self.register[Rb] << shift_amount
            self.set_NZ_flags(self.register[Ra])
    return LSLS_func
|
LSRS [Ra,] Ra, Rc
LSRS [Ra,] Rb, #imm5_counting
Logical shift right Rb by Rc or imm5 and store the result in Ra
imm5 counting is [1, 32]
In the register shift, the first two operands must be the same register
Ra, Rb, and Rc must be low registers
If Ra is omitted, then it is assumed to be Rb
def LSRS(self, params):
    """
    LSRS [Ra,] Ra, Rc
    LSRS [Ra,] Rb, #imm5_counting

    Logical shift right Rb by Rc or imm5 and store the result in Ra
    imm5 counting is [1, 32]

    In the register shift, the first two operands must be the same register
    Ra, Rb, and Rc must be low registers
    If Ra is omitted, then it is assumed to be Rb
    """
    # This instruction allows for an optional destination register
    # If it is omitted, then it is assumed to be Rb
    # As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
    try:
        Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
    except iarm.exceptions.ParsingError:
        Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
        Ra = Rb

    if self.is_register(Rc):
        # LSRS Ra, Ra, Rb
        self.check_arguments(low_registers=(Ra, Rc))
        self.match_first_two_parameters(Ra, Rb)

        def LSRS_func():
            shift = self.register[Rc]
            # C flag gets the last bit shifted out (cleared for shift 0)
            carry = (shift > 0) and bool(self.register[Rb] & (1 << (shift - 1)))
            self.set_APSR_flag_to_value('C', 1 if carry else 0)
            self.register[Ra] = self.register[Ra] >> shift
            self.set_NZ_flags(self.register[Ra])
    else:
        # LSRS Ra, Rb, #imm5_counting
        self.check_arguments(low_registers=(Ra, Rb), imm5_counting=(Rc,))
        shift_amount = self.check_immediate(Rc)

        def LSRS_func():
            # C flag gets the last bit shifted out of Rb
            carry = bool(self.register[Rb] & (1 << (shift_amount - 1)))
            self.set_APSR_flag_to_value('C', 1 if carry else 0)
            self.register[Ra] = self.register[Rb] >> shift_amount
            self.set_NZ_flags(self.register[Ra])
    return LSRS_func
|
RORS [Ra,] Ra, Rc
Rotate shift right Rb by Rc or imm5 and store the result in Ra
The first two operands must be the same register
Ra and Rc must be low registers
The first register is optional
def RORS(self, params):
    """
    RORS [Ra,] Ra, Rc

    Rotate shift right Rb by Rc or imm5 and store the result in Ra

    The first two operands must be the same register
    Ra and Rc must be low registers
    The first register is optional

    NOTE: not implemented yet — parsing succeeds, then
    iarm.exceptions.NotImplementedError is raised unconditionally.
    """
    # This instruction allows for an optional destination register
    # If it is omitted, then it is assumed to be Rb
    # As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
    try:
        Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
    except iarm.exceptions.ParsingError:
        Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
        Ra = Rb
    # TODO implement this function
    # TODO figure out the last shifted bit
    # TODO figure out how to wrap bits around
    raise iarm.exceptions.NotImplementedError
    # NOTE(review): everything below is unreachable scaffolding kept for
    # the eventual implementation
    # RORS Ra, Ra, Rb
    self.check_arguments(low_registers=(Ra, Rc))
    self.match_first_two_parameters(Ra, Rb)
    def RORS_func():
        raise NotImplementedError
    return RORS_func
|
解析话题列表
:internal
:param xml: 页面XML
:param tds: 每列的含义,可以是title, created, comment, group, updated, author, time, rec
:param selector: 表在页面中的位置
:return:
def _parse_topic_table(self, xml, tds='title,created,comment,group', selector='//table[@class="olt"]//tr'):
    """
    Parse a table of topics from a page.

    :internal
    :param xml: page XML
    :param tds: comma-separated meaning of each column; any of
        title, created, comment, group, updated, author, time, rec
    :param selector: XPath locating the table rows within the page
    :return: list of dicts, one per successfully parsed row
    """
    xml_results = xml.xpath(selector)
    results = []
    tds = tds.split(',')
    for item in xml_results:
        try:
            result = {}
            index = 0
            for td in tds:
                index += 1  # XPath position() is 1-based
                if td == 'title':
                    xml_title = item.xpath('.//td[position()=%s]/a' % index)[0]
                    url = xml_title.get('href')
                    tid = int(slash_right(url))
                    title = xml_title.text
                    result.update({'id': tid, 'url': url, 'title': title})
                elif td == 'created':
                    # the created date may sit inside a link or a plain cell
                    xml_created = item.xpath('.//td[position()=%s]/a' % index) \
                        or item.xpath('.//td[position()=%s]' % index)
                    created_at = xml_created[0].get('title')
                    result['created_at'] = created_at
                elif td == 'comment':
                    xml_comment = item.xpath('.//td[position()=%s]/span' % index) \
                        or item.xpath('.//td[position()=%s]' % index)
                    comment_count = int(re.match(r'\d+', xml_comment[0].text).group())
                    result['comment_count'] = comment_count
                elif td == 'group':
                    xml_group = item.xpath('.//td[position()=%s]/a' % index)[0]
                    group_url = xml_group.get('href')
                    group_alias = slash_right(group_url)
                    group_name = xml_group.text
                    result.update({'group_alias': group_alias, 'group_url': group_url, 'group_name': group_name})
                elif td == 'author':
                    xml_author = item.xpath('.//td[position()=%s]/a' % index)[0]
                    author_url = xml_author.get('href')
                    author_alias = slash_right(author_url)
                    author_nickname = xml_author.text
                    result.update({
                        'author_url': author_url,
                        'author_alias': author_alias,
                        'author_nickname': author_nickname,
                    })
                elif td == 'updated':
                    result['updated_at'] = item.xpath('.//td[position()=%s]/text()' % index)[0]
                elif td == 'time':
                    result['time'] = item.xpath('.//td[position()=%s]/text()' % index)[0]
                elif td == 'rec':
                    # the remove-recommendation link lives in the previous cell
                    xml_rec = item.xpath('.//td[position()=%s]//a[@class="lnk-remove"]/@href' % (index - 1))[0]
                    result['rec_id'] = re.search(r'rec_id=(\d+)', xml_rec).groups()[0]
            results.append(result)
        except Exception as e:
            # Fixed: siblings log via `self.api.logger`; the previous
            # `self.api.api.logger` raised AttributeError inside the
            # handler and masked the original parsing error.
            self.api.logger.exception('parse topic table exception: %s' % e)
    return results
|
搜索小组
:param keyword: 搜索的关键字
:param start: 翻页
:return: 含总数的列表
def search_groups(self, keyword, start=0):
    """
    Search groups.

    :param keyword: search keyword
    :param start: pagination offset
    :return: list result including the total count
    """
    xml = self.api.xml(API_GROUP_SEARCH_GROUPS % (start, keyword))
    xml_results = xml.xpath('//div[@class="groups"]/div[@class="result"]')
    results = []
    for item in xml_results:
        try:
            url = item.xpath('.//h3/a/@href')[0]
            info = item.xpath('.//div[@class="content"]/div[@class="info"]/text()')[0].strip(' ')
            onclick = item.xpath('.//h3/a/@onclick')[0]
            meta = {
                'icon': item.xpath('.//img/@src')[0],
                # the numeric group id is embedded in the onclick attribute
                'id': re.search(r'sid[^\d]+(\d+)', onclick).groups()[0],
                'url': url,
                # alias is the last path segment of the group URL
                'alias': url.rstrip('/').rsplit('/', 1)[1],
                'name': item.xpath('.//h3/a/text()')[0],
                # leading digits of the info text are the member count
                'user_count': int(re.match(r'\d+', info).group()),
                'user_alias': re.search(r'个(.+)\s*在此', info).groups()[0],
            }
            results.append(meta)
        except Exception as e:
            # skip rows that fail to parse, but keep the rest
            self.api.logger.exception('parse search groups result error: %s' % e)
    return build_list_result(results, xml)
|
已加入的小组列表
:param user_alias: 用户名,默认为当前用户名
:return: 单页列表
def list_joined_groups(self, user_alias=None):
    """
    List groups a user has joined.

    :param user_alias: user alias, defaults to the current user
    :return: single-page list result
    """
    xml = self.api.xml(API_GROUP_LIST_JOINED_GROUPS % (user_alias or self.api.user_alias))
    xml_results = xml.xpath('//div[@class="group-list group-cards"]/ul/li')
    results = []
    for item in xml_results:
        try:
            icon = item.xpath('.//img/@src')[0]
            link = item.xpath('.//div[@class="title"]/a')[0]
            url = link.get('href')
            name = link.text
            # alias is the last path segment of the group URL
            alias = url.rstrip('/').rsplit('/', 1)[1]
            # member count is rendered inside brackets; strip them
            user_count = int(item.xpath('.//span[@class="num"]/text()')[0][1:-1])
            results.append({
                'icon': icon,
                'alias': alias,
                'url': url,
                'name': name,
                'user_count': user_count,
            })
        except Exception as e:
            # skip rows that fail to parse, but keep the rest
            self.api.logger.exception('parse joined groups exception: %s' % e)
    return build_list_result(results, xml)
|
加入小组
:param group_alias: 小组ID
:param message: 如果要验证,留言信息
:return: 枚举
- joined: 加入成功
- waiting: 等待审核
- initial: 加入失败
def join_group(self, group_alias, message=None):
    """
    Join a group.

    :param group_alias: group alias
    :param message: application note, for groups that require approval
    :return: one of
        - joined: joined successfully
        - waiting: pending moderator approval
        - initial: join failed
    """
    xml = self.api.xml(API_GROUP_GROUP_HOME % group_alias, params={
        'action': 'join',
        'ck': self.api.ck(),
    })
    # the group sidebar text tells the current membership state
    misc = xml.xpath('//div[@class="group-misc"]')[0]
    intro = misc.xpath('string(.)') or ''
    # '退出小组' ("quit group") => we are already a member
    if intro.find('退出小组') > -1:
        return 'joined'
    # '你已经申请加入小组' ("application submitted") => pending approval
    elif intro.find('你已经申请加入小组') > -1:
        return 'waiting'
    # '申请加入小组' ("apply to join") => approval required, submit request
    elif intro.find('申请加入小组') > -1:
        res = self.api.xml(API_GROUP_GROUP_HOME % group_alias, 'post', data={
            'ck': self.api.ck(),
            'action': 'request_join',
            'message': message,
            'send': '发送',
        })
        # re-read the sidebar to confirm the application went through
        misc = res.xpath('//div[@class="group-misc"]')[0]
        intro = misc.xpath('string(.)') or ''
        if intro.find('你已经申请加入小组') > -1:
            return 'waiting'
        else:
            return 'initial'
    else:
        return 'initial'
|
退出小组
:param group_alias: 小组ID
:return:
def leave_group(self, group_alias):
    """
    Quit a group.

    :param group_alias: group alias
    :return: response of the request
    """
    params = {
        'action': 'quit',
        'ck': self.api.ck(),
    }
    return self.api.req(API_GROUP_GROUP_HOME % group_alias, params=params)
|
搜索话题
:param keyword: 关键字
:param sort: 排序方式 relevance/newest
:param start: 翻页
:return: 带总数的列表
def search_topics(self, keyword, sort='relevance', start=0):
    """
    Search topics.

    :param keyword: search keyword
    :param sort: sort order, ``relevance`` or ``newest``
    :param start: pagination offset
    :return: list result including the total count
    """
    doc = self.api.xml(API_GROUP_SEARCH_TOPICS % (start, sort, keyword))
    topics = self._parse_topic_table(doc)
    return build_list_result(topics, doc)
|
小组内话题列表
:param group_alias: 小组ID
:param _type: 类型 默认最新,hot:最热
:param start: 翻页
:return: 带下一页的列表
def list_topics(self, group_alias, _type='', start=0):
    """
    List topics inside a group.

    :param group_alias: group alias
    :param _type: empty for latest (default), ``hot`` for hottest
    :param start: pagination offset
    :return: list result with next-page info
    """
    doc = self.api.xml(API_GROUP_LIST_GROUP_TOPICS % group_alias, params={
        'start': start,
        'type': _type,
    })
    topics = self._parse_topic_table(doc, 'title,author,comment,updated')
    return build_list_result(topics, doc)
|
已加入的所有小组的话题列表
:param start: 翻页
:return: 带下一页的列表
def list_joined_topics(self, start=0):
    """
    List topics from all joined groups.

    :param start: pagination offset
    :return: list result with next-page info
    """
    doc = self.api.xml(API_GROUP_HOME, params={'start': start})
    topics = self._parse_topic_table(doc, 'title,comment,created,group')
    return build_list_result(topics, doc)
|
发表的话题
:param start: 翻页
:return: 带下一页的列表
def list_user_topics(self, start=0):
    """
    List topics published by the current user.

    :param start: pagination offset
    :return: list result with next-page info
    """
    url = API_GROUP_LIST_USER_PUBLISHED_TOPICS % self.api.user_alias
    doc = self.api.xml(url, params={'start': start})
    topics = self._parse_topic_table(doc, 'title,comment,created,group')
    return build_list_result(topics, doc)
|
回复过的话题列表
:param start: 翻页
:return: 带下一页的列表
def list_commented_topics(self, start=0):
    """
    List topics the current user has commented on.

    :param start: pagination offset
    :return: list result with next-page info
    """
    url = API_GROUP_LIST_USER_COMMENTED_TOPICS % self.api.user_alias
    doc = self.api.xml(url, params={'start': start})
    topics = self._parse_topic_table(doc, 'title,comment,time,group')
    return build_list_result(topics, doc)
|
喜欢过的话题
:param user_alias: 指定用户,默认当前
:param start: 翻页
:return: 带下一页的列表
def list_liked_topics(self, user_alias=None, start=0):
    """
    List topics a user has liked.

    :param user_alias: user alias, defaults to the current user
    :param start: pagination offset
    :return: list result with next-page info
    """
    alias = user_alias or self.api.user_alias
    doc = self.api.xml(API_GROUP_LIST_USER_LIKED_TOPICS % alias, params={'start': start})
    topics = self._parse_topic_table(doc, 'title,comment,time,group')
    return build_list_result(topics, doc)
|
推荐的话题列表
:param user_alias: 指定用户,默认当前
:param start: 翻页
:return: 带下一页的列表
def list_reced_topics(self, user_alias=None, start=0):
    """
    List topics a user has recommended.

    :param user_alias: user alias, defaults to the current user
    :param start: pagination offset
    :return: list result with next-page info
    """
    alias = user_alias or self.api.user_alias
    doc = self.api.xml(API_GROUP_LIST_USER_RECED_TOPICS % alias, params={'start': start})
    topics = self._parse_topic_table(doc, 'title,comment,time,group,rec')
    return build_list_result(topics, doc)
|
创建话题(小心验证码~)
:param group_alias: 小组ID
:param title: 标题
:param content: 内容
:return: bool
def add_topic(self, group_alias, title, content):
    """
    Create a topic in a group (beware of captchas).

    :param group_alias: group alias
    :param title: topic title
    :param content: topic body
    :return: bool — True when posting succeeded (i.e. the response did
        not redirect back to the posting form)
    """
    form = {
        'ck': self.api.ck(),
        'rev_title': title,
        'rev_text': content,
        'rev_submit': '好了,发言',
    }
    res = self.api.req(API_GROUP_ADD_TOPIC % group_alias, 'post', data=form)
    return not res.url.startswith(API_GROUP_ADD_TOPIC % group_alias)
|
删除话题(需要先删除所有评论,使用默认参数)
:param topic_id: 话题ID
:return: None
def remove_topic(self, topic_id):
    """
    Remove a topic. All of its comments must be removed first, which is
    done here page by page with the default removal parameters.

    :param topic_id: topic id
    :return: response of the final removal request
    """
    start = 0
    # walk every comment page; next_start is None on the last page
    while start is not None:
        page = self.list_comments(topic_id, start)
        for comment in page['results']:
            self.remove_comment(topic_id, comment['id'])
        start = page['next_start']
    return self.api.req(API_GROUP_REMOVE_TOPIC % topic_id, params={'ck': self.api.ck()})
|
更新话题
:param topic_id: 话题ID
:param title: 标题
:param content: 内容
:return: bool
def update_topic(self, topic_id, title, content):
    """
    Update an existing topic.

    :param topic_id: topic ID
    :param title: new title
    :param content: new body
    :return: bool - True when the update appears to have succeeded
    """
    edit_url = API_GROUP_UPDATE_TOPIC % topic_id
    payload = {
        'ck': self.api.ck(),
        'rev_title': title,
        'rev_text': content,
        'rev_submit': '好了,改吧',
    }
    response = self.api.req(edit_url, 'post', data=payload)
    # on success douban redirects away from the edit URL
    return not response.url.startswith(edit_url)
|
回复列表
:param topic_id: 话题ID
:param start: 翻页
:return: 带下一页的列表
def list_comments(self, topic_id, start=0):
    """
    List replies under a topic.

    :param topic_id: topic ID
    :param start: pagination offset
    :return: result list including next-page info
    """
    page = self.api.xml(API_GROUP_GET_TOPIC % topic_id, params={'start': start})
    parsed = []
    for node in page.xpath('//ul[@id="comments"]/li'):
        try:
            author_url = node.xpath('.//div[@class="user-face"]/a/@href')[0]
            raw_html = etree.tostring(node.xpath('.//div[@class="reply-doc content"]/p')[0]).decode('utf8').strip()
            parsed.append({
                'id': node.get('id'),
                'author_avatar': node.xpath('.//img/@src')[0],
                'author_url': author_url,
                'author_alias': slash_right(author_url),
                'author_signature': node.xpath('.//h4/text()')[1].strip(),
                'author_nickname': node.xpath('.//h4/a/text()')[0].strip(),
                'created_at': node.xpath('.//h4/span/text()')[0].strip(),
                'content': unescape(raw_html),
            })
        except Exception as e:
            # skip malformed comment nodes but keep the traceback for debugging
            self.api.logger.exception('parse comment exception: %s' % e)
    return build_list_result(parsed, page)
|
添加评论
:param topic_id: 话题ID
:param content: 内容
:param reply_id: 回复ID
:return: None
def add_comment(self, topic_id, content, reply_id=None):
    """
    Post a comment on a topic.

    :param topic_id: topic ID
    :param content: comment body
    :param reply_id: ID of the comment being replied to, if any
    :return: the HTTP response
    """
    payload = {
        'ck': self.api.ck(),
        'ref_cid': reply_id,
        'rv_comment': content,
        'start': 0,
        'submit_btn': '加上去',
    }
    return self.api.req(API_GROUP_ADD_COMMENT % topic_id, 'post', data=payload)
|
删除评论(自己发的话题所有的都可以删除,否则只能删自己发的)
:param topic_id: 话题ID
:param comment_id: 评论ID
:param reason: 原因 0/1/2 (内容不符/反动/其它)
:param other: 其它原因的具体(2)
:return: None
def remove_comment(self, topic_id, comment_id, reason='0', other=None):
    """
    Delete a comment. On your own topic every comment can be removed;
    elsewhere only your own comments.

    :param topic_id: topic ID
    :param comment_id: comment ID
    :param reason: reason code 0/1/2 (off-topic / abusive / other)
    :param other: free-text detail when reason is 2
    :return: the HTTP response
    """
    query = {'cid': comment_id}
    form = {'cid': comment_id, 'ck': self.api.ck(), 'reason': reason, 'other': other, 'submit': '确定'}
    response = self.api.req(API_GROUP_REMOVE_COMMENT % topic_id, 'post', query, form)
    # the page mentions douban_admin when the admin endpoint must be used
    if 'douban_admin' in response.text:
        response = self.api.req(API_GROUP_ADMIN_REMOVE_COMMENT % topic_id, 'post', query, form)
    self.api.logger.debug('remove comment final url is <%s>' % response.url)
    return response
|
列出用户在话题下的所有回复
:param topic_id: 话题ID
:param user_alias: 用户ID,默认当前
:return: 纯列表
def list_user_comments(self, topic_id, user_alias=None):
    """
    List all of a user's replies under a topic.

    :param topic_id: topic ID
    :param user_alias: target user alias, defaults to the current user
    :return: plain list of matching comments
    """
    alias = user_alias or self.api.user_alias
    collected = []
    start = 0
    # walk every page; next_start is None on the last page
    while start is not None:
        page = self.list_comments(topic_id, start)
        for item in page['results']:
            if item['author_alias'] == alias:
                collected.append(item)
        start = page['next_start']
    return collected
|
删除回复的话题(删除所有自己发布的评论)
:param topic_id: 话题ID
:return: None
def remove_commented_topic(self, topic_id):
    """
    Withdraw from a commented topic by deleting every comment the
    current user posted under it.

    :param topic_id: topic ID
    :return: list of delete responses
    """
    responses = []
    for comment in self.list_user_comments(topic_id):
        responses.append(self.remove_comment(topic_id, comment['id']))
    return responses
|
Creates a new :class:`Shell` with a function as callback. This
works otherwise the same as :func:`command` just that the `cls`
parameter is set to :class:`Shell`.
def shell(name=None, **attrs):
    """Create a new :class:`Shell` with a function as callback.

    Behaves exactly like :func:`command` except that the `cls`
    parameter defaults to :class:`Shell`.
    """
    if 'cls' not in attrs:
        attrs['cls'] = Shell
    return click.command(name, **attrs)
|
This is used by gcdt plugins to get a logger with the right level.
def getLogger(name):
    """Return a logger for gcdt plugins, preset to the configured gcdt level."""
    plugin_logger = logging.getLogger(name)
    # the level might be adjusted at runtime via the '-v' option
    plugin_logger.setLevel(logging_config['loggers']['gcdt']['level'])
    return plugin_logger
|
Discovers methods in the XML-RPC API and creates attributes for them
on this object. Enables stuff like "magento.cart.create(...)" to work
without having to define Python methods for each XML-RPC equivalent.
def _discover(self):
    """Query the XML-RPC API for its resources and expose each as an
    attribute on this object, so calls like ``magento.cart.create(...)``
    work without hand-written Python wrappers.
    """
    self._resources = {}
    for entry in self._client.resources(self._session_id):
        resource_name = entry['name']
        self._resources[resource_name] = MagentoResource(
            self._client, self._session_id, resource_name,
            entry['title'], entry['methods'])
|
If the session expired, logs back in.
def keep_session_alive(self):
    """Log back in if the session has expired."""
    try:
        self.resources()
    except xmlrpclib.Fault as fault:
        # fault code 5 signals an expired session; anything else is a real error
        if fault.faultCode != 5:
            raise
        self.login()
|
Prints discovered resources and their associated methods. Nice when
noodling in the terminal to wrap your head around Magento's insanity.
def help(self):
    """Print the discovered resources and their methods — handy for
    exploring Magento's API interactively from a terminal.
    """
    print('Resources:')
    print('')
    for resource_name in sorted(self._resources):
        method_names = sorted(self._resources[resource_name]._methods.keys())
        print('{}: {}'.format(bold(resource_name), ', '.join(method_names)))
|
Import the controller and run it.
This mimics the processing done by :func:`helper.start`
when a controller is run in the foreground. A new instance
of ``self.controller`` is created and run until a keyboard
interrupt occurs or the controller stops on its own accord.
def run(self):
    """Import the controller class and run it in the foreground.

    Mimics the processing done by :func:`helper.start`: a fresh
    instance of ``self.controller`` is created and runs until a
    keyboard interrupt occurs or the controller stops on its own.
    """
    segments = self.controller.split('.')
    root_module = __import__('.'.join(segments[:-1]))
    # walk attribute path from the root module down to the class
    controller_class = reduce(getattr, segments[1:], root_module)
    cmd_line = ['-f']
    if self.configuration is not None:
        cmd_line += ['-c', self.configuration]
    args = parser.get().parse_args(cmd_line)
    instance = controller_class(args, platform)
    try:
        instance.start()
    except KeyboardInterrupt:
        instance.stop()
|
Scans the input path and automatically determines the optimal
piece size based on ~1500 pieces (up to MAX_PIECE_SIZE) along
with other basic info, including total size (in bytes), the
total number of files, piece size (in bytes), and resulting
number of pieces. If ``piece_size`` has already been set, the
custom value will be used instead.
:return: ``(total_size, total_files, piece_size, num_pieces)``
def get_info(self):
    """
    Scan the input path and automatically determine the optimal
    piece size based on ~1500 pieces (clamped to
    MIN_PIECE_SIZE..MAX_PIECE_SIZE) along with other basic info:
    total size in bytes, total file count, piece size in bytes, and
    the resulting number of pieces. If ``piece_size`` has already
    been set, that custom value is used instead.
    :return: ``(total_size, total_files, piece_size, num_pieces)``
    """
    if os.path.isfile(self.path):
        total_size = os.path.getsize(self.path)
        total_files = 1
    elif os.path.exists(self.path):
        total_size = 0
        total_files = 0
        for dirpath, _dirnames, filenames in os.walk(self.path):
            for name in filenames:
                # honor exclusion patterns
                if any(fnmatch.fnmatch(name, pattern) for pattern in self.exclude):
                    continue
                full_path = os.path.normpath(os.path.join(dirpath, name))
                size = os.path.getsize(full_path)
                # only count non-empty, non-hidden files
                if size and not is_hidden_file(full_path):
                    total_size += size
                    total_files += 1
    else:
        raise exceptions.InvalidInputException
    if not (total_files and total_size):
        raise exceptions.EmptyInputException
    if self.piece_size:
        ps = self.piece_size
    else:
        # power of two giving roughly 1500 pieces
        ps = 1 << max(0, math.ceil(math.log(total_size / 1500, 2)))
    # clamp to the supported piece-size range
    ps = min(max(ps, MIN_PIECE_SIZE), MAX_PIECE_SIZE)
    return (total_size, total_files, ps, math.ceil(total_size / ps))
|
Computes and stores piece data. Returns ``True`` on success, ``False``
otherwise.
:param callback: progress/cancellation callable with method
signature ``(filename, pieces_completed, pieces_total)``.
Useful for reporting progress if dottorrent is used in a
GUI/threaded context, and if torrent generation needs to be cancelled.
The callable's return value should evaluate to ``True`` to trigger
cancellation.
def generate(self, callback=None):
    """
    Computes and stores piece data. Returns ``True`` on success, ``False``
    otherwise.
    :param callback: progress/cancellation callable with method
    signature ``(filename, pieces_completed, pieces_total)``.
    Useful for reporting progress if dottorrent is used in a
    GUI/threaded context, and if torrent generation needs to be cancelled.
    The callable's return value should evaluate to ``True`` to trigger
    cancellation.
    """
    # Collect (path, size, metadata-dict) entries for every included file.
    files = []
    single_file = os.path.isfile(self.path)
    if single_file:
        files.append((self.path, os.path.getsize(self.path), {}))
    elif os.path.exists(self.path):
        for x in os.walk(self.path):
            for fn in x[2]:
                # skip files matching any exclusion pattern
                if any(fnmatch.fnmatch(fn, ext) for ext in self.exclude):
                    continue
                fpath = os.path.normpath(os.path.join(x[0], fn))
                fsize = os.path.getsize(fpath)
                # only keep non-empty, non-hidden files
                if fsize and not is_hidden_file(fpath):
                    files.append((fpath, fsize, {}))
    else:
        raise exceptions.InvalidInputException
    total_size = sum([x[1] for x in files])
    if not (len(files) and total_size):
        raise exceptions.EmptyInputException
    # set piece size if not already set
    if self.piece_size is None:
        self.piece_size = self.get_info()[2]
    if files:
        # Hash the concatenated file data in piece_size chunks; `buf`
        # carries data that spills across file boundaries.
        self._pieces = bytearray()
        i = 0
        num_pieces = math.ceil(total_size / self.piece_size)
        pc = 0  # pieces completed so far
        buf = bytearray()
        while i < len(files):
            fe = files[i]
            f = open(fe[0], 'rb')
            if self.include_md5:
                md5_hasher = md5()
            else:
                md5_hasher = None
            for chunk in iter(lambda: f.read(self.piece_size), b''):
                buf += chunk
                # emit a piece when the buffer is full, or flush eagerly
                # while reading the last file
                if len(buf) >= self.piece_size \
                        or i == len(files)-1:
                    piece = buf[:self.piece_size]
                    self._pieces += sha1(piece).digest()
                    del buf[:self.piece_size]
                    pc += 1
                    if callback:
                        cancel = callback(fe[0], pc, num_pieces)
                        if cancel:
                            f.close()
                            return False
                if self.include_md5:
                    md5_hasher.update(chunk)
            if self.include_md5:
                fe[2]['md5sum'] = md5_hasher.hexdigest()
            f.close()
            i += 1
        # Add pieces from any remaining data
        while len(buf):
            piece = buf[:self.piece_size]
            self._pieces += sha1(piece).digest()
            del buf[:self.piece_size]
            pc += 1
            if callback:
                cancel = callback(fe[0], pc, num_pieces)
                if cancel:
                    return False
    # Create the torrent data structure
    data = OrderedDict()
    if len(self.trackers) > 0:
        data['announce'] = self.trackers[0].encode()
        if len(self.trackers) > 1:
            data['announce-list'] = [[x.encode()] for x in self.trackers]
    if self.comment:
        data['comment'] = self.comment.encode()
    if self.created_by:
        data['created by'] = self.created_by.encode()
    else:
        data['created by'] = DEFAULT_CREATOR.encode()
    if self.creation_date:
        data['creation date'] = int(self.creation_date.timestamp())
    if self.web_seeds:
        data['url-list'] = [x.encode() for x in self.web_seeds]
    data['info'] = OrderedDict()
    if single_file:
        # single-file mode: length/name at the top of the info dict
        data['info']['length'] = files[0][1]
        if self.include_md5:
            data['info']['md5sum'] = files[0][2]['md5sum']
        data['info']['name'] = files[0][0].split(os.sep)[-1].encode()
    else:
        # multi-file mode: per-file entries with paths relative to the root
        data['info']['files'] = []
        path_sp = self.path.split(os.sep)
        for x in files:
            fx = OrderedDict()
            fx['length'] = x[1]
            if self.include_md5:
                fx['md5sum'] = x[2]['md5sum']
            fx['path'] = [y.encode()
                          for y in x[0].split(os.sep)[len(path_sp):]]
            data['info']['files'].append(fx)
        data['info']['name'] = path_sp[-1].encode()
    data['info']['pieces'] = bytes(self._pieces)
    data['info']['piece length'] = self.piece_size
    data['info']['private'] = int(self.private)
    if self.source:
        data['info']['source'] = self.source.encode()
    self._data = data
    return True
|
Returns the base32 info hash of the torrent. Useful for generating
magnet links.
.. note:: ``generate()`` must be called first.
def info_hash_base32(self):
    """
    Return the base32-encoded info hash of the torrent, as used in
    magnet links.
    .. note:: ``generate()`` must be called first.
    """
    # guard clause: the info dict only exists after generate()
    if not getattr(self, '_data', None):
        raise exceptions.TorrentNotGeneratedException
    return b32encode(sha1(bencode(self._data['info'])).digest())
|
:return: The SHA-1 info hash of the torrent. Useful for generating
magnet links.
.. note:: ``generate()`` must be called first.
def info_hash(self):
    """
    :return: The hex-encoded SHA-1 info hash of the torrent. Useful
    for generating magnet links.
    .. note:: ``generate()`` must be called first.
    """
    # guard clause: the info dict only exists after generate()
    if not getattr(self, '_data', None):
        raise exceptions.TorrentNotGeneratedException
    return sha1(bencode(self._data['info'])).hexdigest()
|
请求API
:type url: str
:param url: API
:type method: str
:param method: HTTP METHOD
:type params: dict
:param params: query
:type data: dict
:param data: body
:type auth: bool
:param auth: if True and session expired will raise exception
:rtype: requests.Response
:return: Response
def req(self, url, method='get', params=None, data=None, auth=False):
    """
    Issue an API request.
    :type url: str
    :param url: API
    :type method: str
    :param method: HTTP METHOD
    :type params: dict
    :param params: query
    :type data: dict
    :param data: body
    :type auth: bool
    :param auth: if True and session expired will raise exception
    :rtype: requests.Response
    :return: Response
    """
    self.logger.debug('fetch api<%s:%s>' % (method, url))
    if auth and self.user_alias is None:
        raise Exception('cannot fetch api<%s> without session' % url)
    # context manager guarantees the session is closed even if the
    # request raises (the original leaked it on exceptions)
    with requests.Session() as s:
        r = s.request(method, url, params=params, data=data, cookies=self.cookies,
                      headers=self.headers, timeout=self.timeout)
    # bugfix: compare URLs by value, not identity — `r.url is not url`
    # was always True for distinct str objects. A redirect whose final
    # URL matches the login pattern means the session expired.
    if r.url != url and RE_SESSION_EXPIRE.search(r.url) is not None:
        self.expire()
        if auth:
            raise Exception('auth expired, could not fetch with<%s>' % url)
    return r
|
请求并返回json
:type url: str
:param url: API
:type method: str
:param method: HTTP METHOD
:type params: dict
:param params: query
:type data: dict
:param data: body
:rtype: dict
:return:
def json(self, url, method='get', params=None, data=None):
    """
    Issue a request and decode the response body as JSON.
    :type url: str
    :param url: API
    :type method: str
    :param method: HTTP METHOD
    :type params: dict
    :param params: query
    :type data: dict
    :param data: body
    :rtype: dict
    :return: decoded JSON payload
    """
    return self.req(url, method, params, data).json()
|
请求并返回xml
:type url: str
:param url: API
:type method: str
:param method: HTTP METHOD
:type params: dict
:param params: query
:type data: dict
:param data: body
:rtype: html.HtmlElement
:return:
def xml(self, url, method='get', params=None, data=None):
    """
    Issue a request and parse the response body as an HTML/XML tree.
    :type url: str
    :param url: API
    :type method: str
    :param method: HTTP METHOD
    :type params: dict
    :param params: query
    :type data: dict
    :param data: body
    :rtype: html.HtmlElement
    :return: parsed document tree
    """
    response = self.req(url, method, params, data)
    # parse from raw bytes to avoid utf8-mb4 decoding errors
    return self.to_xml(response.content, base_url=response.url)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.