Get CloudWatch logs for a lambda function.
:param awsclient:
:param function_name:
:param start_dt:
:param end_dt:
:param tail:
:return:
def logs(awsclient, function_name, start_dt, end_dt=None, tail=False):
"""Send a ping request to a lambda function.
:param awsclient:
:param function_name:
:param start_dt:
:param end_dt:
:param tail:
:return:
"""
log.debug('Getting cloudwatch logs for: %s', function_name)
log_group_name = '/aws/lambda/%s' % function_name
current_date = None
start_ts = datetime_to_timestamp(start_dt)
if end_dt:
end_ts = datetime_to_timestamp(end_dt)
else:
end_ts = None
# tail mode
# we assume that logs can arrive late but not out of order
# so we hold the timestamp of the last logentry and start the next iteration
# from there
while True:
logentries = filter_log_events(awsclient, log_group_name,
start_ts=start_ts, end_ts=end_ts)
if logentries:
for e in logentries:
actual_date, actual_time = decode_format_timestamp(e['timestamp'])
if current_date != actual_date:
# print the date only when it changed
current_date = actual_date
log.info(current_date)
log.info('%s %s' % (actual_time, e['message'].strip()))
if tail:
if logentries:
start_ts = logentries[-1]['timestamp'] + 1
time.sleep(2)
continue
break
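For illustration, a standalone sketch of the same tail-polling pattern; fetch_events and the fake entries below are stand-ins for filter_log_events and real CloudWatch data.
import time

def tail_logs(fetch_events, start_ts, poll_seconds=2, iterations=3):
    """Poll a log source, advancing past the last seen timestamp each round."""
    for _ in range(iterations):
        events = fetch_events(start_ts)
        for e in events:
            print(e['timestamp'], e['message'].strip())
        if events:
            # assume logs arrive late but in order: resume just after the last entry
            start_ts = events[-1]['timestamp'] + 1
        time.sleep(poll_seconds)

# usage with a stubbed fetcher
fake = [{'timestamp': 10, 'message': 'started'}, {'timestamp': 20, 'message': 'done'}]
tail_logs(lambda ts: [e for e in fake if e['timestamp'] >= ts],
          start_ts=0, poll_seconds=0, iterations=2)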
|
Reject version strings (such as "3.3.5") before datetime conversion.
Without this validation a version string could silently be converted
to a datetime value.
def __validate_datetime_string(self):
"""
Reject version strings (such as "3.3.5") before datetime conversion.
Without this validation a version string could silently be converted
to a datetime value.
"""
try:
try:
StrictVersion(self._value)
raise TypeConversionError(
"invalid datetime string: version string found {}".format(self._value)
)
except ValueError:
pass
except TypeError:
raise TypeConversionError("invalid datetime string: type={}".format(type(self._value)))
|
ADR Ra, [PC, #imm10_4]
ADR Ra, label
Load the address of label or the PC offset into Ra
Ra must be a low register
def ADR(self, params):
"""
ADR Ra, [PC, #imm10_4]
ADR Ra, label
Load the address of label or the PC offset into Ra
Ra must be a low register
"""
# TODO may need to rethink how I do PC; it may need to be byte aligned
# TODO This is wrong, as each address is a word, not a byte. Should this be the filled value with its location, or the value at that location (the decompiled instruction)?
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_WITH_BRACKETS, params)
except iarm.exceptions.ParsingError:
Ra, label = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
# TODO the address must be within 1020 bytes of current PC
self.check_arguments(low_registers=(Ra,), label_exists=(label,))
def ADR_func():
self.register[Ra] = self.labels[label] # TODO is this correct?
return ADR_func
self.check_arguments(low_registers=(Ra,), imm10_4=(Rc,))
if Rb != 'PC':
raise iarm.exceptions.IarmError("Second position argument is not PC: {}".format(Rb))
def ADR_func():
self.register[Ra] = self.register[Rb] + self.convert_to_integer(Rc[1:])
return ADR_func
|
LDR Ra, [PC, #imm10_4]
LDR Ra, label
LDR Ra, =equate
LDR Ra, [Rb, Rc]
LDR Ra, [Rb, #imm7_4]
LDR Ra, [SP, #imm10_4]
Load a word from memory into Ra
Ra, Rb, and Rc must be low registers
def LDR(self, params):
"""
LDR Ra, [PC, #imm10_4]
LDR Ra, label
LDR Ra, =equate
LDR Ra, [Rb, Rc]
LDR Ra, [Rb, #imm7_4]
LDR Ra, [SP, #imm10_4]
Load a word from memory into Ra
Ra, Rb, and Rc must be low registers
"""
# TODO definition for PC is Ra <- M[PC + Imm10_4], Imm10_4 = PC - label, need to figure this one out
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_WITH_BRACKETS, params)
except iarm.exceptions.ParsingError:
Ra, label_name = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
if label_name.startswith('='):
# This is a pseudo-instruction
label_name = label_name[1:]
# TODO add check that label is a 32 bit number
# TODO This does not work on instruction loading. This interpreter follows a Harvard-like architecture,
# TODO while ARMv6-M (Cortex-M0+) is a Von Neumann architecture. Instructions will not be decompiled
self.check_arguments(low_registers=(Ra,))
if label_name in self.labels:
label_value = self.labels[label_name]
elif label_name in self.equates:
label_value = self.equates[label_name]
else:
try:
label_value = int(self.convert_to_integer(label_name))
except ValueError:
warnings.warn(iarm.exceptions.LabelDoesNotExist("Label `{}` does not exist or is not a parsable number. If it is a label, make sure it exists before running".format(label_name)))
label_value = None
if label_value is not None and int(label_value) % 4 != 0:
# Make sure we are word aligned
raise iarm.exceptions.IarmError("Memory access not word aligned; Immediate: {}".format(int(label_value)))
elif label_name.startswith('[') and label_name.endswith(']'):
# TODO improve this
Rb = label_name[1:-1]
if Rb == 'SP' or Rb == 'R13':
self.check_arguments(low_registers=(Ra,))
else:
self.check_arguments(low_registers=(Ra, label_name))
def LDR_func():
if self.memory[Rb] % 4 != 0:
raise iarm.exceptions.HardFault(
"Memory access not word aligned; Register: {} Immediate: {}".format(self.register[Rb],
self.convert_to_integer(
Rc[1:])))
self.register[Ra] = 0
for i in range(4):
self.register[Ra] |= (self.memory[self.register[Rb] + i] << (8 * i))
return LDR_func
else:
self.check_arguments(low_registers=(Ra,), label_exists=(label_name,))
try:
label_value = self.labels[label_name]
if label_value >= 1024:
raise iarm.exceptions.IarmError("Label {} has value {} and is greater than 1020".format(label_name, label_value))
if label_value % 4 != 0:
raise iarm.exceptions.IarmError("Label {} has value {} and is not word aligned".format(label_name, label_value))
except KeyError:
# Label doesn't exist, nothing we can do about that except maybe raise an exception now,
# But we're avoiding that elsewhere, might as well avoid it here too
pass
def LDR_func():
nonlocal label_value
# Since we can get a label that didn't exist in the creation step, We need to check it here
# TODO is there a way for label_value to not exist?
if label_value is None:
# Try to get it again
if label_name in self.labels:
label_value = self.labels[label_name]
elif label_name in self.equates:
label_value = self.equates[label_name]
# If it is still None, then it never got allocated
if label_value is None:
raise iarm.exceptions.IarmError("label `{}` does not exist. Was space allocated?".format(label_name))
# It does exist, make sure its word aligned
if int(label_value) % 4 != 0:
raise iarm.exceptions.IarmError("Memory access not word aligned; Immediate: {}".format(int(label_value)))
try:
self.register[Ra] = int(label_value)
except ValueError:
# TODO Can we even get to this path now?
self.register[Ra] = self.labels[label_name]
return LDR_func
if self.is_immediate(Rc):
if Rb == 'SP' or Rb == 'R15':
self.check_arguments(low_registers=(Ra,), imm10_4=(Rc,))
else:
self.check_arguments(low_registers=(Ra, Rb), imm7_4=(Rc,))
def LDR_func():
# TODO does memory read up?
if (self.register[Rb] + self.convert_to_integer(Rc[1:])) % 4 != 0:
raise iarm.exceptions.HardFault("Memory access not word aligned; Register: {} Immediate: {}".format(self.register[Rb], self.convert_to_integer(Rc[1:])))
self.register[Ra] = 0
for i in range(4):
self.register[Ra] |= (self.memory[self.register[Rb] + self.convert_to_integer(Rc[1:]) + i] << (8 * i))
else:
self.check_arguments(low_registers=(Ra, Rb, Rc))
def LDR_func():
# TODO does memory read up?
if (self.register[Rb] + self.register[Rc]) % 4 != 0:
raise iarm.exceptions.HardFault(
"Memory access not word aligned; Register: {} Immediate: {}".format(self.register[Rb],
self.convert_to_integer(
Rc[1:])))
self.register[Ra] = 0
for i in range(4):
self.register[Ra] |= (self.memory[self.register[Rb] + self.register[Rc] + i] << (8 * i))
return LDR_func
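The word loads above assemble four bytes little-endian via shift-and-OR; a standalone illustration of that step (the memory dict and addresses are made up):
# little-endian: the byte at the lowest address becomes the least significant byte
memory = {0x20: 0x78, 0x21: 0x56, 0x22: 0x34, 0x23: 0x12}
value = 0
for i in range(4):
    value |= memory[0x20 + i] << (8 * i)
assert value == 0x12345678
assert 0x20 % 4 == 0  # word accesses must be 4-byte aligned, as checked above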
|
LDRB Ra, [Rb, Rc]
LDRB Ra, [Rb, #imm5]
Load a byte from memory into Ra
Ra, Rb, and Rc must be low registers
def LDRB(self, params):
"""
LDRB Ra, [Rb, Rc]
LDRB Ra, [Rb, #imm5]
Load a byte from memory into Ra
Ra, Rb, and Rc must be low registers
"""
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_WITH_BRACKETS, params)
except iarm.exceptions.ParsingError:
# LDRB Rn, [Rk] translates to an offset of zero
Ra, Rb = self.get_two_parameters(r'\s*([^\s,]*),\s*\[([^\s,]*)\](,\s*[^\s,]*)*\s*', params)
Rc = '#0'
if self.is_immediate(Rc):
self.check_arguments(low_registers=(Ra, Rb), imm5=(Rc,))
def LDRB_func():
self.register[Ra] = self.memory[self.register[Rb] + self.convert_to_integer(Rc[1:])]
else:
self.check_arguments(low_registers=(Ra, Rb, Rc))
def LDRB_func():
self.register[Ra] = self.memory[self.register[Rb] + self.register[Rc]]
return LDRB_func
|
LDRH Ra, [Rb, Rc]
LDRH Ra, [Rb, #imm6_2]
Load a half word from memory into Ra
Ra, Rb, and Rc must be low registers
def LDRH(self, params):
"""
LDRH Ra, [Rb, Rc]
LDRH Ra, [Rb, #imm6_2]
Load a half word from memory into Ra
Ra, Rb, and Rc must be low registers
"""
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_WITH_BRACKETS, params)
except iarm.exceptions.ParsingError:
# LDRH Rn, [Rk] translates to an offset of zero
Ra, Rb = self.get_two_parameters(r'\s*([^\s,]*),\s*\[([^\s,]*)\](,\s*[^\s,]*)*\s*', params)
Rc = '#0'
if self.is_immediate(Rc):
self.check_arguments(low_registers=(Ra, Rb), imm6_2=(Rc,))
def LDRH_func():
# TODO does memory read up?
if (self.register[Rb]) % 2 != 0:
raise iarm.exceptions.HardFault(
"Memory access not half word aligned; Register: {} Immediate: {}".format(self.register[Rb],
self.convert_to_integer(
Rc[1:])))
self.register[Ra] = 0
for i in range(2):
self.register[Ra] |= (self.memory[self.register[Rb] + self.convert_to_integer(Rc[1:]) + i] << (8 * i))
else:
self.check_arguments(low_registers=(Ra, Rb, Rc))
def LDRH_func():
# TODO does memory read up?
if (self.register[Rb] + self.register[Rc]) % 2 != 0:
raise iarm.exceptions.HardFault(
"Memory access not half word aligned; Register: {} Immediate: {}".format(self.register[Rb],
self.register[Rc]))
self.register[Ra] = 0
for i in range(2):
self.register[Ra] |= (self.memory[self.register[Rb] + self.register[Rc] + i] << (8 * i))
return LDRH_func
|
LDRSB Ra, [Rb, Rc]
Load a byte from memory, sign extend, and put into Ra
Ra, Rb, and Rc must be low registers
def LDRSB(self, params):
"""
LDRSB Ra, [Rb, Rc]
Load a byte from memory, sign extend, and put into Ra
Ra, Rb, and Rc must be low registers
"""
# TODO LDRSB can't use immediates
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_WITH_BRACKETS, params)
self.check_arguments(low_registers=(Ra, Rb, Rc))
def LDRSB_func():
# TODO does memory read up?
self.register[Ra] = 0
self.register[Ra] |= self.memory[self.register[Rb] + self.register[Rc]]
if self.register[Ra] & (1 << 7):
self.register[Ra] |= (0xFFFFFF << 8)
return LDRSB_func
|
LDRSH Ra, [Rb, Rc]
Load a half word from memory, sign extend, and put into Ra
Ra, Rb, and Rc must be low registers
def LDRSH(self, params):
"""
LDRSH Ra, [Rb, Rc]
Load a half word from memory, sign extend, and put into Ra
Ra, Rb, and Rc must be low registers
"""
# TODO LDRSH can't use immediates
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_WITH_BRACKETS, params)
self.check_arguments(low_registers=(Ra, Rb, Rc))
def LDRSH_func():
# TODO does memory read up?
if (self.register[Rb] + self.register[Rc]) % 2 != 0:
raise iarm.exceptions.HardFault(
"Memory access not half word aligned\nR{}: {}\nR{}: {}".format(Rb, self.register[Rb],
Rc, self.register[Rc]))
self.register[Ra] = 0
for i in range(2):
self.register[Ra] |= (self.memory[self.register[Rb] + self.register[Rc] + i] << (8 * i))
if self.register[Ra] & (1 << 15):
self.register[Ra] |= (0xFFFF << 16)
return LDRSH_func
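LDRSB and LDRSH sign-extend by testing the top bit of the loaded value and OR-ing in the high bits; a small standalone check of that arithmetic, assuming 32-bit registers:
def sign_extend_byte(b):
    # replicate bit 7 into bits 8..31
    return b | (0xFFFFFF << 8) if b & (1 << 7) else b

def sign_extend_half(h):
    # replicate bit 15 into bits 16..31
    return h | (0xFFFF << 16) if h & (1 << 15) else h

assert sign_extend_byte(0x7F) == 0x0000007F
assert sign_extend_byte(0x80) == 0xFFFFFF80
assert sign_extend_half(0x8001) == 0xFFFF8001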
|
POP {RPopList}
Pop from the stack into the list of registers
List must contain only low registers or PC
def POP(self, params):
"""
POP {RPopList}
Pop from the stack into the list of registers
List must contain only low registers or PC
"""
# TODO verify pop order
# TODO pop list is comma separated, right?
# TODO what registers are allowed to POP to? Low registers and PC
# TODO need to support ranges, i.e. {R2, R5-R7}
# TODO PUSH should reverse the list, not POP
RPopList = self.get_one_parameter(r'\s*{(.*)}(.*)', params).split(',')
RPopList.reverse()
RPopList = [i.strip() for i in RPopList]
def POP_func():
for register in RPopList:
# Get 4 bytes
value = 0
for i in range(4):
# TODO use memory width instead of constants
value |= self.memory[self.register['SP'] + i] << (8 * i)
self.register[register] = value
self.register['SP'] += 4
return POP_func
|
PUSH {RPushList}
Push to the stack from a list of registers
List must contain only low registers or LR
def PUSH(self, params):
"""
PUSH {RPushList}
Push to the stack from a list of registers
List must contain only low registers or LR
"""
# TODO what registers are allowed to PUSH to? Low registers and LR
# TODO PUSH should reverse the list, not POP
RPushList = self.get_one_parameter(r'\s*{(.*)}(.*)', params).split(',')
RPushList = [i.strip() for i in RPushList]
# TODO should we make sure the register exists? probably not
def PUSH_func():
for register in RPushList:
self.register['SP'] -= 4
for i in range(4):
# TODO is this the same as with POP?
self.memory[self.register['SP'] + i] = ((self.register[register] >> (8 * i)) & 0xFF)
return PUSH_func
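PUSH and POP mirror each other: PUSH pre-decrements SP and stores four bytes little-endian, POP reads them back and post-increments. A self-contained round trip over a plain dict standing in for memory (addresses and values are made up):
memory, sp = {}, 0x100

def push(value):
    global sp
    sp -= 4
    for i in range(4):
        memory[sp + i] = (value >> (8 * i)) & 0xFF

def pop():
    global sp
    value = 0
    for i in range(4):
        value |= memory[sp + i] << (8 * i)
    sp += 4
    return value

push(0xDEADBEEF)
push(0x00000042)
assert pop() == 0x00000042 and pop() == 0xDEADBEEF  # last pushed, first popped
assert sp == 0x100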
|
STM Ra!, {RLoList}
Store multiple registers into memory
def STM(self, params):
"""
STM Ra!, {RLoList}
Store multiple registers into memory
"""
# TODO what registers can be stored?
Ra, RLoList = self.get_two_parameters(r'\s*([^\s,]*)!,\s*{(.*)}(.*)', params)
RLoList = RLoList.split(',')
RLoList = [i.strip() for i in RLoList]
self.check_arguments(low_registers=[Ra] + RLoList)
def STM_func():
for i in range(len(RLoList)):
for j in range(4):
self.memory[self.register[Ra] + 4*i + j] = ((self.register[RLoList[i]] >> (8 * j)) & 0xFF)
self.register[Ra] += 4*len(RLoList)
return STM_func
|
STR Ra, [Rb, Rc]
STR Ra, [Rb, #imm7_4]
STR Ra, [SP, #imm10_4]
Store Ra into memory as a word
Ra, Rb, and Rc must be low registers
def STR(self, params):
"""
STR Ra, [Rb, Rc]
STR Ra, [Rb, #imm7_4]
STR Ra, [SP, #imm10_4]
Store Ra into memory as a word
Ra, Rb, and Rc must be low registers
"""
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_WITH_BRACKETS, params)
if self.is_immediate(Rc):
if Rb == 'SP' or Rb == 'FP':
self.check_arguments(low_registers=(Ra,), imm10_4=(Rc,))
else:
self.check_arguments(low_registers=(Ra, Rb), imm7_4=(Rc,))
def STR_func():
for i in range(4):
self.memory[self.register[Rb] + self.convert_to_integer(Rc[1:]) + i] = ((self.register[Ra] >> (8 * i)) & 0xFF)
else:
self.check_arguments(low_registers=(Ra, Rb, Rc))
def STR_func():
for i in range(4):
self.memory[self.register[Rb] + self.register[Rc] + i] = ((self.register[Ra] >> (8 * i)) & 0xFF)
return STR_func
|
STRB Ra, [Rb, Rc]
STRB Ra, [Rb, #imm5]
Store Ra into memory as a byte
Ra, Rb, and Rc must be low registers
def STRB(self, params):
"""
STRB Ra, [Rb, Rc]
STRB Ra, [Rb, #imm5]
Store Ra into memory as a byte
Ra, Rb, and Rc must be low registers
"""
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_WITH_BRACKETS, params)
if self.is_immediate(Rc):
self.check_arguments(low_registers=(Ra, Rb), imm5=(Rc,))
def STRB_func():
self.memory[self.register[Rb] + self.convert_to_integer(Rc[1:])] = (self.register[Ra] & 0xFF)
else:
self.check_arguments(low_registers=(Ra, Rb, Rc))
def STRB_func():
self.memory[self.register[Rb] + self.register[Rc]] = (self.register[Ra] & 0xFF)
return STRB_func
|
STRH Ra, [Rb, Rc]
STRH Ra, [Rb, #imm6_2]
Store Ra into memory as a half word
Ra, Rb, and Rc must be low registers
def STRH(self, params):
"""
STRH Ra, [Rb, Rc]
STRH Ra, [Rb, #imm6_2]
Store Ra into memory as a half word
Ra, Rb, and Rc must be low registers
"""
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_WITH_BRACKETS, params)
if self.is_immediate(Rc):
self.check_arguments(low_registers=(Ra, Rb), imm6_2=(Rc,))
def STRH_func():
for i in range(2):
self.memory[self.register[Rb] + self.convert_to_integer(Rc[1:]) + i] = ((self.register[Ra] >> (8 * i)) & 0xFF)
else:
self.check_arguments(low_registers=(Ra, Rb, Rc))
def STRH_func():
for i in range(2):
self.memory[self.register[Rb] + self.register[Rc] + i] = ((self.register[Ra] >> (8 * i)) & 0xFF)
return STRH_func
|
Helper to stop ec2 instances.
By default it waits for instances to stop.
:param awsclient:
:param ec2_instances:
:param wait: waits for instances to stop
:return:
def _stop_ec2_instances(awsclient, ec2_instances, wait=True):
"""Helper to stop ec2 instances.
By default it waits for instances to stop.
:param awsclient:
:param ec2_instances:
:param wait: waits for instances to stop
:return:
"""
if len(ec2_instances) == 0:
return
client_ec2 = awsclient.get_client('ec2')
# get running instances
running_instances = all_pages(
client_ec2.describe_instance_status,
{
'InstanceIds': ec2_instances,
'Filters': [{
'Name': 'instance-state-name',
'Values': ['pending', 'running']
}]
},
lambda r: [i['InstanceId'] for i in r.get('InstanceStatuses', [])],
)
if running_instances:
log.info('Stopping EC2 instances: %s', running_instances)
client_ec2.stop_instances(InstanceIds=running_instances)
if wait:
# wait for instances to stop
waiter_inst_stopped = client_ec2.get_waiter('instance_stopped')
waiter_inst_stopped.wait(InstanceIds=running_instances)
|
Helper to start ec2 instances
:param awsclient:
:param ec2_instances:
:param wait: waits for instances to start
:return:
def _start_ec2_instances(awsclient, ec2_instances, wait=True):
"""Helper to start ec2 instances
:param awsclient:
:param ec2_instances:
:param wait: waits for instances to start
:return:
"""
if len(ec2_instances) == 0:
return
client_ec2 = awsclient.get_client('ec2')
# get stopped instances
stopped_instances = all_pages(
client_ec2.describe_instance_status,
{
'InstanceIds': ec2_instances,
'Filters': [{
'Name': 'instance-state-name',
'Values': ['stopping', 'stopped']
}],
'IncludeAllInstances': True
},
lambda r: [i['InstanceId'] for i in r.get('InstanceStatuses', [])],
)
if stopped_instances:
# start all stopped instances
log.info('Starting EC2 instances: %s', stopped_instances)
client_ec2.start_instances(InstanceIds=stopped_instances)
if wait:
# wait for instances to come up
waiter_inst_running = client_ec2.get_waiter('instance_running')
waiter_inst_running.wait(InstanceIds=stopped_instances)
# wait for status checks
waiter_status_ok = client_ec2.get_waiter('instance_status_ok')
waiter_status_ok.wait(InstanceIds=stopped_instances)
|
Helper to select DB instances.
:param awsclient:
:param db_instances:
:param status_list:
:return: list of db_instances that match the filter
def _filter_db_instances_by_status(awsclient, db_instances, status_list):
"""helper to select dbinstances.
:param awsclient:
:param db_instances:
:param status_list:
:return: list of db_instances that match the filter
"""
client_rds = awsclient.get_client('rds')
db_instances_with_status = []
for db in db_instances:
response = client_rds.describe_db_instances(
DBInstanceIdentifier=db
)
for entry in response.get('DBInstances', []):
if entry['DBInstanceStatus'] in status_list:
db_instances_with_status.append(db)
return db_instances_with_status
|
Helper to change desiredCount of ECS services to zero.
By default it waits for this to complete.
Docs here: http://docs.aws.amazon.com/cli/latest/reference/ecs/update-service.html
:param awsclient:
:param services:
:param template: the cloudformation template
:param parameters: the parameters used for the cloudformation template
:param wait: waits for services to stop
:return:
def _stop_ecs_services(awsclient, services, template, parameters, wait=True):
"""Helper to change desiredCount of ECS services to zero.
By default it waits for this to complete.
Docs here: http://docs.aws.amazon.com/cli/latest/reference/ecs/update-service.html
:param awsclient:
:param services:
:param template: the cloudformation template
:param parameters: the parameters used for the cloudformation template
:param wait: waits for services to stop
:return:
"""
if len(services) == 0:
return
client_ecs = awsclient.get_client('ecs')
for service in services:
log.info('Resize ECS service \'%s\' to desiredCount=0',
service['LogicalResourceId'])
cluster, desired_count = _get_service_cluster_desired_count(
template, parameters, service['LogicalResourceId'])
log.debug('cluster: %s' % cluster)
response = client_ecs.update_service(
cluster=cluster,
service=service['PhysicalResourceId'],
desiredCount=0
)
|
Stop an existing stack on AWS cloud.
:param awsclient:
:param stack_name:
:param use_suspend: use suspend and resume on the autoscaling group
:return: exit_code
def stop_stack(awsclient, stack_name, use_suspend=False):
"""Stop an existing stack on AWS cloud.
:param awsclient:
:param stack_name:
:param use_suspend: use suspend and resume on the autoscaling group
:return: exit_code
"""
exit_code = 0
# check for DisableStop
#disable_stop = conf.get('deployment', {}).get('DisableStop', False)
#if disable_stop:
# log.warn('\'DisableStop\' is set - nothing to do!')
#else:
if not stack_exists(awsclient, stack_name):
log.warn('Stack \'%s\' not deployed - nothing to do!', stack_name)
else:
client_cfn = awsclient.get_client('cloudformation')
client_autoscaling = awsclient.get_client('autoscaling')
client_rds = awsclient.get_client('rds')
client_ec2 = awsclient.get_client('ec2')
resources = all_pages(
client_cfn.list_stack_resources,
{ 'StackName': stack_name },
lambda r: r['StackResourceSummaries']
)
autoscaling_groups = [
r for r in resources
if r['ResourceType'] == 'AWS::AutoScaling::AutoScalingGroup'
]
# lookup all types of scaling processes
# [Launch, Terminate, HealthCheck, ReplaceUnhealthy, AZRebalance
# AlarmNotification, ScheduledActions, AddToLoadBalancer]
response = client_autoscaling.describe_scaling_process_types()
scaling_process_types = [t['ProcessName'] for t in response.get('Processes', [])]
for asg in autoscaling_groups:
# find instances in autoscaling group
ec2_instances = all_pages(
client_autoscaling.describe_auto_scaling_instances,
{},
lambda r: [i['InstanceId'] for i in r.get('AutoScalingInstances', [])
if i['AutoScalingGroupName'] == asg['PhysicalResourceId']],
)
if use_suspend:
# alternative implementation to speed up start
# only problem is that instances must survive stop & start
# suspend all autoscaling processes
log.info('Suspending all autoscaling processes for \'%s\'',
asg['LogicalResourceId'])
response = client_autoscaling.suspend_processes(
AutoScalingGroupName=asg['PhysicalResourceId'],
ScalingProcesses=scaling_process_types
)
_stop_ec2_instances(awsclient, ec2_instances)
else:
# resize autoscaling group (min, max = 0)
log.info('Resize autoscaling group \'%s\' to minSize=0, maxSize=0',
asg['LogicalResourceId'])
response = client_autoscaling.update_auto_scaling_group(
AutoScalingGroupName=asg['PhysicalResourceId'],
MinSize=0,
MaxSize=0
)
if ec2_instances:
running_instances = all_pages(
client_ec2.describe_instance_status,
{
'InstanceIds': ec2_instances,
'Filters': [{
'Name': 'instance-state-name',
'Values': ['pending', 'running']
}]
},
lambda r: [i['InstanceId'] for i in r.get('InstanceStatuses', [])],
)
if running_instances:
# wait for instances to terminate
waiter_inst_terminated = client_ec2.get_waiter('instance_terminated')
waiter_inst_terminated.wait(InstanceIds=running_instances)
# setting ECS desiredCount to zero
services = [
r for r in resources
if r['ResourceType'] == 'AWS::ECS::Service'
]
if services:
template, parameters = _get_template_parameters(awsclient, stack_name)
_stop_ecs_services(awsclient, services, template, parameters)
# stopping ec2 instances
instances = [
r['PhysicalResourceId'] for r in resources
if r['ResourceType'] == 'AWS::EC2::Instance'
]
_stop_ec2_instances(awsclient, instances)
# stopping db instances
db_instances = [
r['PhysicalResourceId'] for r in resources
if r['ResourceType'] == 'AWS::RDS::DBInstance'
]
running_db_instances = _filter_db_instances_by_status(
awsclient, db_instances, ['available']
)
for db in running_db_instances:
log.info('Stopping RDS instance \'%s\'', db)
client_rds.stop_db_instance(DBInstanceIdentifier=db)
return exit_code
|
Helper to extract the configured MinSize, MaxSize attributes from the
template.
:param template: cloudformation template (json)
:param parameters: list of {'ParameterKey': 'x1', 'ParameterValue': 'y1'}
:param asg_name: logical resource name of the autoscaling group
:return: MinSize, MaxSize
def _get_autoscaling_min_max(template, parameters, asg_name):
"""Helper to extract the configured MinSize, MaxSize attributes from the
template.
:param template: cloudformation template (json)
:param parameters: list of {'ParameterKey': 'x1', 'ParameterValue': 'y1'}
:param asg_name: logical resource name of the autoscaling group
:return: MinSize, MaxSize
"""
params = {e['ParameterKey']: e['ParameterValue'] for e in parameters}
asg = template.get('Resources', {}).get(asg_name, None)
if asg:
assert asg['Type'] == 'AWS::AutoScaling::AutoScalingGroup'
min = asg.get('Properties', {}).get('MinSize', None)
max = asg.get('Properties', {}).get('MaxSize', None)
if 'Ref' in min:
min = params.get(min['Ref'], None)
if 'Ref' in max:
max = params.get(max['Ref'], None)
if min and max:
return int(min), int(max)
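The Ref handling above only dereferences template parameters; a minimal sketch with a hypothetical template and parameter list shows that lookup:
template = {
    'Resources': {
        'MyAsg': {
            'Type': 'AWS::AutoScaling::AutoScalingGroup',
            'Properties': {'MinSize': {'Ref': 'MinParam'}, 'MaxSize': '4'},
        }
    }
}
parameters = [{'ParameterKey': 'MinParam', 'ParameterValue': '2'}]

params = {e['ParameterKey']: e['ParameterValue'] for e in parameters}
props = template['Resources']['MyAsg']['Properties']
min_size = props['MinSize']
if 'Ref' in min_size:          # dict value -> resolve through the parameter list
    min_size = params[min_size['Ref']]
max_size = props['MaxSize']    # plain string -> used as-is
assert (int(min_size), int(max_size)) == (2, 4)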
|
Helper to extract the configured desiredCount attribute from the
template.
:param template: cloudformation template (json)
:param parameters: list of {'ParameterKey': 'x1', 'ParameterValue': 'y1'}
:param service_name: logical resource name of the ECS service
:return: cluster, desiredCount
def _get_service_cluster_desired_count(template, parameters, service_name):
"""Helper to extract the configured desiredCount attribute from the
template.
:param template: cloudformation template (json)
:param parameters: list of {'ParameterKey': 'x1', 'ParameterValue': 'y1'}
:param service_name: logical resource name of the ECS service
:return: cluster, desiredCount
"""
params = {e['ParameterKey']: e['ParameterValue'] for e in parameters}
service = template.get('Resources', {}).get(service_name, None)
if service:
assert service['Type'] == 'AWS::ECS::Service'
cluster = service.get('Properties', {}).get('Cluster', None)
desired_count = service.get('Properties', {}).get('DesiredCount', None)
if 'Ref' in cluster:
cluster = params.get(cluster['Ref'], None)
if not isinstance(desired_count, int) and 'Ref' in desired_count:
desired_count = params.get(desired_count['Ref'], None)
return cluster, int(desired_count)
|
Start an existing stack on AWS cloud.
:param awsclient:
:param stack_name:
:param use_suspend: use suspend and resume on the autoscaling group
:return: exit_code
def start_stack(awsclient, stack_name, use_suspend=False):
"""Start an existing stack on AWS cloud.
:param awsclient:
:param stack_name:
:param use_suspend: use suspend and resume on the autoscaling group
:return: exit_code
"""
exit_code = 0
# check for DisableStop
#disable_stop = conf.get('deployment', {}).get('DisableStop', False)
#if disable_stop:
# log.warn('\'DisableStop\' is set - nothing to do!')
#else:
if not stack_exists(awsclient, stack_name):
log.warn('Stack \'%s\' not deployed - nothing to do!', stack_name)
else:
client_cfn = awsclient.get_client('cloudformation')
client_autoscaling = awsclient.get_client('autoscaling')
client_rds = awsclient.get_client('rds')
resources = all_pages(
client_cfn.list_stack_resources,
{ 'StackName': stack_name },
lambda r: r['StackResourceSummaries']
)
autoscaling_groups = [
r for r in resources
if r['ResourceType'] == 'AWS::AutoScaling::AutoScalingGroup'
]
# lookup all types of scaling processes
# [Launch, Terminate, HealthCheck, ReplaceUnhealthy, AZRebalance
# AlarmNotification, ScheduledActions, AddToLoadBalancer]
response = client_autoscaling.describe_scaling_process_types()
scaling_process_types = [t['ProcessName'] for t in response.get('Processes', [])]
# starting db instances
db_instances = [
r['PhysicalResourceId'] for r in resources
if r['ResourceType'] == 'AWS::RDS::DBInstance'
]
stopped_db_instances = _filter_db_instances_by_status(
awsclient, db_instances, ['stopped']
)
for db in stopped_db_instances:
log.info('Starting RDS instance \'%s\'', db)
client_rds.start_db_instance(DBInstanceIdentifier=db)
# wait for db instances to become available
for db in stopped_db_instances:
waiter_db_available = client_rds.get_waiter('db_instance_available')
waiter_db_available.wait(DBInstanceIdentifier=db)
# starting ec2 instances
instances = [
r['PhysicalResourceId'] for r in resources
if r['ResourceType'] == 'AWS::EC2::Instance'
]
_start_ec2_instances(awsclient, instances)
services = [
r for r in resources
if r['ResourceType'] == 'AWS::ECS::Service'
]
if (autoscaling_groups and not use_suspend) or services:
template, parameters = _get_template_parameters(awsclient, stack_name)
# setting ECS desiredCount back
if services:
_start_ecs_services(awsclient, services, template, parameters)
for asg in autoscaling_groups:
if use_suspend:
# alternative implementation to speed up start
# only problem is that instances must survive stop & start
# find instances in autoscaling group
instances = all_pages(
client_autoscaling.describe_auto_scaling_instances,
{},
lambda r: [i['InstanceId'] for i in r.get('AutoScalingInstances', [])
if i['AutoScalingGroupName'] == asg['PhysicalResourceId']],
)
_start_ec2_instances(awsclient, instances)
# resume all autoscaling processes
log.info('Resuming all autoscaling processes for \'%s\'',
asg['LogicalResourceId'])
response = client_autoscaling.resume_processes(
AutoScalingGroupName=asg['PhysicalResourceId'],
ScalingProcesses=scaling_process_types
)
else:
# resize autoscaling group back to its original values
log.info('Resize autoscaling group \'%s\' back to original values',
asg['LogicalResourceId'])
min, max = _get_autoscaling_min_max(
template, parameters, asg['LogicalResourceId'])
response = client_autoscaling.update_auto_scaling_group(
AutoScalingGroupName=asg['PhysicalResourceId'],
MinSize=min,
MaxSize=max
)
return exit_code
|
Property method that returns a bool specifying if the process is
currently running. This will return true if the state is active, idle
or initializing.
:rtype: bool
def is_running(self):
"""Property method that returns a bool specifying if the process is
currently running. This will return true if the state is active, idle
or initializing.
:rtype: bool
"""
return self._state in [self.STATE_ACTIVE,
self.STATE_IDLE,
self.STATE_INITIALIZING]
|
Invoked whenever a signal is added to the stack.
:param int signum: The signal that was added
def process_signal(self, signum):
"""Invoked whenever a signal is added to the stack.
:param int signum: The signal that was added
"""
if signum == signal.SIGTERM:
LOGGER.info('Received SIGTERM, initiating shutdown')
self.stop()
elif signum == signal.SIGHUP:
LOGGER.info('Received SIGHUP')
if self.config.reload():
LOGGER.info('Configuration reloaded')
logging.config.dictConfig(self.config.logging)
self.on_configuration_reloaded()
elif signum == signal.SIGUSR1:
self.on_sigusr1()
elif signum == signal.SIGUSR2:
self.on_sigusr2()
|
The core method for starting the application. Will set up logging,
toggle the runtime state flag, block on loop, then call shutdown.
Redefine this method if you intend to use an IO Loop or some other
long running process.
def run(self):
"""The core method for starting the application. Will setup logging,
toggle the runtime state flag, block on loop, then call shutdown.
Redefine this method if you intend to use an IO Loop or some other
long running process.
"""
LOGGER.info('%s v%s started', self.APPNAME, self.VERSION)
self.setup()
while not any([self.is_stopping, self.is_stopped]):
self.set_state(self.STATE_SLEEPING)
try:
signum = self.pending_signals.get(True, self.wake_interval)
except queue.Empty:
pass
else:
self.process_signal(signum)
if any([self.is_stopping, self.is_stopped]):
break
self.set_state(self.STATE_ACTIVE)
self.process()
|
Important:
Do not extend this method; redefine Controller.run instead.
def start(self):
"""Important:
Do not extend this method; redefine Controller.run instead.
"""
for signum in [signal.SIGHUP, signal.SIGTERM,
signal.SIGUSR1, signal.SIGUSR2]:
signal.signal(signum, self._on_signal)
self.run()
|
Set the runtime state of the Controller. Use the internal constants
to ensure proper state values:
- :attr:`Controller.STATE_INITIALIZING`
- :attr:`Controller.STATE_ACTIVE`
- :attr:`Controller.STATE_IDLE`
- :attr:`Controller.STATE_SLEEPING`
- :attr:`Controller.STATE_STOP_REQUESTED`
- :attr:`Controller.STATE_STOPPING`
- :attr:`Controller.STATE_STOPPED`
:param int state: The runtime state
:raises: ValueError
def set_state(self, state):
"""Set the runtime state of the Controller. Use the internal constants
to ensure proper state values:
- :attr:`Controller.STATE_INITIALIZING`
- :attr:`Controller.STATE_ACTIVE`
- :attr:`Controller.STATE_IDLE`
- :attr:`Controller.STATE_SLEEPING`
- :attr:`Controller.STATE_STOP_REQUESTED`
- :attr:`Controller.STATE_STOPPING`
- :attr:`Controller.STATE_STOPPED`
:param int state: The runtime state
:raises: ValueError
"""
if state == self._state:
return
elif state not in self._STATES.keys():
raise ValueError('Invalid state {}'.format(state))
# Check for invalid transitions
if self.is_waiting_to_stop and state not in [self.STATE_STOPPING,
self.STATE_STOPPED]:
LOGGER.warning('Attempt to set invalid state while waiting to '
'shutdown: %s ', self._STATES[state])
return
elif self.is_stopping and state != self.STATE_STOPPED:
LOGGER.warning('Attempt to set invalid post shutdown state: %s',
self._STATES[state])
return
elif self.is_running and state not in [self.STATE_ACTIVE,
self.STATE_IDLE,
self.STATE_SLEEPING,
self.STATE_STOP_REQUESTED,
self.STATE_STOPPING]:
LOGGER.warning('Attempt to set invalid post running state: %s',
self._STATES[state])
return
elif self.is_sleeping and state not in [self.STATE_ACTIVE,
self.STATE_IDLE,
self.STATE_STOP_REQUESTED,
self.STATE_STOPPING]:
LOGGER.warning('Attempt to set invalid post sleeping state: %s',
self._STATES[state])
return
LOGGER.debug('State changed from %s to %s',
self._STATES[self._state], self._STATES[state])
self._state = state
|
Override to implement shutdown steps.
def stop(self):
"""Override to implement shutdown steps."""
LOGGER.info('Attempting to stop the process')
self.set_state(self.STATE_STOP_REQUESTED)
# Call shutdown for classes to add shutdown steps
self.shutdown()
# Wait for the current run to finish
while self.is_running and self.is_waiting_to_stop:
LOGGER.info('Waiting for the process to finish')
time.sleep(self.SLEEP_UNIT)
# Change the state to shutting down
if not self.is_stopping:
self.set_state(self.STATE_STOPPING)
# Call a method that may be overwritten to cleanly shutdown
self.on_shutdown()
# Change our state
self.set_state(self.STATE_STOPPED)
|
Parse a timestamp.
:param ts: timestamp in ISO8601 format
:return: naive datetime object
def parse_ts(ts):
"""
Parse a timestamp.
:param ts: timestamp in ISO8601 format
:return: naive datetime object
"""
# ISO8601 = '%Y-%m-%dT%H:%M:%SZ'
# ISO8601_MS = '%Y-%m-%dT%H:%M:%S.%fZ'
# RFC1123 = '%a, %d %b %Y %H:%M:%S %Z'
dt = maya.parse(ts.strip())
return dt.datetime(naive=True)
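For reference, the ISO8601 formats listed in the comments also parse with the standard library; a small equivalent sketch using datetime.strptime (parse_ts itself uses maya):
from datetime import datetime

ISO8601 = '%Y-%m-%dT%H:%M:%SZ'
ISO8601_MS = '%Y-%m-%dT%H:%M:%S.%fZ'

dt = datetime.strptime('2017-05-04T12:30:00Z', ISO8601)
assert (dt.year, dt.minute) == (2017, 30)
dt_ms = datetime.strptime('2017-05-04T12:30:00.250Z', ISO8601_MS)
assert dt_ms.microsecond == 250000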
|
Read the outputs of a CloudFormation stack.
Note: gcdt.servicediscovery get_outputs_for_stack(awsclient, stack_name)
is used in many cloudformation.py templates!
:param awsclient:
:param stack_name:
:return: dictionary containing the stack outputs
def get_outputs_for_stack(awsclient, stack_name):
"""
Read the outputs of a CloudFormation stack.
Note: gcdt.servicediscovery get_outputs_for_stack(awsclient, stack_name)
is used in many cloudformation.py templates!
:param awsclient:
:param stack_name:
:return: dictionary containing the stack outputs
"""
client_cf = awsclient.get_client('cloudformation')
response = client_cf.describe_stacks(StackName=stack_name)
if response['Stacks'] and 'Outputs' in response['Stacks'][0]:
result = {}
for output in response['Stacks'][0]['Outputs']:
result[output['OutputKey']] = output['OutputValue']
return result
|
DEPRECATED!!!
Return the latest version of our base AMI.
We can't use tags for this, so we only have the name as a resource.
Note: this functionality is deprecated since it only works for the "old"
baseami.
def get_base_ami(awsclient, owners):
"""
DEPRECATED!!!
Return the latest version of our base AMI.
We can't use tags for this, so we only have the name as a resource.
Note: this functionality is deprecated since it only works for the "old"
baseami.
"""
client_ec2 = awsclient.get_client('ec2')
image_filter = [
{
'Name': 'state',
'Values': [
'available',
]
},
]
latest_ts = maya.MayaDT(0).datetime(naive=True)
latest_version = StrictVersion('0.0.0')
latest_id = None
for i in client_ec2.describe_images(
Owners=owners,
Filters=image_filter
)['Images']:
m = re.search(r'(Ops_Base-Image)_(\d+\.\d+\.\d+)_(\d+)$', i['Name'])
if m:
version = StrictVersion(m.group(2))
#timestamp = m.group(3)
creation_date = parse_ts(i['CreationDate'])
if creation_date > latest_ts and version >= latest_version:
latest_id = i['ImageId']
latest_ts = creation_date
latest_version = version
return latest_id
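A standalone check of the image-name pattern and the version comparison used above; the sample image name is invented:
import re
from distutils.version import StrictVersion  # same helper the function above relies on

m = re.search(r'(Ops_Base-Image)_(\d+\.\d+\.\d+)_(\d+)$', 'Ops_Base-Image_1.2.10_20170101')
assert m and m.group(2) == '1.2.10' and m.group(3) == '20170101'
# StrictVersion compares numerically, not lexicographically
assert StrictVersion('1.2.10') > StrictVersion('1.2.9')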
|
Add the default arguments to the parser.
:param argparse.ArgumentParser parser: The argument parser
def _add_default_arguments(parser):
"""Add the default arguments to the parser.
:param argparse.ArgumentParser parser: The argument parser
"""
parser.add_argument('-c', '--config', action='store', dest='config',
help='Path to the configuration file')
parser.add_argument('-f', '--foreground', action='store_true', dest='foreground',
help='Run the application interactively')
|
Ensure that val is the default str() type for Python 2 or 3
def _str(val):
"""
Ensure that val is the default str() type for Python 2 or 3
"""
if str == bytes:
if isinstance(val, str):
return val
else:
return str(val)
else:
if isinstance(val, str):
return val
else:
return str(val, 'ascii')
|
Use requests.Request and requests.PreparedRequest to produce the
body (and boundary value) of a multipart/form-data; POST request as
detailed in https://www.mediawiki.org/wiki/API:Edit#Large_texts
def _prepare_long_request(self, url, api_query):
"""
Use requests.Request and requests.PreparedRequest to produce the
body (and boundary value) of a multipart/form-data; POST request as
detailed in https://www.mediawiki.org/wiki/API:Edit#Large_texts
"""
partlist = []
for k, v in iteritems(api_query):
if k in ('title', 'text', 'summary'):
# title, text and summary values in the request
# should be utf-8 encoded
part = (k,
(None, v.encode('utf-8'),
'text/plain; charset=UTF-8',
{'Content-Transfer-Encoding': '8bit'}
)
)
else:
part = (k, (None, v))
partlist.append(part)
auth1 = OAuth1(
self.consumer_token.key,
client_secret=self.consumer_token.secret,
resource_owner_key=session['mwoauth_access_token']['key'],
resource_owner_secret=session['mwoauth_access_token']['secret'])
return Request(
url=url, files=partlist, auth=auth1, method="post").prepare()
|
Send an API query, e.g. {'action': 'query', 'meta': 'userinfo'}; format=json is not required.
The function returns a Python dict that resembles the API's JSON response.
def request(self, api_query, url=None):
"""
Send an API query, e.g. {'action': 'query', 'meta': 'userinfo'}; format=json is not required.
The function returns a Python dict that resembles the API's JSON response.
"""
api_query['format'] = 'json'
if url is not None:
api_url = url + "/api.php"
else:
api_url = self.api_url
size = sum([sys.getsizeof(v) for k, v in iteritems(api_query)])
if size > (1024 * 8):
# if request is bigger than 8 kB (the limit is somewhat arbitrary,
# see https://www.mediawiki.org/wiki/API:Edit#Large_texts) then
# transmit as multipart message
req = self._prepare_long_request(url=api_url,
api_query=api_query)
req.send()
if self.return_json:
return req.response.json()
else:
return req.response.text
else:
auth1 = OAuth1(
self.consumer_token.key,
client_secret=self.consumer_token.secret,
resource_owner_key=session['mwoauth_access_token']['key'],
resource_owner_secret=session['mwoauth_access_token']['secret'])
if self.return_json:
return requests.post(api_url, data=api_query, auth=auth1).json()
else:
return requests.post(api_url, data=api_query, auth=auth1).text
|
Convert a single Physical Information Object, or a list of such objects, into a JSON-encoded text file.
:param pif: Object or list of objects to serialize.
:param fp: File-like object supporting .write() method to write the serialized object(s) to.
:param kwargs: Any options available to json.dump().
def dump(pif, fp, **kwargs):
"""
Convert a single Physical Information Object, or a list of such objects, into a JSON-encoded text file.
:param pif: Object or list of objects to serialize.
:param fp: File-like object supporting .write() method to write the serialized object(s) to.
:param kwargs: Any options available to json.dump().
"""
return json.dump(pif, fp, cls=PifEncoder, **kwargs)
|
Convert content in a JSON-encoded text file to a Physical Information Object or a list of such objects.
:param fp: File-like object supporting .read() method to deserialize from.
:param class_: Subclass of :class:`.Pio` to produce, if not unambiguous
:param kwargs: Any options available to json.load().
:return: Single object derived from :class:`.Pio` or a list of such objects.
def load(fp, class_=None, **kwargs):
"""
Convert content in a JSON-encoded text file to a Physical Information Object or a list of such objects.
:param fp: File-like object supporting .read() method to deserialize from.
:param class_: Subclass of :class:`.Pio` to produce, if not unambiguous
:param kwargs: Any options available to json.load().
:return: Single object derived from :class:`.Pio` or a list of such objects.
"""
return loado(json.load(fp, **kwargs), class_=class_)
|
Convert content in a JSON-encoded string to a Physical Information Object or a list of such objects.
:param s: String to deserialize.
:param class_: Subclass of :class:`.Pio` to produce, if not unambiguous
:param kwargs: Any options available to json.loads().
:return: Single object derived from :class:`.Pio` or a list of such objects.
def loads(s, class_=None, **kwargs):
"""
Convert content in a JSON-encoded string to a Physical Information Object or a list of such objects.
:param s: String to deserialize.
:param class_: Subclass of :class:`.Pio` to produce, if not unambiguous
:param kwargs: Any options available to json.loads().
:return: Single object derived from :class:`.Pio` or a list of such objects.
"""
return loado(json.loads(s, **kwargs), class_=class_)
|
Convert a dictionary or a list of dictionaries into a single Physical Information Object or a list of such objects.
:param obj: Dictionary or list to convert to Physical Information Objects.
:param class_: Subclass of :class:`.Pio` to produce, if not unambiguous
:return: Single object derived from :class:`.Pio` or a list of such objects.
def loado(obj, class_=None):
"""
Convert a dictionary or a list of dictionaries into a single Physical Information Object or a list of such objects.
:param obj: Dictionary or list to convert to Physical Information Objects.
:param class_: Subclass of :class:`.Pio` to produce, if not unambiguous
:return: Single object derived from :class:`.Pio` or a list of such objects.
"""
if isinstance(obj, list):
return [_dict_to_pio(i, class_=class_) for i in obj]
elif isinstance(obj, dict):
return _dict_to_pio(obj, class_=class_)
else:
raise ValueError('expecting list or dictionary as outermost structure')
|
Convert a single dictionary object to a Physical Information Object.
:param d: Dictionary to convert.
:param class_: Subclass of :class:`.Pio` to produce, if not unambiguous
:return: Single object derived from :class:`.Pio`.
def _dict_to_pio(d, class_=None):
"""
Convert a single dictionary object to a Physical Information Object.
:param d: Dictionary to convert.
:param class_: Subclass of :class:`.Pio` to produce, if not unambiguous
:return: Single object derived from :class:`.Pio`.
"""
d = keys_to_snake_case(d)
if class_:
return class_(**d)
if 'category' not in d:
raise ValueError('Dictionary does not contain a category field: ' + ', '.join(d.keys()))
elif d['category'] == 'system':
return System(**d)
elif d['category'] == 'system.chemical':
return ChemicalSystem(**d)
elif d['category'] == 'system.chemical.alloy': # Legacy support
return Alloy(**d)
elif d['category'] == 'system.chemical.alloy.phase': # Legacy support
return ChemicalSystem(**d)
raise ValueError('Dictionary does not contain a valid top-level category: ' + str(d['category']))
|
Utility function to extract command from docopt arguments.
:param arguments:
:return: command
def get_command(arguments):
"""Utility function to extract command from docopt arguments.
:param arguments:
:return: command
"""
cmds = list(filter(lambda k: not (k.startswith('-') or
k.startswith('<')) and arguments[k],
arguments.keys()))
if len(cmds) != 1:
raise Exception('invalid command line!')
return cmds[0]
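For illustration, a hypothetical docopt arguments dict and the same command filter applied to it (keys are invented for the example):
# docopt marks options with '-', positionals with '<>', and commands bare
arguments = {'--verbose': True, '<stack>': 'my-stack', 'deploy': True, 'delete': False}
cmds = [k for k in arguments
        if not (k.startswith('-') or k.startswith('<')) and arguments[k]]
assert cmds == ['deploy']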
|
Dispatch arguments parsed by docopt to the cmd with matching spec.
:param arguments:
:param kwargs:
:return: exit_code
def dispatch(cls, arguments, **kwargs):
"""Dispatch arguments parsed by docopt to the cmd with matching spec.
:param arguments:
:param kwargs:
:return: exit_code
"""
# first match wins
# spec: all '-' elements must match, all others are False;
# '<sth>' elements are converted to call args on order of
# appearance
#
# kwargs are provided to dispatch call and used in func call
for spec, func in cls._specs:
# if command and arguments.get(command) and match(args):
args = [] # specified args in order of appearance
options = list(filter(lambda k: k.startswith('-') and
(arguments[k] or k in spec),
arguments.keys()))
cmds = list(filter(lambda k: not (k.startswith('-') or
k.startswith('<')) and arguments[k],
arguments.keys()))
args_spec = list(filter(lambda k: k.startswith('<'), spec))
cmd_spec = list(filter(lambda k: not (k.startswith('-') or
k.startswith('<')), spec))
for element in spec:
if element.startswith('-'):
# element is an option
if element in options:
args.append(arguments.get(element, False))
options.remove(element)
elif element.startswith('<') and \
not arguments.get(element) is False:
# element is an argument
args.append(arguments.get(element))
if element in args_spec:
args_spec.remove(element)
else:
# element is a command
if element in cmds and element in cmd_spec:
cmds.remove(element)
cmd_spec.remove(element)
if options:
continue # not all options have been matched
if cmds:
continue # not all cmds from command line have been matched
if args_spec:
continue # not all args from spec have been provided
if cmd_spec:
continue # not all cmds from spec have been provided
# all options and cmds matched : call the cmd
# TODO leave out all args to deal with "empty" signature
exit_code = func(*args, **kwargs)
return exit_code
# no matching spec found
raise Exception('No implementation for spec: %s' % arguments)
|
Retrieves an attribute from an existing stack
:param parameter: The output parameter which should be retrieved
:param as_reference: Is the parameter a reference (Default) or a string
:return: Value of parameter to retrieve
def get_att(self, parameter, as_reference=True):
"""Retrieves an attribute from an existing stack
:param parameter: The output parameter which should be retrieved
:param as_reference: Is the parameter a reference (Default) or a string
:return: Value of parameter to retrieve
"""
if as_reference:
return troposphere.GetAtt(
self.__custom_stack_obj,
troposphere.Ref(parameter)
)
else:
return troposphere.GetAtt(
self.__custom_stack_obj,
parameter
)
|
if 'key_filters' in self._config:
filters_spec = {'Key': {'FilterRules': [] } }
# I do not think this is a useful structure:
for filter in self._config['key_filters']:
if 'type' in filter and 'value' in filter and filter['type'] in ('prefix', 'suffix'):
rule = {'Name': filter['type'].capitalize(), 'Value': filter['value'] }
filters_spec['Key']['FilterRules'].append(rule)
notification_spec['Filter'] = filters_spec
def _get_notification_spec(self, lambda_arn):
lambda_name = base.get_lambda_name(lambda_arn)
notification_spec = {
'Id': self._make_notification_id(lambda_name),
'Events': [e for e in self._config['events']],
'LambdaFunctionArn': lambda_arn
}
# Add S3 key filters
filter_rules = []
# look for filter rules
for filter_type in ['prefix', 'suffix']:
if filter_type in self._config:
rule = {'Name': filter_type.capitalize(), 'Value': self._config[filter_type] }
filter_rules.append(rule)
if filter_rules:
notification_spec['Filter'] = {'Key': {'FilterRules': filter_rules } }
'''
if 'key_filters' in self._config:
filters_spec = {'Key': {'FilterRules': [] } }
# I do not think this is a useful structure:
for filter in self._config['key_filters']:
if 'type' in filter and 'value' in filter and filter['type'] in ('prefix', 'suffix'):
rule = {'Name': filter['type'].capitalize(), 'Value': filter['value'] }
filters_spec['Key']['FilterRules'].append(rule)
notification_spec['Filter'] = filters_spec
'''
return notification_spec
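The filter-rule construction above yields the S3 notification 'Filter' structure; a standalone sketch with a hypothetical config dict:
config = {'events': ['s3:ObjectCreated:*'], 'prefix': 'incoming/', 'suffix': '.csv'}

filter_rules = [
    {'Name': filter_type.capitalize(), 'Value': config[filter_type]}
    for filter_type in ('prefix', 'suffix') if filter_type in config
]
notification_spec = {'Events': list(config['events'])}
if filter_rules:
    notification_spec['Filter'] = {'Key': {'FilterRules': filter_rules}}

assert notification_spec['Filter']['Key']['FilterRules'][0] == {'Name': 'Prefix', 'Value': 'incoming/'}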
|
Return the proper representation for the given integer
def convert_representation(self, i):
"""
Return the proper representation for the given integer
"""
if self.number_representation == 'unsigned':
return i
elif self.number_representation == 'signed':
if i & (1 << self.interpreter._bit_width - 1):
return -((~i + 1) & (2**self.interpreter._bit_width - 1))
else:
return i
elif self.number_representation == 'hex':
return hex(i)
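The signed branch above is a plain two's-complement reinterpretation; a standalone check assuming a 32-bit register width:
bit_width = 32  # assumed register width

def as_signed(i):
    if i & (1 << (bit_width - 1)):          # top bit set -> negative value
        return -((~i + 1) & (2 ** bit_width - 1))
    return i

assert as_signed(0xFFFFFFFF) == -1
assert as_signed(0x80000000) == -2147483648
assert as_signed(0x7FFFFFFF) == 2147483647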
|
Set the generate random flag, unset registers and memory will return a random value.
Usage:
Call the magic by itself or with `true` to have registers and memory return a random value
if they are unset and read from, much like how real hardware would work.
Defaults to False, or to not generate random values
`%generate_random`
or
`%generate_random true`
or
`%generate_random false`
def magic_generate_random(self, line):
"""
Set the generate random flag, unset registers and memory will return a random value.
Usage:
Call the magic by itself or with `true` to have registers and memory return a random value
if they are unset and read from, much like how real hardware would work.
Defaults to False, or to not generate random values
`%generate_random`
or
`%generate_random true`
or
`%generate_random false`
"""
line = line.strip().lower()
if not line or line == 'true':
self.interpreter.generate_random = True
elif line == 'false':
self.interpreter.generate_random = False
else:
stream_content = {'name': 'stderr', 'text': "unknown value '{}'".format(line)}
self.send_response(self.iopub_socket, 'stream', stream_content)
return {'status': 'error',
'execution_count': self.execution_count,
'ename': ValueError.__name__,
'evalue': "unknown value '{}'".format(line),
'traceback': '???'}
|
Postpone execution of instructions until explicitly run
Usage:
Call this magic with `true` or nothing to postpone execution,
or call with `false` to execute each instruction when evaluated.
This defaults to True.
Note that each cell is only executed after all lines in
the cell have been evaluated properly.
`%postpone_execution`
or
`%postpone_execution true`
or
`%postpone_execution false`
def magic_postpone_execution(self, line):
"""
Postpone execution of instructions until explicitly run
Usage:
Call this magic with `true` or nothing to postpone execution,
or call with `false` to execute each instruction when evaluated.
This defaults to True.
Note that each cell is only executed after all lines in
the cell have been evaluated properly.
`%postpone_execution`
or
`%postpone_execution true`
or
`%postpone_execution false`
"""
line = line.strip().lower()
if not line or line == 'true':
self.interpreter.postpone_execution = True
elif line == 'false':
self.interpreter.postpone_execution = False
else:
stream_content = {'name': 'stderr', 'text': "unknown value '{}'".format(line)}
self.send_response(self.iopub_socket, 'stream', stream_content)
return {'status': 'error',
'execution_count': self.execution_count,
'ename': ValueError.__name__,
'evalue': "unknown value '{}'".format(line),
'traceback': '???'}
|
Print out the current value of a register
Usage:
Pass in a register, or a list of registers separated by spaces
A range of registers can be entered by separating them with a hyphen
`%reg R1`
or
`%reg R0 R5 R6`
or
`%reg R8-R12`
def magic_register(self, line):
"""
Print out the current value of a register
Usage:
Pass in a register, or a list of registers separated by spaces
A range of registers can be entered by separating them with a hyphen
`%reg R1`
or
`%reg R0 R5 R6`
or
`%reg R8-R12`
"""
message = ""
for reg in [i.strip() for i in line.replace(',', '').split()]:
if '-' in reg:
# We have a range (Rn-Rk)
r1, r2 = reg.split('-')
# TODO do we want to allow just numbers?
n1 = re.search(self.interpreter.REGISTER_REGEX, r1).groups()[0]
n2 = re.search(self.interpreter.REGISTER_REGEX, r2).groups()[0]
n1 = self.interpreter.convert_to_integer(n1)
n2 = self.interpreter.convert_to_integer(n2)
for i in range(n1, n2+1):
val = self.interpreter.register[r1[0] + str(i)]
val = self.convert_representation(val)
message += "{}: {}\n".format(r1[0] + str(i), val)
else:
val = self.interpreter.register[reg]
val = self.convert_representation(val)
message += "{}: {}\n".format(reg, val)
stream_content = {'name': 'stdout', 'text': message}
self.send_response(self.iopub_socket, 'stream', stream_content)
|
Print out the current value of memory
Usage:
Pass in the byte addresses of memory to read, separated by spaces
A range of addresses can be entered by separating them with a hyphen
`%mem 4 5`
or
`%mem 8-12`
def magic_memory(self, line):
"""
Print out the current value of memory
Usage:
Pass in the byte addresses of memory to read, separated by spaces
A range of addresses can be entered by separating them with a hyphen
`%mem 4 5`
or
`%mem 8-12`
"""
# TODO add support for directives
message = ""
for address in [i.strip() for i in line.replace(',', '').split()]:
if '-' in address:
# We have a range (n-k)
m1, m2 = address.split('-')
n1 = re.search(self.interpreter.IMMEDIATE_NUMBER, m1).groups()[0]
n2 = re.search(self.interpreter.IMMEDIATE_NUMBER, m2).groups()[0]
n1 = self.interpreter.convert_to_integer(n1)
n2 = self.interpreter.convert_to_integer(n2)
for i in range(n1, n2 + 1):
val = self.interpreter.memory[i]
val = self.convert_representation(val)
message += "{}: {}\n".format(str(i), val)
else:
# TODO fix what is the key for memory (currently it's an int, but registers are strings, should it be the same?)
val = self.interpreter.memory[self.interpreter.convert_to_integer(address)]
val = self.convert_representation(val)
message += "{}: {}\n".format(address, val)
stream_content = {'name': 'stdout', 'text': message}
self.send_response(self.iopub_socket, 'stream', stream_content)
|
Run the current program
Usage:
Call with a number to run that many steps,
or call with no arguments to run to the end of the current program
`%run`
or
`%run 1`
def magic_run(self, line):
"""
Run the current program
Usage:
Call with a number to run that many steps,
or call with no arguments to run to the end of the current program
`%run`
or
`%run 1`
"""
i = float('inf')
if line.strip():
i = int(line)
try:
with warnings.catch_warnings(record=True) as w:
self.interpreter.run(i)
for warning_message in w:
# TODO should this be stdout or stderr
stream_content = {'name': 'stdout', 'text': 'Warning: ' + str(warning_message.message) + '\n'}
self.send_response(self.iopub_socket, 'stream', stream_content)
except iarm.exceptions.EndOfProgram as e:
f_name = self.interpreter.program[self.interpreter.register['PC'] - 1].__name__
f_name = f_name[:f_name.find('_')]
message = "Error in {}: ".format(f_name)
stream_content = {'name': 'stdout', 'text': message + str(e) + '\n'}
self.send_response(self.iopub_socket, 'stream', stream_content)
except Exception as e:
for err in e.args:
stream_content = {'name': 'stderr', 'text': str(err)}
self.send_response(self.iopub_socket, 'stream', stream_content)
return {'status': 'error',
'execution_count': self.execution_count,
'ename': type(e).__name__,
'evalue': str(e),
'traceback': '???'}
|
Print out the help for magics
Usage:
Call help with no arguments to list all magics,
or call it with a magic to print out its help info.
`%help`
or
`%help run`
def magic_help(self, line):
"""
Print out the help for magics
Usage:
Call help with no arguments to list all magics,
or call it with a magic to print out its help info.
`%help`
or
`%help run`
"""
line = line.strip()
if not line:
for magic in self.magics:
stream_content = {'name': 'stdout', 'text': "%{}\n".format(magic)}
self.send_response(self.iopub_socket, 'stream', stream_content)
elif line in self.magics:
# its a magic
stream_content = {'name': 'stdout', 'text': "{}\n{}".format(line, self.magics[line].__doc__)}
self.send_response(self.iopub_socket, 'stream', stream_content)
elif line in self.interpreter.ops:
# it's an instruction
stream_content = {'name': 'stdout', 'text': "{}\n{}".format(line, self.interpreter.ops[line].__doc__)}
self.send_response(self.iopub_socket, 'stream', stream_content)
else:
stream_content = {'name': 'stderr', 'text': "'{}' not a known magic or instruction".format(line)}
self.send_response(self.iopub_socket, 'stream', stream_content)
|
Export the API design as swagger file.
:param api_name:
:param stage_name:
:param api_description:
:param lambdas:
:param custom_hostname:
:param custom_base_path:
def export_to_swagger(awsclient, api_name, stage_name, api_description,
lambdas, custom_hostname=False, custom_base_path=False):
"""Export the API design as swagger file.
:param api_name:
:param stage_name:
:param api_description:
:param lambdas:
:param custom_hostname:
:param custom_base_path:
"""
print('Exporting to swagger...')
api = _api_by_name(awsclient, api_name)
if api is not None:
print(json2table(api))
api_id = api['id']
client_api = awsclient.get_client('apigateway')
template_variables = _template_variables_to_dict(
client_api,
api_name,
api_description,
stage_name,
api_id,
lambdas,
custom_hostname,
custom_base_path)
content = _compile_template(SWAGGER_FILE, template_variables)
with open('swagger_export.yaml', 'w') as swagger_file:
swagger_file.write(content)
else:
print('API name unknown')
|
List APIs in account.
def list_apis(awsclient):
"""List APIs in account."""
client_api = awsclient.get_client('apigateway')
apis = client_api.get_rest_apis()['items']
for api in apis:
print(json2table(api))
|
Deploy API Gateway to AWS cloud.
:param awsclient:
:param api_name:
:param api_description:
:param stage_name:
:param api_key:
:param lambdas:
:param cache_cluster_enabled:
:param cache_cluster_size:
:param method_settings:
def deploy_api(awsclient, api_name, api_description, stage_name, api_key,
lambdas, cache_cluster_enabled, cache_cluster_size, method_settings=None):
"""Deploy API Gateway to AWS cloud.
:param awsclient:
:param api_name:
:param api_description:
:param stage_name:
:param api_key:
:param lambdas:
:param cache_cluster_enabled:
:param cache_cluster_size:
:param method_settings:
"""
if not _api_exists(awsclient, api_name):
if os.path.isfile(SWAGGER_FILE):
# this does an import from swagger file
# the next step does not make sense since there is a check in
# _import_from_swagger for if api is existent!
# _create_api(api_name=api_name, api_description=api_description)
_import_from_swagger(awsclient, api_name, api_description,
stage_name, lambdas)
else:
print('No swagger file (%s) found' % SWAGGER_FILE)
api = _api_by_name(awsclient, api_name)
if api is not None:
_ensure_lambdas_permissions(awsclient, lambdas, api)
_create_deployment(awsclient, api_name, stage_name, cache_cluster_enabled, cache_cluster_size)
_update_stage(awsclient, api['id'], stage_name, method_settings)
_wire_api_key(awsclient, api_name, api_key, stage_name)
else:
print('API name unknown')
else:
if os.path.isfile(SWAGGER_FILE):
_update_from_swagger(awsclient, api_name, api_description,
stage_name, lambdas)
else:
_update_api()
api = _api_by_name(awsclient, api_name)
if api is not None:
_ensure_lambdas_permissions(awsclient, lambdas, api)
_create_deployment(awsclient, api_name, stage_name, cache_cluster_enabled, cache_cluster_size)
_update_stage(awsclient, api['id'], stage_name, method_settings)
else:
print('API name unknown')
|
Delete the API.
:param api_name:
def delete_api(awsclient, api_name):
"""Delete the API.
:param api_name:
"""
_sleep()
client_api = awsclient.get_client('apigateway')
print('deleting api: %s' % api_name)
api = _api_by_name(awsclient, api_name)
if api is not None:
print(json2table(api))
response = client_api.delete_rest_api(
restApiId=api['id']
)
print(json2table(response))
else:
print('API name unknown')
|
Create a new API key as reference for api.conf.
:param api_name:
:param api_key_name:
:return: api_key
def create_api_key(awsclient, api_name, api_key_name):
"""Create a new API key as reference for api.conf.
:param api_name:
:param api_key_name:
:return: api_key
"""
_sleep()
client_api = awsclient.get_client('apigateway')
print('create api key: %s' % api_key_name)
response = client_api.create_api_key(
name=api_key_name,
description='Created for ' + api_name,
enabled=True
)
#print(json2table(response))
print('Add this api key \'%s\' to your api.conf' % response['id'])
return response['id']
|
Remove API key.
:param api_key:
def delete_api_key(awsclient, api_key):
"""Remove API key.
:param api_key:
"""
_sleep()
client_api = awsclient.get_client('apigateway')
print('delete api key: %s' % api_key)
response = client_api.delete_api_key(
apiKey=api_key
)
print(json2table(response))
|
Print the defined API keys.
def list_api_keys(awsclient):
"""Print the defined API keys.
"""
_sleep()
client_api = awsclient.get_client('apigateway')
print('listing api keys')
response = client_api.get_api_keys()['items']
for item in response:
print(json2table(item))
|
Add custom domain to your API.
:param api_name:
:param api_target_stage:
:param api_base_path:
:param domain_name:
:param route_53_record:
:param ssl_cert:
:param cert_name:
:param cert_arn:
:param hosted_zone_id:
:return: exit_code
def deploy_custom_domain(awsclient, api_name, api_target_stage,
api_base_path, domain_name, route_53_record,
cert_name, cert_arn, hosted_zone_id, ensure_cname):
"""Add custom domain to your API.
:param api_name:
:param api_target_stage:
:param api_base_path:
:param domain_name:
:param route_53_record:
:param ssl_cert:
:param cert_name:
:param cert_arn:
:param hosted_zone_id:
:return: exit_code
"""
api_base_path = _basepath_to_string_if_null(api_base_path)
api = _api_by_name(awsclient, api_name)
if not api:
print("Api %s does not exist, aborting..." % api_name)
# exit(1)
return 1
domain = _custom_domain_name_exists(awsclient, domain_name)
if not domain:
response = _create_custom_domain(awsclient, domain_name, cert_name, cert_arn)
cloudfront_distribution = response['distributionDomainName']
else:
response = _update_custom_domain(awsclient, domain_name, cert_name, cert_arn)
cloudfront_distribution = response['distributionDomainName']
if _base_path_mapping_exists(awsclient, domain_name, api_base_path):
_ensure_correct_base_path_mapping(awsclient, domain_name,
api_base_path, api['id'],
api_target_stage)
else:
_create_base_path_mapping(awsclient, domain_name, api_base_path,
api_target_stage, api['id'])
if ensure_cname:
record_exists, record_correct = \
_record_exists_and_correct(awsclient, hosted_zone_id,
route_53_record,
cloudfront_distribution)
if record_correct:
print('Route53 record correctly set: %s --> %s' % (route_53_record,
cloudfront_distribution))
else:
_ensure_correct_route_53_record(awsclient, hosted_zone_id,
record_name=route_53_record,
record_value=cloudfront_distribution)
print('Route53 record set: %s --> %s' % (route_53_record,
cloudfront_distribution))
else:
print('Skipping creating and checking DNS record')
return 0
|
Get the list of lambda functions.
:param config:
:param add_arn:
:return: list containing lambda entries
def get_lambdas(awsclient, config, add_arn=False):
"""Get the list of lambda functions.
:param config:
:param add_arn:
:return: list containing lambda entries
"""
if 'lambda' in config:
client_lambda = awsclient.get_client('lambda')
lambda_entries = config['lambda'].get('entries', [])
lmbdas = []
for lambda_entry in lambda_entries:
lmbda = {
'name': lambda_entry.get('name', None),
'alias': lambda_entry.get('alias', None),
'swagger_ref': lambda_entry.get('swaggerRef', None)
}
if add_arn:
_sleep()
response_lambda = client_lambda.get_function(
FunctionName=lmbda['name'])
lmbda['arn'] = response_lambda['Configuration']['FunctionArn']
lmbdas.append(lmbda)
return lmbdas
else:
return []
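A minimal sketch of the config shape this helper expects; the key names mirror the lookups above, the values are made up, and awsclient is assumed to be the usual gcdt AWSClient instance:
sample_config = {
    'lambda': {
        'entries': [
            {'name': 'my-function', 'alias': 'ACTIVE', 'swaggerRef': 'mySwaggerRef'},  # hypothetical entry
        ]
    }
}
# With add_arn left at False no get_function call is made, and the result is:
# [{'name': 'my-function', 'alias': 'ACTIVE', 'swagger_ref': 'mySwaggerRef'}]
lambdas = get_lambdas(awsclient, sample_config)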
|
Helper to apply method_settings to stage
:param awsclient:
:param api_id:
:param stage_name:
:param method_settings:
:return:
def _update_stage(awsclient, api_id, stage_name, method_settings):
"""Helper to apply method_settings to stage
:param awsclient:
:param api_id:
:param stage_name:
:param method_settings:
:return:
"""
# settings docs in response: https://botocore.readthedocs.io/en/latest/reference/services/apigateway.html#APIGateway.Client.update_stage
client_api = awsclient.get_client('apigateway')
operations = _convert_method_settings_into_operations(method_settings)
if operations:
print('update method settings for stage')
_sleep()
response = client_api.update_stage(
restApiId=api_id,
stageName=stage_name,
patchOperations=operations)
|
Helper to handle the conversion of method_settings to operations
:param method_settings:
:return: list of operations
def _convert_method_settings_into_operations(method_settings=None):
"""Helper to handle the conversion of method_settings to operations
:param method_settings:
:return: list of operations
"""
# operations docs here: https://tools.ietf.org/html/rfc6902#section-4
operations = []
if method_settings:
for method in method_settings.keys():
for key, value in method_settings[method].items():
if isinstance(value, bool):
if value:
value = 'true'
else:
value = 'false'
operations.append({
'op': 'replace',
'path': method + _resolve_key(key),
'value': value
})
return operations
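A small worked sketch of the conversion; the method key and setting name are hypothetical, and the exact patch path suffix depends on _resolve_key, which is defined elsewhere:
method_settings = {
    '/~1items/GET': {'cachingEnabled': True}   # hypothetical method and setting
}
ops = _convert_method_settings_into_operations(method_settings)
# ops contains one RFC 6902 'replace' operation: the boolean True is
# stringified to 'true' and the path is '/~1items/GET' plus whatever
# suffix _resolve_key('cachingEnabled') yields.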
|
This command is run when ``default_path`` doesn't exist, or ``init`` is
run and returns a string representing the default data to put into their
settings file.
def generate_settings():
"""
This command is run when ``default_path`` doesn't exist, or ``init`` is
run and returns a string representing the default data to put into their
settings file.
"""
conf_file = os.path.join(os.path.dirname(base_settings.__file__),
'example', 'conf.py')
conf_template = open(conf_file).read()
default_url = 'http://salmon.example.com'
site_url = raw_input("What will be the URL for Salmon? [{0}]".format(
default_url))
site_url = site_url or default_url
secret_key = base64.b64encode(os.urandom(KEY_LENGTH))
api_key = base64.b64encode(os.urandom(KEY_LENGTH))
output = conf_template.format(api_key=api_key, secret_key=secret_key,
site_url=site_url)
return output
|
Builds up the settings using the same method as logan
def configure_app(**kwargs):
"""Builds up the settings using the same method as logan"""
sys_args = sys.argv
args, command, command_args = parse_args(sys_args[1:])
parser = OptionParser()
parser.add_option('--config', metavar='CONFIG')
(options, logan_args) = parser.parse_args(args)
config_path = options.config
logan_configure(config_path=config_path, **kwargs)
|
Stores current values for comparison later
def _reset_changes(self):
"""Stores current values for comparison later"""
self._original = {}
if self.last_updated is not None:
self._original['last_updated'] = self.last_updated
|
Build a file path to the Whisper database
def whisper_filename(self):
"""Build a file path to the Whisper database"""
source_name = self.source_id and self.source.name or ''
return get_valid_filename("{0}__{1}.wsp".format(source_name,
self.name))
|
Human friendly value output
def get_value_display(self):
"""Human friendly value output"""
if self.display_as == 'percentage':
return '{0}%'.format(self.latest_value)
if self.display_as == 'boolean':
return bool(self.latest_value)
if self.display_as == 'byte':
return defaultfilters.filesizeformat(self.latest_value)
if self.display_as == 'second':
return time.strftime('%H:%M:%S', time.gmtime(self.latest_value))
return self.latest_value
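A rough illustration with a hypothetical metric instance (the filesizeformat output may vary slightly between Django versions):
metric.display_as, metric.latest_value = 'second', 3661
metric.get_value_display()       # '01:01:01'
metric.display_as = 'percentage'
metric.get_value_display()       # '3661%'
metric.display_as = 'byte'
metric.get_value_display()       # something like '3.6 KB' via filesizeformat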
|
Time between current `last_updated` and previous `last_updated`
def time_between_updates(self):
"""Time between current `last_updated` and previous `last_updated`"""
if 'last_updated' not in self._original:
return 0
last_update = self._original['last_updated']
this_update = self.last_updated
return this_update - last_update
|
Apply the transformation (if it exists) to the latest_value
def do_transform(self):
"""Apply the transformation (if it exists) to the latest_value"""
if not self.transform:
return
try:
self.latest_value = utils.Transform(
expr=self.transform, value=self.latest_value,
timedelta=self.time_between_updates().total_seconds()).result()
except (TypeError, ValueError):
logger.warn("Invalid transformation '%s' for metric %s",
self.transform, self.pk)
self.transform = ''
|
Update latest value to the diff between it and the previous value
def do_counter_conversion(self):
"""Update latest value to the diff between it and the previous value"""
if self.is_counter:
if self._previous_counter_value is None:
prev_value = self.latest_value
else:
prev_value = self._previous_counter_value
self._previous_counter_value = self.latest_value
self.latest_value = self.latest_value - prev_value
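An illustrative sequence for a counter metric (hypothetical instance; the first conversion always yields 0 because there is no previous value yet):
metric.is_counter = True
metric._previous_counter_value = None
metric.latest_value = 100
metric.do_counter_conversion()   # latest_value -> 0 (100 - 100)
metric.latest_value = 130
metric.do_counter_conversion()   # latest_value -> 30 (130 - 100)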
|
Relay source group signals to the appropriate spec strategy.
def source_group_receiver(self, sender, source, signal, **kwargs):
"""
Relay source group signals to the appropriate spec strategy.
"""
from imagekit.cachefiles import ImageCacheFile
source_group = sender
# Ignore signals from unregistered groups.
if source_group not in self._source_groups:
return
#OVERRIDE HERE -- pass specs into generator object
specs = [generator_registry.get(id, source=source, specs=spec_data_field_hash[id]) for id in
self._source_groups[source_group]]
callback_name = self._signals[signal]
#END OVERRIDE
for spec in specs:
file = ImageCacheFile(spec)
call_strategy_method(file, callback_name)
|
Substitute variables with numeric values
def replace_variable(self, variable):
"""Substitute variables with numeric values"""
if variable == 'x':
return self.value
if variable == 't':
return self.timedelta
raise ValueError("Invalid variable %s", variable)
|
Evaluate expression and return result
def result(self):
"""Evaluate expression and return result"""
# Module(body=[Expr(value=...)])
return self.eval_(ast.parse(self.expr).body[0].value)
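Based on the call site in do_transform further above, a transform evaluates roughly like this (a sketch; Transform is assumed to be the class these two methods belong to, and 'x / t' is a made-up expression):
t = Transform(expr='x / t', value=300.0, timedelta=60.0)
t.result()    # 5.0 -- 'x' is substituted with value, 't' with timedelta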
|
Get user information
:param user_alias: user ID
:return:
def get_people(self, user_alias=None):
"""
Get user information
:param user_alias: user ID
:return:
"""
user_alias = user_alias or self.api.user_alias
content = self.api.req(API_PEOPLE_HOME % user_alias).content
xml = self.api.to_xml(re.sub(b'<br />', b'\n', content))
try:
xml_user = xml.xpath('//*[@id="profile"]')
if not xml_user:
return None
else:
xml_user = xml_user[0]
avatar = first(xml_user.xpath('.//img/@src'))
city = first(xml_user.xpath('.//div[@class="user-info"]/a/text()'))
city_url = first(xml_user.xpath('.//div[@class="user-info"]/a/@href'))
text_created_at = xml_user.xpath('.//div[@class="pl"]/text()')[1]
created_at = re.match(r'.+(?=加入)', text_created_at.strip()).group()
xml_intro = first(xml.xpath('//*[@id="intro_display"]'))
intro = xml_intro.xpath('string(.)') if xml_intro is not None else None
nickname = first(xml.xpath('//*[@id="db-usr-profile"]//h1/text()'), '').strip() or None
signature = first(xml.xpath('//*[@id="display"]/text()'))
xml_contact_count = xml.xpath('//*[@id="friend"]/h2')[0]
contact_count = int(re.search(r'成员(\d+)', xml_contact_count.xpath('string(.)')).groups()[0])
text_rev_contact_count = xml.xpath('//p[@class="rev-link"]/a/text()')[0]
rev_contact_count = int(re.search(r'(\d+)人关注', text_rev_contact_count.strip()).groups()[0])
return {
'alias': user_alias,
'url': API_PEOPLE_HOME % user_alias,
'avatar': avatar,
'city': city,
'city_url': city_url,
'created_at': created_at,
'intro': intro,
'nickname': nickname,
'signature': signature,
'contact_count': contact_count,
'rev_contact_count': rev_contact_count,
}
except Exception as e:
self.api.logger.exception('parse people meta error: %s' % e)
|
Given a request, an email and optionally some additional data, ensure that
a user with the email address exists, and authenticate & login them right
away if the user is active.
Returns a tuple consisting of ``(user, created)`` upon success or ``(None,
None)`` when authentication fails.
def email_login(request, *, email, **kwargs):
"""
Given a request, an email and optionally some additional data, ensure that
a user with the email address exists, and authenticate & login them right
away if the user is active.
Returns a tuple consisting of ``(user, created)`` upon success or ``(None,
None)`` when authentication fails.
"""
_u, created = auth.get_user_model()._default_manager.get_or_create(email=email)
user = auth.authenticate(request, email=email)
if user and user.is_active: # The is_active check is possibly redundant.
auth.login(request, user)
return user, created
return None, None
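A hypothetical Django view using this helper might look as follows; the view name and responses are illustrative only:
from django.http import HttpResponse, HttpResponseForbidden

def magic_link_view(request):
    user, created = email_login(request, email=request.POST['email'])
    if user is None:
        return HttpResponseForbidden('login failed')
    return HttpResponse('account created' if created else 'welcome back')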
|
Shows the latest results for each source
def dashboard(request):
"""Shows the latest results for each source"""
sources = (models.Source.objects.all().prefetch_related('metric_set')
.order_by('name'))
metrics = SortedDict([(src, src.metric_set.all()) for src in sources])
no_source_metrics = models.Metric.objects.filter(source__isnull=True)
if no_source_metrics:
metrics[''] = no_source_metrics
if request.META.get('HTTP_X_PJAX', False):
parent_template = 'pjax.html'
else:
parent_template = 'base.html'
return render(request, 'metrics/dashboard.html', {
'source_metrics': metrics,
'parent_template': parent_template
})
|
Create the Whisper file on disk
def _create(self):
"""Create the Whisper file on disk"""
if not os.path.exists(settings.SALMON_WHISPER_DB_PATH):
os.makedirs(settings.SALMON_WHISPER_DB_PATH)
archives = [whisper.parseRetentionDef(retentionDef)
for retentionDef in settings.ARCHIVES.split(",")]
whisper.create(self.path, archives,
xFilesFactor=settings.XFILEFACTOR,
aggregationMethod=settings.AGGREGATION_METHOD)
|
This method stores the datapoints in the current database.
:datapoints: is a list of tuples of epoch timestamp and value
[(1368977629,10)]
def _update(self, datapoints):
"""
This method stores the datapoints in the current database.
:datapoints: is a list of tuples of epoch timestamp and value
[(1368977629,10)]
"""
if len(datapoints) == 1:
timestamp, value = datapoints[0]
whisper.update(self.path, value, timestamp)
else:
whisper.update_many(self.path, datapoints)
|
This method fetches data from the database for the given period
fetch(path, fromTime, untilTime=None)
fromTime is a datetime
untilTime is also a datetime, but defaults to now.
Returns a tuple of (timeInfo, valueList)
where timeInfo is itself a tuple of (fromTime, untilTime, step)
Returns None if no data can be returned
def fetch(self, from_time, until_time=None):
"""
This method fetches data from the database for the given period
fetch(path, fromTime, untilTime=None)
fromTime is a datetime
untilTime is also a datetime, but defaults to now.
Returns a tuple of (timeInfo, valueList)
where timeInfo is itself a tuple of (fromTime, untilTime, step)
Returns None if no data can be returned
"""
until_time = until_time or datetime.now()
time_info, values = whisper.fetch(self.path,
from_time.strftime('%s'),
until_time.strftime('%s'))
# build up a list of (timestamp, value)
start_time, end_time, step = time_info
current = start_time
times = []
while current <= end_time:
times.append(current)
current += step
return zip(times, values)
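A typical call fetching the last hour of datapoints (a sketch; db stands for an instance of this Whisper wrapper):
from datetime import datetime, timedelta

points = db.fetch(datetime.now() - timedelta(hours=1))
for timestamp, value in points:
    print(timestamp, value)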
|
Build a result list with pagination info
:param results: list of items already parsed
:param xml: xml of the original page
:return: {'results': list, 'count': int, 'next_start': int|None}
If count differs from len(results), there are more items
If next_start is not None, the next page can be fetched
def build_list_result(results, xml):
"""
Build a result list with pagination info
:param results: list of items already parsed
:param xml: xml of the original page
:return: {'results': list, 'count': int, 'next_start': int|None}
If count differs from len(results), there are more items
If next_start is not None, the next page can be fetched
"""
xml_count = xml.xpath('//div[@class="paginator"]/span[@class="count"]/text()')
xml_next = xml.xpath('//div[@class="paginator"]/span[@class="next"]/a/@href')
count = int(re.search(r'\d+', xml_count[0]).group()) if xml_count else len(results)
next_start = int(re.search(r'start=(\d+)', xml_next[0]).groups()[0]) if xml_next else None
return {'results': results, 'count': count, 'next_start': next_start}
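A hypothetical result for a page where 25 entries were parsed, the paginator reports 42 in total, and the next page starts at offset 25 (parsed_items and page_xml are placeholders):
page = build_list_result(results=parsed_items, xml=page_xml)
# page == {'results': parsed_items, 'count': 42, 'next_start': 25}
has_more = page['count'] != len(page['results']) or page['next_start'] is not None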
|
ADCS [Ra,] Rb, Rc
Add Rb and Rc + the carry bit and store the result in Ra
Ra, Rb, and Rc must be low registers
if Ra is omitted, then it is assumed to be Rb
def ADCS(self, params):
"""
ADCS [Ra,] Rb, Rc
Add Rb and Rc + the carry bit and store the result in Ra
Ra, Rb, and Rc must be low registers
if Ra is omitted, then it is assumed to be Rb
"""
# This instruction allows for an optional destination register
# If it is omitted, then it is assumed to be Rb
# As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
except iarm.exceptions.ParsingError:
Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
Ra = Rb
self.check_arguments(low_registers=(Ra, Rc))
self.match_first_two_parameters(Ra, Rb)
# ADCS Ra, Ra, Rb
def ADCS_func():
# TODO need to rethink the set_NZCV with the C flag
oper_1 = self.register[Ra]
oper_2 = self.register[Rc]
self.register[Ra] = oper_1 + oper_2
self.register[Ra] += 1 if self.is_C_set() else 0
self.set_NZCV_flags(oper_1, oper_2, self.register[Ra], 'add')
return ADCS_func
|
ADD [Rx,] Ry, [Rz, PC]
ADD [Rx,] [SP, PC], #imm10_4
ADD [SP,] SP, #imm9_4
Add Ry and Rz and store the result in Rx
Rx, Ry, and Rz can be any register
If Rx is omitted, then it is assumed to be Ry
def ADD(self, params):
"""
ADD [Rx,] Ry, [Rz, PC]
ADD [Rx,] [SP, PC], #imm10_4
ADD [SP,] SP, #imm9_4
Add Ry and Rz and store the result in Rx
Rx, Ry, and Rz can be any register
If Rx is omitted, then it is assumed to be Ry
"""
# This instruction allows for an optional destination register
# If it is omitted, then it is assumed to be Rb
# As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
# TODO can we have ADD SP, #imm9_4?
try:
Rx, Ry, Rz = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
except iarm.exceptions.ParsingError:
Ry, Rz = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
Rx = Ry
if self.is_register(Rz):
# ADD Rx, Ry, Rz
self.check_arguments(any_registers=(Rx, Ry, Rz))
if Rx != Ry:
raise iarm.exceptions.RuleError("Second parameter {} does not equal first parameter {}". format(Ry, Rx))
def ADD_func():
self.register[Rx] = self.register[Ry] + self.register[Rz]
else:
if Rx == 'SP':
# ADD SP, SP, #imm9_4
self.check_arguments(imm9_4=(Rz,))
if Rx != Ry:
raise iarm.exceptions.RuleError("Second parameter {} is not SP".format(Ry))
else:
# ADD Rx, [SP, PC], #imm10_4
self.check_arguments(any_registers=(Rx,), imm10_4=(Rz,))
if Ry not in ('SP', 'PC'):
raise iarm.exceptions.RuleError("Second parameter {} is not SP or PC".format(Ry))
def ADD_func():
self.register[Rx] = self.register[Ry] + self.convert_to_integer(Rz[1:])
return ADD_func
|
CMN Ra, Rb
Add the two registers and set the NZCV flags
The result is discarded
Ra and Rb must be low registers
def CMN(self, params):
"""
CMN Ra, Rb
Add the two registers and set the NZCV flags
The result is discarded
Ra and Rb must be low registers
"""
Ra, Rb = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(low_registers=(Ra, Rb))
# CMN Ra, Rb
def CMN_func():
self.set_NZCV_flags(self.register[Ra], self.register[Rb],
self.register[Ra] + self.register[Rb], 'add')
return CMN_func
|
CMP Rm, Rn
CMP Rm, #imm8
Subtract Rn or imm8 from Rm, set the NZCV flags, and discard the result
Rm and Rn can be R0-R14
def CMP(self, params):
"""
CMP Rm, Rn
CMP Rm, #imm8
Subtract Rn or imm8 from Rm, set the NZCV flags, and discard the result
Rm and Rn can be R0-R14
"""
Rm, Rn = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
if self.is_register(Rn):
# CMP Rm, Rn
self.check_arguments(R0_thru_R14=(Rm, Rn))
def CMP_func():
self.set_NZCV_flags(self.register[Rm], self.register[Rn],
self.register[Rm] - self.register[Rn], 'sub')
else:
# CMP Rm, #imm8
self.check_arguments(R0_thru_R14=(Rm,), imm8=(Rn,))
def CMP_func():
tmp = self.convert_to_integer(Rn[1:])
self.set_NZCV_flags(self.register[Rm], tmp,
self.register[Rm] - tmp, 'sub')
return CMP_func
|
MULS Ra, Rb, Ra
Multiply Rb and Ra together and store the result in Ra.
Set the NZ flags.
Ra and Rb must be low registers
The first and last operand must be the same register
def MULS(self, params):
"""
MULS Ra, Rb, Ra
Multiply Rb and Ra together and store the result in Ra.
Set the NZ flags.
Ra and Rb must be low registers
The first and last operand must be the same register
"""
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(low_registers=(Ra, Rb, Rc))
if Ra != Rc:
raise iarm.exceptions.RuleError("Third parameter {} is not the same as the first parameter {}".format(Rc, Ra))
# MULS Ra, Rb, Ra
def MULS_func():
self.register[Ra] = self.register[Rb] * self.register[Rc]
self.set_NZ_flags(self.register[Ra])
return MULS_func
|
RSBS [Ra,] Rb, #0
Subtract Rb from zero (0 - Rb) and store the result in Ra
Set the NZCV flags
Ra and Rb must be low registers
if Ra is omitted, then it is assumed to be Rb
def RSBS(self, params):
"""
RSBS [Ra,] Rb, #0
Subtract Rb from zero (0 - Rb) and store the result in Ra
Set the NZCV flags
Ra and Rb must be low registers
if Ra is omitted, then it is assumed to be Rb
"""
# This instruction allows for an optional destination register
# If it is omitted, then it is assumed to be Rb
# As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
except iarm.exceptions.ParsingError:
Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
Ra = Rb
self.check_arguments(low_registers=(Ra, Rb))
if Rc != '#0':
raise iarm.exceptions.RuleError("Third parameter {} is not #0".format(Rc))
# RSBS Ra, Rb, #0
def RSBS_func():
oper_2 = self.register[Rb]
self.register[Ra] = 0 - self.register[Rb]
self.set_NZCV_flags(0, oper_2, self.register[Ra], 'sub')
return RSBS_func
|
SUB [SP,] SP, #imm9_4
Subtract an immediate from the Stack Pointer
The first SP is optional
def SUB(self, params):
"""
SUB [SP,] SP, #imm9_4
Subtract an immediate from the Stack Pointer
The first SP is optional
"""
# This instruction allows for an optional destination register
# If it is omitted, then it is assumed to be Rb
# As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
except iarm.exceptions.ParsingError:
Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
Ra = Rb
self.check_arguments(imm9_4=(Rc,))
if Ra != 'SP':
raise iarm.exceptions.RuleError("First parameter {} is not equal to SP".format(Ra))
if Rb != 'SP':
raise iarm.exceptions.RuleError("Second parameter {} is not equal to SP".format(Rb))
# SUB SP, SP, #imm9_4
def SUB_func():
self.register[Ra] = self.register[Rb] - self.convert_to_integer(Rc[1:])
return SUB_func
|
SUBS [Ra,] Rb, Rc
SUBS [Ra,] Rb, #imm3
SUBS [Ra,] Ra, #imm8
Subtract Rc or an immediate from Rb and store the result in Ra
Ra, Rb, and Rc must be low registers
If Ra is omitted, then it is assumed to be Rb
def SUBS(self, params):
"""
SUBS [Ra,] Rb, Rc
SUBS [Ra,] Rb, #imm3
SUBS [Ra,] Ra, #imm8
Subtract Rc or an immediate from Rb and store the result in Ra
Ra, Rb, and Rc must be low registers
If Ra is omitted, then it is assumed to be Rb
"""
# This instruction allows for an optional destination register
# If it is omitted, then it is assumed to be Rb
# As defined in http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0662b/index.html
try:
Ra, Rb, Rc = self.get_three_parameters(self.THREE_PARAMETER_COMMA_SEPARATED, params)
except iarm.exceptions.ParsingError:
Rb, Rc = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
Ra = Rb
if self.is_register(Rc):
# SUBS Ra, Rb, Rc
self.check_arguments(low_registers=(Ra, Rb, Rc))
def SUBS_func():
oper_1 = self.register[Rb]
oper_2 = self.register[Rc]
self.register[Ra] = self.register[Rb] - self.register[Rc]
self.set_NZCV_flags(oper_1, oper_2, self.register[Ra], 'sub')
else:
if Ra == Rb:
# SUBS Ra, Ra, #imm8
self.check_arguments(low_registers=(Ra,), imm8=(Rc,))
def SUBS_func():
oper_1 = self.register[Ra]
self.register[Ra] = self.register[Ra] - self.convert_to_integer(Rc[1:])
self.set_NZCV_flags(oper_1, self.convert_to_integer(Rc[1:]), self.register[Ra], 'sub')
else:
# SUBS Ra, Rb, #imm3
self.check_arguments(low_registers=(Ra, Rb), imm3=(Rc,))
def SUBS_func():
oper_1 = self.register[Rb]
self.register[Ra] = self.register[Rb] - self.convert_to_integer(Rc[1:])
self.set_NZCV_flags(oper_1, self.convert_to_integer(Rc[1:]), self.register[Ra], 'sub')
return SUBS_func
|
Adds SERVICE_NAME, SERVICE_ENVIRONMENT, and DEFAULT_TAGS to the template
:param template:
:param service_name:
:param environment:
:return:
def initialize(template, service_name, environment='dev'):
"""Adds SERVICE_NAME, SERVICE_ENVIRONMENT, and DEFAULT_TAGS to the template
:param template:
:param service_name:
:param environment:
:return:
"""
template.SERVICE_NAME = os.getenv('SERVICE_NAME', service_name)
template.SERVICE_ENVIRONMENT = os.getenv('ENV', environment).lower()
template.DEFAULT_TAGS = troposphere.Tags(**{
'service-name': template.SERVICE_NAME,
'environment': template.SERVICE_ENVIRONMENT
})
template.add_version("2010-09-09")
template.add_description("Stack for %s microservice" % service_name)
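A minimal sketch of using this helper on a fresh troposphere template; 'order-service' is a made-up service name:
import troposphere

template = troposphere.Template()
initialize(template, 'order-service', environment='prod')
# template now carries SERVICE_NAME, SERVICE_ENVIRONMENT and DEFAULT_TAGS,
# plus the 2010-09-09 template format version and a description.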
|
Get dist for installed version of dist_name avoiding pkg_resources cache
def get_dist(dist_name, lookup_dirs=None):
"""Get dist for installed version of dist_name avoiding pkg_resources cache
"""
# note: based on pip/utils/__init__.py, get_installed_version(...)
# Create a requirement that we'll look for inside of setuptools.
req = pkg_resources.Requirement.parse(dist_name)
# We want to avoid having this cached, so we need to construct a new
# working set each time.
if lookup_dirs is None:
working_set = pkg_resources.WorkingSet()
else:
working_set = pkg_resources.WorkingSet(lookup_dirs)
# Get the installed distribution from our working set
return working_set.find(req)
|
Get the package version information (=SetuptoolsVersion) which is
comparable.
note: we use the pip list_command implementation for this
:param package: name of the package
:return: installed version, latest available version
def get_package_versions(package):
"""Get the package version information (=SetuptoolsVersion) which is
comparable.
note: we use the pip list_command implementation for this
:param package: name of the package
:return: installed version, latest available version
"""
list_command = ListCommand()
options, args = list_command.parse_args([])
packages = [get_dist(package)]
dists = list_command.iter_packages_latest_infos(packages, options)
try:
dist = next(dists)
return dist.parsed_version, dist.latest_version
except StopIteration:
return None, None
|
Load hook module and register signals.
:param path: Absolute or relative path to module.
:return: module
def _load_hooks(path):
"""Load hook module and register signals.
:param path: Absolute or relative path to module.
:return: module
"""
module = imp.load_source(os.path.splitext(os.path.basename(path))[0], path)
if not check_hook_mechanism_is_intact(module):
# no hooks - do nothing
log.debug('No valid hook configuration: \'%s\'. Not using hooks!', path)
else:
if check_register_present(module):
# register the template hooks so they listen to gcdt_signals
module.register()
return module
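A hookfile that passes the checks above would at least expose register() (and usually deregister()); a rough sketch, assuming the gcdt_signals module used in the lifecycle below and blinker-style connect/disconnect:
# hooks.py -- rough sketch of a hookfile (assumes gcdt.gcdt_signals)
from gcdt import gcdt_signals

def _on_command_finalized(params):
    context, config = params
    print('command finished for tool %s' % context['tool'])

def register():
    gcdt_signals.command_finalized.connect(_on_command_finalized)

def deregister():
    gcdt_signals.command_finalized.disconnect(_on_command_finalized)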
|
Tool lifecycle which provides hooks into the different stages of the
command execution. See signals for hook details.
def lifecycle(awsclient, env, tool, command, arguments):
"""Tool lifecycle which provides hooks into the different stages of the
command execution. See signals for hook details.
"""
log.debug('### init')
load_plugins()
context = get_context(awsclient, env, tool, command, arguments)
# every tool needs a awsclient so we provide this via the context
context['_awsclient'] = awsclient
log.debug('### context:')
log.debug(context)
if 'error' in context:
# no need to send an 'error' signal here
return 1
## initialized
gcdt_signals.initialized.send(context)
log.debug('### initialized')
if 'error' in context:
log.error(context['error'])
return 1
check_gcdt_update()
# config is "assembled" by config_reader NOT here!
config = {}
gcdt_signals.config_read_init.send((context, config))
log.debug('### config_read_init')
gcdt_signals.config_read_finalized.send((context, config))
log.debug('### config_read_finalized')
# TODO we might want to be able to override config via env variables?
# here would be the right place to do this
if 'hookfile' in config:
# load hooks from hookfile
_load_hooks(config['hookfile'])
if 'kumo' in config:
# deprecated: this needs to be removed once all old-style "cloudformation" entries are gone
fix_old_kumo_config(config)
# check_credentials
gcdt_signals.check_credentials_init.send((context, config))
log.debug('### check_credentials_init')
gcdt_signals.check_credentials_finalized.send((context, config))
log.debug('### check_credentials_finalized')
if 'error' in context:
log.error(context['error'])
gcdt_signals.error.send((context, config))
return 1
## lookup
gcdt_signals.lookup_init.send((context, config))
log.debug('### lookup_init')
gcdt_signals.lookup_finalized.send((context, config))
log.debug('### lookup_finalized')
log.debug('### config after lookup:')
log.debug(config)
## config validation
gcdt_signals.config_validation_init.send((context, config))
log.debug('### config_validation_init')
gcdt_signals.config_validation_finalized.send((context, config))
if context['command'] in \
DEFAULT_CONFIG.get(context['tool'], {}).get('non_config_commands', []):
pass # we do not require a config for this command
elif tool not in config and tool != 'gcdt':
context['error'] = 'Configuration missing for \'%s\'.' % tool
log.error(context['error'])
gcdt_signals.error.send((context, config))
return 1
log.debug('### config_validation_finalized')
## check credentials are valid (AWS services)
# DEPRECATED, use gcdt-logon plugin instead
if are_credentials_still_valid(awsclient):
context['error'] = \
'Your credentials have expired... Please renew and try again!'
log.error(context['error'])
gcdt_signals.error.send((context, config))
return 1
## bundle step
gcdt_signals.bundle_pre.send((context, config))
log.debug('### bundle_pre')
gcdt_signals.bundle_init.send((context, config))
log.debug('### bundle_init')
gcdt_signals.bundle_finalized.send((context, config))
log.debug('### bundle_finalized')
if 'error' in context:
log.error(context['error'])
gcdt_signals.error.send((context, config))
return 1
## dispatch command providing context and config (= tooldata)
gcdt_signals.command_init.send((context, config))
log.debug('### command_init')
try:
if tool == 'gcdt':
conf = config # gcdt works on the whole config
else:
conf = config.get(tool, {})
exit_code = cmd.dispatch(arguments,
context=context,
config=conf)
except GracefulExit:
raise
except Exception as e:
log.debug(traceback.format_exc())
context['error'] = str(e)
log.error(context['error'])
exit_code = 1
if exit_code:
if 'error' not in context or context['error'] == '':
context['error'] = '\'%s\' command failed with exit code 1' % command
gcdt_signals.error.send((context, config))
return 1
gcdt_signals.command_finalized.send((context, config))
log.debug('### command_finalized')
# TODO reporting (in case you want to get a summary / output to the user)
gcdt_signals.finalized.send(context)
log.debug('### finalized')
return 0
|
gcdt tools parametrized main function to initiate gcdt lifecycle.
:param doc: docopt string
:param tool: gcdt tool (gcdt, kumo, tenkai, ramuda, yugen)
:param dispatch_only: list of commands which do not use gcdt lifecycle
:return: exit_code
def main(doc, tool, dispatch_only=None):
"""gcdt tools parametrized main function to initiate gcdt lifecycle.
:param doc: docopt string
:param tool: gcdt tool (gcdt, kumo, tenkai, ramuda, yugen)
:param dispatch_only: list of commands which do not use gcdt lifecycle
:return: exit_code
"""
# Use signal handler to throw exception which can be caught to allow
# graceful exit.
# here: https://stackoverflow.com/questions/26414704/how-does-a-python-process-exit-gracefully-after-receiving-sigterm-while-waiting
signal.signal(signal.SIGTERM, signal_handler) # Jenkins
signal.signal(signal.SIGINT, signal_handler) # Ctrl-C
try:
arguments = docopt(doc, sys.argv[1:])
command = get_command(arguments)
# DEBUG mode (if requested)
verbose = arguments.pop('--verbose', False)
if verbose:
logging_config['loggers']['gcdt']['level'] = 'DEBUG'
dictConfig(logging_config)
if dispatch_only is None:
dispatch_only = ['version']
assert tool in ['gcdt', 'kumo', 'tenkai', 'ramuda', 'yugen']
if command in dispatch_only:
# handle commands that do not need a lifecycle
# Note: `dispatch_only` commands do not have a check for ENV variable!
check_gcdt_update()
return cmd.dispatch(arguments)
else:
env = get_env()
if not env:
log.error('\'ENV\' environment variable not set!')
return 1
awsclient = AWSClient(botocore.session.get_session())
return lifecycle(awsclient, env, tool, command, arguments)
except GracefulExit as e:
log.info('Received %s signal - exiting command \'%s %s\'',
str(e), tool, command)
return 1
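An entry point script would pass its docopt string and tool name straight through; a sketch (the usage text here is a truncated placeholder, not the real CLI definition):
import sys

DOC = '''Usage:
        ramuda version
'''

if __name__ == '__main__':
    sys.exit(main(DOC, 'ramuda'))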
|
MOV Rx, Ry
MOV PC, Ry
Move the value of Ry into Rx or PC
def MOV(self, params):
"""
MOV Rx, Ry
MOV PC, Ry
Move the value of Ry into Rx or PC
"""
Rx, Ry = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(any_registers=(Rx, Ry))
def MOV_func():
self.register[Rx] = self.register[Ry]
return MOV_func
|
MOVS Ra, Rb
MOVS Ra, #imm8
Move the value of Rb or imm8 into Ra
Ra and Rb must be low registers
def MOVS(self, params):
"""
MOVS Ra, Rb
MOVS Ra, #imm8
Move the value of Rb or imm8 into Ra
Ra and Rb must be low registers
"""
Ra, Rb = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
if self.is_immediate(Rb):
self.check_arguments(low_registers=[Ra], imm8=[Rb])
def MOVS_func():
self.register[Ra] = self.convert_to_integer(Rb[1:])
# Set N and Z status flags
self.set_NZ_flags(self.register[Ra])
return MOVS_func
elif self.is_register(Rb):
self.check_arguments(low_registers=(Ra, Rb))
def MOVS_func():
self.register[Ra] = self.register[Rb]
self.set_NZ_flags(self.register[Ra])
return MOVS_func
else:
raise iarm.exceptions.ParsingError("Unknown parameter: {}".format(Rb))
|
MRS Rj, Rspecial
Copy the value of Rspecial to Rj
Rspecial can be APSR, IPSR, or EPSR
def MRS(self, params):
"""
MRS Rj, Rspecial
Copy the value of Rspecial to Rj
Rspecial can be APSR, IPSR, or EPSR
"""
Rj, Rspecial = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(LR_or_general_purpose_registers=(Rj,), special_registers=(Rspecial,))
def MRS_func():
# TODO add combination registers IEPSR, IAPSR, and EAPSR
# TODO needs to use APSR, IPSR, EPSR, IEPSR, IAPSR, EAPSR, PSR, MSP, PSP, PRIMASK, or CONTROL.
# http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0553a/CHDBIBGJ.html
if Rspecial == 'PSR':
self.register[Rj] = self.register['APSR'] | self.register['IPSR'] | self.register['EPSR']
else:
self.register[Rj] = self.register[Rspecial]
return MRS_func
|
MSR Rspecial, Rj
Copy the value of Rj to Rspecial
Rspecial can be APSR, IPSR, or EPSR
def MSR(self, params):
"""
MSR Rspecial, Rj
Copy the value of Rj to Rspecial
Rspecial can be APSR, IPSR, or EPSR
"""
Rspecial, Rj = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(LR_or_general_purpose_registers=(Rj,), special_registers=(Rspecial,))
def MSR_func():
# TODO add combination registers IEPSR, IAPSR, and EAPSR
# http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0553a/CHDBIBGJ.html
# TODO update N Z C V flags
if Rspecial in ('PSR', 'APSR'):
# PSR ignores writes to IPSR and EPSR
self.register['APSR'] = self.register[Rj]
else:
# Do nothing
pass
return MSR_func
|
MVNS Ra, Rb
Bitwise NOT the value in Rb and store the result in Ra
Ra and Rb must be low registers
def MVNS(self, params):
"""
MVNS Ra, Rb
Bitwise NOT the value in Rb and store the result in Ra
Ra and Rb must be low registers
"""
Ra, Rb = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(low_registers=(Ra, Rb))
def MVNS_func():
self.register[Ra] = ~self.register[Rb]
self.set_NZ_flags(self.register[Ra])
return MVNS_func
|
REV Ra, Rb
Reverse the byte order in register Rb and store the result in Ra
def REV(self, params):
"""
REV Ra, Rb
Reverse the byte order in register Rb and store the result in Ra
"""
Ra, Rb = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(low_registers=(Ra, Rb))
def REV_func():
self.register[Ra] = ((self.register[Rb] & 0xFF000000) >> 24) | \
((self.register[Rb] & 0x00FF0000) >> 8) | \
((self.register[Rb] & 0x0000FF00) << 8) | \
((self.register[Rb] & 0x000000FF) << 24)
return REV_func
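The same byte swap as a worked example in plain Python, independent of the interpreter:
value = 0x12345678
reversed_value = ((value & 0xFF000000) >> 24) | \
                 ((value & 0x00FF0000) >> 8) | \
                 ((value & 0x0000FF00) << 8) | \
                 ((value & 0x000000FF) << 24)
assert reversed_value == 0x78563412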
|