# -*- coding: utf-8 -*-
# coding=utf-8
# Copyright 2019 The SGNMT Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains predictors for bag of words experiments. This
is the standard bow predictor and the bowsearch predictor which first
does an unrestricted search to construct a skeleton and then restricts
the order of words by that skeleton (in addition to the bag
restriction).
"""
import logging
from cam.sgnmt import utils
from cam.sgnmt.decoding.beam import BeamDecoder
from cam.sgnmt.decoding.core import CLOSED_VOCAB_SCORE_NORM_NONE
from cam.sgnmt.misc.trie import SimpleTrie
from cam.sgnmt.misc.unigram import FileUnigramTable, \
BestStatsUnigramTable, FullStatsUnigramTable, AllStatsUnigramTable
from cam.sgnmt.predictors.core import Predictor
from cam.sgnmt.utils import INF, NEG_INF, MESSAGE_TYPE_FULL_HYPO, \
MESSAGE_TYPE_DEFAULT
class BagOfWordsPredictor(Predictor):
"""This predictor is similar to the forced predictor, but it does
not enforce the word order in the reference. Therefore, it assigns
probability 1 (i.e. a score of 0.0) to all hypotheses which contain
the words in the reference in any order, and -inf to all other
hypotheses.
"""
def __init__(self,
trg_test_file,
accept_subsets=False,
accept_duplicates=False,
heuristic_scores_file="",
collect_stats_strategy='best',
heuristic_add_consumed = False,
heuristic_add_remaining = True,
diversity_heuristic_factor = -1.0,
equivalence_vocab=-1):
"""Creates a new bag-of-words predictor.
Args:
trg_test_file (string): Path to the plain text file with
the target sentences. Must have the
same number of lines as the number
of source sentences to decode. The
word order in the target sentences
is not relevant for this predictor.
accept_subsets (bool): If true, this predictor permits
EOS even if the bag is not fully
consumed yet
accept_duplicates (bool): If true, counts are not updated
when a word is consumed. This
means that we allow a word in a
bag to appear multiple times
heuristic_scores_file (string): Path to the unigram scores
which are used if this
predictor estimates future
costs
collect_stats_strategy (string): best, full, or all. Defines
how unigram estimates are
collected for heuristic
heuristic_add_consumed (bool): Set to true to add the
difference between actual
partial score and unigram
estimates of consumed words
to the predictor heuristic
heuristic_add_remaining (bool): Set to true to add the sum
of unigram scores of words
remaining in the bag to the
predictor heuristic
diversity_heuristic_factor (float): Factor for diversity
heuristic which
penalizes hypotheses
with the same bag as
full hypos
equivalence_vocab (int): If positive, predictor states are
considered equal if the
remaining words within that vocab
and OOVs regarding this vocab are
the same. Only relevant when using
hypothesis recombination
"""
super(BagOfWordsPredictor, self).__init__()
with open(trg_test_file) as f:
self.lines = f.read().splitlines()
if heuristic_scores_file:
self.estimates = FileUnigramTable(heuristic_scores_file)
elif collect_stats_strategy == 'best':
self.estimates = BestStatsUnigramTable()
elif collect_stats_strategy == 'full':
self.estimates = FullStatsUnigramTable()
elif collect_stats_strategy == 'all':
self.estimates = AllStatsUnigramTable()
else:
logging.error("Unknown statistics collection strategy")
self.accept_subsets = accept_subsets
self.accept_duplicates = accept_duplicates
self.heuristic_add_consumed = heuristic_add_consumed
self.heuristic_add_remaining = heuristic_add_remaining
self.equivalence_vocab = equivalence_vocab
if accept_duplicates and not accept_subsets:
logging.error("You enabled bow_accept_duplicates but not bow_"
"accept_subsets. Therefore, the bow predictor will "
"never accept end-of-sentence and could cause "
"an infinite loop in the search strategy.")
self.diversity_heuristic_factor = diversity_heuristic_factor
self.diverse_heuristic = (diversity_heuristic_factor > 0.0)
def get_unk_probability(self, posterior):
"""Returns negative infinity unconditionally: Words which are
not in the target sentence are assigned probability 0 by
this predictor.
"""
return NEG_INF
def predict_next(self):
"""If the bag is empty, the only allowed symbol is EOS.
Otherwise, return the list of keys in the bag.
"""
if not self.bag: # Empty bag
return {utils.EOS_ID : 0.0}
ret = {w : 0.0 for w in self.bag}
if self.accept_subsets:
ret[utils.EOS_ID] = 0.0
return ret
def initialize(self, src_sentence):
"""Creates a new bag for the current target sentence..
Args:
src_sentence (list): Not used
"""
self.best_hypo_score = NEG_INF
self.bag = {}
for w in self.lines[self.current_sen_id].strip().split():
int_w = int(w)
self.bag[int_w] = self.bag.get(int_w, 0) + 1
self.full_bag = dict(self.bag)
def consume(self, word):
"""Updates the bag by deleting the consumed word.
Args:
word (int): Next word to consume
"""
if word == utils.EOS_ID:
self.bag = {}
return
if word not in self.bag:
logging.warn("Consuming word which is not in bag-of-words!")
return
cnt = self.bag.pop(word)
if cnt > 1 and not self.accept_duplicates:
self.bag[word] = cnt - 1
def get_state(self):
"""State of this predictor is the current bag """
return self.bag
def set_state(self, state):
"""State of this predictor is the current bag """
self.bag = state
def initialize_heuristic(self, src_sentence):
"""Calls ``reset`` of the used unigram table with estimates
``self.estimates`` to clear all statistics from the previous
sentence
Args:
src_sentence (list): Not used
"""
self.estimates.reset()
if self.diverse_heuristic:
self.explored_bags = SimpleTrie()
def notify(self, message, message_type = MESSAGE_TYPE_DEFAULT):
"""This gets called if this predictor observes the decoder. It
updates unigram heuristic estimates via passing through this
message to the unigram table ``self.estimates``.
"""
self.estimates.notify(message, message_type)
if self.diverse_heuristic and message_type == MESSAGE_TYPE_FULL_HYPO:
self._update_explored_bags(message)
def _update_explored_bags(self, hypo):
"""This is called if diversity heuristic is enabled. It updates
``self.explored_bags``
"""
sen = hypo.trgt_sentence
for l in range(len(sen)):
key = sen[:l]
key.sort()
cnt = self.explored_bags.get(key)
if not cnt:
cnt = 0.0
self.explored_bags.add(key, cnt + 1.0)
def estimate_future_cost(self, hypo):
"""The bow predictor comes with its own heuristic function. We
use the sum of scores of the remaining words as future cost
estimator.
"""
acc = 0.0
if self.heuristic_add_remaining:
remaining = dict(self.full_bag)
remaining[utils.EOS_ID] = 1
for w in hypo.trgt_sentence:
remaining[w] -= 1
acc -= sum([cnt*self.estimates.estimate(w)
for w,cnt in remaining.items()])
if self.diverse_heuristic:
key = list(hypo.trgt_sentence)
key.sort()
cnt = self.explored_bags.get(key)
if cnt:
acc += cnt * self.diversity_heuristic_factor
if self.heuristic_add_consumed:
acc -= hypo.score - sum([self.estimates.estimate(w, -1000.0)
for w in hypo.trgt_sentence])
return acc
def _get_unk_bag(self, org_bag):
if self.equivalence_vocab <= 0:
return org_bag
unk_bag = {}
for word,cnt in org_bag.items():
idx = word if word < self.equivalence_vocab else utils.UNK_ID
unk_bag[idx] = unk_bag.get(idx, 0) + cnt
return unk_bag
def is_equal(self, state1, state2):
"""Returns true if the bag is the same """
return self._get_unk_bag(state1) == self._get_unk_bag(state2)
class BagOfWordsSearchPredictor(BagOfWordsPredictor):
"""Combines the bag-of-words predictor with a proxy decoding pass
which creates a skeleton translation.
"""
def __init__(self,
main_decoder,
hypo_recombination,
trg_test_file,
accept_subsets=False,
accept_duplicates=False,
heuristic_scores_file="",
collect_stats_strategy='best',
heuristic_add_consumed = False,
heuristic_add_remaining = True,
diversity_heuristic_factor = -1.0,
equivalence_vocab=-1):
"""Creates a new bag-of-words predictor with pre search
Args:
main_decoder (Decoder): Reference to the main decoder
instance, used to fetch the predictors
hypo_recombination (bool): Activates hypo recombination for the
pre decoder
trg_test_file (string): Path to the plain text file with
the target sentences. Must have the
same number of lines as the number
of source sentences to decode. The
word order in the target sentences
is not relevant for this predictor.
accept_subsets (bool): If true, this predictor permits
EOS even if the bag is not fully
consumed yet
accept_duplicates (bool): If true, counts are not updated
when a word is consumed. This
means that we allow a word in a
bag to appear multiple times
heuristic_scores_file (string): Path to the unigram scores
which are used if this
predictor estimates future
costs
collect_stats_strategy (string): best, full, or all. Defines
how unigram estimates are
collected for heuristic
heuristic_add_consumed (bool): Set to true to add the
difference between actual
partial score and unigram
estimates of consumed words
to the predictor heuristic
heuristic_add_remaining (bool): Set to true to add the sum
of unigram scores of words
remaining in the bag to the
predictor heuristic
equivalence_vocab (int): If positive, predictor states are
considered equal if the
remaining words within that vocab
and OOVs regarding this vocab are
the same. Only relevant when using
hypothesis recombination
"""
self.main_decoder = main_decoder
self.pre_decoder = BeamDecoder(CLOSED_VOCAB_SCORE_NORM_NONE,
main_decoder.max_len_factor,
hypo_recombination,
10)
self.pre_decoder.combine_posteriors = main_decoder.combine_posteriors
super(BagOfWordsSearchPredictor, self).__init__(trg_test_file,
accept_subsets,
accept_duplicates,
heuristic_scores_file,
collect_stats_strategy,
heuristic_add_consumed,
heuristic_add_remaining,
diversity_heuristic_factor,
equivalence_vocab)
self.pre_mode = False
def predict_next(self):
"""If in ``pre_mode``, pass through to super class. Otherwise,
scan skeleton
"""
if self.pre_mode:
return super(BagOfWordsSearchPredictor, self).predict_next()
if not self.bag: # Empty bag
return {utils.EOS_ID : 0.0}
ret = {w : 0.0 for w in self.missing}
if self.accept_subsets:
ret[utils.EOS_ID] = 0.0
if self.skeleton_pos < len(self.skeleton):
ret[self.skeleton[self.skeleton_pos]] = 0.0
return ret
def initialize(self, src_sentence):
"""If in ``pre_mode``, pass through to super class. Otherwise,
initialize skeleton.
"""
if self.pre_mode:
return super(BagOfWordsSearchPredictor, self).initialize(src_sentence)
self.pre_mode = True
old_accept_subsets = self.accept_subsets
old_accept_duplicates = self.accept_duplicates
self.accept_subsets = True
self.accept_duplicates = True
self.pre_decoder.predictors = self.main_decoder.predictors
self.pre_decoder.current_sen_id = self.main_decoder.current_sen_id - 1
hypos = self.pre_decoder.decode(src_sentence)
score = INF
if not hypos:
logging.warn("No hypothesis found by the pre decoder. Effectively "
"reducing bowsearch predictor to bow predictor.")
self.skeleton = []
else:
self.skeleton = hypos[0].trgt_sentence
score = hypos[0].total_score
if self.skeleton and self.skeleton[-1] == utils.EOS_ID:
self.skeleton = self.skeleton[:-1] # Remove EOS
self.skeleton_pos = 0
self.accept_subsets = old_accept_subsets
self.accept_duplicates = old_accept_duplicates
self._set_up_full_mode()
logging.debug("BOW Skeleton (score=%f missing=%d): %s" % (
score,
sum(self.missing.values()),
self.skeleton))
self.main_decoder.current_sen_id -= 1
self.main_decoder.initialize_predictors(src_sentence)
self.pre_mode = False
def _set_up_full_mode(self):
"""This method initializes ``missing`` by using
``self.skeleton`` and ``self.full_bag`` and removes
duplicates from ``self.skeleton``.
"""
self.bag = dict(self.full_bag)
missing = dict(self.full_bag)
skeleton_no_duplicates = []
for word in self.skeleton:
if missing[word] > 0:
missing[word] -= 1
skeleton_no_duplicates.append(word)
self.skeleton = skeleton_no_duplicates
self.missing = {w: cnt for w, cnt in missing.items() if cnt > 0}
def consume(self, word):
"""Calls super class ``consume``. If not in ``pre_mode``,
update skeleton info.
Args:
word (int): Next word to consume
"""
super(BagOfWordsSearchPredictor, self).consume(word)
if self.pre_mode:
return
if (self.skeleton_pos < len(self.skeleton)
and word == self.skeleton[self.skeleton_pos]):
self.skeleton_pos += 1
elif word in self.missing:
self.missing[word] -= 1
if self.missing[word] <= 0:
del self.missing[word]
def get_state(self):
"""If in pre_mode, state of this predictor is the current bag
Otherwise, its the bag plus skeleton state
"""
if self.pre_mode:
return super(BagOfWordsSearchPredictor, self).get_state()
return self.bag, self.skeleton_pos, self.missing
def set_state(self, state):
"""If in pre_mode, state of this predictor is the current bag
Otherwise, its the bag plus skeleton state
"""
if self.pre_mode:
return super(BagOfWordsSearchPredictor, self).set_state(state)
self.bag, self.skeleton_pos, self.missing = state
def is_equal(self, state1, state2):
"""Returns true if the bag and the skeleton states are the same
"""
if self.pre_mode:
return super(BagOfWordsSearchPredictor, self).is_equal(state1,
state2)
return super(BagOfWordsSearchPredictor, self).is_equal(state1[0],
state2[0])
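# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the original SGNMT module: a minimal,
# self-contained emulation of the bag bookkeeping that predict_next() and
# consume() implement above. It assumes integer word IDs and uses 0 as a
# stand-in for utils.EOS_ID; the helper name _bow_sketch is made up for this
# example. It shows that any permutation of the bag is admissible and that
# EOS only becomes available once the bag is empty (accept_subsets=False).
# ---------------------------------------------------------------------------
def _bow_sketch(target_ids, hypothesis):
    """Return True iff ``hypothesis`` consumes exactly the words in the bag (0 marks EOS)."""
    eos_id = 0  # stand-in for utils.EOS_ID
    bag = {}
    for w in target_ids:
        bag[w] = bag.get(w, 0) + 1
    for w in hypothesis:
        if w == eos_id:
            return not bag  # EOS is admissible only once the bag is empty
        if w not in bag:
            return False    # corresponds to a NEG_INF score from the predictor
        bag[w] -= 1
        if bag[w] == 0:
            del bag[w]
    return not bag


if __name__ == "__main__":
    print(_bow_sketch([4, 5, 5, 6], [5, 6, 4, 5, 0]))  # True: same bag, different order
    print(_bow_sketch([4, 5, 5, 6], [4, 5, 6, 0]))     # False: one occurrence of 5 missing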
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# $Id$
#
import warnings
import logging
import string
import sys
import time
warnings.filterwarnings("ignore")
from pysnmp.entity.rfc3413.oneliner import cmdgen
from pysnmp.proto import rfc1902
from zoni.data.resourcequerysql import ResourceQuerySql
from zoni.hardware.systemmanagementinterface import SystemManagementInterface
from zoni.agents.dhcpdns import DhcpDns
#class systemmagement():
#def __init__(self, proto):
#self.proto = proto
class raritanDominionPx(SystemManagementInterface):
def __init__(self, config, nodeName, hostInfo):
# Register
self.config = config
self.nodeName = nodeName
self.log = logging.getLogger(__name__)
self.verbose = False
self.powerStatus = None
if hostInfo != None:
self.host = nodeName
self.pdu_name = hostInfo['pdu_name']
self.port = hostInfo['pdu_port']
self.user = hostInfo['pdu_userid']
self.password = hostInfo['pdu_password']
self.oid = "1,3,6,1,4,1,13742,4,1,2,2,1"
self.oid_name = ",2"
self.oid_set = ",3"
self.oid_status = ",3"
if self.getOffset():
self.port = hostInfo['pdu_port'] - 1
# this works
#errorIndication, errorStatus, errorIndex, varBinds = cmdgen.CommandGenerator().getCmd(cmdgen.CommunityData('my-agent', 'public', 0), cmdgen.UdpTransportTarget(('pdu0-r1r1', 161)), (1,3,6,1,4,1,13742,4,1,2,2,1,3,2))
#print varBinds
#oid = netsnmp.Varbind('sysDescr')
#result = netsnmp.snmpwalk(oid, Version = 2,DestHost="localhost",Community="public")
#print result
#var = netsnmp.Varbind('sysDescr.0')
#res = netsnmp.snmpget(var, ...:Version=1,...:DestHost = 'pdu0-r1r1',...: Community = 'prs-domain')
#print res
#print cmdgen
#set snmp = /usr/bin/snmpset -v 2c -c intel pdu .1.3.6.1.4.1.13742.4.1.2.2.1.3.$outletnumber i $state
#name snmp = /usr/bin/snmpset -v 2c -c intel pdu .1.3.6.1.4.1.13742.4.1.2.2.1.2.$outletnumber i $state
#status snmp = /usr/bin/snmpset -v 2c -c intel pdu .1.3.6.1.4.1.13742.4.1.2.2.1.1.$outletnumber i $state
#self.snmp_status_oid = ".1.3.6.1.4.1.13742.4.1.2.2.1.1."
#self.powerStatus = None
#print self.__dict__
def setVerbose(self, verbose):
self.verbose = verbose
'''
Just discovered that some PDUs start numbering ports in the SNMP mib
with 0. Adding a check to update the port number to match the labels on the
physical plugs which start numbering with 1.
'''
def getOffset(self):
thisoid = eval(str(self.oid) + str(self.oid_status) + "," + str(0))
__errorIndication, __errorStatus, __errorIndex, varBinds = cmdgen.CommandGenerator().getCmd( \
cmdgen.CommunityData('my-agent', self.user, 0), \
cmdgen.UdpTransportTarget((self.pdu_name, 161)), thisoid)
output = varBinds[0][1]
if output == -1:
return 0
else:
return 1
def __setPowerStatus(self):
thisoid = eval(str(self.oid) + str(self.oid_status) + "," + str(self.port))
__errorIndication, __errorStatus, __errorIndex, varBinds = cmdgen.CommandGenerator().getCmd( \
cmdgen.CommunityData('my-agent', self.user, 0), \
cmdgen.UdpTransportTarget((self.pdu_name, 161)), thisoid)
output = varBinds[0][1]
print output, varBinds
if output == 1:
    self.powerStatus = 1
    powerstat = "on"
elif output == 0:
    self.powerStatus = 0
    powerstat = "off"
else:
    powerstat = "unknown"
print "powerstat", powerstat
self.log.info("hardware setPowerStatus %s : %s" % (powerstat, self.nodeName))
if output:
return 1
return 0
def isPowered(self):
if self.powerStatus is None:
    self.__setPowerStatus()
if self.powerStatus:
    return 1
return 0
def getPowerStatus(self):
return self.isPowered()
def powerOn(self):
thisoid = eval(str(self.oid) + str(self.oid_status) + "," + str(self.port))
__errorIndication, __errorStatus, __errorIndex, __varBinds = cmdgen.CommandGenerator().setCmd( \
cmdgen.CommunityData('my-agent', self.user, 1), \
cmdgen.UdpTransportTarget((self.pdu_name, 161)), \
(thisoid, rfc1902.Integer('1')))
return self.getPowerStatus()
def powerOff(self):
thisoid = eval(str(self.oid) + str(self.oid_status) + "," + str(self.port))
__errorIndication, __errorStatus, __errorIndex, __varBinds = cmdgen.CommandGenerator().setCmd( \
cmdgen.CommunityData('my-agent', self.user, 1), \
cmdgen.UdpTransportTarget((self.pdu_name, 161)), \
(thisoid, rfc1902.Integer('0')))
return self.getPowerStatus()
def powerCycle(self):
self.powerOff()
self.powerOn()
def powerReset(self):
self.powerCycle()
def registerToZoni(self, user, password, host):
import socket
host = string.strip(str(host))
# Get hostname of the switch
if len(host.split(".")) == 4:
ip = host
try:
host = string.strip(socket.gethostbyaddr(ip)[0].split(".")[0])
except Exception, e:
mesg = "WARNING: Host (" + host + ") not registered in DNS " + str(e) + "\n"
sys.stderr.write(mesg)
else:
# Maybe a hostname was entered...
try:
ip = socket.gethostbyname(host)
except Exception, e:
mesg = "ERROR: Host (" + host + ") not registered in DNS " + str(e) + "\n"
sys.stderr.write(mesg)
mesg = "Unable to resolve hostname" + "\n"
sys.stderr.write(mesg)
exit()
a={}
oid = eval(str("1,3,6,1,2,1,1,1,0"))
__errorIndication, __errorStatus, __errorIndex, varBinds = cmdgen.CommandGenerator().getCmd( \
cmdgen.CommunityData('my-agent', user, 0), \
cmdgen.UdpTransportTarget((host, 161)), oid)
if len(varBinds) < 1:
mesg = "Incorrect authentication details"
self.log.error(mesg)
return -1
a['hw_make'] = str(varBinds[0][1])
oid = eval("1,3,6,1,4,1,13742,4,1,1,6,0")
__errorIndication, __errorStatus, __errorIndex, varBinds = cmdgen.CommandGenerator().getCmd( \
cmdgen.CommunityData('my-agent', user, 0), \
cmdgen.UdpTransportTarget((host, 161)), oid)
x = []
for d in ['%x' % ord(i) for i in varBinds[0][1]]:
if len(d) == 1:
d = "0" + str(d)
x.append(d)
a['hw_mac'] = ":".join(['%s' % d for d in x])
oid = eval("1,3,6,1,4,1,13742,4,1,1,2,0")
__errorIndication, __errorStatus, __errorIndex, varBinds = cmdgen.CommandGenerator().getCmd( \
cmdgen.CommunityData('my-agent', user, 0), \
cmdgen.UdpTransportTarget((host, 161)), oid)
serial = str(varBinds[0][1])
datime = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.localtime())
val = "Registered by Zoni on : " + datime
a['hw_notes'] = val + "; Serial " + serial
oid = eval("1,3,6,1,4,1,13742,4,1,1,1,0")
__errorIndication, __errorStatus, __errorIndex, varBinds = cmdgen.CommandGenerator().getCmd( \
cmdgen.CommunityData('my-agent', user, 0), \
cmdgen.UdpTransportTarget((host, 161)), oid)
a['hw_version_fw'] = str(varBinds[0][1])
oid = eval("1,3,6,1,4,1,13742,4,1,1,12,0")
__errorIndication, __errorStatus, __errorIndex, varBinds = cmdgen.CommandGenerator().getCmd( \
cmdgen.CommunityData('my-agent', user, 0), \
cmdgen.UdpTransportTarget((host, 161)), oid)
a['hw_model'] = str(varBinds[0][1])
a['hw_type'] = "pdu"
a['hw_name'] = host
a['hw_ipaddr'] = ip
a['hw_userid'] = user
a['hw_password'] = password
# Register in dns
if self.config['dnsEnabled']:
try:
mesg = "Adding %s(%s) to dns" % (host, ip)
self.log.info(mesg)
DhcpDns(self.config, verbose=self.verbose).addDns(host, ip)
mesg = "Adding %s(%s) to dhcp" % (host, ip)
self.log.info(mesg)
DhcpDns(self.config, verbose=self.verbose).addDhcp(host, ip, a['hw_mac'])
except:
mesg = "Adding %s(%s) %s to dhcp/dns failed" % (host, ip, a['hw_mac'])
self.log.error(mesg)
# Add to db
# Register to DB
query = ResourceQuerySql(self.config, self.verbose)
query.registerHardware(a)
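# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the original Zoni module: how the class
# above is typically driven. The hostname, outlet number and community
# string below are placeholders, and a reachable Raritan Dominion PX PDU is
# required, since __init__() already issues SNMP requests via getOffset().
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    host_info = {
        "pdu_name": "pdu0-r1r1",   # placeholder PDU hostname
        "pdu_port": 4,             # outlet number as labelled on the PDU
        "pdu_userid": "public",    # used as the SNMP community string above
        "pdu_password": "",        # stored by __init__ but unused here
    }
    pdu = raritanDominionPx({"dnsEnabled": False}, "node0001", host_info)
    print pdu.getPowerStatus()     # 1 if the outlet is powered, 0 otherwise
    pdu.powerCycle()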
# Copyright (C) 2013-2014 Computer Sciences Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Autogenerated by Thrift Compiler (0.9.1)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface:
def ping(self):
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def ping(self):
self.send_ping()
return self.recv_ping()
def send_ping(self):
self._oprot.writeMessageBegin('ping', TMessageType.CALL, self._seqid)
args = ping_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_ping(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = ping_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "ping failed: unknown result");
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["ping"] = Processor.process_ping
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_ping(self, seqid, iprot, oprot):
args = ping_args()
args.read(iprot)
iprot.readMessageEnd()
result = ping_result()
result.success = self._handler.ping()
oprot.writeMessageBegin("ping", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class ping_args:
thrift_spec = (
)
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ping_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ping_result:
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ping_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
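# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the generated file: minimal client-side
# usage of the ping service defined above. It assumes this generated module
# is importable as-is and that a matching Thrift server is listening on
# localhost:9090 (both placeholder assumptions).
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from thrift.transport import TSocket

    socket = TSocket.TSocket("localhost", 9090)          # placeholder endpoint
    transport = TTransport.TBufferedTransport(socket)    # TTransport imported above
    protocol = TBinaryProtocol.TBinaryProtocol(transport)
    client = Client(protocol)
    transport.open()
    print(client.ping())                                  # round-trips ping_args/ping_result
    transport.close()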
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test suite for the Hyper-V driver and related APIs.
"""
import io
import mox
import os
import platform
import shutil
import time
import uuid
from oslo.config import cfg
from nova.api.metadata import base as instance_metadata
from nova.compute import power_state
from nova.compute import task_states
from nova import context
from nova import db
from nova import exception
from nova.image import glance
from nova.openstack.common.gettextutils import _
from nova import test
from nova.tests import fake_network
from nova.tests.image import fake as fake_image
from nova.tests import matchers
from nova.tests.virt.hyperv import db_fakes
from nova.tests.virt.hyperv import fake
from nova import utils
from nova.virt import configdrive
from nova.virt import driver
from nova.virt.hyperv import basevolumeutils
from nova.virt.hyperv import constants
from nova.virt.hyperv import driver as driver_hyperv
from nova.virt.hyperv import hostutils
from nova.virt.hyperv import livemigrationutils
from nova.virt.hyperv import networkutils
from nova.virt.hyperv import networkutilsv2
from nova.virt.hyperv import pathutils
from nova.virt.hyperv import utilsfactory
from nova.virt.hyperv import vhdutils
from nova.virt.hyperv import vhdutilsv2
from nova.virt.hyperv import vmutils
from nova.virt.hyperv import vmutilsv2
from nova.virt.hyperv import volumeutils
from nova.virt.hyperv import volumeutilsv2
from nova.virt import images
CONF = cfg.CONF
CONF.import_opt('vswitch_name', 'nova.virt.hyperv.vif', 'hyperv')
class HyperVAPITestCase(test.NoDBTestCase):
"""Unit tests for Hyper-V driver calls."""
def __init__(self, test_case_name):
self._mox = mox.Mox()
super(HyperVAPITestCase, self).__init__(test_case_name)
def setUp(self):
super(HyperVAPITestCase, self).setUp()
self._user_id = 'fake'
self._project_id = 'fake'
self._instance_data = None
self._image_metadata = None
self._fetched_image = None
self._update_image_raise_exception = False
self._volume_target_portal = 'testtargetportal:3260'
self._volume_id = '0ef5d708-45ab-4129-8c59-d774d2837eb7'
self._context = context.RequestContext(self._user_id, self._project_id)
self._instance_ide_disks = []
self._instance_ide_dvds = []
self._instance_volume_disks = []
self._test_vm_name = None
self._test_instance_dir = 'C:\\FakeInstancesPath\\instance-0000001'
self._check_min_windows_version_satisfied = True
self._setup_stubs()
self.flags(instances_path=r'C:\Hyper-V\test\instances',
network_api_class='nova.network.neutronv2.api.API')
self.flags(force_volumeutils_v1=True, group='hyperv')
self.flags(force_hyperv_utils_v1=True, group='hyperv')
self._conn = driver_hyperv.HyperVDriver(None)
def _setup_stubs(self):
db_fakes.stub_out_db_instance_api(self.stubs)
fake_image.stub_out_image_service(self.stubs)
fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs)
def fake_fetch(context, image_id, target, user, project):
self._fetched_image = target
self.stubs.Set(images, 'fetch', fake_fetch)
def fake_get_remote_image_service(context, name):
class FakeGlanceImageService(object):
def update(self_fake, context, image_id, image_metadata, f):
if self._update_image_raise_exception:
raise vmutils.HyperVException(
"Simulated update failure")
self._image_metadata = image_metadata
return (FakeGlanceImageService(), 1)
self.stubs.Set(glance, 'get_remote_image_service',
fake_get_remote_image_service)
def fake_check_min_windows_version(fake_self, major, minor):
return self._check_min_windows_version_satisfied
self.stubs.Set(hostutils.HostUtils, 'check_min_windows_version',
fake_check_min_windows_version)
def fake_sleep(ms):
pass
self.stubs.Set(time, 'sleep', fake_sleep)
def fake_vmutils__init__(self, host='.'):
pass
vmutils.VMUtils.__init__ = fake_vmutils__init__
self.stubs.Set(pathutils, 'PathUtils', fake.PathUtils)
self._mox.StubOutWithMock(fake.PathUtils, 'open')
self._mox.StubOutWithMock(fake.PathUtils, 'copyfile')
self._mox.StubOutWithMock(fake.PathUtils, 'rmtree')
self._mox.StubOutWithMock(fake.PathUtils, 'copy')
self._mox.StubOutWithMock(fake.PathUtils, 'remove')
self._mox.StubOutWithMock(fake.PathUtils, 'rename')
self._mox.StubOutWithMock(fake.PathUtils, 'makedirs')
self._mox.StubOutWithMock(fake.PathUtils,
'get_instance_migr_revert_dir')
self._mox.StubOutWithMock(fake.PathUtils, 'get_instance_dir')
self._mox.StubOutWithMock(vmutils.VMUtils, 'vm_exists')
self._mox.StubOutWithMock(vmutils.VMUtils, 'create_vm')
self._mox.StubOutWithMock(vmutils.VMUtils, 'destroy_vm')
self._mox.StubOutWithMock(vmutils.VMUtils, 'attach_ide_drive')
self._mox.StubOutWithMock(vmutils.VMUtils, 'create_scsi_controller')
self._mox.StubOutWithMock(vmutils.VMUtils, 'create_nic')
self._mox.StubOutWithMock(vmutils.VMUtils, 'set_vm_state')
self._mox.StubOutWithMock(vmutils.VMUtils, 'list_instances')
self._mox.StubOutWithMock(vmutils.VMUtils, 'get_vm_summary_info')
self._mox.StubOutWithMock(vmutils.VMUtils, 'take_vm_snapshot')
self._mox.StubOutWithMock(vmutils.VMUtils, 'remove_vm_snapshot')
self._mox.StubOutWithMock(vmutils.VMUtils, 'set_nic_connection')
self._mox.StubOutWithMock(vmutils.VMUtils, 'get_vm_scsi_controller')
self._mox.StubOutWithMock(vmutils.VMUtils, 'get_vm_ide_controller')
self._mox.StubOutWithMock(vmutils.VMUtils, 'get_attached_disks_count')
self._mox.StubOutWithMock(vmutils.VMUtils,
'attach_volume_to_controller')
self._mox.StubOutWithMock(vmutils.VMUtils,
'get_mounted_disk_by_drive_number')
self._mox.StubOutWithMock(vmutils.VMUtils, 'detach_vm_disk')
self._mox.StubOutWithMock(vmutils.VMUtils, 'get_vm_storage_paths')
self._mox.StubOutWithMock(vmutils.VMUtils,
'get_controller_volume_paths')
self._mox.StubOutWithMock(vmutils.VMUtils,
'enable_vm_metrics_collection')
self._mox.StubOutWithMock(vhdutils.VHDUtils, 'create_differencing_vhd')
self._mox.StubOutWithMock(vhdutils.VHDUtils, 'reconnect_parent_vhd')
self._mox.StubOutWithMock(vhdutils.VHDUtils, 'merge_vhd')
self._mox.StubOutWithMock(vhdutils.VHDUtils, 'get_vhd_parent_path')
self._mox.StubOutWithMock(vhdutils.VHDUtils, 'get_vhd_info')
self._mox.StubOutWithMock(vhdutils.VHDUtils, 'resize_vhd')
self._mox.StubOutWithMock(vhdutils.VHDUtils,
'get_internal_vhd_size_by_file_size')
self._mox.StubOutWithMock(vhdutils.VHDUtils, 'validate_vhd')
self._mox.StubOutWithMock(vhdutils.VHDUtils, 'get_vhd_format')
self._mox.StubOutWithMock(vhdutils.VHDUtils, 'create_dynamic_vhd')
self._mox.StubOutWithMock(hostutils.HostUtils, 'get_cpus_info')
self._mox.StubOutWithMock(hostutils.HostUtils,
'is_cpu_feature_present')
self._mox.StubOutWithMock(hostutils.HostUtils, 'get_memory_info')
self._mox.StubOutWithMock(hostutils.HostUtils, 'get_volume_info')
self._mox.StubOutWithMock(hostutils.HostUtils, 'get_windows_version')
self._mox.StubOutWithMock(hostutils.HostUtils, 'get_local_ips')
self._mox.StubOutWithMock(networkutils.NetworkUtils,
'get_external_vswitch')
self._mox.StubOutWithMock(networkutils.NetworkUtils,
'create_vswitch_port')
self._mox.StubOutWithMock(networkutils.NetworkUtils,
'vswitch_port_needed')
self._mox.StubOutWithMock(livemigrationutils.LiveMigrationUtils,
'live_migrate_vm')
self._mox.StubOutWithMock(livemigrationutils.LiveMigrationUtils,
'check_live_migration_config')
self._mox.StubOutWithMock(basevolumeutils.BaseVolumeUtils,
'volume_in_mapping')
self._mox.StubOutWithMock(basevolumeutils.BaseVolumeUtils,
'get_session_id_from_mounted_disk')
self._mox.StubOutWithMock(basevolumeutils.BaseVolumeUtils,
'get_device_number_for_target')
self._mox.StubOutWithMock(basevolumeutils.BaseVolumeUtils,
'get_target_from_disk_path')
self._mox.StubOutWithMock(volumeutils.VolumeUtils,
'login_storage_target')
self._mox.StubOutWithMock(volumeutils.VolumeUtils,
'logout_storage_target')
self._mox.StubOutWithMock(volumeutils.VolumeUtils,
'execute_log_out')
self._mox.StubOutWithMock(volumeutils.VolumeUtils,
'get_iscsi_initiator')
self._mox.StubOutWithMock(volumeutilsv2.VolumeUtilsV2,
'login_storage_target')
self._mox.StubOutWithMock(volumeutilsv2.VolumeUtilsV2,
'logout_storage_target')
self._mox.StubOutWithMock(volumeutilsv2.VolumeUtilsV2,
'execute_log_out')
self._mox.StubOutClassWithMocks(instance_metadata, 'InstanceMetadata')
self._mox.StubOutWithMock(instance_metadata.InstanceMetadata,
'metadata_for_config_drive')
# Can't use StubOutClassWithMocks due to __exit__ and __enter__
self._mox.StubOutWithMock(configdrive, 'ConfigDriveBuilder')
self._mox.StubOutWithMock(configdrive.ConfigDriveBuilder, 'make_drive')
self._mox.StubOutWithMock(utils, 'execute')
def tearDown(self):
self._mox.UnsetStubs()
super(HyperVAPITestCase, self).tearDown()
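# Added commentary, not part of the original test suite: each test below
# follows the mox record/replay/verify cycle -- expectations are recorded on
# the classes stubbed out in _setup_stubs(), self._mox.ReplayAll() switches
# the mocks into replay mode, the HyperVDriver call under test is executed,
# and self._mox.VerifyAll() asserts that the recorded calls actually happened.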
def test_get_available_resource(self):
cpu_info = {'Architecture': 'fake',
'Name': 'fake',
'Manufacturer': 'ACME, Inc.',
'NumberOfCores': 2,
'NumberOfLogicalProcessors': 4}
tot_mem_kb = 2000000L
free_mem_kb = 1000000L
tot_hdd_b = 4L * 1024 ** 3
free_hdd_b = 3L * 1024 ** 3
windows_version = '6.2.9200'
hostutils.HostUtils.get_memory_info().AndReturn((tot_mem_kb,
free_mem_kb))
m = hostutils.HostUtils.get_volume_info(mox.IsA(str))
m.AndReturn((tot_hdd_b, free_hdd_b))
hostutils.HostUtils.get_cpus_info().AndReturn([cpu_info])
m = hostutils.HostUtils.is_cpu_feature_present(mox.IsA(int))
m.MultipleTimes()
m = hostutils.HostUtils.get_windows_version()
m.AndReturn(windows_version)
self._mox.ReplayAll()
dic = self._conn.get_available_resource(None)
self._mox.VerifyAll()
self.assertEqual(dic['vcpus'], cpu_info['NumberOfLogicalProcessors'])
self.assertEqual(dic['hypervisor_hostname'], platform.node())
self.assertEqual(dic['memory_mb'], tot_mem_kb / 1024)
self.assertEqual(dic['memory_mb_used'],
tot_mem_kb / 1024 - free_mem_kb / 1024)
self.assertEqual(dic['local_gb'], tot_hdd_b / 1024 ** 3)
self.assertEqual(dic['local_gb_used'],
tot_hdd_b / 1024 ** 3 - free_hdd_b / 1024 ** 3)
self.assertEqual(dic['hypervisor_version'],
windows_version.replace('.', ''))
self.assertEqual(dic['supported_instances'],
'[["i686", "hyperv", "hvm"], ["x86_64", "hyperv", "hvm"]]')
def test_get_host_stats(self):
tot_mem_kb = 2000000L
free_mem_kb = 1000000L
tot_hdd_b = 4L * 1024 ** 3
free_hdd_b = 3L * 1024 ** 3
hostutils.HostUtils.get_memory_info().AndReturn((tot_mem_kb,
free_mem_kb))
m = hostutils.HostUtils.get_volume_info(mox.IsA(str))
m.AndReturn((tot_hdd_b, free_hdd_b))
self._mox.ReplayAll()
dic = self._conn.get_host_stats(True)
self._mox.VerifyAll()
self.assertEqual(dic['disk_total'], tot_hdd_b / 1024 ** 3)
self.assertEqual(dic['disk_available'], free_hdd_b / 1024 ** 3)
self.assertEqual(dic['host_memory_total'], tot_mem_kb / 1024)
self.assertEqual(dic['host_memory_free'], free_mem_kb / 1024)
self.assertEqual(dic['disk_total'],
dic['disk_used'] + dic['disk_available'])
self.assertEqual(dic['host_memory_total'],
dic['host_memory_overhead'] +
dic['host_memory_free'])
def test_list_instances(self):
fake_instances = ['fake1', 'fake2']
vmutils.VMUtils.list_instances().AndReturn(fake_instances)
self._mox.ReplayAll()
instances = self._conn.list_instances()
self._mox.VerifyAll()
self.assertEqual(instances, fake_instances)
def test_get_info(self):
self._instance_data = self._get_instance_data()
summary_info = {'NumberOfProcessors': 2,
'EnabledState': constants.HYPERV_VM_STATE_ENABLED,
'MemoryUsage': 1000,
'UpTime': 1}
m = vmutils.VMUtils.vm_exists(mox.Func(self._check_instance_name))
m.AndReturn(True)
func = mox.Func(self._check_instance_name)
m = vmutils.VMUtils.get_vm_summary_info(func)
m.AndReturn(summary_info)
self._mox.ReplayAll()
info = self._conn.get_info(self._instance_data)
self._mox.VerifyAll()
self.assertEqual(info["state"], power_state.RUNNING)
def test_get_info_instance_not_found(self):
# Tests that InstanceNotFound is raised if the instance isn't found
# from the vmutils.vm_exists method.
self._instance_data = self._get_instance_data()
m = vmutils.VMUtils.vm_exists(mox.Func(self._check_instance_name))
m.AndReturn(False)
self._mox.ReplayAll()
self.assertRaises(exception.InstanceNotFound, self._conn.get_info,
self._instance_data)
self._mox.VerifyAll()
def test_spawn_cow_image(self):
self._test_spawn_instance(True)
def test_spawn_cow_image_vhdx(self):
self._test_spawn_instance(True, vhd_format=constants.DISK_FORMAT_VHDX)
def test_spawn_no_cow_image(self):
self._test_spawn_instance(False)
def test_spawn_dynamic_memory(self):
CONF.set_override('dynamic_memory_ratio', 2.0, 'hyperv')
self._test_spawn_instance()
def test_spawn_no_cow_image_vhdx(self):
self._test_spawn_instance(False, vhd_format=constants.DISK_FORMAT_VHDX)
def _setup_spawn_config_drive_mocks(self, use_cdrom):
im = instance_metadata.InstanceMetadata(mox.IgnoreArg(),
content=mox.IsA(list),
extra_md=mox.IsA(dict))
m = fake.PathUtils.get_instance_dir(mox.IsA(str))
m.AndReturn(self._test_instance_dir)
cdb = self._mox.CreateMockAnything()
m = configdrive.ConfigDriveBuilder(instance_md=mox.IgnoreArg())
m.AndReturn(cdb)
# __enter__ and __exit__ are required by "with"
cdb.__enter__().AndReturn(cdb)
cdb.make_drive(mox.IsA(str))
cdb.__exit__(None, None, None).AndReturn(None)
if not use_cdrom:
utils.execute(CONF.hyperv.qemu_img_cmd,
'convert',
'-f',
'raw',
'-O',
'vpc',
mox.IsA(str),
mox.IsA(str),
attempts=1)
fake.PathUtils.remove(mox.IsA(str))
m = vmutils.VMUtils.attach_ide_drive(mox.IsA(str),
mox.IsA(str),
mox.IsA(int),
mox.IsA(int),
mox.IsA(str))
m.WithSideEffects(self._add_ide_disk)
def _test_spawn_config_drive(self, use_cdrom):
self.flags(force_config_drive=True)
self.flags(config_drive_cdrom=use_cdrom, group='hyperv')
self.flags(mkisofs_cmd='mkisofs.exe')
if use_cdrom:
expected_ide_disks = 1
expected_ide_dvds = 1
else:
expected_ide_disks = 2
expected_ide_dvds = 0
self._test_spawn_instance(expected_ide_disks=expected_ide_disks,
expected_ide_dvds=expected_ide_dvds,
config_drive=True,
use_cdrom=use_cdrom)
def test_spawn_config_drive(self):
self._test_spawn_config_drive(False)
def test_spawn_config_drive_cdrom(self):
self._test_spawn_config_drive(True)
def test_spawn_no_config_drive(self):
self.flags(force_config_drive=False)
expected_ide_disks = 1
expected_ide_dvds = 0
self._test_spawn_instance(expected_ide_disks=expected_ide_disks,
expected_ide_dvds=expected_ide_dvds)
def _test_spawn_nova_net_vif(self, with_port):
self.flags(network_api_class='nova.network.api.API')
# Reinstantiate driver, as the VIF plugin is loaded during __init__
self._conn = driver_hyperv.HyperVDriver(None)
def setup_vif_mocks():
fake_vswitch_path = 'fake vswitch path'
fake_vswitch_port = 'fake port'
m = networkutils.NetworkUtils.get_external_vswitch(
CONF.hyperv.vswitch_name)
m.AndReturn(fake_vswitch_path)
m = networkutils.NetworkUtils.vswitch_port_needed()
m.AndReturn(with_port)
if with_port:
m = networkutils.NetworkUtils.create_vswitch_port(
fake_vswitch_path, mox.IsA(str))
m.AndReturn(fake_vswitch_port)
vswitch_conn_data = fake_vswitch_port
else:
vswitch_conn_data = fake_vswitch_path
vmutils.VMUtils.set_nic_connection(mox.IsA(str), mox.IsA(str),
vswitch_conn_data)
self._test_spawn_instance(setup_vif_mocks_func=setup_vif_mocks)
def test_spawn_nova_net_vif_with_port(self):
self._test_spawn_nova_net_vif(True)
def test_spawn_nova_net_vif_without_port(self):
self._test_spawn_nova_net_vif(False)
def test_spawn_nova_net_vif_no_vswitch_exception(self):
self.flags(network_api_class='nova.network.api.API')
# Reinstantiate driver, as the VIF plugin is loaded during __init__
self._conn = driver_hyperv.HyperVDriver(None)
def setup_vif_mocks():
m = networkutils.NetworkUtils.get_external_vswitch(
CONF.hyperv.vswitch_name)
m.AndRaise(vmutils.HyperVException(_('fake vswitch not found')))
self.assertRaises(vmutils.HyperVException, self._test_spawn_instance,
setup_vif_mocks_func=setup_vif_mocks,
with_exception=True)
def test_spawn_with_metrics_collection(self):
self.flags(enable_instance_metrics_collection=True, group='hyperv')
self._test_spawn_instance(False)
def test_spawn_with_ephemeral_storage(self):
self._test_spawn_instance(True, expected_ide_disks=2,
ephemeral_storage=True)
def _check_instance_name(self, vm_name):
return vm_name == self._instance_data['name']
def _test_vm_state_change(self, action, from_state, to_state):
self._instance_data = self._get_instance_data()
vmutils.VMUtils.set_vm_state(mox.Func(self._check_instance_name),
to_state)
self._mox.ReplayAll()
action(self._instance_data)
self._mox.VerifyAll()
def test_pause(self):
self._test_vm_state_change(self._conn.pause, None,
constants.HYPERV_VM_STATE_PAUSED)
def test_pause_already_paused(self):
self._test_vm_state_change(self._conn.pause,
constants.HYPERV_VM_STATE_PAUSED,
constants.HYPERV_VM_STATE_PAUSED)
def test_unpause(self):
self._test_vm_state_change(self._conn.unpause,
constants.HYPERV_VM_STATE_PAUSED,
constants.HYPERV_VM_STATE_ENABLED)
def test_unpause_already_running(self):
self._test_vm_state_change(self._conn.unpause, None,
constants.HYPERV_VM_STATE_ENABLED)
def test_suspend(self):
self._test_vm_state_change(self._conn.suspend, None,
constants.HYPERV_VM_STATE_SUSPENDED)
def test_suspend_already_suspended(self):
self._test_vm_state_change(self._conn.suspend,
constants.HYPERV_VM_STATE_SUSPENDED,
constants.HYPERV_VM_STATE_SUSPENDED)
def test_resume(self):
self._test_vm_state_change(lambda i: self._conn.resume(i, None),
constants.HYPERV_VM_STATE_SUSPENDED,
constants.HYPERV_VM_STATE_ENABLED)
def test_resume_already_running(self):
self._test_vm_state_change(lambda i: self._conn.resume(i, None), None,
constants.HYPERV_VM_STATE_ENABLED)
def test_power_off(self):
self._test_vm_state_change(self._conn.power_off, None,
constants.HYPERV_VM_STATE_DISABLED)
def test_power_off_already_powered_off(self):
self._test_vm_state_change(self._conn.power_off,
constants.HYPERV_VM_STATE_DISABLED,
constants.HYPERV_VM_STATE_DISABLED)
def test_power_on(self):
self._instance_data = self._get_instance_data()
network_info = fake_network.fake_get_instance_nw_info(self.stubs)
vmutils.VMUtils.set_vm_state(mox.Func(self._check_instance_name),
constants.HYPERV_VM_STATE_ENABLED)
self._mox.ReplayAll()
self._conn.power_on(self._context, self._instance_data, network_info)
self._mox.VerifyAll()
def test_power_on_already_running(self):
self._instance_data = self._get_instance_data()
network_info = fake_network.fake_get_instance_nw_info(self.stubs)
vmutils.VMUtils.set_vm_state(mox.Func(self._check_instance_name),
constants.HYPERV_VM_STATE_ENABLED)
self._mox.ReplayAll()
self._conn.power_on(self._context, self._instance_data, network_info)
self._mox.VerifyAll()
def test_reboot(self):
network_info = fake_network.fake_get_instance_nw_info(self.stubs)
self._instance_data = self._get_instance_data()
vmutils.VMUtils.set_vm_state(mox.Func(self._check_instance_name),
constants.HYPERV_VM_STATE_REBOOT)
self._mox.ReplayAll()
self._conn.reboot(self._context, self._instance_data, network_info,
None)
self._mox.VerifyAll()
def _setup_destroy_mocks(self, destroy_disks=True):
m = vmutils.VMUtils.vm_exists(mox.Func(self._check_instance_name))
m.AndReturn(True)
func = mox.Func(self._check_instance_name)
vmutils.VMUtils.set_vm_state(func, constants.HYPERV_VM_STATE_DISABLED)
m = vmutils.VMUtils.get_vm_storage_paths(func)
m.AndReturn(([], []))
vmutils.VMUtils.destroy_vm(func)
if destroy_disks:
m = fake.PathUtils.get_instance_dir(mox.IsA(str),
create_dir=False,
remove_dir=True)
m.AndReturn(self._test_instance_dir)
def test_destroy(self):
self._instance_data = self._get_instance_data()
self._setup_destroy_mocks()
self._mox.ReplayAll()
self._conn.destroy(self._instance_data, None)
self._mox.VerifyAll()
def test_live_migration_unsupported_os(self):
self._check_min_windows_version_satisfied = False
self._conn = driver_hyperv.HyperVDriver(None)
self._test_live_migration(unsupported_os=True)
def test_live_migration_without_volumes(self):
self._test_live_migration()
def test_live_migration_with_volumes(self):
self._test_live_migration(with_volumes=True)
def test_live_migration_with_target_failure(self):
self._test_live_migration(test_failure=True)
def _test_live_migration(self, test_failure=False,
with_volumes=False,
unsupported_os=False):
dest_server = 'fake_server'
instance_data = self._get_instance_data()
instance_name = instance_data['name']
fake_post_method = self._mox.CreateMockAnything()
if not test_failure and not unsupported_os:
fake_post_method(self._context, instance_data, dest_server,
False)
fake_recover_method = self._mox.CreateMockAnything()
if test_failure:
fake_recover_method(self._context, instance_data, dest_server,
False)
fake_ide_controller_path = 'fakeide'
fake_scsi_controller_path = 'fakescsi'
if with_volumes:
fake_scsi_disk_path = 'fake_scsi_disk_path'
fake_target_iqn = 'fake_target_iqn'
fake_target_lun = 1
fake_scsi_paths = {0: fake_scsi_disk_path}
else:
fake_scsi_paths = {}
if not unsupported_os:
m = livemigrationutils.LiveMigrationUtils.live_migrate_vm(
instance_data['name'], dest_server)
if test_failure:
m.AndRaise(vmutils.HyperVException('Simulated failure'))
if with_volumes:
m.AndReturn([(fake_target_iqn, fake_target_lun)])
volumeutils.VolumeUtils.logout_storage_target(fake_target_iqn)
else:
m.AndReturn([])
self._mox.ReplayAll()
try:
hyperv_exception_raised = False
unsupported_os_exception_raised = False
self._conn.live_migration(self._context, instance_data,
dest_server, fake_post_method,
fake_recover_method)
except vmutils.HyperVException:
hyperv_exception_raised = True
except NotImplementedError:
unsupported_os_exception_raised = True
self.assertTrue(not test_failure ^ hyperv_exception_raised)
self.assertTrue(not unsupported_os ^ unsupported_os_exception_raised)
self._mox.VerifyAll()
def test_pre_live_migration_cow_image(self):
self._test_pre_live_migration(True, False)
def test_pre_live_migration_no_cow_image(self):
self._test_pre_live_migration(False, False)
def test_pre_live_migration_with_volumes(self):
self._test_pre_live_migration(False, True)
def _test_pre_live_migration(self, cow, with_volumes):
self.flags(use_cow_images=cow)
instance_data = self._get_instance_data()
instance = db.instance_create(self._context, instance_data)
instance['system_metadata'] = {}
network_info = fake_network.fake_get_instance_nw_info(self.stubs)
m = livemigrationutils.LiveMigrationUtils.check_live_migration_config()
m.AndReturn(True)
if cow:
m = basevolumeutils.BaseVolumeUtils.volume_in_mapping(mox.IsA(str),
None)
m.AndReturn(False)
self._setup_get_cached_image_mocks(cow)
if with_volumes:
block_device_info = db_fakes.get_fake_block_device_info(
self._volume_target_portal, self._volume_id)
mapping = driver.block_device_info_get_mapping(block_device_info)
data = mapping[0]['connection_info']['data']
target_lun = data['target_lun']
target_iqn = data['target_iqn']
target_portal = data['target_portal']
fake_mounted_disk = "fake_mounted_disk"
fake_device_number = 0
self._mock_login_storage_target(target_iqn, target_lun,
target_portal,
fake_mounted_disk,
fake_device_number)
else:
block_device_info = None
self._mox.ReplayAll()
self._conn.pre_live_migration(self._context, instance,
block_device_info, None, network_info)
self._mox.VerifyAll()
if cow:
self.assertTrue(self._fetched_image is not None)
else:
self.assertTrue(self._fetched_image is None)
def test_snapshot_with_update_failure(self):
(snapshot_name, func_call_matcher) = self._setup_snapshot_mocks()
self._update_image_raise_exception = True
self._mox.ReplayAll()
self.assertRaises(vmutils.HyperVException, self._conn.snapshot,
self._context, self._instance_data, snapshot_name,
func_call_matcher.call)
self._mox.VerifyAll()
# Assert states changed in correct order
self.assertIsNone(func_call_matcher.match())
def _setup_snapshot_mocks(self):
expected_calls = [
{'args': (),
'kwargs': {'task_state': task_states.IMAGE_PENDING_UPLOAD}},
{'args': (),
'kwargs': {'task_state': task_states.IMAGE_UPLOADING,
'expected_state': task_states.IMAGE_PENDING_UPLOAD}}
]
func_call_matcher = matchers.FunctionCallMatcher(expected_calls)
snapshot_name = 'test_snapshot_' + str(uuid.uuid4())
fake_hv_snapshot_path = 'fake_snapshot_path'
fake_parent_vhd_path = 'C:\\fake_vhd_path\\parent.vhd'
self._instance_data = self._get_instance_data()
func = mox.Func(self._check_instance_name)
m = vmutils.VMUtils.take_vm_snapshot(func)
m.AndReturn(fake_hv_snapshot_path)
m = fake.PathUtils.get_instance_dir(mox.IsA(str))
m.AndReturn(self._test_instance_dir)
m = vhdutils.VHDUtils.get_vhd_parent_path(mox.IsA(str))
m.AndReturn(fake_parent_vhd_path)
self._fake_dest_disk_path = None
def copy_dest_disk_path(src, dest):
self._fake_dest_disk_path = dest
m = fake.PathUtils.copyfile(mox.IsA(str), mox.IsA(str))
m.WithSideEffects(copy_dest_disk_path)
self._fake_dest_base_disk_path = None
def copy_dest_base_disk_path(src, dest):
self._fake_dest_base_disk_path = dest
m = fake.PathUtils.copyfile(fake_parent_vhd_path, mox.IsA(str))
m.WithSideEffects(copy_dest_base_disk_path)
def check_dest_disk_path(path):
return path == self._fake_dest_disk_path
def check_dest_base_disk_path(path):
return path == self._fake_dest_base_disk_path
func1 = mox.Func(check_dest_disk_path)
func2 = mox.Func(check_dest_base_disk_path)
# Make sure that the hyper-v base and differential VHDs are merged
vhdutils.VHDUtils.reconnect_parent_vhd(func1, func2)
vhdutils.VHDUtils.merge_vhd(func1, func2)
def check_snapshot_path(snapshot_path):
return snapshot_path == fake_hv_snapshot_path
# Make sure that the Hyper-V snapshot is removed
func = mox.Func(check_snapshot_path)
vmutils.VMUtils.remove_vm_snapshot(func)
fake.PathUtils.rmtree(mox.IsA(str))
m = fake.PathUtils.open(func2, 'rb')
m.AndReturn(io.BytesIO(b'fake content'))
return (snapshot_name, func_call_matcher)
def test_snapshot(self):
(snapshot_name, func_call_matcher) = self._setup_snapshot_mocks()
self._mox.ReplayAll()
self._conn.snapshot(self._context, self._instance_data, snapshot_name,
func_call_matcher.call)
self._mox.VerifyAll()
self.assertTrue(self._image_metadata and
"disk_format" in self._image_metadata and
self._image_metadata["disk_format"] == "vhd")
# Assert states changed in correct order
self.assertIsNone(func_call_matcher.match())
def _get_instance_data(self):
instance_name = 'openstack_unit_test_vm_' + str(uuid.uuid4())
return db_fakes.get_fake_instance_data(instance_name,
self._project_id,
self._user_id)
def _spawn_instance(self, cow, block_device_info=None,
ephemeral_storage=False):
self.flags(use_cow_images=cow)
self._instance_data = self._get_instance_data()
instance = db.instance_create(self._context, self._instance_data)
instance['system_metadata'] = {}
if ephemeral_storage:
instance['ephemeral_gb'] = 1
image = db_fakes.get_fake_image_data(self._project_id, self._user_id)
network_info = fake_network.fake_get_instance_nw_info(self.stubs)
self._conn.spawn(self._context, instance, image,
injected_files=[], admin_password=None,
network_info=network_info,
block_device_info=block_device_info)
def _add_ide_disk(self, vm_name, path, ctrller_addr,
drive_addr, drive_type):
if drive_type == constants.IDE_DISK:
self._instance_ide_disks.append(path)
elif drive_type == constants.IDE_DVD:
self._instance_ide_dvds.append(path)
def _add_volume_disk(self, vm_name, controller_path, address,
mounted_disk_path):
self._instance_volume_disks.append(mounted_disk_path)
def _check_img_path(self, image_path):
return image_path == self._fetched_image
def _setup_create_instance_mocks(self, setup_vif_mocks_func=None,
boot_from_volume=False,
block_device_info=None,
admin_permissions=True,
ephemeral_storage=False):
vmutils.VMUtils.create_vm(mox.Func(self._check_vm_name), mox.IsA(int),
mox.IsA(int), mox.IsA(bool),
CONF.hyperv.dynamic_memory_ratio)
if not boot_from_volume:
m = vmutils.VMUtils.attach_ide_drive(mox.Func(self._check_vm_name),
mox.IsA(str),
mox.IsA(int),
mox.IsA(int),
mox.IsA(str))
m.WithSideEffects(self._add_ide_disk).InAnyOrder()
if ephemeral_storage:
m = vmutils.VMUtils.attach_ide_drive(mox.Func(self._check_vm_name),
mox.IsA(str),
mox.IsA(int),
mox.IsA(int),
mox.IsA(str))
m.WithSideEffects(self._add_ide_disk).InAnyOrder()
func = mox.Func(self._check_vm_name)
m = vmutils.VMUtils.create_scsi_controller(func)
m.InAnyOrder()
if boot_from_volume:
mapping = driver.block_device_info_get_mapping(block_device_info)
data = mapping[0]['connection_info']['data']
target_lun = data['target_lun']
target_iqn = data['target_iqn']
target_portal = data['target_portal']
self._mock_attach_volume(mox.Func(self._check_vm_name), target_iqn,
target_lun, target_portal, True)
vmutils.VMUtils.create_nic(mox.Func(self._check_vm_name), mox.IsA(str),
mox.IsA(str)).InAnyOrder()
if setup_vif_mocks_func:
setup_vif_mocks_func()
if CONF.hyperv.enable_instance_metrics_collection:
vmutils.VMUtils.enable_vm_metrics_collection(
mox.Func(self._check_vm_name))
def _set_vm_name(self, vm_name):
self._test_vm_name = vm_name
def _check_vm_name(self, vm_name):
return vm_name == self._test_vm_name
def _setup_check_admin_permissions_mocks(self, admin_permissions=True):
self._mox.StubOutWithMock(vmutils.VMUtils,
'check_admin_permissions')
m = vmutils.VMUtils.check_admin_permissions()
if admin_permissions:
m.AndReturn(None)
else:
m.AndRaise(vmutils.HyperVAuthorizationException(_(
'Simulated failure')))
def _setup_get_cached_image_mocks(self, cow=True,
vhd_format=constants.DISK_FORMAT_VHD):
m = vhdutils.VHDUtils.get_vhd_format(
mox.Func(self._check_img_path))
m.AndReturn(vhd_format)
def check_img_path_with_ext(image_path):
return image_path == self._fetched_image + '.' + vhd_format.lower()
fake.PathUtils.rename(mox.Func(self._check_img_path),
mox.Func(check_img_path_with_ext))
if cow and vhd_format == constants.DISK_FORMAT_VHD:
m = vhdutils.VHDUtils.get_vhd_info(
mox.Func(check_img_path_with_ext))
m.AndReturn({'MaxInternalSize': 1024})
fake.PathUtils.copyfile(mox.IsA(str), mox.IsA(str))
m = vhdutils.VHDUtils.get_internal_vhd_size_by_file_size(
mox.IsA(str), mox.IsA(object))
m.AndReturn(1025)
vhdutils.VHDUtils.resize_vhd(mox.IsA(str), mox.IsA(object))
def _setup_spawn_instance_mocks(self, cow, setup_vif_mocks_func=None,
with_exception=False,
block_device_info=None,
boot_from_volume=False,
config_drive=False,
use_cdrom=False,
admin_permissions=True,
vhd_format=constants.DISK_FORMAT_VHD,
ephemeral_storage=False):
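        # Records, in the order vmops is expected to issue them, the mocked
        # calls for a spawn: VM existence check, instance directory handling,
        # image caching and VHD resize, admin permission check, VM and device
        # creation, and finally either powering the VM on or, when an
        # exception is simulated, the destroy path.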
m = vmutils.VMUtils.vm_exists(mox.IsA(str))
m.WithSideEffects(self._set_vm_name).AndReturn(False)
m = fake.PathUtils.get_instance_dir(mox.IsA(str),
create_dir=False,
remove_dir=True)
m.AndReturn(self._test_instance_dir)
m = basevolumeutils.BaseVolumeUtils.volume_in_mapping(
mox.IsA(str), block_device_info)
m.AndReturn(boot_from_volume)
if not boot_from_volume:
m = fake.PathUtils.get_instance_dir(mox.Func(self._check_vm_name))
m.AndReturn(self._test_instance_dir)
self._setup_get_cached_image_mocks(cow, vhd_format)
if cow:
vhdutils.VHDUtils.create_differencing_vhd(mox.IsA(str),
mox.IsA(str))
else:
fake.PathUtils.copyfile(mox.IsA(str), mox.IsA(str))
m = vhdutils.VHDUtils.get_vhd_info(mox.IsA(str))
m.AndReturn({'MaxInternalSize': 1024, 'FileSize': 1024,
'Type': 2})
m = vhdutils.VHDUtils.get_internal_vhd_size_by_file_size(
mox.IsA(str), mox.IsA(object))
m.AndReturn(1025)
vhdutils.VHDUtils.resize_vhd(mox.IsA(str), mox.IsA(object))
self._setup_check_admin_permissions_mocks(
admin_permissions=admin_permissions)
if ephemeral_storage:
m = fake.PathUtils.get_instance_dir(mox.Func(self._check_vm_name))
m.AndReturn(self._test_instance_dir)
vhdutils.VHDUtils.create_dynamic_vhd(mox.IsA(str), mox.IsA(int),
mox.IsA(str))
self._setup_create_instance_mocks(setup_vif_mocks_func,
boot_from_volume,
block_device_info,
ephemeral_storage=ephemeral_storage)
if config_drive:
self._setup_spawn_config_drive_mocks(use_cdrom)
# TODO(alexpilotti) Based on where the exception is thrown
# some of the above mock calls need to be skipped
if with_exception:
self._setup_destroy_mocks()
else:
vmutils.VMUtils.set_vm_state(mox.Func(self._check_vm_name),
constants.HYPERV_VM_STATE_ENABLED)
def _test_spawn_instance(self, cow=True,
expected_ide_disks=1,
expected_ide_dvds=0,
setup_vif_mocks_func=None,
with_exception=False,
config_drive=False,
use_cdrom=False,
admin_permissions=True,
vhd_format=constants.DISK_FORMAT_VHD,
ephemeral_storage=False):
self._setup_spawn_instance_mocks(cow,
setup_vif_mocks_func,
with_exception,
config_drive=config_drive,
use_cdrom=use_cdrom,
admin_permissions=admin_permissions,
vhd_format=vhd_format,
ephemeral_storage=ephemeral_storage)
self._mox.ReplayAll()
self._spawn_instance(cow, ephemeral_storage=ephemeral_storage)
self._mox.VerifyAll()
self.assertEqual(len(self._instance_ide_disks), expected_ide_disks)
self.assertEqual(len(self._instance_ide_dvds), expected_ide_dvds)
vhd_path = os.path.join(self._test_instance_dir, 'root.' +
vhd_format.lower())
self.assertEqual(vhd_path, self._instance_ide_disks[0])
def _mock_get_mounted_disk_from_lun(self, target_iqn, target_lun,
fake_mounted_disk,
fake_device_number):
m = volumeutils.VolumeUtils.get_device_number_for_target(target_iqn,
target_lun)
m.AndReturn(fake_device_number)
m = vmutils.VMUtils.get_mounted_disk_by_drive_number(
fake_device_number)
m.AndReturn(fake_mounted_disk)
def _mock_login_storage_target(self, target_iqn, target_lun, target_portal,
fake_mounted_disk, fake_device_number):
m = volumeutils.VolumeUtils.get_device_number_for_target(target_iqn,
target_lun)
m.AndReturn(fake_device_number)
volumeutils.VolumeUtils.login_storage_target(target_lun,
target_iqn,
target_portal)
self._mock_get_mounted_disk_from_lun(target_iqn, target_lun,
fake_mounted_disk,
fake_device_number)
def _mock_attach_volume(self, instance_name, target_iqn, target_lun,
target_portal=None, boot_from_volume=False):
fake_mounted_disk = "fake_mounted_disk"
fake_device_number = 0
fake_controller_path = 'fake_scsi_controller_path'
self._mock_login_storage_target(target_iqn, target_lun,
target_portal,
fake_mounted_disk,
fake_device_number)
self._mock_get_mounted_disk_from_lun(target_iqn, target_lun,
fake_mounted_disk,
fake_device_number)
if boot_from_volume:
m = vmutils.VMUtils.get_vm_ide_controller(instance_name, 0)
m.AndReturn(fake_controller_path)
fake_free_slot = 0
else:
m = vmutils.VMUtils.get_vm_scsi_controller(instance_name)
m.AndReturn(fake_controller_path)
fake_free_slot = 1
m = vmutils.VMUtils.get_attached_disks_count(fake_controller_path)
m.AndReturn(fake_free_slot)
m = vmutils.VMUtils.attach_volume_to_controller(instance_name,
fake_controller_path,
fake_free_slot,
fake_mounted_disk)
m.WithSideEffects(self._add_volume_disk)
def _test_util_class_version(self, v1_class, v2_class,
get_instance_action, is_hyperv_2012,
force_v1_flag, force_utils_v1):
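        # Checks that utilsfactory returns the V2 utils class on Hyper-V 2012
        # unless the corresponding force_*_v1 flag is set, and the V1 class
        # on older hosts.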
self._check_min_windows_version_satisfied = is_hyperv_2012
        CONF.set_override(force_v1_flag, force_utils_v1, 'hyperv')
self._conn = driver_hyperv.HyperVDriver(None)
instance = get_instance_action()
is_v1 = isinstance(instance, v1_class)
# v2_class can inherit from v1_class
is_v2 = isinstance(instance, v2_class)
        self.assertTrue((is_hyperv_2012 and not force_utils_v1) ^
                        (is_v1 and not is_v2))
def test_volumeutils_version_hyperv_2012(self):
self._test_util_class_version(volumeutils.VolumeUtils,
volumeutilsv2.VolumeUtilsV2,
lambda: utilsfactory.get_volumeutils(),
True, 'force_volumeutils_v1', False)
def test_volumeutils_version_hyperv_2012_force_v1(self):
self._test_util_class_version(volumeutils.VolumeUtils,
volumeutilsv2.VolumeUtilsV2,
lambda: utilsfactory.get_volumeutils(),
True, 'force_volumeutils_v1', True)
def test_volumeutils_version_hyperv_2008R2(self):
self._test_util_class_version(volumeutils.VolumeUtils,
volumeutilsv2.VolumeUtilsV2,
lambda: utilsfactory.get_volumeutils(),
False, 'force_volumeutils_v1', False)
def test_vmutils_version_hyperv_2012(self):
self._test_util_class_version(vmutils.VMUtils, vmutilsv2.VMUtilsV2,
lambda: utilsfactory.get_vmutils(),
True, 'force_hyperv_utils_v1', False)
def test_vmutils_version_hyperv_2012_force_v1(self):
self._test_util_class_version(vmutils.VMUtils, vmutilsv2.VMUtilsV2,
lambda: utilsfactory.get_vmutils(),
True, 'force_hyperv_utils_v1', True)
def test_vmutils_version_hyperv_2008R2(self):
self._test_util_class_version(vmutils.VMUtils, vmutilsv2.VMUtilsV2,
lambda: utilsfactory.get_vmutils(),
False, 'force_hyperv_utils_v1', False)
def test_vhdutils_version_hyperv_2012(self):
self._test_util_class_version(vhdutils.VHDUtils,
vhdutilsv2.VHDUtilsV2,
lambda: utilsfactory.get_vhdutils(),
True, 'force_hyperv_utils_v1', False)
def test_vhdutils_version_hyperv_2012_force_v1(self):
self._test_util_class_version(vhdutils.VHDUtils,
vhdutilsv2.VHDUtilsV2,
lambda: utilsfactory.get_vhdutils(),
True, 'force_hyperv_utils_v1', True)
def test_vhdutils_version_hyperv_2008R2(self):
self._test_util_class_version(vhdutils.VHDUtils,
vhdutilsv2.VHDUtilsV2,
lambda: utilsfactory.get_vhdutils(),
False, 'force_hyperv_utils_v1', False)
def test_networkutils_version_hyperv_2012(self):
self._test_util_class_version(networkutils.NetworkUtils,
networkutilsv2.NetworkUtilsV2,
lambda: utilsfactory.get_networkutils(),
True, 'force_hyperv_utils_v1', False)
def test_networkutils_version_hyperv_2012_force_v1(self):
self._test_util_class_version(networkutils.NetworkUtils,
networkutilsv2.NetworkUtilsV2,
lambda: utilsfactory.get_networkutils(),
True, 'force_hyperv_utils_v1', True)
def test_networkutils_version_hyperv_2008R2(self):
self._test_util_class_version(networkutils.NetworkUtils,
networkutilsv2.NetworkUtilsV2,
lambda: utilsfactory.get_networkutils(),
False, 'force_hyperv_utils_v1', False)
def test_attach_volume(self):
instance_data = self._get_instance_data()
connection_info = db_fakes.get_fake_volume_info_data(
self._volume_target_portal, self._volume_id)
data = connection_info['data']
target_lun = data['target_lun']
target_iqn = data['target_iqn']
target_portal = data['target_portal']
mount_point = '/dev/sdc'
self._mock_attach_volume(instance_data['name'], target_iqn, target_lun,
target_portal)
self._mox.ReplayAll()
self._conn.attach_volume(None, connection_info, instance_data,
mount_point)
self._mox.VerifyAll()
self.assertEqual(len(self._instance_volume_disks), 1)
def _mock_detach_volume(self, target_iqn, target_lun):
mount_point = '/dev/sdc'
fake_mounted_disk = "fake_mounted_disk"
fake_device_number = 0
m = volumeutils.VolumeUtils.get_device_number_for_target(target_iqn,
target_lun)
m.AndReturn(fake_device_number)
m = vmutils.VMUtils.get_mounted_disk_by_drive_number(
fake_device_number)
m.AndReturn(fake_mounted_disk)
vmutils.VMUtils.detach_vm_disk(mox.IsA(str), fake_mounted_disk)
volumeutils.VolumeUtils.logout_storage_target(mox.IsA(str))
def test_detach_volume(self):
instance_data = self._get_instance_data()
instance_name = instance_data['name']
connection_info = db_fakes.get_fake_volume_info_data(
self._volume_target_portal, self._volume_id)
data = connection_info['data']
target_lun = data['target_lun']
target_iqn = data['target_iqn']
target_portal = data['target_portal']
mount_point = '/dev/sdc'
self._mock_detach_volume(target_iqn, target_lun)
self._mox.ReplayAll()
self._conn.detach_volume(connection_info, instance_data, mount_point)
self._mox.VerifyAll()
def test_boot_from_volume(self):
block_device_info = db_fakes.get_fake_block_device_info(
self._volume_target_portal, self._volume_id)
self._setup_spawn_instance_mocks(cow=False,
block_device_info=block_device_info,
boot_from_volume=True)
self._mox.ReplayAll()
self._spawn_instance(False, block_device_info)
self._mox.VerifyAll()
self.assertEqual(len(self._instance_volume_disks), 1)
def test_get_volume_connector(self):
self._instance_data = self._get_instance_data()
instance = db.instance_create(self._context, self._instance_data)
fake_my_ip = "fake_ip"
fake_host = "fake_host"
fake_initiator = "fake_initiator"
self.flags(my_ip=fake_my_ip)
self.flags(host=fake_host)
m = volumeutils.VolumeUtils.get_iscsi_initiator()
m.AndReturn(fake_initiator)
self._mox.ReplayAll()
data = self._conn.get_volume_connector(instance)
self._mox.VerifyAll()
self.assertEqual(fake_my_ip, data.get('ip'))
self.assertEqual(fake_host, data.get('host'))
self.assertEqual(fake_initiator, data.get('initiator'))
def _setup_test_migrate_disk_and_power_off_mocks(self, same_host=False,
copy_exception=False,
size_exception=False):
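        # Mocks the migrate/resize flow: the VM is powered off, its root VHD
        # is copied towards the destination (optionally simulating a copy
        # error, or a target flavor that is too small for the root disk),
        # the original files are moved to the revert directory and the VM is
        # destroyed.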
self._instance_data = self._get_instance_data()
instance = db.instance_create(self._context, self._instance_data)
network_info = fake_network.fake_get_instance_nw_info(self.stubs)
instance['root_gb'] = 10
fake_local_ip = '10.0.0.1'
if same_host:
fake_dest_ip = fake_local_ip
else:
fake_dest_ip = '10.0.0.2'
if size_exception:
flavor = 'm1.tiny'
else:
flavor = 'm1.small'
instance_type = db.flavor_get_by_name(self._context, flavor)
if not size_exception:
fake_root_vhd_path = 'C:\\FakePath\\root.vhd'
fake_revert_path = os.path.join(self._test_instance_dir, '_revert')
func = mox.Func(self._check_instance_name)
vmutils.VMUtils.set_vm_state(func,
constants.HYPERV_VM_STATE_DISABLED)
m = vmutils.VMUtils.get_vm_storage_paths(func)
m.AndReturn(([fake_root_vhd_path], []))
m = hostutils.HostUtils.get_local_ips()
m.AndReturn([fake_local_ip])
m = fake.PathUtils.get_instance_dir(mox.IsA(str))
m.AndReturn(self._test_instance_dir)
m = pathutils.PathUtils.get_instance_migr_revert_dir(
instance['name'], remove_dir=True)
m.AndReturn(fake_revert_path)
if same_host:
fake.PathUtils.makedirs(mox.IsA(str))
m = fake.PathUtils.copy(fake_root_vhd_path, mox.IsA(str))
if copy_exception:
m.AndRaise(shutil.Error('Simulated copy error'))
m = fake.PathUtils.get_instance_dir(mox.IsA(str),
mox.IsA(str),
remove_dir=True)
m.AndReturn(self._test_instance_dir)
else:
fake.PathUtils.rename(mox.IsA(str), mox.IsA(str))
destroy_disks = True
if same_host:
fake.PathUtils.rename(mox.IsA(str), mox.IsA(str))
destroy_disks = False
self._setup_destroy_mocks(False)
if destroy_disks:
m = fake.PathUtils.get_instance_dir(mox.IsA(str),
mox.IsA(str),
remove_dir=True)
m.AndReturn(self._test_instance_dir)
return (instance, fake_dest_ip, network_info, instance_type)
def test_migrate_disk_and_power_off(self):
(instance,
fake_dest_ip,
network_info,
instance_type) = self._setup_test_migrate_disk_and_power_off_mocks()
self._mox.ReplayAll()
self._conn.migrate_disk_and_power_off(self._context, instance,
fake_dest_ip, instance_type,
network_info)
self._mox.VerifyAll()
def test_migrate_disk_and_power_off_same_host(self):
args = self._setup_test_migrate_disk_and_power_off_mocks(
same_host=True)
(instance, fake_dest_ip, network_info, instance_type) = args
self._mox.ReplayAll()
self._conn.migrate_disk_and_power_off(self._context, instance,
fake_dest_ip, instance_type,
network_info)
self._mox.VerifyAll()
def test_migrate_disk_and_power_off_copy_exception(self):
args = self._setup_test_migrate_disk_and_power_off_mocks(
copy_exception=True)
(instance, fake_dest_ip, network_info, instance_type) = args
self._mox.ReplayAll()
self.assertRaises(shutil.Error, self._conn.migrate_disk_and_power_off,
self._context, instance, fake_dest_ip,
instance_type, network_info)
self._mox.VerifyAll()
def test_migrate_disk_and_power_off_smaller_root_vhd_size_exception(self):
args = self._setup_test_migrate_disk_and_power_off_mocks(
size_exception=True)
(instance, fake_dest_ip, network_info, instance_type) = args
self._mox.ReplayAll()
self.assertRaises(vmutils.VHDResizeException,
self._conn.migrate_disk_and_power_off,
self._context, instance, fake_dest_ip,
instance_type, network_info)
self._mox.VerifyAll()
def _test_finish_migration(self, power_on, ephemeral_storage=False):
self._instance_data = self._get_instance_data()
instance = db.instance_create(self._context, self._instance_data)
instance['system_metadata'] = {}
network_info = fake_network.fake_get_instance_nw_info(self.stubs)
m = basevolumeutils.BaseVolumeUtils.volume_in_mapping(mox.IsA(str),
None)
m.AndReturn(False)
m = fake.PathUtils.get_instance_dir(mox.IsA(str))
m.AndReturn(self._test_instance_dir)
self._mox.StubOutWithMock(fake.PathUtils, 'exists')
m = fake.PathUtils.exists(mox.IsA(str))
m.AndReturn(True)
fake_parent_vhd_path = (os.path.join('FakeParentPath', '%s.vhd' %
instance["image_ref"]))
m = vhdutils.VHDUtils.get_vhd_info(mox.IsA(str))
m.AndReturn({'ParentPath': fake_parent_vhd_path,
'MaxInternalSize': 1})
m = vhdutils.VHDUtils.get_internal_vhd_size_by_file_size(
mox.IsA(str), mox.IsA(object))
m.AndReturn(1025)
vhdutils.VHDUtils.reconnect_parent_vhd(mox.IsA(str), mox.IsA(str))
m = vhdutils.VHDUtils.get_vhd_info(mox.IsA(str))
m.AndReturn({'MaxInternalSize': 1024})
m = fake.PathUtils.exists(mox.IsA(str))
m.AndReturn(True)
m = fake.PathUtils.get_instance_dir(mox.IsA(str))
        if ephemeral_storage:
            m.AndReturn(self._test_instance_dir)
        else:
            m.AndReturn(None)
self._set_vm_name(instance['name'])
self._setup_create_instance_mocks(None, False,
ephemeral_storage=ephemeral_storage)
if power_on:
vmutils.VMUtils.set_vm_state(mox.Func(self._check_instance_name),
constants.HYPERV_VM_STATE_ENABLED)
self._mox.ReplayAll()
self._conn.finish_migration(self._context, None, instance, "",
network_info, None, False, None, power_on)
self._mox.VerifyAll()
def test_finish_migration_power_on(self):
self._test_finish_migration(True)
def test_finish_migration_power_off(self):
self._test_finish_migration(False)
def test_finish_migration_with_ephemeral_storage(self):
self._test_finish_migration(False, ephemeral_storage=True)
def test_confirm_migration(self):
self._instance_data = self._get_instance_data()
instance = db.instance_create(self._context, self._instance_data)
network_info = fake_network.fake_get_instance_nw_info(self.stubs)
pathutils.PathUtils.get_instance_migr_revert_dir(instance['name'],
remove_dir=True)
self._mox.ReplayAll()
self._conn.confirm_migration(None, instance, network_info)
self._mox.VerifyAll()
def _test_finish_revert_migration(self, power_on, ephemeral_storage=False):
self._instance_data = self._get_instance_data()
instance = db.instance_create(self._context, self._instance_data)
network_info = fake_network.fake_get_instance_nw_info(self.stubs)
fake_revert_path = ('C:\\FakeInstancesPath\\%s\\_revert' %
instance['name'])
m = basevolumeutils.BaseVolumeUtils.volume_in_mapping(mox.IsA(str),
None)
m.AndReturn(False)
m = fake.PathUtils.get_instance_dir(mox.IsA(str),
create_dir=False,
remove_dir=True)
m.AndReturn(self._test_instance_dir)
m = pathutils.PathUtils.get_instance_migr_revert_dir(instance['name'])
m.AndReturn(fake_revert_path)
fake.PathUtils.rename(fake_revert_path, mox.IsA(str))
m = fake.PathUtils.get_instance_dir(mox.IsA(str))
m.AndReturn(self._test_instance_dir)
m = fake.PathUtils.get_instance_dir(mox.IsA(str))
if ephemeral_storage:
m.AndReturn(self._test_instance_dir)
else:
m.AndReturn(None)
self._set_vm_name(instance['name'])
self._setup_create_instance_mocks(None, False,
ephemeral_storage=ephemeral_storage)
if power_on:
vmutils.VMUtils.set_vm_state(mox.Func(self._check_instance_name),
constants.HYPERV_VM_STATE_ENABLED)
self._mox.ReplayAll()
self._conn.finish_revert_migration(instance, network_info, None,
power_on)
self._mox.VerifyAll()
def test_finish_revert_migration_power_on(self):
self._test_finish_revert_migration(True)
def test_finish_revert_migration_power_off(self):
self._test_finish_revert_migration(False)
def test_spawn_no_admin_permissions(self):
self.assertRaises(vmutils.HyperVAuthorizationException,
self._test_spawn_instance,
with_exception=True,
admin_permissions=False)
def test_finish_revert_migration_with_ephemeral_storage(self):
self._test_finish_revert_migration(False, ephemeral_storage=True)
def test_plug_vifs(self):
# Check to make sure the method raises NotImplementedError.
self.assertRaises(NotImplementedError,
self._conn.plug_vifs,
instance=self._test_spawn_instance,
network_info=None)
def test_unplug_vifs(self):
# Check to make sure the method raises NotImplementedError.
self.assertRaises(NotImplementedError,
self._conn.unplug_vifs,
instance=self._test_spawn_instance,
network_info=None)
|
|
# Copyright (C) 2012-2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Configuration for libvirt objects.
Classes to represent the configuration of various libvirt objects
and support conversion to/from XML. These classes are solely concerned
by providing direct Object <-> XML document conversions. No policy or
operational decisions should be made by code in these classes. Such
policy belongs in the 'designer.py' module which provides simplified
helpers for populating up config object instances.
"""
from nova import exception
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova.openstack.common import units
from lxml import etree
LOG = logging.getLogger(__name__)
class LibvirtConfigObject(object):
def __init__(self, **kwargs):
super(LibvirtConfigObject, self).__init__()
self.root_name = kwargs.get("root_name")
self.ns_prefix = kwargs.get('ns_prefix')
self.ns_uri = kwargs.get('ns_uri')
@staticmethod
def _text_node(name, value):
child = etree.Element(name)
child.text = str(value)
return child
def format_dom(self):
if self.ns_uri is None:
return etree.Element(self.root_name)
else:
return etree.Element("{" + self.ns_uri + "}" + self.root_name,
nsmap={self.ns_prefix: self.ns_uri})
def parse_str(self, xmlstr):
self.parse_dom(etree.fromstring(xmlstr))
def parse_dom(self, xmldoc):
if self.root_name != xmldoc.tag:
raise exception.InvalidInput(
"Root element name should be '%s' not '%s'"
% (self.root_name, xmldoc.tag))
def to_xml(self, pretty_print=True):
root = self.format_dom()
xml_str = etree.tostring(root, pretty_print=pretty_print)
LOG.debug(_("Generated XML %s "), (xml_str,))
return xml_str
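# Usage sketch (illustrative only, not part of the original module): every
# config class follows the same contract -- format_dom() builds an lxml
# element tree, to_xml() serializes it, and parse_str()/parse_dom() go the
# other way.  With the base class alone this is roughly:
#
#     obj = LibvirtConfigObject(root_name="example")
#     obj.to_xml()                 # -> '<example/>\n'
#     obj.parse_str("<example/>")  # accepted: the root tag matches root_name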
class LibvirtConfigCaps(LibvirtConfigObject):
def __init__(self, **kwargs):
super(LibvirtConfigCaps, self).__init__(root_name="capabilities",
**kwargs)
self.host = None
self.guests = []
def parse_dom(self, xmldoc):
super(LibvirtConfigCaps, self).parse_dom(xmldoc)
for c in xmldoc.getchildren():
if c.tag == "host":
host = LibvirtConfigCapsHost()
host.parse_dom(c)
self.host = host
elif c.tag == "guest":
guest = LibvirtConfigCapsGuest()
guest.parse_dom(c)
self.guests.append(guest)
def format_dom(self):
caps = super(LibvirtConfigCaps, self).format_dom()
if self.host:
caps.append(self.host.format_dom())
for g in self.guests:
caps.append(g.format_dom())
return caps
class LibvirtConfigCapsHost(LibvirtConfigObject):
def __init__(self, **kwargs):
super(LibvirtConfigCapsHost, self).__init__(root_name="host",
**kwargs)
self.cpu = None
self.uuid = None
def parse_dom(self, xmldoc):
super(LibvirtConfigCapsHost, self).parse_dom(xmldoc)
for c in xmldoc.getchildren():
if c.tag == "cpu":
cpu = LibvirtConfigCPU()
cpu.parse_dom(c)
self.cpu = cpu
elif c.tag == "uuid":
self.uuid = c.text
def format_dom(self):
caps = super(LibvirtConfigCapsHost, self).format_dom()
if self.uuid:
caps.append(self._text_node("uuid", self.uuid))
if self.cpu:
caps.append(self.cpu.format_dom())
return caps
class LibvirtConfigCapsGuest(LibvirtConfigObject):
def __init__(self, **kwargs):
super(LibvirtConfigCapsGuest, self).__init__(root_name="guest",
**kwargs)
self.arch = None
self.ostype = None
self.domtype = list()
def parse_dom(self, xmldoc):
super(LibvirtConfigCapsGuest, self).parse_dom(xmldoc)
for c in xmldoc.getchildren():
if c.tag == "os_type":
self.ostype = c.text
elif c.tag == "arch":
self.arch = c.get("name")
for sc in c.getchildren():
if sc.tag == "domain":
self.domtype.append(sc.get("type"))
def format_dom(self):
caps = super(LibvirtConfigCapsGuest, self).format_dom()
if self.ostype is not None:
caps.append(self._text_node("os_type", self.ostype))
if self.arch:
arch = etree.Element("arch", name=self.arch)
for dt in self.domtype:
dte = etree.Element("domain")
dte.set("type", dt)
arch.append(dte)
caps.append(arch)
return caps
class LibvirtConfigGuestTimer(LibvirtConfigObject):
def __init__(self, **kwargs):
super(LibvirtConfigGuestTimer, self).__init__(root_name="timer",
**kwargs)
self.name = "platform"
self.track = None
self.tickpolicy = None
self.present = None
def format_dom(self):
tm = super(LibvirtConfigGuestTimer, self).format_dom()
tm.set("name", self.name)
if self.track is not None:
tm.set("track", self.track)
if self.tickpolicy is not None:
tm.set("tickpolicy", self.tickpolicy)
if self.present is not None:
if self.present:
tm.set("present", "yes")
else:
tm.set("present", "no")
return tm
class LibvirtConfigGuestClock(LibvirtConfigObject):
def __init__(self, **kwargs):
super(LibvirtConfigGuestClock, self).__init__(root_name="clock",
**kwargs)
self.offset = "utc"
self.adjustment = None
self.timezone = None
self.timers = []
def format_dom(self):
clk = super(LibvirtConfigGuestClock, self).format_dom()
clk.set("offset", self.offset)
if self.adjustment:
clk.set("adjustment", self.adjustment)
elif self.timezone:
clk.set("timezone", self.timezone)
for tm in self.timers:
clk.append(tm.format_dom())
return clk
def add_timer(self, tm):
self.timers.append(tm)
class LibvirtConfigCPUFeature(LibvirtConfigObject):
def __init__(self, name=None, **kwargs):
super(LibvirtConfigCPUFeature, self).__init__(root_name='feature',
**kwargs)
self.name = name
def parse_dom(self, xmldoc):
super(LibvirtConfigCPUFeature, self).parse_dom(xmldoc)
self.name = xmldoc.get("name")
def format_dom(self):
ft = super(LibvirtConfigCPUFeature, self).format_dom()
ft.set("name", self.name)
return ft
class LibvirtConfigCPU(LibvirtConfigObject):
def __init__(self, **kwargs):
super(LibvirtConfigCPU, self).__init__(root_name='cpu',
**kwargs)
self.arch = None
self.vendor = None
self.model = None
self.sockets = None
self.cores = None
self.threads = None
self.features = []
def parse_dom(self, xmldoc):
super(LibvirtConfigCPU, self).parse_dom(xmldoc)
for c in xmldoc.getchildren():
if c.tag == "arch":
self.arch = c.text
elif c.tag == "model":
self.model = c.text
elif c.tag == "vendor":
self.vendor = c.text
elif c.tag == "topology":
self.sockets = int(c.get("sockets"))
self.cores = int(c.get("cores"))
self.threads = int(c.get("threads"))
elif c.tag == "feature":
f = LibvirtConfigCPUFeature()
f.parse_dom(c)
self.add_feature(f)
def format_dom(self):
cpu = super(LibvirtConfigCPU, self).format_dom()
if self.arch is not None:
cpu.append(self._text_node("arch", self.arch))
if self.model is not None:
cpu.append(self._text_node("model", self.model))
if self.vendor is not None:
cpu.append(self._text_node("vendor", self.vendor))
if (self.sockets is not None and
self.cores is not None and
self.threads is not None):
top = etree.Element("topology")
top.set("sockets", str(self.sockets))
top.set("cores", str(self.cores))
top.set("threads", str(self.threads))
cpu.append(top)
for f in self.features:
cpu.append(f.format_dom())
return cpu
def add_feature(self, feat):
self.features.append(feat)
class LibvirtConfigGuestCPUFeature(LibvirtConfigCPUFeature):
def __init__(self, name=None, **kwargs):
super(LibvirtConfigGuestCPUFeature, self).__init__(name, **kwargs)
self.policy = "require"
def format_dom(self):
ft = super(LibvirtConfigGuestCPUFeature, self).format_dom()
ft.set("policy", self.policy)
return ft
class LibvirtConfigGuestCPU(LibvirtConfigCPU):
def __init__(self, **kwargs):
super(LibvirtConfigGuestCPU, self).__init__(**kwargs)
self.mode = None
self.match = "exact"
def parse_dom(self, xmldoc):
super(LibvirtConfigGuestCPU, self).parse_dom(xmldoc)
self.mode = xmldoc.get('mode')
self.match = xmldoc.get('match')
def format_dom(self):
cpu = super(LibvirtConfigGuestCPU, self).format_dom()
if self.mode:
cpu.set("mode", self.mode)
cpu.set("match", self.match)
return cpu
class LibvirtConfigGuestSMBIOS(LibvirtConfigObject):
def __init__(self, **kwargs):
super(LibvirtConfigGuestSMBIOS, self).__init__(root_name="smbios",
**kwargs)
self.mode = "sysinfo"
def format_dom(self):
smbios = super(LibvirtConfigGuestSMBIOS, self).format_dom()
smbios.set("mode", self.mode)
return smbios
class LibvirtConfigGuestSysinfo(LibvirtConfigObject):
def __init__(self, **kwargs):
super(LibvirtConfigGuestSysinfo, self).__init__(root_name="sysinfo",
**kwargs)
self.type = "smbios"
self.bios_vendor = None
self.bios_version = None
self.system_manufacturer = None
self.system_product = None
self.system_version = None
self.system_serial = None
self.system_uuid = None
def format_dom(self):
sysinfo = super(LibvirtConfigGuestSysinfo, self).format_dom()
sysinfo.set("type", self.type)
bios = None
system = None
if self.bios_vendor is not None:
if bios is None:
bios = etree.Element("bios")
info = etree.Element("entry", name="vendor")
info.text = self.bios_vendor
bios.append(info)
if self.bios_version is not None:
if bios is None:
bios = etree.Element("bios")
info = etree.Element("entry", name="version")
info.text = self.bios_version
bios.append(info)
if self.system_manufacturer is not None:
if system is None:
system = etree.Element("system")
info = etree.Element("entry", name="manufacturer")
info.text = self.system_manufacturer
system.append(info)
if self.system_product is not None:
if system is None:
system = etree.Element("system")
info = etree.Element("entry", name="product")
info.text = self.system_product
system.append(info)
if self.system_version is not None:
if system is None:
system = etree.Element("system")
info = etree.Element("entry", name="version")
info.text = self.system_version
system.append(info)
if self.system_serial is not None:
if system is None:
system = etree.Element("system")
info = etree.Element("entry", name="serial")
info.text = self.system_serial
system.append(info)
if self.system_uuid is not None:
if system is None:
system = etree.Element("system")
info = etree.Element("entry", name="uuid")
info.text = self.system_uuid
system.append(info)
if bios is not None:
sysinfo.append(bios)
if system is not None:
sysinfo.append(system)
return sysinfo
class LibvirtConfigGuestDevice(LibvirtConfigObject):
def __init__(self, **kwargs):
super(LibvirtConfigGuestDevice, self).__init__(**kwargs)
class LibvirtConfigGuestDisk(LibvirtConfigGuestDevice):
def __init__(self, **kwargs):
super(LibvirtConfigGuestDisk, self).__init__(root_name="disk",
**kwargs)
self.source_type = "file"
self.source_device = "disk"
self.driver_name = None
self.driver_format = None
self.driver_cache = None
self.source_path = None
self.source_protocol = None
self.source_name = None
self.source_hosts = []
self.source_ports = []
self.target_dev = None
self.target_path = None
self.target_bus = None
self.auth_username = None
self.auth_secret_type = None
self.auth_secret_uuid = None
self.serial = None
self.disk_read_bytes_sec = None
self.disk_read_iops_sec = None
self.disk_write_bytes_sec = None
self.disk_write_iops_sec = None
self.disk_total_bytes_sec = None
self.disk_total_iops_sec = None
self.logical_block_size = None
self.physical_block_size = None
self.readonly = False
self.snapshot = None
def format_dom(self):
dev = super(LibvirtConfigGuestDisk, self).format_dom()
dev.set("type", self.source_type)
dev.set("device", self.source_device)
if (self.driver_name is not None or
self.driver_format is not None or
self.driver_cache is not None):
drv = etree.Element("driver")
if self.driver_name is not None:
drv.set("name", self.driver_name)
if self.driver_format is not None:
drv.set("type", self.driver_format)
if self.driver_cache is not None:
drv.set("cache", self.driver_cache)
dev.append(drv)
if self.source_type == "file":
dev.append(etree.Element("source", file=self.source_path))
elif self.source_type == "block":
dev.append(etree.Element("source", dev=self.source_path))
elif self.source_type == "mount":
dev.append(etree.Element("source", dir=self.source_path))
elif self.source_type == "network":
source = etree.Element("source", protocol=self.source_protocol)
if self.source_name is not None:
source.set('name', self.source_name)
hosts_info = zip(self.source_hosts, self.source_ports)
for name, port in hosts_info:
host = etree.Element('host', name=name)
if port is not None:
host.set('port', port)
source.append(host)
dev.append(source)
if self.auth_secret_type is not None:
auth = etree.Element("auth")
auth.set("username", self.auth_username)
auth.append(etree.Element("secret", type=self.auth_secret_type,
uuid=self.auth_secret_uuid))
dev.append(auth)
if self.source_type == "mount":
dev.append(etree.Element("target", dir=self.target_path))
else:
dev.append(etree.Element("target", dev=self.target_dev,
bus=self.target_bus))
if self.serial is not None:
dev.append(self._text_node("serial", self.serial))
iotune = etree.Element("iotune")
if self.disk_read_bytes_sec is not None:
iotune.append(self._text_node("read_bytes_sec",
self.disk_read_bytes_sec))
if self.disk_read_iops_sec is not None:
iotune.append(self._text_node("read_iops_sec",
self.disk_read_iops_sec))
if self.disk_write_bytes_sec is not None:
iotune.append(self._text_node("write_bytes_sec",
self.disk_write_bytes_sec))
if self.disk_write_iops_sec is not None:
iotune.append(self._text_node("write_iops_sec",
self.disk_write_iops_sec))
if self.disk_total_bytes_sec is not None:
iotune.append(self._text_node("total_bytes_sec",
self.disk_total_bytes_sec))
if self.disk_total_iops_sec is not None:
iotune.append(self._text_node("total_iops_sec",
self.disk_total_iops_sec))
if len(iotune) > 0:
dev.append(iotune)
# Block size tuning
if (self.logical_block_size is not None or
self.physical_block_size is not None):
blockio = etree.Element("blockio")
if self.logical_block_size is not None:
blockio.set('logical_block_size', self.logical_block_size)
if self.physical_block_size is not None:
blockio.set('physical_block_size', self.physical_block_size)
dev.append(blockio)
if self.readonly:
dev.append(etree.Element("readonly"))
return dev
def parse_dom(self, xmldoc):
super(LibvirtConfigGuestDisk, self).parse_dom(xmldoc)
self.source_type = xmldoc.get('type')
self.snapshot = xmldoc.get('snapshot')
for c in xmldoc.getchildren():
if c.tag == 'driver':
self.driver_name = c.get('name')
self.driver_format = c.get('type')
self.driver_cache = c.get('cache')
elif c.tag == 'source':
if self.source_type == 'file':
self.source_path = c.get('file')
elif self.source_type == 'block':
self.source_path = c.get('dev')
elif self.source_type == 'mount':
self.source_path = c.get('dir')
elif self.source_type == 'network':
self.source_protocol = c.get('protocol')
self.source_name = c.get('name')
elif c.tag == 'serial':
self.serial = c.text
for c in xmldoc.getchildren():
if c.tag == 'target':
if self.source_type == 'mount':
self.target_path = c.get('dir')
else:
self.target_dev = c.get('dev')
self.target_bus = c.get('bus', None)
class LibvirtConfigGuestSnapshotDisk(LibvirtConfigObject):
"""Disk class for handling disk information in snapshots.
Similar to LibvirtConfigGuestDisk, but used to represent
disk entities in <domainsnapshot> structures rather than
real devices. These typically have fewer members, and
different expectations for which fields are required.
"""
def __init__(self, **kwargs):
super(LibvirtConfigGuestSnapshotDisk, self).__init__(root_name="disk",
**kwargs)
self.source_type = None
self.source_device = None
self.name = None
self.snapshot = None
self.driver_name = None
self.driver_format = None
self.driver_cache = None
self.source_path = None
self.source_protocol = None
self.source_name = None
self.source_hosts = []
self.source_ports = []
self.target_dev = None
self.target_path = None
self.target_bus = None
self.auth_username = None
self.auth_secret_type = None
self.auth_secret_uuid = None
self.serial = None
def format_dom(self):
dev = super(LibvirtConfigGuestSnapshotDisk, self).format_dom()
if self.name:
dev.attrib['name'] = self.name
if self.snapshot:
dev.attrib['snapshot'] = self.snapshot
if self.source_type:
dev.set("type", self.source_type)
if self.source_device:
dev.set("device", self.source_device)
if (self.driver_name is not None or
self.driver_format is not None or
self.driver_cache is not None):
drv = etree.Element("driver")
if self.driver_name is not None:
drv.set("name", self.driver_name)
if self.driver_format is not None:
drv.set("type", self.driver_format)
if self.driver_cache is not None:
drv.set("cache", self.driver_cache)
dev.append(drv)
if self.source_type == "file":
dev.append(etree.Element("source", file=self.source_path))
elif self.source_type == "block":
dev.append(etree.Element("source", dev=self.source_path))
elif self.source_type == "mount":
dev.append(etree.Element("source", dir=self.source_path))
elif self.source_type == "network":
source = etree.Element("source", protocol=self.source_protocol)
if self.source_name is not None:
source.set('name', self.source_name)
hosts_info = zip(self.source_hosts, self.source_ports)
for name, port in hosts_info:
host = etree.Element('host', name=name)
if port is not None:
host.set('port', port)
source.append(host)
dev.append(source)
if self.auth_secret_type is not None:
auth = etree.Element("auth")
auth.set("username", self.auth_username)
auth.append(etree.Element("secret", type=self.auth_secret_type,
uuid=self.auth_secret_uuid))
dev.append(auth)
if self.source_type == "mount":
dev.append(etree.Element("target", dir=self.target_path))
else:
if self.target_bus and self.target_dev:
dev.append(etree.Element("target", dev=self.target_dev,
bus=self.target_bus))
return dev
def parse_dom(self, xmldoc):
super(LibvirtConfigGuestSnapshotDisk, self).parse_dom(xmldoc)
self.source_type = xmldoc.get('type')
self.snapshot = xmldoc.get('snapshot')
for c in xmldoc.getchildren():
if c.tag == 'driver':
self.driver_name = c.get('name')
self.driver_format = c.get('type')
self.driver_cache = c.get('cache')
elif c.tag == 'source':
if self.source_type == 'file':
self.source_path = c.get('file')
elif self.source_type == 'block':
self.source_path = c.get('dev')
elif self.source_type == 'mount':
self.source_path = c.get('dir')
elif self.source_type == 'network':
self.source_protocol = c.get('protocol')
self.source_name = c.get('name')
elif c.tag == 'serial':
self.serial = c.text
for c in xmldoc.getchildren():
if c.tag == 'target':
if self.source_type == 'mount':
self.target_path = c.get('dir')
else:
self.target_dev = c.get('dev')
self.target_bus = c.get('bus', None)
class LibvirtConfigGuestFilesys(LibvirtConfigGuestDevice):
def __init__(self, **kwargs):
super(LibvirtConfigGuestFilesys, self).__init__(root_name="filesystem",
**kwargs)
self.source_type = "mount"
self.source_dir = None
self.target_dir = "/"
def format_dom(self):
dev = super(LibvirtConfigGuestFilesys, self).format_dom()
dev.set("type", self.source_type)
dev.append(etree.Element("source", dir=self.source_dir))
dev.append(etree.Element("target", dir=self.target_dir))
return dev
class LibvirtConfigGuestInterface(LibvirtConfigGuestDevice):
def __init__(self, **kwargs):
super(LibvirtConfigGuestInterface, self).__init__(
root_name="interface",
**kwargs)
self.net_type = None
self.target_dev = None
self.model = None
self.mac_addr = None
self.script = None
self.source_dev = None
self.source_mode = "private"
self.vporttype = None
self.vportparams = []
self.filtername = None
self.filterparams = []
self.driver_name = None
self.vif_inbound_peak = None
self.vif_inbound_burst = None
self.vif_inbound_average = None
self.vif_outbound_peak = None
self.vif_outbound_burst = None
self.vif_outbound_average = None
def format_dom(self):
dev = super(LibvirtConfigGuestInterface, self).format_dom()
dev.set("type", self.net_type)
dev.append(etree.Element("mac", address=self.mac_addr))
if self.model:
dev.append(etree.Element("model", type=self.model))
if self.driver_name:
dev.append(etree.Element("driver", name=self.driver_name))
if self.net_type == "ethernet":
if self.script is not None:
dev.append(etree.Element("script", path=self.script))
elif self.net_type == "direct":
dev.append(etree.Element("source", dev=self.source_dev,
mode=self.source_mode))
else:
dev.append(etree.Element("source", bridge=self.source_dev))
if self.target_dev is not None:
dev.append(etree.Element("target", dev=self.target_dev))
if self.vporttype is not None:
vport = etree.Element("virtualport", type=self.vporttype)
for p in self.vportparams:
param = etree.Element("parameters")
param.set(p['key'], p['value'])
vport.append(param)
dev.append(vport)
        if self.filtername is not None:
            filterref = etree.Element("filterref", filter=self.filtername)
            for p in self.filterparams:
                filterref.append(etree.Element("parameter",
                                               name=p['key'],
                                               value=p['value']))
            dev.append(filterref)
if self.vif_inbound_average or self.vif_outbound_average:
bandwidth = etree.Element("bandwidth")
if self.vif_inbound_average is not None:
vif_inbound = etree.Element("inbound",
average=str(self.vif_inbound_average))
if self.vif_inbound_peak is not None:
vif_inbound.set("peak", str(self.vif_inbound_peak))
if self.vif_inbound_burst is not None:
vif_inbound.set("burst", str(self.vif_inbound_burst))
bandwidth.append(vif_inbound)
if self.vif_outbound_average is not None:
vif_outbound = etree.Element("outbound",
average=str(self.vif_outbound_average))
if self.vif_outbound_peak is not None:
vif_outbound.set("peak", str(self.vif_outbound_peak))
if self.vif_outbound_burst is not None:
vif_outbound.set("burst", str(self.vif_outbound_burst))
bandwidth.append(vif_outbound)
dev.append(bandwidth)
return dev
def add_filter_param(self, key, value):
self.filterparams.append({'key': key, 'value': value})
def add_vport_param(self, key, value):
self.vportparams.append({'key': key, 'value': value})
class LibvirtConfigGuestInput(LibvirtConfigGuestDevice):
def __init__(self, **kwargs):
super(LibvirtConfigGuestInput, self).__init__(root_name="input",
**kwargs)
self.type = "tablet"
self.bus = "usb"
def format_dom(self):
dev = super(LibvirtConfigGuestInput, self).format_dom()
dev.set("type", self.type)
dev.set("bus", self.bus)
return dev
class LibvirtConfigGuestGraphics(LibvirtConfigGuestDevice):
def __init__(self, **kwargs):
super(LibvirtConfigGuestGraphics, self).__init__(root_name="graphics",
**kwargs)
self.type = "vnc"
self.autoport = True
self.keymap = None
self.listen = None
def format_dom(self):
dev = super(LibvirtConfigGuestGraphics, self).format_dom()
dev.set("type", self.type)
if self.autoport:
dev.set("autoport", "yes")
else:
dev.set("autoport", "no")
if self.keymap:
dev.set("keymap", self.keymap)
if self.listen:
dev.set("listen", self.listen)
return dev
class LibvirtConfigSeclabel(LibvirtConfigObject):
def __init__(self, **kwargs):
super(LibvirtConfigSeclabel, self).__init__(root_name="seclabel",
**kwargs)
self.type = 'dynamic'
self.baselabel = None
def format_dom(self):
seclabel = super(LibvirtConfigSeclabel, self).format_dom()
seclabel.set('type', self.type)
if self.baselabel:
seclabel.append(self._text_node("baselabel", self.baselabel))
return seclabel
class LibvirtConfigGuestVideo(LibvirtConfigGuestDevice):
def __init__(self, **kwargs):
super(LibvirtConfigGuestVideo, self).__init__(root_name="video",
**kwargs)
self.type = 'cirrus'
self.vram = None
self.heads = None
def format_dom(self):
dev = super(LibvirtConfigGuestVideo, self).format_dom()
model = etree.Element("model")
model.set("type", self.type)
if self.vram:
model.set("vram", str(self.vram))
if self.heads:
model.set("heads", str(self.heads))
dev.append(model)
return dev
class LibvirtConfigGuestHostdev(LibvirtConfigGuestDevice):
def __init__(self, **kwargs):
        super(LibvirtConfigGuestHostdev, self).__init__(
            root_name="hostdev", **kwargs)
self.mode = kwargs.get('mode')
self.type = kwargs.get('type')
self.managed = 'yes'
def format_dom(self):
dev = super(LibvirtConfigGuestHostdev, self).format_dom()
dev.set("mode", self.mode)
dev.set("type", self.type)
dev.set("managed", self.managed)
return dev
def parse_dom(self, xmldoc):
super(LibvirtConfigGuestHostdev, self).parse_dom(xmldoc)
self.mode = xmldoc.get('mode')
self.type = xmldoc.get('type')
self.managed = xmldoc.get('managed')
return xmldoc.getchildren()
class LibvirtConfigGuestHostdevPCI(LibvirtConfigGuestHostdev):
def __init__(self, **kwargs):
        super(LibvirtConfigGuestHostdevPCI, self).__init__(
            mode='subsystem', type='pci', **kwargs)
self.domain = None
self.bus = None
self.slot = None
self.function = None
def format_dom(self):
dev = super(LibvirtConfigGuestHostdevPCI, self).format_dom()
address = etree.Element("address",
domain='0x' + self.domain,
bus='0x' + self.bus,
slot='0x' + self.slot,
function='0x' + self.function)
source = etree.Element("source")
source.append(address)
dev.append(source)
return dev
def parse_dom(self, xmldoc):
        children = super(LibvirtConfigGuestHostdevPCI, self).parse_dom(xmldoc)
        for c in children:
if c.tag == "source":
for sub in c.getchildren():
if sub.tag == 'address':
self.domain = sub.get('domain')
self.bus = sub.get('bus')
self.slot = sub.get('slot')
self.function = sub.get('function')
class LibvirtConfigGuestCharBase(LibvirtConfigGuestDevice):
def __init__(self, **kwargs):
super(LibvirtConfigGuestCharBase, self).__init__(**kwargs)
self.type = "pty"
self.source_path = None
def format_dom(self):
dev = super(LibvirtConfigGuestCharBase, self).format_dom()
dev.set("type", self.type)
if self.type == "file":
dev.append(etree.Element("source", path=self.source_path))
elif self.type == "unix":
dev.append(etree.Element("source", mode="bind",
path=self.source_path))
return dev
class LibvirtConfigGuestChar(LibvirtConfigGuestCharBase):
def __init__(self, **kwargs):
super(LibvirtConfigGuestChar, self).__init__(**kwargs)
self.target_port = None
def format_dom(self):
dev = super(LibvirtConfigGuestChar, self).format_dom()
if self.target_port is not None:
dev.append(etree.Element("target", port=str(self.target_port)))
return dev
class LibvirtConfigGuestSerial(LibvirtConfigGuestChar):
def __init__(self, **kwargs):
super(LibvirtConfigGuestSerial, self).__init__(root_name="serial",
**kwargs)
class LibvirtConfigGuestConsole(LibvirtConfigGuestChar):
def __init__(self, **kwargs):
super(LibvirtConfigGuestConsole, self).__init__(root_name="console",
**kwargs)
class LibvirtConfigGuestChannel(LibvirtConfigGuestCharBase):
def __init__(self, **kwargs):
super(LibvirtConfigGuestChannel, self).__init__(root_name="channel",
**kwargs)
self.target_type = "virtio"
self.target_name = None
def format_dom(self):
dev = super(LibvirtConfigGuestChannel, self).format_dom()
target = etree.Element("target", type=self.target_type)
if self.target_name is not None:
target.set("name", self.target_name)
dev.append(target)
return dev
class LibvirtConfigGuest(LibvirtConfigObject):
def __init__(self, **kwargs):
super(LibvirtConfigGuest, self).__init__(root_name="domain",
**kwargs)
self.virt_type = None
self.uuid = None
self.name = None
self.memory = 500 * units.Mi
self.vcpus = 1
self.cpuset = None
self.cpu = None
self.cpu_shares = None
self.cpu_quota = None
self.cpu_period = None
self.acpi = False
self.apic = False
self.clock = None
self.sysinfo = None
self.os_type = None
self.os_loader = None
self.os_kernel = None
self.os_initrd = None
self.os_cmdline = None
self.os_root = None
self.os_init_path = None
self.os_boot_dev = []
self.os_smbios = None
self.os_mach_type = None
self.devices = []
def _format_basic_props(self, root):
root.append(self._text_node("uuid", self.uuid))
root.append(self._text_node("name", self.name))
root.append(self._text_node("memory", self.memory))
if self.cpuset is not None:
vcpu = self._text_node("vcpu", self.vcpus)
vcpu.set("cpuset", self.cpuset)
root.append(vcpu)
else:
root.append(self._text_node("vcpu", self.vcpus))
def _format_os(self, root):
os = etree.Element("os")
type_node = self._text_node("type", self.os_type)
if self.os_mach_type is not None:
type_node.set("machine", self.os_mach_type)
os.append(type_node)
if self.os_kernel is not None:
os.append(self._text_node("kernel", self.os_kernel))
if self.os_loader is not None:
os.append(self._text_node("loader", self.os_loader))
if self.os_initrd is not None:
os.append(self._text_node("initrd", self.os_initrd))
if self.os_cmdline is not None:
os.append(self._text_node("cmdline", self.os_cmdline))
if self.os_root is not None:
os.append(self._text_node("root", self.os_root))
if self.os_init_path is not None:
os.append(self._text_node("init", self.os_init_path))
for boot_dev in self.os_boot_dev:
os.append(etree.Element("boot", dev=boot_dev))
if self.os_smbios is not None:
os.append(self.os_smbios.format_dom())
root.append(os)
def _format_features(self, root):
if self.acpi or self.apic:
features = etree.Element("features")
if self.acpi:
features.append(etree.Element("acpi"))
if self.apic:
features.append(etree.Element("apic"))
root.append(features)
def _format_cputune(self, root):
cputune = etree.Element("cputune")
if self.cpu_shares is not None:
cputune.append(self._text_node("shares", self.cpu_shares))
if self.cpu_quota is not None:
cputune.append(self._text_node("quota", self.cpu_quota))
if self.cpu_period is not None:
cputune.append(self._text_node("period", self.cpu_period))
if len(cputune) > 0:
root.append(cputune)
def _format_devices(self, root):
if len(self.devices) == 0:
return
devices = etree.Element("devices")
for dev in self.devices:
devices.append(dev.format_dom())
root.append(devices)
def format_dom(self):
root = super(LibvirtConfigGuest, self).format_dom()
root.set("type", self.virt_type)
self._format_basic_props(root)
if self.sysinfo is not None:
root.append(self.sysinfo.format_dom())
self._format_os(root)
self._format_features(root)
self._format_cputune(root)
if self.clock is not None:
root.append(self.clock.format_dom())
if self.cpu is not None:
root.append(self.cpu.format_dom())
self._format_devices(root)
return root
def parse_dom(self, xmldoc):
        # NOTE: This currently only covers LibvirtConfigGuestDisk,
        #       LibvirtConfigGuestHostdevPCI and LibvirtConfigGuestCPU
        #       elements.
for c in xmldoc.getchildren():
if c.tag == 'devices':
for d in c.getchildren():
if d.tag == 'disk':
obj = LibvirtConfigGuestDisk()
obj.parse_dom(d)
self.devices.append(obj)
elif d.tag == 'hostdev' and d.get('type') == 'pci':
obj = LibvirtConfigGuestHostdevPCI()
obj.parse_dom(d)
self.devices.append(obj)
elif c.tag == 'cpu':
obj = LibvirtConfigGuestCPU()
obj.parse_dom(c)
self.cpu = obj
def add_device(self, dev):
self.devices.append(dev)
def set_clock(self, clk):
self.clock = clk
class LibvirtConfigGuestSnapshot(LibvirtConfigObject):
def __init__(self, **kwargs):
super(LibvirtConfigGuestSnapshot, self).__init__(
root_name="domainsnapshot",
**kwargs)
self.name = None
self.disks = []
def format_dom(self):
ss = super(LibvirtConfigGuestSnapshot, self).format_dom()
if self.name:
ss.append(self._text_node("name", self.name))
disks = etree.Element('disks')
for disk in self.disks:
disks.append(disk.format_dom())
ss.append(disks)
return ss
def add_disk(self, disk):
self.disks.append(disk)
class LibvirtConfigNodeDevice(LibvirtConfigObject):
"""Libvirt Node Devices parser"""
def __init__(self, **kwargs):
super(LibvirtConfigNodeDevice, self).__init__(root_name="device",
**kwargs)
self.name = None
self.parent = None
self.driver = None
self.pci_capability = None
def parse_dom(self, xmldoc):
super(LibvirtConfigNodeDevice, self).parse_dom(xmldoc)
for c in xmldoc.getchildren():
if c.tag == "name":
self.name = c.text
elif c.tag == "parent":
self.parent = c.text
elif c.tag == "capability" and c.get("type") == 'pci':
pcicap = LibvirtConfigNodeDevicePciCap()
pcicap.parse_dom(c)
self.pci_capability = pcicap
class LibvirtConfigNodeDevicePciCap(LibvirtConfigObject):
"""Libvirt Node Devices pci capability parser"""
def __init__(self, **kwargs):
super(LibvirtConfigNodeDevicePciCap, self).__init__(
root_name="capability", **kwargs)
self.domain = None
self.bus = None
self.slot = None
self.function = None
self.product = None
self.product_id = None
self.vendor = None
self.vendor_id = None
self.fun_capability = list()
def parse_dom(self, xmldoc):
super(LibvirtConfigNodeDevicePciCap, self).parse_dom(xmldoc)
for c in xmldoc.getchildren():
if c.tag == "domain":
self.domain = int(c.text)
elif c.tag == "slot":
self.slot = int(c.text)
elif c.tag == "bus":
self.bus = int(c.text)
elif c.tag == "function":
self.function = int(c.text)
elif c.tag == "product":
self.product = c.text
self.product_id = c.get('id')
elif c.tag == "vendor":
self.vendor = c.text
self.vendor_id = c.get('id')
elif c.tag == "capability" and c.get('type') in \
('virt_functions', 'phys_function'):
funcap = LibvirtConfigNodeDevicePciSubFunctionCap()
funcap.parse_dom(c)
self.fun_capability.append(funcap)
class LibvirtConfigNodeDevicePciSubFunctionCap(LibvirtConfigObject):
def __init__(self, **kwargs):
super(LibvirtConfigNodeDevicePciSubFunctionCap, self).__init__(
root_name="capability", **kwargs)
self.type = None
self.device_addrs = list() # list of tuple (domain,bus,slot,function)
def parse_dom(self, xmldoc):
super(LibvirtConfigNodeDevicePciSubFunctionCap, self).parse_dom(xmldoc)
self.type = xmldoc.get("type")
for c in xmldoc.getchildren():
if c.tag == "address":
self.device_addrs.append((c.get('domain'),
c.get('bus'),
c.get('slot'),
c.get('function')))
class LibvirtConfigGuestRng(LibvirtConfigGuestDevice):
def __init__(self, **kwargs):
super(LibvirtConfigGuestRng, self).__init__(root_name="rng",
**kwargs)
self.model = 'random'
self.backend = '/dev/random'
self.rate_period = None
self.rate_bytes = None
def format_dom(self):
dev = super(LibvirtConfigGuestRng, self).format_dom()
dev.set('model', 'virtio')
backend = etree.Element("backend")
backend.set("model", self.model)
backend.text = self.backend
if self.rate_period and self.rate_bytes:
rate = etree.Element("rate")
rate.set("period", str(self.rate_period))
rate.set("bytes", str(self.rate_bytes))
dev.append(rate)
dev.append(backend)
return dev
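# Minimal usage sketch (not part of the original module; attribute values are
# illustrative placeholders).  It composes a guest definition with a single
# file-backed virtio disk and prints the generated libvirt XML.
if __name__ == "__main__":
    example_guest = LibvirtConfigGuest()
    example_guest.virt_type = "kvm"
    example_guest.uuid = "11111111-2222-3333-4444-555555555555"
    example_guest.name = "example-instance"
    example_guest.os_type = "hvm"

    example_disk = LibvirtConfigGuestDisk()
    example_disk.source_path = "/var/lib/nova/instances/example/disk"
    example_disk.target_dev = "vda"
    example_disk.target_bus = "virtio"
    example_guest.add_device(example_disk)

    # Serializes the whole <domain> element, including the <devices> section.
    print(example_guest.to_xml())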
|
|
from os.path import join, dirname
import unittest
from StringIO import StringIO
from robot.errors import DataError
from robot.result import ExecutionResult
from robot.utils.asserts import assert_equals, assert_true, assert_raises
def _read_file(name):
with open(join(dirname(__file__), name)) as f:
return f.read()
GOLDEN_XML = _read_file('golden.xml')
GOLDEN_XML_TWICE = _read_file('goldenTwice.xml')
SUITE_TEARDOWN_FAILED = _read_file('suite_teardown_failed.xml')
class TestBuildingSuiteExecutionResult(unittest.TestCase):
def setUp(self):
result = ExecutionResult(StringIO(GOLDEN_XML))
self._suite = result.suite
self._test = self._suite.tests[0]
self._keyword = self._test.keywords[0]
self._user_keyword = self._test.keywords[1]
self._message = self._keyword.messages[0]
self._setup = self._suite.keywords[0]
self._errors = result.errors
def test_suite_is_built(self):
assert_equals(self._suite.source, 'normal.html')
assert_equals(self._suite.name, 'Normal')
assert_equals(self._suite.doc, 'Normal test cases')
assert_equals(self._suite.metadata, {'Something': 'My Value'})
assert_equals(self._suite.status, 'PASS')
assert_equals(self._suite.starttime, '20111024 13:41:20.873')
assert_equals(self._suite.endtime, '20111024 13:41:20.952')
assert_equals(self._suite.statistics.critical.passed, 1)
assert_equals(self._suite.statistics.critical.failed, 0)
assert_equals(self._suite.statistics.all.passed, 1)
assert_equals(self._suite.statistics.all.failed, 0)
def test_testcase_is_built(self):
assert_equals(self._test.name, 'First One')
assert_equals(self._test.doc, 'Test case documentation')
assert_equals(self._test.timeout, None)
assert_equals(list(self._test.tags), ['t1'])
assert_equals(len(self._test.keywords), 2)
assert_equals(self._test.status, 'PASS')
assert_equals(self._test.starttime, '20111024 13:41:20.925')
assert_equals(self._test.endtime, '20111024 13:41:20.934')
assert_true(self._test.critical)
def test_keyword_is_built(self):
assert_equals(self._keyword.name, 'BuiltIn.Log')
assert_equals(self._keyword.doc, 'Logs the given message with the given level.')
assert_equals(self._keyword.args, ('Test 1',))
assert_equals(self._keyword.status, 'PASS')
assert_equals(self._keyword.starttime, '20111024 13:41:20.926')
assert_equals(self._keyword.endtime, '20111024 13:41:20.928')
assert_equals(self._keyword.timeout, None)
assert_equals(len(self._keyword.keywords), 0)
assert_equals(len(self._keyword.messages), 1)
def test_user_keyword_is_built(self):
assert_equals(self._user_keyword.name, 'logs on trace')
assert_equals(self._user_keyword.doc, '')
assert_equals(self._user_keyword.args, ())
assert_equals(self._user_keyword.status, 'PASS')
assert_equals(self._user_keyword.starttime, '20111024 13:41:20.930')
assert_equals(self._user_keyword.endtime, '20111024 13:41:20.933')
assert_equals(self._user_keyword.timeout, None)
assert_equals(len(self._user_keyword.messages), 0)
assert_equals(len(self._user_keyword.keywords), 1)
def test_message_is_built(self):
assert_equals(self._message.message, 'Test 1')
assert_equals(self._message.level, 'INFO')
assert_equals(self._message.timestamp, '20111024 13:41:20.927')
def test_suite_setup_is_built(self):
assert_equals(len(self._setup.keywords), 0)
assert_equals(len(self._setup.messages), 0)
def test_errors_are_built(self):
assert_equals(len(self._errors.messages), 1)
assert_equals(self._errors.messages[0].message,
"Error in file 'normal.html' in table 'Settings': Resource file 'nope' does not exist.")
class TestCombiningSuites(unittest.TestCase):
def setUp(self):
self.result = ExecutionResult(StringIO(GOLDEN_XML), StringIO(GOLDEN_XML))
def test_name(self):
assert_equals(self.result.suite.name, 'Normal & Normal')
class TestElements(unittest.TestCase):
def test_nested_suites(self):
xml = """
<robot>
<suite name="foo">
<suite name="bar">
<suite name="quux">
</suite>
</suite>
</suite>
</robot>
"""
suite = ExecutionResult(StringIO(xml)).suite
assert_equals(suite.name, 'foo')
assert_equals(suite.suites[0].name, 'bar')
assert_equals(suite.longname, 'foo')
assert_equals(suite.suites[0].longname, 'foo.bar')
assert_equals(suite.suites[0].suites[0].name, 'quux')
assert_equals(suite.suites[0].suites[0].longname, 'foo.bar.quux')
def test_test_message(self):
xml = """
<robot>
<suite name="foo">
<test name="test">
<status status="FAIL">Failure message</status>
</test>
</suite>
</robot>
"""
test = ExecutionResult(StringIO(xml)).suite.tests[0]
assert_equals(test.message, 'Failure message')
assert_equals(test.status, 'FAIL')
assert_equals(test.longname, 'foo.test')
def test_suite_message(self):
xml = """
<robot>
<suite name="foo">
<status status="FAIL">Setup failed</status>
</suite>
</robot>
"""
suite = ExecutionResult(StringIO(xml)).suite
assert_equals(suite.message, 'Setup failed')
def test_unknown_elements_cause_an_error(self):
assert_raises(DataError, ExecutionResult, StringIO('<some_tag/>'))
class TestSuiteTeardownFailed(unittest.TestCase):
def test_passed_test(self):
tc = ExecutionResult(StringIO(SUITE_TEARDOWN_FAILED)).suite.tests[0]
assert_equals(tc.status, 'FAIL')
assert_equals(tc.message, 'Parent suite teardown failed:\nXXX')
def test_failed_test(self):
tc = ExecutionResult(StringIO(SUITE_TEARDOWN_FAILED)).suite.tests[1]
assert_equals(tc.status, 'FAIL')
assert_equals(tc.message, 'Message\n\n'
'Also parent suite teardown failed:\nXXX')
def test_already_processed(self):
inp = SUITE_TEARDOWN_FAILED.replace('generator="Robot', 'generator="Rebot')
passed, failed, teardowns = ExecutionResult(StringIO(inp)).suite.tests
assert_equals(passed.status, 'PASS')
assert_equals(passed.message, '')
assert_equals(failed.status, 'FAIL')
assert_equals(failed.message, 'Message')
assert_equals(teardowns.status, 'PASS')
assert_equals(teardowns.message, '')
def test_excluding_keywords(self):
suite = ExecutionResult(StringIO(SUITE_TEARDOWN_FAILED),
include_keywords=False).suite
passed, failed, teardowns = suite.tests
assert_equals(passed.status, 'FAIL')
assert_equals(passed.message, 'Parent suite teardown failed:\nXXX')
assert_equals(failed.status, 'FAIL')
assert_equals(failed.message, 'Message\n\n'
'Also parent suite teardown failed:\nXXX')
assert_equals(teardowns.status, 'FAIL')
assert_equals(teardowns.message, 'Parent suite teardown failed:\nXXX')
for item in suite, passed, failed, teardowns:
assert_equals(list(item.keywords), [])
def test_excluding_keywords_and_already_processed(self):
inp = SUITE_TEARDOWN_FAILED.replace('generator="Robot', 'generator="Rebot')
suite = ExecutionResult(StringIO(inp), include_keywords=False).suite
passed, failed, teardowns = suite.tests
assert_equals(passed.status, 'PASS')
assert_equals(passed.message, '')
assert_equals(failed.status, 'FAIL')
assert_equals(failed.message, 'Message')
assert_equals(teardowns.status, 'PASS')
assert_equals(teardowns.message, '')
for item in suite, passed, failed, teardowns:
assert_equals(list(item.keywords), [])
class TestBuildingFromXmlStringAndHandlingMissingInformation(unittest.TestCase):
def setUp(self):
self.result = ExecutionResult("""
<robot>
<suite name="foo">
<test name="some name">
<status status="PASS"></status>
</test>
<status status="PASS"></status>
</suite>
</robot>
""")
def test_suite(self):
suite = self.result.suite
assert_equals(suite.id, 's1')
assert_equals(suite.name, 'foo')
assert_equals(suite.doc, '')
assert_equals(suite.source, None)
assert_equals(suite.metadata, {})
assert_equals(list(suite.keywords), [])
assert_equals(suite.starttime, None)
assert_equals(suite.endtime, None)
assert_equals(suite.elapsedtime, 0)
def test_test(self):
test = self.result.suite.tests[0]
assert_equals(test.id, 's1-t1')
assert_equals(test.name, 'some name')
assert_equals(test.doc, '')
assert_equals(test.timeout, None)
assert_equals(test.critical, True)
assert_equals(list(test.tags), [])
assert_equals(list(test.keywords), [])
assert_equals(test.starttime, None)
assert_equals(test.endtime, None)
assert_equals(test.elapsedtime, 0)
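# Illustrative sketch (not part of the test suite): ExecutionResult accepts
# any output XML, and the parsed suite can be inspected the same way the
# tests above do. The XML below is a made-up minimal example.
def _example_parse_result():
    xml = """
    <robot>
    <suite name="Example">
    <test name="case">
    <status status="PASS"></status>
    </test>
    <status status="PASS"></status>
    </suite>
    </robot>
    """
    suite = ExecutionResult(StringIO(xml)).suite
    return suite.name, suite.tests[0].status    # ('Example', 'PASS')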
if __name__ == '__main__':
unittest.main()
|
|
import ast
from . import astlib as _a
from .library import Library
from .parser import visitor
register = Library()
@register.tag
def load(parser, token):
args, kwargs = parser.parse_args(token)
assert len(args) == 1, '"load" tag takes only one argument.'
assert isinstance(args[0], ast.Str), \
'First argument to "load" tag must be a string'
parser.load_library(args[0].s)
@register.tag
def extends(parser, token):
args, kwargs = parser.parse_args(token)
assert len(args) == 1, '"extends" tag takes only one argument.'
assert isinstance(args[0], ast.Str), \
'First argument to "extends" tag must be a string'
parent = parser.loader.load(args[0].s, raw=True)
parser.parent = parent
@register.tag
def block(parser, token):
name = token.strip()
parser.build_method(name, endnodes=['endblock'])
return ast.YieldFrom(
value=_a.Call(_a.Attribute(_a.Name('self'), name), [
_a.Name('context'),
])
)
@register.tag(name='super')
def do_super(parser, token):
'''
Access the parent templates block.
{% super name %}
'''
name = token.strip()
return ast.YieldFrom(
value=_a.Call(_a.Attribute(_a.Call(_a.Name('super')), name), [
# _a.Attribute(_a.Name('context'), 'parent'),
_a.Name('context'),
])
)
@register.tag(name='if')
def do_if(parser, token):
code = parser.parse_expression(token)
nodelist, end = parser.parse_nodes_until('endif', 'else')
if end == 'else':
elsenodes, _ = parser.parse_nodes_until('endif')
else:
elsenodes = []
return ast.If(test=code, body=nodelist, orelse=elsenodes)
def _create_with_scope(body, kwargs):
'''
Helper function to wrap a block in a scope stack:
with ContextScope(context, **kwargs) as context:
... body ...
'''
return ast.With(
items=[
ast.withitem(
context_expr=_a.Call(
_a.Name('ContextScope'),
[_a.Name('context')],
keywords=kwargs,
),
optional_vars=_a.Name('context', ctx=ast.Store())
),
],
body=body,
)
def _wrap_kwargs(kwargs):
'''
Ensure expressions in keyword arguments are wrapped.
'''
for kw in kwargs:
visitor.visit(kw)
return kwargs
@register.tag(name='for')
def do_for(parser, token):
'''
{% for a, b, c in iterable %}
{% endfor %}
We create the structure:
    with ContextScope(context) as context:
for a, b, c in iterable:
context.update(a=a, b=b, c=c)
...
If there is a {% empty %} clause, we create:
if iterable:
{ above code }
else:
{ empty clause }
'''
code = ast.parse('for %s: pass' % token, mode='exec')
# Grab the ast.For node
loop = code.body[0]
# Wrap its source iterable
loop.iter = visitor.visit(loop.iter)
# Get the body of the loop
body, end = parser.parse_nodes_until('endfor', 'empty')
# Build a list of target variable names
if isinstance(loop.target, ast.Tuple):
targets = [elt.id for elt in loop.target.elts]
else:
targets = [loop.target.id]
kwargs = [
ast.keyword(arg=elt, value=_a.Name(elt))
for elt in targets
]
# Insert our update call at the start of the loop body
body.insert(0, ast.Expr(value=_a.Call(
_a.Attribute(_a.Name('context'), 'update'),
keywords=kwargs
)))
loop.body = body
node = _create_with_scope([loop], [])
if end == 'empty':
# Now we wrap our for block in:
# if loop.iter:
# else:
empty, _ = parser.parse_nodes_until('endfor')
node = ast.If(
test=loop.iter,
body=[node],
orelse=empty
)
return node
@register.tag(name='include')
def do_include(parser, token):
args, kwargs = parser.parse_args(token)
assert isinstance(args[0], ast.Str), \
'First argument to "include" tag must be a string'
template_name = args[0].s
tmpl = parser.loader.load(template_name)
parser.helpers.setdefault('_includes', {})[template_name] = tmpl
# yield _._includes[name](context)
action = ast.Yield(
value=_a.Call(
func=ast.Subscript(
value=_a.Attribute(_a.Name('_'), '_includes'),
slice=ast.Index(value=ast.Str(s=template_name)),
ctx=ast.Load()
),
args=[
_a.Name('context'),
]
)
)
if kwargs:
kwargs = _wrap_kwargs(kwargs)
return _create_with_scope([ast.Expr(value=action)], kwargs)
return action
@register.tag(name='with')
def do_with(parser, token):
body, _ = parser.parse_nodes_until('endwith')
args, kwargs = parser.parse_args(token)
# Need to wrap name lookups in kwarg expressions
kwargs = _wrap_kwargs(kwargs)
action = _create_with_scope(body, kwargs)
return action
@register.tag
def macro(parser, token):
'''
Works just like block, but does not render.
'''
name = token.strip()
parser.build_method(name, endnodes=['endmacro'])
return ast.Yield(value=ast.Str(s=''))
@register.tag
def use(parser, token):
'''
Counterpart to `macro`, lets you render any block/macro in place.
'''
args, kwargs = parser.parse_args(token)
assert isinstance(args[0], ast.Str), \
        'First argument to "use" tag must be a string'
name = args[0].s
action = ast.YieldFrom(
value=_a.Call(_a.Attribute(_a.Name('self'), name), [
_a.Name('context'),
])
)
if kwargs:
kwargs = _wrap_kwargs(kwargs)
return _create_with_scope([ast.Expr(value=action)], kwargs)
return action
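# Illustrative sketch (not wired into the library): the tag functions above
# return ordinary ast nodes which the parser later compiles. The helper below
# shows that same compile-and-exec step on a hand-parsed If node, independent
# of the parser/loader machinery.
def _example_compile_if_node():
    module = ast.parse("if 1 < 2:\n    result = 'then'\nelse:\n    result = 'else'")
    namespace = {}
    exec(compile(module, '<example>', 'exec'), namespace)
    return namespace['result']    # 'then'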
|
|
import enum
import parsy
import pytest
from toolz import identity
import ibis
import ibis.expr.datatypes as dt
import ibis.expr.rules as rlz
import ibis.expr.types as ir
from ibis.common.exceptions import IbisTypeError
table = ibis.table(
[('int_col', 'int64'), ('string_col', 'string'), ('double_col', 'double')]
)
@pytest.mark.parametrize(
('value', 'expected'),
[
(dt.int32, dt.int32),
('int64', dt.int64),
('array<string>', dt.Array(dt.string)),
],
)
def test_valid_datatype(value, expected):
assert rlz.datatype(value) == expected
@pytest.mark.parametrize(
('value', 'expected'),
[
('exception', parsy.ParseError),
('array<cat>', parsy.ParseError),
(int, IbisTypeError),
([float], IbisTypeError),
],
)
def test_invalid_datatype(value, expected):
with pytest.raises(expected):
assert rlz.datatype(value)
@pytest.mark.parametrize(
('klass', 'value', 'expected'),
[(int, 32, 32), (str, 'foo', 'foo'), (dt.Integer, dt.int8, dt.int8)],
)
def test_valid_instance_of(klass, value, expected):
assert rlz.instance_of(klass, value) == expected
@pytest.mark.parametrize(
('klass', 'value', 'expected'),
[
(ir.TableExpr, object, IbisTypeError),
(ir.IntegerValue, 4, IbisTypeError),
],
)
def test_invalid_instance_of(klass, value, expected):
with pytest.raises(expected):
assert rlz.instance_of(klass, value)
@pytest.mark.parametrize(
('dtype', 'value', 'expected'),
[
pytest.param(dt.int8, 26, ibis.literal(26)),
pytest.param(dt.int16, 26, ibis.literal(26)),
pytest.param(dt.int32, 26, ibis.literal(26)),
pytest.param(dt.int64, 26, ibis.literal(26)),
pytest.param(dt.uint8, 26, ibis.literal(26)),
pytest.param(dt.uint16, 26, ibis.literal(26)),
pytest.param(dt.uint32, 26, ibis.literal(26)),
pytest.param(dt.uint64, 26, ibis.literal(26)),
pytest.param(dt.float32, 26, ibis.literal(26)),
pytest.param(dt.float64, 26.4, ibis.literal(26.4)),
pytest.param(dt.double, 26.3, ibis.literal(26.3)),
pytest.param(dt.string, 'bar', ibis.literal('bar')),
pytest.param(dt.Array(dt.float), [3.4, 5.6], ibis.literal([3.4, 5.6])),
pytest.param(
dt.Map(dt.string, dt.Array(dt.boolean)),
{'a': [True, False], 'b': [True]},
ibis.literal({'a': [True, False], 'b': [True]}),
id='map_literal',
),
],
)
def test_valid_value(dtype, value, expected):
result = rlz.value(dtype, value)
assert result.equals(expected)
@pytest.mark.parametrize(
('dtype', 'value', 'expected'),
[
(dt.uint8, -3, IbisTypeError),
(dt.int32, {}, IbisTypeError),
(dt.string, 1, IbisTypeError),
(dt.Array(dt.float), ['s'], IbisTypeError),
(
dt.Map(dt.string, dt.Array(dt.boolean)),
{'a': [True, False], 'b': ['B']},
IbisTypeError,
),
],
)
def test_invalid_value(dtype, value, expected):
with pytest.raises(expected):
rlz.value(dtype, value)
@pytest.mark.parametrize(
('values', 'value', 'expected'),
[
(['a', 'b'], 'a', 'a'),
(('a', 'b'), 'b', 'b'),
({'a', 'b', 'c'}, 'c', 'c'),
([1, 2, 'f'], 'f', 'f'),
({'a': 1, 'b': 2}, 'a', 1),
({'a': 1, 'b': 2}, 'b', 2),
],
)
def test_valid_isin(values, value, expected):
assert rlz.isin(values, value) == expected
@pytest.mark.parametrize(
('values', 'value', 'expected'),
[
(['a', 'b'], 'c', ValueError),
({'a', 'b', 'c'}, 'd', ValueError),
({'a': 1, 'b': 2}, 'c', ValueError),
],
)
def test_invalid_isin(values, value, expected):
with pytest.raises(expected):
rlz.isin(values, value)
class Foo(enum.Enum):
a = 1
b = 2
class Bar:
a = 'A'
b = 'B'
class Baz:
def __init__(self, a):
self.a = a
@pytest.mark.parametrize(
('obj', 'value', 'expected'),
[
(Foo, Foo.a, Foo.a),
(Foo, 'b', Foo.b),
(Bar, 'a', 'A'),
(Bar, 'b', 'B'),
(Baz(2), 'a', 2),
(Foo, ibis.literal(Foo.a), Foo.a),
],
)
def test_valid_member_of(obj, value, expected):
assert rlz.member_of(obj, value) == expected
@pytest.mark.parametrize(
('obj', 'value', 'expected'),
[
(Foo, 'c', IbisTypeError),
(Bar, 'c', IbisTypeError),
(Baz(3), 'b', IbisTypeError),
],
)
def test_invalid_member_of(obj, value, expected):
with pytest.raises(expected):
rlz.member_of(obj, value)
@pytest.mark.parametrize(
('validator', 'values', 'expected'),
[
(rlz.value_list_of(identity), (3, 2), ibis.sequence([3, 2])),
(rlz.value_list_of(rlz.integer), (3, 2), ibis.sequence([3, 2])),
(
rlz.value_list_of(rlz.integer),
(3, None),
ibis.sequence([3, ibis.NA]),
),
(rlz.value_list_of(rlz.string), ('a',), ibis.sequence(['a'])),
(rlz.value_list_of(rlz.string), ['a', 'b'], ibis.sequence(['a', 'b'])),
pytest.param(
rlz.value_list_of(rlz.value_list_of(rlz.string)),
[[], ['a']],
ibis.sequence([[], ['a']]),
marks=pytest.mark.xfail(
raises=ValueError, reason='Not yet implemented'
),
),
(
rlz.value_list_of(rlz.boolean, min_length=2),
[True, False],
ibis.sequence([True, False]),
),
],
)
def test_valid_value_list_of(validator, values, expected):
result = validator(values)
assert isinstance(result, ir.ListExpr)
assert len(result) == len(values)
for a, b in zip(result, expected):
assert a.equals(b)
def test_valid_list_of_extra():
validator = rlz.list_of(identity)
assert validator((3, 2)) == [3, 2]
validator = rlz.list_of(rlz.list_of(rlz.string))
result = validator([[], ['a']])
assert result[1][0].equals(ibis.literal('a'))
@pytest.mark.parametrize(
('validator', 'values'),
[
(rlz.value_list_of(rlz.double, min_length=2), [1]),
(rlz.value_list_of(rlz.integer), 1.1),
(rlz.value_list_of(rlz.string), 'asd'),
(rlz.value_list_of(identity), 3),
],
)
def test_invalid_list_of(validator, values):
with pytest.raises(IbisTypeError):
validator(values)
@pytest.mark.parametrize(
('units', 'value', 'expected'),
[
({'H', 'D'}, ibis.interval(days=3), ibis.interval(days=3)),
(['Y'], ibis.interval(years=3), ibis.interval(years=3)),
],
)
def test_valid_interval(units, value, expected):
result = rlz.interval(value, units=units)
assert result.equals(expected)
@pytest.mark.parametrize(
('units', 'value', 'expected'),
[
({'Y'}, ibis.interval(hours=1), IbisTypeError),
({'Y', 'M', 'D'}, ibis.interval(hours=1), IbisTypeError),
({'Q', 'W', 'D'}, ibis.interval(seconds=1), IbisTypeError),
],
)
def test_invalid_interval(units, value, expected):
with pytest.raises(expected):
rlz.interval(value, units=units)
@pytest.mark.parametrize(
('validator', 'value', 'expected'),
[
(rlz.column(rlz.any), table.int_col, table.int_col),
(rlz.column(rlz.string), table.string_col, table.string_col),
(rlz.scalar(rlz.integer), ibis.literal(3), ibis.literal(3)),
(rlz.scalar(rlz.any), 'caracal', ibis.literal('caracal')),
],
)
def test_valid_column_or_scalar(validator, value, expected):
result = validator(value)
assert result.equals(expected)
@pytest.mark.parametrize(
('validator', 'value', 'expected'),
[
(rlz.column(rlz.integer), table.double_col, IbisTypeError),
(rlz.column(rlz.any), ibis.literal(3), IbisTypeError),
(rlz.column(rlz.integer), ibis.literal(3), IbisTypeError),
],
)
def test_invalid_column_or_scalar(validator, value, expected):
with pytest.raises(expected):
validator(value)
@pytest.mark.parametrize(
'table',
[
ibis.table([('group', dt.int64), ('value', dt.double)]),
ibis.table(
[('group', dt.int64), ('value', dt.double), ('value2', dt.double)]
),
],
)
def test_table_with_schema(table):
validator = rlz.table(schema=[('group', dt.int64), ('value', dt.double)])
assert validator(table) == table
@pytest.mark.parametrize(
'table', [ibis.table([('group', dt.int64), ('value', dt.timestamp)])]
)
def test_table_with_schema_invalid(table):
validator = rlz.table(
schema=[('group', dt.double), ('value', dt.timestamp)]
)
with pytest.raises(IbisTypeError):
validator(table)
def test_shape_like_with_no_arguments():
with pytest.raises(ValueError) as e:
rlz.shape_like([])
assert str(e.value) == 'Must pass at least one expression'
@pytest.mark.parametrize(
('rule', 'input'),
[
(rlz.array_of(rlz.integer), [1, 2, 3]),
(rlz.array_of(rlz.integer), []),
(rlz.array_of(rlz.double), [1, 2]),
(rlz.array_of(rlz.string), ['a', 'b']),
(rlz.array_of(rlz.array_of(rlz.string)), [['a'], [], [], ['a', 'b']]),
],
)
def test_array_of(rule, input):
assert isinstance(rule(input).type(), dt.Array)
@pytest.mark.parametrize(
('rule', 'input'),
[
(rlz.array_of(rlz.array_of(rlz.string)), [1, 2]),
(rlz.array_of(rlz.string), [1, 2.0]),
(rlz.array_of(rlz.array_of(rlz.integer)), [2, 2.0]),
],
)
def test_array_of_invalid_input(rule, input):
with pytest.raises(IbisTypeError):
rule(input)
@pytest.mark.parametrize(
('validator', 'input'),
[
(rlz.array_of(rlz.integer), [1, 2, 3]),
(rlz.value_list_of(rlz.integer), (3, 2)),
(rlz.instance_of(int), 32),
],
)
def test_optional(validator, input):
expected = validator(input)
if isinstance(expected, ibis.Expr):
assert rlz.optional(validator)(input).equals(expected)
else:
assert rlz.optional(validator)(input) == expected
assert rlz.optional(validator)(None) is None
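# Illustrative sketch (not a test): the rlz validators exercised above are
# plain callables and can be applied directly. Uses the `table`, `rlz` and
# `dt` names already defined in this module.
def _example_apply_validators():
    literal = rlz.value(dt.int64, 42)                 # coerced to an int64 literal
    column = rlz.column(rlz.integer)(table.int_col)   # column expression passes through
    return literal, column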
|
|
"""
Client classes for the GA4GH reference implementation.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import requests
import posixpath
import logging
import ga4gh.protocol as protocol
import ga4gh.exceptions as exceptions
class AbstractClient(object):
"""
The abstract superclass of GA4GH Client objects.
"""
def __init__(self, logLevel=0):
self._pageSize = None
self._logLevel = logLevel
self._protocolBytesReceived = 0
logging.basicConfig()
self._logger = logging.getLogger(__name__)
self._logger.setLevel(logLevel)
def _deserializeResponse(self, jsonResponseString, protocolResponseClass):
self._protocolBytesReceived += len(jsonResponseString)
self._logger.debug("response:{}".format(jsonResponseString))
if jsonResponseString == '':
raise exceptions.EmptyResponseException()
responseObject = protocolResponseClass.fromJsonString(
jsonResponseString)
return responseObject
def _runSearchPageRequest(
self, protocolRequest, objectName, protocolResponseClass):
"""
Runs a complete transaction with the server to obtain a single
page of search results.
"""
        raise NotImplementedError()
def _runSearchRequest(
self, protocolRequest, objectName, protocolResponseClass):
"""
Runs the specified request at the specified objectName and instantiates
        an object of the specified class. We yield each object in the
        response's value list.
If pages of results are present, repeat this process until the
pageToken is null.
"""
notDone = True
while notDone:
responseObject = self._runSearchPageRequest(
protocolRequest, objectName, protocolResponseClass)
valueList = getattr(
responseObject, protocolResponseClass.getValueListName())
for extract in valueList:
yield extract
notDone = responseObject.nextPageToken is not None
protocolRequest.pageToken = responseObject.nextPageToken
def _runListReferenceBasesPageRequest(self, id_, protocolRequest):
"""
Runs a complete transaction with the server to get a single
page of results for the specified ListReferenceBasesRequest.
"""
        raise NotImplementedError()
def listReferenceBases(self, id_, start=0, end=None):
"""
        Returns the bases for the specified reference from the server as a
        single string, concatenating the pages of the response. This request
        does not conform to the patterns of the other search and get
        requests, and is implemented differently.
"""
request = protocol.ListReferenceBasesRequest()
request.start = start
request.end = end
notDone = True
# TODO We should probably use a StringIO here to make string buffering
# a bit more efficient.
basesList = []
while notDone:
response = self._runListReferenceBasesPageRequest(id_, request)
basesList.append(response.sequence)
notDone = response.nextPageToken is not None
request.pageToken = response.nextPageToken
return "".join(basesList)
def _runGetRequest(self, objectName, protocolResponseClass, id_):
"""
Requests an object from the server and returns the object of
type protocolResponseClass that has id id_.
Used for requests where a single object is the expected response.
"""
        raise NotImplementedError()
def getPageSize(self):
"""
Returns the suggested maximum size of pages of results returned by
the server.
"""
return self._pageSize
def setPageSize(self, pageSize):
"""
Sets the requested maximum size of pages of results returned by the
server to the specified value.
"""
self._pageSize = pageSize
def getProtocolBytesReceived(self):
"""
Returns the total number of protocol bytes received from the server
by this client.
:return: The number of bytes consumed by protocol traffic read from
the server during the lifetime of this client.
:rtype: int
"""
return self._protocolBytesReceived
def getDataset(self, datasetId):
"""
Returns the Dataset with the specified ID from the server.
:param str datasetId: The ID of the Dataset of interest.
:return: The Dataset of interest.
:rtype: :class:`ga4gh.protocol.Dataset`
"""
return self._runGetRequest("datasets", protocol.Dataset, datasetId)
def getReferenceSet(self, referenceSetId):
"""
Returns the ReferenceSet with the specified ID from the server.
:param str referenceSetId: The ID of the ReferenceSet of interest.
:return: The ReferenceSet of interest.
:rtype: :class:`ga4gh.protocol.ReferenceSet`
"""
return self._runGetRequest(
"referencesets", protocol.ReferenceSet, referenceSetId)
def getReference(self, referenceId):
"""
Returns the Reference with the specified ID from the server.
:param str referenceId: The ID of the Reference of interest.
:return: The Reference of interest.
:rtype: :class:`ga4gh.protocol.Reference`
"""
return self._runGetRequest(
"references", protocol.Reference, referenceId)
def getReadGroupSet(self, readGroupSetId):
"""
Returns the ReadGroupSet with the specified ID from the server.
:param str readGroupSetId: The ID of the ReadGroupSet of interest.
:return: The ReadGroupSet of interest.
:rtype: :class:`ga4gh.protocol.ReadGroupSet`
"""
return self._runGetRequest(
"readgroupsets", protocol.ReadGroupSet, readGroupSetId)
def getReadGroup(self, readGroupId):
"""
Returns the ReadGroup with the specified ID from the server.
:param str readGroupId: The ID of the ReadGroup of interest.
:return: The ReadGroup of interest.
:rtype: :class:`ga4gh.protocol.ReadGroup`
"""
return self._runGetRequest(
"readgroups", protocol.ReadGroup, readGroupId)
def getCallSet(self, callSetId):
"""
Returns the CallSet with the specified ID from the server.
:param str callSetId: The ID of the CallSet of interest.
:return: The CallSet of interest.
:rtype: :class:`ga4gh.protocol.CallSet`
"""
return self._runGetRequest("callsets", protocol.CallSet, callSetId)
def getVariant(self, variantId):
"""
Returns the Variant with the specified ID from the server.
:param str variantId: The ID of the Variant of interest.
:return: The Variant of interest.
:rtype: :class:`ga4gh.protocol.Variant`
"""
return self._runGetRequest("variants", protocol.Variant, variantId)
def getVariantSet(self, variantSetId):
"""
Returns the VariantSet with the specified ID from the server.
:param str variantSetId: The ID of the VariantSet of interest.
:return: The VariantSet of interest.
:rtype: :class:`ga4gh.protocol.VariantSet`
"""
return self._runGetRequest(
"variantsets", protocol.VariantSet, variantSetId)
def searchVariants(
self, variantSetId, start=None, end=None, referenceName=None,
callSetIds=None):
"""
Returns an iterator over the Variants fulfilling the specified
conditions from the specified VariantSet.
:param str variantSetId: The ID of the
:class:`ga4gh.protocol.VariantSet` of interest.
:param int start: Required. The beginning of the window (0-based,
inclusive) for which overlapping variants should be returned.
Genomic positions are non-negative integers less than reference
length. Requests spanning the join of circular genomes are
represented as two requests one on each side of the join
(position 0).
:param int end: Required. The end of the window (0-based, exclusive)
for which overlapping variants should be returned.
:param str referenceName: The name of the
:class:`ga4gh.protocol.Reference` we wish to return variants from.
:param list callSetIds: Only return variant calls which belong to call
sets with these IDs. If an empty array, returns variants without
any call objects. If null, returns all variant calls.
:return: An iterator over the :class:`ga4gh.protocol.Variant` objects
defined by the query parameters.
:rtype: iter
"""
request = protocol.SearchVariantsRequest()
request.referenceName = referenceName
request.start = start
request.end = end
request.variantSetId = variantSetId
request.callSetIds = callSetIds
request.pageSize = self._pageSize
return self._runSearchRequest(
request, "variants", protocol.SearchVariantsResponse)
def searchDatasets(self):
"""
Returns an iterator over the Datasets on the server.
:return: An iterator over the :class:`ga4gh.protocol.Dataset`
objects on the server.
"""
request = protocol.SearchDatasetsRequest()
request.pageSize = self._pageSize
return self._runSearchRequest(
request, "datasets", protocol.SearchDatasetsResponse)
def searchVariantSets(self, datasetId):
"""
Returns an iterator over the VariantSets fulfilling the specified
conditions from the specified Dataset.
:param str datasetId: The ID of the :class:`ga4gh.protocol.Dataset`
of interest.
:return: An iterator over the :class:`ga4gh.protocol.VariantSet`
objects defined by the query parameters.
"""
request = protocol.SearchVariantSetsRequest()
request.datasetId = datasetId
request.pageSize = self._pageSize
return self._runSearchRequest(
request, "variantsets", protocol.SearchVariantSetsResponse)
def searchReferenceSets(
self, accession=None, md5checksum=None, assemblyId=None):
"""
Returns an iterator over the ReferenceSets fulfilling the specified
conditions.
:param str accession: If not null, return the reference sets for which
the `accession` matches this string (case-sensitive, exact match).
:param str md5checksum: If not null, return the reference sets for
which the `md5checksum` matches this string (case-sensitive, exact
match). See :class:`ga4gh.protocol.ReferenceSet::md5checksum` for
details.
:param str assemblyId: If not null, return the reference sets for which
the `assemblyId` matches this string (case-sensitive, exact match).
:return: An iterator over the :class:`ga4gh.protocol.ReferenceSet`
objects defined by the query parameters.
"""
request = protocol.SearchReferenceSetsRequest()
request.accession = accession
request.md5checksum = md5checksum
request.assemblyId = assemblyId
request.pageSize = self._pageSize
return self._runSearchRequest(
request, "referencesets", protocol.SearchReferenceSetsResponse)
def searchReferences(
self, referenceSetId, accession=None, md5checksum=None):
"""
Returns an iterator over the References fulfilling the specified
conditions from the specified Dataset.
:param str referenceSetId: The ReferenceSet to search.
:param str accession: If not None, return the references for which the
`accession` matches this string (case-sensitive, exact match).
:param str md5checksum: If not None, return the references for which
the `md5checksum` matches this string (case-sensitive, exact
match).
:return: An iterator over the :class:`ga4gh.protocol.Reference`
objects defined by the query parameters.
"""
request = protocol.SearchReferencesRequest()
request.referenceSetId = referenceSetId
request.accession = accession
request.md5checksum = md5checksum
request.pageSize = self._pageSize
return self._runSearchRequest(
request, "references", protocol.SearchReferencesResponse)
def searchCallSets(self, variantSetId, name=None):
"""
Returns an iterator over the CallSets fulfilling the specified
conditions from the specified VariantSet.
:param str name: Only CallSets matching the specified name will
be returned.
:return: An iterator over the :class:`ga4gh.protocol.CallSet`
objects defined by the query parameters.
"""
request = protocol.SearchCallSetsRequest()
request.variantSetId = variantSetId
request.name = name
request.pageSize = self._pageSize
return self._runSearchRequest(
request, "callsets", protocol.SearchCallSetsResponse)
def searchReadGroupSets(self, datasetId, name=None):
"""
Returns an iterator over the ReadGroupSets fulfilling the specified
conditions from the specified Dataset.
:param str name: Only ReadGroupSets matching the specified name
will be returned.
:return: An iterator over the :class:`ga4gh.protocol.ReadGroupSet`
objects defined by the query parameters.
:rtype: iter
"""
request = protocol.SearchReadGroupSetsRequest()
request.datasetId = datasetId
request.name = name
request.pageSize = self._pageSize
return self._runSearchRequest(
request, "readgroupsets", protocol.SearchReadGroupSetsResponse)
def searchReads(
self, readGroupIds, referenceId=None, start=None, end=None):
"""
Returns an iterator over the Reads fulfilling the specified
conditions from the specified ReadGroupIds.
        :param list readGroupIds: The IDs of the
:class:`ga4gh.protocol.ReadGroup` of interest.
        :param str referenceId: The ID of the
:class:`ga4gh.protocol.Reference` we wish to return reads
mapped to.
:param int start: The start position (0-based) of this query. If a
reference is specified, this defaults to 0. Genomic positions are
non-negative integers less than reference length. Requests spanning
the join of circular genomes are represented as two requests one on
each side of the join (position 0).
:param int end: The end position (0-based, exclusive) of this query.
If a reference is specified, this defaults to the reference's
length.
:return: An iterator over the
:class:`ga4gh.protocol.ReadAlignment` objects defined by
the query parameters.
:rtype: iter
"""
request = protocol.SearchReadsRequest()
request.readGroupIds = readGroupIds
request.referenceId = referenceId
request.start = start
request.end = end
request.pageSize = self._pageSize
return self._runSearchRequest(
request, "reads", protocol.SearchReadsResponse)
class HttpClient(AbstractClient):
"""
The GA4GH HTTP client. This class provides methods corresponding to the
GA4GH search and object GET methods.
.. todo:: Add a better description of the role of this class and include
    links to the high-level API documentation.
:param str urlPrefix: The base URL of the GA4GH server we wish to
communicate with. This should include the 'http' or 'https' prefix.
:param int logLevel: The amount of debugging information to log using
the :mod:`logging` module. This is :data:`logging.WARNING` by default.
:param str authenticationKey: The authentication key provided by the
server after logging in.
"""
def __init__(
self, urlPrefix, logLevel=logging.WARNING, authenticationKey=None):
super(HttpClient, self).__init__(logLevel)
self._urlPrefix = urlPrefix
self._authenticationKey = authenticationKey
self._session = requests.Session()
self._setupHttpSession()
requestsLog = logging.getLogger("requests.packages.urllib3")
requestsLog.setLevel(logLevel)
requestsLog.propagate = True
def _setupHttpSession(self):
"""
Sets up the common HTTP session parameters used by requests.
"""
headers = {"Content-type": "application/json"}
self._session.headers.update(headers)
# TODO is this unsafe????
self._session.verify = False
def _checkResponseStatus(self, response):
"""
        Checks the specified HTTP response from the requests package and
raises an exception if a non-200 HTTP code was returned by the
server.
"""
if response.status_code != requests.codes.ok:
self._logger.error("%s %s", response.status_code, response.text)
raise exceptions.RequestNonSuccessException(
"Url {0} had status_code {1}".format(
response.url, response.status_code))
def _getHttpParameters(self):
"""
        Returns the basic HTTP parameters we need for all requests.
"""
return {'key': self._authenticationKey}
def _runSearchPageRequest(
self, protocolRequest, objectName, protocolResponseClass):
url = posixpath.join(self._urlPrefix, objectName + '/search')
data = protocolRequest.toJsonString()
self._logger.debug("request:{}".format(data))
response = self._session.post(
url, params=self._getHttpParameters(), data=data)
self._checkResponseStatus(response)
return self._deserializeResponse(response.text, protocolResponseClass)
def _runGetRequest(self, objectName, protocolResponseClass, id_):
urlSuffix = "{objectName}/{id}".format(objectName=objectName, id=id_)
url = posixpath.join(self._urlPrefix, urlSuffix)
response = self._session.get(url, params=self._getHttpParameters())
self._checkResponseStatus(response)
return self._deserializeResponse(response.text, protocolResponseClass)
def _runListReferenceBasesPageRequest(self, id_, request):
urlSuffix = "references/{id}/bases".format(id=id_)
url = posixpath.join(self._urlPrefix, urlSuffix)
params = self._getHttpParameters()
params.update(request.toJsonDict())
response = self._session.get(url, params=params)
self._checkResponseStatus(response)
return self._deserializeResponse(
response.text, protocol.ListReferenceBasesResponse)
class LocalClient(AbstractClient):
def __init__(self, backend):
super(LocalClient, self).__init__()
self._backend = backend
self._getMethodMap = {
"datasets": self._backend.runGetDataset,
"referencesets": self._backend.runGetReferenceSet,
"references": self._backend.runGetReference,
"variantsets": self._backend.runGetVariantSet,
"variants": self._backend.runGetVariant,
"readgroupsets": self._backend.runGetReadGroupSet,
"readgroups": self._backend.runGetReadGroup,
}
self._searchMethodMap = {
"datasets": self._backend.runSearchDatasets,
"referencesets": self._backend.runSearchReferenceSets,
"references": self._backend.runSearchReferences,
"variantsets": self._backend.runSearchVariantSets,
"variants": self._backend.runSearchVariants,
"readgroupsets": self._backend.runSearchReadGroupSets,
"reads": self._backend.runSearchReads,
}
def _runGetRequest(self, objectName, protocolResponseClass, id_):
getMethod = self._getMethodMap[objectName]
responseJson = getMethod(id_)
return self._deserializeResponse(responseJson, protocolResponseClass)
def _runSearchPageRequest(
self, protocolRequest, objectName, protocolResponseClass):
searchMethod = self._searchMethodMap[objectName]
responseJson = searchMethod(protocolRequest.toJsonString())
return self._deserializeResponse(responseJson, protocolResponseClass)
def _runListReferenceBasesPageRequest(self, id_, request):
requestArgs = request.toJsonDict()
# We need to remove end from this dict if it's not specified because
# of the way we're interacting with Flask and HTTP GET params.
# TODO: This is a really nasty way of doing things; we really
# should just have a request object and pass that around instead of an
# arguments dictionary.
if request.end is None:
del requestArgs["end"]
if request.pageToken is None:
del requestArgs["pageToken"]
responseJson = self._backend.runListReferenceBases(id_, requestArgs)
return self._deserializeResponse(
responseJson, protocol.ListReferenceBasesResponse)
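# Illustrative sketch (not part of the library): typical client usage. The
# server URL is a placeholder; searchDatasets() returns an iterator, as
# documented on AbstractClient above.
def _exampleSearchDatasets():
    client = HttpClient("http://example.org/ga4gh")   # placeholder URL
    datasets = list(client.searchDatasets())
    return datasets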
|
|
"""Guess the MIME type of a file.
This module defines two useful functions:
guess_type(url, strict=True) -- guess the MIME type and encoding of a URL.
guess_extension(type, strict=True) -- guess the extension for a given MIME type.
It also contains the following, for tuning the behavior:
Data:
knownfiles -- list of files to parse
inited -- flag set when init() has been called
suffix_map -- dictionary mapping suffixes to suffixes
encodings_map -- dictionary mapping suffixes to encodings
types_map -- dictionary mapping suffixes to types
Functions:
init([files]) -- parse a list of files, default knownfiles
read_mime_types(file) -- parse one file, return a dictionary or None
"""
import os
import posixpath
import urllib.parse
__all__ = [
"guess_type","guess_extension","guess_all_extensions",
"add_type","read_mime_types","init"
]
knownfiles = [
"/etc/mime.types",
"/etc/httpd/mime.types", # Mac OS X
"/etc/httpd/conf/mime.types", # Apache
"/etc/apache/mime.types", # Apache 1
"/etc/apache2/mime.types", # Apache 2
"/usr/local/etc/httpd/conf/mime.types",
"/usr/local/lib/netscape/mime.types",
"/usr/local/etc/httpd/conf/mime.types", # Apache 1.2
"/usr/local/etc/mime.types", # Apache 1.3
]
inited = False
_db = None
class MimeTypes:
"""MIME-types datastore.
This datastore can handle information from mime.types-style files
and supports basic determination of MIME type from a filename or
URL, and can guess a reasonable extension given a MIME type.
"""
def __init__(self, filenames=(), strict=True):
if not inited:
init()
self.encodings_map = encodings_map.copy()
self.suffix_map = suffix_map.copy()
self.types_map = ({}, {}) # dict for (non-strict, strict)
self.types_map_inv = ({}, {})
for (ext, type) in types_map.items():
self.add_type(type, ext, True)
for (ext, type) in common_types.items():
self.add_type(type, ext, False)
for name in filenames:
self.read(name, strict)
def add_type(self, type, ext, strict=True):
"""Add a mapping between a type and an extension.
When the extension is already known, the new
type will replace the old one. When the type
is already known the extension will be added
to the list of known extensions.
If strict is true, information will be added to
list of standard types, else to the list of non-standard
types.
"""
self.types_map[strict][ext] = type
exts = self.types_map_inv[strict].setdefault(type, [])
if ext not in exts:
exts.append(ext)
def guess_type(self, url, strict=True):
"""Guess the type of a file based on its URL.
Return value is a tuple (type, encoding) where type is None if
the type can't be guessed (no or unknown suffix) or a string
of the form type/subtype, usable for a MIME Content-type
header; and encoding is None for no encoding or the name of
the program used to encode (e.g. compress or gzip). The
mappings are table driven. Encoding suffixes are case
sensitive; type suffixes are first tried case sensitive, then
case insensitive.
The suffixes .tgz, .taz and .tz (case sensitive!) are all
mapped to '.tar.gz'. (This is table-driven too, using the
dictionary suffix_map.)
Optional `strict' argument when False adds a bunch of commonly found,
but non-standard types.
"""
scheme, url = urllib.parse.splittype(url)
if scheme == 'data':
# syntax of data URLs:
# dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
# mediatype := [ type "/" subtype ] *( ";" parameter )
# data := *urlchar
# parameter := attribute "=" value
# type/subtype defaults to "text/plain"
comma = url.find(',')
if comma < 0:
# bad data URL
return None, None
semi = url.find(';', 0, comma)
if semi >= 0:
type = url[:semi]
else:
type = url[:comma]
if '=' in type or '/' not in type:
type = 'text/plain'
return type, None # never compressed, so encoding is None
base, ext = posixpath.splitext(url)
while ext in self.suffix_map:
base, ext = posixpath.splitext(base + self.suffix_map[ext])
if ext in self.encodings_map:
encoding = self.encodings_map[ext]
base, ext = posixpath.splitext(base)
else:
encoding = None
types_map = self.types_map[True]
if ext in types_map:
return types_map[ext], encoding
elif ext.lower() in types_map:
return types_map[ext.lower()], encoding
elif strict:
return None, encoding
types_map = self.types_map[False]
if ext in types_map:
return types_map[ext], encoding
elif ext.lower() in types_map:
return types_map[ext.lower()], encoding
else:
return None, encoding
def guess_all_extensions(self, type, strict=True):
"""Guess the extensions for a file based on its MIME type.
Return value is a list of strings giving the possible filename
extensions, including the leading dot ('.'). The extension is not
guaranteed to have been associated with any particular data stream,
but would be mapped to the MIME type `type' by guess_type().
Optional `strict' argument when false adds a bunch of commonly found,
but non-standard types.
"""
type = type.lower()
extensions = self.types_map_inv[True].get(type, [])
if not strict:
for ext in self.types_map_inv[False].get(type, []):
if ext not in extensions:
extensions.append(ext)
return extensions
def guess_extension(self, type, strict=True):
"""Guess the extension for a file based on its MIME type.
Return value is a string giving a filename extension,
including the leading dot ('.'). The extension is not
guaranteed to have been associated with any particular data
stream, but would be mapped to the MIME type `type' by
guess_type(). If no extension can be guessed for `type', None
is returned.
Optional `strict' argument when false adds a bunch of commonly found,
but non-standard types.
"""
extensions = self.guess_all_extensions(type, strict)
if not extensions:
return None
return extensions[0]
def read(self, filename, strict=True):
"""
Read a single mime.types-format file, specified by pathname.
If strict is true, information will be added to
list of standard types, else to the list of non-standard
types.
"""
fp = open(filename)
self.readfp(fp, strict)
fp.close()
def readfp(self, fp, strict=True):
"""
Read a single mime.types-format file.
If strict is true, information will be added to
list of standard types, else to the list of non-standard
types.
"""
while 1:
line = fp.readline()
if not line:
break
words = line.split()
for i in range(len(words)):
if words[i][0] == '#':
del words[i:]
break
if not words:
continue
type, suffixes = words[0], words[1:]
for suff in suffixes:
self.add_type(type, '.' + suff, strict)
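# Illustrative sketch: using a private MimeTypes instance instead of the
# module-level helpers defined below. The '.example' mapping is made up.
def _example_private_db():
    db = MimeTypes()
    db.add_type('application/x-example', '.example', strict=False)
    # -> ('application/x-example', None)
    return db.guess_type('data.example', strict=False)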
def guess_type(url, strict=True):
"""Guess the type of a file based on its URL.
Return value is a tuple (type, encoding) where type is None if the
type can't be guessed (no or unknown suffix) or a string of the
form type/subtype, usable for a MIME Content-type header; and
encoding is None for no encoding or the name of the program used
to encode (e.g. compress or gzip). The mappings are table
driven. Encoding suffixes are case sensitive; type suffixes are
first tried case sensitive, then case insensitive.
The suffixes .tgz, .taz and .tz (case sensitive!) are all mapped
to ".tar.gz". (This is table-driven too, using the dictionary
suffix_map).
Optional `strict' argument when false adds a bunch of commonly found, but
non-standard types.
"""
if _db is None:
init()
return _db.guess_type(url, strict)
def guess_all_extensions(type, strict=True):
"""Guess the extensions for a file based on its MIME type.
Return value is a list of strings giving the possible filename
extensions, including the leading dot ('.'). The extension is not
guaranteed to have been associated with any particular data
stream, but would be mapped to the MIME type `type' by
guess_type(). If no extension can be guessed for `type', None
is returned.
Optional `strict' argument when false adds a bunch of commonly found,
but non-standard types.
"""
if _db is None:
init()
return _db.guess_all_extensions(type, strict)
def guess_extension(type, strict=True):
"""Guess the extension for a file based on its MIME type.
Return value is a string giving a filename extension, including the
leading dot ('.'). The extension is not guaranteed to have been
associated with any particular data stream, but would be mapped to the
MIME type `type' by guess_type(). If no extension can be guessed for
`type', None is returned.
Optional `strict' argument when false adds a bunch of commonly found,
but non-standard types.
"""
if _db is None:
init()
return _db.guess_extension(type, strict)
def add_type(type, ext, strict=True):
"""Add a mapping between a type and an extension.
When the extension is already known, the new
type will replace the old one. When the type
is already known the extension will be added
to the list of known extensions.
If strict is true, information will be added to
list of standard types, else to the list of non-standard
types.
"""
if _db is None:
init()
return _db.add_type(type, ext, strict)
def init(files=None):
global suffix_map, types_map, encodings_map, common_types
global inited, _db
inited = True # so that MimeTypes.__init__() doesn't call us again
db = MimeTypes()
if files is None:
files = knownfiles
for file in files:
if os.path.isfile(file):
db.readfp(open(file))
encodings_map = db.encodings_map
suffix_map = db.suffix_map
types_map = db.types_map[True]
common_types = db.types_map[False]
# Make the DB a global variable now that it is fully initialized
_db = db
def read_mime_types(file):
try:
f = open(file)
except IOError:
return None
db = MimeTypes()
db.readfp(f, True)
return db.types_map[True]
def _default_mime_types():
global suffix_map
global encodings_map
global types_map
global common_types
suffix_map = {
'.tgz': '.tar.gz',
'.taz': '.tar.gz',
'.tz': '.tar.gz',
'.tbz2': '.tar.bz2',
}
encodings_map = {
'.gz': 'gzip',
'.Z': 'compress',
'.bz2': 'bzip2',
}
# Before adding new types, make sure they are either registered with IANA,
# at http://www.isi.edu/in-notes/iana/assignments/media-types
# or extensions, i.e. using the x- prefix
# If you add to these, please keep them sorted!
types_map = {
'.a' : 'application/octet-stream',
'.ai' : 'application/postscript',
'.aif' : 'audio/x-aiff',
'.aifc' : 'audio/x-aiff',
'.aiff' : 'audio/x-aiff',
'.au' : 'audio/basic',
'.avi' : 'video/x-msvideo',
'.bat' : 'text/plain',
'.bcpio' : 'application/x-bcpio',
'.bin' : 'application/octet-stream',
'.bmp' : 'image/x-ms-bmp',
'.c' : 'text/plain',
# Duplicates :(
'.cdf' : 'application/x-cdf',
'.cdf' : 'application/x-netcdf',
'.cpio' : 'application/x-cpio',
'.csh' : 'application/x-csh',
'.css' : 'text/css',
'.dll' : 'application/octet-stream',
'.doc' : 'application/msword',
'.dot' : 'application/msword',
'.dvi' : 'application/x-dvi',
'.eml' : 'message/rfc822',
'.eps' : 'application/postscript',
'.etx' : 'text/x-setext',
'.exe' : 'application/octet-stream',
'.gif' : 'image/gif',
'.gtar' : 'application/x-gtar',
'.h' : 'text/plain',
'.hdf' : 'application/x-hdf',
'.htm' : 'text/html',
'.html' : 'text/html',
'.ief' : 'image/ief',
'.jpe' : 'image/jpeg',
'.jpeg' : 'image/jpeg',
'.jpg' : 'image/jpeg',
'.js' : 'application/x-javascript',
'.ksh' : 'text/plain',
'.latex' : 'application/x-latex',
'.m1v' : 'video/mpeg',
'.man' : 'application/x-troff-man',
'.me' : 'application/x-troff-me',
'.mht' : 'message/rfc822',
'.mhtml' : 'message/rfc822',
'.mif' : 'application/x-mif',
'.mov' : 'video/quicktime',
'.movie' : 'video/x-sgi-movie',
'.mp2' : 'audio/mpeg',
'.mp3' : 'audio/mpeg',
'.mp4' : 'video/mp4',
'.mpa' : 'video/mpeg',
'.mpe' : 'video/mpeg',
'.mpeg' : 'video/mpeg',
'.mpg' : 'video/mpeg',
'.ms' : 'application/x-troff-ms',
'.nc' : 'application/x-netcdf',
'.nws' : 'message/rfc822',
'.o' : 'application/octet-stream',
'.obj' : 'application/octet-stream',
'.oda' : 'application/oda',
'.p12' : 'application/x-pkcs12',
'.p7c' : 'application/pkcs7-mime',
'.pbm' : 'image/x-portable-bitmap',
'.pdf' : 'application/pdf',
'.pfx' : 'application/x-pkcs12',
'.pgm' : 'image/x-portable-graymap',
'.pl' : 'text/plain',
'.png' : 'image/png',
'.pnm' : 'image/x-portable-anymap',
'.pot' : 'application/vnd.ms-powerpoint',
'.ppa' : 'application/vnd.ms-powerpoint',
'.ppm' : 'image/x-portable-pixmap',
'.pps' : 'application/vnd.ms-powerpoint',
'.ppt' : 'application/vnd.ms-powerpoint',
'.ps' : 'application/postscript',
'.pwz' : 'application/vnd.ms-powerpoint',
'.py' : 'text/x-python',
'.pyc' : 'application/x-python-code',
'.pyo' : 'application/x-python-code',
'.qt' : 'video/quicktime',
'.ra' : 'audio/x-pn-realaudio',
'.ram' : 'application/x-pn-realaudio',
'.ras' : 'image/x-cmu-raster',
'.rdf' : 'application/xml',
'.rgb' : 'image/x-rgb',
'.roff' : 'application/x-troff',
'.rtx' : 'text/richtext',
'.sgm' : 'text/x-sgml',
'.sgml' : 'text/x-sgml',
'.sh' : 'application/x-sh',
'.shar' : 'application/x-shar',
'.snd' : 'audio/basic',
'.so' : 'application/octet-stream',
'.src' : 'application/x-wais-source',
'.sv4cpio': 'application/x-sv4cpio',
'.sv4crc' : 'application/x-sv4crc',
'.swf' : 'application/x-shockwave-flash',
'.t' : 'application/x-troff',
'.tar' : 'application/x-tar',
'.tcl' : 'application/x-tcl',
'.tex' : 'application/x-tex',
'.texi' : 'application/x-texinfo',
'.texinfo': 'application/x-texinfo',
'.tif' : 'image/tiff',
'.tiff' : 'image/tiff',
'.tr' : 'application/x-troff',
'.tsv' : 'text/tab-separated-values',
'.txt' : 'text/plain',
'.ustar' : 'application/x-ustar',
'.vcf' : 'text/x-vcard',
'.wav' : 'audio/x-wav',
'.wiz' : 'application/msword',
'.wsdl' : 'application/xml',
'.xbm' : 'image/x-xbitmap',
'.xlb' : 'application/vnd.ms-excel',
# Duplicates :(
'.xls' : 'application/excel',
'.xls' : 'application/vnd.ms-excel',
'.xml' : 'text/xml',
'.xpdl' : 'application/xml',
'.xpm' : 'image/x-xpixmap',
'.xsl' : 'application/xml',
'.xwd' : 'image/x-xwindowdump',
'.zip' : 'application/zip',
}
# These are non-standard types, commonly found in the wild. They will
# only match if strict=0 flag is given to the API methods.
# Please sort these too
common_types = {
'.jpg' : 'image/jpg',
'.mid' : 'audio/midi',
'.midi': 'audio/midi',
'.pct' : 'image/pict',
'.pic' : 'image/pict',
'.pict': 'image/pict',
'.rtf' : 'application/rtf',
'.xul' : 'text/xul'
}
_default_mime_types()
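# Illustrative sketch: the module-level helpers in action, using entries from
# the default tables populated above.
def _example_module_helpers():
    type_and_encoding = guess_type('archive.tar.gz')   # ('application/x-tar', 'gzip')
    extension = guess_extension('text/html')           # '.htm' or '.html', table order dependent
    return type_and_encoding, extension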
if __name__ == '__main__':
import sys
import getopt
USAGE = """\
Usage: mimetypes.py [options] type
Options:
--help / -h -- print this message and exit
--lenient / -l -- additionally search of some common, but non-standard
types.
--extension / -e -- guess extension instead of type
More than one type argument may be given.
"""
def usage(code, msg=''):
print(USAGE)
if msg: print(msg)
sys.exit(code)
try:
opts, args = getopt.getopt(sys.argv[1:], 'hle',
['help', 'lenient', 'extension'])
except getopt.error as msg:
usage(1, msg)
strict = 1
extension = 0
for opt, arg in opts:
if opt in ('-h', '--help'):
usage(0)
elif opt in ('-l', '--lenient'):
strict = 0
elif opt in ('-e', '--extension'):
extension = 1
for gtype in args:
if extension:
guess = guess_extension(gtype, strict)
if not guess: print("I don't know anything about type", gtype)
else: print(guess)
else:
guess, encoding = guess_type(gtype, strict)
if not guess: print("I don't know anything about type", gtype)
else: print('type:', guess, 'encoding:', encoding)
|
|
r"""OS routines for Java, with some attempts to support DOS, NT, and
Posix functionality.
This exports:
- all functions from posix, nt, dos, os2, mac, or ce, e.g. unlink, stat, etc.
- os.path is one of the modules posixpath, ntpath, macpath, or dospath
- os.name is 'posix', 'nt', 'dos', 'os2', 'mac', 'ce' or 'riscos'
- os.curdir is a string representing the current directory ('.' or ':')
- os.pardir is a string representing the parent directory ('..' or '::')
- os.sep is the (or a most common) pathname separator ('/' or ':' or '\\')
- os.altsep is the alternate pathname separator (None or '/')
- os.pathsep is the component separator used in $PATH etc
- os.linesep is the line separator in text files ('\r' or '\n' or '\r\n')
- os.defpath is the default search path for executables
Programs that import and use 'os' stand a better chance of being
portable between different platforms. Of course, they must then
only use functions that are defined by all platforms (e.g., unlink
and opendir), and leave all pathname manipulation to os.path
(e.g., split and join).
"""
from __future__ import division
__all__ = ["altsep", "curdir", "pardir", "sep", "pathsep", "linesep",
"defpath", "name"]
import java
from java.io import File, BufferedReader, InputStreamReader, IOException
import javapath as path
from UserDict import UserDict
import string
import exceptions
import re
import sys
import thread
error = OSError
name = 'java' # discriminate based on JDK version?
curdir = '.' # default to Posix for directory behavior, override below
pardir = '..'
sep = java.io.File.separator
altsep = None
pathsep = java.io.File.pathSeparator
defpath = '.'
linesep = java.lang.System.getProperty('line.separator')
if sep=='.':
extsep = '/'
else:
extsep = '.'
def _exit(n=0):
java.lang.System.exit(n)
def getcwd():
foo = File(File("foo").getAbsolutePath())
return foo.getParent()
def chdir(path):
raise OSError(0, 'chdir not supported in Java', path)
def listdir(path):
l = File(path).list()
if l is None:
raise OSError(0, 'No such directory', path)
return list(l)
def mkdir(path, mode='ignored'):
if not File(path).mkdir():
raise OSError(0, "couldn't make directory", path)
def makedirs(path, mode='ignored'):
if not File(path).mkdirs():
raise OSError(0, "couldn't make directories", path)
def remove(path):
if not File(path).delete():
raise OSError(0, "couldn't delete file", path)
def rename(path, newpath):
if not File(path).renameTo(File(newpath)):
raise OSError(0, "couldn't rename file", path)
def rmdir(path):
if not File(path).delete():
raise OSError(0, "couldn't delete directory", path)
unlink = remove
def stat(path):
"""The Java stat implementation only returns a small subset of
the standard fields"""
f = File(path)
size = f.length()
# Sadly, if the returned length is zero, we don't really know if the file
# is zero sized or does not exist.
if size == 0 and not f.exists():
raise OSError(0, 'No such file or directory', path)
mtime = f.lastModified() / 1000.0
return (0, 0, 0, 0, 0, 0, size, mtime, mtime, 0)
def utime(path, times):
# Only the modification time is changed (and only on java2).
if times and hasattr(File, "setLastModified"):
File(path).setLastModified(long(times[1] * 1000.0))
class LazyDict( UserDict ):
"""A lazy-populating User Dictionary.
Lazy initialization is not thread-safe.
"""
def __init__( self,
dict=None,
populate=None,
keyTransform=None ):
"""dict: starting dictionary of values
populate: function that returns the populated dictionary
keyTransform: function to normalize the keys (e.g., toupper/None)
"""
UserDict.__init__( self, dict )
self._populated = 0
self.__populateFunc = populate or (lambda: {})
self._keyTransform = keyTransform or (lambda key: key)
def __populate( self ):
if not self._populated:
self.data = self.__populateFunc()
self._populated = 1 # race condition
########## extend methods from UserDict by pre-populating
def __repr__(self):
self.__populate()
return UserDict.__repr__( self )
def __cmp__(self, dict):
self.__populate()
return UserDict.__cmp__( self, dict )
def __len__(self):
self.__populate()
return UserDict.__len__( self )
def __getitem__(self, key):
self.__populate()
return UserDict.__getitem__( self, self._keyTransform(key) )
def __setitem__(self, key, item):
self.__populate()
UserDict.__setitem__( self, self._keyTransform(key), item )
def __delitem__(self, key):
self.__populate()
UserDict.__delitem__( self, self._keyTransform(key) )
def clear(self):
self.__populate()
UserDict.clear( self )
def copy(self):
self.__populate()
return UserDict.copy( self )
def keys(self):
self.__populate()
return UserDict.keys( self )
def items(self):
self.__populate()
return UserDict.items( self )
def values(self):
self.__populate()
return UserDict.values( self )
def has_key(self, key):
self.__populate()
return UserDict.has_key( self, self._keyTransform(key) )
def update(self, dict):
self.__populate()
UserDict.update( self, dict )
def get(self, key, failobj=None):
self.__populate()
return UserDict.get( self, self._keyTransform(key), failobj )
def setdefault(self, key, failobj=None):
self.__populate()
return UserDict.setdefault( self, self._keyTransform(key), failobj )
def popitem(self):
self.__populate()
return UserDict.popitem( self )
class _ShellEnv:
"""Provide environment derived by spawning a subshell and parsing its
environment. Also supports system functions and provides empty
environment support for platforms with unknown shell
functionality.
"""
def __init__( self, cmd=None, getEnv=None, keyTransform=None ):
"""cmd: list of exec() arguments to run command in subshell, or None
getEnv: shell command to list environment variables, or None
keyTransform: normalization function for environment keys, or None
"""
self.cmd = cmd
self.getEnv = getEnv
self.environment = LazyDict(populate=self._getEnvironment,
keyTransform=keyTransform)
self._keyTransform = self.environment._keyTransform
########## system
def system( self, cmd ):
"""Imitate the standard library 'system' call.
Execute 'cmd' in a shell, and send output to stdout & stderr.
"""
p = self.execute( cmd )
def println( arg, write=sys.stdout.write ):
write( arg + "\n" )
def printlnStdErr( arg, write=sys.stderr.write ):
write( arg + "\n" )
# read stderr in new thread
thread.start_new_thread( self._readLines,
( p.getErrorStream(), printlnStdErr ))
        # read stdout in main thread
self._readLines( p.getInputStream(), println )
return p.waitFor()
def execute( self, cmd ):
"""Execute cmd in a shell, and return the process instance"""
shellCmd = self._formatCmd( cmd )
if self.environment._populated:
env = self._formatEnvironment( self.environment )
else:
env = None
try:
p = java.lang.Runtime.getRuntime().exec( shellCmd, env )
return p
except IOException, ex:
raise OSError(
0,
"Failed to execute command (%s): %s" % ( shellCmd, ex )
)
########## utility methods
def _readLines( self, stream, func=None ):
"""Read lines of stream, and either append them to return
array of lines, or call func on each line.
"""
lines = []
func = func or lines.append
        # should read both stderr and stdout in separate threads...
bufStream = BufferedReader( InputStreamReader( stream ))
while 1:
line = bufStream.readLine()
if line is None: break
func( line )
return lines or None
def _formatCmd( self, cmd ):
"""Format a command for execution in a shell."""
if self.cmd is None:
msgFmt = "Unable to execute commands in subshell because shell" \
" functionality not implemented for OS %s with shell" \
" setting %s. Failed command=%s"""
raise OSError( 0, msgFmt % ( _osType, _envType, cmd ))
return self.cmd + [cmd]
def _formatEnvironment( self, env ):
"""Format enviroment in lines suitable for Runtime.exec"""
lines = []
for keyValue in env.items():
lines.append( "%s=%s" % keyValue )
return lines
def _getEnvironment( self ):
"""Get the environment variables by spawning a subshell.
This allows multi-line variables as long as subsequent lines do
not have '=' signs.
"""
env = {}
if self.getEnv:
try:
p = self.execute( self.getEnv )
lines = self._readLines( p.getInputStream() )
if '=' not in lines[0]:
print "getEnv command (%s) did not print environment.\n" \
"Output=%s" % (
self.getEnv, '\n'.join( lines )
)
return env
for line in lines:
try:
i = line.index( '=' )
key = self._keyTransform(line[:i])
value = line[i+1:]
except ValueError:
# found no '=', so line is part of previous value
value = '%s\n%s' % ( value, line )
env[ key ] = value
except OSError, ex:
print "Failed to get environment, environ will be empty:", ex
return env
def _getOsType( os=None ):
"""Select the OS behavior based on os argument, 'python.os' registry
setting and 'os.name' Java property.
os: explicitly select desired OS. os=None to autodetect, os='None' to
disable
"""
os = os or sys.registry.getProperty( "python.os" ) or \
java.lang.System.getProperty( "os.name" )
_osTypeMap = (
( "nt", r"(nt)|(Windows NT)|(Windows NT 4.0)|(WindowsNT)|"
r"(Windows 2000)|(Windows XP)|(Windows CE)" ),
( "dos", r"(dos)|(Windows 95)|(Windows 98)|(Windows ME)" ),
( "os2", r"(OS/2)" ),
( "mac", r"(mac)|(MacOS.*)|(Darwin)" ),
( "None", r"(None)" ),
( "posix", r"(.*)" ), # default - posix seems to vary mast widely
)
for osType, pattern in _osTypeMap:
if re.match( pattern, os ):
break
return osType
def _getShellEnv( envType, shellCmd, envCmd, envTransform ):
"""Create the desired environment type.
envType: 'shell' or None
"""
if envType == "shell":
return _ShellEnv( shellCmd, envCmd, envTransform )
else:
return _ShellEnv()
_osType = _getOsType()
_envType = sys.registry.getProperty("python.environment", "shell")
# default to None/empty for shell and environment behavior
_shellCmd = None
_envCmd = None
_envTransform = None
# override defaults based on _osType
if (_osType == "nt") or (_osType == "os2"):
_shellCmd = ["cmd", "/c"]
_envCmd = "set"
_envTransform = string.upper
elif _osType == "dos":
_shellCmd = ["command.com", "/c"]
_envCmd = "set"
_envTransform = string.upper
elif _osType == "posix":
_shellCmd = ["sh", "-c"]
_envCmd = "env"
elif _osType == "mac":
curdir = ':' # override Posix directories
pardir = '::'
elif _osType == "None":
pass
# else:
# # may want a warning, but only at high verbosity:
# warn( "Unknown os type '%s', using default behavior." % _osType )
_shellEnv = _getShellEnv( _envType, _shellCmd, _envCmd, _envTransform )
# provide environ, putenv, getenv
environ = _shellEnv.environment
putenv = environ.__setitem__
getenv = environ.__getitem__
# provide system
system = _shellEnv.system
########## test code
def _testGetOsType():
testVals = {
"Windows NT": "nt",
"Windows 95": "dos",
"MacOS": "mac",
"Solaris": "posix",
"Linux": "posix",
"OS/2": "os2",
"None": "None"
}
msgFmt = "_getOsType( '%s' ) should return '%s', not '%s'"
# test basic mappings
for key, val in testVals.items():
got = _getOsType( key )
assert got == val, msgFmt % ( key, val, got )
def _testCmds( _shellEnv, testCmds, whichEnv ):
# test commands (key) and compare output to expected output (value).
# this actually executes all the commands twice, testing the return
# code by calling system(), and testing some of the output by calling
# execute()
for cmd, pattern in testCmds:
print "\nExecuting '%s' with %s environment" % (cmd, whichEnv)
assert not _shellEnv.system( cmd ), \
"%s failed with %s environment" % (cmd, whichEnv)
line = _shellEnv._readLines(
_shellEnv.execute(cmd).getInputStream())[0]
assert re.match( pattern, line ), \
"expected match for %s, got %s" % ( pattern, line )
def _testSystem( shellEnv=_shellEnv ):
# test system and environment functionality
key, value = "testKey", "testValue"
org = environ
testCmds = [
# test commands and regexes to match first line of expected
# output on first and second runs
# Note that the validation is incomplete for several of these
# - they should validate depending on platform and pre-post, but
# they don't.
# no quotes, should output both words
("echo hello there", "hello there"),
# should print PATH (on NT)
("echo PATH=%PATH%", "(PATH=.*;.*)|(PATH=%PATH%)"),
# should print 'testKey=%testKey%' on NT before initialization,
# should print 'testKey=' on 95 before initialization,
# and 'testKey=testValue' after
("echo %s=%%%s%%" % (key,key),
"(%s=)" % (key,)),
# should print PATH (on Unix)
( "echo PATH=$PATH", "PATH=.*" ),
# should print 'testKey=testValue' on Unix after initialization
( "echo %s=$%s" % (key,key),
"(%s=$%s)|(%s=)|(%s=%s)" % (key, key, key, key, value ) ),
# should output quotes on NT but not on Unix
( 'echo "hello there"', '"?hello there"?' ),
# should print 'why' to stdout.
( r'''jython -c "import sys;sys.stdout.write( 'why\n' )"''', "why" ),
# should print 'why' to stderr, but it won't right now. Have
# to add the print to give some output...empty string matches every
# thing...
( r'''jython -c "import sys;sys.stderr.write('why\n');print " ''',
"" )
]
assert not environ._populated, \
"before population, environ._populated should be false"
_testCmds( _shellEnv, testCmds, "default" )
# trigger initialization of environment
environ[ key ] = value
assert environ._populated, \
"after population, environ._populated should be true"
assert org.get( key, None ) == value, \
"expected stub to have %s set" % key
assert environ.get( key, None ) == value, \
"expected real environment to have %s set" % key
# test system using the non-default environment
_testCmds( _shellEnv, testCmds, "initialized" )
assert environ.has_key( "PATH" ), \
"expected environment to have PATH attribute " \
"(this may not apply to all platforms!)"
def _testBadShell():
# attempt to get an environment with a shell that is not startable
se2 = _ShellEnv( ["badshell", "-c"], "set" )
str(se2.environment) # trigger initialization
assert not se2.environment.items(), "environment should be empty"
def _testBadGetEnv():
# attempt to get an environment with a command that does not print an environment
se2 = _getShellEnv( "shell", _shellCmd, _envCmd, _envTransform )
se2.getEnv="echo This command does not print environment"
str(se2.environment) # trigger initialization
assert not se2.environment.items(), "environment should be empty"
def _test():
_testGetOsType()
_testBadShell()
_testBadGetEnv()
_testSystem()
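########## usage sketch
# Minimal usage sketch, assuming this module is imported as a drop-in 'os'
# replacement under Jython. Illustration only; not called by the tests above.
def _exampleUsage():
    # environ is a LazyDict: the first access spawns the platform subshell
    # via _ShellEnv._getEnvironment and parses its 'key=value' output.
    print "PATH =", environ.get( "PATH", "<unset>" )
    # putenv/getenv delegate to the same LazyDict instance
    putenv( "EXAMPLE_KEY", "example_value" )
    print "EXAMPLE_KEY =", getenv( "EXAMPLE_KEY" )
    # system() runs the command through the per-platform shell (_shellCmd)
    # and returns the exit status, like os.system
    return system( "echo hello from the subshell" )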
|
|
#!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test behavior of headers messages to announce blocks.
Setup:
- Two nodes:
- node0 is the node-under-test. We create two p2p connections to it. The
first p2p connection is a control and should only ever receive inv's. The
second p2p connection tests the headers sending logic.
- node1 is used to create reorgs.
test_null_locators
==================
Sends two getheaders requests with null locator values. The first request's hashstop
value refers to a validated block, while the second request's hashstop value refers to
a block which hasn't been validated. Verifies only the first request returns
headers.
test_nonnull_locators
=====================
Part 1: No headers announcements before "sendheaders"
a. node mines a block [expect: inv]
send getdata for the block [expect: block]
b. node mines another block [expect: inv]
send getheaders and getdata [expect: headers, then block]
c. node mines another block [expect: inv]
peer mines a block, announces with header [expect: getdata]
d. node mines another block [expect: inv]
Part 2: After "sendheaders", headers announcements should generally work.
a. peer sends sendheaders [expect: no response]
peer sends getheaders with current tip [expect: no response]
b. node mines a block [expect: tip header]
c. for N in 1, ..., 10:
* for announce-type in {inv, header}
- peer mines N blocks, announces with announce-type
[ expect: getheaders/getdata or getdata, deliver block(s) ]
- node mines a block [ expect: 1 header ]
Part 3: Headers announcements stop after large reorg and resume after getheaders or inv from peer.
- For response-type in {inv, getheaders}
* node mines a 7 block reorg [ expect: headers announcement of 8 blocks ]
* node mines an 8-block reorg [ expect: inv at tip ]
* peer responds with getblocks/getdata [expect: inv, blocks ]
* node mines another block [ expect: inv at tip, peer sends getdata, expect: block ]
* node mines another block at tip [ expect: inv ]
* peer responds with getheaders with an old hashstop more than 8 blocks back [expect: headers]
* peer requests block [ expect: block ]
* node mines another block at tip [ expect: inv, peer sends getdata, expect: block ]
* peer sends response-type [expect headers if getheaders, getheaders/getdata if mining new block]
* node mines 1 block [expect: 1 header, peer responds with getdata]
Part 4: Test direct fetch behavior
a. Announce 2 old block headers.
Expect: no getdata requests.
b. Announce 3 new blocks via 1 headers message.
Expect: one getdata request for all 3 blocks.
(Send blocks.)
c. Announce 1 header that forks off the last two blocks.
Expect: no response.
d. Announce 1 more header that builds on that fork.
Expect: one getdata request for two blocks.
e. Announce 16 more headers that build on that fork.
Expect: getdata request for 14 more blocks.
f. Announce 1 more header that builds on that fork.
Expect: no response.
Part 5: Test handling of headers that don't connect.
a. Repeat 10 times:
1. Announce a header that doesn't connect.
Expect: getheaders message
2. Send headers chain.
Expect: getdata for the missing blocks, tip update.
b. Then send 9 more headers that don't connect.
Expect: getheaders message each time.
c. Announce a header that does connect.
Expect: no response.
d. Announce 49 headers that don't connect.
Expect: getheaders message each time.
e. Announce one more that doesn't connect.
Expect: disconnect.
"""
from test_framework.blocktools import create_block, create_coinbase
from test_framework.messages import CInv
from test_framework.mininode import (
CBlockHeader,
NODE_WITNESS,
P2PInterface,
mininode_lock,
msg_block,
msg_getblocks,
msg_getdata,
msg_getheaders,
msg_headers,
msg_inv,
msg_sendheaders,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
wait_until,
)
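# Maximum time (in seconds) to wait for the node's direct-fetch getdata after
# a qualifying headers announcement (exercised in Part 4 below).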
DIRECT_FETCH_RESPONSE_TIME = 0.05
class BaseNode(P2PInterface):
def __init__(self):
super().__init__()
self.block_announced = False
self.last_blockhash_announced = None
self.recent_headers_announced = []
def send_get_data(self, block_hashes):
"""Request data for a list of block hashes."""
msg = msg_getdata()
for x in block_hashes:
msg.inv.append(CInv(2, x))
self.send_message(msg)
def send_get_headers(self, locator, hashstop):
msg = msg_getheaders()
msg.locator.vHave = locator
msg.hashstop = hashstop
self.send_message(msg)
def send_block_inv(self, blockhash):
msg = msg_inv()
msg.inv = [CInv(2, blockhash)]
self.send_message(msg)
def send_header_for_blocks(self, new_blocks):
headers_message = msg_headers()
headers_message.headers = [CBlockHeader(b) for b in new_blocks]
self.send_message(headers_message)
def send_getblocks(self, locator):
getblocks_message = msg_getblocks()
getblocks_message.locator.vHave = locator
self.send_message(getblocks_message)
def wait_for_getdata(self, hash_list, timeout=60):
if hash_list == []:
return
test_function = lambda: "getdata" in self.last_message and [x.hash for x in self.last_message["getdata"].inv] == hash_list
wait_until(test_function, timeout=timeout, lock=mininode_lock)
def wait_for_block_announcement(self, block_hash, timeout=60):
test_function = lambda: self.last_blockhash_announced == block_hash
wait_until(test_function, timeout=timeout, lock=mininode_lock)
def on_inv(self, message):
self.block_announced = True
self.last_blockhash_announced = message.inv[-1].hash
def on_headers(self, message):
if len(message.headers):
self.block_announced = True
for x in message.headers:
x.calc_sha256()
# append because headers may be announced over multiple messages.
self.recent_headers_announced.append(x.sha256)
self.last_blockhash_announced = message.headers[-1].sha256
def clear_block_announcements(self):
with mininode_lock:
self.block_announced = False
self.last_message.pop("inv", None)
self.last_message.pop("headers", None)
self.recent_headers_announced = []
def check_last_headers_announcement(self, headers):
"""Test whether the last headers announcements received are right.
Headers may be announced across more than one message."""
test_function = lambda: (len(self.recent_headers_announced) >= len(headers))
wait_until(test_function, timeout=60, lock=mininode_lock)
with mininode_lock:
assert_equal(self.recent_headers_announced, headers)
self.block_announced = False
self.last_message.pop("headers", None)
self.recent_headers_announced = []
def check_last_inv_announcement(self, inv):
"""Test whether the last announcement received had the right inv.
inv should be a list of block hashes."""
test_function = lambda: self.block_announced
wait_until(test_function, timeout=60, lock=mininode_lock)
with mininode_lock:
compare_inv = []
if "inv" in self.last_message:
compare_inv = [x.hash for x in self.last_message["inv"].inv]
assert_equal(compare_inv, inv)
self.block_announced = False
self.last_message.pop("inv", None)
class SendHeadersTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def mine_blocks(self, count):
"""Mine count blocks and return the new tip."""
# Clear out block announcements from each p2p listener
[x.clear_block_announcements() for x in self.nodes[0].p2ps]
self.nodes[0].generatetoaddress(count, self.nodes[0].get_deterministic_priv_key().address)
return int(self.nodes[0].getbestblockhash(), 16)
def mine_reorg(self, length):
"""Mine a reorg that invalidates length blocks (replacing them with # length+1 blocks).
Note: we clear the state of our p2p connections after the
to-be-reorged-out blocks are mined, so that we don't break later tests.
return the list of block hashes newly mined."""
# make sure all invalidated blocks are node0's
self.nodes[0].generatetoaddress(length, self.nodes[0].get_deterministic_priv_key().address)
self.sync_blocks(self.nodes, wait=0.1)
for x in self.nodes[0].p2ps:
x.wait_for_block_announcement(int(self.nodes[0].getbestblockhash(), 16))
x.clear_block_announcements()
tip_height = self.nodes[1].getblockcount()
hash_to_invalidate = self.nodes[1].getblockhash(tip_height - (length - 1))
self.nodes[1].invalidateblock(hash_to_invalidate)
all_hashes = self.nodes[1].generatetoaddress(length + 1, self.nodes[1].get_deterministic_priv_key().address) # Must be longer than the orig chain
self.sync_blocks(self.nodes, wait=0.1)
return [int(x, 16) for x in all_hashes]
def run_test(self):
# Setup the p2p connections
inv_node = self.nodes[0].add_p2p_connection(BaseNode())
# Make sure NODE_NETWORK is not set for test_node, so no block download
# will occur outside of direct fetching
test_node = self.nodes[0].add_p2p_connection(BaseNode(), services=NODE_WITNESS)
self.test_null_locators(test_node, inv_node)
self.test_nonnull_locators(test_node, inv_node)
def test_null_locators(self, test_node, inv_node):
tip = self.nodes[0].getblockheader(self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address)[0])
tip_hash = int(tip["hash"], 16)
inv_node.check_last_inv_announcement(inv=[tip_hash])
test_node.check_last_inv_announcement(inv=[tip_hash])
self.log.info("Verify getheaders with null locator and valid hashstop returns headers.")
test_node.clear_block_announcements()
test_node.send_get_headers(locator=[], hashstop=tip_hash)
test_node.check_last_headers_announcement(headers=[tip_hash])
self.log.info("Verify getheaders with null locator and invalid hashstop does not return headers.")
block = create_block(int(tip["hash"], 16), create_coinbase(tip["height"] + 1), tip["mediantime"] + 1)
block.solve()
test_node.send_header_for_blocks([block])
test_node.clear_block_announcements()
test_node.send_get_headers(locator=[], hashstop=int(block.hash, 16))
test_node.sync_with_ping()
assert_equal(test_node.block_announced, False)
inv_node.clear_block_announcements()
test_node.send_message(msg_block(block))
inv_node.check_last_inv_announcement(inv=[int(block.hash, 16)])
def test_nonnull_locators(self, test_node, inv_node):
tip = int(self.nodes[0].getbestblockhash(), 16)
# PART 1
# 1. Mine a block; expect inv announcements each time
self.log.info("Part 1: headers don't start before sendheaders message...")
for i in range(4):
self.log.debug("Part 1.{}: starting...".format(i))
old_tip = tip
tip = self.mine_blocks(1)
inv_node.check_last_inv_announcement(inv=[tip])
test_node.check_last_inv_announcement(inv=[tip])
# Try a few different responses; none should affect next announcement
if i == 0:
# first request the block
test_node.send_get_data([tip])
test_node.wait_for_block(tip)
elif i == 1:
# next try requesting header and block
test_node.send_get_headers(locator=[old_tip], hashstop=tip)
test_node.send_get_data([tip])
test_node.wait_for_block(tip)
test_node.clear_block_announcements() # since we requested headers...
elif i == 2:
# this time announce own block via headers
inv_node.clear_block_announcements()
height = self.nodes[0].getblockcount()
last_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time']
block_time = last_time + 1
new_block = create_block(tip, create_coinbase(height + 1), block_time)
new_block.solve()
test_node.send_header_for_blocks([new_block])
test_node.wait_for_getdata([new_block.sha256])
test_node.send_and_ping(msg_block(new_block)) # make sure this block is processed
wait_until(lambda: inv_node.block_announced, timeout=60, lock=mininode_lock)
inv_node.clear_block_announcements()
test_node.clear_block_announcements()
self.log.info("Part 1: success!")
self.log.info("Part 2: announce blocks with headers after sendheaders message...")
# PART 2
# 2. Send a sendheaders message and test that headers announcements
# commence and keep working.
test_node.send_message(msg_sendheaders())
prev_tip = int(self.nodes[0].getbestblockhash(), 16)
test_node.send_get_headers(locator=[prev_tip], hashstop=0)
test_node.sync_with_ping()
# Now that we've synced headers, headers announcements should work
tip = self.mine_blocks(1)
inv_node.check_last_inv_announcement(inv=[tip])
test_node.check_last_headers_announcement(headers=[tip])
height = self.nodes[0].getblockcount() + 1
block_time += 10 # Advance far enough ahead
for i in range(10):
self.log.debug("Part 2.{}: starting...".format(i))
# Mine i blocks, and alternate announcing either via
# inv (of tip) or via headers. After each, new blocks
# mined by the node should successfully be announced
# with block header, even though the blocks are never requested
for j in range(2):
self.log.debug("Part 2.{}.{}: starting...".format(i, j))
blocks = []
for b in range(i + 1):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
if j == 0:
# Announce via inv
test_node.send_block_inv(tip)
test_node.wait_for_getheaders()
# Should have received a getheaders now
test_node.send_header_for_blocks(blocks)
# Test that duplicate inv's won't result in duplicate
# getdata requests, or duplicate headers announcements
[inv_node.send_block_inv(x.sha256) for x in blocks]
test_node.wait_for_getdata([x.sha256 for x in blocks])
inv_node.sync_with_ping()
else:
# Announce via headers
test_node.send_header_for_blocks(blocks)
test_node.wait_for_getdata([x.sha256 for x in blocks])
# Test that duplicate headers won't result in duplicate
# getdata requests (the check is further down)
inv_node.send_header_for_blocks(blocks)
inv_node.sync_with_ping()
[test_node.send_message(msg_block(x)) for x in blocks]
test_node.sync_with_ping()
inv_node.sync_with_ping()
# This block should not be announced to the inv node (since it also
# broadcast it)
assert "inv" not in inv_node.last_message
assert "headers" not in inv_node.last_message
tip = self.mine_blocks(1)
inv_node.check_last_inv_announcement(inv=[tip])
test_node.check_last_headers_announcement(headers=[tip])
height += 1
block_time += 1
self.log.info("Part 2: success!")
self.log.info("Part 3: headers announcements can stop after large reorg, and resume after headers/inv from peer...")
# PART 3. Headers announcements can stop after large reorg, and resume after
# getheaders or inv from peer.
for j in range(2):
self.log.debug("Part 3.{}: starting...".format(j))
# First try mining a reorg that can propagate with header announcement
new_block_hashes = self.mine_reorg(length=7)
tip = new_block_hashes[-1]
inv_node.check_last_inv_announcement(inv=[tip])
test_node.check_last_headers_announcement(headers=new_block_hashes)
block_time += 8
# Mine a too-large reorg, which should be announced with a single inv
new_block_hashes = self.mine_reorg(length=8)
tip = new_block_hashes[-1]
inv_node.check_last_inv_announcement(inv=[tip])
test_node.check_last_inv_announcement(inv=[tip])
block_time += 9
fork_point = self.nodes[0].getblock("%064x" % new_block_hashes[0])["previousblockhash"]
fork_point = int(fork_point, 16)
# Use getblocks/getdata
test_node.send_getblocks(locator=[fork_point])
test_node.check_last_inv_announcement(inv=new_block_hashes)
test_node.send_get_data(new_block_hashes)
test_node.wait_for_block(new_block_hashes[-1])
for i in range(3):
self.log.debug("Part 3.{}.{}: starting...".format(j, i))
# Mine another block, still should get only an inv
tip = self.mine_blocks(1)
inv_node.check_last_inv_announcement(inv=[tip])
test_node.check_last_inv_announcement(inv=[tip])
if i == 0:
# Just get the data -- shouldn't cause headers announcements to resume
test_node.send_get_data([tip])
test_node.wait_for_block(tip)
elif i == 1:
# Send a getheaders message that shouldn't trigger headers announcements
# to resume (best header sent will be too old)
test_node.send_get_headers(locator=[fork_point], hashstop=new_block_hashes[1])
test_node.send_get_data([tip])
test_node.wait_for_block(tip)
elif i == 2:
# This time, try sending either a getheaders to trigger resumption
# of headers announcements, or mine a new block and inv it, also
# triggering resumption of headers announcements.
test_node.send_get_data([tip])
test_node.wait_for_block(tip)
if j == 0:
test_node.send_get_headers(locator=[tip], hashstop=0)
test_node.sync_with_ping()
else:
test_node.send_block_inv(tip)
test_node.sync_with_ping()
# New blocks should now be announced with header
tip = self.mine_blocks(1)
inv_node.check_last_inv_announcement(inv=[tip])
test_node.check_last_headers_announcement(headers=[tip])
self.log.info("Part 3: success!")
self.log.info("Part 4: Testing direct fetch behavior...")
tip = self.mine_blocks(1)
height = self.nodes[0].getblockcount() + 1
last_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time']
block_time = last_time + 1
# Create 2 blocks. Send the blocks, then send the headers.
blocks = []
for b in range(2):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
inv_node.send_message(msg_block(blocks[-1]))
inv_node.sync_with_ping() # Make sure blocks are processed
test_node.last_message.pop("getdata", None)
test_node.send_header_for_blocks(blocks)
test_node.sync_with_ping()
# should not have received any getdata messages
with mininode_lock:
assert "getdata" not in test_node.last_message
# This time, direct fetch should work
blocks = []
for b in range(3):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
test_node.send_header_for_blocks(blocks)
test_node.sync_with_ping()
test_node.wait_for_getdata([x.sha256 for x in blocks], timeout=DIRECT_FETCH_RESPONSE_TIME)
[test_node.send_message(msg_block(x)) for x in blocks]
test_node.sync_with_ping()
# Now announce a header that forks the last two blocks
tip = blocks[0].sha256
height -= 2
blocks = []
# Create extra blocks for later
for b in range(20):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
# Announcing one block on fork should not trigger direct fetch
# (less work than tip)
test_node.last_message.pop("getdata", None)
test_node.send_header_for_blocks(blocks[0:1])
test_node.sync_with_ping()
with mininode_lock:
assert "getdata" not in test_node.last_message
# Announcing one more block on fork should trigger direct fetch for
# both blocks (same work as tip)
test_node.send_header_for_blocks(blocks[1:2])
test_node.sync_with_ping()
test_node.wait_for_getdata([x.sha256 for x in blocks[0:2]], timeout=DIRECT_FETCH_RESPONSE_TIME)
# Announcing 16 more headers should trigger direct fetch for 14 more
# blocks
test_node.send_header_for_blocks(blocks[2:18])
test_node.sync_with_ping()
test_node.wait_for_getdata([x.sha256 for x in blocks[2:16]], timeout=DIRECT_FETCH_RESPONSE_TIME)
# Announcing 1 more header should not trigger any response
test_node.last_message.pop("getdata", None)
test_node.send_header_for_blocks(blocks[18:19])
test_node.sync_with_ping()
with mininode_lock:
assert "getdata" not in test_node.last_message
self.log.info("Part 4: success!")
# Now deliver all those blocks we announced.
[test_node.send_message(msg_block(x)) for x in blocks]
self.log.info("Part 5: Testing handling of unconnecting headers")
# First we test that receipt of an unconnecting header doesn't prevent
# chain sync.
for i in range(10):
self.log.debug("Part 5.{}: starting...".format(i))
test_node.last_message.pop("getdata", None)
blocks = []
# Create two more blocks.
for j in range(2):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
# Send the header of the second block -> this won't connect.
with mininode_lock:
test_node.last_message.pop("getheaders", None)
test_node.send_header_for_blocks([blocks[1]])
test_node.wait_for_getheaders()
test_node.send_header_for_blocks(blocks)
test_node.wait_for_getdata([x.sha256 for x in blocks])
[test_node.send_message(msg_block(x)) for x in blocks]
test_node.sync_with_ping()
assert_equal(int(self.nodes[0].getbestblockhash(), 16), blocks[1].sha256)
blocks = []
# Now we test that if we repeatedly don't send connecting headers, we
# don't go into an infinite loop trying to get them to connect.
MAX_UNCONNECTING_HEADERS = 10
for j in range(MAX_UNCONNECTING_HEADERS + 1):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
for i in range(1, MAX_UNCONNECTING_HEADERS):
# Send a header that doesn't connect, check that we get a getheaders.
with mininode_lock:
test_node.last_message.pop("getheaders", None)
test_node.send_header_for_blocks([blocks[i]])
test_node.wait_for_getheaders()
# Next header will connect, should re-set our count:
test_node.send_header_for_blocks([blocks[0]])
# Remove the first two entries (blocks[1] would connect):
blocks = blocks[2:]
# Now try to see how many unconnecting headers we can send
# before we get disconnected. Should be 5*MAX_UNCONNECTING_HEADERS
for i in range(5 * MAX_UNCONNECTING_HEADERS - 1):
# Send a header that doesn't connect, check that we get a getheaders.
with mininode_lock:
test_node.last_message.pop("getheaders", None)
test_node.send_header_for_blocks([blocks[i % len(blocks)]])
test_node.wait_for_getheaders()
# Eventually this stops working.
test_node.send_header_for_blocks([blocks[-1]])
# Should get disconnected
test_node.wait_for_disconnect()
self.log.info("Part 5: success!")
# Finally, check that the inv node never received a getdata request,
# throughout the test
assert "getdata" not in inv_node.last_message
if __name__ == '__main__':
SendHeadersTest().main()
|
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import inspect
import logging
from typing import Dict, List, Optional, Union
from ray.tune.result import DEFAULT_METRIC
from ray.tune.sample import Domain, Float, Quantized
from ray.tune.suggest.suggestion import UNRESOLVED_SEARCH_SPACE, \
UNDEFINED_METRIC_MODE, UNDEFINED_SEARCH_SPACE
from ray.tune.suggest.variant_generator import parse_spec_vars
from ray.tune.utils.util import flatten_dict, is_nan_or_inf, unflatten_dict
try: # Python 3 only -- needed for lint test.
import dragonfly
from dragonfly.opt.blackbox_optimiser import BlackboxOptimiser
except ImportError:
dragonfly = None
BlackboxOptimiser = None
from ray.tune.suggest.suggestion import Searcher
logger = logging.getLogger(__name__)
class DragonflySearch(Searcher):
"""Uses Dragonfly to optimize hyperparameters.
Dragonfly provides an array of tools to scale up Bayesian optimisation to
    expensive large scale problems, including high dimensional optimisation,
parallel evaluations in synchronous or asynchronous settings,
multi-fidelity optimisation (using cheap approximations to speed up the
optimisation process), and multi-objective optimisation. For more info:
* Dragonfly Website: https://github.com/dragonfly/dragonfly
* Dragonfly Documentation: https://dragonfly-opt.readthedocs.io/
To use this search algorithm, install Dragonfly:
.. code-block:: bash
$ pip install dragonfly-opt
This interface requires using FunctionCallers and optimizers provided by
Dragonfly.
This searcher will automatically filter out any NaN, inf or -inf
results.
Parameters:
optimizer (dragonfly.opt.BlackboxOptimiser|str): Optimizer provided
from dragonfly. Choose an optimiser that extends BlackboxOptimiser.
If this is a string, `domain` must be set and `optimizer` must be
one of [random, bandit, genetic].
domain (str): Optional domain. Should only be set if you don't pass
an optimizer as the `optimizer` argument.
Has to be one of [cartesian, euclidean].
space (list|dict): Search space. Should only be set if you don't pass
an optimizer as the `optimizer` argument. Defines the search space
and requires a `domain` to be set. Can be automatically converted
from the `config` dict passed to `tune.run()`.
metric (str): The training result objective value attribute. If None
but a mode was passed, the anonymous metric `_metric` will be used
per default.
mode (str): One of {min, max}. Determines whether objective is
minimizing or maximizing the metric attribute.
points_to_evaluate (list): Initial parameter suggestions to be run
first. This is for when you already have some good parameters
you want to run first to help the algorithm make better suggestions
for future parameters. Needs to be a list of dicts containing the
configurations.
evaluated_rewards (list): If you have previously evaluated the
parameters passed in as points_to_evaluate you can avoid
re-running those trials by passing in the reward attributes
as a list so the optimiser can be told the results without
needing to re-compute the trial. Must be the same length as
points_to_evaluate.
Tune automatically converts search spaces to Dragonfly's format:
.. code-block:: python
from ray import tune
config = {
"LiNO3_vol": tune.uniform(0, 7),
"Li2SO4_vol": tune.uniform(0, 7),
"NaClO4_vol": tune.uniform(0, 7)
}
df_search = DragonflySearch(
optimizer="bandit",
domain="euclidean",
metric="objective",
mode="max")
tune.run(my_func, config=config, search_alg=df_search)
If you would like to pass the search space/optimizer manually,
the code would look like this:
.. code-block:: python
from ray import tune
space = [{
"name": "LiNO3_vol",
"type": "float",
"min": 0,
"max": 7
}, {
"name": "Li2SO4_vol",
"type": "float",
"min": 0,
"max": 7
}, {
"name": "NaClO4_vol",
"type": "float",
"min": 0,
"max": 7
}]
df_search = DragonflySearch(
optimizer="bandit",
domain="euclidean",
space=space,
metric="objective",
mode="max")
tune.run(my_func, search_alg=df_search)
"""
def __init__(self,
optimizer: Optional[Union[str, BlackboxOptimiser]] = None,
domain: Optional[str] = None,
space: Optional[Union[Dict, List[Dict]]] = None,
metric: Optional[str] = None,
mode: Optional[str] = None,
points_to_evaluate: Optional[List[Dict]] = None,
evaluated_rewards: Optional[List] = None,
**kwargs):
assert dragonfly is not None, """dragonfly must be installed!
You can install Dragonfly with the command:
`pip install dragonfly-opt`."""
if mode:
assert mode in ["min", "max"], "`mode` must be 'min' or 'max'."
super(DragonflySearch, self).__init__(
metric=metric, mode=mode, **kwargs)
self._opt_arg = optimizer
self._domain = domain
if isinstance(space, dict) and space:
resolved_vars, domain_vars, grid_vars = parse_spec_vars(space)
if domain_vars or grid_vars:
logger.warning(
UNRESOLVED_SEARCH_SPACE.format(
par="space", cls=type(self)))
space = self.convert_search_space(space)
self._space = space
self._points_to_evaluate = points_to_evaluate
self._evaluated_rewards = evaluated_rewards
self._initial_points = []
self._live_trial_mapping = {}
self._point_parameter_names = []
self._opt = None
if isinstance(optimizer, BlackboxOptimiser):
if domain or space:
raise ValueError(
"If you pass an optimizer instance to dragonfly, do not "
"pass a `domain` or `space`.")
self._opt = optimizer
self.init_dragonfly()
elif self._space:
self._setup_dragonfly()
def _setup_dragonfly(self):
"""Setup dragonfly when no optimizer has been passed."""
assert not self._opt, "Optimizer already set."
from dragonfly import load_config
from dragonfly.exd.experiment_caller import CPFunctionCaller, \
EuclideanFunctionCaller
from dragonfly.opt.blackbox_optimiser import BlackboxOptimiser
from dragonfly.opt.random_optimiser import CPRandomOptimiser, \
EuclideanRandomOptimiser
from dragonfly.opt.cp_ga_optimiser import CPGAOptimiser
from dragonfly.opt.gp_bandit import CPGPBandit, EuclideanGPBandit
if not self._space:
raise ValueError(
"You have to pass a `space` when initializing dragonfly, or "
"pass a search space definition to the `config` parameter "
"of `tune.run()`.")
if not self._domain:
raise ValueError(
"You have to set a `domain` when initializing dragonfly. "
"Choose one of [Cartesian, Euclidean].")
self._point_parameter_names = [param["name"] for param in self._space]
if self._domain.lower().startswith("cartesian"):
function_caller_cls = CPFunctionCaller
elif self._domain.lower().startswith("euclidean"):
function_caller_cls = EuclideanFunctionCaller
else:
raise ValueError("Dragonfly's `domain` argument must be one of "
"[Cartesian, Euclidean].")
optimizer_cls = None
if inspect.isclass(self._opt_arg) and issubclass(
self._opt_arg, BlackboxOptimiser):
optimizer_cls = self._opt_arg
elif isinstance(self._opt_arg, str):
if self._opt_arg.lower().startswith("random"):
if function_caller_cls == CPFunctionCaller:
optimizer_cls = CPRandomOptimiser
else:
optimizer_cls = EuclideanRandomOptimiser
elif self._opt_arg.lower().startswith("bandit"):
if function_caller_cls == CPFunctionCaller:
optimizer_cls = CPGPBandit
else:
optimizer_cls = EuclideanGPBandit
elif self._opt_arg.lower().startswith("genetic"):
if function_caller_cls == CPFunctionCaller:
optimizer_cls = CPGAOptimiser
else:
raise ValueError(
"Currently only the `cartesian` domain works with "
"the `genetic` optimizer.")
else:
raise ValueError(
"Invalid optimizer specification. Either pass a full "
"dragonfly optimizer, or a string "
"in [random, bandit, genetic].")
assert optimizer_cls, "No optimizer could be determined."
domain_config = load_config({"domain": self._space})
function_caller = function_caller_cls(
None, domain_config.domain.list_of_domains[0])
self._opt = optimizer_cls(function_caller, ask_tell_mode=True)
self.init_dragonfly()
def init_dragonfly(self):
if self._points_to_evaluate:
points_to_evaluate = [[
config[par] for par in self._point_parameter_names
] for config in self._points_to_evaluate]
else:
points_to_evaluate = None
self._opt.initialise()
if points_to_evaluate and self._evaluated_rewards:
self._opt.tell([(points_to_evaluate, self._evaluated_rewards)])
elif points_to_evaluate:
self._initial_points = points_to_evaluate
# Dragonfly internally maximizes, so "min" => -1
if self._mode == "min":
self._metric_op = -1.
elif self._mode == "max":
self._metric_op = 1.
if self._metric is None and self._mode:
# If only a mode was passed, use anonymous metric
self._metric = DEFAULT_METRIC
def set_search_properties(self, metric: Optional[str], mode: Optional[str],
config: Dict) -> bool:
if self._opt:
return False
space = self.convert_search_space(config)
self._space = space
if metric:
self._metric = metric
if mode:
self._mode = mode
self._setup_dragonfly()
return True
def suggest(self, trial_id: str) -> Optional[Dict]:
if not self._opt:
raise RuntimeError(
UNDEFINED_SEARCH_SPACE.format(
cls=self.__class__.__name__, space="space"))
if not self._metric or not self._mode:
raise RuntimeError(
UNDEFINED_METRIC_MODE.format(
cls=self.__class__.__name__,
metric=self._metric,
mode=self._mode))
if self._initial_points:
suggested_config = self._initial_points[0]
del self._initial_points[0]
else:
try:
suggested_config = self._opt.ask()
except Exception as exc:
logger.warning(
"Dragonfly errored when querying. This may be due to a "
"higher level of parallelism than supported. Try reducing "
"parallelism in the experiment: %s", str(exc))
return None
self._live_trial_mapping[trial_id] = suggested_config
config = dict(zip(self._point_parameter_names, suggested_config))
# Keep backwards compatibility
config.update(point=suggested_config)
return unflatten_dict(config)
def on_trial_complete(self,
trial_id: str,
result: Optional[Dict] = None,
error: bool = False):
"""Passes result to Dragonfly unless early terminated or errored."""
trial_info = self._live_trial_mapping.pop(trial_id)
if result and not is_nan_or_inf(result[self._metric]):
self._opt.tell([(trial_info,
self._metric_op * result[self._metric])])
@staticmethod
def convert_search_space(spec: Dict) -> List[Dict]:
resolved_vars, domain_vars, grid_vars = parse_spec_vars(spec)
if grid_vars:
raise ValueError(
"Grid search parameters cannot be automatically converted "
"to a Dragonfly search space.")
# Flatten and resolve again after checking for grid search.
spec = flatten_dict(spec, prevent_delimiter=True)
resolved_vars, domain_vars, grid_vars = parse_spec_vars(spec)
def resolve_value(par: str, domain: Domain) -> Dict:
sampler = domain.get_sampler()
if isinstance(sampler, Quantized):
logger.warning(
"Dragonfly search does not support quantization. "
"Dropped quantization.")
sampler = sampler.get_sampler()
if isinstance(domain, Float):
if domain.sampler is not None:
logger.warning(
"Dragonfly does not support specific sampling methods."
" The {} sampler will be dropped.".format(sampler))
return {
"name": par,
"type": "float",
"min": domain.lower,
"max": domain.upper
}
raise ValueError("Dragonfly does not support parameters of type "
"`{}`".format(type(domain).__name__))
# Parameter name is e.g. "a/b/c" for nested dicts
space = [
resolve_value("/".join(path), domain)
for path, domain in domain_vars
]
return space
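# A minimal sketch (illustration only, not part of the searcher API): what
# DragonflySearch.convert_search_space produces for a simple Tune config.
def _example_convert_search_space():
    from ray import tune  # assumes Ray Tune is installed
    space = DragonflySearch.convert_search_space({
        "LiNO3_vol": tune.uniform(0, 7),
    })
    # Expected shape:
    # [{"name": "LiNO3_vol", "type": "float", "min": 0, "max": 7}]
    return space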
|
|
# -*- coding: utf-8 -*-
from django.contrib.auth import get_user_model
from django.contrib.auth.models import AnonymousUser, Group
from django.contrib.sites.models import Site
from django.test.utils import override_settings
from cms.api import create_page
from cms.cms_menus import get_visible_pages
from cms.models import Page
from cms.models import ACCESS_DESCENDANTS, ACCESS_CHILDREN, ACCESS_PAGE
from cms.models import ACCESS_PAGE_AND_CHILDREN, ACCESS_PAGE_AND_DESCENDANTS
from cms.models.permissionmodels import GlobalPagePermission, PagePermission
from cms.test_utils.testcases import CMSTestCase
from cms.utils.page_permissions import user_can_view_page
from menus.menu_pool import menu_pool
__all__ = [
'ViewPermissionTreeBugTests',
'ViewPermissionComplexMenuAllNodesTests'
]
class ViewPermissionTests(CMSTestCase):
"""
    Test various combinations of view permissions on pages and menus.
    Focus on the different grant types and inheritance options of grant_on.
Given the tree:
|- Page_a
|- Page_b
| |- Page_b_a
| |- Page_b_b
| | |- Page_b_b_a
| | | |- Page_b_b_a_a
| | |- Page_b_b_b
| | |- Page_b_b_c
| |- Page_b_c
| |- Page_b_d
| | |- Page_b_d_a
| | |- Page_b_d_b
| | |- Page_b_d_c
|- Page_c
| |- Page_c_a
| |- Page_c_b
|- Page_d
| |- Page_d_a
| |- Page_d_b
| |- Page_d_c
"""
GROUPNAME_1 = 'group_b_ACCESS_PAGE_AND_CHILDREN'
GROUPNAME_2 = 'group_b_b_ACCESS_CHILDREN'
GROUPNAME_3 = 'group_b_ACCESS_PAGE_AND_DESCENDANTS'
GROUPNAME_4 = 'group_b_b_ACCESS_DESCENDANTS'
GROUPNAME_5 = 'group_d_ACCESS_PAGE'
def setUp(self):
self.site = Site()
self.site.pk = 1
super(ViewPermissionTests, self).setUp()
def tearDown(self):
super(ViewPermissionTests, self).tearDown()
def _setup_tree_pages(self):
stdkwargs = {
'template': 'nav_playground.html',
'language': 'en',
'published': True,
'in_navigation': True,
}
page_a = create_page("page_a", **stdkwargs) # first page slug is /
page_b = create_page("page_b", **stdkwargs)
page_c = create_page("page_c", **stdkwargs)
page_d = create_page("page_d", **stdkwargs)
page_b_a = create_page("page_b_a", parent=page_b, **stdkwargs)
page_b_b = create_page("page_b_b", parent=page_b, **stdkwargs)
page_b_b_a = create_page("page_b_b_a", parent=page_b_b, **stdkwargs)
page_b_b_b = create_page("page_b_b_b", parent=page_b_b, **stdkwargs)
page_b_b_c = create_page("page_b_b_c", parent=page_b_b, **stdkwargs)
page_b_b_a_a = create_page("page_b_b_a_a", parent=page_b_b_a, **stdkwargs)
page_b_c = create_page("page_b_c", parent=page_b, **stdkwargs)
page_b_d = create_page("page_b_d", parent=page_b, **stdkwargs)
page_b_d_a = create_page("page_b_d_a", parent=page_b_d, **stdkwargs)
page_b_d_b = create_page("page_b_d_b", parent=page_b_d, **stdkwargs)
page_b_d_c = create_page("page_b_d_c", parent=page_b_d, **stdkwargs)
page_c_a = create_page("page_c_a", parent=page_c, **stdkwargs)
page_c_b = create_page("page_c_b", parent=page_c, **stdkwargs)
page_d_a = create_page("page_d_a", parent=page_d, **stdkwargs)
page_d_b = create_page("page_d_b", parent=page_d, **stdkwargs)
page_d_c = create_page("page_d_c", parent=page_d, **stdkwargs)
page_d_d = create_page("page_d_d", parent=page_d, **stdkwargs)
pages = [
page_a,
page_b,
page_b_a,
page_b_b,
page_b_b_a,
page_b_b_a_a,
page_b_b_b,
page_b_b_c,
page_b_c,
page_b_d,
page_b_d_a,
page_b_d_b,
page_b_d_c,
page_c,
page_c_a,
page_c_b,
page_d,
page_d_a,
page_d_b,
page_d_c,
page_d_d,
]
new_pages = []
for page in pages:
new_pages.append(page.reload())
return new_pages
def _setup_user_groups(self):
"""
        Set up a group for every grant_on ACCESS type
"""
userdata = [
('user_1', True, self.GROUPNAME_1),
('user_1_nostaff', False, self.GROUPNAME_1),
('user_2', True, self.GROUPNAME_2),
('user_2_nostaff', False, self.GROUPNAME_2),
('user_3', True, self.GROUPNAME_3),
('user_3_nostaff', False, self.GROUPNAME_3),
('user_4', True, self.GROUPNAME_4),
('user_4_nostaff', False, self.GROUPNAME_4),
('user_5', True, self.GROUPNAME_5),
('user_5_nostaff', False, self.GROUPNAME_5),
('user_staff', True, None),
]
default_users_count = get_user_model().objects.all().count()
for username, is_staff, groupname in userdata:
user = self._create_user(username, is_staff)
if groupname:
group, _ = Group.objects.get_or_create(name=groupname)
user_set = getattr(group, 'user_set')
user_set.add(user)
group.save()
self.assertEqual(11, get_user_model().objects.all().count()-default_users_count)
def _setup_view_restrictions(self):
"""
        Set up a view restriction for every type of grant_on ACCESS_*
"""
data = [("page_b", self.GROUPNAME_1, ACCESS_PAGE_AND_CHILDREN),
("page_b_b", self.GROUPNAME_2, ACCESS_CHILDREN),
("page_b", self.GROUPNAME_3, ACCESS_PAGE_AND_DESCENDANTS),
("page_b_b", self.GROUPNAME_4, ACCESS_DESCENDANTS),
("page_d", self.GROUPNAME_5, ACCESS_PAGE),
]
for title, groupname, inherit in data:
page = Page.objects.drafts().get(title_set__title=title)
group = Group.objects.get(name__iexact=groupname)
PagePermission.objects.create(can_view=True, group=group, page=page, grant_on=inherit)
self.assertEqual(5, PagePermission.objects.all().count())
self.assertEqual(0, GlobalPagePermission.objects.all().count())
def assertPageFound(self, url, client=None):
if not client:
client = self.client
response = client.get(url)
self.assertEqual(response.status_code, 200)
def assertPageNotFound(self, url, client=None):
if not client:
client = self.client
response = client.get(url)
self.assertEqual(response.status_code, 404)
def assertViewAllowed(self, page, user):
self.assertTrue(user_can_view_page(user, page))
def assertViewNotAllowed(self, page, user):
self.assertFalse(user_can_view_page(user, page))
def assertInMenu(self, page, user):
request = self.get_request(user, page)
menu_renderer = menu_pool.get_renderer(request)
nodes = menu_renderer.get_nodes()
target_url = page.get_absolute_url()
found_in_menu = False
for node in nodes:
if node.get_absolute_url() == target_url:
found_in_menu = True
break
self.assertTrue(found_in_menu)
def assertNotInMenu(self, page, user):
request = self.get_request(user, page)
menu_renderer = menu_pool.get_renderer(request)
nodes = menu_renderer.get_nodes()
target_url = page.get_absolute_url()
found_in_menu = False
for node in nodes:
if node.get_absolute_url() == target_url:
found_in_menu = True
break
self.assertFalse(found_in_menu)
def assertNodeMemberships(self, visible_page_ids, restricted_pages, public_page_ids):
"""
        test that all visible page ids are either in_public and not in_restricted,
        or not in_public and in_restricted
"""
for page_id in visible_page_ids:
in_restricted = False
in_public = False
if page_id in restricted_pages:
in_restricted = True
if page_id in public_page_ids:
in_public = True
self.assertTrue((in_public and not in_restricted) or
(not in_public and in_restricted),
msg="page_id %s in_public: %s, in_restricted: %s" % (page_id, in_public, in_restricted))
def assertGrantedVisibility(self, all_pages, expected_granted_pages, username=None):
"""
        Helper to check that the expected_granted_pages are not in the
        restricted_pages list and that all visible pages are in the
        expected_granted_pages.
"""
# log the user in if present
user = None
if username is not None:
if get_user_model().USERNAME_FIELD == 'email':
username = username + '@django-cms.org'
query = dict()
query[get_user_model().USERNAME_FIELD+'__iexact'] = username
user = get_user_model().objects.get(**query)
request = self.get_request(user)
visible_page_ids = get_visible_pages(request, all_pages, self.site)
public_page_ids = Page.objects.drafts().filter(title_set__title__in=expected_granted_pages).values_list('id',
flat=True)
self.assertEqual(len(visible_page_ids), len(expected_granted_pages))
restricted_pages = Page.objects.public().exclude(title_set__title__in=expected_granted_pages).values_list('id',
flat=True)
self.assertNodeMemberships(visible_page_ids, restricted_pages, public_page_ids)
def get_request(self, user=None, page=None):
# see tests/menu.py line 753
path = "/"
if page:
path = page.get_absolute_url()
attrs = {
'user': user or AnonymousUser(),
'REQUEST': {},
'POST': {},
'GET': {},
'path': path,
'session': {},
}
return type('Request', (object,), attrs)
def get_url_dict(self, pages, language='en'):
return dict((page.get_absolute_url(language=language), page) for page in pages)
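# A minimal sketch (illustration only, not used by the tests) of the kind of
# view restriction _setup_view_restrictions creates for group 1:
def _example_view_restriction():
    """With CMS_PERMISSION enabled, a single can_view PagePermission with
    grant_on=ACCESS_PAGE_AND_CHILDREN hides the page and its direct children
    from users outside the group."""
    page = Page.objects.drafts().get(title_set__title="page_b")
    group, _ = Group.objects.get_or_create(name=ViewPermissionTests.GROUPNAME_1)
    return PagePermission.objects.create(
        can_view=True, group=group, page=page,
        grant_on=ACCESS_PAGE_AND_CHILDREN)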
@override_settings(
CMS_PERMISSION=True,
CMS_PUBLIC_FOR='all',
)
class ViewPermissionComplexMenuAllNodesTests(ViewPermissionTests):
"""
Test CMS_PUBLIC_FOR=all group access and menu nodes rendering
"""
def test_public_pages_anonymous_norestrictions(self):
"""
All pages are visible to an anonymous user
"""
all_pages = self._setup_tree_pages()
request = self.get_request()
visible_page_ids = get_visible_pages(request, all_pages, self.site)
self.assertEqual(len(all_pages), len(visible_page_ids))
menu_renderer = menu_pool.get_renderer(request)
nodes = menu_renderer.get_nodes()
self.assertEqual(len(nodes), len(all_pages))
def test_public_menu_anonymous_user(self):
"""
Anonymous user should only see the pages in the rendered menu
        that have no permissions assigned, directly or indirectly
"""
self._setup_user_groups()
all_pages = self._setup_tree_pages()
self._setup_view_restrictions()
granted = ['page_a',
'page_c',
'page_c_a',
'page_c_b',
'page_d_a',
'page_d_b',
'page_d_c',
'page_d_d'
]
self.assertGrantedVisibility(all_pages, granted)
urls = self.get_url_dict(all_pages)
user = AnonymousUser()
request = self.get_request(user, urls['/en/'])
menu_renderer = menu_pool.get_renderer(request)
nodes = menu_renderer.get_nodes()
self.assertEqual(len(nodes), 4)
self.assertInMenu(urls["/en/"], user)
self.assertInMenu(urls["/en/page_c/"], user)
self.assertInMenu(urls["/en/page_c/page_c_a/"], user)
self.assertInMenu(urls["/en/page_c/page_c_b/"], user)
self.assertViewNotAllowed(urls["/en/page_b/"], user)
self.assertNotInMenu(urls["/en/page_b/"], user)
self.assertViewNotAllowed(urls["/en/page_d/"], user)
self.assertNotInMenu(urls["/en/page_d/"], user)
def test_menu_access_page_and_children_group_1(self):
"""
        simulate behaviour of group 1 member
group_b_ACCESS_PAGE_AND_CHILDREN to page_b
"""
self._setup_user_groups()
all_pages = self._setup_tree_pages()
self._setup_view_restrictions()
granted = ['page_a',
'page_c',
'page_c_a',
'page_c_b',
#group_1
                   'page_b', # page_b has page and children restricted - group 1
'page_b_a',
                   'page_b_b', # page_b_b has children restricted - group 2
'page_b_c',
'page_b_d',
# not restricted
'page_d_a',
'page_d_b',
'page_d_c',
'page_d_d'
]
urls = self.get_url_dict(all_pages)
if get_user_model().USERNAME_FIELD == 'email':
user = get_user_model().objects.get(email='user_1@django-cms.org')
else:
user = get_user_model().objects.get(username='user_1')
self.assertGrantedVisibility(all_pages, granted, username='user_1')
self.assertViewAllowed(urls["/en/page_b/"], user)
self.assertInMenu(urls["/en/page_b/"], user)
self.assertViewAllowed(urls["/en/page_b/page_b_b/"], user)
self.assertInMenu(urls["/en/page_b/page_b_b/"], user)
# descendant
self.assertViewNotAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
self.assertNotInMenu(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
# group 5
self.assertViewNotAllowed(urls["/en/page_d/"], user)
self.assertNotInMenu(urls["/en/page_d/"], user)
# should be public as only page_d is restricted
self.assertViewAllowed(urls["/en/page_d/page_d_a/"], user)
self.assertNotInMenu(urls["/en/page_d/page_d_a/"], user)
def test_menu_access_children_group_2(self):
"""
simulate behaviour of group 2 member
GROUPNAME_2 = 'group_b_b_ACCESS_CHILDREN'
to page_b_b
"""
self._setup_user_groups()
all_pages = self._setup_tree_pages()
self._setup_view_restrictions()
granted = [
'page_a',
'page_c',
'page_c_a',
'page_c_b',
'page_b_b_a',
'page_b_b_b',
'page_b_b_c',
# not restricted
'page_d_a',
'page_d_b',
'page_d_c',
'page_d_d',
]
self.assertGrantedVisibility(all_pages, granted, username='user_2')
urls = self.get_url_dict(all_pages)
if get_user_model().USERNAME_FIELD == 'email':
user = get_user_model().objects.get(email='user_2@django-cms.org')
else:
user = get_user_model().objects.get(username='user_2')
self.assertViewNotAllowed(urls["/en/page_b/page_b_b/"], user)
self.assertViewAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
self.assertViewNotAllowed(urls["/en/page_b/page_b_b/page_b_b_a/page_b_b_a_a/"], user)
self.assertViewNotAllowed(urls["/en/page_d/"], user)
self.assertViewAllowed(urls["/en/page_d/page_d_a/"], user)
def test_menu_access_page_and_descendants_group_3(self):
"""
simulate behaviour of group 3 member
group_b_ACCESS_PAGE_AND_DESCENDANTS to page_b
"""
self._setup_user_groups()
all_pages = self._setup_tree_pages()
self._setup_view_restrictions()
granted = ['page_a',
'page_b',
'page_b_a',
'page_b_b',
'page_b_b_a',
'page_b_b_a_a',
'page_b_b_b',
'page_b_b_c',
'page_b_c',
'page_b_d',
'page_b_d_a',
'page_b_d_b',
'page_b_d_c',
'page_c',
'page_c_a',
'page_c_b',
'page_d_a',
'page_d_b',
'page_d_c',
'page_d_d',
]
self.assertGrantedVisibility(all_pages, granted, username='user_3')
urls = self.get_url_dict(all_pages)
if get_user_model().USERNAME_FIELD == 'email':
user = get_user_model().objects.get(email='user_3@django-cms.org')
else:
user = get_user_model().objects.get(username='user_3')
self.assertViewAllowed(urls["/en/page_b/"], user)
self.assertViewAllowed(urls["/en/page_b/page_b_d/page_b_d_a/"], user)
self.assertViewNotAllowed(urls["/en/page_d/"], user)
self.assertViewAllowed(urls["/en/page_d/page_d_a/"], user)
def test_menu_access_descendants_group_4(self):
"""
simulate behaviour of group 4 member
group_b_b_ACCESS_DESCENDANTS to page_b_b
"""
self._setup_user_groups()
all_pages = self._setup_tree_pages()
self._setup_view_restrictions()
granted = ['page_a',
'page_b_b_a',
'page_b_b_a_a',
'page_b_b_b',
'page_b_b_c',
'page_c',
'page_c_a',
'page_c_b',
'page_d_a',
'page_d_b',
'page_d_c',
'page_d_d',
]
self.assertGrantedVisibility(all_pages, granted, username='user_4')
urls = self.get_url_dict(all_pages)
if get_user_model().USERNAME_FIELD == 'email':
user = get_user_model().objects.get(email='user_4@django-cms.org')
else:
user = get_user_model().objects.get(username='user_4')
self.assertViewNotAllowed(urls["/en/page_b/"], user)
self.assertViewNotAllowed(urls["/en/page_b/page_b_b/"], user)
self.assertViewAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
self.assertViewNotAllowed(urls["/en/page_d/"], user)
self.assertViewAllowed(urls["/en/page_d/page_d_a/"], user)
def test_menu_access_page_group_5(self):
"""
        simulate behaviour of group 5 member
        group_d_ACCESS_PAGE to page_d
"""
self._setup_user_groups()
all_pages = self._setup_tree_pages()
self._setup_view_restrictions()
granted = ['page_a',
'page_c',
'page_c_a',
'page_c_b',
'page_d',
'page_d_a',
'page_d_b',
'page_d_c',
'page_d_d',
]
self.assertGrantedVisibility(all_pages, granted, username='user_5')
urls = self.get_url_dict(all_pages)
if get_user_model().USERNAME_FIELD == 'email':
user = get_user_model().objects.get(email='user_5@django-cms.org')
else:
user = get_user_model().objects.get(username='user_5')
# call /
self.assertViewNotAllowed(urls["/en/page_b/"], user)
self.assertViewNotAllowed(urls["/en/page_b/page_b_b/"], user)
self.assertViewNotAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
self.assertViewAllowed(urls["/en/page_d/"], user)
self.assertViewAllowed(urls["/en/page_d/page_d_a/"], user)
def test_non_view_permission_doesnt_hide(self):
"""
PagePermissions with can_view=False shouldn't hide pages in the menu.
"""
self._setup_user_groups()
all_pages = self._setup_tree_pages()
page = Page.objects.drafts().get(title_set__title="page_b")
group = Group.objects.get(name=self.GROUPNAME_1)
PagePermission.objects.create(can_view=False, group=group, page=page)
urls = self.get_url_dict(all_pages)
self.assertInMenu(urls["/en/page_b/"], AnonymousUser())
@override_settings(
CMS_PERMISSION=True,
CMS_PUBLIC_FOR='all',
)
class ViewPermissionTreeBugTests(ViewPermissionTests):
"""Test issue 1113
https://github.com/divio/django-cms/issues/1113
Wrong view permission calculation in PagePermission.objects.for_page
grant_on=ACCESS_PAGE_AND_CHILDREN or ACCESS_PAGE_AND_DESCENDANTS to page 6
Test if this affects the menu entries and page visibility
"""
GROUPNAME_6 = 'group_6_ACCESS_PAGE'
def _setup_pages(self):
"""
Tree Structure
|- Page_1
| |- Page_2
| |- Page_3
| |- Page_4 (false positive)
| |- Page_5
| | |- Page_6 (group 6 page access)
"""
stdkwargs = {
'template': 'nav_playground.html',
'language': 'en',
'published': True,
'in_navigation': True,
}
page_1 = create_page("page_1", **stdkwargs) # first page slug is /
page_2 = create_page("page_2", parent=page_1, **stdkwargs)
page_3 = create_page("page_3", parent=page_2, **stdkwargs)
page_4 = create_page("page_4", parent=page_3, **stdkwargs)
page_5 = create_page("page_5", parent=page_1, **stdkwargs)
page_6 = create_page("page_6", parent=page_5, **stdkwargs)
return [page_1,
page_2,
page_3,
page_4,
page_5,
page_6,
]
def _setup_user(self):
user = self._create_user('user_6', True)
group = Group.objects.create(name=self.GROUPNAME_6)
user_set = getattr(group, 'user_set')
user_set.add(user)
group.save()
def _setup_permviewbug(self):
"""
Setup group_6_ACCESS_PAGE view restriction
"""
page = Page.objects.drafts().get(title_set__title="page_6")
group = Group.objects.get(name__iexact=self.GROUPNAME_6)
PagePermission.objects.create(can_view=True, group=group, page=page, grant_on=ACCESS_PAGE_AND_CHILDREN)
PagePermission.objects.create(can_view=True, group=group, page=page, grant_on=ACCESS_PAGE_AND_DESCENDANTS)
def test_pageforbug(self):
all_pages = self._setup_pages()
self._setup_user()
self._setup_permviewbug()
for page in all_pages:
perm = PagePermission.objects.for_page(page=page)
# only page_6 has a permission assigned
if page.get_title() == 'page_6':
self.assertEqual(len(perm), 2)
else:
msg = "Permission wrong at page %s" % (page.get_title())
self.assertEqual(len(perm), 0, msg)
granted = ['page_1',
'page_2',
'page_3',
'page_4',
'page_5',
]
urls = self.get_url_dict(all_pages)
user = AnonymousUser()
# anonymous doesn't see page_6
self.assertGrantedVisibility(all_pages, granted)
self.assertViewAllowed(urls["/en/page_2/page_3/page_4/"], user)
self.assertViewAllowed(urls["/en/page_5/"], user)
self.assertViewNotAllowed(urls["/en/page_5/page_6/"], user)
# group member
granted = ['page_1',
'page_2',
'page_3',
'page_4',
'page_5',
'page_6',
]
self.assertGrantedVisibility(all_pages, granted, username='user_6')
if get_user_model().USERNAME_FIELD == 'email':
user = get_user_model().objects.get(email='user_6@django-cms.org')
else:
user = get_user_model().objects.get(username='user_6')
url = "/en/page_2/page_3/page_4/"
self.assertViewAllowed(urls[url], user)
url = "/en/page_5/page_6/"
self.assertViewAllowed(urls[url], user)
|
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for multi-gpu training utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python import data
from tensorflow.python import keras
from tensorflow.python.platform import test
def check_if_compatible_devices(gpus=2):
available_devices = [
keras.utils.multi_gpu_utils._normalize_device_name(name)
for name in keras.utils.multi_gpu_utils._get_available_devices()
]
if '/gpu:%d' % (gpus - 1) not in available_devices:
return False
return True
class TestMultiGPUModel(test.TestCase):
def test_multi_gpu_test_simple_model(self):
gpus = 2
num_samples = 1000
input_dim = 10
output_dim = 1
hidden_dim = 10
epochs = 2
target_gpu_id = [0, 1]
if not check_if_compatible_devices(gpus=gpus):
return
with self.cached_session():
model = keras.models.Sequential()
model.add(keras.layers.Dense(hidden_dim,
input_shape=(input_dim,)))
model.add(keras.layers.Dense(output_dim))
x = np.random.random((num_samples, input_dim))
y = np.random.random((num_samples, output_dim))
parallel_model = keras.utils.multi_gpu_model(model, gpus=gpus)
parallel_model.compile(loss='mse', optimizer='rmsprop')
parallel_model.fit(x, y, epochs=epochs)
parallel_model = keras.utils.multi_gpu_model(model, gpus=target_gpu_id)
parallel_model.compile(loss='mse', optimizer='rmsprop')
parallel_model.fit(x, y, epochs=epochs)
def test_multi_gpu_test_multi_io_model(self):
gpus = 2
num_samples = 1000
input_dim_a = 10
input_dim_b = 5
output_dim_a = 1
output_dim_b = 2
hidden_dim = 10
epochs = 2
target_gpu_id = [0, 1]
if not check_if_compatible_devices(gpus=gpus):
return
with self.cached_session():
input_a = keras.Input((input_dim_a,))
input_b = keras.Input((input_dim_b,))
a = keras.layers.Dense(hidden_dim)(input_a)
b = keras.layers.Dense(hidden_dim)(input_b)
c = keras.layers.concatenate([a, b])
output_a = keras.layers.Dense(output_dim_a)(c)
output_b = keras.layers.Dense(output_dim_b)(c)
model = keras.models.Model([input_a, input_b], [output_a, output_b])
a_x = np.random.random((num_samples, input_dim_a))
b_x = np.random.random((num_samples, input_dim_b))
a_y = np.random.random((num_samples, output_dim_a))
b_y = np.random.random((num_samples, output_dim_b))
parallel_model = keras.utils.multi_gpu_model(model, gpus=gpus)
parallel_model.compile(loss='mse', optimizer='rmsprop')
parallel_model.fit([a_x, b_x], [a_y, b_y], epochs=epochs)
parallel_model = keras.utils.multi_gpu_model(model, gpus=target_gpu_id)
parallel_model.compile(loss='mse', optimizer='rmsprop')
parallel_model.fit([a_x, b_x], [a_y, b_y], epochs=epochs)
def test_multi_gpu_test_invalid_devices(self):
if not check_if_compatible_devices(gpus=2):
return
with self.cached_session():
input_shape = (1000, 10)
model = keras.models.Sequential()
model.add(keras.layers.Dense(10,
activation='relu',
input_shape=input_shape[1:]))
model.add(keras.layers.Dense(1, activation='sigmoid'))
model.compile(loss='mse', optimizer='rmsprop')
x = np.random.random(input_shape)
y = np.random.random((input_shape[0], 1))
with self.assertRaises(ValueError):
parallel_model = keras.utils.multi_gpu_model(
model, gpus=len(keras.backend._get_available_gpus()) + 1)
parallel_model.fit(x, y, epochs=2)
with self.assertRaises(ValueError):
parallel_model = keras.utils.multi_gpu_model(
model, gpus=[0, 2, 4, 6, 8])
parallel_model.fit(x, y, epochs=2)
with self.assertRaises(ValueError):
parallel_model = keras.utils.multi_gpu_model(model, gpus=1)
parallel_model.fit(x, y, epochs=2)
with self.assertRaises(ValueError):
parallel_model = keras.utils.multi_gpu_model(model, gpus=[0])
parallel_model.fit(x, y, epochs=2)
def test_nested_model_with_tensor_input(self):
gpus = 2
input_dim = 10
shape = (input_dim,)
num_samples = 16
num_classes = 10
if not check_if_compatible_devices(gpus=gpus):
return
with self.cached_session():
input_shape = (num_samples,) + shape
x_train = np.random.randint(0, 255, input_shape)
y_train = np.random.randint(0, num_classes, (input_shape[0],))
keras.backend.set_learning_phase(True)
y_train = keras.utils.to_categorical(y_train, num_classes)
x_train = x_train.astype('float32')
y_train = y_train.astype('float32')
dataset = data.Dataset.from_tensor_slices((x_train, y_train))
dataset = dataset.repeat()
dataset = dataset.batch(4)
iterator = data.make_one_shot_iterator(dataset)
inputs, targets = iterator.get_next()
input_tensor = keras.layers.Input(tensor=inputs)
model = keras.models.Sequential()
model.add(keras.layers.Dense(3,
input_shape=(input_dim,)))
model.add(keras.layers.Dense(num_classes))
output = model(input_tensor)
outer_model = keras.Model(input_tensor, output)
parallel_model = keras.utils.multi_gpu_model(outer_model, gpus=gpus)
parallel_model.compile(
loss='categorical_crossentropy',
optimizer=keras.optimizers.RMSprop(lr=0.0001, decay=1e-6),
metrics=['accuracy'],
target_tensors=[targets])
parallel_model.fit(epochs=1, steps_per_epoch=3)
def test_multi_gpu_with_multi_input_layers(self):
gpus = 2
if not check_if_compatible_devices(gpus=gpus):
return
with self.cached_session():
inputs = keras.Input((4, 3))
init_state = keras.Input((3,))
outputs = keras.layers.SimpleRNN(
3, return_sequences=True)(inputs, initial_state=init_state)
x = [np.random.randn(2, 4, 3), np.random.randn(2, 3)]
y = np.random.randn(2, 4, 3)
model = keras.Model([inputs, init_state], outputs)
parallel_model = keras.utils.multi_gpu_model(model, gpus=gpus)
parallel_model.compile(loss='mean_squared_error', optimizer='adam')
parallel_model.train_on_batch(x, y)
def test_multi_gpu_with_siamese_network(self):
gpus = 2
if not check_if_compatible_devices(gpus=gpus):
return
with self.cached_session():
input_shape = (3,)
nested_model = keras.models.Sequential([
keras.layers.Dense(32, input_shape=input_shape),
keras.layers.Dense(1)
], name='nested')
input1 = keras.Input(input_shape)
input2 = keras.Input(input_shape)
score1 = nested_model(input1)
score2 = nested_model(input2)
score_sum = keras.layers.Add(name='add')([score1, score2])
siamese = keras.models.Model(inputs=[input1, input2],
outputs=[score_sum, score1, score2],
name='siamese')
parallel_siamese = keras.utils.multi_gpu_model(siamese, gpus)
self.assertEqual(parallel_siamese.output_names,
['add', 'nested_1', 'nested_2'])
if __name__ == '__main__':
test.main()
|
|
# This file is generated by mkstringprep.py. DO NOT EDIT.
"""Library that exposes various tables found in the StringPrep RFC 3454.
There are two kinds of tables: sets, for which a member test is provided,
and mappings, for which a mapping function is provided.
"""
import unicodedata
#assert unicodedata.unidata_version == '3.2.0'
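# Illustrative usage of the two kinds of tables described above (results
# traced directly from the definitions in this module):
#
#   in_table_c11(u" ")       # -> True:  ASCII space is in table C.1.1
#   in_table_b1(u"\u00ad")   # -> True:  SOFT HYPHEN is "mapped to nothing"
#   map_table_b3(u"A")       # -> u"a":  simple case folding
#   map_table_b2(u"\u00df")  # -> u"ss": case folding with NFKC normalization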
def in_table_a1(code):
if unicodedata.category(code) != 'Cn': return False
c = ord(code)
if 0xFDD0 <= c < 0xFDF0: return False
return (c & 0xFFFF) not in (0xFFFE, 0xFFFF)
b1_set = set([173, 847, 6150, 6155, 6156, 6157, 8203, 8204, 8205, 8288, 65279] + range(65024,65040))
def in_table_b1(code):
return ord(code) in b1_set
b3_exceptions = {
0xb5:u'\u03bc', 0xdf:u'ss', 0x130:u'i\u0307', 0x149:u'\u02bcn',
0x17f:u's', 0x1f0:u'j\u030c', 0x345:u'\u03b9', 0x37a:u' \u03b9',
0x390:u'\u03b9\u0308\u0301', 0x3b0:u'\u03c5\u0308\u0301', 0x3c2:u'\u03c3', 0x3d0:u'\u03b2',
0x3d1:u'\u03b8', 0x3d2:u'\u03c5', 0x3d3:u'\u03cd', 0x3d4:u'\u03cb',
0x3d5:u'\u03c6', 0x3d6:u'\u03c0', 0x3f0:u'\u03ba', 0x3f1:u'\u03c1',
0x3f2:u'\u03c3', 0x3f5:u'\u03b5', 0x587:u'\u0565\u0582', 0x1e96:u'h\u0331',
0x1e97:u't\u0308', 0x1e98:u'w\u030a', 0x1e99:u'y\u030a', 0x1e9a:u'a\u02be',
0x1e9b:u'\u1e61', 0x1f50:u'\u03c5\u0313', 0x1f52:u'\u03c5\u0313\u0300', 0x1f54:u'\u03c5\u0313\u0301',
0x1f56:u'\u03c5\u0313\u0342', 0x1f80:u'\u1f00\u03b9', 0x1f81:u'\u1f01\u03b9', 0x1f82:u'\u1f02\u03b9',
0x1f83:u'\u1f03\u03b9', 0x1f84:u'\u1f04\u03b9', 0x1f85:u'\u1f05\u03b9', 0x1f86:u'\u1f06\u03b9',
0x1f87:u'\u1f07\u03b9', 0x1f88:u'\u1f00\u03b9', 0x1f89:u'\u1f01\u03b9', 0x1f8a:u'\u1f02\u03b9',
0x1f8b:u'\u1f03\u03b9', 0x1f8c:u'\u1f04\u03b9', 0x1f8d:u'\u1f05\u03b9', 0x1f8e:u'\u1f06\u03b9',
0x1f8f:u'\u1f07\u03b9', 0x1f90:u'\u1f20\u03b9', 0x1f91:u'\u1f21\u03b9', 0x1f92:u'\u1f22\u03b9',
0x1f93:u'\u1f23\u03b9', 0x1f94:u'\u1f24\u03b9', 0x1f95:u'\u1f25\u03b9', 0x1f96:u'\u1f26\u03b9',
0x1f97:u'\u1f27\u03b9', 0x1f98:u'\u1f20\u03b9', 0x1f99:u'\u1f21\u03b9', 0x1f9a:u'\u1f22\u03b9',
0x1f9b:u'\u1f23\u03b9', 0x1f9c:u'\u1f24\u03b9', 0x1f9d:u'\u1f25\u03b9', 0x1f9e:u'\u1f26\u03b9',
0x1f9f:u'\u1f27\u03b9', 0x1fa0:u'\u1f60\u03b9', 0x1fa1:u'\u1f61\u03b9', 0x1fa2:u'\u1f62\u03b9',
0x1fa3:u'\u1f63\u03b9', 0x1fa4:u'\u1f64\u03b9', 0x1fa5:u'\u1f65\u03b9', 0x1fa6:u'\u1f66\u03b9',
0x1fa7:u'\u1f67\u03b9', 0x1fa8:u'\u1f60\u03b9', 0x1fa9:u'\u1f61\u03b9', 0x1faa:u'\u1f62\u03b9',
0x1fab:u'\u1f63\u03b9', 0x1fac:u'\u1f64\u03b9', 0x1fad:u'\u1f65\u03b9', 0x1fae:u'\u1f66\u03b9',
0x1faf:u'\u1f67\u03b9', 0x1fb2:u'\u1f70\u03b9', 0x1fb3:u'\u03b1\u03b9', 0x1fb4:u'\u03ac\u03b9',
0x1fb6:u'\u03b1\u0342', 0x1fb7:u'\u03b1\u0342\u03b9', 0x1fbc:u'\u03b1\u03b9', 0x1fbe:u'\u03b9',
0x1fc2:u'\u1f74\u03b9', 0x1fc3:u'\u03b7\u03b9', 0x1fc4:u'\u03ae\u03b9', 0x1fc6:u'\u03b7\u0342',
0x1fc7:u'\u03b7\u0342\u03b9', 0x1fcc:u'\u03b7\u03b9', 0x1fd2:u'\u03b9\u0308\u0300', 0x1fd3:u'\u03b9\u0308\u0301',
0x1fd6:u'\u03b9\u0342', 0x1fd7:u'\u03b9\u0308\u0342', 0x1fe2:u'\u03c5\u0308\u0300', 0x1fe3:u'\u03c5\u0308\u0301',
0x1fe4:u'\u03c1\u0313', 0x1fe6:u'\u03c5\u0342', 0x1fe7:u'\u03c5\u0308\u0342', 0x1ff2:u'\u1f7c\u03b9',
0x1ff3:u'\u03c9\u03b9', 0x1ff4:u'\u03ce\u03b9', 0x1ff6:u'\u03c9\u0342', 0x1ff7:u'\u03c9\u0342\u03b9',
0x1ffc:u'\u03c9\u03b9', 0x20a8:u'rs', 0x2102:u'c', 0x2103:u'\xb0c',
0x2107:u'\u025b', 0x2109:u'\xb0f', 0x210b:u'h', 0x210c:u'h',
0x210d:u'h', 0x2110:u'i', 0x2111:u'i', 0x2112:u'l',
0x2115:u'n', 0x2116:u'no', 0x2119:u'p', 0x211a:u'q',
0x211b:u'r', 0x211c:u'r', 0x211d:u'r', 0x2120:u'sm',
0x2121:u'tel', 0x2122:u'tm', 0x2124:u'z', 0x2128:u'z',
0x212c:u'b', 0x212d:u'c', 0x2130:u'e', 0x2131:u'f',
0x2133:u'm', 0x213e:u'\u03b3', 0x213f:u'\u03c0', 0x2145:u'd',
0x3371:u'hpa', 0x3373:u'au', 0x3375:u'ov', 0x3380:u'pa',
0x3381:u'na', 0x3382:u'\u03bca', 0x3383:u'ma', 0x3384:u'ka',
0x3385:u'kb', 0x3386:u'mb', 0x3387:u'gb', 0x338a:u'pf',
0x338b:u'nf', 0x338c:u'\u03bcf', 0x3390:u'hz', 0x3391:u'khz',
0x3392:u'mhz', 0x3393:u'ghz', 0x3394:u'thz', 0x33a9:u'pa',
0x33aa:u'kpa', 0x33ab:u'mpa', 0x33ac:u'gpa', 0x33b4:u'pv',
0x33b5:u'nv', 0x33b6:u'\u03bcv', 0x33b7:u'mv', 0x33b8:u'kv',
0x33b9:u'mv', 0x33ba:u'pw', 0x33bb:u'nw', 0x33bc:u'\u03bcw',
0x33bd:u'mw', 0x33be:u'kw', 0x33bf:u'mw', 0x33c0:u'k\u03c9',
0x33c1:u'm\u03c9', 0x33c3:u'bq', 0x33c6:u'c\u2215kg', 0x33c7:u'co.',
0x33c8:u'db', 0x33c9:u'gy', 0x33cb:u'hp', 0x33cd:u'kk',
0x33ce:u'km', 0x33d7:u'ph', 0x33d9:u'ppm', 0x33da:u'pr',
0x33dc:u'sv', 0x33dd:u'wb', 0xfb00:u'ff', 0xfb01:u'fi',
0xfb02:u'fl', 0xfb03:u'ffi', 0xfb04:u'ffl', 0xfb05:u'st',
0xfb06:u'st', 0xfb13:u'\u0574\u0576', 0xfb14:u'\u0574\u0565', 0xfb15:u'\u0574\u056b',
0xfb16:u'\u057e\u0576', 0xfb17:u'\u0574\u056d', 0x1d400:u'a', 0x1d401:u'b',
0x1d402:u'c', 0x1d403:u'd', 0x1d404:u'e', 0x1d405:u'f',
0x1d406:u'g', 0x1d407:u'h', 0x1d408:u'i', 0x1d409:u'j',
0x1d40a:u'k', 0x1d40b:u'l', 0x1d40c:u'm', 0x1d40d:u'n',
0x1d40e:u'o', 0x1d40f:u'p', 0x1d410:u'q', 0x1d411:u'r',
0x1d412:u's', 0x1d413:u't', 0x1d414:u'u', 0x1d415:u'v',
0x1d416:u'w', 0x1d417:u'x', 0x1d418:u'y', 0x1d419:u'z',
0x1d434:u'a', 0x1d435:u'b', 0x1d436:u'c', 0x1d437:u'd',
0x1d438:u'e', 0x1d439:u'f', 0x1d43a:u'g', 0x1d43b:u'h',
0x1d43c:u'i', 0x1d43d:u'j', 0x1d43e:u'k', 0x1d43f:u'l',
0x1d440:u'm', 0x1d441:u'n', 0x1d442:u'o', 0x1d443:u'p',
0x1d444:u'q', 0x1d445:u'r', 0x1d446:u's', 0x1d447:u't',
0x1d448:u'u', 0x1d449:u'v', 0x1d44a:u'w', 0x1d44b:u'x',
0x1d44c:u'y', 0x1d44d:u'z', 0x1d468:u'a', 0x1d469:u'b',
0x1d46a:u'c', 0x1d46b:u'd', 0x1d46c:u'e', 0x1d46d:u'f',
0x1d46e:u'g', 0x1d46f:u'h', 0x1d470:u'i', 0x1d471:u'j',
0x1d472:u'k', 0x1d473:u'l', 0x1d474:u'm', 0x1d475:u'n',
0x1d476:u'o', 0x1d477:u'p', 0x1d478:u'q', 0x1d479:u'r',
0x1d47a:u's', 0x1d47b:u't', 0x1d47c:u'u', 0x1d47d:u'v',
0x1d47e:u'w', 0x1d47f:u'x', 0x1d480:u'y', 0x1d481:u'z',
0x1d49c:u'a', 0x1d49e:u'c', 0x1d49f:u'd', 0x1d4a2:u'g',
0x1d4a5:u'j', 0x1d4a6:u'k', 0x1d4a9:u'n', 0x1d4aa:u'o',
0x1d4ab:u'p', 0x1d4ac:u'q', 0x1d4ae:u's', 0x1d4af:u't',
0x1d4b0:u'u', 0x1d4b1:u'v', 0x1d4b2:u'w', 0x1d4b3:u'x',
0x1d4b4:u'y', 0x1d4b5:u'z', 0x1d4d0:u'a', 0x1d4d1:u'b',
0x1d4d2:u'c', 0x1d4d3:u'd', 0x1d4d4:u'e', 0x1d4d5:u'f',
0x1d4d6:u'g', 0x1d4d7:u'h', 0x1d4d8:u'i', 0x1d4d9:u'j',
0x1d4da:u'k', 0x1d4db:u'l', 0x1d4dc:u'm', 0x1d4dd:u'n',
0x1d4de:u'o', 0x1d4df:u'p', 0x1d4e0:u'q', 0x1d4e1:u'r',
0x1d4e2:u's', 0x1d4e3:u't', 0x1d4e4:u'u', 0x1d4e5:u'v',
0x1d4e6:u'w', 0x1d4e7:u'x', 0x1d4e8:u'y', 0x1d4e9:u'z',
0x1d504:u'a', 0x1d505:u'b', 0x1d507:u'd', 0x1d508:u'e',
0x1d509:u'f', 0x1d50a:u'g', 0x1d50d:u'j', 0x1d50e:u'k',
0x1d50f:u'l', 0x1d510:u'm', 0x1d511:u'n', 0x1d512:u'o',
0x1d513:u'p', 0x1d514:u'q', 0x1d516:u's', 0x1d517:u't',
0x1d518:u'u', 0x1d519:u'v', 0x1d51a:u'w', 0x1d51b:u'x',
0x1d51c:u'y', 0x1d538:u'a', 0x1d539:u'b', 0x1d53b:u'd',
0x1d53c:u'e', 0x1d53d:u'f', 0x1d53e:u'g', 0x1d540:u'i',
0x1d541:u'j', 0x1d542:u'k', 0x1d543:u'l', 0x1d544:u'm',
0x1d546:u'o', 0x1d54a:u's', 0x1d54b:u't', 0x1d54c:u'u',
0x1d54d:u'v', 0x1d54e:u'w', 0x1d54f:u'x', 0x1d550:u'y',
0x1d56c:u'a', 0x1d56d:u'b', 0x1d56e:u'c', 0x1d56f:u'd',
0x1d570:u'e', 0x1d571:u'f', 0x1d572:u'g', 0x1d573:u'h',
0x1d574:u'i', 0x1d575:u'j', 0x1d576:u'k', 0x1d577:u'l',
0x1d578:u'm', 0x1d579:u'n', 0x1d57a:u'o', 0x1d57b:u'p',
0x1d57c:u'q', 0x1d57d:u'r', 0x1d57e:u's', 0x1d57f:u't',
0x1d580:u'u', 0x1d581:u'v', 0x1d582:u'w', 0x1d583:u'x',
0x1d584:u'y', 0x1d585:u'z', 0x1d5a0:u'a', 0x1d5a1:u'b',
0x1d5a2:u'c', 0x1d5a3:u'd', 0x1d5a4:u'e', 0x1d5a5:u'f',
0x1d5a6:u'g', 0x1d5a7:u'h', 0x1d5a8:u'i', 0x1d5a9:u'j',
0x1d5aa:u'k', 0x1d5ab:u'l', 0x1d5ac:u'm', 0x1d5ad:u'n',
0x1d5ae:u'o', 0x1d5af:u'p', 0x1d5b0:u'q', 0x1d5b1:u'r',
0x1d5b2:u's', 0x1d5b3:u't', 0x1d5b4:u'u', 0x1d5b5:u'v',
0x1d5b6:u'w', 0x1d5b7:u'x', 0x1d5b8:u'y', 0x1d5b9:u'z',
0x1d5d4:u'a', 0x1d5d5:u'b', 0x1d5d6:u'c', 0x1d5d7:u'd',
0x1d5d8:u'e', 0x1d5d9:u'f', 0x1d5da:u'g', 0x1d5db:u'h',
0x1d5dc:u'i', 0x1d5dd:u'j', 0x1d5de:u'k', 0x1d5df:u'l',
0x1d5e0:u'm', 0x1d5e1:u'n', 0x1d5e2:u'o', 0x1d5e3:u'p',
0x1d5e4:u'q', 0x1d5e5:u'r', 0x1d5e6:u's', 0x1d5e7:u't',
0x1d5e8:u'u', 0x1d5e9:u'v', 0x1d5ea:u'w', 0x1d5eb:u'x',
0x1d5ec:u'y', 0x1d5ed:u'z', 0x1d608:u'a', 0x1d609:u'b',
0x1d60a:u'c', 0x1d60b:u'd', 0x1d60c:u'e', 0x1d60d:u'f',
0x1d60e:u'g', 0x1d60f:u'h', 0x1d610:u'i', 0x1d611:u'j',
0x1d612:u'k', 0x1d613:u'l', 0x1d614:u'm', 0x1d615:u'n',
0x1d616:u'o', 0x1d617:u'p', 0x1d618:u'q', 0x1d619:u'r',
0x1d61a:u's', 0x1d61b:u't', 0x1d61c:u'u', 0x1d61d:u'v',
0x1d61e:u'w', 0x1d61f:u'x', 0x1d620:u'y', 0x1d621:u'z',
0x1d63c:u'a', 0x1d63d:u'b', 0x1d63e:u'c', 0x1d63f:u'd',
0x1d640:u'e', 0x1d641:u'f', 0x1d642:u'g', 0x1d643:u'h',
0x1d644:u'i', 0x1d645:u'j', 0x1d646:u'k', 0x1d647:u'l',
0x1d648:u'm', 0x1d649:u'n', 0x1d64a:u'o', 0x1d64b:u'p',
0x1d64c:u'q', 0x1d64d:u'r', 0x1d64e:u's', 0x1d64f:u't',
0x1d650:u'u', 0x1d651:u'v', 0x1d652:u'w', 0x1d653:u'x',
0x1d654:u'y', 0x1d655:u'z', 0x1d670:u'a', 0x1d671:u'b',
0x1d672:u'c', 0x1d673:u'd', 0x1d674:u'e', 0x1d675:u'f',
0x1d676:u'g', 0x1d677:u'h', 0x1d678:u'i', 0x1d679:u'j',
0x1d67a:u'k', 0x1d67b:u'l', 0x1d67c:u'm', 0x1d67d:u'n',
0x1d67e:u'o', 0x1d67f:u'p', 0x1d680:u'q', 0x1d681:u'r',
0x1d682:u's', 0x1d683:u't', 0x1d684:u'u', 0x1d685:u'v',
0x1d686:u'w', 0x1d687:u'x', 0x1d688:u'y', 0x1d689:u'z',
0x1d6a8:u'\u03b1', 0x1d6a9:u'\u03b2', 0x1d6aa:u'\u03b3', 0x1d6ab:u'\u03b4',
0x1d6ac:u'\u03b5', 0x1d6ad:u'\u03b6', 0x1d6ae:u'\u03b7', 0x1d6af:u'\u03b8',
0x1d6b0:u'\u03b9', 0x1d6b1:u'\u03ba', 0x1d6b2:u'\u03bb', 0x1d6b3:u'\u03bc',
0x1d6b4:u'\u03bd', 0x1d6b5:u'\u03be', 0x1d6b6:u'\u03bf', 0x1d6b7:u'\u03c0',
0x1d6b8:u'\u03c1', 0x1d6b9:u'\u03b8', 0x1d6ba:u'\u03c3', 0x1d6bb:u'\u03c4',
0x1d6bc:u'\u03c5', 0x1d6bd:u'\u03c6', 0x1d6be:u'\u03c7', 0x1d6bf:u'\u03c8',
0x1d6c0:u'\u03c9', 0x1d6d3:u'\u03c3', 0x1d6e2:u'\u03b1', 0x1d6e3:u'\u03b2',
0x1d6e4:u'\u03b3', 0x1d6e5:u'\u03b4', 0x1d6e6:u'\u03b5', 0x1d6e7:u'\u03b6',
0x1d6e8:u'\u03b7', 0x1d6e9:u'\u03b8', 0x1d6ea:u'\u03b9', 0x1d6eb:u'\u03ba',
0x1d6ec:u'\u03bb', 0x1d6ed:u'\u03bc', 0x1d6ee:u'\u03bd', 0x1d6ef:u'\u03be',
0x1d6f0:u'\u03bf', 0x1d6f1:u'\u03c0', 0x1d6f2:u'\u03c1', 0x1d6f3:u'\u03b8',
0x1d6f4:u'\u03c3', 0x1d6f5:u'\u03c4', 0x1d6f6:u'\u03c5', 0x1d6f7:u'\u03c6',
0x1d6f8:u'\u03c7', 0x1d6f9:u'\u03c8', 0x1d6fa:u'\u03c9', 0x1d70d:u'\u03c3',
0x1d71c:u'\u03b1', 0x1d71d:u'\u03b2', 0x1d71e:u'\u03b3', 0x1d71f:u'\u03b4',
0x1d720:u'\u03b5', 0x1d721:u'\u03b6', 0x1d722:u'\u03b7', 0x1d723:u'\u03b8',
0x1d724:u'\u03b9', 0x1d725:u'\u03ba', 0x1d726:u'\u03bb', 0x1d727:u'\u03bc',
0x1d728:u'\u03bd', 0x1d729:u'\u03be', 0x1d72a:u'\u03bf', 0x1d72b:u'\u03c0',
0x1d72c:u'\u03c1', 0x1d72d:u'\u03b8', 0x1d72e:u'\u03c3', 0x1d72f:u'\u03c4',
0x1d730:u'\u03c5', 0x1d731:u'\u03c6', 0x1d732:u'\u03c7', 0x1d733:u'\u03c8',
0x1d734:u'\u03c9', 0x1d747:u'\u03c3', 0x1d756:u'\u03b1', 0x1d757:u'\u03b2',
0x1d758:u'\u03b3', 0x1d759:u'\u03b4', 0x1d75a:u'\u03b5', 0x1d75b:u'\u03b6',
0x1d75c:u'\u03b7', 0x1d75d:u'\u03b8', 0x1d75e:u'\u03b9', 0x1d75f:u'\u03ba',
0x1d760:u'\u03bb', 0x1d761:u'\u03bc', 0x1d762:u'\u03bd', 0x1d763:u'\u03be',
0x1d764:u'\u03bf', 0x1d765:u'\u03c0', 0x1d766:u'\u03c1', 0x1d767:u'\u03b8',
0x1d768:u'\u03c3', 0x1d769:u'\u03c4', 0x1d76a:u'\u03c5', 0x1d76b:u'\u03c6',
0x1d76c:u'\u03c7', 0x1d76d:u'\u03c8', 0x1d76e:u'\u03c9', 0x1d781:u'\u03c3',
0x1d790:u'\u03b1', 0x1d791:u'\u03b2', 0x1d792:u'\u03b3', 0x1d793:u'\u03b4',
0x1d794:u'\u03b5', 0x1d795:u'\u03b6', 0x1d796:u'\u03b7', 0x1d797:u'\u03b8',
0x1d798:u'\u03b9', 0x1d799:u'\u03ba', 0x1d79a:u'\u03bb', 0x1d79b:u'\u03bc',
0x1d79c:u'\u03bd', 0x1d79d:u'\u03be', 0x1d79e:u'\u03bf', 0x1d79f:u'\u03c0',
0x1d7a0:u'\u03c1', 0x1d7a1:u'\u03b8', 0x1d7a2:u'\u03c3', 0x1d7a3:u'\u03c4',
0x1d7a4:u'\u03c5', 0x1d7a5:u'\u03c6', 0x1d7a6:u'\u03c7', 0x1d7a7:u'\u03c8',
0x1d7a8:u'\u03c9', 0x1d7bb:u'\u03c3', }
def map_table_b3(code):
r = b3_exceptions.get(ord(code))
if r is not None: return r
return code.lower()
def map_table_b2(a):
al = map_table_b3(a)
b = unicodedata.normalize("NFKC", al)
bl = u"".join([map_table_b3(ch) for ch in b])
c = unicodedata.normalize("NFKC", bl)
if b != c:
return c
else:
return al
def in_table_c11(code):
return code == u" "
def in_table_c12(code):
return unicodedata.category(code) == "Zs" and code != u" "
def in_table_c11_c12(code):
return unicodedata.category(code) == "Zs"
def in_table_c21(code):
return ord(code) < 128 and unicodedata.category(code) == "Cc"
c22_specials = set([1757, 1807, 6158, 8204, 8205, 8232, 8233, 65279] + range(8288,8292) + range(8298,8304) + range(65529,65533) + range(119155,119163))
def in_table_c22(code):
c = ord(code)
if c < 128: return False
if unicodedata.category(code) == "Cc": return True
return c in c22_specials
def in_table_c21_c22(code):
return unicodedata.category(code) == "Cc" or \
ord(code) in c22_specials
def in_table_c3(code):
return unicodedata.category(code) == "Co"
def in_table_c4(code):
c = ord(code)
if c < 0xFDD0: return False
if c < 0xFDF0: return True
return (ord(code) & 0xFFFF) in (0xFFFE, 0xFFFF)
def in_table_c5(code):
return unicodedata.category(code) == "Cs"
c6_set = set(range(65529,65534))
def in_table_c6(code):
return ord(code) in c6_set
c7_set = set(range(12272,12284))
def in_table_c7(code):
return ord(code) in c7_set
c8_set = set([832, 833, 8206, 8207] + range(8234,8239) + range(8298,8304))
def in_table_c8(code):
return ord(code) in c8_set
c9_set = set([917505] + range(917536,917632))
def in_table_c9(code):
return ord(code) in c9_set
def in_table_d1(code):
return unicodedata.bidirectional(code) in ("R","AL")
def in_table_d2(code):
return unicodedata.bidirectional(code) == "L"
|
|
"""
A Cython plugin for coverage.py
Requires the coverage package at least in version 4.0 (which added the plugin API).
"""
from __future__ import absolute_import
import re
import os.path
from collections import defaultdict
from coverage.plugin import CoveragePlugin, FileTracer, FileReporter # requires coverage.py 4.0+
from .Utils import find_root_package_dir, is_package_dir, open_source_file
from . import __version__
def _find_c_source(base_path):
if os.path.exists(base_path + '.c'):
c_file = base_path + '.c'
elif os.path.exists(base_path + '.cpp'):
c_file = base_path + '.cpp'
else:
c_file = None
return c_file
def _find_dep_file_path(main_file, file_path):
abs_path = os.path.abspath(file_path)
if file_path.endswith('.pxi') and not os.path.exists(abs_path):
# include files are looked up relative to the main source file
pxi_file_path = os.path.join(os.path.dirname(main_file), file_path)
if os.path.exists(pxi_file_path):
abs_path = os.path.abspath(pxi_file_path)
return abs_path
class Plugin(CoveragePlugin):
# map from traced file paths to absolute file paths
_file_path_map = None
# map from traced file paths to corresponding C files
_c_files_map = None
# map from parsed C files to their content
_parsed_c_files = None
def sys_info(self):
return [('Cython version', __version__)]
def file_tracer(self, filename):
"""
Try to find a C source file for a file path found by the tracer.
"""
if filename.startswith('<') or filename.startswith('memory:'):
return None
c_file = py_file = None
filename = os.path.abspath(filename)
if self._c_files_map and filename in self._c_files_map:
c_file = self._c_files_map[filename][0]
if c_file is None:
c_file, py_file = self._find_source_files(filename)
if not c_file:
return None
# parse all source file paths and lines from C file
# to learn about all relevant source files right away (pyx/pxi/pxd)
# FIXME: this might already be too late if the first executed line
# is not from the main .pyx file but a file with a different
# name than the .c file (which prevents us from finding the
# .c file)
self._parse_lines(c_file, filename)
if self._file_path_map is None:
self._file_path_map = {}
return CythonModuleTracer(filename, py_file, c_file, self._c_files_map, self._file_path_map)
def file_reporter(self, filename):
if os.path.splitext(filename)[1].lower() not in ('.pyx', '.pxi', '.pxd'):
return None # let coverage.py handle it (e.g. .py files)
filename = os.path.abspath(filename)
if self._c_files_map and filename in self._c_files_map:
c_file, rel_file_path, code = self._c_files_map[filename]
else:
c_file, _ = self._find_source_files(filename)
if not c_file:
return None # unknown file
rel_file_path, code = self._parse_lines(c_file, filename)
return CythonModuleReporter(c_file, filename, rel_file_path, code)
def _find_source_files(self, filename):
basename, ext = os.path.splitext(filename)
ext = ext.lower()
if ext in ('.py', '.pyx', '.pxd', '.c', '.cpp'):
pass
elif ext in ('.so', '.pyd'):
platform_suffix = re.search(r'[.]cpython-[0-9]+[a-z]*$', basename, re.I)
if platform_suffix:
basename = basename[:platform_suffix.start()]
elif ext == '.pxi':
# if we get here, it means that the first traced line of a Cython module was
# not in the main module but in an include file, so try a little harder to
# find the main source file
self._find_c_source_files(os.path.dirname(filename), filename)
if filename in self._c_files_map:
return self._c_files_map[filename][0], None
else:
# none of our business
return None, None
c_file = filename if ext in ('.c', '.cpp') else _find_c_source(basename)
if c_file is None:
# a module "pkg/mod.so" can have a source file "pkg/pkg.mod.c"
package_root = find_root_package_dir.uncached(filename)
package_path = os.path.relpath(basename, package_root).split(os.path.sep)
if len(package_path) > 1:
test_basepath = os.path.join(os.path.dirname(filename), '.'.join(package_path))
c_file = _find_c_source(test_basepath)
py_source_file = None
if c_file:
py_source_file = os.path.splitext(c_file)[0] + '.py'
if not os.path.exists(py_source_file):
py_source_file = None
try:
with open(c_file, 'rb') as f:
if b'/* Generated by Cython ' not in f.read(30):
return None # not a Cython file
except (IOError, OSError):
c_file = None
return c_file, py_source_file
def _find_c_source_files(self, dir_path, source_file):
"""
Desperately parse all C files in the directory or its package parents
(not re-descending) to find the (included) source file in one of them.
"""
if not os.path.isdir(dir_path):
return
splitext = os.path.splitext
for filename in os.listdir(dir_path):
ext = splitext(filename)[1].lower()
if ext in ('.c', '.cpp'):
self._parse_lines(os.path.join(dir_path, filename), source_file)
if source_file in self._c_files_map:
return
# not found? then try one package up
if is_package_dir(dir_path):
self._find_c_source_files(os.path.dirname(dir_path), source_file)
def _parse_lines(self, c_file, sourcefile):
"""
Parse a Cython generated C/C++ source file and find the executable lines.
Each executable line starts with a comment header that states source file
and line number, as well as the surrounding range of source code lines.
"""
if self._parsed_c_files is None:
self._parsed_c_files = {}
if c_file in self._parsed_c_files:
code_lines = self._parsed_c_files[c_file]
else:
match_source_path_line = re.compile(r' */[*] +"(.*)":([0-9]+)$').match
match_current_code_line = re.compile(r' *[*] (.*) # <<<<<<+$').match
match_comment_end = re.compile(r' *[*]/$').match
not_executable = re.compile(
r'\s*c(?:type)?def\s+'
r'(?:(?:public|external)\s+)?'
r'(?:struct|union|enum|class)'
r'(\s+[^:]+|)\s*:'
).match
code_lines = defaultdict(dict)
filenames = set()
with open(c_file) as lines:
lines = iter(lines)
for line in lines:
match = match_source_path_line(line)
if not match:
continue
filename, lineno = match.groups()
filenames.add(filename)
lineno = int(lineno)
for comment_line in lines:
match = match_current_code_line(comment_line)
if match:
code_line = match.group(1).rstrip()
if not_executable(code_line):
break
code_lines[filename][lineno] = code_line
break
elif match_comment_end(comment_line):
# unexpected comment format - false positive?
break
self._parsed_c_files[c_file] = code_lines
if self._c_files_map is None:
self._c_files_map = {}
for filename, code in code_lines.iteritems():
abs_path = _find_dep_file_path(c_file, filename)
self._c_files_map[abs_path] = (c_file, filename, code)
if sourcefile not in self._c_files_map:
return (None,) * 2 # e.g. shared library file
return self._c_files_map[sourcefile][1:]
class CythonModuleTracer(FileTracer):
"""
Find the Python/Cython source file for a Cython module.
"""
def __init__(self, module_file, py_file, c_file, c_files_map, file_path_map):
super(CythonModuleTracer, self).__init__()
self.module_file = module_file
self.py_file = py_file
self.c_file = c_file
self._c_files_map = c_files_map
self._file_path_map = file_path_map
def has_dynamic_source_filename(self):
return True
def dynamic_source_filename(self, filename, frame):
"""
Determine source file path. Called by the function call tracer.
"""
source_file = frame.f_code.co_filename
try:
return self._file_path_map[source_file]
except KeyError:
pass
abs_path = os.path.abspath(source_file)
if self.py_file and source_file[-3:].lower() == '.py':
# always let coverage.py handle this case itself
self._file_path_map[source_file] = self.py_file
return self.py_file
assert self._c_files_map is not None
if abs_path not in self._c_files_map:
self._c_files_map[abs_path] = (self.c_file, source_file, None)
self._file_path_map[source_file] = abs_path
return abs_path
class CythonModuleReporter(FileReporter):
"""
Provide detailed trace information for one source file to coverage.py.
"""
def __init__(self, c_file, source_file, rel_file_path, code):
super(CythonModuleReporter, self).__init__(source_file)
self.name = rel_file_path
self.c_file = c_file
self._code = code
def statements(self):
return self._code.viewkeys()
def _iter_source_tokens(self):
current_line = 1
for line_no, code_line in sorted(self._code.iteritems()):
while line_no > current_line:
yield []
current_line += 1
yield [('txt', code_line)]
current_line += 1
def source(self):
if os.path.exists(self.filename):
with open_source_file(self.filename) as f:
return f.read()
else:
return '\n'.join(
(tokens[0][1] if tokens else '')
for tokens in self._iter_source_tokens())
def source_token_lines(self):
if os.path.exists(self.filename):
with open_source_file(self.filename) as f:
for line in f:
yield [('txt', line.rstrip('\n'))]
else:
            # _iter_source_tokens() already yields complete token lists
            # (one per source line), so emit them unchanged.
            for tokens in self._iter_source_tokens():
                yield tokens
def coverage_init(reg, options):
reg.add_file_tracer(Plugin())
|
|
from characteristic import Attribute, attributes
from rply import ParserGenerator
from rply.errors import LexingError as _RPlyLexingError
from cycy.exceptions import CyCyError
from cycy.parser.ast import (
Array,
ArrayDereference,
Assembler,
Assignment,
BinaryOperation,
Block,
Call,
Char,
For,
Function,
If,
Include,
Int32,
Double,
Node,
Null,
PostOperation,
PreOperation,
Program,
ReturnStatement,
String,
Variable,
VariableDeclaration,
Type,
)
from cycy.parser.lexer import RULES, lexer
from cycy.parser.preprocessor import Preprocessor
class LexingError(CyCyError):
def __init__(self, source_pos, message):
self.message = message
self.source_pos = source_pos
def __str__(self):
return "Lexer failed at %s (message: %s)" % (
self.source_pos, self.message,
)
@attributes(
[
Attribute(name="token"),
Attribute(name="source", exclude_from_repr=True),
],
apply_with_init=False,
)
class ParseError(CyCyError):
def __init__(self, token, source):
self.token = token
self.source = source
def __str__(self):
token_type = self.token.gettokentype()
token_value = self.token.value
source_pos = self.token.source_pos
if source_pos is None:
return "Unexpected %s %s" % (token_type, token_value)
line, column = source_pos.lineno, source_pos.colno
return (
self._hint(line_number=line - 1, column_number=column - 1) +
"Unexpected %s %s at line %s, column %s" % (
token_type,
"'%s'" % (token_value,),
source_pos.lineno,
source_pos.colno,
)
)
def _hint(self, line_number, column_number):
"""
Find a hint in the source at the given line and column.
"""
line = self.source.splitlines(True)[line_number]
return line + " " * column_number + "^\n"
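    # Illustrative rendering of the hint (assuming 1-based line/column
    # positions from the lexer): for an unexpected token at column 7 of the
    # source line "int x = 3;", the hint is the line plus a caret under that
    # column, which __str__ prefixes to the "Unexpected ..." message:
    #
    #   int x = 3;
    #         ^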
class UnexpectedEnd(ParseError):
"""
There was an unexpected end in the input.
"""
class NodeList(Node):
"""
    A list of nodes used for temporary accumulation during parsing; it
    should never appear in the final AST.
"""
def __init__(self, items=None):
if items is None:
items = []
self.items = items
def append(self, item):
self.items.append(item)
def extend(self, items):
self.items.extend(items)
def get_items(self):
return self.items
class BoolWrapper(Node):
pass
BoolTrue = BoolWrapper()
BoolFalse = BoolWrapper()
class _Parser(object):
@property
def input_in_progress(self):
return False
@attributes(
[
Attribute(name="lexer", exclude_from_repr=True),
Attribute(name="preprocessor", exclude_from_repr=True),
],
apply_with_init=False,
)
class Parser(_Parser):
def __init__(self, preprocessor=None, lexer=lexer):
if preprocessor is None:
preprocessor = Preprocessor()
self.preprocessor = preprocessor
self.lexer = lexer
def parse(self, source):
tokens = self.lexer.lex(source)
preprocessed = self.preprocessor.preprocessed(
tokens=tokens, parser=self,
)
state = _ParseState(source=source)
try:
program = self._parser.parse(preprocessed, state=state)
except _RPlyLexingError as error:
raise LexingError(
source_pos=error.source_pos,
message=error.message,
)
assert isinstance(program, Program)
return program
_pg = ParserGenerator(RULES, cache_id="cycy")
@_pg.production("main : program")
def main_program(self, p):
return p[0]
@_pg.production("program : unit")
def program_function(self, p):
return Program([p[0]])
@_pg.production("program : unit program")
def program_unit_program(self, p):
p[1].add_unit(p[0])
return p[1]
@_pg.production("return_statement : return expr ;")
def return_statement(self, p):
return ReturnStatement(value=p[1])
@_pg.production("unit : function")
@_pg.production("unit : prototype")
@_pg.production("unit : preprocessor_directive")
def unit(self, p):
return p[0]
@_pg.production("function : type IDENTIFIER LEFT_PARENTHESIS void RIGHT_PARENTHESIS block")
def function_void_param(self, p):
return Function(
return_type=p[0],
name=p[1].getstr(),
params=[],
body=p[5]
)
@_pg.production("function : type IDENTIFIER LEFT_PARENTHESIS arg_decl_list RIGHT_PARENTHESIS block")
def function_with_args(self, p):
return Function(
return_type=p[0],
name=p[1].getstr(),
params=p[3].get_items(),
body=p[5]
)
@_pg.production("prototype : type IDENTIFIER LEFT_PARENTHESIS void RIGHT_PARENTHESIS ;")
    def prototype_void_param(self, p):
return Function(
return_type=p[0],
name=p[1].getstr(),
params=[],
prototype=True
)
@_pg.production("prototype : type IDENTIFIER LEFT_PARENTHESIS arg_decl_list RIGHT_PARENTHESIS ;")
    def prototype_with_args(self, p):
return Function(
return_type=p[0],
name=p[1].getstr(),
params=p[3].get_items(),
prototype=True
)
@_pg.production("prototype : type IDENTIFIER LEFT_PARENTHESIS type_list RIGHT_PARENTHESIS ;")
    def prototype_with_type_list(self, p):
return Function(
return_type=p[0],
name=p[1].getstr(),
params=[VariableDeclaration(name=None, vtype=x, value=None) for x in p[3].get_items()],
prototype=True
)
@_pg.production("arg_decl_list : declaration")
def arg_decl_list_declaration(self, p):
return NodeList([p[0]])
@_pg.production("arg_decl_list : arg_decl_list , declaration")
def arg_decl_list(self, p):
p[0].append(p[2])
return p[0]
@_pg.production("block : LEFT_CURLY_BRACE statement_list RIGHT_CURLY_BRACE")
def block_statement_list(self, p):
return Block(statements=p[1].get_items())
@_pg.production("statement_list : statement")
def statement_list_statement(self, p):
return NodeList([p[0]])
@_pg.production("statement_list : statement statement_list")
def statement_list_statement_list(self, p):
st = NodeList([p[0]])
st.extend(p[1].get_items())
return st
@_pg.production("statement : return_statement")
@_pg.production("statement : expr ;")
@_pg.production("statement : declaration ;")
@_pg.production("statement : primary_expression ;")
@_pg.production("statement : func_call_statement")
@_pg.production("statement : while_loop")
@_pg.production("statement : for_loop")
@_pg.production("statement : if_loop")
@_pg.production("statement : assembler ;")
def statement_list_return(self, p):
return p[0]
@_pg.production("expr : assignment")
def expr_assignment(self, p):
return p[0]
@_pg.production("assembler : ASM LEFT_PARENTHESIS STRING_LITERAL RIGHT_PARENTHESIS")
def assembler(self, p):
return Assembler(instruction=String(p[2].getstr().strip("\"")))
@_pg.production("preprocessor_directive : include")
def preprocessor_directive(self, p):
return p[0]
@_pg.production("include : INCLUDE STRING_LITERAL")
def include(self, p):
return Include(name=p[1].getstr().strip('"'))
@_pg.production("if_loop : if LEFT_PARENTHESIS expr RIGHT_PARENTHESIS block")
def if_loop(self, p):
return If(condition=p[2], body=p[4])
@_pg.production("if_loop : if LEFT_PARENTHESIS expr RIGHT_PARENTHESIS statement")
def if_loop_single_line(self, p):
return If(condition=p[2], body=Block(statements=[p[4]]))
@_pg.production("while_loop : while LEFT_PARENTHESIS expr RIGHT_PARENTHESIS block")
def while_loop(self, p):
return For(condition=p[2], body=p[4])
@_pg.production("while_loop : while LEFT_PARENTHESIS expr RIGHT_PARENTHESIS statement")
def while_loop_single_line(self, p):
return For(condition=p[2], body=Block(statements=[p[4]]))
@_pg.production("for_loop : for LEFT_PARENTHESIS expr ; expr ; expr RIGHT_PARENTHESIS statement")
def for_loop_single_line(self, p):
return For(initial=p[2], condition=p[4], increment=p[6], body=Block(statements=[p[8]]))
@_pg.production("for_loop : for LEFT_PARENTHESIS expr ; expr ; expr RIGHT_PARENTHESIS block")
def for_loop(self, p):
return For(initial=p[2], condition=p[4], increment=p[6], body=p[8])
@_pg.production("func_call : IDENTIFIER LEFT_PARENTHESIS param_list RIGHT_PARENTHESIS")
def function_call(self, p):
return Call(name=p[0].getstr(), args=p[2].get_items())
@_pg.production("func_call_statement : func_call ;")
@_pg.production("expr : func_call")
def function_call_expr(self, p):
return p[0]
@_pg.production("param_list : expr")
@_pg.production("param_list : ")
def param_list(self, p):
return NodeList(items=[p[0]] if p else None)
@_pg.production("assignment : IDENTIFIER = expr")
def assign(self, p):
return Assignment(left=Variable(p[0].getstr()), right=p[2])
@_pg.production("expr : expr - expr")
@_pg.production("expr : expr + expr")
@_pg.production("expr : expr * expr")
@_pg.production("expr : expr / expr")
@_pg.production("expr : expr % expr")
@_pg.production('expr : expr || expr')
@_pg.production('expr : expr && expr')
@_pg.production('expr : expr == expr')
@_pg.production('expr : expr != expr')
@_pg.production("expr : expr <= expr")
@_pg.production("expr : expr >= expr")
@_pg.production("expr : expr < expr")
@_pg.production("expr : expr > expr")
def binop(self, p):
return BinaryOperation(operator=p[1].getstr(), left=p[0], right=p[2])
@_pg.production("expr : STRING_LITERAL")
def expr_string(self, p):
return String(p[0].getstr().strip("\""))
@_pg.production("expr : null")
def expr_null(self, p):
return Null()
@_pg.production("expr : array LEFT_SQUARE_BRACKET expr RIGHT_SQUARE_BRACKET")
def array_dereference(self, p):
return ArrayDereference(array=p[0], index=p[2])
@_pg.production("array : IDENTIFIER")
def array_variable(self, p):
return Variable(name=p[0].getstr())
@_pg.production("declaration : type IDENTIFIER")
def declare_int(self, p):
return VariableDeclaration(name=p[1].getstr(), vtype=p[0], value=None)
@_pg.production("declaration : type IDENTIFIER LEFT_SQUARE_BRACKET INTEGER_LITERAL RIGHT_SQUARE_BRACKET")
def declare_array(self, p):
return VariableDeclaration(name=p[1].getstr(), vtype=Type(base="array", arraylength=int(p[3].getstr()), reference=p[0]))
@_pg.production("declaration : type IDENTIFIER = INTEGER_LITERAL")
def declare_assign_int(self, p):
return VariableDeclaration(
name=p[1].getstr(),
vtype=p[0],
value=Int32(int(p[3].getstr()))
)
@_pg.production("declaration : type IDENTIFIER = FLOAT_LITERAL")
def declare_assign_float(self, p):
return VariableDeclaration(
name=p[1].getstr(),
vtype=p[0],
value=Double(float(p[3].getstr()))
)
@_pg.production("declaration : type IDENTIFIER = STRING_LITERAL")
def declare_assign_string(self, p):
return VariableDeclaration(
name=p[1].getstr(),
vtype=p[0],
value=String(p[3].getstr().strip("\""))
)
@_pg.production("type_list : type")
def type_list(self, p):
return NodeList([p[0]])
@_pg.production("type_list : type_list , type")
def type_list_type(self, p):
p[0].append(p[2])
return p[0]
@_pg.production("type : optional_unsigned optional_const core_or_pointer_type")
def type_object(self, p):
the_type = p[2]
assert isinstance(the_type, Type)
the_type.unsigned = (p[0] == BoolTrue)
the_type.const = (p[1] == BoolTrue)
return the_type
@_pg.production("optional_const : ")
def const_false(self, p):
return BoolFalse
@_pg.production("optional_const : CONST")
def const_true(self, p):
return BoolTrue
@_pg.production("optional_unsigned : ")
def unsigned_false(self, p):
return BoolFalse
@_pg.production("optional_unsigned : UNSIGNED")
def unsigned_true(self, p):
return BoolTrue
@_pg.production("core_or_pointer_type : core_type")
def core_type(self, p):
return p[0]
@_pg.production("core_or_pointer_type : core_or_pointer_type *")
def pointer_type(self, p):
return Type(base="pointer", reference=p[0])
@_pg.production("core_type : CHAR")
@_pg.production("core_type : INT")
@_pg.production("core_type : SHORT")
@_pg.production("core_type : LONG")
@_pg.production("core_type : FLOAT")
@_pg.production("core_type : DOUBLE")
def generic_vtype(self, p):
return Type(base=p[0].getstr())
@_pg.production("core_type : LONG LONG")
def long_long_vtype(self, p):
return Type(base='long long')
@_pg.production("core_type : LONG DOUBLE")
def long_double_vtype(self, p):
return Type(base='long double')
@_pg.production("expr : primary_expression ++")
def post_incr(self, p):
return PostOperation(operator="++", variable=p[0])
@_pg.production("expr : primary_expression --")
    def post_decr(self, p):
return PostOperation(operator="--", variable=p[0])
@_pg.production("expr : ++ primary_expression")
    def pre_incr(self, p):
return PreOperation(operator="++", variable=p[1])
@_pg.production("expr : -- primary_expression")
    def pre_decr(self, p):
return PreOperation(operator="--", variable=p[1])
@_pg.production("expr : primary_expression")
def expr_const(self, p):
return p[0]
@_pg.production("primary_expression : const")
@_pg.production("primary_expression : IDENTIFIER")
@_pg.production("primary_expression : STRING_LITERAL")
@_pg.production("primary_expression : LEFT_PARENTHESIS primary_expression RIGHT_PARENTHESIS")
def primary_expression(self, p):
if isinstance(p[0], Node):
# const
return p[0]
elif p[0].gettokentype() == "IDENTIFIER":
return Variable(name=p[0].getstr())
elif p[0].gettokentype() == "STRING_LITERAL":
vals = []
for v in p[0].getstr().strip('"'):
vals.append(Char(value=v))
vals.append(Char(value=chr(0)))
return Array(value=vals)
else:
return p[1]
@_pg.production("const : FLOAT_LITERAL")
@_pg.production("const : INTEGER_LITERAL")
@_pg.production("const : CHAR_LITERAL")
def const(self, p):
if p[0].gettokentype() == "INTEGER_LITERAL":
return Int32(int(p[0].getstr()))
elif p[0].gettokentype() == "FLOAT_LITERAL":
return Double(float(p[0].getstr()))
elif p[0].gettokentype() == "CHAR_LITERAL":
return Char(p[0].getstr().strip("'"))
raise AssertionError("Bad token type in const")
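    # Note on the handlers in this class: they are registered with the
    # ParserGenerator while the class body executes, so rply invokes them as
    # plain functions at parse time. Because Parser.parse() passes a
    # _ParseState via the ``state`` argument, that state object arrives in
    # the first positional parameter (named ``self`` here), which appears to
    # be how error_handler below can read ``self.source``. (Descriptive note
    # only, based on rply's documented state-passing behaviour.)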
@_pg.error
def error_handler(self, token):
is_unexpected_end = token.gettokentype() == "$end"
if is_unexpected_end:
ParseException = UnexpectedEnd
else:
ParseException = ParseError
raise ParseException(token=token, source=self.source)
_parser = _pg.build()
class _ParseState(object):
def __init__(self, source):
self.source = source
@attributes([Attribute(name="parser")], apply_with_init=False)
class IncrementalParser(_Parser):
buffer = ""
def __init__(self, parser=None):
if parser is None:
parser = Parser(preprocessor=Preprocessor())
self.parser = parser
@property
def input_in_progress(self):
return bool(self.buffer)
def parse(self, source):
try:
ast = self.parser.parse(self.buffer + source)
except UnexpectedEnd:
self.buffer += source
else:
self.buffer = ""
return ast
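# Illustrative use of IncrementalParser (a sketch; assumes the default lexer
# and preprocessor wired up above cover this small C snippet): an incomplete
# translation unit raises UnexpectedEnd internally, gets buffered, and the
# parse completes once the rest of the input arrives.
#
#   incremental = IncrementalParser()
#   incremental.parse("int main(void) {")   # buffered, returns None
#   incremental.input_in_progress           # -> True
#   incremental.parse(" return 0; }")       # -> Program AST for the function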
|
|
#!/usr/bin/env python3
# Copyright (c) 2017-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test various command line arguments and configuration file parameters."""
import os
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework import util
class ConfArgsTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.supports_cli = False
self.wallet_names = []
self.disable_autoconnect = False
def test_config_file_parser(self):
self.stop_node(0)
inc_conf_file_path = os.path.join(self.nodes[0].datadir, 'include.conf')
with open(os.path.join(self.nodes[0].datadir, 'bitcoin.conf'), 'a', encoding='utf-8') as conf:
conf.write(f'includeconf={inc_conf_file_path}\n')
self.nodes[0].assert_start_raises_init_error(
expected_msg='Error: Error parsing command line arguments: Invalid parameter -dash_cli=1',
extra_args=['-dash_cli=1'],
)
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('dash_conf=1\n')
with self.nodes[0].assert_debug_log(expected_msgs=['Ignoring unknown configuration value dash_conf']):
self.start_node(0)
self.stop_node(0)
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('-dash=1\n')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error: Error reading configuration file: parse error on line 1: -dash=1, options in configuration file must be specified without leading -')
if self.is_wallet_compiled():
with open(inc_conf_file_path, 'w', encoding='utf8') as conf:
conf.write("wallet=foo\n")
self.nodes[0].assert_start_raises_init_error(expected_msg=f'Error: Config setting for -wallet only applied on {self.chain} network when in [{self.chain}] section.')
main_conf_file_path = os.path.join(self.options.tmpdir, 'node0', 'bitcoin_main.conf')
util.write_config(main_conf_file_path, n=0, chain='', extra_config=f'includeconf={inc_conf_file_path}\n')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('acceptnonstdtxn=1\n')
self.nodes[0].assert_start_raises_init_error(extra_args=[f"-conf={main_conf_file_path}"], expected_msg='Error: acceptnonstdtxn is not currently supported for main chain')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('nono\n')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error: Error reading configuration file: parse error on line 1: nono, if you intended to specify a negated option, use nono=1 instead')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('server=1\nrpcuser=someuser\nrpcpassword=some#pass')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error: Error reading configuration file: parse error on line 3, using # in rpcpassword can be ambiguous and should be avoided')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('server=1\nrpcuser=someuser\nmain.rpcpassword=some#pass')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error: Error reading configuration file: parse error on line 3, using # in rpcpassword can be ambiguous and should be avoided')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('server=1\nrpcuser=someuser\n[main]\nrpcpassword=some#pass')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error: Error reading configuration file: parse error on line 4, using # in rpcpassword can be ambiguous and should be avoided')
inc_conf_file2_path = os.path.join(self.nodes[0].datadir, 'include2.conf')
with open(os.path.join(self.nodes[0].datadir, 'bitcoin.conf'), 'a', encoding='utf-8') as conf:
conf.write(f'includeconf={inc_conf_file2_path}\n')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('testnot.datadir=1\n')
with open(inc_conf_file2_path, 'w', encoding='utf-8') as conf:
conf.write('[testnet]\n')
self.restart_node(0)
self.nodes[0].stop_node(expected_stderr=f'Warning: {inc_conf_file_path}:1 Section [testnot] is not recognized.{os.linesep}{inc_conf_file2_path}:1 Section [testnet] is not recognized.')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('') # clear
with open(inc_conf_file2_path, 'w', encoding='utf-8') as conf:
conf.write('') # clear
def test_invalid_command_line_options(self):
self.nodes[0].assert_start_raises_init_error(
expected_msg='Error: No proxy server specified. Use -proxy=<ip> or -proxy=<ip:port>.',
extra_args=['-proxy'],
)
def test_log_buffer(self):
self.stop_node(0)
with self.nodes[0].assert_debug_log(expected_msgs=['Warning: parsed potentially confusing double-negative -connect=0\n']):
self.start_node(0, extra_args=['-noconnect=0'])
def test_args_log(self):
self.stop_node(0)
self.log.info('Test config args logging')
with self.nodes[0].assert_debug_log(
expected_msgs=[
'Command-line arg: addnode="some.node"',
'Command-line arg: rpcauth=****',
'Command-line arg: rpcbind=****',
'Command-line arg: rpcpassword=****',
'Command-line arg: rpcuser=****',
'Command-line arg: torpassword=****',
f'Config file arg: {self.chain}="1"',
f'Config file arg: [{self.chain}] server="1"',
],
unexpected_msgs=[
'alice:f7efda5c189b999524f151318c0c86$d5b51b3beffbc0',
'127.1.1.1',
'secret-rpcuser',
'secret-torpassword',
]):
self.start_node(0, extra_args=[
'-addnode=some.node',
'-rpcauth=alice:f7efda5c189b999524f151318c0c86$d5b51b3beffbc0',
'-rpcbind=127.1.1.1',
'-rpcpassword=',
'-rpcuser=secret-rpcuser',
'-torpassword=secret-torpassword',
])
def test_networkactive(self):
self.log.info('Test -networkactive option')
self.stop_node(0)
with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: true\n']):
self.start_node(0)
self.stop_node(0)
with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: true\n']):
self.start_node(0, extra_args=['-networkactive'])
self.stop_node(0)
with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: true\n']):
self.start_node(0, extra_args=['-networkactive=1'])
self.stop_node(0)
with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: false\n']):
self.start_node(0, extra_args=['-networkactive=0'])
self.stop_node(0)
with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: false\n']):
self.start_node(0, extra_args=['-nonetworkactive'])
self.stop_node(0)
with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: false\n']):
self.start_node(0, extra_args=['-nonetworkactive=1'])
def test_seed_peers(self):
self.log.info('Test seed peers')
default_data_dir = self.nodes[0].datadir
# Only regtest has no fixed seeds. To avoid connections to random
# nodes, regtest is the only network where it is safe to enable
# -fixedseeds in tests
util.assert_equal(self.nodes[0].getblockchaininfo()['chain'],'regtest')
self.stop_node(0)
# No peers.dat exists and -dnsseed=1
# We expect the node will use DNS Seeds, but Regtest mode does not have
# any valid DNS seeds. So after 60 seconds, the node should fallback to
# fixed seeds
assert not os.path.exists(os.path.join(default_data_dir, "peers.dat"))
start = int(time.time())
with self.nodes[0].assert_debug_log(
expected_msgs=[
"Loaded 0 addresses from peers.dat",
"0 addresses found from DNS seeds",
"opencon thread start", # Ensure ThreadOpenConnections::start time is properly set
],
timeout=10,
):
self.start_node(0, extra_args=['-dnsseed=1', '-fixedseeds=1', f'-mocktime={start}'])
with self.nodes[0].assert_debug_log(expected_msgs=[
"Adding fixed seeds as 60 seconds have passed and addrman is empty",
]):
self.nodes[0].setmocktime(start + 65)
self.stop_node(0)
# No peers.dat exists and -dnsseed=0
# We expect the node will fallback immediately to fixed seeds
assert not os.path.exists(os.path.join(default_data_dir, "peers.dat"))
start = time.time()
with self.nodes[0].assert_debug_log(expected_msgs=[
"Loaded 0 addresses from peers.dat",
"DNS seeding disabled",
"Adding fixed seeds as -dnsseed=0, -addnode is not provided and all -seednode(s) attempted\n",
]):
self.start_node(0, extra_args=['-dnsseed=0', '-fixedseeds=1'])
assert time.time() - start < 60
self.stop_node(0)
# No peers.dat exists and dns seeds are disabled.
# We expect the node will not add fixed seeds when explicitly disabled.
assert not os.path.exists(os.path.join(default_data_dir, "peers.dat"))
start = time.time()
with self.nodes[0].assert_debug_log(expected_msgs=[
"Loaded 0 addresses from peers.dat",
"DNS seeding disabled",
"Fixed seeds are disabled",
]):
self.start_node(0, extra_args=['-dnsseed=0', '-fixedseeds=0'])
assert time.time() - start < 60
self.stop_node(0)
# No peers.dat exists and -dnsseed=0, but a -addnode is provided
# We expect the node will allow 60 seconds prior to using fixed seeds
assert not os.path.exists(os.path.join(default_data_dir, "peers.dat"))
start = int(time.time())
with self.nodes[0].assert_debug_log(
expected_msgs=[
"Loaded 0 addresses from peers.dat",
"DNS seeding disabled",
"opencon thread start", # Ensure ThreadOpenConnections::start time is properly set
],
timeout=10,
):
self.start_node(0, extra_args=['-dnsseed=0', '-fixedseeds=1', '-addnode=fakenodeaddr', f'-mocktime={start}'])
with self.nodes[0].assert_debug_log(expected_msgs=[
"Adding fixed seeds as 60 seconds have passed and addrman is empty",
]):
self.nodes[0].setmocktime(start + 65)
def run_test(self):
self.test_log_buffer()
self.test_args_log()
self.test_seed_peers()
self.test_networkactive()
self.test_config_file_parser()
self.test_invalid_command_line_options()
# Remove the -datadir argument so it doesn't override the config file
self.nodes[0].args = [arg for arg in self.nodes[0].args if not arg.startswith("-datadir")]
default_data_dir = self.nodes[0].datadir
new_data_dir = os.path.join(default_data_dir, 'newdatadir')
new_data_dir_2 = os.path.join(default_data_dir, 'newdatadir2')
# Check that using -datadir argument on non-existent directory fails
self.nodes[0].datadir = new_data_dir
self.nodes[0].assert_start_raises_init_error([f'-datadir={new_data_dir}'], f'Error: Specified data directory "{new_data_dir}" does not exist.')
# Check that using non-existent datadir in conf file fails
conf_file = os.path.join(default_data_dir, "bitcoin.conf")
# datadir needs to be set before [chain] section
        with open(conf_file, encoding='utf8') as conf_in:
            conf_file_contents = conf_in.read()
with open(conf_file, 'w', encoding='utf8') as f:
f.write(f"datadir={new_data_dir}\n")
f.write(conf_file_contents)
self.nodes[0].assert_start_raises_init_error([f'-conf={conf_file}'], f'Error: Error reading configuration file: specified data directory "{new_data_dir}" does not exist.')
# Check that an explicitly specified config file that cannot be opened fails
none_existent_conf_file = os.path.join(default_data_dir, "none_existent_bitcoin.conf")
self.nodes[0].assert_start_raises_init_error(['-conf=' + none_existent_conf_file], 'Error: Error reading configuration file: specified config file "' + none_existent_conf_file + '" could not be opened.')
# Create the directory and ensure the config file now works
os.mkdir(new_data_dir)
self.start_node(0, [f'-conf={conf_file}'])
self.stop_node(0)
assert os.path.exists(os.path.join(new_data_dir, self.chain, 'blocks'))
# Ensure command line argument overrides datadir in conf
os.mkdir(new_data_dir_2)
self.nodes[0].datadir = new_data_dir_2
self.start_node(0, [f'-datadir={new_data_dir_2}', f'-conf={conf_file}'])
assert os.path.exists(os.path.join(new_data_dir_2, self.chain, 'blocks'))
if __name__ == '__main__':
ConfArgsTest().main()
|
|
from flask import Blueprint, current_app, Response, request, jsonify
from croplands_api.models import Record, Location
from croplands_api import db, cache, limiter
from croplands_api.exceptions import FieldError
from requests.models import PreparedRequest
from flask_jwt import current_user
from croplands_api.auth import is_anonymous, generate_token
from sqlalchemy import func, asc, desc
import uuid
import datetime
data_blueprint = Blueprint('data', __name__, url_prefix='/data')
categorical_columns = {"id": Record.id,
"land_use_type": Record.land_use_type,
"crop_primary": Record.crop_primary,
"crop_secondary": Record.crop_secondary,
"water": Record.water,
"intensity": Record.intensity,
"year": Record.year,
"month": Record.month,
"source_type": Record.source_type,
"country": Location.country,
"use_validation": Location.use_validation}
def row_to_list(r, headers=False):
"""
Flattens query tuple to list
:param r: tuple of columns
:return: list
"""
# todo should pass columns to this function and have it get the data or headers
if headers:
return ['id', 'year', 'month', 'lat', 'lon', 'country', 'land_use_type', 'crop_primary',
'crop_secondary',
'water', 'intensity', 'source_type', 'source_class', 'source_description',
'use_validation']
return [r[0].id, r[0].year, r[0].month, round(r[1].lat, 8), round(r[1].lon, 8), r[1].country,
r[0].land_use_type,
r[0].crop_primary, r[0].crop_secondary,
r[0].water, r[0].intensity, r[0].source_type, r[0].source_class,
r[0].source_description, r[1].use_validation]
def safe_for_csv(value):
"""
Simple helper for converting to csv value...
TODO Find built-in replacement that is more robust
:param value: anything
:return: string
"""
escape_chars = ["'", "\""]
try:
value = value.replace(",", "_")
except AttributeError:
pass
if value is None:
return ""
elif any((c in str(value) for c in escape_chars)):
return "\"" + str(value) + "\""
else:
return str(value)
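# A possible built-in replacement for the TODO in safe_for_csv (illustrative
# sketch only; the streaming code below still uses safe_for_csv): the csv
# module handles quoting and escaping of embedded commas and quotes itself.
#
#   import csv
#   import io
#   buf = io.BytesIO()
#   csv.writer(buf).writerow([1, 'a,"b"', None])
#   buf.getvalue()  # -> '1,"a,""b""",\r\n'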
def query(meta=None, filters=None, count_all=False, count_filtered=False):
if filters is None:
filters = {}
if meta is None:
        meta = {
            "offset": 0,
            "limit": 1000,
            "order_by": 'id',
            # default direction so the ordering logic below cannot hit a KeyError
            "order_by_direction": 'asc'
        }
q = db.session.query(Record, Location)\
.join(Location).filter(Location.id == Record.location_id)
if count_all:
return q.count()
# filter by bounds
if 'southWestBounds' in filters and 'northEastBounds' in filters:
south_west = filters['southWestBounds'].split(',')
north_east = filters['northEastBounds'].split(',')
q = q.filter(Location.lat > float(south_west[0]), Location.lon > float(south_west[1]),
Location.lat < float(north_east[0]), Location.lon < float(north_east[1]))
if 'ndvi_limit_lower' in filters and 'ndvi_limit_upper' in filters:
upper = [int(v) for v in filters['ndvi_limit_upper'].split(',')]
lower = [int(v) for v in filters['ndvi_limit_lower'].split(',')]
q = q.filter(func.array_bounds(Record.ndvi, upper, lower))
for name, column in categorical_columns.iteritems():
if name not in filters:
continue
values = filters[name]
if values:
q = q.filter(column.in_(values))
if 'delay' in filters and filters['delay']:
q = q.filter(Record.date_created < datetime.datetime.utcnow() - current_app.config.get(
'DATA_QUERY_DELAY'))
print('delay', datetime.datetime.utcnow() - current_app.config.get(
'DATA_QUERY_DELAY'))
if count_filtered:
return q.count()
# order by
if meta["order_by"] and meta["order_by"] in categorical_columns or meta[
"order_by_direction"] == 'rand':
if meta["order_by_direction"].lower() == 'desc':
q = q.order_by(desc(categorical_columns[meta["order_by"]]))
elif meta["order_by_direction"].lower() == 'rand':
q = q.order_by(func.random())
else:
q = q.order_by(asc(categorical_columns[meta["order_by"]]))
else:
q = q.order_by(asc(Record.id))
results = q.offset(meta["offset"]).limit(meta["limit"]).all()
return results
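# Illustrative call (hypothetical filter values; needs an application context):
#   query(meta={'offset': 0, 'limit': 10, 'order_by': 'id',
#               'order_by_direction': 'asc'},
#         filters={'crop_primary': ['1'], 'delay': False})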
def result_generator(results):
for i, r in enumerate(results):
if i == 0:
yield ','.join(row_to_list(None, headers=True)) + '\n'
yield ','.join([safe_for_csv(c) for c in row_to_list(r)]) + '\n'
def get_filters():
filters = {}
if is_anonymous():
        # request args arrive as strings, so compare against string values;
        # anonymous users default to the delayed view of the data
        filters['delay'] = request.args.get('d', '1') == '1'
    else:
        filters['delay'] = request.args.get('d', '0') == '1'
if is_anonymous() or current_user.role not in ['validation', 'admin']:
filters['use_validation'] = [False]
if request.args.get('southWestBounds') is not None and request.args.get(
'northEastBounds') is not None:
filters['northEastBounds'] = request.args.get('northEastBounds')
filters['southWestBounds'] = request.args.get('southWestBounds')
if request.args.get('ndvi_limit_upper') is not None and request.args.get(
'ndvi_limit_lower') is not None:
filters['ndvi_limit_upper'] = request.args.get('ndvi_limit_upper')
filters['ndvi_limit_lower'] = request.args.get('ndvi_limit_lower')
if len(filters['ndvi_limit_upper'].split(',')) != 23 or len(
filters['ndvi_limit_lower'].split(',')) != 23:
raise FieldError(description="Invalid Array Bounds Length")
for name, column in categorical_columns.iteritems():
values = request.args.getlist(name)
if values:
filters[name] = values
return filters
def get_meta(page_size=1000):
try:
page = int(request.args.get('page', 1))
page_size = min(int(request.args.get('page_size', page_size)), current_app.config.get('DATA_DOWNLOAD_MAX_PAGE_SIZE'))
except ValueError:
raise FieldError(description="Invalid page or page size")
offset = (page - 1) * page_size
if offset < 0:
raise FieldError(description="Invalid page or page size")
order_by = request.args.get('order_by', 'id')
if order_by not in categorical_columns:
raise FieldError(description="Invalid order by column")
order_by_direction = request.args.get('order_by_direction', 'desc')
return {
"page": page,
"page_size": page_size,
"offset": offset,
"limit": min(page_size, 1000000),
"order_by": order_by,
"order_by_direction": order_by_direction
}
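# Illustrative request (hypothetical values) combining the paging/ordering
# parameters parsed by get_meta() with a categorical filter from get_filters():
#   GET /data/search?page=2&page_size=500&order_by=year&order_by_direction=asc&crop_primary=1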
@data_blueprint.route('/search')
@limiter.limit("80 per minute")
def search():
meta = get_meta()
filters = get_filters()
# get counts
count_total = query(count_all=True)
count_filtered = query(filters=filters, count_filtered=True)
# build response
results = query(meta=meta, filters=filters)
headers = {
"Query-Count-Total": str(count_total),
"Query-Count-Filtered": str(count_filtered),
"Cache-Control": "max-age=259200",
"Access-Control-Expose-Headers": "Query-Count-Total, Query-Count-Filtered, Query-Next"
}
if count_filtered > meta["page"] * meta["limit"]:
next_url_params = {
'page': str(meta["page"] + 1),
'page_size': str(meta["limit"]),
'order_by': meta["order_by"],
'order_by_direction': meta["order_by_direction"]
}
next_url_params.update(filters)
next_request = PreparedRequest()
next_request.prepare_url(request.base_url, next_url_params)
headers['Query-Next'] = next_request.url
return Response(result_generator(results), headers=[(k, v) for k, v in headers.iteritems()],
mimetype='text/csv')
@data_blueprint.route('/image')
@limiter.limit("80 per minute")
def image():
filters = get_filters()
meta = {
"order_by": "id",
"order_by_direction": "rand",
"limit": 1000,
"offset": 0
}
# build response
results = query(meta, filters=filters)
headers = {
"Cache-Control": "max-age=259200"
}
paths = ''
for r in results:
if r[0].ndvi is None:
continue
path = None
segments = 0
for i, val in enumerate(r[0].ndvi):
if val is None:
if path is not None:
paths += '<path d="' + path + '"/>'
path = None
segments = 0
continue
x, y = (i * 52.17, 1000 - max(3, min(val, 1000)))
if path is None:
path = "M%d %d" % (x, y)
else:
if segments == 1:
path += "L%d %d" % (x, y)
else:
path += " %d %d" % (x, y)
if i + 1 == len(r[0].ndvi):
paths += '<path d="' + path + '"/>'
path = None
segments = 0
continue
segments += 1
svg = '''<svg viewbox="0 0 1500 1210" preserveAspectRatio="xMidYMid meet">
<g transform="translate(20,20)">
<g class="paths" fill="none" stroke="black" stroke-width="2" transform="translate(150,0)">''' + paths + '''</g>
<g class="y labels" font-size="45">
<text x="90" y="40" text-anchor="end" alignment-baseline="start">1</text>
<text x="90" y="280" text-anchor="end" alignment-baseline="start">0.75</text>
<text x="90" y="530" text-anchor="end" alignment-baseline="start">0.5</text>
<text x="90" y="780" text-anchor="end" alignment-baseline="start">0.25</text>
<text x="90" y="1010" text-anchor="end" alignment-baseline="start">0</text>
</g>
<polyline class="axis" fill="none" stroke="#000000" points="110,10 110,1000 1330,1000 "></polyline>
<g class="y labels" font-size="45">
<text x="115" y="1050" alignment-baseline="start">Jan</text>
<text x="215" y="1050" alignment-baseline="start">Feb</text>
<text x="315" y="1050" alignment-baseline="start">Mar</text>
<text x="415" y="1050" alignment-baseline="start">Apr</text>
<text x="515" y="1050" alignment-baseline="start">May</text>
<text x="615" y="1050" alignment-baseline="start">Jun</text>
<text x="715" y="1050" alignment-baseline="start">Jul</text>
<text x="815" y="1050" alignment-baseline="start">Aug</text>
<text x="915" y="1050" alignment-baseline="start">Sep</text>
<text x="1015" y="1050" alignment-baseline="start">Oct</text>
<text x="1115" y="1050" alignment-baseline="start">Nov</text>
<text x="1215" y="1050" alignment-baseline="start">Dec</text>
</g>
</g>
<g data-temporal-bounds transform="translate(150,0)" data-intervals="23" data-interval-width="52.17"></g>
Sorry, your browser does not support inline SVG.
</svg>'''
return Response(svg, headers=[(k, v) for k, v in headers.iteritems()], mimetype='image/svg+xml')
@data_blueprint.route("/download")
@limiter.limit("10 per minute")
def download():
meta = get_meta(page_size=1000000)
filters = get_filters()
results = query(meta=meta, filters=filters)
return Response(result_generator(results), mimetype='text/csv')
@data_blueprint.route("/download/<country>")
@limiter.limit("10 per minute")
def download_country(country):
meta = get_meta(page_size=1000000)
filters = get_filters()
if request.args.get('justin_says', 'nope') == 'yes':
filters['use_validation'] = [False, True]
filters['country'] = [country]
filters['delay'] = False
results = query(meta=meta, filters=filters)
return Response(result_generator(results), mimetype='text/csv')
|
|
# Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Backup manager manages volume backups.
Volume backups are full copies of persistent volumes stored in a backup
store, e.g. an object store (or any other backup store if and when support
is added). They are usable without the original volume being available. A
volume backup can be restored to the original volume it was created from,
or to any other available volume that is at least as large as the original
volume.
Volume backups can be created, restored, deleted and listed.
**Related Flags**
:backup_topic: What :mod:`rpc` topic to listen to (default:
`cinder-backup`).
:backup_manager: The module name of a class derived from
:class:`manager.Manager` (default:
:class:`cinder.backup.manager.Manager`).
"""
from oslo import messaging
from oslo.utils import excutils
from oslo.utils import importutils
from oslo_config import cfg
from cinder.backup import driver
from cinder.backup import rpcapi as backup_rpcapi
from cinder import context
from cinder import exception
from cinder.i18n import _, _LE, _LI, _LW
from cinder import manager
from cinder.openstack.common import log as logging
from cinder import quota
from cinder import rpc
from cinder import utils
from cinder.volume import utils as volume_utils
LOG = logging.getLogger(__name__)
backup_manager_opts = [
cfg.StrOpt('backup_driver',
default='cinder.backup.drivers.swift',
help='Driver to use for backups.',
deprecated_name='backup_service'),
]
# This map doesn't need to be extended in the future since it's only
# for old backup services
mapper = {'cinder.backup.services.swift': 'cinder.backup.drivers.swift',
'cinder.backup.services.ceph': 'cinder.backup.drivers.ceph'}
CONF = cfg.CONF
CONF.register_opts(backup_manager_opts)
QUOTAS = quota.QUOTAS
class BackupManager(manager.SchedulerDependentManager):
"""Manages backup of block storage devices."""
RPC_API_VERSION = '1.0'
target = messaging.Target(version=RPC_API_VERSION)
def __init__(self, service_name=None, *args, **kwargs):
self.service = importutils.import_module(self.driver_name)
self.az = CONF.storage_availability_zone
self.volume_managers = {}
self._setup_volume_drivers()
self.backup_rpcapi = backup_rpcapi.BackupAPI()
super(BackupManager, self).__init__(service_name='backup',
*args, **kwargs)
@property
def driver_name(self):
"""This function maps old backup services to backup drivers."""
return self._map_service_to_driver(CONF.backup_driver)
def _map_service_to_driver(self, service):
"""Maps services to drivers."""
if service in mapper:
return mapper[service]
return service
@property
def driver(self):
return self._get_driver()
def _get_volume_backend(self, host=None, allow_null_host=False):
if host is None:
if not allow_null_host:
msg = _("NULL host not allowed for volume backend lookup.")
raise exception.BackupFailedToGetVolumeBackend(msg)
else:
LOG.debug("Checking hostname '%s' for backend info." % (host))
part = host.partition('@')
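            # Hosts may take the form '<host>@<backend>' (see
            # _setup_volume_drivers); the text after '@' names the backend,
            # e.g. a hypothetical 'node1@lvm1'.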
if (part[1] == '@') and (part[2] != ''):
backend = part[2]
LOG.debug("Got backend '%s'." % (backend))
return backend
LOG.info(_LI("Backend not found in hostname (%s) so using default.") %
(host))
if 'default' not in self.volume_managers:
# For multi-backend we just pick the top of the list.
return self.volume_managers.keys()[0]
return 'default'
def _get_manager(self, backend):
LOG.debug("Manager requested for volume_backend '%s'." %
(backend))
if backend is None:
LOG.debug("Fetching default backend.")
backend = self._get_volume_backend(allow_null_host=True)
if backend not in self.volume_managers:
msg = (_("Volume manager for backend '%s' does not exist.") %
(backend))
raise exception.BackupFailedToGetVolumeBackend(msg)
return self.volume_managers[backend]
def _get_driver(self, backend=None):
LOG.debug("Driver requested for volume_backend '%s'." %
(backend))
if backend is None:
LOG.debug("Fetching default backend.")
backend = self._get_volume_backend(allow_null_host=True)
mgr = self._get_manager(backend)
mgr.driver.db = self.db
return mgr.driver
def _setup_volume_drivers(self):
if CONF.enabled_backends:
for backend in CONF.enabled_backends:
host = "%s@%s" % (CONF.host, backend)
mgr = importutils.import_object(CONF.volume_manager,
host=host,
service_name=backend)
config = mgr.configuration
backend_name = config.safe_get('volume_backend_name')
LOG.debug("Registering backend %(backend)s (host=%(host)s "
"backend_name=%(backend_name)s)." %
{'backend': backend, 'host': host,
'backend_name': backend_name})
self.volume_managers[backend] = mgr
else:
default = importutils.import_object(CONF.volume_manager)
LOG.debug("Registering default backend %s." % (default))
self.volume_managers['default'] = default
def _init_volume_driver(self, ctxt, driver):
LOG.info(_LI("Starting volume driver %(driver_name)s (%(version)s).") %
{'driver_name': driver.__class__.__name__,
'version': driver.get_version()})
try:
driver.do_setup(ctxt)
driver.check_for_setup_error()
except Exception as ex:
LOG.error(_LE("Error encountered during initialization of driver: "
"%(name)s.") %
{'name': driver.__class__.__name__})
LOG.exception(ex)
# we don't want to continue since we failed
# to initialize the driver correctly.
return
driver.set_initialized()
def init_host(self):
"""Do any initialization that needs to be run if this is a
standalone service.
"""
ctxt = context.get_admin_context()
for mgr in self.volume_managers.itervalues():
self._init_volume_driver(ctxt, mgr.driver)
LOG.info(_LI("Cleaning up incomplete backup operations."))
volumes = self.db.volume_get_all_by_host(ctxt, self.host)
for volume in volumes:
volume_host = volume_utils.extract_host(volume['host'], 'backend')
backend = self._get_volume_backend(host=volume_host)
if volume['status'] == 'backing-up':
LOG.info(_LI('Resetting volume %s to available '
'(was backing-up).') % volume['id'])
mgr = self._get_manager(backend)
mgr.detach_volume(ctxt, volume['id'])
if volume['status'] == 'restoring-backup':
LOG.info(_LI('Resetting volume %s to error_restoring '
'(was restoring-backup).') % volume['id'])
mgr = self._get_manager(backend)
mgr.detach_volume(ctxt, volume['id'])
self.db.volume_update(ctxt, volume['id'],
{'status': 'error_restoring'})
# TODO(smulcahy) implement full resume of backup and restore
# operations on restart (rather than simply resetting)
backups = self.db.backup_get_all_by_host(ctxt, self.host)
for backup in backups:
if backup['status'] == 'creating':
LOG.info(_LI('Resetting backup %s to error (was creating).')
% backup['id'])
err = 'incomplete backup reset on manager restart'
self.db.backup_update(ctxt, backup['id'], {'status': 'error',
'fail_reason': err})
if backup['status'] == 'restoring':
LOG.info(_LI('Resetting backup %s to '
                             'available (was restoring).')
% backup['id'])
self.db.backup_update(ctxt, backup['id'],
{'status': 'available'})
if backup['status'] == 'deleting':
LOG.info(_LI('Resuming delete on backup: %s.') % backup['id'])
self.delete_backup(ctxt, backup['id'])
def create_backup(self, context, backup_id):
"""Create volume backups using configured backup service."""
backup = self.db.backup_get(context, backup_id)
volume_id = backup['volume_id']
volume = self.db.volume_get(context, volume_id)
LOG.info(_LI('Create backup started, backup: %(backup_id)s '
'volume: %(volume_id)s.') %
{'backup_id': backup_id, 'volume_id': volume_id})
self._notify_about_backup_usage(context, backup, "create.start")
volume_host = volume_utils.extract_host(volume['host'], 'backend')
backend = self._get_volume_backend(host=volume_host)
self.db.backup_update(context, backup_id, {'host': self.host,
'service':
self.driver_name})
expected_status = 'backing-up'
actual_status = volume['status']
if actual_status != expected_status:
err = _('Create backup aborted, expected volume status '
'%(expected_status)s but got %(actual_status)s.') % {
'expected_status': expected_status,
'actual_status': actual_status,
}
self.db.backup_update(context, backup_id, {'status': 'error',
'fail_reason': err})
raise exception.InvalidVolume(reason=err)
expected_status = 'creating'
actual_status = backup['status']
if actual_status != expected_status:
err = _('Create backup aborted, expected backup status '
'%(expected_status)s but got %(actual_status)s.') % {
'expected_status': expected_status,
'actual_status': actual_status,
}
self.db.volume_update(context, volume_id, {'status': 'available'})
self.db.backup_update(context, backup_id, {'status': 'error',
'fail_reason': err})
raise exception.InvalidBackup(reason=err)
try:
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught,
# the volume status will be set back to available and
# the backup status to 'error'
utils.require_driver_initialized(self.driver)
backup_service = self.service.get_backup_driver(context)
self._get_driver(backend).backup_volume(context, backup,
backup_service)
except Exception as err:
with excutils.save_and_reraise_exception():
self.db.volume_update(context, volume_id,
{'status': 'available'})
self.db.backup_update(context, backup_id,
{'status': 'error',
'fail_reason': unicode(err)})
self.db.volume_update(context, volume_id, {'status': 'available'})
backup = self.db.backup_update(context, backup_id,
{'status': 'available',
'size': volume['size'],
'availability_zone': self.az})
LOG.info(_LI('Create backup finished. backup: %s.'), backup_id)
self._notify_about_backup_usage(context, backup, "create.end")
def restore_backup(self, context, backup_id, volume_id):
"""Restore volume backups from configured backup service."""
LOG.info(_LI('Restore backup started, backup: %(backup_id)s '
'volume: %(volume_id)s.') %
{'backup_id': backup_id, 'volume_id': volume_id})
backup = self.db.backup_get(context, backup_id)
volume = self.db.volume_get(context, volume_id)
volume_host = volume_utils.extract_host(volume['host'], 'backend')
backend = self._get_volume_backend(host=volume_host)
self._notify_about_backup_usage(context, backup, "restore.start")
self.db.backup_update(context, backup_id, {'host': self.host})
expected_status = 'restoring-backup'
actual_status = volume['status']
if actual_status != expected_status:
err = (_('Restore backup aborted, expected volume status '
'%(expected_status)s but got %(actual_status)s.') %
{'expected_status': expected_status,
'actual_status': actual_status})
self.db.backup_update(context, backup_id, {'status': 'available'})
raise exception.InvalidVolume(reason=err)
expected_status = 'restoring'
actual_status = backup['status']
if actual_status != expected_status:
err = (_('Restore backup aborted: expected backup status '
'%(expected_status)s but got %(actual_status)s.') %
{'expected_status': expected_status,
'actual_status': actual_status})
self.db.backup_update(context, backup_id, {'status': 'error',
'fail_reason': err})
self.db.volume_update(context, volume_id, {'status': 'error'})
raise exception.InvalidBackup(reason=err)
if volume['size'] > backup['size']:
LOG.info(_LI('Volume: %(vol_id)s, size: %(vol_size)d is '
'larger than backup: %(backup_id)s, '
'size: %(backup_size)d, continuing with restore.'),
{'vol_id': volume['id'],
'vol_size': volume['size'],
'backup_id': backup['id'],
'backup_size': backup['size']})
backup_service = self._map_service_to_driver(backup['service'])
configured_service = self.driver_name
if backup_service != configured_service:
err = _('Restore backup aborted, the backup service currently'
' configured [%(configured_service)s] is not the'
' backup service that was used to create this'
' backup [%(backup_service)s].') % {
'configured_service': configured_service,
'backup_service': backup_service,
}
self.db.backup_update(context, backup_id, {'status': 'available'})
self.db.volume_update(context, volume_id, {'status': 'error'})
raise exception.InvalidBackup(reason=err)
try:
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught,
# the volume status will be set back to available and
# the backup status to 'error'
utils.require_driver_initialized(self.driver)
backup_service = self.service.get_backup_driver(context)
self._get_driver(backend).restore_backup(context, backup,
volume,
backup_service)
except Exception:
with excutils.save_and_reraise_exception():
self.db.volume_update(context, volume_id,
{'status': 'error_restoring'})
self.db.backup_update(context, backup_id,
{'status': 'available'})
self.db.volume_update(context, volume_id, {'status': 'available'})
backup = self.db.backup_update(context, backup_id,
{'status': 'available'})
LOG.info(_LI('Restore backup finished, backup %(backup_id)s restored'
' to volume %(volume_id)s.') %
{'backup_id': backup_id, 'volume_id': volume_id})
self._notify_about_backup_usage(context, backup, "restore.end")
def delete_backup(self, context, backup_id):
"""Delete volume backup from configured backup service."""
try:
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught
# and the backup status updated. Fail early since there
# are no other status to change but backup's
utils.require_driver_initialized(self.driver)
except exception.DriverNotInitialized as err:
with excutils.save_and_reraise_exception():
self.db.backup_update(context, backup_id,
{'status': 'error',
'fail_reason':
unicode(err)})
LOG.info(_LI('Delete backup started, backup: %s.'), backup_id)
backup = self.db.backup_get(context, backup_id)
self._notify_about_backup_usage(context, backup, "delete.start")
self.db.backup_update(context, backup_id, {'host': self.host})
expected_status = 'deleting'
actual_status = backup['status']
if actual_status != expected_status:
err = _('Delete_backup aborted, expected backup status '
'%(expected_status)s but got %(actual_status)s.') \
% {'expected_status': expected_status,
'actual_status': actual_status}
self.db.backup_update(context, backup_id,
{'status': 'error', 'fail_reason': err})
raise exception.InvalidBackup(reason=err)
backup_service = self._map_service_to_driver(backup['service'])
if backup_service is not None:
configured_service = self.driver_name
if backup_service != configured_service:
err = _('Delete backup aborted, the backup service currently'
' configured [%(configured_service)s] is not the'
' backup service that was used to create this'
' backup [%(backup_service)s].')\
% {'configured_service': configured_service,
'backup_service': backup_service}
self.db.backup_update(context, backup_id,
{'status': 'error'})
raise exception.InvalidBackup(reason=err)
try:
backup_service = self.service.get_backup_driver(context)
backup_service.delete(backup)
except Exception as err:
with excutils.save_and_reraise_exception():
self.db.backup_update(context, backup_id,
{'status': 'error',
'fail_reason':
unicode(err)})
# Get reservations
try:
reserve_opts = {
'backups': -1,
'backup_gigabytes': -backup['size'],
}
reservations = QUOTAS.reserve(context,
project_id=backup['project_id'],
**reserve_opts)
except Exception:
reservations = None
LOG.exception(_LE("Failed to update usages deleting backup"))
context = context.elevated()
self.db.backup_destroy(context, backup_id)
# Commit the reservations
if reservations:
QUOTAS.commit(context, reservations,
project_id=backup['project_id'])
LOG.info(_LI('Delete backup finished, backup %s deleted.'), backup_id)
self._notify_about_backup_usage(context, backup, "delete.end")
def _notify_about_backup_usage(self,
context,
backup,
event_suffix,
extra_usage_info=None):
volume_utils.notify_about_backup_usage(
context, backup, event_suffix,
extra_usage_info=extra_usage_info,
host=self.host)
def export_record(self, context, backup_id):
"""Export all volume backup metadata details to allow clean import.
Export backup metadata so it could be re-imported into the database
without any prerequisite in the backup database.
:param context: running context
:param backup_id: backup id to export
        :returns: backup_record - a description of how to import the backup,
                  containing 'backup_url' (how to import the backup) and
                  'backup_service' (the driver needed for the import)
:raises: InvalidBackup
"""
LOG.info(_LI('Export record started, backup: %s.'), backup_id)
backup = self.db.backup_get(context, backup_id)
expected_status = 'available'
actual_status = backup['status']
if actual_status != expected_status:
err = (_('Export backup aborted, expected backup status '
'%(expected_status)s but got %(actual_status)s.') %
{'expected_status': expected_status,
'actual_status': actual_status})
raise exception.InvalidBackup(reason=err)
backup_record = {}
backup_record['backup_service'] = backup['service']
backup_service = self._map_service_to_driver(backup['service'])
configured_service = self.driver_name
if backup_service != configured_service:
err = (_('Export record aborted, the backup service currently'
' configured [%(configured_service)s] is not the'
' backup service that was used to create this'
' backup [%(backup_service)s].') %
{'configured_service': configured_service,
'backup_service': backup_service})
raise exception.InvalidBackup(reason=err)
# Call driver to create backup description string
try:
utils.require_driver_initialized(self.driver)
backup_service = self.service.get_backup_driver(context)
backup_url = backup_service.export_record(backup)
backup_record['backup_url'] = backup_url
except Exception as err:
msg = unicode(err)
raise exception.InvalidBackup(reason=msg)
LOG.info(_LI('Export record finished, backup %s exported.'), backup_id)
return backup_record
def import_record(self,
context,
backup_id,
backup_service,
backup_url,
backup_hosts):
"""Import all volume backup metadata details to the backup db.
:param context: running context
:param backup_id: The new backup id for the import
:param backup_service: The needed backup driver for import
:param backup_url: An identifier string to locate the backup
:param backup_hosts: Potential hosts to execute the import
:raises: InvalidBackup
:raises: ServiceNotFound
"""
LOG.info(_LI('Import record started, backup_url: %s.'), backup_url)
# Can we import this backup?
if (backup_service != self.driver_name):
# No, are there additional potential backup hosts in the list?
if len(backup_hosts) > 0:
# try the next host on the list, maybe he can import
first_host = backup_hosts.pop()
self.backup_rpcapi.import_record(context,
first_host,
backup_id,
backup_service,
backup_url,
backup_hosts)
else:
# empty list - we are the last host on the list, fail
err = _('Import record failed, cannot find backup '
'service to perform the import. Request service '
'%(service)s') % {'service': backup_service}
self.db.backup_update(context, backup_id, {'status': 'error',
'fail_reason': err})
raise exception.ServiceNotFound(service_id=backup_service)
else:
# Yes...
try:
utils.require_driver_initialized(self.driver)
backup_service = self.service.get_backup_driver(context)
backup_options = backup_service.import_record(backup_url)
except Exception as err:
msg = unicode(err)
self.db.backup_update(context,
backup_id,
{'status': 'error',
'fail_reason': msg})
raise exception.InvalidBackup(reason=msg)
required_import_options = ['display_name',
'display_description',
'container',
'size',
'service_metadata',
'service',
'object_count']
backup_update = {}
backup_update['status'] = 'available'
backup_update['service'] = self.driver_name
backup_update['availability_zone'] = self.az
backup_update['host'] = self.host
for entry in required_import_options:
if entry not in backup_options:
                    msg = (_('Backup metadata received from driver for '
                             'import is missing %s.') % entry)
self.db.backup_update(context,
backup_id,
{'status': 'error',
'fail_reason': msg})
raise exception.InvalidBackup(reason=msg)
backup_update[entry] = backup_options[entry]
# Update the database
self.db.backup_update(context, backup_id, backup_update)
# Verify backup
try:
if isinstance(backup_service, driver.BackupDriverWithVerify):
backup_service.verify(backup_id)
else:
LOG.warn(_LW('Backup service %(service)s does not support '
'verify. Backup id %(id)s is not verified. '
'Skipping verify.') % {'service':
self.driver_name,
'id': backup_id})
except exception.InvalidBackup as err:
with excutils.save_and_reraise_exception():
self.db.backup_update(context, backup_id,
{'status': 'error',
'fail_reason':
unicode(err)})
LOG.info(_LI('Import record id %s metadata from driver '
'finished.') % backup_id)
def reset_status(self, context, backup_id, status):
"""Reset volume backup status.
:param context: running context
:param backup_id: The backup id for reset status operation
:param status: The status to be set
:raises: InvalidBackup
:raises: BackupVerifyUnsupportedDriver
:raises: AttributeError
"""
LOG.info(_LI('Reset backup status started, backup_id: '
'%(backup_id)s, status: %(status)s.'),
{'backup_id': backup_id,
'status': status})
try:
# NOTE(flaper87): Verify the driver is enabled
# before going forward. The exception will be caught
# and the backup status updated. Fail early since there
# are no other status to change but backup's
utils.require_driver_initialized(self.driver)
except exception.DriverNotInitialized:
with excutils.save_and_reraise_exception():
LOG.exception(_LE("Backup driver has not been initialized"))
backup = self.db.backup_get(context, backup_id)
backup_service = self._map_service_to_driver(backup['service'])
LOG.info(_LI('Backup service: %s.'), backup_service)
if backup_service is not None:
configured_service = self.driver_name
if backup_service != configured_service:
err = _('Reset backup status aborted, the backup service'
' currently configured [%(configured_service)s] '
'is not the backup service that was used to create'
' this backup [%(backup_service)s].') % \
{'configured_service': configured_service,
'backup_service': backup_service}
raise exception.InvalidBackup(reason=err)
# Verify backup
try:
# check whether the backup is ok or not
if status == 'available' and backup['status'] != 'restoring':
# check whether we could verify the backup is ok or not
if isinstance(backup_service,
driver.BackupDriverWithVerify):
backup_service.verify(backup_id)
self.db.backup_update(context, backup_id,
{'status': status})
# driver does not support verify function
else:
msg = (_('Backup service %(configured_service)s '
'does not support verify. Backup id'
' %(id)s is not verified. '
'Skipping verify.') %
{'configured_service': self.driver_name,
'id': backup_id})
raise exception.BackupVerifyUnsupportedDriver(
reason=msg)
# reset status to error or from restoring to available
else:
if (status == 'error' or
(status == 'available' and
backup['status'] == 'restoring')):
self.db.backup_update(context, backup_id,
{'status': status})
except exception.InvalidBackup:
with excutils.save_and_reraise_exception():
                msg = (_("Backup id %(id)s is invalid. "
                         "Skipping reset.") % {'id': backup_id})
LOG.error(msg)
except exception.BackupVerifyUnsupportedDriver:
with excutils.save_and_reraise_exception():
msg = (_('Backup service %(configured_service)s '
'does not support verify. Backup id'
' %(id)s is not verified. '
'Skipping verify.') %
{'configured_service': self.driver_name,
'id': backup_id})
LOG.error(msg)
except AttributeError:
msg = (_('Backup service %(service)s does not support '
'verify. Backup id %(id)s is not verified. '
'Skipping reset.') %
{'service': self.driver_name,
'id': backup_id})
LOG.error(msg)
raise exception.BackupVerifyUnsupportedDriver(
reason=msg)
# send notification to ceilometer
notifier_info = {'id': backup_id, 'update': {'status': status}}
notifier = rpc.get_notifier('backupStatusUpdate')
        notifier.info(context, 'backups.reset_status.end', notifier_info)
|
|
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2016 Red Hat Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import re
import ast
import operator
from itertools import chain
from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils.basic import AnsibleFallbackNotFound
try:
from jinja2 import Environment, StrictUndefined
from jinja2.exceptions import UndefinedError
HAS_JINJA2 = True
except ImportError:
HAS_JINJA2 = False
OPERATORS = frozenset(['ge', 'gt', 'eq', 'neq', 'lt', 'le'])
ALIASES = frozenset([('min', 'ge'), ('max', 'le'), ('exactly', 'eq'), ('neq', 'ne')])
def to_list(val):
if isinstance(val, (list, tuple, set)):
return list(val)
elif val is not None:
return [val]
else:
return list()
def sort_list(val):
if isinstance(val, list):
return sorted(val)
return val
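# Illustrative behaviour (hypothetical values):
#   to_list('x')       # -> ['x']
#   to_list(None)      # -> []
#   sort_list([3, 1])  # -> [1, 3]; non-list values are returned unchanged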
class Entity(object):
    """Transforms a dict by applying an argument spec
This class will take a dict and apply an Ansible argument spec to the
values. The resulting dict will contain all of the keys in the param
with appropriate values set.
Example::
argument_spec = dict(
command=dict(key=True),
display=dict(default='text', choices=['text', 'json']),
validate=dict(type='bool')
)
transform = Entity(module, argument_spec)
value = dict(command='foo')
result = transform(value)
print result
{'command': 'foo', 'display': 'text', 'validate': None}
Supported argument spec:
* key - specifies how to map a single value to a dict
* read_from - read and apply the argument_spec from the module
* required - a value is required
* type - type of value (uses AnsibleModule type checker)
* fallback - implements fallback function
* choices - set of valid options
* default - default value
"""
def __init__(self, module, attrs=None, args=[], keys=None, from_argspec=False):
self._attributes = attrs or {}
self._module = module
for arg in args:
self._attributes[arg] = dict()
if from_argspec:
self._attributes[arg]['read_from'] = arg
if keys and arg in keys:
self._attributes[arg]['key'] = True
self.attr_names = frozenset(self._attributes.keys())
_has_key = False
for name, attr in iteritems(self._attributes):
if attr.get('read_from'):
if attr['read_from'] not in self._module.argument_spec:
module.fail_json(msg='argument %s does not exist' % attr['read_from'])
spec = self._module.argument_spec.get(attr['read_from'])
for key, value in iteritems(spec):
if key not in attr:
attr[key] = value
if attr.get('key'):
if _has_key:
module.fail_json(msg='only one key value can be specified')
_has_key = True
attr['required'] = True
def serialize(self):
return self._attributes
def to_dict(self, value):
obj = {}
for name, attr in iteritems(self._attributes):
if attr.get('key'):
obj[name] = value
else:
obj[name] = attr.get('default')
return obj
def __call__(self, value, strict=True):
if not isinstance(value, dict):
value = self.to_dict(value)
if strict:
unknown = set(value).difference(self.attr_names)
if unknown:
self._module.fail_json(msg='invalid keys: %s' % ','.join(unknown))
for name, attr in iteritems(self._attributes):
if value.get(name) is None:
value[name] = attr.get('default')
if attr.get('fallback') and not value.get(name):
fallback = attr.get('fallback', (None,))
fallback_strategy = fallback[0]
fallback_args = []
fallback_kwargs = {}
if fallback_strategy is not None:
for item in fallback[1:]:
if isinstance(item, dict):
fallback_kwargs = item
else:
fallback_args = item
try:
value[name] = fallback_strategy(*fallback_args, **fallback_kwargs)
except AnsibleFallbackNotFound:
continue
if attr.get('required') and value.get(name) is None:
self._module.fail_json(msg='missing required attribute %s' % name)
if 'choices' in attr:
if value[name] not in attr['choices']:
self._module.fail_json(msg='%s must be one of %s, got %s' % (name, ', '.join(attr['choices']), value[name]))
if value[name] is not None:
value_type = attr.get('type', 'str')
type_checker = self._module._CHECK_ARGUMENT_TYPES_DISPATCHER[value_type]
type_checker(value[name])
elif value.get(name):
value[name] = self._module.params[name]
return value
class EntityCollection(Entity):
"""Extends ```Entity``` to handle a list of dicts """
def __call__(self, iterable, strict=True):
if iterable is None:
iterable = [super(EntityCollection, self).__call__(self._module.params, strict)]
if not isinstance(iterable, (list, tuple)):
self._module.fail_json(msg='value must be an iterable')
return [(super(EntityCollection, self).__call__(i, strict)) for i in iterable]
# these two are for backwards compatibility and can be removed once all of the
# modules that use them are updated
class ComplexDict(Entity):
def __init__(self, attrs, module, *args, **kwargs):
super(ComplexDict, self).__init__(module, attrs, *args, **kwargs)
class ComplexList(EntityCollection):
def __init__(self, attrs, module, *args, **kwargs):
super(ComplexList, self).__init__(module, attrs, *args, **kwargs)
def dict_diff(base, comparable):
""" Generate a dict object of differences
This function will compare two dict objects and return the difference
between them as a dict object. For scalar values, the key will reflect
    the updated value. If the key does not exist in `comparable`, then no
key will be returned. For lists, the value in comparable will wholly replace
the value in base for the key. For dicts, the returned value will only
return keys that are different.
:param base: dict object to base the diff on
:param comparable: dict object to compare against base
:returns: new dict object with differences
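    Illustrative example (hypothetical values)::
        base = {'a': 1, 'b': [1, 2], 'c': {'x': 1}}
        other = {'a': 1, 'b': [1, 3], 'c': {'x': 2}, 'd': 4}
        dict_diff(base, other)
        # -> {'b': [1, 3], 'c': {'x': 2}, 'd': 4}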
"""
assert isinstance(base, dict), "`base` must be of type <dict>"
assert isinstance(comparable, dict), "`comparable` must be of type <dict>"
updates = dict()
for key, value in iteritems(base):
if isinstance(value, dict):
item = comparable.get(key)
if item is not None:
updates[key] = dict_diff(value, comparable[key])
else:
comparable_value = comparable.get(key)
if comparable_value is not None:
if sort_list(base[key]) != sort_list(comparable_value):
updates[key] = comparable_value
for key in set(comparable.keys()).difference(base.keys()):
updates[key] = comparable.get(key)
return updates
def dict_merge(base, other):
""" Return a new dict object that combines base and other
This will create a new dict object that is a combination of the key/value
pairs from base and other. When both keys exist, the value will be
selected from other. If the value is a list object, the two lists will
be combined and duplicate entries removed.
:param base: dict object to serve as base
:param other: dict object to combine with base
:returns: new combined dict object
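    Illustrative example (hypothetical values; the merged-list order is not
    guaranteed because duplicates are removed via ``set``)::
        base = {'a': 1, 'b': [1, 2], 'c': {'x': 1}}
        other = {'b': [2, 3], 'c': {'y': 2}, 'd': 4}
        dict_merge(base, other)
        # -> {'a': 1, 'b': [1, 2, 3], 'c': {'x': 1, 'y': 2}, 'd': 4}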
"""
assert isinstance(base, dict), "`base` must be of type <dict>"
assert isinstance(other, dict), "`other` must be of type <dict>"
combined = dict()
for key, value in iteritems(base):
if isinstance(value, dict):
if key in other:
item = other.get(key)
if item is not None:
combined[key] = dict_merge(value, other[key])
else:
combined[key] = item
else:
combined[key] = value
elif isinstance(value, list):
if key in other:
item = other.get(key)
if item is not None:
combined[key] = list(set(chain(value, item)))
else:
combined[key] = item
else:
combined[key] = value
else:
if key in other:
other_value = other.get(key)
if other_value is not None:
if sort_list(base[key]) != sort_list(other_value):
combined[key] = other_value
else:
combined[key] = value
else:
combined[key] = other_value
else:
combined[key] = value
for key in set(other.keys()).difference(base.keys()):
combined[key] = other.get(key)
return combined
def conditional(expr, val, cast=None):
    match = re.match(r'^(.+)\((.+)\)$', str(expr), re.I)
if match:
op, arg = match.groups()
else:
op = 'eq'
assert (' ' not in str(expr)), 'invalid expression: cannot contain spaces'
arg = expr
if cast is None and val is not None:
arg = type(val)(arg)
elif callable(cast):
arg = cast(arg)
val = cast(val)
op = next((oper for alias, oper in ALIASES if op == alias), op)
if not hasattr(operator, op) and op not in OPERATORS:
raise ValueError('unknown operator: %s' % op)
func = getattr(operator, op)
return func(val, arg)
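# Illustrative usage of conditional() (hypothetical values):
#   conditional('ge(2)', 3)     # -> True, parsed as operator.ge(3, 2)
#   conditional('5', 5)         # -> True, bare values default to the 'eq' operator
#   conditional('min(10)', 15)  # -> True, 'min' is an alias for 'ge'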
def ternary(value, true_val, false_val):
''' value ? true_val : false_val '''
if value:
return true_val
else:
return false_val
def remove_default_spec(spec):
for item in spec:
if 'default' in spec[item]:
del spec[item]['default']
class Template:
def __init__(self):
if not HAS_JINJA2:
raise ImportError("jinja2 is required but does not appear to be installed. "
"It can be installed using `pip install jinja2`")
self.env = Environment(undefined=StrictUndefined)
self.env.filters.update({'ternary': ternary})
def __call__(self, value, variables=None, fail_on_undefined=True):
variables = variables or {}
if not self.contains_vars(value):
return value
try:
value = self.env.from_string(value).render(variables)
except UndefinedError:
if not fail_on_undefined:
return None
raise
if value:
try:
return ast.literal_eval(value)
except:
return str(value)
else:
return None
def contains_vars(self, data):
if isinstance(data, string_types):
for marker in (self.env.block_start_string, self.env.variable_start_string, self.env.comment_start_string):
if marker in data:
return True
return False
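# Illustrative usage of Template (hypothetical values; requires jinja2):
#   t = Template()
#   t("{{ hostname }}", {"hostname": "sw01"})  # -> 'sw01'
#   t("no variables here")                     # -> returned unchanged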
|
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Unit tests for grit.format.resource_map'''
import os
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
import StringIO
import unittest
from grit import grd_reader
from grit import util
from grit.format import resource_map
class FormatResourceMapUnittest(unittest.TestCase):
def testFormatResourceMap(self):
grd = grd_reader.Parse(StringIO.StringIO(
'''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en" current_release="3"
base_dir=".">
<outputs>
<output type="rc_header" filename="the_rc_header.h" />
<output type="resource_map_header"
filename="the_resource_map_header.h" />
</outputs>
<release seq="3">
<structures first_id="300">
<structure type="menu" name="IDC_KLONKMENU"
file="grit\\testdata\\klonk.rc" encoding="utf-16" />
</structures>
<includes first_id="10000">
<include type="foo" file="abc" name="IDS_FIRSTPRESENT" />
<if expr="False">
<include type="foo" file="def" name="IDS_MISSING" />
</if>
<if expr="lang != 'es'">
<include type="foo" file="ghi" name="IDS_LANGUAGESPECIFIC" />
</if>
<if expr="lang == 'es'">
<include type="foo" file="jkl" name="IDS_LANGUAGESPECIFIC" />
</if>
<include type="foo" file="mno" name="IDS_THIRDPRESENT" />
</includes>
</release>
</grit>'''), util.PathFromRoot('.'))
grd.SetOutputLanguage('en')
grd.RunGatherers()
output = util.StripBlankLinesAndComments(''.join(
resource_map.GetFormatter('resource_map_header')(grd, 'en', '.')))
self.assertEqual('''\
#include <stddef.h>
#ifndef GRIT_RESOURCE_MAP_STRUCT_
#define GRIT_RESOURCE_MAP_STRUCT_
struct GritResourceMap {
const char* const name;
int value;
};
#endif // GRIT_RESOURCE_MAP_STRUCT_
extern const GritResourceMap kTheRcHeader[];
extern const size_t kTheRcHeaderSize;''', output)
output = util.StripBlankLinesAndComments(''.join(
resource_map.GetFormatter('resource_map_source')(grd, 'en', '.')))
self.assertEqual('''\
#include "the_resource_map_header.h"
#include <stddef.h>
#include "base/macros.h"
#include "the_rc_header.h"
const GritResourceMap kTheRcHeader[] = {
{"IDC_KLONKMENU", IDC_KLONKMENU},
{"IDS_FIRSTPRESENT", IDS_FIRSTPRESENT},
{"IDS_MISSING", IDS_MISSING},
{"IDS_LANGUAGESPECIFIC", IDS_LANGUAGESPECIFIC},
{"IDS_THIRDPRESENT", IDS_THIRDPRESENT},
};
const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)
output = util.StripBlankLinesAndComments(''.join(
resource_map.GetFormatter('resource_file_map_source')(grd, 'en', '.')))
self.assertEqual('''\
#include "the_resource_map_header.h"
#include <stddef.h>
#include "base/macros.h"
#include "the_rc_header.h"
const GritResourceMap kTheRcHeader[] = {
{"grit/testdata/klonk.rc", IDC_KLONKMENU},
{"abc", IDS_FIRSTPRESENT},
{"def", IDS_MISSING},
{"ghi", IDS_LANGUAGESPECIFIC},
{"jkl", IDS_LANGUAGESPECIFIC},
{"mno", IDS_THIRDPRESENT},
};
const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)
def testFormatResourceMapWithOutputAllEqualsFalseForStructures(self):
grd = grd_reader.Parse(StringIO.StringIO(
'''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en" current_release="3"
base_dir="." output_all_resource_defines="false">
<outputs>
<output type="rc_header" filename="the_rc_header.h" />
<output type="resource_map_header"
filename="the_resource_map_header.h" />
<output type="resource_map_source"
filename="the_resource_map_header.cc" />
</outputs>
<release seq="3">
<structures first_id="300">
<structure type="chrome_scaled_image" name="IDR_KLONKMENU"
file="foo.png" />
<if expr="False">
<structure type="chrome_scaled_image" name="IDR_MISSING"
file="bar.png" />
</if>
<if expr="True">
<structure type="chrome_scaled_image" name="IDR_BLOB"
file="blob.png" />
</if>
<if expr="True">
<then>
<structure type="chrome_scaled_image" name="IDR_METEOR"
file="meteor.png" />
</then>
<else>
<structure type="chrome_scaled_image" name="IDR_METEOR"
file="roetem.png" />
</else>
</if>
<if expr="False">
<structure type="chrome_scaled_image" name="IDR_LAST"
file="zyx.png" />
</if>
<if expr="True">
<structure type="chrome_scaled_image" name="IDR_LAST"
file="xyz.png" />
</if>
</structures>
</release>
</grit>'''), util.PathFromRoot('.'))
grd.SetOutputLanguage('en')
grd.RunGatherers()
output = util.StripBlankLinesAndComments(''.join(
resource_map.GetFormatter('resource_map_header')(grd, 'en', '.')))
self.assertEqual('''\
#include <stddef.h>
#ifndef GRIT_RESOURCE_MAP_STRUCT_
#define GRIT_RESOURCE_MAP_STRUCT_
struct GritResourceMap {
const char* const name;
int value;
};
#endif // GRIT_RESOURCE_MAP_STRUCT_
extern const GritResourceMap kTheRcHeader[];
extern const size_t kTheRcHeaderSize;''', output)
output = util.StripBlankLinesAndComments(''.join(
resource_map.GetFormatter('resource_map_source')(grd, 'en', '.')))
self.assertEqual('''\
#include "the_resource_map_header.h"
#include <stddef.h>
#include "base/macros.h"
#include "the_rc_header.h"
const GritResourceMap kTheRcHeader[] = {
{"IDR_KLONKMENU", IDR_KLONKMENU},
{"IDR_BLOB", IDR_BLOB},
{"IDR_METEOR", IDR_METEOR},
{"IDR_LAST", IDR_LAST},
};
const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)
output = util.StripBlankLinesAndComments(''.join(
resource_map.GetFormatter('resource_map_source')(grd, 'en', '.')))
self.assertEqual('''\
#include "the_resource_map_header.h"
#include <stddef.h>
#include "base/macros.h"
#include "the_rc_header.h"
const GritResourceMap kTheRcHeader[] = {
{"IDR_KLONKMENU", IDR_KLONKMENU},
{"IDR_BLOB", IDR_BLOB},
{"IDR_METEOR", IDR_METEOR},
{"IDR_LAST", IDR_LAST},
};
const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)
def testFormatResourceMapWithOutputAllEqualsFalseForIncludes(self):
grd = grd_reader.Parse(StringIO.StringIO(
'''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en" current_release="3"
base_dir="." output_all_resource_defines="false">
<outputs>
<output type="rc_header" filename="the_rc_header.h" />
<output type="resource_map_header"
filename="the_resource_map_header.h" />
</outputs>
<release seq="3">
<structures first_id="300">
<structure type="menu" name="IDC_KLONKMENU"
file="grit\\testdata\\klonk.rc" encoding="utf-16" />
</structures>
<includes first_id="10000">
<include type="foo" file="abc" name="IDS_FIRSTPRESENT" />
<if expr="False">
<include type="foo" file="def" name="IDS_MISSING" />
</if>
<include type="foo" file="mno" name="IDS_THIRDPRESENT" />
<if expr="True">
<include type="foo" file="blob" name="IDS_BLOB" />
</if>
<if expr="True">
<then>
<include type="foo" file="meteor" name="IDS_METEOR" />
</then>
<else>
<include type="foo" file="roetem" name="IDS_METEOR" />
</else>
</if>
<if expr="False">
<include type="foo" file="zyx" name="IDS_LAST" />
</if>
<if expr="True">
<include type="foo" file="xyz" name="IDS_LAST" />
</if>
</includes>
</release>
</grit>'''), util.PathFromRoot('.'))
grd.SetOutputLanguage('en')
grd.RunGatherers()
output = util.StripBlankLinesAndComments(''.join(
resource_map.GetFormatter('resource_map_header')(grd, 'en', '.')))
self.assertEqual('''\
#include <stddef.h>
#ifndef GRIT_RESOURCE_MAP_STRUCT_
#define GRIT_RESOURCE_MAP_STRUCT_
struct GritResourceMap {
const char* const name;
int value;
};
#endif // GRIT_RESOURCE_MAP_STRUCT_
extern const GritResourceMap kTheRcHeader[];
extern const size_t kTheRcHeaderSize;''', output)
output = util.StripBlankLinesAndComments(''.join(
resource_map.GetFormatter('resource_map_source')(grd, 'en', '.')))
self.assertEqual('''\
#include "the_resource_map_header.h"
#include <stddef.h>
#include "base/macros.h"
#include "the_rc_header.h"
const GritResourceMap kTheRcHeader[] = {
{"IDC_KLONKMENU", IDC_KLONKMENU},
{"IDS_FIRSTPRESENT", IDS_FIRSTPRESENT},
{"IDS_THIRDPRESENT", IDS_THIRDPRESENT},
{"IDS_BLOB", IDS_BLOB},
{"IDS_METEOR", IDS_METEOR},
{"IDS_LAST", IDS_LAST},
};
const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)
output = util.StripBlankLinesAndComments(''.join(
resource_map.GetFormatter('resource_file_map_source')(grd, 'en', '.')))
self.assertEqual('''\
#include "the_resource_map_header.h"
#include <stddef.h>
#include "base/macros.h"
#include "the_rc_header.h"
const GritResourceMap kTheRcHeader[] = {
{"grit/testdata/klonk.rc", IDC_KLONKMENU},
{"abc", IDS_FIRSTPRESENT},
{"mno", IDS_THIRDPRESENT},
{"blob", IDS_BLOB},
{"meteor", IDS_METEOR},
{"xyz", IDS_LAST},
};
const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)
def testFormatStringResourceMap(self):
grd = grd_reader.Parse(StringIO.StringIO(
'''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en" current_release="3"
base_dir=".">
<outputs>
<output type="rc_header" filename="the_rc_header.h" />
<output type="resource_map_header" filename="the_rc_map_header.h" />
<output type="resource_map_source" filename="the_rc_map_source.cc" />
</outputs>
<release seq="1" allow_pseudo="false">
<messages fallback_to_english="true">
<message name="IDS_PRODUCT_NAME" desc="The application name">
Application
</message>
<if expr="True">
<message name="IDS_DEFAULT_TAB_TITLE_TITLE_CASE"
desc="In Title Case: The default title in a tab.">
New Tab
</message>
</if>
<if expr="False">
<message name="IDS_DEFAULT_TAB_TITLE"
desc="The default title in a tab.">
New tab
</message>
</if>
</messages>
</release>
</grit>'''), util.PathFromRoot('.'))
grd.SetOutputLanguage('en')
grd.RunGatherers()
output = util.StripBlankLinesAndComments(''.join(
resource_map.GetFormatter('resource_map_header')(grd, 'en', '.')))
self.assertEqual('''\
#include <stddef.h>
#ifndef GRIT_RESOURCE_MAP_STRUCT_
#define GRIT_RESOURCE_MAP_STRUCT_
struct GritResourceMap {
const char* const name;
int value;
};
#endif // GRIT_RESOURCE_MAP_STRUCT_
extern const GritResourceMap kTheRcHeader[];
extern const size_t kTheRcHeaderSize;''', output)
output = util.StripBlankLinesAndComments(''.join(
resource_map.GetFormatter('resource_map_source')(grd, 'en', '.')))
self.assertEqual('''\
#include "the_rc_map_header.h"
#include <stddef.h>
#include "base/macros.h"
#include "the_rc_header.h"
const GritResourceMap kTheRcHeader[] = {
{"IDS_PRODUCT_NAME", IDS_PRODUCT_NAME},
{"IDS_DEFAULT_TAB_TITLE_TITLE_CASE", IDS_DEFAULT_TAB_TITLE_TITLE_CASE},
};
const size_t kTheRcHeaderSize = arraysize(kTheRcHeader);''', output)
if __name__ == '__main__':
unittest.main()
|
|
"""
The different agnocomplete classes to be discovered
"""
from copy import copy
from six import with_metaclass
from abc import abstractmethod, ABCMeta
import logging
from django.db.models import Q
from django.core.exceptions import ImproperlyConfigured
from django.utils.encoding import force_text as text
from django.conf import settings
import requests
from .constants import AGNOCOMPLETE_DEFAULT_PAGESIZE
from .constants import AGNOCOMPLETE_MIN_PAGESIZE
from .constants import AGNOCOMPLETE_MAX_PAGESIZE
from .constants import AGNOCOMPLETE_DEFAULT_QUERYSIZE
from .constants import AGNOCOMPLETE_MIN_QUERYSIZE
from .exceptions import AuthenticationRequiredAgnocompleteException
from .exceptions import SkipItem
from .exceptions import ItemNotFound
logger = logging.getLogger(__name__)
class ClassPropertyDescriptor(object):
"""
    Toolkit class used to instantiate a class property.
"""
def __init__(self, fget, fset=None):
self.fget = fget
self.fset = fset
def __get__(self, obj, klass=None):
if klass is None:
klass = type(obj)
return self.fget.__get__(obj, klass)()
def __set__(self, obj, value):
if not self.fset:
raise AttributeError("can't set attribute")
type_ = type(obj)
return self.fset.__get__(obj, type_)(value)
def setter(self, func):
"""
Setter: the decorated method will become a class property.
"""
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
self.fset = func
return self
def classproperty(func):
"""
Decorator: the given function will become a class property.
e.g::
class SafeClass(object):
@classproperty
def safe(cls):
return True
class UnsafeClass(object):
@classproperty
def safe(cls):
return False
"""
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
return ClassPropertyDescriptor(func)
def load_settings_sizes():
"""
Load sizes from settings or fallback to the module constants
"""
page_size = AGNOCOMPLETE_DEFAULT_PAGESIZE
settings_page_size = getattr(
settings, 'AGNOCOMPLETE_DEFAULT_PAGESIZE', None)
page_size = settings_page_size or page_size
page_size_min = AGNOCOMPLETE_MIN_PAGESIZE
settings_page_size_min = getattr(
settings, 'AGNOCOMPLETE_MIN_PAGESIZE', None)
page_size_min = settings_page_size_min or page_size_min
page_size_max = AGNOCOMPLETE_MAX_PAGESIZE
settings_page_size_max = getattr(
settings, 'AGNOCOMPLETE_MAX_PAGESIZE', None)
page_size_max = settings_page_size_max or page_size_max
# Query sizes
query_size = AGNOCOMPLETE_DEFAULT_QUERYSIZE
settings_query_size = getattr(
settings, 'AGNOCOMPLETE_DEFAULT_QUERYSIZE', None)
query_size = settings_query_size or query_size
query_size_min = AGNOCOMPLETE_MIN_QUERYSIZE
settings_query_size_min = getattr(
settings, 'AGNOCOMPLETE_MIN_QUERYSIZE', None)
query_size_min = settings_query_size_min or query_size_min
return (
page_size, page_size_min, page_size_max,
query_size, query_size_min,
)
class AgnocompleteBase(with_metaclass(ABCMeta, object)):
"""
Base class for Agnocomplete tools.
"""
# To be overridden by settings, or constructor arguments
page_size = None
page_size_max = None
page_size_min = None
query_size = None
query_size_min = None
url = None
def __init__(self, user=None, page_size=None, url=None):
# Loading the user context
self.user = user
# Load from settings or fallback to constants
settings_page_size, settings_page_size_min, settings_page_size_max, \
query_size, query_size_min = load_settings_sizes()
# Use the class attributes or fallback to settings
self._conf_page_size = self.page_size or settings_page_size
self._conf_page_size_min = self.page_size_min or settings_page_size_min
self._conf_page_size_max = self.page_size_max or settings_page_size_max
        # Use instance constructor parameters to optionally override defaults
page_size = page_size or self._conf_page_size
if page_size > self._conf_page_size_max \
or page_size < self._conf_page_size_min:
page_size = self._conf_page_size
# Finally set this as the wanted page_size
self._page_size = page_size
# set query sizes
self._query_size = self.query_size or query_size
self._query_size_min = self.query_size_min or query_size_min
        # Optional custom URL override
self._url = url
def set_agnocomplete_field(self, field):
self.agnocomplete_field = field
@classproperty
def slug(cls):
"""
Return the key used in the register, used as a slug for the URL.
You can override this by adding a class property.
"""
return cls.__name__
def get_url(self):
return self._url or self.url
def get_page_size(self):
"""
Return the computed page_size
It takes into account:
* class variables
* constructor arguments,
* settings
* fallback to the module constants if needed.
"""
return self._page_size
def get_query_size(self):
"""
Return the computed default query size
It takes into account:
* class variables
* settings,
* fallback to the module constants
"""
return self._query_size
def get_query_size_min(self):
"""
Return the computed minimum query size
It takes into account:
* class variables
* settings,
* fallback to the module constants
"""
return self._query_size_min
@abstractmethod
def get_choices(self):
pass
@abstractmethod
def items(self, query=None, **kwargs):
pass
@abstractmethod
def selected(self, ids):
"""
Return the values (as a tuple of pairs) for the ids provided
"""
pass
def is_valid_query(self, query):
"""
Return True if the search query is valid.
e.g.:
* not empty,
* not too short,
"""
# No query, no item
if not query:
return False
# Query is too short, no item
if len(query) < self.get_query_size_min():
return False
return True
class AgnocompleteChoices(AgnocompleteBase):
"""
Usage Example::
class AgnocompleteColor(AgnocompleteChoices):
choices = (
('red', 'Red'),
('green', 'Green'),
('blue', 'Blue'),
)
"""
choices = ()
def get_choices(self):
return self.choices
def item(self, current_item):
value, label = current_item
return dict(value=value, label=label)
def items(self, query=None, **kwargs):
if not self.is_valid_query(query):
return []
result = copy(self.choices)
if query:
result = filter(lambda x: x[1].lower().startswith(query), result)
result = tuple(result)
# Slicing before rendering
result = result[:self.get_page_size()]
return [self.item(item) for item in result]
def selected(self, ids):
"""
Return the selected options as a list of tuples
"""
result = copy(self.choices)
result = filter(lambda x: x[0] in ids, result)
# result = ((item, item) for item in result)
return list(result)
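# Illustrative sketch (not part of the library): exercising AgnocompleteChoices
# directly, assuming a configured Django project. The color choices mirror the
# docstring example above; the query string is arbitrary.
def _example_choices_usage():
    class AgnocompleteColor(AgnocompleteChoices):
        choices = (
            ('red', 'Red'),
            ('green', 'Green'),
            ('blue', 'Blue'),
        )

    autocomplete = AgnocompleteColor()
    # items() keeps the choices whose lowercased label starts with the query
    # (provided the query passes is_valid_query), then paginates the result.
    return autocomplete.items(query="gre")  # ~ [{'value': 'green', 'label': 'Green'}]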
class AgnocompleteModelBase(with_metaclass(ABCMeta, AgnocompleteBase)):
model = None
requires_authentication = False
@abstractmethod
def get_queryset(self):
pass
@property
def fields(self):
raise NotImplementedError(
"Integrator: You must have a `fields` property")
def get_model(self):
"""
Return the class Model used by this Agnocomplete
"""
if hasattr(self, 'model') and self.model:
return self.model
# Give me a "none" queryset
try:
none = self.get_queryset().none()
return none.model
except Exception:
raise ImproperlyConfigured(
"Integrator: Unable to determine the model with this queryset."
" Please add a `model` property")
def get_model_queryset(self):
"""
Return an unfiltered complete model queryset.
To be used for the select Input initialization
"""
return self.get_model().objects.all()
get_choices = get_model_queryset
def get_field_name(self):
"""
Return the model field name to be used as a value, or 'pk' if unset
"""
if hasattr(self, 'agnocomplete_field') and \
hasattr(self.agnocomplete_field, 'to_field_name'):
return self.agnocomplete_field.to_field_name or 'pk'
return 'pk'
class AgnocompleteModel(AgnocompleteModelBase):
"""
Example::
class AgnocompletePeople(AgnocompleteModel):
model = People
fields = ['first_name', 'last_name']
class AgnocompletePersonQueryset(AgnocompleteModel):
fields = ['first_name', 'last_name']
def get_queryset(self):
return People.objects.filter(email__contains='example.com')
"""
def __init__(self, *args, **kwargs):
super(AgnocompleteModel, self).__init__(*args, **kwargs)
self.__final_queryset = None
def _construct_qs_filter(self, field_name):
"""
        Using a field name optionally prefixed by `^`, `=`, `@`, return a
case-insensitive filter condition name usable as a queryset `filter()`
keyword argument.
"""
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
else:
return "%s__icontains" % field_name
def get_queryset(self):
if not hasattr(self, 'model') or not self.model:
raise NotImplementedError(
"Integrator: You must either have a `model` property "
"or a `get_queryset()` method"
)
return self.model.objects.all()
def get_queryset_filters(self, query):
"""
Return the filtered queryset
"""
conditions = Q()
for field_name in self.fields:
conditions |= Q(**{
self._construct_qs_filter(field_name): query
})
return conditions
def paginate(self, qs):
"""
Paginate a given Queryset
"""
return qs[:self.get_page_size()]
@property
def _final_queryset(self):
"""
Paginated final queryset
"""
if self.__final_queryset is None:
return None
return self.paginate(self.__final_queryset)
# final_queryset alias
final_queryset = _final_queryset
@property
def final_raw_queryset(self):
return self.__final_queryset
def serialize(self, queryset):
result = []
for item in self.paginate(queryset):
result.append(self.item(item))
return result
def item(self, current_item):
"""
Return the current item.
@param current_item: Current item
        @type current_item: django.db.models.Model
@return: Value and label of the current item
@rtype : dict
"""
return {
'value': text(getattr(current_item, self.get_field_name())),
'label': self.label(current_item)
}
def label(self, current_item):
"""
Return a label for the current item.
@param current_item: Current item
        @type current_item: django.db.models.Model
@return: Label of the current item
@rtype : text
"""
return text(current_item)
def build_extra_filtered_queryset(self, queryset, **kwargs):
"""
        Apply optional queryset filters, based on the extra arguments
passed to the query.
        By default, this method returns the queryset unchanged. You can
        override it to perform custom filtering on this queryset.
* `queryset`: it's the final queryset build using the search terms.
* `kwargs`: this dictionary contains the extra arguments passed to the
agnocomplete class.
"""
# By default, we're ignoring these arguments and return verbatim QS
return queryset
def build_filtered_queryset(self, query, **kwargs):
"""
Build and return the fully-filtered queryset
"""
# Take the basic queryset
qs = self.get_queryset()
# filter it via the query conditions
qs = qs.filter(self.get_queryset_filters(query))
return self.build_extra_filtered_queryset(qs, **kwargs)
def items(self, query=None, **kwargs):
"""
Return the items to be sent to the client
"""
# Cut this, we don't need no empty query
if not query:
self.__final_queryset = self.get_model().objects.none()
return self.serialize(self.__final_queryset)
# Query is too short, no item
if len(query) < self.get_query_size_min():
self.__final_queryset = self.get_model().objects.none()
return self.serialize(self.__final_queryset)
if self.requires_authentication:
if not self.user:
raise AuthenticationRequiredAgnocompleteException(
"Authentication is required to use this autocomplete"
)
if not self.user.is_authenticated:
raise AuthenticationRequiredAgnocompleteException(
"Authentication is required to use this autocomplete"
)
qs = self.build_filtered_queryset(query, **kwargs)
# The final queryset is the paginated queryset
self.__final_queryset = qs
return self.serialize(qs)
def selected(self, ids):
"""
Return the selected options as a list of tuples
"""
# Cleanup the ID list
if self.get_field_name() == 'pk':
ids = filter(lambda x: "{}".format(x).isdigit(), copy(ids))
else:
ids = filter(lambda x: len("{}".format(x)) > 0, copy(ids))
# Prepare the QS
# TODO: not contextually filtered, check if it's possible at some point
qs = self.get_model_queryset().filter(
**{'{}__in'.format(self.get_field_name()): ids})
result = []
for item in qs:
item_repr = self.item(item)
result.append(
(item_repr['value'], item_repr['label'])
)
return result
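# Illustrative sketch (not part of the library): a concrete AgnocompleteModel using
# the `^` / `=` field prefixes handled by _construct_qs_filter() above. The `Person`
# model and its field names are assumptions made for the example.
#
#   class AgnocompletePerson(AgnocompleteModel):
#       model = Person
#       # '^first_name' -> first_name__istartswith, '=email' -> email__iexact,
#       # a bare 'last_name' -> last_name__icontains
#       fields = ['^first_name', 'last_name', '=email']
#
#   AgnocompletePerson(user=request.user).items(query="ali")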
class AgnocompleteUrlProxy(with_metaclass(ABCMeta, AgnocompleteBase)):
"""
This class serves as a proxy between your application and a 3rd party
URL (typically a REST HTTP API).
"""
value_key = 'value'
label_key = 'label'
method = 'get'
data_key = 'data'
def get_search_url(self):
raise NotImplementedError(
"Integrator: You must implement a `get_search_url` method"
" or have a `search_url` property in this class.")
@property
def search_url(self):
return self.get_search_url()
def get_item_url(self, pk):
raise NotImplementedError(
"Integrator: You must implement a `get_item_url` method")
def get_choices(self):
return []
def get_http_method_arg_name(self):
"""
Return the HTTP function to call and the params/data argument name
"""
if self.method == 'get':
arg_name = 'params'
else:
arg_name = 'data'
return getattr(requests, self.method), arg_name
def http_call(self, url=None, **kwargs):
"""
Call the target URL via HTTP and return the JSON result
"""
if not url:
url = self.search_url
http_func, arg_name = self.get_http_method_arg_name()
# Build the argument dictionary to pass in the http function
_kwargs = {
arg_name: kwargs,
}
# The actual HTTP call
response = http_func(
url=url.format(**kwargs),
headers=self.get_http_headers(),
**_kwargs
)
# Error handling
if response.status_code != 200:
logger.warning('Invalid Request for `%s`', response.url)
# Raising a "requests" exception
response.raise_for_status()
return response.json()
def item(self, current_item):
return dict(
value=text(current_item[self.value_key]),
label=text(current_item[self.label_key]),
)
def get_http_headers(self):
"""
Return a dictionary that will be added to the HTTP request to the API
        You can override this method; it returns an empty dict by default.
"""
return {}
def get_http_result(self, http_result):
"""
        Return an iterable containing all the result items.
        You can override this method to adapt it to the payload
        returned by the 3rd party API.
"""
return http_result.get(self.data_key, [])
def get_http_call_kwargs(self, query, **kwargs):
"""
Return the HTTP query arguments.
You can override this method to pass further arguments corresponding
to your search_url.
"""
return {'q': query}
def items(self, query=None, **kwargs):
if not self.is_valid_query(query):
return []
# Call to search URL
http_result = self.http_call(
**self.get_http_call_kwargs(query, **kwargs)
)
# In case of error, on the API side, the error is raised and handled
# in the view.
http_result = self.get_http_result(http_result)
result = []
for item in http_result:
            # Optional per-item reshaping; item() may raise SkipItem to drop an entry.
try:
result.append(self.item(item))
except SkipItem:
continue
return result
def selected(self, ids):
data = []
# Filter out "falsy IDs" (empty string, None, 0...)
ids = filter(lambda x: x, ids)
for _id in ids:
if _id:
# Call to the item URL
result = self.http_call(url=self.get_item_url(pk=_id))
if self.data_key in result and len(result[self.data_key]):
for item in result[self.data_key]:
data.append(
(
text(item[self.value_key]),
text(item[self.label_key])
)
)
return data
def validate(self, value):
"""
        Check that ``value`` is available on the remote server and return it.
        If the value is not available on the server side, or is filtered out by
        :meth:`item`, :class:`agnocomplete.exceptions.ItemNotFound` is raised.
"""
url = self.get_item_url(value)
try:
data = self.http_call(url=url)
except requests.HTTPError:
raise ItemNotFound()
data = self.get_http_result(data)
try:
self.item(data)
except SkipItem:
raise ItemNotFound()
return value
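# Illustrative sketch (not part of the library): a minimal AgnocompleteUrlProxy
# subclass pointing at a hypothetical third-party API. The URLs and the "query"
# parameter name are assumptions; the hooks overridden here are the ones defined
# above (get_search_url, get_item_url, get_http_call_kwargs).
#
#   class AgnocompleteRemotePeople(AgnocompleteUrlProxy):
#       value_key = 'id'
#       label_key = 'name'
#
#       def get_search_url(self):
#           return 'https://api.example.com/people/'
#
#       def get_item_url(self, pk):
#           return 'https://api.example.com/people/{pk}/'.format(pk=pk)
#
#       def get_http_call_kwargs(self, query, **kwargs):
#           return {'query': query}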
|
|
# PlantCV classes
import os
import json
from plantcv.plantcv import fatal_error
class Params:
"""PlantCV parameters class."""
def __init__(self, device=0, debug=None, debug_outdir=".", line_thickness=5, dpi=100, text_size=0.55,
text_thickness=2, marker_size=60, color_scale="gist_rainbow", color_sequence="sequential",
saved_color_scale=None, verbose=True):
"""Initialize parameters.
Keyword arguments/parameters:
device = Device number. Used to count steps in the pipeline. (default: 0)
debug = None, print, or plot. Print = save to file, Plot = print to screen. (default: None)
debug_outdir = Debug images output directory. (default: .)
line_thickness = Width of line drawings. (default: 5)
dpi = Figure plotting resolution, dots per inch. (default: 100)
text_size = Size of plotting text. (default: 0.55)
text_thickness = Thickness of plotting text. (default: 2)
marker_size = Size of plotting markers (default: 60)
color_scale = Name of plotting color scale (matplotlib colormap). (default: gist_rainbow)
color_sequence = Build color scales in "sequential" or "random" order. (default: sequential)
saved_color_scale = Saved color scale that will be applied next time color_palette is called. (default: None)
        verbose = Whether or not to run in verbose mode. (default: True)
:param device: int
:param debug: str
:param debug_outdir: str
:param line_thickness: numeric
:param dpi: int
:param text_size: float
:param text_thickness: int
:param marker_size: int
:param color_scale: str
:param color_sequence: str
:param saved_color_scale: list
:param verbose: bool
"""
self.device = device
self.debug = debug
self.debug_outdir = debug_outdir
self.line_thickness = line_thickness
self.dpi = dpi
self.text_size = text_size
self.text_thickness = text_thickness
self.marker_size = marker_size
self.color_scale = color_scale
self.color_sequence = color_sequence
self.saved_color_scale = saved_color_scale
self.verbose = verbose
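# Minimal usage sketch (illustrative): tuning a Params instance. In a real PlantCV
# workflow these attributes are normally set on the shared `params` object exposed
# by the package rather than on a fresh instance.
def _example_configure_params():
    p = Params()
    p.debug = "print"           # save intermediate debug images to files
    p.debug_outdir = "./debug"  # where those images are written (assumed path)
    p.line_thickness = 3        # thinner annotation lines
    return p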
class Outputs:
"""PlantCV outputs class
"""
def __init__(self):
self.measurements = {}
self.images = []
self.observations = {}
# Add a method to clear measurements
def clear(self):
self.measurements = {}
self.images = []
self.observations = {}
# Method to add observation to outputs
def add_observation(self, sample, variable, trait, method, scale, datatype, value, label):
"""
Keyword arguments/parameters:
sample = Sample name. Used to distinguish between multiple samples
variable = A local unique identifier of a variable, e.g. a short name,
that is a key linking the definitions of variables with observations.
trait = A name of the trait mapped to an external ontology; if there is no exact mapping, an informative
description of the trait.
method = A name of the measurement method mapped to an external ontology; if there is no exact mapping, an
informative description of the measurement procedure
scale = Units of the measurement or scale in which the observations are expressed; if possible, standard
units and scales should be used and mapped to existing ontologies; in the case of non-standard
scale a full explanation should be given
datatype = The type of data to be stored, e.g. 'int', 'float', 'str', 'list', 'bool', etc.
value = The data itself
label = The label for each value (most useful when the data is a frequency table as in hue,
or other tables)
:param sample: str
:param variable: str
:param trait: str
:param method: str
:param scale: str
:param datatype: type
:param value:
:param label:
"""
# Create an empty dictionary for the sample if it does not exist
if sample not in self.observations:
self.observations[sample] = {}
# Supported data types
supported_dtype = ["int", "float", "str", "list", "bool", "tuple", "dict", "NoneType", "numpy.float64"]
# Supported class types
class_list = [f"<class '{cls}'>" for cls in supported_dtype]
# Send an error message if datatype is not supported by json
if str(type(value)) not in class_list:
# String list of supported types
type_list = ', '.join(map(str, supported_dtype))
fatal_error(f"The Data type {type(value)} is not compatible with JSON! Please use only these: {type_list}!")
# Save the observation for the sample and variable
self.observations[sample][variable] = {
"trait": trait,
"method": method,
"scale": scale,
"datatype": str(datatype),
"value": value,
"label": label
}
# Method to save observations to a file
def save_results(self, filename, outformat="json"):
"""Save results to a file.
Keyword arguments/parameters:
filename = Output filename
outformat = Output file format ("json" or "csv"). Default = "json"
:param filename: str
:param outformat: str
"""
if outformat.upper() == "JSON":
if os.path.isfile(filename):
with open(filename, 'r') as f:
hierarchical_data = json.load(f)
hierarchical_data["observations"] = self.observations
else:
hierarchical_data = {"metadata": {}, "observations": self.observations}
with open(filename, mode='w') as f:
json.dump(hierarchical_data, f)
elif outformat.upper() == "CSV":
# Open output CSV file
csv_table = open(filename, "w")
# Write the header
csv_table.write(",".join(map(str, ["sample", "trait", "value", "label"])) + "\n")
# Iterate over data samples
for sample in self.observations:
# Iterate over traits for each sample
for var in self.observations[sample]:
val = self.observations[sample][var]["value"]
# If the data type is a list or tuple we need to unpack the data
if isinstance(val, list) or isinstance(val, tuple):
# Combine each value with its label
for value, label in zip(self.observations[sample][var]["value"],
self.observations[sample][var]["label"]):
# Skip list of tuple data types
if not isinstance(value, tuple):
# Save one row per value-label
row = [sample, var, value, label]
csv_table.write(",".join(map(str, row)) + "\n")
# If the data type is Boolean, store as a numeric 1/0 instead of True/False
elif isinstance(val, bool):
row = [sample,
var,
int(self.observations[sample][var]["value"]),
self.observations[sample][var]["label"]]
csv_table.write(",".join(map(str, row)) + "\n")
# For all other supported data types, save one row per trait
# Assumes no unusual data types are present (possibly a bad assumption)
else:
row = [sample,
var,
self.observations[sample][var]["value"],
self.observations[sample][var]["label"]
]
                        csv_table.write(",".join(map(str, row)) + "\n")
            # Close the output file once all rows have been written
            csv_table.close()
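# Minimal usage sketch (illustrative values): recording one observation and writing
# it out in both supported formats. The variable/trait/method strings are examples,
# not prescribed vocabulary.
def _example_record_observation():
    out = Outputs()
    out.add_observation(sample="default", variable="plant_area", trait="area",
                        method="plantcv.plantcv.analyze_object", scale="pixels",
                        datatype=int, value=1234, label="pixels")
    out.save_results(filename="results.json", outformat="json")
    out.save_results(filename="results.csv", outformat="csv")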
class Spectral_data:
# PlantCV Hyperspectral data class
def __init__(self, array_data, max_wavelength, min_wavelength, max_value, min_value, d_type, wavelength_dict,
samples, lines, interleave, wavelength_units, array_type, pseudo_rgb, filename, default_bands):
# The actual array/datacube
self.array_data = array_data
# Min/max available wavelengths (for spectral datacube)
self.max_wavelength = max_wavelength
self.min_wavelength = min_wavelength
# Min/max pixel value for single wavelength or index
self.max_value = max_value
self.min_value = min_value
# Numpy data type
self.d_type = d_type
# Contains all available wavelengths where keys are wavelength and value are indices
self.wavelength_dict = wavelength_dict
# Resolution of a single band of spectral data is (samples, lines) rather than (x,y) with other arrays
self.samples = samples
self.lines = lines
# Interleave type
self.interleave = interleave
self.wavelength_units = wavelength_units
# The type of array data (entire datacube, specific index, first derivative, etc)
self.array_type = array_type
# Pseudo-RGB image if the array_type is a datacube
self.pseudo_rgb = pseudo_rgb
# The filename where the data originated from
self.filename = filename
        # The default band indices needed to make a pseudo_rgb image; if not available, store None
self.default_bands = default_bands
# Example
# spectral_array = Spectral_data(max_wavelength=1000.95, min_wavelength=379.027, d_type=numpy.float32,
# wavelength_dict=dictionary, samples=1600, lines=1704, interleave='bil',
# wavelength_units='nm', array_type="datacube", filename=fname, default_bands={159,253,520})
|
|
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
''' CONSTANTS '''
removed_keys = ['endpoint', 'domain', 'hostname']
''' CLIENT CLASS '''
class Client(BaseClient):
def __init__(self, base_url, verify=True, proxy=False, ok_codes=tuple(), headers=None, auth=None, timeout=10):
super().__init__(base_url, verify=verify, proxy=proxy, ok_codes=ok_codes, headers=headers, auth=auth)
self.timeout = timeout
def domain_tags(self, hostname: str = None):
res = self._http_request(
'GET',
f'domain/{hostname}/tags',
ok_codes=(200, 403),
timeout=self.timeout
)
return res.get('tags', [])
def domain_details(self, hostname: str = None):
return self._http_request(
'GET',
f'domain/{hostname}',
ok_codes=(200, 403),
timeout=self.timeout
)
def domain_subdomains(self, hostname: str = None, children_only: str = 'true'):
query_string = {"children_only": children_only}
res = self._http_request(
'GET',
f'domain/{hostname}/subdomains',
params=query_string,
ok_codes=(200, 403),
timeout=self.timeout
)
return res
def associated_domains(self, hostname: str = None, page: int = 1):
params = {
"page": page
}
res = self._http_request(
'GET',
f'domain/{hostname}/associated',
params=params,
ok_codes=(200, 403),
timeout=self.timeout
)
return res
def get_ssl_certificates(self, query_type: str = "stream", hostname: str = None, params: dict = None):
# There's a bug in the API where the result is malformed.
if query_type == "paged":
res = self._http_request(
'GET',
f'domain/{hostname}/ssl',
params=params or {},
ok_codes=(200, 403),
resp_type='response',
timeout=self.timeout
)
elif query_type == "stream":
res = self._http_request(
'GET',
f'domain/{hostname}/ssl_stream',
params=params,
ok_codes=(200, 403),
resp_type='response',
timeout=self.timeout
)
return res
def get_company(self, domain: str = None):
res = self._http_request(
'GET',
f'company/{domain}',
ok_codes=(200, 403),
timeout=self.timeout
)
return res.get('record', {})
def get_useragents(self, ip_address: str = None, params: dict = None):
return self._http_request(
'GET',
f'ips/{ip_address}/useragents',
params=params or {},
ok_codes=(200, 403),
timeout=self.timeout
)
def get_company_associated_ips(self, domain: str = None):
res = self._http_request(
'GET',
f'company/{domain}/associated-ips',
ok_codes=(200, 403),
timeout=self.timeout
)
return res.get('record', {})
def get_whois(self, query_type: str = "domain", hostname: str = None):
if query_type == "domain":
return self._http_request(
'GET',
f'domain/{hostname}/whois',
ok_codes=(200, 403),
timeout=self.timeout
)
elif query_type == "ip":
return self._http_request(
'GET',
f'ips/{hostname}/whois',
ok_codes=(200, 403),
timeout=self.timeout
)
def get_dns_history(self, hostname: str = None, record_type: str = None, page: int = 1):
params = {
"page": page
}
return self._http_request(
'GET',
f'history/{hostname}/dns/{record_type}',
params=params,
ok_codes=(200, 403),
timeout=self.timeout
)
def get_whois_history(self, hostname: str = None, page: int = 1):
params = {
"page": page
}
res = self._http_request(
'GET',
f'history/{hostname}/whois',
params=params,
ok_codes=(200, 403),
timeout=self.timeout
)
return res.get('result')
def get_ip_neighbors(self, ipaddress: str = None):
res = self._http_request(
'GET',
f'ips/nearby/{ipaddress}',
ok_codes=(200, 403),
timeout=self.timeout
)
return res.get('blocks')
def query(self, query_type: str = "domain_search", body: dict = None, params: dict = None):
if query_type == "domain_search":
return self._http_request(
'POST',
'domains/list',
params=params,
json_data=body,
ok_codes=(200, 403),
timeout=self.timeout
)
elif query_type == "domain_stats":
return self._http_request(
'POST',
'domains/stats',
json_data=body,
ok_codes=(200, 403),
timeout=self.timeout
)
elif query_type == "ip_search":
return self._http_request(
'POST',
'ips/list',
params=params,
json_data=body,
ok_codes=(200, 403),
timeout=self.timeout
)
elif query_type == "ip_stats":
return self._http_request(
'POST',
'ips/stats',
params=params,
json_data=body,
ok_codes=(200, 403),
timeout=self.timeout
)
def sql(self, sql: dict = None, timeout: int = 20):
return self._http_request(
'POST',
'query/scroll',
json_data=sql,
            timeout=timeout  # honor the per-call timeout argument
)
def sql_next(self, next_id: str = None, timeout: int = 20):
return self._http_request(
'GET',
f'query/scroll/{next_id}',
            timeout=timeout  # honor the per-call timeout argument
)
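# Illustrative sketch (not part of the integration): exercising the Client above
# directly, assuming a valid SecurityTrails API key. Inside XSOAR the client is
# built by main() below from the integration parameters.
def _example_direct_client_usage(api_key):
    client = Client(
        base_url="https://api.securitytrails.com/v1/",
        headers={
            "Content-Type": "application/json",
            "Accept": "application/json",
            "APIKEY": api_key,
        },
        timeout=10,
    )
    tags = client.domain_tags(hostname="example.com")
    whois = client.get_whois(query_type="domain", hostname="example.com")
    return tags, whois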
''' HELPER FUNCTIONS '''
#################################
# Standard Context Outputs
#################################
def create_standard_domain_context(domain_data):
command_results = CommandResults(
outputs_prefix="Domain",
outputs_key_field="Name",
outputs=domain_data,
readable_output=tableToMarkdown("Domain(s):", domain_data)
)
return_results(command_results)
def create_standard_ip_context(ip_data):
command_results = CommandResults(
outputs_prefix="IP",
outputs_key_field="Address",
outputs=ip_data,
readable_output=tableToMarkdown("IP Address(es):", ip_data)
)
return_results(command_results)
def domain_command(client, args):
domains = argToList(args.get('domain'))
command_results: List[CommandResults] = []
for domain in domains:
try:
domain_details = client.domain_details(hostname=domain)
except Exception:
demisto.info(f'No information found for domain: {domain}')
return_results(f'No information found for domain: {domain}')
continue
domain_subdomains = client.domain_subdomains(hostname=domain)
domain_whois = client.get_whois(query_type="domain", hostname=domain)
domain_tags = client.domain_tags(hostname=domain)
admin_contact = [{
"Name": x.get('name'),
"Email": x.get('email'),
"Phone": x.get('telephone'),
"Country": x.get('country')
} for x in domain_whois.get('contacts', []) if "admin" in x.get('type', '').lower()]
registrant_contact = [{
"Name": x.get('name', None),
"Email": x.get('email', None),
"Phone": x.get('telephone', None),
"Country": x.get('country', None)
} for x in domain_whois.get('contacts', []) if "registrant" in x.get('type', '').lower()]
registrar_contact = [{
"Name": x.get('name', None),
"Email": x.get('email', None),
"Phone": x.get('telephone', None),
"Country": x.get('country', None)
} for x in domain_whois.get('contacts', []) if "registrar" in x.get('type', '').lower()]
domain_data = {
"Name": domain,
"DNS": ",".join(
[x.get('ip', '') for x in domain_details.get('current_dns', {}).get('a', {}).get('values', [])]),
"NameServers": ",".join(
[x.get('nameserver', '') for x in
domain_details.get('current_dns', {}).get('ns', {}).get('values', [])]),
"Organization": domain_details.get('name', None),
"Subdomains": ",".join(domain_subdomains.get('subdomains', [])),
"WHOIS": {
"DomainStatus": domain_whois.get('status'),
"NameServers": ",".join(domain_whois.get('nameServers')) if domain_whois.get('nameServers') else None,
"CreationDate": domain_whois.get('createdDate'),
"UpdatedDate": domain_whois.get('updatedDate'),
"ExpirationDate": domain_whois.get('expiresData'),
"Registrant": {
"Name": registrant_contact[0].get('Name', None) if registrant_contact else None,
"Email": registrant_contact[0].get('Email', None) if registrant_contact else None,
"Phone": registrant_contact[0].get('Phone', None) if registrant_contact else None
},
"Registrar": {
"Name": registrar_contact[0].get('Name', None) if registrar_contact else None,
"Email": registrar_contact[0].get('Email', None) if registrar_contact else None,
"Phone": registrar_contact[0].get('Phone', None) if registrar_contact else None
},
"Admin": {
"Name": admin_contact[0].get('Name', None) if admin_contact else None,
"Email": admin_contact[0].get('Email', None) if admin_contact else None,
"Phone": admin_contact[0].get('Phone', None) if admin_contact else None
}
},
"Tags": ",".join(domain_tags),
"Admin": {
"Country": admin_contact[0].get('Country', None) if admin_contact else None,
"Name": admin_contact[0].get('Name', None) if admin_contact else None,
"Email": admin_contact[0].get('Email', None) if admin_contact else None,
"Phone": admin_contact[0].get('Phone', None) if admin_contact else None
},
"Registrant": {
"Country": registrant_contact[0].get('Country', None) if registrant_contact else None,
"Name": registrant_contact[0].get('Name', None) if registrant_contact else None,
"Email": registrant_contact[0].get('Email', None) if registrant_contact else None,
"Phone": registrant_contact[0].get('Phone', None) if registrant_contact else None
}
}
dbot_score = Common.DBotScore(
indicator=domain,
indicator_type=DBotScoreType.DOMAIN,
integration_name='SecurityTrails',
score=Common.DBotScore.NONE
)
domain_indicator = Common.Domain(
domain=domain,
dbot_score=dbot_score
)
md = tableToMarkdown(f"Domain {domain}:", domain_data)
result = CommandResults(
outputs_prefix="Domain",
outputs_key_field="Name",
outputs=domain_data,
indicator=domain_indicator,
readable_output=md
)
command_results.append(result)
return_results(command_results)
#################################
# Company endpoints
#################################
def get_company_details_command(client, args):
domain = args.get('domain')
res = client.get_company(domain=domain)
readable_output = f"### Company for {domain}: {res.get('name', None)}"
command_results = CommandResults(
outputs_prefix="SecurityTrails.Domain",
outputs_key_field="name",
outputs={"name": domain, "company": res.get('name', None)},
readable_output=readable_output
)
return_results(command_results)
create_standard_domain_context(
domain_data={
"Name": domain,
"Organization": res.get('name', None),
"Registrant": {
"Name": res.get('name', None)
},
"WHOIS": {
"Registrant": {
"Name": res.get('name', None)
}
}
})
def get_company_associated_ips_command(client, args):
domain = args.get('domain')
res = client.get_company_associated_ips(domain=domain)
readable_output = tableToMarkdown(f"Associated IPs for {domain}", res)
output_data = {
"name": domain,
"associatedips": res,
"associatedips_count": len(res)
}
command_results = CommandResults(
outputs_prefix="SecurityTrails.Domain",
outputs_key_field="name",
outputs=output_data,
readable_output=readable_output
)
return_results(command_results)
#################################
# Domain endpoints
#################################
def domain_details_command(client, args):
hostname = args.get('hostname')
res = client.domain_details(hostname=hostname)
res = {k: v for k, v in res.items() if k not in removed_keys}
res['name'] = hostname
output_data = sorted([{"Type": k, "Record Count": len(v.get('values', []))}
for k, v in res.get('current_dns', {}).items()], key=lambda x: x['Type'])
readable_output = tableToMarkdown(f"Domain details for {hostname}:", output_data, ['Type', 'Record Count'])
command_results = CommandResults(
outputs_prefix="SecurityTrails.Domain",
outputs_key_field="name",
outputs=res,
readable_output=readable_output
)
return_results(command_results)
create_standard_domain_context(
domain_data={
"Name": hostname,
"NameServers": ", ".join(
[x.get('nameserver', None) for x in res.get('current_dns', {}).get('ns', {}).get('values', [])])
})
def domains_subdomains_command(client, args):
hostname = args.get('hostname')
children_only = args.get('children_only', 'true')
res = client.domain_subdomains(hostname=hostname, children_only=children_only)
subdomains = res.get('subdomains', [])
md = tableToMarkdown(f"Subdomains for {hostname}:", [{"Subdomain": x} for x in subdomains])
output_data = {
"name": hostname,
"subdomains": subdomains,
"subdomains_count": len(subdomains)
}
command_results = CommandResults(
outputs_prefix="SecurityTrails.Domain",
outputs_key_field="name",
outputs=output_data,
readable_output=md
)
return_results(command_results)
def get_domain_tags_command(client, args):
hostname = args.get('hostname')
res = client.domain_tags(hostname=hostname)
tags = ', '.join(res)
readable_output = f"### Tags for {hostname}:\n\n{tags}"
command_results = CommandResults(
outputs_prefix="SecurityTrails.Domain",
outputs_key_field="name",
outputs={"name": hostname, "tags": res},
readable_output=readable_output
)
return_results(command_results)
create_standard_domain_context(
domain_data={
"Name": hostname,
"Tags": tags
})
def get_whois_command(client, args):
command = demisto.command()
if command == "securitytrails-get-domain-whois":
hostname = args.get("hostname")
res = client.get_whois(query_type="domain", hostname=hostname)
res = {k: v for k, v in res.items() if k not in removed_keys}
res["name"] = hostname
readable_output = tableToMarkdown(f"WHOIS data for {hostname}", res)
command_results = CommandResults(
outputs_prefix="SecurityTrails.Domain",
outputs_key_field="name",
outputs=res,
readable_output=readable_output
)
return_results(command_results)
contacts = res.get('contacts', [])
domain_data = {
"Name": hostname,
"UpdatedDate": res.get('updatedDate'),
"DomainStatus": res.get('status'),
"WHOIS": {
"DomainStatus": res.get('status'),
"CreationDate": res.get('createdDate'),
"UpdatedDate": res.get('updatedDate'),
"ExpirationDate": res.get('expiresDate'),
"Registrar": {
"Name": res.get('registrarName')
}
}
}
if res.get('nameServers', None):
name_servers = ", ".join(x for x in res.get('nameServers', []))
domain_data['NameServers'] = name_servers
domain_data['WHOIS']['NameServers'] = name_servers
# Find the admin contact
admin = None
for contact in contacts:
if (contact.get('type').lower()).startswith("admin"):
admin = contact
break
if admin:
domain_data['Admin'] = {
"Country": admin.get('country', None),
"Email": admin.get('email', None),
"Name": admin.get('name', None),
"Phone": admin.get('telephone', None)
}
create_standard_domain_context(domain_data=domain_data)
elif command == "securitytrails-get-ip-whois":
ip_address = args.get('ipaddress')
res = client.get_whois(query_type="ip", hostname=ip_address)
res = res.get('record', {})
res = {k: v for k, v in res.items() if k not in removed_keys}
res["ip"] = ip_address
readable_output = tableToMarkdown(f"WHOIS data for {ip_address}", res)
command_results = CommandResults(
outputs_prefix="SecurityTrails.IP",
outputs_key_field="ip",
outputs=res,
readable_output=readable_output
)
return_results(command_results)
ip_data = {
"Address": ip_address
}
create_standard_ip_context(ip_data=ip_data)
def domain_search_command(client, args):
include_ips = argToBoolean(args.get('include_ips', 'false'))
page = int(args.get('page', 1))
scroll = True if args.get('include_ips', 'false') == "true" else False
query = args.get('query', None)
filter = args.get('filter', None)
if not query and not filter:
return_error("You must provide at least a query or a filter")
params = {
"include_ips": include_ips,
"page": page,
"scroll": scroll
}
body = dict()
if query:
body['query'] = query
elif filter:
body['filter'] = filter
res = client.query(query_type="domain_search", params=params, body=body)
records = res.get('records')
record_count = res.get('record_count')
md = tableToMarkdown(f"Domain DSL Search Results ({record_count} record(s)):", records)
command_results = CommandResults(
outputs_prefix="SecurityTrails.Domain.Search",
outputs_key_field="hostname",
outputs=records,
readable_output=md
)
return_results(command_results)
def domain_statistics_command(client, args):
query = args.get('query', None)
filter = args.get('filter', None)
if not query and not filter:
return_error("You must provide at least a query or a filter")
body = dict()
if query:
body['query'] = query
elif filter:
body['filter'] = filter
res = client.query(query_type="domain_stats", body=body)
res = {k: v for k, v in res.items() if k not in removed_keys}
top_orgs = res.get('top_organizations', [])
tld_count = res.get('tld_count', 0)
hostname_count = res.get('hostname_count', {})
domain_count = res.get('domain_count', 0)
table_data = {
"Top Organizations Count": len(top_orgs),
"TLD Count": tld_count,
"Hostname Count": hostname_count,
"Domain Count": domain_count
}
md = tableToMarkdown("Domain Statistics:", table_data)
command_results = CommandResults(
outputs_prefix="SecurityTrails.Domain.Search.DomainStats",
outputs_key_field="hostname",
outputs=res,
readable_output=md
)
return_results(command_results)
def associated_domains_command(client, args):
hostname = args.get('hostname')
page = args.get('page', 1)
res = client.associated_domains(hostname=hostname, page=page)
records = res.get('records', [])
record_count = res.get('record_count', 0)
table_data = {
"Count": record_count,
"Domains": ", ".join([x.get('hostname') for x in records]),
"Current Page": page,
"Total Pages": res.get('meta', {}).get('total_pages', 1)
}
md = tableToMarkdown(f"{hostname} Associated Domains:", table_data,
['Count', 'Current Page', 'Total Pages', 'Domains'])
output_data = {
"name": hostname,
"associated_domains": records,
"associated_domain_count": record_count
}
command_results = CommandResults(
outputs_prefix="SecurityTrails.Domain",
outputs_key_field="name",
outputs=output_data,
readable_output=md
)
return_results(command_results)
def get_ssl_certificates(client, args):
hostname = args.get('hostname')
include_subdomains = True if args.get('include_subdomains', 'false') == 'true' else False
status = args.get('status', 'valid')
page = None
params = {
"include_subdomains": include_subdomains,
"status": status
}
if "page" in args:
page = int(args.get('page', 1))
params['page'] = page
query_type = "paged" if page else "stream"
res = client.get_ssl_certificates(query_type=query_type, hostname=hostname, params=params)
records = res.get('records', [])
# record_count = res.get('record_count', 0)
table_data = [{
"Subject Key ID": x.get('subject_key_id'),
"Subject Common Name": x.get('subject', {}).get('common_name'),
"Subject Alternative Names": ", ".join([y for y in x.get('subject', {}).get('alt_names', [])]),
"Serial Number": x.get('serial_number'),
"Public Key Type": x.get('public_key', {}).get('key_type'),
"Public Key": x.get('public_key', {}).get('key'),
"Public Key Bit Length": x.get('public_key', {}).get('bit_length'),
"Precert": x.get('precert'),
"Not Before": x.get('not_before'),
"Not After": x.get('not_after'),
"Issuer Organization": ",".join(x.get('issuer', {}).get('organization')),
"Issuer Country": ",".join(x.get('issuer', {}).get('country')),
"Issuer Common Name": x.get('issuer', {}).get('common_name'),
"ID": x.get('id'),
"Fingerprints": x.get('fingerprints'),
"DNS Names": ",".join(x.get('dns_names'))
} for x in records]
md = tableToMarkdown(f"SSL Certificates for {hostname}", table_data, [
"ID",
"Subject Key ID",
"Subject Common Name",
"Subject Alternative Names",
"Serial Number",
"Public Key Type",
"Public Key",
"Public Key Bit Length",
"Precert",
"Not Before",
"Not After",
"Issuer Organization",
"Issuer Country",
"Issuer Common Name",
"Fingerprints",
"DNS Names"
])
output_data = {
"name": hostname,
"ssl_certiticates": records
}
command_results = CommandResults(
outputs_prefix="SecurityTrails.Domain",
outputs_key_field="name",
outputs=output_data,
readable_output=md
)
return_results(command_results)
#################################
# History endpoints
#################################
def get_dns_history_command(client, args):
hostname = args.get('hostname')
record_type = args.get('type')
page = int(args.get('page', 1))
res = client.get_dns_history(hostname=hostname, record_type=record_type, page=page)
res = {k: v for k, v in res.items() if k not in removed_keys}
records_list = list()
if record_type == "a":
pull_field = "ip"
elif record_type == "aaaa":
pull_field = "ipv6"
elif record_type == "mx":
pull_field = "host"
elif record_type == "ns":
pull_field = "nameserver"
elif record_type == "soa":
pull_field = "email"
elif record_type == "txt":
pull_field = "value"
records = res.get('records', {})
for record in records:
for value in record.get('values'):
if pull_field in value:
records_list.append(
{
"Record Type": record_type,
"Value(s)": value.get(pull_field)
}
)
readable_output = tableToMarkdown(f"DNS history for {hostname}:", records_list)
command_results = CommandResults(
outputs_prefix="SecurityTrails.Domain",
outputs_key_field="name",
outputs={
"name": hostname,
f"{record_type}_history_records": res.get('records'),
f"{record_type}_history_record_pages": res.get('pages', 1)
},
readable_output=readable_output
)
return_results(command_results)
latest_record = res.get('records', [])[0]
values = latest_record.get('values', [])
values = [values] if type(values) == dict else values
# hosts = [x['host'] for x in values if "host" in x]
ipv4 = [x['ip'] for x in values if "ip" in x]
    ipv6 = [x['ipv6'] for x in values if "ipv6" in x]
nameservers = [x['nameserver'] for x in values if "nameserver" in x]
domain_data = {
"Name": hostname
}
if nameservers:
domain_data['NameServers'] = ", ".join(nameservers)
create_standard_domain_context(domain_data=domain_data)
if ipv4:
[create_standard_ip_context({"Address": x}) for x in ipv4]
if ipv6:
[create_standard_ip_context({"Address": x}) for x in ipv6]
def get_whois_history_command(client, args):
hostname = args.get('hostname')
page = int(args.get('page', 1))
res = client.get_whois_history(hostname=hostname, page=page)
readable_output = tableToMarkdown(f"WHOIS history for {hostname}:", res.get('items'))
command_results = CommandResults(
outputs_prefix="SecurityTrails.Domain",
outputs_key_field="name",
outputs={
"name": hostname,
"WHOIS_history": res.get('items', []),
"WHOIS_history_count": res.get('count', 0)
},
readable_output=readable_output
)
return_results(command_results)
domain_data = {
"Name": hostname
}
contacts = res.get('items', [])[0].get('contact') if res.get('items', None) else []
admin_contact = [x for x in contacts if x.get('type', None) == "administrativeContact"]
admin_contact = admin_contact[0] if admin_contact else None
registrant_contact = [x for x in contacts if x.get('type', None) == "registrant"]
registrant_contact = registrant_contact[0] if registrant_contact else None
registrar_contact = admin_contact if admin_contact else None
whois_objects = list()
for x in res.get('items', []):
whois_object = {
"DomainStatus": ", ".join(x.get('status', [])),
"NameServers": ", ".join(x.get('nameServers', [])),
"CreationDate": datetime.fromtimestamp((x.get('createdDate') / 1000)).strftime(
"%Y-%m-%dT%H:%M:%SZ") if x.get('createdDate', None) else None,
"UpdatedDate": datetime.fromtimestamp((x.get('updatedDate') / 1000)).strftime(
"%Y-%m-%dT%H:%M:%SZ") if x.get('updatedDate', None) else None,
"ExpirationDate": datetime.fromtimestamp((x.get('expiresDate') / 1000)).strftime(
"%Y-%m-%dT%H:%M:%SZ") if x.get('expiresDate', None) else None
}
if admin_contact:
whois_object['Admin'] = { # type: ignore
"Name": admin_contact.get('name'),
"Email": admin_contact.get('email'),
"Phone": admin_contact.get('telephone')
}
if registrant_contact:
whois_object['Registrant'] = { # type: ignore
"Name": registrant_contact.get('name'),
"Email": registrant_contact.get('email'),
"Phone": registrant_contact.get('telephone')
}
if registrar_contact:
whois_object['Registrar'] = { # type: ignore
"Name": registrar_contact.get('name'),
"Email": registrar_contact.get('email'),
"Phone": registrar_contact.get('telephone')
}
whois_objects.append(whois_object)
if len(whois_objects) > 0:
domain_data['WHOIS/History'] = whois_objects
create_standard_domain_context(domain_data=domain_data)
#################################
# IPs endpoints
#################################
def get_ip_neighbors_command(client, args):
ipaddress = args.get('ipaddress')
res = client.get_ip_neighbors(ipaddress=ipaddress)
readable_output = tableToMarkdown(
f"IP neighbors for {ipaddress}:",
[{
"IP": x.get('ip', ''),
"Hostnames": x.get('hostnames', None),
"Sites": x.get('sites', 0),
"Ports": x.get('ports', None),
"Active Egress": x.get('active_egress')
} for x in res],
["IP", "Hostnames", "Sites", "Ports", "Active Egress"])
command_results = CommandResults(
outputs_prefix="SecurityTrails.IP",
outputs_key_field="ip",
outputs={
"ip": ipaddress,
"blocks": res
},
readable_output=readable_output
)
return_results(command_results)
create_standard_ip_context(
ip_data=[{
"Address": x.get('ip').split("/")[0]
} for x in res])
def ip_search_command(client, args):
page = arg_to_number(args.get('page', 1))
query = args.get('query', None)
params = {
"page": page
}
body = {
"query": query
}
res = client.query(query_type="ip_search", params=params, body=body)
records = res.get('records')
record_count = res.get('record_count')
md = tableToMarkdown(f"IP DSL Search Results ({record_count} record(s)):", records)
command_results = CommandResults(
outputs_prefix="SecurityTrails.IP.Search",
outputs_key_field="ip",
outputs=records,
readable_output=md
)
return_results(command_results)
create_standard_ip_context(
ip_data=[{
"Address": x.get('ip'),
"Hostname": x.get('ptr'),
"Ports": ", ".join([str(y['port']) for y in x.get('ports')])
} for x in records])
def ip_statistics_command(client, args):
query = args.get('query')
body = {
"query": query
}
res = client.query(query_type="ip_stats", body=body)
res = {k: v for k, v in res.items() if k not in removed_keys}
top_ptrs = res.get('top_ptr_patterns', [])
ports = res.get('ports', [])
total = res.get('total', {}).get('value')
table_data = {
"Top PTRs Count": len(top_ptrs),
"Ports": len(ports),
"Total": total
}
md = tableToMarkdown("IP Statistics:", table_data)
command_results = CommandResults(
outputs_prefix="SecurityTrails.IP.Search.IPStats",
outputs=res,
readable_output=md
)
return_results(command_results)
def get_useragents_command(client, args):
ip_address = args.get('ipaddress')
page = arg_to_number(args.get('page', 1))
params = {
"page": page
}
res = client.get_useragents(ip_address=ip_address, params=params)
records = res.get('records', [])
record_count = res.get('record_count', 0)
table_data = [{
"User Agent": x.get('user_agent'),
"OS Name": x.get('os', {}).get('name'),
"OS Platform": x.get('os', {}).get('platform'),
"OS Version": x.get('os', {}).get('version'),
"Browser Family": x.get('browser_family'),
"Last Seen": x.get('lastseen'),
"Device Type": x.get('device', {}).get('type'),
"Device Brand": x.get('device', {}).get('brand'),
"Device Model": x.get('device', {}).get('model'),
"Client Type": x.get('client', {}).get('type'),
"Client Name": x.get('client', {}).get('name'),
"Client Version": x.get('client', {}).get('version'),
"Client Engine": x.get('client', {}).get('engine'),
"Client Engine Verison": x.get('client', {}).get('engine_version'),
} for x in records]
md = tableToMarkdown(f"User Agents for {ip_address}:", table_data, [
'User Agent',
'OS Name',
'OS Platform',
'OS Version',
'Browser Family',
'Last Seen',
'Device Type',
'Device Brand',
'Device Model',
'Client Type',
'Client Name',
'Client Version',
'Client Engine',
        'Client Engine Version'
])
output_data = {
"ip": ip_address,
"useragents": records,
"useragent_records_count": record_count
}
command_results = CommandResults(
outputs_prefix="SecurityTrails.IP",
outputs_key_field="ip",
outputs=output_data,
readable_output=md
)
return_results(command_results)
#################################
# Query endpoints
#################################
def query_sql_command(client, args):
sql = args.get('sql')
timeout = int(args.get('timeout', '20'))
query = {
"query": sql
}
res = client.sql(sql=query, timeout=timeout)
total = res.get('total', {}).get('value')
    pages = total // 100 if total else 0  # guard against an empty result set
output = {
"total": res.get('total', {}).get('value'),
"pages": pages,
"records": res.get('records'),
"id": res.get('id'),
"query": res.get('query')
}
readable_output = tableToMarkdown("SQL Query Results:", output)
command_results = CommandResults(
outputs_prefix='SecurityTrails.SQL',
outputs_key_field=['query', 'id'],
outputs=output,
readable_output=readable_output
)
return_results(command_results)
def query_sql_get_next_command(client, args):
next_id = str(args.get('id'))
timeout = int(args.get('timeout', '20'))
res = client.sql_next(next_id=next_id, timeout=timeout)
output = {
"total": res.get('total', {}).get('value'),
"records": res.get('records'),
"id": res.get('id'),
"query": res.get('query')
}
readable_output = tableToMarkdown("SQL Query Results:", output)
command_results = CommandResults(
outputs_prefix='SecurityTrails.SQL',
outputs_key_field=['query', 'id'],
outputs=output,
readable_output=readable_output
)
return_results(command_results)
def test_module(client):
results = client._http_request('GET', 'ping', ok_codes=(200, 403))
if "success" in results:
return "ok"
else:
return results.get('message')
''' MAIN FUNCTION '''
def main() -> None:
params = demisto.params()
args = demisto.args()
api_key = params.get('apikey')
timeout = int(params.get('timeout', '10'))
verify_certificate = not params.get('insecure', False)
proxy = params.get('proxy', False)
base_url = "https://api.securitytrails.com/v1/"
commands = {
'securitytrails-sql-query': query_sql_command,
'securitytrails-sql-get-next': query_sql_get_next_command,
'securitytrails-get-subdomains': domains_subdomains_command,
'securitytrails-get-domain-details': domain_details_command,
'securitytrails-get-tags': get_domain_tags_command,
'securitytrails-get-company-details': get_company_details_command,
'securitytrails-get-company-associated-ips': get_company_associated_ips_command,
'securitytrails-get-domain-whois': get_whois_command,
'securitytrails-get-dns-history': get_dns_history_command,
'securitytrails-get-whois-history': get_whois_history_command,
'securitytrails-get-ip-neighbors': get_ip_neighbors_command,
'securitytrails-search-domain': domain_search_command,
'securitytrails-statistics-domain': domain_statistics_command,
'securitytrails-get-associated-domains': associated_domains_command,
# These 2 commands have issues with the response object - error when trying to parse to JSON
# 'securitytrails-get-ssl-certitficates': get_ssl_certificates,
# 'securitytrails-get-ssl-certitficates-stream': get_ssl_certificates,
'securitytrails-search-ip': ip_search_command,
'securitytrails-statistics-ip': ip_statistics_command,
'securitytrails-get-ip-whois': get_whois_command,
'securitytrails-get-useragents': get_useragents_command,
'domain': domain_command
}
command = demisto.command()
demisto.debug(f'Command being called is {command}')
try:
headers = {
"Content-Type": "application/json",
"Accept": "application/json",
'APIKEY': api_key
}
client = Client(
base_url=base_url,
verify=verify_certificate,
headers=headers,
proxy=proxy,
timeout=timeout)
if demisto.command() == 'test-module':
# This is the call made when pressing the integration Test button.
result = test_module(client)
return_results(result)
elif command in commands:
commands[command](client, args)
# Log exceptions and return errors
except Exception as e:
demisto.error(traceback.format_exc()) # print the traceback
return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
''' ENTRY POINT '''
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
|
|
"""Gridworld Domain."""
from rlpy.Tools import plt, FONTSIZE, linearMap
import numpy as np
from .Domain import Domain
from rlpy.Tools import __rlpy_location__, findElemArray1D, perms
import os
__copyright__ = "Copyright 2013, RLPy http://acl.mit.edu/RLPy"
__credits__ = ["Alborz Geramifard", "Robert H. Klein", "Christoph Dann",
"William Dabney", "Jonathan P. How"]
__license__ = "BSD 3-Clause"
__author__ = "Alborz Geramifard"
class GridWorld(Domain):
"""
The GridWorld domain simulates a path-planning problem for a mobile robot
in an environment with obstacles. The goal of the agent is to
navigate from the starting point to the goal state.
    The map is loaded from a text file in which each cell is encoded by a number:
* 0: empty
* 1: blocked
* 2: start
* 3: goal
* 4: pit
**STATE:**
The Row and Column corresponding to the agent's location. \n
**ACTIONS:**
Four cardinal directions: up, down, left, right (given that
the destination is not blocked or out of range). \n
**TRANSITION:**
    With probability NOISE (the `noise` constructor argument, 0.1 by default)
    the chosen action is replaced with a random action at that timestep.
    Otherwise the move succeeds and the agent moves in the intended direction. \n
**REWARD:**
    The reward on each step is -0.001, except for transitions into the goal
    (reward +1) or into the pit (reward -1).\n
"""
map = start_state = goal = None
# Used for graphics to show the domain
agent_fig = upArrows_fig = downArrows_fig = leftArrows_fig = None
rightArrows_fig = domain_fig = valueFunction_fig = None
#: Number of rows and columns of the map
ROWS = COLS = 0
#: Reward constants
GOAL_REWARD = +1
PIT_REWARD = -1
STEP_REWARD = -.001
#: Set by the domain = min(100,rows*cols)
episodeCap = None
#: Movement Noise
NOISE = 0
# Used for graphical normalization
MAX_RETURN = 1
RMAX = MAX_RETURN
# Used for graphical normalization
MIN_RETURN = -1
# Used for graphical shifting of arrows
SHIFT = .1
actions_num = 4
# Constants in the map
EMPTY, BLOCKED, START, GOAL, PIT, AGENT = range(6)
#: Up, Down, Left, Right
ACTIONS = np.array([[-1, 0], [+1, 0], [0, -1], [0, +1]])
# directory of maps shipped with rlpy
default_map_dir = os.path.join(
__rlpy_location__,
"Domains",
"GridWorldMaps")
def __init__(self, mapname=os.path.join(default_map_dir, "4x5.txt"),
noise=.1, episodeCap=None, step_reward = -0.001):
self.map = np.loadtxt(mapname, dtype=np.uint8)
if self.map.ndim == 1:
self.map = self.map[np.newaxis, :]
self.start_state = np.argwhere(self.map == self.START)[0]
self.ROWS, self.COLS = np.shape(self.map)
self.statespace_limits = np.array(
[[0, self.ROWS - 1], [0, self.COLS - 1]])
self.NOISE = noise
self.STEP_REWARD = step_reward
        print(self.STEP_REWARD)  # debug: echo the configured step reward
self.DimNames = ['Row', 'Col']
# 2*self.ROWS*self.COLS, small values can cause problem for some
# planning techniques
self.episodeCap = 1000
super(GridWorld, self).__init__()
def showDomain(self, a=0, s=None):
if s is None:
s = self.state
# Draw the environment
if self.domain_fig is None:
self.agent_fig = plt.figure("Domain")
self.domain_fig = plt.imshow(
self.map,
cmap='GridWorld',
interpolation='nearest',
vmin=0,
vmax=5)
plt.xticks(np.arange(self.COLS), fontsize=FONTSIZE)
plt.yticks(np.arange(self.ROWS), fontsize=FONTSIZE)
# pl.tight_layout()
            self.agent_fig = plt.gca().plot(
                s[1], s[0], 'kd', markersize=20.0 - self.COLS)
plt.show()
self.agent_fig.pop(0).remove()
self.agent_fig = plt.figure("Domain")
#mapcopy = copy(self.map)
#mapcopy[s[0],s[1]] = self.AGENT
# self.domain_fig.set_data(mapcopy)
# Instead of '>' you can use 'D', 'o'
        self.agent_fig = plt.gca().plot(
            s[1], s[0], 'k>', markersize=20.0 - self.COLS)
plt.draw()
def showLearning(self, representation):
if self.valueFunction_fig is None:
plt.figure("Value Function")
self.valueFunction_fig = plt.imshow(
self.map,
cmap='ValueFunction',
interpolation='nearest',
vmin=self.MIN_RETURN,
vmax=self.MAX_RETURN)
plt.xticks(np.arange(self.COLS), fontsize=12)
plt.yticks(np.arange(self.ROWS), fontsize=12)
# Create quivers for each action. 4 in total
X = np.arange(self.ROWS) - self.SHIFT
Y = np.arange(self.COLS)
X, Y = np.meshgrid(X, Y)
DX = DY = np.ones(X.shape)
C = np.zeros(X.shape)
C[0, 0] = 1 # Making sure C has both 0 and 1
            # length of arrow / width of box. Less than 0.5 because each arrow is
            # offset; 0.4 looks nice but could be better / auto-generated
arrow_ratio = 0.4
Max_Ratio_ArrowHead_to_ArrowLength = 0.25
ARROW_WIDTH = 0.5 * Max_Ratio_ArrowHead_to_ArrowLength / 5.0
            self.upArrows_fig = plt.quiver(
                Y, X, DY, DX, C,
                units='y', cmap='Actions', scale_units="height",
                scale=self.ROWS / arrow_ratio,
                width=-1 * ARROW_WIDTH)
self.upArrows_fig.set_clim(vmin=0, vmax=1)
X = np.arange(self.ROWS) + self.SHIFT
Y = np.arange(self.COLS)
X, Y = np.meshgrid(X, Y)
            self.downArrows_fig = plt.quiver(
                Y, X, DY, DX, C,
                units='y', cmap='Actions', scale_units="height",
                scale=self.ROWS / arrow_ratio,
                width=-1 * ARROW_WIDTH)
self.downArrows_fig.set_clim(vmin=0, vmax=1)
X = np.arange(self.ROWS)
Y = np.arange(self.COLS) - self.SHIFT
X, Y = np.meshgrid(X, Y)
            self.leftArrows_fig = plt.quiver(
                Y, X, DY, DX, C,
                units='x', cmap='Actions', scale_units="width",
                scale=self.COLS / arrow_ratio,
                width=ARROW_WIDTH)
self.leftArrows_fig.set_clim(vmin=0, vmax=1)
X = np.arange(self.ROWS)
Y = np.arange(self.COLS) + self.SHIFT
X, Y = np.meshgrid(X, Y)
            self.rightArrows_fig = plt.quiver(
                Y, X, DY, DX, C,
                units='x', cmap='Actions', scale_units="width",
                scale=self.COLS / arrow_ratio,
                width=ARROW_WIDTH)
self.rightArrows_fig.set_clim(vmin=0, vmax=1)
plt.show()
plt.figure("Value Function")
V = np.zeros((self.ROWS, self.COLS))
        # Boolean 3-dimensional array; the third axis indexes the action.
        # This mask marks which actions should be drawn in each cell.
Mask = np.ones(
(self.COLS,
self.ROWS,
self.actions_num),
dtype='bool')
arrowSize = np.zeros(
(self.COLS,
self.ROWS,
self.actions_num),
dtype='float')
# 0 = suboptimal action, 1 = optimal action
arrowColors = np.zeros(
(self.COLS,
self.ROWS,
self.actions_num),
dtype='uint8')
for r in xrange(self.ROWS):
for c in xrange(self.COLS):
if self.map[r, c] == self.BLOCKED:
V[r, c] = 0
if self.map[r, c] == self.GOAL:
V[r, c] = self.MAX_RETURN
if self.map[r, c] == self.PIT:
V[r, c] = self.MIN_RETURN
if self.map[r, c] == self.EMPTY or self.map[r, c] == self.START:
s = np.array([r, c])
As = self.possibleActions(s)
terminal = self.isTerminal(s)
Qs = representation.Qs(s, terminal)
bestA = representation.bestActions(s, terminal, As)
V[r, c] = max(Qs[As])
Mask[c, r, As] = False
arrowColors[c, r, bestA] = 1
                    for a in As:
                        # Index Qs by the action id so the arrow length
                        # reflects that action's Q-value.
                        Q = Qs[a]
                        value = linearMap(
                            Q, self.MIN_RETURN, self.MAX_RETURN, 0, 1)
                        arrowSize[c, r, a] = value
# Show Value Function
self.valueFunction_fig.set_data(V)
### DEBUG ####
        import pickle
        temp_path = "./Results/GradientCheck/"
        if not os.path.exists(temp_path):
            os.makedirs(temp_path)
        # Pickle requires a binary file handle.
        with open(temp_path + "CTRL.p", "wb") as f:
            pickle.dump(V, f)
####
# Show Policy Up Arrows
DX = arrowSize[:, :, 0]
DY = np.zeros((self.ROWS, self.COLS))
DX = np.ma.masked_array(DX, mask=Mask[:, :, 0])
DY = np.ma.masked_array(DY, mask=Mask[:, :, 0])
C = np.ma.masked_array(arrowColors[:, :, 0], mask=Mask[:,:, 0])
self.upArrows_fig.set_UVC(DY, DX, C)
# Show Policy Down Arrows
DX = -arrowSize[:, :, 1]
DY = np.zeros((self.ROWS, self.COLS))
DX = np.ma.masked_array(DX, mask=Mask[:, :, 1])
DY = np.ma.masked_array(DY, mask=Mask[:, :, 1])
C = np.ma.masked_array(arrowColors[:, :, 1], mask=Mask[:,:, 1])
self.downArrows_fig.set_UVC(DY, DX, C)
# Show Policy Left Arrows
DX = np.zeros((self.ROWS, self.COLS))
DY = -arrowSize[:, :, 2]
DX = np.ma.masked_array(DX, mask=Mask[:, :, 2])
DY = np.ma.masked_array(DY, mask=Mask[:, :, 2])
C = np.ma.masked_array(arrowColors[:, :, 2], mask=Mask[:,:, 2])
self.leftArrows_fig.set_UVC(DY, DX, C)
# Show Policy Right Arrows
DX = np.zeros((self.ROWS, self.COLS))
DY = arrowSize[:, :, 3]
DX = np.ma.masked_array(DX, mask=Mask[:, :, 3])
DY = np.ma.masked_array(DY, mask=Mask[:, :, 3])
C = np.ma.masked_array(arrowColors[:, :, 3], mask=Mask[:,:, 3])
self.rightArrows_fig.set_UVC(DY, DX, C)
plt.draw()
def step(self, a):
r = self.STEP_REWARD
ns = self.state.copy()
if self.random_state.random_sample() < self.NOISE:
# Random Move
a = self.random_state.choice(self.possibleActions())
# Take action
ns = self.state + self.ACTIONS[a]
# Check bounds on state values
if (ns[0] < 0 or ns[0] == self.ROWS or
ns[1] < 0 or ns[1] == self.COLS or
self.map[ns[0], ns[1]] == self.BLOCKED):
ns = self.state.copy()
else:
# If in bounds, update the current state
self.state = ns.copy()
# Compute the reward
if self.map[ns[0], ns[1]] == self.GOAL:
r = self.GOAL_REWARD
if self.map[ns[0], ns[1]] == self.PIT:
r = self.PIT_REWARD
terminal = self.isTerminal()
return r, ns, terminal, self.possibleActions()
def s0(self):
self.state = self.start_state.copy()
return self.state, self.isTerminal(), self.possibleActions()
def isTerminal(self, s=None):
if s is None:
s = self.state
if self.map[s[0], s[1]] == self.GOAL:
return True
if self.map[s[0], s[1]] == self.PIT:
return True
return False
def showTrajectory(self, trajectory):
'''
:param trajectory: sequence of states to show
'''
for runs, trial in sorted(trajectory.items(), key=lambda pair: pair[0]):
for state in trial:
self.showDomain(s=state)
def possibleActions(self, s=None):
if s is None:
s = self.state
possibleA = np.array([], np.uint8)
        for a in range(self.actions_num):
ns = s + self.ACTIONS[a]
if (
ns[0] < 0 or ns[0] == self.ROWS or
ns[1] < 0 or ns[1] == self.COLS or
self.map[int(ns[0]), int(ns[1])] == self.BLOCKED):
continue
possibleA = np.append(possibleA, [a])
return possibleA
def expectedStep(self, s, a):
        # Returns the k possible outcomes of taking action a in state s:
        #  p:  k-by-1   probability of each transition
        #  r:  k-by-1   reward of each transition
        #  ns: k-by-|s| next state of each transition
        #  t:  k-by-1   terminal flag of each transition
        #  pa: length-k sequence of possible-action arrays for each next state
actions = self.possibleActions(s)
k = len(actions)
# Make Probabilities
intended_action_index = findElemArray1D(a, actions)
p = np.ones((k, 1)) * self.NOISE / (k * 1.)
p[intended_action_index, 0] += 1 - self.NOISE
# Make next states
ns = np.tile(s, (k, 1)).astype(int)
actions = self.ACTIONS[actions]
ns += actions
# Make next possible actions
pa = np.array([self.possibleActions(sn) for sn in ns])
# Make rewards
r = np.ones((k, 1)) * self.STEP_REWARD
goal = self.map[ns[:, 0], ns[:, 1]] == self.GOAL
pit = self.map[ns[:, 0], ns[:, 1]] == self.PIT
r[goal] = self.GOAL_REWARD
r[pit] = self.PIT_REWARD
# Make terminals
t = np.zeros((k, 1), bool)
t[goal] = True
t[pit] = True
return p, r, ns, t, pa
def allStates(self):
if self.continuous_dims == []:
# Recall that discrete dimensions are assumed to be integer
            limits = self.discrete_statespace_limits
            return perms(limits[:, 1] - limits[:, 0] + 1) + limits[:, 0]
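# --- Illustrative sketch, not part of the original domain API ---
# The comment block in expectedStep() above describes the k possible outcomes
# of taking action a in state s. A common use of that tuple is the expected
# one-step Bellman backup sum_k p_k * (r_k + gamma * V[ns_k]). The helper
# below is a minimal sketch; `domain` is assumed to be an instance of the
# grid-world class defined above and `V` a ROWS x COLS array of state values.
def _expected_backup_sketch(domain, V, s, a, gamma=0.9):
    p, r, ns, t, pa = domain.expectedStep(s, a)
    backup = 0.0
    for k in range(len(p)):
        # Terminal outcomes contribute only their immediate reward.
        future = 0.0 if t[k, 0] else gamma * V[ns[k, 0], ns[k, 1]]
        backup += p[k, 0] * (r[k, 0] + future)
    return backup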
|
|
import os
import types
import sys
import codecs
import tempfile
import tkinter.filedialog as tkFileDialog
import tkinter.messagebox as tkMessageBox
import re
from tkinter import *
from tkinter.simpledialog import askstring
from idlelib.configHandler import idleConf
from codecs import BOM_UTF8
# Try setting the locale, so that we can find out
# what encoding to use
try:
import locale
locale.setlocale(locale.LC_CTYPE, "")
except (ImportError, locale.Error):
pass
# Encoding for file names
filesystemencoding = sys.getfilesystemencoding() ### currently unused
locale_encoding = 'ascii'
if sys.platform == 'win32':
# On Windows, we could use "mbcs". However, to give the user
# a portable encoding name, we need to find the code page
try:
locale_encoding = locale.getdefaultlocale()[1]
codecs.lookup(locale_encoding)
except LookupError:
pass
else:
try:
# Different things can fail here: the locale module may not be
# loaded, it may not offer nl_langinfo, or CODESET, or the
# resulting codeset may be unknown to Python. We ignore all
# these problems, falling back to ASCII
locale_encoding = locale.nl_langinfo(locale.CODESET)
        if locale_encoding is None or locale_encoding == '':
# situation occurs on Mac OS X
locale_encoding = 'ascii'
codecs.lookup(locale_encoding)
except (NameError, AttributeError, LookupError):
# Try getdefaultlocale: it parses environment variables,
# which may give a clue. Unfortunately, getdefaultlocale has
# bugs that can cause ValueError.
try:
locale_encoding = locale.getdefaultlocale()[1]
            if locale_encoding is None or locale_encoding == '':
# situation occurs on Mac OS X
locale_encoding = 'ascii'
codecs.lookup(locale_encoding)
except (ValueError, LookupError):
pass
locale_encoding = locale_encoding.lower()
encoding = locale_encoding ### KBK 07Sep07 This is used all over IDLE, check!
### 'encoding' is used below in encode(), check!
coding_re = re.compile(r"coding[:=]\s*([-\w_.]+)")
def coding_spec(data):
"""Return the encoding declaration according to PEP 263.
When checking encoded data, only the first two lines should be passed
in to avoid a UnicodeDecodeError if the rest of the data is not unicode.
The first two lines would contain the encoding specification.
Raise a LookupError if the encoding is declared but unknown.
"""
if isinstance(data, bytes):
# This encoding might be wrong. However, the coding
# spec must be ASCII-only, so any non-ASCII characters
# around here will be ignored. Decoding to Latin-1 should
# never fail (except for memory outage)
lines = data.decode('iso-8859-1')
else:
lines = data
# consider only the first two lines
if '\n' in lines:
lst = lines.split('\n')[:2]
elif '\r' in lines:
lst = lines.split('\r')[:2]
else:
        lst = [lines]
    text = '\n'.join(lst)
    match = coding_re.search(text)
if not match:
return None
name = match.group(1)
try:
codecs.lookup(name)
except LookupError:
# The standard encoding error does not indicate the encoding
raise LookupError("Unknown encoding: "+name)
return name
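# A minimal sketch of how coding_spec() above behaves (the sample strings are
# illustrative only): a PEP 263 declaration found in the first two lines is
# returned, otherwise None.
def _coding_spec_example():
    assert coding_spec(b"# -*- coding: latin-1 -*-\n") == 'latin-1'
    assert coding_spec("x = 1\n") is None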
class IOBinding:
def __init__(self, editwin):
self.editwin = editwin
self.text = editwin.text
self.__id_open = self.text.bind("<<open-window-from-file>>", self.open)
self.__id_save = self.text.bind("<<save-window>>", self.save)
self.__id_saveas = self.text.bind("<<save-window-as-file>>",
self.save_as)
self.__id_savecopy = self.text.bind("<<save-copy-of-window-as-file>>",
self.save_a_copy)
self.fileencoding = None
self.__id_print = self.text.bind("<<print-window>>", self.print_window)
def close(self):
# Undo command bindings
self.text.unbind("<<open-window-from-file>>", self.__id_open)
self.text.unbind("<<save-window>>", self.__id_save)
self.text.unbind("<<save-window-as-file>>",self.__id_saveas)
self.text.unbind("<<save-copy-of-window-as-file>>", self.__id_savecopy)
self.text.unbind("<<print-window>>", self.__id_print)
# Break cycles
self.editwin = None
self.text = None
self.filename_change_hook = None
def get_saved(self):
return self.editwin.get_saved()
def set_saved(self, flag):
self.editwin.set_saved(flag)
def reset_undo(self):
self.editwin.reset_undo()
filename_change_hook = None
def set_filename_change_hook(self, hook):
self.filename_change_hook = hook
filename = None
dirname = None
def set_filename(self, filename):
if filename and os.path.isdir(filename):
self.filename = None
self.dirname = filename
else:
self.filename = filename
self.dirname = None
self.set_saved(1)
if self.filename_change_hook:
self.filename_change_hook()
def open(self, event=None, editFile=None):
if self.editwin.flist:
if not editFile:
filename = self.askopenfile()
else:
filename=editFile
if filename:
# If the current window has no filename and hasn't been
# modified, we replace its contents (no loss). Otherwise
# we open a new window. But we won't replace the
# shell window (which has an interp(reter) attribute), which
# gets set to "not modified" at every new prompt.
try:
interp = self.editwin.interp
except AttributeError:
interp = None
if not self.filename and self.get_saved() and not interp:
self.editwin.flist.open(filename, self.loadfile)
else:
self.editwin.flist.open(filename)
else:
self.text.focus_set()
return "break"
#
# Code for use outside IDLE:
if self.get_saved():
reply = self.maybesave()
if reply == "cancel":
self.text.focus_set()
return "break"
if not editFile:
filename = self.askopenfile()
else:
filename=editFile
if filename:
self.loadfile(filename)
else:
self.text.focus_set()
return "break"
eol = r"(\r\n)|\n|\r" # \r\n (Windows), \n (UNIX), or \r (Mac)
eol_re = re.compile(eol)
eol_convention = os.linesep # default
def loadfile(self, filename):
try:
# open the file in binary mode so that we can handle
# end-of-line convention ourselves.
f = open(filename,'rb')
two_lines = f.readline() + f.readline()
f.seek(0)
bytes = f.read()
f.close()
except IOError as msg:
tkMessageBox.showerror("I/O Error", str(msg), master=self.text)
return False
chars, converted = self._decode(two_lines, bytes)
if chars is None:
tkMessageBox.showerror("Decoding Error",
"File %s\nFailed to Decode" % filename,
parent=self.text)
return False
# We now convert all end-of-lines to '\n's
firsteol = self.eol_re.search(chars)
if firsteol:
self.eol_convention = firsteol.group(0)
chars = self.eol_re.sub(r"\n", chars)
self.text.delete("1.0", "end")
self.set_filename(None)
self.text.insert("1.0", chars)
self.reset_undo()
self.set_filename(filename)
if converted:
# We need to save the conversion results first
# before being able to execute the code
self.set_saved(False)
self.text.mark_set("insert", "1.0")
self.text.see("insert")
self.updaterecentfileslist(filename)
return True
def _decode(self, two_lines, bytes):
"Create a Unicode string."
chars = None
# Check presence of a UTF-8 signature first
if bytes.startswith(BOM_UTF8):
try:
chars = bytes[3:].decode("utf-8")
except UnicodeDecodeError:
# has UTF-8 signature, but fails to decode...
return None, False
else:
# Indicates that this file originally had a BOM
self.fileencoding = 'BOM'
return chars, False
# Next look for coding specification
try:
enc = coding_spec(two_lines)
except LookupError as name:
tkMessageBox.showerror(
title="Error loading the file",
message="The encoding '%s' is not known to this Python "\
"installation. The file may not display correctly" % name,
master = self.text)
enc = None
except UnicodeDecodeError:
return None, False
if enc:
try:
chars = str(bytes, enc)
self.fileencoding = enc
return chars, False
except UnicodeDecodeError:
pass
# Try ascii:
try:
chars = str(bytes, 'ascii')
self.fileencoding = None
return chars, False
except UnicodeDecodeError:
pass
# Try utf-8:
try:
chars = str(bytes, 'utf-8')
self.fileencoding = 'utf-8'
return chars, False
except UnicodeDecodeError:
pass
# Finally, try the locale's encoding. This is deprecated;
# the user should declare a non-ASCII encoding
try:
# Wait for the editor window to appear
self.editwin.text.update()
enc = askstring(
"Specify file encoding",
"The file's encoding is invalid for Python 3.x.\n"
"IDLE will convert it to UTF-8.\n"
"What is the current encoding of the file?",
initialvalue = locale_encoding,
parent = self.editwin.text)
if enc:
chars = str(bytes, enc)
self.fileencoding = None
return chars, True
except (UnicodeDecodeError, LookupError):
pass
return None, False # None on failure
def maybesave(self):
if self.get_saved():
return "yes"
message = "Do you want to save %s before closing?" % (
self.filename or "this untitled document")
m = tkMessageBox.Message(
title="Save On Close",
message=message,
icon=tkMessageBox.QUESTION,
type=tkMessageBox.YESNOCANCEL,
master=self.text)
reply = m.show()
if reply == "yes":
self.save(None)
if not self.get_saved():
reply = "cancel"
self.text.focus_set()
return reply
def save(self, event):
if not self.filename:
self.save_as(event)
else:
if self.writefile(self.filename):
self.set_saved(1)
try:
self.editwin.store_file_breaks()
except AttributeError: # may be a PyShell
pass
self.text.focus_set()
return "break"
def save_as(self, event):
filename = self.asksavefile()
if filename:
if self.writefile(filename):
self.set_filename(filename)
self.set_saved(1)
try:
self.editwin.store_file_breaks()
except AttributeError:
pass
self.text.focus_set()
self.updaterecentfileslist(filename)
return "break"
def save_a_copy(self, event):
filename = self.asksavefile()
if filename:
self.writefile(filename)
self.text.focus_set()
self.updaterecentfileslist(filename)
return "break"
def writefile(self, filename):
self.fixlastline()
text = self.text.get("1.0", "end-1c")
if self.eol_convention != "\n":
text = text.replace("\n", self.eol_convention)
chars = self.encode(text)
try:
f = open(filename, "wb")
f.write(chars)
f.flush()
f.close()
return True
except IOError as msg:
tkMessageBox.showerror("I/O Error", str(msg),
master=self.text)
return False
def encode(self, chars):
if isinstance(chars, bytes):
# This is either plain ASCII, or Tk was returning mixed-encoding
# text to us. Don't try to guess further.
return chars
# Preserve a BOM that might have been present on opening
if self.fileencoding == 'BOM':
return BOM_UTF8 + chars.encode("utf-8")
# See whether there is anything non-ASCII in it.
# If not, no need to figure out the encoding.
try:
return chars.encode('ascii')
except UnicodeError:
pass
# Check if there is an encoding declared
try:
# a string, let coding_spec slice it to the first two lines
enc = coding_spec(chars)
failed = None
except LookupError as msg:
failed = msg
enc = None
else:
if not enc:
# PEP 3120: default source encoding is UTF-8
enc = 'utf-8'
if enc:
try:
return chars.encode(enc)
except UnicodeError:
failed = "Invalid encoding '%s'" % enc
tkMessageBox.showerror(
"I/O Error",
"%s.\nSaving as UTF-8" % failed,
master = self.text)
# Fallback: save as UTF-8, with BOM - ignoring the incorrect
# declared encoding
return BOM_UTF8 + chars.encode("utf-8")
def fixlastline(self):
c = self.text.get("end-2c")
if c != '\n':
self.text.insert("end-1c", "\n")
def print_window(self, event):
m = tkMessageBox.Message(
title="Print",
message="Print to Default Printer",
icon=tkMessageBox.QUESTION,
type=tkMessageBox.OKCANCEL,
default=tkMessageBox.OK,
master=self.text)
reply = m.show()
if reply != tkMessageBox.OK:
self.text.focus_set()
return "break"
tempfilename = None
saved = self.get_saved()
if saved:
filename = self.filename
# shell undo is reset after every prompt, looks saved, probably isn't
if not saved or filename is None:
(tfd, tempfilename) = tempfile.mkstemp(prefix='IDLE_tmp_')
filename = tempfilename
os.close(tfd)
if not self.writefile(tempfilename):
os.unlink(tempfilename)
return "break"
        platform = os.name
        printPlatform = 1
if platform == 'posix': #posix platform
command = idleConf.GetOption('main','General',
'print-command-posix')
command = command + " 2>&1"
elif platform == 'nt': #win32 platform
command = idleConf.GetOption('main','General','print-command-win')
else: #no printing for this platform
printPlatform=0
if printPlatform: #we can try to print for this platform
command = command % filename
pipe = os.popen(command, "r")
# things can get ugly on NT if there is no printer available.
output = pipe.read().strip()
status = pipe.close()
if status:
output = "Printing failed (exit status 0x%x)\n" % \
status + output
if output:
output = "Printing command: %s\n" % repr(command) + output
tkMessageBox.showerror("Print status", output, master=self.text)
else: #no printing for this platform
message="Printing is not enabled for this platform: %s" % platform
tkMessageBox.showinfo("Print status", message, master=self.text)
if tempfilename:
os.unlink(tempfilename)
return "break"
opendialog = None
savedialog = None
filetypes = [
("Python files", "*.py *.pyw", "TEXT"),
("Text files", "*.txt", "TEXT"),
("All files", "*"),
]
def askopenfile(self):
dir, base = self.defaultfilename("open")
if not self.opendialog:
self.opendialog = tkFileDialog.Open(master=self.text,
filetypes=self.filetypes)
filename = self.opendialog.show(initialdir=dir, initialfile=base)
return filename
def defaultfilename(self, mode="open"):
if self.filename:
return os.path.split(self.filename)
elif self.dirname:
return self.dirname, ""
else:
try:
pwd = os.getcwd()
except os.error:
pwd = ""
return pwd, ""
def asksavefile(self):
dir, base = self.defaultfilename("save")
if not self.savedialog:
self.savedialog = tkFileDialog.SaveAs(master=self.text,
filetypes=self.filetypes)
filename = self.savedialog.show(initialdir=dir, initialfile=base)
return filename
def updaterecentfileslist(self,filename):
"Update recent file list on all editor windows"
if self.editwin.flist:
self.editwin.update_recent_files_list(filename)
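# --- Illustrative summary, not used by IDLE itself ---
# _decode() above tries, in order: a UTF-8 BOM, a PEP 263 coding declaration,
# ASCII, UTF-8, and finally an encoding supplied by the user. A condensed
# sketch of that fallback order for a plain bytes object:
def _decode_order_sketch(raw):
    if raw.startswith(BOM_UTF8):
        return raw[3:].decode('utf-8')
    try:
        # coding_spec() only needs the first two lines.
        enc = coding_spec(b''.join(raw.splitlines(True)[:2]))
    except LookupError:
        enc = None
    for candidate in (enc, 'ascii', 'utf-8'):
        if not candidate:
            continue
        try:
            return raw.decode(candidate)
        except UnicodeDecodeError:
            pass
    return None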
def test():
root = Tk()
class MyEditWin:
def __init__(self, text):
self.text = text
self.flist = None
self.text.bind("<Control-o>", self.open)
self.text.bind("<Control-s>", self.save)
self.text.bind("<Alt-s>", self.save_as)
self.text.bind("<Alt-z>", self.save_a_copy)
def get_saved(self): return 0
def set_saved(self, flag): pass
def reset_undo(self): pass
def open(self, event):
self.text.event_generate("<<open-window-from-file>>")
def save(self, event):
self.text.event_generate("<<save-window>>")
def save_as(self, event):
self.text.event_generate("<<save-window-as-file>>")
def save_a_copy(self, event):
self.text.event_generate("<<save-copy-of-window-as-file>>")
text = Text(root)
text.pack()
text.focus_set()
editwin = MyEditWin(text)
io = IOBinding(editwin)
root.mainloop()
if __name__ == "__main__":
test()
|
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import config
from . import state
class prefix_limit(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/bgp/global/afi-safis/afi-safi/ipv6-labeled-unicast/prefix-limit. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
"""
__slots__ = ("_path_helper", "_extmethods", "__config", "__state")
_yang_name = "prefix-limit"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"bgp",
"global",
"afi-safis",
"afi-safi",
"ipv6-labeled-unicast",
"prefix-limit",
]
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/config (container)
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """config must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__config = t
if hasattr(self, "_set"):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state (container)
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = OrderedDict([("config", config), ("state", state)])
from . import config
from . import state
class prefix_limit(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/bgp/global/afi-safis/afi-safi/ipv6-labeled-unicast/prefix-limit. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
"""
__slots__ = ("_path_helper", "_extmethods", "__config", "__state")
_yang_name = "prefix-limit"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"bgp",
"global",
"afi-safis",
"afi-safi",
"ipv6-labeled-unicast",
"prefix-limit",
]
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/config (container)
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """config must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__config = t
if hasattr(self, "_set"):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state (container)
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = OrderedDict([("config", config), ("state", state)])
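# --- Illustrative usage sketch, not generated by pyangbind ---
# The docstrings above describe prefix_limit as an auto-generated container
# holding 'config' and 'state' child containers. A minimal sketch that only
# touches members visible in this file:
def _prefix_limit_sketch():
    pl = prefix_limit()
    # With no parent attached, _path() yields the full YANG path of the node.
    path = pl._path()
    children = list(pl._pyangbind_elements)  # ['config', 'state']
    return path, children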
|
|
import json
try:
from http.client import HTTPConnection
except ImportError:
from httplib import HTTPConnection
try:
from urllib.parse import quote
except ImportError:
from urllib import quote
from testcases import TestServerTestCase
from .utils import skipIfSpatialite
golden_gate_park_query = quote("""{"type": "MultiPolygon", "coordinates": [[[[-122.511067, 37.771276], [-122.510037, 37.766391], [-122.510037, 37.763813], [-122.456822, 37.765848], [-122.452960, 37.766459], [-122.454848, 37.773990], [-122.475362, 37.773040], [-122.511067, 37.771276]]]]}""")
class HTTPTestCase(TestServerTestCase):
def setUp(self):
self.start_test_server(address='localhost', port=8001)
def tearDown(self):
self.stop_test_server()
def get_connection(self):
return HTTPConnection('localhost', 8001)
def test_get_apis_json(self):
connection = self.get_connection()
connection.request('GET', '/api/v1/', headers={'Accept': 'application/json'})
response = connection.getresponse()
connection.close()
data = json.loads(response.read().decode('utf-8'))
self.assertEqual(response.status, 200)
self.assertEqual(data, {"geonotes": {"list_endpoint": "/api/v1/geonotes/", "schema": "/api/v1/geonotes/schema/"}, "users": {"list_endpoint": "/api/v1/users/", "schema": "/api/v1/users/schema/"}})
def test_get_apis_xml(self):
connection = self.get_connection()
connection.request('GET', '/api/v1/', headers={'Accept': 'application/xml'})
response = connection.getresponse()
connection.close()
data = response.read().decode('utf-8')
self.assertEqual(response.status, 200)
self.assertEqual(data, '<?xml version=\'1.0\' encoding=\'utf-8\'?>\n<response><geonotes type="hash"><list_endpoint>/api/v1/geonotes/</list_endpoint><schema>/api/v1/geonotes/schema/</schema></geonotes><users type="hash"><list_endpoint>/api/v1/users/</list_endpoint><schema>/api/v1/users/schema/</schema></users></response>')
def test_get_list(self):
connection = self.get_connection()
connection.request('GET', '/api/v1/geonotes/', headers={'Accept': 'application/json'})
response = connection.getresponse()
connection.close()
data = json.loads(response.read().decode('utf-8'))
self.assertEqual(response.status, 200)
self.assertEqual(len(data['objects']), 3)
        # Use assertAlmostEqual for coordinates because floating point
        # representations can differ slightly between spatial backends.
self.assertEqual(data['objects'][0]['content'], "Wooo two points inside Golden Gate park")
self.assertEqual(data['objects'][0]['points']['type'], 'MultiPoint')
self.assertAlmostEqual(data['objects'][0]['points']['coordinates'][0][0], -122.475233, places=5)
self.assertAlmostEqual(data['objects'][0]['points']['coordinates'][0][1], 37.768616, places=5)
self.assertAlmostEqual(data['objects'][0]['points']['coordinates'][1][0], -122.470416, places=5)
self.assertAlmostEqual(data['objects'][0]['points']['coordinates'][1][1], 37.767381, places=5)
self.assertEqual(data['objects'][1]['content'], "This is a note about Golden Gate Park. It contains Golden Gate Park\'s polygon")
self.assertEqual(data['objects'][1]['polys']['type'], 'MultiPolygon')
self.assertEqual(len(data['objects'][1]['polys']['coordinates']), 1)
self.assertEqual(len(data['objects'][1]['polys']['coordinates'][0]), 1)
self.assertEqual(len(data['objects'][1]['polys']['coordinates'][0][0]), 8)
self.assertEqual(data['objects'][2]['content'], "A path inside Golden Gate Park! Huzzah!")
self.assertEqual(data['objects'][2]['lines']['type'], 'MultiLineString')
self.assertAlmostEqual(data['objects'][2]['lines']['coordinates'][0][0][0], -122.504544, places=5)
self.assertAlmostEqual(data['objects'][2]['lines']['coordinates'][0][0][1], 37.767002, places=5)
self.assertAlmostEqual(data['objects'][2]['lines']['coordinates'][0][1][0], -122.499995, places=5)
self.assertAlmostEqual(data['objects'][2]['lines']['coordinates'][0][1][1], 37.768223, places=5)
def test_post_object(self):
connection = self.get_connection()
post_data = '{"content": "A new post.", "is_active": true, "title": "New Title", "slug": "new-title", "user": "/api/v1/users/1/"}'
connection.request('POST', '/api/v1/geonotes/', body=post_data, headers={'Accept': 'application/json', 'Content-type': 'application/json'})
response = connection.getresponse()
self.assertEqual(response.status, 201)
headers = dict(response.getheaders())
location = headers.get('location', headers.get('Location'))
self.assertEqual(location, 'http://localhost:8001/api/v1/geonotes/4/')
# make sure posted object exists
connection.request('GET', '/api/v1/geonotes/4/', headers={'Accept': 'application/json'})
response = connection.getresponse()
connection.close()
self.assertEqual(response.status, 200)
data = response.read().decode('utf-8')
obj = json.loads(data)
self.assertEqual(obj['content'], 'A new post.')
self.assertEqual(obj['is_active'], True)
self.assertEqual(obj['user'], '/api/v1/users/1/')
def test_post_geojson(self):
connection = self.get_connection()
post_data = """{
"content": "A new post.", "is_active": true, "title": "New Title2",
"slug": "new-title2", "user": "/api/v1/users/1/",
"polys": { "type": "MultiPolygon", "coordinates": [ [ [ [ -122.511067, 37.771276 ], [ -122.510037, 37.766391 ], [ -122.510037, 37.763813 ], [ -122.456822, 37.765848 ], [ -122.452960, 37.766459 ], [ -122.454848, 37.773990 ], [ -122.475362, 37.773040 ], [ -122.511067, 37.771276 ] ] ] ] }
}"""
connection.request('POST', '/api/v1/geonotes/', body=post_data, headers={'Accept': 'application/json', 'Content-type': 'application/json'})
response = connection.getresponse()
self.assertEqual(response.status, 201)
headers = dict(response.getheaders())
location = headers.get('location', headers.get('Location'))
self.assertEqual(location, 'http://localhost:8001/api/v1/geonotes/4/')
# make sure posted object exists
connection.request('GET', '/api/v1/geonotes/4/', headers={'Accept': 'application/json'})
response = connection.getresponse()
connection.close()
self.assertEqual(response.status, 200)
data = response.read().decode('utf-8')
obj = json.loads(data)
self.assertEqual(obj['content'], 'A new post.')
self.assertEqual(obj['is_active'], True)
self.assertEqual(obj['user'], '/api/v1/users/1/')
self.assertEqual(obj['polys'], {u'type': u'MultiPolygon', u'coordinates': [[[[-122.511067, 37.771276], [-122.510037, 37.766390999999999], [-122.510037, 37.763812999999999], [-122.456822, 37.765847999999998], [-122.45296, 37.766458999999998], [-122.454848, 37.773989999999998], [-122.475362, 37.773040000000002], [-122.511067, 37.771276]]]]})
def test_post_xml(self):
connection = self.get_connection()
post_data = """<object><created>2010-03-30T20:05:00</created><polys type="null"/><is_active type="boolean">True</is_active><title>Points inside Golden Gate Park note 2</title><lines type="null"/><slug>points-inside-golden-gate-park-note-2</slug><content>A new post.</content><points type="hash"><type>MultiPoint</type><coordinates type="list"><objects><value type="float">-122.475233</value><value type="float">37.768617</value></objects><objects><value type="float">-122.470416</value><value type="float">37.767382</value></objects></coordinates></points><user>/api/v1/users/1/</user></object>"""
connection.request('POST', '/api/v1/geonotes/', body=post_data, headers={'Accept': 'application/xml', 'Content-type': 'application/xml'})
response = connection.getresponse()
self.assertEqual(response.status, 201)
headers = dict(response.getheaders())
location = headers.get('location', headers.get('Location'))
self.assertEqual(location, 'http://localhost:8001/api/v1/geonotes/4/')
# make sure posted object exists
connection.request('GET', '/api/v1/geonotes/4/', headers={'Accept': 'application/json'})
response = connection.getresponse()
connection.close()
self.assertEqual(response.status, 200)
data = response.read().decode('utf-8')
obj = json.loads(data)
self.assertEqual(obj['content'], 'A new post.')
self.assertEqual(obj['is_active'], True)
self.assertEqual(obj['user'], '/api/v1/users/1/')
# Weeeee! GeoJSON returned!
self.assertEqual(obj['points'], {"coordinates": [[-122.475233, 37.768616999999999], [-122.470416, 37.767381999999998]], "type": "MultiPoint"})
# Or we can ask for XML
connection.request('GET', '/api/v1/geonotes/4/', headers={'Accept': 'application/xml'})
response = connection.getresponse()
connection.close()
self.assertEqual(response.status, 200)
data = response.read().decode('utf-8')
self.assertIn('<points type="hash"><coordinates type="list"><objects><value type="float">-122.475233</value><value type="float">37.768617</value></objects><objects><value type="float">-122.470416</value><value type="float">37.767382</value></objects></coordinates><type>MultiPoint</type></points>', data)
def test_filter_within_on_points(self):
# Get points
connection = self.get_connection()
connection.request('GET', '/api/v1/geonotes/?points__within=%s' % golden_gate_park_query, headers={'Accept': 'application/json'})
response = connection.getresponse()
connection.close()
self.assertEqual(response.status, 200)
data = json.loads(response.read().decode('utf-8'))
# We get back the points inside Golden Gate park!
self.assertEqual(data['objects'][0]['content'], "Wooo two points inside Golden Gate park")
self.assertEqual(data['objects'][0]['points']['type'], 'MultiPoint')
self.assertAlmostEqual(data['objects'][0]['points']['coordinates'][0][0], -122.475233, places=5)
self.assertAlmostEqual(data['objects'][0]['points']['coordinates'][0][1], 37.768616, places=5)
self.assertAlmostEqual(data['objects'][0]['points']['coordinates'][1][0], -122.470416, places=5)
self.assertAlmostEqual(data['objects'][0]['points']['coordinates'][1][1], 37.767381, places=5)
@skipIfSpatialite
def test_filter_within_on_lines(self):
# Get lines
connection = self.get_connection()
connection.request('GET', '/api/v1/geonotes/?lines__within=%s' % golden_gate_park_query, headers={'Accept': 'application/json'})
response = connection.getresponse()
connection.close()
self.assertEqual(response.status, 200)
data = json.loads(response.read().decode('utf-8'))
# We get back the line inside Golden Gate park!
self.assertEqual(data['objects'][0]['content'], "A path inside Golden Gate Park! Huzzah!")
self.assertEqual(data['objects'][0]['lines']['type'], 'MultiLineString')
self.assertAlmostEqual(data['objects'][0]['lines']['coordinates'][0][0][0], -122.504544, places=5)
self.assertAlmostEqual(data['objects'][0]['lines']['coordinates'][0][0][1], 37.767002, places=5)
self.assertAlmostEqual(data['objects'][0]['lines']['coordinates'][0][1][0], -122.499995, places=5)
self.assertAlmostEqual(data['objects'][0]['lines']['coordinates'][0][1][1], 37.768223, places=5)
@skipIfSpatialite
def test_filter_contains(self):
points_inside_golden_gate_park = """{"coordinates": [[-122.475233, 37.768616999999999], [-122.470416, 37.767381999999998]], "type": "MultiPoint"}"""
# Get polys that contain the points
connection = self.get_connection()
connection.request('GET', '/api/v1/geonotes/?polys__contains=%s' % quote(points_inside_golden_gate_park), headers={'Accept': 'application/json'})
response = connection.getresponse()
connection.close()
self.assertEqual(response.status, 200)
data = json.loads(response.read().decode('utf-8'))
# We get back the golden gate park polygon!
self.assertEqual(data['objects'][0]['content'], "This is a note about Golden Gate Park. It contains Golden Gate Park\'s polygon")
self.assertEqual(data['objects'][0]['polys']['type'], 'MultiPolygon')
self.assertEqual(len(data['objects'][0]['polys']['coordinates']), 1)
self.assertEqual(len(data['objects'][0]['polys']['coordinates'][0]), 1)
self.assertEqual(len(data['objects'][0]['polys']['coordinates'][0][0]), 8)
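# --- Illustrative helper, not part of the original test suite ---
# The filter tests above pass a URL-quoted GeoJSON geometry as the value of a
# `__within` or `__contains` lookup. A minimal sketch of how such a query
# string can be assembled (field and lookup names are examples only):
def _build_geo_filter(field, lookup, geometry):
    return '/api/v1/geonotes/?%s__%s=%s' % (field, lookup, quote(json.dumps(geometry)))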
|
|
import string, re, sys, datetime
from .core import TomlError
if sys.version_info[0] == 2:
_chr = unichr
else:
_chr = chr
def load(fin, translate=lambda t, x, v: v):
return loads(fin.read(), translate=translate, filename=fin.name)
def loads(s, filename='<string>', translate=lambda t, x, v: v):
if isinstance(s, bytes):
s = s.decode('utf-8')
s = s.replace('\r\n', '\n')
root = {}
tables = {}
scope = root
src = _Source(s, filename=filename)
ast = _p_toml(src)
def error(msg):
raise TomlError(msg, pos[0], pos[1], filename)
def process_value(v):
kind, text, value, pos = v
if kind == 'str' and value.startswith('\n'):
value = value[1:]
if kind == 'array':
if value and any(k != value[0][0] for k, t, v, p in value[1:]):
error('array-type-mismatch')
value = [process_value(item) for item in value]
elif kind == 'table':
value = dict([(k, process_value(value[k])) for k in value])
return translate(kind, text, value)
for kind, value, pos in ast:
if kind == 'kv':
k, v = value
if k in scope:
error('duplicate_keys. Key "{0}" was used more than once.'.format(k))
scope[k] = process_value(v)
else:
is_table_array = (kind == 'table_array')
cur = tables
for name in value[:-1]:
if isinstance(cur.get(name), list):
d, cur = cur[name][-1]
else:
d, cur = cur.setdefault(name, (None, {}))
scope = {}
name = value[-1]
if name not in cur:
if is_table_array:
cur[name] = [(scope, {})]
else:
cur[name] = (scope, {})
elif isinstance(cur[name], list):
if not is_table_array:
error('table_type_mismatch')
cur[name].append((scope, {}))
else:
if is_table_array:
error('table_type_mismatch')
old_scope, next_table = cur[name]
if old_scope is not None:
error('duplicate_tables')
cur[name] = (scope, next_table)
def merge_tables(scope, tables):
if scope is None:
scope = {}
for k in tables:
if k in scope:
error('key_table_conflict')
v = tables[k]
if isinstance(v, list):
scope[k] = [merge_tables(sc, tbl) for sc, tbl in v]
else:
scope[k] = merge_tables(v[0], v[1])
return scope
return merge_tables(root, tables)
class _Source:
def __init__(self, s, filename=None):
self.s = s
self._pos = (1, 1)
self._last = None
self._filename = filename
self.backtrack_stack = []
def last(self):
return self._last
def pos(self):
return self._pos
def fail(self):
return self._expect(None)
def consume_dot(self):
if self.s:
self._last = self.s[0]
            self.s = self.s[1:]
self._advance(self._last)
return self._last
return None
def expect_dot(self):
return self._expect(self.consume_dot())
def consume_eof(self):
if not self.s:
self._last = ''
return True
return False
def expect_eof(self):
return self._expect(self.consume_eof())
def consume(self, s):
if self.s.startswith(s):
self.s = self.s[len(s):]
self._last = s
self._advance(s)
return True
return False
def expect(self, s):
return self._expect(self.consume(s))
def consume_re(self, re):
m = re.match(self.s)
if m:
self.s = self.s[len(m.group(0)):]
self._last = m
self._advance(m.group(0))
return m
return None
def expect_re(self, re):
return self._expect(self.consume_re(re))
def __enter__(self):
self.backtrack_stack.append((self.s, self._pos))
def __exit__(self, type, value, traceback):
if type is None:
self.backtrack_stack.pop()
else:
self.s, self._pos = self.backtrack_stack.pop()
return type == TomlError
def commit(self):
self.backtrack_stack[-1] = (self.s, self._pos)
def _expect(self, r):
if not r:
raise TomlError('msg', self._pos[0], self._pos[1], self._filename)
return r
def _advance(self, s):
suffix_pos = s.rfind('\n')
if suffix_pos == -1:
self._pos = (self._pos[0], self._pos[1] + len(s))
else:
self._pos = (self._pos[0] + s.count('\n'), len(s) - suffix_pos)
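# --- Illustrative note, not part of the original parser ---
# _Source implements backtracking through the context-manager protocol:
# entering `with s:` saves the current position, a TomlError raised inside
# the block restores it, and s.commit() makes the progress so far permanent.
# A minimal sketch of the idiom used by the _p_* functions below:
def _try_parse_sketch(s, parse_fn, default=None):
    with s:
        return parse_fn(s)
    return default  # reached only if parse_fn failed and was rolled back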
_ews_re = re.compile(r'(?:[ \t]|#[^\n]*\n|#[^\n]*\Z|\n)*')
def _p_ews(s):
s.expect_re(_ews_re)
_ws_re = re.compile(r'[ \t]*')
def _p_ws(s):
s.expect_re(_ws_re)
_escapes = { 'b': '\b', 'n': '\n', 'r': '\r', 't': '\t', '"': '"', '\'': '\'',
'\\': '\\', '/': '/', 'f': '\f' }
_basicstr_re = re.compile(r'[^"\\\000-\037]*')
_short_uni_re = re.compile(r'u([0-9a-fA-F]{4})')
_long_uni_re = re.compile(r'U([0-9a-fA-F]{8})')
_escapes_re = re.compile('[bnrt"\'\\\\/f]')
_newline_esc_re = re.compile('\n[ \t\n]*')
def _p_basicstr_content(s, content=_basicstr_re):
res = []
while True:
res.append(s.expect_re(content).group(0))
if not s.consume('\\'):
break
if s.consume_re(_newline_esc_re):
pass
elif s.consume_re(_short_uni_re) or s.consume_re(_long_uni_re):
res.append(_chr(int(s.last().group(1), 16)))
else:
s.expect_re(_escapes_re)
res.append(_escapes[s.last().group(0)])
return ''.join(res)
_key_re = re.compile(r'[0-9a-zA-Z-_]+')
def _p_key(s):
with s:
s.expect('"')
r = _p_basicstr_content(s, _basicstr_re)
s.expect('"')
return r
return s.expect_re(_key_re).group(0)
_float_re = re.compile(r'[+-]?(?:0|[1-9](?:_?\d)*)(?:\.\d(?:_?\d)*)?(?:[eE][+-]?(?:\d(?:_?\d)*))?')
_datetime_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))')
_basicstr_ml_re = re.compile(r'(?:(?:|"|"")[^"\\\000-\011\013-\037])*')
_litstr_re = re.compile(r"[^'\000-\037]*")
_litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\011\013-\037]))*")
def _p_value(s):
pos = s.pos()
if s.consume('true'):
return 'bool', s.last(), True, pos
if s.consume('false'):
return 'bool', s.last(), False, pos
if s.consume('"'):
if s.consume('""'):
r = _p_basicstr_content(s, _basicstr_ml_re)
s.expect('"""')
else:
r = _p_basicstr_content(s, _basicstr_re)
s.expect('"')
return 'str', r, r, pos
if s.consume('\''):
if s.consume('\'\''):
r = s.expect_re(_litstr_ml_re).group(0)
s.expect('\'\'\'')
else:
r = s.expect_re(_litstr_re).group(0)
s.expect('\'')
return 'str', r, r, pos
if s.consume_re(_datetime_re):
m = s.last()
s0 = m.group(0)
r = map(int, m.groups()[:6])
if m.group(7):
micro = float(m.group(7))
else:
micro = 0
if m.group(8):
g = int(m.group(8), 10) * 60 + int(m.group(9), 10)
tz = _TimeZone(datetime.timedelta(0, g * 60))
else:
tz = _TimeZone(datetime.timedelta(0, 0))
y, m, d, H, M, S = r
dt = datetime.datetime(y, m, d, H, M, S, int(micro * 1000000), tz)
return 'datetime', s0, dt, pos
if s.consume_re(_float_re):
m = s.last().group(0)
r = m.replace('_','')
if '.' in m or 'e' in m or 'E' in m:
return 'float', m, float(r), pos
else:
return 'int', m, int(r, 10), pos
if s.consume('['):
items = []
with s:
while True:
_p_ews(s)
items.append(_p_value(s))
s.commit()
_p_ews(s)
s.expect(',')
s.commit()
_p_ews(s)
s.expect(']')
return 'array', None, items, pos
if s.consume('{'):
_p_ws(s)
items = {}
if not s.consume('}'):
k = _p_key(s)
_p_ws(s)
s.expect('=')
_p_ws(s)
items[k] = _p_value(s)
_p_ws(s)
while s.consume(','):
_p_ws(s)
k = _p_key(s)
_p_ws(s)
s.expect('=')
_p_ws(s)
items[k] = _p_value(s)
_p_ws(s)
s.expect('}')
return 'table', None, items, pos
s.fail()
def _p_stmt(s):
pos = s.pos()
    if s.consume('['):
is_array = s.consume('[')
_p_ws(s)
keys = [_p_key(s)]
_p_ws(s)
while s.consume('.'):
_p_ws(s)
keys.append(_p_key(s))
_p_ws(s)
s.expect(']')
if is_array:
s.expect(']')
return 'table_array' if is_array else 'table', keys, pos
key = _p_key(s)
_p_ws(s)
s.expect('=')
_p_ws(s)
value = _p_value(s)
return 'kv', (key, value), pos
_stmtsep_re = re.compile(r'(?:[ \t]*(?:#[^\n]*)?\n)+[ \t]*')
def _p_toml(s):
stmts = []
_p_ews(s)
with s:
stmts.append(_p_stmt(s))
while True:
s.commit()
s.expect_re(_stmtsep_re)
stmts.append(_p_stmt(s))
_p_ews(s)
s.expect_eof()
return stmts
class _TimeZone(datetime.tzinfo):
def __init__(self, offset):
self._offset = offset
def utcoffset(self, dt):
return self._offset
def dst(self, dt):
return None
def tzname(self, dt):
m = self._offset.total_seconds() // 60
if m < 0:
res = '-'
m = -m
else:
res = '+'
h = m // 60
m = m - h * 60
return '{}{:.02}{:.02}'.format(res, h, m)
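# --- Illustrative usage sketch, not part of the original module ---
# loads() above parses a TOML document into nested dicts, with an optional
# translate(kind, text, value) hook applied to every parsed value. A minimal
# example on an inline document:
def _toml_example():
    doc = 'title = "TOML sample"\n[owner]\nname = "Tom"\n'
    data = loads(doc)
    # data == {'title': 'TOML sample', 'owner': {'name': 'Tom'}}
    return data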
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .cqlhandling import CqlParsingRuleSet, Hint
from cassandra.metadata import maybe_escape_name
simple_cql_types = set(('ascii', 'bigint', 'blob', 'boolean', 'counter', 'date', 'decimal', 'double', 'float', 'inet', 'int',
'smallint', 'text', 'time', 'timestamp', 'timeuuid', 'tinyint', 'uuid', 'varchar', 'varint'))
simple_cql_types.difference_update(('set', 'map', 'list'))
from . import helptopics
cqldocs = helptopics.CQL3HelpTopics()
class UnexpectedTableStructure(UserWarning):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return 'Unexpected table structure; may not translate correctly to CQL. ' + self.msg
SYSTEM_KEYSPACES = ('system', 'system_schema', 'system_traces', 'system_auth', 'system_distributed')
NONALTERBALE_KEYSPACES = ('system', 'system_schema')
class Cql3ParsingRuleSet(CqlParsingRuleSet):
columnfamily_layout_options = (
('bloom_filter_fp_chance', None),
('comment', None),
('dclocal_read_repair_chance', 'local_read_repair_chance'),
('gc_grace_seconds', None),
('min_index_interval', None),
('max_index_interval', None),
('read_repair_chance', None),
('default_time_to_live', None),
('speculative_retry', None),
('memtable_flush_period_in_ms', None),
)
columnfamily_layout_map_options = (
# (CQL3 option name, schema_columnfamilies column name (or None if same),
# list of known map keys)
('compaction', 'compaction_strategy_options',
('class', 'max_threshold', 'tombstone_compaction_interval', 'tombstone_threshold', 'enabled', 'unchecked_tombstone_compaction', 'only_purge_repaired_tombstones')),
('compression', 'compression_parameters',
('sstable_compression', 'chunk_length_kb', 'crc_check_chance')),
('caching', None,
('rows_per_partition', 'keys')),
)
obsolete_cf_options = ()
consistency_levels = (
'ANY',
'ONE',
'TWO',
'THREE',
'QUORUM',
'ALL',
'LOCAL_QUORUM',
'EACH_QUORUM',
'SERIAL'
)
@classmethod
def escape_value(cls, value):
if value is None:
return 'NULL' # this totally won't work
if isinstance(value, bool):
value = str(value).lower()
elif isinstance(value, float):
return '%f' % value
elif isinstance(value, int):
return str(value)
return "'%s'" % value.replace("'", "''")
@staticmethod
def dequote_name(name):
name = name.strip()
if name == '':
return name
if name[0] == '"' and name[-1] == '"':
return name[1:-1].replace('""', '"')
else:
return name.lower()
@staticmethod
def dequote_value(cqlword):
cqlword = cqlword.strip()
if cqlword == '':
return cqlword
if cqlword[0] == "'" and cqlword[-1] == "'":
cqlword = cqlword[1:-1].replace("''", "'")
return cqlword
CqlRuleSet = Cql3ParsingRuleSet()
# convenience for remainder of module
completer_for = CqlRuleSet.completer_for
explain_completion = CqlRuleSet.explain_completion
dequote_value = CqlRuleSet.dequote_value
dequote_name = CqlRuleSet.dequote_name
escape_value = CqlRuleSet.escape_value
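# A minimal sketch of the quoting helpers above (values are illustrative):
# dequote_name() lowercases unquoted identifiers and strips double quotes,
# while dequote_value()/escape_value() handle single-quoted CQL literals.
def _quoting_example():
    assert dequote_name('"MyTable"') == 'MyTable'
    assert dequote_name('MyTable') == 'mytable'
    assert dequote_value("'it''s'") == "it's"
    assert escape_value("O'Brien") == "'O''Brien'"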
# BEGIN SYNTAX/COMPLETION RULE DEFINITIONS
syntax_rules = r'''
<Start> ::= <CQL_Statement>*
;
<CQL_Statement> ::= [statements]=<statementBody> ";"
;
# the order of these terminal productions is significant:
<endline> ::= /\n/ ;
JUNK ::= /([ \t\r\f\v]+|(--|[/][/])[^\n\r]*([\n\r]|$)|[/][*].*?[*][/])/ ;
<stringLiteral> ::= <quotedStringLiteral>
| <pgStringLiteral> ;
<quotedStringLiteral> ::= /'([^']|'')*'/ ;
<pgStringLiteral> ::= /\$\$(?:(?!\$\$)|[^$])*\$\$/;
<quotedName> ::= /"([^"]|"")*"/ ;
<float> ::= /-?[0-9]+\.[0-9]+/ ;
<uuid> ::= /[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/ ;
<blobLiteral> ::= /0x[0-9a-f]+/ ;
<wholenumber> ::= /[0-9]+/ ;
<identifier> ::= /[a-z][a-z0-9_]*/ ;
<colon> ::= ":" ;
<star> ::= "*" ;
<endtoken> ::= ";" ;
<op> ::= /[-+=,().]/ ;
<cmp> ::= /[<>!]=?/ ;
<brackets> ::= /[][{}]/ ;
<integer> ::= "-"? <wholenumber> ;
<boolean> ::= "true"
| "false"
;
<unclosedPgString>::= /\$\$(?:(?!\$\$)|[^$])*/ ;
<unclosedString> ::= /'([^']|'')*/ ;
<unclosedName> ::= /"([^"]|"")*/ ;
<unclosedComment> ::= /[/][*].*$/ ;
<term> ::= <stringLiteral>
| <integer>
| <float>
| <uuid>
| <boolean>
| <blobLiteral>
| <collectionLiteral>
| <functionLiteral> <functionArguments>
| "NULL"
;
<functionLiteral> ::= (<identifier> ( "." <identifier> )?)
| "TOKEN"
;
<functionArguments> ::= "(" ( <term> ( "," <term> )* )? ")"
;
<tokenDefinition> ::= token="TOKEN" "(" <term> ( "," <term> )* ")"
| <term>
;
<cident> ::= <quotedName>
| <identifier>
| <unreservedKeyword>
;
<colname> ::= <cident> ; # just an alias
<collectionLiteral> ::= <listLiteral>
| <setLiteral>
| <mapLiteral>
;
<listLiteral> ::= "[" ( <term> ( "," <term> )* )? "]"
;
<setLiteral> ::= "{" ( <term> ( "," <term> )* )? "}"
;
<mapLiteral> ::= "{" <term> ":" <term> ( "," <term> ":" <term> )* "}"
;
<anyFunctionName> ::= ( ksname=<cfOrKsName> dot="." )? udfname=<cfOrKsName> ;
<userFunctionName> ::= ( ksname=<nonSystemKeyspaceName> dot="." )? udfname=<cfOrKsName> ;
<refUserFunctionName> ::= udfname=<cfOrKsName> ;
<userAggregateName> ::= ( ksname=<nonSystemKeyspaceName> dot="." )? udaname=<cfOrKsName> ;
<functionAggregateName> ::= ( ksname=<nonSystemKeyspaceName> dot="." )? functionname=<cfOrKsName> ;
<aggregateName> ::= <userAggregateName>
;
<functionName> ::= <functionAggregateName>
| "TOKEN"
;
<statementBody> ::= <useStatement>
| <selectStatement>
| <dataChangeStatement>
| <schemaChangeStatement>
| <authenticationStatement>
| <authorizationStatement>
;
<dataChangeStatement> ::= <insertStatement>
| <updateStatement>
| <deleteStatement>
| <truncateStatement>
| <batchStatement>
;
<schemaChangeStatement> ::= <createKeyspaceStatement>
| <createColumnFamilyStatement>
| <createIndexStatement>
| <createMaterializedViewStatement>
| <createUserTypeStatement>
| <createFunctionStatement>
| <createAggregateStatement>
| <createTriggerStatement>
| <dropKeyspaceStatement>
| <dropColumnFamilyStatement>
| <dropIndexStatement>
| <dropMaterializedViewStatement>
| <dropUserTypeStatement>
| <dropFunctionStatement>
| <dropAggregateStatement>
| <dropTriggerStatement>
| <alterTableStatement>
| <alterKeyspaceStatement>
| <alterUserTypeStatement>
;
<authenticationStatement> ::= <createUserStatement>
| <alterUserStatement>
| <dropUserStatement>
| <listUsersStatement>
| <createRoleStatement>
| <alterRoleStatement>
| <dropRoleStatement>
| <listRolesStatement>
;
<authorizationStatement> ::= <grantStatement>
| <grantRoleStatement>
| <revokeStatement>
| <revokeRoleStatement>
| <listPermissionsStatement>
;
# timestamp is included here, since it's also a keyword
<simpleStorageType> ::= typename=( <identifier> | <stringLiteral> | "timestamp" ) ;
<userType> ::= utname=<cfOrKsName> ;
<storageType> ::= <simpleStorageType> | <collectionType> | <frozenCollectionType> | <userType> ;
# Note: autocomplete for frozen collection types does not handle nesting past depth 1 properly,
# but that's a lot of work to fix for little benefit.
<collectionType> ::= "map" "<" <simpleStorageType> "," ( <simpleStorageType> | <userType> ) ">"
| "list" "<" ( <simpleStorageType> | <userType> ) ">"
| "set" "<" ( <simpleStorageType> | <userType> ) ">"
;
<frozenCollectionType> ::= "frozen" "<" "map" "<" <storageType> "," <storageType> ">" ">"
| "frozen" "<" "list" "<" <storageType> ">" ">"
| "frozen" "<" "set" "<" <storageType> ">" ">"
;
<columnFamilyName> ::= ( ksname=<cfOrKsName> dot="." )? cfname=<cfOrKsName> ;
<materializedViewName> ::= ( ksname=<cfOrKsName> dot="." )? mvname=<cfOrKsName> ;
<userTypeName> ::= ( ksname=<cfOrKsName> dot="." )? utname=<cfOrKsName> ;
<keyspaceName> ::= ksname=<cfOrKsName> ;
<nonSystemKeyspaceName> ::= ksname=<cfOrKsName> ;
<alterableKeyspaceName> ::= ksname=<cfOrKsName> ;
<cfOrKsName> ::= <identifier>
| <quotedName>
| <unreservedKeyword>;
<unreservedKeyword> ::= nocomplete=
( "key"
| "clustering"
# | "count" -- to get count(*) completion, treat count as reserved
| "ttl"
| "compact"
| "storage"
| "type"
| "values" )
;
<property> ::= [propname]=<cident> propeq="=" [propval]=<propertyValue>
;
<propertyValue> ::= propsimpleval=( <stringLiteral>
| <identifier>
| <integer>
| <float>
| <unreservedKeyword> )
# we don't use <mapLiteral> here so we can get more targeted
# completions:
| propsimpleval="{" [propmapkey]=<term> ":" [propmapval]=<term>
( ender="," [propmapkey]=<term> ":" [propmapval]=<term> )*
ender="}"
;
'''
def prop_equals_completer(ctxt, cass):
if not working_on_keyspace(ctxt):
# we know if the thing in the property name position is "compact" or
# "clustering" that there won't actually be an equals sign, because
# there are no properties by those names. there are, on the other hand,
# table properties that start with those keywords which don't have
# equals signs at all.
curprop = ctxt.get_binding('propname')[-1].upper()
if curprop in ('COMPACT', 'CLUSTERING'):
return ()
return ['=']
completer_for('property', 'propeq')(prop_equals_completer)
@completer_for('property', 'propname')
def prop_name_completer(ctxt, cass):
if working_on_keyspace(ctxt):
return ks_prop_name_completer(ctxt, cass)
else:
return cf_prop_name_completer(ctxt, cass)
@completer_for('propertyValue', 'propsimpleval')
def prop_val_completer(ctxt, cass):
if working_on_keyspace(ctxt):
return ks_prop_val_completer(ctxt, cass)
else:
return cf_prop_val_completer(ctxt, cass)
@completer_for('propertyValue', 'propmapkey')
def prop_val_mapkey_completer(ctxt, cass):
if working_on_keyspace(ctxt):
return ks_prop_val_mapkey_completer(ctxt, cass)
else:
return cf_prop_val_mapkey_completer(ctxt, cass)
@completer_for('propertyValue', 'propmapval')
def prop_val_mapval_completer(ctxt, cass):
if working_on_keyspace(ctxt):
return ks_prop_val_mapval_completer(ctxt, cass)
else:
return cf_prop_val_mapval_completer(ctxt, cass)
@completer_for('propertyValue', 'ender')
def prop_val_mapender_completer(ctxt, cass):
if working_on_keyspace(ctxt):
return ks_prop_val_mapender_completer(ctxt, cass)
else:
return cf_prop_val_mapender_completer(ctxt, cass)
def ks_prop_name_completer(ctxt, cass):
optsseen = ctxt.get_binding('propname', ())
if 'replication' not in optsseen:
return ['replication']
return ["durable_writes"]
def ks_prop_val_completer(ctxt, cass):
optname = ctxt.get_binding('propname')[-1]
if optname == 'durable_writes':
return ["'true'", "'false'"]
if optname == 'replication':
return ["{'class': '"]
return ()
def ks_prop_val_mapkey_completer(ctxt, cass):
optname = ctxt.get_binding('propname')[-1]
if optname != 'replication':
return ()
keysseen = map(dequote_value, ctxt.get_binding('propmapkey', ()))
valsseen = map(dequote_value, ctxt.get_binding('propmapval', ()))
for k, v in zip(keysseen, valsseen):
if k == 'class':
repclass = v
break
else:
return ["'class'"]
    if repclass in CqlRuleSet.replication_factor_strategies:
        opts = set(('replication_factor',))
    elif repclass == 'NetworkTopologyStrategy':
        return [Hint('<dc_name>')]
    else:
        # unknown replication class: no known sub-options to offer
        return ()
    return map(escape_value, opts.difference(keysseen))
def ks_prop_val_mapval_completer(ctxt, cass):
optname = ctxt.get_binding('propname')[-1]
if optname != 'replication':
return ()
currentkey = dequote_value(ctxt.get_binding('propmapkey')[-1])
if currentkey == 'class':
return map(escape_value, CqlRuleSet.replication_strategies)
return [Hint('<term>')]
def ks_prop_val_mapender_completer(ctxt, cass):
optname = ctxt.get_binding('propname')[-1]
if optname != 'replication':
return [',']
keysseen = map(dequote_value, ctxt.get_binding('propmapkey', ()))
valsseen = map(dequote_value, ctxt.get_binding('propmapval', ()))
for k, v in zip(keysseen, valsseen):
if k == 'class':
repclass = v
break
else:
return [',']
if repclass in CqlRuleSet.replication_factor_strategies:
if 'replication_factor' not in keysseen:
return [',']
if repclass == 'NetworkTopologyStrategy' and len(keysseen) == 1:
return [',']
return ['}']
def cf_prop_name_completer(ctxt, cass):
return [c[0] for c in (CqlRuleSet.columnfamily_layout_options +
CqlRuleSet.columnfamily_layout_map_options)]
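# Illustrative sketch (the helper name _example_known_map_keys is ours, not
# part of cqlsh): the (CQL3 option name, schema column, known map keys) tuples
# in columnfamily_layout_map_options can be scanned to find the known
# sub-option keys of a map-valued table option such as 'compaction'.
def _example_known_map_keys(option_name):
    for cql3option, _, subopts in CqlRuleSet.columnfamily_layout_map_options:
        if cql3option == option_name:
            return list(subopts)
    return []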
def cf_prop_val_completer(ctxt, cass):
exist_opts = ctxt.get_binding('propname')
this_opt = exist_opts[-1]
if this_opt == 'compression':
return ["{'sstable_compression': '"]
if this_opt == 'compaction':
return ["{'class': '"]
if this_opt == 'caching':
return ["{'keys': '"]
if any(this_opt == opt[0] for opt in CqlRuleSet.obsolete_cf_options):
return ["'<obsolete_option>'"]
if this_opt in ('read_repair_chance', 'bloom_filter_fp_chance',
'dclocal_read_repair_chance'):
return [Hint('<float_between_0_and_1>')]
if this_opt in ('min_compaction_threshold', 'max_compaction_threshold',
'gc_grace_seconds', 'min_index_interval', 'max_index_interval'):
return [Hint('<integer>')]
return [Hint('<option_value>')]
def cf_prop_val_mapkey_completer(ctxt, cass):
optname = ctxt.get_binding('propname')[-1]
for cql3option, _, subopts in CqlRuleSet.columnfamily_layout_map_options:
if optname == cql3option:
break
else:
return ()
keysseen = map(dequote_value, ctxt.get_binding('propmapkey', ()))
valsseen = map(dequote_value, ctxt.get_binding('propmapval', ()))
pairsseen = dict(zip(keysseen, valsseen))
if optname == 'compression':
return map(escape_value, set(subopts).difference(keysseen))
if optname == 'caching':
return map(escape_value, set(subopts).difference(keysseen))
if optname == 'compaction':
opts = set(subopts)
try:
csc = pairsseen['class']
except KeyError:
return ["'class'"]
csc = csc.split('.')[-1]
if csc == 'SizeTieredCompactionStrategy':
opts.add('min_sstable_size')
opts.add('min_threshold')
opts.add('bucket_high')
opts.add('bucket_low')
elif csc == 'LeveledCompactionStrategy':
opts.add('sstable_size_in_mb')
elif csc == 'DateTieredCompactionStrategy':
opts.add('base_time_seconds')
opts.add('max_sstable_age_days')
opts.add('min_threshold')
opts.add('max_window_size_seconds')
opts.add('timestamp_resolution')
return map(escape_value, opts)
return ()
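# Worked example of the compaction branch above: once {'class':
# 'SizeTieredCompactionStrategy'} has been entered, the completer offers the
# generic compaction keys plus 'min_sstable_size', 'min_threshold',
# 'bucket_high' and 'bucket_low'; for 'LeveledCompactionStrategy' only
# 'sstable_size_in_mb' is added.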
def cf_prop_val_mapval_completer(ctxt, cass):
opt = ctxt.get_binding('propname')[-1]
key = dequote_value(ctxt.get_binding('propmapkey')[-1])
if opt == 'compaction':
if key == 'class':
return map(escape_value, CqlRuleSet.available_compaction_classes)
return [Hint('<option_value>')]
elif opt == 'compression':
if key == 'sstable_compression':
return map(escape_value, CqlRuleSet.available_compression_classes)
return [Hint('<option_value>')]
elif opt == 'caching':
if key == 'rows_per_partition':
return ["'ALL'", "'NONE'", Hint('#rows_per_partition')]
elif key == 'keys':
return ["'ALL'", "'NONE'"]
return ()
def cf_prop_val_mapender_completer(ctxt, cass):
return [',', '}']
@completer_for('tokenDefinition', 'token')
def token_word_completer(ctxt, cass):
return ['token(']
@completer_for('simpleStorageType', 'typename')
def storagetype_completer(ctxt, cass):
return simple_cql_types
@completer_for('keyspaceName', 'ksname')
def ks_name_completer(ctxt, cass):
return map(maybe_escape_name, cass.get_keyspace_names())
@completer_for('nonSystemKeyspaceName', 'ksname')
def ks_name_completer(ctxt, cass):
ksnames = [n for n in cass.get_keyspace_names() if n not in SYSTEM_KEYSPACES]
return map(maybe_escape_name, ksnames)
@completer_for('alterableKeyspaceName', 'ksname')
def ks_name_completer(ctxt, cass):
ksnames = [n for n in cass.get_keyspace_names() if n not in NONALTERBALE_KEYSPACES]
return map(maybe_escape_name, ksnames)
def cf_ks_name_completer(ctxt, cass):
return [maybe_escape_name(ks) + '.' for ks in cass.get_keyspace_names()]
completer_for('columnFamilyName', 'ksname')(cf_ks_name_completer)
completer_for('materializedViewName', 'ksname')(cf_ks_name_completer)
def cf_ks_dot_completer(ctxt, cass):
name = dequote_name(ctxt.get_binding('ksname'))
if name in cass.get_keyspace_names():
return ['.']
return []
completer_for('columnFamilyName', 'dot')(cf_ks_dot_completer)
completer_for('materializedViewName', 'dot')(cf_ks_dot_completer)
@completer_for('columnFamilyName', 'cfname')
def cf_name_completer(ctxt, cass):
ks = ctxt.get_binding('ksname', None)
if ks is not None:
ks = dequote_name(ks)
try:
cfnames = cass.get_columnfamily_names(ks)
except Exception:
if ks is None:
return ()
raise
return map(maybe_escape_name, cfnames)
@completer_for('materializedViewName', 'mvname')
def mv_name_completer(ctxt, cass):
ks = ctxt.get_binding('ksname', None)
if ks is not None:
ks = dequote_name(ks)
try:
mvnames = cass.get_materialized_view_names(ks)
except Exception:
if ks is None:
return ()
raise
return map(maybe_escape_name, mvnames)
completer_for('userTypeName', 'ksname')(cf_ks_name_completer)
completer_for('userTypeName', 'dot')(cf_ks_dot_completer)
def ut_name_completer(ctxt, cass):
ks = ctxt.get_binding('ksname', None)
if ks is not None:
ks = dequote_name(ks)
try:
utnames = cass.get_usertype_names(ks)
except Exception:
if ks is None:
return ()
raise
return map(maybe_escape_name, utnames)
completer_for('userTypeName', 'utname')(ut_name_completer)
completer_for('userType', 'utname')(ut_name_completer)
@completer_for('unreservedKeyword', 'nocomplete')
def unreserved_keyword_completer(ctxt, cass):
# we never want to provide completions through this production;
# this is always just to allow use of some keywords as column
# names, CF names, property values, etc.
return ()
def get_table_meta(ctxt, cass):
ks = ctxt.get_binding('ksname', None)
if ks is not None:
ks = dequote_name(ks)
cf = dequote_name(ctxt.get_binding('cfname'))
return cass.get_table_meta(ks, cf)
def get_ut_layout(ctxt, cass):
ks = ctxt.get_binding('ksname', None)
if ks is not None:
ks = dequote_name(ks)
ut = dequote_name(ctxt.get_binding('utname'))
return cass.get_usertype_layout(ks, ut)
def working_on_keyspace(ctxt):
wat = ctxt.get_binding('wat').upper()
if wat in ('KEYSPACE', 'SCHEMA'):
return True
return False
syntax_rules += r'''
<useStatement> ::= "USE" <keyspaceName>
;
<selectStatement> ::= "SELECT" ( "JSON" )? <selectClause>
"FROM" (cf=<columnFamilyName> | mv=<materializedViewName>)
( "WHERE" <whereClause> )?
( "ORDER" "BY" <orderByClause> ( "," <orderByClause> )* )?
( "LIMIT" limit=<wholenumber> )?
( "ALLOW" "FILTERING" )?
;
<whereClause> ::= <relation> ( "AND" <relation> )*
;
<relation> ::= [rel_lhs]=<cident> ( "[" <term> "]" )? ( "=" | "<" | ">" | "<=" | ">=" | "CONTAINS" ( "KEY" )? ) <term>
| token="TOKEN" "(" [rel_tokname]=<cident>
( "," [rel_tokname]=<cident> )*
")" ("=" | "<" | ">" | "<=" | ">=") <tokenDefinition>
| [rel_lhs]=<cident> "IN" "(" <term> ( "," <term> )* ")"
;
<selectClause> ::= "DISTINCT"? <selector> ("AS" <cident>)? ("," <selector> ("AS" <cident>)?)*
| "*"
;
<udtSubfieldSelection> ::= <identifier> "." <identifier>
;
<selector> ::= [colname]=<cident>
| <udtSubfieldSelection>
| "WRITETIME" "(" [colname]=<cident> ")"
| "TTL" "(" [colname]=<cident> ")"
| "COUNT" "(" star=( "*" | "1" ) ")"
| "CAST" "(" <selector> "AS" <storageType> ")"
| <functionName> <selectionFunctionArguments>
;
<selectionFunctionArguments> ::= "(" ( <selector> ( "," <selector> )* )? ")"
;
<orderByClause> ::= [ordercol]=<cident> ( "ASC" | "DESC" )?
;
'''
def udf_name_completer(ctxt, cass):
ks = ctxt.get_binding('ksname', None)
if ks is not None:
ks = dequote_name(ks)
try:
udfnames = cass.get_userfunction_names(ks)
except Exception:
if ks is None:
return ()
raise
return map(maybe_escape_name, udfnames)
def uda_name_completer(ctxt, cass):
ks = ctxt.get_binding('ksname', None)
if ks is not None:
ks = dequote_name(ks)
try:
udanames = cass.get_useraggregate_names(ks)
except Exception:
if ks is None:
return ()
raise
return map(maybe_escape_name, udanames)
def udf_uda_name_completer(ctxt, cass):
ks = ctxt.get_binding('ksname', None)
if ks is not None:
ks = dequote_name(ks)
try:
functionnames = cass.get_userfunction_names(ks) + cass.get_useraggregate_names(ks)
except Exception:
if ks is None:
return ()
raise
return map(maybe_escape_name, functionnames)
def ref_udf_name_completer(ctxt, cass):
try:
udanames = cass.get_userfunction_names(None)
except Exception:
return ()
return map(maybe_escape_name, udanames)
completer_for('functionAggregateName', 'ksname')(cf_ks_name_completer)
completer_for('functionAggregateName', 'dot')(cf_ks_dot_completer)
completer_for('functionAggregateName', 'functionname')(udf_uda_name_completer)
completer_for('anyFunctionName', 'ksname')(cf_ks_name_completer)
completer_for('anyFunctionName', 'dot')(cf_ks_dot_completer)
completer_for('anyFunctionName', 'udfname')(udf_name_completer)
completer_for('userFunctionName', 'ksname')(cf_ks_name_completer)
completer_for('userFunctionName', 'dot')(cf_ks_dot_completer)
completer_for('userFunctionName', 'udfname')(udf_name_completer)
completer_for('refUserFunctionName', 'udfname')(ref_udf_name_completer)
completer_for('userAggregateName', 'ksname')(cf_ks_name_completer)
completer_for('userAggregateName', 'dot')(cf_ks_dot_completer)
completer_for('userAggregateName', 'udaname')(uda_name_completer)
@completer_for('orderByClause', 'ordercol')
def select_order_column_completer(ctxt, cass):
prev_order_cols = ctxt.get_binding('ordercol', ())
keyname = ctxt.get_binding('keyname')
if keyname is None:
keyname = ctxt.get_binding('rel_lhs', ())
if not keyname:
return [Hint("Can't ORDER BY here: need to specify partition key in WHERE clause")]
layout = get_table_meta(ctxt, cass)
order_by_candidates = [col.name for col in layout.clustering_key]
if len(order_by_candidates) > len(prev_order_cols):
return [maybe_escape_name(order_by_candidates[len(prev_order_cols)])]
return [Hint('No more orderable columns here.')]
@completer_for('relation', 'token')
def relation_token_word_completer(ctxt, cass):
return ['TOKEN(']
@completer_for('relation', 'rel_tokname')
def relation_token_subject_completer(ctxt, cass):
layout = get_table_meta(ctxt, cass)
return [key.name for key in layout.partition_key]
@completer_for('relation', 'rel_lhs')
def select_relation_lhs_completer(ctxt, cass):
layout = get_table_meta(ctxt, cass)
filterable = set()
already_filtered_on = map(dequote_name, ctxt.get_binding('rel_lhs', ()))
for num in range(0, len(layout.partition_key)):
if num == 0 or layout.partition_key[num - 1].name in already_filtered_on:
filterable.add(layout.partition_key[num].name)
else:
break
for num in range(0, len(layout.clustering_key)):
if num == 0 or layout.clustering_key[num - 1].name in already_filtered_on:
filterable.add(layout.clustering_key[num].name)
else:
break
for idx in layout.indexes.itervalues():
filterable.add(idx.index_options["target"])
return map(maybe_escape_name, filterable)
explain_completion('selector', 'colname')
syntax_rules += r'''
<insertStatement> ::= "INSERT" "INTO" cf=<columnFamilyName>
( ( "(" [colname]=<cident> ( "," [colname]=<cident> )* ")"
"VALUES" "(" [newval]=<term> ( valcomma="," [newval]=<term> )* valcomma=")")
| ("JSON" <stringLiteral>))
( "IF" "NOT" "EXISTS")?
( "USING" [insertopt]=<usingOption>
( "AND" [insertopt]=<usingOption> )* )?
;
<usingOption> ::= "TIMESTAMP" <wholenumber>
| "TTL" <wholenumber>
;
'''
def regular_column_names(table_meta):
if not table_meta or not table_meta.columns:
return []
regular_columns = list(set(table_meta.columns.keys()) -
set([key.name for key in table_meta.partition_key]) -
set([key.name for key in table_meta.clustering_key]))
return regular_columns
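# Illustrative sketch with mock metadata objects (the _Col class and the
# attribute values below are ours, not driver types): a table's regular
# columns are whatever remains after removing partition-key and
# clustering-key columns from the full column map.
def _example_regular_columns():
    class _Col(object):
        def __init__(self, name):
            self.name = name
    meta = type('_Meta', (object,), {})()
    meta.columns = {'id': None, 'ts': None, 'value': None}
    meta.partition_key = [_Col('id')]
    meta.clustering_key = [_Col('ts')]
    return regular_column_names(meta)  # -> ['value']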
@completer_for('insertStatement', 'colname')
def insert_colname_completer(ctxt, cass):
layout = get_table_meta(ctxt, cass)
colnames = set(map(dequote_name, ctxt.get_binding('colname', ())))
keycols = layout.primary_key
for k in keycols:
if k.name not in colnames:
return [maybe_escape_name(k.name)]
normalcols = set(regular_column_names(layout)) - colnames
return map(maybe_escape_name, normalcols)
@completer_for('insertStatement', 'newval')
def insert_newval_completer(ctxt, cass):
layout = get_table_meta(ctxt, cass)
insertcols = map(dequote_name, ctxt.get_binding('colname'))
valuesdone = ctxt.get_binding('newval', ())
if len(valuesdone) >= len(insertcols):
return []
curcol = insertcols[len(valuesdone)]
coltype = layout.columns[curcol].cql_type
if coltype in ('map', 'set'):
return ['{']
if coltype == 'list':
return ['[']
if coltype == 'boolean':
return ['true', 'false']
return [Hint('<value for %s (%s)>' % (maybe_escape_name(curcol),
coltype))]
@completer_for('insertStatement', 'valcomma')
def insert_valcomma_completer(ctxt, cass):
layout = get_table_meta(ctxt, cass)
numcols = len(ctxt.get_binding('colname', ()))
numvals = len(ctxt.get_binding('newval', ()))
if numcols > numvals:
return [',']
return [')']
@completer_for('insertStatement', 'insertopt')
def insert_option_completer(ctxt, cass):
opts = set('TIMESTAMP TTL'.split())
for opt in ctxt.get_binding('insertopt', ()):
opts.discard(opt.split()[0])
return opts
syntax_rules += r'''
<updateStatement> ::= "UPDATE" cf=<columnFamilyName>
( "USING" [updateopt]=<usingOption>
( "AND" [updateopt]=<usingOption> )* )?
"SET" <assignment> ( "," <assignment> )*
"WHERE" <whereClause>
( "IF" ( "EXISTS" | <conditions> ))?
;
<assignment> ::= updatecol=<cident>
( "=" update_rhs=( <term> | <cident> )
( counterop=( "+" | "-" ) inc=<wholenumber>
| listadder="+" listcol=<cident> )?
| indexbracket="[" <term> "]" "=" <term> )
;
<conditions> ::= <condition> ( "AND" <condition> )*
;
<condition> ::= <cident> ( "[" <term> "]" )? (("=" | "<" | ">" | "<=" | ">=" | "!=") <term>
| "IN" "(" <term> ( "," <term> )* ")")
;
'''
@completer_for('updateStatement', 'updateopt')
def insert_option_completer(ctxt, cass):
opts = set('TIMESTAMP TTL'.split())
for opt in ctxt.get_binding('updateopt', ()):
opts.discard(opt.split()[0])
return opts
@completer_for('assignment', 'updatecol')
def update_col_completer(ctxt, cass):
layout = get_table_meta(ctxt, cass)
return map(maybe_escape_name, regular_column_names(layout))
@completer_for('assignment', 'update_rhs')
def update_countername_completer(ctxt, cass):
layout = get_table_meta(ctxt, cass)
curcol = dequote_name(ctxt.get_binding('updatecol', ''))
coltype = layout.columns[curcol].cql_type
if coltype == 'counter':
return [maybe_escape_name(curcol)]
if coltype in ('map', 'set'):
return ["{"]
if coltype == 'list':
return ["["]
return [Hint('<term (%s)>' % coltype)]
@completer_for('assignment', 'counterop')
def update_counterop_completer(ctxt, cass):
layout = get_table_meta(ctxt, cass)
curcol = dequote_name(ctxt.get_binding('updatecol', ''))
return ['+', '-'] if layout.columns[curcol].cql_type == 'counter' else []
@completer_for('assignment', 'inc')
def update_counter_inc_completer(ctxt, cass):
layout = get_table_meta(ctxt, cass)
curcol = dequote_name(ctxt.get_binding('updatecol', ''))
if layout.columns[curcol].cql_type == 'counter':
return [Hint('<wholenumber>')]
return []
@completer_for('assignment', 'listadder')
def update_listadder_completer(ctxt, cass):
rhs = ctxt.get_binding('update_rhs')
if rhs.startswith('['):
return ['+']
return []
@completer_for('assignment', 'listcol')
def update_listcol_completer(ctxt, cass):
rhs = ctxt.get_binding('update_rhs')
if rhs.startswith('['):
colname = dequote_name(ctxt.get_binding('updatecol'))
return [maybe_escape_name(colname)]
return []
@completer_for('assignment', 'indexbracket')
def update_indexbracket_completer(ctxt, cass):
layout = get_table_meta(ctxt, cass)
curcol = dequote_name(ctxt.get_binding('updatecol', ''))
coltype = layout.columns[curcol].cql_type
if coltype in ('map', 'list'):
return ['[']
return []
syntax_rules += r'''
<deleteStatement> ::= "DELETE" ( <deleteSelector> ( "," <deleteSelector> )* )?
"FROM" cf=<columnFamilyName>
( "USING" [delopt]=<deleteOption> )?
"WHERE" <whereClause>
( "IF" ( "EXISTS" | <conditions> ) )?
;
<deleteSelector> ::= delcol=<cident> ( memberbracket="[" memberselector=<term> "]" )?
;
<deleteOption> ::= "TIMESTAMP" <wholenumber>
;
'''
@completer_for('deleteStatement', 'delopt')
def delete_opt_completer(ctxt, cass):
opts = set('TIMESTAMP'.split())
for opt in ctxt.get_binding('delopt', ()):
opts.discard(opt.split()[0])
return opts
@completer_for('deleteSelector', 'delcol')
def delete_delcol_completer(ctxt, cass):
layout = get_table_meta(ctxt, cass)
return map(maybe_escape_name, regular_column_names(layout))
syntax_rules += r'''
<batchStatement> ::= "BEGIN" ( "UNLOGGED" | "COUNTER" )? "BATCH"
( "USING" [batchopt]=<usingOption>
( "AND" [batchopt]=<usingOption> )* )?
[batchstmt]=<batchStatementMember> ";"?
( [batchstmt]=<batchStatementMember> ";"? )*
"APPLY" "BATCH"
;
<batchStatementMember> ::= <insertStatement>
| <updateStatement>
| <deleteStatement>
;
'''
@completer_for('batchStatement', 'batchopt')
def batch_opt_completer(ctxt, cass):
opts = set('TIMESTAMP'.split())
for opt in ctxt.get_binding('batchopt', ()):
opts.discard(opt.split()[0])
return opts
syntax_rules += r'''
<truncateStatement> ::= "TRUNCATE" ("COLUMNFAMILY" | "TABLE")? cf=<columnFamilyName>
;
'''
syntax_rules += r'''
<createKeyspaceStatement> ::= "CREATE" wat=( "KEYSPACE" | "SCHEMA" ) ("IF" "NOT" "EXISTS")? ksname=<cfOrKsName>
"WITH" <property> ( "AND" <property> )*
;
'''
@completer_for('createKeyspaceStatement', 'wat')
def create_ks_wat_completer(ctxt, cass):
# would prefer to get rid of the "schema" nomenclature in cql3
if ctxt.get_binding('partial', '') == '':
return ['KEYSPACE']
return ['KEYSPACE', 'SCHEMA']
syntax_rules += r'''
<createColumnFamilyStatement> ::= "CREATE" wat=( "COLUMNFAMILY" | "TABLE" ) ("IF" "NOT" "EXISTS")?
( ks=<nonSystemKeyspaceName> dot="." )? cf=<cfOrKsName>
"(" ( <singleKeyCfSpec> | <compositeKeyCfSpec> ) ")"
( "WITH" <cfamProperty> ( "AND" <cfamProperty> )* )?
;
<cfamProperty> ::= <property>
| "COMPACT" "STORAGE"
| "CLUSTERING" "ORDER" "BY" "(" <cfamOrdering>
( "," <cfamOrdering> )* ")"
;
<cfamOrdering> ::= [ordercol]=<cident> ( "ASC" | "DESC" )
;
<singleKeyCfSpec> ::= [newcolname]=<cident> <storageType> "PRIMARY" "KEY"
( "," [newcolname]=<cident> <storageType> )*
;
<compositeKeyCfSpec> ::= [newcolname]=<cident> <storageType>
"," [newcolname]=<cident> <storageType> ( "static" )?
( "," [newcolname]=<cident> <storageType> ( "static" )? )*
"," "PRIMARY" k="KEY" p="(" ( partkey=<pkDef> | [pkey]=<cident> )
( c="," [pkey]=<cident> )* ")"
;
<pkDef> ::= "(" [ptkey]=<cident> "," [ptkey]=<cident>
( "," [ptkey]=<cident> )* ")"
;
'''
@completer_for('cfamOrdering', 'ordercol')
def create_cf_clustering_order_colname_completer(ctxt, cass):
colnames = map(dequote_name, ctxt.get_binding('newcolname', ()))
# Definitely some of these aren't valid for ordering, but I'm not sure
# precisely which are. This is good enough for now
return colnames
@completer_for('createColumnFamilyStatement', 'wat')
def create_cf_wat_completer(ctxt, cass):
# would prefer to get rid of the "columnfamily" nomenclature in cql3
if ctxt.get_binding('partial', '') == '':
return ['TABLE']
return ['TABLE', 'COLUMNFAMILY']
explain_completion('createColumnFamilyStatement', 'cf', '<new_table_name>')
explain_completion('compositeKeyCfSpec', 'newcolname', '<new_column_name>')
@completer_for('createColumnFamilyStatement', 'dot')
def create_cf_ks_dot_completer(ctxt, cass):
ks = dequote_name(ctxt.get_binding('ks'))
if ks in cass.get_keyspace_names():
return ['.']
return []
@completer_for('pkDef', 'ptkey')
def create_cf_pkdef_declaration_completer(ctxt, cass):
cols_declared = ctxt.get_binding('newcolname')
pieces_already = ctxt.get_binding('ptkey', ())
pieces_already = map(dequote_name, pieces_already)
while cols_declared[0] in pieces_already:
cols_declared = cols_declared[1:]
if len(cols_declared) < 2:
return ()
return [maybe_escape_name(cols_declared[0])]
@completer_for('compositeKeyCfSpec', 'pkey')
def create_cf_composite_key_declaration_completer(ctxt, cass):
cols_declared = ctxt.get_binding('newcolname')
pieces_already = ctxt.get_binding('ptkey', ()) + ctxt.get_binding('pkey', ())
pieces_already = map(dequote_name, pieces_already)
while cols_declared[0] in pieces_already:
cols_declared = cols_declared[1:]
if len(cols_declared) < 2:
return ()
return [maybe_escape_name(cols_declared[0])]
@completer_for('compositeKeyCfSpec', 'k')
def create_cf_composite_primary_key_keyword_completer(ctxt, cass):
return ['KEY (']
@completer_for('compositeKeyCfSpec', 'p')
def create_cf_composite_primary_key_paren_completer(ctxt, cass):
return ['(']
@completer_for('compositeKeyCfSpec', 'c')
def create_cf_composite_primary_key_comma_completer(ctxt, cass):
cols_declared = ctxt.get_binding('newcolname')
pieces_already = ctxt.get_binding('pkey', ())
if len(pieces_already) >= len(cols_declared) - 1:
return ()
return [',']
syntax_rules += r'''
<idxName> ::= <identifier>
| <quotedName>
| <unreservedKeyword>;
<createIndexStatement> ::= "CREATE" "CUSTOM"? "INDEX" ("IF" "NOT" "EXISTS")? indexname=<idxName>? "ON"
cf=<columnFamilyName> "(" (
col=<cident> |
"keys(" col=<cident> ")" |
"full(" col=<cident> ")"
) ")"
( "USING" <stringLiteral> ( "WITH" "OPTIONS" "=" <mapLiteral> )? )?
;
<createMaterializedViewStatement> ::= "CREATE" "MATERIALIZED" "VIEW" ("IF" "NOT" "EXISTS")? <materializedViewName>?
"AS" <selectStatement>
"PRIMARY" "KEY" <pkDef>
;
<createUserTypeStatement> ::= "CREATE" "TYPE" ( ks=<nonSystemKeyspaceName> dot="." )? typename=<cfOrKsName> "(" newcol=<cident> <storageType>
( "," [newcolname]=<cident> <storageType> )*
")"
;
<createFunctionStatement> ::= "CREATE" ("OR" "REPLACE")? "FUNCTION"
("IF" "NOT" "EXISTS")?
<userFunctionName>
( "(" ( newcol=<cident> <storageType>
( "," [newcolname]=<cident> <storageType> )* )?
")" )?
("RETURNS" "NULL" | "CALLED") "ON" "NULL" "INPUT"
"RETURNS" <storageType>
"LANGUAGE" <cident> "AS" <stringLiteral>
;
<createAggregateStatement> ::= "CREATE" ("OR" "REPLACE")? "AGGREGATE"
("IF" "NOT" "EXISTS")?
<userAggregateName>
( "("
( <storageType> ( "," <storageType> )* )?
")" )?
"SFUNC" <refUserFunctionName>
"STYPE" <storageType>
( "FINALFUNC" <refUserFunctionName> )?
( "INITCOND" <term> )?
;
'''
explain_completion('createIndexStatement', 'indexname', '<new_index_name>')
explain_completion('createUserTypeStatement', 'typename', '<new_type_name>')
explain_completion('createUserTypeStatement', 'newcol', '<new_field_name>')
@completer_for('createIndexStatement', 'col')
def create_index_col_completer(ctxt, cass):
""" Return the columns for which an index doesn't exist yet. """
layout = get_table_meta(ctxt, cass)
idx_targets = [idx.index_options["target"] for idx in layout.indexes.itervalues()]
colnames = [cd.name for cd in layout.columns.values() if cd.name not in idx_targets]
return map(maybe_escape_name, colnames)
syntax_rules += r'''
<dropKeyspaceStatement> ::= "DROP" "KEYSPACE" ("IF" "EXISTS")? ksname=<nonSystemKeyspaceName>
;
<dropColumnFamilyStatement> ::= "DROP" ( "COLUMNFAMILY" | "TABLE" ) ("IF" "EXISTS")? cf=<columnFamilyName>
;
<indexName> ::= ( ksname=<idxOrKsName> dot="." )? idxname=<idxOrKsName> ;
<idxOrKsName> ::= <identifier>
| <quotedName>
| <unreservedKeyword>;
<dropIndexStatement> ::= "DROP" "INDEX" ("IF" "EXISTS")? idx=<indexName>
;
<dropMaterializedViewStatement> ::= "DROP" "MATERIALIZED" "VIEW" ("IF" "EXISTS")? mv=<materializedViewName>
;
<dropUserTypeStatement> ::= "DROP" "TYPE" ut=<userTypeName>
;
<dropFunctionStatement> ::= "DROP" "FUNCTION" ( "IF" "EXISTS" )? <userFunctionName>
;
<dropAggregateStatement> ::= "DROP" "AGGREGATE" ( "IF" "EXISTS" )? <userAggregateName>
;
'''
@completer_for('indexName', 'ksname')
def idx_ks_name_completer(ctxt, cass):
return [maybe_escape_name(ks) + '.' for ks in cass.get_keyspace_names()]
@completer_for('indexName', 'dot')
def idx_ks_dot_completer(ctxt, cass):
name = dequote_name(ctxt.get_binding('ksname'))
if name in cass.get_keyspace_names():
return ['.']
return []
@completer_for('indexName', 'idxname')
def idx_ks_idx_name_completer(ctxt, cass):
ks = ctxt.get_binding('ksname', None)
if ks is not None:
ks = dequote_name(ks)
try:
idxnames = cass.get_index_names(ks)
except Exception:
if ks is None:
return ()
raise
return map(maybe_escape_name, idxnames)
syntax_rules += r'''
<alterTableStatement> ::= "ALTER" wat=( "COLUMNFAMILY" | "TABLE" ) cf=<columnFamilyName>
<alterInstructions>
;
<alterInstructions> ::= "ALTER" existcol=<cident> "TYPE" <storageType>
| "ADD" newcol=<cident> <storageType> ("static")?
| "DROP" existcol=<cident>
| "WITH" <cfamProperty> ( "AND" <cfamProperty> )*
| "RENAME" existcol=<cident> "TO" newcol=<cident>
( "AND" existcol=<cident> "TO" newcol=<cident> )*
;
<alterUserTypeStatement> ::= "ALTER" "TYPE" ut=<userTypeName>
<alterTypeInstructions>
;
<alterTypeInstructions> ::= "ALTER" existcol=<cident> "TYPE" <storageType>
| "ADD" newcol=<cident> <storageType>
| "RENAME" existcol=<cident> "TO" newcol=<cident>
( "AND" existcol=<cident> "TO" newcol=<cident> )*
;
'''
@completer_for('alterInstructions', 'existcol')
def alter_table_col_completer(ctxt, cass):
layout = get_table_meta(ctxt, cass)
cols = [str(md) for md in layout.columns]
return map(maybe_escape_name, cols)
@completer_for('alterTypeInstructions', 'existcol')
def alter_type_field_completer(ctxt, cass):
layout = get_ut_layout(ctxt, cass)
    fields = [field[0] for field in layout]
return map(maybe_escape_name, fields)
explain_completion('alterInstructions', 'newcol', '<new_column_name>')
explain_completion('alterTypeInstructions', 'newcol', '<new_field_name>')
syntax_rules += r'''
<alterKeyspaceStatement> ::= "ALTER" wat=( "KEYSPACE" | "SCHEMA" ) ks=<alterableKeyspaceName>
"WITH" <property> ( "AND" <property> )*
;
'''
syntax_rules += r'''
<username> ::= name=( <identifier> | <stringLiteral> )
;
<createUserStatement> ::= "CREATE" "USER" ( "IF" "NOT" "EXISTS" )? <username>
( "WITH" "PASSWORD" <stringLiteral> )?
( "SUPERUSER" | "NOSUPERUSER" )?
;
<alterUserStatement> ::= "ALTER" "USER" <username>
( "WITH" "PASSWORD" <stringLiteral> )?
( "SUPERUSER" | "NOSUPERUSER" )?
;
<dropUserStatement> ::= "DROP" "USER" ( "IF" "EXISTS" )? <username>
;
<listUsersStatement> ::= "LIST" "USERS"
;
'''
syntax_rules += r'''
<rolename> ::= <identifier>
| <quotedName>
| <unreservedKeyword>
;
<createRoleStatement> ::= "CREATE" "ROLE" <rolename>
( "WITH" <roleProperty> ("AND" <roleProperty>)*)?
;
<alterRoleStatement> ::= "ALTER" "ROLE" <rolename>
( "WITH" <roleProperty> ("AND" <roleProperty>)*)?
;
<roleProperty> ::= "PASSWORD" "=" <stringLiteral>
| "OPTIONS" "=" <mapLiteral>
| "SUPERUSER" "=" <boolean>
| "LOGIN" "=" <boolean>
;
<dropRoleStatement> ::= "DROP" "ROLE" <rolename>
;
<grantRoleStatement> ::= "GRANT" <rolename> "TO" <rolename>
;
<revokeRoleStatement> ::= "REVOKE" <rolename> "FROM" <rolename>
;
<listRolesStatement> ::= "LIST" "ROLES"
( "OF" <rolename> )? "NORECURSIVE"?
;
'''
syntax_rules += r'''
<grantStatement> ::= "GRANT" <permissionExpr> "ON" <resource> "TO" <rolename>
;
<revokeStatement> ::= "REVOKE" <permissionExpr> "ON" <resource> "FROM" <rolename>
;
<listPermissionsStatement> ::= "LIST" <permissionExpr>
( "ON" <resource> )? ( "OF" <rolename> )? "NORECURSIVE"?
;
<permission> ::= "AUTHORIZE"
| "CREATE"
| "ALTER"
| "DROP"
| "SELECT"
| "MODIFY"
| "DESCRIBE"
| "EXECUTE"
;
<permissionExpr> ::= ( <permission> "PERMISSION"? )
| ( "ALL" "PERMISSIONS"? )
;
<resource> ::= <dataResource>
| <roleResource>
| <functionResource>
;
<dataResource> ::= ( "ALL" "KEYSPACES" )
| ( "KEYSPACE" <keyspaceName> )
| ( "TABLE"? <columnFamilyName> )
;
<roleResource> ::= ("ALL" "ROLES")
| ("ROLE" <rolename>)
;
<functionResource> ::= ( "ALL" "FUNCTIONS" ("IN KEYSPACE" <keyspaceName>)? )
| ( "FUNCTION" <functionAggregateName>
( "(" ( newcol=<cident> <storageType>
( "," [newcolname]=<cident> <storageType> )* )?
")" )
)
;
'''
@completer_for('username', 'name')
def username_name_completer(ctxt, cass):
def maybe_quote(name):
if CqlRuleSet.is_valid_cql3_name(name):
return name
return "'%s'" % name
# disable completion for CREATE USER.
if ctxt.matched[0][1].upper() == 'CREATE':
return [Hint('<username>')]
session = cass.session
return [maybe_quote(row.values()[0].replace("'", "''")) for row in session.execute("LIST USERS")]
@completer_for('rolename', 'role')
def rolename_completer(ctxt, cass):
def maybe_quote(name):
if CqlRuleSet.is_valid_cql3_name(name):
return name
return "'%s'" % name
# disable completion for CREATE ROLE.
if ctxt.matched[0][1].upper() == 'CREATE':
return [Hint('<rolename>')]
session = cass.session
return [maybe_quote(row[0].replace("'", "''")) for row in session.execute("LIST ROLES")]
syntax_rules += r'''
<createTriggerStatement> ::= "CREATE" "TRIGGER" ( "IF" "NOT" "EXISTS" )? <cident>
"ON" cf=<columnFamilyName> "USING" class=<stringLiteral>
;
<dropTriggerStatement> ::= "DROP" "TRIGGER" ( "IF" "EXISTS" )? triggername=<cident>
"ON" cf=<columnFamilyName>
;
'''
explain_completion('createTriggerStatement', 'class', '\'fully qualified class name\'')
def get_trigger_names(ctxt, cass):
ks = ctxt.get_binding('ksname', None)
if ks is not None:
ks = dequote_name(ks)
return cass.get_trigger_names(ks)
@completer_for('dropTriggerStatement', 'triggername')
def drop_trigger_name_completer(ctxt, cass):
names = get_trigger_names(ctxt, cass)
return map(maybe_escape_name, names)
# END SYNTAX/COMPLETION RULE DEFINITIONS
CqlRuleSet.append_rules(syntax_rules)
import csv
import elements
import xml.etree.ElementTree as ET
from copy import deepcopy
ignored_elements = ['desc'] # ignored as elements but used as something else
allowed_elements = ['include', 'testcase', 'template', 'request', 'var', 'print', 'sleep', 'clearcookies', 'callfunction']
# main is the runner class
# parent is the class that holds the elements (either Runner or TestCase)
# element is the XML tag to be parsed
test_dir = 'tests/'
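# Illustrative sketch of the XML layout this parser expects. The tag names
# come from allowed_elements above; the ids, paths and assertion type below
# are invented for the example and depend on what the elements module
# actually supports.
_EXAMPLE_TEST_XML = '''
<tests>
    <desc>Example suite</desc>
    <var name="host">example.com</var>
    <testcase id="login">
        <request id="do-login">
            <method>POST</method>
            <path>/login</path>
            <assert type="status">200</assert>
        </request>
    </testcase>
</tests>
'''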
def get_tag_value(tag):
if tag.attrib.get('null'):
if tag.attrib['null'].lower() == 'true': return None
return tag.text.strip() if tag.text else ''
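# Worked examples of the rule above: <var name="x" null="true"/> yields None,
# <var name="x">  hi  </var> yields 'hi' (stripped), and an empty tag yields ''.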
already_imported = []
def element_include(main, parent, element):
global already_imported
if 'file' not in element.attrib.keys(): return False, 'Include tag missing attribute "file"'
if ('repeat' not in element.attrib or str(element.attrib.get('repeat')).lower() != 'true')\
and element.attrib['file'].lower() in already_imported:
#print 'File "%s" already imported, ignoring' % element.attrib['file']
return True, ''
already_imported.append(element.attrib['file'].lower())
if 'parent' in element.attrib.keys(): # the parent attribute in <include>
parent_template = deepcopy(get_step(main, element.attrib['parent']))
parent_template.id = 'parent'
parent.steps.append(parent_template)
return parse_file(main, parent, '%s%s' % (test_dir, element.attrib['file']))
def element_testcase(main, parent, element):
def fill_testcase(id, element):
testcase = elements.TestCase(id, parent=parent)
if 'tags' in element.attrib.keys(): testcase.tags = element.attrib['tags'].split(' ')
desc = get_first_child(element, 'desc')
if desc is not None: testcase.desc = desc.text
testcase.source_code = ET.tostring(element)
return testcase
if 'id' not in element.attrib.keys(): return False, 'TestCase tag missing attribute "id"'
if 'csv' in element.attrib.keys():
try:
csv_reader = csv.reader(open('%s%s' % (test_dir, element.attrib['csv']), 'rb'), escapechar='\\')
loop_counter = 0
var_name = []
for line in csv_reader:
if len(line)>0:
if loop_counter == 0: #first line is for var names
for i in range(len(line)):
var_name.append(line[i])
else: # values
testcase = fill_testcase('%s (%s)' % (element.attrib['id'], loop_counter), element)
for i in range(len(line)):
if i<len(var_name):
testcase.steps.insert(0, elements.Var(name=var_name[i], value=line[i]))
parent.steps.append(testcase)
success, reason = parse_children(main, testcase, element)
if not success: return False, reason
loop_counter += 1
return True, ''
        except Exception:
return False, 'Unable to parse csv file "%s%s"' % (test_dir, element.attrib['csv'])
else:
testcase = fill_testcase(element.attrib['id'], element)
parent.steps.append(testcase)
return parse_children(main, testcase, element)
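# Worked example of the csv expansion above (the file name and values are
# invented): with csv="users.csv" containing
#     username,password
#     alice,secret1
#     bob,secret2
# the first row supplies the Var names and each later row yields one TestCase
# ("<id> (1)", "<id> (2)", ...) with those Vars prepended to its steps.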
def element_var(main, parent, element):
if 'name' not in element.attrib.keys(): return False, 'Var tag missing attribute "name"'
parent.steps.append(elements.Var(element.attrib['name'], get_tag_value(element)))
return True, ''
def element_print(main, parent, element):
parent.steps.append(elements.Print(element.text))
return True, ''
def element_clearcookies(main, parent, element):
parent.steps.append(elements.ClearCookies())
return True, ''
def element_sleep(main, parent, element):
if not element.text: return False, 'Sleep tag has no value'
parent.steps.append(elements.Sleep(element.text))
return True, ''
def element_callfunction(main, parent, element):
if 'name' not in element.attrib.keys(): return False, 'CallFunction tag missing attribute "name"'
if 'file' not in element.attrib.keys(): return False, 'CallFunction tag missing attribute "file"'
filename = '%s%s' % (test_dir, element.attrib['file'])
try:
with open(filename): pass
except IOError:
return False, 'File "%s" for CallFunction name "%s" not found' % (filename, element.attrib['name'])
params = {}
for child in element.getchildren():
if child.tag.lower() in ['param', 'parameter']:
if 'name' not in child.attrib.keys(): return False, 'Param tag within CallFunction missing attribute "name"'
params[child.attrib['name']] = child.text
else:
            return False, 'Unrecognized tag "%s" within CallFunction' % child.tag
callfunction = elements.CallFunction(filename=filename, name=element.attrib['name'], params=params)
if 'save_var' in element.attrib.keys(): return False, 'Deprecated attribute "save_var", replace with "save-var"'
if 'save_global' in element.attrib.keys(): return False, 'Deprecated attribute "save_global", replace with "save-global"'
if 'save_file' in element.attrib.keys(): return False, 'Deprecated attribute "save_file", replace with "save-file"'
if 'save-var' in element.attrib.keys(): callfunction.save_var = element.attrib['save-var']
if 'save-global' in element.attrib.keys(): callfunction.save_global = element.attrib['save-global']
if 'save-file' in element.attrib.keys(): callfunction.save_file = element.attrib['save-file']
if not is_child_of_testcase(parent):
testcase = elements.TestCase(callfunction.name, parent=parent)
testcase.steps.append(callfunction)
if 'tags' in element.attrib.keys(): testcase.tags = element.attrib['tags'].split(' ')
parent.steps.append(testcase)
else:
parent.steps.append(callfunction)
return True, ''
def _parse_step(main, parent, element, is_template=True):
if 'id' not in element.attrib.keys(): return False, '%s tag missing attribute "id"' % ('Template' if is_template else 'Request')
step = elements.Template(element.attrib['id']) if is_template else elements.Request(element.attrib['id'])
if 'extends' in element.attrib.keys():
extended = get_step(main, element.attrib['extends'])
if not extended: return False, 'Template or Request named "%s" to extend "%s" not found' % (element.attrib['extends'], element.attrib['id'])
for v in vars(extended).keys():
if v != 'id':
step.__dict__[v] = deepcopy(extended.__dict__[v])
for e in element.getchildren():
tag = e.tag.lower()
if tag == 'desc':
step.desc = get_tag_value(e)
elif tag == 'method':
step.method = get_tag_value(e)
elif tag == 'body':
if 'enctype' in e.attrib.keys():
if e.attrib['enctype'].lower() in ['application/x-www-form-urlencoded', 'urlencoded']:
step.headers['content-type'] = 'application/x-www-form-urlencoded'
elif e.attrib['enctype'].lower() in ['multipart/form-data', 'form-data', 'multipart']:
step.headers['content-type'] = 'multipart/form-data'
elif e.attrib['enctype'].lower() in ['text/plain', 'text', 'plain']:
step.headers['content-type'] = 'text/plain'
else:
step.headers['content-type'] = 'none'
step.body = get_tag_value(e)
for child_e in e.getchildren():
if child_e.tag.lower() == 'file':
if 'name' not in child_e.attrib.keys(): return False, 'Missing attribute "name" in tag File'
if 'source' in child_e.attrib.keys(): return False, 'Attribute "source" in tag File has been deprecated'
step.body_files[child_e.attrib['name']] = get_tag_value(child_e)
if not step.body_files[child_e.attrib['name']]: return False, 'Missing value in tag File (value indicates the filename)'
step.body_file_headers[child_e.attrib['name']] = {}
for file_header_e in child_e.getchildren():
if 'name' not in file_header_e.attrib.keys(): return False, 'Missing attribute "name" in tag Header within File'
step.body_file_headers[child_e.attrib['name']][file_header_e.attrib['name']] = get_tag_value(file_header_e)
elif child_e.tag.lower() == 'field':
if 'name' not in child_e.attrib.keys(): return False, 'Missing attribute "name" in tag Field'
step.body_fields[child_e.attrib['name']] = get_tag_value(child_e)
elif child_e.tag.lower() == 'clearfiles':
step.body_files, step.body_file_headers = {}, {}
elif child_e.tag.lower() in ['clearfilenames', 'clearfilename']:
for name in get_tag_value(child_e).split("|"):
if name in step.body_files.keys(): step.body_files.pop(name)
if name in step.body_file_headers.keys(): step.body_file_headers.pop(name)
elif child_e.tag.lower() == 'clearfields':
step.body_fields = {}
elif child_e.tag.lower() in ['clearfieldnames', 'clearfieldname']:
for name in get_tag_value(child_e).split("|"):
if name in step.body_fields.keys(): step.body_fields.pop(name)
else:
return False, 'Unrecognized tag "%s" within body of step "%s"' % (child_e.tag, element.attrib['id'])
elif tag == 'path':
step.path = get_tag_value(e)
for child_e in e.getchildren():
if child_e.tag.lower() == 'field':
if 'name' not in child_e.attrib.keys(): return False, 'Missing attribute "name" in tag Field'
step.path_fields[child_e.attrib['name']] = get_tag_value(child_e)
else:
return False, 'Unrecognized tag "%s" within path of step "%s"' % (child_e.tag, element.attrib['id'])
step.base_url = e.attrib.get('base-url')
if step.base_url:
step.base_url = str(step.base_url).lower()
if step.base_url not in main.other_base_urls.keys():
return False, 'base-url "%s" not found in configuration' % step.base_url
if e.attrib.get('full'): step.path_full = True if e.attrib['full'].lower() == 'true' else False
            else: step.path_full = False
if e.attrib.get('no-proxy'): step.force_no_proxy = True if e.attrib['no-proxy'].lower() == 'true' else False
elif tag == 'var':
if 'name' not in e.attrib.keys(): return False, 'Var missing attribute "name" in request %s' % step.id
step.vars[e.attrib['name']] = get_tag_value(e)
                if step.vars[e.attrib['name']] is None: del step.vars[e.attrib['name']]
elif tag == 'header':
if 'name' not in e.attrib.keys(): return False, 'Header missing attribute "name" in request %s' % step.id
step.headers[e.attrib['name']] = get_tag_value(e)
                if step.headers[e.attrib['name']] is None: del step.headers[e.attrib['name']]
elif tag == 'clearheaders':
step.headers.clear()
elif tag in ['assertion', 'assert', 'a']:
if 'type' not in e.attrib.keys(): return False, 'Assertion missing attribute "type" in request %s' % step.id
assertion = elements.Assertion(e.attrib['type'], value=e.text, params=e.attrib)
                if 'save_var' in e.attrib.keys(): return False, 'Deprecated attribute "save_var", replace with "save-var"'
                if 'save_global' in e.attrib.keys(): return False, 'Deprecated attribute "save_global", replace with "save-global"'
                if 'save_file' in e.attrib.keys(): return False, 'Deprecated attribute "save_file", replace with "save-file"'
if 'save-var' in e.attrib.keys(): assertion.save_var = e.attrib['save-var']
if 'save-global' in e.attrib.keys(): assertion.save_global = e.attrib['save-global']
if 'save-file' in e.attrib.keys(): assertion.save_file = e.attrib['save-file']
step.assertions.append(assertion)
elif tag == 'clearassertions':
step.assertions = []
            elif tag in ['clearassertiontype', 'clearassertiontypes']:
                types = (e.text or '').lower().split('|')
                # rebuild the list instead of removing items while iterating over it
                step.assertions = [a for a in step.assertions if a.type.lower() not in types]
            else:
                return False, 'Unrecognized tag "%s" within step "%s"' % (e.tag, element.attrib['id'])
    # snapshot the raw parsed values once all child tags have been processed
    step.raw_desc, step.raw_path, step.raw_method, step.raw_body = step.desc, step.path, step.method, step.body
    step.raw_headers, step.raw_vars = step.headers, step.vars
    step.source_code = '%s\n%s' % (step.source_code, ET.tostring(element))
if not is_template and not is_child_of_testcase(parent):
testcase = elements.TestCase(step.id, parent=parent, desc=step.desc)
testcase.steps.append(step)
if 'tags' in element.attrib.keys(): testcase.tags = element.attrib['tags'].split(' ')
parent.steps.append(testcase)
else:
parent.steps.append(step)
if parent.desc is None: parent.desc = step.desc
return True, ''
def element_template(main, parent, element):
return _parse_step(main, parent, element, is_template=True)
def element_request(main, parent, element):
return _parse_step(main, parent, element, is_template=False)
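# Illustrative sketch of the "extends" handling in _parse_step (the ids and
# values below are invented for the example): a <request> can start from a
# previously parsed <template> and only override what differs.
_EXAMPLE_EXTENDS_XML = '''
<template id="json-base">
    <method>POST</method>
    <header name="content-type">application/json</header>
</template>
<request id="create-user" extends="json-base">
    <path>/users</path>
    <body>{"name": "alice"}</body>
</request>
'''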
def parse_children(main, parent, root): # parent has to have parent.steps to fill
for element in root:
if element.tag.strip().lower() not in ignored_elements:
if element.tag.strip().lower() in allowed_elements:
success, reason = globals()['element_%s' % element.tag.strip().lower()](main, parent, element)
if not success: return False, reason
else:
return False, 'Unexpected tag "%s" in "%s"' % (element.tag, parent.id)
return True, ''
def parse_file(main, parent, file):
global test_dir
if parent.__class__.__name__ == 'Runner': # quick patch to support other test dirs, refactoring needed :P
test_dir = parent.test_dir
try:
root = ET.parse(file).getroot()
except Exception, e:
return False, 'Can\'t parse file %s: %s' % (file, str(e.args))
element_desc = get_first_child(root, 'desc')
block = elements.Block(file, parent=parent, desc=(element_desc.text if element_desc is not None else None))
parent.steps.append(block)
return parse_children(main, block, root)
### Helper Functions ###
def get_step(main, id):
found = None
for s in main.steps:
if s.__class__.__name__ in ['Block', 'TestCase']:
result = get_step(s, id)
if result:
found = result
elif s.__class__.__name__ in ['Request', 'Template']:
if id.lower() == s.id.lower():
found = s
return found
def get_first_child(root, tag):
for element in root:
if element.tag.strip().lower() == tag.lower(): return element
else: return None
def is_child_of_testcase(parent):
p = parent
while True:
if p.__class__.__name__ == 'TestCase':
return True
if p.parent.__class__.__name__ in ['TestCase', 'Block']:
p = p.parent
else:
return False
########################
# Copyright (C) 2014 VA Linux Systems Japan K.K.
# Copyright (C) 2014 YAMAMOTO Takashi <yamamoto at valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_utils import importutils
import networking_ofagent.plugins.ofagent.agent.metadata as meta
from networking_ofagent.tests.unit.ofagent import ofa_test_base
class TestOFAgentFlows(ofa_test_base.OFATestBase):
_MOD = 'networking_ofagent.plugins.ofagent.agent.flows'
def setUp(self):
super(TestOFAgentFlows, self).setUp()
self.mod = importutils.import_module(self._MOD)
self.br = self.mod.OFAgentIntegrationBridge()
self.br.set_dp(self._mk_test_dp("dp"))
def test_setup_default_table(self):
br = self.br
with mock.patch.object(br, '_send_msg') as sendmsg:
br.setup_default_table()
(dp, ofp, ofpp) = br._get_dp()
arp = importutils.import_module('ryu.lib.packet.arp')
ether = importutils.import_module('ryu.ofproto.ether')
call = mock.call
expected_calls = [
call(ofpp.OFPFlowMod(dp, command=ofp.OFPFC_DELETE,
match=ofpp.OFPMatch(), out_group=ofp.OFPG_ANY,
out_port=ofp.OFPP_ANY, priority=0, table_id=ofp.OFPTT_ALL)),
call(ofpp.OFPFlowMod(dp, priority=0, table_id=0)),
call(ofpp.OFPFlowMod(dp, priority=0, table_id=1)),
call(ofpp.OFPFlowMod(dp, priority=0, table_id=2)),
call(ofpp.OFPFlowMod(dp, instructions=[
ofpp.OFPInstructionGotoTable(table_id=7)],
priority=0, table_id=3)),
call(ofpp.OFPFlowMod(dp, instructions=[
ofpp.OFPInstructionGotoTable(table_id=5)],
priority=0, table_id=4)),
call(ofpp.OFPFlowMod(dp, instructions=[
ofpp.OFPInstructionGotoTable(table_id=6)],
priority=0, table_id=5)),
call(ofpp.OFPFlowMod(dp, instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS,
[ofpp.OFPActionOutput(ofp.OFPP_CONTROLLER)])],
match=ofpp.OFPMatch(arp_op=arp.ARP_REQUEST,
eth_type=ether.ETH_TYPE_ARP), priority=1, table_id=6)),
call(ofpp.OFPFlowMod(dp, instructions=[
ofpp.OFPInstructionGotoTable(table_id=7)],
priority=0, table_id=6)),
call(ofpp.OFPFlowMod(dp, instructions=[
ofpp.OFPInstructionGotoTable(table_id=8)],
priority=0, table_id=7)),
call(ofpp.OFPFlowMod(dp, instructions=[
ofpp.OFPInstructionGotoTable(table_id=9)],
priority=0, table_id=8)),
call(ofpp.OFPFlowMod(dp, instructions=[
ofpp.OFPInstructionGotoTable(table_id=10)],
priority=0, table_id=9)),
call(ofpp.OFPFlowMod(dp, instructions=[
ofpp.OFPInstructionGotoTable(table_id=11)],
priority=0, table_id=10)),
call(ofpp.OFPFlowMod(dp, instructions=[
ofpp.OFPInstructionGotoTable(table_id=12)],
priority=0, table_id=11)),
call(ofpp.OFPFlowMod(dp, instructions=[
ofpp.OFPInstructionGotoTable(table_id=13)],
priority=0, table_id=12)),
call(ofpp.OFPFlowMod(dp, priority=0, table_id=13)),
]
sendmsg.assert_has_calls(expected_calls, any_order=True)
def test_install_arp_responder(self):
br = self.br
with mock.patch.object(br, '_send_msg') as sendmsg:
br.install_arp_responder(table_id=99)
(dp, ofp, ofpp) = br._get_dp()
arp = importutils.import_module('ryu.lib.packet.arp')
ether = importutils.import_module('ryu.ofproto.ether')
call = mock.call
expected_calls = [
call(ofpp.OFPFlowMod(dp, instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS,
[ofpp.OFPActionOutput(ofp.OFPP_CONTROLLER)])],
match=ofpp.OFPMatch(arp_op=arp.ARP_REQUEST,
eth_type=ether.ETH_TYPE_ARP), priority=1, table_id=99)),
call(ofpp.OFPFlowMod(dp, instructions=[
ofpp.OFPInstructionGotoTable(table_id=100)],
priority=0, table_id=99)),
]
sendmsg.assert_has_calls(expected_calls)
def test_install_tunnel_output(self):
br = self.br
with mock.patch.object(br, '_send_msg') as sendmsg:
br.install_tunnel_output(table_id=110, network=111,
segmentation_id=112,
port=113,
remote_ips=['192.0.2.8', '192.0.2.9'],
goto_next=True)
(dp, ofp, ofpp) = br._get_dp()
call = mock.call
expected_calls = [
call(
ofpp.OFPFlowMod(
dp,
instructions=[
ofpp.OFPInstructionActions(
ofp.OFPIT_APPLY_ACTIONS,
[
ofpp.OFPActionSetField(tunnel_id=112),
ofpp.OFPActionSetField(
tun_ipv4_dst='192.0.2.8'),
ofpp.OFPActionOutput(port=113),
ofpp.OFPActionSetField(
tun_ipv4_dst='192.0.2.9'),
ofpp.OFPActionOutput(port=113)
]
),
ofpp.OFPInstructionGotoTable(table_id=111)
],
match=ofpp.OFPMatch(
metadata=meta.mk_metadata(111, meta.LOCAL)
),
priority=1,
table_id=110
)
)
]
sendmsg.assert_has_calls(expected_calls)
def test_delete_tunnel_output(self):
br = self.br
with mock.patch.object(br, '_send_msg') as sendmsg:
br.delete_tunnel_output(table_id=110, network=111)
(dp, ofp, ofpp) = br._get_dp()
call = mock.call
expected_calls = [
call(
ofpp.OFPFlowMod(
dp,
command=ofp.OFPFC_DELETE,
match=ofpp.OFPMatch(
metadata=meta.mk_metadata(111, meta.LOCAL)
),
out_group=ofp.OFPG_ANY,
out_port=ofp.OFPP_ANY, priority=0, table_id=110
)
)
]
sendmsg.assert_has_calls(expected_calls)
def test_provision_tenant_tunnel(self):
br = self.br
with mock.patch.object(br, '_send_msg') as sendmsg:
br.provision_tenant_tunnel(network_type="gre", network=150,
segmentation_id=151)
(dp, ofp, ofpp) = br._get_dp()
call = mock.call
expected_calls = [
call(ofpp.OFPFlowMod(dp, instructions=[
ofpp.OFPInstructionWriteMetadata(metadata=150,
metadata_mask=meta.NETWORK_MASK),
ofpp.OFPInstructionGotoTable(table_id=7)],
match=ofpp.OFPMatch(tunnel_id=151), priority=1, table_id=1))
]
sendmsg.assert_has_calls(expected_calls)
def test_reclaim_tenant_tunnel(self):
br = self.br
with mock.patch.object(br, '_send_msg') as sendmsg:
br.reclaim_tenant_tunnel(network_type="gre", network=150,
segmentation_id=151)
(dp, ofp, ofpp) = br._get_dp()
call = mock.call
expected_calls = [
call(ofpp.OFPFlowMod(dp, command=ofp.OFPFC_DELETE,
match=ofpp.OFPMatch(tunnel_id=151), out_group=ofp.OFPG_ANY,
out_port=ofp.OFPP_ANY, priority=0, table_id=1))
]
sendmsg.assert_has_calls(expected_calls)
def test_provision_tenant_physnet(self):
br = self.br
with mock.patch.object(br, '_send_msg') as sendmsg:
br.provision_tenant_physnet(network_type="vlan", network=150,
segmentation_id=151, phys_port=99)
(dp, ofp, ofpp) = br._get_dp()
call = mock.call
expected_calls = [
call(ofpp.OFPFlowMod(dp, instructions=[
ofpp.OFPInstructionWriteMetadata(metadata=150,
metadata_mask=meta.NETWORK_MASK),
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionPopVlan()]),
ofpp.OFPInstructionGotoTable(table_id=3)],
match=ofpp.OFPMatch(in_port=99,
vlan_vid=151 | ofp.OFPVID_PRESENT),
priority=1, table_id=0)),
call(
ofpp.OFPFlowMod(
dp,
instructions=[
ofpp.OFPInstructionActions(
ofp.OFPIT_APPLY_ACTIONS,
[
ofpp.OFPActionPushVlan(),
ofpp.OFPActionSetField(
vlan_vid=151 | ofp.OFPVID_PRESENT
),
ofpp.OFPActionOutput(port=99),
ofpp.OFPActionPopVlan()
]
),
ofpp.OFPInstructionGotoTable(table_id=13)
],
match=ofpp.OFPMatch(
metadata=meta.mk_metadata(150, meta.LOCAL)
),
priority=1,
table_id=12
)
)
]
sendmsg.assert_has_calls(expected_calls)
def test_reclaim_tenant_physnet(self):
br = self.br
with mock.patch.object(br, '_send_msg') as sendmsg:
br.reclaim_tenant_physnet(network_type="vlan", network=150,
segmentation_id=151, phys_port=99)
(dp, ofp, ofpp) = br._get_dp()
call = mock.call
expected_calls = [
call(ofpp.OFPFlowMod(dp, command=ofp.OFPFC_DELETE,
match=ofpp.OFPMatch(in_port=99,
vlan_vid=151 | ofp.OFPVID_PRESENT),
out_group=ofp.OFPG_ANY, out_port=ofp.OFPP_ANY, priority=0,
table_id=0)),
call(ofpp.OFPFlowMod(dp, command=ofp.OFPFC_DELETE,
match=ofpp.OFPMatch(metadata=meta.mk_metadata(150)),
out_group=ofp.OFPG_ANY, out_port=ofp.OFPP_ANY, priority=0,
table_id=12))
]
sendmsg.assert_has_calls(expected_calls)
def test_check_in_port_add_tunnel_port(self):
br = self.br
with mock.patch.object(br, '_send_msg') as sendmsg:
br.check_in_port_add_tunnel_port(network_type="gre", port=99,
local_ip='192.0.2.11')
(dp, ofp, ofpp) = br._get_dp()
call = mock.call
expected_calls = [
call(ofpp.OFPFlowMod(dp,
instructions=[ofpp.OFPInstructionGotoTable(table_id=1)],
match=ofpp.OFPMatch(in_port=99, tun_ipv4_dst='192.0.2.11'),
priority=1, table_id=0))
]
sendmsg.assert_has_calls(expected_calls)
def test_check_in_port_add_local_port(self):
br = self.br
with mock.patch.object(br, '_send_msg') as sendmsg:
br.check_in_port_add_local_port(network=123, port=99)
(dp, ofp, ofpp) = br._get_dp()
call = mock.call
expected_calls = [
call(ofpp.OFPFlowMod(dp,
instructions=[
ofpp.OFPInstructionWriteMetadata(
metadata=meta.LOCAL | 123,
metadata_mask=meta.LOCAL | meta.NETWORK_MASK),
ofpp.OFPInstructionGotoTable(table_id=4)],
match=ofpp.OFPMatch(in_port=99), priority=1, table_id=0))
]
sendmsg.assert_has_calls(expected_calls)
def test_check_in_port_delete_port(self):
br = self.br
with mock.patch.object(br, '_send_msg') as sendmsg:
br.check_in_port_delete_port(port=99)
(dp, ofp, ofpp) = br._get_dp()
call = mock.call
expected_calls = [
call(ofpp.OFPFlowMod(dp, command=ofp.OFPFC_DELETE,
match=ofpp.OFPMatch(in_port=99), out_group=ofp.OFPG_ANY,
out_port=ofp.OFPP_ANY, priority=0, table_id=0))
]
sendmsg.assert_has_calls(expected_calls)
def test_local_flood_update(self):
br = self.br
with mock.patch.object(br, '_send_msg') as sendmsg:
br.local_flood_update(network=1234, ports=[1, 2, 3],
flood_unicast=True)
(dp, ofp, ofpp) = br._get_dp()
call = mock.call
expected_calls = [
call(ofpp.OFPFlowMod(dp,
instructions=[ofpp.OFPInstructionActions(
ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionOutput(port=1),
ofpp.OFPActionOutput(port=2),
ofpp.OFPActionOutput(port=3)])],
match=ofpp.OFPMatch(metadata=meta.mk_metadata(1234)),
priority=1, table_id=13)),
call(ofpp.OFPFlowMod(dp, command=ofp.OFPFC_DELETE_STRICT,
match=ofpp.OFPMatch(
eth_dst=('01:00:00:00:00:00', '01:00:00:00:00:00'),
metadata=meta.mk_metadata(1234)),
out_group=ofp.OFPG_ANY, out_port=ofp.OFPP_ANY, priority=1,
table_id=13))
]
sendmsg.assert_has_calls(expected_calls)
def test_local_flood_delete(self):
br = self.br
with mock.patch.object(br, '_send_msg') as sendmsg:
br.local_flood_delete(network=1234)
(dp, ofp, ofpp) = br._get_dp()
call = mock.call
expected_calls = [
call(ofpp.OFPFlowMod(dp, command=ofp.OFPFC_DELETE,
match=ofpp.OFPMatch(metadata=meta.mk_metadata(1234)),
out_group=ofp.OFPG_ANY, out_port=ofp.OFPP_ANY, priority=0,
table_id=13))
]
sendmsg.assert_has_calls(expected_calls)
def test_local_out_add_port(self):
br = self.br
with mock.patch.object(br, '_send_msg') as sendmsg:
br.local_out_add_port(network=1234, port=7,
mac='12:34:56:78:9a:bc')
(dp, ofp, ofpp) = br._get_dp()
call = mock.call
expected_calls = [
call(ofpp.OFPFlowMod(dp, instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS,
[ofpp.OFPActionOutput(port=7)])],
match=ofpp.OFPMatch(eth_dst="12:34:56:78:9a:bc",
metadata=meta.mk_metadata(1234)), priority=1, table_id=8))
]
sendmsg.assert_has_calls(expected_calls)
def test_local_out_delete_port(self):
br = self.br
with mock.patch.object(br, '_send_msg') as sendmsg:
br.local_out_delete_port(network=1234, mac='12:34:56:78:9a:bc')
(dp, ofp, ofpp) = br._get_dp()
call = mock.call
expected_calls = [
call(ofpp.OFPFlowMod(dp, command=ofp.OFPFC_DELETE,
match=ofpp.OFPMatch(eth_dst="12:34:56:78:9a:bc",
metadata=meta.mk_metadata(1234)), out_group=ofp.OFPG_ANY,
out_port=ofp.OFPP_ANY, priority=0, table_id=8))
]
sendmsg.assert_has_calls(expected_calls)
def test_arp_passthrough(self):
br = self.br
with mock.patch.object(br, '_send_msg') as sendmsg:
br.arp_passthrough(network=1234, tpa='192.0.2.1')
(dp, ofp, ofpp) = br._get_dp()
arp = importutils.import_module('ryu.lib.packet.arp')
ether = importutils.import_module('ryu.ofproto.ether')
call = mock.call
expected_calls = [
call(ofpp.OFPFlowMod(dp, idle_timeout=5,
instructions=[ofpp.OFPInstructionGotoTable(table_id=7)],
match=ofpp.OFPMatch(arp_op=arp.ARP_REQUEST,
arp_tpa="192.0.2.1", eth_type=ether.ETH_TYPE_ARP,
metadata=meta.mk_metadata(1234)), priority=1, table_id=5))
]
sendmsg.assert_has_calls(expected_calls)
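    # Illustrative sketch (not part of the original test class): every test
    # above follows the same pattern -- patch the bridge's _send_msg, call
    # the flow-programming method under test, then compare the recorded
    # calls with the expected messages via assert_has_calls.  _FakeBridge
    # is a hypothetical stand-in that shows the mechanics in isolation,
    # reusing the module-level ``mock`` import already used above.
    def _example_send_msg_pattern(self):
        class _FakeBridge(object):
            def _send_msg(self, msg):
                pass
            def delete_flows(self, table_id):
                self._send_msg({'command': 'DELETE', 'table_id': table_id})
        br = _FakeBridge()
        with mock.patch.object(br, '_send_msg') as sendmsg:
            br.delete_flows(table_id=7)
        expected_calls = [mock.call({'command': 'DELETE', 'table_id': 7})]
        sendmsg.assert_has_calls(expected_calls)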
# ./MARC21relaxed.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:5e592dacc0cf5bbbe827fb7d980f3324ca92c3dc
# Generated 2016-12-21 00:24:34.092428 by PyXB version 1.2.4 using Python 2.7.12.final.0
# Namespace http://www.loc.gov/MARC21/slim
from __future__ import unicode_literals
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
import pyxb.utils.six as _six
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:773ffeee-c70b-11e6-9daf-00e1020040ea')
# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.4'
# Generated bindings are not compatible across PyXB versions
#if pyxb.__version__ != _PyXBVersion:
# raise pyxb.PyXBVersionError(_PyXBVersion)
# Import bindings for namespaces imported into schema
import pyxb.binding.datatypes
# NOTE: All namespace declarations are reserved within the binding
Namespace = pyxb.namespace.NamespaceForURI('http://www.loc.gov/MARC21/slim', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
"""Parse the given XML and use the document element to create a
Python instance.
@param xml_text An XML document. This should be data (Python 2
str or Python 3 bytes), or a text (Python 2 unicode or Python 3
str) in the L{pyxb._InputEncoding} encoding.
@keyword default_namespace The L{pyxb.Namespace} instance to use as the
default namespace where there is no default namespace in scope.
If unspecified or C{None}, the namespace of the module containing
this function will be used.
@keyword location_base: An object to be recorded as the base of all
L{pyxb.utils.utility.Location} instances associated with events and
objects handled by the parser. You might pass the URI from which
the document was obtained.
"""
if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
dom = pyxb.utils.domutils.StringToDOM(xml_text)
return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
saxer = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
handler = saxer.getContentHandler()
xmld = xml_text
if isinstance(xmld, _six.text_type):
xmld = xmld.encode(pyxb._InputEncoding)
saxer.parse(io.BytesIO(xmld))
instance = handler.rootObject()
return instance
def CreateFromDOM (node, default_namespace=None):
"""Create a Python instance from the given DOM node.
The node tag must correspond to an element declaration in this module.
@deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
return pyxb.binding.basis.element.AnyCreateFromDOM(node, default_namespace)
# Atomic simple type: {http://www.loc.gov/MARC21/slim}recordTypeType
class recordTypeType (pyxb.binding.datatypes.NMTOKEN, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'recordTypeType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 63, 2)
_Documentation = None
recordTypeType._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=recordTypeType, enum_prefix=None)
recordTypeType.Bibliographic = recordTypeType._CF_enumeration.addEnumeration(unicode_value='Bibliographic', tag='Bibliographic')
recordTypeType.Authority = recordTypeType._CF_enumeration.addEnumeration(unicode_value='Authority', tag='Authority')
recordTypeType.Holdings = recordTypeType._CF_enumeration.addEnumeration(unicode_value='Holdings', tag='Holdings')
recordTypeType.Classification = recordTypeType._CF_enumeration.addEnumeration(unicode_value='Classification', tag='Classification')
recordTypeType.Community = recordTypeType._CF_enumeration.addEnumeration(unicode_value='Community', tag='Community')
recordTypeType._InitializeFacetMap(recordTypeType._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'recordTypeType', recordTypeType)
# Atomic simple type: {http://www.loc.gov/MARC21/slim}leaderDataType
class leaderDataType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'leaderDataType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 82, 2)
_Documentation = None
leaderDataType._CF_pattern = pyxb.binding.facets.CF_pattern()
leaderDataType._CF_pattern.addPattern(pattern='[\\dA-Za-z\\.| ]{24}')
leaderDataType._CF_whiteSpace = pyxb.binding.facets.CF_whiteSpace(value=pyxb.binding.facets._WhiteSpace_enum.preserve)
leaderDataType._InitializeFacetMap(leaderDataType._CF_pattern,
leaderDataType._CF_whiteSpace)
Namespace.addCategoryObject('typeBinding', 'leaderDataType', leaderDataType)
# Atomic simple type: {http://www.loc.gov/MARC21/slim}controlDataType
class controlDataType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'controlDataType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 99, 2)
_Documentation = None
controlDataType._CF_whiteSpace = pyxb.binding.facets.CF_whiteSpace(value=pyxb.binding.facets._WhiteSpace_enum.preserve)
controlDataType._InitializeFacetMap(controlDataType._CF_whiteSpace)
Namespace.addCategoryObject('typeBinding', 'controlDataType', controlDataType)
# Atomic simple type: {http://www.loc.gov/MARC21/slim}controltagDataType
class controltagDataType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'controltagDataType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 104, 2)
_Documentation = None
controltagDataType._CF_pattern = pyxb.binding.facets.CF_pattern()
controltagDataType._CF_pattern.addPattern(pattern='[0-9A-Za-z]{3}')
controltagDataType._CF_whiteSpace = pyxb.binding.facets.CF_whiteSpace(value=pyxb.binding.facets._WhiteSpace_enum.preserve)
controltagDataType._InitializeFacetMap(controltagDataType._CF_pattern,
controltagDataType._CF_whiteSpace)
Namespace.addCategoryObject('typeBinding', 'controltagDataType', controltagDataType)
# Atomic simple type: {http://www.loc.gov/MARC21/slim}tagDataType
class tagDataType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'tagDataType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 122, 2)
_Documentation = None
tagDataType._CF_pattern = pyxb.binding.facets.CF_pattern()
tagDataType._CF_pattern.addPattern(pattern='(0([0-9A-Z][0-9A-Z])|0([1-9a-z][0-9a-z]))|(([1-9A-Z][0-9A-Z]{2})|([1-9a-z][0-9a-z]{2}))')
tagDataType._CF_whiteSpace = pyxb.binding.facets.CF_whiteSpace(value=pyxb.binding.facets._WhiteSpace_enum.preserve)
tagDataType._InitializeFacetMap(tagDataType._CF_pattern,
tagDataType._CF_whiteSpace)
Namespace.addCategoryObject('typeBinding', 'tagDataType', tagDataType)
# Atomic simple type: {http://www.loc.gov/MARC21/slim}indicatorDataType
class indicatorDataType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'indicatorDataType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 128, 2)
_Documentation = None
indicatorDataType._CF_pattern = pyxb.binding.facets.CF_pattern()
indicatorDataType._CF_pattern.addPattern(pattern='[\\da-zA-Z_ ]{1}')
indicatorDataType._CF_whiteSpace = pyxb.binding.facets.CF_whiteSpace(value=pyxb.binding.facets._WhiteSpace_enum.preserve)
indicatorDataType._InitializeFacetMap(indicatorDataType._CF_pattern,
indicatorDataType._CF_whiteSpace)
Namespace.addCategoryObject('typeBinding', 'indicatorDataType', indicatorDataType)
# Atomic simple type: {http://www.loc.gov/MARC21/slim}subfieldDataType
class subfieldDataType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'subfieldDataType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 142, 2)
_Documentation = None
subfieldDataType._CF_whiteSpace = pyxb.binding.facets.CF_whiteSpace(value=pyxb.binding.facets._WhiteSpace_enum.preserve)
subfieldDataType._InitializeFacetMap(subfieldDataType._CF_whiteSpace)
Namespace.addCategoryObject('typeBinding', 'subfieldDataType', subfieldDataType)
# Atomic simple type: {http://www.loc.gov/MARC21/slim}subfieldcodeDataType
class subfieldcodeDataType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'subfieldcodeDataType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 147, 2)
_Documentation = None
subfieldcodeDataType._CF_pattern = pyxb.binding.facets.CF_pattern()
subfieldcodeDataType._CF_pattern.addPattern(pattern='[\\dA-Za-z!"#$%&\'()*+,-./:;<=>?{}_^`~\\[\\]\\\\]{1}')
subfieldcodeDataType._CF_whiteSpace = pyxb.binding.facets.CF_whiteSpace(value=pyxb.binding.facets._WhiteSpace_enum.preserve)
subfieldcodeDataType._InitializeFacetMap(subfieldcodeDataType._CF_pattern,
subfieldcodeDataType._CF_whiteSpace)
Namespace.addCategoryObject('typeBinding', 'subfieldcodeDataType', subfieldcodeDataType)
# Atomic simple type: {http://www.loc.gov/MARC21/slim}idDataType
class idDataType (pyxb.binding.datatypes.ID):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'idDataType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 154, 2)
_Documentation = None
idDataType._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'idDataType', idDataType)
# Complex type {http://www.loc.gov/MARC21/slim}collectionType with content type ELEMENT_ONLY
class collectionType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {http://www.loc.gov/MARC21/slim}collectionType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'collectionType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 46, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {http://www.loc.gov/MARC21/slim}record uses Python identifier record
__record = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'record'), 'record', '__httpwww_loc_govMARC21slim_collectionType_httpwww_loc_govMARC21slimrecord', True, pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 36, 2), )
record = property(__record.value, __record.set, None, 'record is a top level container element for all of the field elements which compose the record')
# Attribute id uses Python identifier id
__id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'id'), 'id', '__httpwww_loc_govMARC21slim_collectionType_id', idDataType)
__id._DeclarationLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 50, 4)
__id._UseLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 50, 4)
id = property(__id.value, __id.set, None, None)
_ElementMap.update({
__record.name() : __record
})
_AttributeMap.update({
__id.name() : __id
})
Namespace.addCategoryObject('typeBinding', 'collectionType', collectionType)
# Complex type {http://www.loc.gov/MARC21/slim}recordType with content type ELEMENT_ONLY
class recordType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {http://www.loc.gov/MARC21/slim}recordType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'recordType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 52, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {http://www.loc.gov/MARC21/slim}leader uses Python identifier leader
__leader = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'leader'), 'leader', '__httpwww_loc_govMARC21slim_recordType_httpwww_loc_govMARC21slimleader', True, pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 55, 8), )
leader = property(__leader.value, __leader.set, None, None)
# Element {http://www.loc.gov/MARC21/slim}controlfield uses Python identifier controlfield
__controlfield = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'controlfield'), 'controlfield', '__httpwww_loc_govMARC21slim_recordType_httpwww_loc_govMARC21slimcontrolfield', True, pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 56, 8), )
controlfield = property(__controlfield.value, __controlfield.set, None, None)
# Element {http://www.loc.gov/MARC21/slim}datafield uses Python identifier datafield
__datafield = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'datafield'), 'datafield', '__httpwww_loc_govMARC21slim_recordType_httpwww_loc_govMARC21slimdatafield', True, pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 57, 8), )
datafield = property(__datafield.value, __datafield.set, None, None)
# Attribute type uses Python identifier type
__type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type', '__httpwww_loc_govMARC21slim_recordType_type', recordTypeType)
__type._DeclarationLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 60, 4)
__type._UseLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 60, 4)
type = property(__type.value, __type.set, None, None)
# Attribute id uses Python identifier id
__id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'id'), 'id', '__httpwww_loc_govMARC21slim_recordType_id', idDataType)
__id._DeclarationLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 61, 4)
__id._UseLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 61, 4)
id = property(__id.value, __id.set, None, None)
_ElementMap.update({
__leader.name() : __leader,
__controlfield.name() : __controlfield,
__datafield.name() : __datafield
})
_AttributeMap.update({
__type.name() : __type,
__id.name() : __id
})
Namespace.addCategoryObject('typeBinding', 'recordType', recordType)
# Complex type {http://www.loc.gov/MARC21/slim}leaderFieldType with content type SIMPLE
class leaderFieldType (pyxb.binding.basis.complexTypeDefinition):
"""MARC21 Leader, 24 bytes"""
_TypeDefinition = leaderDataType
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'leaderFieldType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 72, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is leaderDataType
# Attribute id uses Python identifier id
__id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'id'), 'id', '__httpwww_loc_govMARC21slim_leaderFieldType_id', idDataType)
__id._DeclarationLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 78, 8)
__id._UseLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 78, 8)
id = property(__id.value, __id.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__id.name() : __id
})
Namespace.addCategoryObject('typeBinding', 'leaderFieldType', leaderFieldType)
# Complex type {http://www.loc.gov/MARC21/slim}controlFieldType with content type SIMPLE
class controlFieldType (pyxb.binding.basis.complexTypeDefinition):
"""MARC21 Fields 001-009"""
_TypeDefinition = controlDataType
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'controlFieldType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 88, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is controlDataType
# Attribute id uses Python identifier id
__id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'id'), 'id', '__httpwww_loc_govMARC21slim_controlFieldType_id', idDataType)
__id._DeclarationLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 94, 8)
__id._UseLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 94, 8)
id = property(__id.value, __id.set, None, None)
# Attribute tag uses Python identifier tag
__tag = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'tag'), 'tag', '__httpwww_loc_govMARC21slim_controlFieldType_tag', controltagDataType, required=True)
__tag._DeclarationLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 95, 8)
__tag._UseLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 95, 8)
tag = property(__tag.value, __tag.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__id.name() : __id,
__tag.name() : __tag
})
Namespace.addCategoryObject('typeBinding', 'controlFieldType', controlFieldType)
# Complex type {http://www.loc.gov/MARC21/slim}dataFieldType with content type ELEMENT_ONLY
class dataFieldType (pyxb.binding.basis.complexTypeDefinition):
"""MARC21 Variable Data Fields 010-999"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'dataFieldType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 110, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {http://www.loc.gov/MARC21/slim}subfield uses Python identifier subfield
__subfield = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'subfield'), 'subfield', '__httpwww_loc_govMARC21slim_dataFieldType_httpwww_loc_govMARC21slimsubfield', True, pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 115, 6), )
subfield = property(__subfield.value, __subfield.set, None, None)
# Attribute id uses Python identifier id
__id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'id'), 'id', '__httpwww_loc_govMARC21slim_dataFieldType_id', idDataType)
__id._DeclarationLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 117, 4)
__id._UseLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 117, 4)
id = property(__id.value, __id.set, None, None)
# Attribute tag uses Python identifier tag
__tag = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'tag'), 'tag', '__httpwww_loc_govMARC21slim_dataFieldType_tag', tagDataType, required=True)
__tag._DeclarationLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 118, 4)
__tag._UseLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 118, 4)
tag = property(__tag.value, __tag.set, None, None)
# Attribute ind1 uses Python identifier ind1
__ind1 = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'ind1'), 'ind1', '__httpwww_loc_govMARC21slim_dataFieldType_ind1', indicatorDataType, required=True)
__ind1._DeclarationLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 119, 4)
__ind1._UseLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 119, 4)
ind1 = property(__ind1.value, __ind1.set, None, None)
# Attribute ind2 uses Python identifier ind2
__ind2 = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'ind2'), 'ind2', '__httpwww_loc_govMARC21slim_dataFieldType_ind2', indicatorDataType, required=True)
__ind2._DeclarationLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 120, 4)
__ind2._UseLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 120, 4)
ind2 = property(__ind2.value, __ind2.set, None, None)
_ElementMap.update({
__subfield.name() : __subfield
})
_AttributeMap.update({
__id.name() : __id,
__tag.name() : __tag,
__ind1.name() : __ind1,
__ind2.name() : __ind2
})
Namespace.addCategoryObject('typeBinding', 'dataFieldType', dataFieldType)
# Complex type {http://www.loc.gov/MARC21/slim}subfieldatafieldType with content type SIMPLE
class subfieldatafieldType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {http://www.loc.gov/MARC21/slim}subfieldatafieldType with content type SIMPLE"""
_TypeDefinition = subfieldDataType
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'subfieldatafieldType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 134, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is subfieldDataType
# Attribute id uses Python identifier id
__id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'id'), 'id', '__httpwww_loc_govMARC21slim_subfieldatafieldType_id', idDataType)
__id._DeclarationLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 137, 8)
__id._UseLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 137, 8)
id = property(__id.value, __id.set, None, None)
# Attribute code uses Python identifier code
__code = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'code'), 'code', '__httpwww_loc_govMARC21slim_subfieldatafieldType_code', subfieldcodeDataType, required=True)
__code._DeclarationLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 138, 8)
__code._UseLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 138, 8)
code = property(__code.value, __code.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__id.name() : __id,
__code.name() : __code
})
Namespace.addCategoryObject('typeBinding', 'subfieldatafieldType', subfieldatafieldType)
record = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'record'), recordType, nillable=pyxb.binding.datatypes.boolean(1), documentation='record is a top level container element for all of the field elements which compose the record', location=pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 36, 2))
Namespace.addCategoryObject('elementBinding', record.name().localName(), record)
collection = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'collection'), collectionType, nillable=pyxb.binding.datatypes.boolean(1), documentation='collection is a top level container element for 0 or many records', location=pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 41, 2))
Namespace.addCategoryObject('elementBinding', collection.name().localName(), collection)
collectionType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'record'), recordType, nillable=pyxb.binding.datatypes.boolean(1), scope=collectionType, documentation='record is a top level container element for all of the field elements which compose the record', location=pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 36, 2)))
def _BuildAutomaton ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton
del _BuildAutomaton
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 47, 4))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(collectionType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'record')), pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 48, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
collectionType._Automaton = _BuildAutomaton()
recordType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'leader'), leaderFieldType, scope=recordType, location=pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 55, 8)))
recordType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'controlfield'), controlFieldType, scope=recordType, location=pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 56, 8)))
recordType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'datafield'), dataFieldType, scope=recordType, location=pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 57, 8)))
def _BuildAutomaton_ ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_
del _BuildAutomaton_
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 54, 6))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(recordType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'leader')), pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 55, 8))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(recordType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'controlfield')), pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 56, 8))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(recordType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'datafield')), pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 57, 8))
st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, True) ]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, True) ]))
st_2._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
recordType._Automaton = _BuildAutomaton_()
dataFieldType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'subfield'), subfieldatafieldType, scope=dataFieldType, location=pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 115, 6)))
def _BuildAutomaton_2 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_2
del _BuildAutomaton_2
import pyxb.utils.fac as fac
counters = set()
states = []
final_update = set()
symbol = pyxb.binding.content.ElementUse(dataFieldType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'subfield')), pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 115, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
dataFieldType._Automaton = _BuildAutomaton_2()
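# Illustrative usage sketch (not part of the generated bindings): parse a
# minimal MARC21/slim record with CreateFromDocument and serialize it back
# to XML.  Assumes PyXB 1.2.x is installed; the __main__ guard keeps the
# example from running when the module is merely imported.
if __name__ == '__main__':
    _sample = (
        '<record xmlns="http://www.loc.gov/MARC21/slim">'
        '<leader>00000nam a2200000 a 4500</leader>'
        '<controlfield tag="001">000012345</controlfield>'
        '<datafield tag="245" ind1="1" ind2="0">'
        '<subfield code="a">An example title</subfield>'
        '</datafield>'
        '</record>'
    )
    _rec = CreateFromDocument(_sample)
    # repeatable elements (controlfield, datafield, ...) behave like lists
    print(len(_rec.datafield))
    print(_rec.toxml('utf-8'))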
"""Useful utilities for higher level polynomial classes. """
from __future__ import print_function, division
from sympy.polys.polyerrors import PolynomialError, GeneratorsNeeded, GeneratorsError
from sympy.polys.polyoptions import build_options
from sympy.core.exprtools import decompose_power
from sympy.core import S, Add, Mul, Pow, expand_mul, expand_multinomial
from sympy.core.compatibility import range
import re
_gens_order = {
'a': 301, 'b': 302, 'c': 303, 'd': 304,
'e': 305, 'f': 306, 'g': 307, 'h': 308,
'i': 309, 'j': 310, 'k': 311, 'l': 312,
'm': 313, 'n': 314, 'o': 315, 'p': 216,
'q': 217, 'r': 218, 's': 219, 't': 220,
'u': 221, 'v': 222, 'w': 223, 'x': 124,
'y': 125, 'z': 126,
}
_max_order = 1000
_re_gen = re.compile(r"^(.+?)(\d*)$")
def _nsort(roots, separated=False):
"""Sort the numerical roots putting the real roots first, then sorting
according to real and imaginary parts. If ``separated`` is True, then
the real and imaginary roots will be returned in two lists, respectively.
This routine tries to avoid issue 6137 by separating the roots into real
and imaginary parts before evaluation. In addition, the sorting will raise
an error if any computation cannot be done with precision.
"""
if not all(r.is_number for r in roots):
raise NotImplementedError
# see issue 6137:
# get the real part of the evaluated real and imaginary parts of each root
key = [[i.n(2).as_real_imag()[0] for i in r.as_real_imag()] for r in roots]
# make sure the parts were computed with precision
if any(i._prec == 1 for k in key for i in k):
raise NotImplementedError("could not compute root with precision")
# insert a key to indicate if the root has an imaginary part
key = [(1 if i else 0, r, i) for r, i in key]
key = sorted(zip(key, roots))
# return the real and imaginary roots separately if desired
if separated:
r = []
i = []
for (im, _, _), v in key:
if im:
i.append(v)
else:
r.append(v)
return r, i
_, roots = zip(*key)
return list(roots)
def _sort_gens(gens, **args):
"""Sort generators in a reasonably intelligent way. """
opt = build_options(args)
gens_order, wrt = {}, None
if opt is not None:
gens_order, wrt = {}, opt.wrt
for i, gen in enumerate(opt.sort):
gens_order[gen] = i + 1
def order_key(gen):
gen = str(gen)
if wrt is not None:
try:
return (-len(wrt) + wrt.index(gen), gen, 0)
except ValueError:
pass
name, index = _re_gen.match(gen).groups()
if index:
index = int(index)
else:
index = 0
try:
return ( gens_order[name], name, index)
except KeyError:
pass
try:
return (_gens_order[name], name, index)
except KeyError:
pass
return (_max_order, name, index)
try:
gens = sorted(gens, key=order_key)
except TypeError: # pragma: no cover
pass
return tuple(gens)
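# Illustrative sketch (not part of the original module): with no explicit
# ``wrt``/``sort`` options, _sort_gens falls back to the _gens_order table
# above, so x, y, z come first, then p..w, then a..o.
def _demo_sort_gens():
    from sympy.abc import a, p, q, x
    return _sort_gens([q, a, p, x])  # -> (x, p, q, a)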
def _unify_gens(f_gens, g_gens):
"""Unify generators in a reasonably intelligent way. """
f_gens = list(f_gens)
g_gens = list(g_gens)
if f_gens == g_gens:
return tuple(f_gens)
gens, common, k = [], [], 0
for gen in f_gens:
if gen in g_gens:
common.append(gen)
for i, gen in enumerate(g_gens):
if gen in common:
g_gens[i], k = common[k], k + 1
for gen in common:
i = f_gens.index(gen)
gens.extend(f_gens[:i])
f_gens = f_gens[i + 1:]
i = g_gens.index(gen)
gens.extend(g_gens[:i])
g_gens = g_gens[i + 1:]
gens.append(gen)
gens.extend(f_gens)
gens.extend(g_gens)
return tuple(gens)
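# Illustrative sketch (not part of the original module): generators shared
# by both lists keep their relative order, and the extra ones are merged in.
def _demo_unify_gens():
    from sympy.abc import x, y, z, t
    return _unify_gens([x, y, z], [t, y, z])  # -> (x, t, y, z)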
def _analyze_gens(gens):
"""Support for passing generators as `*gens` and `[gens]`. """
if len(gens) == 1 and hasattr(gens[0], '__iter__'):
return tuple(gens[0])
else:
return tuple(gens)
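# Illustrative sketch (not part of the original module): both calling
# conventions accepted by _analyze_gens normalize to the same tuple.
def _demo_analyze_gens():
    from sympy.abc import x, y
    return _analyze_gens((x, y)) == _analyze_gens(([x, y],))  # True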
def _sort_factors(factors, **args):
"""Sort low-level factors in increasing 'complexity' order. """
def order_if_multiple_key(factor):
(f, n) = factor
return (len(f), n, f)
def order_no_multiple_key(f):
return (len(f), f)
if args.get('multiple', True):
return sorted(factors, key=order_if_multiple_key)
else:
return sorted(factors, key=order_no_multiple_key)
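# Illustrative sketch (not part of the original module): low-level factors
# are (coefficient-list, multiplicity) pairs; shorter factor lists sort
# first, with the multiplicity breaking ties.
def _demo_sort_factors():
    factors = [([1, 2], 2), ([1], 3), ([1, 0, 1], 1)]
    return _sort_factors(factors)  # -> [([1], 3), ([1, 2], 2), ([1, 0, 1], 1)]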
def _not_a_coeff(expr):
"""Do not treat NaN and infinities as valid polynomial coefficients. """
return expr in [S.NaN, S.Infinity, S.NegativeInfinity, S.ComplexInfinity]
def _parallel_dict_from_expr_if_gens(exprs, opt):
"""Transform expressions into a multinomial form given generators. """
k, indices = len(opt.gens), {}
for i, g in enumerate(opt.gens):
indices[g] = i
polys = []
for expr in exprs:
poly = {}
if expr.is_Equality:
expr = expr.lhs - expr.rhs
for term in Add.make_args(expr):
coeff, monom = [], [0]*k
for factor in Mul.make_args(term):
if not _not_a_coeff(factor) and factor.is_Number:
coeff.append(factor)
else:
try:
base, exp = decompose_power(factor)
if exp < 0:
exp, base = -exp, Pow(base, -S.One)
monom[indices[base]] = exp
except KeyError:
if not factor.free_symbols.intersection(opt.gens):
coeff.append(factor)
else:
raise PolynomialError("%s contains an element of the generators set" % factor)
monom = tuple(monom)
if monom in poly:
poly[monom] += Mul(*coeff)
else:
poly[monom] = Mul(*coeff)
polys.append(poly)
return polys, opt.gens
def _parallel_dict_from_expr_no_gens(exprs, opt):
"""Transform expressions into a multinomial form and figure out generators. """
if opt.domain is not None:
def _is_coeff(factor):
return factor in opt.domain
elif opt.extension is True:
def _is_coeff(factor):
return factor.is_algebraic
elif opt.greedy is not False:
def _is_coeff(factor):
return False
else:
def _is_coeff(factor):
return factor.is_number
gens, reprs = set([]), []
for expr in exprs:
terms = []
if expr.is_Equality:
expr = expr.lhs - expr.rhs
for term in Add.make_args(expr):
coeff, elements = [], {}
for factor in Mul.make_args(term):
if not _not_a_coeff(factor) and (factor.is_Number or _is_coeff(factor)):
coeff.append(factor)
else:
base, exp = decompose_power(factor)
if exp < 0:
exp, base = -exp, Pow(base, -S.One)
elements[base] = exp
gens.add(base)
terms.append((coeff, elements))
reprs.append(terms)
if not gens:
if len(exprs) == 1:
arg = exprs[0]
else:
arg = (exprs,)
raise GeneratorsNeeded("specify generators to give %s a meaning" % arg)
gens = _sort_gens(gens, opt=opt)
k, indices = len(gens), {}
for i, g in enumerate(gens):
indices[g] = i
polys = []
for terms in reprs:
poly = {}
for coeff, term in terms:
monom = [0]*k
for base, exp in term.items():
monom[indices[base]] = exp
monom = tuple(monom)
if monom in poly:
poly[monom] += Mul(*coeff)
else:
poly[monom] = Mul(*coeff)
polys.append(poly)
return polys, tuple(gens)
def _dict_from_expr_if_gens(expr, opt):
"""Transform an expression into a multinomial form given generators. """
(poly,), gens = _parallel_dict_from_expr_if_gens((expr,), opt)
return poly, gens
def _dict_from_expr_no_gens(expr, opt):
"""Transform an expression into a multinomial form and figure out generators. """
(poly,), gens = _parallel_dict_from_expr_no_gens((expr,), opt)
return poly, gens
def parallel_dict_from_expr(exprs, **args):
"""Transform expressions into a multinomial form. """
reps, opt = _parallel_dict_from_expr(exprs, build_options(args))
return reps, opt.gens
def _parallel_dict_from_expr(exprs, opt):
"""Transform expressions into a multinomial form. """
if opt.expand is not False:
exprs = [ expr.expand() for expr in exprs ]
if any(expr.is_commutative is False for expr in exprs):
raise PolynomialError('non-commutative expressions are not supported')
if opt.gens:
reps, gens = _parallel_dict_from_expr_if_gens(exprs, opt)
else:
reps, gens = _parallel_dict_from_expr_no_gens(exprs, opt)
return reps, opt.clone({'gens': gens})
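# Illustrative sketch (not part of the original module): several expressions
# are converted in parallel so that they share one set of generators.
def _demo_parallel_dict_from_expr():
    from sympy.abc import x, y
    reps, gens = parallel_dict_from_expr([x**2 + y, x*y + 1])
    # reps == [{(2, 0): 1, (0, 1): 1}, {(1, 1): 1, (0, 0): 1}], gens == (x, y)
    return reps, gens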
def dict_from_expr(expr, **args):
"""Transform an expression into a multinomial form. """
rep, opt = _dict_from_expr(expr, build_options(args))
return rep, opt.gens
def _dict_from_expr(expr, opt):
"""Transform an expression into a multinomial form. """
if expr.is_commutative is False:
raise PolynomialError('non-commutative expressions are not supported')
def _is_expandable_pow(expr):
return (expr.is_Pow and expr.exp.is_positive and expr.exp.is_Integer
and expr.base.is_Add)
if opt.expand is not False:
expr = expr.expand()
# TODO: Integrate this into expand() itself
while any(_is_expandable_pow(i) or i.is_Mul and
any(_is_expandable_pow(j) for j in i.args) for i in
Add.make_args(expr)):
expr = expand_multinomial(expr)
while any(i.is_Mul and any(j.is_Add for j in i.args) for i in Add.make_args(expr)):
expr = expand_mul(expr)
if opt.gens:
rep, gens = _dict_from_expr_if_gens(expr, opt)
else:
rep, gens = _dict_from_expr_no_gens(expr, opt)
return rep, opt.clone({'gens': gens})
def expr_from_dict(rep, *gens):
"""Convert a multinomial form into an expression. """
result = []
for monom, coeff in rep.items():
term = [coeff]
for g, m in zip(gens, monom):
if m:
term.append(Pow(g, m))
result.append(Mul(*term))
return Add(*result)
parallel_dict_from_basic = parallel_dict_from_expr
dict_from_basic = dict_from_expr
basic_from_dict = expr_from_dict
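# Illustrative sketch (not part of the original module): dict_from_expr and
# expr_from_dict are inverse operations on the exponent-tuple representation.
def _demo_dict_round_trip():
    from sympy.abc import x, y
    rep, gens = dict_from_expr(x**2 + 2*x*y + 1)
    # rep == {(2, 0): 1, (1, 1): 2, (0, 0): 1} with gens == (x, y)
    return expr_from_dict(rep, *gens) == x**2 + 2*x*y + 1  # True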
def _dict_reorder(rep, gens, new_gens):
"""Reorder levels using dict representation. """
gens = list(gens)
monoms = rep.keys()
coeffs = rep.values()
new_monoms = [ [] for _ in range(len(rep)) ]
used_indices = set()
for gen in new_gens:
try:
j = gens.index(gen)
used_indices.add(j)
for M, new_M in zip(monoms, new_monoms):
new_M.append(M[j])
except ValueError:
for new_M in new_monoms:
new_M.append(0)
for i, _ in enumerate(gens):
if i not in used_indices:
for monom in monoms:
if monom[i]:
raise GeneratorsError("unable to drop generators")
return map(tuple, new_monoms), coeffs
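# Illustrative sketch (not part of the original module): _dict_reorder
# re-reads every exponent tuple in the order of the new generators.
def _demo_dict_reorder():
    from sympy.abc import x, y
    monoms, coeffs = _dict_reorder({(2, 0): 3, (0, 1): 5}, (x, y), (y, x))
    return sorted(zip(monoms, coeffs))  # [((0, 2), 3), ((1, 0), 5)]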
class PicklableWithSlots(object):
"""
    Mixin class that allows objects with ``__slots__`` to be pickled.
Examples
========
First define a class that mixes :class:`PicklableWithSlots` in::
>>> from sympy.polys.polyutils import PicklableWithSlots
>>> class Some(PicklableWithSlots):
... __slots__ = ['foo', 'bar']
...
... def __init__(self, foo, bar):
... self.foo = foo
... self.bar = bar
To make :mod:`pickle` happy in doctest we have to use this hack::
>>> from sympy.core.compatibility import builtins
>>> builtins.Some = Some
    Next let's see if we can create an instance, pickle it and unpickle it::
>>> some = Some('abc', 10)
>>> some.foo, some.bar
('abc', 10)
>>> from pickle import dumps, loads
>>> some2 = loads(dumps(some))
>>> some2.foo, some2.bar
('abc', 10)
"""
__slots__ = []
def __getstate__(self, cls=None):
if cls is None:
# This is the case for the instance that gets pickled
cls = self.__class__
d = {}
# Get all data that should be stored from super classes
for c in cls.__bases__:
if hasattr(c, "__getstate__"):
d.update(c.__getstate__(self, c))
# Get all information that should be stored from cls and return the dict
for name in cls.__slots__:
if hasattr(self, name):
d[name] = getattr(self, name)
return d
def __setstate__(self, d):
# All values that were pickled are now assigned to a fresh instance
for name, value in d.items():
try:
setattr(self, name, value)
except AttributeError: # This is needed in cases like Rational :> Half
pass
"""Nose Plugin that supports IPython doctests.
Limitations:
- When generating examples for use as doctests, make sure that you have
pretty-printing OFF. This can be done either by setting the
``PlainTextFormatter.pprint`` option in your configuration file to False, or
by interactively disabling it with %Pprint. This is required so that IPython
output matches that of normal Python, which is used by doctest for internal
execution.
- Do not rely on specific prompt numbers for results (such as using
'_34==True', for example). For IPython tests run via an external process the
prompt numbers may be different, and IPython tests run as normal python code
won't even have these special _NN variables set at all.
"""
#-----------------------------------------------------------------------------
# Module imports
# From the standard library
import __builtin__ as builtin_mod
import commands
import doctest
import inspect
import logging
import os
import re
import sys
import traceback
import unittest
from inspect import getmodule
from StringIO import StringIO
# We are overriding the default doctest runner, so we need to import a few
# things from doctest directly
from doctest import (REPORTING_FLAGS, REPORT_ONLY_FIRST_FAILURE,
_unittest_reportflags, DocTestRunner,
_extract_future_flags, pdb, _OutputRedirectingPdb,
_exception_traceback,
linecache)
# Third-party modules
import nose.core
from nose.plugins import doctests, Plugin
from nose.util import anyp, getpackage, test_address, resolve_name, tolist
# Our own imports
#-----------------------------------------------------------------------------
# Module globals and other constants
#-----------------------------------------------------------------------------
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
def is_extension_module(filename):
"""Return whether the given filename is an extension module.
This simply checks that the extension is either .so or .pyd.
"""
return os.path.splitext(filename)[1].lower() in ('.so','.pyd')
class DocTestSkip(object):
"""Object wrapper for doctests to be skipped."""
ds_skip = """Doctest to skip.
>>> 1 #doctest: +SKIP
"""
def __init__(self,obj):
self.obj = obj
def __getattribute__(self,key):
if key == '__doc__':
return DocTestSkip.ds_skip
else:
return getattr(object.__getattribute__(self,'obj'),key)
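# Illustrative sketch (not part of the original plugin): wrapping an object
# in DocTestSkip hides its real docstring behind a '+SKIP' doctest, so the
# finder below will not collect its examples.
def _demo_doctest_skip():
    def has_doctest():
        """
        >>> 1 + 1
        2
        """
    wrapped = DocTestSkip(has_doctest)
    # the wrapper reports the skip marker instead of the original docstring
    return wrapped.__doc__ is DocTestSkip.ds_skip  # True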
# Modified version of the one in the stdlib, that fixes a python bug (doctests
# not found in extension modules, http://bugs.python.org/issue3158)
class DocTestFinder(doctest.DocTestFinder):
def _from_module(self, module, object):
"""
Return true if the given object is defined in the given
module.
"""
if module is None:
return True
elif inspect.isfunction(object):
return module.__dict__ is object.func_globals
elif inspect.isbuiltin(object):
return module.__name__ == object.__module__
elif inspect.isclass(object):
return module.__name__ == object.__module__
elif inspect.ismethod(object):
# This one may be a bug in cython that fails to correctly set the
# __module__ attribute of methods, but since the same error is easy
# to make by extension code writers, having this safety in place
# isn't such a bad idea
return module.__name__ == object.im_class.__module__
elif inspect.getmodule(object) is not None:
return module is inspect.getmodule(object)
elif hasattr(object, '__module__'):
return module.__name__ == object.__module__
elif isinstance(object, property):
            return True # [XX] no way to be sure.
else:
raise ValueError("object must be a class or function")
def _find(self, tests, obj, name, module, source_lines, globs, seen):
"""
Find tests for the given object and any contained objects, and
add them to `tests`.
"""
#print '_find for:', obj, name, module # dbg
if hasattr(obj,"skip_doctest"):
#print 'SKIPPING DOCTEST FOR:',obj # dbg
obj = DocTestSkip(obj)
doctest.DocTestFinder._find(self,tests, obj, name, module,
source_lines, globs, seen)
# Below we re-run pieces of the above method with manual modifications,
# because the original code is buggy and fails to correctly identify
# doctests in extension modules.
# Local shorthands
from inspect import isroutine, isclass, ismodule
# Look for tests in a module's contained objects.
if inspect.ismodule(obj) and self._recurse:
for valname, val in obj.__dict__.items():
valname1 = '%s.%s' % (name, valname)
if ( (isroutine(val) or isclass(val))
and self._from_module(module, val) ):
self._find(tests, val, valname1, module, source_lines,
globs, seen)
# Look for tests in a class's contained objects.
if inspect.isclass(obj) and self._recurse:
#print 'RECURSE into class:',obj # dbg
for valname, val in obj.__dict__.items():
# Special handling for staticmethod/classmethod.
if isinstance(val, staticmethod):
val = getattr(obj, valname)
if isinstance(val, classmethod):
val = getattr(obj, valname).im_func
# Recurse to methods, properties, and nested classes.
if ((inspect.isfunction(val) or inspect.isclass(val) or
inspect.ismethod(val) or
isinstance(val, property)) and
self._from_module(module, val)):
valname = '%s.%s' % (name, valname)
self._find(tests, val, valname, module, source_lines,
globs, seen)
class IPDoctestOutputChecker(doctest.OutputChecker):
"""Second-chance checker with support for random tests.
If the default comparison doesn't pass, this checker looks in the expected
output string for flags that tell us to ignore the output.
"""
random_re = re.compile(r'#\s*random\s+')
def check_output(self, want, got, optionflags):
"""Check output, accepting special markers embedded in the output.
If the output didn't pass the default validation but the special string
'#random' is included, we accept it."""
        # Let the original tester verify first, in case people have valid tests
        # that happen to have a comment saying '#random' embedded in them.
ret = doctest.OutputChecker.check_output(self, want, got,
optionflags)
if not ret and self.random_re.search(want):
#print >> sys.stderr, 'RANDOM OK:',want # dbg
return True
return ret
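# Illustrative sketch (not part of the original plugin): output that would
# normally fail the comparison is accepted when the expected text carries
# the '# random' marker.
def _demo_random_marker():
    checker = IPDoctestOutputChecker()
    want = '0.1234  # random\n'
    got = '0.9999\n'
    return checker.check_output(want, got, 0)  # True despite the mismatch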
class DocTestCase(doctests.DocTestCase):
"""Proxy for DocTestCase: provides an address() method that
returns the correct address for the doctest case. Otherwise
acts as a proxy to the test case. To provide hints for address(),
an obj may also be passed -- this will be used as the test object
for purposes of determining the test address, if it is provided.
"""
# Note: this method was taken from numpy's nosetester module.
# Subclass nose.plugins.doctests.DocTestCase to work around a bug in
# its constructor that blocks non-default arguments from being passed
# down into doctest.DocTestCase
def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
checker=None, obj=None, result_var='_'):
self._result_var = result_var
doctests.DocTestCase.__init__(self, test,
optionflags=optionflags,
setUp=setUp, tearDown=tearDown,
checker=checker)
# Now we must actually copy the original constructor from the stdlib
# doctest class, because we can't call it directly and a bug in nose
# means it never gets passed the right arguments.
self._dt_optionflags = optionflags
self._dt_checker = checker
self._dt_test = test
self._dt_test_globs_ori = test.globs
self._dt_setUp = setUp
self._dt_tearDown = tearDown
# XXX - store this runner once in the object!
runner = IPDocTestRunner(optionflags=optionflags,
checker=checker, verbose=False)
self._dt_runner = runner
# Each doctest should remember the directory it was loaded from, so
# things like %run work without too many contortions
self._ori_dir = os.path.dirname(test.filename)
# Modified runTest from the default stdlib
def runTest(self):
test = self._dt_test
runner = self._dt_runner
old = sys.stdout
new = StringIO()
optionflags = self._dt_optionflags
if not (optionflags & REPORTING_FLAGS):
# The option flags don't include any reporting flags,
# so add the default reporting flags
optionflags |= _unittest_reportflags
try:
# Save our current directory and switch out to the one where the
# test was originally created, in case another doctest did a
# directory change. We'll restore this in the finally clause.
curdir = os.getcwdu()
#print 'runTest in dir:', self._ori_dir # dbg
os.chdir(self._ori_dir)
runner.DIVIDER = "-"*70
failures, tries = runner.run(test,out=new.write,
clear_globs=False)
finally:
sys.stdout = old
os.chdir(curdir)
if failures:
raise self.failureException(self.format_failure(new.getvalue()))
def setUp(self):
"""Modified test setup that syncs with ipython namespace"""
#print "setUp test", self._dt_test.examples # dbg
if isinstance(self._dt_test.examples[0], IPExample):
# for IPython examples *only*, we swap the globals with the ipython
# namespace, after updating it with the globals (which doctest
# fills with the necessary info from the module being tested).
self.user_ns_orig = {}
self.user_ns_orig.update(_ip.user_ns)
_ip.user_ns.update(self._dt_test.globs)
# We must remove the _ key in the namespace, so that Python's
# doctest code sets it naturally
_ip.user_ns.pop('_', None)
_ip.user_ns['__builtins__'] = builtin_mod
self._dt_test.globs = _ip.user_ns
super(DocTestCase, self).setUp()
def tearDown(self):
# Undo the test.globs reassignment we made, so that the parent class
# teardown doesn't destroy the ipython namespace
if isinstance(self._dt_test.examples[0], IPExample):
self._dt_test.globs = self._dt_test_globs_ori
_ip.user_ns.clear()
_ip.user_ns.update(self.user_ns_orig)
# XXX - fperez: I am not sure if this is truly a bug in nose 0.11, but
# it does look like one to me: its tearDown method tries to run
#
# delattr(__builtin__, self._result_var)
#
# without checking that the attribute really is there; it implicitly
# assumes it should have been set via displayhook. But if the
# displayhook was never called, this doesn't necessarily happen. I
# haven't been able to find a little self-contained example outside of
# ipython that would show the problem so I can report it to the nose
# team, but it does happen a lot in our code.
#
# So here, we just protect as narrowly as possible by trapping an
# attribute error whose message would be the name of self._result_var,
# and letting any other error propagate.
try:
super(DocTestCase, self).tearDown()
except AttributeError as exc:
if exc.args[0] != self._result_var:
raise
# A simple subclassing of the original with a different class name, so we can
# distinguish and treat differently IPython examples from pure python ones.
class IPExample(doctest.Example): pass
class IPExternalExample(doctest.Example):
"""Doctest examples to be run in an external process."""
def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
options=None):
# Parent constructor
doctest.Example.__init__(self,source,want,exc_msg,lineno,indent,options)
# An EXTRA newline is needed to prevent pexpect hangs
self.source += '\n'
class IPDocTestParser(doctest.DocTestParser):
"""
A class used to parse strings containing doctest examples.
Note: This is a version modified to properly recognize IPython input and
convert any IPython examples into valid Python ones.
"""
# This regular expression is used to find doctest examples in a
# string. It defines three groups: `source` is the source code
# (including leading indentation and prompts); `indent` is the
# indentation of the first (PS1) line of the source code; and
# `want` is the expected output (including leading indentation).
# Classic Python prompts or default IPython ones
_PS1_PY = r'>>>'
_PS2_PY = r'\.\.\.'
_PS1_IP = r'In\ \[\d+\]:'
_PS2_IP = r'\ \ \ \.\.\.+:'
_RE_TPL = r'''
# Source consists of a PS1 line followed by zero or more PS2 lines.
(?P<source>
(?:^(?P<indent> [ ]*) (?P<ps1> %s) .*) # PS1 line
(?:\n [ ]* (?P<ps2> %s) .*)*) # PS2 lines
\n? # a newline
# Want consists of any non-blank lines that do not start with PS1.
(?P<want> (?:(?![ ]*$) # Not a blank line
(?![ ]*%s) # Not a line starting with PS1
(?![ ]*%s) # Not a line starting with PS2
.*$\n? # But any other line
)*)
'''
_EXAMPLE_RE_PY = re.compile( _RE_TPL % (_PS1_PY,_PS2_PY,_PS1_PY,_PS2_PY),
re.MULTILINE | re.VERBOSE)
_EXAMPLE_RE_IP = re.compile( _RE_TPL % (_PS1_IP,_PS2_IP,_PS1_IP,_PS2_IP),
re.MULTILINE | re.VERBOSE)
# Mark a test as being fully random. In this case, we simply append the
# random marker ('#random') to each individual example's output. This way
# we don't need to modify any other code.
_RANDOM_TEST = re.compile(r'#\s*all-random\s+')
# Mark tests to be executed in an external process - currently unsupported.
_EXTERNAL_IP = re.compile(r'#\s*ipdoctest:\s*EXTERNAL')
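    # Illustrative sketch (not part of the original parser): the two example
    # regexes above decide whether a block is treated as plain Python or as
    # IPython input; 'In [N]:' prompts only match the IPython pattern.
    @staticmethod
    def _demo_example_regexes():
        text = 'In [1]: 1 + 1\nOut[1]: 2\n'
        assert IPDocTestParser._EXAMPLE_RE_PY.search(text) is None
        return IPDocTestParser._EXAMPLE_RE_IP.search(text) is not None  # True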
def ip2py(self,source):
"""Convert input IPython source into valid Python."""
block = _ip.input_transformer_manager.transform_cell(source)
if len(block.splitlines()) == 1:
return _ip.prefilter(block)
else:
return block
def parse(self, string, name='<string>'):
"""
Divide the given string into examples and intervening text,
and return them as a list of alternating Examples and strings.
Line numbers for the Examples are 0-based. The optional
argument `name` is a name identifying this string, and is only
used for error messages.
"""
#print 'Parse string:\n',string # dbg
string = string.expandtabs()
# If all lines begin with the same indentation, then strip it.
min_indent = self._min_indent(string)
if min_indent > 0:
string = '\n'.join([l[min_indent:] for l in string.split('\n')])
output = []
charno, lineno = 0, 0
# We make 'all random' tests by adding the '# random' mark to every
# block of output in the test.
if self._RANDOM_TEST.search(string):
random_marker = '\n# random'
else:
random_marker = ''
# Whether to convert the input from ipython to python syntax
ip2py = False
# Find all doctest examples in the string. First, try them as Python
# examples, then as IPython ones
terms = list(self._EXAMPLE_RE_PY.finditer(string))
if terms:
# Normal Python example
#print '-'*70 # dbg
#print 'PyExample, Source:\n',string # dbg
#print '-'*70 # dbg
Example = doctest.Example
else:
# It's an ipython example. Note that IPExamples are run
# in-process, so their syntax must be turned into valid python.
# IPExternalExamples are run out-of-process (via pexpect) so they
# don't need any filtering (a real ipython will be executing them).
terms = list(self._EXAMPLE_RE_IP.finditer(string))
if self._EXTERNAL_IP.search(string):
#print '-'*70 # dbg
#print 'IPExternalExample, Source:\n',string # dbg
#print '-'*70 # dbg
Example = IPExternalExample
else:
#print '-'*70 # dbg
#print 'IPExample, Source:\n',string # dbg
#print '-'*70 # dbg
Example = IPExample
ip2py = True
for m in terms:
# Add the pre-example text to `output`.
output.append(string[charno:m.start()])
# Update lineno (lines before this example)
lineno += string.count('\n', charno, m.start())
# Extract info from the regexp match.
(source, options, want, exc_msg) = \
self._parse_example(m, name, lineno,ip2py)
# Append the random-output marker (it defaults to empty in most
# cases, it's only non-empty for 'all-random' tests):
want += random_marker
if Example is IPExternalExample:
options[doctest.NORMALIZE_WHITESPACE] = True
want += '\n'
# Create an Example, and add it to the list.
if not self._IS_BLANK_OR_COMMENT(source):
output.append(Example(source, want, exc_msg,
lineno=lineno,
indent=min_indent+len(m.group('indent')),
options=options))
# Update lineno (lines inside this example)
lineno += string.count('\n', m.start(), m.end())
# Update charno.
charno = m.end()
# Add any remaining post-example text to `output`.
output.append(string[charno:])
return output
def _parse_example(self, m, name, lineno,ip2py=False):
"""
Given a regular expression match from `_EXAMPLE_RE` (`m`),
        return a tuple `(source, options, want, exc_msg)`, where `source` is
        the matched example's source code (with prompts and indentation
        stripped); `options` are the option flags found in the source;
        `want` is the example's expected output (with indentation stripped);
        and `exc_msg` is the expected exception message, if any.
`name` is the string's name, and `lineno` is the line number
where the example starts; both are used for error messages.
Optional:
`ip2py`: if true, filter the input via IPython to convert the syntax
into valid python.
"""
# Get the example's indentation level.
indent = len(m.group('indent'))
# Divide source into lines; check that they're properly
# indented; and then strip their indentation & prompts.
source_lines = m.group('source').split('\n')
# We're using variable-length input prompts
ps1 = m.group('ps1')
ps2 = m.group('ps2')
ps1_len = len(ps1)
self._check_prompt_blank(source_lines, indent, name, lineno,ps1_len)
if ps2:
self._check_prefix(source_lines[1:], ' '*indent + ps2, name, lineno)
source = '\n'.join([sl[indent+ps1_len+1:] for sl in source_lines])
if ip2py:
# Convert source input from IPython into valid Python syntax
source = self.ip2py(source)
# Divide want into lines; check that it's properly indented; and
# then strip the indentation. Spaces before the last newline should
# be preserved, so plain rstrip() isn't good enough.
want = m.group('want')
want_lines = want.split('\n')
if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
del want_lines[-1] # forget final newline & spaces after it
self._check_prefix(want_lines, ' '*indent, name,
lineno + len(source_lines))
# Remove ipython output prompt that might be present in the first line
want_lines[0] = re.sub(r'Out\[\d+\]: \s*?\n?','',want_lines[0])
want = '\n'.join([wl[indent:] for wl in want_lines])
# If `want` contains a traceback message, then extract it.
m = self._EXCEPTION_RE.match(want)
if m:
exc_msg = m.group('msg')
else:
exc_msg = None
# Extract options from the source.
options = self._find_options(source, name, lineno)
return source, options, want, exc_msg
def _check_prompt_blank(self, lines, indent, name, lineno, ps1_len):
"""
Given the lines of a source string (including prompts and
leading indentation), check to make sure that every prompt is
        followed by a space character. If any prompt is not followed by
        a space character, then raise ValueError.
Note: IPython-modified version which takes the input prompt length as a
parameter, so that prompts of variable length can be dealt with.
"""
space_idx = indent+ps1_len
min_len = space_idx+1
for i, line in enumerate(lines):
if len(line) >= min_len and line[space_idx] != ' ':
raise ValueError('line %r of the docstring for %s '
'lacks blank after %s: %r' %
(lineno+i+1, name,
line[indent:space_idx], line))
SKIP = doctest.register_optionflag('SKIP')
class IPDocTestRunner(doctest.DocTestRunner,object):
"""Test runner that synchronizes the IPython namespace with test globals.
"""
def run(self, test, compileflags=None, out=None, clear_globs=True):
# Hack: ipython needs access to the execution context of the example,
# so that it can propagate user variables loaded by %run into
# test.globs. We put them here into our modified %run as a function
# attribute. Our new %run will then only make the namespace update
        # when called (rather than unconditionally updating test.globs here
# for all examples, most of which won't be calling %run anyway).
#_ip._ipdoctest_test_globs = test.globs
#_ip._ipdoctest_test_filename = test.filename
test.globs.update(_ip.user_ns)
return super(IPDocTestRunner,self).run(test,
compileflags,out,clear_globs)
class DocFileCase(doctest.DocFileCase):
"""Overrides to provide filename
"""
def address(self):
return (self._dt_test.filename, None, None)
class ExtensionDoctest(doctests.Doctest):
"""Nose Plugin that supports doctests in extension modules.
"""
name = 'extdoctest' # call nosetests with --with-extdoctest
enabled = True
def __init__(self,exclude_patterns=None):
"""Create a new ExtensionDoctest plugin.
Parameters
----------
exclude_patterns : sequence of strings, optional
These patterns are compiled as regular expressions, subsequently used
to exclude any filename which matches them from inclusion in the test
suite (using pattern.search(), NOT pattern.match() ).
"""
if exclude_patterns is None:
exclude_patterns = []
self.exclude_patterns = map(re.compile,exclude_patterns)
doctests.Doctest.__init__(self)
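    # Hypothetical usage sketch (not part of the original plugin): exclude
    # any filename matched (via pattern.search) by one of the given regexes,
    #   ExtensionDoctest(exclude_patterns=[r'_broken', r'/wip/'])
    # wantFile() below then skips every file that matches one of them.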
def options(self, parser, env=os.environ):
Plugin.options(self, parser, env)
parser.add_option('--doctest-tests', action='store_true',
dest='doctest_tests',
default=env.get('NOSE_DOCTEST_TESTS',True),
help="Also look for doctests in test modules. "
"Note that classes, methods and functions should "
"have either doctests or non-doctest tests, "
"not both. [NOSE_DOCTEST_TESTS]")
parser.add_option('--doctest-extension', action="append",
dest="doctestExtension",
help="Also look for doctests in files with "
"this extension [NOSE_DOCTEST_EXTENSION]")
# Set the default as a list, if given in env; otherwise
# an additional value set on the command line will cause
# an error.
env_setting = env.get('NOSE_DOCTEST_EXTENSION')
if env_setting is not None:
parser.set_defaults(doctestExtension=tolist(env_setting))
def configure(self, options, config):
Plugin.configure(self, options, config)
# Pull standard doctest plugin out of config; we will do doctesting
config.plugins.plugins = [p for p in config.plugins.plugins
if p.name != 'doctest']
self.doctest_tests = options.doctest_tests
self.extension = tolist(options.doctestExtension)
self.parser = doctest.DocTestParser()
self.finder = DocTestFinder()
self.checker = IPDoctestOutputChecker()
self.globs = None
self.extraglobs = None
def loadTestsFromExtensionModule(self,filename):
bpath,mod = os.path.split(filename)
modname = os.path.splitext(mod)[0]
try:
sys.path.append(bpath)
module = __import__(modname)
tests = list(self.loadTestsFromModule(module))
finally:
sys.path.pop()
return tests
# NOTE: the method below is almost a copy of the original one in nose, with
# a few modifications to control output checking.
def loadTestsFromModule(self, module):
#print '*** ipdoctest - lTM',module # dbg
if not self.matches(module.__name__):
log.debug("Doctest doesn't want module %s", module)
return
tests = self.finder.find(module,globs=self.globs,
extraglobs=self.extraglobs)
if not tests:
return
# always use whitespace and ellipsis options
optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
tests.sort()
module_file = module.__file__
if module_file[-4:] in ('.pyc', '.pyo'):
module_file = module_file[:-1]
for test in tests:
if not test.examples:
continue
if not test.filename:
test.filename = module_file
yield DocTestCase(test,
optionflags=optionflags,
checker=self.checker)
def loadTestsFromFile(self, filename):
#print "ipdoctest - from file", filename # dbg
if is_extension_module(filename):
for t in self.loadTestsFromExtensionModule(filename):
yield t
else:
if self.extension and anyp(filename.endswith, self.extension):
name = os.path.basename(filename)
dh = open(filename)
try:
doc = dh.read()
finally:
dh.close()
test = self.parser.get_doctest(
doc, globs={'__file__': filename}, name=name,
filename=filename, lineno=0)
if test.examples:
#print 'FileCase:',test.examples # dbg
yield DocFileCase(test)
else:
yield False # no tests to load
def wantFile(self,filename):
"""Return whether the given filename should be scanned for tests.
Modified version that accepts extension modules as valid containers for
doctests.
"""
#print '*** ipdoctest- wantFile:',filename # dbg
for pat in self.exclude_patterns:
if pat.search(filename):
# print '###>>> SKIP:',filename # dbg
return False
if is_extension_module(filename):
return True
else:
return doctests.Doctest.wantFile(self,filename)
def wantDirectory(self, directory):
"""Return whether the given directory should be scanned for tests.
Modified version that supports exclusions.
"""
for pat in self.exclude_patterns:
if pat.search(directory):
return False
return True
class IPythonDoctest(ExtensionDoctest):
"""Nose Plugin that supports doctests in extension modules.
"""
name = 'ipdoctest' # call nosetests with --with-ipdoctest
enabled = True
def makeTest(self, obj, parent):
"""Look for doctests in the given object, which will be a
function, method or class.
"""
#print 'Plugin analyzing:', obj, parent # dbg
# always use whitespace and ellipsis options
optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
doctests = self.finder.find(obj, module=getmodule(parent))
if doctests:
for test in doctests:
if len(test.examples) == 0:
continue
yield DocTestCase(test, obj=obj,
optionflags=optionflags,
checker=self.checker)
def options(self, parser, env=os.environ):
#print "Options for nose plugin:", self.name # dbg
Plugin.options(self, parser, env)
parser.add_option('--ipdoctest-tests', action='store_true',
dest='ipdoctest_tests',
default=env.get('NOSE_IPDOCTEST_TESTS',True),
help="Also look for doctests in test modules. "
"Note that classes, methods and functions should "
"have either doctests or non-doctest tests, "
"not both. [NOSE_IPDOCTEST_TESTS]")
parser.add_option('--ipdoctest-extension', action="append",
dest="ipdoctest_extension",
help="Also look for doctests in files with "
"this extension [NOSE_IPDOCTEST_EXTENSION]")
# Set the default as a list, if given in env; otherwise
# an additional value set on the command line will cause
# an error.
env_setting = env.get('NOSE_IPDOCTEST_EXTENSION')
if env_setting is not None:
parser.set_defaults(ipdoctest_extension=tolist(env_setting))
def configure(self, options, config):
#print "Configuring nose plugin:", self.name # dbg
Plugin.configure(self, options, config)
# Pull standard doctest plugin out of config; we will do doctesting
config.plugins.plugins = [p for p in config.plugins.plugins
if p.name != 'doctest']
self.doctest_tests = options.ipdoctest_tests
self.extension = tolist(options.ipdoctest_extension)
self.parser = IPDocTestParser()
self.finder = DocTestFinder(parser=self.parser)
self.checker = IPDoctestOutputChecker()
self.globs = None
self.extraglobs = None
|
|
# encoding: utf-8
"""
supervisor.py
Created by Thomas Mangin on 2011-11-29.
Copyright (c) 2011-2013 Exa Networks. All rights reserved.
"""
import os
import sys
import signal
import traceback
from socket import has_ipv6
from .util.pid import PID
from .util.daemon import Daemon
from .util.alarm import alarm_thread
from .reactor.content.manager import ContentManager
from .reactor.client.manager import ClientManager
from .reactor.resolver.manager import ResolverManager
from .network.async import Poller
from .network.server import Server
from .network.server import InterceptServer
from .html.page import Page
from .monitor import Monitor
from .reactor import Reactor
from .reactor.redirector import fork_redirector
from .reactor.redirector import redirector_message_thread
from .configuration import load
from exaproxy.util.log.logger import Logger
from exaproxy.util.log.writer import SysLogWriter
from exaproxy.util.log.writer import UsageWriter
from exaproxy.util.interfaces import getifaddrs,AF_INET,AF_INET6
class Supervisor (object):
alarm_time = 0.1 # regular backend work
second_frequency = int(1/alarm_time) # when we record history
minute_frequency = int(60/alarm_time) # when we want to average history
increase_frequency = int(5/alarm_time) # when we add workers
decrease_frequency = int(60/alarm_time) # when we remove workers
saturation_frequency = int(20/alarm_time) # when we report connection saturation
interface_frequency = int(300/alarm_time) # when we check for new interfaces
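	# With the default alarm_time of 0.1s the counters above work out to
	# 10, 600, 50, 600, 200 and 3000 reactor ticks respectively (one tick per alarm).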
# import os
# clear = [hex(ord(c)) for c in os.popen('clear').read()]
# clear = ''.join([chr(int(c,16)) for c in ['0x1b', '0x5b', '0x48', '0x1b', '0x5b', '0x32', '0x4a']])
def __init__ (self,configuration):
self.configuration = configuration
# Only here so the introspection code can find them
self.log = Logger('supervisor', configuration.log.supervisor)
self.log.error('Starting exaproxy version %s' % configuration.proxy.version)
self.signal_log = Logger('signal', configuration.log.signal)
self.log_writer = SysLogWriter('log', configuration.log.destination, configuration.log.enable, level=configuration.log.level)
self.usage_writer = UsageWriter('usage', configuration.usage.destination, configuration.usage.enable)
sys.exitfunc = self.log_writer.writeMessages
self.log_writer.setIdentifier(configuration.daemon.identifier)
#self.usage_writer.setIdentifier(configuration.daemon.identifier)
if configuration.debug.log:
self.log_writer.toggleDebug()
self.usage_writer.toggleDebug()
self.log.error('python version %s' % sys.version.replace(os.linesep,' '))
self.log.debug('starting %s' % sys.argv[0])
self.pid = PID(self.configuration)
self.daemon = Daemon(self.configuration)
self.poller = Poller(self.configuration.daemon)
self.poller.setupRead('read_proxy') # Listening proxy sockets
self.poller.setupRead('read_web') # Listening webserver sockets
self.poller.setupRead('read_icap') # Listening icap sockets
self.poller.setupRead('read_tls') # Listening tls sockets
self.poller.setupRead('read_passthrough') # Listening raw data sockets
self.poller.setupRead('read_redirector') # Pipes carrying responses from the redirector process
self.poller.setupRead('read_resolver') # Sockets currently listening for DNS responses
self.poller.setupRead('read_client') # Active clients
self.poller.setupRead('opening_client') # Clients we have not yet read a request from
self.poller.setupWrite('write_client') # Active clients with buffered data to send
self.poller.setupWrite('write_resolver') # Active DNS requests with buffered data to send
self.poller.setupRead('read_download') # Established connections
self.poller.setupWrite('write_download') # Established connections we have buffered data to send to
self.poller.setupWrite('opening_download') # Opening connections
self.poller.setupRead('read_interrupt') # Scheduled events
self.poller.setupRead('read_control') # Responses from commands sent to the redirector process
self.monitor = Monitor(self)
self.page = Page(self)
self.content = ContentManager(self,configuration)
self.client = ClientManager(self.poller, configuration)
self.resolver = ResolverManager(self.poller, self.configuration, configuration.dns.retries*10)
self.proxy = Server('http proxy',self.poller,'read_proxy', configuration.http)
self.web = Server('web server',self.poller,'read_web', configuration.web)
self.icap = Server('icap server',self.poller,'read_icap', configuration.icap)
self.tls = Server('tls server', self.poller, 'read_tls', configuration.tls)
self.passthrough = InterceptServer('passthrough server', self.poller, 'read_passthrough', configuration.passthrough)
self._shutdown = True if self.daemon.filemax == 0 else False # stop the program
		self._softstop = False  # stop once all current connections have been dealt with
self._reload = False # unimplemented
self._toggle_debug = False # start logging a lot
self._decrease_spawn_limit = 0
self._increase_spawn_limit = 0
self._refork = False # unimplemented
self._pdb = False # turn on pdb debugging
		self._listen = None  # listening change ? None: no, True: listen, False: stop listening
self.wait_time = 5.0 # how long do we wait at maximum once we have been soft-killed
self.local = set() # what addresses are on our local interfaces
if not self.initialise():
self._shutdown = True
elif self.daemon.drop_privileges():
self.log.critical('Could not drop privileges to \'%s\'. Refusing to run as root' % self.daemon.user)
self.log.critical('Set the environment value USER to change the unprivileged user')
self._shutdown = True
# fork the redirector process before performing any further setup
redirector = fork_redirector(self.poller, self.configuration)
# use simple blocking IO for communication with the redirector process
self.redirector = redirector_message_thread(redirector)
# NOTE: create threads _after_ all forking is done
# regularly interrupt the reactor for maintenance
self.interrupt_scheduler = alarm_thread(self.poller, self.alarm_time)
self.reactor = Reactor(self.configuration, self.web, self.proxy, self.passthrough, self.icap, self.tls, self.redirector, self.content, self.client, self.resolver, self.log_writer, self.usage_writer, self.poller)
self.interfaces()
signal.signal(signal.SIGQUIT, self.sigquit)
signal.signal(signal.SIGINT, self.sigterm)
signal.signal(signal.SIGTERM, self.sigterm)
# signal.signal(signal.SIGABRT, self.sigabrt)
# signal.signal(signal.SIGHUP, self.sighup)
signal.signal(signal.SIGTRAP, self.sigtrap)
signal.signal(signal.SIGUSR1, self.sigusr1)
signal.signal(signal.SIGUSR2, self.sigusr2)
signal.signal(signal.SIGTTOU, self.sigttou)
signal.signal(signal.SIGTTIN, self.sigttin)
# make sure we always have data in history
		# (done in zero() for dependency reasons)
if self._shutdown is False:
self.redirector.requestStats()
command, control_data = self.redirector.readResponse()
stats_data = control_data if command == 'STATS' else None
stats = self.monitor.statistics(stats_data)
ok = self.monitor.zero(stats)
if ok:
self.redirector.requestStats()
else:
self._shutdown = True
def exit (self):
sys.exit()
def sigquit (self,signum, frame):
if self._softstop:
self.signal_log.critical('multiple SIG INT received, shutdown')
self._shutdown = True
else:
self.signal_log.critical('SIG INT received, soft-stop')
self._softstop = True
self._listen = False
def sigterm (self,signum, frame):
self.signal_log.critical('SIG TERM received, shutdown request')
if os.environ.get('PDB',False):
self._pdb = True
else:
self._shutdown = True
# def sigabrt (self,signum, frame):
# self.signal_log.info('SIG INFO received, refork request')
# self._refork = True
# def sighup (self,signum, frame):
# self.signal_log.info('SIG HUP received, reload request')
# self._reload = True
def sigtrap (self,signum, frame):
self.signal_log.critical('SIG TRAP received, toggle debug')
self._toggle_debug = True
def sigusr1 (self,signum, frame):
self.signal_log.critical('SIG USR1 received, decrease worker number')
self._decrease_spawn_limit += 1
def sigusr2 (self,signum, frame):
self.signal_log.critical('SIG USR2 received, increase worker number')
self._increase_spawn_limit += 1
def sigttou (self,signum, frame):
self.signal_log.critical('SIG TTOU received, stop listening')
self._listen = False
def sigttin (self,signum, frame):
		self.signal_log.critical('SIG TTIN received, start listening')
self._listen = True
def interfaces (self):
local = { '127.0.0.1', '::1' }
for interface in getifaddrs():
if interface.family not in (AF_INET,AF_INET6):
continue
if interface.address not in self.local:
self.log.info('found new local ip %s (%s)' % (interface.address,interface.name))
local.add(interface.address)
for ip in self.local:
if ip not in local:
self.log.info('removed local ip %s' % ip)
if local == self.local:
self.log.info('no ip change')
else:
self.local = local
def run (self):
count_second = 0
count_minute = 0
count_saturation = 0
count_interface = 0
events = {'read_interrupt'}
while True:
count_second = (count_second + 1) % self.second_frequency
count_minute = (count_minute + 1) % self.minute_frequency
count_saturation = (count_saturation + 1) % self.saturation_frequency
count_interface = (count_interface + 1) % self.interface_frequency
try:
if self._pdb:
self._pdb = False
import pdb
pdb.set_trace()
# prime the alarm
if 'read_interrupt' in events:
self.interrupt_scheduler.setAlarm()
# check for IO change with select
status, events = self.reactor.run()
# shut down the server if a child process disappears
if status is False:
self._shutdown = True
# respond to control responses immediately
if 'read_control' in events:
command, control_data = self.redirector.readResponse()
if command == 'STATS':
ok = self.doStats(count_second, count_minute, control_data)
if ok is False:
self._shutdown = True
# jump straight back into the reactor if we haven't yet received an
# interrupt event
if 'read_interrupt' not in events:
continue
# clear the alarm condition
self.interrupt_scheduler.acknowledgeAlarm()
# must follow the reactor so we are sure to go through the reactor at least once
# and flush any logs
if self._shutdown:
self._shutdown = False
self.shutdown()
break
elif self._reload:
self._reload = False
self.reload()
elif self._refork:
self._refork = False
self.signal_log.warning('refork not implemented')
# stop listening to new connections
# refork the program (as we have been updated)
# just handle current open connection
# ask the redirector process for stats
self.redirector.requestStats()
if self._softstop:
if self._listen == False:
self.proxy.rejecting()
self._listen = None
if self.client.softstop():
self._shutdown = True
# only change listening if we are not shutting down
elif self._listen is not None:
if self._listen:
self._shutdown = not self.proxy.accepting()
self._listen = None
else:
self.proxy.rejecting()
self._listen = None
if self._toggle_debug:
self._toggle_debug = False
self.log_writer.toggleDebug()
if self._decrease_spawn_limit:
count = self._decrease_spawn_limit
self.redirector.decreaseSpawnLimit(count)
self._decrease_spawn_limit = 0
if self._increase_spawn_limit:
count = self._increase_spawn_limit
self.redirector.increaseSpawnLimit(count)
self._increase_spawn_limit = 0
# cleanup idle connections
# TODO: track all idle connections, not just the ones that have never sent data
expired = self.reactor.client.expire()
for expire_source, expire_count in expired.items():
if expire_source == 'proxy':
self.proxy.notifyClose(None, count=expire_count)
elif expire_source == 'icap':
self.icap.notifyClose(None, count=expire_count)
elif expire_source == 'passthrough':
self.passthrough.notifyClose(None, count=expire_count)
elif expire_source == 'tls':
self.tls.notifyClose(None, count=expire_count)
elif expire_source == 'web':
self.web.notifyClose(None, count=expire_count)
# report if we saw too many connections
if count_saturation == 0:
self.proxy.saturation()
self.web.saturation()
if self.configuration.daemon.poll_interfaces and count_interface == 0:
self.interfaces()
except KeyboardInterrupt:
self.log.critical('^C received')
self._shutdown = True
except OSError,e:
				# This should never happen as we are limiting how many connections we accept
if e.errno == 24: # Too many open files
self.log.critical('Too many opened files, shutting down')
for line in traceback.format_exc().split('\n'):
self.log.critical(line)
self._shutdown = True
else:
self.log.critical('unrecoverable io error')
for line in traceback.format_exc().split('\n'):
self.log.critical(line)
self._shutdown = True
finally:
pass
# try:
# from exaproxy.leak import objgraph
# if objgraph:
# count += 1
# if count >= 30:
# print "*"*10, time.strftime('%d-%m-%Y %H:%M:%S')
# print objgraph.show_most_common_types(limit=20)
# print "*"*10
# print
# except KeyboardInterrupt:
# self.log.info('^C received')
# self._shutdown = True
def doStats (self, count_second, count_minute, stats_data):
# parse the data we were sent
stats = self.monitor.statistics(stats_data)
# save our monitoring stats
if count_second == 0:
ok = self.monitor.second(stats)
else:
ok = True
expired = 0
if ok is True and count_minute == 0:
ok = self.monitor.minute(stats)
return ok
def initialise (self):
self.daemon.daemonise()
self.pid.save()
# only start listening once we know we were able to fork our worker processes
tcp4 = self.configuration.tcp4
tcp6 = self.configuration.tcp6
icap = self.configuration.icap
tls = self.configuration.tls
passthrough = self.configuration.passthrough
if not has_ipv6 and (tcp6.listen or tcp6.out or icap.ipv6 or passthrough.ipv6):
tcp6.listen = False
tcp6.out = False
self.log.critical('your python interpreter does not have ipv6 support !')
out = bool(tcp4.out or tcp6.out)
if not out:
self.log.critical('we need to use IPv4 or IPv6 for outgoing connection - both can not be disabled !')
listen = bool(tcp4.listen or tcp6.listen) or bool(icap.host or icap.ipv6) or bool(passthrough.host or passthrough.ipv6)
if not listen:
self.log.critical('Not listening on either IPv4 or IPv6.')
ok = out and listen
if ok and tcp4.listen:
s = self.proxy.listen(tcp4.host,tcp4.port, tcp4.timeout, tcp4.backlog)
ok = bool(s)
if not ok:
self.log.critical('IPv4 proxy, unable to listen on %s:%s' % (tcp4.host,tcp4.port))
if ok and tcp6.listen:
s = self.proxy.listen(tcp6.host,tcp6.port, tcp6.timeout, tcp6.backlog)
ok = bool(s)
if not ok:
self.log.critical('IPv6 proxy, unable to listen on %s:%s' % (tcp6.host,tcp6.port))
if ok and icap.enable:
s = self.icap.listen(icap.host, icap.port, tcp4.timeout, tcp4.backlog)
ok = bool(s)
if not ok:
self.log.critical('ICAP server, unable to listen on %s:%s' % (icap.host, icap.port))
if ok and icap.enable and tcp6.listen:
s = self.icap.listen(icap.ipv6, icap.port, tcp4.timeout, tcp4.backlog)
ok = bool(s)
if not ok:
				self.log.critical('ICAP server, unable to listen on %s:%s' % (icap.ipv6, icap.port))
if ok and tls.enable:
s = self.tls.listen(tls.host, tls.port, tcp4.timeout, tcp4.backlog)
ok = bool(s)
if not ok:
self.log.critical('TLS server, unable to listen on %s:%s' % (tls.host, tls.port))
if ok and tls.enable and tcp6.listen:
s = self.tls.listen(tls.ipv6, tls.port, tcp4.timeout, tcp4.backlog)
ok = bool(s)
if not ok:
				self.log.critical('TLS server, unable to listen on %s:%s' % (tls.ipv6, tls.port))
if ok and passthrough.enable:
s = self.passthrough.listen(passthrough.host, passthrough.port, tcp4.timeout, tcp4.backlog)
ok = bool(s)
if not ok:
self.log.critical('Passthrough server, unable to listen on %s:%s' % (passthrough.host, passthrough.port))
if ok and self.configuration.web.enable:
s = self.web.listen(self.configuration.web.host,self.configuration.web.port, 10, 10)
ok = bool(s)
if not ok:
self.log.critical('internal web server, unable to listen on %s:%s' % (self.configuration.web.host, self.configuration.web.port))
return ok
def shutdown (self):
"""terminate all the current BGP connections"""
self.log.info('Performing shutdown')
try:
self.web.stop() # accept no new web connection
self.proxy.stop() # accept no new proxy connections
self.redirector.stop() # shut down redirector children
self.content.stop() # stop downloading data
self.client.stop() # close client connections
self.pid.remove()
self.interrupt_scheduler.stop()
except KeyboardInterrupt:
self.log.info('^C received while shutting down. Exiting immediately because you insisted.')
sys.exit()
def reload (self):
self.log.info('Performing reload of exaproxy %s' % self.configuration.proxy.version)
self.redirector.respawn()
|
|
import pickle
from typing import Tuple, List, Iterable, Any
import logging
from PyQt5 import QtWidgets, QtCore
from .samples_ui import Ui_WizardPage
from ......core2.instrument.instrument import Instrument
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class Model(QtCore.QAbstractItemModel):
_data: List[Tuple[str, float, int]]
exposuretime: float=300
exposurecount: int=1
def __init__(self):
self._data = []
super().__init__()
def setDefaultExposureTime(self, exptime: float):
self.exposuretime = exptime
def setDefaultExposureCount(self, expcount: int):
self.exposurecount = expcount
def rowCount(self, parent: QtCore.QModelIndex = ...) -> int:
return len(self._data)
def columnCount(self, parent: QtCore.QModelIndex = ...) -> int:
return 3
def parent(self, child: QtCore.QModelIndex) -> QtCore.QModelIndex:
return QtCore.QModelIndex()
def index(self, row: int, column: int, parent: QtCore.QModelIndex = ...) -> QtCore.QModelIndex:
return self.createIndex(row, column, None)
def flags(self, index: QtCore.QModelIndex) -> QtCore.Qt.ItemFlag:
if not index.isValid():
return QtCore.Qt.ItemIsDropEnabled | QtCore.Qt.ItemIsDragEnabled
elif index.column() == 0:
return QtCore.Qt.ItemNeverHasChildren | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsDragEnabled
else:
return QtCore.Qt.ItemNeverHasChildren | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsDragEnabled | QtCore.Qt.ItemIsEditable
def data(self, index: QtCore.QModelIndex, role: int = ...) -> Any:
if (index.column() == 0) and (role in [QtCore.Qt.DisplayRole, QtCore.Qt.EditRole]):
return self._data[index.row()][0]
elif (index.column() == 1) and (role == QtCore.Qt.DisplayRole):
return f'{self._data[index.row()][1]:.2f} sec'
elif (index.column() == 1) and (role == QtCore.Qt.EditRole):
return self._data[index.row()][1]
elif (index.column() == 2) and (role == QtCore.Qt.DisplayRole):
return f'{self._data[index.row()][2]:d}'
elif (index.column() == 2) and (role == QtCore.Qt.EditRole):
return self._data[index.row()][2]
else:
return None
def setData(self, index: QtCore.QModelIndex, value: Any, role: int = ...) -> bool:
if role != QtCore.Qt.EditRole:
logger.warning(f'setdata(row={index.row()}, column={index.column()}, {value=}, {type(value)=} role={role} != EditRole)')
return False
data = self._data[index.row()]
if index.column() == 0:
self._data[index.row()] = value, data[1], data[2]
elif (index.column() == 1) and (value > 0):
self._data[index.row()] = data[0], float(value), data[2]
elif (index.column() == 2) and (value > 0):
self._data[index.row()] = data[0], data[1], int(value)
else:
return False
self.dataChanged.emit(index, index)
return True
def insertRow(self, row: int, parent: QtCore.QModelIndex = ...) -> bool:
return self.insertRows(row, 1, parent)
def insertRows(self, row: int, count: int, parent: QtCore.QModelIndex = ...) -> bool:
self.beginInsertRows(parent, row, row+count-1)
self._data = self._data[:row] + [('', self.exposuretime, self.exposurecount) for i in range(count)] + self._data[row:]
self.endInsertRows()
return True
def removeRows(self, row: int, count: int, parent: QtCore.QModelIndex = ...) -> bool:
self.beginRemoveRows(parent, row, row+count-1)
self._data = self._data[:row] + self._data[row+count:]
self.endRemoveRows()
return True
def removeRow(self, row: int, parent: QtCore.QModelIndex = ...) -> bool:
return self.removeRows(row, 1, parent)
def headerData(self, section: int, orientation: QtCore.Qt.Orientation, role: int = ...) -> Any:
if (orientation == QtCore.Qt.Horizontal) and (role == QtCore.Qt.DisplayRole):
return ['Sample', 'Exposure time', 'Exposure count'][section]
return None
def dropMimeData(self, data: QtCore.QMimeData, action: QtCore.Qt.DropAction, row: int, column: int, parent: QtCore.QModelIndex) -> bool:
        logger.debug(f'dropMimeData({data.formats()}, {action=}, {row=}, {column=}, {parent.isValid()=})')
if parent.isValid():
return False
if row < 0:
row = len(self._data)
if data.hasFormat('application/x-cctsequenceexposurelist'):
lis = pickle.loads(data.data('application/x-cctsequenceexposurelist'))
logger.debug(f'Adding {len(lis)} exposurelist elements')
if not lis:
return False
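            # Internal move: the dragged rows are first overwritten with the
            # placeholder tuple ('', -1, -1); after the payload is inserted at
            # the drop position, every placeholder row is removed below, so the
            # net effect is a reorder rather than a copy.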
for r_ in [r for r, l in lis]:
self._data[r_]='', -1, -1
self.beginInsertRows(parent, row, row+len(lis)-1)
self._data = self._data[:row] + [l for r,l in lis] + self._data[row:]
self.endInsertRows()
while (rowstoremove := [r for r, d in enumerate(self._data) if d == ('', -1, -1)]):
self.removeRow(rowstoremove[0], QtCore.QModelIndex())
elif data.hasFormat('application/x-cctsamplelist'):
lis = pickle.loads(data.data('application/x-cctsamplelist'))
logger.debug(f'Adding {len(lis)} samples')
if not lis:
return False
self.beginInsertRows(parent, row, row+len(lis)-1)
self._data = self._data[:row] + [(s.title, self.exposuretime, self.exposurecount) for s in lis] + self._data[row:]
self.endInsertRows()
else:
return False
return True
def supportedDragActions(self) -> QtCore.Qt.DropAction:
return QtCore.Qt.MoveAction
def supportedDropActions(self) -> QtCore.Qt.DropAction:
return QtCore.Qt.CopyAction | QtCore.Qt.MoveAction
def mimeData(self, indexes: Iterable[QtCore.QModelIndex]) -> QtCore.QMimeData:
md = QtCore.QMimeData()
rows = {i.row() for i in indexes}
md.setData('application/x-cctsequenceexposurelist', pickle.dumps([(r, self._data[r]) for r in rows]))
return md
def clear(self):
self.beginResetModel()
self._data = []
self.endResetModel()
def mimeTypes(self) -> List[str]:
return ['application/x-cctsequenceexposurelist', 'application/x-cctsamplelist']
def exposures(self) -> List[Tuple[str, float, int]]:
return self._data
def setExpTimes(self):
for i in range(len(self._data)):
self._data[i] = self._data[i][0], self.exposuretime, self._data[i][2]
self.dataChanged.emit(
self.index(0, 1, QtCore.QModelIndex()),
            self.index(len(self._data) - 1, 1, QtCore.QModelIndex())
)
def setExpCounts(self):
for i in range(len(self._data)):
self._data[i] = self._data[i][0], self._data[i][1], self.exposurecount
self.dataChanged.emit(
self.index(0, 2, QtCore.QModelIndex()),
            self.index(len(self._data) - 1, 2, QtCore.QModelIndex())
)
class SamplesPage(QtWidgets.QWizardPage, Ui_WizardPage):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.setupUi(self)
def setupUi(self, WizardPage):
super().setupUi(WizardPage)
instrument = Instrument.instance()
self.sampleListView.setModel(instrument.samplestore.sortedmodel)
self.sampleListView.setSelectionModel(QtCore.QItemSelectionModel(self.sampleListView.model(), self.sampleListView))
self.exposureTreeView.setModel(Model())
self.exposureTreeView.setSelectionModel(QtCore.QItemSelectionModel(self.exposureTreeView.model(), self.exposureTreeView))
self.addSampleToolButton.clicked.connect(self.onAddSample)
self.removeSamplesToolButton.clicked.connect(self.onRemoveSamples)
self.clearExposureListToolButton.clicked.connect(self.onClearExposureList)
self.exposureTimeDoubleSpinBox.valueChanged.connect(self.exposureTreeView.model().setDefaultExposureTime)
self.exposureCountSpinBox.valueChanged.connect(self.exposureTreeView.model().setDefaultExposureCount)
self.setExpCountToolButton.clicked.connect(self.exposureTreeView.model().setExpCounts)
self.setExpTimeToolButton.clicked.connect(self.exposureTreeView.model().setExpTimes)
self.registerField('orderSamples', self.orderSamplesCheckBox, 'checked', self.orderSamplesCheckBox.toggled)
def exposures(self) -> List[Tuple[str, float, int]]:
return self.exposureTreeView.model().exposures()
def onAddSample(self):
logger.debug(f'Adding {len(self.sampleListView.selectedIndexes())} samples')
for index in self.sampleListView.selectedIndexes():
samplename = index.data(QtCore.Qt.DisplayRole)
logger.debug(f'Adding sample {samplename}')
model = self.exposureTreeView.model()
model.insertRow(model.rowCount(QtCore.QModelIndex()) + 1, QtCore.QModelIndex())
model.setData(model.index(model.rowCount(QtCore.QModelIndex())-1, 0, QtCore.QModelIndex()), samplename, QtCore.Qt.EditRole)
self.sampleListView.selectionModel().clearSelection()
def onRemoveSamples(self):
while (indexlist := self.exposureTreeView.selectionModel().selectedRows(0)):
self.exposureTreeView.model().removeRow(indexlist[0].row(), QtCore.QModelIndex())
def onClearExposureList(self):
self.exposureTreeView.model().clear()
|
|
"""
Module for Approximate Bayesian Computation
"""
from abc import ABCMeta, abstractmethod
import multiprocessing
import numpy as np
from scipy import stats
from numpy.lib.recfunctions import stack_arrays
class Model(object):
"""
    Base class for constructing models for approximate Bayesian computation
    and various uses limited only by the user's imagination.
    WARNING!! Not meant for direct use! You must implement your own model as a
    subclass and override all of the following methods:
* Model.draw_theta
* Model.generate_data
* Model.summary_stats
* Model.distance_function
"""
__metaclass__ = ABCMeta
def __call__(self, theta):
return self.generate_data_and_reduce(theta)
def set_data(self, data):
self.data = data
self.data_sum_stats = self.summary_stats(self.data)
    #TODO think about a better way to handle prior functions
def set_prior(self, prior):
self.prior = prior
def generate_data_and_reduce(self, theta):
"""
A combined method for generating data, calculating summary statistics
and evaluating the distance function all at once.
"""
synth = self.generate_data(theta)
sum_stats = self.summary_stats(synth)
d = self.distance_function(sum_stats, self.data_sum_stats)
return d
@abstractmethod
def draw_theta(self):
"""
Sub-classable method for drawing from a prior distribution.
This method should return an array-like iterable that is a vector of
proposed model parameters from your prior distribution.
"""
@abstractmethod
def generate_data(self, theta):
"""
Sub-classable method for generating synthetic data sets from forward
model.
This method should return an array/matrix/table of simulated data
taking vector theta as an argument.
"""
@abstractmethod
def summary_stats(self, data):
"""
Sub-classable method for computing summary statistics.
This method should return an array-like iterable of summary statistics
taking an array/matrix/table as an argument.
"""
@abstractmethod
def distance_function(self, summary_stats, summary_stats_synth):
"""
Sub-classable method for computing a distance function.
        This method should return a distance D for comparison against the
        acceptance tolerance (epsilon), taking two array-like iterables of
        summary statistics as arguments (nominally the observed and the
        simulated summary statistics).
"""
################################################################################
######################### ABC Algorithms ##################################
################################################################################
def basic_abc(model, data, epsilon=1, min_samples=10,
parallel=False, n_procs='all', pmc_mode=False,
weights='None', theta_prev='None', tau_squared='None'):
"""
Perform Approximate Bayesian Computation (ABC) on a data set given a
forward model.
ABC is a likelihood-free method of Bayesian inference that uses simulation
to approximate the true posterior distribution of a parameter. It is
appropriate to use in situations where:
The likelihood function is unknown or is too computationally
expensive to compute.
There exists a good forward model that can produce data sets
like the one of interest.
It is not a replacement for other methods when a likelihood
function is available!
Parameters
----------
model : object
A model that is a subclass of simpleabc.Model
data : object, array_like
The "observed" data set for inference.
epsilon : float, optional
The tolerance to accept parameter draws, default is 1.
min_samples : int, optional
Minimum number of posterior samples.
parallel : bool, optional
Run in parallel mode. Default is a single thread.
n_procs : int, str, optional
        Number of subprocesses in parallel mode. Default is 'all', i.e. one
        for each available core.
pmc_mode : bool, optional
Population Monte Carlo mode on or off. Default is False. This is not
meant to be called by the user, but is set by simple_abc.pmc_abc.
weights : object, array_like, str, optional
Importance sampling weights from previous PMC step. Used by
simple_abc.pmc_abc only.
theta_prev : object, array_like, str, optional
Posterior draws from previous PMC step. Used by simple_abc.pmc_abc
only.
tau_squared : object, array_like, str, optional
        Previous Gaussian kernel variances for importance sampling. Used by
simple_abc.pmc_abc only.
Returns
-------
posterior : numpy array
Array of posterior samples.
distances : object
Array of accepted distances.
accepted_count : float
Number of posterior samples.
trial_count : float
Number of total samples attempted.
epsilon : float
Distance tolerance used.
weights : numpy array
Importance sampling weights. Returns an array of 1s where
size = posterior.size when not in pmc mode.
tau_squared : numpy array
Gaussian kernel variances. Returns an array of 0s where
size = posterior.size when not in pmc mode.
eff_sample : numpy array
Effective sample size. Returns an array of 1s where
size = posterior.size when not in pmc mode.
Examples
--------
    Forthcoming.
"""
posterior, rejected, distances = [], [], []
trial_count, accepted_count = 0, 0
data_summary_stats = model.summary_stats(data)
#TODO Implement pmc option in parallel mode
if parallel:
attempts = 2*min_samples
if n_procs == 'all':
n_procs = multiprocessing.cpu_count()
while accepted_count < min_samples :
thetas = [model.draw_theta() for x in
xrange(attempts)]
#Start a pool of workers
pool = multiprocessing.Pool(n_procs)
ds = pool.map(model, thetas)
#Shut down pool
pool.close()
pool.join()
for j, d in enumerate(ds):
if d < epsilon:
posterior.append(thetas[j])
accepted_count += 1
trial_count += 1
else:
#rejected.append(thetas[j])
trial_count += 1
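            # Size the next batch from the observed acceptance rate: roughly
            # (trials per accepted draw so far) * (samples still needed).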
attempts = int(float(trial_count)/float(accepted_count + 1) *
(min_samples - accepted_count))
return (posterior, distances,
accepted_count, trial_count,
epsilon)
else:
while accepted_count <= min_samples :
trial_count += 1
if pmc_mode:
theta_star = []
theta = []
for j in xrange(theta_prev.shape[0]):
theta_star.append(np.random.choice(theta_prev[j],
replace=True,
p=weights[j]))
#print "t*,tu2: ",theta_star[j], np.sqrt(tau_squared[0][j])
theta.append(stats.norm.rvs(loc=theta_star[j],
scale=np.sqrt(tau_squared[0][j])))
else:
theta = model.draw_theta()
synthetic_data = model.generate_data(theta)
synthetic_summary_stats = model.summary_stats(synthetic_data)
distance = model.distance_function(data_summary_stats,
synthetic_summary_stats)
if distance < epsilon:
accepted_count += 1
posterior.append(theta)
distances.append(distance)
else:
pass
#rejected.append(theta)
posterior = np.asarray(posterior).T
weights = np.ones(posterior.shape)
tau_squared = np.zeros((1, posterior.shape[0]))
eff_sample = np.ones(posterior.shape[0])*posterior.shape[1]
return (posterior, distances,
accepted_count, trial_count,
epsilon, weights, tau_squared, eff_sample)
def pmc_abc(model, data, epsilon_0=1, min_samples=10,
steps=10, resume=None, parallel=False, n_procs='all'):
"""
Perform a sequence of ABC posterior approximations using the sequential
population Monte Carlo algorithm.
Parameters
----------
model : object
A model that is a subclass of simpleabc.Model
data : object, array_like
The "observed" data set for inference.
epsilon_0 : float, optional
The initial tolerance to accept parameter draws, default is 1.
min_samples : int, optional
Minimum number of posterior samples.
steps : int
The number of pmc steps to attempt
resume : numpy record array, optional
A record array of a previous pmc sequence to continue the sequence on.
parallel : bool, optional
Run in parallel mode. Default is a single thread.
n_procs : int, str, optional
        Number of subprocesses in parallel mode. Default is 'all', i.e. one
        for each available core.
Returns
-------
output_record : numpy record array
A record array containing all ABC output for each step indexed by step
(0, 1, ..., n,). Each step sub arrays is made up of the following
variables:
posterior : numpy array
Array of posterior samples.
distances : object
Array of accepted distances.
accepted_count : float
Number of posterior samples.
trial_count : float
Number of total samples attempted.
epsilon : float
Distance tolerance used.
weights : numpy array
Importance sampling weights. Returns an array of 1s where
size = posterior.size when not in pmc mode.
tau_squared : numpy array
Gaussian kernel variances. Returns an array of 0s where
size = posterior.size when not in pmc mode.
eff_sample : numpy array
Effective sample size. Returns an array of 1s where
size = posterior.size when not in pmc mode.
Examples
--------
    Forthcoming.
"""
output_record = np.empty(steps, dtype=[('theta accepted', object),
#('theta rejected', object),
('D accepted', object),
('n accepted', float),
('n total', float),
('epsilon', float),
('weights', object),
('tau_squared', object),
('eff sample size', object),
])
    if resume is not None:
steps = xrange(resume.size, resume.size + steps)
output_record = stack_arrays((resume, output_record), asrecarray=True,
usemask=False)
epsilon = stats.scoreatpercentile(resume[-1]['D accepted'],
per=75)
theta = resume['theta accepted'][-1]
weights = resume['weights'][-1]
tau_squared = resume['tau_squared'][-1]
else:
steps = xrange(steps)
epsilon = epsilon_0
for step in steps:
print 'Starting step {}'.format(step)
if step == 0:
            #First ABC calculation
output_record[step] = basic_abc(model, data, epsilon=epsilon,
min_samples=min_samples,
parallel=parallel,
n_procs=n_procs, pmc_mode=False)
theta = output_record[step]['theta accepted']
#print theta.shape
tau_squared = output_record[step]['tau_squared']
#print tau_squared
weights = output_record[step]['weights']
for j in xrange(theta.shape[0]):
tau_squared[0][j] = 2*np.var(theta[j])
weights[j] = weights[j]*1/float(theta[j].size)
epsilon = stats.scoreatpercentile(output_record[step]['D accepted'],
per=75)
#print tau_squared
#print weights
#print epsilon
else:
#print weights
theta_prev = theta
weights_prev = weights
output_record[step] = basic_abc(model, data, epsilon=epsilon,
min_samples =min_samples,
parallel=parallel,
n_procs= n_procs, pmc_mode=True,
weights=weights,
theta_prev=theta_prev,
tau_squared=tau_squared)
theta = output_record[step]['theta accepted']
epsilon = stats.scoreatpercentile(output_record[step]['D accepted'],
per=75)
if epsilon == 0.0:
epsilon = 0.001
#print theta_prev
weights = calc_weights(theta_prev, theta, tau_squared,
weights_prev, prior=model.prior)
output_record[step]['weights'] = weights
#print "w ",weights
#print "sum(w) ",sum(weights[0]),sum(weights[1])
n = theta[0].size
#print weights_prev
tau_squared = np.zeros((1, theta_prev.shape[0]))
effective_sample = np.zeros((1, theta_prev.shape[0]))
for j in xrange(theta.shape[0]):
w_sum = weights_prev[j].sum()
w_sum2 = sum(weights_prev[j]**2)
effective_sample[0][j] = (w_sum * w_sum) / w_sum2
mean_theta = np.sum(theta[j] * weights[j])
var_theta = np.sum((theta[j] - mean_theta)**2 * weights[j])
tau_squared[0][j] = 2*var_theta
output_record[step]['tau_squared'] = tau_squared
output_record[step]['eff sample size'] = effective_sample
return output_record
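# Hedged usage sketch (names and settings are assumptions, not from the module):
#   record = pmc_abc(model, observed, epsilon_0=1.0, min_samples=200, steps=5)
#   posterior = record[-1]['theta accepted']
#   # continue the same sequence for five more steps:
#   record = pmc_abc(model, observed, min_samples=200, steps=5, resume=record)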
def calc_weights(theta_prev, theta, tau_squared, weights, prior="None"):
"""
Calculates importance weights
"""
weights_new = np.zeros_like(theta)
for i in xrange(theta.shape[0]):
for j in xrange(theta[i].size):
weights_new[i][j] = (prior[i].pdf(theta[i][j]) /
                                 np.sum(weights[i]*stats.norm.pdf(theta[i][j],
theta_prev[i],
np.sqrt(tau_squared[0][i]))))
weights_new[i] = weights_new[i]/sum(weights_new[i])
#print weights_new[i]
return weights_new
|
|
""" Python implementation of Rijndael encryption algorithm.
This code is in the public domain.
This code is based on a public domain C implementation
by Philip J. Erdelsky:
http://www.efgh.com/software/rijndael.htm
"""
import struct
def KEYLENGTH(keybits):
return (keybits)//8
def RKLENGTH(keybits):
return (keybits)//8+28
def NROUNDS(keybits):
return (keybits)//32+6
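# For the three standard AES key sizes the helpers above evaluate to:
#   KEYLENGTH: 128 -> 16, 192 -> 24, 256 -> 32 (bytes of key material)
#   RKLENGTH:  128 -> 44, 192 -> 52, 256 -> 60 (round-key words)
#   NROUNDS:   128 -> 10, 192 -> 12, 256 -> 14 (rounds)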
Te0 = [
0xc66363a5, 0xf87c7c84, 0xee777799, 0xf67b7b8d,
0xfff2f20d, 0xd66b6bbd, 0xde6f6fb1, 0x91c5c554,
0x60303050, 0x02010103, 0xce6767a9, 0x562b2b7d,
0xe7fefe19, 0xb5d7d762, 0x4dababe6, 0xec76769a,
0x8fcaca45, 0x1f82829d, 0x89c9c940, 0xfa7d7d87,
0xeffafa15, 0xb25959eb, 0x8e4747c9, 0xfbf0f00b,
0x41adadec, 0xb3d4d467, 0x5fa2a2fd, 0x45afafea,
0x239c9cbf, 0x53a4a4f7, 0xe4727296, 0x9bc0c05b,
0x75b7b7c2, 0xe1fdfd1c, 0x3d9393ae, 0x4c26266a,
0x6c36365a, 0x7e3f3f41, 0xf5f7f702, 0x83cccc4f,
0x6834345c, 0x51a5a5f4, 0xd1e5e534, 0xf9f1f108,
0xe2717193, 0xabd8d873, 0x62313153, 0x2a15153f,
0x0804040c, 0x95c7c752, 0x46232365, 0x9dc3c35e,
0x30181828, 0x379696a1, 0x0a05050f, 0x2f9a9ab5,
0x0e070709, 0x24121236, 0x1b80809b, 0xdfe2e23d,
0xcdebeb26, 0x4e272769, 0x7fb2b2cd, 0xea75759f,
0x1209091b, 0x1d83839e, 0x582c2c74, 0x341a1a2e,
0x361b1b2d, 0xdc6e6eb2, 0xb45a5aee, 0x5ba0a0fb,
0xa45252f6, 0x763b3b4d, 0xb7d6d661, 0x7db3b3ce,
0x5229297b, 0xdde3e33e, 0x5e2f2f71, 0x13848497,
0xa65353f5, 0xb9d1d168, 0x00000000, 0xc1eded2c,
0x40202060, 0xe3fcfc1f, 0x79b1b1c8, 0xb65b5bed,
0xd46a6abe, 0x8dcbcb46, 0x67bebed9, 0x7239394b,
0x944a4ade, 0x984c4cd4, 0xb05858e8, 0x85cfcf4a,
0xbbd0d06b, 0xc5efef2a, 0x4faaaae5, 0xedfbfb16,
0x864343c5, 0x9a4d4dd7, 0x66333355, 0x11858594,
0x8a4545cf, 0xe9f9f910, 0x04020206, 0xfe7f7f81,
0xa05050f0, 0x783c3c44, 0x259f9fba, 0x4ba8a8e3,
0xa25151f3, 0x5da3a3fe, 0x804040c0, 0x058f8f8a,
0x3f9292ad, 0x219d9dbc, 0x70383848, 0xf1f5f504,
0x63bcbcdf, 0x77b6b6c1, 0xafdada75, 0x42212163,
0x20101030, 0xe5ffff1a, 0xfdf3f30e, 0xbfd2d26d,
0x81cdcd4c, 0x180c0c14, 0x26131335, 0xc3ecec2f,
0xbe5f5fe1, 0x359797a2, 0x884444cc, 0x2e171739,
0x93c4c457, 0x55a7a7f2, 0xfc7e7e82, 0x7a3d3d47,
0xc86464ac, 0xba5d5de7, 0x3219192b, 0xe6737395,
0xc06060a0, 0x19818198, 0x9e4f4fd1, 0xa3dcdc7f,
0x44222266, 0x542a2a7e, 0x3b9090ab, 0x0b888883,
0x8c4646ca, 0xc7eeee29, 0x6bb8b8d3, 0x2814143c,
0xa7dede79, 0xbc5e5ee2, 0x160b0b1d, 0xaddbdb76,
0xdbe0e03b, 0x64323256, 0x743a3a4e, 0x140a0a1e,
0x924949db, 0x0c06060a, 0x4824246c, 0xb85c5ce4,
0x9fc2c25d, 0xbdd3d36e, 0x43acacef, 0xc46262a6,
0x399191a8, 0x319595a4, 0xd3e4e437, 0xf279798b,
0xd5e7e732, 0x8bc8c843, 0x6e373759, 0xda6d6db7,
0x018d8d8c, 0xb1d5d564, 0x9c4e4ed2, 0x49a9a9e0,
0xd86c6cb4, 0xac5656fa, 0xf3f4f407, 0xcfeaea25,
0xca6565af, 0xf47a7a8e, 0x47aeaee9, 0x10080818,
0x6fbabad5, 0xf0787888, 0x4a25256f, 0x5c2e2e72,
0x381c1c24, 0x57a6a6f1, 0x73b4b4c7, 0x97c6c651,
0xcbe8e823, 0xa1dddd7c, 0xe874749c, 0x3e1f1f21,
0x964b4bdd, 0x61bdbddc, 0x0d8b8b86, 0x0f8a8a85,
0xe0707090, 0x7c3e3e42, 0x71b5b5c4, 0xcc6666aa,
0x904848d8, 0x06030305, 0xf7f6f601, 0x1c0e0e12,
0xc26161a3, 0x6a35355f, 0xae5757f9, 0x69b9b9d0,
0x17868691, 0x99c1c158, 0x3a1d1d27, 0x279e9eb9,
0xd9e1e138, 0xebf8f813, 0x2b9898b3, 0x22111133,
0xd26969bb, 0xa9d9d970, 0x078e8e89, 0x339494a7,
0x2d9b9bb6, 0x3c1e1e22, 0x15878792, 0xc9e9e920,
0x87cece49, 0xaa5555ff, 0x50282878, 0xa5dfdf7a,
0x038c8c8f, 0x59a1a1f8, 0x09898980, 0x1a0d0d17,
0x65bfbfda, 0xd7e6e631, 0x844242c6, 0xd06868b8,
0x824141c3, 0x299999b0, 0x5a2d2d77, 0x1e0f0f11,
0x7bb0b0cb, 0xa85454fc, 0x6dbbbbd6, 0x2c16163a,
]
Te1 = [
0xa5c66363, 0x84f87c7c, 0x99ee7777, 0x8df67b7b,
0x0dfff2f2, 0xbdd66b6b, 0xb1de6f6f, 0x5491c5c5,
0x50603030, 0x03020101, 0xa9ce6767, 0x7d562b2b,
0x19e7fefe, 0x62b5d7d7, 0xe64dabab, 0x9aec7676,
0x458fcaca, 0x9d1f8282, 0x4089c9c9, 0x87fa7d7d,
0x15effafa, 0xebb25959, 0xc98e4747, 0x0bfbf0f0,
0xec41adad, 0x67b3d4d4, 0xfd5fa2a2, 0xea45afaf,
0xbf239c9c, 0xf753a4a4, 0x96e47272, 0x5b9bc0c0,
0xc275b7b7, 0x1ce1fdfd, 0xae3d9393, 0x6a4c2626,
0x5a6c3636, 0x417e3f3f, 0x02f5f7f7, 0x4f83cccc,
0x5c683434, 0xf451a5a5, 0x34d1e5e5, 0x08f9f1f1,
0x93e27171, 0x73abd8d8, 0x53623131, 0x3f2a1515,
0x0c080404, 0x5295c7c7, 0x65462323, 0x5e9dc3c3,
0x28301818, 0xa1379696, 0x0f0a0505, 0xb52f9a9a,
0x090e0707, 0x36241212, 0x9b1b8080, 0x3ddfe2e2,
0x26cdebeb, 0x694e2727, 0xcd7fb2b2, 0x9fea7575,
0x1b120909, 0x9e1d8383, 0x74582c2c, 0x2e341a1a,
0x2d361b1b, 0xb2dc6e6e, 0xeeb45a5a, 0xfb5ba0a0,
0xf6a45252, 0x4d763b3b, 0x61b7d6d6, 0xce7db3b3,
0x7b522929, 0x3edde3e3, 0x715e2f2f, 0x97138484,
0xf5a65353, 0x68b9d1d1, 0x00000000, 0x2cc1eded,
0x60402020, 0x1fe3fcfc, 0xc879b1b1, 0xedb65b5b,
0xbed46a6a, 0x468dcbcb, 0xd967bebe, 0x4b723939,
0xde944a4a, 0xd4984c4c, 0xe8b05858, 0x4a85cfcf,
0x6bbbd0d0, 0x2ac5efef, 0xe54faaaa, 0x16edfbfb,
0xc5864343, 0xd79a4d4d, 0x55663333, 0x94118585,
0xcf8a4545, 0x10e9f9f9, 0x06040202, 0x81fe7f7f,
0xf0a05050, 0x44783c3c, 0xba259f9f, 0xe34ba8a8,
0xf3a25151, 0xfe5da3a3, 0xc0804040, 0x8a058f8f,
0xad3f9292, 0xbc219d9d, 0x48703838, 0x04f1f5f5,
0xdf63bcbc, 0xc177b6b6, 0x75afdada, 0x63422121,
0x30201010, 0x1ae5ffff, 0x0efdf3f3, 0x6dbfd2d2,
0x4c81cdcd, 0x14180c0c, 0x35261313, 0x2fc3ecec,
0xe1be5f5f, 0xa2359797, 0xcc884444, 0x392e1717,
0x5793c4c4, 0xf255a7a7, 0x82fc7e7e, 0x477a3d3d,
0xacc86464, 0xe7ba5d5d, 0x2b321919, 0x95e67373,
0xa0c06060, 0x98198181, 0xd19e4f4f, 0x7fa3dcdc,
0x66442222, 0x7e542a2a, 0xab3b9090, 0x830b8888,
0xca8c4646, 0x29c7eeee, 0xd36bb8b8, 0x3c281414,
0x79a7dede, 0xe2bc5e5e, 0x1d160b0b, 0x76addbdb,
0x3bdbe0e0, 0x56643232, 0x4e743a3a, 0x1e140a0a,
0xdb924949, 0x0a0c0606, 0x6c482424, 0xe4b85c5c,
0x5d9fc2c2, 0x6ebdd3d3, 0xef43acac, 0xa6c46262,
0xa8399191, 0xa4319595, 0x37d3e4e4, 0x8bf27979,
0x32d5e7e7, 0x438bc8c8, 0x596e3737, 0xb7da6d6d,
0x8c018d8d, 0x64b1d5d5, 0xd29c4e4e, 0xe049a9a9,
0xb4d86c6c, 0xfaac5656, 0x07f3f4f4, 0x25cfeaea,
0xafca6565, 0x8ef47a7a, 0xe947aeae, 0x18100808,
0xd56fbaba, 0x88f07878, 0x6f4a2525, 0x725c2e2e,
0x24381c1c, 0xf157a6a6, 0xc773b4b4, 0x5197c6c6,
0x23cbe8e8, 0x7ca1dddd, 0x9ce87474, 0x213e1f1f,
0xdd964b4b, 0xdc61bdbd, 0x860d8b8b, 0x850f8a8a,
0x90e07070, 0x427c3e3e, 0xc471b5b5, 0xaacc6666,
0xd8904848, 0x05060303, 0x01f7f6f6, 0x121c0e0e,
0xa3c26161, 0x5f6a3535, 0xf9ae5757, 0xd069b9b9,
0x91178686, 0x5899c1c1, 0x273a1d1d, 0xb9279e9e,
0x38d9e1e1, 0x13ebf8f8, 0xb32b9898, 0x33221111,
0xbbd26969, 0x70a9d9d9, 0x89078e8e, 0xa7339494,
0xb62d9b9b, 0x223c1e1e, 0x92158787, 0x20c9e9e9,
0x4987cece, 0xffaa5555, 0x78502828, 0x7aa5dfdf,
0x8f038c8c, 0xf859a1a1, 0x80098989, 0x171a0d0d,
0xda65bfbf, 0x31d7e6e6, 0xc6844242, 0xb8d06868,
0xc3824141, 0xb0299999, 0x775a2d2d, 0x111e0f0f,
0xcb7bb0b0, 0xfca85454, 0xd66dbbbb, 0x3a2c1616,
]
Te2 = [
0x63a5c663, 0x7c84f87c, 0x7799ee77, 0x7b8df67b,
0xf20dfff2, 0x6bbdd66b, 0x6fb1de6f, 0xc55491c5,
0x30506030, 0x01030201, 0x67a9ce67, 0x2b7d562b,
0xfe19e7fe, 0xd762b5d7, 0xabe64dab, 0x769aec76,
0xca458fca, 0x829d1f82, 0xc94089c9, 0x7d87fa7d,
0xfa15effa, 0x59ebb259, 0x47c98e47, 0xf00bfbf0,
0xadec41ad, 0xd467b3d4, 0xa2fd5fa2, 0xafea45af,
0x9cbf239c, 0xa4f753a4, 0x7296e472, 0xc05b9bc0,
0xb7c275b7, 0xfd1ce1fd, 0x93ae3d93, 0x266a4c26,
0x365a6c36, 0x3f417e3f, 0xf702f5f7, 0xcc4f83cc,
0x345c6834, 0xa5f451a5, 0xe534d1e5, 0xf108f9f1,
0x7193e271, 0xd873abd8, 0x31536231, 0x153f2a15,
0x040c0804, 0xc75295c7, 0x23654623, 0xc35e9dc3,
0x18283018, 0x96a13796, 0x050f0a05, 0x9ab52f9a,
0x07090e07, 0x12362412, 0x809b1b80, 0xe23ddfe2,
0xeb26cdeb, 0x27694e27, 0xb2cd7fb2, 0x759fea75,
0x091b1209, 0x839e1d83, 0x2c74582c, 0x1a2e341a,
0x1b2d361b, 0x6eb2dc6e, 0x5aeeb45a, 0xa0fb5ba0,
0x52f6a452, 0x3b4d763b, 0xd661b7d6, 0xb3ce7db3,
0x297b5229, 0xe33edde3, 0x2f715e2f, 0x84971384,
0x53f5a653, 0xd168b9d1, 0x00000000, 0xed2cc1ed,
0x20604020, 0xfc1fe3fc, 0xb1c879b1, 0x5bedb65b,
0x6abed46a, 0xcb468dcb, 0xbed967be, 0x394b7239,
0x4ade944a, 0x4cd4984c, 0x58e8b058, 0xcf4a85cf,
0xd06bbbd0, 0xef2ac5ef, 0xaae54faa, 0xfb16edfb,
0x43c58643, 0x4dd79a4d, 0x33556633, 0x85941185,
0x45cf8a45, 0xf910e9f9, 0x02060402, 0x7f81fe7f,
0x50f0a050, 0x3c44783c, 0x9fba259f, 0xa8e34ba8,
0x51f3a251, 0xa3fe5da3, 0x40c08040, 0x8f8a058f,
0x92ad3f92, 0x9dbc219d, 0x38487038, 0xf504f1f5,
0xbcdf63bc, 0xb6c177b6, 0xda75afda, 0x21634221,
0x10302010, 0xff1ae5ff, 0xf30efdf3, 0xd26dbfd2,
0xcd4c81cd, 0x0c14180c, 0x13352613, 0xec2fc3ec,
0x5fe1be5f, 0x97a23597, 0x44cc8844, 0x17392e17,
0xc45793c4, 0xa7f255a7, 0x7e82fc7e, 0x3d477a3d,
0x64acc864, 0x5de7ba5d, 0x192b3219, 0x7395e673,
0x60a0c060, 0x81981981, 0x4fd19e4f, 0xdc7fa3dc,
0x22664422, 0x2a7e542a, 0x90ab3b90, 0x88830b88,
0x46ca8c46, 0xee29c7ee, 0xb8d36bb8, 0x143c2814,
0xde79a7de, 0x5ee2bc5e, 0x0b1d160b, 0xdb76addb,
0xe03bdbe0, 0x32566432, 0x3a4e743a, 0x0a1e140a,
0x49db9249, 0x060a0c06, 0x246c4824, 0x5ce4b85c,
0xc25d9fc2, 0xd36ebdd3, 0xacef43ac, 0x62a6c462,
0x91a83991, 0x95a43195, 0xe437d3e4, 0x798bf279,
0xe732d5e7, 0xc8438bc8, 0x37596e37, 0x6db7da6d,
0x8d8c018d, 0xd564b1d5, 0x4ed29c4e, 0xa9e049a9,
0x6cb4d86c, 0x56faac56, 0xf407f3f4, 0xea25cfea,
0x65afca65, 0x7a8ef47a, 0xaee947ae, 0x08181008,
0xbad56fba, 0x7888f078, 0x256f4a25, 0x2e725c2e,
0x1c24381c, 0xa6f157a6, 0xb4c773b4, 0xc65197c6,
0xe823cbe8, 0xdd7ca1dd, 0x749ce874, 0x1f213e1f,
0x4bdd964b, 0xbddc61bd, 0x8b860d8b, 0x8a850f8a,
0x7090e070, 0x3e427c3e, 0xb5c471b5, 0x66aacc66,
0x48d89048, 0x03050603, 0xf601f7f6, 0x0e121c0e,
0x61a3c261, 0x355f6a35, 0x57f9ae57, 0xb9d069b9,
0x86911786, 0xc15899c1, 0x1d273a1d, 0x9eb9279e,
0xe138d9e1, 0xf813ebf8, 0x98b32b98, 0x11332211,
0x69bbd269, 0xd970a9d9, 0x8e89078e, 0x94a73394,
0x9bb62d9b, 0x1e223c1e, 0x87921587, 0xe920c9e9,
0xce4987ce, 0x55ffaa55, 0x28785028, 0xdf7aa5df,
0x8c8f038c, 0xa1f859a1, 0x89800989, 0x0d171a0d,
0xbfda65bf, 0xe631d7e6, 0x42c68442, 0x68b8d068,
0x41c38241, 0x99b02999, 0x2d775a2d, 0x0f111e0f,
0xb0cb7bb0, 0x54fca854, 0xbbd66dbb, 0x163a2c16,
]
Te3 = [
0x6363a5c6, 0x7c7c84f8, 0x777799ee, 0x7b7b8df6,
0xf2f20dff, 0x6b6bbdd6, 0x6f6fb1de, 0xc5c55491,
0x30305060, 0x01010302, 0x6767a9ce, 0x2b2b7d56,
0xfefe19e7, 0xd7d762b5, 0xababe64d, 0x76769aec,
0xcaca458f, 0x82829d1f, 0xc9c94089, 0x7d7d87fa,
0xfafa15ef, 0x5959ebb2, 0x4747c98e, 0xf0f00bfb,
0xadadec41, 0xd4d467b3, 0xa2a2fd5f, 0xafafea45,
0x9c9cbf23, 0xa4a4f753, 0x727296e4, 0xc0c05b9b,
0xb7b7c275, 0xfdfd1ce1, 0x9393ae3d, 0x26266a4c,
0x36365a6c, 0x3f3f417e, 0xf7f702f5, 0xcccc4f83,
0x34345c68, 0xa5a5f451, 0xe5e534d1, 0xf1f108f9,
0x717193e2, 0xd8d873ab, 0x31315362, 0x15153f2a,
0x04040c08, 0xc7c75295, 0x23236546, 0xc3c35e9d,
0x18182830, 0x9696a137, 0x05050f0a, 0x9a9ab52f,
0x0707090e, 0x12123624, 0x80809b1b, 0xe2e23ddf,
0xebeb26cd, 0x2727694e, 0xb2b2cd7f, 0x75759fea,
0x09091b12, 0x83839e1d, 0x2c2c7458, 0x1a1a2e34,
0x1b1b2d36, 0x6e6eb2dc, 0x5a5aeeb4, 0xa0a0fb5b,
0x5252f6a4, 0x3b3b4d76, 0xd6d661b7, 0xb3b3ce7d,
0x29297b52, 0xe3e33edd, 0x2f2f715e, 0x84849713,
0x5353f5a6, 0xd1d168b9, 0x00000000, 0xeded2cc1,
0x20206040, 0xfcfc1fe3, 0xb1b1c879, 0x5b5bedb6,
0x6a6abed4, 0xcbcb468d, 0xbebed967, 0x39394b72,
0x4a4ade94, 0x4c4cd498, 0x5858e8b0, 0xcfcf4a85,
0xd0d06bbb, 0xefef2ac5, 0xaaaae54f, 0xfbfb16ed,
0x4343c586, 0x4d4dd79a, 0x33335566, 0x85859411,
0x4545cf8a, 0xf9f910e9, 0x02020604, 0x7f7f81fe,
0x5050f0a0, 0x3c3c4478, 0x9f9fba25, 0xa8a8e34b,
0x5151f3a2, 0xa3a3fe5d, 0x4040c080, 0x8f8f8a05,
0x9292ad3f, 0x9d9dbc21, 0x38384870, 0xf5f504f1,
0xbcbcdf63, 0xb6b6c177, 0xdada75af, 0x21216342,
0x10103020, 0xffff1ae5, 0xf3f30efd, 0xd2d26dbf,
0xcdcd4c81, 0x0c0c1418, 0x13133526, 0xecec2fc3,
0x5f5fe1be, 0x9797a235, 0x4444cc88, 0x1717392e,
0xc4c45793, 0xa7a7f255, 0x7e7e82fc, 0x3d3d477a,
0x6464acc8, 0x5d5de7ba, 0x19192b32, 0x737395e6,
0x6060a0c0, 0x81819819, 0x4f4fd19e, 0xdcdc7fa3,
0x22226644, 0x2a2a7e54, 0x9090ab3b, 0x8888830b,
0x4646ca8c, 0xeeee29c7, 0xb8b8d36b, 0x14143c28,
0xdede79a7, 0x5e5ee2bc, 0x0b0b1d16, 0xdbdb76ad,
0xe0e03bdb, 0x32325664, 0x3a3a4e74, 0x0a0a1e14,
0x4949db92, 0x06060a0c, 0x24246c48, 0x5c5ce4b8,
0xc2c25d9f, 0xd3d36ebd, 0xacacef43, 0x6262a6c4,
0x9191a839, 0x9595a431, 0xe4e437d3, 0x79798bf2,
0xe7e732d5, 0xc8c8438b, 0x3737596e, 0x6d6db7da,
0x8d8d8c01, 0xd5d564b1, 0x4e4ed29c, 0xa9a9e049,
0x6c6cb4d8, 0x5656faac, 0xf4f407f3, 0xeaea25cf,
0x6565afca, 0x7a7a8ef4, 0xaeaee947, 0x08081810,
0xbabad56f, 0x787888f0, 0x25256f4a, 0x2e2e725c,
0x1c1c2438, 0xa6a6f157, 0xb4b4c773, 0xc6c65197,
0xe8e823cb, 0xdddd7ca1, 0x74749ce8, 0x1f1f213e,
0x4b4bdd96, 0xbdbddc61, 0x8b8b860d, 0x8a8a850f,
0x707090e0, 0x3e3e427c, 0xb5b5c471, 0x6666aacc,
0x4848d890, 0x03030506, 0xf6f601f7, 0x0e0e121c,
0x6161a3c2, 0x35355f6a, 0x5757f9ae, 0xb9b9d069,
0x86869117, 0xc1c15899, 0x1d1d273a, 0x9e9eb927,
0xe1e138d9, 0xf8f813eb, 0x9898b32b, 0x11113322,
0x6969bbd2, 0xd9d970a9, 0x8e8e8907, 0x9494a733,
0x9b9bb62d, 0x1e1e223c, 0x87879215, 0xe9e920c9,
0xcece4987, 0x5555ffaa, 0x28287850, 0xdfdf7aa5,
0x8c8c8f03, 0xa1a1f859, 0x89898009, 0x0d0d171a,
0xbfbfda65, 0xe6e631d7, 0x4242c684, 0x6868b8d0,
0x4141c382, 0x9999b029, 0x2d2d775a, 0x0f0f111e,
0xb0b0cb7b, 0x5454fca8, 0xbbbbd66d, 0x16163a2c,
]
Te4 = [
0x63636363, 0x7c7c7c7c, 0x77777777, 0x7b7b7b7b,
0xf2f2f2f2, 0x6b6b6b6b, 0x6f6f6f6f, 0xc5c5c5c5,
0x30303030, 0x01010101, 0x67676767, 0x2b2b2b2b,
0xfefefefe, 0xd7d7d7d7, 0xabababab, 0x76767676,
0xcacacaca, 0x82828282, 0xc9c9c9c9, 0x7d7d7d7d,
0xfafafafa, 0x59595959, 0x47474747, 0xf0f0f0f0,
0xadadadad, 0xd4d4d4d4, 0xa2a2a2a2, 0xafafafaf,
0x9c9c9c9c, 0xa4a4a4a4, 0x72727272, 0xc0c0c0c0,
0xb7b7b7b7, 0xfdfdfdfd, 0x93939393, 0x26262626,
0x36363636, 0x3f3f3f3f, 0xf7f7f7f7, 0xcccccccc,
0x34343434, 0xa5a5a5a5, 0xe5e5e5e5, 0xf1f1f1f1,
0x71717171, 0xd8d8d8d8, 0x31313131, 0x15151515,
0x04040404, 0xc7c7c7c7, 0x23232323, 0xc3c3c3c3,
0x18181818, 0x96969696, 0x05050505, 0x9a9a9a9a,
0x07070707, 0x12121212, 0x80808080, 0xe2e2e2e2,
0xebebebeb, 0x27272727, 0xb2b2b2b2, 0x75757575,
0x09090909, 0x83838383, 0x2c2c2c2c, 0x1a1a1a1a,
0x1b1b1b1b, 0x6e6e6e6e, 0x5a5a5a5a, 0xa0a0a0a0,
0x52525252, 0x3b3b3b3b, 0xd6d6d6d6, 0xb3b3b3b3,
0x29292929, 0xe3e3e3e3, 0x2f2f2f2f, 0x84848484,
0x53535353, 0xd1d1d1d1, 0x00000000, 0xedededed,
0x20202020, 0xfcfcfcfc, 0xb1b1b1b1, 0x5b5b5b5b,
0x6a6a6a6a, 0xcbcbcbcb, 0xbebebebe, 0x39393939,
0x4a4a4a4a, 0x4c4c4c4c, 0x58585858, 0xcfcfcfcf,
0xd0d0d0d0, 0xefefefef, 0xaaaaaaaa, 0xfbfbfbfb,
0x43434343, 0x4d4d4d4d, 0x33333333, 0x85858585,
0x45454545, 0xf9f9f9f9, 0x02020202, 0x7f7f7f7f,
0x50505050, 0x3c3c3c3c, 0x9f9f9f9f, 0xa8a8a8a8,
0x51515151, 0xa3a3a3a3, 0x40404040, 0x8f8f8f8f,
0x92929292, 0x9d9d9d9d, 0x38383838, 0xf5f5f5f5,
0xbcbcbcbc, 0xb6b6b6b6, 0xdadadada, 0x21212121,
0x10101010, 0xffffffff, 0xf3f3f3f3, 0xd2d2d2d2,
0xcdcdcdcd, 0x0c0c0c0c, 0x13131313, 0xecececec,
0x5f5f5f5f, 0x97979797, 0x44444444, 0x17171717,
0xc4c4c4c4, 0xa7a7a7a7, 0x7e7e7e7e, 0x3d3d3d3d,
0x64646464, 0x5d5d5d5d, 0x19191919, 0x73737373,
0x60606060, 0x81818181, 0x4f4f4f4f, 0xdcdcdcdc,
0x22222222, 0x2a2a2a2a, 0x90909090, 0x88888888,
0x46464646, 0xeeeeeeee, 0xb8b8b8b8, 0x14141414,
0xdededede, 0x5e5e5e5e, 0x0b0b0b0b, 0xdbdbdbdb,
0xe0e0e0e0, 0x32323232, 0x3a3a3a3a, 0x0a0a0a0a,
0x49494949, 0x06060606, 0x24242424, 0x5c5c5c5c,
0xc2c2c2c2, 0xd3d3d3d3, 0xacacacac, 0x62626262,
0x91919191, 0x95959595, 0xe4e4e4e4, 0x79797979,
0xe7e7e7e7, 0xc8c8c8c8, 0x37373737, 0x6d6d6d6d,
0x8d8d8d8d, 0xd5d5d5d5, 0x4e4e4e4e, 0xa9a9a9a9,
0x6c6c6c6c, 0x56565656, 0xf4f4f4f4, 0xeaeaeaea,
0x65656565, 0x7a7a7a7a, 0xaeaeaeae, 0x08080808,
0xbabababa, 0x78787878, 0x25252525, 0x2e2e2e2e,
0x1c1c1c1c, 0xa6a6a6a6, 0xb4b4b4b4, 0xc6c6c6c6,
0xe8e8e8e8, 0xdddddddd, 0x74747474, 0x1f1f1f1f,
0x4b4b4b4b, 0xbdbdbdbd, 0x8b8b8b8b, 0x8a8a8a8a,
0x70707070, 0x3e3e3e3e, 0xb5b5b5b5, 0x66666666,
0x48484848, 0x03030303, 0xf6f6f6f6, 0x0e0e0e0e,
0x61616161, 0x35353535, 0x57575757, 0xb9b9b9b9,
0x86868686, 0xc1c1c1c1, 0x1d1d1d1d, 0x9e9e9e9e,
0xe1e1e1e1, 0xf8f8f8f8, 0x98989898, 0x11111111,
0x69696969, 0xd9d9d9d9, 0x8e8e8e8e, 0x94949494,
0x9b9b9b9b, 0x1e1e1e1e, 0x87878787, 0xe9e9e9e9,
0xcececece, 0x55555555, 0x28282828, 0xdfdfdfdf,
0x8c8c8c8c, 0xa1a1a1a1, 0x89898989, 0x0d0d0d0d,
0xbfbfbfbf, 0xe6e6e6e6, 0x42424242, 0x68686868,
0x41414141, 0x99999999, 0x2d2d2d2d, 0x0f0f0f0f,
0xb0b0b0b0, 0x54545454, 0xbbbbbbbb, 0x16161616,
]
Td0 = [
0x51f4a750, 0x7e416553, 0x1a17a4c3, 0x3a275e96,
0x3bab6bcb, 0x1f9d45f1, 0xacfa58ab, 0x4be30393,
0x2030fa55, 0xad766df6, 0x88cc7691, 0xf5024c25,
0x4fe5d7fc, 0xc52acbd7, 0x26354480, 0xb562a38f,
0xdeb15a49, 0x25ba1b67, 0x45ea0e98, 0x5dfec0e1,
0xc32f7502, 0x814cf012, 0x8d4697a3, 0x6bd3f9c6,
0x038f5fe7, 0x15929c95, 0xbf6d7aeb, 0x955259da,
0xd4be832d, 0x587421d3, 0x49e06929, 0x8ec9c844,
0x75c2896a, 0xf48e7978, 0x99583e6b, 0x27b971dd,
0xbee14fb6, 0xf088ad17, 0xc920ac66, 0x7dce3ab4,
0x63df4a18, 0xe51a3182, 0x97513360, 0x62537f45,
0xb16477e0, 0xbb6bae84, 0xfe81a01c, 0xf9082b94,
0x70486858, 0x8f45fd19, 0x94de6c87, 0x527bf8b7,
0xab73d323, 0x724b02e2, 0xe31f8f57, 0x6655ab2a,
0xb2eb2807, 0x2fb5c203, 0x86c57b9a, 0xd33708a5,
0x302887f2, 0x23bfa5b2, 0x02036aba, 0xed16825c,
0x8acf1c2b, 0xa779b492, 0xf307f2f0, 0x4e69e2a1,
0x65daf4cd, 0x0605bed5, 0xd134621f, 0xc4a6fe8a,
0x342e539d, 0xa2f355a0, 0x058ae132, 0xa4f6eb75,
0x0b83ec39, 0x4060efaa, 0x5e719f06, 0xbd6e1051,
0x3e218af9, 0x96dd063d, 0xdd3e05ae, 0x4de6bd46,
0x91548db5, 0x71c45d05, 0x0406d46f, 0x605015ff,
0x1998fb24, 0xd6bde997, 0x894043cc, 0x67d99e77,
0xb0e842bd, 0x07898b88, 0xe7195b38, 0x79c8eedb,
0xa17c0a47, 0x7c420fe9, 0xf8841ec9, 0x00000000,
0x09808683, 0x322bed48, 0x1e1170ac, 0x6c5a724e,
0xfd0efffb, 0x0f853856, 0x3daed51e, 0x362d3927,
0x0a0fd964, 0x685ca621, 0x9b5b54d1, 0x24362e3a,
0x0c0a67b1, 0x9357e70f, 0xb4ee96d2, 0x1b9b919e,
0x80c0c54f, 0x61dc20a2, 0x5a774b69, 0x1c121a16,
0xe293ba0a, 0xc0a02ae5, 0x3c22e043, 0x121b171d,
0x0e090d0b, 0xf28bc7ad, 0x2db6a8b9, 0x141ea9c8,
0x57f11985, 0xaf75074c, 0xee99ddbb, 0xa37f60fd,
0xf701269f, 0x5c72f5bc, 0x44663bc5, 0x5bfb7e34,
0x8b432976, 0xcb23c6dc, 0xb6edfc68, 0xb8e4f163,
0xd731dcca, 0x42638510, 0x13972240, 0x84c61120,
0x854a247d, 0xd2bb3df8, 0xaef93211, 0xc729a16d,
0x1d9e2f4b, 0xdcb230f3, 0x0d8652ec, 0x77c1e3d0,
0x2bb3166c, 0xa970b999, 0x119448fa, 0x47e96422,
0xa8fc8cc4, 0xa0f03f1a, 0x567d2cd8, 0x223390ef,
0x87494ec7, 0xd938d1c1, 0x8ccaa2fe, 0x98d40b36,
0xa6f581cf, 0xa57ade28, 0xdab78e26, 0x3fadbfa4,
0x2c3a9de4, 0x5078920d, 0x6a5fcc9b, 0x547e4662,
0xf68d13c2, 0x90d8b8e8, 0x2e39f75e, 0x82c3aff5,
0x9f5d80be, 0x69d0937c, 0x6fd52da9, 0xcf2512b3,
0xc8ac993b, 0x10187da7, 0xe89c636e, 0xdb3bbb7b,
0xcd267809, 0x6e5918f4, 0xec9ab701, 0x834f9aa8,
0xe6956e65, 0xaaffe67e, 0x21bccf08, 0xef15e8e6,
0xbae79bd9, 0x4a6f36ce, 0xea9f09d4, 0x29b07cd6,
0x31a4b2af, 0x2a3f2331, 0xc6a59430, 0x35a266c0,
0x744ebc37, 0xfc82caa6, 0xe090d0b0, 0x33a7d815,
0xf104984a, 0x41ecdaf7, 0x7fcd500e, 0x1791f62f,
0x764dd68d, 0x43efb04d, 0xccaa4d54, 0xe49604df,
0x9ed1b5e3, 0x4c6a881b, 0xc12c1fb8, 0x4665517f,
0x9d5eea04, 0x018c355d, 0xfa877473, 0xfb0b412e,
0xb3671d5a, 0x92dbd252, 0xe9105633, 0x6dd64713,
0x9ad7618c, 0x37a10c7a, 0x59f8148e, 0xeb133c89,
0xcea927ee, 0xb761c935, 0xe11ce5ed, 0x7a47b13c,
0x9cd2df59, 0x55f2733f, 0x1814ce79, 0x73c737bf,
0x53f7cdea, 0x5ffdaa5b, 0xdf3d6f14, 0x7844db86,
0xcaaff381, 0xb968c43e, 0x3824342c, 0xc2a3405f,
0x161dc372, 0xbce2250c, 0x283c498b, 0xff0d9541,
0x39a80171, 0x080cb3de, 0xd8b4e49c, 0x6456c190,
0x7bcb8461, 0xd532b670, 0x486c5c74, 0xd0b85742,
]
Td1 = [
0x5051f4a7, 0x537e4165, 0xc31a17a4, 0x963a275e,
0xcb3bab6b, 0xf11f9d45, 0xabacfa58, 0x934be303,
0x552030fa, 0xf6ad766d, 0x9188cc76, 0x25f5024c,
0xfc4fe5d7, 0xd7c52acb, 0x80263544, 0x8fb562a3,
0x49deb15a, 0x6725ba1b, 0x9845ea0e, 0xe15dfec0,
0x02c32f75, 0x12814cf0, 0xa38d4697, 0xc66bd3f9,
0xe7038f5f, 0x9515929c, 0xebbf6d7a, 0xda955259,
0x2dd4be83, 0xd3587421, 0x2949e069, 0x448ec9c8,
0x6a75c289, 0x78f48e79, 0x6b99583e, 0xdd27b971,
0xb6bee14f, 0x17f088ad, 0x66c920ac, 0xb47dce3a,
0x1863df4a, 0x82e51a31, 0x60975133, 0x4562537f,
0xe0b16477, 0x84bb6bae, 0x1cfe81a0, 0x94f9082b,
0x58704868, 0x198f45fd, 0x8794de6c, 0xb7527bf8,
0x23ab73d3, 0xe2724b02, 0x57e31f8f, 0x2a6655ab,
0x07b2eb28, 0x032fb5c2, 0x9a86c57b, 0xa5d33708,
0xf2302887, 0xb223bfa5, 0xba02036a, 0x5ced1682,
0x2b8acf1c, 0x92a779b4, 0xf0f307f2, 0xa14e69e2,
0xcd65daf4, 0xd50605be, 0x1fd13462, 0x8ac4a6fe,
0x9d342e53, 0xa0a2f355, 0x32058ae1, 0x75a4f6eb,
0x390b83ec, 0xaa4060ef, 0x065e719f, 0x51bd6e10,
0xf93e218a, 0x3d96dd06, 0xaedd3e05, 0x464de6bd,
0xb591548d, 0x0571c45d, 0x6f0406d4, 0xff605015,
0x241998fb, 0x97d6bde9, 0xcc894043, 0x7767d99e,
0xbdb0e842, 0x8807898b, 0x38e7195b, 0xdb79c8ee,
0x47a17c0a, 0xe97c420f, 0xc9f8841e, 0x00000000,
0x83098086, 0x48322bed, 0xac1e1170, 0x4e6c5a72,
0xfbfd0eff, 0x560f8538, 0x1e3daed5, 0x27362d39,
0x640a0fd9, 0x21685ca6, 0xd19b5b54, 0x3a24362e,
0xb10c0a67, 0x0f9357e7, 0xd2b4ee96, 0x9e1b9b91,
0x4f80c0c5, 0xa261dc20, 0x695a774b, 0x161c121a,
0x0ae293ba, 0xe5c0a02a, 0x433c22e0, 0x1d121b17,
0x0b0e090d, 0xadf28bc7, 0xb92db6a8, 0xc8141ea9,
0x8557f119, 0x4caf7507, 0xbbee99dd, 0xfda37f60,
0x9ff70126, 0xbc5c72f5, 0xc544663b, 0x345bfb7e,
0x768b4329, 0xdccb23c6, 0x68b6edfc, 0x63b8e4f1,
0xcad731dc, 0x10426385, 0x40139722, 0x2084c611,
0x7d854a24, 0xf8d2bb3d, 0x11aef932, 0x6dc729a1,
0x4b1d9e2f, 0xf3dcb230, 0xec0d8652, 0xd077c1e3,
0x6c2bb316, 0x99a970b9, 0xfa119448, 0x2247e964,
0xc4a8fc8c, 0x1aa0f03f, 0xd8567d2c, 0xef223390,
0xc787494e, 0xc1d938d1, 0xfe8ccaa2, 0x3698d40b,
0xcfa6f581, 0x28a57ade, 0x26dab78e, 0xa43fadbf,
0xe42c3a9d, 0x0d507892, 0x9b6a5fcc, 0x62547e46,
0xc2f68d13, 0xe890d8b8, 0x5e2e39f7, 0xf582c3af,
0xbe9f5d80, 0x7c69d093, 0xa96fd52d, 0xb3cf2512,
0x3bc8ac99, 0xa710187d, 0x6ee89c63, 0x7bdb3bbb,
0x09cd2678, 0xf46e5918, 0x01ec9ab7, 0xa8834f9a,
0x65e6956e, 0x7eaaffe6, 0x0821bccf, 0xe6ef15e8,
0xd9bae79b, 0xce4a6f36, 0xd4ea9f09, 0xd629b07c,
0xaf31a4b2, 0x312a3f23, 0x30c6a594, 0xc035a266,
0x37744ebc, 0xa6fc82ca, 0xb0e090d0, 0x1533a7d8,
0x4af10498, 0xf741ecda, 0x0e7fcd50, 0x2f1791f6,
0x8d764dd6, 0x4d43efb0, 0x54ccaa4d, 0xdfe49604,
0xe39ed1b5, 0x1b4c6a88, 0xb8c12c1f, 0x7f466551,
0x049d5eea, 0x5d018c35, 0x73fa8774, 0x2efb0b41,
0x5ab3671d, 0x5292dbd2, 0x33e91056, 0x136dd647,
0x8c9ad761, 0x7a37a10c, 0x8e59f814, 0x89eb133c,
0xeecea927, 0x35b761c9, 0xede11ce5, 0x3c7a47b1,
0x599cd2df, 0x3f55f273, 0x791814ce, 0xbf73c737,
0xea53f7cd, 0x5b5ffdaa, 0x14df3d6f, 0x867844db,
0x81caaff3, 0x3eb968c4, 0x2c382434, 0x5fc2a340,
0x72161dc3, 0x0cbce225, 0x8b283c49, 0x41ff0d95,
0x7139a801, 0xde080cb3, 0x9cd8b4e4, 0x906456c1,
0x617bcb84, 0x70d532b6, 0x74486c5c, 0x42d0b857,
]
Td2 = [
0xa75051f4, 0x65537e41, 0xa4c31a17, 0x5e963a27,
0x6bcb3bab, 0x45f11f9d, 0x58abacfa, 0x03934be3,
0xfa552030, 0x6df6ad76, 0x769188cc, 0x4c25f502,
0xd7fc4fe5, 0xcbd7c52a, 0x44802635, 0xa38fb562,
0x5a49deb1, 0x1b6725ba, 0x0e9845ea, 0xc0e15dfe,
0x7502c32f, 0xf012814c, 0x97a38d46, 0xf9c66bd3,
0x5fe7038f, 0x9c951592, 0x7aebbf6d, 0x59da9552,
0x832dd4be, 0x21d35874, 0x692949e0, 0xc8448ec9,
0x896a75c2, 0x7978f48e, 0x3e6b9958, 0x71dd27b9,
0x4fb6bee1, 0xad17f088, 0xac66c920, 0x3ab47dce,
0x4a1863df, 0x3182e51a, 0x33609751, 0x7f456253,
0x77e0b164, 0xae84bb6b, 0xa01cfe81, 0x2b94f908,
0x68587048, 0xfd198f45, 0x6c8794de, 0xf8b7527b,
0xd323ab73, 0x02e2724b, 0x8f57e31f, 0xab2a6655,
0x2807b2eb, 0xc2032fb5, 0x7b9a86c5, 0x08a5d337,
0x87f23028, 0xa5b223bf, 0x6aba0203, 0x825ced16,
0x1c2b8acf, 0xb492a779, 0xf2f0f307, 0xe2a14e69,
0xf4cd65da, 0xbed50605, 0x621fd134, 0xfe8ac4a6,
0x539d342e, 0x55a0a2f3, 0xe132058a, 0xeb75a4f6,
0xec390b83, 0xefaa4060, 0x9f065e71, 0x1051bd6e,
0x8af93e21, 0x063d96dd, 0x05aedd3e, 0xbd464de6,
0x8db59154, 0x5d0571c4, 0xd46f0406, 0x15ff6050,
0xfb241998, 0xe997d6bd, 0x43cc8940, 0x9e7767d9,
0x42bdb0e8, 0x8b880789, 0x5b38e719, 0xeedb79c8,
0x0a47a17c, 0x0fe97c42, 0x1ec9f884, 0x00000000,
0x86830980, 0xed48322b, 0x70ac1e11, 0x724e6c5a,
0xfffbfd0e, 0x38560f85, 0xd51e3dae, 0x3927362d,
0xd9640a0f, 0xa621685c, 0x54d19b5b, 0x2e3a2436,
0x67b10c0a, 0xe70f9357, 0x96d2b4ee, 0x919e1b9b,
0xc54f80c0, 0x20a261dc, 0x4b695a77, 0x1a161c12,
0xba0ae293, 0x2ae5c0a0, 0xe0433c22, 0x171d121b,
0x0d0b0e09, 0xc7adf28b, 0xa8b92db6, 0xa9c8141e,
0x198557f1, 0x074caf75, 0xddbbee99, 0x60fda37f,
0x269ff701, 0xf5bc5c72, 0x3bc54466, 0x7e345bfb,
0x29768b43, 0xc6dccb23, 0xfc68b6ed, 0xf163b8e4,
0xdccad731, 0x85104263, 0x22401397, 0x112084c6,
0x247d854a, 0x3df8d2bb, 0x3211aef9, 0xa16dc729,
0x2f4b1d9e, 0x30f3dcb2, 0x52ec0d86, 0xe3d077c1,
0x166c2bb3, 0xb999a970, 0x48fa1194, 0x642247e9,
0x8cc4a8fc, 0x3f1aa0f0, 0x2cd8567d, 0x90ef2233,
0x4ec78749, 0xd1c1d938, 0xa2fe8cca, 0x0b3698d4,
0x81cfa6f5, 0xde28a57a, 0x8e26dab7, 0xbfa43fad,
0x9de42c3a, 0x920d5078, 0xcc9b6a5f, 0x4662547e,
0x13c2f68d, 0xb8e890d8, 0xf75e2e39, 0xaff582c3,
0x80be9f5d, 0x937c69d0, 0x2da96fd5, 0x12b3cf25,
0x993bc8ac, 0x7da71018, 0x636ee89c, 0xbb7bdb3b,
0x7809cd26, 0x18f46e59, 0xb701ec9a, 0x9aa8834f,
0x6e65e695, 0xe67eaaff, 0xcf0821bc, 0xe8e6ef15,
0x9bd9bae7, 0x36ce4a6f, 0x09d4ea9f, 0x7cd629b0,
0xb2af31a4, 0x23312a3f, 0x9430c6a5, 0x66c035a2,
0xbc37744e, 0xcaa6fc82, 0xd0b0e090, 0xd81533a7,
0x984af104, 0xdaf741ec, 0x500e7fcd, 0xf62f1791,
0xd68d764d, 0xb04d43ef, 0x4d54ccaa, 0x04dfe496,
0xb5e39ed1, 0x881b4c6a, 0x1fb8c12c, 0x517f4665,
0xea049d5e, 0x355d018c, 0x7473fa87, 0x412efb0b,
0x1d5ab367, 0xd25292db, 0x5633e910, 0x47136dd6,
0x618c9ad7, 0x0c7a37a1, 0x148e59f8, 0x3c89eb13,
0x27eecea9, 0xc935b761, 0xe5ede11c, 0xb13c7a47,
0xdf599cd2, 0x733f55f2, 0xce791814, 0x37bf73c7,
0xcdea53f7, 0xaa5b5ffd, 0x6f14df3d, 0xdb867844,
0xf381caaf, 0xc43eb968, 0x342c3824, 0x405fc2a3,
0xc372161d, 0x250cbce2, 0x498b283c, 0x9541ff0d,
0x017139a8, 0xb3de080c, 0xe49cd8b4, 0xc1906456,
0x84617bcb, 0xb670d532, 0x5c74486c, 0x5742d0b8,
]
Td3 = [
0xf4a75051, 0x4165537e, 0x17a4c31a, 0x275e963a,
0xab6bcb3b, 0x9d45f11f, 0xfa58abac, 0xe303934b,
0x30fa5520, 0x766df6ad, 0xcc769188, 0x024c25f5,
0xe5d7fc4f, 0x2acbd7c5, 0x35448026, 0x62a38fb5,
0xb15a49de, 0xba1b6725, 0xea0e9845, 0xfec0e15d,
0x2f7502c3, 0x4cf01281, 0x4697a38d, 0xd3f9c66b,
0x8f5fe703, 0x929c9515, 0x6d7aebbf, 0x5259da95,
0xbe832dd4, 0x7421d358, 0xe0692949, 0xc9c8448e,
0xc2896a75, 0x8e7978f4, 0x583e6b99, 0xb971dd27,
0xe14fb6be, 0x88ad17f0, 0x20ac66c9, 0xce3ab47d,
0xdf4a1863, 0x1a3182e5, 0x51336097, 0x537f4562,
0x6477e0b1, 0x6bae84bb, 0x81a01cfe, 0x082b94f9,
0x48685870, 0x45fd198f, 0xde6c8794, 0x7bf8b752,
0x73d323ab, 0x4b02e272, 0x1f8f57e3, 0x55ab2a66,
0xeb2807b2, 0xb5c2032f, 0xc57b9a86, 0x3708a5d3,
0x2887f230, 0xbfa5b223, 0x036aba02, 0x16825ced,
0xcf1c2b8a, 0x79b492a7, 0x07f2f0f3, 0x69e2a14e,
0xdaf4cd65, 0x05bed506, 0x34621fd1, 0xa6fe8ac4,
0x2e539d34, 0xf355a0a2, 0x8ae13205, 0xf6eb75a4,
0x83ec390b, 0x60efaa40, 0x719f065e, 0x6e1051bd,
0x218af93e, 0xdd063d96, 0x3e05aedd, 0xe6bd464d,
0x548db591, 0xc45d0571, 0x06d46f04, 0x5015ff60,
0x98fb2419, 0xbde997d6, 0x4043cc89, 0xd99e7767,
0xe842bdb0, 0x898b8807, 0x195b38e7, 0xc8eedb79,
0x7c0a47a1, 0x420fe97c, 0x841ec9f8, 0x00000000,
0x80868309, 0x2bed4832, 0x1170ac1e, 0x5a724e6c,
0x0efffbfd, 0x8538560f, 0xaed51e3d, 0x2d392736,
0x0fd9640a, 0x5ca62168, 0x5b54d19b, 0x362e3a24,
0x0a67b10c, 0x57e70f93, 0xee96d2b4, 0x9b919e1b,
0xc0c54f80, 0xdc20a261, 0x774b695a, 0x121a161c,
0x93ba0ae2, 0xa02ae5c0, 0x22e0433c, 0x1b171d12,
0x090d0b0e, 0x8bc7adf2, 0xb6a8b92d, 0x1ea9c814,
0xf1198557, 0x75074caf, 0x99ddbbee, 0x7f60fda3,
0x01269ff7, 0x72f5bc5c, 0x663bc544, 0xfb7e345b,
0x4329768b, 0x23c6dccb, 0xedfc68b6, 0xe4f163b8,
0x31dccad7, 0x63851042, 0x97224013, 0xc6112084,
0x4a247d85, 0xbb3df8d2, 0xf93211ae, 0x29a16dc7,
0x9e2f4b1d, 0xb230f3dc, 0x8652ec0d, 0xc1e3d077,
0xb3166c2b, 0x70b999a9, 0x9448fa11, 0xe9642247,
0xfc8cc4a8, 0xf03f1aa0, 0x7d2cd856, 0x3390ef22,
0x494ec787, 0x38d1c1d9, 0xcaa2fe8c, 0xd40b3698,
0xf581cfa6, 0x7ade28a5, 0xb78e26da, 0xadbfa43f,
0x3a9de42c, 0x78920d50, 0x5fcc9b6a, 0x7e466254,
0x8d13c2f6, 0xd8b8e890, 0x39f75e2e, 0xc3aff582,
0x5d80be9f, 0xd0937c69, 0xd52da96f, 0x2512b3cf,
0xac993bc8, 0x187da710, 0x9c636ee8, 0x3bbb7bdb,
0x267809cd, 0x5918f46e, 0x9ab701ec, 0x4f9aa883,
0x956e65e6, 0xffe67eaa, 0xbccf0821, 0x15e8e6ef,
0xe79bd9ba, 0x6f36ce4a, 0x9f09d4ea, 0xb07cd629,
0xa4b2af31, 0x3f23312a, 0xa59430c6, 0xa266c035,
0x4ebc3774, 0x82caa6fc, 0x90d0b0e0, 0xa7d81533,
0x04984af1, 0xecdaf741, 0xcd500e7f, 0x91f62f17,
0x4dd68d76, 0xefb04d43, 0xaa4d54cc, 0x9604dfe4,
0xd1b5e39e, 0x6a881b4c, 0x2c1fb8c1, 0x65517f46,
0x5eea049d, 0x8c355d01, 0x877473fa, 0x0b412efb,
0x671d5ab3, 0xdbd25292, 0x105633e9, 0xd647136d,
0xd7618c9a, 0xa10c7a37, 0xf8148e59, 0x133c89eb,
0xa927eece, 0x61c935b7, 0x1ce5ede1, 0x47b13c7a,
0xd2df599c, 0xf2733f55, 0x14ce7918, 0xc737bf73,
0xf7cdea53, 0xfdaa5b5f, 0x3d6f14df, 0x44db8678,
0xaff381ca, 0x68c43eb9, 0x24342c38, 0xa3405fc2,
0x1dc37216, 0xe2250cbc, 0x3c498b28, 0x0d9541ff,
0xa8017139, 0x0cb3de08, 0xb4e49cd8, 0x56c19064,
0xcb84617b, 0x32b670d5, 0x6c5c7448, 0xb85742d0,
]
Td4 = [
0x52525252, 0x09090909, 0x6a6a6a6a, 0xd5d5d5d5,
0x30303030, 0x36363636, 0xa5a5a5a5, 0x38383838,
0xbfbfbfbf, 0x40404040, 0xa3a3a3a3, 0x9e9e9e9e,
0x81818181, 0xf3f3f3f3, 0xd7d7d7d7, 0xfbfbfbfb,
0x7c7c7c7c, 0xe3e3e3e3, 0x39393939, 0x82828282,
0x9b9b9b9b, 0x2f2f2f2f, 0xffffffff, 0x87878787,
0x34343434, 0x8e8e8e8e, 0x43434343, 0x44444444,
0xc4c4c4c4, 0xdededede, 0xe9e9e9e9, 0xcbcbcbcb,
0x54545454, 0x7b7b7b7b, 0x94949494, 0x32323232,
0xa6a6a6a6, 0xc2c2c2c2, 0x23232323, 0x3d3d3d3d,
0xeeeeeeee, 0x4c4c4c4c, 0x95959595, 0x0b0b0b0b,
0x42424242, 0xfafafafa, 0xc3c3c3c3, 0x4e4e4e4e,
0x08080808, 0x2e2e2e2e, 0xa1a1a1a1, 0x66666666,
0x28282828, 0xd9d9d9d9, 0x24242424, 0xb2b2b2b2,
0x76767676, 0x5b5b5b5b, 0xa2a2a2a2, 0x49494949,
0x6d6d6d6d, 0x8b8b8b8b, 0xd1d1d1d1, 0x25252525,
0x72727272, 0xf8f8f8f8, 0xf6f6f6f6, 0x64646464,
0x86868686, 0x68686868, 0x98989898, 0x16161616,
0xd4d4d4d4, 0xa4a4a4a4, 0x5c5c5c5c, 0xcccccccc,
0x5d5d5d5d, 0x65656565, 0xb6b6b6b6, 0x92929292,
0x6c6c6c6c, 0x70707070, 0x48484848, 0x50505050,
0xfdfdfdfd, 0xedededed, 0xb9b9b9b9, 0xdadadada,
0x5e5e5e5e, 0x15151515, 0x46464646, 0x57575757,
0xa7a7a7a7, 0x8d8d8d8d, 0x9d9d9d9d, 0x84848484,
0x90909090, 0xd8d8d8d8, 0xabababab, 0x00000000,
0x8c8c8c8c, 0xbcbcbcbc, 0xd3d3d3d3, 0x0a0a0a0a,
0xf7f7f7f7, 0xe4e4e4e4, 0x58585858, 0x05050505,
0xb8b8b8b8, 0xb3b3b3b3, 0x45454545, 0x06060606,
0xd0d0d0d0, 0x2c2c2c2c, 0x1e1e1e1e, 0x8f8f8f8f,
0xcacacaca, 0x3f3f3f3f, 0x0f0f0f0f, 0x02020202,
0xc1c1c1c1, 0xafafafaf, 0xbdbdbdbd, 0x03030303,
0x01010101, 0x13131313, 0x8a8a8a8a, 0x6b6b6b6b,
0x3a3a3a3a, 0x91919191, 0x11111111, 0x41414141,
0x4f4f4f4f, 0x67676767, 0xdcdcdcdc, 0xeaeaeaea,
0x97979797, 0xf2f2f2f2, 0xcfcfcfcf, 0xcececece,
0xf0f0f0f0, 0xb4b4b4b4, 0xe6e6e6e6, 0x73737373,
0x96969696, 0xacacacac, 0x74747474, 0x22222222,
0xe7e7e7e7, 0xadadadad, 0x35353535, 0x85858585,
0xe2e2e2e2, 0xf9f9f9f9, 0x37373737, 0xe8e8e8e8,
0x1c1c1c1c, 0x75757575, 0xdfdfdfdf, 0x6e6e6e6e,
0x47474747, 0xf1f1f1f1, 0x1a1a1a1a, 0x71717171,
0x1d1d1d1d, 0x29292929, 0xc5c5c5c5, 0x89898989,
0x6f6f6f6f, 0xb7b7b7b7, 0x62626262, 0x0e0e0e0e,
0xaaaaaaaa, 0x18181818, 0xbebebebe, 0x1b1b1b1b,
0xfcfcfcfc, 0x56565656, 0x3e3e3e3e, 0x4b4b4b4b,
0xc6c6c6c6, 0xd2d2d2d2, 0x79797979, 0x20202020,
0x9a9a9a9a, 0xdbdbdbdb, 0xc0c0c0c0, 0xfefefefe,
0x78787878, 0xcdcdcdcd, 0x5a5a5a5a, 0xf4f4f4f4,
0x1f1f1f1f, 0xdddddddd, 0xa8a8a8a8, 0x33333333,
0x88888888, 0x07070707, 0xc7c7c7c7, 0x31313131,
0xb1b1b1b1, 0x12121212, 0x10101010, 0x59595959,
0x27272727, 0x80808080, 0xecececec, 0x5f5f5f5f,
0x60606060, 0x51515151, 0x7f7f7f7f, 0xa9a9a9a9,
0x19191919, 0xb5b5b5b5, 0x4a4a4a4a, 0x0d0d0d0d,
0x2d2d2d2d, 0xe5e5e5e5, 0x7a7a7a7a, 0x9f9f9f9f,
0x93939393, 0xc9c9c9c9, 0x9c9c9c9c, 0xefefefef,
0xa0a0a0a0, 0xe0e0e0e0, 0x3b3b3b3b, 0x4d4d4d4d,
0xaeaeaeae, 0x2a2a2a2a, 0xf5f5f5f5, 0xb0b0b0b0,
0xc8c8c8c8, 0xebebebeb, 0xbbbbbbbb, 0x3c3c3c3c,
0x83838383, 0x53535353, 0x99999999, 0x61616161,
0x17171717, 0x2b2b2b2b, 0x04040404, 0x7e7e7e7e,
0xbabababa, 0x77777777, 0xd6d6d6d6, 0x26262626,
0xe1e1e1e1, 0x69696969, 0x14141414, 0x63636363,
0x55555555, 0x21212121, 0x0c0c0c0c, 0x7d7d7d7d,
]
rcon = [
0x01000000, 0x02000000, 0x04000000, 0x08000000,
0x10000000, 0x20000000, 0x40000000, 0x80000000,
0x1B000000, 0x36000000,
# For 128-bit blocks, Rijndael never uses more than 10 rcon values
]
# With the '>' (big-endian, standard-size) prefix, 'L' and 'I' are both packed
# as exactly 4 bytes on every platform, so a single pair of helpers suffices
# for 32-bit and 64-bit builds alike.
def GETU32(x): return struct.unpack('>I', x)[0]
def PUTU32(x): return struct.pack('>I', x)
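# Quick sanity sketch for the helpers above (illustrative only, not part of
# the original module): they convert between 4-byte big-endian strings and
# 32-bit integers.
#
#   assert GETU32(b'\x00\x00\x01\x00') == 256
#   assert PUTU32(256) == b'\x00\x00\x01\x00'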
# Expand the cipher key into the encryption key schedule.
#
# @return the number of rounds for the given cipher key size.
def rijndaelSetupEncrypt(key, keybits):
i = p = 0
rk = [0]*RKLENGTH(keybits)
rk[0] = GETU32(key[0:4])
rk[1] = GETU32(key[4:8])
rk[2] = GETU32(key[8:12])
rk[3] = GETU32(key[12:16])
if keybits == 128:
while 1:
temp = rk[p+3]
rk[p+4] = (rk[p+0] ^
(Te4[(temp >> 16) & 0xff] & 0xff000000) ^
(Te4[(temp >> 8) & 0xff] & 0x00ff0000) ^
(Te4[(temp ) & 0xff] & 0x0000ff00) ^
(Te4[(temp >> 24) ] & 0x000000ff) ^
rcon[i])
rk[p+5] = rk[p+1] ^ rk[p+4]
rk[p+6] = rk[p+2] ^ rk[p+5]
rk[p+7] = rk[p+3] ^ rk[p+6]
i += 1
if i == 10: return (rk, 10)
p += 4
rk[4] = GETU32(key[16:20])
rk[5] = GETU32(key[20:24])
if keybits == 192:
while 1:
temp = rk[p+5]
rk[p+6] = (rk[p+0] ^
(Te4[(temp >> 16) & 0xff] & 0xff000000) ^
(Te4[(temp >> 8) & 0xff] & 0x00ff0000) ^
(Te4[(temp ) & 0xff] & 0x0000ff00) ^
(Te4[(temp >> 24) ] & 0x000000ff) ^
rcon[i])
rk[p+7] = rk[p+1] ^ rk[p+6]
rk[p+8] = rk[p+2] ^ rk[p+7]
rk[p+9] = rk[p+3] ^ rk[p+8]
i += 1
if i == 8: return (rk, 12)
rk[p+10] = rk[p+4] ^ rk[p+9]
rk[p+11] = rk[p+5] ^ rk[p+10]
p += 6
rk[6] = GETU32(key[24:28])
rk[7] = GETU32(key[28:32])
if keybits == 256:
while 1:
temp = rk[p+7]
rk[p+8] = (rk[p+0] ^
(Te4[(temp >> 16) & 0xff] & 0xff000000) ^
(Te4[(temp >> 8) & 0xff] & 0x00ff0000) ^
(Te4[(temp ) & 0xff] & 0x0000ff00) ^
(Te4[(temp >> 24) ] & 0x000000ff) ^
rcon[i])
rk[p+9] = rk[p+1] ^ rk[p+8]
rk[p+10] = rk[p+2] ^ rk[p+9]
rk[p+11] = rk[p+3] ^ rk[p+10]
i += 1
if i == 7: return (rk, 14)
temp = rk[p+11]
rk[p+12] = (rk[p+4] ^
(Te4[(temp >> 24) ] & 0xff000000) ^
(Te4[(temp >> 16) & 0xff] & 0x00ff0000) ^
(Te4[(temp >> 8) & 0xff] & 0x0000ff00) ^
(Te4[(temp ) & 0xff] & 0x000000ff))
rk[p+13] = rk[p+5] ^ rk[p+12]
rk[p+14] = rk[p+6] ^ rk[p+13]
rk[p+15] = rk[p+7] ^ rk[p+14]
p += 8
raise ValueError(keybits)
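# Illustrative sketch (not part of the original module): the schedule size and
# round count depend only on the key size -- 10/12/14 rounds for 128/192/256
# bit keys -- and the round keys come back as a flat list of 32-bit words of
# length RKLENGTH(keybits), the helper already used above.
#
#   rk, nrounds = rijndaelSetupEncrypt(b'\x00' * 16, 128)
#   assert nrounds == 10 and len(rk) == RKLENGTH(128)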
# Expand the cipher key into the decryption key schedule.
#
# @return the number of rounds for the given cipher key size.
def rijndaelSetupDecrypt(key, keybits):
# expand the cipher key:
(rk, nrounds) = rijndaelSetupEncrypt(key, keybits)
# invert the order of the round keys:
i = 0
j = 4*nrounds
while i < j:
temp = rk[i ]; rk[i ] = rk[j ]; rk[j ] = temp
temp = rk[i + 1]; rk[i + 1] = rk[j + 1]; rk[j + 1] = temp
temp = rk[i + 2]; rk[i + 2] = rk[j + 2]; rk[j + 2] = temp
temp = rk[i + 3]; rk[i + 3] = rk[j + 3]; rk[j + 3] = temp
i += 4
j -= 4
# apply the inverse MixColumn transform to all round keys but the first and the last:
p = 0
for i in range(1, nrounds):
p += 4
rk[p+0] = (
Td0[Te4[(rk[p+0] >> 24) ] & 0xff] ^
Td1[Te4[(rk[p+0] >> 16) & 0xff] & 0xff] ^
Td2[Te4[(rk[p+0] >> 8) & 0xff] & 0xff] ^
Td3[Te4[(rk[p+0] ) & 0xff] & 0xff])
rk[p+1] = (
Td0[Te4[(rk[p+1] >> 24) ] & 0xff] ^
Td1[Te4[(rk[p+1] >> 16) & 0xff] & 0xff] ^
Td2[Te4[(rk[p+1] >> 8) & 0xff] & 0xff] ^
Td3[Te4[(rk[p+1] ) & 0xff] & 0xff])
rk[p+2] = (
Td0[Te4[(rk[p+2] >> 24) ] & 0xff] ^
Td1[Te4[(rk[p+2] >> 16) & 0xff] & 0xff] ^
Td2[Te4[(rk[p+2] >> 8) & 0xff] & 0xff] ^
Td3[Te4[(rk[p+2] ) & 0xff] & 0xff])
rk[p+3] = (
Td0[Te4[(rk[p+3] >> 24) ] & 0xff] ^
Td1[Te4[(rk[p+3] >> 16) & 0xff] & 0xff] ^
Td2[Te4[(rk[p+3] >> 8) & 0xff] & 0xff] ^
Td3[Te4[(rk[p+3] ) & 0xff] & 0xff])
return (rk, nrounds)
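# Illustrative sketch (not from the original file): the decryption schedule is
# the encryption schedule with the round-key order reversed and the inverse
# MixColumns applied, so its length and round count match the encryption
# schedule exactly.
#
#   rk_enc, n_enc = rijndaelSetupEncrypt(b'\x00' * 32, 256)
#   rk_dec, n_dec = rijndaelSetupDecrypt(b'\x00' * 32, 256)
#   assert n_enc == n_dec == 14 and len(rk_dec) == len(rk_enc)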
def rijndaelEncrypt(rk, nrounds, plaintext):
assert len(plaintext) == 16, str(len(plaintext))
# map byte array block to cipher state
# and add initial round key:
s0 = GETU32(plaintext[0:4]) ^ rk[0]
s1 = GETU32(plaintext[4:8]) ^ rk[1]
s2 = GETU32(plaintext[8:12]) ^ rk[2]
s3 = GETU32(plaintext[12:16]) ^ rk[3]
# nrounds - 1 full rounds:
r = nrounds >> 1
p = 0
while 1:
t0 = (
Te0[(s0 >> 24) ] ^
Te1[(s1 >> 16) & 0xff] ^
Te2[(s2 >> 8) & 0xff] ^
Te3[(s3 ) & 0xff] ^
rk[p+4])
t1 = (
Te0[(s1 >> 24) ] ^
Te1[(s2 >> 16) & 0xff] ^
Te2[(s3 >> 8) & 0xff] ^
Te3[(s0 ) & 0xff] ^
rk[p+5])
t2 = (
Te0[(s2 >> 24) ] ^
Te1[(s3 >> 16) & 0xff] ^
Te2[(s0 >> 8) & 0xff] ^
Te3[(s1 ) & 0xff] ^
rk[p+6])
t3 = (
Te0[(s3 >> 24) ] ^
Te1[(s0 >> 16) & 0xff] ^
Te2[(s1 >> 8) & 0xff] ^
Te3[(s2 ) & 0xff] ^
rk[p+7])
p += 8
r -= 1
if r == 0: break
s0 = (
Te0[(t0 >> 24) ] ^
Te1[(t1 >> 16) & 0xff] ^
Te2[(t2 >> 8) & 0xff] ^
Te3[(t3 ) & 0xff] ^
rk[p+0])
s1 = (
Te0[(t1 >> 24) ] ^
Te1[(t2 >> 16) & 0xff] ^
Te2[(t3 >> 8) & 0xff] ^
Te3[(t0 ) & 0xff] ^
rk[p+1])
s2 = (
Te0[(t2 >> 24) ] ^
Te1[(t3 >> 16) & 0xff] ^
Te2[(t0 >> 8) & 0xff] ^
Te3[(t1 ) & 0xff] ^
rk[p+2])
s3 = (
Te0[(t3 >> 24) ] ^
Te1[(t0 >> 16) & 0xff] ^
Te2[(t1 >> 8) & 0xff] ^
Te3[(t2 ) & 0xff] ^
rk[p+3])
ciphertext = b''
# apply last round and
# map cipher state to byte array block:
s0 = (
(Te4[(t0 >> 24) ] & 0xff000000) ^
(Te4[(t1 >> 16) & 0xff] & 0x00ff0000) ^
(Te4[(t2 >> 8) & 0xff] & 0x0000ff00) ^
(Te4[(t3 ) & 0xff] & 0x000000ff) ^
rk[p+0])
ciphertext += PUTU32(s0)
s1 = (
(Te4[(t1 >> 24) ] & 0xff000000) ^
(Te4[(t2 >> 16) & 0xff] & 0x00ff0000) ^
(Te4[(t3 >> 8) & 0xff] & 0x0000ff00) ^
(Te4[(t0 ) & 0xff] & 0x000000ff) ^
rk[p+1])
ciphertext += PUTU32(s1)
s2 = (
(Te4[(t2 >> 24) ] & 0xff000000) ^
(Te4[(t3 >> 16) & 0xff] & 0x00ff0000) ^
(Te4[(t0 >> 8) & 0xff] & 0x0000ff00) ^
(Te4[(t1 ) & 0xff] & 0x000000ff) ^
rk[p+2])
ciphertext += PUTU32(s2)
s3 = (
(Te4[(t3 >> 24) ] & 0xff000000) ^
(Te4[(t0 >> 16) & 0xff] & 0x00ff0000) ^
(Te4[(t1 >> 8) & 0xff] & 0x0000ff00) ^
(Te4[(t2 ) & 0xff] & 0x000000ff) ^
rk[p+3])
ciphertext += PUTU32(s3)
assert len(ciphertext) == 16, str(len(ciphertext))
return ciphertext
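# Illustrative known-answer sketch (the AES-128 example from FIPS-197,
# Appendix C.1); not part of the original module and written Python-3 style
# with bytes.fromhex:
#
#   key = bytes.fromhex('000102030405060708090a0b0c0d0e0f')
#   pt  = bytes.fromhex('00112233445566778899aabbccddeeff')
#   rk, nr = rijndaelSetupEncrypt(key, 128)
#   assert rijndaelEncrypt(rk, nr, pt) == \
#       bytes.fromhex('69c4e0d86a7b0430d8cdb78070b4c55a')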
def rijndaelDecrypt(rk, nrounds, ciphertext):
assert len(ciphertext) == 16, str(len(ciphertext))
# map byte array block to cipher state
# and add initial round key:
s0 = GETU32(ciphertext[0:4]) ^ rk[0]
s1 = GETU32(ciphertext[4:8]) ^ rk[1]
s2 = GETU32(ciphertext[8:12]) ^ rk[2]
s3 = GETU32(ciphertext[12:16]) ^ rk[3]
# nrounds - 1 full rounds:
r = nrounds >> 1
p = 0
while 1:
t0 = (
Td0[(s0 >> 24) ] ^
Td1[(s3 >> 16) & 0xff] ^
Td2[(s2 >> 8) & 0xff] ^
Td3[(s1 ) & 0xff] ^
rk[p+4])
t1 = (
Td0[(s1 >> 24) ] ^
Td1[(s0 >> 16) & 0xff] ^
Td2[(s3 >> 8) & 0xff] ^
Td3[(s2 ) & 0xff] ^
rk[p+5])
t2 = (
Td0[(s2 >> 24) ] ^
Td1[(s1 >> 16) & 0xff] ^
Td2[(s0 >> 8) & 0xff] ^
Td3[(s3 ) & 0xff] ^
rk[p+6])
t3 = (
Td0[(s3 >> 24) ] ^
Td1[(s2 >> 16) & 0xff] ^
Td2[(s1 >> 8) & 0xff] ^
Td3[(s0 ) & 0xff] ^
rk[p+7])
p += 8
r -= 1
if r == 0: break
s0 = (
Td0[(t0 >> 24) ] ^
Td1[(t3 >> 16) & 0xff] ^
Td2[(t2 >> 8) & 0xff] ^
Td3[(t1 ) & 0xff] ^
rk[p+0])
s1 = (
Td0[(t1 >> 24) ] ^
Td1[(t0 >> 16) & 0xff] ^
Td2[(t3 >> 8) & 0xff] ^
Td3[(t2 ) & 0xff] ^
rk[p+1])
s2 = (
Td0[(t2 >> 24) ] ^
Td1[(t1 >> 16) & 0xff] ^
Td2[(t0 >> 8) & 0xff] ^
Td3[(t3 ) & 0xff] ^
rk[p+2])
s3 = (
Td0[(t3 >> 24) ] ^
Td1[(t2 >> 16) & 0xff] ^
Td2[(t1 >> 8) & 0xff] ^
Td3[(t0 ) & 0xff] ^
rk[p+3])
plaintext = b''
# apply last round and
# map cipher state to byte array block:
s0 = (
(Td4[(t0 >> 24) ] & 0xff000000) ^
(Td4[(t3 >> 16) & 0xff] & 0x00ff0000) ^
(Td4[(t2 >> 8) & 0xff] & 0x0000ff00) ^
(Td4[(t1 ) & 0xff] & 0x000000ff) ^
rk[p+0])
plaintext += PUTU32(s0)
s1 = (
(Td4[(t1 >> 24) ] & 0xff000000) ^
(Td4[(t0 >> 16) & 0xff] & 0x00ff0000) ^
(Td4[(t3 >> 8) & 0xff] & 0x0000ff00) ^
(Td4[(t2 ) & 0xff] & 0x000000ff) ^
rk[p+1])
plaintext += PUTU32(s1)
s2 = (
(Td4[(t2 >> 24) ] & 0xff000000) ^
(Td4[(t1 >> 16) & 0xff] & 0x00ff0000) ^
(Td4[(t0 >> 8) & 0xff] & 0x0000ff00) ^
(Td4[(t3 ) & 0xff] & 0x000000ff) ^
rk[p+2])
plaintext += PUTU32(s2)
s3 = (
(Td4[(t3 >> 24) ] & 0xff000000) ^
(Td4[(t2 >> 16) & 0xff] & 0x00ff0000) ^
(Td4[(t1 >> 8) & 0xff] & 0x0000ff00) ^
(Td4[(t0 ) & 0xff] & 0x000000ff) ^
rk[p+3])
plaintext += PUTU32(s3)
assert len(plaintext) == 16, str(len(plaintext))
return plaintext
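# Illustrative round trip at the function level (sketch only, values assumed):
# encrypting with the encryption schedule and decrypting with the matching
# decryption schedule recovers the original 16-byte block.
#
#   key = b'0123456789abcdef'                       # 16-byte key
#   rk_e, nr = rijndaelSetupEncrypt(key, 128)
#   rk_d, _  = rijndaelSetupDecrypt(key, 128)
#   block = b'sixteen byte msg'                     # exactly 16 bytes
#   assert rijndaelDecrypt(rk_d, nr, rijndaelEncrypt(rk_e, nr, block)) == block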
# decrypt(key, fin, fout, keybits=256)
class RijndaelDecryptor(object):
"""
>>> key = b'00010203050607080a0b0c0d0f101112'.decode('hex')
>>> ciphertext = b'd8f532538289ef7d06b506a4fd5be9c9'.decode('hex')
>>> RijndaelDecryptor(key, 128).decrypt(ciphertext).encode('hex')
'506812a45f08c889b97f5980038b8359'
"""
def __init__(self, key, keybits=256):
assert len(key) == KEYLENGTH(keybits), str((len(key), KEYLENGTH(keybits)))
(self.rk, self.nrounds) = rijndaelSetupDecrypt(key, keybits)
assert len(self.rk) == RKLENGTH(keybits), str((len(self.rk), RKLENGTH(keybits)))
assert self.nrounds == NROUNDS(keybits), str((self.nrounds, NROUNDS(keybits)))
return
def decrypt(self, ciphertext):
assert len(ciphertext) == 16, str(len(ciphertext))
return rijndaelDecrypt(self.rk, self.nrounds, ciphertext)
# encrypt(key, fin, fout, keybits=256)
class RijndaelEncryptor(object):
def __init__(self, key, keybits=256):
assert len(key) == KEYLENGTH(keybits), str((len(key), KEYLENGTH(keybits)))
(self.rk, self.nrounds) = rijndaelSetupEncrypt(key, keybits)
assert len(self.rk) == RKLENGTH(keybits), str((len(self.rk), RKLENGTH(keybits)))
assert self.nrounds == NROUNDS(keybits), str((self.nrounds, NROUNDS(keybits)))
return
def encrypt(self, plaintext):
assert len(plaintext) == 16, str(len(plaintext))
return rijndaelEncrypt(self.rk, self.nrounds, plaintext)
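# Minimal round-trip sketch using the wrapper classes above (assumes the
# default keybits=256, i.e. a 32-byte key); illustrative only:
#
#   key = b'0123456789abcdef' * 2                   # 32 bytes
#   block = b'sixteen byte msg'                     # exactly 16 bytes
#   ct = RijndaelEncryptor(key).encrypt(block)
#   assert RijndaelDecryptor(key).decrypt(ct) == block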
|
|
#
# Copyright 2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from scapy.fields import ByteField, PacketField, IntField
from scapy.fields import ShortField, ConditionalField
from scapy.packet import Packet
from voltha.extensions.omci.omci_fields import FixedLenField
from voltha.extensions.omci.omci_messages import OmciCreate, OmciDelete, \
OmciDeleteResponse, OmciSet, OmciSetResponse, OmciGet, OmciGetResponse, \
OmciGetAllAlarms, OmciGetAllAlarmsResponse, OmciGetAllAlarmsNext, \
OmciMibResetResponse, OmciMibReset, OmciMibUploadNextResponse, \
OmciMibUploadNext, OmciMibUploadResponse, OmciMibUpload, \
OmciGetAllAlarmsNextResponse, OmciAttributeValueChange, \
OmciTestResult, OmciAlarmNotification, \
OmciReboot, OmciRebootResponse, OmciGetNext, OmciGetNextResponse, \
OmciSynchronizeTime, OmciSynchronizeTimeResponse, OmciGetCurrentData, \
OmciGetCurrentDataResponse, OmciStartSoftwareDownload, OmciStartSoftwareDownloadResponse, \
OmciDownloadSection, OmciDownloadSectionLast, OmciDownloadSectionResponse, \
OmciEndSoftwareDownload, OmciEndSoftwareDownloadResponse, \
OmciActivateImage, OmciActivateImageResponse, \
OmciCommitImage, OmciCommitImageResponse, OmciTest, OmciTestResponse
from voltha.extensions.omci.omci_messages import OmciCreateResponse
class OmciFrame(Packet):
name = "OmciFrame"
fields_desc = [
ShortField("transaction_id", 0),
ByteField("message_type", None),
ByteField("omci", 0x0a),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciCreate), align=36),
lambda pkt: pkt.message_type == OmciCreate.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciCreateResponse), align=36),
lambda pkt: pkt.message_type == OmciCreateResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciDelete), align=36),
lambda pkt: pkt.message_type == OmciDelete.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciDeleteResponse), align=36),
lambda pkt: pkt.message_type == OmciDeleteResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciSet), align=36),
lambda pkt: pkt.message_type == OmciSet.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciSetResponse), align=36),
lambda pkt: pkt.message_type == OmciSetResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciGet), align=36),
lambda pkt: pkt.message_type == OmciGet.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciGetResponse), align=36),
lambda pkt: pkt.message_type == OmciGetResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciGetAllAlarms), align=36),
lambda pkt: pkt.message_type == OmciGetAllAlarms.message_id),
ConditionalField(FixedLenField(
PacketField(
"omci_message", None, OmciGetAllAlarmsResponse), align=36),
lambda pkt:
pkt.message_type == OmciGetAllAlarmsResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciGetAllAlarmsNext), align=36),
lambda pkt: pkt.message_type == OmciGetAllAlarmsNext.message_id),
ConditionalField(FixedLenField(
PacketField(
"omci_message", None, OmciGetAllAlarmsNextResponse), align=36),
lambda pkt:
pkt.message_type == OmciGetAllAlarmsNextResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciMibUpload), align=36),
lambda pkt: pkt.message_type == OmciMibUpload.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciMibUploadResponse), align=36),
lambda pkt: pkt.message_type == OmciMibUploadResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciMibUploadNext), align=36),
lambda pkt:
pkt.message_type == OmciMibUploadNext.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciMibUploadNextResponse), align=36),
lambda pkt: pkt.message_type == OmciMibUploadNextResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciMibReset), align=36),
lambda pkt: pkt.message_type == OmciMibReset.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciMibResetResponse), align=36),
lambda pkt: pkt.message_type == OmciMibResetResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciAlarmNotification), align=36),
lambda pkt: pkt.message_type == OmciAlarmNotification.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciAttributeValueChange), align=36),
lambda pkt: pkt.message_type == OmciAttributeValueChange.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciReboot), align=36),
lambda pkt: pkt.message_type == OmciReboot.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciRebootResponse), align=36),
lambda pkt: pkt.message_type == OmciRebootResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciGetNext), align=36),
lambda pkt: pkt.message_type == OmciGetNext.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciGetNextResponse), align=36),
lambda pkt: pkt.message_type == OmciGetNextResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciSynchronizeTime), align=36),
lambda pkt: pkt.message_type == OmciSynchronizeTime.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciSynchronizeTimeResponse), align=36),
lambda pkt: pkt.message_type == OmciSynchronizeTimeResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciGetCurrentData), align=36),
lambda pkt: pkt.message_type == OmciGetCurrentData.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciGetCurrentDataResponse), align=36),
lambda pkt: pkt.message_type == OmciGetCurrentDataResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciStartSoftwareDownload), align=36),
lambda pkt: pkt.message_type == OmciStartSoftwareDownload.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciStartSoftwareDownloadResponse), align=36),
lambda pkt: pkt.message_type == OmciStartSoftwareDownloadResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciDownloadSection), align=36),
lambda pkt: pkt.message_type == OmciDownloadSection.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciDownloadSectionLast), align=36),
lambda pkt: pkt.message_type == OmciDownloadSectionLast.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciDownloadSectionResponse), align=36),
lambda pkt: pkt.message_type == OmciDownloadSectionResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciEndSoftwareDownload), align=36),
lambda pkt: pkt.message_type == OmciEndSoftwareDownload.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciEndSoftwareDownloadResponse), align=36),
lambda pkt: pkt.message_type == OmciEndSoftwareDownloadResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciActivateImage), align=36),
lambda pkt: pkt.message_type == OmciActivateImage.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciActivateImageResponse), align=36),
lambda pkt: pkt.message_type == OmciActivateImageResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciCommitImage), align=36),
lambda pkt: pkt.message_type == OmciCommitImage.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciCommitImageResponse), align=36),
lambda pkt: pkt.message_type == OmciCommitImageResponse.message_id),
# Create Frame for Omci Test.
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciTest), align=36),
lambda pkt: pkt.message_type == OmciTest.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciTestResponse), align=36),
lambda pkt: pkt.message_type == OmciTestResponse.message_id),
ConditionalField(FixedLenField(
PacketField("omci_message", None, OmciTestResult), align=36),
lambda pkt: pkt.message_type == OmciTestResult.message_id),
# TODO add entries for remaining OMCI message types
IntField("omci_trailer", 0x00000028)
]
# We needed to patch the do_dissect(...) method of Packet, because
# it wiped out already dissected conditional fields with None if they
# referred to the same field name. We marked the only new line of code
# with "Extra condition added".
def do_dissect(self, s):
raw = s
self.raw_packet_cache_fields = {}
for f in self.fields_desc:
if not s:
break
s, fval = f.getfield(self, s)
# We need to track fields with mutable values to discard
# .raw_packet_cache when needed.
if f.islist or f.holds_packets:
self.raw_packet_cache_fields[f.name] = f.do_copy(fval)
# Extra condition added
if fval is not None or f.name not in self.fields:
self.fields[f.name] = fval
assert(raw.endswith(s))
self.raw_packet_cache = raw[:-len(s)] if s else raw
self.explicit = 1
return s
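# Illustrative sketch (not from the original file): composing a frame and
# re-parsing it with scapy, which exercises the conditional omci_message
# fields above.  The OmciGet field names used here (entity_class, entity_id)
# are assumptions about voltha's omci_messages definitions, not confirmed in
# this file.
#
#   frame = OmciFrame(
#       transaction_id=0x1234,
#       message_type=OmciGet.message_id,
#       omci_message=OmciGet(entity_class=0x0101, entity_id=0))
#   raw = str(frame)            # serialize (Python 2 scapy)
#   parsed = OmciFrame(raw)     # dissect back through the fields_desc above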
|
|
#
# Overlays.py -- Overlays plugin for Ginga FITS viewer
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import numpy
from ginga import GingaPlugin, RGBImage, colors
from ginga.misc import Widgets, CanvasTypes
class Overlays(GingaPlugin.LocalPlugin):
def __init__(self, fv, fitsimage):
# superclass defines some variables for us, like logger
super(Overlays, self).__init__(fv, fitsimage)
self.layertag = 'overlays-canvas'
self.dc = fv.getDrawClasses()
canvas = self.dc.DrawingCanvas()
canvas.enable_draw(False)
canvas.setSurface(self.fitsimage)
self.canvas = canvas
self.colornames = colors.get_colors()
# TODO: there is some problem with basic "red", at least on Linux
#self.hi_color = 'red'
self.hi_color = 'palevioletred'
self.hi_value = None
self.lo_color = 'blue'
self.lo_value = None
self.opacity = 0.5
self.arrsize = None
self.rgbarr = numpy.zeros((1, 1, 4), dtype=numpy.uint8)
self.rgbobj = RGBImage.RGBImage(self.rgbarr, logger=self.logger)
self.canvas_img = None
def build_gui(self, container):
top = Widgets.VBox()
top.set_border_width(4)
vbox, sw, orientation = Widgets.get_oriented_box(container)
vbox.set_border_width(4)
vbox.set_spacing(2)
self.msgFont = self.fv.getFont("sansFont", 12)
tw = Widgets.TextArea(wrap=True, editable=False)
tw.set_font(self.msgFont)
self.tw = tw
fr = Widgets.Frame("Instructions")
vbox2 = Widgets.VBox()
vbox2.add_widget(tw)
vbox2.add_widget(Widgets.Label(''), stretch=1)
fr.set_widget(vbox2)
vbox.add_widget(fr, stretch=0)
fr = Widgets.Frame("Limits")
captions = (('Opacity:', 'label', 'Opacity', 'spinfloat'),
('Hi color:', 'label', 'Hi color', 'combobox'),
('Hi limit:', 'label', 'Hi value', 'entry'),
('Lo color:', 'label', 'Lo color', 'combobox'),
('Lo limit:', 'label', 'Lo value', 'entry'),
('Redo', 'button'))
w, b = Widgets.build_info(captions, orientation=orientation)
self.w.update(b)
b.opacity.set_decimals(2)
b.opacity.set_limits(0.0, 1.0, incr_value=0.1)
b.opacity.set_value(self.opacity)
b.opacity.add_callback('value-changed', lambda *args: self.redo())
combobox = b.hi_color
for name in self.colornames:
combobox.append_text(name)
index = self.colornames.index(self.hi_color)
combobox.set_index(index)
combobox.add_callback('activated', lambda *args: self.redo())
b.hi_value.set_length(22)
if self.hi_value is not None:
b.hi_value.set_text(str(self.hi_value))
b.hi_value.add_callback('activated', lambda *args: self.redo())
combobox = b.lo_color
for name in self.colornames:
combobox.append_text(name)
index = self.colornames.index(self.lo_color)
combobox.set_index(index)
combobox.add_callback('activated', lambda *args: self.redo())
b.lo_value.set_length(22)
if self.lo_value is not None:
b.lo_value.set_text(str(self.lo_value))
b.lo_value.add_callback('activated', lambda *args: self.redo())
b.redo.add_callback('activated', lambda *args: self.redo())
fr.set_widget(w)
vbox.add_widget(fr, stretch=0)
spacer = Widgets.Label('')
vbox.add_widget(spacer, stretch=1)
top.add_widget(sw, stretch=1)
btns = Widgets.HBox()
btns.set_spacing(3)
btn = Widgets.Button("Close")
btn.add_callback('activated', lambda w: self.close())
btns.add_widget(btn, stretch=0)
btns.add_widget(Widgets.Label(''), stretch=1)
top.add_widget(btns, stretch=0)
container.add_widget(top, stretch=1)
def close(self):
chname = self.fv.get_channelName(self.fitsimage)
self.fv.stop_local_plugin(chname, str(self))
return True
def instructions(self):
self.tw.set_text("""Enter a limit for saturation.""")
def start(self):
self.instructions()
# start ruler drawing operation
try:
obj = self.fitsimage.getObjectByTag(self.layertag)
except KeyError:
# Add ruler layer
self.fitsimage.add(self.canvas, tag=self.layertag)
self.resume()
if self.hi_value is not None:
self.redo()
def pause(self):
self.canvas.ui_setActive(False)
def resume(self):
#self.canvas.ui_setActive(True)
self.fv.showStatus("Enter a value for saturation limit")
def stop(self):
# remove the canvas from the image
try:
self.fitsimage.deleteObjectByTag(self.layertag)
except:
pass
#self.canvas.ui_setActive(False)
self.fv.showStatus("")
def redo(self):
hi_value_s = self.w.hi_value.get_text().strip()
if len(hi_value_s) > 0:
self.hi_value = float(hi_value_s)
else:
self.hi_value = None
lo_value_s = self.w.lo_value.get_text().strip()
if len(lo_value_s) > 0:
self.lo_value = float(lo_value_s)
else:
self.lo_value = None
self.logger.debug("set lo=%s hi=%s" % (self.lo_value, self.hi_value))
self.opacity = self.w.opacity.get_value()
self.logger.debug("set alpha to %f" % (self.opacity))
# look up the colors
self.hi_color = self.colornames[self.w.hi_color.get_index()]
try:
rh, gh, bh = colors.lookup_color(self.hi_color)
except KeyError:
self.fv.show_error("No such color found: '%s'" % (self.hi_color))
self.lo_color = self.colornames[self.w.lo_color.get_index()]
try:
rl, gl, bl = colors.lookup_color(self.lo_color)
except KeyError:
self.fv.show_error("No such color found: '%s'" % (self.lo_color))
image = self.fitsimage.get_image()
if image is None:
return
self.logger.debug("preparing RGB image")
wd, ht = image.get_size()
if (wd, ht) != self.arrsize:
rgbarr = numpy.zeros((ht, wd, 4), dtype=numpy.uint8)
self.arrsize = (wd, ht)
self.rgbobj.set_data(rgbarr)
else:
rgbarr = self.rgbobj.get_data()
# Set array to the desired saturation color
rc = self.rgbobj.get_slice('R')
gc = self.rgbobj.get_slice('G')
bc = self.rgbobj.get_slice('B')
ac = self.rgbobj.get_slice('A')
self.logger.debug("Calculating alpha channel")
# set alpha channel according to saturation limit
try:
data = image.get_data()
ac[:] = 0
if self.hi_value is not None:
idx = data >= self.hi_value
rc[idx] = int(rh * 255)
gc[idx] = int(gh * 255)
bc[idx] = int(bh * 255)
ac[idx] = int(self.opacity * 255)
if self.lo_value is not None:
idx = data <= self.lo_value
rc[idx] = int(rl * 255)
gc[idx] = int(gl * 255)
bc[idx] = int(bl * 255)
ac[idx] = int(self.opacity * 255)
except Exception as e:
self.logger.error("Error setting alpha channel: %s" % (str(e)))
if self.canvas_img is None:
self.logger.debug("Adding image to canvas")
self.canvas_img = CanvasTypes.Image(0, 0, self.rgbobj)
self.canvas.add(self.canvas_img, redraw=False)
else:
self.logger.debug("Updating canvas image")
self.canvas_img.set_image(self.rgbobj)
self.logger.debug("redrawing canvas")
self.fitsimage.redraw(whence=2)
self.logger.debug("redo completed")
def clear(self, canvas, button, data_x, data_y):
self.canvas.deleteAllObjects()
return False
def __str__(self):
return 'overlays'
#END
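# Illustrative sketch of the boolean-mask idea used in Overlays.redo() above
# (standalone numpy, values assumed; not part of the plugin): pixels at or
# above the high limit are painted with the highlight color at the chosen
# opacity via a boolean index into an RGBA overlay.
#
#   import numpy as np
#   data = np.array([[10., 250.], [300., 42.]])
#   rgba = np.zeros(data.shape + (4,), dtype=np.uint8)
#   idx = data >= 255.0                    # hi_value
#   rgba[idx] = (255, 0, 0, 128)           # highlight color at ~50% opacity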
|
|
import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait # available since 2.4.0
from selenium.webdriver.support import expected_conditions as EC # available since 2.26.0
from selenium.common.exceptions import TimeoutException, NoSuchElementException, ElementNotVisibleException
import guardian_time, time, re, articleUtil, imageUtil
#TOP_COMMENT_NUM_XPATH = '//div[contains(concat(' ',normalize-space(@class),' '), "d-comment__inner d-comment__inner--top-level")]/div[2]/div[@data-recommend-count]/span[1]/span[1]'
TOP_COMMENT_NUM_XPATH = "//div[contains(concat(' ',normalize-space(@class),' '), 'd-comment__inner d-comment__inner--top-level')]/div[2]/div[1]/span[1]/span[1]"
#TOP_COMMENT_NUM_XPATH = '//div[contains(concat(' ',normalize-space(@class),' '), "d-comment__inner d-comment__inner--top-level")]/div/div[@data-recommend-count]/span/span[contains(concat(' ',normalize-space(@class),' '), "d-comment__recommend-count--old")]'
TOP_COMMENT_CONTAINER_CSS = '.d-comment__inner.d-comment__inner--top-level .d-comment__content'
TOP_COMMENT_CSS = '.d-comment__main .d-comment__body>p'
TOP_COMMENT_NUM_CSS = '.d-comment__recommend-count--old'
WORDS_LIMIT = 140
#TOP_COMMENT_XPATH = '//div[contains(concat(' ',normalize-space(@class),' '), "d-comment__inner d-comment__inner--top-level")]/div/div[@data-recommend-count]/span/span'
def findImage(self, articleTitle):
#video[0] get attribute('poster')
imageURL = ""
try:
element = WebDriverWait(self.driver, 10).until(EC.presence_of_element_located((By.CSS_SELECTOR, "video")))
imageURL = element.get_attribute("poster")
except Exception as e:
pass
if len(imageURL) == 0:
try:
element = WebDriverWait(self.driver, 10).until(EC.presence_of_element_located((By.CSS_SELECTOR, "img.maxed.responsive-img")))
imageURL = element.get_attribute("src")
except Exception as e:
return False
if len(imageURL) == 0:
return False
print("about to call getImageAndSave")
isSuccess = imageUtil.getImageAndSave(imageURL, articleTitle)
print("return from getImageAndSave")
return isSuccess
##http://www.theguardian.com/film/2015/jan/31/fifty-shades-of-grey-film-sex-sam-taylor-johnson#comments
##.responsive-img
##http://i.guim.co.uk/static/w-620/h--/q-95/sys-images/Guardian/Pix/pictures/2015/2/1/1422790921359/Protesters-carrying-yello-008.jpg
##http://i.guim.co.uk/static/w-460/h--/q-95/sys-images/Guardian/Pix/pictures/2015/2/1/1422790917867/Protesters-carrying-yello-005.jpg
##http://i.guim.co.uk/static/w-460/h--/q-95/sys-images/Guardian/Pix/pictures/2015/1/23/1422034733796/blackpool-011.jpg
##http://i.guim.co.uk/static/w-620/h--/q-95/sys-images/Guardian/Pix/pictures/2015/1/23/1422034735071/blackpool-012.jpg
##http://i.guim.co.uk/static/w-620/h--/q-95/sys-images/Guardian/Pix/pictures/2015/2/1/1422789420200/The-new-Greek-finance-min-008.jpg
##http://i.guim.co.uk/static/w-460/h--/q-95/sys-images/Guardian/Pix/pictures/2015/2/1/1422789416699/The-new-Greek-finance-min-005.jpg
def findComment(self, currentTopCommentNum):
#XPATH //div/@data-recommend-count
# //div[@data-recommend-count]
#/div[@data-recommend-count]
#.d-comment__inner.d-comment__inner--top-level .d-comment__content .d-comment__main .d-comment__body>p
#select one class by XPATH
#//div[contains(concat(" ", normalize-space(@class), " "), " d-comment__main ")]
#CSS_PATH for selecting top level comments .d-comment__inner.d-comment__inner--top-level .d-comment__content
#//div[contains(concat(' ',normalize-space(@class),' '), "ok yes")]
#//div[contains(concat(' ',normalize-space(@class),' '), "d-comment__inner d-comment__inner--top-level")]/
oldNum = currentTopCommentNum
print "find Comment 0: currentTopCommentNum: %s" % currentTopCommentNum
currentIndex = 0
print "find Comment 1"
topCommentContainers = WebDriverWait(self.driver, 10).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR,TOP_COMMENT_CONTAINER_CSS)))
#topCommentNumElms = self.driver.find_elements_by_xpath(TOP_COMMENT_NUM_XPATH)
print "find Comment 2"
#topCommentNumElms = element.find_elements_by_xpath(TOP_COMMENT_NUM_XPATH)
_commentText = ""
for index, container in enumerate(topCommentContainers[:]):
print "find Comment 3"
try:
if not container: continue
elm = container.find_element_by_css_selector(TOP_COMMENT_NUM_CSS)
if not elm: continue
_text = elm.text
if not _text or len(_text) == 0: continue
num = int(_text.strip())
print "find Comment 4 index"
if num > currentTopCommentNum:
print "find Comment 5: new %s old %s oldIndex: %s newIndex:%s" % (num, currentTopCommentNum, currentIndex, index)
currentTopCommentNum = num
currentIndex = index
textElm = container.find_element_by_css_selector(TOP_COMMENT_CSS)
if not textElm: continue
_commentText = textElm.text.strip()
except Exception as e:
print "Error in converting currentTopCommentNum %s" % e
if _commentText and len(_commentText) > 1 and currentTopCommentNum > oldNum:
return [_commentText, currentTopCommentNum]
else:
return
##Todo: use XPATH to reduce to one seek only and obtain the number of the row from the css index above
# comElm = self.driver.find_elements_by_css_selector(TOP_COMMENT_CSS)[currentIndex]
# if not comElm: return
def findTopCommentAndTopNumber(self, page, isFirstPage,WAIT_SECONDS):
print "pre 1"
URL = "%s#comments" % page.url
self.driver.get(URL)
#if isFirstPage == False:
print("time wait {}".format(WAIT_SECONDS * 5))
if isFirstPage == False:
time.sleep(WAIT_SECONDS * 2)
else:
time.sleep(WAIT_SECONDS * 5)
counter = 0
comm = None
try:
print "pre 2"
timeElm = WebDriverWait(self.driver, 20).until(EC.presence_of_element_located((By.XPATH,"//time")))
print "pre 2.1"
if not timeElm:
print "pre 2.2"
return page
timeText = timeElm.get_attribute("datetime")
print "pre 3"
page.age = guardian_time.guardianTimeToTimeStamp(timeText)
# numComments = WebDriverWait(self.driver, 20).until(EC.presence_of_element_located((By.CSS_SELECTOR,'.commentcount2__value'))).text.strip()
# numStr = re.sub(r'\D',"",numComments)
# print "numComment %s" % numStr
# page.numComments = int(numStr)
except Exception as e:
print "findTopCommentAndTopNumber: %s" % e
return page
"""BROWSER UI: LOAD COMMENTS"""
if isFirstPage:
try:
print "pre 4"
WebDriverWait(self.driver, 10).until(EC.element_to_be_clickable((By.XPATH,"//button[@aria-controls='comments-order-popup']"))).click()
print "pre 5"
WebDriverWait(self.driver, 10).until(EC.element_to_be_clickable((By.XPATH, "//button[@data-link-name='comments-oldest']"))).click()
print "AFTER CLICK OLDEST"
except Exception as e:
print "Unexpected viewMore %s" % e
time.sleep(WAIT_SECONDS+5)
# try:
# WebDriverWait(self.driver, 5).until(EC.staleness_of(comm))
# print "d-comment__body 2"
# except TimeoutException:
# print "TimeoutException .d-comment__body";
"""Old page links should be gone"""
pageLinks = None
commentAndCommentNum = ["", 10]
def findCommentProcedure(_self,_currentTopCommentNum, _currentTopComment, _commentAndCommentNum):
originalCommentAndNum = _commentAndCommentNum[:]
newCommentAndCommentNum = findComment(_self, originalCommentAndNum[1])
if newCommentAndCommentNum and isinstance(newCommentAndCommentNum, list) and len (newCommentAndCommentNum) == 2:
num = newCommentAndCommentNum[1]
if num > originalCommentAndNum[1]:
print "num: %s" % num
return newCommentAndCommentNum
else:
return originalCommentAndNum
else:
return originalCommentAndNum
try:
print "currentTopComment 0"
pageLinks = WebDriverWait(self.driver, 10).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR,'.pagination__list>a')))[0:2]
print "currentTopComment 1"
currentTopComment = ""
currentTopCommentNum = 10
for index, link in enumerate(pageLinks[:]):
print "currentTopComment 2"
if not link: continue
pageLinkURL = link.get_attribute('href')
if 'new' in pageLinkURL and not 'newest' in pageLinkURL:
print "ERROR URL: shouldn't contain new %s" % pageLinkURL
continue
if index > 0:
link.click()
time.sleep(WAIT_SECONDS)
print "currentTopComment 3"
commentAndCommentNum = findCommentProcedure(self, currentTopCommentNum, currentTopComment, commentAndCommentNum)[:]
# commentAndCommentNum = findComment(self, currentTopCommentNum)
# if commentAndCommentNum and isinstance(commentAndCommentNum, list) and len (commentAndCommentNum) == 2:
# num = commentAndCommentNum[1]
# if num > currentTopCommentNum:
# print "num: %s" % num
# currentTopCommentNum = commentAndCommentNum[1]
# currentTopComment = commentAndCommentNum[0]
if not pageLinks or len(pageLinks) == 0:
commentAndCommentNum = findCommentProcedure(self, currentTopCommentNum, currentTopComment, commentAndCommentNum)[:]
page.topComment = commentAndCommentNum[0]
page.topCommentNum = commentAndCommentNum[1]
page.topComment = articleUtil.truncatedStringForRow(page.topComment)
##if len(page.topComment) > (WORDS_LIMIT -2):
## page.topComment = "%s..." % page.topComment.strip()[0:WORDS_LIMIT]
print "currentTopCommentText %s" % page.topComment
print "currentTopCommentNum %s" % page.topCommentNum
except Exception as e:
print "Unexpected .button.button--small.button--tertiary.pagination__action.js-discussion-change-page %s" % e
return page
|
|
import numpy as np
import pandas as pd
from nose.tools import assert_almost_equal, assert_equal, eq_, ok_, raises
from numpy.testing import assert_array_equal
from pandas.testing import assert_frame_equal
from rsmtool.preprocessor import FeaturePreprocessor, FeatureSpecsProcessor, FeatureSubsetProcessor
class TestFeaturePreprocessor:
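    """Tests for the FeaturePreprocessor class from rsmtool.preprocessor."""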
def setUp(self):
self.fpp = FeaturePreprocessor()
def test_select_candidates_with_N_or_more_items(self):
data = pd.DataFrame({'candidate': ['a'] * 3 + ['b'] * 2 + ['c'],
'sc1': [2, 3, 1, 5, 6, 1]})
df_included_expected = pd.DataFrame({'candidate': ['a'] * 3 + ['b'] * 2,
'sc1': [2, 3, 1, 5, 6]})
df_excluded_expected = pd.DataFrame({'candidate': ['c'],
'sc1': [1]})
(df_included,
df_excluded) = self.fpp.select_candidates(data, 2)
assert_frame_equal(df_included, df_included_expected)
assert_frame_equal(df_excluded, df_excluded_expected)
def test_select_candidates_with_N_or_more_items_all_included(self):
data = pd.DataFrame({'candidate': ['a'] * 2 + ['b'] * 2 + ['c'] * 2,
'sc1': [2, 3, 1, 5, 6, 1]})
(df_included,
df_excluded) = self.fpp.select_candidates(data, 2)
assert_frame_equal(df_included, data)
assert_equal(len(df_excluded), 0)
def test_select_candidates_with_N_or_more_items_all_excluded(self):
data = pd.DataFrame({'candidate': ['a'] * 3 + ['b'] * 2 + ['c'],
'sc1': [2, 3, 1, 5, 6, 1]})
(df_included,
df_excluded) = self.fpp.select_candidates(data, 4)
assert_frame_equal(df_excluded, data)
assert_equal(len(df_included), 0)
def test_select_candidates_with_N_or_more_items_custom_name(self):
data = pd.DataFrame({'ID': ['a'] * 3 + ['b'] * 2 + ['c'],
'sc1': [2, 3, 1, 5, 6, 1]})
df_included_expected = pd.DataFrame({'ID': ['a'] * 3 + ['b'] * 2,
'sc1': [2, 3, 1, 5, 6]})
df_excluded_expected = pd.DataFrame({'ID': ['c'],
'sc1': [1]})
(df_included,
df_excluded) = self.fpp.select_candidates(data, 2, 'ID')
assert_frame_equal(df_included, df_included_expected)
assert_frame_equal(df_excluded, df_excluded_expected)
def test_rename_no_columns(self):
df = pd.DataFrame(columns=['spkitemid', 'sc1', 'sc2', 'length',
'raw', 'candidate', 'feature1', 'feature2'])
df = self.fpp.rename_default_columns(df, [], 'spkitemid', 'sc1', 'sc2',
'length', 'raw', 'candidate')
assert_array_equal(df.columns,
['spkitemid', 'sc1', 'sc2', 'length', 'raw',
'candidate', 'feature1', 'feature2'])
def test_rename_no_columns_some_values_none(self):
df = pd.DataFrame(columns=['spkitemid', 'sc1', 'sc2', 'feature1', 'feature2'])
df = self.fpp.rename_default_columns(df, [], 'spkitemid', 'sc1', 'sc2', None, None, None)
assert_array_equal(df.columns, ['spkitemid', 'sc1', 'sc2', 'feature1', 'feature2'])
def test_rename_no_used_columns_but_unused_columns_with_default_names(self):
df = pd.DataFrame(columns=['spkitemid', 'sc1', 'sc2', 'length', 'feature1', 'feature2'])
df = self.fpp.rename_default_columns(df, [], 'spkitemid', 'sc1', 'sc2', None, None, None)
assert_array_equal(df.columns, ['spkitemid', 'sc1', 'sc2',
'##length##', 'feature1', 'feature2'])
def test_rename_used_columns(self):
df = pd.DataFrame(columns=['id', 'r1', 'r2', 'words', 'SR', 'feature1', 'feature2'])
df = self.fpp.rename_default_columns(df, [], 'id', 'r1', 'r2', 'words', 'SR', None)
assert_array_equal(df.columns, ['spkitemid', 'sc1', 'sc2', 'length',
'raw', 'feature1', 'feature2'])
def test_rename_used_columns_and_unused_columns_with_default_names(self):
df = pd.DataFrame(columns=['id', 'r1', 'r2', 'words', 'raw', 'feature1', 'feature2'])
df = self.fpp.rename_default_columns(df, [], 'id', 'r1', 'r2', 'words', None, None)
assert_array_equal(df.columns, ['spkitemid', 'sc1', 'sc2', 'length',
'##raw##', 'feature1', 'feature2'])
def test_rename_used_columns_with_swapped_names(self):
df = pd.DataFrame(columns=['id', 'sc1', 'sc2', 'raw', 'words', 'feature1', 'feature2'])
df = self.fpp.rename_default_columns(df, [], 'id', 'sc2', 'sc1', 'words', None, None)
assert_array_equal(df.columns, ['spkitemid', 'sc2', 'sc1', '##raw##',
'length', 'feature1', 'feature2'])
def test_rename_used_columns_but_not_features(self):
df = pd.DataFrame(columns=['id', 'sc1', 'sc2', 'length', 'feature2'])
df = self.fpp.rename_default_columns(df, ['length'], 'id', 'sc1', 'sc2', None, None, None)
assert_array_equal(df.columns, ['spkitemid', 'sc1', 'sc2', 'length', 'feature2'])
def test_rename_candidate_column(self):
df = pd.DataFrame(columns=['spkitemid', 'sc1', 'sc2', 'length',
'apptNo', 'feature1', 'feature2'])
df = self.fpp.rename_default_columns(df, [],
'spkitemid', 'sc1', 'sc2', None, None, 'apptNo')
assert_array_equal(df.columns, ['spkitemid', 'sc1', 'sc2', '##length##',
'candidate', 'feature1', 'feature2'])
def test_rename_candidate_named_sc2(self):
df = pd.DataFrame(columns=['id', 'sc1', 'sc2', 'question', 'l1', 'score'])
df_renamed = self.fpp.rename_default_columns(df, [],
'id', 'sc1', None, None, 'score', 'sc2')
assert_array_equal(df_renamed.columns, ['spkitemid', 'sc1',
'candidate', 'question', 'l1', 'raw'])
@raises(KeyError)
def test_check_subgroups_missing_columns(self):
df = pd.DataFrame(columns=['a', 'b', 'c'])
subgroups = ['a', 'd']
self.fpp.check_subgroups(df, subgroups)
def test_check_subgroups_nothing_to_replace(self):
df = pd.DataFrame({'a': ['1', '2'],
'b': ['32', '34'],
'd': ['abc', 'def']})
subgroups = ['a', 'd']
df_out = self.fpp.check_subgroups(df, subgroups)
assert_frame_equal(df_out, df)
def test_check_subgroups_replace_empty(self):
df = pd.DataFrame({'a': ['1', ''],
'b': [' ', '34'],
'd': ['ab c', ' ']})
subgroups = ['a', 'd']
df_expected = pd.DataFrame({'a': ['1', 'No info'],
'b': [' ', '34'],
'd': ['ab c', 'No info']})
df_out = self.fpp.check_subgroups(df, subgroups)
assert_frame_equal(df_out, df_expected)
def test_filter_on_column(self):
bad_df = pd.DataFrame({'spkitemlab': np.arange(1, 9, dtype='int64'),
'sc1': ['00', 'TD', '02', '03'] * 2})
df_filtered_with_zeros = pd.DataFrame({'spkitemlab': [1, 3, 4, 5, 7, 8],
'sc1': [0.0, 2.0, 3.0] * 2})
df_filtered = pd.DataFrame({'spkitemlab': [3, 4, 7, 8], 'sc1': [2.0, 3.0] * 2})
(output_df_with_zeros,
output_excluded_df_with_zeros) = self.fpp.filter_on_column(bad_df, 'sc1',
'spkitemlab',
exclude_zeros=False)
output_df, output_excluded_df = self.fpp.filter_on_column(bad_df, 'sc1',
'spkitemlab',
exclude_zeros=True)
assert_frame_equal(output_df_with_zeros, df_filtered_with_zeros)
assert_frame_equal(output_df, df_filtered)
def test_filter_on_column_all_non_numeric(self):
bad_df = pd.DataFrame({'sc1': ['A', 'I', 'TD', 'TD'] * 2,
'spkitemlab': range(1, 9)})
expected_df_excluded = bad_df.copy()
expected_df_excluded.drop('sc1', axis=1, inplace=True)
df_filtered, df_excluded = self.fpp.filter_on_column(bad_df, 'sc1',
'spkitemlab',
exclude_zeros=True)
ok_(df_filtered.empty)
ok_("sc1" not in df_filtered.columns)
assert_frame_equal(df_excluded, expected_df_excluded, check_dtype=False)
def test_filter_on_column_std_epsilon_zero(self):
        # Test that the function excludes columns where the std is returned as
        # a very low value rather than exactly 0
data = {'id': np.arange(1, 21, dtype='int64'),
'feature_ok': np.arange(1, 21),
'feature_zero_sd': [1.5601] * 20}
bad_df = pd.DataFrame(data=data)
output_df, output_excluded_df = self.fpp.filter_on_column(bad_df,
'feature_zero_sd',
'id',
exclude_zeros=False,
exclude_zero_sd=True)
good_df = bad_df[['id', 'feature_ok']].copy()
assert_frame_equal(output_df, good_df)
ok_(output_excluded_df.empty)
def test_filter_on_column_with_inf(self):
        # Test that the function excludes columns where the feature value is 'inf'
data = pd.DataFrame({'feature_1': [1.5601, 0, 2.33, 11.32],
'feature_ok': np.arange(1, 5)})
data['feature_with_inf'] = 1 / data['feature_1']
data['id'] = np.arange(1, 5, dtype='int64')
bad_df = data[np.isinf(data['feature_with_inf'])].copy()
good_df = data[~np.isinf(data['feature_with_inf'])].copy()
bad_df.reset_index(drop=True, inplace=True)
good_df.reset_index(drop=True, inplace=True)
output_df, output_excluded_df = self.fpp.filter_on_column(data, 'feature_with_inf',
'id',
exclude_zeros=False,
exclude_zero_sd=True)
assert_frame_equal(output_df, good_df)
assert_frame_equal(output_excluded_df, bad_df)
def test_filter_on_flag_column_empty_flag_dictionary(self):
# no flags specified, keep the data frame as is
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': [0, 0, 0, 0],
'flag2': [1, 2, 2, 1]})
flag_dict = {}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_nothing_to_exclude_int_column_and_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': [0, 1, 2, 3]})
flag_dict = {'flag1': [0, 1, 2, 3, 4]}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_nothing_to_exclude_float_column_and_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': [0.5, 1.1, 2.2, 3.6]})
flag_dict = {'flag1': [0.5, 1.1, 2.2, 3.6, 4.5]}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_nothing_to_exclude_str_column_and_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': ['a', 'b', 'c', 'd']})
flag_dict = {'flag1': ['a', 'b', 'c', 'd', 'e']}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_nothing_to_exclude_float_column_int_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': [0.0, 1.0, 2.0, 3.0]})
flag_dict = {'flag1': [0, 1, 2, 3, 4]}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_nothing_to_exclude_int_column_float_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': [0, 1, 2, 3]})
flag_dict = {'flag1': [0.0, 1.0, 2.0, 3.0, 4.5]}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_nothing_to_exclude_str_column_float_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': ['4', '1', '2', '3.5']})
flag_dict = {'flag1': [0.0, 1.0, 2.0, 3.5, 4.0]}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_nothing_to_exclude_float_column_str_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': [4.0, 1.0, 2.0, 3.5]})
flag_dict = {'flag1': ['1', '2', '3.5', '4', 'TD']}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_nothing_to_exclude_str_column_int_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': ['0.0', '1.0', '2.0', '3.0']})
flag_dict = {'flag1': [0, 1, 2, 3, 4]}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_nothing_to_exclude_int_column_str_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': [0, 1, 2, 3]})
flag_dict = {'flag1': ['0.0', '1.0', '2.0', '3.0', 'TD']}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_nothing_to_exclude_mixed_type_column_str_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': [0, '1.0', 2, 3.5]})
flag_dict = {'flag1': ['0.0', '1.0', '2.0', '3.5', 'TD']}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_nothing_to_exclude_mixed_type_column_int_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': [0, '1.0', 2, 3.0]})
flag_dict = {'flag1': [0, 1, 2, 3, 4]}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_nothing_to_exclude_mixed_type_column_float_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': [0, '1.5', 2, 3.5]})
flag_dict = {'flag1': [0.0, 1.5, 2.0, 3.5, 4.0]}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_nothing_to_exclude_int_column_mixed_type_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': [0, 1, 2, 3]})
flag_dict = {'flag1': [0, 1, 2, 3.0, 3.5, 'TD']}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_nothing_to_exclude_float_column_mixed_type_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': [0.0, 1.0, 2.0, 3.5]})
flag_dict = {'flag1': [0, 1, 2, 3.0, 3.5, 'TD']}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_nothing_to_exclude_str_column_mixed_type_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': ['0.0', '1.0', '2.0', '3.5']})
flag_dict = {'flag1': [0, 1, 2, 3.0, 3.5, 'TD']}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_nothing_to_exclude_mixed_type_column_mixed_type_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': [1, 2, 3.5, 'TD']})
flag_dict = {'flag1': [0, 1, 2, 3.0, 3.5, 'TD']}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df)
eq_(len(df_excluded), 0)
def test_filter_on_flag_column_mixed_type_column_mixed_type_dict_filter_preserve_type(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd', 'e', 'f'],
'sc1': [1, 2, 1, 3, 4, 5],
'feature': [2, 3, 4, 5, 6, 2],
'flag1': [1, 1.5, 2, 3.5, 'TD', 'NS']})
flag_dict = {'flag1': [1.5, 2, 'TD']}
df_new_expected = pd.DataFrame({'spkitemid': ['b', 'c', 'e'],
'sc1': [2, 1, 4],
'feature': [3, 4, 6],
'flag1': [1.5, 2, 'TD']})
df_excluded_expected = pd.DataFrame({'spkitemid': ['a', 'd', 'f'],
'sc1': [1, 3, 5],
'feature': [2, 5, 2],
'flag1': [1, 3.5, 'NS']})
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df_new_expected)
assert_frame_equal(df_excluded, df_excluded_expected)
def test_filter_on_flag_column_with_none_value_in_int_flag_column_int_dict(self):
df = pd.DataFrame({'spkitemid': [1, 2, 3, 4, 5, 6],
'sc1': [1, 2, 1, 3, 4, 5],
'feature': [2, 3, 4, 5, 6, 2],
'flag1': [1, 2, 2, 3, 4, None]}, dtype=object)
flag_dict = {'flag1': [2, 4]}
df_new_expected = pd.DataFrame({'spkitemid': [2, 3, 5],
'sc1': [2, 1, 4],
'feature': [3, 4, 6],
'flag1': [2, 2, 4]}, dtype=object)
df_excluded_expected = pd.DataFrame({'spkitemid': [1, 4, 6],
'sc1': [1, 3, 5],
'feature': [2, 5, 2],
'flag1': [1, 3, None]}, dtype=object)
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df_new_expected)
assert_frame_equal(df_excluded, df_excluded_expected)
def test_filter_on_flag_column_with_none_value_in_float_flag_column_float_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd', 'e', 'f'],
'sc1': [1, 2, 1, 3, 4, 5],
'feature': [2, 3, 4, 5, 6, 2],
'flag1': [1.2, 2.1, 2.1, 3.3, 4.2, None]})
flag_dict = {'flag1': [2.1, 4.2]}
df_new_expected = pd.DataFrame({'spkitemid': ['b', 'c', 'e'],
'sc1': [2, 1, 4],
'feature': [3, 4, 6],
'flag1': [2.1, 2.1, 4.2]})
df_excluded_expected = pd.DataFrame({'spkitemid': ['a', 'd', 'f'],
'sc1': [1, 3, 5],
'feature': [2, 5, 2],
'flag1': [1.2, 3.3, None]})
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df_new_expected)
assert_frame_equal(df_excluded, df_excluded_expected)
def test_filter_on_flag_column_with_none_value_in_str_flag_column_str_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd', 'e', 'f'],
'sc1': [1, 2, 1, 3, 4, 5],
'feature': [2, 3, 4, 5, 6, 2],
'flag1': ['a', 'b', 'b', 'c', 'd', None]})
flag_dict = {'flag1': ['b', 'd']}
df_new_expected = pd.DataFrame({'spkitemid': ['b', 'c', 'e'],
'sc1': [2, 1, 4],
'feature': [3, 4, 6],
'flag1': ['b', 'b', 'd']})
df_excluded_expected = pd.DataFrame({'spkitemid': ['a', 'd', 'f'],
'sc1': [1, 3, 5],
'feature': [2, 5, 2],
'flag1': ['a', 'c', None]})
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df_new_expected)
assert_frame_equal(df_excluded, df_excluded_expected)
def test_filter_on_flag_column_with_none_value_in_mixed_type_flag_column_float_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd', 'e', 'f'],
'sc1': [1, 2, 1, 3, 4, 5],
'feature': [2, 3, 4, 5, 6, 2],
'flag1': [1, 1.5, 2.0, 'TD', 2.0, None]},
dtype=object)
flag_dict = {'flag1': [1.5, 2.0]}
df_new_expected = pd.DataFrame({'spkitemid': ['b', 'c', 'e'],
'sc1': [2, 1, 4],
'feature': [3, 4, 6],
'flag1': [1.5, 2.0, 2.0]},
dtype=object)
df_excluded_expected = pd.DataFrame({'spkitemid': ['a', 'd', 'f'],
'sc1': [1, 3, 5],
'feature': [2, 5, 2],
'flag1': [1, 'TD', None]},
dtype=object)
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df_new_expected)
assert_frame_equal(df_excluded, df_excluded_expected)
def test_filter_on_flag_column_with_none_value_in_mixed_type_flag_column_int_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd', 'e', 'f'],
'sc1': [1, 2, 1, 3, 4, 5],
'feature': [2, 3, 4, 5, 6, 2],
'flag1': [1.5, 2, 2, 'TD', 4, None]},
dtype=object)
flag_dict = {'flag1': [2, 4]}
df_new_expected = pd.DataFrame({'spkitemid': ['b', 'c', 'e'],
'sc1': [2, 1, 4],
'feature': [3, 4, 6],
'flag1': [2, 2, 4]},
dtype=object)
df_excluded_expected = pd.DataFrame({'spkitemid': ['a', 'd', 'f'],
'sc1': [1, 3, 5],
'feature': [2, 5, 2],
'flag1': [1.5, 'TD', None]},
dtype=object)
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df_new_expected)
assert_frame_equal(df_excluded, df_excluded_expected)
def test_filter_on_flag_column_with_none_value_in_mixed_type_flag_column_mixed_type_dict(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd', 'e', 'f'],
'sc1': [1, 2, 1, 3, 4, 5],
'feature': [2, 3, 4, 5, 6, 2],
'flag1': [1, 1.5, 2, 3.5, 'TD', None]},
dtype=object)
flag_dict = {'flag1': [1.5, 2, 'TD']}
df_new_expected = pd.DataFrame({'spkitemid': ['b', 'c', 'e'],
'sc1': [2, 1, 4],
'feature': [3, 4, 6],
'flag1': [1.5, 2, 'TD']}, dtype=object)
df_excluded_expected = pd.DataFrame({'spkitemid': ['a', 'd', 'f'],
'sc1': [1, 3, 5],
'feature': [2, 5, 2],
'flag1': [1, 3.5, None]}, dtype=object)
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df_new_expected)
assert_frame_equal(df_excluded, df_excluded_expected)
def test_filter_on_flag_column_two_flags_same_responses(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd', 'e', 'f'],
'sc1': [1, 2, 1, 3, 4, 5],
'feature': [2, 3, 4, 5, 6, 2],
'flag1': [1, 1.5, 2, 3.5, 'TD', 'NS'],
'flag2': [1, 0, 0, 1, 0, 1]})
flag_dict = {'flag1': [1.5, 2, 'TD'], 'flag2': [0]}
df_new_expected = pd.DataFrame({'spkitemid': ['b', 'c', 'e'],
'sc1': [2, 1, 4],
'feature': [3, 4, 6],
'flag1': [1.5, 2, 'TD'],
'flag2': [0, 0, 0]})
df_excluded_expected = pd.DataFrame({'spkitemid': ['a', 'd', 'f'],
'sc1': [1, 3, 5],
'feature': [2, 5, 2],
'flag1': [1, 3.5, 'NS'],
'flag2': [1, 1, 1]})
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df_new_expected)
assert_frame_equal(df_excluded, df_excluded_expected)
def test_filter_on_flag_column_two_flags_different_responses(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd', 'e', 'f'],
'sc1': [1, 2, 1, 3, 4, 5],
'feature': [2, 3, 4, 5, 6, 2],
'flag1': [1, 1.5, 2, 3.5, 'TD', 'NS'],
'flag2': [2, 0, 0, 1, 0, 1]})
flag_dict = {'flag1': [1.5, 2, 'TD', 'NS'], 'flag2': [0, 2]}
df_new_expected = pd.DataFrame({'spkitemid': ['b', 'c', 'e'],
'sc1': [2, 1, 4],
'feature': [3, 4, 6],
'flag1': [1.5, 2, 'TD'],
'flag2': [0, 0, 0]})
df_excluded_expected = pd.DataFrame({'spkitemid': ['a', 'd', 'f'],
'sc1': [1, 3, 5],
'feature': [2, 5, 2],
'flag1': [1, 3.5, 'NS'],
'flag2': [2, 1, 1]})
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
assert_frame_equal(df_new, df_new_expected)
assert_frame_equal(df_excluded, df_excluded_expected)
@raises(KeyError)
def test_filter_on_flag_column_missing_columns(self):
df = pd.DataFrame({'spkitemid': ['a', 'b', 'c', 'd'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': ['1', '1', '1', '1'],
'flag2': ['1', '2', '2', '1']})
flag_dict = {'flag3': ['0'], 'flag2': ['1', '2']}
df_new, df_excluded = self.fpp.filter_on_flag_columns(df, flag_dict)
@raises(ValueError)
def test_filter_on_flag_column_nothing_left(self):
bad_df = pd.DataFrame({'spkitemid': ['a1', 'b1', 'c1', 'd1'],
'sc1': [1, 2, 1, 3],
'feature': [2, 3, 4, 5],
'flag1': [1, 0, 20, 14],
'flag2': [1, 1.0, 'TD', '03']})
flag_dict = {'flag1': [1, 0, 14], 'flag2': ['TD']}
df_new, df_excluded = self.fpp.filter_on_flag_columns(bad_df, flag_dict)
def test_remove_outliers(self):
# we want to test that even if we pass in a list of
# integers, we still get the right clamped output
data = [1, 1, 2, 2, 1, 1] * 10 + [10]
ceiling = np.mean(data) + 4 * np.std(data)
clamped_data = self.fpp.remove_outliers(data)
assert_almost_equal(clamped_data[-1], ceiling)
def test_generate_feature_names_subset(self):
reserved_column_names = ['reserved_col1', 'reserved_col2']
expected = ['col_1']
df = pd.DataFrame({'reserved_col1': ['X', 'Y', 'Z'],
'reserved_col2': ['Q', 'R', 'S'],
'col_1': [1, 2, 3],
'col_2': ['A', 'B', 'C']})
subset = 'A'
feature_subset = pd.DataFrame({'Feature': ['col_1', 'col_2', 'col_3'],
'A': [1, 0, 0],
'B': [1, 1, 1]})
feat_names = self.fpp.generate_feature_names(df,
reserved_column_names,
feature_subset,
subset)
eq_(feat_names, expected)
def test_generate_feature_names_none(self):
reserved_column_names = ['reserved_col1', 'reserved_col2']
expected = ['col_1', 'col_2']
df = pd.DataFrame({'reserved_col1': ['X', 'Y', 'Z'],
'reserved_col2': ['Q', 'R', 'S'],
'col_1': [1, 2, 3],
'col_2': ['A', 'B', 'C']})
feat_names = self.fpp.generate_feature_names(df,
reserved_column_names,
feature_subset_specs=None,
feature_subset=None)
eq_(feat_names, expected)
def test_model_name_builtin_model(self):
model_name = 'LinearRegression'
model_type = self.fpp.check_model_name(model_name)
eq_(model_type, 'BUILTIN')
def test_model_name_skll_model(self):
model_name = 'AdaBoostRegressor'
model_type = self.fpp.check_model_name(model_name)
eq_(model_type, 'SKLL')
@raises(ValueError)
def test_model_name_wrong_name(self):
model_name = 'random_model'
self.fpp.check_model_name(model_name)
def test_trim(self):
values = np.array([1.4, 8.5, 7.4])
expected = np.array([1.4, 8.4998, 7.4])
actual = self.fpp.trim(values, 1, 8)
assert_array_equal(actual, expected)
def test_trim_with_list(self):
values = [1.4, 8.5, 7.4]
expected = np.array([1.4, 8.4998, 7.4])
actual = self.fpp.trim(values, 1, 8)
assert_array_equal(actual, expected)
def test_trim_with_custom_tolerance(self):
values = [0.6, 8.4, 7.4]
expected = np.array([0.75, 8.25, 7.4])
actual = self.fpp.trim(values, 1, 8, 0.25)
assert_array_equal(actual, expected)
def test_preprocess_feature_fail(self):
np.random.seed(10)
values = np.random.random(size=1000)
values = np.append(values, np.array([10000000]))
mean = values.mean()
std = values.std()
expected = values.copy()
expected[-1] = mean + 4 * std
actual = self.fpp.preprocess_feature(values,
'A',
'raw',
mean,
std)
assert_array_equal(actual, expected)
def test_preprocess_feature_with_outlier(self):
np.random.seed(10)
values = np.random.random(size=1000)
values = np.append(values, np.array([10000000]))
mean = values.mean()
std = values.std()
expected = values.copy()
expected[-1] = mean + 4 * std
actual = self.fpp.preprocess_feature(values,
'A',
'raw',
mean,
std,
exclude_zero_sd=True)
assert_array_equal(actual, expected)
def test_preprocess_features(self):
train = pd.DataFrame({'A': [1, 2, 4, 3]})
test = pd.DataFrame({'A': [4, 3, 2, 1]})
train_expected = (train['A'] - train['A'].mean()) / train['A'].std()
train_expected = pd.DataFrame(train_expected)
test_expected = (test['A'] - test['A'].mean()) / test['A'].std()
test_expected = pd.DataFrame(test_expected)
info_expected = pd.DataFrame({'feature': ['A'],
'sign': [1],
'train_mean': [train.A.mean()],
'train_sd': [train.A.std()],
'train_transformed_mean': [train.A.mean()],
'train_transformed_sd': [test.A.std()],
'transform': ['raw']})
specs = pd.DataFrame({'feature': ['A'],
'transform': ['raw'],
'sign': [1]})
(train_processed,
test_processed,
info_processed) = self.fpp.preprocess_features(train, test, specs)
assert_frame_equal(train_processed.sort_index(axis=1),
train_expected.sort_index(axis=1))
assert_frame_equal(test_processed.sort_index(axis=1),
test_expected.sort_index(axis=1))
assert_frame_equal(info_processed.sort_index(axis=1),
info_expected.sort_index(axis=1))
def test_filter_data_features(self):
data = {'ID': [1, 2, 3, 4],
'LENGTH': [10, 12, 11, 12],
'h1': [1, 2, 3, 1],
'candidate': ['A', 'B', 'C', 'A'],
'h2': [1, 2, 3, 1],
'feature1': [1, 3, 4, 1],
'feature2': [1, 3, 2, 2]}
df_filtered_features_expected = pd.DataFrame({'spkitemid': [1, 2, 3, 4],
'sc1': [1.0, 2.0, 3.0, 1.0],
'feature1': [1.0, 3.0, 4.0, 1.0],
'feature2': [1.0, 3.0, 2.0, 2.0]})
df_filtered_features_expected = df_filtered_features_expected[['spkitemid',
'sc1',
'feature1',
'feature2']]
data = pd.DataFrame(data)
(df_filtered_features,
_,
_,
_,
_,
_,
_,
_,
_,
_) = self.fpp.filter_data(data,
'h1',
'ID',
'LENGTH',
'h2',
'candidate',
['feature1', 'feature2'],
['LENGTH', 'ID', 'candidate', 'h1'],
0,
6,
{},
[])
assert_frame_equal(df_filtered_features,
df_filtered_features_expected)
def test_filter_data_correct_features_and_length_in_other_columns(self):
data = {'ID': [1, 2, 3, 4],
'LENGTH': [10, 10, 10, 10],
'h1': [1, 2, 3, 1],
'candidate': ['A', 'B', 'C', 'A'],
'h2': [1, 2, 3, 1],
'feature1': [1, 3, 4, 1],
'feature2': [1, 3, 2, 2]}
data = pd.DataFrame(data)
(_,
_,
df_filtered_other_columns,
_,
_,
_,
_,
_,
_,
feature_names) = self.fpp.filter_data(data,
'h1',
'ID',
'LENGTH',
'h2',
'candidate',
['feature1', 'feature2'],
['LENGTH', 'ID', 'candidate', 'h1'],
0,
6,
{},
[])
eq_(feature_names, ['feature1', 'feature2'])
assert '##LENGTH##' in df_filtered_other_columns.columns
def test_filter_data_length_in_other_columns(self):
data = {'ID': [1, 2, 3, 4],
'LENGTH': [10, 10, 10, 10],
'h1': [1, 2, 3, 1],
'candidate': ['A', 'B', 'C', 'A'],
'h2': [1, 2, 3, 1],
'feature1': [1, 3, 4, 1],
'feature2': [1, 3, 2, 2]}
data = pd.DataFrame(data)
(_,
_,
df_filtered_other_columns,
_,
_,
_,
_,
_,
_,
feature_names) = self.fpp.filter_data(data,
'h1',
'ID',
'LENGTH',
'h2',
'candidate',
['feature1', 'feature2'],
['LENGTH', 'ID', 'candidate', 'h1'],
0,
6,
{},
[])
eq_(feature_names, ['feature1', 'feature2'])
assert '##LENGTH##' in df_filtered_other_columns.columns
@raises(ValueError)
def test_filter_data_min_candidates_raises_value_error(self):
data = {'ID': [1, 2, 3, 4],
'LENGTH': [10, 10, 10, 10],
'h1': [1, 2, 3, 1],
'candidate': ['A', 'B', 'C', 'A'],
'h2': [1, 2, 3, 1],
'feature1': [1, 3, 4, 1],
'feature2': [1, 3, 2, 2]}
data = pd.DataFrame(data)
self.fpp.filter_data(data,
'h1',
'ID',
'LENGTH',
'h2',
'candidate',
['feature1', 'feature2'],
['LENGTH', 'ID', 'candidate', 'h1'],
0,
6,
{},
[],
min_candidate_items=5)
def test_filter_data_with_min_candidates(self):
data = {'ID': [1, 2, 3, 4],
'LENGTH': [10, 10, 10, 10],
'h1': [1, 2, 3, 1],
'candidate': ['A', 'B', 'C', 'A'],
'h2': [1, 2, 3, 1],
'feature1': [1, 3, 4, 1],
'feature2': [1, 3, 2, 2]}
data = pd.DataFrame(data)
(df_filtered_features,
_,
_,
_,
_,
df_filtered_human_scores,
_,
_,
_,
_) = self.fpp.filter_data(data,
'h1',
'ID',
'LENGTH',
'h2',
'candidate',
['feature1', 'feature2'],
['LENGTH', 'ID', 'candidate', 'h1'],
0,
6,
{},
[],
min_candidate_items=2)
eq_(df_filtered_features.shape[0], 2)
assert all(col in df_filtered_human_scores.columns
for col in ['sc1', 'sc2'])
def test_filter_data_id_candidate_equal(self):
data = {'LENGTH': [10, 12, 18, 21],
'h1': [1, 2, 3, 1],
'candidate': ['A', 'B', 'C', 'D'],
'h2': [1, 2, 3, 1],
'feature1': [1, 3, 4, 1],
'feature2': [1, 3, 2, 2]}
data = pd.DataFrame(data)
(_,
df_filtered_metadata,
_,
_,
_,
_,
_,
_,
_,
_) = self.fpp.filter_data(data,
'h1',
'candidate',
'LENGTH',
'h2',
'candidate',
['feature1', 'feature2'],
['LENGTH', 'ID', 'candidate', 'h1'],
0,
6,
{},
[])
expected = pd.DataFrame({'spkitemid': ['A', 'B', 'C', 'D'],
'candidate': ['A', 'B', 'C', 'D']})
expected = expected[['spkitemid', 'candidate']]
assert_frame_equal(df_filtered_metadata, expected)
class TestFeatureSpecsProcessor:
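    """Tests for the FeatureSpecsProcessor class from rsmtool.preprocessor."""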
def setUp(self):
self.fsp = FeatureSpecsProcessor()
def test_generate_default_specs(self):
fnames = ['Grammar', 'Vocabulary', 'Pronunciation']
df_specs = self.fsp.generate_default_specs(fnames)
assert_equal(len(df_specs), 3)
assert_equal(df_specs['feature'][0], 'Grammar')
assert_equal(df_specs['transform'][1], 'raw')
assert_equal(df_specs['sign'][2], 1.0)
def test_generate_specs_from_data_with_negative_sign(self):
feature_subset_specs = pd.DataFrame({'Feature': ['Grammar',
'Vocabulary',
'Fluency',
'Content_coverage',
'Discourse'],
'Sign_SYS1': ['-', '+', '+', '+', '-']})
np.random.seed(10)
data = {'Grammar': np.random.randn(10),
'Fluency': np.random.randn(10),
'Discourse': np.random.randn(10),
'r1': np.random.choice(4, 10),
'spkitemlab': ['a-5'] * 10}
df = pd.DataFrame(data)
df_specs = self.fsp.generate_specs(df,
['Grammar',
'Fluency',
'Discourse'],
'r1',
feature_subset_specs,
'SYS1')
assert_equal(len(df_specs), 3)
assert_array_equal(df_specs['feature'], ['Grammar', 'Fluency', 'Discourse'])
assert_array_equal(df_specs['sign'], [-1.0, 1.0, -1.0])
def test_generate_specs_from_data_with_default_sign(self):
feature_subset_specs = pd.DataFrame({'Feature': ['Grammar',
'Vocabulary',
'Fluency',
'Content_coverage',
'Discourse'],
'Sign_SYS1': ['-', '+', '+', '+', '-']})
np.random.seed(10)
data = {'Grammar': np.random.randn(10),
'Fluency': np.random.randn(10),
'Discourse': np.random.randn(10),
'r1': np.random.choice(4, 10),
'spkitemlab': ['a-5'] * 10}
df = pd.DataFrame(data)
df_specs = self.fsp.generate_specs(df,
['Grammar',
'Fluency',
'Discourse'],
'r1',
feature_subset_specs,
feature_sign=None)
assert_equal(len(df_specs), 3)
assert_array_equal(df_specs['feature'], ['Grammar', 'Fluency', 'Discourse'])
assert_array_equal(df_specs['sign'], [1.0, 1.0, 1.0])
def test_generate_specs_from_data_with_transformation(self):
feature_subset_specs = pd.DataFrame({'Feature': ['Grammar',
'Vocabulary',
'Fluency',
'Content_coverage',
'Discourse'],
'Sign_SYS1': ['-', '+', '+', '+', '-']})
np.random.seed(10)
r1 = np.random.choice(range(1, 5), 10)
data = {'Grammar': np.random.randn(10),
'Vocabulary': r1**2,
'Discourse': np.random.randn(10),
'r1': r1,
'spkitemlab': ['a-5'] * 10}
df = pd.DataFrame(data)
df_specs = self.fsp.generate_specs(df,
['Grammar',
'Vocabulary',
'Discourse'],
'r1',
feature_subset_specs,
'SYS1')
assert_array_equal(df_specs['feature'], ['Grammar', 'Vocabulary', 'Discourse'])
assert_equal(df_specs['transform'][1], 'sqrt')
def test_generate_specs_from_data_when_transformation_changes_sign(self):
feature_subset_specs = pd.DataFrame({'Feature': ['Grammar',
'Vocabulary',
'Fluency',
'Content_coverage',
'Discourse'],
'Sign_SYS1': ['-', '+', '+', '+', '-']})
np.random.seed(10)
r1 = np.random.choice(range(1, 5), 10)
data = {'Grammar': np.random.randn(10),
'Vocabulary': 1 / r1,
'Discourse': np.random.randn(10),
'r1': r1,
'spkitemlab': ['a-5'] * 10}
df = pd.DataFrame(data)
df_specs = self.fsp.generate_specs(df,
['Grammar',
'Vocabulary',
'Discourse'],
'r1',
feature_subset_specs,
'SYS1')
assert_equal(df_specs['feature'][1], 'Vocabulary')
assert_equal(df_specs['transform'][1], 'addOneInv')
assert_equal(df_specs['sign'][1], -1)
def test_generate_specs_from_data_no_subset_specs(self):
np.random.seed(10)
data = {'Grammar': np.random.randn(10),
'Fluency': np.random.randn(10),
'Discourse': np.random.randn(10),
'r1': np.random.choice(4, 10),
'spkitemlab': ['a-5'] * 10}
df = pd.DataFrame(data)
df_specs = self.fsp.generate_specs(df,
['Grammar',
'Fluency',
'Discourse'],
'r1')
assert_equal(len(df_specs), 3)
assert_array_equal(df_specs['feature'], ['Grammar', 'Fluency', 'Discourse'])
assert_array_equal(df_specs['sign'], [1.0, 1.0, 1.0])
def test_validate_feature_specs(self):
df_feature_specs = pd.DataFrame({'feature': ['f1', 'f2', 'f3'],
'sign': [1.0, 1.0, -1.0],
'transform': ['raw', 'inv', 'sqrt']})
df_new_feature_specs = self.fsp.validate_feature_specs(df_feature_specs)
assert_frame_equal(df_feature_specs, df_new_feature_specs)
def test_validate_feature_specs_with_Feature_as_column(self):
df_feature_specs = pd.DataFrame({'Feature': ['f1', 'f2', 'f3'],
'sign': [1.0, 1.0, -1.0],
'transform': ['raw', 'inv', 'sqrt']})
df_expected_feature_specs = pd.DataFrame({'feature': ['f1', 'f2', 'f3'],
'sign': [1.0, 1.0, -1.0],
'transform': ['raw', 'inv', 'sqrt']})
df_new_feature_specs = self.fsp.validate_feature_specs(df_feature_specs)
assert_frame_equal(df_new_feature_specs, df_expected_feature_specs)
def test_validate_feature_specs_sign_to_float(self):
df_feature_specs = pd.DataFrame({'feature': ['f1', 'f2', 'f3'],
'sign': ['1', '1', '-1'],
'transform': ['raw', 'inv', 'sqrt']})
df_expected_feature_specs = pd.DataFrame({'feature': ['f1', 'f2', 'f3'],
'sign': [1.0, 1.0, -1.0],
'transform': ['raw', 'inv', 'sqrt']})
df_new_feature_specs = self.fsp.validate_feature_specs(df_feature_specs)
assert_frame_equal(df_new_feature_specs, df_expected_feature_specs)
def test_validate_feature_specs_add_default_values(self):
df_feature_specs = pd.DataFrame({'feature': ['f1', 'f2', 'f3']})
df_expected_feature_specs = pd.DataFrame({'feature': ['f1', 'f2', 'f3'],
'sign': [1, 1, 1],
'transform': ['raw', 'raw', 'raw']})
df_new_feature_specs = self.fsp.validate_feature_specs(df_feature_specs)
assert_frame_equal(df_new_feature_specs, df_expected_feature_specs)
@raises(ValueError)
def test_validate_feature_specs_wrong_sign_format(self):
df_feature_specs = pd.DataFrame({'feature': ['f1', 'f2', 'f3'],
'sign': ['+', '+', '-'],
'transform': ['raw', 'inv', 'sqrt']})
self.fsp.validate_feature_specs(df_feature_specs)
@raises(ValueError)
def test_validate_feature_duplicate_feature(self):
df_feature_specs = pd.DataFrame({'feature': ['f1', 'f1', 'f3'],
'sign': ['+', '+', '-'],
'transform': ['raw', 'inv', 'sqrt']})
self.fsp.validate_feature_specs(df_feature_specs)
@raises(KeyError)
def test_validate_feature_missing_feature_column(self):
df_feature_specs = pd.DataFrame({'FeatureName': ['f1', 'f1', 'f3'],
'sign': ['+', '+', '-'],
'transform': ['raw', 'inv', 'sqrt']})
self.fsp.validate_feature_specs(df_feature_specs)
class TestFeatureSubsetProcessor:
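    """Tests for the FeatureSubsetProcessor class from rsmtool.preprocessor."""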
def setUp(self):
self.fsp = FeatureSubsetProcessor()
def test_select_by_subset(self):
feature_subset_specs = pd.DataFrame({'Feature': ['Grammar',
'Vocabulary',
'Fluency',
'Content_coverage',
'Discourse',
'Pronunciation',
'Prosody',
'Content_accuracy'],
'high_entropy': [1, 1, 1, 1, 1, 1, 1, 0],
'low_entropy': [0, 0, 1, 0, 0, 1, 1, 1]})
        # 'Content_accuracy' is not in the 'high_entropy' subset, so it should be
        # dropped from the returned list (and may trigger a warning)
fnames = ['Grammar', 'Vocabulary', 'Pronunciation', 'Content_accuracy']
high_entropy_fnames = ['Grammar', 'Vocabulary', 'Pronunciation']
assert_array_equal(self.fsp.select_by_subset(fnames,
feature_subset_specs,
'high_entropy'),
high_entropy_fnames)
def test_select_by_subset_warnings(self):
feature_subset_specs = pd.DataFrame({'Feature': ['Grammar',
'Vocabulary',
'Fluency',
'Content_coverage',
'Discourse',
'Pronunciation',
'Prosody',
'Content_accuracy'],
'high_entropy': [1, 1, 1, 1, 1, 1, 1, 0],
'low_entropy': [0, 0, 1, 0, 0, 1, 1, 1]})
extra_fnames = ['Grammar', 'Vocabulary', 'Rhythm']
assert_array_equal(self.fsp.select_by_subset(extra_fnames,
feature_subset_specs,
'high_entropy'),
['Grammar', 'Vocabulary'])
def test_check_feature_subset_file_subset_only(self):
feature_specs = pd.DataFrame({'feature': ['f1', 'f2', 'f3'],
'subset1': [0, 1, 0]})
self.fsp.check_feature_subset_file(feature_specs, 'subset1')
def test_check_feature_subset_file_sign_only(self):
feature_specs = pd.DataFrame({'feature': ['f1', 'f2', 'f3'],
'sign_SYS': ['+', '-', '+']})
self.fsp.check_feature_subset_file(feature_specs, sign='SYS')
def test_check_feature_subset_file_sign_and_subset(self):
feature_specs = pd.DataFrame({'feature': ['f1', 'f2', 'f3'],
'sign_SYS': ['+', '-', '+'],
'subset1': [0, 1, 0]})
self.fsp.check_feature_subset_file(feature_specs,
subset='subset1',
sign='SYS')
def test_check_feature_subset_file_sign_named_with_sign(self):
feature_specs = pd.DataFrame({'feature': ['f1', 'f2', 'f3'],
'sign_SYS': ['+', '-', '+']})
self.fsp.check_feature_subset_file(feature_specs, sign='SYS')
def test_check_feature_subset_file_sign_named_with_Sign(self):
feature_specs = pd.DataFrame({'feature': ['f1', 'f2', 'f3'],
'Sign_SYS': ['+', '-', '+']})
self.fsp.check_feature_subset_file(feature_specs, sign='SYS')
@raises(ValueError)
def test_check_feature_subset_file_sign_named_something_else(self):
feature_specs = pd.DataFrame({'feature': ['f1', 'f2', 'f3'],
'SYS_sign': ['+', '-', '+']})
self.fsp.check_feature_subset_file(feature_specs, sign='SYS')
@raises(ValueError)
def test_check_feature_subset_file_multiple_sign_columns(self):
feature_specs = pd.DataFrame({'feature': ['f1', 'f2', 'f3'],
'sign_SYS': ['+', '-', '+'],
'Sign_SYS': ['-', '+', '-']})
self.fsp.check_feature_subset_file(feature_specs, sign='SYS')
@raises(ValueError)
def test_check_feature_subset_file_no_feature_column(self):
feature_specs = pd.DataFrame({'feat': ['f1', 'f2', 'f3'], 'subset1': [0, 1, 0]})
self.fsp.check_feature_subset_file(feature_specs, 'subset1')
@raises(ValueError)
def test_check_feature_subset_file_no_subset_column(self):
feature_specs = pd.DataFrame({'Feature': ['f1', 'f2', 'f3'], 'subset1': [0, 1, 0]})
self.fsp.check_feature_subset_file(feature_specs, 'subset2')
@raises(ValueError)
def test_check_feature_subset_file_wrong_values_in_subset(self):
feature_specs = pd.DataFrame({'Feature': ['f1', 'f2', 'f3'],
'subset1': ['yes', 'no', 'yes']})
self.fsp.check_feature_subset_file(feature_specs, 'subset1')
@raises(ValueError)
def test_check_feature_subset_file_no_sign_column(self):
feature_specs = pd.DataFrame({'feature': ['f1', 'f2', 'f3'],
'subset1': [0, 1, 0]})
self.fsp.check_feature_subset_file(feature_specs, sign='subset1')
@raises(ValueError)
def test_check_feature_subset_file_wrong_values_in_sign(self):
feature_specs = pd.DataFrame({'Feature': ['f1', 'f2', 'f3'],
'sign_SYS1': ['+1', '-1', '+1']})
self.fsp.check_feature_subset_file(feature_specs, sign='SYS1')
|
|
# -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests."""
import mock
import pytest
from google.cloud import irm_v1alpha2
from google.cloud.irm_v1alpha2.proto import incidents_pb2
from google.cloud.irm_v1alpha2.proto import incidents_service_pb2
from google.protobuf import empty_pb2
class MultiCallableStub(object):
"""Stub for the grpc.UnaryUnaryMultiCallable interface."""
def __init__(self, method, channel_stub):
self.method = method
self.channel_stub = channel_stub
def __call__(self, request, timeout=None, metadata=None, credentials=None):
self.channel_stub.requests.append((self.method, request))
response = None
if self.channel_stub.responses:
response = self.channel_stub.responses.pop()
if isinstance(response, Exception):
raise response
if response:
return response
class ChannelStub(object):
"""Stub for the grpc.Channel interface."""
    def __init__(self, responses=None):
        # Use None as the default to avoid sharing one mutable list across stub instances.
        self.responses = responses if responses is not None else []
self.requests = []
def unary_unary(self, method, request_serializer=None, response_deserializer=None):
return MultiCallableStub(method, self)
class CustomException(Exception):
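    """Custom exception used to simulate API errors in these tests."""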
pass
class TestIncidentServiceClient(object):
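    """Unit tests for IncidentServiceClient, run against a stubbed gRPC channel."""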
def test_create_incident(self):
# Setup Expected Response
name = "name3373707"
title = "title110371416"
etag = "etag3123477"
duplicate_incident = "duplicateIncident-316496506"
expected_response = {
"name": name,
"title": title,
"etag": etag,
"duplicate_incident": duplicate_incident,
}
expected_response = incidents_pb2.Incident(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
incident = {}
parent = client.project_path("[PROJECT]")
response = client.create_incident(incident, parent)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.CreateIncidentRequest(
incident=incident, parent=parent
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_create_incident_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
incident = {}
parent = client.project_path("[PROJECT]")
with pytest.raises(CustomException):
client.create_incident(incident, parent)
def test_get_incident(self):
# Setup Expected Response
name_2 = "name2-1052831874"
title = "title110371416"
etag = "etag3123477"
duplicate_incident = "duplicateIncident-316496506"
expected_response = {
"name": name_2,
"title": title,
"etag": etag,
"duplicate_incident": duplicate_incident,
}
expected_response = incidents_pb2.Incident(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
name = client.incident_path("[PROJECT]", "[INCIDENT]")
response = client.get_incident(name)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.GetIncidentRequest(name=name)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_get_incident_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
name = client.incident_path("[PROJECT]", "[INCIDENT]")
with pytest.raises(CustomException):
client.get_incident(name)
def test_search_incidents(self):
# Setup Expected Response
next_page_token = ""
incidents_element = {}
incidents = [incidents_element]
expected_response = {"next_page_token": next_page_token, "incidents": incidents}
expected_response = incidents_service_pb2.SearchIncidentsResponse(
**expected_response
)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
parent = client.project_path("[PROJECT]")
paged_list_response = client.search_incidents(parent)
resources = list(paged_list_response)
assert len(resources) == 1
assert expected_response.incidents[0] == resources[0]
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.SearchIncidentsRequest(parent=parent)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_search_incidents_exception(self):
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
parent = client.project_path("[PROJECT]")
paged_list_response = client.search_incidents(parent)
with pytest.raises(CustomException):
list(paged_list_response)
def test_update_incident(self):
# Setup Expected Response
name = "name3373707"
title = "title110371416"
etag = "etag3123477"
duplicate_incident = "duplicateIncident-316496506"
expected_response = {
"name": name,
"title": title,
"etag": etag,
"duplicate_incident": duplicate_incident,
}
expected_response = incidents_pb2.Incident(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
incident = {}
response = client.update_incident(incident)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.UpdateIncidentRequest(
incident=incident
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_update_incident_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
incident = {}
with pytest.raises(CustomException):
client.update_incident(incident)
def test_search_similar_incidents(self):
# Setup Expected Response
next_page_token = ""
results_element = {}
results = [results_element]
expected_response = {"next_page_token": next_page_token, "results": results}
expected_response = incidents_service_pb2.SearchSimilarIncidentsResponse(
**expected_response
)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
name = client.incident_path("[PROJECT]", "[INCIDENT]")
paged_list_response = client.search_similar_incidents(name)
resources = list(paged_list_response)
assert len(resources) == 1
assert expected_response.results[0] == resources[0]
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.SearchSimilarIncidentsRequest(
name=name
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_search_similar_incidents_exception(self):
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
name = client.incident_path("[PROJECT]", "[INCIDENT]")
paged_list_response = client.search_similar_incidents(name)
with pytest.raises(CustomException):
list(paged_list_response)
def test_create_annotation(self):
# Setup Expected Response
name = "name3373707"
content = "content951530617"
content_type = "contentType831846208"
expected_response = {
"name": name,
"content": content,
"content_type": content_type,
}
expected_response = incidents_pb2.Annotation(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
annotation = {}
response = client.create_annotation(parent, annotation)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.CreateAnnotationRequest(
parent=parent, annotation=annotation
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_create_annotation_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
annotation = {}
with pytest.raises(CustomException):
client.create_annotation(parent, annotation)
def test_list_annotations(self):
# Setup Expected Response
next_page_token = ""
annotations_element = {}
annotations = [annotations_element]
expected_response = {
"next_page_token": next_page_token,
"annotations": annotations,
}
expected_response = incidents_service_pb2.ListAnnotationsResponse(
**expected_response
)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
paged_list_response = client.list_annotations(parent)
resources = list(paged_list_response)
assert len(resources) == 1
assert expected_response.annotations[0] == resources[0]
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.ListAnnotationsRequest(parent=parent)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_list_annotations_exception(self):
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
paged_list_response = client.list_annotations(parent)
with pytest.raises(CustomException):
list(paged_list_response)
def test_create_tag(self):
# Setup Expected Response
name = "name3373707"
display_name = "displayName1615086568"
expected_response = {"name": name, "display_name": display_name}
expected_response = incidents_pb2.Tag(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
tag = {}
response = client.create_tag(parent, tag)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.CreateTagRequest(
parent=parent, tag=tag
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_create_tag_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
tag = {}
with pytest.raises(CustomException):
client.create_tag(parent, tag)
def test_delete_tag(self):
channel = ChannelStub()
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
name = client.tag_path("[PROJECT]", "[INCIDENT]", "[TAG]")
client.delete_tag(name)
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.DeleteTagRequest(name=name)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_delete_tag_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
name = client.tag_path("[PROJECT]", "[INCIDENT]", "[TAG]")
with pytest.raises(CustomException):
client.delete_tag(name)
def test_list_tags(self):
# Setup Expected Response
next_page_token = ""
tags_element = {}
tags = [tags_element]
expected_response = {"next_page_token": next_page_token, "tags": tags}
expected_response = incidents_service_pb2.ListTagsResponse(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
paged_list_response = client.list_tags(parent)
resources = list(paged_list_response)
assert len(resources) == 1
assert expected_response.tags[0] == resources[0]
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.ListTagsRequest(parent=parent)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_list_tags_exception(self):
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
paged_list_response = client.list_tags(parent)
with pytest.raises(CustomException):
list(paged_list_response)
def test_create_signal(self):
# Setup Expected Response
name = "name3373707"
etag = "etag3123477"
incident = "incident86983890"
title = "title110371416"
content_type = "contentType831846208"
content = "content951530617"
expected_response = {
"name": name,
"etag": etag,
"incident": incident,
"title": title,
"content_type": content_type,
"content": content,
}
expected_response = incidents_pb2.Signal(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
parent = client.project_path("[PROJECT]")
signal = {}
response = client.create_signal(parent, signal)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.CreateSignalRequest(
parent=parent, signal=signal
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_create_signal_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
parent = client.project_path("[PROJECT]")
signal = {}
with pytest.raises(CustomException):
client.create_signal(parent, signal)
def test_search_signals(self):
# Setup Expected Response
next_page_token = ""
signals_element = {}
signals = [signals_element]
expected_response = {"next_page_token": next_page_token, "signals": signals}
expected_response = incidents_service_pb2.SearchSignalsResponse(
**expected_response
)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
parent = client.project_path("[PROJECT]")
paged_list_response = client.search_signals(parent)
resources = list(paged_list_response)
assert len(resources) == 1
assert expected_response.signals[0] == resources[0]
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.SearchSignalsRequest(parent=parent)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_search_signals_exception(self):
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
parent = client.project_path("[PROJECT]")
paged_list_response = client.search_signals(parent)
with pytest.raises(CustomException):
list(paged_list_response)
def test_get_signal(self):
# Setup Expected Response
name_2 = "name2-1052831874"
etag = "etag3123477"
incident = "incident86983890"
title = "title110371416"
content_type = "contentType831846208"
content = "content951530617"
expected_response = {
"name": name_2,
"etag": etag,
"incident": incident,
"title": title,
"content_type": content_type,
"content": content,
}
expected_response = incidents_pb2.Signal(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
name = client.signal_path("[PROJECT]", "[SIGNAL]")
response = client.get_signal(name)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.GetSignalRequest(name=name)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_get_signal_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
name = client.signal_path("[PROJECT]", "[SIGNAL]")
with pytest.raises(CustomException):
client.get_signal(name)
def test_lookup_signal(self):
# Setup Expected Response
name = "name3373707"
etag = "etag3123477"
incident = "incident86983890"
title = "title110371416"
content_type = "contentType831846208"
content = "content951530617"
expected_response = {
"name": name,
"etag": etag,
"incident": incident,
"title": title,
"content_type": content_type,
"content": content,
}
expected_response = incidents_pb2.Signal(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
response = client.lookup_signal()
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.LookupSignalRequest()
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_lookup_signal_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
with pytest.raises(CustomException):
client.lookup_signal()
def test_update_signal(self):
# Setup Expected Response
name = "name3373707"
etag = "etag3123477"
incident = "incident86983890"
title = "title110371416"
content_type = "contentType831846208"
content = "content951530617"
expected_response = {
"name": name,
"etag": etag,
"incident": incident,
"title": title,
"content_type": content_type,
"content": content,
}
expected_response = incidents_pb2.Signal(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
signal = {}
response = client.update_signal(signal)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.UpdateSignalRequest(signal=signal)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_update_signal_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
signal = {}
with pytest.raises(CustomException):
client.update_signal(signal)
def test_escalate_incident(self):
# Setup Expected Response
expected_response = {}
expected_response = incidents_service_pb2.EscalateIncidentResponse(
**expected_response
)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
incident = {}
response = client.escalate_incident(incident)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.EscalateIncidentRequest(
incident=incident
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_escalate_incident_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
incident = {}
with pytest.raises(CustomException):
client.escalate_incident(incident)
def test_create_artifact(self):
# Setup Expected Response
name = "name3373707"
display_name = "displayName1615086568"
etag = "etag3123477"
url = "url116079"
expected_response = {
"name": name,
"display_name": display_name,
"etag": etag,
"url": url,
}
expected_response = incidents_pb2.Artifact(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
artifact = {}
response = client.create_artifact(parent, artifact)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.CreateArtifactRequest(
parent=parent, artifact=artifact
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_create_artifact_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
artifact = {}
with pytest.raises(CustomException):
client.create_artifact(parent, artifact)
def test_list_artifacts(self):
# Setup Expected Response
next_page_token = ""
artifacts_element = {}
artifacts = [artifacts_element]
expected_response = {"next_page_token": next_page_token, "artifacts": artifacts}
expected_response = incidents_service_pb2.ListArtifactsResponse(
**expected_response
)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
paged_list_response = client.list_artifacts(parent)
resources = list(paged_list_response)
assert len(resources) == 1
assert expected_response.artifacts[0] == resources[0]
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.ListArtifactsRequest(parent=parent)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_list_artifacts_exception(self):
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
paged_list_response = client.list_artifacts(parent)
with pytest.raises(CustomException):
list(paged_list_response)
def test_update_artifact(self):
# Setup Expected Response
name = "name3373707"
display_name = "displayName1615086568"
etag = "etag3123477"
url = "url116079"
expected_response = {
"name": name,
"display_name": display_name,
"etag": etag,
"url": url,
}
expected_response = incidents_pb2.Artifact(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
artifact = {}
response = client.update_artifact(artifact)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.UpdateArtifactRequest(
artifact=artifact
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_update_artifact_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
artifact = {}
with pytest.raises(CustomException):
client.update_artifact(artifact)
def test_delete_artifact(self):
channel = ChannelStub()
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
name = client.artifact_path("[PROJECT]", "[INCIDENT]", "[ARTIFACT]")
client.delete_artifact(name)
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.DeleteArtifactRequest(name=name)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_delete_artifact_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
name = client.artifact_path("[PROJECT]", "[INCIDENT]", "[ARTIFACT]")
with pytest.raises(CustomException):
client.delete_artifact(name)
def test_send_shift_handoff(self):
# Setup Expected Response
content_type = "contentType831846208"
content = "content951530617"
expected_response = {"content_type": content_type, "content": content}
expected_response = incidents_service_pb2.SendShiftHandoffResponse(
**expected_response
)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
parent = client.project_path("[PROJECT]")
recipients = []
subject = "subject-1867885268"
response = client.send_shift_handoff(parent, recipients, subject)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.SendShiftHandoffRequest(
parent=parent, recipients=recipients, subject=subject
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_send_shift_handoff_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
parent = client.project_path("[PROJECT]")
recipients = []
subject = "subject-1867885268"
with pytest.raises(CustomException):
client.send_shift_handoff(parent, recipients, subject)
def test_create_subscription(self):
# Setup Expected Response
name = "name3373707"
etag = "etag3123477"
expected_response = {"name": name, "etag": etag}
expected_response = incidents_pb2.Subscription(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
subscription = {}
response = client.create_subscription(parent, subscription)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.CreateSubscriptionRequest(
parent=parent, subscription=subscription
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_create_subscription_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
subscription = {}
with pytest.raises(CustomException):
client.create_subscription(parent, subscription)
def test_update_subscription(self):
# Setup Expected Response
name = "name3373707"
etag = "etag3123477"
expected_response = {"name": name, "etag": etag}
expected_response = incidents_pb2.Subscription(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
subscription = {}
response = client.update_subscription(subscription)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.UpdateSubscriptionRequest(
subscription=subscription
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_update_subscription_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
subscription = {}
with pytest.raises(CustomException):
client.update_subscription(subscription)
def test_list_subscriptions(self):
# Setup Expected Response
next_page_token = ""
subscriptions_element = {}
subscriptions = [subscriptions_element]
expected_response = {
"next_page_token": next_page_token,
"subscriptions": subscriptions,
}
expected_response = incidents_service_pb2.ListSubscriptionsResponse(
**expected_response
)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
paged_list_response = client.list_subscriptions(parent)
resources = list(paged_list_response)
assert len(resources) == 1
assert expected_response.subscriptions[0] == resources[0]
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.ListSubscriptionsRequest(parent=parent)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_list_subscriptions_exception(self):
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
paged_list_response = client.list_subscriptions(parent)
with pytest.raises(CustomException):
list(paged_list_response)
def test_delete_subscription(self):
channel = ChannelStub()
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
name = client.subscription_path("[PROJECT]", "[INCIDENT]", "[SUBSCRIPTION]")
client.delete_subscription(name)
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.DeleteSubscriptionRequest(name=name)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_delete_subscription_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
name = client.subscription_path("[PROJECT]", "[INCIDENT]", "[SUBSCRIPTION]")
with pytest.raises(CustomException):
client.delete_subscription(name)
def test_create_incident_role_assignment(self):
# Setup Expected Response
name = "name3373707"
etag = "etag3123477"
expected_response = {"name": name, "etag": etag}
expected_response = incidents_pb2.IncidentRoleAssignment(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
incident_role_assignment = {}
response = client.create_incident_role_assignment(
parent, incident_role_assignment
)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.CreateIncidentRoleAssignmentRequest(
parent=parent, incident_role_assignment=incident_role_assignment
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_create_incident_role_assignment_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
incident_role_assignment = {}
with pytest.raises(CustomException):
client.create_incident_role_assignment(parent, incident_role_assignment)
def test_delete_incident_role_assignment(self):
channel = ChannelStub()
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
name = client.role_assignment_path(
"[PROJECT]", "[INCIDENT]", "[ROLE_ASSIGNMENT]"
)
client.delete_incident_role_assignment(name)
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.DeleteIncidentRoleAssignmentRequest(
name=name
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_delete_incident_role_assignment_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
name = client.role_assignment_path(
"[PROJECT]", "[INCIDENT]", "[ROLE_ASSIGNMENT]"
)
with pytest.raises(CustomException):
client.delete_incident_role_assignment(name)
def test_list_incident_role_assignments(self):
# Setup Expected Response
next_page_token = ""
incident_role_assignments_element = {}
incident_role_assignments = [incident_role_assignments_element]
expected_response = {
"next_page_token": next_page_token,
"incident_role_assignments": incident_role_assignments,
}
expected_response = incidents_service_pb2.ListIncidentRoleAssignmentsResponse(
**expected_response
)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
paged_list_response = client.list_incident_role_assignments(parent)
resources = list(paged_list_response)
assert len(resources) == 1
assert expected_response.incident_role_assignments[0] == resources[0]
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.ListIncidentRoleAssignmentsRequest(
parent=parent
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_list_incident_role_assignments_exception(self):
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
parent = client.incident_path("[PROJECT]", "[INCIDENT]")
paged_list_response = client.list_incident_role_assignments(parent)
with pytest.raises(CustomException):
list(paged_list_response)
def test_request_incident_role_handover(self):
# Setup Expected Response
name_2 = "name2-1052831874"
etag = "etag3123477"
expected_response = {"name": name_2, "etag": etag}
expected_response = incidents_pb2.IncidentRoleAssignment(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
name = client.role_assignment_path(
"[PROJECT]", "[INCIDENT]", "[ROLE_ASSIGNMENT]"
)
new_assignee = {}
response = client.request_incident_role_handover(name, new_assignee)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.RequestIncidentRoleHandoverRequest(
name=name, new_assignee=new_assignee
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_request_incident_role_handover_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
name = client.role_assignment_path(
"[PROJECT]", "[INCIDENT]", "[ROLE_ASSIGNMENT]"
)
new_assignee = {}
with pytest.raises(CustomException):
client.request_incident_role_handover(name, new_assignee)
def test_confirm_incident_role_handover(self):
# Setup Expected Response
name_2 = "name2-1052831874"
etag = "etag3123477"
expected_response = {"name": name_2, "etag": etag}
expected_response = incidents_pb2.IncidentRoleAssignment(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
name = client.role_assignment_path(
"[PROJECT]", "[INCIDENT]", "[ROLE_ASSIGNMENT]"
)
new_assignee = {}
response = client.confirm_incident_role_handover(name, new_assignee)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.ConfirmIncidentRoleHandoverRequest(
name=name, new_assignee=new_assignee
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_confirm_incident_role_handover_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
name = client.role_assignment_path(
"[PROJECT]", "[INCIDENT]", "[ROLE_ASSIGNMENT]"
)
new_assignee = {}
with pytest.raises(CustomException):
client.confirm_incident_role_handover(name, new_assignee)
def test_force_incident_role_handover(self):
# Setup Expected Response
name_2 = "name2-1052831874"
etag = "etag3123477"
expected_response = {"name": name_2, "etag": etag}
expected_response = incidents_pb2.IncidentRoleAssignment(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
name = client.role_assignment_path(
"[PROJECT]", "[INCIDENT]", "[ROLE_ASSIGNMENT]"
)
new_assignee = {}
response = client.force_incident_role_handover(name, new_assignee)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.ForceIncidentRoleHandoverRequest(
name=name, new_assignee=new_assignee
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_force_incident_role_handover_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
name = client.role_assignment_path(
"[PROJECT]", "[INCIDENT]", "[ROLE_ASSIGNMENT]"
)
new_assignee = {}
with pytest.raises(CustomException):
client.force_incident_role_handover(name, new_assignee)
def test_cancel_incident_role_handover(self):
# Setup Expected Response
name_2 = "name2-1052831874"
etag = "etag3123477"
expected_response = {"name": name_2, "etag": etag}
expected_response = incidents_pb2.IncidentRoleAssignment(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup Request
name = client.role_assignment_path(
"[PROJECT]", "[INCIDENT]", "[ROLE_ASSIGNMENT]"
)
new_assignee = {}
response = client.cancel_incident_role_handover(name, new_assignee)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = incidents_service_pb2.CancelIncidentRoleHandoverRequest(
name=name, new_assignee=new_assignee
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_cancel_incident_role_handover_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = irm_v1alpha2.IncidentServiceClient()
# Setup request
name = client.role_assignment_path(
"[PROJECT]", "[INCIDENT]", "[ROLE_ASSIGNMENT]"
)
new_assignee = {}
with pytest.raises(CustomException):
client.cancel_incident_role_handover(name, new_assignee)
|
|
"""Tasks related to projects
This includes fetching repository code, cleaning ``conf.py`` files, and
rebuilding documentation.
"""
import os
import shutil
import json
import logging
import socket
import requests
import hashlib
from collections import defaultdict
from celery import task, Task
from djcelery import celery as celery_app
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from readthedocs.builds.constants import (LATEST,
BUILD_STATE_CLONING,
BUILD_STATE_INSTALLING,
BUILD_STATE_BUILDING)
from readthedocs.builds.models import Build, Version
from readthedocs.core.utils import send_email, run_on_app_servers
from readthedocs.cdn.purge import purge
from readthedocs.doc_builder.loader import get_builder_class
from readthedocs.doc_builder.config import ConfigWrapper, load_yaml_config
from readthedocs.doc_builder.environments import (LocalEnvironment,
DockerEnvironment)
from readthedocs.doc_builder.exceptions import BuildEnvironmentError
from readthedocs.doc_builder.python_environments import Virtualenv, Conda
from readthedocs.projects.exceptions import ProjectImportError
from readthedocs.projects.models import ImportedFile, Project
from readthedocs.projects.utils import make_api_version, make_api_project, symlink
from readthedocs.projects.constants import LOG_TEMPLATE
from readthedocs.privacy.loader import Syncer
from readthedocs.search.parse_json import process_all_json_files
from readthedocs.search.utils import process_mkdocs_json
from readthedocs.restapi.utils import index_search_request
from readthedocs.vcs_support import utils as vcs_support_utils
from readthedocs.api.client import api as api_v1
from readthedocs.restapi.client import api as api_v2
from readthedocs.projects.signals import before_vcs, after_vcs, before_build, after_build
from readthedocs.core.resolver import resolve_path
log = logging.getLogger(__name__)
HTML_ONLY = getattr(settings, 'HTML_ONLY_PROJECTS', ())
class UpdateDocsTask(Task):
"""
The main entry point for updating documentation.
It handles all of the logic around whether a project is imported or we
created it. Then it will build the html docs and other requested parts.
`pk`
Primary key of the project to update
`record`
        Whether to keep a record of the update in the database. Useful for
        hiding builds triggered from the shell from end users, for
        example.
"""
max_retries = 5
default_retry_delay = (7 * 60)
name = 'update_docs'
def __init__(self, build_env=None, python_env=None, force=False, search=True, localmedia=True,
build=None, project=None, version=None):
self.build_env = build_env
self.python_env = python_env
self.build_force = force
self.build_search = search
self.build_localmedia = localmedia
self.build = {}
if build is not None:
self.build = build
self.version = {}
if version is not None:
self.version = version
self.project = {}
if project is not None:
self.project = project
def _log(self, msg):
log.info(LOG_TEMPLATE
.format(project=self.project.slug,
version=self.version.slug,
msg=msg))
def run(self, pk, version_pk=None, build_pk=None, record=True, docker=False,
search=True, force=False, localmedia=True, **kwargs):
self.project = self.get_project(pk)
self.version = self.get_version(self.project, version_pk)
self.build = self.get_build(build_pk)
self.build_search = search
self.build_localmedia = localmedia
self.build_force = force
env_cls = LocalEnvironment
self.setup_env = env_cls(project=self.project, version=self.version,
build=self.build, record=record)
# Environment used for code checkout & initial configuration reading
with self.setup_env:
if self.project.skip:
raise BuildEnvironmentError(
_('Builds for this project are temporarily disabled'))
try:
self.setup_vcs()
            except vcs_support_utils.LockTimeout as e:
self.retry(exc=e, throw=False)
raise BuildEnvironmentError(
'Version locked, retrying in 5 minutes.',
status_code=423
)
self.config = load_yaml_config(version=self.version)
env_vars = self.get_env_vars()
if docker or settings.DOCKER_ENABLE:
env_cls = DockerEnvironment
self.build_env = env_cls(project=self.project, version=self.version,
build=self.build, record=record, environment=env_vars)
# Environment used for building code, usually with Docker
with self.build_env:
if self.project.documentation_type == 'auto':
self.update_documentation_type()
python_env_cls = Virtualenv
if self.config.use_conda:
self._log('Using conda')
python_env_cls = Conda
self.python_env = python_env_cls(version=self.version,
build_env=self.build_env,
config=self.config)
self.setup_environment()
# TODO the build object should have an idea of these states, extend
# the model to include an idea of these outcomes
outcomes = self.build_docs()
build_id = self.build.get('id')
# Web Server Tasks
if build_id:
finish_build.delay(
version_pk=self.version.pk,
build_pk=build_id,
hostname=socket.gethostname(),
html=outcomes['html'],
search=outcomes['search'],
localmedia=outcomes['localmedia'],
pdf=outcomes['pdf'],
epub=outcomes['epub'],
)
if self.build_env.failed:
self.send_notifications()
@staticmethod
def get_project(project_pk):
"""Get project from API"""
project_data = api_v1.project(project_pk).get()
project = make_api_project(project_data)
return project
@staticmethod
def get_version(project, version_pk):
"""Ensure we're using a sane version"""
if version_pk:
version_data = api_v1.version(version_pk).get()
else:
version_data = (api_v1
.version(project.slug)
.get(slug=LATEST)['objects'][0])
return make_api_version(version_data)
@staticmethod
def get_build(build_pk):
"""
Retrieve build object from API
:param build_pk: Build primary key
"""
build = {}
if build_pk:
build = api_v2.build(build_pk).get()
return dict((key, val) for (key, val) in build.items()
if key not in ['project', 'version', 'resource_uri',
'absolute_uri'])
def setup_vcs(self):
"""
Update the checkout of the repo to make sure it's the latest.
This also syncs versions in the DB.
"""
self.setup_env.update_build(state=BUILD_STATE_CLONING)
self._log(msg='Updating docs from VCS')
try:
update_imported_docs(self.version.pk)
commit = self.project.vcs_repo(self.version.slug).commit
if commit:
self.build['commit'] = commit
except ProjectImportError:
raise BuildEnvironmentError('Failed to import project',
status_code=404)
def get_env_vars(self):
"""
Get bash environment variables used for all builder commands.
"""
env = {
'READTHEDOCS': True,
'READTHEDOCS_VERSION': self.version.slug,
'READTHEDOCS_PROJECT': self.project.slug
}
if self.config.use_conda:
env.update({
'CONDA_ENVS_PATH': os.path.join(self.project.doc_path, 'conda'),
'CONDA_DEFAULT_ENV': self.version.slug,
'BIN_PATH': os.path.join(self.project.doc_path, 'conda', self.version.slug, 'bin')
})
else:
env.update({
'BIN_PATH': os.path.join(self.project.doc_path, 'envs', self.version.slug, 'bin')
})
return env
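    # Rough sketch of the mapping returned above for a conda-enabled build;
    # '<doc_path>' and 'myproject' are placeholders, not real values:
    #
    #     {
    #         'READTHEDOCS': True,
    #         'READTHEDOCS_VERSION': 'latest',
    #         'READTHEDOCS_PROJECT': 'myproject',
    #         'CONDA_ENVS_PATH': '<doc_path>/conda',
    #         'CONDA_DEFAULT_ENV': 'latest',
    #         'BIN_PATH': '<doc_path>/conda/latest/bin',
    #     }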
def update_documentation_type(self):
"""
Force Sphinx for 'auto' documentation type
        This used to detect the documentation type and set it to Sphinx for
        reST and MkDocs for Markdown. It now always forces Sphinx, since
        Sphinx also supports Markdown.
"""
ret = 'sphinx'
project_data = api_v2.project(self.project.pk).get()
project_data['documentation_type'] = ret
api_v2.project(self.project.pk).put(project_data)
self.project.documentation_type = ret
def setup_environment(self):
"""
Build the virtualenv and install the project into it.
Always build projects with a virtualenv.
"""
self.build_env.update_build(state=BUILD_STATE_INSTALLING)
self.python_env.delete_existing_build_dir()
self.python_env.setup_base()
self.python_env.install_core_requirements()
self.python_env.install_user_requirements()
self.python_env.install_package()
def build_docs(self):
"""Wrapper to all build functions
Executes the necessary builds for this task and returns whether the
build was successful or not.
:returns: Build outcomes with keys for html, search, localmedia, pdf,
and epub
:rtype: dict
"""
self.build_env.update_build(state=BUILD_STATE_BUILDING)
before_build.send(sender=self.version)
outcomes = defaultdict(lambda: False)
with self.project.repo_nonblockinglock(
version=self.version,
max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
outcomes['html'] = self.build_docs_html()
outcomes['search'] = self.build_docs_search()
outcomes['localmedia'] = self.build_docs_localmedia()
outcomes['pdf'] = self.build_docs_pdf()
outcomes['epub'] = self.build_docs_epub()
after_build.send(sender=self.version)
return outcomes
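    # The outcomes mapping defaults every key to False; a fully successful run
    # would look roughly like this (illustrative only):
    #
    #     {'html': True, 'search': True, 'localmedia': True,
    #      'pdf': True, 'epub': True}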
def build_docs_html(self):
"""Build HTML docs"""
html_builder = get_builder_class(self.project.documentation_type)(
build_env=self.build_env,
python_env=self.python_env,
)
if self.build_force:
html_builder.force()
html_builder.append_conf()
success = html_builder.build()
if success:
html_builder.move()
# Gracefully attempt to move files via task on web workers.
try:
move_files.delay(
version_pk=self.version.pk,
html=True,
hostname=socket.gethostname(),
)
except socket.error:
# TODO do something here
pass
return success
def build_docs_search(self):
"""Build search data with separate build"""
if self.build_search:
if self.project.is_type_mkdocs:
return self.build_docs_class('mkdocs_json')
if self.project.is_type_sphinx:
return self.build_docs_class('sphinx_search')
return False
def build_docs_localmedia(self):
"""Get local media files with separate build"""
if self.build_localmedia:
if self.project.is_type_sphinx:
return self.build_docs_class('sphinx_singlehtmllocalmedia')
return False
def build_docs_pdf(self):
"""Build PDF docs"""
if (self.project.slug in HTML_ONLY or
not self.project.is_type_sphinx or
not self.project.enable_pdf_build):
return False
return self.build_docs_class('sphinx_pdf')
def build_docs_epub(self):
"""Build ePub docs"""
if (self.project.slug in HTML_ONLY or
not self.project.is_type_sphinx or
not self.project.enable_epub_build):
return False
return self.build_docs_class('sphinx_epub')
def build_docs_class(self, builder_class):
"""Build docs with additional doc backends
        Failures in these steps are not fatal to the build, so we only raise a
        warning exception here. A hard error would still halt the build
        process.
"""
builder = get_builder_class(builder_class)(self.build_env, python_env=self.python_env)
success = builder.build()
builder.move()
return success
def send_notifications(self):
"""Send notifications on build failure"""
send_notifications.delay(self.version.pk, build_pk=self.build['id'])
update_docs = celery_app.tasks[UpdateDocsTask.name]
@task()
def update_imported_docs(version_pk):
"""
Check out or update the given project's repository
:param version_pk: Version id to update
"""
version_data = api_v1.version(version_pk).get()
version = make_api_version(version_data)
project = version.project
ret_dict = {}
# Make Dirs
if not os.path.exists(project.doc_path):
os.makedirs(project.doc_path)
if not project.vcs_repo():
raise ProjectImportError(("Repo type '{0}' unknown".format(project.repo_type)))
with project.repo_nonblockinglock(
version=version,
max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
# Get the actual code on disk
try:
before_vcs.send(sender=version)
if version:
log.info(
LOG_TEMPLATE.format(
project=project.slug,
version=version.slug,
msg='Checking out version {slug}: {identifier}'.format(
slug=version.slug,
identifier=version.identifier
)
)
)
version_slug = version.slug
version_repo = project.vcs_repo(version_slug)
ret_dict['checkout'] = version_repo.checkout(version.identifier)
else:
# Does this ever get called?
log.info(LOG_TEMPLATE.format(
project=project.slug, version=version.slug, msg='Updating to latest revision'))
version_slug = LATEST
version_repo = project.vcs_repo(version_slug)
ret_dict['checkout'] = version_repo.update()
except Exception:
raise
finally:
after_vcs.send(sender=version)
# Update tags/version
version_post_data = {'repo': version_repo.repo_url}
if version_repo.supports_tags:
version_post_data['tags'] = [
{'identifier': v.identifier,
'verbose_name': v.verbose_name,
} for v in version_repo.tags
]
if version_repo.supports_branches:
version_post_data['branches'] = [
{'identifier': v.identifier,
'verbose_name': v.verbose_name,
} for v in version_repo.branches
]
try:
api_v2.project(project.pk).sync_versions.post(version_post_data)
        except Exception as e:
            print("Sync Versions Exception: %s" % e)
return ret_dict
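# Illustrative shape of the payload posted to sync_versions above, assuming a
# git repository with one tag and one branch (identifiers are hypothetical):
#
#     {
#         'repo': 'https://github.com/org/project.git',
#         'tags': [{'identifier': 'abc1234', 'verbose_name': 'v1.0'}],
#         'branches': [{'identifier': 'master', 'verbose_name': 'master'}],
#     }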
# Web tasks
@task(queue='web')
def finish_build(version_pk, build_pk, hostname=None, html=False,
localmedia=False, search=False, pdf=False, epub=False):
"""Build Finished, do house keeping bits"""
version = Version.objects.get(pk=version_pk)
build = Build.objects.get(pk=build_pk)
if html:
version.active = True
version.built = True
version.save()
if not pdf:
clear_pdf_artifacts(version)
if not epub:
clear_epub_artifacts(version)
move_files(
version_pk=version_pk,
hostname=hostname,
html=html,
localmedia=localmedia,
search=search,
pdf=pdf,
epub=epub,
)
symlink(project=version.project)
# Delayed tasks
update_static_metadata.delay(version.project.pk)
fileify.delay(version.pk, commit=build.commit)
update_search.delay(version.pk, commit=build.commit)
@task(queue='web')
def move_files(version_pk, hostname, html=False, localmedia=False, search=False,
pdf=False, epub=False):
"""Task to move built documentation to web servers
:param version_pk: Version id to sync files for
:param hostname: Hostname to sync to
:param html: Sync HTML
:type html: bool
:param localmedia: Sync local media files
:type localmedia: bool
:param search: Sync search files
:type search: bool
:param pdf: Sync PDF files
:type pdf: bool
:param epub: Sync ePub files
:type epub: bool
"""
version = Version.objects.get(pk=version_pk)
if html:
from_path = version.project.artifact_path(
version=version.slug, type_=version.project.documentation_type)
target = version.project.rtd_build_path(version.slug)
Syncer.copy(from_path, target, host=hostname)
if 'sphinx' in version.project.documentation_type:
if localmedia:
from_path = version.project.artifact_path(
version=version.slug, type_='sphinx_localmedia')
to_path = version.project.get_production_media_path(
type_='htmlzip', version_slug=version.slug, include_file=False)
Syncer.copy(from_path, to_path, host=hostname)
if search:
from_path = version.project.artifact_path(
version=version.slug, type_='sphinx_search')
to_path = version.project.get_production_media_path(
type_='json', version_slug=version.slug, include_file=False)
Syncer.copy(from_path, to_path, host=hostname)
        # Always move PDFs because the return code lies.
if pdf:
from_path = version.project.artifact_path(version=version.slug,
type_='sphinx_pdf')
to_path = version.project.get_production_media_path(
type_='pdf', version_slug=version.slug, include_file=False)
Syncer.copy(from_path, to_path, host=hostname)
if epub:
from_path = version.project.artifact_path(version=version.slug,
type_='sphinx_epub')
to_path = version.project.get_production_media_path(
type_='epub', version_slug=version.slug, include_file=False)
Syncer.copy(from_path, to_path, host=hostname)
if 'mkdocs' in version.project.documentation_type:
if search:
from_path = version.project.artifact_path(version=version.slug,
type_='mkdocs_json')
to_path = version.project.get_production_media_path(
type_='json', version_slug=version.slug, include_file=False)
Syncer.copy(from_path, to_path, host=hostname)
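# Minimal sketch of how this task is queued from the build side (mirrors the
# call in UpdateDocsTask.build_docs_html; pk and hostname are hypothetical):
#
#     move_files.delay(version_pk=42, hostname='build01.example.com',
#                      html=True, search=True, pdf=False, epub=False)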
@task(queue='web')
def update_search(version_pk, commit, delete_non_commit_files=True):
"""Task to update search indexes
:param version_pk: Version id to update
:param commit: Commit that updated index
:param delete_non_commit_files: Delete files not in commit from index
"""
version = Version.objects.get(pk=version_pk)
if version.project.is_type_sphinx:
page_list = process_all_json_files(version, build_dir=False)
elif version.project.is_type_mkdocs:
page_list = process_mkdocs_json(version, build_dir=False)
else:
log.error('Unknown documentation type: %s',
version.project.documentation_type)
return
log_msg = ' '.join([page['path'] for page in page_list])
log.info("(Search Index) Sending Data: %s [%s]", version.project.slug,
log_msg)
index_search_request(
version=version,
page_list=page_list,
commit=commit,
project_scale=0,
page_scale=0,
# Don't index sections to speed up indexing.
# They aren't currently exposed anywhere.
section=False,
delete=delete_non_commit_files,
)
@task(queue='web')
def fileify(version_pk, commit):
"""
Create ImportedFile objects for all of a version's files.
This is a prereq for indexing the docs for search.
It also causes celery-haystack to kick off an index of the file.
"""
version = Version.objects.get(pk=version_pk)
project = version.project
if not project.cdn_enabled:
return
if not commit:
log.info(LOG_TEMPLATE
.format(project=project.slug, version=version.slug,
msg=('Imported File not being built because no commit '
'information')))
path = project.rtd_build_path(version.slug)
if path:
log.info(LOG_TEMPLATE
.format(project=version.project.slug, version=version.slug,
msg='Creating ImportedFiles'))
_manage_imported_files(version, path, commit)
else:
log.info(LOG_TEMPLATE
.format(project=project.slug, version=version.slug,
msg='No ImportedFile files'))
def _manage_imported_files(version, path, commit):
"""Update imported files for version
:param version: Version instance
:param path: Path to search
:param commit: Commit that updated path
"""
changed_files = set()
for root, __, filenames in os.walk(path):
for filename in filenames:
dirpath = os.path.join(root.replace(path, '').lstrip('/'),
filename.lstrip('/'))
full_path = os.path.join(root, filename)
md5 = hashlib.md5(open(full_path, 'rb').read()).hexdigest()
try:
obj, __ = ImportedFile.objects.get_or_create(
project=version.project,
version=version,
path=dirpath,
name=filename,
)
except ImportedFile.MultipleObjectsReturned:
log.exception('Error creating ImportedFile')
continue
if obj.md5 != md5:
obj.md5 = md5
changed_files.add(dirpath)
if obj.commit != commit:
obj.commit = commit
obj.save()
# Delete ImportedFiles from previous versions
ImportedFile.objects.filter(project=version.project,
version=version
).exclude(commit=commit).delete()
# Purge Cache
changed_files = [resolve_path(
version.project, filename=file, version_slug=version.slug,
) for file in changed_files]
cdn_ids = getattr(settings, 'CDN_IDS', None)
if cdn_ids:
if version.project.slug in cdn_ids:
purge(cdn_ids[version.project.slug], changed_files)
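# Sketch of the change-detection rule applied above: a file counts as changed
# when its current MD5 differs from the stored ImportedFile.md5. As a
# hypothetical helper (not part of this module):
#
#     def _has_changed(obj, full_path):
#         new_md5 = hashlib.md5(open(full_path, 'rb').read()).hexdigest()
#         return obj.md5 != new_md5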
@task(queue='web')
def send_notifications(version_pk, build_pk):
version = Version.objects.get(pk=version_pk)
build = Build.objects.get(pk=build_pk)
for hook in version.project.webhook_notifications.all():
webhook_notification(version, build, hook.url)
for email in version.project.emailhook_notifications.all().values_list('email', flat=True):
email_notification(version, build, email)
def email_notification(version, build, email):
"""Send email notifications for build failure
    :param version: :py:class:`Version` instance that failed
    :param build: :py:class:`Build` instance that failed
:param email: Email recipient address
"""
log.debug(LOG_TEMPLATE.format(project=version.project.slug, version=version.slug,
msg='sending email to: %s' % email))
context = {'version': version,
'project': version.project,
'build': build,
'build_url': 'https://{0}{1}'.format(
getattr(settings, 'PRODUCTION_DOMAIN', 'readthedocs.org'),
build.get_absolute_url()),
'unsub_url': 'https://{0}{1}'.format(
getattr(settings, 'PRODUCTION_DOMAIN', 'readthedocs.org'),
reverse('projects_notifications', args=[version.project.slug])),
}
if build.commit:
title = _('Failed: {project.name} ({commit})').format(commit=build.commit[:8], **context)
else:
title = _('Failed: {project.name} ({version.verbose_name})').format(**context)
send_email(
email,
title,
template='projects/email/build_failed.txt',
template_html='projects/email/build_failed.html',
context=context
)
def webhook_notification(version, build, hook_url):
"""Send webhook notification for project webhook
:param version: Version instance to send hook for
:param build: Build instance that failed
:param hook_url: Hook URL to send to
"""
project = version.project
data = json.dumps({
'name': project.name,
'slug': project.slug,
'build': {
'id': build.id,
'success': build.success,
'date': build.date.strftime('%Y-%m-%d %H:%M:%S'),
}
})
log.debug(LOG_TEMPLATE
.format(project=project.slug, version='',
msg='sending notification to: %s' % hook_url))
requests.post(hook_url, data=data)
@task(queue='web')
def update_static_metadata(project_pk, path=None):
"""Update static metadata JSON file
Metadata settings include the following project settings:
version
The default version for the project, default: `latest`
language
The default language for the project, default: `en`
languages
List of languages built by linked translation projects.
"""
project = Project.objects.get(pk=project_pk)
if not path:
path = project.static_metadata_path()
log.info(LOG_TEMPLATE.format(
project=project.slug,
version='',
msg='Updating static metadata',
))
translations = [trans.language for trans in project.translations.all()]
languages = set(translations)
# Convert to JSON safe types
metadata = {
'version': project.default_version,
'language': project.language,
'languages': list(languages),
'single_version': project.single_version,
}
try:
fh = open(path, 'w+')
json.dump(metadata, fh)
fh.close()
Syncer.copy(path, path, host=socket.gethostname(), file=True)
except (AttributeError, IOError) as e:
log.debug(LOG_TEMPLATE.format(
project=project.slug,
version='',
msg='Cannot write to metadata.json: {0}'.format(e)
))
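# Example of the metadata.json written above for a hypothetical project with
# English source docs and a single Spanish translation (illustrative only):
#
#     {"version": "latest", "language": "en",
#      "languages": ["es"], "single_version": false}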
# Random Tasks
@task()
def remove_dir(path):
"""
Remove a directory on the build/celery server.
This is mainly a wrapper around shutil.rmtree so that app servers
can kill things on the build server.
"""
log.info("Removing %s", path)
shutil.rmtree(path)
@task(queue='web')
def clear_artifacts(version_pk):
"""Remove artifacts from the web servers"""
version = Version.objects.get(pk=version_pk)
clear_pdf_artifacts(version)
clear_epub_artifacts(version)
clear_htmlzip_artifacts(version)
clear_html_artifacts(version)
def clear_pdf_artifacts(version):
run_on_app_servers('rm -rf %s'
% version.project.get_production_media_path(
type_='pdf', version_slug=version.slug))
def clear_epub_artifacts(version):
run_on_app_servers('rm -rf %s'
% version.project.get_production_media_path(
type_='epub', version_slug=version.slug))
def clear_htmlzip_artifacts(version):
run_on_app_servers('rm -rf %s'
% version.project.get_production_media_path(
type_='htmlzip', version_slug=version.slug))
def clear_html_artifacts(version):
run_on_app_servers('rm -rf %s' % version.project.rtd_build_path(version=version.slug))
@task(queue='web')
def remove_path_from_web(path):
"""
Remove the given path from the web servers file system.
"""
    # Sanity check for spaces in the path since spaces would result in
# deleting unpredictable paths with "rm -rf".
assert ' ' not in path, "No spaces allowed in path"
# TODO: We need some proper escaping here for the given path.
run_on_app_servers('rm -rf {path}'.format(path=path))
|
|
# coding: utf-8
"""
Wavefront REST API Documentation
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from wavefront_api_client.configuration import Configuration
class SavedAppMapSearchGroup(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'created_epoch_millis': 'int',
'creator_id': 'str',
'id': 'str',
'name': 'str',
'search_filters': 'list[str]',
'updated_epoch_millis': 'int',
'updater_id': 'str'
}
attribute_map = {
'created_epoch_millis': 'createdEpochMillis',
'creator_id': 'creatorId',
'id': 'id',
'name': 'name',
'search_filters': 'searchFilters',
'updated_epoch_millis': 'updatedEpochMillis',
'updater_id': 'updaterId'
}
def __init__(self, created_epoch_millis=None, creator_id=None, id=None, name=None, search_filters=None, updated_epoch_millis=None, updater_id=None, _configuration=None): # noqa: E501
"""SavedAppMapSearchGroup - a model defined in Swagger""" # noqa: E501
if _configuration is None:
_configuration = Configuration()
self._configuration = _configuration
self._created_epoch_millis = None
self._creator_id = None
self._id = None
self._name = None
self._search_filters = None
self._updated_epoch_millis = None
self._updater_id = None
self.discriminator = None
if created_epoch_millis is not None:
self.created_epoch_millis = created_epoch_millis
if creator_id is not None:
self.creator_id = creator_id
if id is not None:
self.id = id
self.name = name
if search_filters is not None:
self.search_filters = search_filters
if updated_epoch_millis is not None:
self.updated_epoch_millis = updated_epoch_millis
if updater_id is not None:
self.updater_id = updater_id
@property
def created_epoch_millis(self):
"""Gets the created_epoch_millis of this SavedAppMapSearchGroup. # noqa: E501
:return: The created_epoch_millis of this SavedAppMapSearchGroup. # noqa: E501
:rtype: int
"""
return self._created_epoch_millis
@created_epoch_millis.setter
def created_epoch_millis(self, created_epoch_millis):
"""Sets the created_epoch_millis of this SavedAppMapSearchGroup.
:param created_epoch_millis: The created_epoch_millis of this SavedAppMapSearchGroup. # noqa: E501
:type: int
"""
self._created_epoch_millis = created_epoch_millis
@property
def creator_id(self):
"""Gets the creator_id of this SavedAppMapSearchGroup. # noqa: E501
:return: The creator_id of this SavedAppMapSearchGroup. # noqa: E501
:rtype: str
"""
return self._creator_id
@creator_id.setter
def creator_id(self, creator_id):
"""Sets the creator_id of this SavedAppMapSearchGroup.
:param creator_id: The creator_id of this SavedAppMapSearchGroup. # noqa: E501
:type: str
"""
self._creator_id = creator_id
@property
def id(self):
"""Gets the id of this SavedAppMapSearchGroup. # noqa: E501
:return: The id of this SavedAppMapSearchGroup. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this SavedAppMapSearchGroup.
:param id: The id of this SavedAppMapSearchGroup. # noqa: E501
:type: str
"""
self._id = id
@property
def name(self):
"""Gets the name of this SavedAppMapSearchGroup. # noqa: E501
Name of the search group # noqa: E501
:return: The name of this SavedAppMapSearchGroup. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this SavedAppMapSearchGroup.
Name of the search group # noqa: E501
:param name: The name of this SavedAppMapSearchGroup. # noqa: E501
:type: str
"""
if self._configuration.client_side_validation and name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def search_filters(self):
"""Gets the search_filters of this SavedAppMapSearchGroup. # noqa: E501
:return: The search_filters of this SavedAppMapSearchGroup. # noqa: E501
:rtype: list[str]
"""
return self._search_filters
@search_filters.setter
def search_filters(self, search_filters):
"""Sets the search_filters of this SavedAppMapSearchGroup.
:param search_filters: The search_filters of this SavedAppMapSearchGroup. # noqa: E501
:type: list[str]
"""
self._search_filters = search_filters
@property
def updated_epoch_millis(self):
"""Gets the updated_epoch_millis of this SavedAppMapSearchGroup. # noqa: E501
:return: The updated_epoch_millis of this SavedAppMapSearchGroup. # noqa: E501
:rtype: int
"""
return self._updated_epoch_millis
@updated_epoch_millis.setter
def updated_epoch_millis(self, updated_epoch_millis):
"""Sets the updated_epoch_millis of this SavedAppMapSearchGroup.
:param updated_epoch_millis: The updated_epoch_millis of this SavedAppMapSearchGroup. # noqa: E501
:type: int
"""
self._updated_epoch_millis = updated_epoch_millis
@property
def updater_id(self):
"""Gets the updater_id of this SavedAppMapSearchGroup. # noqa: E501
:return: The updater_id of this SavedAppMapSearchGroup. # noqa: E501
:rtype: str
"""
return self._updater_id
@updater_id.setter
def updater_id(self, updater_id):
"""Sets the updater_id of this SavedAppMapSearchGroup.
:param updater_id: The updater_id of this SavedAppMapSearchGroup. # noqa: E501
:type: str
"""
self._updater_id = updater_id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(SavedAppMapSearchGroup, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, SavedAppMapSearchGroup):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, SavedAppMapSearchGroup):
return True
return self.to_dict() != other.to_dict()
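# Illustrative usage sketch (not part of the generated client, and assuming the
# generated ``swagger_types`` mapping defined earlier in the class): build a
# search group and serialize it via ``to_dict``. Field values are hypothetical.
def _example_saved_app_map_search_group():  # pragma: no cover - demo only
    group = SavedAppMapSearchGroup(
        name="my-app-map-group",
        search_filters=["env=prod"],
    )
    return group.to_dict()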
|
|
# Copyright (c) 2013 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
import webob
from cinder.api.contrib import volume_encryption_metadata
from cinder import context
from cinder import db
from cinder import test
from cinder.tests.unit.api import fakes
from cinder.tests.unit import fake_constants as fake
def return_volume_type_encryption_metadata(context, volume_type_id):
return fake_volume_type_encryption()
def fake_volume_type_encryption():
values = {
'cipher': 'cipher',
'key_size': 256,
'provider': 'nova.volume.encryptors.base.VolumeEncryptor',
'volume_type_id': fake.VOLUME_TYPE_ID,
'control_location': 'front-end',
}
return values
class VolumeEncryptionMetadataTest(test.TestCase):
@staticmethod
def _create_volume(context,
display_name='test_volume',
display_description='this is a test volume',
status='creating',
availability_zone='fake_az',
host='fake_host',
size=1,
encryption_key_id=fake.ENCRYPTION_KEY_ID):
"""Create a volume object."""
volume = {
'size': size,
'user_id': fake.USER_ID,
'project_id': fake.PROJECT_ID,
'status': status,
'display_name': display_name,
'display_description': display_description,
'attach_status': 'detached',
'availability_zone': availability_zone,
'host': host,
'encryption_key_id': encryption_key_id,
}
return db.volume_create(context, volume)['id']
def setUp(self):
super(VolumeEncryptionMetadataTest, self).setUp()
self.controller = (volume_encryption_metadata.
VolumeEncryptionMetadataController())
self.mock_object(db.sqlalchemy.api, 'volume_type_encryption_get',
return_volume_type_encryption_metadata)
self.ctxt = context.RequestContext(fake.USER_ID, fake.PROJECT_ID)
self.volume_id = self._create_volume(self.ctxt)
self.addCleanup(db.volume_destroy, self.ctxt.elevated(),
self.volume_id)
def test_index(self):
req = webob.Request.blank('/v2/%s/volumes/%s/encryption' % (
fake.PROJECT_ID, self.volume_id))
res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
self.assertEqual(200, res.status_code)
res_dict = jsonutils.loads(res.body)
expected = {
"encryption_key_id": fake.ENCRYPTION_KEY_ID,
"control_location": "front-end",
"cipher": "cipher",
"provider": "nova.volume.encryptors.base.VolumeEncryptor",
"key_size": 256,
}
self.assertEqual(expected, res_dict)
def test_index_bad_tenant_id(self):
req = webob.Request.blank('/v2/%s/volumes/%s/encryption' % (
fake.WILL_NOT_BE_FOUND_ID, self.volume_id))
res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
self.assertEqual(400, res.status_code)
res_dict = jsonutils.loads(res.body)
expected = {'badRequest': {'code': 400,
'message': 'Malformed request url'}}
self.assertEqual(expected, res_dict)
def test_index_bad_volume_id(self):
bad_volume_id = fake.WILL_NOT_BE_FOUND_ID
req = webob.Request.blank('/v2/%s/volumes/%s/encryption' % (
fake.PROJECT_ID, bad_volume_id))
res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
self.assertEqual(404, res.status_code)
res_dict = jsonutils.loads(res.body)
expected = {'itemNotFound': {'code': 404,
'message': 'Volume %s could not be found.'
% bad_volume_id}}
self.assertEqual(expected, res_dict)
def test_show_key(self):
req = webob.Request.blank('/v2/%s/volumes/%s/encryption/'
'encryption_key_id' % (
fake.PROJECT_ID, self.volume_id))
res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
self.assertEqual(200, res.status_code)
self.assertEqual(fake.ENCRYPTION_KEY_ID, res.body.decode())
def test_show_control(self):
req = webob.Request.blank('/v2/%s/volumes/%s/encryption/'
'control_location' % (
fake.PROJECT_ID, self.volume_id))
res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
self.assertEqual(200, res.status_code)
self.assertEqual(b'front-end', res.body)
def test_show_provider(self):
req = webob.Request.blank('/v2/%s/volumes/%s/encryption/'
'provider' % (
fake.PROJECT_ID, self.volume_id))
res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
self.assertEqual(200, res.status_code)
self.assertEqual(b'nova.volume.encryptors.base.VolumeEncryptor',
res.body)
def test_show_bad_tenant_id(self):
req = webob.Request.blank('/v2/%s/volumes/%s/encryption/'
'encryption_key_id' %
(fake.WILL_NOT_BE_FOUND_ID,
self.volume_id))
res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
self.assertEqual(400, res.status_code)
res_dict = jsonutils.loads(res.body)
expected = {'badRequest': {'code': 400,
'message': 'Malformed request url'}}
self.assertEqual(expected, res_dict)
def test_show_bad_volume_id(self):
bad_volume_id = fake.WILL_NOT_BE_FOUND_ID
req = webob.Request.blank('/v2/%s/volumes/%s/encryption/'
'encryption_key_id' % (
fake.PROJECT_ID, bad_volume_id))
res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
self.assertEqual(404, res.status_code)
res_dict = jsonutils.loads(res.body)
expected = {'itemNotFound': {'code': 404,
'message': 'Volume %s could not be found.'
% bad_volume_id}}
self.assertEqual(expected, res_dict)
def test_retrieve_key_admin(self):
ctxt = context.RequestContext(fake.USER_ID, fake.PROJECT_ID,
is_admin=True)
req = webob.Request.blank('/v2/%s/volumes/%s/encryption/'
'encryption_key_id' % (
fake.PROJECT_ID, self.volume_id))
res = req.get_response(fakes.wsgi_app(fake_auth_context=ctxt))
self.assertEqual(200, res.status_code)
self.assertEqual(fake.ENCRYPTION_KEY_ID, res.body.decode())
def test_show_volume_not_encrypted_type(self):
self.mock_object(db.sqlalchemy.api, 'volume_type_encryption_get',
return_value=None)
volume_id = self._create_volume(self.ctxt, encryption_key_id=None)
self.addCleanup(db.volume_destroy, self.ctxt.elevated(), volume_id)
req = webob.Request.blank('/v2/%s/volumes/%s/encryption/'
'encryption_key_id' % (
fake.PROJECT_ID, volume_id))
res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
self.assertEqual(200, res.status_code)
self.assertEqual(0, len(res.body))
def test_index_volume_not_encrypted_type(self):
self.mock_object(db.sqlalchemy.api, 'volume_type_encryption_get',
return_value=None)
volume_id = self._create_volume(self.ctxt, encryption_key_id=None)
self.addCleanup(db.volume_destroy, self.ctxt.elevated(), volume_id)
req = webob.Request.blank('/v2/%s/volumes/%s/encryption' % (
fake.PROJECT_ID, volume_id))
res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
self.assertEqual(200, res.status_code)
res_dict = jsonutils.loads(res.body)
expected = {
'encryption_key_id': None
}
self.assertEqual(expected, res_dict)
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstackclient.identity.v3 import domain
from openstackclient.tests.unit.identity.v3 import fakes as identity_fakes
class TestDomain(identity_fakes.TestIdentityv3):
def setUp(self):
super(TestDomain, self).setUp()
# Get a shortcut to the DomainManager Mock
self.domains_mock = self.app.client_manager.identity.domains
self.domains_mock.reset_mock()
class TestDomainCreate(TestDomain):
columns = (
'description',
'enabled',
'id',
'name',
'tags'
)
def setUp(self):
super(TestDomainCreate, self).setUp()
self.domain = identity_fakes.FakeDomain.create_one_domain()
self.domains_mock.create.return_value = self.domain
self.datalist = (
self.domain.description,
True,
self.domain.id,
self.domain.name,
self.domain.tags
)
# Get the command object to test
self.cmd = domain.CreateDomain(self.app, None)
def test_domain_create_no_options(self):
arglist = [
self.domain.name,
]
verifylist = [
('name', self.domain.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
# Set expected values
kwargs = {
'name': self.domain.name,
'description': None,
'enabled': True,
}
self.domains_mock.create.assert_called_with(
**kwargs
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, data)
def test_domain_create_description(self):
arglist = [
'--description', 'new desc',
self.domain.name,
]
verifylist = [
('description', 'new desc'),
('name', self.domain.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
# Set expected values
kwargs = {
'name': self.domain.name,
'description': 'new desc',
'enabled': True,
}
self.domains_mock.create.assert_called_with(
**kwargs
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, data)
def test_domain_create_enable(self):
arglist = [
'--enable',
self.domain.name,
]
verifylist = [
('enable', True),
('name', self.domain.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
# Set expected values
kwargs = {
'name': self.domain.name,
'description': None,
'enabled': True,
}
self.domains_mock.create.assert_called_with(
**kwargs
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, data)
def test_domain_create_disable(self):
arglist = [
'--disable',
self.domain.name,
]
verifylist = [
('disable', True),
('name', self.domain.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
# Set expected values
kwargs = {
'name': self.domain.name,
'description': None,
'enabled': False,
}
self.domains_mock.create.assert_called_with(
**kwargs
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, data)
class TestDomainDelete(TestDomain):
domain = identity_fakes.FakeDomain.create_one_domain()
def setUp(self):
super(TestDomainDelete, self).setUp()
# This is the return value for utils.find_resource()
self.domains_mock.get.return_value = self.domain
self.domains_mock.delete.return_value = None
# Get the command object to test
self.cmd = domain.DeleteDomain(self.app, None)
def test_domain_delete(self):
arglist = [
self.domain.id,
]
verifylist = [
('domain', [self.domain.id]),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.domains_mock.delete.assert_called_with(
self.domain.id,
)
self.assertIsNone(result)
class TestDomainList(TestDomain):
domain = identity_fakes.FakeDomain.create_one_domain()
def setUp(self):
super(TestDomainList, self).setUp()
self.domains_mock.list.return_value = [self.domain]
# Get the command object to test
self.cmd = domain.ListDomain(self.app, None)
def test_domain_list_no_options(self):
arglist = []
verifylist = []
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class Lister in cliff, abstract method take_action()
# returns a tuple containing the column names and an iterable
# containing the data to be listed.
columns, data = self.cmd.take_action(parsed_args)
self.domains_mock.list.assert_called_with()
collist = ('ID', 'Name', 'Enabled', 'Description')
self.assertEqual(collist, columns)
datalist = ((
self.domain.id,
self.domain.name,
True,
self.domain.description,
), )
self.assertEqual(datalist, tuple(data))
class TestDomainSet(TestDomain):
domain = identity_fakes.FakeDomain.create_one_domain()
def setUp(self):
super(TestDomainSet, self).setUp()
self.domains_mock.get.return_value = self.domain
self.domains_mock.update.return_value = self.domain
# Get the command object to test
self.cmd = domain.SetDomain(self.app, None)
def test_domain_set_no_options(self):
arglist = [
self.domain.name,
]
verifylist = [
('domain', self.domain.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
kwargs = {}
self.domains_mock.update.assert_called_with(
self.domain.id,
**kwargs
)
self.assertIsNone(result)
def test_domain_set_name(self):
arglist = [
'--name', 'qwerty',
self.domain.id,
]
verifylist = [
('name', 'qwerty'),
('domain', self.domain.id),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
# Set expected values
kwargs = {
'name': 'qwerty',
}
self.domains_mock.update.assert_called_with(
self.domain.id,
**kwargs
)
self.assertIsNone(result)
def test_domain_set_description(self):
arglist = [
'--description', 'new desc',
self.domain.id,
]
verifylist = [
('description', 'new desc'),
('domain', self.domain.id),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
# Set expected values
kwargs = {
'description': 'new desc',
}
self.domains_mock.update.assert_called_with(
self.domain.id,
**kwargs
)
self.assertIsNone(result)
def test_domain_set_enable(self):
arglist = [
'--enable',
self.domain.id,
]
verifylist = [
('enable', True),
('domain', self.domain.id),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
# Set expected values
kwargs = {
'enabled': True,
}
self.domains_mock.update.assert_called_with(
self.domain.id,
**kwargs
)
self.assertIsNone(result)
def test_domain_set_disable(self):
arglist = [
'--disable',
self.domain.id,
]
verifylist = [
('disable', True),
('domain', self.domain.id),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
# Set expected values
kwargs = {
'enabled': False,
}
self.domains_mock.update.assert_called_with(
self.domain.id,
**kwargs
)
self.assertIsNone(result)
class TestDomainShow(TestDomain):
def setUp(self):
super(TestDomainShow, self).setUp()
self.domain = identity_fakes.FakeDomain.create_one_domain()
self.domains_mock.get.return_value = self.domain
# Get the command object to test
self.cmd = domain.ShowDomain(self.app, None)
def test_domain_show(self):
arglist = [
self.domain.id,
]
verifylist = [
('domain', self.domain.id),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.app.client_manager.identity.tokens.get_token_data.return_value = \
{'token':
{'project':
{'domain':
{'id': 'd1',
'name': 'd1'
}
}
}
}
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
self.domains_mock.get.assert_called_with(
self.domain.id,
)
collist = ('description', 'enabled', 'id', 'name', 'tags')
self.assertEqual(collist, columns)
datalist = (
self.domain.description,
True,
self.domain.id,
self.domain.name,
self.domain.tags
)
self.assertEqual(datalist, data)
|
|
# Copyright (c) 2010-2021 Manfred Moitzi
# License: MIT License
from typing import TYPE_CHECKING, Sequence, Iterator, Iterable
import math
from ezdxf.math import Vec2, linspace
from .line import ConstructionRay, ConstructionLine
from .bbox import BoundingBox2d
if TYPE_CHECKING:
from ezdxf.eztypes import Vertex
HALF_PI = math.pi / 2.0
__all__ = ["ConstructionCircle"]
class ConstructionCircle:
"""Circle construction tool.
Args:
center: center point as :class:`Vec2` compatible object
radius: circle radius > `0`
"""
def __init__(self, center: "Vertex", radius: float = 1.0):
self.center = Vec2(center)
self.radius = float(radius)
if self.radius <= 0.0:
raise ValueError("Radius has to be > 0.")
def __str__(self) -> str:
"""Returns string representation of circle
"ConstructionCircle(center, radius)".
"""
return f"ConstructionCircle({self.center}, {self.radius})"
@staticmethod
def from_3p(
p1: "Vertex", p2: "Vertex", p3: "Vertex"
) -> "ConstructionCircle":
"""Creates a circle from three points, all points have to be compatible
to :class:`Vec2` class.
"""
_p1 = Vec2(p1)
_p2 = Vec2(p2)
_p3 = Vec2(p3)
ray1 = ConstructionRay(_p1, _p2)
ray2 = ConstructionRay(_p1, _p3)
center_ray1 = ray1.orthogonal(_p1.lerp(_p2))
center_ray2 = ray2.orthogonal(_p1.lerp(_p3))
center = center_ray1.intersect(center_ray2)
return ConstructionCircle(center, center.distance(_p1))
@property
def bounding_box(self) -> "BoundingBox2d":
"""2D bounding box of circle as :class:`BoundingBox2d` object."""
rvec = Vec2((self.radius, self.radius))
return BoundingBox2d((self.center - rvec, self.center + rvec))
def translate(self, dx: float, dy: float) -> None:
"""Move circle about `dx` in x-axis and about `dy` in y-axis.
Args:
dx: translation in x-axis
dy: translation in y-axis
"""
self.center += Vec2((dx, dy))
def point_at(self, angle: float) -> Vec2:
"""Returns point on circle at `angle` as :class:`Vec2` object.
Args:
angle: angle in radians, angle goes counter
clockwise around the z-axis, x-axis = 0 deg.
"""
return self.center + Vec2.from_angle(angle, self.radius)
def vertices(self, angles: Iterable[float]) -> Iterable[Vec2]:
"""Yields vertices of the circle for iterable `angles`.
Args:
angles: iterable of angles as radians, angle goes counter
clockwise around the z-axis, x-axis = 0 deg.
.. versionadded:: 0.17.1
"""
center = self.center
radius = self.radius
for angle in angles:
yield center + Vec2.from_angle(angle, radius)
def flattening(self, sagitta: float) -> Iterator[Vec2]:
"""Approximate the circle by vertices, argument `sagitta` is the
max. distance from the center of an arc segment to the center of its
chord. Returns a closed polygon where the start vertex is coincident
with the end vertex!
.. versionadded:: 0.17.1
"""
from .arc import arc_segment_count
count = arc_segment_count(self.radius, math.tau, sagitta)
yield from self.vertices(linspace(0.0, math.tau, count + 1))
def inside(self, point: "Vertex") -> bool:
"""Returns ``True`` if `point` is inside circle."""
return self.radius >= self.center.distance(Vec2(point))
def tangent(self, angle: float) -> ConstructionRay:
"""Returns tangent to circle at `angle` as :class:`ConstructionRay`
object.
Args:
angle: angle in radians
"""
point_on_circle = self.point_at(angle)
ray = ConstructionRay(self.center, point_on_circle)
return ray.orthogonal(point_on_circle)
def intersect_ray(
self, ray: ConstructionRay, abs_tol: float = 1e-10
) -> Sequence[Vec2]:
"""Returns intersection points of circle and `ray` as sequence of
:class:`Vec2` objects.
Args:
ray: intersection ray
abs_tol: absolute tolerance for tests (e.g. test for tangents)
Returns:
tuple of :class:`Vec2` objects
=========== ==================================
tuple size Description
=========== ==================================
0 no intersection
1 ray is a tangent to circle
2 ray intersects with the circle
=========== ==================================
"""
assert isinstance(ray, ConstructionRay)
ortho_ray = ray.orthogonal(self.center)
intersection_point = ray.intersect(ortho_ray)
dist = self.center.distance(intersection_point)
result = []
# Intersect in two points:
if dist < self.radius:
# Ray goes through center point:
if math.isclose(dist, 0.0, abs_tol=abs_tol):
angle = ortho_ray.angle
alpha = HALF_PI
else:
# The exact direction of angle (all 4 quadrants Q1-Q4) is
# important: ortho_ray.angle is only correct at the center point
angle = (intersection_point - self.center).angle
alpha = math.acos(
intersection_point.distance(self.center) / self.radius
)
result.append(self.point_at(angle + alpha))
result.append(self.point_at(angle - alpha))
# Ray is a tangent of the circle:
elif math.isclose(dist, self.radius, abs_tol=abs_tol):
result.append(intersection_point)
# else: No intersection
return tuple(result)
def intersect_line(
self, line: ConstructionLine, abs_tol: float = 1e-10
) -> Sequence[Vec2]:
"""Returns intersection points of circle and `line` as sequence of
:class:`Vec2` objects.
Args:
line: intersection line
abs_tol: absolute tolerance for tests (e.g. test for tangents)
Returns:
tuple of :class:`Vec2` objects
=========== ==================================
tuple size Description
=========== ==================================
0 no intersection
1 line intersects or touches the circle at one point
2 line intersects the circle at two points
=========== ==================================
.. versionadded:: 0.17.1
"""
assert isinstance(line, ConstructionLine)
return [
point
for point in self.intersect_ray(line.ray, abs_tol=abs_tol)
if is_point_in_line_range(line.start, line.end, point)
]
def intersect_circle(
self, other: "ConstructionCircle", abs_tol: float = 1e-10
) -> Sequence[Vec2]:
"""Returns intersection points of two circles as sequence of
:class:`Vec2` objects.
Args:
other: intersection circle
abs_tol: absolute tolerance for tests
Returns:
tuple of :class:`Vec2` objects
=========== ==================================
tuple size Description
=========== ==================================
0 no intersection
1 circle touches the `other` circle at one point
2 circle intersects with the `other` circle
=========== ==================================
"""
assert isinstance(other, ConstructionCircle)
r1 = self.radius
r2 = other.radius
d = self.center.distance(other.center)
if d < abs_tol:
# concentric circles do not intersect by definition
return tuple()
d_max = r1 + r2
d_min = math.fabs(r1 - r2)
if d_min <= d <= d_max:
angle = (other.center - self.center).angle
# Circles touches at one point:
if math.isclose(d, d_max, abs_tol=abs_tol) or math.isclose(
d, d_min, abs_tol=abs_tol
):
return (self.point_at(angle),)
else: # Circles intersect in two points:
# Law of Cosines:
alpha = math.acos((r2 * r2 - r1 * r1 - d * d) / (-2.0 * r1 * d))
return tuple(self.vertices((angle + alpha, angle - alpha)))
return tuple()
def is_point_in_line_range(start: Vec2, end: Vec2, point: Vec2) -> bool:
length = (end - start).magnitude
if (point - start).magnitude > length:
return False
return (point - end).magnitude <= length
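# Illustrative sketch (not part of ezdxf): construct a circle through three
# points and intersect it with a ray along the x-axis. For this input the
# circle has center (1, 0) and radius 1, so the intersections are (0, 0)
# and (2, 0).
def _example_circle_intersections():  # pragma: no cover - demo only
    circle = ConstructionCircle.from_3p((0, 0), (2, 0), (1, 1))
    ray = ConstructionRay((0, 0), (4, 0))
    return circle.center, circle.radius, circle.intersect_ray(ray)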
|
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2011 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
A remote procedure call (rpc) abstraction.
For some wrappers that add message versioning to rpc, see:
rpc.dispatcher
rpc.proxy
"""
from oslo.config import cfg
from healing.openstack.common import importutils
from healing.openstack.common import log as logging
LOG = logging.getLogger(__name__)
rpc_opts = [
cfg.StrOpt('rpc_backend',
default='%s.impl_kombu' % __package__,
help="The messaging module to use, defaults to kombu."),
cfg.IntOpt('rpc_thread_pool_size',
default=64,
help='Size of RPC thread pool'),
cfg.IntOpt('rpc_conn_pool_size',
default=30,
help='Size of RPC connection pool'),
cfg.IntOpt('rpc_response_timeout',
default=60,
help='Seconds to wait for a response from call or multicall'),
cfg.IntOpt('rpc_cast_timeout',
default=30,
help='Seconds to wait before a cast expires (TTL). '
'Only supported by impl_zmq.'),
cfg.ListOpt('allowed_rpc_exception_modules',
default=['nova.exception',
'cinder.exception',
'exceptions',
],
help='Modules of exceptions that are permitted to be recreated'
' upon receiving exception data from an rpc call.'),
cfg.BoolOpt('fake_rabbit',
default=False,
help='If passed, use a fake RabbitMQ provider'),
cfg.StrOpt('control_exchange',
default='openstack',
help='AMQP exchange to connect to if using RabbitMQ or Qpid'),
]
CONF = cfg.CONF
CONF.register_opts(rpc_opts)
def set_defaults(control_exchange):
cfg.set_defaults(rpc_opts,
control_exchange=control_exchange)
def create_connection(new=True):
"""Create a connection to the message bus used for rpc.
For some example usage of creating a connection and some consumers on that
connection, see nova.service.
:param new: Whether or not to create a new connection. A new connection
will be created by default. If new is False, the
implementation is free to return an existing connection from a
pool.
:returns: An instance of openstack.common.rpc.common.Connection
"""
return _get_impl().create_connection(CONF, new=new)
def call(context, topic, msg, timeout=None):
"""Invoke a remote method that returns something.
:param context: Information that identifies the user that has made this
request.
:param topic: The topic to send the rpc message to. This correlates to the
topic argument of
openstack.common.rpc.common.Connection.create_consumer()
and only applies when the consumer was created with
fanout=False.
:param msg: This is a dict in the form { "method" : "method_to_invoke",
"args" : dict_of_kwargs }
:param timeout: int, number of seconds to use for a response timeout.
If set, this overrides the rpc_response_timeout option.
:returns: A dict from the remote method.
:raises: openstack.common.rpc.common.Timeout if a complete response
is not received before the timeout is reached.
"""
return _get_impl().call(CONF, context, topic, msg, timeout)
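# Illustrative sketch (hypothetical topic and method names, not part of this
# module): a synchronous invocation that waits up to 30 seconds for the
# remote method's result.
def _example_call(context):  # pragma: no cover - demo only
    return call(context, 'compute',
                {"method": "get_host_uptime", "args": {"host_name": "node-1"}},
                timeout=30)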
def cast(context, topic, msg):
"""Invoke a remote method that does not return anything.
:param context: Information that identifies the user that has made this
request.
:param topic: The topic to send the rpc message to. This correlates to the
topic argument of
openstack.common.rpc.common.Connection.create_consumer()
and only applies when the consumer was created with
fanout=False.
:param msg: This is a dict in the form { "method" : "method_to_invoke",
"args" : dict_of_kwargs }
:returns: None
"""
return _get_impl().cast(CONF, context, topic, msg)
def fanout_cast(context, topic, msg):
"""Broadcast a remote method invocation with no return.
This method will get invoked on all consumers that were set up with this
topic name and fanout=True.
:param context: Information that identifies the user that has made this
request.
:param topic: The topic to send the rpc message to. This correlates to the
topic argument of
openstack.common.rpc.common.Connection.create_consumer()
and only applies when the consumer was created with
fanout=True.
:param msg: This is a dict in the form { "method" : "method_to_invoke",
"args" : dict_of_kwargs }
:returns: None
"""
return _get_impl().fanout_cast(CONF, context, topic, msg)
def multicall(context, topic, msg, timeout=None):
"""Invoke a remote method and get back an iterator.
In this case, the remote method will be returning multiple values in
    separate messages, so the return values can be processed as they come in via
an iterator.
:param context: Information that identifies the user that has made this
request.
:param topic: The topic to send the rpc message to. This correlates to the
topic argument of
openstack.common.rpc.common.Connection.create_consumer()
and only applies when the consumer was created with
fanout=False.
:param msg: This is a dict in the form { "method" : "method_to_invoke",
"args" : dict_of_kwargs }
:param timeout: int, number of seconds to use for a response timeout.
If set, this overrides the rpc_response_timeout option.
:returns: An iterator. The iterator will yield a tuple (N, X) where N is
an index that starts at 0 and increases by one for each value
returned and X is the Nth value that was returned by the remote
method.
:raises: openstack.common.rpc.common.Timeout if a complete response
is not received before the timeout is reached.
"""
return _get_impl().multicall(CONF, context, topic, msg, timeout)
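# Illustrative sketch (hypothetical topic and method names): the returned
# iterator yields results as they arrive, so they can be consumed
# incrementally instead of waiting for the full response.
def _example_multicall(context):  # pragma: no cover - demo only
    return list(multicall(context, 'compute',
                          {"method": "list_instances", "args": {}}))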
def notify(context, topic, msg, envelope=False):
"""Send notification event.
:param context: Information that identifies the user that has made this
request.
:param topic: The topic to send the notification to.
:param msg: This is a dict of content of event.
:param envelope: Set to True to enable message envelope for notifications.
:returns: None
"""
return _get_impl().notify(cfg.CONF, context, topic, msg, envelope)
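# Illustrative sketch (hypothetical topic and event payload): emit a
# notification with the message envelope enabled.
def _example_notify(context):  # pragma: no cover - demo only
    notify(context, 'notifications.info',
           {"event_type": "compute.instance.create", "payload": {}},
           envelope=True)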
def cleanup():
"""Clean up resources in use by implementation.
Clean up any resources that have been allocated by the RPC implementation.
This is typically open connections to a messaging service. This function
would get called before an application using this API exits to allow
connections to get torn down cleanly.
:returns: None
"""
return _get_impl().cleanup()
def cast_to_server(context, server_params, topic, msg):
"""Invoke a remote method that does not return anything.
:param context: Information that identifies the user that has made this
request.
:param server_params: Connection information
:param topic: The topic to send the notification to.
:param msg: This is a dict in the form { "method" : "method_to_invoke",
"args" : dict_of_kwargs }
:returns: None
"""
return _get_impl().cast_to_server(CONF, context, server_params, topic,
msg)
def fanout_cast_to_server(context, server_params, topic, msg):
"""Broadcast to a remote method invocation with no return.
:param context: Information that identifies the user that has made this
request.
:param server_params: Connection information
:param topic: The topic to send the notification to.
:param msg: This is a dict in the form { "method" : "method_to_invoke",
"args" : dict_of_kwargs }
:returns: None
"""
return _get_impl().fanout_cast_to_server(CONF, context, server_params,
topic, msg)
def queue_get_for(context, topic, host):
"""Get a queue name for a given topic + host.
This function only works if this naming convention is followed on the
consumer side, as well. For example, in nova, every instance of the
nova-foo service calls create_consumer() for two topics:
foo
foo.<host>
Messages sent to the 'foo' topic are distributed to exactly one instance of
the nova-foo service. The services are chosen in a round-robin fashion.
Messages sent to the 'foo.<host>' topic are sent to the nova-foo service on
<host>.
"""
return '%s.%s' % (topic, host) if host else topic
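# Illustrative sketch of the naming convention implemented above:
# queue_get_for(ctxt, 'foo', 'host1') yields 'foo.host1', while a missing
# host falls back to the plain topic 'foo'.
def _example_queue_names(context):  # pragma: no cover - demo only
    assert queue_get_for(context, 'foo', 'host1') == 'foo.host1'
    assert queue_get_for(context, 'foo', None) == 'foo'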
_RPCIMPL = None
def _get_impl():
"""Delay import of rpc_backend until configuration is loaded."""
global _RPCIMPL
if _RPCIMPL is None:
try:
_RPCIMPL = importutils.import_module(CONF.rpc_backend)
except ImportError:
# For backwards compatibility with older nova config.
impl = CONF.rpc_backend.replace('nova.rpc',
'nova.openstack.common.rpc')
_RPCIMPL = importutils.import_module(impl)
return _RPCIMPL
|
|
"""Demo platform that offers a fake climate device."""
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
HVAC_MODES,
SUPPORT_AUX_HEAT,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_SWING_MODE,
SUPPORT_TARGET_HUMIDITY,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT
from . import DOMAIN
SUPPORT_FLAGS = 0
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Demo climate devices."""
async_add_entities(
[
DemoClimate(
unique_id="climate_1",
name="HeatPump",
target_temperature=68,
unit_of_measurement=TEMP_FAHRENHEIT,
preset=None,
current_temperature=77,
fan_mode=None,
target_humidity=None,
current_humidity=None,
swing_mode=None,
hvac_mode=HVAC_MODE_HEAT,
hvac_action=CURRENT_HVAC_HEAT,
aux=None,
target_temp_high=None,
target_temp_low=None,
hvac_modes=[HVAC_MODE_HEAT, HVAC_MODE_OFF],
),
DemoClimate(
unique_id="climate_2",
name="Hvac",
target_temperature=21,
unit_of_measurement=TEMP_CELSIUS,
preset=None,
current_temperature=22,
fan_mode="On High",
target_humidity=67,
current_humidity=54,
swing_mode="Off",
hvac_mode=HVAC_MODE_COOL,
hvac_action=CURRENT_HVAC_COOL,
aux=False,
target_temp_high=None,
target_temp_low=None,
hvac_modes=[mode for mode in HVAC_MODES if mode != HVAC_MODE_HEAT_COOL],
),
DemoClimate(
unique_id="climate_3",
name="Ecobee",
target_temperature=None,
unit_of_measurement=TEMP_CELSIUS,
preset="home",
preset_modes=["home", "eco"],
current_temperature=23,
fan_mode="Auto Low",
target_humidity=None,
current_humidity=None,
swing_mode="Auto",
hvac_mode=HVAC_MODE_HEAT_COOL,
hvac_action=None,
aux=None,
target_temp_high=24,
target_temp_low=21,
hvac_modes=[HVAC_MODE_HEAT_COOL, HVAC_MODE_COOL, HVAC_MODE_HEAT],
),
]
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Demo climate devices config entry."""
await async_setup_platform(hass, {}, async_add_entities)
class DemoClimate(ClimateEntity):
"""Representation of a demo climate device."""
def __init__(
self,
unique_id,
name,
target_temperature,
unit_of_measurement,
preset,
current_temperature,
fan_mode,
target_humidity,
current_humidity,
swing_mode,
hvac_mode,
hvac_action,
aux,
target_temp_high,
target_temp_low,
hvac_modes,
preset_modes=None,
):
"""Initialize the climate device."""
self._unique_id = unique_id
self._name = name
self._support_flags = SUPPORT_FLAGS
if target_temperature is not None:
self._support_flags = self._support_flags | SUPPORT_TARGET_TEMPERATURE
if preset is not None:
self._support_flags = self._support_flags | SUPPORT_PRESET_MODE
if fan_mode is not None:
self._support_flags = self._support_flags | SUPPORT_FAN_MODE
if target_humidity is not None:
self._support_flags = self._support_flags | SUPPORT_TARGET_HUMIDITY
if swing_mode is not None:
self._support_flags = self._support_flags | SUPPORT_SWING_MODE
if aux is not None:
self._support_flags = self._support_flags | SUPPORT_AUX_HEAT
if HVAC_MODE_HEAT_COOL in hvac_modes or HVAC_MODE_AUTO in hvac_modes:
self._support_flags = self._support_flags | SUPPORT_TARGET_TEMPERATURE_RANGE
self._target_temperature = target_temperature
self._target_humidity = target_humidity
self._unit_of_measurement = unit_of_measurement
self._preset = preset
self._preset_modes = preset_modes
self._current_temperature = current_temperature
self._current_humidity = current_humidity
self._current_fan_mode = fan_mode
self._hvac_action = hvac_action
self._hvac_mode = hvac_mode
self._aux = aux
self._current_swing_mode = swing_mode
self._fan_modes = ["On Low", "On High", "Auto Low", "Auto High", "Off"]
self._hvac_modes = hvac_modes
self._swing_modes = ["Auto", "1", "2", "3", "Off"]
self._target_temperature_high = target_temp_high
self._target_temperature_low = target_temp_low
@property
def device_info(self):
"""Return device info."""
return {
"identifiers": {
# Serial numbers are unique identifiers within a specific domain
(DOMAIN, self.unique_id)
},
"name": self.name,
}
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
@property
def supported_features(self):
"""Return the list of supported features."""
return self._support_flags
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def name(self):
"""Return the name of the climate device."""
return self._name
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return self._unit_of_measurement
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
@property
def target_temperature_high(self):
"""Return the highbound target temperature we try to reach."""
return self._target_temperature_high
@property
def target_temperature_low(self):
"""Return the lowbound target temperature we try to reach."""
return self._target_temperature_low
@property
def current_humidity(self):
"""Return the current humidity."""
return self._current_humidity
@property
def target_humidity(self):
"""Return the humidity we try to reach."""
return self._target_humidity
@property
def hvac_action(self):
"""Return current operation ie. heat, cool, idle."""
return self._hvac_action
@property
def hvac_mode(self):
"""Return hvac target hvac state."""
return self._hvac_mode
@property
def hvac_modes(self):
"""Return the list of available operation modes."""
return self._hvac_modes
@property
def preset_mode(self):
"""Return preset mode."""
return self._preset
@property
def preset_modes(self):
"""Return preset modes."""
return self._preset_modes
@property
def is_aux_heat(self):
"""Return true if aux heat is on."""
return self._aux
@property
def fan_mode(self):
"""Return the fan setting."""
return self._current_fan_mode
@property
def fan_modes(self):
"""Return the list of available fan modes."""
return self._fan_modes
@property
def swing_mode(self):
"""Return the swing setting."""
return self._current_swing_mode
@property
def swing_modes(self):
"""List of available swing modes."""
return self._swing_modes
async def async_set_temperature(self, **kwargs):
"""Set new target temperatures."""
if kwargs.get(ATTR_TEMPERATURE) is not None:
self._target_temperature = kwargs.get(ATTR_TEMPERATURE)
if (
kwargs.get(ATTR_TARGET_TEMP_HIGH) is not None
and kwargs.get(ATTR_TARGET_TEMP_LOW) is not None
):
self._target_temperature_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)
self._target_temperature_low = kwargs.get(ATTR_TARGET_TEMP_LOW)
self.async_write_ha_state()
async def async_set_humidity(self, humidity):
"""Set new humidity level."""
self._target_humidity = humidity
self.async_write_ha_state()
async def async_set_swing_mode(self, swing_mode):
"""Set new swing mode."""
self._current_swing_mode = swing_mode
self.async_write_ha_state()
async def async_set_fan_mode(self, fan_mode):
"""Set new fan mode."""
self._current_fan_mode = fan_mode
self.async_write_ha_state()
async def async_set_hvac_mode(self, hvac_mode):
"""Set new operation mode."""
self._hvac_mode = hvac_mode
self.async_write_ha_state()
async def async_set_preset_mode(self, preset_mode):
"""Update preset_mode on."""
self._preset = preset_mode
self.async_write_ha_state()
async def async_turn_aux_heat_on(self):
"""Turn auxiliary heater on."""
self._aux = True
self.async_write_ha_state()
async def async_turn_aux_heat_off(self):
"""Turn auxiliary heater off."""
self._aux = False
self.async_write_ha_state()
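# Illustrative sketch (not part of the demo platform): the supported-feature
# bitmask is derived from which constructor arguments are not None, so a
# device configured with only a target temperature and a fan mode advertises
# exactly SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE. Values are examples.
def _example_feature_flags():  # pragma: no cover - demo only
    demo = DemoClimate(
        unique_id="climate_demo",
        name="Sketch",
        target_temperature=21,
        unit_of_measurement=TEMP_CELSIUS,
        preset=None,
        current_temperature=22,
        fan_mode="Auto Low",
        target_humidity=None,
        current_humidity=None,
        swing_mode=None,
        hvac_mode=HVAC_MODE_HEAT,
        hvac_action=None,
        aux=None,
        target_temp_high=None,
        target_temp_low=None,
        hvac_modes=[HVAC_MODE_HEAT, HVAC_MODE_OFF],
    )
    return demo.supported_features == (
        SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE
    )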
|
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ducktape.tests.test import Test
from ducktape.mark.resource import cluster
from ducktape.utils.util import wait_until
from ducktape.mark import matrix, parametrize
from ducktape.cluster.remoteaccount import RemoteCommandError
from kafkatest.services.zookeeper import ZookeeperService
from kafkatest.services.kafka import KafkaService, config_property
from kafkatest.services.connect import ConnectDistributedService, VerifiableSource, VerifiableSink, ConnectRestError, MockSink, MockSource
from kafkatest.services.console_consumer import ConsoleConsumer
from kafkatest.services.security.security_config import SecurityConfig
from collections import Counter, namedtuple
import itertools
import json
import operator
import time
class ConnectDistributedTest(Test):
"""
Simple test of Kafka Connect in distributed mode, producing data from files on one cluster and consuming it on
another, validating the total output is identical to the input.
"""
FILE_SOURCE_CONNECTOR = 'org.apache.kafka.connect.file.FileStreamSourceConnector'
FILE_SINK_CONNECTOR = 'org.apache.kafka.connect.file.FileStreamSinkConnector'
INPUT_FILE = "/mnt/connect.input"
OUTPUT_FILE = "/mnt/connect.output"
TOPIC = "test"
OFFSETS_TOPIC = "connect-offsets"
CONFIG_TOPIC = "connect-configs"
STATUS_TOPIC = "connect-status"
# Since tasks can be assigned to any node and we're testing with files, we need to make sure the content is the same
# across all nodes.
FIRST_INPUT_LIST = ["foo", "bar", "baz"]
FIRST_INPUTS = "\n".join(FIRST_INPUT_LIST) + "\n"
SECOND_INPUT_LIST = ["razz", "ma", "tazz"]
SECOND_INPUTS = "\n".join(SECOND_INPUT_LIST) + "\n"
SCHEMA = { "type": "string", "optional": False }
def __init__(self, test_context):
super(ConnectDistributedTest, self).__init__(test_context)
self.num_zk = 1
self.num_brokers = 1
self.topics = {
'test' : { 'partitions': 1, 'replication-factor': 1 }
}
self.zk = ZookeeperService(test_context, self.num_zk)
self.key_converter = "org.apache.kafka.connect.json.JsonConverter"
self.value_converter = "org.apache.kafka.connect.json.JsonConverter"
self.schemas = True
def setup_services(self, security_protocol=SecurityConfig.PLAINTEXT, timestamp_type=None):
self.kafka = KafkaService(self.test_context, self.num_brokers, self.zk,
security_protocol=security_protocol, interbroker_security_protocol=security_protocol,
topics=self.topics)
if timestamp_type is not None:
for node in self.kafka.nodes:
node.config[config_property.MESSAGE_TIMESTAMP_TYPE] = timestamp_type
self.cc = ConnectDistributedService(self.test_context, 3, self.kafka, [self.INPUT_FILE, self.OUTPUT_FILE])
self.cc.log_level = "DEBUG"
self.zk.start()
self.kafka.start()
def _start_connector(self, config_file):
connector_props = self.render(config_file)
connector_config = dict([line.strip().split('=', 1) for line in connector_props.split('\n') if line.strip() and not line.strip().startswith('#')])
self.cc.create_connector(connector_config)
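    # Illustrative example (property names are hypothetical): a rendered
    # properties file such as
    #   name=local-file-source
    #   connector.class=org.apache.kafka.connect.file.FileStreamSourceConnector
    # is parsed above into {'name': 'local-file-source', 'connector.class':
    # 'org.apache.kafka.connect.file.FileStreamSourceConnector'} before being
    # submitted via create_connector().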
def _connector_status(self, connector, node=None):
try:
return self.cc.get_connector_status(connector, node)
except ConnectRestError:
return None
def _connector_has_state(self, status, state):
return status is not None and status['connector']['state'] == state
def _task_has_state(self, task_id, status, state):
if not status:
return False
tasks = status['tasks']
if not tasks:
return False
for task in tasks:
if task['id'] == task_id:
return task['state'] == state
return False
def _all_tasks_have_state(self, status, task_count, state):
if status is None:
return False
tasks = status['tasks']
if len(tasks) != task_count:
return False
        return all(task['state'] == state for task in tasks)
def is_running(self, connector, node=None):
status = self._connector_status(connector.name, node)
return self._connector_has_state(status, 'RUNNING') and self._all_tasks_have_state(status, connector.tasks, 'RUNNING')
def is_paused(self, connector, node=None):
status = self._connector_status(connector.name, node)
return self._connector_has_state(status, 'PAUSED') and self._all_tasks_have_state(status, connector.tasks, 'PAUSED')
def connector_is_running(self, connector, node=None):
status = self._connector_status(connector.name, node)
return self._connector_has_state(status, 'RUNNING')
def connector_is_failed(self, connector, node=None):
status = self._connector_status(connector.name, node)
return self._connector_has_state(status, 'FAILED')
def task_is_failed(self, connector, task_id, node=None):
status = self._connector_status(connector.name, node)
return self._task_has_state(task_id, status, 'FAILED')
def task_is_running(self, connector, task_id, node=None):
status = self._connector_status(connector.name, node)
return self._task_has_state(task_id, status, 'RUNNING')
@cluster(num_nodes=5)
def test_restart_failed_connector(self):
self.setup_services()
self.cc.set_configs(lambda node: self.render("connect-distributed.properties", node=node))
self.cc.start()
self.sink = MockSink(self.cc, self.topics.keys(), mode='connector-failure', delay_sec=5)
self.sink.start()
wait_until(lambda: self.connector_is_failed(self.sink), timeout_sec=15,
err_msg="Failed to see connector transition to the FAILED state")
self.cc.restart_connector(self.sink.name)
wait_until(lambda: self.connector_is_running(self.sink), timeout_sec=10,
err_msg="Failed to see connector transition to the RUNNING state")
@cluster(num_nodes=5)
@matrix(connector_type=["source", "sink"])
def test_restart_failed_task(self, connector_type):
self.setup_services()
self.cc.set_configs(lambda node: self.render("connect-distributed.properties", node=node))
self.cc.start()
connector = None
if connector_type == "sink":
connector = MockSink(self.cc, self.topics.keys(), mode='task-failure', delay_sec=5)
else:
connector = MockSource(self.cc, mode='task-failure', delay_sec=5)
connector.start()
task_id = 0
wait_until(lambda: self.task_is_failed(connector, task_id), timeout_sec=20,
err_msg="Failed to see task transition to the FAILED state")
self.cc.restart_task(connector.name, task_id)
wait_until(lambda: self.task_is_running(connector, task_id), timeout_sec=10,
err_msg="Failed to see task transition to the RUNNING state")
@cluster(num_nodes=5)
def test_pause_and_resume_source(self):
"""
Verify that source connectors stop producing records when paused and begin again after
being resumed.
"""
self.setup_services()
self.cc.set_configs(lambda node: self.render("connect-distributed.properties", node=node))
self.cc.start()
self.source = VerifiableSource(self.cc)
self.source.start()
wait_until(lambda: self.is_running(self.source), timeout_sec=30,
err_msg="Failed to see connector transition to the RUNNING state")
self.cc.pause_connector(self.source.name)
# wait until all nodes report the paused transition
for node in self.cc.nodes:
wait_until(lambda: self.is_paused(self.source, node), timeout_sec=30,
err_msg="Failed to see connector transition to the PAUSED state")
# verify that we do not produce new messages while paused
num_messages = len(self.source.sent_messages())
time.sleep(10)
assert num_messages == len(self.source.sent_messages()), "Paused source connector should not produce any messages"
self.cc.resume_connector(self.source.name)
for node in self.cc.nodes:
wait_until(lambda: self.is_running(self.source, node), timeout_sec=30,
err_msg="Failed to see connector transition to the RUNNING state")
# after resuming, we should see records produced again
wait_until(lambda: len(self.source.sent_messages()) > num_messages, timeout_sec=30,
err_msg="Failed to produce messages after resuming source connector")
@cluster(num_nodes=5)
def test_pause_and_resume_sink(self):
"""
Verify that sink connectors stop consuming records when paused and begin again after
being resumed.
"""
self.setup_services()
self.cc.set_configs(lambda node: self.render("connect-distributed.properties", node=node))
self.cc.start()
# use the verifiable source to produce a steady stream of messages
self.source = VerifiableSource(self.cc)
self.source.start()
wait_until(lambda: len(self.source.committed_messages()) > 0, timeout_sec=30,
err_msg="Timeout expired waiting for source task to produce a message")
self.sink = VerifiableSink(self.cc)
self.sink.start()
wait_until(lambda: self.is_running(self.sink), timeout_sec=30,
err_msg="Failed to see connector transition to the RUNNING state")
self.cc.pause_connector(self.sink.name)
# wait until all nodes report the paused transition
for node in self.cc.nodes:
wait_until(lambda: self.is_paused(self.sink, node), timeout_sec=30,
err_msg="Failed to see connector transition to the PAUSED state")
# verify that we do not consume new messages while paused
num_messages = len(self.sink.received_messages())
time.sleep(10)
assert num_messages == len(self.sink.received_messages()), "Paused sink connector should not consume any messages"
self.cc.resume_connector(self.sink.name)
for node in self.cc.nodes:
wait_until(lambda: self.is_running(self.sink, node), timeout_sec=30,
err_msg="Failed to see connector transition to the RUNNING state")
# after resuming, we should see records consumed again
wait_until(lambda: len(self.sink.received_messages()) > num_messages, timeout_sec=30,
err_msg="Failed to consume messages after resuming sink connector")
@cluster(num_nodes=5)
def test_pause_state_persistent(self):
"""
Verify that paused state is preserved after a cluster restart.
"""
self.setup_services()
self.cc.set_configs(lambda node: self.render("connect-distributed.properties", node=node))
self.cc.start()
self.source = VerifiableSource(self.cc)
self.source.start()
wait_until(lambda: self.is_running(self.source), timeout_sec=30,
err_msg="Failed to see connector transition to the RUNNING state")
self.cc.pause_connector(self.source.name)
self.cc.restart()
# we should still be paused after restarting
for node in self.cc.nodes:
wait_until(lambda: self.is_paused(self.source, node), timeout_sec=30,
err_msg="Failed to see connector startup in PAUSED state")
@cluster(num_nodes=5)
@parametrize(security_protocol=SecurityConfig.PLAINTEXT)
@cluster(num_nodes=6)
@parametrize(security_protocol=SecurityConfig.SASL_SSL)
def test_file_source_and_sink(self, security_protocol):
"""
Tests that a basic file connector works across clean rolling bounces. This validates that the connector is
correctly created, tasks instantiated, and as nodes restart the work is rebalanced across nodes.
"""
self.setup_services(security_protocol=security_protocol)
self.cc.set_configs(lambda node: self.render("connect-distributed.properties", node=node))
self.cc.start()
self.logger.info("Creating connectors")
self._start_connector("connect-file-source.properties")
self._start_connector("connect-file-sink.properties")
# Generating data on the source node should generate new records and create new output on the sink node. Timeouts
# here need to be more generous than they are for standalone mode because a) it takes longer to write configs,
        # do rebalancing of the group, etc., and b) without explicit leave group support, rebalancing takes a while
for node in self.cc.nodes:
node.account.ssh("echo -e -n " + repr(self.FIRST_INPUTS) + " >> " + self.INPUT_FILE)
wait_until(lambda: self._validate_file_output(self.FIRST_INPUT_LIST), timeout_sec=70, err_msg="Data added to input file was not seen in the output file in a reasonable amount of time.")
# Restarting both should result in them picking up where they left off,
# only processing new data.
self.cc.restart()
for node in self.cc.nodes:
node.account.ssh("echo -e -n " + repr(self.SECOND_INPUTS) + " >> " + self.INPUT_FILE)
wait_until(lambda: self._validate_file_output(self.FIRST_INPUT_LIST + self.SECOND_INPUT_LIST), timeout_sec=70, err_msg="Sink output file never converged to the same state as the input file")
@cluster(num_nodes=5)
@matrix(clean=[True, False])
def test_bounce(self, clean):
"""
Validates that source and sink tasks that run continuously and produce a predictable sequence of messages
run correctly and deliver messages exactly once when Kafka Connect workers undergo clean rolling bounces.
"""
num_tasks = 3
self.setup_services()
self.cc.set_configs(lambda node: self.render("connect-distributed.properties", node=node))
self.cc.start()
self.source = VerifiableSource(self.cc, tasks=num_tasks, throughput=100)
self.source.start()
self.sink = VerifiableSink(self.cc, tasks=num_tasks)
self.sink.start()
for _ in range(3):
for node in self.cc.nodes:
started = time.time()
self.logger.info("%s bouncing Kafka Connect on %s", clean and "Clean" or "Hard", str(node.account))
self.cc.stop_node(node, clean_shutdown=clean)
with node.account.monitor_log(self.cc.LOG_FILE) as monitor:
self.cc.start_node(node)
monitor.wait_until("Starting connectors and tasks using config offset", timeout_sec=90,
err_msg="Kafka Connect worker didn't successfully join group and start work")
self.logger.info("Bounced Kafka Connect on %s and rejoined in %f seconds", node.account, time.time() - started)
# Give additional time for the consumer groups to recover. Even if it is not a hard bounce, there are
# some cases where a restart can cause a rebalance to take the full length of the session timeout
# (e.g. if the client shuts down before it has received the memberId from its initial JoinGroup).
# If we don't give enough time for the group to stabilize, the next bounce may cause consumers to
# be shut down before they have any time to process data and we can end up with zero data making it
# through the test.
time.sleep(15)
self.source.stop()
self.sink.stop()
self.cc.stop()
# Validate at least once delivery of everything that was reported as written since we should have flushed and
# cleanly exited. Currently this only tests at least once delivery because the sink task may not have consumed
# all the messages generated by the source task. This needs to be done per-task since seqnos are not unique across
# tasks.
success = True
errors = []
allow_dups = not clean
src_messages = self.source.committed_messages()
sink_messages = self.sink.flushed_messages()
for task in range(num_tasks):
# Validate source messages
src_seqnos = [msg['seqno'] for msg in src_messages if msg['task'] == task]
# Every seqno up to the largest one we ever saw should appear. Each seqno should only appear once because clean
# bouncing should commit on rebalance.
src_seqno_max = max(src_seqnos)
self.logger.debug("Max source seqno: %d", src_seqno_max)
src_seqno_counts = Counter(src_seqnos)
missing_src_seqnos = sorted(set(range(src_seqno_max)).difference(set(src_seqnos)))
            duplicate_src_seqnos = sorted([seqno for seqno, count in src_seqno_counts.items() if count > 1])
if missing_src_seqnos:
self.logger.error("Missing source sequence numbers for task " + str(task))
errors.append("Found missing source sequence numbers for task %d: %s" % (task, missing_src_seqnos))
success = False
if not allow_dups and duplicate_src_seqnos:
self.logger.error("Duplicate source sequence numbers for task " + str(task))
errors.append("Found duplicate source sequence numbers for task %d: %s" % (task, duplicate_src_seqnos))
success = False
# Validate sink messages
sink_seqnos = [msg['seqno'] for msg in sink_messages if msg['task'] == task]
# Every seqno up to the largest one we ever saw should appear. Each seqno should only appear once because
# clean bouncing should commit on rebalance.
sink_seqno_max = max(sink_seqnos)
self.logger.debug("Max sink seqno: %d", sink_seqno_max)
sink_seqno_counts = Counter(sink_seqnos)
missing_sink_seqnos = sorted(set(range(sink_seqno_max)).difference(set(sink_seqnos)))
duplicate_sink_seqnos = sorted([seqno for seqno,count in sink_seqno_counts.iteritems() if count > 1])
if missing_sink_seqnos:
self.logger.error("Missing sink sequence numbers for task " + str(task))
errors.append("Found missing sink sequence numbers for task %d: %s" % (task, missing_sink_seqnos))
success = False
if not allow_dups and duplicate_sink_seqnos:
self.logger.error("Duplicate sink sequence numbers for task " + str(task))
errors.append("Found duplicate sink sequence numbers for task %d: %s" % (task, duplicate_sink_seqnos))
success = False
# Validate source and sink match
if sink_seqno_max > src_seqno_max:
self.logger.error("Found sink sequence number greater than any generated sink sequence number for task %d: %d > %d", task, sink_seqno_max, src_seqno_max)
errors.append("Found sink sequence number greater than any generated sink sequence number for task %d: %d > %d" % (task, sink_seqno_max, src_seqno_max))
success = False
if src_seqno_max < 1000 or sink_seqno_max < 1000:
errors.append("Not enough messages were processed: source:%d sink:%d" % (src_seqno_max, sink_seqno_max))
success = False
if not success:
self.mark_for_collect(self.cc)
# Also collect the data in the topic to aid in debugging
consumer_validator = ConsoleConsumer(self.test_context, 1, self.kafka, self.source.topic, consumer_timeout_ms=1000, print_key=True)
consumer_validator.run()
self.mark_for_collect(consumer_validator, "consumer_stdout")
assert success, "Found validation errors:\n" + "\n ".join(errors)
@cluster(num_nodes=6)
def test_transformations(self):
self.setup_services(timestamp_type='CreateTime')
self.cc.set_configs(lambda node: self.render("connect-distributed.properties", node=node))
self.cc.start()
ts_fieldname = 'the_timestamp'
NamedConnector = namedtuple('Connector', ['name'])
source_connector = NamedConnector(name='file-src')
self.cc.create_connector({
'name': source_connector.name,
'connector.class': 'org.apache.kafka.connect.file.FileStreamSourceConnector',
'tasks.max': 1,
'file': self.INPUT_FILE,
'topic': self.TOPIC,
'transforms': 'hoistToStruct,insertTimestampField',
'transforms.hoistToStruct.type': 'org.apache.kafka.connect.transforms.HoistField$Value',
'transforms.hoistToStruct.field': 'content',
'transforms.insertTimestampField.type': 'org.apache.kafka.connect.transforms.InsertField$Value',
'transforms.insertTimestampField.timestamp.field': ts_fieldname,
})
wait_until(lambda: self.connector_is_running(source_connector), timeout_sec=30, err_msg='Failed to see connector transition to the RUNNING state')
for node in self.cc.nodes:
node.account.ssh("echo -e -n " + repr(self.FIRST_INPUTS) + " >> " + self.INPUT_FILE)
consumer = ConsoleConsumer(self.test_context, 1, self.kafka, self.TOPIC, consumer_timeout_ms=15000, print_timestamp=True)
consumer.run()
assert len(consumer.messages_consumed[1]) == len(self.FIRST_INPUT_LIST)
expected_schema = {
'type': 'struct',
'fields': [
{'field': 'content', 'type': 'string', 'optional': False},
{'field': ts_fieldname, 'name': 'org.apache.kafka.connect.data.Timestamp', 'type': 'int64', 'version': 1, 'optional': True},
],
'optional': False
}
for msg in consumer.messages_consumed[1]:
(ts_info, value) = msg.split('\t')
assert ts_info.startswith('CreateTime:')
ts = int(ts_info[len('CreateTime:'):])
obj = json.loads(value)
assert obj['schema'] == expected_schema
assert obj['payload']['content'] in self.FIRST_INPUT_LIST
assert obj['payload'][ts_fieldname] == ts
def _validate_file_output(self, input):
input_set = set(input)
# Output needs to be collected from all nodes because we can't be sure where the tasks will be scheduled.
# Between the first and second rounds, we might even end up with half the data on each node.
output_set = set(itertools.chain(*[
[line.strip() for line in self._file_contents(node, self.OUTPUT_FILE)] for node in self.cc.nodes
]))
return input_set == output_set
def _file_contents(self, node, file):
try:
            # Convert to a list here, or the RemoteCommandError may be raised while iterating over the generator
            # instead of immediately
return list(node.account.ssh_capture("cat " + file))
except RemoteCommandError:
return []
|
|
#
# messages.py
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from __future__ import absolute_import, division, print_function, unicode_literals
import hashlib
import struct
import time
import random
import sys
if sys.version > '3':
import io
else:
import cStringIO as io
from bitcoin.core import *
MSG_TX = 1
MSG_BLOCK = 2
MSG_FILTERED_BLOCK = 3
class msg_version(object):
command = b"version"
def __init__(self, protover=PROTO_VERSION):
self.protover = MIN_PROTO_VERSION
self.nVersion = protover
self.nServices = 1
self.nTime = time.time()
self.addrTo = CAddress(MIN_PROTO_VERSION)
self.addrFrom = CAddress(MIN_PROTO_VERSION)
self.nNonce = random.getrandbits(64)
self.strSubVer = b'/python-bitcoin-0.0.1/'
self.nStartingHeight = -1
def deserialize(self, f):
self.nVersion = struct.unpack(b"<i", f.read(4))[0]
if self.nVersion == 10300:
self.nVersion = 300
self.nServices = struct.unpack(b"<Q", f.read(8))[0]
self.nTime = struct.unpack(b"<q", f.read(8))[0]
self.addrTo = CAddress(MIN_PROTO_VERSION)
self.addrTo.deserialize(f)
if self.nVersion >= 106:
self.addrFrom = CAddress(MIN_PROTO_VERSION)
self.addrFrom.deserialize(f)
self.nNonce = struct.unpack(b"<Q", f.read(8))[0]
self.strSubVer = deser_string(f)
if self.nVersion >= 209:
self.nStartingHeight = struct.unpack(b"<i", f.read(4))[0]
else:
self.nStartingHeight = None
else:
self.addrFrom = None
self.nNonce = None
self.strSubVer = None
self.nStartingHeight = None
def serialize(self):
r = b""
r += struct.pack(b"<i", self.nVersion)
r += struct.pack(b"<Q", self.nServices)
r += struct.pack(b"<q", self.nTime)
r += self.addrTo.serialize()
r += self.addrFrom.serialize()
r += struct.pack(b"<Q", self.nNonce)
r += ser_string(self.strSubVer)
r += struct.pack(b"<i", self.nStartingHeight)
return r
def __repr__(self):
return "msg_version(nVersion=%i nServices=%i nTime=%s addrTo=%s addrFrom=%s nNonce=0x%016X strSubVer=%s nStartingHeight=%i)" % (self.nVersion, self.nServices, time.ctime(self.nTime), repr(self.addrTo), repr(self.addrFrom), self.nNonce, self.strSubVer, self.nStartingHeight)
class msg_verack(object):
command = b"verack"
def __init__(self, protover=PROTO_VERSION):
self.protover = protover
def deserialize(self, f):
pass
def serialize(self):
return b""
def __repr__(self):
return "msg_verack()"
class msg_addr(object):
command = b"addr"
def __init__(self, protover=PROTO_VERSION):
self.protover = protover
self.addrs = []
def deserialize(self, f):
self.addrs = deser_vector(f, CAddress, self.protover)
def serialize(self):
return ser_vector(self.addrs)
def __repr__(self):
return "msg_addr(addrs=%s)" % (repr(self.addrs))
class msg_alert(object):
command = b"alert"
def __init__(self, protover=PROTO_VERSION):
self.protover = protover
self.alert = CAlert()
def deserialize(self, f):
self.alert = CAlert()
self.alert.deserialize(f)
def serialize(self):
r = b""
r += self.alert.serialize()
return r
def __repr__(self):
return "msg_alert(alert=%s)" % (repr(self.alert), )
class msg_inv(object):
command = b"inv"
def __init__(self, protover=PROTO_VERSION):
self.protover = protover
self.inv = []
def deserialize(self, f):
self.inv = deser_vector(f, CInv)
def serialize(self):
return ser_vector(self.inv)
def __repr__(self):
return "msg_inv(inv=%s)" % (repr(self.inv))
class msg_getdata(object):
command = b"getdata"
def __init__(self, protover=PROTO_VERSION):
self.protover = protover
self.inv = []
def deserialize(self, f):
self.inv = deser_vector(f, CInv)
def serialize(self):
return ser_vector(self.inv)
def __repr__(self):
return "msg_getdata(inv=%s)" % (repr(self.inv))
class msg_getblocks(object):
command = b"getblocks"
def __init__(self, protover=PROTO_VERSION):
self.protover = protover
self.locator = CBlockLocator()
self.hashstop = 0
def deserialize(self, f):
self.locator = CBlockLocator()
self.locator.deserialize(f)
self.hashstop = f.read(32)
def serialize(self):
r = b""
r += self.locator.serialize()
r += self.hashstop
return r
def __repr__(self):
return "msg_getblocks(locator=%s hashstop=%064x)" % (repr(self.locator), self.hashstop)
class msg_getheaders(object):
command = b"getheaders"
def __init__(self, protover=PROTO_VERSION):
self.protover = protover
self.locator = CBlockLocator()
self.hashstop = 0
def deserialize(self, f):
self.locator = CBlockLocator()
self.locator.deserialize(f)
self.hashstop = f.read(32)
def serialize(self):
r = b""
r += self.locator.serialize()
r += self.hashstop
return r
def __repr__(self):
return "msg_getheaders(locator=%s hashstop=%064x)" % (repr(self.locator), self.hashstop)
class msg_headers(object):
command = b"headers"
def __init__(self, protover=PROTO_VERSION):
self.protover = protover
self.headers = []
def deserialize(self, f):
self.headers = deser_vector(f, CBlock)
def serialize(self):
return ser_vector(self.headers)
def __repr__(self):
return "msg_headers(headers=%s)" % (repr(self.headers))
class msg_tx(object):
command = b"tx"
def __init__(self, protover=PROTO_VERSION):
self.protover = protover
self.tx = CTransaction()
def deserialize(self, f):
self.tx.deserialize(f)
def serialize(self):
return self.tx.serialize()
def __repr__(self):
return "msg_tx(tx=%s)" % (repr(self.tx))
class msg_block(object):
command = b"block"
def __init__(self, protover=PROTO_VERSION):
self.protover = protover
self.block = CBlock()
def deserialize(self, f):
self.block.deserialize(f)
def serialize(self):
return self.block.serialize()
def __repr__(self):
return "msg_block(block=%s)" % (repr(self.block))
class msg_getaddr(object):
command = b"getaddr"
def __init__(self, protover=PROTO_VERSION):
self.protover = protover
def deserialize(self, f):
pass
def serialize(self):
return b""
def __repr__(self):
return "msg_getaddr()"
#msg_checkorder
#msg_submitorder
#msg_reply
class msg_ping(object):
command = b"ping"
def __init__(self, protover=PROTO_VERSION, nonce=0):
self.protover = protover
self.nonce = nonce
def deserialize(self, f):
if self.protover > BIP0031_VERSION:
self.nonce = struct.unpack(b"<Q", f.read(8))[0]
def serialize(self):
r = b""
if self.protover > BIP0031_VERSION:
r += struct.pack(b"<Q", self.nonce)
return r
def __repr__(self):
return "msg_ping(0x%x)" % (self.nonce,)
class msg_pong(object):
command = b"pong"
def __init__(self, protover=PROTO_VERSION, nonce=0):
self.protover = protover
self.nonce = nonce
def deserialize(self, f):
self.nonce = struct.unpack(b"<Q", f.read(8))[0]
def serialize(self):
r = b""
r += struct.pack(b"<Q", self.nonce)
return r
def __repr__(self):
return "msg_pong(0x%x)" % (self.nonce,)
class msg_mempool(object):
command = b"mempool"
def __init__(self, protover=PROTO_VERSION):
self.protover = protover
def deserialize(self, f):
pass
def serialize(self):
return b""
def __repr__(self):
return "msg_mempool()"
messagemap = {
"version": msg_version,
"verack": msg_verack,
"addr": msg_addr,
"alert": msg_alert,
"inv": msg_inv,
"getdata": msg_getdata,
"getblocks": msg_getblocks,
"tx": msg_tx,
"block": msg_block,
"getaddr": msg_getaddr,
"ping": msg_ping,
"pong": msg_pong,
"mempool": msg_mempool
}
def message_read(netmagic, f):
try:
recvbuf = f.read(4 + 12 + 4 + 4)
except IOError:
return None
# check magic
if len(recvbuf) < 4:
return
if recvbuf[:4] != netmagic.msg_start:
raise ValueError("got garbage %s" % repr(recvbuf))
# check checksum
if len(recvbuf) < 4 + 12 + 4 + 4:
return
# remaining header fields: command, msg length, checksum
command = recvbuf[4:4+12].split(b"\x00", 1)[0]
msglen = struct.unpack(b"<i", recvbuf[4+12:4+12+4])[0]
checksum = recvbuf[4+12+4:4+12+4+4]
# read message body
try:
recvbuf += f.read(msglen)
except IOError:
return None
msg = recvbuf[4+12+4+4:4+12+4+4+msglen]
th = hashlib.sha256(msg).digest()
h = hashlib.sha256(th).digest()
if checksum != h[:4]:
raise ValueError("got bad checksum %s" % repr(recvbuf))
recvbuf = recvbuf[4+12+4+4+msglen:]
if command in messagemap:
f = io.StringIO(msg)
t = messagemap[command]()
t.deserialize(f)
return t
else:
return None
def message_to_str(netmagic, message):
command = message.command
data = message.serialize()
tmsg = netmagic.msg_start
tmsg += command
tmsg += b"\x00" * (12 - len(command))
tmsg += struct.pack(b"<I", len(data))
# add checksum
th = hashlib.sha256(data).digest()
h = hashlib.sha256(th).digest()
tmsg += h[:4]
tmsg += data
return tmsg
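# A minimal round-trip sketch (illustrative; not part of the original module).
# It frames a ping and parses it back, assuming the Python 2 string semantics
# used above and a netmagic-like object that exposes msg_start, the 4-byte
# network magic (the value below is the usual mainnet one).
if __name__ == '__main__':
    class _FakeNetMagic(object):
        msg_start = b"\xf9\xbe\xb4\xd9"
    _wire = message_to_str(_FakeNetMagic(), msg_ping(nonce=7))
    _msg = message_read(_FakeNetMagic(), io.StringIO(_wire))
    print("parsed: %s" % repr(_msg))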
|
|
# Copyright (C) 2014,2015 VA Linux Systems Japan K.K.
# Copyright (C) 2014,2015 YAMAMOTO Takashi <yamamoto at valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
* references
** OVS agent https://wiki.openstack.org/wiki/Ovs-flow-logic
"""
import netaddr
from neutron_lib import constants as p_const
from os_ken.lib.packet import ether_types
from os_ken.lib.packet import icmpv6
from os_ken.lib.packet import in_proto
from oslo_log import log as logging
from neutron.plugins.ml2.drivers.openvswitch.agent.common import constants
from neutron.plugins.ml2.drivers.openvswitch.agent.openflow.native \
import ovs_bridge
LOG = logging.getLogger(__name__)
class OVSIntegrationBridge(ovs_bridge.OVSAgentBridge):
"""openvswitch agent br-int specific logic."""
def setup_default_table(self):
self.setup_canary_table()
self.install_goto(dest_table_id=constants.TRANSIENT_TABLE)
self.install_normal(table_id=constants.TRANSIENT_TABLE, priority=3)
self.install_drop(table_id=constants.ARP_SPOOF_TABLE)
self.install_drop(table_id=constants.LOCAL_SWITCHING,
priority=constants.OPENFLOW_MAX_PRIORITY,
vlan_vid=constants.DEAD_VLAN_TAG)
def setup_canary_table(self):
self.install_drop(constants.CANARY_TABLE)
def check_canary_table(self):
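        # The canary table holds a single drop flow installed at startup; if
        # dumping it fails the switch is unreachable (OVS_DEAD), an empty
        # table means the flows were wiped by a restart (OVS_RESTARTED), and
        # a populated table means the bridge is healthy (OVS_NORMAL).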
try:
flows = self.dump_flows(constants.CANARY_TABLE)
except RuntimeError:
LOG.exception("Failed to communicate with the switch")
return constants.OVS_DEAD
return constants.OVS_NORMAL if flows else constants.OVS_RESTARTED
@staticmethod
def _local_vlan_match(_ofp, ofpp, port, vlan_vid):
return ofpp.OFPMatch(in_port=port, vlan_vid=vlan_vid)
def provision_local_vlan(self, port, lvid, segmentation_id):
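        # Tag traffic arriving on this port with the local VLAN id: untagged
        # frames (segmentation_id is None) get a VLAN header pushed, tagged
        # frames matching segmentation_id get the VID rewritten to lvid, and
        # in both cases the packet continues to the transient table.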
(_dp, ofp, ofpp) = self._get_dp()
if segmentation_id is None:
vlan_vid = ofp.OFPVID_NONE
actions = [ofpp.OFPActionPushVlan()]
else:
vlan_vid = segmentation_id | ofp.OFPVID_PRESENT
actions = []
match = self._local_vlan_match(ofp, ofpp, port, vlan_vid)
actions += [
ofpp.OFPActionSetField(vlan_vid=lvid | ofp.OFPVID_PRESENT),
]
instructions = [
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, actions),
ofpp.OFPInstructionGotoTable(table_id=constants.TRANSIENT_TABLE),
]
self.install_instructions(
instructions=instructions,
priority=3,
match=match,
)
def reclaim_local_vlan(self, port, segmentation_id):
(_dp, ofp, ofpp) = self._get_dp()
if segmentation_id is None:
vlan_vid = ofp.OFPVID_NONE
else:
vlan_vid = segmentation_id | ofp.OFPVID_PRESENT
match = self._local_vlan_match(ofp, ofpp, port, vlan_vid)
self.uninstall_flows(match=match)
@staticmethod
def _dvr_to_src_mac_match(ofp, ofpp, vlan_tag, dst_mac):
return ofpp.OFPMatch(vlan_vid=vlan_tag | ofp.OFPVID_PRESENT,
eth_dst=dst_mac)
@staticmethod
def _dvr_to_src_mac_table_id(network_type):
if network_type == p_const.TYPE_VLAN:
return constants.DVR_TO_SRC_MAC_VLAN
else:
return constants.DVR_TO_SRC_MAC
def install_dvr_to_src_mac(self, network_type,
vlan_tag, gateway_mac, dst_mac, dst_port):
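        # DVR source-MAC rewrite: frames on this local VLAN addressed to
        # dst_mac get their source MAC set to the distributed gateway MAC,
        # then the matching transient-table rule pops the VLAN tag and
        # outputs the frame directly to dst_port.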
table_id = self._dvr_to_src_mac_table_id(network_type)
(_dp, ofp, ofpp) = self._get_dp()
match = self._dvr_to_src_mac_match(ofp, ofpp,
vlan_tag=vlan_tag, dst_mac=dst_mac)
actions = [
ofpp.OFPActionSetField(eth_src=gateway_mac),
]
instructions = [
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, actions),
ofpp.OFPInstructionGotoTable(table_id=constants.TRANSIENT_TABLE),
]
self.install_instructions(table_id=table_id,
priority=4,
match=match,
instructions=instructions)
actions = [
ofpp.OFPActionPopVlan(),
ofpp.OFPActionOutput(dst_port, 0),
]
self.install_apply_actions(table_id=constants.TRANSIENT_TABLE,
priority=4,
match=match,
actions=actions)
def delete_dvr_to_src_mac(self, network_type, vlan_tag, dst_mac):
table_id = self._dvr_to_src_mac_table_id(network_type)
(_dp, ofp, ofpp) = self._get_dp()
match = self._dvr_to_src_mac_match(ofp, ofpp,
vlan_tag=vlan_tag, dst_mac=dst_mac)
for table in (table_id, constants.TRANSIENT_TABLE):
self.uninstall_flows(
strict=True, priority=4, table_id=table, match=match)
def add_dvr_mac_vlan(self, mac, port):
self.install_goto(table_id=constants.LOCAL_SWITCHING,
priority=4,
in_port=port,
eth_src=mac,
dest_table_id=constants.DVR_TO_SRC_MAC_VLAN)
def remove_dvr_mac_vlan(self, mac):
# REVISIT(yamamoto): match in_port as well?
self.uninstall_flows(table_id=constants.LOCAL_SWITCHING,
eth_src=mac)
def add_dvr_mac_tun(self, mac, port):
self.install_goto(table_id=constants.LOCAL_SWITCHING,
priority=2,
in_port=port,
eth_src=mac,
dest_table_id=constants.DVR_TO_SRC_MAC)
def remove_dvr_mac_tun(self, mac, port):
self.uninstall_flows(table_id=constants.LOCAL_SWITCHING,
in_port=port, eth_src=mac)
@staticmethod
def _arp_reply_match(ofp, ofpp, port):
return ofpp.OFPMatch(in_port=port,
eth_type=ether_types.ETH_TYPE_ARP)
@staticmethod
def _icmpv6_reply_match(ofp, ofpp, port):
return ofpp.OFPMatch(in_port=port,
eth_type=ether_types.ETH_TYPE_IPV6,
ip_proto=in_proto.IPPROTO_ICMPV6,
icmpv6_type=icmpv6.ND_NEIGHBOR_ADVERT)
def install_icmpv6_na_spoofing_protection(self, port, ip_addresses):
# Allow neighbor advertisements as long as they match addresses
# that actually belong to the port.
for ip in ip_addresses:
masked_ip = self._cidr_to_os_ken(ip)
self.install_goto(
table_id=constants.ARP_SPOOF_TABLE, priority=2,
eth_type=ether_types.ETH_TYPE_IPV6,
ip_proto=in_proto.IPPROTO_ICMPV6,
icmpv6_type=icmpv6.ND_NEIGHBOR_ADVERT,
ipv6_nd_target=masked_ip, in_port=port,
dest_table_id=constants.TRANSIENT_TABLE)
# Now that the rules are ready, direct icmpv6 neighbor advertisement
# traffic from the port into the anti-spoof table.
(_dp, ofp, ofpp) = self._get_dp()
match = self._icmpv6_reply_match(ofp, ofpp, port=port)
self.install_goto(table_id=constants.LOCAL_SWITCHING,
priority=10,
match=match,
dest_table_id=constants.ARP_SPOOF_TABLE)
def set_allowed_macs_for_port(self, port, mac_addresses=None,
allow_all=False):
if allow_all:
self.uninstall_flows(table_id=constants.LOCAL_SWITCHING,
in_port=port)
self.uninstall_flows(table_id=constants.MAC_SPOOF_TABLE,
in_port=port)
return
mac_addresses = mac_addresses or []
for address in mac_addresses:
self.install_goto(
table_id=constants.MAC_SPOOF_TABLE, priority=2,
eth_src=address, in_port=port,
dest_table_id=constants.TRANSIENT_TABLE)
# normalize so we can see if macs are the same
mac_addresses = {netaddr.EUI(mac) for mac in mac_addresses}
flows = self.dump_flows(constants.MAC_SPOOF_TABLE)
for flow in flows:
matches = dict(flow.match.items())
if matches.get('in_port') != port:
continue
if not matches.get('eth_src'):
continue
flow_mac = matches['eth_src']
if netaddr.EUI(flow_mac) not in mac_addresses:
self.uninstall_flows(table_id=constants.MAC_SPOOF_TABLE,
in_port=port, eth_src=flow_mac)
self.install_goto(table_id=constants.LOCAL_SWITCHING,
priority=9, in_port=port,
dest_table_id=constants.MAC_SPOOF_TABLE)
def install_arp_spoofing_protection(self, port, ip_addresses):
# allow ARP replies as long as they match addresses that actually
# belong to the port.
for ip in ip_addresses:
masked_ip = self._cidr_to_os_ken(ip)
self.install_goto(table_id=constants.ARP_SPOOF_TABLE,
priority=2,
eth_type=ether_types.ETH_TYPE_ARP,
arp_spa=masked_ip,
in_port=port,
dest_table_id=constants.MAC_SPOOF_TABLE)
# Now that the rules are ready, direct ARP traffic from the port into
# the anti-spoof table.
# This strategy fails gracefully because OVS versions that can't match
# on ARP headers will just process traffic normally.
(_dp, ofp, ofpp) = self._get_dp()
match = self._arp_reply_match(ofp, ofpp, port=port)
self.install_goto(table_id=constants.LOCAL_SWITCHING,
priority=10,
match=match,
dest_table_id=constants.ARP_SPOOF_TABLE)
def delete_arp_spoofing_protection(self, port):
(_dp, ofp, ofpp) = self._get_dp()
match = self._arp_reply_match(ofp, ofpp, port=port)
self.uninstall_flows(table_id=constants.LOCAL_SWITCHING,
match=match)
match = self._icmpv6_reply_match(ofp, ofpp, port=port)
self.uninstall_flows(table_id=constants.LOCAL_SWITCHING,
match=match)
self.delete_arp_spoofing_allow_rules(port)
def delete_arp_spoofing_allow_rules(self, port):
self.uninstall_flows(table_id=constants.ARP_SPOOF_TABLE,
in_port=port)
|
|
import datetime
import os
import re
import time
from pprint import pformat
from urllib import urlencode, quote
from urlparse import urljoin
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
# Use fast_parse_qsl for query-string parsing; it is much faster when the
# query string is very long.
# Test Result:
# Query string length: 10.449 MB
# Start parse use fast_parse_qsl
# Finish parse use:0:00:00.059139
#
# Start parse use parse_qsl
# Finish parse use:0:00:05.784829
from fast_parse_qsl import parse_qsl
#from urlparse import parse_qsl
import Cookie
# httponly support exists in Python 2.6's Cookie library,
# but not in Python 2.4 or 2.5.
_morsel_supports_httponly = Cookie.Morsel._reserved.has_key('httponly')
# Some versions of Python 2.7 and later won't need this encoding bug fix:
_cookie_encodes_correctly = Cookie.SimpleCookie().value_encode(';') == (';', '"\\073"')
# See ticket #13007, http://bugs.python.org/issue2193 and http://trac.edgewall.org/ticket/2256
_tc = Cookie.SimpleCookie()
_tc.load('f:oo')
_cookie_allows_colon_in_names = 'Set-Cookie: f:oo=' in _tc.output()
if _morsel_supports_httponly and _cookie_encodes_correctly and _cookie_allows_colon_in_names:
SimpleCookie = Cookie.SimpleCookie
else:
if not _morsel_supports_httponly:
class Morsel(Cookie.Morsel):
def __setitem__(self, K, V):
K = K.lower()
if K == "httponly":
if V:
# The superclass rejects httponly as a key,
# so we jump to the grandparent.
super(Cookie.Morsel, self).__setitem__(K, V)
else:
super(Morsel, self).__setitem__(K, V)
def OutputString(self, attrs=None):
output = super(Morsel, self).OutputString(attrs)
if "httponly" in self:
output += "; httponly"
return output
class SimpleCookie(Cookie.SimpleCookie):
if not _morsel_supports_httponly:
def __set(self, key, real_value, coded_value):
M = self.get(key, Morsel())
M.set(key, real_value, coded_value)
dict.__setitem__(self, key, M)
def __setitem__(self, key, value):
rval, cval = self.value_encode(value)
self.__set(key, rval, cval)
if not _cookie_encodes_correctly:
def value_encode(self, val):
# Some browsers do not support quoted-string from RFC 2109,
# including some versions of Safari and Internet Explorer.
# These browsers split on ';', and some versions of Safari
                # are known to split on ', '. Therefore, we encode ';' and ','.
# SimpleCookie already does the hard work of encoding and decoding.
# It uses octal sequences like '\\012' for newline etc.
# and non-ASCII chars. We just make use of this mechanism, to
# avoid introducing two encoding schemes which would be confusing
# and especially awkward for javascript.
# NB, contrary to Python docs, value_encode returns a tuple containing
# (real val, encoded_val)
val, encoded = super(SimpleCookie, self).value_encode(val)
encoded = encoded.replace(";", "\\073").replace(",","\\054")
# If encoded now contains any quoted chars, we need double quotes
# around the whole string.
if "\\" in encoded and not encoded.startswith('"'):
encoded = '"' + encoded + '"'
return val, encoded
if not _cookie_allows_colon_in_names:
def load(self, rawdata, ignore_parse_errors=False):
if ignore_parse_errors:
self.bad_cookies = []
self._BaseCookie__set = self._loose_set
super(SimpleCookie, self).load(rawdata)
if ignore_parse_errors:
self._BaseCookie__set = self._strict_set
for key in self.bad_cookies:
del self[key]
_strict_set = Cookie.BaseCookie._BaseCookie__set
def _loose_set(self, key, real_value, coded_value):
try:
self._strict_set(key, real_value, coded_value)
except Cookie.CookieError:
self.bad_cookies.append(key)
dict.__setitem__(self, key, None)
class CompatCookie(SimpleCookie):
def __init__(self, *args, **kwargs):
super(CompatCookie, self).__init__(*args, **kwargs)
import warnings
warnings.warn("CompatCookie is deprecated, use django.http.SimpleCookie instead.",
PendingDeprecationWarning)
from django.utils.datastructures import MultiValueDict, ImmutableList
from django.utils.encoding import smart_str, iri_to_uri, force_unicode
from django.utils.http import cookie_date
from django.http.multipartparser import MultiPartParser
from django.conf import settings
from django.core.files import uploadhandler
from utils import *
RESERVED_CHARS="!*'();:@&=+$,/?%#[]"
absolute_http_url_re = re.compile(r"^https?://", re.I)
class Http404(Exception):
pass
class HttpRequest(object):
"""A basic HTTP request."""
# The encoding used in GET/POST dicts. None means use default setting.
_encoding = None
_upload_handlers = []
def __init__(self):
self.GET, self.POST, self.COOKIES, self.META, self.FILES = {}, {}, {}, {}, {}
self.path = ''
self.path_info = ''
self.method = None
def __repr__(self):
return '<HttpRequest\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' % \
(pformat(self.GET), pformat(self.POST), pformat(self.COOKIES),
pformat(self.META))
def get_host(self):
"""Returns the HTTP host using the environment or request headers."""
# We try three options, in order of decreasing preference.
if 'HTTP_X_FORWARDED_HOST' in self.META:
host = self.META['HTTP_X_FORWARDED_HOST']
elif 'HTTP_HOST' in self.META:
host = self.META['HTTP_HOST']
else:
# Reconstruct the host using the algorithm from PEP 333.
host = self.META['SERVER_NAME']
server_port = str(self.META['SERVER_PORT'])
if server_port != (self.is_secure() and '443' or '80'):
host = '%s:%s' % (host, server_port)
return host
def get_full_path(self):
# RFC 3986 requires query string arguments to be in the ASCII range.
# Rather than crash if this doesn't happen, we encode defensively.
return '%s%s' % (self.path, self.META.get('QUERY_STRING', '') and ('?' + iri_to_uri(self.META.get('QUERY_STRING', ''))) or '')
def build_absolute_uri(self, location=None):
"""
Builds an absolute URI from the location and the variables available in
this request. If no location is specified, the absolute URI is built on
``request.get_full_path()``.
"""
if not location:
location = self.get_full_path()
if not absolute_http_url_re.match(location):
current_uri = '%s://%s%s' % (self.is_secure() and 'https' or 'http',
self.get_host(), self.path)
location = urljoin(current_uri, location)
return iri_to_uri(location)
def is_secure(self):
return os.environ.get("HTTPS") == "on"
def is_ajax(self):
return self.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest'
def _set_encoding(self, val):
"""
Sets the encoding used for GET/POST accesses. If the GET or POST
dictionary has already been created, it is removed and recreated on the
next access (so that it is decoded correctly).
"""
self._encoding = val
if hasattr(self, '_get'):
del self._get
if hasattr(self, '_post'):
del self._post
def _get_encoding(self):
return self._encoding
encoding = property(_get_encoding, _set_encoding)
def _initialize_handlers(self):
self._upload_handlers = [uploadhandler.load_handler(handler, self)
for handler in settings.FILE_UPLOAD_HANDLERS]
def _set_upload_handlers(self, upload_handlers):
if hasattr(self, '_files'):
raise AttributeError("You cannot set the upload handlers after the upload has been processed.")
self._upload_handlers = upload_handlers
def _get_upload_handlers(self):
if not self._upload_handlers:
            # If there are no upload handlers defined, initialize them from settings.
self._initialize_handlers()
return self._upload_handlers
upload_handlers = property(_get_upload_handlers, _set_upload_handlers)
def parse_file_upload(self, META, post_data):
"""Returns a tuple of (POST QueryDict, FILES MultiValueDict)."""
self.upload_handlers = ImmutableList(
self.upload_handlers,
warning = "You cannot alter upload handlers after the upload has been processed."
)
parser = MultiPartParser(META, post_data, self.upload_handlers, self.encoding)
return parser.parse()
def _get_raw_post_data(self):
if not hasattr(self, '_raw_post_data'):
if self._read_started:
raise Exception("You cannot access raw_post_data after reading from request's data stream")
try:
content_length = int(self.META.get('CONTENT_LENGTH', 0))
except (ValueError, TypeError):
# If CONTENT_LENGTH was empty string or not an integer, don't
# error out. We've also seen None passed in here (against all
# specs, but see ticket #8259), so we handle TypeError as well.
content_length = 0
if content_length:
self._raw_post_data = self.read(content_length)
else:
self._raw_post_data = self.read()
self._stream = StringIO(self._raw_post_data)
return self._raw_post_data
raw_post_data = property(_get_raw_post_data)
def _mark_post_parse_error(self):
self._post = QueryDict('')
self._files = MultiValueDict()
self._post_parse_error = True
def _load_post_and_files(self):
# Populates self._post and self._files
if self.method != 'POST':
self._post, self._files = QueryDict('', encoding=self._encoding), MultiValueDict()
return
if self._read_started:
self._mark_post_parse_error()
return
if self.META.get('CONTENT_TYPE', '').startswith('multipart'):
self._raw_post_data = ''
try:
self._post, self._files = self.parse_file_upload(self.META, self)
except:
                # An error occurred while parsing POST data. Since when
# formatting the error the request handler might access
# self.POST, set self._post and self._file to prevent
# attempts to parse POST data again.
                # Mark that an error occurred. This allows self.__repr__ to
# be explicit about it instead of simply representing an
# empty POST
self._mark_post_parse_error()
raise
else:
self._post, self._files = QueryDict(self.raw_post_data, encoding=self._encoding), MultiValueDict()
## File-like and iterator interface.
##
## Expects self._stream to be set to an appropriate source of bytes by
## a corresponding request subclass (WSGIRequest or ModPythonRequest).
## Also when request data has already been read by request.POST or
## request.raw_post_data, self._stream points to a StringIO instance
## containing that data.
def read(self, *args, **kwargs):
self._read_started = True
return self._stream.read(*args, **kwargs)
def readline(self, *args, **kwargs):
self._read_started = True
return self._stream.readline(*args, **kwargs)
def xreadlines(self):
while True:
buf = self.readline()
if not buf:
break
yield buf
__iter__ = xreadlines
def readlines(self):
return list(iter(self))
class QueryDict(MultiValueDict):
"""
A specialized MultiValueDict that takes a query string when initialized.
This is immutable unless you create a copy of it.
Values retrieved from this class are converted from the given encoding
(DEFAULT_CHARSET by default) to unicode.
"""
    # These are both reset in __init__, but are specified here at the class
    # level so that unpickling will have valid values
_mutable = True
_encoding = None
def __init__(self, query_string, mutable=False, encoding=None):
MultiValueDict.__init__(self)
if not encoding:
# *Important*: do not import settings any earlier because of note
# in core.handlers.modpython.
from django.conf import settings
encoding = settings.DEFAULT_CHARSET
self.encoding = encoding
pairs = parse_qsl((query_string or ''), True)
for key, value in pairs: # keep_blank_values=True
self.appendlist(force_unicode(key, encoding, errors='replace'),
force_unicode(value, encoding, errors='replace'))
self._mutable = mutable
def _get_encoding(self):
if self._encoding is None:
# *Important*: do not import settings at the module level because
# of the note in core.handlers.modpython.
from django.conf import settings
self._encoding = settings.DEFAULT_CHARSET
return self._encoding
def _set_encoding(self, value):
self._encoding = value
encoding = property(_get_encoding, _set_encoding)
def _assert_mutable(self):
if not self._mutable:
raise AttributeError("This QueryDict instance is immutable")
def __setitem__(self, key, value):
self._assert_mutable()
key = str_to_unicode(key, self.encoding)
value = str_to_unicode(value, self.encoding)
MultiValueDict.__setitem__(self, key, value)
def __delitem__(self, key):
self._assert_mutable()
super(QueryDict, self).__delitem__(key)
def __copy__(self):
result = self.__class__('', mutable=True, encoding=self.encoding)
for key, value in dict.items(self):
dict.__setitem__(result, key, value)
return result
def __deepcopy__(self, memo):
import django.utils.copycompat as copy
result = self.__class__('', mutable=True, encoding=self.encoding)
memo[id(self)] = result
for key, value in dict.items(self):
dict.__setitem__(result, copy.deepcopy(key, memo), copy.deepcopy(value, memo))
return result
def setlist(self, key, list_):
self._assert_mutable()
key = str_to_unicode(key, self.encoding)
list_ = [str_to_unicode(elt, self.encoding) for elt in list_]
MultiValueDict.setlist(self, key, list_)
def setlistdefault(self, key, default_list=()):
self._assert_mutable()
if key not in self:
self.setlist(key, default_list)
return MultiValueDict.getlist(self, key)
def appendlist(self, key, value):
self._assert_mutable()
key = str_to_unicode(key, self.encoding)
value = str_to_unicode(value, self.encoding)
MultiValueDict.appendlist(self, key, value)
def update(self, other_dict):
self._assert_mutable()
f = lambda s: str_to_unicode(s, self.encoding)
if hasattr(other_dict, 'lists'):
for key, valuelist in other_dict.lists():
for value in valuelist:
MultiValueDict.update(self, {f(key): f(value)})
else:
d = dict([(f(k), f(v)) for k, v in other_dict.items()])
MultiValueDict.update(self, d)
def pop(self, key, *args):
self._assert_mutable()
return MultiValueDict.pop(self, key, *args)
def popitem(self):
self._assert_mutable()
return MultiValueDict.popitem(self)
def clear(self):
self._assert_mutable()
MultiValueDict.clear(self)
def setdefault(self, key, default=None):
self._assert_mutable()
key = str_to_unicode(key, self.encoding)
default = str_to_unicode(default, self.encoding)
return MultiValueDict.setdefault(self, key, default)
def copy(self):
"""Returns a mutable copy of this object."""
return self.__deepcopy__({})
def urlencode(self, safe=None):
"""
Returns an encoded string of all query string arguments.
:arg safe: Used to specify characters which do not require quoting, for
example::
>>> q = QueryDict('', mutable=True)
>>> q['next'] = '/a&b/'
>>> q.urlencode()
'next=%2Fa%26b%2F'
>>> q.urlencode(safe='/')
'next=/a%26b/'
"""
output = []
if safe:
encode = lambda k, v: '%s=%s' % ((quote(k, safe), quote(v, safe)))
else:
encode = lambda k, v: urlencode({k: v})
for k, list_ in self.lists():
k = smart_str(k, self.encoding)
output.extend([encode(k, smart_str(v, self.encoding))
for v in list_])
return '&'.join(output)
def parse_cookie(cookie):
if cookie == '':
return {}
if not isinstance(cookie, Cookie.BaseCookie):
try:
c = SimpleCookie()
c.load(cookie, ignore_parse_errors=True)
except Cookie.CookieError:
# Invalid cookie
return {}
else:
c = cookie
cookiedict = {}
for key in c.keys():
cookiedict[key] = c.get(key).value
return cookiedict
class BadHeaderError(ValueError):
pass
class HttpResponse(object):
"""A basic HTTP response, with content and dictionary-accessed headers."""
status_code = 200
def __init__(self, content='', mimetype=None, status=None,
content_type=None):
# _headers is a mapping of the lower-case name to the original case of
# the header (required for working with legacy systems) and the header
# value. Both the name of the header and its value are ASCII strings.
self._headers = {}
self._charset = settings.DEFAULT_CHARSET
if mimetype:
content_type = mimetype # For backwards compatibility
if not content_type:
content_type = "%s; charset=%s" % (settings.DEFAULT_CONTENT_TYPE,
self._charset)
if not isinstance(content, basestring) and hasattr(content, '__iter__'):
self._container = content
self._is_string = False
else:
self._container = [content]
self._is_string = True
self.cookies = SimpleCookie()
if status:
self.status_code = status
self['Content-Type'] = content_type
def __str__(self):
"""Full HTTP message, including headers."""
return '\n'.join(['%s: %s' % (key, value)
for key, value in self._headers.values()]) \
+ '\n\n' + self.content
def _convert_to_ascii(self, *values):
"""Converts all values to ascii strings."""
for value in values:
if isinstance(value, unicode):
try:
value = value.encode('us-ascii')
except UnicodeError, e:
e.reason += ', HTTP response headers must be in US-ASCII format'
raise
else:
value = str(value)
if '\n' in value or '\r' in value:
raise BadHeaderError("Header values can't contain newlines (got %r)" % (value))
yield value
def __setitem__(self, header, value):
header, value = self._convert_to_ascii(header, value)
self._headers[header.lower()] = (header, value)
def __delitem__(self, header):
try:
del self._headers[header.lower()]
except KeyError:
pass
def __getitem__(self, header):
return self._headers[header.lower()][1]
def has_header(self, header):
"""Case-insensitive check for a header."""
return self._headers.has_key(header.lower())
__contains__ = has_header
def items(self):
return self._headers.values()
def get(self, header, alternate):
return self._headers.get(header.lower(), (None, alternate))[1]
def set_cookie(self, key, value='', max_age=None, expires=None, path='/',
domain=None, secure=False, httponly=False):
"""
Sets a cookie.
``expires`` can be a string in the correct format or a
``datetime.datetime`` object in UTC. If ``expires`` is a datetime
object then ``max_age`` will be calculated.
"""
self.cookies[key] = value
if expires is not None:
if isinstance(expires, datetime.datetime):
delta = expires - expires.utcnow()
# Add one second so the date matches exactly (a fraction of
# time gets lost between converting to a timedelta and
# then the date string).
delta = delta + datetime.timedelta(seconds=1)
# Just set max_age - the max_age logic will set expires.
expires = None
max_age = max(0, delta.days * 86400 + delta.seconds)
else:
self.cookies[key]['expires'] = expires
if max_age is not None:
self.cookies[key]['max-age'] = max_age
# IE requires expires, so set it if hasn't been already.
if not expires:
self.cookies[key]['expires'] = cookie_date(time.time() +
max_age)
if path is not None:
self.cookies[key]['path'] = path
if domain is not None:
self.cookies[key]['domain'] = domain
if secure:
self.cookies[key]['secure'] = True
if httponly:
self.cookies[key]['httponly'] = True
def delete_cookie(self, key, path='/', domain=None):
self.set_cookie(key, max_age=0, path=path, domain=domain,
expires='Thu, 01-Jan-1970 00:00:00 GMT')
def _get_content(self):
if self.has_header('Content-Encoding'):
return ''.join(self._container)
return smart_str(''.join(self._container), self._charset)
def _set_content(self, value):
self._container = [value]
self._is_string = True
content = property(_get_content, _set_content)
def __iter__(self):
self._iterator = iter(self._container)
return self
def next(self):
chunk = self._iterator.next()
if isinstance(chunk, unicode):
chunk = chunk.encode(self._charset)
return str(chunk)
def close(self):
if hasattr(self._container, 'close'):
self._container.close()
# The remaining methods partially implement the file-like object interface.
# See http://docs.python.org/lib/bltin-file-objects.html
def write(self, content):
if not self._is_string:
raise Exception("This %s instance is not writable" % self.__class__)
self._container.append(content)
def flush(self):
pass
def tell(self):
if not self._is_string:
raise Exception("This %s instance cannot tell its position" % self.__class__)
return sum([len(chunk) for chunk in self._container])
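# Illustrative cookie handling on HttpResponse (not from the original source):
#   response = HttpResponse("ok")
#   response.set_cookie('sessionid', 'abc123', max_age=3600, httponly=True)
#   response.delete_cookie('sessionid')  # sets max_age=0 and an already-expired date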
class HttpResponseRedirect(HttpResponse):
status_code = 302
def __init__(self, redirect_to):
super(HttpResponseRedirect, self).__init__()
self['Location'] = iri_to_uri(redirect_to)
class HttpResponsePermanentRedirect(HttpResponse):
status_code = 301
def __init__(self, redirect_to):
super(HttpResponsePermanentRedirect, self).__init__()
self['Location'] = iri_to_uri(redirect_to)
class HttpResponseNotModified(HttpResponse):
status_code = 304
class HttpResponseBadRequest(HttpResponse):
status_code = 400
class HttpResponseNotFound(HttpResponse):
status_code = 404
class HttpResponseForbidden(HttpResponse):
status_code = 403
class HttpResponseNotAllowed(HttpResponse):
status_code = 405
def __init__(self, permitted_methods):
super(HttpResponseNotAllowed, self).__init__()
self['Allow'] = ', '.join(permitted_methods)
class HttpResponseGone(HttpResponse):
status_code = 410
class HttpResponseServerError(HttpResponse):
status_code = 500
# A backwards compatible alias for HttpRequest.get_host.
def get_host(request):
return request.get_host()
# It's neither necessary nor appropriate to use
# django.utils.encoding.smart_unicode for parsing URLs and form inputs. Thus,
# this slightly more restricted function.
def str_to_unicode(s, encoding):
"""
Converts basestring objects to unicode, using the given encoding. Illegally
encoded input characters are replaced with Unicode "unknown" codepoint
(\ufffd).
Returns any non-basestring objects without change.
"""
if isinstance(s, str):
return unicode(s, encoding, 'replace')
else:
return s
|
|
"""
Reaction wheel discipline for CADRE
"""
from six.moves import range
import numpy as np
from openmdao.api import ExplicitComponent
from CADRE import rk4
class ReactionWheel_Motor(ExplicitComponent):
"""
Compute reaction wheel motor torque.
"""
def __init__(self, n):
super(ReactionWheel_Motor, self).__init__()
self.n = n
# Constant
self.J_RW = 2.8e-5
def setup(self):
n = self.n
# Inputs
self.add_input('T_RW', np.zeros((3, n)), units='N*m',
desc='Torque vector of reaction wheel over time')
self.add_input('w_B', np.zeros((3, n)), units='1/s',
desc='Angular velocity vector in body-fixed frame over time')
self.add_input('w_RW', np.zeros((3, n)), units='1/s',
desc='Angular velocity vector of reaction wheel over time')
# Outputs
self.add_output('T_m', np.ones((3, n)), units='N*m',
desc='Torque vector of motor over time')
def compute(self, inputs, outputs):
"""
Calculate outputs.
"""
T_RW = inputs['T_RW']
w_B = inputs['w_B']
w_RW = inputs['w_RW']
T_m = outputs['T_m']
w_Bx = np.zeros((3, 3))
h_RW = self.J_RW * w_RW[:]
for i in range(0, self.n):
w_Bx[0, :] = (0., -w_B[2, i], w_B[1, i])
w_Bx[1, :] = (w_B[2, i], 0., -w_B[0, i])
w_Bx[2, :] = (-w_B[1, i], w_B[0, i], 0.)
T_m[:, i] = -T_RW[:, i] - np.dot(w_Bx, h_RW[:, i])
def compute_partials(self, inputs, partials):
"""
Calculate and save derivatives. (i.e., Jacobian)
"""
# T_RW = inputs['T_RW']
w_B = inputs['w_B']
w_RW = inputs['w_RW']
w_Bx = np.zeros((3, 3))
self.dT_dTm = np.zeros((self.n, 3, 3))
self.dT_dwb = np.zeros((self.n, 3, 3))
self.dT_dh = np.zeros((self.n, 3, 3))
dwx_dwb = np.zeros((3, 3, 3))
h_RW = self.J_RW * w_RW[:]
for i in range(0, self.n):
w_Bx[0, :] = (0., -w_B[2, i], w_B[1, i])
w_Bx[1, :] = (w_B[2, i], 0., -w_B[0, i])
w_Bx[2, :] = (-w_B[1, i], w_B[0, i], 0.)
dwx_dwb[0, :, 0] = (0., 0., 0.)
dwx_dwb[1, :, 0] = (0., 0., -1.)
dwx_dwb[2, :, 0] = (0., 1., 0.)
dwx_dwb[0, :, 1] = (0., 0., 1.)
dwx_dwb[1, :, 1] = (0., 0., 0.)
dwx_dwb[2, :, 1] = (-1., 0., 0.)
dwx_dwb[0, :, 2] = (0., -1., 0.)
dwx_dwb[1, :, 2] = (1., 0., 0.)
dwx_dwb[2, :, 2] = (0., 0., 0.)
for k in range(0, 3):
self.dT_dTm[i, k, k] = -1.
self.dT_dwb[i, :, k] = -np.dot(dwx_dwb[:, :, k], h_RW[:, i])
self.dT_dh[i, :, :] = -w_Bx
def compute_jacvec_product(self, inputs, d_inputs, d_outputs, mode):
"""
Matrix-vector product with the Jacobian.
"""
dT_m = d_outputs['T_m']
if mode == 'fwd':
for k in range(3):
for j in range(3):
if 'T_RW' in d_inputs:
dT_m[k, :] += self.dT_dTm[:, k, j] * d_inputs['T_RW'][j, :]
if 'w_B' in d_inputs:
dT_m[k, :] += self.dT_dwb[:, k, j] * d_inputs['w_B'][j, :]
if 'w_RW' in d_inputs:
dT_m[k, :] += self.dT_dh[:, k, j] * d_inputs['w_RW'][j, :] * self.J_RW
else:
for k in range(3):
for j in range(3):
if 'T_RW' in d_inputs:
d_inputs['T_RW'][j, :] += self.dT_dTm[:, k, j] * dT_m[k, :]
if 'w_B' in d_inputs:
d_inputs['w_B'][j, :] += self.dT_dwb[:, k, j] * dT_m[k, :]
if 'w_RW' in d_inputs:
d_inputs['w_RW'][j, :] += self.dT_dh[:, k, j] * dT_m[k, :] * self.J_RW
class ReactionWheel_Power(ExplicitComponent):
"""
Compute reaction wheel power.
"""
# constants
V = 4.0
a = 4.9e-4
b = 4.5e2
I0 = 0.017
def __init__(self, n):
super(ReactionWheel_Power, self).__init__()
self.n = n
def setup(self):
n = self.n
# Inputs
self.add_input('w_RW', np.zeros((3, n)), units='1/s',
desc='Angular velocity vector of reaction wheel over time')
self.add_input('T_RW', np.zeros((3, n)), units='N*m',
desc='Torque vector of reaction wheel over time')
# Outputs
self.add_output('P_RW', np.ones((3, n)), units='W',
desc='Reaction wheel power over time')
def compute(self, inputs, outputs):
"""
Calculate outputs.
"""
w_RW = inputs['w_RW']
T_RW = inputs['T_RW']
P_RW = outputs['P_RW']
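        # Per-axis electrical power model: P = V * (a * w_RW + b * T_RW)**2
        # plus the constant term V * I0.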
for i in range(self.n):
for k in range(3):
P_RW[k, i] = (self.V * (self.a * w_RW[k, i] +
self.b * T_RW[k, i])**2 +
self.V * self.I0)
def compute_partials(self, inputs, partials):
"""
Calculate and save derivatives. (i.e., Jacobian)
"""
w_RW = inputs['w_RW']
T_RW = inputs['T_RW']
self.dP_dw = np.zeros((self.n, 3))
self.dP_dT = np.zeros((self.n, 3))
for i in range(self.n):
for k in range(3):
prod = 2 * self.V * (self.a * w_RW[k, i] + self.b * T_RW[k, i])
self.dP_dw[i, k] = self.a * prod
self.dP_dT[i, k] = self.b * prod
def compute_jacvec_product(self, inputs, d_inputs, d_outputs, mode):
"""
Matrix-vector product with the Jacobian.
"""
dP_RW = d_outputs['P_RW']
if mode == 'fwd':
for k in range(3):
if 'w_RW' in d_inputs:
dP_RW[k, :] += self.dP_dw[:, k] * d_inputs['w_RW'][k, :]
if 'T_RW' in d_inputs:
dP_RW[k, :] += self.dP_dT[:, k] * d_inputs['T_RW'][k, :]
else:
for k in range(3):
if 'w_RW' in d_inputs:
d_inputs['w_RW'][k, :] += self.dP_dw[:, k] * dP_RW[k, :]
if 'T_RW' in d_inputs:
d_inputs['T_RW'][k, :] += self.dP_dT[:, k] * dP_RW[k, :]
class ReactionWheel_Torque(ExplicitComponent):
"""
Compute torque vector of reaction wheel.
"""
def __init__(self, n):
super(ReactionWheel_Torque, self).__init__()
self.n = n
def setup(self):
n = self.n
# Inputs
self.add_input('T_tot', np.zeros((3, n)), units='N*m',
desc='Total reaction wheel torque over time')
# Outputs
self.add_output('T_RW', np.zeros((3, n)), units='N*m',
desc='Torque vector of reaction wheel over time')
def compute(self, inputs, outputs):
"""
Calculate outputs.
"""
outputs['T_RW'][:] = inputs['T_tot'][:]
def compute_jacvec_product(self, inputs, d_inputs, d_outputs, mode):
"""
Matrix-vector product with the Jacobian.
"""
if mode == 'fwd':
if 'T_tot' in d_inputs:
d_outputs['T_RW'][:] += d_inputs['T_tot'][:]
else:
if 'T_tot' in d_inputs:
d_inputs['T_tot'] += d_outputs['T_RW'][:]
class ReactionWheel_Dynamics(rk4.RK4):
"""
Compute the angular velocity vector of reaction wheel.
"""
def __init__(self, n_times, h):
super(ReactionWheel_Dynamics, self).__init__(n_times, h)
self.n_times = n_times
def setup(self):
n_times = self.n_times
# Inputs
self.add_input('w_B', np.zeros((3, n_times)), units='1/s',
desc='Angular velocity vector in body-fixed frame over time')
self.add_input('T_RW', np.zeros((3, n_times)), units='N*m',
desc='Torque vector of reaction wheel over time')
self.add_input('w_RW0', np.zeros((3,)), units='1/s',
desc='Initial angular velocity vector of reaction wheel')
# Outputs
self.add_output('w_RW', np.zeros((3, n_times)), units='1/s',
desc='Angular velocity vector of reaction wheel over time')
self.options['state_var'] = 'w_RW'
self.options['init_state_var'] = 'w_RW0'
self.options['external_vars'] = ['w_B', 'T_RW']
self.jy = np.zeros((3, 3))
self.djy_dx = np.zeros((3, 3, 3))
self.djy_dx[:, :, 0] = [[0, 0, 0], [0, 0, -1], [0, 1, 0]]
self.djy_dx[:, :, 1] = [[0, 0, 1], [0, 0, 0], [-1, 0, 0]]
self.djy_dx[:, :, 2] = [[0, -1, 0], [1, 0, 0], [0, 0, 0]]
# unit conversion of some kind
self.J_RW = 2.8e-5
def f_dot(self, external, state):
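        # external packs [w_B (3), T_RW (3)] at the current time step and
        # state is w_RW; jy is the skew matrix of w_B, so this returns
        # w_RW_dot = -T_RW / J_RW - w_B x w_RW.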
self.jy[0, :] = [0., -external[2], external[1]]
self.jy[1, :] = [external[2], 0., -external[0]]
self.jy[2, :] = [-external[1], external[0], 0.]
# TODO: sort out unit conversion here with T_RW
return (-external[3:]/2.8e-5 - self.jy.dot(state))
def df_dy(self, external, state):
self.jy[0, :] = [0., -external[2], external[1]]
self.jy[1, :] = [external[2], 0., -external[0]]
self.jy[2, :] = [-external[1], external[0], 0.]
return -self.jy
def df_dx(self, external, state):
self.jx = np.zeros((3, 6))
for i in range(3):
self.jx[i, 0:3] = -self.djy_dx[:, :, i].dot(state)
self.jx[i, i+3] = -1.0 / self.J_RW
return self.jx
|
|
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_log_disk_setting
short_description: Settings for local disk logging in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify log_disk feature and setting category.
      Examples include all parameters, and values need to be adjusted to data sources before usage.
Tested with FOS v6.0.5
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
version_added: 2.9
log_disk_setting:
description:
- Settings for local disk logging.
default: null
type: dict
suboptions:
diskfull:
description:
- Action to take when disk is full. The system can overwrite the oldest log messages or stop logging when the disk is full .
type: str
choices:
- overwrite
- nolog
dlp_archive_quota:
description:
- DLP archive quota (MB).
type: int
full_final_warning_threshold:
description:
- Log full final warning threshold as a percent (3 - 100).
type: int
full_first_warning_threshold:
description:
- Log full first warning threshold as a percent (1 - 98).
type: int
full_second_warning_threshold:
description:
- Log full second warning threshold as a percent (2 - 99).
type: int
ips_archive:
description:
- Enable/disable IPS packet archiving to the local disk.
type: str
choices:
- enable
- disable
log_quota:
description:
- Disk log quota (MB).
type: int
max_log_file_size:
description:
- Maximum log file size before rolling (1 - 100 Mbytes).
type: int
max_policy_packet_capture_size:
description:
- Maximum size of policy sniffer in MB (0 means unlimited).
type: int
maximum_log_age:
description:
- Delete log files older than (days).
type: int
report_quota:
description:
- Report quota (MB).
type: int
roll_day:
description:
- Day of week on which to roll log file.
type: str
choices:
- sunday
- monday
- tuesday
- wednesday
- thursday
- friday
- saturday
roll_schedule:
description:
- Frequency to check log file for rolling.
type: str
choices:
- daily
- weekly
roll_time:
description:
- "Time of day to roll the log file (hh:mm)."
type: str
source_ip:
description:
- Source IP address to use for uploading disk log files.
type: str
status:
description:
- Enable/disable local disk logging.
type: str
choices:
- enable
- disable
upload:
description:
- Enable/disable uploading log files when they are rolled.
type: str
choices:
- enable
- disable
upload_delete_files:
description:
- Delete log files after uploading .
type: str
choices:
- enable
- disable
upload_destination:
description:
- The type of server to upload log files to. Only FTP is currently supported.
type: str
choices:
- ftp-server
upload_ssl_conn:
description:
- Enable/disable encrypted FTPS communication to upload log files.
type: str
choices:
- default
- high
- low
- disable
uploaddir:
description:
- The remote directory on the FTP server to upload log files to.
type: str
uploadip:
description:
- IP address of the FTP server to upload log files to.
type: str
uploadpass:
description:
- Password required to log into the FTP server to upload disk log files.
type: str
uploadport:
description:
- TCP port to use for communicating with the FTP server .
type: int
uploadsched:
description:
- Set the schedule for uploading log files to the FTP server .
type: str
choices:
- disable
- enable
uploadtime:
description:
- "Time of day at which log files are uploaded if uploadsched is enabled (hh:mm or hh)."
type: str
uploadtype:
description:
- Types of log files to upload. Separate multiple entries with a space.
type: str
choices:
- traffic
- event
- virus
- webfilter
- IPS
- spamfilter
- dlp-archive
- anomaly
- voip
- dlp
- app-ctrl
- waf
- netscan
- gtp
- dns
uploaduser:
description:
- Username required to log into the FTP server to upload disk log files.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Settings for local disk logging.
fortios_log_disk_setting:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
log_disk_setting:
diskfull: "overwrite"
dlp_archive_quota: "4"
full_final_warning_threshold: "5"
full_first_warning_threshold: "6"
full_second_warning_threshold: "7"
ips_archive: "enable"
log_quota: "9"
max_log_file_size: "10"
max_policy_packet_capture_size: "11"
maximum_log_age: "12"
report_quota: "13"
roll_day: "sunday"
roll_schedule: "daily"
roll_time: "<your_own_value>"
source_ip: "84.230.14.43"
status: "enable"
upload: "enable"
upload_delete_files: "enable"
upload_destination: "ftp-server"
upload_ssl_conn: "default"
uploaddir: "<your_own_value>"
uploadip: "<your_own_value>"
uploadpass: "<your_own_value>"
uploadport: "26"
uploadsched: "disable"
uploadtime: "<your_own_value>"
uploadtype: "traffic"
uploaduser: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_log_disk_setting_data(json):
option_list = ['diskfull', 'dlp_archive_quota', 'full_final_warning_threshold',
'full_first_warning_threshold', 'full_second_warning_threshold', 'ips_archive',
'log_quota', 'max_log_file_size', 'max_policy_packet_capture_size',
'maximum_log_age', 'report_quota', 'roll_day',
'roll_schedule', 'roll_time', 'source_ip',
'status', 'upload', 'upload_delete_files',
'upload_destination', 'upload_ssl_conn', 'uploaddir',
'uploadip', 'uploadpass', 'uploadport',
'uploadsched', 'uploadtime', 'uploadtype',
'uploaduser']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
if isinstance(data, list):
        for i, elem in enumerate(data):
            data[i] = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
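# Editorial sketch (illustrative, not part of the generated module): underscore_to_hyphen
# recursively rewrites dictionary keys to the hyphenated form the FortiOS API expects, e.g.
#     underscore_to_hyphen({'upload_delete_files': 'enable', 'max_log_file_size': 10})
#     == {'upload-delete-files': 'enable', 'max-log-file-size': 10}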
def log_disk_setting(data, fos):
vdom = data['vdom']
log_disk_setting_data = data['log_disk_setting']
filtered_data = underscore_to_hyphen(filter_log_disk_setting_data(log_disk_setting_data))
return fos.set('log.disk',
'setting',
data=filtered_data,
vdom=vdom)
def is_successful_status(status):
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
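# Editorial note (illustrative): a DELETE that returns HTTP 404 is treated as success above,
# presumably so that deleting an object which is already absent does not fail the task.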
def fortios_log_disk(data, fos):
if data['log_disk_setting']:
resp = log_disk_setting(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"log_disk_setting": {
"required": False, "type": "dict", "default": None,
"options": {
"diskfull": {"required": False, "type": "str",
"choices": ["overwrite", "nolog"]},
"dlp_archive_quota": {"required": False, "type": "int"},
"full_final_warning_threshold": {"required": False, "type": "int"},
"full_first_warning_threshold": {"required": False, "type": "int"},
"full_second_warning_threshold": {"required": False, "type": "int"},
"ips_archive": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"log_quota": {"required": False, "type": "int"},
"max_log_file_size": {"required": False, "type": "int"},
"max_policy_packet_capture_size": {"required": False, "type": "int"},
"maximum_log_age": {"required": False, "type": "int"},
"report_quota": {"required": False, "type": "int"},
"roll_day": {"required": False, "type": "str",
"choices": ["sunday", "monday", "tuesday",
"wednesday", "thursday", "friday",
"saturday"]},
"roll_schedule": {"required": False, "type": "str",
"choices": ["daily", "weekly"]},
"roll_time": {"required": False, "type": "str"},
"source_ip": {"required": False, "type": "str"},
"status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"upload": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"upload_delete_files": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"upload_destination": {"required": False, "type": "str",
"choices": ["ftp-server"]},
"upload_ssl_conn": {"required": False, "type": "str",
"choices": ["default", "high", "low",
"disable"]},
"uploaddir": {"required": False, "type": "str"},
"uploadip": {"required": False, "type": "str"},
"uploadpass": {"required": False, "type": "str"},
"uploadport": {"required": False, "type": "int"},
"uploadsched": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"uploadtime": {"required": False, "type": "str"},
"uploadtype": {"required": False, "type": "str",
"choices": ["traffic", "event", "virus",
"webfilter", "IPS", "spamfilter",
"dlp-archive", "anomaly", "voip",
"dlp", "app-ctrl", "waf",
"netscan", "gtp", "dns"]},
"uploaduser": {"required": False, "type": "str"}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_log_disk(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_log_disk(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
|
|
# Copyright (C) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit tests for run_perf_tests."""
import StringIO
import datetime
import json
import re
import unittest
from webkitpy.common.host_mock import MockHost
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.layout_tests.port.driver import DriverOutput
from webkitpy.layout_tests.port.test import TestPort
from webkitpy.performance_tests.perftest import ChromiumStylePerfTest
from webkitpy.performance_tests.perftest import DEFAULT_TEST_RUNNER_COUNT
from webkitpy.performance_tests.perftest import PerfTest
from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner
class MainTest(unittest.TestCase):
def create_runner(self, args=[]):
options, parsed_args = PerfTestsRunner._parse_args(args)
test_port = TestPort(host=MockHost(), options=options)
runner = PerfTestsRunner(args=args, port=test_port)
runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
return runner, test_port
def _add_file(self, runner, dirname, filename, content=True):
dirname = runner._host.filesystem.join(runner._base_path, dirname) if dirname else runner._base_path
runner._host.filesystem.maybe_make_directory(dirname)
runner._host.filesystem.files[runner._host.filesystem.join(dirname, filename)] = content
def test_collect_tests(self):
runner, port = self.create_runner()
self._add_file(runner, 'inspector', 'a_file.html', 'a content')
tests = runner._collect_tests()
self.assertEqual(len(tests), 1)
def _collect_tests_and_sort_test_name(self, runner):
return sorted([test.test_name() for test in runner._collect_tests()])
    def test_collect_tests_with_multiple_files(self):
runner, port = self.create_runner(args=['PerformanceTests/test1.html', 'test2.html'])
def add_file(filename):
port.host.filesystem.files[runner._host.filesystem.join(runner._base_path, filename)] = 'some content'
add_file('test1.html')
add_file('test2.html')
add_file('test3.html')
port.host.filesystem.chdir(runner._port.perf_tests_dir()[:runner._port.perf_tests_dir().rfind(runner._host.filesystem.sep)])
self.assertItemsEqual(self._collect_tests_and_sort_test_name(runner), ['test1.html', 'test2.html'])
def test_collect_tests_with_skipped_list(self):
runner, port = self.create_runner()
self._add_file(runner, 'inspector', 'test1.html')
self._add_file(runner, 'inspector', 'unsupported_test1.html')
self._add_file(runner, 'inspector', 'test2.html')
self._add_file(runner, 'inspector/resources', 'resource_file.html')
self._add_file(runner, 'unsupported', 'unsupported_test2.html')
port.skipped_perf_tests = lambda: ['inspector/unsupported_test1.html', 'unsupported']
self.assertItemsEqual(self._collect_tests_and_sort_test_name(runner), ['inspector/test1.html', 'inspector/test2.html'])
def test_collect_tests_with_skipped_list_and_files(self):
runner, port = self.create_runner(args=['Suite/Test1.html', 'Suite/SkippedTest1.html', 'SkippedSuite/Test1.html'])
self._add_file(runner, 'SkippedSuite', 'Test1.html')
self._add_file(runner, 'SkippedSuite', 'Test2.html')
self._add_file(runner, 'Suite', 'Test1.html')
self._add_file(runner, 'Suite', 'Test2.html')
self._add_file(runner, 'Suite', 'SkippedTest1.html')
self._add_file(runner, 'Suite', 'SkippedTest2.html')
port.skipped_perf_tests = lambda: ['Suite/SkippedTest1.html', 'Suite/SkippedTest1.html', 'SkippedSuite']
self.assertItemsEqual(self._collect_tests_and_sort_test_name(runner),
['SkippedSuite/Test1.html', 'Suite/SkippedTest1.html', 'Suite/Test1.html'])
def test_collect_tests_with_ignored_skipped_list(self):
runner, port = self.create_runner(args=['--force'])
self._add_file(runner, 'inspector', 'test1.html')
self._add_file(runner, 'inspector', 'unsupported_test1.html')
self._add_file(runner, 'inspector', 'test2.html')
self._add_file(runner, 'inspector/resources', 'resource_file.html')
self._add_file(runner, 'unsupported', 'unsupported_test2.html')
port.skipped_perf_tests = lambda: ['inspector/unsupported_test1.html', 'unsupported']
self.assertItemsEqual(self._collect_tests_and_sort_test_name(runner), ['inspector/test1.html', 'inspector/test2.html', 'inspector/unsupported_test1.html', 'unsupported/unsupported_test2.html'])
def test_default_args(self):
runner, port = self.create_runner()
options, args = PerfTestsRunner._parse_args([])
self.assertTrue(options.build)
self.assertEqual(options.time_out_ms, 600 * 1000)
self.assertTrue(options.generate_results)
self.assertTrue(options.show_results)
self.assertTrue(options.use_skipped_list)
self.assertEqual(options.repeat, 1)
self.assertEqual(options.test_runner_count, DEFAULT_TEST_RUNNER_COUNT)
def test_parse_args(self):
runner, port = self.create_runner()
options, args = PerfTestsRunner._parse_args([
'--build-directory=folder42',
'--platform=platform42',
'--builder-name', 'webkit-mac-1',
'--build-number=56',
'--time-out-ms=42',
'--no-show-results',
'--reset-results',
'--output-json-path=a/output.json',
'--slave-config-json-path=a/source.json',
'--test-results-server=somehost',
'--additional-drt-flag=--enable-threaded-parser',
'--additional-drt-flag=--awesomesauce',
'--repeat=5',
'--test-runner-count=5',
'--debug'])
self.assertTrue(options.build)
self.assertEqual(options.build_directory, 'folder42')
self.assertEqual(options.platform, 'platform42')
self.assertEqual(options.builder_name, 'webkit-mac-1')
self.assertEqual(options.build_number, '56')
self.assertEqual(options.time_out_ms, '42')
self.assertEqual(options.configuration, 'Debug')
self.assertFalse(options.show_results)
self.assertTrue(options.reset_results)
self.assertEqual(options.output_json_path, 'a/output.json')
self.assertEqual(options.slave_config_json_path, 'a/source.json')
self.assertEqual(options.test_results_server, 'somehost')
self.assertEqual(options.additional_drt_flag, ['--enable-threaded-parser', '--awesomesauce'])
self.assertEqual(options.repeat, 5)
self.assertEqual(options.test_runner_count, 5)
def test_upload_json(self):
runner, port = self.create_runner()
port.host.filesystem.files['/mock-checkout/some.json'] = 'some content'
class MockFileUploader:
called = []
upload_single_text_file_throws = False
upload_single_text_file_return_value = None
@classmethod
def reset(cls):
cls.called = []
cls.upload_single_text_file_throws = False
cls.upload_single_text_file_return_value = None
def __init__(mock, url, timeout):
self.assertEqual(url, 'https://some.host/some/path')
self.assertTrue(isinstance(timeout, int) and timeout)
mock.called.append('FileUploader')
def upload_single_text_file(mock, filesystem, content_type, filename):
self.assertEqual(filesystem, port.host.filesystem)
self.assertEqual(content_type, 'application/json')
self.assertEqual(filename, 'some.json')
mock.called.append('upload_single_text_file')
if mock.upload_single_text_file_throws:
raise Exception
return mock.upload_single_text_file_return_value
MockFileUploader.upload_single_text_file_return_value = StringIO.StringIO('OK')
self.assertTrue(runner._upload_json('some.host', 'some.json', '/some/path', MockFileUploader))
self.assertEqual(MockFileUploader.called, ['FileUploader', 'upload_single_text_file'])
MockFileUploader.reset()
MockFileUploader.upload_single_text_file_return_value = StringIO.StringIO('Some error')
output = OutputCapture()
output.capture_output()
self.assertFalse(runner._upload_json('some.host', 'some.json', '/some/path', MockFileUploader))
_, _, logs = output.restore_output()
self.assertEqual(logs, 'Uploaded JSON to https://some.host/some/path but got a bad response:\nSome error\n')
        # An exception thrown by upload_single_text_file shouldn't blow up _upload_json
MockFileUploader.reset()
MockFileUploader.upload_single_text_file_throws = True
self.assertFalse(runner._upload_json('some.host', 'some.json', '/some/path', MockFileUploader))
self.assertEqual(MockFileUploader.called, ['FileUploader', 'upload_single_text_file'])
MockFileUploader.reset()
MockFileUploader.upload_single_text_file_return_value = StringIO.StringIO('{"status": "OK"}')
self.assertTrue(runner._upload_json('some.host', 'some.json', '/some/path', MockFileUploader))
self.assertEqual(MockFileUploader.called, ['FileUploader', 'upload_single_text_file'])
MockFileUploader.reset()
MockFileUploader.upload_single_text_file_return_value = StringIO.StringIO('{"status": "SomethingHasFailed", "failureStored": false}')
output = OutputCapture()
output.capture_output()
self.assertFalse(runner._upload_json('some.host', 'some.json', '/some/path', MockFileUploader))
_, _, logs = output.restore_output()
serialized_json = json.dumps({'status': 'SomethingHasFailed', 'failureStored': False}, indent=4)
self.assertEqual(logs, 'Uploaded JSON to https://some.host/some/path but got an error:\n%s\n' % serialized_json)
class InspectorPassTestData:
text = 'RESULT group_name: test_name= 42 ms'
output = """Running inspector/pass.html (2 of 2)
RESULT group_name: test_name= 42 ms
Finished: 0.1 s
"""
class EventTargetWrapperTestData:
text = """Running 20 times
Ignoring warm-up run (1502)
1504
1505
1510
1504
1507
1509
1510
1487
1488
1472
1472
1488
1473
1472
1475
1487
1486
1486
1475
1471
Time:
values 1486, 1471, 1510, 1505, 1478, 1490 ms
avg 1490 ms
median 1488 ms
stdev 15.13935 ms
min 1471 ms
max 1510 ms
"""
output = """Running Bindings/event-target-wrapper.html (1 of 2)
RESULT Bindings: event-target-wrapper: Time= 1490.0 ms
median= 1488.0 ms, stdev= 14.11751 ms, min= 1471.0 ms, max= 1510.0 ms
Finished: 0.1 s
"""
results = {'url': 'http://trac.webkit.org/browser/trunk/PerformanceTests/Bindings/event-target-wrapper.html',
'metrics': {'Time': {'current': [[1486.0, 1471.0, 1510.0, 1505.0, 1478.0, 1490.0]] * 4}}}
class SomeParserTestData:
text = """Running 20 times
Ignoring warm-up run (1115)
Time:
values 1080, 1120, 1095, 1101, 1104 ms
avg 1100 ms
median 1101 ms
stdev 14.50861 ms
min 1080 ms
max 1120 ms
"""
output = """Running Parser/some-parser.html (2 of 2)
RESULT Parser: some-parser: Time= 1100.0 ms
median= 1101.0 ms, stdev= 13.31402 ms, min= 1080.0 ms, max= 1120.0 ms
Finished: 0.1 s
"""
class MemoryTestData:
text = """Running 20 times
Ignoring warm-up run (1115)
Time:
values 1080, 1120, 1095, 1101, 1104 ms
avg 1100 ms
median 1101 ms
stdev 14.50861 ms
min 1080 ms
max 1120 ms
JS Heap:
values 825000, 811000, 848000, 837000, 829000 bytes
avg 830000 bytes
median 829000 bytes
stdev 13784.04875 bytes
min 811000 bytes
max 848000 bytes
Malloc:
values 529000, 511000, 548000, 536000, 521000 bytes
avg 529000 bytes
median 529000 bytes
stdev 14124.44689 bytes
min 511000 bytes
max 548000 bytes
"""
output = """Running 1 tests
Running Parser/memory-test.html (1 of 1)
RESULT Parser: memory-test: Time= 1100.0 ms
median= 1101.0 ms, stdev= 13.31402 ms, min= 1080.0 ms, max= 1120.0 ms
RESULT Parser: memory-test: JSHeap= 830000.0 bytes
median= 829000.0 bytes, stdev= 12649.11064 bytes, min= 811000.0 bytes, max= 848000.0 bytes
RESULT Parser: memory-test: Malloc= 529000.0 bytes
median= 529000.0 bytes, stdev= 12961.48139 bytes, min= 511000.0 bytes, max= 548000.0 bytes
Finished: 0.1 s
"""
results = {'current': [[1080, 1120, 1095, 1101, 1104]] * 4}
js_heap_results = {'current': [[825000, 811000, 848000, 837000, 829000]] * 4}
malloc_results = {'current': [[529000, 511000, 548000, 536000, 521000]] * 4}
class TestDriver:
def run_test(self, driver_input, stop_when_done):
text = ''
timeout = False
crash = False
if driver_input.test_name.endswith('pass.html'):
text = InspectorPassTestData.text
elif driver_input.test_name.endswith('timeout.html'):
timeout = True
elif driver_input.test_name.endswith('failed.html'):
text = None
elif driver_input.test_name.endswith('tonguey.html'):
text = 'we are not expecting an output from perf tests but RESULT blablabla'
elif driver_input.test_name.endswith('crash.html'):
crash = True
elif driver_input.test_name.endswith('event-target-wrapper.html'):
text = EventTargetWrapperTestData.text
elif driver_input.test_name.endswith('some-parser.html'):
text = SomeParserTestData.text
elif driver_input.test_name.endswith('memory-test.html'):
text = MemoryTestData.text
return DriverOutput(text, '', '', '', crash=crash, timeout=timeout)
def start(self):
"""do nothing"""
def stop(self):
"""do nothing"""
class IntegrationTest(unittest.TestCase):
def _normalize_output(self, log):
return re.sub(r'(stdev=\s+\d+\.\d{5})\d+', r'\1', re.sub(r'Finished: [0-9\.]+ s', 'Finished: 0.1 s', log))
def _load_output_json(self, runner):
json_content = runner._host.filesystem.read_text_file(runner._output_json_path())
return json.loads(re.sub(r'("stdev":\s*\d+\.\d{5})\d+', r'\1', json_content))
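    # Editorial note (illustrative): both helpers above truncate stdev values to five decimal
    # places and canonicalize timing lines such as "Finished: 0.4213 s" to "Finished: 0.1 s",
    # so the expected-output comparisons in these tests stay deterministic across runs.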
def create_runner(self, args=[], driver_class=TestDriver):
options, parsed_args = PerfTestsRunner._parse_args(args)
test_port = TestPort(host=MockHost(), options=options)
test_port.create_driver = lambda worker_number=None, no_timeout=False: driver_class()
runner = PerfTestsRunner(args=args, port=test_port)
runner._host.filesystem.maybe_make_directory(runner._base_path, 'inspector')
runner._host.filesystem.maybe_make_directory(runner._base_path, 'Bindings')
runner._host.filesystem.maybe_make_directory(runner._base_path, 'Parser')
return runner, test_port
def run_test(self, test_name):
runner, port = self.create_runner()
tests = [ChromiumStylePerfTest(port, test_name, runner._host.filesystem.join('some-dir', test_name))]
return runner._run_tests_set(tests) == 0
def test_run_passing_test(self):
self.assertTrue(self.run_test('pass.html'))
def test_run_silent_test(self):
self.assertFalse(self.run_test('silent.html'))
def test_run_failed_test(self):
self.assertFalse(self.run_test('failed.html'))
def test_run_tonguey_test(self):
self.assertFalse(self.run_test('tonguey.html'))
def test_run_timeout_test(self):
self.assertFalse(self.run_test('timeout.html'))
def test_run_crash_test(self):
self.assertFalse(self.run_test('crash.html'))
def _tests_for_runner(self, runner, test_names):
filesystem = runner._host.filesystem
tests = []
for test in test_names:
path = filesystem.join(runner._base_path, test)
dirname = filesystem.dirname(path)
if test.startswith('inspector/'):
tests.append(ChromiumStylePerfTest(runner._port, test, path))
else:
tests.append(PerfTest(runner._port, test, path))
return tests
def test_run_test_set(self):
runner, port = self.create_runner()
tests = self._tests_for_runner(runner, ['inspector/pass.html', 'inspector/silent.html', 'inspector/failed.html',
'inspector/tonguey.html', 'inspector/timeout.html', 'inspector/crash.html'])
output = OutputCapture()
output.capture_output()
try:
unexpected_result_count = runner._run_tests_set(tests)
finally:
stdout, stderr, log = output.restore_output()
self.assertEqual(unexpected_result_count, len(tests) - 1)
self.assertTrue('\nRESULT group_name: test_name= 42 ms\n' in log)
def test_run_test_set_kills_drt_per_run(self):
class TestDriverWithStopCount(TestDriver):
stop_count = 0
def stop(self):
TestDriverWithStopCount.stop_count += 1
runner, port = self.create_runner(driver_class=TestDriverWithStopCount)
tests = self._tests_for_runner(runner, ['inspector/pass.html', 'inspector/silent.html', 'inspector/failed.html',
'inspector/tonguey.html', 'inspector/timeout.html', 'inspector/crash.html'])
unexpected_result_count = runner._run_tests_set(tests)
self.assertEqual(TestDriverWithStopCount.stop_count, 6)
def test_run_test_set_for_parser_tests(self):
runner, port = self.create_runner()
tests = self._tests_for_runner(runner, ['Bindings/event-target-wrapper.html', 'Parser/some-parser.html'])
output = OutputCapture()
output.capture_output()
try:
unexpected_result_count = runner._run_tests_set(tests)
finally:
stdout, stderr, log = output.restore_output()
self.assertEqual(unexpected_result_count, 0)
self.assertEqual(self._normalize_output(log), EventTargetWrapperTestData.output + SomeParserTestData.output)
def test_run_memory_test(self):
runner, port = self.create_runner_and_setup_results_template()
runner._timestamp = 123456789
port.host.filesystem.write_text_file(runner._base_path + '/Parser/memory-test.html', 'some content')
output = OutputCapture()
output.capture_output()
try:
unexpected_result_count = runner.run()
finally:
stdout, stderr, log = output.restore_output()
self.assertEqual(unexpected_result_count, 0)
self.assertEqual(self._normalize_output(log), MemoryTestData.output + '\nMOCK: user.open_url: file://...\n')
parser_tests = self._load_output_json(runner)[0]['tests']['Parser']['tests']
self.assertEqual(parser_tests['memory-test']['metrics']['Time'], MemoryTestData.results)
self.assertEqual(parser_tests['memory-test']['metrics']['JSHeap'], MemoryTestData.js_heap_results)
self.assertEqual(parser_tests['memory-test']['metrics']['Malloc'], MemoryTestData.malloc_results)
def _test_run_with_json_output(self, runner, filesystem, upload_succeeds=False, results_shown=True, expected_exit_code=0, repeat=1, compare_logs=True):
filesystem.write_text_file(runner._base_path + '/inspector/pass.html', 'some content')
filesystem.write_text_file(runner._base_path + '/Bindings/event-target-wrapper.html', 'some content')
uploaded = [False]
def mock_upload_json(hostname, json_path, host_path=None):
# FIXME: Get rid of the hard-coded perf.webkit.org once we've completed the transition.
self.assertIn(hostname, ['some.host'])
self.assertIn(json_path, ['/mock-checkout/output.json'])
self.assertIn(host_path, [None, '/api/report'])
uploaded[0] = upload_succeeds
return upload_succeeds
runner._upload_json = mock_upload_json
runner._timestamp = 123456789
runner._utc_timestamp = datetime.datetime(2013, 2, 8, 15, 19, 37, 460000)
output_capture = OutputCapture()
output_capture.capture_output()
try:
self.assertEqual(runner.run(), expected_exit_code)
finally:
stdout, stderr, logs = output_capture.restore_output()
if not expected_exit_code and compare_logs:
expected_logs = ''
for i in xrange(repeat):
runs = ' (Run %d of %d)' % (i + 1, repeat) if repeat > 1 else ''
expected_logs += 'Running 2 tests%s\n' % runs + EventTargetWrapperTestData.output + InspectorPassTestData.output
if results_shown:
expected_logs += 'MOCK: user.open_url: file://...\n'
self.assertEqual(self._normalize_output(logs), expected_logs)
self.assertEqual(uploaded[0], upload_succeeds)
return logs
_event_target_wrapper_and_inspector_results = {
"Bindings":
{"url": "http://trac.webkit.org/browser/trunk/PerformanceTests/Bindings",
"tests": {"event-target-wrapper": EventTargetWrapperTestData.results}}}
def test_run_with_json_output(self):
runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
'--test-results-server=some.host'])
self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
self.assertEqual(self._load_output_json(runner), [{
"buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
"revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])
filesystem = port.host.filesystem
self.assertTrue(filesystem.isfile(runner._output_json_path()))
self.assertTrue(filesystem.isfile(filesystem.splitext(runner._output_json_path())[0] + '.html'))
def test_run_with_description(self):
runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
'--test-results-server=some.host', '--description', 'some description'])
self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
self.assertEqual(self._load_output_json(runner), [{
"buildTime": "2013-02-08T15:19:37.460000", "description": "some description",
"tests": self._event_target_wrapper_and_inspector_results,
"revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])
def create_runner_and_setup_results_template(self, args=[]):
runner, port = self.create_runner(args)
filesystem = port.host.filesystem
filesystem.write_text_file(runner._base_path + '/resources/results-template.html',
'BEGIN<script src="%AbsolutePathToWebKitTrunk%/some.js"></script>'
'<script src="%AbsolutePathToWebKitTrunk%/other.js"></script><script>%PeformanceTestsResultsJSON%</script>END')
filesystem.write_text_file(runner._base_path + '/Dromaeo/resources/dromaeo/web/lib/jquery-1.6.4.js', 'jquery content')
return runner, port
def test_run_respects_no_results(self):
runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
'--test-results-server=some.host', '--no-results'])
self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=False, results_shown=False)
self.assertFalse(port.host.filesystem.isfile('/mock-checkout/output.json'))
def test_run_generates_json_by_default(self):
runner, port = self.create_runner_and_setup_results_template()
filesystem = port.host.filesystem
output_json_path = runner._output_json_path()
results_page_path = filesystem.splitext(output_json_path)[0] + '.html'
self.assertFalse(filesystem.isfile(output_json_path))
self.assertFalse(filesystem.isfile(results_page_path))
self._test_run_with_json_output(runner, port.host.filesystem)
self.assertEqual(self._load_output_json(runner), [{
"buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
"revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])
self.assertTrue(filesystem.isfile(output_json_path))
self.assertTrue(filesystem.isfile(results_page_path))
def test_run_merges_output_by_default(self):
runner, port = self.create_runner_and_setup_results_template()
filesystem = port.host.filesystem
output_json_path = runner._output_json_path()
filesystem.write_text_file(output_json_path, '[{"previous": "results"}]')
self._test_run_with_json_output(runner, port.host.filesystem)
self.assertEqual(self._load_output_json(runner), [{"previous": "results"}, {
"buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
"revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])
self.assertTrue(filesystem.isfile(filesystem.splitext(output_json_path)[0] + '.html'))
def test_run_respects_reset_results(self):
runner, port = self.create_runner_and_setup_results_template(args=["--reset-results"])
filesystem = port.host.filesystem
output_json_path = runner._output_json_path()
filesystem.write_text_file(output_json_path, '[{"previous": "results"}]')
self._test_run_with_json_output(runner, port.host.filesystem)
self.assertEqual(self._load_output_json(runner), [{
"buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
"revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])
self.assertTrue(filesystem.isfile(filesystem.splitext(output_json_path)[0] + '.html'))
def test_run_generates_and_show_results_page(self):
runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json'])
page_shown = []
port.show_results_html_file = lambda path: page_shown.append(path)
filesystem = port.host.filesystem
self._test_run_with_json_output(runner, filesystem, results_shown=False)
expected_entry = {"buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
"revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}
self.maxDiff = None
self.assertEqual(runner._output_json_path(), '/mock-checkout/output.json')
self.assertEqual(self._load_output_json(runner), [expected_entry])
self.assertEqual(filesystem.read_text_file('/mock-checkout/output.html'),
'BEGIN<script src="/test.checkout/some.js"></script><script src="/test.checkout/other.js"></script>'
'<script>%s</script>END' % port.host.filesystem.read_text_file(runner._output_json_path()))
self.assertEqual(page_shown[0], '/mock-checkout/output.html')
self._test_run_with_json_output(runner, filesystem, results_shown=False)
self.assertEqual(runner._output_json_path(), '/mock-checkout/output.json')
self.assertEqual(self._load_output_json(runner), [expected_entry, expected_entry])
self.assertEqual(filesystem.read_text_file('/mock-checkout/output.html'),
'BEGIN<script src="/test.checkout/some.js"></script><script src="/test.checkout/other.js"></script>'
'<script>%s</script>END' % port.host.filesystem.read_text_file(runner._output_json_path()))
def test_run_respects_no_show_results(self):
show_results_html_file = lambda path: page_shown.append(path)
runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json'])
page_shown = []
port.show_results_html_file = show_results_html_file
self._test_run_with_json_output(runner, port.host.filesystem, results_shown=False)
self.assertEqual(page_shown[0], '/mock-checkout/output.html')
runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
'--no-show-results'])
page_shown = []
port.show_results_html_file = show_results_html_file
self._test_run_with_json_output(runner, port.host.filesystem, results_shown=False)
self.assertEqual(page_shown, [])
def test_run_with_bad_output_json(self):
runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json'])
port.host.filesystem.write_text_file('/mock-checkout/output.json', 'bad json')
self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_MERGE)
port.host.filesystem.write_text_file('/mock-checkout/output.json', '{"another bad json": "1"}')
self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_MERGE)
def test_run_with_slave_config_json(self):
runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
'--slave-config-json-path=/mock-checkout/slave-config.json', '--test-results-server=some.host'])
port.host.filesystem.write_text_file('/mock-checkout/slave-config.json', '{"key": "value"}')
self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
self.assertEqual(self._load_output_json(runner), [{
"buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
"revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}, "builderKey": "value"}])
def test_run_with_bad_slave_config_json(self):
runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
'--slave-config-json-path=/mock-checkout/slave-config.json', '--test-results-server=some.host'])
logs = self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)
self.assertTrue('Missing slave configuration JSON file: /mock-checkout/slave-config.json' in logs)
port.host.filesystem.write_text_file('/mock-checkout/slave-config.json', 'bad json')
self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)
port.host.filesystem.write_text_file('/mock-checkout/slave-config.json', '["another bad json"]')
self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)
def test_run_with_multiple_repositories(self):
runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
'--test-results-server=some.host'])
port.repository_paths = lambda: [('webkit', '/mock-checkout'), ('some', '/mock-checkout/some')]
self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
self.assertEqual(self._load_output_json(runner), [{
"buildTime": "2013-02-08T15:19:37.460000", "tests": self._event_target_wrapper_and_inspector_results,
"revisions": {"webkit": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"},
"some": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])
def test_run_with_upload_json(self):
runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
'--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123'])
self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
generated_json = json.loads(port.host.filesystem.files['/mock-checkout/output.json'])
self.assertEqual(generated_json[0]['platform'], 'platform1')
self.assertEqual(generated_json[0]['builderName'], 'builder1')
self.assertEqual(generated_json[0]['buildNumber'], 123)
self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=False, expected_exit_code=PerfTestsRunner.EXIT_CODE_FAILED_UPLOADING)
def test_run_with_upload_json_should_generate_perf_webkit_json(self):
runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
'--test-results-server', 'some.host', '--platform', 'platform1', '--builder-name', 'builder1', '--build-number', '123',
'--slave-config-json-path=/mock-checkout/slave-config.json'])
port.host.filesystem.write_text_file('/mock-checkout/slave-config.json', '{"key": "value1"}')
self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True)
generated_json = json.loads(port.host.filesystem.files['/mock-checkout/output.json'])
self.assertTrue(isinstance(generated_json, list))
self.assertEqual(len(generated_json), 1)
output = generated_json[0]
self.maxDiff = None
self.assertEqual(output['platform'], 'platform1')
self.assertEqual(output['buildNumber'], 123)
self.assertEqual(output['buildTime'], '2013-02-08T15:19:37.460000')
self.assertEqual(output['builderName'], 'builder1')
self.assertEqual(output['builderKey'], 'value1')
self.assertEqual(output['revisions'], {'blink': {'revision': '5678', 'timestamp': '2013-02-01 08:48:05 +0000'}})
self.assertEqual(output['tests'].keys(), ['Bindings'])
self.assertEqual(sorted(output['tests']['Bindings'].keys()), ['tests', 'url'])
self.assertEqual(output['tests']['Bindings']['url'], 'http://trac.webkit.org/browser/trunk/PerformanceTests/Bindings')
self.assertEqual(output['tests']['Bindings']['tests'].keys(), ['event-target-wrapper'])
self.assertEqual(output['tests']['Bindings']['tests']['event-target-wrapper'], {
'url': 'http://trac.webkit.org/browser/trunk/PerformanceTests/Bindings/event-target-wrapper.html',
'metrics': {'Time': {'current': [[1486.0, 1471.0, 1510.0, 1505.0, 1478.0, 1490.0]] * 4}}})
def test_run_with_repeat(self):
self.maxDiff = None
runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
'--test-results-server=some.host', '--repeat', '5'])
self._test_run_with_json_output(runner, port.host.filesystem, upload_succeeds=True, repeat=5)
self.assertEqual(self._load_output_json(runner), [
{"buildTime": "2013-02-08T15:19:37.460000",
"tests": self._event_target_wrapper_and_inspector_results,
"revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}},
{"buildTime": "2013-02-08T15:19:37.460000",
"tests": self._event_target_wrapper_and_inspector_results,
"revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}},
{"buildTime": "2013-02-08T15:19:37.460000",
"tests": self._event_target_wrapper_and_inspector_results,
"revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}},
{"buildTime": "2013-02-08T15:19:37.460000",
"tests": self._event_target_wrapper_and_inspector_results,
"revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}},
{"buildTime": "2013-02-08T15:19:37.460000",
"tests": self._event_target_wrapper_and_inspector_results,
"revisions": {"blink": {"timestamp": "2013-02-01 08:48:05 +0000", "revision": "5678"}}}])
def test_run_with_test_runner_count(self):
runner, port = self.create_runner_and_setup_results_template(args=['--output-json-path=/mock-checkout/output.json',
'--test-runner-count=3'])
self._test_run_with_json_output(runner, port.host.filesystem, compare_logs=False)
generated_json = json.loads(port.host.filesystem.files['/mock-checkout/output.json'])
self.assertTrue(isinstance(generated_json, list))
self.assertEqual(len(generated_json), 1)
output = generated_json[0]['tests']['Bindings']['tests']['event-target-wrapper']['metrics']['Time']['current']
self.assertEqual(len(output), 3)
expectedMetrics = EventTargetWrapperTestData.results['metrics']['Time']['current'][0]
for metrics in output:
self.assertEqual(metrics, expectedMetrics)
|
|
from __future__ import print_function
import numpy as np
# from keras.layers import Input
# from keras import backend as K
# from keras.optimizers import RMSprop
import tensorflow as tf
# import tensorlayer as tl
from rlflow.algos.algo import RLAlgorithm
from rlflow.core import tf_utils
class DQN(RLAlgorithm):
"""
    Basic deep Q-network (DQN) implementation based on a TFLearn-style network.
"""
def __init__(self,
env,
train_policy,
clone_policy,
memory,
exploration,
episode_len=np.inf,
discount=1.0,
standardize=True,
input_processor=None,
optimizer='rmsprop',
clip_gradients=(None, None),
sample_size=32,
memory_init_size=5000,
clone_frequency=10000,
test_epsilon=0.05):
super(DQN, self).__init__(env,
clone_policy, # pass clone policy to super since that is the default for action selection
episode_len,
discount,
standardize,
input_processor,
optimizer,
clip_gradients)
self.train_policy = train_policy
self.memory = memory
self.exploration = exploration
self.sample_size = sample_size
self.memory_init_size = memory_init_size
self.clone_frequency = clone_frequency
self.test_epsilon = test_epsilon
self.steps = 0
self.clone_ops = tf_utils.build_policy_copy_ops(self.train_policy, self.policy)
# vars to hold state updates
self.last_state = None
        # Tensor handles for the online (train) network and the target (clone) network.
        self.train_states = self.train_policy.inputs[0]
        self.train_q_values = self.train_policy.outputs[0]
        self.target_states = self.policy.inputs[0]
        self.target_q_values = self.policy.outputs[0]
# self.S1 = Input(shape=(84, 84, 4))
# self.S2 = Input(shape=(84, 84, 4))
# self.A = Input(shape=(1,), dtype='int32')
# self.R = Input(shape=(1,), dtype='float32')
# self.T = Input(shape=(1,), dtype='float32')
# VS = self.train_q_values #self.train_policy.model(self.S1)
# VNS = self.target_q_values #self.policy.model(self.S2)
#
# future_value = (1-self.T) * K.max(VNS, axis=1, keepdims=True)
#
# print ("Past max")
# discounted_future_value = self.discount * future_value
# target = self.R + discounted_future_value
#
# cost = (VS[:, self.A] - target)#**2).mean()
# opt = RMSprop(0.0001)
# updates = opt.get_updates(self.policy.model.trainable_weights, [], cost)
# self.update = K.function([self.train_states, self.target_states, self.A, self.R, self.T], cost, updates=updates)
self.actions = tf.placeholder(tf.int64, shape=[None])
self.a_one_hot = tf.one_hot(self.actions, self.env.action_space.n, 1.0, 0.0)
        # Select the Q-value of the action actually taken for each sampled transition.
        # Reducing over axis=1 yields one estimate per transition rather than collapsing
        # the whole batch to a single number.
        self.q_estimates = tf.reduce_sum(tf.multiply(self.train_q_values, self.a_one_hot), axis=1)
self.q_targets = tf.placeholder(tf.float32, shape=[None])
self.delta = self.q_targets - self.q_estimates
self.clipped_error = tf.where(tf.abs(self.delta) < 1.0,
0.5 * tf.square(self.delta),
tf.abs(self.delta) - 0.5, name='clipped_error')
self.L = tf.reduce_mean(self.clipped_error, name='loss')
self.grads_and_vars = self.opt.compute_gradients(self.L, var_list=self.train_policy.get_params())
# for idx, (grad, var) in enumerate(self.grads_and_vars):
# if grad is not None:
# self.grads_and_vars[idx] = (tf.clip_by_norm(grad, self.max_grad_norm), var)
self.update = self.opt.apply_gradients(self.grads_and_vars)
# self.L = tf_utils.mean_square(self.q_value, self.y)
# self.grads_and_vars = self.opt.compute_gradients(self.L, var_list=self.train_policy.get_params())
#
# if None not in self.clip_gradients:
# self.clipped_grads_and_vars = [(tf.clip_by_value(gv[0], clip_gradients[0], clip_gradients[1]), gv[1])
# for gv in self.grads_and_vars]
# self.update = self.opt.apply_gradients(self.clipped_grads_and_vars)
# else:
# self.update = self.opt.apply_gradients(self.grads_and_vars)
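        # Editorial note (illustrative): clipped_error above is the Huber loss with delta=1,
        # quadratic for |delta| < 1 and linear beyond, which keeps gradients bounded for large
        # TD errors. The corresponding target computed later in on_step_finish is
        #     y = r + (1 - terminal) * discount * max_a' Q_target(s', a')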
def clone(self):
"""
Run the clone ops
"""
# print ("Cloning")
self.sess.run(self.clone_ops)
# self.train_policy.model.get_weights()
# self.policy.model.set_weights(self.train_policy.model.get_weights())
# v1 = self.sess.run(self.train_policy.get_params()[0])
# v2 = self.sess.run(self.policy.get_params()[0])
# print (np.allclose(v1, v2))
# import sys
# sys.exit()
def on_train_start(self):
"""
Run the clone ops to make networks same at start
"""
self.clone()
def max_action(self, obs, mode):
actions = super(DQN, self).act(obs, mode)
return np.argmax(actions)
def act(self, obs, mode):
"""
Overriding act so can do proper exploration processing,
add to memory and sample from memory for updates
"""
if mode == RLAlgorithm.TRAIN:
if self.memory.size() < self.memory_init_size:
return self.env.action_space.sample()
if self.exploration.explore(self.steps):
return self.env.action_space.sample()
else:
# find max action
return self.max_action(obs, mode)
# return super(DQN, self).act(obs, mode)
else:
if np.random.random() < self.test_epsilon:
return self.env.action_space.sample()
else:
return self.max_action(obs, mode)
# return super(DQN, self).act(obs, mode)
def on_step_finish(self, obs, action, reward, done, info, mode):
"""
Receive data from the last step, add to memory
"""
if mode == RLAlgorithm.TRAIN:
# clip reward between [-1, 1]
            reward = reward if abs(reward) <= 1.0 else float(np.sign(reward))
# last state is none if this is the start of an episode
# obs is None until the input processor provides valid processing
if self.last_state is not None and obs is not None:
# then this is not the first state seen
self.memory.add_element(self.last_state, action, reward, obs, done)
# else this is the first state in the episode, either way
# keep track of last state, if this is the end of an episode mark it
self.last_state = obs if not done else None
if self.memory.size() >= self.memory_init_size:
# mark that we have done another step for epsilon decrease
# self.exploration.increment_iteration()
states, actions, rewards, next_states, terminals = self.memory.sample(self.sample_size)
                # This takes about 0.01 seconds on my laptop
# print ("next states: ", next_states.shape)
target_qs = self.sess.run(self.target_q_values, feed_dict={self.target_states: next_states})
ys = rewards + (1 - terminals) * self.discount * np.max(target_qs, axis=1)
                # Is there a performance issue here? This takes about 0.07 seconds on my laptop
self.sess.run(self.update,
feed_dict={self.train_states: states,
self.actions: actions,
self.q_targets: ys})
# if at desired step, clone model
if self.steps % self.clone_frequency == 0:
# print ("Step ", self.steps, ", cloning model")
self.clone()
self.steps += 1
else: # TEST mode
pass
def optimize(self):
"""
In this case all the work happens in the callbacks, just run an episode
"""
print ("Current step: ", self.steps)
return super(DQN, self).optimize()
|
|
"""Miscellaneous utility functions and classes.
This module is used internally by Tornado. It is not necessarily expected
that the functions and classes defined here will be useful to other
applications, but they are documented here in case they are.
The one public-facing part of this module is the `Configurable` class
and its `~Configurable.configure` method, which becomes a part of the
interface of its subclasses, including `.AsyncHTTPClient`, `.IOLoop`,
and `.Resolver`.
"""
from __future__ import absolute_import, division, print_function, with_statement
import array
import os
import sys
import zlib
try:
xrange # py2
except NameError:
xrange = range # py3
# inspect.getargspec() raises DeprecationWarnings in Python 3.5.
# The two functions have compatible interfaces for the parts we need.
try:
from inspect import getfullargspec as getargspec # py3
except ImportError:
from inspect import getargspec # py2
class ObjectDict(dict):
"""Makes a dictionary behave like an object, with attribute-style access.
"""
def __getattr__(self, name):
try:
return self[name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
self[name] = value
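# Editorial sketch (illustrative): ObjectDict lets dict entries be read and written as
# attributes, e.g. d = ObjectDict(host="localhost"); d.port = 80; then d["port"] == 80
# and d.host == "localhost".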
class GzipDecompressor(object):
"""Streaming gzip decompressor.
The interface is like that of `zlib.decompressobj` (without some of the
    optional arguments), but it understands gzip headers and checksums.
"""
def __init__(self):
# Magic parameter makes zlib module understand gzip header
# http://stackoverflow.com/questions/1838699/how-can-i-decompress-a-gzip-stream-with-zlib
# This works on cpython and pypy, but not jython.
self.decompressobj = zlib.decompressobj(16 + zlib.MAX_WBITS)
def decompress(self, value, max_length=None):
"""Decompress a chunk, returning newly-available data.
Some data may be buffered for later processing; `flush` must
be called when there is no more input data to ensure that
all data was processed.
If ``max_length`` is given, some input data may be left over
in ``unconsumed_tail``; you must retrieve this value and pass
it back to a future call to `decompress` if it is not empty.
"""
return self.decompressobj.decompress(value, max_length)
@property
def unconsumed_tail(self):
"""Returns the unconsumed portion left over
"""
return self.decompressobj.unconsumed_tail
def flush(self):
"""Return any remaining buffered data not yet returned by decompress.
Also checks for errors such as truncated input.
No other methods may be called on this object after `flush`.
"""
return self.decompressobj.flush()
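# Editorial sketch (illustrative; `chunks` is a hypothetical iterable of gzip-compressed bytes):
#     decomp = GzipDecompressor()
#     plain = b"".join(decomp.decompress(chunk) for chunk in chunks) + decomp.flush()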
if not isinstance(b'', type('')):
unicode_type = str
basestring_type = str
else:
# These names don't exist in py3, so use noqa comments to disable
# warnings in flake8.
unicode_type = unicode # noqa
basestring_type = basestring # noqa
def import_object(name):
"""Imports an object by name.
import_object('x') is equivalent to 'import x'.
import_object('x.y.z') is equivalent to 'from x.y import z'.
>>> import tornado.escape
>>> import_object('tornado.escape') is tornado.escape
True
>>> import_object('tornado.escape.utf8') is tornado.escape.utf8
True
>>> import_object('tornado') is tornado
True
>>> import_object('tornado.missing_module')
Traceback (most recent call last):
...
ImportError: No module named missing_module
"""
if isinstance(name, unicode_type) and str is not unicode_type:
# On python 2 a byte string is required.
name = name.encode('utf-8')
if name.count('.') == 0:
return __import__(name, None, None)
parts = name.split('.')
obj = __import__('.'.join(parts[:-1]), None, None, [parts[-1]], 0)
try:
return getattr(obj, parts[-1])
except AttributeError:
raise ImportError("No module named %s" % parts[-1])
# Deprecated alias that was used before we dropped py25 support.
# Left here in case anyone outside Tornado is using it.
bytes_type = bytes
if sys.version_info > (3,):
exec("""
def raise_exc_info(exc_info):
raise exc_info[1].with_traceback(exc_info[2])
def exec_in(code, glob, loc=None):
if isinstance(code, str):
code = compile(code, '<string>', 'exec', dont_inherit=True)
exec(code, glob, loc)
""")
else:
exec("""
def raise_exc_info(exc_info):
raise exc_info[0], exc_info[1], exc_info[2]
def exec_in(code, glob, loc=None):
if isinstance(code, basestring):
# exec(string) inherits the caller's future imports; compile
# the string first to prevent that.
code = compile(code, '<string>', 'exec', dont_inherit=True)
exec code in glob, loc
""")
def errno_from_exception(e):
"""Provides the errno from an Exception object.
    There are cases where the errno attribute is not set, so we pull
    the errno out of the args; but if someone instantiates an Exception
    without any args, indexing the args would fail. This function
    abstracts all of that behavior to give you a safe way to get the
    errno.
"""
if hasattr(e, 'errno'):
return e.errno
elif e.args:
return e.args[0]
else:
return None
class Configurable(object):
"""Base class for configurable interfaces.
A configurable interface is an (abstract) class whose constructor
acts as a factory function for one of its implementation subclasses.
The implementation subclass as well as optional keyword arguments to
its initializer can be set globally at runtime with `configure`.
By using the constructor as the factory method, the interface
looks like a normal class, `isinstance` works as usual, etc. This
pattern is most useful when the choice of implementation is likely
to be a global decision (e.g. when `~select.epoll` is available,
always use it instead of `~select.select`), or when a
previously-monolithic class has been split into specialized
subclasses.
Configurable subclasses must define the class methods
`configurable_base` and `configurable_default`, and use the instance
method `initialize` instead of ``__init__``.
"""
__impl_class = None
__impl_kwargs = None
def __new__(cls, *args, **kwargs):
base = cls.configurable_base()
init_kwargs = {}
if cls is base:
impl = cls.configured_class()
if base.__impl_kwargs:
init_kwargs.update(base.__impl_kwargs)
else:
impl = cls
init_kwargs.update(kwargs)
instance = super(Configurable, cls).__new__(impl)
# initialize vs __init__ chosen for compatibility with AsyncHTTPClient
# singleton magic. If we get rid of that we can switch to __init__
# here too.
instance.initialize(*args, **init_kwargs)
return instance
@classmethod
def configurable_base(cls):
"""Returns the base class of a configurable hierarchy.
        This will normally return the class in which it is defined
        (which is *not* necessarily the same as the ``cls`` classmethod
        parameter).
"""
raise NotImplementedError()
@classmethod
def configurable_default(cls):
"""Returns the implementation class to be used if none is configured."""
raise NotImplementedError()
def initialize(self):
"""Initialize a `Configurable` subclass instance.
Configurable classes should use `initialize` instead of ``__init__``.
.. versionchanged:: 4.2
Now accepts positional arguments in addition to keyword arguments.
"""
@classmethod
def configure(cls, impl, **kwargs):
"""Sets the class to use when the base class is instantiated.
Keyword arguments will be saved and added to the arguments passed
to the constructor. This can be used to set global defaults for
some parameters.
"""
base = cls.configurable_base()
if isinstance(impl, (unicode_type, bytes)):
impl = import_object(impl)
if impl is not None and not issubclass(impl, cls):
raise ValueError("Invalid subclass of %s" % cls)
base.__impl_class = impl
base.__impl_kwargs = kwargs
@classmethod
def configured_class(cls):
"""Returns the currently configured class."""
base = cls.configurable_base()
if cls.__impl_class is None:
base.__impl_class = cls.configurable_default()
return base.__impl_class
@classmethod
def _save_configuration(cls):
base = cls.configurable_base()
return (base.__impl_class, base.__impl_kwargs)
@classmethod
def _restore_configuration(cls, saved):
base = cls.configurable_base()
base.__impl_class = saved[0]
base.__impl_kwargs = saved[1]
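# Editorial sketch (illustrative; the class names below are hypothetical):
#
#     class Database(Configurable):
#         @classmethod
#         def configurable_base(cls):
#             return Database
#         @classmethod
#         def configurable_default(cls):
#             return SQLiteDatabase
#         def initialize(self, **kwargs):
#             self.settings = kwargs
#
#     Database.configure(PostgresDatabase, host="db1")
#     db = Database()  # constructs a PostgresDatabase initialized with host="db1"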
class ArgReplacer(object):
"""Replaces one value in an ``args, kwargs`` pair.
Inspects the function signature to find an argument by name
whether it is passed by position or keyword. For use in decorators
and similar wrappers.
"""
def __init__(self, func, name):
self.name = name
try:
self.arg_pos = self._getargnames(func).index(name)
except ValueError:
# Not a positional parameter
self.arg_pos = None
def _getargnames(self, func):
try:
return getargspec(func).args
except TypeError:
if hasattr(func, 'func_code'):
# Cython-generated code has all the attributes needed
# by inspect.getargspec, but the inspect module only
# works with ordinary functions. Inline the portion of
# getargspec that we need here. Note that for static
# functions the @cython.binding(True) decorator must
# be used (for methods it works out of the box).
code = func.func_code
return code.co_varnames[:code.co_argcount]
raise
def get_old_value(self, args, kwargs, default=None):
"""Returns the old value of the named argument without replacing it.
Returns ``default`` if the argument is not present.
"""
if self.arg_pos is not None and len(args) > self.arg_pos:
return args[self.arg_pos]
else:
return kwargs.get(self.name, default)
def replace(self, new_value, args, kwargs):
"""Replace the named argument in ``args, kwargs`` with ``new_value``.
Returns ``(old_value, args, kwargs)``. The returned ``args`` and
``kwargs`` objects may not be the same as the input objects, or
the input objects may be mutated.
If the named argument was not found, ``new_value`` will be added
to ``kwargs`` and None will be returned as ``old_value``.
"""
if self.arg_pos is not None and len(args) > self.arg_pos:
# The arg to replace is passed positionally
old_value = args[self.arg_pos]
args = list(args) # *args is normally a tuple
args[self.arg_pos] = new_value
else:
# The arg to replace is either omitted or passed by keyword.
old_value = kwargs.get(self.name)
kwargs[self.name] = new_value
return old_value, args, kwargs
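# Editorial sketch (illustrative; `fetch` and `wrapped_cb` are hypothetical):
#     replacer = ArgReplacer(fetch, 'callback')
#     old_cb, args, kwargs = replacer.replace(wrapped_cb, args, kwargs)
# The 'callback' argument is swapped out whether it was passed positionally or by keyword.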
def timedelta_to_seconds(td):
"""Equivalent to td.total_seconds() (introduced in python 2.7)."""
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
def _websocket_mask_python(mask, data):
"""Websocket masking function.
`mask` is a `bytes` object of length 4; `data` is a `bytes` object of any length.
Returns a `bytes` object of the same length as `data` with the mask applied
as specified in section 5.3 of RFC 6455.
This pure-python implementation may be replaced by an optimized version when available.
"""
mask = array.array("B", mask)
unmasked = array.array("B", data)
for i in xrange(len(data)):
unmasked[i] = unmasked[i] ^ mask[i % 4]
if hasattr(unmasked, 'tobytes'):
# tostring was deprecated in py32. It hasn't been removed,
# but since we turn on deprecation warnings in our tests
# we need to use the right one.
return unmasked.tobytes()
else:
return unmasked.tostring()
if (os.environ.get('TORNADO_NO_EXTENSION') or
os.environ.get('TORNADO_EXTENSION') == '0'):
# These environment variables exist to make it easier to do performance
# comparisons; they are not guaranteed to remain supported in the future.
_websocket_mask = _websocket_mask_python
else:
try:
from tornado.speedups import websocket_mask as _websocket_mask
except ImportError:
if os.environ.get('TORNADO_EXTENSION') == '1':
raise
_websocket_mask = _websocket_mask_python
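# Illustrative check (not from the original module): XOR masking as specified
# in section 5.3 of RFC 6455 is its own inverse, so masking twice with the
# same 4-byte key recovers the original payload.
def _websocket_mask_demo():
    key = b"\x01\x02\x03\x04"
    payload = b"hello websocket"
    masked = _websocket_mask(key, payload)
    assert _websocket_mask(key, masked) == payload
    return masked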
def doctests():
import doctest
return doctest.DocTestSuite()
|
|
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from json import loads
from qiita_db.user import User
from qiita_db.software import Command, Parameters, DefaultParameters
from qiita_db.processing_job import ProcessingWorkflow, ProcessingJob
from qiita_db.exceptions import QiitaDBUnknownIDError
def list_commands_handler_get_req(artifact_types, exclude_analysis):
"""Retrieves the commands that can process the given artifact types
Parameters
----------
artifact_types : str
Comma-separated list of artifact types
exclude_analysis : bool
If True, return commands that are not part of the analysis pipeline
Returns
-------
dict of objects
A dictionary containing the commands information
{'status': str,
'message': str,
'commands': list of dicts of {'id': int,
'command': str,
'output': list of [str, str]}}
"""
artifact_types = artifact_types.split(',')
cmd_info = [
{'id': cmd.id, 'command': cmd.name, 'output': cmd.outputs}
for cmd in Command.get_commands_by_input_type(
artifact_types, exclude_analysis=exclude_analysis)]
return {'status': 'success',
'message': '',
'commands': cmd_info}
def list_options_handler_get_req(command_id):
"""Returns the available default parameters set for the given command
Parameters
----------
command_id : int
The command id
Returns
-------
dict of objects
A dictionary containing the commands information
{'status': str,
'message': str,
'options': list of dicts of {'id: str', 'name': str,
'values': dict of {str: str}}}
"""
command = Command(command_id)
options = [{'id': p.id, 'name': p.name, 'values': p.values}
for p in command.default_parameter_sets]
return {'status': 'success',
'message': '',
'options': options,
'req_options': command.required_parameters,
'opt_options': command.optional_parameters}
def workflow_handler_post_req(user_id, command_id, params):
"""Creates a new workflow in the system
Parameters
----------
user_id : str
The user creating the workflow
command_id : int
The first command to execute in the workflow
params : str
JSON representations of the parameters for the first command of
the workflow
Returns
-------
dict of objects
A dictionary containing the commands information
{'status': str,
'message': str,
'workflow_id': int}
"""
parameters = Parameters.load(Command(command_id), json_str=params)
status = 'success'
message = ''
try:
wf = ProcessingWorkflow.from_scratch(User(user_id), parameters)
except Exception as exc:
wf = None
wf_id = None
job_info = None
status = 'error'
        message = str(exc)
if wf is not None:
        # this is safe as we are creating the workflow for the first time
        # and there is only one node. Remember that networkx doesn't guarantee
        # the order of nodes
job = wf.graph.nodes()[0]
inputs = [a.id for a in job.input_artifacts]
job_cmd = job.command
wf_id = wf.id
job_info = {'id': job.id, 'inputs': inputs, 'label': job_cmd.name,
'outputs': job_cmd.outputs}
return {'status': status, 'message': message, 'workflow_id': wf_id,
'job': job_info}
def workflow_handler_patch_req(req_op, req_path, req_value=None,
req_from=None):
"""Patches a workflow
Parameters
----------
req_op : str
The operation to perform on the workflow
req_path : str
Path parameter with the workflow to patch
req_value : str, optional
The value that needs to be modified
req_from : str, optional
The original path of the element
Returns
-------
dict of {str: str}
A dictionary of the form: {'status': str, 'message': str} in which
status is the status of the request ('error' or 'success') and message
is a human readable string with the error message in case that status
is 'error'.
"""
if req_op == 'add':
req_path = [v for v in req_path.split('/') if v]
if len(req_path) != 1:
return {'status': 'error',
'message': 'Incorrect path parameter'}
req_path = req_path[0]
try:
wf = ProcessingWorkflow(req_path)
except QiitaDBUnknownIDError:
return {'status': 'error',
'message': 'Workflow %s does not exist' % req_path}
req_value = loads(req_value)
dflt_params = DefaultParameters(req_value['dflt_params'])
req_params = req_value.get('req_params', None)
opt_params = req_value.get('opt_params', None)
connections = {ProcessingJob(k): v
for k, v in req_value['connections'].items()}
job = wf.add(dflt_params, connections=connections,
req_params=req_params, opt_params=opt_params)
job_cmd = job.command
return {'status': 'success',
'message': '',
'job': {'id': job.id,
                        'inputs': list(req_value['connections'].keys()),
'label': job_cmd.name,
'outputs': job_cmd.outputs}}
elif req_op == 'remove':
req_path = [v for v in req_path.split('/') if v]
if len(req_path) != 2:
return {'status': 'error',
'message': 'Incorrect path parameter'}
wf_id = req_path[0]
job_id = req_path[1]
wf = ProcessingWorkflow(wf_id)
job = ProcessingJob(job_id)
wf.remove(job, cascade=True)
return {'status': 'success',
'message': ''}
else:
return {'status': 'error',
'message': 'Operation "%s" not supported. Current supported '
                           'operations: add, remove' % req_op}
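# Hedged sketch (not part of the original handlers): the 'add' operation above
# parses req_value as a JSON document with these keys. All ids and parameter
# names are placeholders, and the shape of each 'connections' entry (output
# artifact name -> input parameter name) is an assumption.
def _example_workflow_add_value():
    from json import dumps
    return dumps({
        'dflt_params': 10,                   # DefaultParameters id
        'req_params': {'input_data': 5},     # optional: required parameters
        'opt_params': {'rev_comp': False},   # optional: optional parameters
        'connections': {'<parent-job-uuid>': {'demultiplexed': 'input_data'}}})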
def workflow_run_post_req(workflow_id):
"""Submits the workflow for execution
Parameters
----------
workflow_id : str
The workflow id
Returns
-------
dict of {str: str}
A dictionary of the form: {'status': str, 'message': str} in which
status is the status of the request ('error' or 'success') and message
is a human readable string with the error message in case that status
is 'error'.
"""
try:
wf = ProcessingWorkflow(workflow_id)
except QiitaDBUnknownIDError:
return {'status': 'error',
'message': 'Workflow %s does not exist' % workflow_id}
wf.submit()
return {'status': 'success', 'message': ''}
def job_ajax_get_req(job_id):
"""Returns the job information
Parameters
----------
job_id : str
The job id
Returns
-------
dict of objects
A dictionary containing the job information
{'status': str,
'message': str,
'job_id': str,
'job_status': str,
'job_step': str,
'job_parameters': dict of {str: str}}
"""
job = ProcessingJob(job_id)
cmd = job.command
sw = cmd.software
job_status = job.status
job_error = job.log.msg if job.log is not None else None
return {'status': 'success',
'message': '',
'job_id': job.id,
'job_status': job_status,
'job_step': job.step,
'job_parameters': job.parameters.values,
'job_error': job_error,
'command': cmd.name,
'command_description': cmd.description,
'software': sw.name,
'software_version': sw.version}
def job_ajax_patch_req(req_op, req_path, req_value=None, req_from=None):
"""Patches a job
Parameters
----------
req_op : str
The operation to perform on the job
req_path : str
Path parameter with the job to patch
req_value : str, optional
The value that needs to be modified
req_from : str, optional
The original path of the element
Returns
-------
dict of {str: str}
A dictionary of the form: {'status': str, 'message': str} in which
status is the status of the request ('error' or 'success') and message
is a human readable string with the error message in case that status
is 'error'.
"""
if req_op == 'remove':
req_path = [v for v in req_path.split('/') if v]
if len(req_path) != 1:
return {'status': 'error',
'message': 'Incorrect path parameter: missing job id'}
# We have ensured that we only have one element on req_path
job_id = req_path[0]
try:
job = ProcessingJob(job_id)
except QiitaDBUnknownIDError as e:
return {'status': 'error',
'message': 'Incorrect path parameter: '
'%s is not a recognized job id' % job_id}
except Exception as e:
e = str(e)
if "invalid input syntax for uuid" in e:
return {'status': 'error',
'message': 'Incorrect path parameter: '
'%s is not a recognized job id' % job_id}
else:
return {'status': 'error',
                        'message': 'An error occurred while accessing the '
'job: %s' % e}
job_status = job.status
if job_status == 'in_construction':
# A job that is in construction is in a workflow. Use the methods
# defined for workflows to keep everything consistent. This message
            # should never be presented to the user; it is aimed at developers
            # who make a mistake while changing the interface
            return {'status': 'error',
                    'message': "Can't delete job %s. It is in 'in_construction' "
                               "status. Please use /study/process/workflow/"
                               % job_id}
elif job_status == 'error':
# When the job is in error status, we just need to hide it
job.hide()
return {'status': 'success', 'message': ''}
else:
            # In any other state we currently fail; the else clause is kept
            # here because it will be useful when fixing issue #2307
return {'status': 'error',
'message': 'Only jobs in "error" status can be deleted.'}
else:
return {'status': 'error',
'message': 'Operation "%s" not supported. Current supported '
'operations: remove' % req_op}
|
|
#!/usr/bin/env python
# Copyright 2012 Google Inc. All Rights Reserved.
"""Tests for the hunt."""
import time
import logging
# pylint: disable=unused-import,g-bad-import-order
from grr.lib import server_plugins
# pylint: enable=unused-import,g-bad-import-order
from grr.lib import aff4
from grr.lib import flags
from grr.lib import flow
# These imports populate the GRRHunt registry.
from grr.lib import hunts
from grr.lib import rdfvalue
from grr.lib import test_lib
class BrokenSampleHunt(hunts.SampleHunt):
@flow.StateHandler()
def StoreResults(self, responses):
"""Stores the responses."""
client_id = responses.request.client_id
if not responses.success:
logging.info("Client %s has no file /tmp/evil.txt", client_id)
# Raise on one of the code paths.
raise RuntimeError("Error")
else:
logging.info("Client %s has a file /tmp/evil.txt", client_id)
self.MarkClientDone(client_id)
class HuntTest(test_lib.FlowTestsBaseclass):
"""Tests the Hunt."""
def testRuleAdding(self):
foreman = aff4.FACTORY.Open("aff4:/foreman", mode="rw", token=self.token)
rules = foreman.Get(foreman.Schema.RULES)
# Make sure there are no rules yet.
self.assertEqual(len(rules), 0)
hunt = hunts.GRRHunt.StartHunt("SampleHunt", token=self.token)
regex_rule = rdfvalue.ForemanAttributeRegex(
attribute_name="GRR client",
attribute_regex="HUNT")
int_rule = rdfvalue.ForemanAttributeInteger(
attribute_name="Clock",
operator=rdfvalue.ForemanAttributeInteger.Operator.GREATER_THAN,
value=1336650631137737)
hunt.AddRule([int_rule, regex_rule])
# Push the rules to the foreman.
hunt.Run()
foreman = aff4.FACTORY.Open("aff4:/foreman", mode="rw", token=self.token)
rules = foreman.Get(foreman.Schema.RULES)
# Make sure they were written correctly.
self.assertEqual(len(rules), 1)
rule = rules[0]
self.assertEqual(len(rule.regex_rules), 1)
self.assertEqual(rule.regex_rules[0].attribute_name, "GRR client")
self.assertEqual(rule.regex_rules[0].attribute_regex, "HUNT")
self.assertEqual(len(rule.integer_rules), 1)
self.assertEqual(rule.integer_rules[0].attribute_name, "Clock")
self.assertEqual(rule.integer_rules[0].operator,
rdfvalue.ForemanAttributeInteger.Operator.GREATER_THAN)
self.assertEqual(rule.integer_rules[0].value, 1336650631137737)
self.assertEqual(len(rule.actions), 1)
self.assertEqual(rule.actions[0].hunt_name, "SampleHunt")
# Running a second time should not change the rules any more.
hunt.Run()
foreman = aff4.FACTORY.Open("aff4:/foreman", mode="rw", token=self.token)
rules = foreman.Get(foreman.Schema.RULES)
# Still just one rule.
self.assertEqual(len(rules), 1)
def AddForemanRules(self, to_add):
foreman = aff4.FACTORY.Open("aff4:/foreman", mode="rw", token=self.token)
rules = foreman.Get(foreman.Schema.RULES) or foreman.Schema.RULES()
for rule in to_add:
rules.Append(rule)
foreman.Set(foreman.Schema.RULES, rules)
foreman.Close()
def testStopping(self):
"""Tests if we can stop a hunt."""
foreman = aff4.FACTORY.Open("aff4:/foreman", mode="rw", token=self.token)
rules = foreman.Get(foreman.Schema.RULES)
# Make sure there are no rules yet.
self.assertEqual(len(rules), 0)
now = int(time.time() * 1e6)
expires = now + 3600
# Add some rules.
rules = [rdfvalue.ForemanRule(created=now, expires=expires,
description="Test rule1"),
rdfvalue.ForemanRule(created=now, expires=expires,
description="Test rule2")]
self.AddForemanRules(rules)
hunt = hunts.GRRHunt.StartHunt("SampleHunt", token=self.token)
regex_rule = rdfvalue.ForemanAttributeRegex(
attribute_name="GRR client",
attribute_regex="HUNT")
int_rule = rdfvalue.ForemanAttributeInteger(
attribute_name="Clock",
operator=rdfvalue.ForemanAttributeInteger.Operator.GREATER_THAN,
value=1336650631137737)
# Fire on either of the rules.
hunt.AddRule([int_rule])
hunt.AddRule([regex_rule])
# Push the rules to the foreman.
hunt.Run()
# Add some more rules.
rules = [rdfvalue.ForemanRule(created=now, expires=expires,
description="Test rule3"),
rdfvalue.ForemanRule(created=now, expires=expires,
description="Test rule4")]
self.AddForemanRules(rules)
foreman = aff4.FACTORY.Open("aff4:/foreman", mode="rw", token=self.token)
rules = foreman.Get(foreman.Schema.RULES)
self.assertEqual(len(rules), 6)
self.assertNotEqual(hunt.OutstandingRequests(), 0)
# Now we stop the hunt.
hunt.Stop()
foreman = aff4.FACTORY.Open("aff4:/foreman", mode="rw", token=self.token)
rules = foreman.Get(foreman.Schema.RULES)
# The rule for this hunt should be deleted but the rest should be there.
self.assertEqual(len(rules), 4)
# And the hunt should report no outstanding requests any more.
self.assertEqual(hunt.OutstandingRequests(), 0)
def testInvalidRules(self):
"""Tests the behavior when a wrong attribute name is passed in a rule."""
hunt = hunts.GRRHunt.StartHunt("SampleHunt", token=self.token)
regex_rule = rdfvalue.ForemanAttributeRegex(
attribute_name="no such attribute",
attribute_regex="HUNT")
self.assertRaises(RuntimeError, hunt.AddRule, [regex_rule])
def Callback(self, hunt_id, client_id, client_limit):
self.called.append((hunt_id, client_id, client_limit))
def testCallback(self, client_limit=None):
"""Checks that the foreman uses the callback specified in the action."""
hunt = hunts.GRRHunt.StartHunt("SampleHunt", client_limit=client_limit,
token=self.token)
regex_rule = rdfvalue.ForemanAttributeRegex(
attribute_name="GRR client",
attribute_regex="GRR")
hunt.AddRule([regex_rule])
hunt.Run()
foreman = aff4.FACTORY.Open("aff4:/foreman", mode="rw", token=self.token)
# Create a client that matches our regex.
client = aff4.FACTORY.Open(self.client_id, mode="rw", token=self.token)
info = client.Schema.CLIENT_INFO()
info.client_name = "GRR Monitor"
client.Set(client.Schema.CLIENT_INFO, info)
client.Close()
old_start_client = hunts.SampleHunt.StartClient
try:
hunts.SampleHunt.StartClient = self.Callback
self.called = []
foreman.AssignTasksToClient(client.urn)
self.assertEqual(len(self.called), 1)
self.assertEqual(self.called[0][1], client.urn)
# Clean up.
foreman.Set(foreman.Schema.RULES())
foreman.Close()
finally:
hunts.SampleHunt.StartClient = staticmethod(old_start_client)
def testStartClient(self):
hunt = hunts.GRRHunt.StartHunt("SampleHunt", token=self.token)
hunt.Run()
client = aff4.FACTORY.Open(self.client_id, token=self.token,
age=aff4.ALL_TIMES)
flows = list(client.GetValuesForAttribute(client.Schema.FLOW))
self.assertEqual(flows, [])
hunts.GRRHunt.StartClient(hunt.session_id, self.client_id)
test_lib.TestHuntHelper(None, [self.client_id], False, self.token)
client = aff4.FACTORY.Open(self.client_id, token=self.token,
age=aff4.ALL_TIMES)
flows = list(client.GetValuesForAttribute(client.Schema.FLOW))
# One flow should have been started.
self.assertEqual(len(flows), 1)
def testCallbackWithLimit(self):
self.assertRaises(RuntimeError, self.testCallback, 2000)
self.testCallback(100)
def testProcessing(self):
"""This tests running the hunt on some clients."""
# Set up 10 clients.
client_ids = self.SetupClients(10)
hunt = hunts.GRRHunt.StartHunt("SampleHunt", token=self.token)
regex_rule = rdfvalue.ForemanAttributeRegex(
attribute_name="GRR client",
attribute_regex="GRR")
hunt.AddRule([regex_rule])
hunt.Run()
foreman = aff4.FACTORY.Open("aff4:/foreman", mode="rw", token=self.token)
for client_id in client_ids:
foreman.AssignTasksToClient(client_id)
# Run the hunt.
client_mock = test_lib.SampleHuntMock()
test_lib.TestHuntHelper(client_mock, client_ids, False, self.token)
hunt_obj = aff4.FACTORY.Open(
hunt.session_id, mode="r", age=aff4.ALL_TIMES,
aff4_type="SampleHunt", token=self.token)
started = hunt_obj.GetValuesForAttribute(hunt_obj.Schema.CLIENTS)
finished = hunt_obj.GetValuesForAttribute(hunt_obj.Schema.FINISHED)
self.assertEqual(len(set(started)), 10)
self.assertEqual(len(set(finished)), 10)
# Clean up.
foreman.Set(foreman.Schema.RULES())
foreman.Close()
self.DeleteClients(10)
def testHangingClients(self):
"""This tests if the hunt completes when some clients hang or raise."""
# Set up 10 clients.
client_ids = self.SetupClients(10)
hunt = hunts.GRRHunt.StartHunt("SampleHunt", token=self.token)
regex_rule = rdfvalue.ForemanAttributeRegex(
attribute_name="GRR client",
attribute_regex="GRR")
hunt.AddRule([regex_rule])
hunt.Run()
foreman = aff4.FACTORY.Open("aff4:/foreman", mode="rw", token=self.token)
for client_id in client_ids:
foreman.AssignTasksToClient(client_id)
client_mock = test_lib.SampleHuntMock()
    # Just pass 8 clients to run; the other two went offline.
test_lib.TestHuntHelper(client_mock, client_ids[1:9], False, self.token)
hunt_obj = aff4.FACTORY.Open(hunt.session_id, mode="rw",
age=aff4.ALL_TIMES, token=self.token)
started = hunt_obj.GetValuesForAttribute(hunt_obj.Schema.CLIENTS)
finished = hunt_obj.GetValuesForAttribute(hunt_obj.Schema.FINISHED)
# We started the hunt on 10 clients.
self.assertEqual(len(set(started)), 10)
# But only 8 should have finished.
self.assertEqual(len(set(finished)), 8)
# Clean up.
foreman.Set(foreman.Schema.RULES())
foreman.Close()
self.DeleteClients(10)
def testPausingAndRestartingDoesNotStartHuntTwiceOnTheSameClient(self):
"""This tests if the hunt completes when some clients hang or raise."""
client_ids = self.SetupClients(10)
hunt = hunts.GRRHunt.StartHunt("SampleHunt", token=self.token)
regex_rule = rdfvalue.ForemanAttributeRegex(
attribute_name="GRR client",
attribute_regex="GRR")
hunt.AddRule([regex_rule])
hunt.Run()
foreman = aff4.FACTORY.Open("aff4:/foreman", mode="rw", token=self.token)
for client_id in client_ids:
num_tasks = foreman.AssignTasksToClient(client_id)
self.assertEqual(num_tasks, 1)
client_mock = test_lib.SampleHuntMock()
test_lib.TestHuntHelper(client_mock, client_ids, False, self.token)
# Pausing and running hunt: this leads to the fresh rules being written
# to Foreman.RULES.
hunt.Pause()
hunt.Run()
    # Recreate the foreman so that it picks up the updated list of rules.
foreman = aff4.FACTORY.Open("aff4:/foreman", mode="rw", token=self.token)
for client_id in client_ids:
num_tasks = foreman.AssignTasksToClient(client_id)
      # No tasks should be assigned as this hunt already ran on all the clients.
self.assertEqual(num_tasks, 0)
foreman.Set(foreman.Schema.RULES())
foreman.Close()
self.DeleteClients(10)
def testClientLimit(self):
"""This tests that we can limit hunts to a number of clients."""
# Set up 10 clients.
client_ids = self.SetupClients(10)
hunt = hunts.GRRHunt.StartHunt("SampleHunt", token=self.token,
client_limit=5)
regex_rule = rdfvalue.ForemanAttributeRegex(
attribute_name="GRR client",
attribute_regex="GRR")
hunt.AddRule([regex_rule])
hunt.Run()
foreman = aff4.FACTORY.Open("aff4:/foreman", mode="rw", token=self.token)
for client_id in client_ids:
foreman.AssignTasksToClient(client_id)
# Run the hunt.
client_mock = test_lib.SampleHuntMock()
test_lib.TestHuntHelper(client_mock, client_ids, False, self.token)
hunt_obj = aff4.FACTORY.Open(hunt.session_id, mode="rw",
age=aff4.ALL_TIMES, token=self.token)
started = hunt_obj.GetValuesForAttribute(hunt_obj.Schema.CLIENTS)
finished = hunt_obj.GetValuesForAttribute(hunt_obj.Schema.FINISHED)
# We limited here to 5 clients.
self.assertEqual(len(set(started)), 5)
self.assertEqual(len(set(finished)), 5)
# Clean up.
foreman.Set(foreman.Schema.RULES())
foreman.Close()
self.DeleteClients(10)
def testBrokenHunt(self):
"""This tests the behavior when a hunt raises an exception."""
# Set up 10 clients.
client_ids = self.SetupClients(10)
hunt = hunts.GRRHunt.StartHunt("BrokenSampleHunt", token=self.token)
regex_rule = rdfvalue.ForemanAttributeRegex(
attribute_name="GRR client",
attribute_regex="GRR")
hunt.AddRule([regex_rule])
hunt.Run()
foreman = aff4.FACTORY.Open("aff4:/foreman", mode="rw", token=self.token)
for client_id in client_ids:
foreman.AssignTasksToClient(client_id)
# Run the hunt.
client_mock = test_lib.SampleHuntMock()
test_lib.TestHuntHelper(client_mock, client_ids, False, self.token)
hunt_obj = aff4.FACTORY.Open(hunt.session_id, mode="rw",
age=aff4.ALL_TIMES, token=self.token)
started = hunt_obj.GetValuesForAttribute(hunt_obj.Schema.CLIENTS)
finished = hunt_obj.GetValuesForAttribute(hunt_obj.Schema.FINISHED)
errors = hunt_obj.GetValuesForAttribute(hunt_obj.Schema.ERRORS)
self.assertEqual(len(set(started)), 10)
# There should be errors for the five clients where the hunt raised.
self.assertEqual(len(set(errors)), 5)
# All of the clients that have the file should still finish eventually.
self.assertEqual(len(set(finished)), 5)
# Clean up.
foreman.Set(foreman.Schema.RULES())
foreman.Close()
self.DeleteClients(10)
def testHuntNotifications(self):
"""This tests the Hunt notification event."""
received_events = []
class Listener1(flow.EventListener): # pylint: disable=unused-variable
well_known_session_id = rdfvalue.SessionID("aff4:/flows/W:TestHuntDone")
EVENTS = ["TestHuntDone"]
@flow.EventHandler(auth_required=True)
def ProcessMessage(self, message=None, event=None):
_ = event
# Store the results for later inspection.
received_events.append(message)
# Set up 10 clients.
client_ids = self.SetupClients(10)
hunt = hunts.GRRHunt.StartHunt(
"BrokenSampleHunt", notification_event="TestHuntDone", token=self.token)
regex_rule = rdfvalue.ForemanAttributeRegex(
attribute_name="GRR client",
attribute_regex="GRR")
hunt.AddRule([regex_rule])
hunt.Run()
foreman = aff4.FACTORY.Open("aff4:/foreman", mode="rw", token=self.token)
for client_id in client_ids:
foreman.AssignTasksToClient(client_id)
# Run the hunt.
client_mock = test_lib.SampleHuntMock()
test_lib.TestHuntHelper(client_mock, client_ids, check_flow_errors=False,
token=self.token)
self.assertEqual(len(received_events), 5)
# Clean up.
foreman.Set(foreman.Schema.RULES())
foreman.Close()
self.DeleteClients(10)
class FlowTestLoader(test_lib.GRRTestLoader):
base_class = test_lib.FlowTestsBaseclass
def main(argv):
# Run the full test suite
test_lib.GrrTestProgram(argv=argv, testLoader=FlowTestLoader())
if __name__ == "__main__":
flags.StartMain(main)
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Object Store v1 API Library"""
import io
import logging
import os
import six
from six.moves import urllib
try:
from urllib.parse import urlparse # noqa
except ImportError:
from urlparse import urlparse # noqa
from openstackclient.api import api
from openstackclient.common import utils
class APIv1(api.BaseAPI):
"""Object Store v1 API"""
def __init__(self, **kwargs):
super(APIv1, self).__init__(**kwargs)
def container_create(
self,
container=None,
):
"""Create a container
:param string container:
name of container to create
:returns:
dict of returned headers
"""
response = self.create(urllib.parse.quote(container), method='PUT')
data = {
'account': self._find_account_id(),
'container': container,
'x-trans-id': response.headers.get('x-trans-id', None),
}
return data
def container_delete(
self,
container=None,
):
"""Delete a container
:param string container:
name of container to delete
"""
if container:
self.delete(urllib.parse.quote(container))
def container_list(
self,
all_data=False,
limit=None,
marker=None,
end_marker=None,
prefix=None,
**params
):
"""Get containers in an account
:param boolean all_data:
if True, return a full listing, else returns a max of
10000 listings
:param integer limit:
query return count limit
:param string marker:
query marker
:param string end_marker:
query end_marker
:param string prefix:
query prefix
:returns:
list of container names
"""
params['format'] = 'json'
if all_data:
data = listing = self.container_list(
limit=limit,
marker=marker,
end_marker=end_marker,
prefix=prefix,
**params
)
while listing:
marker = listing[-1]['name']
listing = self.container_list(
limit=limit,
marker=marker,
end_marker=end_marker,
prefix=prefix,
**params
)
if listing:
data.extend(listing)
return data
if limit:
params['limit'] = limit
if marker:
params['marker'] = marker
if end_marker:
params['end_marker'] = end_marker
if prefix:
params['prefix'] = prefix
return self.list('', **params)
def container_save(
self,
container=None,
):
"""Save all the content from a container
:param string container:
name of container to save
"""
objects = self.object_list(container=container)
for object in objects:
self.object_save(container=container, object=object['name'])
def container_set(
self,
container,
properties,
):
"""Set container properties
:param string container:
name of container to modify
:param dict properties:
properties to add or update for the container
"""
headers = self._set_properties(properties, 'X-Container-Meta-%s')
if headers:
self.create(urllib.parse.quote(container), headers=headers)
def container_show(
self,
container=None,
):
"""Get container details
:param string container:
name of container to show
:returns:
dict of returned headers
"""
response = self._request('HEAD', urllib.parse.quote(container))
data = {
'account': self._find_account_id(),
'container': container,
'object_count': response.headers.get(
'x-container-object-count',
None,
),
'bytes_used': response.headers.get('x-container-bytes-used', None)
}
if 'x-container-read' in response.headers:
data['read_acl'] = response.headers.get('x-container-read', None)
if 'x-container-write' in response.headers:
data['write_acl'] = response.headers.get('x-container-write', None)
if 'x-container-sync-to' in response.headers:
data['sync_to'] = response.headers.get('x-container-sync-to', None)
if 'x-container-sync-key' in response.headers:
data['sync_key'] = response.headers.get('x-container-sync-key',
None)
properties = self._get_properties(response.headers,
'x-container-meta-')
if properties:
data['properties'] = properties
return data
def container_unset(
self,
container,
properties,
):
"""Unset container properties
:param string container:
name of container to modify
:param dict properties:
properties to remove from the container
"""
headers = self._unset_properties(properties,
'X-Remove-Container-Meta-%s')
if headers:
self.create(urllib.parse.quote(container), headers=headers)
def object_create(
self,
container=None,
object=None,
):
"""Create an object inside a container
:param string container:
name of container to store object
:param string object:
local path to object
:returns:
dict of returned headers
"""
if container is None or object is None:
# TODO(dtroyer): What exception to raise here?
return {}
full_url = "%s/%s" % (urllib.parse.quote(container),
urllib.parse.quote(object))
with io.open(object, 'rb') as f:
response = self.create(
full_url,
method='PUT',
data=f,
)
data = {
'account': self._find_account_id(),
'container': container,
'object': object,
'x-trans-id': response.headers.get('X-Trans-Id', None),
'etag': response.headers.get('Etag', None),
}
return data
def object_delete(
self,
container=None,
object=None,
):
"""Delete an object from a container
:param string container:
name of container that stores object
:param string object:
name of object to delete
"""
if container is None or object is None:
return
self.delete("%s/%s" % (urllib.parse.quote(container),
urllib.parse.quote(object)))
def object_list(
self,
container=None,
all_data=False,
limit=None,
marker=None,
end_marker=None,
delimiter=None,
prefix=None,
**params
):
"""List objects in a container
:param string container:
container name to get a listing for
:param boolean all_data:
if True, return a full listing, else returns a max of
10000 listings
:param integer limit:
query return count limit
:param string marker:
query marker
:param string end_marker:
query end_marker
:param string prefix:
query prefix
:param string delimiter:
string to delimit the queries on
        :returns:
            list of objects
        """
        if container is None:
            return None
params['format'] = 'json'
if all_data:
data = listing = self.object_list(
container=container,
limit=limit,
marker=marker,
end_marker=end_marker,
prefix=prefix,
delimiter=delimiter,
**params
)
while listing:
if delimiter:
marker = listing[-1].get('name', listing[-1].get('subdir'))
else:
marker = listing[-1]['name']
listing = self.object_list(
container=container,
limit=limit,
marker=marker,
end_marker=end_marker,
prefix=prefix,
delimiter=delimiter,
**params
)
if listing:
data.extend(listing)
return data
if limit:
params['limit'] = limit
if marker:
params['marker'] = marker
if end_marker:
params['end_marker'] = end_marker
if prefix:
params['prefix'] = prefix
if delimiter:
params['delimiter'] = delimiter
return self.list(urllib.parse.quote(container), **params)
def object_save(
self,
container=None,
object=None,
file=None,
):
"""Save an object stored in a container
:param string container:
name of container that stores object
:param string object:
name of object to save
:param string file:
local name of object
"""
if not file:
file = object
response = self._request(
'GET',
"%s/%s" % (urllib.parse.quote(container),
urllib.parse.quote(object)),
stream=True,
)
if response.status_code == 200:
if not os.path.exists(os.path.dirname(file)):
if len(os.path.dirname(file)) > 0:
os.makedirs(os.path.dirname(file))
with open(file, 'wb') as f:
for chunk in response.iter_content():
f.write(chunk)
def object_set(
self,
container,
object,
properties,
):
"""Set object properties
:param string container:
container name for object to modify
:param string object:
name of object to modify
:param dict properties:
properties to add or update for the container
"""
headers = self._set_properties(properties, 'X-Object-Meta-%s')
if headers:
self.create("%s/%s" % (urllib.parse.quote(container),
urllib.parse.quote(object)),
headers=headers)
def object_unset(
self,
container,
object,
properties,
):
"""Unset object properties
:param string container:
container name for object to modify
:param string object:
name of object to modify
:param dict properties:
properties to remove from the object
"""
headers = self._unset_properties(properties, 'X-Remove-Object-Meta-%s')
if headers:
self.create("%s/%s" % (urllib.parse.quote(container),
urllib.parse.quote(object)),
headers=headers)
def object_show(
self,
container=None,
object=None,
):
"""Get object details
:param string container:
container name for object to get
:param string object:
name of object to get
:returns:
dict of object properties
"""
if container is None or object is None:
return {}
response = self._request('HEAD', "%s/%s" %
(urllib.parse.quote(container),
urllib.parse.quote(object)))
data = {
'account': self._find_account_id(),
'container': container,
'object': object,
'content-type': response.headers.get('content-type', None),
}
if 'content-length' in response.headers:
data['content-length'] = response.headers.get(
'content-length',
None,
)
if 'last-modified' in response.headers:
data['last-modified'] = response.headers.get('last-modified', None)
if 'etag' in response.headers:
data['etag'] = response.headers.get('etag', None)
if 'x-object-manifest' in response.headers:
data['x-object-manifest'] = response.headers.get(
'x-object-manifest',
None,
)
properties = self._get_properties(response.headers, 'x-object-meta-')
if properties:
data['properties'] = properties
return data
def account_set(
self,
properties,
):
"""Set account properties
:param dict properties:
properties to add or update for the account
"""
headers = self._set_properties(properties, 'X-Account-Meta-%s')
if headers:
# NOTE(stevemar): The URL (first argument) in this case is already
# set to the swift account endpoint, because that's how it's
# registered in the catalog
self.create("", headers=headers)
def account_show(self):
"""Show account details"""
# NOTE(stevemar): Just a HEAD request to the endpoint already in the
# catalog should be enough.
response = self._request("HEAD", "")
data = {}
properties = self._get_properties(response.headers, 'x-account-meta-')
if properties:
data['properties'] = properties
# Map containers, bytes and objects a bit nicer
data['Containers'] = response.headers.get('x-account-container-count',
None)
data['Objects'] = response.headers.get('x-account-object-count', None)
data['Bytes'] = response.headers.get('x-account-bytes-used', None)
# Add in Account info too
data['Account'] = self._find_account_id()
return data
def account_unset(
self,
properties,
):
"""Unset account properties
:param dict properties:
properties to remove from the account
"""
headers = self._unset_properties(properties,
'X-Remove-Account-Meta-%s')
if headers:
self.create("", headers=headers)
def _find_account_id(self):
url_parts = urlparse(self.endpoint)
return url_parts.path.split('/')[-1]
def _unset_properties(self, properties, header_tag):
# NOTE(stevemar): As per the API, the headers have to be in the form
# of "X-Remove-Account-Meta-Book: x". In the case where metadata is
# removed, we can set the value of the header to anything, so it's
# set to 'x'. In the case of a Container property we use:
# "X-Remove-Container-Meta-Book: x", and the same logic applies for
# Object properties
headers = {}
for k in properties:
header_name = header_tag % k
headers[header_name] = 'x'
return headers
def _set_properties(self, properties, header_tag):
# NOTE(stevemar): As per the API, the headers have to be in the form
# of "X-Account-Meta-Book: MobyDick". In the case of a Container
# property we use: "X-Add-Container-Meta-Book: MobyDick", and the same
# logic applies for Object properties
log = logging.getLogger(__name__ + '._set_properties')
headers = {}
        for k, v in six.iteritems(properties):
if not utils.is_ascii(k) or not utils.is_ascii(v):
log.error('Cannot set property %s to non-ascii value', k)
continue
header_name = header_tag % k
headers[header_name] = v
return headers
def _get_properties(self, headers, header_tag):
# Add in properties as a top level key, this is consistent with other
# OSC commands
properties = {}
for k, v in six.iteritems(headers):
if k.startswith(header_tag):
properties[k[len(header_tag):]] = v
return properties
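# Standalone sketch (not part of openstackclient): the _set_properties and
# _unset_properties helpers above simply map a properties dict onto prefixed
# Swift metadata headers, with removal headers carrying a dummy 'x' value.
# The container prefixes are used here purely as an illustration.
def _example_metadata_headers():
    properties = {'Book': 'MobyDick', 'Subject': 'Whaling'}
    set_headers = {'X-Container-Meta-%s' % k: v
                   for k, v in properties.items()}
    unset_headers = {'X-Remove-Container-Meta-%s' % k: 'x'
                     for k in properties}
    return set_headers, unset_headers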
|
|
"""
This file is part of pyS5p
https://github.com/rmvanhees/pys5p.git
The class LV2io provides read access to S5p Tropomi S5P_OFFL_L2 products
Copyright (c) 2018-2021 SRON - Netherlands Institute for Space Research
All Rights Reserved
License: BSD-3-Clause
"""
from datetime import datetime, timedelta
from pathlib import Path
import h5py
from netCDF4 import Dataset
import numpy as np
from .s5p_xarray import data_to_xr, h5_to_xr
# - global parameters ------------------------------
# - local functions --------------------------------
# - class definition -------------------------------
class LV2io():
"""
This class should offer all the necessary functionality to read Tropomi
S5P_OFFL_L2 products
Attributes
----------
fid : h5py.File
filename : string
science_product : bool
ground_pixel : int
scanline : int
Methods
-------
close()
Close resources.
get_attr(attr_name, ds_name=None)
Obtain value of an HDF5 file attribute or dataset attribute.
get_orbit()
Returns reference orbit number
get_algorithm_version()
Returns version of the level-2 algorithm.
get_processor_version()
Returns version of the L12 processor used to generate this product.
get_product_version()
Returns version of the level-2 product
get_coverage_time()
Returns start and end of the measurement coverage time.
get_creation_time()
Returns creation date/time of the level-2 product.
get_ref_time()
Returns reference start time of measurements.
get_delta_time()
Returns offset from the reference start time of measurement.
get_geo_data(geo_dsets=None)
Returns data of selected datasets from the GEOLOCATIONS group.
get_geo_bounds(extent=None, data_sel=None)
Returns bounds of latitude/longitude as a mesh for plotting.
get_dataset(name, data_sel=None, fill_as_nan=True)
Read level-2 dataset from PRODUCT group.
get_data_as_s5pmsm(name, data_sel=None, fill_as_nan=True, mol_m2=False)
Read dataset from group PRODUCT/target_product group.
Notes
-----
    The Python h5py module can read the operational netCDF4 products without
    any problems; however, the SRON science products contain incompatible
    attributes. This should be fixed when more up-to-date netCDF software is
    used to generate the products. Currently, the Python netCDF4 module is
used to read the science products.
Examples
--------
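    A minimal usage sketch; the file name, dataset name and extent below are
    illustrative placeholders, not taken from an actual product.
    >>> with LV2io('S5P_OFFL_L2__NO2____orbit.nc') as lv2:
    ...     orbit = lv2.orbit
    ...     no2 = lv2.get_dataset('nitrogendioxide_tropospheric_column')
    ...     data_sel, geo = lv2.get_geo_bounds(extent=[4.0, 7.5, 50.5, 53.7])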
"""
def __init__(self, lv2_product: str):
"""
Initialize access to an S5P_L2 product
Parameters
----------
lv2_product : string
full path to S5P Tropomi level 2 product
"""
if not Path(lv2_product).is_file():
raise FileNotFoundError(f'{lv2_product} does not exist')
# initialize class-attributes
self.filename = lv2_product
# open LV2 product as HDF5 file
if self.science_product:
self.fid = Dataset(lv2_product, "r", format="NETCDF4")
self.ground_pixel = self.fid['/instrument/ground_pixel'][:].max()
self.ground_pixel += 1
self.scanline = self.fid['/instrument/scanline'][:].max()
self.scanline += 1
            # alternative: set a 'sparse' flag here instead of raising
if self.fid['/instrument/scanline'].size % self.ground_pixel != 0:
raise ValueError('not all scanlines are complete')
else:
self.fid = h5py.File(lv2_product, "r")
self.ground_pixel = self.fid['/PRODUCT/ground_pixel'].size
self.scanline = self.fid['/PRODUCT/scanline'].size
def __repr__(self):
class_name = type(self).__name__
return f'{class_name}({self.filename!r})'
def __iter__(self):
for attr in sorted(self.__dict__):
if not attr.startswith("__"):
yield attr
def __enter__(self):
"""
method called to initiate the context manager
"""
return self
def __exit__(self, exc_type, exc_value, traceback):
"""
method called when exiting the context manager
"""
self.close()
return False # any exception is raised by the with statement.
def close(self):
"""
Close the product.
"""
if self.fid is not None:
self.fid.close()
# ----- Class properties --------------------
@property
def science_product(self) -> bool:
"""
Returns if the product is a science product
"""
science_inst = b'SRON Netherlands Institute for Space Research'
res = False
with h5py.File(self.filename) as fid:
if 'institution' in fid.attrs \
and fid.attrs['institution'] == science_inst:
res = True
return res
@property
def orbit(self) -> int:
"""
Returns reference orbit number
"""
if self.science_product:
return int(self.__nc_attr('orbit', 'l1b_file'))
return self.__h5_attr('orbit', None)[0]
@property
def algorithm_version(self) -> str:
"""
Returns version of the level 2 algorithm
"""
res = self.get_attr('algorithm_version')
return res if res is not None else self.get_attr('version')
@property
def processor_version(self) -> str:
"""
Returns version of the level 2 processor
"""
res = self.get_attr('processor_version')
return res if res is not None else self.get_attr('version')
@property
def product_version(self) -> str:
"""
Returns version of the level 2 product
"""
res = self.get_attr('product_version')
return res if res is not None else self.get_attr('version')
@property
def coverage_time(self) -> tuple:
"""
Returns start and end of the measurement coverage time
"""
return (self.get_attr('time_coverage_start'),
self.get_attr('time_coverage_end'))
@property
def creation_time(self) -> str:
"""
Returns creation date/time of the level 2 product
"""
return self.get_attr('date_created')
# ----- Attributes --------------------
def __h5_attr(self, attr_name, ds_name):
"""
read attributes from operational products using hdf5
"""
if ds_name is not None:
dset = self.fid[f'/PRODUCT/{ds_name}']
if attr_name not in dset.attrs.keys():
return None
attr = dset.attrs[attr_name]
else:
if attr_name not in self.fid.attrs:
return None
attr = self.fid.attrs[attr_name]
if isinstance(attr, bytes):
return attr.decode('ascii')
return attr
def __nc_attr(self, attr_name, ds_name):
"""
read attributes from science products using netCDF4
"""
if ds_name is not None:
for grp_name in ['target_product', 'side_product', 'instrument']:
if grp_name not in self.fid.groups:
continue
if ds_name not in self.fid[grp_name].variables:
continue
dset = self.fid[f'/{grp_name}/{ds_name}']
if attr_name in dset.ncattrs():
return dset.getncattr(attr_name)
return None
if attr_name not in self.fid.ncattrs():
return None
return self.fid.getncattr(attr_name)
def get_attr(self, attr_name, ds_name=None):
"""
Obtain value of an HDF5 file attribute or dataset attribute
Parameters
----------
attr_name : string
name of the attribute
ds_name : string (optional)
name of dataset, default is to read the product attributes
"""
if self.science_product:
return self.__nc_attr(attr_name, ds_name)
return self.__h5_attr(attr_name, ds_name)
# ----- Time information ---------------
@property
def ref_time(self) -> datetime:
"""
Returns reference start time of measurements
"""
if self.science_product:
return None
return (datetime(2010, 1, 1, 0, 0, 0)
+ timedelta(seconds=int(self.fid['/PRODUCT/time'][0])))
def get_time(self):
"""
Returns start time of measurement per scan-line
"""
if self.science_product:
buff = self.get_dataset('time')[::self.ground_pixel, :]
return np.array([datetime(*x) for x in buff])
buff = self.fid['/PRODUCT/delta_time'][0, :]
return np.array([self.ref_time + timedelta(seconds=x / 1e3)
for x in buff])
# ----- Geolocation --------------------
def __h5_geo_data(self, geo_dsets):
"""
        read geolocation datasets from operational products using HDF5
"""
res = {}
if geo_dsets is None:
geo_dsets = 'latitude,longitude'
for key in geo_dsets.split(','):
for grp_name in ['/PRODUCT', '/PRODUCT/SUPPORT_DATA/GEOLOCATIONS']:
if key in self.fid[grp_name]:
res[key] = np.squeeze(
self.fid[f'{grp_name}/{key}'])
continue
return res
def __nc_geo_data(self, geo_dsets):
"""
        read geolocation datasets from science products using netCDF4
"""
res = {}
if geo_dsets is None:
geo_dsets = 'latitude_center,longitude_center'
for key in geo_dsets.split(','):
if key in self.fid['/instrument'].variables.keys():
ds_name = f'/instrument/{key}'
res[key] = self.fid[ds_name][:].reshape(
self.scanline, self.ground_pixel)
return res
def get_geo_data(self, geo_dsets=None):
"""
Returns data of selected datasets from the GEOLOCATIONS group
Parameters
----------
        geo_dsets : string
Name(s) of datasets, comma separated
Default:
* operational: 'latitude,longitude'
* science: 'latitude_center,longitude_center'
Returns
-------
        out : dictionary with arrays
arrays of selected datasets
"""
if self.science_product:
return self.__nc_geo_data(geo_dsets)
return self.__h5_geo_data(geo_dsets)
# ----- Footprints --------------------
def __h5_geo_bounds(self, extent, data_sel):
"""
read bounds of latitude/longitude from operational products using HDF5
"""
indx = None
if extent is not None:
if len(extent) != 4:
raise ValueError('parameter extent must have 4 elements')
lats = self.fid['/PRODUCT/latitude'][0, ...]
lons = self.fid['/PRODUCT/longitude'][0, ...]
indx = ((lons >= extent[0]) & (lons <= extent[1])
& (lats >= extent[2]) & (lats <= extent[3])).nonzero()
data_sel = np.s_[indx[0].min():indx[0].max(),
indx[1].min():indx[1].max()]
gid = self.fid['/PRODUCT/SUPPORT_DATA/GEOLOCATIONS']
if data_sel is None:
lat_bounds = gid['latitude_bounds'][0, ...]
lon_bounds = gid['longitude_bounds'][0, ...]
else:
data_sel0 = (0,) + data_sel + (slice(None),)
lat_bounds = gid['latitude_bounds'][data_sel0]
lon_bounds = gid['longitude_bounds'][data_sel0]
return (data_sel, lon_bounds, lat_bounds)
def __nc_geo_bounds(self, extent, data_sel):
"""
read bounds of latitude/longitude from science products using netCDF4
"""
indx = None
if extent is not None:
if len(extent) != 4:
raise ValueError('parameter extent must have 4 elements')
lats = self.fid['/instrument/latitude_center'][:].reshape(
self.scanline, self.ground_pixel)
lons = self.fid['/instrument/longitude_center'][:].reshape(
self.scanline, self.ground_pixel)
indx = ((lons >= extent[0]) & (lons <= extent[1])
& (lats >= extent[2]) & (lats <= extent[3])).nonzero()
data_sel = np.s_[indx[0].min():indx[0].max(),
indx[1].min():indx[1].max()]
gid = self.fid['/instrument']
lat_bounds = gid['latitude_corners'][:].data.reshape(
self.scanline, self.ground_pixel, 4)
lon_bounds = gid['longitude_corners'][:].data.reshape(
self.scanline, self.ground_pixel, 4)
if data_sel is not None:
lat_bounds = lat_bounds[data_sel + (slice(None),)]
lon_bounds = lon_bounds[data_sel + (slice(None),)]
return (data_sel, lon_bounds, lat_bounds)
def get_geo_bounds(self, extent=None, data_sel=None):
"""
Returns bounds of latitude/longitude as a mesh for plotting
Parameters
----------
extent : list
select data to cover a region with geolocation defined by:
lon_min, lon_max, lat_min, lat_max and return numpy slice
data_sel : numpy slice
a 3-dimensional numpy slice: time, scan_line, ground_pixel
Note 'data_sel' will be overwritten when 'extent' is defined
Returns
-------
data_sel : numpy slice
slice of data which covers geolocation defined by extent. Only
provided if extent is not None.
out : dictionary
with numpy arrays for latitude and longitude
"""
if self.science_product:
res = self.__nc_geo_bounds(extent, data_sel)
else:
res = self.__h5_geo_bounds(extent, data_sel)
data_sel, lon_bounds, lat_bounds = res
res = {}
_sz = lon_bounds.shape
res['longitude'] = np.empty((_sz[0]+1, _sz[1]+1), dtype=float)
res['longitude'][:-1, :-1] = lon_bounds[:, :, 0]
res['longitude'][-1, :-1] = lon_bounds[-1, :, 1]
res['longitude'][:-1, -1] = lon_bounds[:, -1, 1]
res['longitude'][-1, -1] = lon_bounds[-1, -1, 2]
res['latitude'] = np.empty((_sz[0]+1, _sz[1]+1), dtype=float)
res['latitude'][:-1, :-1] = lat_bounds[:, :, 0]
res['latitude'][-1, :-1] = lat_bounds[-1, :, 1]
res['latitude'][:-1, -1] = lat_bounds[:, -1, 1]
res['latitude'][-1, -1] = lat_bounds[-1, -1, 2]
if extent is None:
return res
return data_sel, res
# ----- Datasets (numpy) --------------------
def __h5_dataset(self, name, data_sel, fill_as_nan):
"""
read dataset from operational products using HDF5
"""
fillvalue = float.fromhex('0x1.ep+122')
if name not in self.fid['/PRODUCT']:
            raise ValueError(f'dataset {name} not found')
dset = self.fid[f'/PRODUCT/{name}']
if data_sel is None:
if dset.dtype == np.float32:
res = dset.astype(float)[0, ...]
else:
res = dset[0, ...]
else:
if dset.dtype == np.float32:
res = dset.astype(float)[(0,) + data_sel]
else:
res = dset[(0,) + data_sel]
if fill_as_nan and dset.attrs['_FillValue'] == fillvalue:
res[(res == fillvalue)] = np.nan
return res
def __nc_dataset(self, name, data_sel, fill_as_nan):
"""
read dataset from science products using netCDF4
"""
if name in self.fid['/target_product'].variables.keys():
group = '/target_product'
elif name in self.fid['/instrument'].variables.keys():
group = '/instrument'
else:
            raise ValueError(f'dataset {name} not found')
dset = self.fid[f'{group}/{name}']
if dset.size == self.scanline * self.ground_pixel:
res = dset[:].reshape(self.scanline, self.ground_pixel)
else:
res = dset[:]
if data_sel is not None:
res = res[data_sel]
if fill_as_nan:
return res.filled(np.nan)
return res.data
def get_dataset(self, name, data_sel=None, fill_as_nan=True):
"""
Read level 2 dataset from PRODUCT group
Parameters
----------
name : string
name of dataset with level 2 data
data_sel : numpy slice
a 3-dimensional numpy slice: time, scan_line, ground_pixel
fill_as_nan : boolean
Replace (float) FillValues with Nan's, when True
Returns
-------
out : array
"""
if self.science_product:
return self.__nc_dataset(name, data_sel, fill_as_nan)
return self.__h5_dataset(name, data_sel, fill_as_nan)
# ----- Dataset (xarray) --------------------
def __h5_data_as_xds(self, name, data_sel, mol_m2):
"""
Read dataset from group target_product using HDF5
Input: operational product
Return: xarray.Dataset
"""
if name not in self.fid['/PRODUCT']:
            raise ValueError(f'dataset {name} not found')
dset = self.fid[f'/PRODUCT/{name}']
# ToDo handle parameter mol_m2
return h5_to_xr(dset, (0,) + data_sel).squeeze()
def __nc_data_as_xds(self, name, data_sel):
"""
Read dataset from group PRODUCT using netCDF4
Input: science product
Return: xarray.DataArray
"""
if name in self.fid['/target_product'].variables.keys():
group = '/target_product'
elif name in self.fid['/instrument'].variables.keys():
group = '/instrument'
else:
            raise ValueError(f'dataset {name} not found')
return data_to_xr(self.get_dataset(name, data_sel),
dims=['scanline', 'ground_pixel'], name=name,
long_name=self.get_attr('long_name', name),
units=self.get_attr('units', name))
def get_data_as_xds(self, name, data_sel=None, mol_m2=False):
"""
Read dataset from group PRODUCT/target_product group
Parameters
----------
name : str
name of dataset with level 2 data
data_sel : numpy slice
a 3-dimensional numpy slice: time, scan_line, ground_pixel
mol_m2 : bool
            Leave units as mol per m^2 or convert units to molecules per cm^2
Returns
-------
out : xarray.DataArray
"""
if self.science_product:
return self.__nc_data_as_xds(name, data_sel)
return self.__h5_data_as_xds(name, data_sel, mol_m2)
|
|
test_sub_dir = "test_data/1019436/session_1"
def test_run_func_motion_correct_no_slice_time():
import os
import nibabel as nb
import pkg_resources as p
from qap.functional_preproc import run_func_motion_correct
if "func_motion_correct" in os.listdir(os.getcwd()):
err = "\n[!] The output folder for this workflow already exists.\n"
raise Exception(err)
func_scan = p.resource_filename("qap", os.path.join(test_sub_dir, \
"rest_1", \
"functional_scan", \
"rest.nii.gz"))
ref_out = p.resource_filename("qap", os.path.join(test_sub_dir, \
"rest_1", \
"func_motion_correct", \
"rest_calc_tshift_resample_volreg.nii.gz"))
# run the workflow
output = run_func_motion_correct(func_scan, 0, "End", False)
    # compare the workflow output against the reference output
ref_out_data = nb.load(ref_out).get_data()
output_data = nb.load(output).get_data()
os.system("rm -R func_motion_correct")
# create a vector of True and False values
bool_vector = ref_out_data == output_data
assert bool_vector.all()
def test_run_func_motion_correct_slice_time():
import os
import nibabel as nb
import pkg_resources as p
from qap.functional_preproc import run_func_motion_correct
if "func_motion_correct" in os.listdir(os.getcwd()):
err = "\n[!] The output folder for this workflow already exists.\n"
raise Exception(err)
func_scan = p.resource_filename("qap", os.path.join(test_sub_dir, \
"rest_1", \
"functional_scan", \
"rest.nii.gz"))
''' NEED A SLICE TIME CORRECTED VERSION OF THIS!!!! NOT COMPLETE '''
ref_out = p.resource_filename("qap", os.path.join(test_sub_dir, \
"rest_1", \
"func_motion_correct", \
"rest_calc_tshift_resample_volreg.nii.gz"))
# run the workflow
output = run_func_motion_correct(func_scan, 0, "End", True)
    # compare the workflow output against the reference output
ref_out_data = nb.load(ref_out).get_data()
output_data = nb.load(output).get_data()
os.system("rm -R func_motion_correct")
# create a vector of True and False values
bool_vector = ref_out_data == output_data
assert bool_vector.all()
def test_run_functional_brain_mask_3dautomask():
import os
import nibabel as nb
import pkg_resources as p
from qap.functional_preproc import run_functional_brain_mask
if "functional_brain_mask" in os.listdir(os.getcwd()):
err = "\n[!] The output folder for this workflow already exists.\n"
raise Exception(err)
func_motion = p.resource_filename("qap", os.path.join(test_sub_dir, \
"rest_1", \
"func_motion_correct", \
"rest_calc_tshift_resample_" \
"volreg.nii.gz"))
ref_out = p.resource_filename("qap", os.path.join(test_sub_dir, \
"rest_1", \
"functional_brain_mask", \
"rest_calc_tshift_resample_volreg_" \
"mask.nii.gz"))
# run the workflow
output = run_functional_brain_mask(func_motion, False)
    # compare the workflow output against the reference output
ref_out_data = nb.load(ref_out).get_data()
output_data = nb.load(output).get_data()
os.system("rm -R functional_brain_mask")
# create a vector of True and False values
bool_vector = ref_out_data == output_data
assert bool_vector.all()
def test_run_functional_brain_mask_BET():
import os
import nibabel as nb
import pkg_resources as p
from qap.functional_preproc import run_functional_brain_mask
if "functional_brain_mask" in os.listdir(os.getcwd()):
err = "\n[!] The output folder for this workflow already exists.\n"
raise Exception(err)
func_motion = p.resource_filename("qap", os.path.join(test_sub_dir, \
"rest_1", \
"func_motion_correct", \
"rest_calc_tshift_resample_" \
"volreg.nii.gz"))
ref_out = p.resource_filename("qap", os.path.join(test_sub_dir, \
"rest_1", \
"functional_brain_mask", \
"rest_calc_tshift_resample_volreg_" \
"mask_BET.nii.gz"))
# run the workflow
output = run_functional_brain_mask(func_motion, True)
    # compare the workflow output against the reference output
ref_out_data = nb.load(ref_out).get_data()
output_data = nb.load(output).get_data()
os.system("rm -R functional_brain_mask")
# create a vector of True and False values
bool_vector = ref_out_data == output_data
assert bool_vector.all()
def test_run_mean_functional():
import os
import nibabel as nb
import pkg_resources as p
from qap.functional_preproc import run_mean_functional
if "mean_functional" in os.listdir(os.getcwd()):
err = "\n[!] The output folder for this workflow already exists.\n"
raise Exception(err)
func_motion = p.resource_filename("qap", os.path.join(test_sub_dir, \
"rest_1", \
"func_motion_correct", \
"rest_calc_tshift_resample_" \
"volreg.nii.gz"))
ref_out = p.resource_filename("qap", os.path.join(test_sub_dir, \
"rest_1", \
"mean_functional", \
"rest_calc_tshift_resample_volreg_" \
"tstat.nii.gz"))
# run the workflow
output = run_mean_functional(func_motion)
    # compare the workflow output against the reference output
ref_out_data = nb.load(ref_out).get_data()
output_data = nb.load(output).get_data()
os.system("rm -R mean_functional")
# create a vector of True and False values
bool_vector = ref_out_data == output_data
assert bool_vector.all()
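# Hedged refactoring sketch (not part of the original tests): every test above
# repeats the same load/compare/cleanup steps, which could be factored into a
# helper along these lines. The helper name is made up here.
def _compare_to_reference(output_path, ref_path, workdir):
    import os
    import nibabel as nb
    ref_data = nb.load(ref_path).get_data()
    out_data = nb.load(output_path).get_data()
    os.system("rm -R %s" % workdir)
    # exact voxel-wise equality, as asserted in the tests above
    return (ref_data == out_data).all()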
|
|
THOST_FTDC_EXP_Normal = '0'
THOST_FTDC_EXP_GenOrderByTrade = '1'
THOST_FTDC_ICT_EID = '0'
THOST_FTDC_ICT_IDCard = '1'
THOST_FTDC_ICT_OfficerIDCard = '2'
THOST_FTDC_ICT_PoliceIDCard = '3'
THOST_FTDC_ICT_SoldierIDCard = '4'
THOST_FTDC_ICT_HouseholdRegister = '5'
THOST_FTDC_ICT_Passport = '6'
THOST_FTDC_ICT_TaiwanCompatriotIDCard = '7'
THOST_FTDC_ICT_HomeComingCard = '8'
THOST_FTDC_ICT_LicenseNo = '9'
THOST_FTDC_ICT_TaxNo = 'A'
THOST_FTDC_ICT_HMMainlandTravelPermit = 'B'
THOST_FTDC_ICT_TwMainlandTravelPermit = 'C'
THOST_FTDC_ICT_DrivingLicense = 'D'
THOST_FTDC_ICT_SocialID = 'F'
THOST_FTDC_ICT_LocalID = 'G'
THOST_FTDC_ICT_BusinessRegistration = 'H'
THOST_FTDC_ICT_HKMCIDCard = 'I'
THOST_FTDC_ICT_AccountsPermits = 'J'
THOST_FTDC_ICT_OtherCard = 'x'
THOST_FTDC_IR_All = '1'
THOST_FTDC_IR_Group = '2'
THOST_FTDC_IR_Single = '3'
THOST_FTDC_DR_All = '1'
THOST_FTDC_DR_Group = '2'
THOST_FTDC_DR_Single = '3'
THOST_FTDC_DS_Asynchronous = '1'
THOST_FTDC_DS_Synchronizing = '2'
THOST_FTDC_DS_Synchronized = '3'
THOST_FTDC_BDS_Synchronized = '1'
THOST_FTDC_BDS_Synchronizing = '2'
THOST_FTDC_ECS_NoConnection = '1'
THOST_FTDC_ECS_QryInstrumentSent = '2'
THOST_FTDC_ECS_GotInformation = '9'
THOST_FTDC_TCS_NotConnected = '1'
THOST_FTDC_TCS_Connected = '2'
THOST_FTDC_TCS_QryInstrumentSent = '3'
THOST_FTDC_TCS_SubPrivateFlow = '4'
THOST_FTDC_FC_DataAsync = '1'
THOST_FTDC_FC_ForceUserLogout = '2'
THOST_FTDC_FC_UserPasswordUpdate = '3'
THOST_FTDC_FC_BrokerPasswordUpdate = '4'
THOST_FTDC_FC_InvestorPasswordUpdate = '5'
THOST_FTDC_FC_OrderInsert = '6'
THOST_FTDC_FC_OrderAction = '7'
THOST_FTDC_FC_SyncSystemData = '8'
THOST_FTDC_FC_SyncBrokerData = '9'
THOST_FTDC_FC_BachSyncBrokerData = 'A'
THOST_FTDC_FC_SuperQuery = 'B'
THOST_FTDC_FC_ParkedOrderInsert = 'C'
THOST_FTDC_FC_ParkedOrderAction = 'D'
THOST_FTDC_FC_SyncOTP = 'E'
THOST_FTDC_FC_DeleteOrder = 'F'
THOST_FTDC_BFC_ForceUserLogout = '1'
THOST_FTDC_BFC_UserPasswordUpdate = '2'
THOST_FTDC_BFC_SyncBrokerData = '3'
THOST_FTDC_BFC_BachSyncBrokerData = '4'
THOST_FTDC_BFC_OrderInsert = '5'
THOST_FTDC_BFC_OrderAction = '6'
THOST_FTDC_BFC_AllQuery = '7'
THOST_FTDC_BFC_log = 'a'
THOST_FTDC_BFC_BaseQry = 'b'
THOST_FTDC_BFC_TradeQry = 'c'
THOST_FTDC_BFC_Trade = 'd'
THOST_FTDC_BFC_Virement = 'e'
THOST_FTDC_BFC_Risk = 'f'
THOST_FTDC_BFC_Session = 'g'
THOST_FTDC_BFC_RiskNoticeCtl = 'h'
THOST_FTDC_BFC_RiskNotice = 'i'
THOST_FTDC_BFC_BrokerDeposit = 'j'
THOST_FTDC_BFC_QueryFund = 'k'
THOST_FTDC_BFC_QueryOrder = 'l'
THOST_FTDC_BFC_QueryTrade = 'm'
THOST_FTDC_BFC_QueryPosition = 'n'
THOST_FTDC_BFC_QueryMarketData = 'o'
THOST_FTDC_BFC_QueryUserEvent = 'p'
THOST_FTDC_BFC_QueryRiskNotify = 'q'
THOST_FTDC_BFC_QueryFundChange = 'r'
THOST_FTDC_BFC_QueryInvestor = 's'
THOST_FTDC_BFC_QueryTradingCode = 't'
THOST_FTDC_BFC_ForceClose = 'u'
THOST_FTDC_BFC_PressTest = 'v'
THOST_FTDC_BFC_RemainCalc = 'w'
THOST_FTDC_BFC_NetPositionInd = 'x'
THOST_FTDC_BFC_RiskPredict = 'y'
THOST_FTDC_BFC_DataExport = 'z'
THOST_FTDC_BFC_RiskTargetSetup = 'A'
THOST_FTDC_BFC_MarketDataWarn = 'B'
THOST_FTDC_BFC_QryBizNotice = 'C'
THOST_FTDC_BFC_CfgBizNotice = 'D'
THOST_FTDC_BFC_SyncOTP = 'E'
THOST_FTDC_BFC_SendBizNotice = 'F'
THOST_FTDC_BFC_CfgRiskLevelStd = 'G'
THOST_FTDC_BFC_TbCommand = 'H'
THOST_FTDC_BFC_DeleteOrder = 'J'
THOST_FTDC_BFC_ParkedOrderInsert = 'K'
THOST_FTDC_BFC_ParkedOrderAction = 'L'
THOST_FTDC_BFC_ExecOrderNoCheck = 'M'
THOST_FTDC_OAS_Submitted = 'a'
THOST_FTDC_OAS_Accepted = 'b'
THOST_FTDC_OAS_Rejected = 'c'
THOST_FTDC_OST_AllTraded = '0'
THOST_FTDC_OST_PartTradedQueueing = '1'
THOST_FTDC_OST_PartTradedNotQueueing = '2'
THOST_FTDC_OST_NoTradeQueueing = '3'
THOST_FTDC_OST_NoTradeNotQueueing = '4'
THOST_FTDC_OST_Canceled = '5'
THOST_FTDC_OST_Unknown = 'a'
THOST_FTDC_OST_NotTouched = 'b'
THOST_FTDC_OST_Touched = 'c'
THOST_FTDC_OSS_InsertSubmitted = '0'
THOST_FTDC_OSS_CancelSubmitted = '1'
THOST_FTDC_OSS_ModifySubmitted = '2'
THOST_FTDC_OSS_Accepted = '3'
THOST_FTDC_OSS_InsertRejected = '4'
THOST_FTDC_OSS_CancelRejected = '5'
THOST_FTDC_OSS_ModifyRejected = '6'
THOST_FTDC_PSD_Today = '1'
THOST_FTDC_PSD_History = '2'
THOST_FTDC_PDT_UseHistory = '1'
THOST_FTDC_PDT_NoUseHistory = '2'
THOST_FTDC_ER_Broker = '1'
THOST_FTDC_ER_Host = '2'
THOST_FTDC_ER_Maker = '3'
THOST_FTDC_PC_Futures = '1'
THOST_FTDC_PC_Options = '2'
THOST_FTDC_PC_Combination = '3'
THOST_FTDC_PC_Spot = '4'
THOST_FTDC_PC_EFP = '5'
THOST_FTDC_PC_SpotOption = '6'
THOST_FTDC_IP_NotStart = '0'
THOST_FTDC_IP_Started = '1'
THOST_FTDC_IP_Pause = '2'
THOST_FTDC_IP_Expired = '3'
THOST_FTDC_D_Buy = '0'
THOST_FTDC_D_Sell = '1'
THOST_FTDC_PT_Net = '1'
THOST_FTDC_PT_Gross = '2'
THOST_FTDC_PD_Net = '1'
THOST_FTDC_PD_Long = '2'
THOST_FTDC_PD_Short = '3'
THOST_FTDC_SS_NonActive = '1'
THOST_FTDC_SS_Startup = '2'
THOST_FTDC_SS_Operating = '3'
THOST_FTDC_SS_Settlement = '4'
THOST_FTDC_SS_SettlementFinished = '5'
THOST_FTDC_RA_Trade = '0'
THOST_FTDC_RA_Settlement = '1'
THOST_FTDC_HF_Speculation = '1'
THOST_FTDC_HF_Arbitrage = '2'
THOST_FTDC_HF_Hedge = '3'
THOST_FTDC_HF_MarketMaker = '5'
THOST_FTDC_BHF_Speculation = '1'
THOST_FTDC_BHF_Arbitrage = '2'
THOST_FTDC_BHF_Hedge = '3'
THOST_FTDC_CIDT_Speculation = '1'
THOST_FTDC_CIDT_Arbitrage = '2'
THOST_FTDC_CIDT_Hedge = '3'
THOST_FTDC_CIDT_MarketMaker = '5'
THOST_FTDC_OPT_AnyPrice = '1'
THOST_FTDC_OPT_LimitPrice = '2'
THOST_FTDC_OPT_BestPrice = '3'
THOST_FTDC_OPT_LastPrice = '4'
THOST_FTDC_OPT_LastPricePlusOneTicks = '5'
THOST_FTDC_OPT_LastPricePlusTwoTicks = '6'
THOST_FTDC_OPT_LastPricePlusThreeTicks = '7'
THOST_FTDC_OPT_AskPrice1 = '8'
THOST_FTDC_OPT_AskPrice1PlusOneTicks = '9'
THOST_FTDC_OPT_AskPrice1PlusTwoTicks = 'A'
THOST_FTDC_OPT_AskPrice1PlusThreeTicks = 'B'
THOST_FTDC_OPT_BidPrice1 = 'C'
THOST_FTDC_OPT_BidPrice1PlusOneTicks = 'D'
THOST_FTDC_OPT_BidPrice1PlusTwoTicks = 'E'
THOST_FTDC_OPT_BidPrice1PlusThreeTicks = 'F'
THOST_FTDC_OPT_FiveLevelPrice = 'G'
THOST_FTDC_OF_Open = '0'
THOST_FTDC_OF_Close = '1'
THOST_FTDC_OF_ForceClose = '2'
THOST_FTDC_OF_CloseToday = '3'
THOST_FTDC_OF_CloseYesterday = '4'
THOST_FTDC_OF_ForceOff = '5'
THOST_FTDC_OF_LocalForceClose = '6'
THOST_FTDC_FCC_NotForceClose = '0'
THOST_FTDC_FCC_LackDeposit = '1'
THOST_FTDC_FCC_ClientOverPositionLimit = '2'
THOST_FTDC_FCC_MemberOverPositionLimit = '3'
THOST_FTDC_FCC_NotMultiple = '4'
THOST_FTDC_FCC_Violation = '5'
THOST_FTDC_FCC_Other = '6'
THOST_FTDC_FCC_PersonDeliv = '7'
THOST_FTDC_ORDT_Normal = '0'
THOST_FTDC_ORDT_DeriveFromQuote = '1'
THOST_FTDC_ORDT_DeriveFromCombination = '2'
THOST_FTDC_ORDT_Combination = '3'
THOST_FTDC_ORDT_ConditionalOrder = '4'
THOST_FTDC_ORDT_Swap = '5'
THOST_FTDC_TC_IOC = '1'
THOST_FTDC_TC_GFS = '2'
THOST_FTDC_TC_GFD = '3'
THOST_FTDC_TC_GTD = '4'
THOST_FTDC_TC_GTC = '5'
THOST_FTDC_TC_GFA = '6'
THOST_FTDC_VC_AV = '1'
THOST_FTDC_VC_MV = '2'
THOST_FTDC_VC_CV = '3'
THOST_FTDC_CC_Immediately = '1'
THOST_FTDC_CC_Touch = '2'
THOST_FTDC_CC_TouchProfit = '3'
THOST_FTDC_CC_ParkedOrder = '4'
THOST_FTDC_CC_LastPriceGreaterThanStopPrice = '5'
THOST_FTDC_CC_LastPriceGreaterEqualStopPrice = '6'
THOST_FTDC_CC_LastPriceLesserThanStopPrice = '7'
THOST_FTDC_CC_LastPriceLesserEqualStopPrice = '8'
THOST_FTDC_CC_AskPriceGreaterThanStopPrice = '9'
THOST_FTDC_CC_AskPriceGreaterEqualStopPrice = 'A'
THOST_FTDC_CC_AskPriceLesserThanStopPrice = 'B'
THOST_FTDC_CC_AskPriceLesserEqualStopPrice = 'C'
THOST_FTDC_CC_BidPriceGreaterThanStopPrice = 'D'
THOST_FTDC_CC_BidPriceGreaterEqualStopPrice = 'E'
THOST_FTDC_CC_BidPriceLesserThanStopPrice = 'F'
THOST_FTDC_CC_BidPriceLesserEqualStopPrice = 'H'
THOST_FTDC_AF_Delete = '0'
THOST_FTDC_AF_Modify = '3'
THOST_FTDC_TR_Allow = '0'
THOST_FTDC_TR_CloseOnly = '1'
THOST_FTDC_TR_Forbidden = '2'
THOST_FTDC_OSRC_Participant = '0'
THOST_FTDC_OSRC_Administrator = '1'
THOST_FTDC_TRDT_SplitCombination = '#'
THOST_FTDC_TRDT_Common = '0'
THOST_FTDC_TRDT_OptionsExecution = '1'
THOST_FTDC_TRDT_OTC = '2'
THOST_FTDC_TRDT_EFPDerived = '3'
THOST_FTDC_TRDT_CombinationDerived = '4'
THOST_FTDC_PSRC_LastPrice = '0'
THOST_FTDC_PSRC_Buy = '1'
THOST_FTDC_PSRC_Sell = '2'
THOST_FTDC_IS_BeforeTrading = '0'
THOST_FTDC_IS_NoTrading = '1'
THOST_FTDC_IS_Continous = '2'
THOST_FTDC_IS_AuctionOrdering = '3'
THOST_FTDC_IS_AuctionBalance = '4'
THOST_FTDC_IS_AuctionMatch = '5'
THOST_FTDC_IS_Closed = '6'
THOST_FTDC_IER_Automatic = '1'
THOST_FTDC_IER_Manual = '2'
THOST_FTDC_IER_Fuse = '3'
THOST_FTDC_BS_NoUpload = '1'
THOST_FTDC_BS_Uploaded = '2'
THOST_FTDC_BS_Failed = '3'
THOST_FTDC_RS_All = '1'
THOST_FTDC_RS_ByProduct = '2'
THOST_FTDC_RP_ByVolume = '1'
THOST_FTDC_RP_ByFeeOnHand = '2'
THOST_FTDC_RL_Level1 = '1'
THOST_FTDC_RL_Level2 = '2'
THOST_FTDC_RL_Level3 = '3'
THOST_FTDC_RL_Level4 = '4'
THOST_FTDC_RL_Level5 = '5'
THOST_FTDC_RL_Level6 = '6'
THOST_FTDC_RL_Level7 = '7'
THOST_FTDC_RL_Level8 = '8'
THOST_FTDC_RL_Level9 = '9'
THOST_FTDC_RSD_ByPeriod = '1'
THOST_FTDC_RSD_ByStandard = '2'
THOST_FTDC_MT_Out = '0'
THOST_FTDC_MT_In = '1'
THOST_FTDC_ISPI_MortgageRatio = '4'
THOST_FTDC_ISPI_MarginWay = '5'
THOST_FTDC_ISPI_BillDeposit = '9'
THOST_FTDC_ESPI_MortgageRatio = '1'
THOST_FTDC_ESPI_OtherFundItem = '2'
THOST_FTDC_ESPI_OtherFundImport = '3'
THOST_FTDC_ESPI_CFFEXMinPrepa = '6'
THOST_FTDC_ESPI_CZCESettlementType = '7'
THOST_FTDC_ESPI_ExchDelivFeeMode = '9'
THOST_FTDC_ESPI_DelivFeeMode = '0'
THOST_FTDC_ESPI_CZCEComMarginType = 'A'
THOST_FTDC_ESPI_DceComMarginType = 'B'
THOST_FTDC_ESPI_OptOutDisCountRate = 'a'
THOST_FTDC_ESPI_OptMiniGuarantee = 'b'
THOST_FTDC_SPI_InvestorIDMinLength = '1'
THOST_FTDC_SPI_AccountIDMinLength = '2'
THOST_FTDC_SPI_UserRightLogon = '3'
THOST_FTDC_SPI_SettlementBillTrade = '4'
THOST_FTDC_SPI_TradingCode = '5'
THOST_FTDC_SPI_CheckFund = '6'
THOST_FTDC_SPI_CommModelRight = '7'
THOST_FTDC_SPI_MarginModelRight = '9'
THOST_FTDC_SPI_IsStandardActive = '8'
THOST_FTDC_SPI_UploadSettlementFile = 'U'
THOST_FTDC_SPI_DownloadCSRCFile = 'D'
THOST_FTDC_SPI_SettlementBillFile = 'S'
THOST_FTDC_SPI_CSRCOthersFile = 'C'
THOST_FTDC_SPI_InvestorPhoto = 'P'
THOST_FTDC_SPI_CSRCData = 'R'
THOST_FTDC_SPI_InvestorPwdModel = 'I'
THOST_FTDC_SPI_CFFEXInvestorSettleFile = 'F'
THOST_FTDC_SPI_InvestorIDType = 'a'
THOST_FTDC_SPI_FreezeMaxReMain = 'r'
THOST_FTDC_SPI_IsSync = 'A'
THOST_FTDC_SPI_RelieveOpenLimit = 'O'
THOST_FTDC_SPI_IsStandardFreeze = 'X'
THOST_FTDC_SPI_CZCENormalProductHedge = 'B'
THOST_FTDC_TPID_EncryptionStandard = 'E'
THOST_FTDC_TPID_RiskMode = 'R'
THOST_FTDC_TPID_RiskModeGlobal = 'G'
THOST_FTDC_TPID_modeEncode = 'P'
THOST_FTDC_TPID_tickMode = 'T'
THOST_FTDC_TPID_SingleUserSessionMaxNum = 'S'
THOST_FTDC_TPID_LoginFailMaxNum = 'L'
THOST_FTDC_TPID_IsAuthForce = 'A'
THOST_FTDC_TPID_IsPosiFreeze = 'F'
THOST_FTDC_TPID_IsPosiLimit = 'M'
THOST_FTDC_TPID_ForQuoteTimeInterval = 'Q'
THOST_FTDC_TPID_IsFuturePosiLimit = 'B'
THOST_FTDC_TPID_IsFutureOrderFreq = 'C'
THOST_FTDC_TPID_IsExecOrderProfit = 'H'
THOST_FTDC_FI_SettlementFund = 'F'
THOST_FTDC_FI_Trade = 'T'
THOST_FTDC_FI_InvestorPosition = 'P'
THOST_FTDC_FI_SubEntryFund = 'O'
THOST_FTDC_FI_CZCECombinationPos = 'C'
THOST_FTDC_FI_CSRCData = 'R'
THOST_FTDC_FI_CZCEClose = 'L'
THOST_FTDC_FI_CZCENoClose = 'N'
THOST_FTDC_FI_PositionDtl = 'D'
THOST_FTDC_FI_OptionStrike = 'S'
THOST_FTDC_FI_SettlementPriceComparison = 'M'
THOST_FTDC_FI_NonTradePosChange = 'B'
THOST_FTDC_FUT_Settlement = '0'
THOST_FTDC_FUT_Check = '1'
THOST_FTDC_FFT_Txt = '0'
THOST_FTDC_FFT_Zip = '1'
THOST_FTDC_FFT_DBF = '2'
THOST_FTDC_FUS_SucceedUpload = '1'
THOST_FTDC_FUS_FailedUpload = '2'
THOST_FTDC_FUS_SucceedLoad = '3'
THOST_FTDC_FUS_PartSucceedLoad = '4'
THOST_FTDC_FUS_FailedLoad = '5'
THOST_FTDC_TD_Out = '0'
THOST_FTDC_TD_In = '1'
THOST_FTDC_SC_NoSpecialRule = '0'
THOST_FTDC_SC_NoSpringFestival = '1'
THOST_FTDC_IPT_LastSettlement = '1'
THOST_FTDC_IPT_LaseClose = '2'
THOST_FTDC_PLP_Active = '1'
THOST_FTDC_PLP_NonActive = '2'
THOST_FTDC_PLP_Canceled = '3'
THOST_FTDC_DM_CashDeliv = '1'
THOST_FTDC_DM_CommodityDeliv = '2'
THOST_FTDC_FIOT_FundIO = '1'
THOST_FTDC_FIOT_Transfer = '2'
THOST_FTDC_FIOT_SwapCurrency = '3'
THOST_FTDC_FT_Deposite = '1'
THOST_FTDC_FT_ItemFund = '2'
THOST_FTDC_FT_Company = '3'
THOST_FTDC_FT_InnerTransfer = '4'
THOST_FTDC_FD_In = '1'
THOST_FTDC_FD_Out = '2'
THOST_FTDC_FS_Record = '1'
THOST_FTDC_FS_Check = '2'
THOST_FTDC_FS_Charge = '3'
THOST_FTDC_PS_None = '1'
THOST_FTDC_PS_Publishing = '2'
THOST_FTDC_PS_Published = '3'
THOST_FTDC_ES_NonActive = '1'
THOST_FTDC_ES_Startup = '2'
THOST_FTDC_ES_Initialize = '3'
THOST_FTDC_ES_Initialized = '4'
THOST_FTDC_ES_Close = '5'
THOST_FTDC_ES_Closed = '6'
THOST_FTDC_ES_Settlement = '7'
THOST_FTDC_STS_Initialize = '0'
THOST_FTDC_STS_Settlementing = '1'
THOST_FTDC_STS_Settlemented = '2'
THOST_FTDC_STS_Finished = '3'
THOST_FTDC_CT_Person = '0'
THOST_FTDC_CT_Company = '1'
THOST_FTDC_CT_Fund = '2'
THOST_FTDC_CT_SpecialOrgan = '3'
THOST_FTDC_CT_Asset = '4'
THOST_FTDC_BT_Trade = '0'
THOST_FTDC_BT_TradeSettle = '1'
THOST_FTDC_FAS_Low = '1'
THOST_FTDC_FAS_Normal = '2'
THOST_FTDC_FAS_Focus = '3'
THOST_FTDC_FAS_Risk = '4'
THOST_FTDC_FAS_ByTrade = '1'
THOST_FTDC_FAS_ByDeliv = '2'
THOST_FTDC_FAS_None = '3'
THOST_FTDC_FAS_FixFee = '4'
THOST_FTDC_PWDT_Trade = '1'
THOST_FTDC_PWDT_Account = '2'
THOST_FTDC_AG_All = '1'
THOST_FTDC_AG_OnlyLost = '2'
THOST_FTDC_AG_OnlyGain = '3'
THOST_FTDC_AG_None = '4'
THOST_FTDC_ICP_Include = '0'
THOST_FTDC_ICP_NotInclude = '2'
THOST_FTDC_AWT_Enable = '0'
THOST_FTDC_AWT_Disable = '2'
THOST_FTDC_AWT_NoHoldEnable = '3'
THOST_FTDC_FPWD_UnCheck = '0'
THOST_FTDC_FPWD_Check = '1'
THOST_FTDC_TT_BankToFuture = '0'
THOST_FTDC_TT_FutureToBank = '1'
THOST_FTDC_TVF_Invalid = '0'
THOST_FTDC_TVF_Valid = '1'
THOST_FTDC_TVF_Reverse = '2'
THOST_FTDC_RN_CD = '0'
THOST_FTDC_RN_ZT = '1'
THOST_FTDC_RN_QT = '2'
THOST_FTDC_SEX_None = '0'
THOST_FTDC_SEX_Man = '1'
THOST_FTDC_SEX_Woman = '2'
THOST_FTDC_UT_Investor = '0'
THOST_FTDC_UT_Operator = '1'
THOST_FTDC_UT_SuperUser = '2'
THOST_FTDC_RATETYPE_MarginRate = '2'
THOST_FTDC_NOTETYPE_TradeSettleBill = '1'
THOST_FTDC_NOTETYPE_TradeSettleMonth = '2'
THOST_FTDC_NOTETYPE_CallMarginNotes = '3'
THOST_FTDC_NOTETYPE_ForceCloseNotes = '4'
THOST_FTDC_NOTETYPE_TradeNotes = '5'
THOST_FTDC_NOTETYPE_DelivNotes = '6'
THOST_FTDC_SBS_Day = '1'
THOST_FTDC_SBS_Volume = '2'
THOST_FTDC_ST_Day = '0'
THOST_FTDC_ST_Month = '1'
THOST_FTDC_URT_Logon = '1'
THOST_FTDC_URT_Transfer = '2'
THOST_FTDC_URT_EMail = '3'
THOST_FTDC_URT_Fax = '4'
THOST_FTDC_URT_ConditionOrder = '5'
THOST_FTDC_MPT_PreSettlementPrice = '1'
THOST_FTDC_MPT_SettlementPrice = '2'
THOST_FTDC_MPT_AveragePrice = '3'
THOST_FTDC_MPT_OpenPrice = '4'
THOST_FTDC_BGS_None = '0'
THOST_FTDC_BGS_NoGenerated = '1'
THOST_FTDC_BGS_Generated = '2'
THOST_FTDC_AT_HandlePositionAlgo = '1'
THOST_FTDC_AT_FindMarginRateAlgo = '2'
THOST_FTDC_HPA_Base = '1'
THOST_FTDC_HPA_DCE = '2'
THOST_FTDC_HPA_CZCE = '3'
THOST_FTDC_FMRA_Base = '1'
THOST_FTDC_FMRA_DCE = '2'
THOST_FTDC_FMRA_CZCE = '3'
THOST_FTDC_HTAA_Base = '1'
THOST_FTDC_HTAA_DCE = '2'
THOST_FTDC_HTAA_CZCE = '3'
THOST_FTDC_PST_Order = '1'
THOST_FTDC_PST_Open = '2'
THOST_FTDC_PST_Fund = '3'
THOST_FTDC_PST_Settlement = '4'
THOST_FTDC_PST_Company = '5'
THOST_FTDC_PST_Corporation = '6'
THOST_FTDC_PST_LinkMan = '7'
THOST_FTDC_PST_Ledger = '8'
THOST_FTDC_PST_Trustee = '9'
THOST_FTDC_PST_TrusteeCorporation = 'A'
THOST_FTDC_PST_TrusteeOpen = 'B'
THOST_FTDC_PST_TrusteeContact = 'C'
THOST_FTDC_PST_ForeignerRefer = 'D'
THOST_FTDC_PST_CorporationRefer = 'E'
THOST_FTDC_QIR_All = '1'
THOST_FTDC_QIR_Group = '2'
THOST_FTDC_QIR_Single = '3'
THOST_FTDC_IRS_Normal = '1'
THOST_FTDC_IRS_Warn = '2'
THOST_FTDC_IRS_Call = '3'
THOST_FTDC_IRS_Force = '4'
THOST_FTDC_IRS_Exception = '5'
THOST_FTDC_UET_Login = '1'
THOST_FTDC_UET_Logout = '2'
THOST_FTDC_UET_Trading = '3'
THOST_FTDC_UET_TradingError = '4'
THOST_FTDC_UET_UpdatePassword = '5'
THOST_FTDC_UET_Authenticate = '6'
THOST_FTDC_UET_Other = '9'
THOST_FTDC_ICS_Close = '0'
THOST_FTDC_ICS_CloseToday = '1'
THOST_FTDC_SM_Non = '0'
THOST_FTDC_SM_Instrument = '1'
THOST_FTDC_SM_Product = '2'
THOST_FTDC_SM_Investor = '3'
THOST_FTDC_PAOS_NotSend = '1'
THOST_FTDC_PAOS_Send = '2'
THOST_FTDC_PAOS_Deleted = '3'
THOST_FTDC_VDS_Dealing = '1'
THOST_FTDC_VDS_DeaclSucceed = '2'
THOST_FTDC_ORGS_Standard = '0'
THOST_FTDC_ORGS_ESunny = '1'
THOST_FTDC_ORGS_KingStarV6 = '2'
THOST_FTDC_VTS_NaturalDeal = '0'
THOST_FTDC_VTS_SucceedEnd = '1'
THOST_FTDC_VTS_FailedEND = '2'
THOST_FTDC_VTS_Exception = '3'
THOST_FTDC_VTS_ManualDeal = '4'
THOST_FTDC_VTS_MesException = '5'
THOST_FTDC_VTS_SysException = '6'
THOST_FTDC_VBAT_BankBook = '1'
THOST_FTDC_VBAT_BankCard = '2'
THOST_FTDC_VBAT_CreditCard = '3'
THOST_FTDC_VMS_Natural = '0'
THOST_FTDC_VMS_Canceled = '9'
THOST_FTDC_VAA_NoAvailAbility = '0'
THOST_FTDC_VAA_AvailAbility = '1'
THOST_FTDC_VAA_Repeal = '2'
THOST_FTDC_VTC_BankBankToFuture = '102001'
THOST_FTDC_VTC_BankFutureToBank = '102002'
THOST_FTDC_VTC_FutureBankToFuture = '202001'
THOST_FTDC_VTC_FutureFutureToBank = '202002'
THOST_FTDC_GEN_Program = '0'
THOST_FTDC_GEN_HandWork = '1'
THOST_FTDC_CFMMCKK_REQUEST = 'R'
THOST_FTDC_CFMMCKK_AUTO = 'A'
THOST_FTDC_CFMMCKK_MANUAL = 'M'
THOST_FTDC_CFT_IDCard = '0'
THOST_FTDC_CFT_Passport = '1'
THOST_FTDC_CFT_OfficerIDCard = '2'
THOST_FTDC_CFT_SoldierIDCard = '3'
THOST_FTDC_CFT_HomeComingCard = '4'
THOST_FTDC_CFT_HouseholdRegister = '5'
THOST_FTDC_CFT_LicenseNo = '6'
THOST_FTDC_CFT_InstitutionCodeCard = '7'
THOST_FTDC_CFT_TempLicenseNo = '8'
THOST_FTDC_CFT_NoEnterpriseLicenseNo = '9'
THOST_FTDC_CFT_OtherCard = 'x'
THOST_FTDC_CFT_SuperDepAgree = 'a'
THOST_FTDC_FBC_Others = '0'
THOST_FTDC_FBC_TransferDetails = '1'
THOST_FTDC_FBC_CustAccStatus = '2'
THOST_FTDC_FBC_AccountTradeDetails = '3'
THOST_FTDC_FBC_FutureAccountChangeInfoDetails = '4'
THOST_FTDC_FBC_CustMoneyDetail = '5'
THOST_FTDC_FBC_CustCancelAccountInfo = '6'
THOST_FTDC_FBC_CustMoneyResult = '7'
THOST_FTDC_FBC_OthersExceptionResult = '8'
THOST_FTDC_FBC_CustInterestNetMoneyDetails = '9'
THOST_FTDC_FBC_CustMoneySendAndReceiveDetails = 'a'
THOST_FTDC_FBC_CorporationMoneyTotal = 'b'
THOST_FTDC_FBC_MainbodyMoneyTotal = 'c'
THOST_FTDC_FBC_MainPartMonitorData = 'd'
THOST_FTDC_FBC_PreparationMoney = 'e'
THOST_FTDC_FBC_BankMoneyMonitorData = 'f'
THOST_FTDC_CEC_Exchange = '1'
THOST_FTDC_CEC_Cash = '2'
THOST_FTDC_YNI_Yes = '0'
THOST_FTDC_YNI_No = '1'
THOST_FTDC_BLT_CurrentMoney = '0'
THOST_FTDC_BLT_UsableMoney = '1'
THOST_FTDC_BLT_FetchableMoney = '2'
THOST_FTDC_BLT_FreezeMoney = '3'
THOST_FTDC_GD_Unknown = '0'
THOST_FTDC_GD_Male = '1'
THOST_FTDC_GD_Female = '2'
THOST_FTDC_FPF_BEN = '0'
THOST_FTDC_FPF_OUR = '1'
THOST_FTDC_FPF_SHA = '2'
THOST_FTDC_PWKT_ExchangeKey = '0'
THOST_FTDC_PWKT_PassWordKey = '1'
THOST_FTDC_PWKT_MACKey = '2'
THOST_FTDC_PWKT_MessageKey = '3'
THOST_FTDC_PWT_Query = '0'
THOST_FTDC_PWT_Fetch = '1'
THOST_FTDC_PWT_Transfer = '2'
THOST_FTDC_PWT_Trade = '3'
THOST_FTDC_EM_NoEncry = '0'
THOST_FTDC_EM_DES = '1'
THOST_FTDC_EM_3DES = '2'
THOST_FTDC_BRF_BankNotNeedRepeal = '0'
THOST_FTDC_BRF_BankWaitingRepeal = '1'
THOST_FTDC_BRF_BankBeenRepealed = '2'
THOST_FTDC_BRORF_BrokerNotNeedRepeal = '0'
THOST_FTDC_BRORF_BrokerWaitingRepeal = '1'
THOST_FTDC_BRORF_BrokerBeenRepealed = '2'
THOST_FTDC_TS_Bank = '0'
THOST_FTDC_TS_Future = '1'
THOST_FTDC_TS_Store = '2'
THOST_FTDC_LF_Yes = '0'
THOST_FTDC_LF_No = '1'
THOST_FTDC_BAS_Normal = '0'
THOST_FTDC_BAS_Freeze = '1'
THOST_FTDC_BAS_ReportLoss = '2'
THOST_FTDC_MAS_Normal = '0'
THOST_FTDC_MAS_Cancel = '1'
THOST_FTDC_MSS_Point = '0'
THOST_FTDC_MSS_PrePoint = '1'
THOST_FTDC_MSS_CancelPoint = '2'
THOST_FTDC_SYT_FutureBankTransfer = '0'
THOST_FTDC_SYT_StockBankTransfer = '1'
THOST_FTDC_SYT_TheThirdPartStore = '2'
THOST_FTDC_TEF_NormalProcessing = '0'
THOST_FTDC_TEF_Success = '1'
THOST_FTDC_TEF_Failed = '2'
THOST_FTDC_TEF_Abnormal = '3'
THOST_FTDC_TEF_ManualProcessedForException = '4'
THOST_FTDC_TEF_CommuFailedNeedManualProcess = '5'
THOST_FTDC_TEF_SysErrorNeedManualProcess = '6'
THOST_FTDC_PSS_NotProcess = '0'
THOST_FTDC_PSS_StartProcess = '1'
THOST_FTDC_PSS_Finished = '2'
THOST_FTDC_CUSTT_Person = '0'
THOST_FTDC_CUSTT_Institution = '1'
THOST_FTDC_FBTTD_FromBankToFuture = '1'
THOST_FTDC_FBTTD_FromFutureToBank = '2'
THOST_FTDC_OOD_Open = '1'
THOST_FTDC_OOD_Destroy = '0'
THOST_FTDC_AVAF_Invalid = '0'
THOST_FTDC_AVAF_Valid = '1'
THOST_FTDC_AVAF_Repeal = '2'
THOST_FTDC_OT_Bank = '1'
THOST_FTDC_OT_Future = '2'
THOST_FTDC_OT_PlateForm = '9'
THOST_FTDC_OL_HeadQuarters = '1'
THOST_FTDC_OL_Branch = '2'
THOST_FTDC_PID_FutureProtocal = '0'
THOST_FTDC_PID_ICBCProtocal = '1'
THOST_FTDC_PID_ABCProtocal = '2'
THOST_FTDC_PID_CBCProtocal = '3'
THOST_FTDC_PID_CCBProtocal = '4'
THOST_FTDC_PID_BOCOMProtocal = '5'
THOST_FTDC_PID_FBTPlateFormProtocal = 'X'
THOST_FTDC_CM_ShortConnect = '0'
THOST_FTDC_CM_LongConnect = '1'
THOST_FTDC_SRM_ASync = '0'
THOST_FTDC_SRM_Sync = '1'
THOST_FTDC_BAT_BankBook = '1'
THOST_FTDC_BAT_SavingCard = '2'
THOST_FTDC_BAT_CreditCard = '3'
THOST_FTDC_FAT_BankBook = '1'
THOST_FTDC_FAT_SavingCard = '2'
THOST_FTDC_FAT_CreditCard = '3'
THOST_FTDC_OS_Ready = '0'
THOST_FTDC_OS_CheckIn = '1'
THOST_FTDC_OS_CheckOut = '2'
THOST_FTDC_OS_CheckFileArrived = '3'
THOST_FTDC_OS_CheckDetail = '4'
THOST_FTDC_OS_DayEndClean = '5'
THOST_FTDC_OS_Invalid = '9'
THOST_FTDC_CCBFM_ByAmount = '1'
THOST_FTDC_CCBFM_ByMonth = '2'
THOST_FTDC_CAPIT_Client = '1'
THOST_FTDC_CAPIT_Server = '2'
THOST_FTDC_CAPIT_UserApi = '3'
THOST_FTDC_LS_Connected = '1'
THOST_FTDC_LS_Disconnected = '2'
THOST_FTDC_BPWDF_NoCheck = '0'
THOST_FTDC_BPWDF_BlankCheck = '1'
THOST_FTDC_BPWDF_EncryptCheck = '2'
THOST_FTDC_SAT_AccountID = '1'
THOST_FTDC_SAT_CardID = '2'
THOST_FTDC_SAT_SHStockholderID = '3'
THOST_FTDC_SAT_SZStockholderID = '4'
THOST_FTDC_TRFS_Normal = '0'
THOST_FTDC_TRFS_Repealed = '1'
THOST_FTDC_SPTYPE_Broker = '0'
THOST_FTDC_SPTYPE_Bank = '1'
THOST_FTDC_REQRSP_Request = '0'
THOST_FTDC_REQRSP_Response = '1'
THOST_FTDC_FBTUET_SignIn = '0'
THOST_FTDC_FBTUET_FromBankToFuture = '1'
THOST_FTDC_FBTUET_FromFutureToBank = '2'
THOST_FTDC_FBTUET_OpenAccount = '3'
THOST_FTDC_FBTUET_CancelAccount = '4'
THOST_FTDC_FBTUET_ChangeAccount = '5'
THOST_FTDC_FBTUET_RepealFromBankToFuture = '6'
THOST_FTDC_FBTUET_RepealFromFutureToBank = '7'
THOST_FTDC_FBTUET_QueryBankAccount = '8'
THOST_FTDC_FBTUET_QueryFutureAccount = '9'
THOST_FTDC_FBTUET_SignOut = 'A'
THOST_FTDC_FBTUET_SyncKey = 'B'
THOST_FTDC_FBTUET_ReserveOpenAccount = 'C'
THOST_FTDC_FBTUET_CancelReserveOpenAccount = 'D'
THOST_FTDC_FBTUET_ReserveOpenAccountConfirm = 'E'
THOST_FTDC_FBTUET_Other = 'Z'
THOST_FTDC_DBOP_Insert = '0'
THOST_FTDC_DBOP_Update = '1'
THOST_FTDC_DBOP_Delete = '2'
THOST_FTDC_SYNF_Yes = '0'
THOST_FTDC_SYNF_No = '1'
THOST_FTDC_SYNT_OneOffSync = '0'
THOST_FTDC_SYNT_TimerSync = '1'
THOST_FTDC_SYNT_TimerFullSync = '2'
THOST_FTDC_FBEDIR_Settlement = '0'
THOST_FTDC_FBEDIR_Sale = '1'
THOST_FTDC_FBERES_Success = '0'
THOST_FTDC_FBERES_InsufficientBalance = '1'
THOST_FTDC_FBERES_UnknownTrading = '8'
THOST_FTDC_FBERES_Fail = 'x'
THOST_FTDC_FBEES_Normal = '0'
THOST_FTDC_FBEES_ReExchange = '1'
THOST_FTDC_FBEFG_DataPackage = '0'
THOST_FTDC_FBEFG_File = '1'
THOST_FTDC_FBEAT_NotTrade = '0'
THOST_FTDC_FBEAT_Trade = '1'
THOST_FTDC_FBEUET_SignIn = '0'
THOST_FTDC_FBEUET_Exchange = '1'
THOST_FTDC_FBEUET_ReExchange = '2'
THOST_FTDC_FBEUET_QueryBankAccount = '3'
THOST_FTDC_FBEUET_QueryExchDetial = '4'
THOST_FTDC_FBEUET_QueryExchSummary = '5'
THOST_FTDC_FBEUET_QueryExchRate = '6'
THOST_FTDC_FBEUET_CheckBankAccount = '7'
THOST_FTDC_FBEUET_SignOut = '8'
THOST_FTDC_FBEUET_Other = 'Z'
THOST_FTDC_FBERF_UnProcessed = '0'
THOST_FTDC_FBERF_WaitSend = '1'
THOST_FTDC_FBERF_SendSuccess = '2'
THOST_FTDC_FBERF_SendFailed = '3'
THOST_FTDC_FBERF_WaitReSend = '4'
THOST_FTDC_NC_NOERROR = '0'
THOST_FTDC_NC_Warn = '1'
THOST_FTDC_NC_Call = '2'
THOST_FTDC_NC_Force = '3'
THOST_FTDC_NC_CHUANCANG = '4'
THOST_FTDC_NC_Exception = '5'
THOST_FTDC_FCT_Manual = '0'
THOST_FTDC_FCT_Single = '1'
THOST_FTDC_FCT_Group = '2'
THOST_FTDC_RNM_System = '0'
THOST_FTDC_RNM_SMS = '1'
THOST_FTDC_RNM_EMail = '2'
THOST_FTDC_RNM_Manual = '3'
THOST_FTDC_RNS_NotGen = '0'
THOST_FTDC_RNS_Generated = '1'
THOST_FTDC_RNS_SendError = '2'
THOST_FTDC_RNS_SendOk = '3'
THOST_FTDC_RNS_Received = '4'
THOST_FTDC_RNS_Confirmed = '5'
THOST_FTDC_RUE_ExportData = '0'
THOST_FTDC_COST_LastPriceAsc = '0'
THOST_FTDC_COST_LastPriceDesc = '1'
THOST_FTDC_COST_AskPriceAsc = '2'
THOST_FTDC_COST_AskPriceDesc = '3'
THOST_FTDC_COST_BidPriceAsc = '4'
THOST_FTDC_COST_BidPriceDesc = '5'
THOST_FTDC_UOAST_NoSend = '0'
THOST_FTDC_UOAST_Sended = '1'
THOST_FTDC_UOAST_Generated = '2'
THOST_FTDC_UOAST_SendFail = '3'
THOST_FTDC_UOAST_Success = '4'
THOST_FTDC_UOAST_Fail = '5'
THOST_FTDC_UOAST_Cancel = '6'
THOST_FTDC_UOACS_NoApply = '1'
THOST_FTDC_UOACS_Submited = '2'
THOST_FTDC_UOACS_Sended = '3'
THOST_FTDC_UOACS_Success = '4'
THOST_FTDC_UOACS_Refuse = '5'
THOST_FTDC_UOACS_Cancel = '6'
THOST_FTDC_QT_Radio = '1'
THOST_FTDC_QT_Option = '2'
THOST_FTDC_QT_Blank = '3'
THOST_FTDC_BT_Request = '1'
THOST_FTDC_BT_Response = '2'
THOST_FTDC_BT_Notice = '3'
THOST_FTDC_CRC_Success = '0'
THOST_FTDC_CRC_Working = '1'
THOST_FTDC_CRC_InfoFail = '2'
THOST_FTDC_CRC_IDCardFail = '3'
THOST_FTDC_CRC_OtherFail = '4'
THOST_FTDC_CfMMCCT_All = '0'
THOST_FTDC_CfMMCCT_Person = '1'
THOST_FTDC_CfMMCCT_Company = '2'
THOST_FTDC_CfMMCCT_Other = '3'
THOST_FTDC_CfMMCCT_SpecialOrgan = '4'
THOST_FTDC_CfMMCCT_Asset = '5'
THOST_FTDC_EIDT_SHFE = 'S'
THOST_FTDC_EIDT_CZCE = 'Z'
THOST_FTDC_EIDT_DCE = 'D'
THOST_FTDC_EIDT_CFFEX = 'J'
THOST_FTDC_EIDT_INE = 'N'
THOST_FTDC_ECIDT_Hedge = '1'
THOST_FTDC_ECIDT_Arbitrage = '2'
THOST_FTDC_ECIDT_Speculation = '3'
THOST_FTDC_UF_NoUpdate = '0'
THOST_FTDC_UF_Success = '1'
THOST_FTDC_UF_Fail = '2'
THOST_FTDC_UF_TCSuccess = '3'
THOST_FTDC_UF_TCFail = '4'
THOST_FTDC_UF_Cancel = '5'
THOST_FTDC_AOID_OpenInvestor = '1'
THOST_FTDC_AOID_ModifyIDCard = '2'
THOST_FTDC_AOID_ModifyNoIDCard = '3'
THOST_FTDC_AOID_ApplyTradingCode = '4'
THOST_FTDC_AOID_CancelTradingCode = '5'
THOST_FTDC_AOID_CancelInvestor = '6'
THOST_FTDC_AOID_FreezeAccount = '8'
THOST_FTDC_AOID_ActiveFreezeAccount = '9'
THOST_FTDC_ASID_NoComplete = '1'
THOST_FTDC_ASID_Submited = '2'
THOST_FTDC_ASID_Checked = '3'
THOST_FTDC_ASID_Refused = '4'
THOST_FTDC_ASID_Deleted = '5'
THOST_FTDC_UOASM_ByAPI = '1'
THOST_FTDC_UOASM_ByFile = '2'
THOST_FTDC_EvM_ADD = '1'
THOST_FTDC_EvM_UPDATE = '2'
THOST_FTDC_EvM_DELETE = '3'
THOST_FTDC_EvM_CHECK = '4'
THOST_FTDC_EvM_COPY = '5'
THOST_FTDC_EvM_CANCEL = '6'
THOST_FTDC_EvM_Reverse = '7'
THOST_FTDC_UOAA_ASR = '1'
THOST_FTDC_UOAA_ASNR = '2'
THOST_FTDC_UOAA_NSAR = '3'
THOST_FTDC_UOAA_NSR = '4'
THOST_FTDC_EvM_InvestorGroupFlow = '1'
THOST_FTDC_EvM_InvestorRate = '2'
THOST_FTDC_EvM_InvestorCommRateModel = '3'
THOST_FTDC_CL_Zero = '0'
THOST_FTDC_CL_One = '1'
THOST_FTDC_CL_Two = '2'
THOST_FTDC_CHS_Init = '0'
THOST_FTDC_CHS_Checking = '1'
THOST_FTDC_CHS_Checked = '2'
THOST_FTDC_CHS_Refuse = '3'
THOST_FTDC_CHS_Cancel = '4'
THOST_FTDC_CHU_Unused = '0'
THOST_FTDC_CHU_Used = '1'
THOST_FTDC_CHU_Fail = '2'
THOST_FTDC_BAO_ByAccProperty = '0'
THOST_FTDC_BAO_ByFBTransfer = '1'
THOST_FTDC_MBTS_ByInstrument = '0'
THOST_FTDC_MBTS_ByDayInsPrc = '1'
THOST_FTDC_MBTS_ByDayIns = '2'
THOST_FTDC_FTC_BankLaunchBankToBroker = '102001'
THOST_FTDC_FTC_BrokerLaunchBankToBroker = '202001'
THOST_FTDC_FTC_BankLaunchBrokerToBank = '102002'
THOST_FTDC_FTC_BrokerLaunchBrokerToBank = '202002'
THOST_FTDC_OTP_NONE = '0'
THOST_FTDC_OTP_TOTP = '1'
THOST_FTDC_OTPS_Unused = '0'
THOST_FTDC_OTPS_Used = '1'
THOST_FTDC_OTPS_Disuse = '2'
THOST_FTDC_BUT_Investor = '1'
THOST_FTDC_BUT_BrokerUser = '2'
THOST_FTDC_FUTT_Commodity = '1'
THOST_FTDC_FUTT_Financial = '2'
THOST_FTDC_FET_Restriction = '0'
THOST_FTDC_FET_TodayRestriction = '1'
THOST_FTDC_FET_Transfer = '2'
THOST_FTDC_FET_Credit = '3'
THOST_FTDC_FET_InvestorWithdrawAlm = '4'
THOST_FTDC_FET_BankRestriction = '5'
THOST_FTDC_FET_Accountregister = '6'
THOST_FTDC_FET_ExchangeFundIO = '7'
THOST_FTDC_FET_InvestorFundIO = '8'
THOST_FTDC_AST_FBTransfer = '0'
THOST_FTDC_AST_ManualEntry = '1'
THOST_FTDC_CST_UnifyAccount = '0'
THOST_FTDC_CST_ManualEntry = '1'
THOST_FTDC_UR_All = '0'
THOST_FTDC_UR_Single = '1'
THOST_FTDC_BG_Investor = '2'
THOST_FTDC_BG_Group = '1'
THOST_FTDC_TSSM_Instrument = '1'
THOST_FTDC_TSSM_Product = '2'
THOST_FTDC_TSSM_Exchange = '3'
THOST_FTDC_ESM_Relative = '1'
THOST_FTDC_ESM_Typical = '2'
THOST_FTDC_RIR_All = '1'
THOST_FTDC_RIR_Model = '2'
THOST_FTDC_RIR_Single = '3'
THOST_FTDC_SDS_Initialize = '0'
THOST_FTDC_SDS_Settlementing = '1'
THOST_FTDC_SDS_Settlemented = '2'
THOST_FTDC_TSRC_NORMAL = '0'
THOST_FTDC_TSRC_QUERY = '1'
THOST_FTDC_FSM_Product = '1'
THOST_FTDC_FSM_Exchange = '2'
THOST_FTDC_FSM_All = '3'
THOST_FTDC_BIR_Property = '1'
THOST_FTDC_BIR_All = '2'
THOST_FTDC_PIR_All = '1'
THOST_FTDC_PIR_Property = '2'
THOST_FTDC_PIR_Single = '3'
THOST_FTDC_FIS_NoCreate = '0'
THOST_FTDC_FIS_Created = '1'
THOST_FTDC_FIS_Failed = '2'
THOST_FTDC_FGS_FileTransmit = '0'
THOST_FTDC_FGS_FileGen = '1'
THOST_FTDC_SoM_Add = '1'
THOST_FTDC_SoM_Update = '2'
THOST_FTDC_SoM_Delete = '3'
THOST_FTDC_SoM_Copy = '4'
THOST_FTDC_SoM_AcTive = '5'
THOST_FTDC_SoM_CanCel = '6'
THOST_FTDC_SoM_ReSet = '7'
THOST_FTDC_SoT_UpdatePassword = '0'
THOST_FTDC_SoT_UserDepartment = '1'
THOST_FTDC_SoT_RoleManager = '2'
THOST_FTDC_SoT_RoleFunction = '3'
THOST_FTDC_SoT_BaseParam = '4'
THOST_FTDC_SoT_SetUserID = '5'
THOST_FTDC_SoT_SetUserRole = '6'
THOST_FTDC_SoT_UserIpRestriction = '7'
THOST_FTDC_SoT_DepartmentManager = '8'
THOST_FTDC_SoT_DepartmentCopy = '9'
THOST_FTDC_SoT_Tradingcode = 'A'
THOST_FTDC_SoT_InvestorStatus = 'B'
THOST_FTDC_SoT_InvestorAuthority = 'C'
THOST_FTDC_SoT_PropertySet = 'D'
THOST_FTDC_SoT_ReSetInvestorPasswd = 'E'
THOST_FTDC_SoT_InvestorPersonalityInfo = 'F'
THOST_FTDC_CSRCQ_Current = '0'
THOST_FTDC_CSRCQ_History = '1'
THOST_FTDC_FRS_Normal = '1'
THOST_FTDC_FRS_Freeze = '0'
THOST_FTDC_STST_Standard = '0'
THOST_FTDC_STST_NonStandard = '1'
THOST_FTDC_RPT_Freeze = '1'
THOST_FTDC_RPT_FreezeActive = '2'
THOST_FTDC_RPT_OpenLimit = '3'
THOST_FTDC_RPT_RelieveOpenLimit = '4'
THOST_FTDC_AMLDS_Normal = '0'
THOST_FTDC_AMLDS_Deleted = '1'
THOST_FTDC_AMLCHS_Init = '0'
THOST_FTDC_AMLCHS_Checking = '1'
THOST_FTDC_AMLCHS_Checked = '2'
THOST_FTDC_AMLCHS_RefuseReport = '3'
THOST_FTDC_AMLDT_DrawDay = '0'
THOST_FTDC_AMLDT_TouchDay = '1'
THOST_FTDC_AMLCL_CheckLevel0 = '0'
THOST_FTDC_AMLCL_CheckLevel1 = '1'
THOST_FTDC_AMLCL_CheckLevel2 = '2'
THOST_FTDC_AMLCL_CheckLevel3 = '3'
THOST_FTDC_EFT_CSV = '0'
THOST_FTDC_EFT_EXCEL = '1'
THOST_FTDC_EFT_DBF = '2'
THOST_FTDC_SMT_Before = '1'
THOST_FTDC_SMT_Settlement = '2'
THOST_FTDC_SMT_After = '3'
THOST_FTDC_SMT_Settlemented = '4'
THOST_FTDC_SML_Must = '1'
THOST_FTDC_SML_Alarm = '2'
THOST_FTDC_SML_Prompt = '3'
THOST_FTDC_SML_Ignore = '4'
THOST_FTDC_SMG_Exhcange = '1'
THOST_FTDC_SMG_ASP = '2'
THOST_FTDC_SMG_CSRC = '3'
THOST_FTDC_LUT_Repeatable = '1'
THOST_FTDC_LUT_Unrepeatable = '2'
THOST_FTDC_DAR_Settle = '1'
THOST_FTDC_DAR_Exchange = '2'
THOST_FTDC_DAR_CSRC = '3'
THOST_FTDC_MGT_ExchMarginRate = '0'
THOST_FTDC_MGT_InstrMarginRate = '1'
THOST_FTDC_MGT_InstrMarginRateTrade = '2'
THOST_FTDC_ACT_Intraday = '1'
THOST_FTDC_ACT_Long = '2'
THOST_FTDC_MRT_Exchange = '1'
THOST_FTDC_MRT_Investor = '2'
THOST_FTDC_MRT_InvestorTrade = '3'
THOST_FTDC_BUS_UnBak = '0'
THOST_FTDC_BUS_BakUp = '1'
THOST_FTDC_BUS_BakUped = '2'
THOST_FTDC_BUS_BakFail = '3'
THOST_FTDC_SIS_UnInitialize = '0'
THOST_FTDC_SIS_Initialize = '1'
THOST_FTDC_SIS_Initialized = '2'
THOST_FTDC_SRS_NoCreate = '0'
THOST_FTDC_SRS_Create = '1'
THOST_FTDC_SRS_Created = '2'
THOST_FTDC_SRS_CreateFail = '3'
THOST_FTDC_SSS_UnSaveData = '0'
THOST_FTDC_SSS_SaveDatad = '1'
THOST_FTDC_SAS_UnArchived = '0'
THOST_FTDC_SAS_Archiving = '1'
THOST_FTDC_SAS_Archived = '2'
THOST_FTDC_SAS_ArchiveFail = '3'
THOST_FTDC_CTPT_Unkown = '0'
THOST_FTDC_CTPT_MainCenter = '1'
THOST_FTDC_CTPT_BackUp = '2'
THOST_FTDC_CDT_Normal = '0'
THOST_FTDC_CDT_SpecFirst = '1'
THOST_FTDC_MFUR_None = '0'
THOST_FTDC_MFUR_Margin = '1'
THOST_FTDC_MFUR_All = '2'
THOST_FTDC_SPT_CzceHedge = '1'
THOST_FTDC_SPT_IneForeignCurrency = '2'
THOST_FTDC_SPT_DceOpenClose = '3'
THOST_FTDC_FMT_Mortgage = '1'
THOST_FTDC_FMT_Redemption = '2'
THOST_FTDC_ASPI_BaseMargin = '1'
THOST_FTDC_ASPI_LowestInterest = '2'
THOST_FTDC_FMD_In = '1'
THOST_FTDC_FMD_Out = '2'
THOST_FTDC_BT_Profit = '0'
THOST_FTDC_BT_Loss = '1'
THOST_FTDC_BT_Other = 'Z'
THOST_FTDC_SST_Manual = '0'
THOST_FTDC_SST_Automatic = '1'
THOST_FTDC_CED_Settlement = '0'
THOST_FTDC_CED_Sale = '1'
THOST_FTDC_CSS_Entry = '1'
THOST_FTDC_CSS_Approve = '2'
THOST_FTDC_CSS_Refuse = '3'
THOST_FTDC_CSS_Revoke = '4'
THOST_FTDC_CSS_Send = '5'
THOST_FTDC_CSS_Success = '6'
THOST_FTDC_CSS_Failure = '7'
THOST_FTDC_REQF_NoSend = '0'
THOST_FTDC_REQF_SendSuccess = '1'
THOST_FTDC_REQF_SendFailed = '2'
THOST_FTDC_REQF_WaitReSend = '3'
THOST_FTDC_RESF_Success = '0'
THOST_FTDC_RESF_InsuffiCient = '1'
THOST_FTDC_RESF_UnKnown = '8'
THOST_FTDC_EXS_Before = '0'
THOST_FTDC_EXS_After = '1'
THOST_FTDC_CR_Domestic = '1'
THOST_FTDC_CR_GMT = '2'
THOST_FTDC_CR_Foreign = '3'
THOST_FTDC_HB_No = '0'
THOST_FTDC_HB_Yes = '1'
THOST_FTDC_SM_Normal = '1'
THOST_FTDC_SM_Emerge = '2'
THOST_FTDC_SM_Restore = '3'
THOST_FTDC_TPT_Full = '1'
THOST_FTDC_TPT_Increment = '2'
THOST_FTDC_TPT_BackUp = '3'
THOST_FTDC_LM_Trade = '0'
THOST_FTDC_LM_Transfer = '1'
THOST_FTDC_CPT_Instrument = '1'
THOST_FTDC_CPT_Margin = '2'
THOST_FTDC_HT_Yes = '1'
THOST_FTDC_HT_No = '0'
THOST_FTDC_AMT_Bank = '1'
THOST_FTDC_AMT_Securities = '2'
THOST_FTDC_AMT_Fund = '3'
THOST_FTDC_AMT_Insurance = '4'
THOST_FTDC_AMT_Trust = '5'
THOST_FTDC_AMT_Other = '9'
THOST_FTDC_CFIOT_FundIO = '0'
THOST_FTDC_CFIOT_SwapCurrency = '1'
THOST_FTDC_CAT_Futures = '1'
THOST_FTDC_CAT_AssetmgrFuture = '2'
THOST_FTDC_CAT_AssetmgrTrustee = '3'
THOST_FTDC_CAT_AssetmgrTransfer = '4'
THOST_FTDC_LT_Chinese = '1'
THOST_FTDC_LT_English = '2'
THOST_FTDC_AMCT_Person = '1'
THOST_FTDC_AMCT_Organ = '2'
THOST_FTDC_AMCT_SpecialOrgan = '4'
THOST_FTDC_ASST_Futures = '3'
THOST_FTDC_ASST_SpecialOrgan = '4'
THOST_FTDC_CIT_HasExch = '0'
THOST_FTDC_CIT_HasATP = '1'
THOST_FTDC_CIT_HasDiff = '2'
THOST_FTDC_DT_HandDeliv = '1'
THOST_FTDC_DT_PersonDeliv = '2'
THOST_FTDC_MMSA_NO = '0'
THOST_FTDC_MMSA_YES = '1'
THOST_FTDC_CACT_Person = '0'
THOST_FTDC_CACT_Company = '1'
THOST_FTDC_CACT_Other = '2'
THOST_FTDC_UOAAT_Futures = '1'
THOST_FTDC_UOAAT_SpecialOrgan = '2'
THOST_FTDC_DEN_Buy = '0'
THOST_FTDC_DEN_Sell = '1'
THOST_FTDC_OFEN_Open = '0'
THOST_FTDC_OFEN_Close = '1'
THOST_FTDC_OFEN_ForceClose = '2'
THOST_FTDC_OFEN_CloseToday = '3'
THOST_FTDC_OFEN_CloseYesterday = '4'
THOST_FTDC_OFEN_ForceOff = '5'
THOST_FTDC_OFEN_LocalForceClose = '6'
THOST_FTDC_HFEN_Speculation = '1'
THOST_FTDC_HFEN_Arbitrage = '2'
THOST_FTDC_HFEN_Hedge = '3'
THOST_FTDC_FIOTEN_FundIO = '1'
THOST_FTDC_FIOTEN_Transfer = '2'
THOST_FTDC_FIOTEN_SwapCurrency = '3'
THOST_FTDC_FTEN_Deposite = '1'
THOST_FTDC_FTEN_ItemFund = '2'
THOST_FTDC_FTEN_Company = '3'
THOST_FTDC_FTEN_InnerTransfer = '4'
THOST_FTDC_FDEN_In = '1'
THOST_FTDC_FDEN_Out = '2'
THOST_FTDC_FMDEN_In = '1'
THOST_FTDC_FMDEN_Out = '2'
THOST_FTDC_CP_CallOptions = '1'
THOST_FTDC_CP_PutOptions = '2'
THOST_FTDC_STM_Continental = '0'
THOST_FTDC_STM_American = '1'
THOST_FTDC_STM_Bermuda = '2'
THOST_FTDC_STT_Hedge = '0'
THOST_FTDC_STT_Match = '1'
THOST_FTDC_APPT_NotStrikeNum = '4'
THOST_FTDC_GUDS_Gen = '0'
THOST_FTDC_GUDS_Hand = '1'
THOST_FTDC_OER_NoExec = 'n'
THOST_FTDC_OER_Canceled = 'c'
THOST_FTDC_OER_OK = '0'
THOST_FTDC_OER_NoPosition = '1'
THOST_FTDC_OER_NoDeposit = '2'
THOST_FTDC_OER_NoParticipant = '3'
THOST_FTDC_OER_NoClient = '4'
THOST_FTDC_OER_NoInstrument = '6'
THOST_FTDC_OER_NoRight = '7'
THOST_FTDC_OER_InvalidVolume = '8'
THOST_FTDC_OER_NoEnoughHistoryTrade = '9'
THOST_FTDC_OER_Unknown = 'a'
THOST_FTDC_COMBT_Future = '0'
THOST_FTDC_COMBT_BUL = '1'
THOST_FTDC_COMBT_BER = '2'
THOST_FTDC_COMBT_STD = '3'
THOST_FTDC_COMBT_STG = '4'
THOST_FTDC_COMBT_PRT = '5'
THOST_FTDC_COMBT_CLD = '6'
THOST_FTDC_ORPT_PreSettlementPrice = '1'
THOST_FTDC_ORPT_OpenPrice = '4'
THOST_FTDC_BLAG_Default = '1'
THOST_FTDC_BLAG_IncludeOptValLost = '2'
THOST_FTDC_ACTP_Exec = '1'
THOST_FTDC_ACTP_Abandon = '2'
THOST_FTDC_FQST_Submitted = 'a'
THOST_FTDC_FQST_Accepted = 'b'
THOST_FTDC_FQST_Rejected = 'c'
THOST_FTDC_VM_Absolute = '0'
THOST_FTDC_VM_Ratio = '1'
THOST_FTDC_EOPF_Reserve = '0'
THOST_FTDC_EOPF_UnReserve = '1'
THOST_FTDC_EOCF_AutoClose = '0'
THOST_FTDC_EOCF_NotToClose = '1'
THOST_FTDC_PTE_Futures = '1'
THOST_FTDC_PTE_Options = '2'
THOST_FTDC_CUFN_CUFN_O = 'O'
THOST_FTDC_CUFN_CUFN_T = 'T'
THOST_FTDC_CUFN_CUFN_P = 'P'
THOST_FTDC_CUFN_CUFN_N = 'N'
THOST_FTDC_CUFN_CUFN_L = 'L'
THOST_FTDC_CUFN_CUFN_F = 'F'
THOST_FTDC_CUFN_CUFN_C = 'C'
THOST_FTDC_CUFN_CUFN_M = 'M'
THOST_FTDC_DUFN_DUFN_O = 'O'
THOST_FTDC_DUFN_DUFN_T = 'T'
THOST_FTDC_DUFN_DUFN_P = 'P'
THOST_FTDC_DUFN_DUFN_F = 'F'
THOST_FTDC_DUFN_DUFN_C = 'C'
THOST_FTDC_DUFN_DUFN_D = 'D'
THOST_FTDC_DUFN_DUFN_M = 'M'
THOST_FTDC_DUFN_DUFN_S = 'S'
THOST_FTDC_SUFN_SUFN_O = 'O'
THOST_FTDC_SUFN_SUFN_T = 'T'
THOST_FTDC_SUFN_SUFN_P = 'P'
THOST_FTDC_SUFN_SUFN_F = 'F'
THOST_FTDC_CFUFN_SUFN_T = 'T'
THOST_FTDC_CFUFN_SUFN_P = 'P'
THOST_FTDC_CFUFN_SUFN_F = 'F'
THOST_FTDC_CFUFN_SUFN_S = 'S'
THOST_FTDC_CMDR_Comb = '0'
THOST_FTDC_CMDR_UnComb = '1'
THOST_FTDC_STOV_RealValue = '1'
THOST_FTDC_STOV_ProfitValue = '2'
THOST_FTDC_STOV_RealRatio = '3'
THOST_FTDC_STOV_ProfitRatio = '4'
THOST_FTDC_ROAST_Processing = '0'
THOST_FTDC_ROAST_Cancelled = '1'
THOST_FTDC_ROAST_Opened = '2'
THOST_FTDC_ROAST_Invalid = '3'
THOST_FTDC_OSCF_CloseSelfOptionPosition = '1'
THOST_FTDC_OSCF_ReserveOptionPosition = '2'
THOST_FTDC_OSCF_SellCloseSelfFuturePosition = '3'
|
|
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for modules/search/."""
__author__ = 'Ellis Michael (emichael@google.com)'
import datetime
import logging
import re
import actions
from controllers import sites
from models import courses
from models import custom_modules
from modules.announcements import announcements
from modules.search import search
from tests.unit import modules_search as search_unit_test
from google.appengine.api import namespace_manager
class SearchTest(search_unit_test.SearchTestBase):
"""Tests the search module."""
# Don't require documentation for self-describing test methods.
# pylint: disable=g-missing-docstring
@classmethod
def enable_module(cls):
custom_modules.Registry.registered_modules[
search.MODULE_NAME].enable()
assert search.custom_module.enabled
@classmethod
def disable_module(cls):
custom_modules.Registry.registered_modules[
search.MODULE_NAME].disable()
assert not search.custom_module.enabled
@classmethod
def get_xsrf_token(cls, body, form_name):
match = re.search(form_name + r'.+[\n\r].+value="([^"]+)"', body)
assert match
return match.group(1)
def index_test_course(self):
email = 'admin@google.com'
actions.login(email, is_admin=True)
response = self.get('/test/dashboard?action=search')
index_token = self.get_xsrf_token(response.body, 'gcb-index-course')
response = self.post('/test/dashboard?action=index_course',
{'xsrf_token': index_token})
self.execute_all_deferred_tasks()
def setUp(self): # Name set by parent. pylint: disable=g-bad-name
super(SearchTest, self).setUp()
self.enable_module()
self.logged_error = ''
def error_report(string, *args, **unused_kwargs):
self.logged_error = string % args
self.error_report = error_report
def test_module_disabled(self):
email = 'admin@google.com'
actions.login(email, is_admin=True)
self.disable_module()
response = self.get('/search?query=lorem', expect_errors=True)
self.assertEqual(response.status_code, 404)
response = self.get('dashboard?action=search')
self.assertIn(
'Google ><a href="%s"> Dashboard </a>> Search' %
self.canonicalize('dashboard'),
response.body)
self.assertNotIn('Index Course', response.body)
self.assertNotIn('Clear Index', response.body)
def test_module_enabled(self):
email = 'admin@google.com'
actions.login(email, is_admin=True)
response = self.get('course')
self.assertIn('gcb-search-box', response.body)
response = self.get('/search?query=lorem')
self.assertEqual(response.status_code, 200)
response = self.get('dashboard?action=search')
self.assertIn(
'Google ><a href="%s"> Dashboard </a>> Search' %
self.canonicalize('dashboard'),
response.body)
self.assertIn('Index Course', response.body)
self.assertIn('Clear Index', response.body)
def test_indexing_and_clearing_buttons(self):
email = 'admin@google.com'
actions.login(email, is_admin=True)
response = self.get('dashboard?action=search')
index_token = self.get_xsrf_token(response.body, 'gcb-index-course')
clear_token = self.get_xsrf_token(response.body, 'gcb-clear-index')
response = self.post('dashboard?action=index_course',
{'xsrf_token': index_token})
self.assertEqual(response.status_int, 302)
response = self.post('dashboard?action=clear_index',
{'xsrf_token': clear_token})
self.assertEqual(response.status_int, 302)
response = self.post('dashboard?action=index_course', {},
expect_errors=True)
assert response.status_int == 403
response = self.post('dashboard?action=clear_index', {},
expect_errors=True)
assert response.status_int == 403
def test_index_search_clear(self):
email = 'admin@google.com'
actions.login(email, is_admin=True)
response = self.get('dashboard?action=search')
index_token = self.get_xsrf_token(response.body, 'gcb-index-course')
clear_token = self.get_xsrf_token(response.body, 'gcb-clear-index')
response = self.post('dashboard?action=index_course',
{'xsrf_token': index_token})
self.execute_all_deferred_tasks()
# weather is a term found in the Power Searching Course and should not
# be in the HTML returned by the patched urlfetch in SearchTestBase
response = self.get('search?query=weather')
self.assertNotIn('gcb-search-result', response.body)
# This term should be present as it is in the dummy content.
response = self.get('search?query=cogito%20ergo%20sum')
self.assertIn('gcb-search-result', response.body)
response = self.post('dashboard?action=clear_index',
{'xsrf_token': clear_token})
self.execute_all_deferred_tasks()
# After the index is cleared, it shouldn't match anything
response = self.get('search?query=cogito%20ergo%20sum')
self.assertNotIn('gcb-search-result', response.body)
def test_bad_search(self):
email = 'user@google.com'
actions.login(email, is_admin=False)
# %3A is a colon, and searching for only punctuation will cause App
# Engine's search to throw an error that should be handled
response = self.get('search?query=%3A')
self.assertEqual(response.status_int, 200)
self.assertIn('gcb-search-info', response.body)
def test_errors_not_displayed_to_user(self):
exception_code = '0xDEADBEEF'
def bad_fetch(*unused_vargs, **unused_kwargs):
raise Exception(exception_code)
self.swap(search, 'fetch', bad_fetch)
self.swap(logging, 'error', self.error_report)
response = self.get('search?query=cogito')
self.assertEqual(response.status_int, 200)
self.assertIn('unavailable', response.body)
self.assertNotIn('gcb-search-result', response.body)
self.assertIn('gcb-search-info', response.body)
self.assertIn(exception_code, self.logged_error)
def test_unicode_pages(self):
sites.setup_courses('course:/test::ns_test, course:/:/')
course = courses.Course(None,
app_context=sites.get_all_courses()[0])
unit = course.add_unit()
unit.now_available = True
lesson_a = course.add_lesson(unit)
lesson_a.notes = search_unit_test.UNICODE_PAGE_URL
lesson_a.now_available = True
course.update_unit(unit)
course.save()
self.index_test_course()
self.swap(logging, 'error', self.error_report)
response = self.get('/test/search?query=paradox')
self.assertEqual('', self.logged_error)
self.assertNotIn('unavailable', response.body)
self.assertIn('gcb-search-result', response.body)
def test_external_links(self):
sites.setup_courses('course:/test::ns_test, course:/:/')
course = courses.Course(None, app_context=sites.get_all_courses()[0])
unit = course.add_unit()
unit.now_available = True
lesson_a = course.add_lesson(unit)
lesson_a.notes = search_unit_test.VALID_PAGE_URL
objectives_link = 'http://objectiveslink.null/'
lesson_a.objectives = '<a href="%s"></a><a href="%s"></a>' % (
search_unit_test.LINKED_PAGE_URL, objectives_link)
lesson_a.now_available = True
course.update_unit(unit)
course.save()
self.index_test_course()
response = self.get('/test/search?query=What%20hath%20God%20wrought')
self.assertIn('gcb-search-result', response.body)
response = self.get('/test/search?query=Cogito')
self.assertIn('gcb-search-result', response.body)
self.assertIn(search_unit_test.VALID_PAGE_URL, response.body)
self.assertIn(objectives_link, response.body)
self.assertNotIn(search_unit_test.PDF_URL, response.body)
# If this test fails, indexing will crawl the entire web
response = self.get('/test/search?query=ABORT')
self.assertNotIn('gcb-search-result', response.body)
self.assertNotIn(search_unit_test.SECOND_LINK_PAGE_URL, response.body)
def test_youtube(self):
sites.setup_courses('course:/test::ns_test, course:/:/')
default_namespace = namespace_manager.get_namespace()
try:
namespace_manager.set_namespace('ns_test')
course = courses.Course(None,
app_context=sites.get_all_courses()[0])
unit = course.add_unit()
unit.now_available = True
lesson_a = course.add_lesson(unit)
lesson_a.video = 'portal'
lesson_a.now_available = True
lesson_b = course.add_lesson(unit)
lesson_b.objectives = '<gcb-youtube videoid="glados">'
lesson_b.now_available = True
course.update_unit(unit)
course.save()
entity = announcements.AnnouncementEntity()
entity.html = '<gcb-youtube videoid="aperature">'
entity.title = 'Sample Announcement'
entity.date = datetime.datetime.now().date()
entity.is_draft = False
entity.put()
self.index_test_course()
response = self.get('/test/search?query=apple')
self.assertIn('gcb-search-result', response.body)
self.assertIn('start=3.14', response.body)
self.assertIn('v=portal', response.body)
self.assertIn('v=glados', response.body)
self.assertIn('v=aperature', response.body)
self.assertIn('lemon', response.body)
self.assertIn('Medicus Quis', response.body)
self.assertIn('- YouTube', response.body)
self.assertIn('http://thumbnail.null', response.body)
# Test to make sure empty notes field doesn't cause a urlfetch
response = self.get('/test/search?query=cogito')
self.assertNotIn('gcb-search-result', response.body)
finally:
namespace_manager.set_namespace(default_namespace)
def test_announcements(self):
email = 'admin@google.com'
actions.login(email, is_admin=True)
self.get('announcements')
response = self.get('dashboard?action=search')
index_token = self.get_xsrf_token(response.body, 'gcb-index-course')
response = self.post('dashboard?action=index_course',
{'xsrf_token': index_token})
self.execute_all_deferred_tasks()
# This matches an announcement in the Power Searching course
response = self.get(
'search?query=Certificates%20qualifying%20participants')
self.assertIn('gcb-search-result', response.body)
self.assertIn('announcements#', response.body)
# The draft announcement in Power Searching should not be indexed
response = self.get('search?query=Welcome%20to%20the%20final%20class')
self.assertNotIn('gcb-search-result', response.body)
self.assertNotIn('announcements#', response.body)
def test_private_units_and_lessons(self):
sites.setup_courses('course:/test::ns_test, course:/:/')
course = courses.Course(None, app_context=sites.get_all_courses()[0])
unit1 = course.add_unit()
lesson11 = course.add_lesson(unit1)
lesson11.notes = search_unit_test.VALID_PAGE_URL
lesson11.objectives = search_unit_test.VALID_PAGE
lesson11.video = 'portal'
unit2 = course.add_unit()
lesson21 = course.add_lesson(unit2)
lesson21.notes = search_unit_test.VALID_PAGE_URL
lesson21.objectives = search_unit_test.VALID_PAGE
lesson21.video = 'portal'
unit1.now_available = True
lesson11.now_available = False
course.update_unit(unit1)
unit2.now_available = False
lesson21.now_available = True
course.update_unit(unit2)
course.save()
self.index_test_course()
response = self.get('/test/search?query=cogito%20ergo%20sum')
self.assertNotIn('gcb-search-result', response.body)
response = self.get('/test/search?query=apple')
self.assertNotIn('gcb-search-result', response.body)
self.assertNotIn('v=portal', response.body)
|
|
"""
app.file.utils
~~~~~~~~~~~~~~~~
synopsis: Helpers for manipulating files.
Switches file-handling interface between sftp and os depending on configuration.
"""
import os
import magic
import hashlib
import paramiko
import shutil
from tempfile import TemporaryFile
from functools import wraps
from contextlib import contextmanager
from flask import current_app, send_from_directory
from app import sentry
TRANSFER_SIZE_LIMIT = 512000 # 512 KB
class MaxTransferSizeExceededException(Exception):
pass
class SFTPCredentialsException(Exception):
pass
@contextmanager
def sftp_ctx():
"""
Context manager that provides an SFTP client object
(an SFTP session across an open SSH Transport)
"""
transport = paramiko.Transport((current_app.config['SFTP_HOSTNAME'],
int(current_app.config['SFTP_PORT'])))
authentication_kwarg = {}
if current_app.config['SFTP_PASSWORD']:
authentication_kwarg['password'] = current_app.config['SFTP_PASSWORD']
elif current_app.config['SFTP_RSA_KEY_FILE']:
authentication_kwarg['pkey'] = paramiko.RSAKey(filename=current_app.config['SFTP_RSA_KEY_FILE'])
else:
raise SFTPCredentialsException
transport.connect(username=current_app.config['SFTP_USERNAME'], **authentication_kwarg)
sftp = paramiko.SFTPClient.from_transport(transport)
try:
yield sftp
except Exception as e:
sentry.captureException()
raise paramiko.SFTPError("Exception occurred with SFTP: {}".format(e))
finally:
sftp.close()
transport.close()
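# Example usage of sftp_ctx (a minimal sketch, not part of this module; it
# assumes the SFTP_* settings above are configured and that a Flask
# application context is active):
#
#     with sftp_ctx() as sftp:
#         print(sftp.listdir('.'))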
def _sftp_switch(sftp_func):
"""
Check if app is using SFTP and, if so, connect to SFTP server
and call passed function (sftp_func) with connected client,
otherwise call decorated function (which should be using
the os library to accomplish the same file-related action).
"""
def decorator(os_func):
@wraps(os_func)
def wrapper(*args, **kwargs):
if current_app.config['USE_SFTP']:
with sftp_ctx() as sftp:
return sftp_func(sftp, *args, **kwargs)
else:
return os_func(*args, **kwargs)
return wrapper
return decorator
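# Example wiring for _sftp_switch (a sketch only; `_sftp_listdir` and `listdir`
# are hypothetical and not part of this module). The public helpers below all
# follow this pattern: the SFTP implementation is passed to the decorator and
# the os-based fallback is the decorated function.
#
#     def _sftp_listdir(sftp, path):
#         return sftp.listdir(path)
#
#     @_sftp_switch(_sftp_listdir)
#     def listdir(path):
#         return os.listdir(path)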
def _raise_if_too_big(bytes_transferred, _):
"""Paramiko transfer callback: abort once TRANSFER_SIZE_LIMIT bytes have been transferred."""
if bytes_transferred >= TRANSFER_SIZE_LIMIT:
raise MaxTransferSizeExceededException
def _sftp_get_size(sftp, path):
return sftp.stat(path).st_size
def _sftp_exists(sftp, path):
try:
sftp.stat(path)
return True
except IOError:
sentry.captureException()
return False
def _sftp_mkdir(sftp, path):
return sftp.mkdir(path)
def _sftp_makedirs(sftp, path):
""" os.makedirs(path, exists_ok=True) """
dirs = []
while len(path) > 1:
dirs.append(path)
path, _ = os.path.split(path)
while len(dirs):
dir_ = dirs.pop()
try:
sftp.stat(dir_)
except IOError:
sentry.captureException()
sftp.mkdir(dir_)
def _sftp_remove(sftp, path):
sftp.remove(path)
def _sftp_rename(sftp, oldpath, newpath):
sftp.rename(oldpath, newpath)
def _sftp_move(sftp, localpath, remotepath):
sftp.put(localpath, remotepath)
os.remove(localpath)
def _sftp_get_mime_type(sftp, path):
with TemporaryFile() as tmp:
try:
sftp.getfo(path, tmp, _raise_if_too_big)
except MaxTransferSizeExceededException:
sentry.captureException()
tmp.seek(0)
if current_app.config['MAGIC_FILE']:
# Check using custom mime database file
m = magic.Magic(
magic_file=current_app.config['MAGIC_FILE'],
mime=True)
mime_type = m.from_buffer(tmp.read())
else:
mime_type = magic.from_buffer(tmp.read(), mime=True)
return mime_type
def _sftp_get_hash(sftp, path):
sha1 = hashlib.sha1()
with TemporaryFile() as tmp:
sftp.getfo(path, tmp)
tmp.seek(0)
sha1.update(tmp.read())
return sha1.hexdigest()
def _sftp_send_file(sftp, directory, filename, **kwargs):
localpath = _get_file_serving_path(directory, filename)
if not os.path.exists(localpath):
sftp.get(os.path.join(directory, filename), localpath)
return send_from_directory(*os.path.split(localpath), **kwargs)
def _get_file_serving_path(directory, filename):
"""
Returns the path under UPLOAD_SERVING_DIRECTORY from which the file, identified by the supplied directory and filename, is served.
"""
request_id_folder = os.path.basename(directory)
localpath = os.path.join(current_app.config['UPLOAD_SERVING_DIRECTORY'], request_id_folder)
if not os.path.exists(localpath):
os.mkdir(localpath)
path = os.path.join(request_id_folder, filename)
return os.path.join(current_app.config['UPLOAD_SERVING_DIRECTORY'], path)
@_sftp_switch(_sftp_get_size)
def getsize(path):
return os.path.getsize(path)
@_sftp_switch(_sftp_exists)
def exists(path):
return os.path.exists(path)
@_sftp_switch(_sftp_mkdir)
def mkdir(path):
os.mkdir(path)
@_sftp_switch(_sftp_makedirs)
def makedirs(path, **kwargs):
os.makedirs(path, **kwargs)
@_sftp_switch(_sftp_remove)
def remove(path):
os.remove(path)
@_sftp_switch(_sftp_rename)
def rename(oldpath, newpath):
os.rename(oldpath, newpath)
@_sftp_switch(_sftp_move)
def move(oldpath, newpath):
"""
Use this instead of 'rename' if, when using sftp, 'oldpath'
represents a local file path and 'newpath' a remote path.
"""
os.rename(oldpath, newpath)
@_sftp_switch(_sftp_get_mime_type)
def get_mime_type(path):
return os_get_mime_type(path)
def os_get_mime_type(path):
if current_app.config['MAGIC_FILE']:
# Check using custom mime database file
m = magic.Magic(
magic_file=current_app.config['MAGIC_FILE'],
mime=True)
mime_type = m.from_file(path)
else:
mime_type = magic.from_file(path, mime=True)
return mime_type
@_sftp_switch(_sftp_get_hash)
def get_hash(path):
"""
Returns the sha1 hash of a file as a string of
hexadecimal digits.
"""
return os_get_hash(path)
def os_get_hash(path):
sha1 = hashlib.sha1()
with open(path, 'rb') as fp:
sha1.update(fp.read())
return sha1.hexdigest()
@_sftp_switch(_sftp_send_file)
def send_file(directory, filename, **kwargs):
path = _get_file_serving_path(directory, filename)
shutil.copy(os.path.join(directory, filename), path)
return send_from_directory(*os.path.split(path), **kwargs)
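# Example usage of send_file in a Flask view (a sketch only; the route and the
# 'UPLOAD_DIRECTORY' config key are hypothetical, not part of this module):
#
#     @app.route('/download/<request_id>/<filename>')
#     def download(request_id, filename):
#         directory = os.path.join(current_app.config['UPLOAD_DIRECTORY'], request_id)
#         return send_file(directory, filename, as_attachment=True)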
|
|
# -*- coding: utf-8 -*-
"""Basic Security Module (BSM) event auditing file parser."""
from dfdatetime import posix_time as dfdatetime_posix_time
from plaso.containers import events
from plaso.containers import time_events
from plaso.lib import definitions
from plaso.lib import errors
from plaso.parsers import dtfabric_parser
from plaso.parsers import manager
class BSMEventData(events.EventData):
"""Basic Security Module (BSM) audit event data.
Attributes:
event_type (int): identifier that represents the type of the event.
extra_tokens (list[dict[str, dict[str, str]]]): event extra tokens, which
is a list of dictionaries that contain: {token type: {token values}}
record_length (int): record length in bytes (trailer number).
return_value (str): processed return value and exit status.
"""
DATA_TYPE = 'bsm:event'
def __init__(self):
"""Initializes event data."""
super(BSMEventData, self).__init__(data_type=self.DATA_TYPE)
self.event_type = None
self.extra_tokens = None
self.record_length = None
self.return_value = None
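# Illustrative shape of extra_tokens (an assumption for documentation only,
# not actual parser output; the value keys differ per token type):
#
#     extra_tokens = [
#         {'AUT_SUBJECT32': {'uid': '0', 'terminal_ip_address': '0.0.0.0'}},
#         {'AUT_RETURN32': {'error': 'Success', 'token_status': 0}}]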
class BSMParser(dtfabric_parser.DtFabricBaseParser):
"""Parser for Basic Security Module (BSM) event auditing files."""
NAME = 'bsm_log'
DATA_FORMAT = 'Basic Security Module (BSM) event auditing file'
_DEFINITION_FILE = 'bsm.yaml'
_TOKEN_TYPE_AUT_TRAILER = 0x13
_TOKEN_TYPE_AUT_HEADER32 = 0x14
_TOKEN_TYPE_AUT_HEADER32_EX = 0x15
_TOKEN_TYPE_AUT_RETURN32 = 0x27
_TOKEN_TYPE_AUT_RETURN64 = 0x72
_TOKEN_TYPE_AUT_HEADER64 = 0x74
_TOKEN_TYPE_AUT_HEADER64_EX = 0x79
_HEADER_TOKEN_TYPES = frozenset([
_TOKEN_TYPE_AUT_HEADER32,
_TOKEN_TYPE_AUT_HEADER32_EX,
_TOKEN_TYPE_AUT_HEADER64,
_TOKEN_TYPE_AUT_HEADER64_EX])
_TOKEN_TYPES = {
0x00: 'AUT_INVALID',
0x11: 'AUT_OTHER_FILE32',
0x12: 'AUT_OHEADER',
0x13: 'AUT_TRAILER',
0x14: 'AUT_HEADER32',
0x15: 'AUT_HEADER32_EX',
0x21: 'AUT_DATA',
0x22: 'AUT_IPC',
0x23: 'AUT_PATH',
0x24: 'AUT_SUBJECT32',
0x25: 'AUT_XATPATH',
0x26: 'AUT_PROCESS32',
0x27: 'AUT_RETURN32',
0x28: 'AUT_TEXT',
0x29: 'AUT_OPAQUE',
0x2a: 'AUT_IN_ADDR',
0x2b: 'AUT_IP',
0x2c: 'AUT_IPORT',
0x2d: 'AUT_ARG32',
0x2e: 'AUT_SOCKET',
0x2f: 'AUT_SEQ',
0x30: 'AUT_ACL',
0x31: 'AUT_ATTR',
0x32: 'AUT_IPC_PERM',
0x33: 'AUT_LABEL',
0x34: 'AUT_GROUPS',
0x35: 'AUT_ACE',
0x36: 'AUT_SLABEL',
0x37: 'AUT_CLEAR',
0x38: 'AUT_PRIV',
0x39: 'AUT_UPRIV',
0x3a: 'AUT_LIAISON',
0x3b: 'AUT_NEWGROUPS',
0x3c: 'AUT_EXEC_ARGS',
0x3d: 'AUT_EXEC_ENV',
0x3e: 'AUT_ATTR32',
0x3f: 'AUT_UNAUTH',
0x40: 'AUT_XATOM',
0x41: 'AUT_XOBJ',
0x42: 'AUT_XPROTO',
0x43: 'AUT_XSELECT',
0x44: 'AUT_XCOLORMAP',
0x45: 'AUT_XCURSOR',
0x46: 'AUT_XFONT',
0x47: 'AUT_XGC',
0x48: 'AUT_XPIXMAP',
0x49: 'AUT_XPROPERTY',
0x4a: 'AUT_XWINDOW',
0x4b: 'AUT_XCLIENT',
0x51: 'AUT_CMD',
0x52: 'AUT_EXIT',
0x60: 'AUT_ZONENAME',
0x70: 'AUT_HOST',
0x71: 'AUT_ARG64',
0x72: 'AUT_RETURN64',
0x73: 'AUT_ATTR64',
0x74: 'AUT_HEADER64',
0x75: 'AUT_SUBJECT64',
0x76: 'AUT_SERVER64',
0x77: 'AUT_PROCESS64',
0x78: 'AUT_OTHER_FILE64',
0x79: 'AUT_HEADER64_EX',
0x7a: 'AUT_SUBJECT32_EX',
0x7b: 'AUT_PROCESS32_EX',
0x7c: 'AUT_SUBJECT64_EX',
0x7d: 'AUT_PROCESS64_EX',
0x7e: 'AUT_IN_ADDR_EX',
0x7f: 'AUT_SOCKET_EX',
0x80: 'AUT_SOCKINET32',
0x81: 'AUT_SOCKINET128',
0x82: 'AUT_SOCKUNIX'}
_DATA_TYPE_MAP_PER_TOKEN_TYPE = {
0x11: 'bsm_token_data_other_file32',
0x13: 'bsm_token_data_trailer',
0x14: 'bsm_token_data_header32',
0x15: 'bsm_token_data_header32_ex',
0x21: 'bsm_token_data_data',
0x22: 'bsm_token_data_ipc',
0x23: 'bsm_token_data_path',
0x24: 'bsm_token_data_subject32',
0x26: 'bsm_token_data_subject32',
0x27: 'bsm_token_data_return32',
0x28: 'bsm_token_data_text',
0x29: 'bsm_token_data_opaque',
0x2a: 'bsm_token_data_in_addr',
0x2b: 'bsm_token_data_ip',
0x2c: 'bsm_token_data_iport',
0x2d: 'bsm_token_data_arg32',
0x2f: 'bsm_token_data_seq',
0x32: 'bsm_token_data_ipc_perm',
0x34: 'bsm_token_data_groups',
0x3b: 'bsm_token_data_groups',
0x3c: 'bsm_token_data_exec_args',
0x3d: 'bsm_token_data_exec_args',
0x3e: 'bsm_token_data_attr32',
0x52: 'bsm_token_data_exit',
0x60: 'bsm_token_data_zonename',
0x71: 'bsm_token_data_arg64',
0x72: 'bsm_token_data_return64',
0x73: 'bsm_token_data_attr64',
0x74: 'bsm_token_data_header64',
0x75: 'bsm_token_data_subject64',
0x77: 'bsm_token_data_subject64',
0x79: 'bsm_token_data_header64_ex',
0x7a: 'bsm_token_data_subject32_ex',
0x7b: 'bsm_token_data_subject32_ex',
0x7c: 'bsm_token_data_subject64_ex',
0x7d: 'bsm_token_data_subject64_ex',
0x7e: 'bsm_token_data_in_addr_ex',
0x7f: 'bsm_token_data_socket_ex',
0x80: 'bsm_token_data_sockinet32',
0x81: 'bsm_token_data_sockinet64',
0x82: 'bsm_token_data_sockunix'}
_TOKEN_DATA_FORMAT_FUNCTIONS = {
0x11: '_FormatOtherFileToken',
0x21: '_FormatDataToken',
0x22: '_FormatIPCToken',
0x23: '_FormatPathToken',
0x24: '_FormatSubjectOrProcessToken',
0x26: '_FormatSubjectOrProcessToken',
0x27: '_FormatReturnOrExitToken',
0x28: '_FormatTextToken',
0x29: '_FormatOpaqueToken',
0x2a: '_FormatInAddrToken',
0x2b: '_FormatIPToken',
0x2c: '_FormatIPortToken',
0x2d: '_FormatArgToken',
0x2f: '_FormatSeqToken',
0x32: '_FormatIPCPermToken',
0x34: '_FormatGroupsToken',
0x3b: '_FormatGroupsToken',
0x3c: '_FormatExecArgsToken',
0x3d: '_FormatExecArgsToken',
0x3e: '_FormatAttrToken',
0x52: '_FormatReturnOrExitToken',
0x60: '_FormatZonenameToken',
0x71: '_FormatArgToken',
0x72: '_FormatReturnOrExitToken',
0x73: '_FormatAttrToken',
0x75: '_FormatSubjectOrProcessToken',
0x77: '_FormatSubjectOrProcessToken',
0x7a: '_FormatSubjectExOrProcessExToken',
0x7b: '_FormatSubjectExOrProcessExToken',
0x7c: '_FormatSubjectExOrProcessExToken',
0x7d: '_FormatSubjectExOrProcessExToken',
0x7e: '_FormatInAddrExToken',
0x7f: '_FormatSocketExToken',
0x80: '_FormatSocketInet32Token',
0x81: '_FormatSocketInet128Token',
0x82: '_FormatSocketUnixToken'}
_DATA_TOKEN_FORMAT = {
0: 'Binary',
1: 'Octal',
2: 'Decimal',
3: 'Hexadecimal',
4: 'String'}
  # Error descriptions for BSM return token status (errno-style) values.
_ERRORS = {
0: 'Success',
1: 'Operation not permitted',
2: 'No such file or directory',
3: 'No such process',
4: 'Interrupted system call',
5: 'Input/output error',
6: 'Device not configured',
7: 'Argument list too long',
8: 'Exec format error',
9: 'Bad file descriptor',
10: 'No child processes',
11: 'Resource temporarily unavailable',
12: 'Cannot allocate memory',
13: 'Permission denied',
14: 'Bad address',
15: 'Block device required',
16: 'Device busy',
17: 'File exists',
      18: 'Cross-device link',
19: 'Operation not supported by device',
20: 'Not a directory',
21: 'Is a directory',
22: 'Invalid argument',
23: 'Too many open files in system',
24: 'Too many open files',
25: 'Inappropriate ioctl for device',
26: 'Text file busy',
27: 'File too large',
28: 'No space left on device',
29: 'Illegal seek',
30: 'Read-only file system',
31: 'Too many links',
32: 'Broken pipe',
33: 'Numerical argument out of domain',
34: 'Result too large',
35: 'No message of desired type',
36: 'Identifier removed',
45: 'Resource deadlock avoided',
46: 'No locks available',
47: 'Operation canceled',
48: 'Operation not supported',
49: 'Disc quota exceeded',
66: 'Too many levels of remote in path',
67: 'Link has been severed',
71: 'Protocol error',
74: 'Multihop attempted',
77: 'Bad message',
78: 'File name too long',
79: 'Value too large to be stored in data type',
88: 'Illegal byte sequence',
89: 'Function not implemented',
90: 'Too many levels of symbolic links',
91: 'Restart syscall',
93: 'Directory not empty',
94: 'Too many users',
95: 'Socket operation on non-socket',
96: 'Destination address required',
97: 'Message too long',
98: 'Protocol wrong type for socket',
99: 'Protocol not available',
120: 'Protocol not supported',
121: 'Socket type not supported',
122: 'Operation not supported',
123: 'Protocol family not supported',
124: 'Address family not supported by protocol family',
125: 'Address already in use',
126: 'Can\'t assign requested address',
127: 'Network is down',
128: 'Network unreachable',
129: 'Network dropped connection on reset',
130: 'Software caused connection abort',
131: 'Connection reset by peer',
132: 'No buffer space available',
133: 'Socket is already connected',
134: 'Socket is not connected',
143: 'Can\'t send after socket shutdown',
144: 'Too many references: can\'t splice',
145: 'Operation timed out',
146: 'Connection refused',
147: 'Host is down',
148: 'No route to host',
149: 'Operation already in progress',
150: 'Operation now in progress',
151: 'Stale NFS file handle',
190: 'PROCLIM',
191: 'BADRPC',
192: 'RPCMISMATCH',
193: 'PROGUNAVAIL',
194: 'PROGMISMATCH',
195: 'PROCUNAVAIL',
196: 'FTYPE',
197: 'AUTH',
198: 'NEEDAUTH',
199: 'NOATTR',
200: 'DOOFUS',
201: 'USTRETURN',
202: 'NOIOCTL',
203: 'DIRIOCTL',
204: 'PWROFF',
205: 'DEVERR',
206: 'BADEXEC',
207: 'BADARCH',
208: 'SHLIBVERS',
209: 'BADMACHO',
210: 'POLICY'}
  # BSM network protocols, based on information from OpenBSD.
_NETWORK_PROTOCOLS = {
0: 'UNSPEC',
1: 'LOCAL',
2: 'INET',
3: 'IMPLINK',
4: 'PUP',
5: 'CHAOS',
6: 'NS',
8: 'ECMA',
9: 'DATAKIT',
10: 'CCITT',
11: 'SNA',
12: 'DECnet',
13: 'DLI',
14: 'LAT',
15: 'HYLINK',
16: 'APPLETALK',
19: 'OSI',
23: 'IPX',
24: 'ROUTE',
25: 'LINK',
26: 'INET6',
27: 'KEY',
500: 'NETBIOS',
501: 'ISO',
502: 'XTP',
503: 'COIP',
504: 'CNT',
505: 'RTIP',
506: 'SIP',
507: 'PIP',
508: 'ISDN',
509: 'E164',
510: 'NATM',
511: 'ATM',
512: 'NETGRAPH',
513: 'SLOW',
514: 'CLUSTER',
515: 'ARP',
516: 'BLUETOOTH'}
def _FormatArgToken(self, token_data):
"""Formats an argument token as a dictionary of values.
Args:
token_data (bsm_token_data_arg32|bsm_token_data_arg64): AUT_ARG32 or
AUT_ARG64 token data.
Returns:
dict[str, str]: token values.
"""
return {
'string': token_data.argument_value.rstrip('\x00'),
'num_arg': token_data.argument_index,
'is': token_data.argument_name}
def _FormatAttrToken(self, token_data):
"""Formats an attribute token as a dictionary of values.
Args:
token_data (bsm_token_data_attr32|bsm_token_data_attr64): AUT_ATTR32 or
AUT_ATTR64 token data.
Returns:
dict[str, str]: token values.
"""
return {
'mode': token_data.file_mode,
'uid': token_data.user_identifier,
'gid': token_data.group_identifier,
'system_id': token_data.file_system_identifier,
'node_id': token_data.file_identifier,
'device': token_data.device}
def _FormatDataToken(self, token_data):
"""Formats a data token as a dictionary of values.
Args:
token_data (bsm_token_data_data): AUT_DATA token data.
Returns:
dict[str, str]: token values.
"""
format_string = self._DATA_TOKEN_FORMAT.get(
token_data.data_format, 'UNKNOWN')
if token_data.data_format == 4:
data = bytes(bytearray(token_data.data)).split(b'\x00')[0]
data = data.decode('utf-8')
else:
data = ''.join(['{0:02x}'.format(byte) for byte in token_data.data])
return {
'format': format_string,
'data': data}
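  # Example for _FormatDataToken with hypothetical token data: a data_format
  # of 4 ('String') with data b'ps\x00' decodes to
  # {'format': 'String', 'data': 'ps'}, while any other format hex-encodes
  # the raw bytes instead.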
def _FormatInAddrExToken(self, token_data):
"""Formats an extended IPv4 address token as a dictionary of values.
Args:
token_data (bsm_token_data_in_addr_ex): AUT_IN_ADDR_EX token data.
Returns:
dict[str, str]: token values.
"""
protocol = self._NETWORK_PROTOCOLS.get(token_data.net_type, 'UNKNOWN')
if token_data.net_type == 4:
ip_address = self._FormatPackedIPv6Address(token_data.ip_address[:4])
    elif token_data.net_type == 16:
      ip_address = self._FormatPackedIPv6Address(token_data.ip_address)
    else:
      ip_address = 'unknown'
return {
'protocols': protocol,
'net_type': token_data.net_type,
'address': ip_address}
def _FormatInAddrToken(self, token_data):
"""Formats an IPv4 address token as a dictionary of values.
Args:
token_data (bsm_token_data_in_addr): AUT_IN_ADDR token data.
Returns:
dict[str, str]: token values.
"""
ip_address = self._FormatPackedIPv4Address(token_data.ip_address)
return {'ip': ip_address}
def _FormatIPCPermToken(self, token_data):
"""Formats an IPC permissions token as a dictionary of values.
Args:
token_data (bsm_token_data_ipc_perm): AUT_IPC_PERM token data.
Returns:
dict[str, str]: token values.
"""
return {
'user_id': token_data.user_identifier,
'group_id': token_data.group_identifier,
'creator_user_id': token_data.creator_user_identifier,
'creator_group_id': token_data.creator_group_identifier,
'access': token_data.access_mode}
def _FormatIPCToken(self, token_data):
"""Formats an IPC token as a dictionary of values.
Args:
token_data (bsm_token_data_ipc): AUT_IPC token data.
Returns:
dict[str, str]: token values.
"""
return {
'object_type': token_data.object_type,
'object_id': token_data.object_identifier}
def _FormatGroupsToken(self, token_data):
"""Formats a groups token as a dictionary of values.
Args:
token_data (bsm_token_data_groups): AUT_GROUPS or AUT_NEWGROUPS token
data.
Returns:
dict[str, str]: token values.
"""
return {
'number_of_groups': token_data.number_of_groups,
'groups': ', '.join(token_data.groups)}
def _FormatExecArgsToken(self, token_data):
"""Formats an execution arguments token as a dictionary of values.
Args:
token_data (bsm_token_data_exec_args): AUT_EXEC_ARGS or AUT_EXEC_ENV
token data.
Returns:
dict[str, str]: token values.
"""
return {
'number_of_strings': token_data.number_of_strings,
'strings': ', '.join(token_data.strings)}
def _FormatIPortToken(self, token_data):
"""Formats an IP port token as a dictionary of values.
Args:
token_data (bsm_token_data_iport): AUT_IPORT token data.
Returns:
dict[str, str]: token values.
"""
return {'port_number': token_data.port_number}
def _FormatIPToken(self, token_data):
"""Formats an IPv4 packet header token as a dictionary of values.
Args:
token_data (bsm_token_data_ip): AUT_IP token data.
Returns:
dict[str, str]: token values.
"""
data = ''.join(['{0:02x}'.format(byte) for byte in token_data.data])
return {'IPv4_Header': data}
def _FormatOpaqueToken(self, token_data):
"""Formats an opaque token as a dictionary of values.
Args:
token_data (bsm_token_data_opaque): AUT_OPAQUE token data.
Returns:
dict[str, str]: token values.
"""
data = ''.join(['{0:02x}'.format(byte) for byte in token_data.data])
return {'data': data}
def _FormatOtherFileToken(self, token_data):
"""Formats an other file token as a dictionary of values.
Args:
token_data (bsm_token_data_other_file32): AUT_OTHER_FILE32 token data.
Returns:
dict[str, str]: token values.
"""
# TODO: if this timestamp is useful, it must be extracted as a separate
# event object.
timestamp = token_data.microseconds + (
token_data.timestamp * definitions.MICROSECONDS_PER_SECOND)
date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
timestamp=timestamp)
date_time_string = date_time.CopyToDateTimeString()
return {
'string': token_data.name.rstrip('\x00'),
'timestamp': date_time_string}
def _FormatPathToken(self, token_data):
"""Formats a path token as a dictionary of values.
Args:
token_data (bsm_token_data_path): AUT_PATH token data.
Returns:
dict[str, str]: token values.
"""
return {'path': token_data.path.rstrip('\x00')}
def _FormatReturnOrExitToken(self, token_data):
"""Formats a return or exit token as a dictionary of values.
Args:
token_data (bsm_token_data_exit|bsm_token_data_return32|
bsm_token_data_return64): AUT_EXIT, AUT_RETURN32 or
AUT_RETURN64 token data.
Returns:
dict[str, str]: token values.
"""
error_string = self._ERRORS.get(token_data.status, 'UNKNOWN')
return {
'error': error_string,
'token_status': token_data.status,
'call_status': token_data.return_value}
def _FormatSeqToken(self, token_data):
"""Formats a sequence token as a dictionary of values.
Args:
token_data (bsm_token_data_seq): AUT_SEQ token data.
Returns:
dict[str, str]: token values.
"""
return {'sequence_number': token_data.sequence_number}
def _FormatSocketExToken(self, token_data):
"""Formats an extended socket token as a dictionary of values.
Args:
token_data (bsm_token_data_socket_ex): AUT_SOCKET_EX token data.
Returns:
dict[str, str]: token values.
"""
if token_data.socket_domain == 10:
local_ip_address = self._FormatPackedIPv6Address(
token_data.local_ip_address)
remote_ip_address = self._FormatPackedIPv6Address(
token_data.remote_ip_address)
else:
local_ip_address = self._FormatPackedIPv4Address(
token_data.local_ip_address)
remote_ip_address = self._FormatPackedIPv4Address(
token_data.remote_ip_address)
return {
'from': local_ip_address,
'from_port': token_data.local_port,
'to': remote_ip_address,
'to_port': token_data.remote_port}
def _FormatSocketInet32Token(self, token_data):
"""Formats an Internet socket token as a dictionary of values.
Args:
token_data (bsm_token_data_sockinet32): AUT_SOCKINET32 token data.
Returns:
dict[str, str]: token values.
"""
protocol = self._NETWORK_PROTOCOLS.get(token_data.socket_family, 'UNKNOWN')
ip_address = self._FormatPackedIPv4Address(token_data.ip_addresss)
return {
'protocols': protocol,
'family': token_data.socket_family,
'port': token_data.port_number,
'address': ip_address}
def _FormatSocketInet128Token(self, token_data):
"""Formats an Internet socket token as a dictionary of values.
Args:
token_data (bsm_token_data_sockinet64): AUT_SOCKINET128 token data.
Returns:
dict[str, str]: token values.
"""
protocol = self._NETWORK_PROTOCOLS.get(token_data.socket_family, 'UNKNOWN')
ip_address = self._FormatPackedIPv6Address(token_data.ip_addresss)
return {
'protocols': protocol,
'family': token_data.socket_family,
'port': token_data.port_number,
'address': ip_address}
def _FormatSocketUnixToken(self, token_data):
"""Formats an Unix socket token as a dictionary of values.
Args:
token_data (bsm_token_data_sockunix): AUT_SOCKUNIX token data.
Returns:
dict[str, str]: token values.
"""
protocol = self._NETWORK_PROTOCOLS.get(token_data.socket_family, 'UNKNOWN')
return {
'protocols': protocol,
'family': token_data.socket_family,
'path': token_data.socket_path}
def _FormatSubjectOrProcessToken(self, token_data):
"""Formats a subject or process token as a dictionary of values.
Args:
token_data (bsm_token_data_subject32|bsm_token_data_subject64):
AUT_SUBJECT32, AUT_PROCESS32, AUT_SUBJECT64 or AUT_PROCESS64 token
data.
Returns:
dict[str, str]: token values.
"""
ip_address = self._FormatPackedIPv4Address(token_data.ip_address)
return {
'aid': token_data.audit_user_identifier,
'euid': token_data.effective_user_identifier,
'egid': token_data.effective_group_identifier,
'uid': token_data.real_user_identifier,
'gid': token_data.real_group_identifier,
'pid': token_data.process_identifier,
'session_id': token_data.session_identifier,
'terminal_port': token_data.terminal_port,
'terminal_ip': ip_address}
def _FormatSubjectExOrProcessExToken(self, token_data):
"""Formats a subject or process token as a dictionary of values.
Args:
token_data (bsm_token_data_subject32_ex|bsm_token_data_subject64_ex):
AUT_SUBJECT32_EX, AUT_PROCESS32_EX, AUT_SUBJECT64_EX or
AUT_PROCESS64_EX token data.
Returns:
dict[str, str]: token values.
"""
if token_data.net_type == 4:
ip_address = self._FormatPackedIPv4Address(token_data.ip_address)
elif token_data.net_type == 16:
ip_address = self._FormatPackedIPv6Address(token_data.ip_address)
else:
ip_address = 'unknown'
return {
'aid': token_data.audit_user_identifier,
'euid': token_data.effective_user_identifier,
'egid': token_data.effective_group_identifier,
'uid': token_data.real_user_identifier,
'gid': token_data.real_group_identifier,
'pid': token_data.process_identifier,
'session_id': token_data.session_identifier,
'terminal_port': token_data.terminal_port,
'terminal_ip': ip_address}
def _FormatTextToken(self, token_data):
"""Formats a text token as a dictionary of values.
Args:
token_data (bsm_token_data_text): AUT_TEXT token data.
Returns:
dict[str, str]: token values.
"""
return {'text': token_data.text.rstrip('\x00')}
def _FormatTokenData(self, token_type, token_data):
"""Formats the token data as a dictionary of values.
Args:
token_type (int): token type.
token_data (object): token data.
Returns:
dict[str, str]: formatted token values or an empty dictionary if no
formatted token values could be determined.
"""
token_data_format_function = self._TOKEN_DATA_FORMAT_FUNCTIONS.get(
token_type)
if token_data_format_function:
token_data_format_function = getattr(
self, token_data_format_function, None)
if not token_data_format_function:
return {}
return token_data_format_function(token_data)
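  # Dispatch example for _FormatTokenData with hypothetical token data: an
  # AUT_PATH token (0x23) resolves to _FormatPathToken and yields a dictionary
  # such as {'path': '/etc/master.passwd'}; token types without an entry in
  # _TOKEN_DATA_FORMAT_FUNCTIONS produce an empty dictionary.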
def _FormatZonenameToken(self, token_data):
"""Formats a time zone name token as a dictionary of values.
Args:
token_data (bsm_token_data_zonename): AUT_ZONENAME token data.
Returns:
dict[str, str]: token values.
"""
return {'name': token_data.name.rstrip('\x00')}
def _ParseRecord(self, parser_mediator, file_object):
"""Parses an event record.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): file-like object.
Raises:
ParseError: if the event record cannot be read.
"""
header_record_offset = file_object.tell()
    # Check the header token type before reading the token data to prevent
    # variable size tokens from consuming a large amount of memory.
token_type = self._ParseTokenType(file_object, header_record_offset)
if token_type not in self._HEADER_TOKEN_TYPES:
raise errors.ParseError(
'Unsupported header token type: 0x{0:02x}'.format(token_type))
token_type, token_data = self._ParseToken(file_object, header_record_offset)
if token_data.format_version != 11:
raise errors.ParseError('Unsupported format version type: {0:d}'.format(
token_data.format_version))
timestamp = token_data.microseconds + (
token_data.timestamp * definitions.MICROSECONDS_PER_SECOND)
event_type = token_data.event_type
header_record_size = token_data.record_size
record_end_offset = header_record_offset + header_record_size
event_tokens = []
return_token_values = None
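    # Each event record is expected to consist of a header token, a variable
    # number of body tokens and a terminating AUT_TRAILER token whose
    # record_size should match the one in the header.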
file_offset = file_object.tell()
while file_offset < record_end_offset:
token_type, token_data = self._ParseToken(file_object, file_offset)
if not token_data:
raise errors.ParseError('Unsupported token type: 0x{0:02x}'.format(
token_type))
file_offset = file_object.tell()
if token_type == self._TOKEN_TYPE_AUT_TRAILER:
break
token_type_string = self._TOKEN_TYPES.get(token_type, 'UNKNOWN')
token_values = self._FormatTokenData(token_type, token_data)
event_tokens.append({token_type_string: token_values})
if token_type in (
self._TOKEN_TYPE_AUT_RETURN32, self._TOKEN_TYPE_AUT_RETURN64):
# Make sure return_token_values is a string.
return_token_values = (
'{{\'error\': \'{0:s}\', \'token_status\': {1:d}, \'call_status\': '
'{2:d}}}').format(
token_values['error'], token_values['token_status'],
token_values['call_status'])
if token_data.record_size != header_record_size:
raise errors.ParseError(
'Mismatch of event record size between header and trailer token.')
event_data = BSMEventData()
event_data.event_type = event_type
event_data.extra_tokens = event_tokens
event_data.offset = header_record_offset
event_data.record_length = header_record_size
event_data.return_value = return_token_values
date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
timestamp=timestamp)
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_CREATION)
parser_mediator.ProduceEventWithEventData(event, event_data)
def _ParseToken(self, file_object, file_offset):
"""Parses a token.
Args:
file_object (dfvfs.FileIO): file-like object.
file_offset (int): offset of the token relative to the start of
the file-like object.
Returns:
tuple: containing:
int: token type
object: token data or None if the token type is not supported.
"""
token_type = self._ParseTokenType(file_object, file_offset)
token_data = None
token_data_map_name = self._DATA_TYPE_MAP_PER_TOKEN_TYPE.get(
token_type, None)
if token_data_map_name:
token_data_map = self._GetDataTypeMap(token_data_map_name)
token_data, _ = self._ReadStructureFromFileObject(
file_object, file_offset + 1, token_data_map)
return token_type, token_data
def _ParseTokenType(self, file_object, file_offset):
"""Parses a token type.
Args:
file_object (dfvfs.FileIO): file-like object.
file_offset (int): offset of the token relative to the start of
the file-like object.
Returns:
int: token type
"""
token_type_map = self._GetDataTypeMap('uint8')
token_type, _ = self._ReadStructureFromFileObject(
file_object, file_offset, token_type_map)
return token_type
def ParseFileObject(self, parser_mediator, file_object):
"""Parses a BSM file-like object.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): a file-like object.
Raises:
UnableToParseFile: when the file cannot be parsed.
"""
file_offset = file_object.get_offset()
file_size = file_object.get_size()
while file_offset < file_size:
try:
self._ParseRecord(parser_mediator, file_object)
except errors.ParseError as exception:
if file_offset == 0:
raise errors.UnableToParseFile(
'Unable to parse first event record with error: {0!s}'.format(
exception))
# TODO: skip to next event record.
file_offset = file_object.get_offset()
manager.ParsersManager.RegisterParser(BSMParser)
|
|
#!/usr/bin/env python
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
"""Runs a side-by-side comparison of two PerfKitBenchmarker revisions.
Given a pair of revisions (e.g., 'dev', 'master') and command-line arguments,
this tool runs 'pkb.py' once for each revision and creates a report showing
the differences in the results between the two runs.
"""
import argparse
import collections
import contextlib
import difflib
import itertools
import json
import logging
import os
import pprint
import shlex
import shutil
import subprocess
import tempfile
import jinja2
DEFAULT_FLAGS = ('--cloud=GCP', '--machine_type=n1-standard-4',
'--benchmarks=netperf')
# Keys in the sample JSON we expect to vary between runs.
# These will be removed prior to diffing samples.
VARYING_KEYS = 'run_uri', 'sample_uri', 'timestamp', 'value'
# Template name, in same directory as this file.
TEMPLATE = 'side_by_side.html.j2'
# Thresholds for highlighting results
SMALL_CHANGE_THRESHOLD = 5
MEDIUM_CHANGE_THRESHOLD = 10
LARGE_CHANGE_THRESHOLD = 25
PerfKitBenchmarkerResult = collections.namedtuple(
'PerfKitBenchmarkerResult',
['name', 'description', 'sha1', 'samples', 'flags'])
@contextlib.contextmanager
def TempDir(delete=True, **kwargs):
"""Directory equivalent of tempfile.NamedTemporaryFile.
When used as a context manager, yields a temporary directory which by default
  is removed when the context manager goes out of scope.
Example usage:
>>> with TempDir(prefix='perfkit') as td:
... shutil.copy('test.txt', td)
Args:
delete: Delete the directory on exit?
**kwargs: Passed to tempfile.mkdtemp.
Yields:
String. Path to the temporary directory.
"""
td = tempfile.mkdtemp(**kwargs)
logging.info('Created %s', td)
try:
yield td
finally:
if delete:
logging.info('Removing %s', td)
shutil.rmtree(td)
def _GitCommandPrefix():
"""Prefix for all git commands.
Returns:
list of strings; 'git' with an appropriate '--git-dir' flag.
"""
git_dir = os.path.join(os.path.dirname(__file__), '..', '..', '.git')
return ['git', '--git-dir', git_dir]
def _GitRevParse(revision):
"""Returns the output of 'git rev-parse' for 'revision'."""
output = subprocess.check_output(_GitCommandPrefix() +
['rev-parse', revision])
return output.rstrip()
def _GitDescribe(revision):
"""Returns the output of 'git describe' for 'revision'."""
output = subprocess.check_output(_GitCommandPrefix() +
['describe', '--always', revision])
return output.rstrip()
@contextlib.contextmanager
def PerfKitBenchmarkerCheckout(revision):
"""Yields a directory with PerfKitBenchmarker checked out to 'revision'."""
archive_cmd = _GitCommandPrefix() + ['archive', revision]
logging.info('Running: %s', archive_cmd)
p_archive = subprocess.Popen(archive_cmd, stdout=subprocess.PIPE)
with TempDir(prefix='pkb-test-') as td:
tar_cmd = ['tar', 'xf', '-']
logging.info('Running %s in %s', tar_cmd, td)
p_tar = subprocess.Popen(tar_cmd, stdin=p_archive.stdout, cwd=td)
archive_status = p_archive.wait()
tar_status = p_tar.wait()
if archive_status:
      raise subprocess.CalledProcessError(archive_status, archive_cmd)
if tar_status:
raise subprocess.CalledProcessError(tar_status, tar_cmd)
yield td
def RunPerfKitBenchmarker(revision, flags):
"""Runs perfkitbenchmarker, returning the results as parsed JSON.
Args:
revision: string. git commit identifier. Version of PerfKitBenchmarker to
run.
flags: list of strings. Default arguments to pass to `pkb.py.`
Returns:
List of dicts. Deserialized JSON output of running PerfKitBenchmarker with
`--json_path`.
"""
sha1 = _GitRevParse(revision)
description = _GitDescribe(revision)
with PerfKitBenchmarkerCheckout(revision) as td:
with tempfile.NamedTemporaryFile(suffix='.json') as tf:
flags = flags + ['--json_path=' + tf.name]
cmd = ['./pkb.py'] + flags
logging.info('Running %s in %s', cmd, td)
subprocess.check_call(cmd, cwd=td)
samples = [json.loads(line) for line in tf]
return PerfKitBenchmarkerResult(name=revision, sha1=sha1, flags=flags,
samples=samples, description=description)
def _SplitLabels(labels):
"""Parse the 'labels' key from a PerfKitBenchmarker record.
Labels are recorded in '|key:value|,|key:value|' form.
This function transforms them to a dict.
Args:
labels: string. labels to parse.
Returns:
dict. Parsed 'labels'.
"""
result = {}
for item in labels.strip('|').split('|,|'):
k, v = item.split(':', 1)
result[k] = v
return result
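# Example for _SplitLabels with a hypothetical labels string:
#   _SplitLabels('|cloud:GCP|,|machine_type:n1-standard-4|')
#   returns {'cloud': 'GCP', 'machine_type': 'n1-standard-4'}.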
def _CompareSamples(a, b, context=True, numlines=1):
"""Generate an HTML table showing differences between 'a' and 'b'.
Args:
a: dict, as output by PerfKitBenchmarker.
b: dict, as output by PerfKitBenchmarker.
context: boolean. Show context in diff? If False, all lines are output, even
those which are equal.
    numlines: int. Passed to difflib.HtmlDiff.make_table.
Returns:
string or None. An HTML table, or None if there are no differences.
"""
a = a.copy()
b = b.copy()
a['metadata'] = _SplitLabels(a.pop('labels', ''))
b['metadata'] = _SplitLabels(b.pop('labels', ''))
# Prune the keys in VARYING_KEYS prior to comparison to make the diff more
# informative.
for d in (a, b):
for key in VARYING_KEYS:
d.pop(key, None)
astr = pprint.pformat(a).splitlines()
bstr = pprint.pformat(b).splitlines()
if astr == bstr and context:
return None
differ = difflib.HtmlDiff()
return differ.make_table(astr, bstr, context=context, numlines=numlines)
def _MatchSamples(base_samples, head_samples):
"""Match items from base_samples with items from head_samples.
Rows are matched using 'test', 'metric', and 'unit' fields.
Args:
base_samples: List of dicts.
head_samples: List of dicts.
Returns:
List of pairs, each item of the pair containing either a dict or None.
"""
def ExtractKeys(samples):
return [(i['test'], i['metric'], i['unit']) for i in samples]
base_keys = ExtractKeys(base_samples)
head_keys = ExtractKeys(head_samples)
  sm = difflib.SequenceMatcher(None, base_keys, head_keys)
result = []
for opcode, base_begin, base_end, head_begin, head_end in sm.get_opcodes():
if opcode == 'equal':
result.extend(zip(base_samples[base_begin:base_end],
head_samples[head_begin:head_end]))
elif opcode == 'replace':
result.extend(itertools.izip_longest(base_samples[base_begin:base_end],
head_samples[head_begin:head_end]))
elif opcode == 'delete':
result.extend(zip(base_samples[base_begin:base_end],
[None] * (base_end - base_begin)))
elif opcode == 'insert':
result.extend(zip([None] * (head_end - head_begin),
head_samples[head_begin:head_end]))
else:
raise AssertionError('Unknown op: ' + opcode)
return result
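# Matching example for _MatchSamples with hypothetical samples: if the base
# run produced keys [('netperf', 'TCP_RR_Latency', 'us')] and the head run
# produced that key plus ('netperf', 'TCP_STREAM_Throughput', 'Mbits/sec'),
# the result is the matched pair followed by (None, <new head sample>) for
# the inserted row.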
def RenderResults(base_result, head_result, template_name=TEMPLATE,
**kwargs):
"""Render the results of a comparison as an HTML page.
Args:
base_result: PerfKitBenchmarkerResult. Result of running against base
revision.
head_result: PerfKitBenchmarkerResult. Result of running against head
revision.
template_name: string. The filename of the template.
kwargs: Additional arguments to Template.render.
Returns:
String. The HTML template.
"""
def _ClassForPercentDifference(percent_diff):
"""Crude highlighting of differences between runs.
    Differences greater than 25% get the 'large' class, 10-25% 'medium',
    and 5-10% 'small'; smaller differences are not highlighted.
Args:
percent_diff: float. percent difference between values.
"""
if percent_diff < 0:
direction = 'decrease'
else:
direction = 'increase'
percent_diff = abs(percent_diff)
if percent_diff > LARGE_CHANGE_THRESHOLD:
size = 'large'
elif percent_diff > MEDIUM_CHANGE_THRESHOLD:
size = 'medium'
elif percent_diff > SMALL_CHANGE_THRESHOLD:
size = 'small'
else:
return ''
return 'value-{0}-{1}'.format(direction, size)
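  # Example with a hypothetical value: _ClassForPercentDifference(-30) maps
  # to the CSS class 'value-decrease-large', while differences of 5% or less
  # in either direction produce no highlighting class at all.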
env = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
undefined=jinja2.StrictUndefined)
env.globals['class_for_percent_diff'] = _ClassForPercentDifference
env.globals['izip_longest'] = itertools.izip_longest
  template = env.get_template(template_name)
matched = _MatchSamples(base_result.samples,
head_result.samples)
# Generate sample diffs
sample_context_diffs = []
sample_diffs = []
for base_sample, head_sample in matched:
if not base_sample or not head_sample:
# Sample inserted or deleted.
continue
sample_context_diffs.append(
_CompareSamples(base_sample, head_sample))
sample_diffs.append(
_CompareSamples(base_sample, head_sample, context=False))
# Generate flag diffs
flag_diffs = difflib.HtmlDiff().make_table(
base_result.flags, head_result.flags, context=False)
# Used for generating a chart with differences.
matched_json = json.dumps(matched)\
.replace(u'<', u'\\u003c') \
.replace(u'>', u'\\u003e') \
.replace(u'&', u'\\u0026') \
.replace(u"'", u'\\u0027')
return template.render(base=base_result,
head=head_result,
matched_samples=matched,
matched_samples_json=matched_json,
sample_diffs=sample_diffs,
sample_context_diffs=sample_context_diffs,
flag_diffs=flag_diffs,
infinity=float('inf'),
**kwargs)
def main():
p = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description=__doc__)
p.add_argument('-t', '--title', default='PerfKitBenchmarker Comparison',
help="""HTML report title""")
p.add_argument('--base', default='master', help="""Base revision.""")
p.add_argument('--head', default='dev', help="""Head revision.""")
p.add_argument('--base-flags', default=None, help="""Flags for run against
'--base' revision. Will be combined with --flags.""",
type=shlex.split)
p.add_argument('--head-flags', default=None, help="""Flags for run against
'--head' revision. Will be combined with --flags.""",
type=shlex.split)
p.add_argument('-f', '--flags', type=shlex.split,
help="""Command line flags (Default: {0})""".format(
' '.join(DEFAULT_FLAGS)))
p.add_argument('-p', '--parallel', default=False, action='store_true',
help="""Run concurrently""")
p.add_argument('--rerender', help="""Re-render the HTML report from a JSON
file [for developers].""", action='store_true')
p.add_argument('json_output', help="""JSON output path.""")
p.add_argument('html_output', help="""HTML output path.""")
a = p.parse_args()
if (a.base_flags or a.head_flags):
if not (a.base_flags and a.head_flags):
p.error('--base-flags and --head-flags must be specified together.\n'
'\tbase flags={0}\n\thead flags={1}'.format(
a.base_flags, a.head_flags))
a.base_flags = a.base_flags + (a.flags or [])
a.head_flags = a.head_flags + (a.flags or [])
else:
# Just --flags
assert not a.base_flags, a.base_flags
assert not a.head_flags, a.head_flags
a.base_flags = a.flags or list(DEFAULT_FLAGS)
a.head_flags = a.flags or list(DEFAULT_FLAGS)
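  # Resolution example for the branches above, with hypothetical arguments:
  # passing only --flags='--benchmarks=iperf' makes both runs use
  # ['--benchmarks=iperf']; passing no flag arguments at all makes both runs
  # fall back to DEFAULT_FLAGS.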
if not a.rerender:
if a.parallel:
from concurrent import futures
with futures.ThreadPoolExecutor(max_workers=2) as executor:
base_res_fut = executor.submit(RunPerfKitBenchmarker, a.base,
a.base_flags)
head_res_fut = executor.submit(RunPerfKitBenchmarker, a.head,
a.head_flags)
base_res = base_res_fut.result()
head_res = head_res_fut.result()
else:
base_res = RunPerfKitBenchmarker(a.base, a.base_flags)
head_res = RunPerfKitBenchmarker(a.head, a.head_flags)
logging.info('Base result: %s', base_res)
logging.info('Head result: %s', head_res)
with argparse.FileType('w')(a.json_output) as json_fp:
logging.info('Writing JSON to %s', a.json_output)
json.dump({'head': head_res._asdict(),
'base': base_res._asdict()},
json_fp,
indent=2)
json_fp.write('\n')
else:
logging.info('Loading results from %s', a.json_output)
with argparse.FileType('r')(a.json_output) as json_fp:
d = json.load(json_fp)
base_res = PerfKitBenchmarkerResult(**d['base'])
head_res = PerfKitBenchmarkerResult(**d['head'])
with argparse.FileType('w')(a.html_output) as html_fp:
logging.info('Writing HTML to %s', a.html_output)
html_fp.write(RenderResults(base_result=base_res,
head_result=head_res,
varying_keys=VARYING_KEYS,
title=a.title))
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
main()
|
|
from __future__ import division
from __future__ import print_function
from scipy.ndimage.filters import generic_filter
from collections import defaultdict
from raster_ops import extract, reclassify_from_data, geometry_mask, \
write_tif
import numpy as np
import csv
import math
import os
# Enabling DEBUG will write all intermediate rasters to disk
DEBUG = False
CUR_PATH = os.path.dirname(__file__)
DEFAULT_DATA_PATH = os.path.join(CUR_PATH, 'data/cdl_data_grouped.csv')
RASTER_PATH = '/opt/icp-crop-data/cdl_reclass_lzw_5070.tif'
SETTINGS = {}
ABUNDANCE_IDX = 0.1 # A constant for managing wild bee yield
CELL_SIZE = 30
FORAGE_DIST = 670
AG_CLASSES = [35, 29, 51, 27, 52, 17, 50, 49, 18, 20, 28, 48]
COVER_CROPS = {35: 53, 29: 54, 51: 55, 27: 56, 52: 57, 17: 58, 50: 59}
def initialize():
"""
Determine model settings which do not change between requests
"""
# Nesting and Floral suitability values per CDL crop type
nesting_reclass, floral_reclass, yield_config = load_crop_data()
max_dist = FORAGE_DIST * 2
    # Width of the focal window matrix, spanning the full distance a bee
    # can travel in every direction from the center cell
radius = int(round(max_dist/CELL_SIZE)) * 2 + 1
window = np.ones(shape=(radius, radius))
dist_matrix = np.empty(shape=(radius, radius), dtype=np.float32)
focal_center = int(round(radius/2))
# Set cell values to their distance to center of focal window
for (i, j), _ in np.ndenumerate(dist_matrix):
x, y = i+1, j+1
dist_matrix[i, j] = math.sqrt(
((x-0.5) * CELL_SIZE - (focal_center-0.5) * CELL_SIZE)**2 +
((y-0.5) * CELL_SIZE - (focal_center-0.5) * CELL_SIZE)**2)
distances = dist_matrix.ravel()
effective_dist = np.exp(-distances / FORAGE_DIST)
# Where the effective distance > max forage distance, set 0
effective_dist[np.where(distances > max_dist)] = 0
sum_dist = np.sum(effective_dist)
# These settings are valid against all requests and only need to
# be computed once.
SETTINGS['effective_dist'] = effective_dist
SETTINGS['sum_dist'] = sum_dist
SETTINGS['window'] = window
SETTINGS['floral_reclass'] = floral_reclass
SETTINGS['nesting_reclass'] = nesting_reclass
SETTINGS['yield'] = yield_config
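# A note on the weights computed in initialize(): a cell at distance d meters
# from the window center contributes exp(-d / FORAGE_DIST), so a cell 670 m
# away has a weight of roughly e**-1 (about 0.37); cells farther than
# 2 * FORAGE_DIST are zeroed out before the weights are normalized by their
# sum in focal_op.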
def load_crop_data(data_src=DEFAULT_DATA_PATH):
"""
Load the reclassification values for both floral and nesting attributes
from the CDL CSV.
"""
with open(data_src, mode='r') as cdl_data:
reader = csv.reader(cdl_data)
nesting_reclass = []
floral_reclass = []
yield_config = defaultdict(dict)
hf_idx = 3
hn_idx = 4
density_idx = 5
demand_idx = 2
id_idx = 0
next(reader, None) # Skip headers
for row in reader:
id = int(row[id_idx])
nesting_reclass.append([id, float(row[hn_idx])])
floral_reclass.append([id, float(row[hf_idx])])
yield_config[id]['demand'] = float(row[demand_idx])
yield_config[id]['density'] = float(row[density_idx])
return nesting_reclass, floral_reclass, yield_config
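# Example for load_crop_data with a hypothetical CSV row: a row of
# ['35', 'some_crop', '0.8', '0.6', '0.4', '1.0'] adds [35, 0.4] to
# nesting_reclass, [35, 0.6] to floral_reclass and sets
# yield_config[35] = {'demand': 0.8, 'density': 1.0}.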
def focal_op(x):
"""
Determine focal center value for the window function.
"""
return np.sum(x * SETTINGS['effective_dist']/SETTINGS['sum_dist'])
def calc_abundance(cdl, affine, window, meta):
"""
Calculate farm abundance based on nesting and floral coefficients for
various crop types.
"""
# Create floral and nesting rasters derived from the CDL
fl_out = np.zeros(shape=cdl.shape, dtype=np.float32)
n_out = np.zeros(shape=cdl.shape, dtype=np.float32)
floral = reclassify_from_data(cdl, SETTINGS['floral_reclass'], fl_out)
nesting = reclassify_from_data(cdl, SETTINGS['nesting_reclass'], n_out)
# Create an abundance index based on forage and nesting indexes
# over the area a bee may travel
forage = generic_filter(floral, footprint=SETTINGS['window'],
function=focal_op)
source = forage * nesting
area_abundance = generic_filter(source, footprint=SETTINGS['window'],
function=focal_op)
if DEBUG:
write_tif('cdl', cdl, affine, window, meta)
write_tif('floral', floral, affine, window, meta)
write_tif('nesting', nesting, affine, window, meta)
write_tif('forage', forage, affine, window, meta)
write_tif('source', source, affine, window, meta)
write_tif('abundance', area_abundance, affine, window, meta)
return area_abundance
def yield_calc(crop_id, abundance, managed_hives, config):
"""
Determines the yield change due to landscape factors related to forage
and nesting suitability for wild bees and managed honey bee hives.
Calculate the yield for a single cell position based on values from
    the abundance calculation and the crop data layer.
Args:
        crop_id (int): The cell value from the CDL raster
        abundance (float): The cell value of abundance at the same position
            as crop_id
managed_hives (float): Number of managed hives per acre implemented
config (dict): Crop specific configuration detailing `demand` the crop
places on bee pollination and the recommended `density` of hives
for that crop type
    Returns:
yield (float): The predicted yield for this cell position
"""
if crop_id not in config:
return 0
demand = config[crop_id]['demand']
rec_hives = config[crop_id]['density']
# Avoid division by 0 for crops which don't have a recommended density
hives_ratio = 0 if rec_hives == 0 else managed_hives/rec_hives
# Calculate the yield for managed honeybee, keeping a ceiling such
# that if more hives are used than recommended, yield remains at 1
yield_hb = (1 - demand) + demand * min(1, hives_ratio)
    # Determine the remaining yield to be had from wild bee abundance
yield_wild = (1 - yield_hb) * (abundance / (ABUNDANCE_IDX + abundance))
    # Determine the total yield from all sources of bee pollination
return yield_hb + yield_wild
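# Worked example for yield_calc with hypothetical numbers: demand=0.65, a
# recommended density of 1.0 hives/acre and 0.5 managed hives/acre give
# yield_hb = 0.35 + 0.65 * 0.5 = 0.675; with abundance=0.2 the wild-bee
# contribution is 0.325 * (0.2 / 0.3) ~= 0.217, for a total predicted yield
# of roughly 0.89.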
def aggregate_crops(yield_field, cdl_field, crops=AG_CLASSES,
paired_crops=COVER_CROPS):
"""
    Within the unmasked field portion of the provided yield_field, average the
    yield quantities per agricultural type, resulting in a total yield increase
    per relevant crop type on the field, and report the yield in terms of
    average crop yield on a scale of 0-100.
Args:
yield_field (masked ndarray): The bee shed area of computed yield with
a mask of the field applied.
        cdl_field (masked ndarray): The raw crop data layer corresponding to
            the same area covered in `yield_field`, with a mask of the field
            applied
crops (list<int>): Optional. The CDL class types to aggregate on,
defaults to system specified list
paired_crops (dict<int,int>): Optional. The CDL class types that have a
crop they should aggregate with. Keys are the
class types in `crops`; values are class types the keys pair with,
defaults to system specified list
Returns:
        dict<cdl_id, yield_avg>: A mapping of bee pollinated agricultural
CDL crop types with the avg of their yield across the field
portion of the yield data, reported on 0-100 scale
"""
crop_yields = {}
field_mask = yield_field.mask.copy()
    # Average the yield for each crop type cell, by crop
for crop in crops:
# Create a mask for values that are not this crop type, (or, if it
# has a paired crop, its pair), and include
# the mask which is already applied to non-field areas of AoI
crop_mask = cdl_field != crop
if crop in paired_crops:
crop_mask = crop_mask & (cdl_field != paired_crops[crop])
cdl_mask = np.ma.masked_where(crop_mask, cdl_field).mask
crop_mask = np.ma.mask_or(field_mask, cdl_mask)
# Average the yield from this one crop only over the field
yield_field.mask = crop_mask
crop_yield = np.ma.mean(yield_field).item() * 100 or 0
crop_yields[str(crop)] = crop_yield
# Restore the original mask of just the field
yield_field.mask = field_mask
return crop_yields
def calculate(bee_shed_geom, field_geom, modifications, managed_hives,
raster_path=RASTER_PATH):
"""
Calculate the change in specific crop yield due to bee abundance
"""
# Read in the crop raster clipped to the bee shed geometry
cdl, affine, win, meta = extract(bee_shed_geom, raster_path, modifications)
# Determine pollinator abundance across the entire area
area_abundance = calc_abundance(cdl, affine, win, meta)
# Vectorize the yield function to allow paired element position input
# from the CDL, area abundance raster, plus user input and system config
total_yield = np.vectorize(yield_calc, otypes=[np.float16],
excluded=['managed_hives', 'config'])
# Determine yield change due to abundance and managed hives
yield_area = total_yield(cdl, area_abundance,
managed_hives=managed_hives,
config=SETTINGS['yield'])
# Mask the bee shed into just the delineated field
yield_field = geometry_mask(field_geom, yield_area, affine)
cdl_field = geometry_mask(field_geom, cdl, affine)
# Aggregate yield by agricultural cdl type on the field mask
return aggregate_crops(yield_field, cdl_field)
# Determine settings when module is loaded
if __name__ != '__main__':
initialize()
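# Minimal usage sketch with hypothetical numbers; running the module directly
# skips initialize() above, so only yield_calc, which takes its configuration
# explicitly, is exercised here.
if __name__ == '__main__':
    demo_config = {35: {'demand': 0.65, 'density': 1.0}}
    # With these inputs yield_calc returns roughly 0.89 (see yield_calc).
    print(yield_calc(35, 0.2, 0.5, demo_config))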
|
|
"""Test script for the gzip module.
"""
import unittest
from test import test_support
import os
import io
import struct
gzip = test_support.import_module('gzip')
data1 = """ int length=DEFAULTALLOC, err = Z_OK;
PyObject *RetVal;
int flushmode = Z_FINISH;
unsigned long start_total_out;
"""
data2 = """/* zlibmodule.c -- gzip-compatible data compression */
/* See http://www.gzip.org/zlib/
/* See http://www.winimage.com/zLibDll for Windows */
"""
class TestGzip(unittest.TestCase):
filename = test_support.TESTFN
def setUp(self):
test_support.unlink(self.filename)
def tearDown(self):
test_support.unlink(self.filename)
def write_and_read_back(self, data, mode='b'):
b_data = memoryview(data).tobytes()
with gzip.GzipFile(self.filename, 'w'+mode) as f:
l = f.write(data)
self.assertEqual(l, len(b_data))
with gzip.GzipFile(self.filename, 'r'+mode) as f:
self.assertEqual(f.read(), b_data)
@test_support.requires_unicode
def test_unicode_filename(self):
unicode_filename = test_support.TESTFN_UNICODE
try:
unicode_filename.encode(test_support.TESTFN_ENCODING)
except (UnicodeError, TypeError):
self.skipTest("Requires unicode filenames support")
self.filename = unicode_filename
with gzip.GzipFile(unicode_filename, "wb") as f:
f.write(data1 * 50)
with gzip.GzipFile(unicode_filename, "rb") as f:
self.assertEqual(f.read(), data1 * 50)
# Sanity check that we are actually operating on the right file.
with open(unicode_filename, 'rb') as fobj, \
gzip.GzipFile(fileobj=fobj, mode="rb") as f:
self.assertEqual(f.read(), data1 * 50)
def test_write(self):
with gzip.GzipFile(self.filename, 'wb') as f:
f.write(data1 * 50)
# Try flush and fileno.
f.flush()
f.fileno()
if hasattr(os, 'fsync'):
os.fsync(f.fileno())
f.close()
# Test multiple close() calls.
f.close()
# The following test_write_xy methods test that write accepts
# the corresponding bytes-like object type as input
# and that the data written equals bytes(xy) in all cases.
def test_write_memoryview(self):
self.write_and_read_back(memoryview(data1 * 50))
def test_write_incompatible_type(self):
# Test that non-bytes-like types raise TypeError.
# Issue #21560: attempts to write incompatible types
# should not affect the state of the fileobject
with gzip.GzipFile(self.filename, 'wb') as f:
with self.assertRaises(UnicodeEncodeError):
f.write(u'\xff')
with self.assertRaises(TypeError):
f.write([1])
f.write(data1)
with gzip.GzipFile(self.filename, 'rb') as f:
self.assertEqual(f.read(), data1)
def test_read(self):
self.test_write()
# Try reading.
with gzip.GzipFile(self.filename, 'r') as f:
d = f.read()
self.assertEqual(d, data1*50)
def test_read_universal_newlines(self):
# Issue #5148: Reading breaks when mode contains 'U'.
self.test_write()
with gzip.GzipFile(self.filename, 'rU') as f:
d = f.read()
self.assertEqual(d, data1*50)
def test_io_on_closed_object(self):
# Test that I/O operations on closed GzipFile objects raise a
# ValueError, just like the corresponding functions on file objects.
# Write to a file, open it for reading, then close it.
self.test_write()
f = gzip.GzipFile(self.filename, 'r')
f.close()
with self.assertRaises(ValueError):
f.read(1)
with self.assertRaises(ValueError):
f.seek(0)
with self.assertRaises(ValueError):
f.tell()
# Open the file for writing, then close it.
f = gzip.GzipFile(self.filename, 'w')
f.close()
with self.assertRaises(ValueError):
f.write('')
with self.assertRaises(ValueError):
f.flush()
def test_append(self):
self.test_write()
# Append to the previous file
with gzip.GzipFile(self.filename, 'ab') as f:
f.write(data2 * 15)
with gzip.GzipFile(self.filename, 'rb') as f:
d = f.read()
self.assertEqual(d, (data1*50) + (data2*15))
def test_many_append(self):
# Bug #1074261 was triggered when reading a file that contained
# many, many members. Create such a file and verify that reading it
# works.
with gzip.open(self.filename, 'wb', 9) as f:
f.write('a')
for i in range(0, 200):
with gzip.open(self.filename, "ab", 9) as f: # append
f.write('a')
# Try reading the file
with gzip.open(self.filename, "rb") as zgfile:
contents = ""
while 1:
ztxt = zgfile.read(8192)
contents += ztxt
if not ztxt: break
self.assertEqual(contents, 'a'*201)
def test_buffered_reader(self):
# Issue #7471: a GzipFile can be wrapped in a BufferedReader for
# performance.
self.test_write()
with gzip.GzipFile(self.filename, 'rb') as f:
with io.BufferedReader(f) as r:
lines = [line for line in r]
self.assertEqual(lines, 50 * data1.splitlines(True))
def test_readline(self):
self.test_write()
# Try .readline() with varying line lengths
with gzip.GzipFile(self.filename, 'rb') as f:
line_length = 0
while 1:
L = f.readline(line_length)
if not L and line_length != 0: break
self.assertTrue(len(L) <= line_length)
line_length = (line_length + 1) % 50
def test_readlines(self):
self.test_write()
# Try .readlines()
with gzip.GzipFile(self.filename, 'rb') as f:
L = f.readlines()
with gzip.GzipFile(self.filename, 'rb') as f:
while 1:
L = f.readlines(150)
if L == []: break
def test_seek_read(self):
self.test_write()
# Try seek, read test
with gzip.GzipFile(self.filename) as f:
while 1:
oldpos = f.tell()
line1 = f.readline()
if not line1: break
newpos = f.tell()
f.seek(oldpos) # negative seek
if len(line1)>10:
amount = 10
else:
amount = len(line1)
line2 = f.read(amount)
self.assertEqual(line1[:amount], line2)
f.seek(newpos) # positive seek
def test_seek_whence(self):
self.test_write()
# Try seek(whence=1), read test
with gzip.GzipFile(self.filename) as f:
f.read(10)
f.seek(10, whence=1)
y = f.read(10)
self.assertEqual(y, data1[20:30])
def test_seek_write(self):
# Try seek, write test
with gzip.GzipFile(self.filename, 'w') as f:
for pos in range(0, 256, 16):
f.seek(pos)
f.write('GZ\n')
def test_mode(self):
self.test_write()
with gzip.GzipFile(self.filename, 'r') as f:
self.assertEqual(f.myfileobj.mode, 'rb')
def test_1647484(self):
for mode in ('wb', 'rb'):
with gzip.GzipFile(self.filename, mode) as f:
self.assertTrue(hasattr(f, "name"))
self.assertEqual(f.name, self.filename)
def test_mtime(self):
mtime = 123456789
with gzip.GzipFile(self.filename, 'w', mtime = mtime) as fWrite:
fWrite.write(data1)
with gzip.GzipFile(self.filename) as fRead:
dataRead = fRead.read()
self.assertEqual(dataRead, data1)
self.assertTrue(hasattr(fRead, 'mtime'))
self.assertEqual(fRead.mtime, mtime)
def test_metadata(self):
mtime = 123456789
with gzip.GzipFile(self.filename, 'w', mtime = mtime) as fWrite:
fWrite.write(data1)
with open(self.filename, 'rb') as fRead:
# see RFC 1952: http://www.faqs.org/rfcs/rfc1952.html
idBytes = fRead.read(2)
self.assertEqual(idBytes, '\x1f\x8b') # gzip ID
cmByte = fRead.read(1)
self.assertEqual(cmByte, '\x08') # deflate
flagsByte = fRead.read(1)
self.assertEqual(flagsByte, '\x08') # only the FNAME flag is set
mtimeBytes = fRead.read(4)
self.assertEqual(mtimeBytes, struct.pack('<i', mtime)) # little-endian
xflByte = fRead.read(1)
self.assertEqual(xflByte, '\x02') # maximum compression
osByte = fRead.read(1)
self.assertEqual(osByte, '\xff') # OS "unknown" (OS-independent)
# Since the FNAME flag is set, the zero-terminated filename follows.
# RFC 1952 specifies that this is the name of the input file, if any.
# However, the gzip module defaults to storing the name of the output
# file in this field.
expected = self.filename.encode('Latin-1') + '\x00'
nameBytes = fRead.read(len(expected))
self.assertEqual(nameBytes, expected)
# Since no other flags were set, the header ends here.
# Rather than process the compressed data, let's seek to the trailer.
fRead.seek(os.stat(self.filename).st_size - 8)
crc32Bytes = fRead.read(4) # CRC32 of uncompressed data [data1]
self.assertEqual(crc32Bytes, '\xaf\xd7d\x83')
isizeBytes = fRead.read(4)
self.assertEqual(isizeBytes, struct.pack('<i', len(data1)))
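        # Summary of the member layout checked above (per RFC 1952): a 10-byte
        # fixed header (ID1, ID2, CM, FLG, 4-byte MTIME, XFL, OS), optional
        # fields such as the zero-terminated FNAME, the deflate stream, then a
        # trailer of CRC32 and ISIZE (both little-endian, 4 bytes each).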
def test_with_open(self):
# GzipFile supports the context management protocol
with gzip.GzipFile(self.filename, "wb") as f:
f.write(b"xxx")
f = gzip.GzipFile(self.filename, "rb")
f.close()
try:
with f:
pass
except ValueError:
pass
else:
self.fail("__enter__ on a closed file didn't raise an exception")
try:
with gzip.GzipFile(self.filename, "wb") as f:
1 // 0
except ZeroDivisionError:
pass
else:
self.fail("1 // 0 didn't raise an exception")
def test_zero_padded_file(self):
with gzip.GzipFile(self.filename, "wb") as f:
f.write(data1 * 50)
# Pad the file with zeroes
with open(self.filename, "ab") as f:
f.write("\x00" * 50)
with gzip.GzipFile(self.filename, "rb") as f:
d = f.read()
self.assertEqual(d, data1 * 50, "Incorrect data in file")
def test_fileobj_from_fdopen(self):
# Issue #13781: Creating a GzipFile using a fileobj from os.fdopen()
# should not embed the fake filename "<fdopen>" in the output file.
fd = os.open(self.filename, os.O_WRONLY | os.O_CREAT)
with os.fdopen(fd, "wb") as f:
with gzip.GzipFile(fileobj=f, mode="w") as g:
self.assertEqual(g.name, "")
def test_read_with_extra(self):
# Gzip data with an extra field
gzdata = (b'\x1f\x8b\x08\x04\xb2\x17cQ\x02\xff'
b'\x05\x00Extra'
b'\x0bI-.\x01\x002\xd1Mx\x04\x00\x00\x00')
with gzip.GzipFile(fileobj=io.BytesIO(gzdata)) as f:
self.assertEqual(f.read(), b'Test')
def test_main(verbose=None):
test_support.run_unittest(TestGzip)
if __name__ == "__main__":
test_main(verbose=True)
|
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010, 2degrees Limited <egoddard@tech.2degreesnetwork.com>.
# All Rights Reserved.
#
# This file is part of djangoaudit <https://launchpad.net/django-audit/>,
# which is subject to the provisions of the BSD at
# <http://dev.2degreesnetwork.com/p/2degrees-license.html>. A copy of the
# license should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS"
# AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT
# NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST
# INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Tests for djangoaudit"""
from datetime import datetime, timedelta, date
from decimal import Decimal
import os
# This has to be set before importing anything from Django so that the
# settings module is picked up
os.environ['DJANGO_SETTINGS_MODULE'] = "tests.fixtures.sampledjango.settings"
from django.conf import settings
from django.db.models import Sum
from nose.tools import (eq_, ok_, assert_false, assert_not_equal, assert_raises,
raises)
from pymongo.errors import PyMongoError
from fixture.django_testcase import FixtureTestCase
#from mongofixture import MongoFixtureTestCase
from djangoaudit.models import (_coerce_data_to_model_types, _audit_model,
_coerce_to_bson_compatible, AuditedModel)
from djangoaudit.connection import MONGO_CONNECTION
from tests.fixtures.sampledjango.bsg.models import *
from tests.fixtures.sampledjango.bsg.fixtures import *
class TestEnsureBSONCompatible(object):
"""Test for :func:`_coerce_to_bson_compatible`"""
def test_decimal_to_float(self):
"""Ensure that :class:`Decimal` is converted to :class:`float`"""
got = _coerce_to_bson_compatible(Decimal('1234.5678'))
expected = 1234.5678
eq_(got, expected,
"Expected %r, got %r for Decimal to float conversion" %
(expected, got))
def test_date_to_datetime(self):
"""Ensure that :class:`date` is converted to :class:`datetime`"""
got = _coerce_to_bson_compatible(date(2001, 9, 11))
expected = datetime(2001, 9, 11)
eq_(got, expected,
"Expected %r, got %r for date to datetime conversion" %
(expected, got))
class MockModelMeta(object):
""" Mock of :class:`django.db.options.Options` """
def __init__(self, app_label, model_name):
self.app_label = app_label
self.object_name = model_name
class MockModel(object):
""" Mock of :class:`django.db.models.base.Model` """
def __init__(self, app_label, model_name, pk):
self._meta = MockModelMeta(app_label, model_name)
self.pk = pk
class TestAuditModel(object):
""" Tests for :func:`djangoaudit.models.audit_model` """
def setup(self):
self.audit_collection_name = "audit_data"
self.auditing_collection = MONGO_CONNECTION\
.get_collection(self.audit_collection_name)
self.profile = MockModel("profiles", "Profile", 123)
def fetch_record_by_id(self, id):
return self.auditing_collection.find_one({"_id":id})
def test_no_changes_empty_dicts(self):
"""Check that passing two empty value dicts results in a no-op"""
result = _audit_model(self.profile, {}, {})
eq_(result, None, "No changes should not result in anything being "
"written to the database")
def test_no_changes_same_values(self):
"""Check that passing two identical dicts results in a no-op"""
result = _audit_model(self.profile,
{'foo': 1, 'bar': 'wibble', 'empty': None,
'my_date': datetime(2001, 1, 1, 9, 12)},
{'foo': 1, 'bar': 'wibble', 'empty': None,
'my_date': datetime(2001, 1, 1, 9, 12)})
eq_(result, None, "No changes should not result in anything being "
"written to the database")
def test_single_change_no_other_diff(self):
"""Check that a single changed value is correctly recorded"""
result = _audit_model(self.profile, dict(foo=None), dict(foo='bar'))
assert_not_equal(result, None,
"A change should result in a database object being "
"created")
saved_record = self.fetch_record_by_id(result)
eq_(saved_record['foo'], 'bar',
"The saved record should contain a single difference key")
def test_model_data_write_out(self):
"""Check the correct data is written out for the model"""
result = _audit_model(self.profile, dict(foo=None), dict(foo='bar'))
assert_not_equal(result, None,
"A change should result in a database object being "
"created")
saved_record = self.fetch_record_by_id(result)
eq_(saved_record['object_app'], self.profile._meta.app_label)
eq_(saved_record['object_model'], self.profile._meta.object_name)
eq_(saved_record['object_pk'], self.profile.pk)
def test_date_stamping(self):
"""Check that a date stamp is stored in along with the record"""
result = _audit_model(self.profile, dict(foo=None), dict(foo='bar'))
assert_not_equal(result, None,
"A change should result in a database object being "
"created")
saved_record = self.fetch_record_by_id(result)
record_date_stamp = saved_record['audit_date_stamp']
now = datetime.utcnow()
ok_((now - timedelta(seconds=1)) < record_date_stamp < now,
"Date stamp should be almost the same as now (now: %s, got: %s"
% (now, record_date_stamp))
def test_addition_parameter_write_out(self):
"""Check that additional parameters are correctly stored"""
result = _audit_model(self.profile, dict(foo=None), dict(foo='bar'))
assert_not_equal(result, None,
"A change should result in a database object being "
"created")
saved_record = self.fetch_record_by_id(result)
def test_single_change_others_same(self):
"""Check that a single changed value is correctly recorded when there are no other differences"""
result = _audit_model(self.profile, dict(foo=None, wibble=0),
dict(foo='bar', wibble=0))
assert_not_equal(result, None,
"A change should result in a database object being "
"created")
saved_record = self.fetch_record_by_id(result)
eq_(saved_record['foo'], 'bar',
"The saved record should contain a single difference key")
ok_('wibble' not in saved_record, "There should be no "
"record of changes to the `wibble` key")
def test_multi_change_no_others(self):
"""Check that multiple changed values are correctly recorded when there are no other items"""
result = _audit_model(self.profile, dict(foo=None, wibble=0),
dict(foo='bar', wibble=1))
assert_not_equal(result, None,
"A change should result in a database object being "
"created")
saved_record = self.fetch_record_by_id(result)
eq_(saved_record['foo'], 'bar',
"The saved record should contain a difference for key `foo`")
eq_(saved_record['wibble'], 1,
"The saved record should contain a difference for key `wibble`")
def test_multi_change_others_same(self):
"""Check that multiple changed values are correctly recorded when there are no other differences"""
result = _audit_model(self.profile, dict(foo=None, wibble=0, body_count=1.00),
dict(foo='bar', wibble=1, body_count=1.00))
assert_not_equal(result, None,
"A change should result in a database object being "
"created")
saved_record = self.fetch_record_by_id(result)
eq_(saved_record['foo'], 'bar',
"The saved record should contain a difference for key `foo`")
eq_(saved_record['wibble'], 1,
"The saved record should contain a difference for key `wibble`")
ok_('body_count' not in saved_record, "There should be no "
"record of changes to the `body_count` key")
class TestCoerceDataToModelTypes(object):
"""Tests for :func:`_coerce_data_to_model_types`"""
def setup(self):
checks = (
('age', '40', 40),
('last_flight', date(2010, 1, 1), datetime(2010, 1, 1)),
('fastest_landing', 71.10, Decimal("71.10")),
('is_cylon', 0, False),
)
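# Each check is (field_name, raw input value, value expected after coercion to
# the corresponding Pilot model field type), e.g. the string '40' should be
# coerced to the integer 40.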
self.initial_data, self.final_data = {}, {}
for key, initial, final in checks:
self.initial_data[key] = initial
self.final_data[key] = final
def test_for_instance(self):
"""Test _coerce_data_to_model_types for model instances"""
pilot = Pilot()
result = _coerce_data_to_model_types(pilot, self.initial_data)
eq_(result, self.final_data,
"Expected to get: %r, got %r" % (result, self.final_data))
def test_for_class(self):
"""Test _coerce_data_to_model_types for the model itself"""
result = _coerce_data_to_model_types(Pilot, self.initial_data)
eq_(result, self.final_data,
"Expected to get: %r, got %r" % (result, self.final_data))
class TestAuditedModel(FixtureTestCase):
"""Tests for AuditedModel"""
datasets = [PilotData, VesselData]
def setUp(self):
self.audit_collection_name = "audit_data"
self.auditing_collection = MONGO_CONNECTION\
.get_collection(self.audit_collection_name)
# Now set up the records:
self.helo = Pilot.objects.filter(call_sign="Helo")[0]  # the fixture appears to insert two of these records, so take the first match
self.athena = Pilot.objects.get(call_sign="Athena")
self.starbuck = Pilot.objects.get(call_sign="Starbuck")
self.apollo = Pilot.objects.get(call_sign="Apollo")
self.longshot = Pilot.objects.get(call_sign="Longshot")
self.raptor259 = Vessel.objects.get(name=VesselData.Raptor259.name)
@raises(AttributeError)
def test_meta_class(self):
"""Check that any values specified in log_fields which are no fields on the AuditedModel class cause an AttributeError to be raised"""
class NaughtyAuditedModel(AuditedModel):
log_fields = ['foo', 'bar', 'wibble']
def test_no_changes_no_extra(self):
"""Check that when there are no changes to a AuditedModel instance, no changes are recorded"""
# Set up the operator and some notes:
self.helo.set_audit_info(operator='me',
notes='This should not be recorded')
# Save a model with no changes:
self.helo.save()
# Now read back the log to see whether anything was put in there:
num_log_items = len(list(self.helo.get_audit_log()))
eq_(num_log_items, 1, "There should only be one log entry for this "
"object - the creation log (found %d log entries)." % num_log_items)
def test_change_non_logger_field(self):
"""Check that altering non-logged fields doesn't result in a log entry being generated"""
self.helo.craft = 0
# Set up the operator and some notes:
self.helo.set_audit_info(operator='me',
notes='This should not be recorded')
self.helo.save()
# Now read back the log to see whether anything was put in there:
num_log_items = len(list(self.helo.get_audit_log()))
eq_(num_log_items, 1, "There should be one log entry for this object - "
"the creation log (found %d log entries)." % num_log_items)
def test_create_fresh_record(self):
"""Check that creation of a record logs all the fields correctly"""
self.athena.delete()
params = dict(first_name="Sharon",
last_name="Agathon",
call_sign="Athena",
age=29,
last_flight=datetime(2000, 3, 4, 7, 18),
craft=1,
is_cylon=True,
fastest_landing=Decimal("77.90"))
new_athena = Pilot(**params)
new_athena.save()
log = list(new_athena.get_audit_log())
# Check we've only got one log entry:
eq_(len(log), 1, "There should only be one entry for this object (found"
" %d)" % len(log))
entry = log[0]
# Now verify that we've only got the correct keys in the log, once we've
# popped off the extra ones:
object_app = entry.pop('object_app')
object_model = entry.pop('object_model')
object_pk = entry.pop('object_pk')
id = entry.pop('_id')
audit_date_stamp = entry.pop('audit_date_stamp')
eq_(object_app, "bsg",
"object_app should be 'bsg', got %r" % object_app)
eq_(object_model, "Pilot",
"object_model should be 'Pilot', got %r" % object_model)
eq_(object_pk, new_athena.pk, "object_pk should be %r, got %r" %
(new_athena.pk, object_pk))
# Our resulting entry should have only the audit_changes key as there is
# only audited_data remaining:
expected_keys = set(('audit_changes',))  # set(new_athena.log_fields)
found_keys = set(entry.keys())
eq_(expected_keys, found_keys, "Mismatch between expected fields in the"
" log. Expected %r, got %r" % (expected_keys, found_keys))
# Now verify that what's on the new model is what was logged:
for key, value in entry['audit_changes'].items():
expected = (None, getattr(new_athena, key))
eq_(value, expected, "Expected to find %r with value: %r, got %r" %
(key, expected, value))
def test_partial_update(self):
"""Check that partial data updates are recorded correctly"""
orig_name = self.longshot.last_name
self.longshot.last_name = "New name"
orig_age = self.longshot.age
self.longshot.age = 30
orig_fastest_landing = self.longshot.fastest_landing
self.longshot.fastest_landing = Decimal("75.00")
# Ensure we've got some operator testing too:
operator, notes = "me", "This record should be updated"
self.longshot.set_audit_info(operator=operator, notes=notes)
# Now do the save:
self.longshot.save()
# Read back the log:
log = list(self.longshot.get_audit_log())
eq_(len(log), 2, "There should only be two entries for this object ("
"found %d)" % len(log))
entry = log[-1]
# Now verify that we've only got the correct keys in the log, once we've
# popped off the extra ones:
object_app = entry.pop('object_app')
object_model = entry.pop('object_model')
object_pk = entry.pop('object_pk')
id = entry.pop('_id')
audit_date_stamp = entry.pop('audit_date_stamp')
audit_operator = entry.pop('audit_operator')
audit_notes = entry.pop('audit_notes')
eq_(object_app, "bsg",
"object_app should be 'bsg', got %r" % object_app)
eq_(object_model, "Pilot",
"object_model should be 'Pilot', got %r" % object_model)
eq_(object_pk, self.longshot.pk, "object_pk should be %r, got %r" %
(self.longshot.pk, object_pk))
eq_(audit_operator, operator,
"operator should be %r, got %r" % (operator, audit_operator))
eq_(audit_notes, notes,
"notes should be %r, got %r" % (notes, audit_notes))
# Check we've only got one key left (audit_changes):
expected_keys = ['audit_changes']
found_keys = list(entry.keys())
eq_(expected_keys, found_keys, "Expected to find keys: %r, got %r" %
(expected_keys, found_keys))
# Ensure that the new values were correctly recorded:
changes = entry['audit_changes']
eq_(changes['last_name'], (orig_name, self.longshot.last_name))
eq_(changes['age'], (orig_age, self.longshot.age))
eq_(changes['fastest_landing'], (orig_fastest_landing,
self.longshot.fastest_landing))
def test_dual_update(self):
"""Test that two log entries are generated for dual updates"""
self.apollo.age = 40
self.apollo.save()
self.apollo.age = 30
self.apollo.save()
log = list(self.apollo.get_audit_log())
eq_(len(log), 3, "There should be three entries in the log, got %d" %
len(log))
expected_ages = [(28, 40), (40, 30)]
for entry, age in zip(log[1:], expected_ages):
eq_(entry['audit_changes']['age'], age,
"Expected age to be %r, got %r" % (entry['audit_changes']['age'], age))
def test_delete(self):
"""Check that delete() records the final state of the model prior to deletion"""
# Define the lookup key we'll need parameters to look up the record:
pk = self.starbuck.pk
self.starbuck.delete()
# Delete another to make sure we don't get log cross-over:
apollo_pk = self.apollo.pk
self.apollo.set_audit_info(notes="Extra note")
self.apollo.delete()
# Get hold of the delete log:
log = list(Pilot.get_deleted_log(pk))
# Make sure there's only one entry:
eq_(len(log), 1,
"There should only be one deleted item for this pk (found %d)" %
len(log))
entry = log[0]
for field in Pilot.log_fields:
expected = getattr(PilotData.Starbuck, field)
found = entry[field]
eq_(expected, found,
"For field %r, expected %r, got %r" % (field, expected, found))
delete_note = "Object deleted. These are the attributes at delete time."
eq_(entry['audit_notes'], delete_note,
"Expected to find notes as: %r, got %r" %
(delete_note, entry['audit_notes']))
# Get hold of the delete log for apollo to check the delete note:
entry = list(Pilot.get_deleted_log(apollo_pk))[0]
got = entry['audit_notes']
expected = "%s\nExtra note" % delete_note
eq_(expected, got, "Expected note: %r, got %r" % (expected, got))
# Since we've deleted two items we can check that we've got the log for
# both of these:
log = list(Pilot.get_deleted_log())
eq_(len(log), 2,
"There should be two deleted log entries for this class (found %d)"
% len(log))
def test_arbitrary_audit(self):
"""Test the arbitrary auditing of data against a model"""
data = dict(hair_colour="Blond",
children=0,
kill_percentage=Decimal('98.7'))
self.starbuck.set_audit_info(**data)
self.starbuck.save()
log = list(self.starbuck.get_audit_log())
eq_(len(log), 2,
"There should only be two entries in the log (found %d)" % len(log))
entry = log[-1]
object_app = entry.pop('object_app')
object_model = entry.pop('object_model')
object_pk = entry.pop('object_pk')
id = entry.pop('_id')
audit_date_stamp = entry.pop('audit_date_stamp')
eq_(object_app, "bsg",
"object_app should be 'bsg', got %r" % object_app)
eq_(object_model, "Pilot",
"object_model should be 'Pilot', got %r" % object_model)
eq_(object_pk, self.starbuck.pk, "object_pk should be %r, got %r" %
(self.starbuck.pk, object_pk))
# Mongo stores Decimals as floats, so coerce what we expect:
data['kill_percentage'] = float(data['kill_percentage'])
eq_(entry, data, "Expecting %r, got %r" % (data, entry))
def test_foreign_keys(self):
"""Test the foreign keyed fields don't interfere with AuditedModel"""
# Due to a call in the metaclass of AuditedModel, the
# _meta.get_all_field_names does not behave correctly unless the cache
# is cleared after this call. Aggregation is one area where this
# manifests itself - here we're ensuring this doesn't fail:
field_names = Pilot._meta.get_all_field_names()
ok_("vessels" in field_names,
"The field names for the Pilot model should contain 'vessels', got "
"%s" % field_names)
# Now verify in aggregation this works:
vessel_sum = Pilot.objects.aggregate(Sum('vessels'))['vessels__sum']
eq_(vessel_sum, 1, "There should only be one vessel, got %r"
% vessel_sum)
def test_get_creation_log(self):
"""Test that the creation log can be retrieved correctly"""
# Create a new object:
hot_dog = Pilot(
first_name="Brendan",
last_name="Costanza",
call_sign="Hot Dog",
age=25,
last_flight=datetime(2000, 6, 4, 23, 1),
craft=1,
is_cylon=False,
fastest_landing=Decimal("101.67")
)
hot_dog.set_audit_info(operator="Admin",
flight_deck="Port side")
hot_dog.save()
# Retrieve the log as a check:
initial_log = hot_dog.get_creation_log()
# Make another entry:
hot_dog.fastest_landing = Decimal("99.98")
hot_dog.save()
# Check we've got two items in the log now:
found_logs = len(list(hot_dog.get_audit_log()))
eq_(2, found_logs, "Expected to find 2 logs, got %d" % found_logs)
# Now check the creation log:
creation_log = hot_dog.get_creation_log()
eq_(creation_log, initial_log, "Expecting initial log entry to be the "
"same as the creation log. Expected:\n%r,\n\ngot\n%r" %
(initial_log, creation_log))
# Test that fail gracefully when no creation log exists:
for item in hot_dog.get_audit_log():
self.auditing_collection.remove(item['_id'])
empty_log = hot_dog.get_creation_log()
eq_(empty_log, None, "The creation log should be None")
def test_get_deletion_log(self):
"""Test that deleted data can be retrieved"""
pre_delete_data = {}
for field in self.apollo.log_fields:
pre_delete_data[field] = getattr(self.apollo, field)
pk = self.apollo.pk
self.apollo.delete()
# Get the deletion log:
entry = list(Pilot.get_deleted_log(pk))[0]
object_app = entry.pop('object_app')
object_model = entry.pop('object_model')
object_pk = entry.pop('object_pk')
id = entry.pop('_id')
audit_date_stamp = entry.pop('audit_date_stamp')
audit_is_delete = entry.pop('audit_is_delete')
audit_notes = entry.pop('audit_notes')
ok_(audit_is_delete, "Should have audit_is_delete is True")
eq_(audit_notes,
'Object deleted. These are the attributes at delete time.')
eq_(pre_delete_data, entry,
"Expected to find deletion log as: %r, got %r" %
(pre_delete_data, entry))
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import argparse
from natsort import natsorted
import funannotate.library as lib
from Bio import SeqIO
from collections import OrderedDict
def scaffold2Dict(fasta_file):
# get scaffold names/lengths
scaffLen = {}
with open(fasta_file, 'r') as seqin:
for record in SeqIO.parse(seqin, 'fasta'):
if record.id not in scaffLen:
scaffLen[record.id] = len(record.seq)
return scaffLen
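# Example (hypothetical): scaffold2Dict('genome.fasta') ->
#   {'scaffold_1': 152340, 'scaffold_2': 98721, ...}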
def dicts2tbl(genesDict, scaff2genes, scaffLen, SeqCenter, SeqRefNum,
annotations=False, external=False, skipList=[]):
'''
function to take funannotate annotation dictionaries and convert to NCBI tbl output
'''
duplicates = 0
pseudo = 0
nocds = 0
# to parse annotations, will need to have access to GO OBO dictionary
goDict = {}
if annotations:
from goatools import obo_parser
# location of go.obo
for item in obo_parser.OBOReader(os.path.join(os.environ["FUNANNOTATE_DB"], 'go.obo')):
goDict[item.id] = {'name': item.name, 'namespace': item.namespace}
def _goFormat(id, goDict=goDict):
# go_function serine-type endopeptidase activity|0004252||IEA
# go_process proteolysis|0006508||IEA
# go_component nucleus|0005634||IEA
if id in goDict:
if goDict[id]['namespace'] == 'biological_process':
base = 'go_process'
elif goDict[id]['namespace'] == 'molecular_function':
base = 'go_function'
elif goDict[id]['namespace'] == 'cellular_component':
base = 'go_component'
reformatted = '\t\t\t{:}\t{:}|{:}||IEA'.format(
base, goDict[id]['name'], id.replace('GO:', ''))
return reformatted
else:
return False
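# Example, assuming go.obo defines GO:0005634 as 'nucleus' (cellular_component)
# as in the comment above: _goFormat('GO:0005634') ->
#   '\t\t\tgo_component\tnucleus|0005634||IEA'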
for k, v in natsorted(list(scaff2genes.items())):
sys.stdout.write('>Feature %s\n' % k)
sys.stdout.write('1\t%s\tREFERENCE\n' % scaffLen.get(k))
sys.stdout.write('\t\t\t%s\t%s\n' % (SeqCenter, SeqRefNum))
for genes in v: # now loop through each gene on the scaffold
if genes in skipList:
continue
# single funannotate standard dictionary
geneInfo = genesDict.get(genes)
if 'pseudo' in geneInfo:
if geneInfo['pseudo']:
try:
log.debug('{:} is pseudo, skipping'.format(genes))
except NameError:
print(('{:} is pseudo, skipping'.format(genes)))
pseudo += 1
continue
if geneInfo['type'] == 'mRNA' and not geneInfo['CDS']:
try:
log.debug(
'Skipping {:} because no CDS found.'.format(genes))
except NameError:
print((
'Skipping {:} because no CDS found.'.format(genes)))
pseudo += 1
continue
if geneInfo['type'] == 'mRNA' and not len(geneInfo['ids']) == len(geneInfo['mRNA']) == len(geneInfo['CDS']):
try:
log.debug('Incompatible annotation found: {:}\n{:}'.format(
genes, geneInfo))
except NameError:
print(('Incompatible annotation found: {:}\n{:}'.format(
genes, geneInfo)))
duplicates += 1
continue
if geneInfo['type'] == 'mRNA' and len(geneInfo['CDS']) == 0:
nocds += 1
continue
if geneInfo['type'] is None:
continue
# check for partial models
if True in geneInfo['partialStart']:
ps = '<'
else:
ps = ''
if True in geneInfo['partialStop']:
pss = '>'
else:
pss = ''
# now write gene model
if geneInfo['strand'] == '+':
sys.stdout.write('%s%i\t%s%i\tgene\n' % (
ps, geneInfo['location'][0], pss, geneInfo['location'][1]))
if annotations:
if geneInfo['name']:
sys.stdout.write('\t\t\tgene\t%s\n' % geneInfo['name'])
if geneInfo['gene_synonym']:
for alias in geneInfo['gene_synonym']:
sys.stdout.write('\t\t\tgene_synonym\t%s\n' % alias)
sys.stdout.write('\t\t\tlocus_tag\t%s\n' % genes)
else:
sys.stdout.write('%s%i\t%s%i\tgene\n' % (
ps, geneInfo['location'][1], pss, geneInfo['location'][0]))
if annotations:
if geneInfo['name']:
sys.stdout.write('\t\t\tgene\t%s\n' % geneInfo['name'])
if geneInfo['gene_synonym']:
for alias in geneInfo['gene_synonym']:
sys.stdout.write('\t\t\tgene_synonym\t%s\n' % alias)
sys.stdout.write('\t\t\tlocus_tag\t%s\n' % genes)
# now will output the gene models with -T1, -T2, -T3 annotations based on expression values
# means need to get the order
order = []
# multiple transcripts, so get order of highest TPM
if len(geneInfo['ids']) > 1:
tpms = []
for num, tpm in enumerate(geneInfo['note']):
for item in tpm:
if item.startswith('TPM:'):
value = float(item.split(':')[-1])
tpms.append((value, num))
if len(tpms) > 0:
for x in sorted(tpms, reverse=True):
order.append(x[1])
else:
order = list(range(0, len(geneInfo['ids'])))
else:
order.append(0)
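# e.g. (hypothetical) two transcripts with notes ['TPM:10.5'] and ['TPM:2.0']
# give order == [0, 1], so the highest-TPM isoform is written first and gets
# the '-T1' suffix below.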
for num, i in enumerate(order): # now write mRNA and CDS features
# if geneInfo['ids'][i].startswith('evm.model'): #if from predict, rename to match locus_tag
# protein_id = genes+'-T'+str(num+1)
# else:
# protein_id = geneInfo['ids'][i]
if external:
protein_id = geneInfo['ids'][i]
else:
protein_id = genes+'-T'+str(num+1)
if geneInfo['type'] == 'mRNA':
if geneInfo['partialStart'][i] is False:
ps = ''
else:
ps = '<'
if geneInfo['partialStop'][i] is False:
pss = ''
else:
pss = '>'
if geneInfo['strand'] == '+':
for num, exon in enumerate(geneInfo['mRNA'][i]):
# single exon, so slightly different method
if num == 0 and num == len(geneInfo['mRNA'][i]) - 1:
sys.stdout.write('%s%s\t%s%s\tmRNA\n' %
(ps, exon[0], pss, exon[1]))
elif num == 0:
sys.stdout.write('%s%s\t%s\tmRNA\n' %
(ps, exon[0], exon[1]))
# this is last one
elif num == len(geneInfo['mRNA'][i]) - 1:
sys.stdout.write('%s\t%s%s\n' %
(exon[0], pss, exon[1]))
else:
sys.stdout.write('%s\t%s\n' % (exon[0], exon[1]))
sys.stdout.write('\t\t\tproduct\t%s\n' %
geneInfo['product'][i])
sys.stdout.write('\t\t\ttranscript_id\tgnl|ncbi|%s_mrna\n' % (protein_id))
sys.stdout.write('\t\t\tprotein_id\tgnl|ncbi|%s\n' %
(protein_id))
for num, cds in enumerate(geneInfo['CDS'][i]):
# single exon, so slightly different method
if num == 0 and num == len(geneInfo['CDS'][i]) - 1:
sys.stdout.write('%s%s\t%s%s\tCDS\n' %
(ps, cds[0], pss, cds[1]))
elif num == 0:
sys.stdout.write('%s%s\t%s\tCDS\n' %
(ps, cds[0], cds[1]))
# this is last one
elif num == len(geneInfo['CDS'][i]) - 1:
sys.stdout.write('%s\t%s%s\n' %
(cds[0], pss, cds[1]))
else:
sys.stdout.write('%s\t%s\n' % (cds[0], cds[1]))
sys.stdout.write('\t\t\tcodon_start\t%i\n' %
geneInfo['codon_start'][i])
if annotations: # write functional annotation
if geneInfo['EC_number'][i]:
for EC in geneInfo['EC_number'][i]:
sys.stdout.write('\t\t\tEC_number\t%s\n' % EC)
if geneInfo['db_xref'][i]:
for xref in geneInfo['db_xref'][i]:
sys.stdout.write('\t\t\tdb_xref\t%s\n' % xref)
if geneInfo['go_terms'][i]:
for go in geneInfo['go_terms'][i]:
goLine = _goFormat(go)
if goLine:
sys.stdout.write('{:}\n'.format(goLine))
if geneInfo['note'][i]:
for item in geneInfo['note'][i]:
sys.stdout.write('\t\t\tnote\t%s\n' % item)
sys.stdout.write('\t\t\tproduct\t%s\n' %
geneInfo['product'][i])
sys.stdout.write('\t\t\ttranscript_id\tgnl|ncbi|%s_mrna\n' % (protein_id))
sys.stdout.write('\t\t\tprotein_id\tgnl|ncbi|%s\n' %
(protein_id))
else: # means this is on crick strand
for num, exon in enumerate(geneInfo['mRNA'][i]):
# single exon, so slightly different method
if num == 0 and num == len(geneInfo['mRNA'][i]) - 1:
sys.stdout.write('%s%s\t%s%s\tmRNA\n' %
(ps, exon[1], pss, exon[0]))
elif num == 0:
sys.stdout.write('%s%s\t%s\tmRNA\n' %
(ps, exon[1], exon[0]))
# this is last one
elif num == len(geneInfo['mRNA'][i]) - 1:
sys.stdout.write('%s\t%s%s\n' %
(exon[1], pss, exon[0]))
else:
sys.stdout.write('%s\t%s\n' % (exon[1], exon[0]))
sys.stdout.write('\t\t\tproduct\t%s\n' %
geneInfo['product'][i])
sys.stdout.write('\t\t\ttranscript_id\tgnl|ncbi|%s_mrna\n' % (protein_id))
sys.stdout.write('\t\t\tprotein_id\tgnl|ncbi|%s\n' %
(protein_id))
for num, cds in enumerate(geneInfo['CDS'][i]):
# single exon, so slightly different method
if num == 0 and num == len(geneInfo['CDS'][i]) - 1:
sys.stdout.write('%s%s\t%s%s\tCDS\n' %
(ps, cds[1], pss, cds[0]))
elif num == 0:
sys.stdout.write('%s%s\t%s\tCDS\n' %
(ps, cds[1], cds[0]))
# this is last one
elif num == (len(geneInfo['CDS'][i]) - 1):
sys.stdout.write('%s\t%s%s\n' %
(cds[1], pss, cds[0]))
else:
sys.stdout.write('%s\t%s\n' % (cds[1], cds[0]))
sys.stdout.write('\t\t\tcodon_start\t%i\n' %
geneInfo['codon_start'][i])
if annotations: # write functional annotation
if geneInfo['EC_number'][i]:
for EC in geneInfo['EC_number'][i]:
sys.stdout.write('\t\t\tEC_number\t%s\n' % EC)
if geneInfo['db_xref'][i]:
for xref in geneInfo['db_xref'][i]:
sys.stdout.write('\t\t\tdb_xref\t%s\n' % xref)
if geneInfo['go_terms'][i]:
for go in geneInfo['go_terms'][i]:
goLine = _goFormat(go)
if goLine:
sys.stdout.write('{:}\n'.format(goLine))
if geneInfo['note'][i]:
for item in geneInfo['note'][i]:
sys.stdout.write('\t\t\tnote\t%s\n' % item)
sys.stdout.write('\t\t\tproduct\t%s\n' %
geneInfo['product'][i])
sys.stdout.write('\t\t\ttranscript_id\tgnl|ncbi|%s_mrna\n' % (protein_id))
sys.stdout.write('\t\t\tprotein_id\tgnl|ncbi|%s\n' %
(protein_id))
elif geneInfo['type'] == 'tRNA':
if geneInfo['strand'] == '+':
for num, exon in enumerate(geneInfo['mRNA'][i]):
if num == 0:
sys.stdout.write('%s\t%s\t%s\n' % (
exon[0], exon[1], geneInfo['type']))
else:
sys.stdout.write('%s\t%s\n' % (exon[0], exon[1]))
sys.stdout.write('\t\t\tproduct\t%s\n' %
geneInfo['product'][i])
if geneInfo['product'][i] == 'tRNA-Xxx':
sys.stdout.write('\t\t\tpseudo\n')
else:
for num, exon in enumerate(geneInfo['mRNA'][i]):
if num == 0:
sys.stdout.write('%s\t%s\t%s\n' % (
exon[1], exon[0], geneInfo['type']))
else:
sys.stdout.write('%s\t%s\n' % (exon[1], exon[0]))
sys.stdout.write('\t\t\tproduct\t%s\n' %
geneInfo['product'][i])
if geneInfo['product'][i] == 'tRNA-Xxx':
sys.stdout.write('\t\t\tpseudo\n')
elif geneInfo['type'] in ['rRNA', 'ncRNA']:
if geneInfo['strand'] == '+':
sys.stdout.write('%s\t%s\t%s\n' % (
geneInfo['location'][0], geneInfo['location'][1], geneInfo['type']))
sys.stdout.write('\t\t\tproduct\t%s\n' %
geneInfo['product'][i])
else:
sys.stdout.write('%s\t%s\t%s\n' % (
geneInfo['location'][1], geneInfo['location'][0], geneInfo['type']))
sys.stdout.write('\t\t\tproduct\t%s\n' %
geneInfo['product'][i])
if any(i > 0 for i in [duplicates, pseudo, nocds]):
print('Skipped {:,} annotations: {:,} pseudo genes; {:,} no CDS; {:,} duplicated features'.format(
sum([pseudo, nocds, duplicates]), pseudo, nocds, duplicates))
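# A sketch of the tbl text emitted above for one forward-strand, single-exon
# gene (hypothetical coordinates and IDs; qualifier lines start with three tabs,
# columns are tab-separated):
#   >Feature scaffold_1
#   1       152340  REFERENCE
#                   CFMR    12345
#   1001    2000    gene
#                   locus_tag       FUN_000001
#   1001    2000    mRNA
#                   product hypothetical protein
#                   transcript_id   gnl|ncbi|FUN_000001-T1_mrna
#                   protein_id      gnl|ncbi|FUN_000001-T1
#   (a CDS block with codon_start, plus any functional annotation, follows)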
def main(args):
# setup menu with argparse
class MyFormatter(argparse.ArgumentDefaultsHelpFormatter):
def __init__(self, prog):
super(MyFormatter, self).__init__(prog, max_help_position=48)
parser = argparse.ArgumentParser(prog='gff2prot.py',
description='''Script to convert GFF3 and FASTA to tbl, proteins, transcripts.''',
epilog="""Written by Jon Palmer (2018) nextgenusfs@gmail.com""",
formatter_class=MyFormatter)
parser.add_argument('-g', '--gff3', required=True,
help='Genome annotation GFF3 format')
parser.add_argument('-f', '--fasta', required=True,
help='Genome in FASTA format')
args = parser.parse_args(args)
# load into dictionary
Genes = {}
Genes = lib.gff2dict(args.gff3, args.fasta, Genes)
# sort the dictionary
def _sortDict(d):
return (d[1]['location'][0], d[1]['location'][1])
# now sort the gene models by location so they are written in positional order within each contig
sGenes = sorted(iter(Genes.items()), key=_sortDict)
sortedGenes = OrderedDict(sGenes)
scaff2genes = {}
for k, v in list(sortedGenes.items()):
if not v['contig'] in scaff2genes:
scaff2genes[v['contig']] = [k]
else:
scaff2genes[v['contig']].append(k)
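# scaff2genes now maps contig -> gene IDs in positional order, e.g.
# (hypothetical): {'scaffold_1': ['gene_1', 'gene_2'], 'scaffold_2': ['gene_3']}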
# get length of scaffolds
scaffLen = scaffold2Dict(args.fasta)
# now write table
dicts2tbl(sortedGenes, scaff2genes, scaffLen, 'CFMR', '12345',
annotations=True)
if __name__ == "__main__":
main(sys.argv[1:])
|
|
#!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import argparse
import collections
import logging
import os
import re
import subprocess
import sys
import time
extra_trybots = [
{
"mastername": "luci.chromium.try",
"buildernames": ["win_optional_gpu_tests_rel"]
},
{
"mastername": "luci.chromium.try",
"buildernames": ["mac_optional_gpu_tests_rel"]
},
{
"mastername": "luci.chromium.try",
"buildernames": ["linux_optional_gpu_tests_rel"]
},
{
"mastername": "luci.chromium.try",
"buildernames": ["android_optional_gpu_tests_rel"]
},
# Include the ANGLE tryservers which run the WebGL conformance tests
# in some non-default configurations.
{
"mastername": "luci.chromium.try",
"buildernames": ["linux-angle-rel"]
},
]
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
SRC_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
sys.path.insert(0, os.path.join(SRC_DIR, 'build'))
import find_depot_tools
find_depot_tools.add_depot_tools_to_path()
CHROMIUM_GIT_URL = 'https://chromium.googlesource.com/chromium/src.git'
CL_ISSUE_RE = re.compile(r'^Issue number: ([0-9]+) \((.*)\)$')
REVIEW_URL_RE = re.compile('^https?://(.*)/(.*)')
ROLL_BRANCH_NAME = 'special_webgl_roll_branch'
TRYJOB_STATUS_SLEEP_SECONDS = 30
# Use a shell for subcommands on Windows to get a PATH search.
IS_WIN = sys.platform.startswith('win')
WEBGL_PATH = os.path.join('third_party', 'webgl', 'src')
WEBGL_REVISION_TEXT_FILE = os.path.join(
'content', 'test', 'gpu', 'gpu_tests', 'webgl_conformance_revision.txt')
CommitInfo = collections.namedtuple('CommitInfo', ['git_commit',
'git_repo_url'])
CLInfo = collections.namedtuple('CLInfo', ['issue', 'url', 'review_server'])
def _VarLookup(local_scope):
return lambda var_name: local_scope['vars'][var_name]
def _PosixPath(path):
"""Convert a possibly-Windows path to a posix-style path."""
(_, path) = os.path.splitdrive(path)
return path.replace(os.sep, '/')
def _ParseGitCommitHash(description):
for line in description.splitlines():
if line.startswith('commit '):
return line.split()[1]
logging.error('Failed to parse git commit id from:\n%s\n', description)
sys.exit(-1)
return None
def _ParseDepsFile(filename):
logging.debug('Parsing deps file %s', filename)
with open(filename, 'rb') as f:
deps_content = f.read()
return _ParseDepsDict(deps_content)
def _ParseDepsDict(deps_content):
local_scope = {}
global_scope = {
'Str': lambda arg: str(arg),
'Var': _VarLookup(local_scope),
'deps_os': {},
}
exec(deps_content, global_scope, local_scope)
return local_scope
def _GenerateCLDescriptionCommand(webgl_current, webgl_new, bugs):
def GetChangeString(current_hash, new_hash):
return '%s..%s' % (current_hash[0:7], new_hash[0:7])
def GetChangeLogURL(git_repo_url, change_string):
return '%s/+log/%s' % (git_repo_url, change_string)
def GetBugString(bugs):
bug_str = 'Bug: '
for bug in bugs:
bug_str += str(bug) + ','
return bug_str.rstrip(',')
change_str = GetChangeString(webgl_current.git_commit,
webgl_new.git_commit)
changelog_url = GetChangeLogURL(webgl_current.git_repo_url,
change_str)
if webgl_current.git_commit == webgl_new.git_commit:
print('WARNING: WebGL repository is unchanged; proceeding with no-op roll')
def GetExtraTrybotString():
s = ''
for t in extra_trybots:
if s:
s += ';'
s += t['mastername'] + ':' + ','.join(t['buildernames'])
return s
return ('Roll WebGL %s\n\n'
'%s\n\n'
'%s\n'
'Cq-Include-Trybots: %s\n') % (
change_str,
changelog_url,
GetBugString(bugs),
GetExtraTrybotString())
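# The returned description renders roughly as (hypothetical hashes, bug number
# and repo URL; the trybot list continues for each entry in extra_trybots):
#   Roll WebGL abc1234..def5678
#
#   <webgl_repo_url>/+log/abc1234..def5678
#
#   Bug: 123456
#   Cq-Include-Trybots: luci.chromium.try:win_optional_gpu_tests_rel;...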
class AutoRoller(object):
def __init__(self, chromium_src):
self._chromium_src = chromium_src
def _RunCommand(self, command, working_dir=None, ignore_exit_code=False,
extra_env=None):
"""Runs a command and returns the stdout from that command.
If the command fails (exit code != 0), the function will exit the process.
"""
working_dir = working_dir or self._chromium_src
logging.debug('cmd: %s cwd: %s', ' '.join(command), working_dir)
env = os.environ.copy()
if extra_env:
logging.debug('extra env: %s', extra_env)
env.update(extra_env)
p = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, shell=IS_WIN, env=env,
cwd=working_dir, universal_newlines=True)
output = p.stdout.read()
p.wait()
p.stdout.close()
p.stderr.close()
if not ignore_exit_code and p.returncode != 0:
logging.error('Command failed: %s\n%s', str(command), output)
sys.exit(p.returncode)
return output
def _GetCommitInfo(self, path_below_src, git_hash=None, git_repo_url=None):
working_dir = os.path.join(self._chromium_src, path_below_src)
self._RunCommand(['git', 'fetch', 'origin'], working_dir=working_dir)
revision_range = git_hash or 'origin/main'
ret = self._RunCommand(
['git', '--no-pager', 'log', revision_range,
'--no-abbrev-commit', '--pretty=full', '-1'],
working_dir=working_dir)
parsed_hash = _ParseGitCommitHash(ret)
logging.debug('parsed Git commit hash: %s', parsed_hash)
return CommitInfo(parsed_hash, git_repo_url)
def _GetDepsCommitInfo(self, deps_dict, path_below_src):
logging.debug('Getting deps commit info for %s', path_below_src)
entry = deps_dict['deps'][_PosixPath('src/%s' % path_below_src)]
at_index = entry.find('@')
git_repo_url = entry[:at_index]
git_hash = entry[at_index + 1:]
return self._GetCommitInfo(path_below_src, git_hash, git_repo_url)
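# A DEPS entry here has the form '<git repo url>@<commit hash>', e.g.
# (hypothetical) 'https://example.googlesource.com/webgl.git@1a2b3c4d...',
# which is split at the '@' into the repo URL and the pinned revision.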
def _GetCLInfo(self):
cl_output = self._RunCommand(['git', 'cl', 'issue'])
m = CL_ISSUE_RE.match(cl_output.strip())
if not m:
logging.error('Cannot find any CL info. Output was:\n%s', cl_output)
sys.exit(-1)
issue_number = int(m.group(1))
url = m.group(2)
# Parse the codereview host from the URL.
m = REVIEW_URL_RE.match(url)
if not m:
logging.error('Cannot parse codereview host from URL: %s', url)
sys.exit(-1)
review_server = m.group(1)
return CLInfo(issue_number, url, review_server)
def _GetCurrentBranchName(self):
return self._RunCommand(
['git', 'rev-parse', '--abbrev-ref', 'HEAD']).splitlines()[0]
def _IsTreeClean(self):
lines = self._RunCommand(
['git', 'status', '--porcelain', '-uno']).splitlines()
if len(lines) == 0:
return True
logging.debug('Dirty/unversioned files:\n%s', '\n'.join(lines))
return False
def _GetBugList(self, path_below_src, webgl_current, webgl_new):
# TODO(kbr): this isn't useful, at least not yet, when run against
# the WebGL Github repository.
working_dir = os.path.join(self._chromium_src, path_below_src)
lines = self._RunCommand(
['git','log',
'%s..%s' % (webgl_current.git_commit, webgl_new.git_commit)],
working_dir=working_dir).split('\n')
bugs = set()
for line in lines:
line = line.strip()
bug_prefix = 'BUG='
if line.startswith(bug_prefix):
bugs_strings = line[len(bug_prefix):].split(',')
for bug_string in bugs_strings:
try:
bugs.add(int(bug_string))
except:
# skip this, it may be a project specific bug such as
# "angleproject:X" or an ill-formed BUG= message
pass
return bugs
def _UpdateReadmeFile(self, readme_path, new_revision):
readme = open(os.path.join(self._chromium_src, readme_path), 'r+')
txt = readme.read()
m = re.sub(re.compile(r'.*^Revision: ([0-9]*).*', re.MULTILINE),
('Revision: %s' % new_revision), txt)
readme.seek(0)
readme.write(m)
readme.truncate()
def PrepareRoll(self, ignore_checks, run_tryjobs):
# TODO(kjellander): use os.path.normcase, os.path.join etc for all paths for
# cross platform compatibility.
if not ignore_checks:
if self._GetCurrentBranchName() != 'main':
logging.error('Please checkout the main branch.')
return -1
if not self._IsTreeClean():
logging.error('Please make sure you don\'t have any modified files.')
return -1
# Always clean up any previous roll.
self.Abort()
logging.debug('Pulling latest changes')
if not ignore_checks:
self._RunCommand(['git', 'pull'])
self._RunCommand(['git', 'checkout', '-b', ROLL_BRANCH_NAME])
# Modify Chromium's DEPS file.
# Parse current hashes.
deps_filename = os.path.join(self._chromium_src, 'DEPS')
deps = _ParseDepsFile(deps_filename)
webgl_current = self._GetDepsCommitInfo(deps, WEBGL_PATH)
# Find ToT revisions.
webgl_latest = self._GetCommitInfo(WEBGL_PATH)
if IS_WIN:
# Make sure the roll script doesn't use windows line endings
self._RunCommand(['git', 'config', 'core.autocrlf', 'true'])
self._UpdateDep(deps_filename, WEBGL_PATH, webgl_latest)
self._UpdateWebGLRevTextFile(WEBGL_REVISION_TEXT_FILE, webgl_latest)
if self._IsTreeClean():
logging.debug('Tree is clean - no changes detected.')
self._DeleteRollBranch()
else:
bugs = self._GetBugList(WEBGL_PATH, webgl_current, webgl_latest)
description = _GenerateCLDescriptionCommand(
webgl_current, webgl_latest, bugs)
logging.debug('Committing changes locally.')
self._RunCommand(['git', 'add', '--update', '.'])
self._RunCommand(['git', 'commit', '-m', description])
logging.debug('Uploading changes...')
self._RunCommand(['git', 'cl', 'upload'],
extra_env={'EDITOR': 'true'})
if run_tryjobs:
# Kick off tryjobs.
base_try_cmd = ['git', 'cl', 'try']
self._RunCommand(base_try_cmd)
cl_info = self._GetCLInfo()
print('Issue: %d URL: %s' % (cl_info.issue, cl_info.url))
# Checkout main again.
self._RunCommand(['git', 'checkout', 'main'])
print('Roll branch left as ' + ROLL_BRANCH_NAME)
return 0
def _UpdateDep(self, deps_filename, dep_relative_to_src, commit_info):
dep_name = _PosixPath(os.path.join('src', dep_relative_to_src))
dep_revision = '%s@%s' % (dep_name, commit_info.git_commit)
self._RunCommand(
['gclient', 'setdep', '-r', dep_revision],
working_dir=os.path.dirname(deps_filename))
def _UpdateWebGLRevTextFile(self, txt_filename, commit_info):
# Rolling the WebGL conformance tests must cause at least all of
# the WebGL tests to run. There are already exclusions in
# trybot_analyze_config.json which force all tests to run if
# changes under src/content/test/gpu are made. (This rule
# typically only takes effect on the GPU bots.) To make sure this
# happens all the time, update an autogenerated text file in this
# directory.
with open(txt_filename, 'w') as fh:
print('# AUTOGENERATED FILE - DO NOT EDIT', file=fh)
print('# SEE roll_webgl_conformance.py', file=fh)
print('Current webgl revision %s' % commit_info.git_commit, file=fh)
def _DeleteRollBranch(self):
self._RunCommand(['git', 'checkout', 'main'])
self._RunCommand(['git', 'branch', '-D', ROLL_BRANCH_NAME])
logging.debug('Deleted the local roll branch (%s)', ROLL_BRANCH_NAME)
def _GetBranches(self):
"""Returns a tuple of active,branches.
The 'active' is the name of the currently active branch and 'branches' is a
list of all branches.
"""
lines = self._RunCommand(['git', 'branch']).split('\n')
branches = []
active = ''
for l in lines:
if '*' in l:
# The assumption is that the first char will always be the '*'.
active = l[1:].strip()
branches.append(active)
else:
b = l.strip()
if b:
branches.append(b)
return (active, branches)
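# e.g. 'git branch' output "* main\n  special_webgl_roll_branch" parses to
# ('main', ['main', 'special_webgl_roll_branch']).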
def Abort(self):
active_branch, branches = self._GetBranches()
if active_branch == ROLL_BRANCH_NAME:
active_branch = 'main'
if ROLL_BRANCH_NAME in branches:
print('Aborting pending roll.')
self._RunCommand(['git', 'checkout', ROLL_BRANCH_NAME])
# Ignore an error here in case an issue wasn't created for some reason.
self._RunCommand(['git', 'cl', 'set_close'], ignore_exit_code=True)
self._RunCommand(['git', 'checkout', active_branch])
self._RunCommand(['git', 'branch', '-D', ROLL_BRANCH_NAME])
return 0
def main():
parser = argparse.ArgumentParser(
description='Auto-generates a CL containing a WebGL conformance roll.')
parser.add_argument('--abort',
help=('Aborts a previously prepared roll. '
'Closes any associated issues and deletes the roll branches'),
action='store_true')
parser.add_argument(
'--ignore-checks',
action='store_true',
default=False,
help=('Skips checks for being on the main branch, dirty workspaces and '
'the updating of the checkout. Will still delete and create local '
'Git branches.'))
parser.add_argument('--run-tryjobs', action='store_true', default=False,
help=('Start the dry-run tryjobs for the newly generated CL. Use this '
'when you have no need to make changes to the WebGL conformance '
'test expectations in the same CL and want to avoid kicking off '
'the tryjobs manually.'))
parser.add_argument('-v', '--verbose', action='store_true', default=False,
help='Be extra verbose in printing of log messages.')
args = parser.parse_args()
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.ERROR)
autoroller = AutoRoller(SRC_DIR)
if args.abort:
return autoroller.Abort()
else:
return autoroller.PrepareRoll(args.ignore_checks, args.run_tryjobs)
if __name__ == '__main__':
sys.exit(main())
|
|
import os
import pprint
import shutil
import sys
import textwrap
from pathlib import Path
from typing import List
import pytest
from _pytest.config import ExitCode
from _pytest.fixtures import FixtureRequest
from _pytest.main import _in_venv
from _pytest.main import Session
from _pytest.monkeypatch import MonkeyPatch
from _pytest.nodes import Item
from _pytest.pathlib import symlink_or_skip
from _pytest.pytester import HookRecorder
from _pytest.pytester import Pytester
from _pytest.pytester import Testdir
def ensure_file(file_path: Path) -> Path:
"""Ensure that file exists"""
file_path.parent.mkdir(parents=True, exist_ok=True)
file_path.touch(exist_ok=True)
return file_path
class TestCollector:
def test_collect_versus_item(self) -> None:
from pytest import Collector
from pytest import Item
assert not issubclass(Collector, Item)
assert not issubclass(Item, Collector)
def test_check_equality(self, pytester: Pytester) -> None:
modcol = pytester.getmodulecol(
"""
def test_pass(): pass
def test_fail(): assert 0
"""
)
fn1 = pytester.collect_by_name(modcol, "test_pass")
assert isinstance(fn1, pytest.Function)
fn2 = pytester.collect_by_name(modcol, "test_pass")
assert isinstance(fn2, pytest.Function)
assert fn1 == fn2
assert fn1 != modcol
assert hash(fn1) == hash(fn2)
fn3 = pytester.collect_by_name(modcol, "test_fail")
assert isinstance(fn3, pytest.Function)
assert not (fn1 == fn3)
assert fn1 != fn3
for fn in fn1, fn2, fn3:
assert isinstance(fn, pytest.Function)
assert fn != 3 # type: ignore[comparison-overlap]
assert fn != modcol
assert fn != [1, 2, 3] # type: ignore[comparison-overlap]
assert [1, 2, 3] != fn # type: ignore[comparison-overlap]
assert modcol != fn
assert pytester.collect_by_name(modcol, "doesnotexist") is None
def test_getparent(self, pytester: Pytester) -> None:
modcol = pytester.getmodulecol(
"""
class TestClass:
def test_foo(self):
pass
"""
)
cls = pytester.collect_by_name(modcol, "TestClass")
assert isinstance(cls, pytest.Class)
instance = pytester.collect_by_name(cls, "()")
assert isinstance(instance, pytest.Instance)
fn = pytester.collect_by_name(instance, "test_foo")
assert isinstance(fn, pytest.Function)
module_parent = fn.getparent(pytest.Module)
assert module_parent is modcol
function_parent = fn.getparent(pytest.Function)
assert function_parent is fn
class_parent = fn.getparent(pytest.Class)
assert class_parent is cls
def test_getcustomfile_roundtrip(self, pytester: Pytester) -> None:
hello = pytester.makefile(".xxx", hello="world")
pytester.makepyfile(
conftest="""
import pytest
class CustomFile(pytest.File):
pass
def pytest_collect_file(path, parent):
if path.ext == ".xxx":
return CustomFile.from_parent(fspath=path, parent=parent)
"""
)
node = pytester.getpathnode(hello)
assert isinstance(node, pytest.File)
assert node.name == "hello.xxx"
nodes = node.session.perform_collect([node.nodeid], genitems=False)
assert len(nodes) == 1
assert isinstance(nodes[0], pytest.File)
def test_can_skip_class_with_test_attr(self, pytester: Pytester) -> None:
"""Assure test class is skipped when using `__test__=False` (See #2007)."""
pytester.makepyfile(
"""
class TestFoo(object):
__test__ = False
def __init__(self):
pass
def test_foo():
assert True
"""
)
result = pytester.runpytest()
result.stdout.fnmatch_lines(["collected 0 items", "*no tests ran in*"])
class TestCollectFS:
def test_ignored_certain_directories(self, pytester: Pytester) -> None:
tmpdir = pytester.path
ensure_file(tmpdir / "build" / "test_notfound.py")
ensure_file(tmpdir / "dist" / "test_notfound.py")
ensure_file(tmpdir / "_darcs" / "test_notfound.py")
ensure_file(tmpdir / "CVS" / "test_notfound.py")
ensure_file(tmpdir / "{arch}" / "test_notfound.py")
ensure_file(tmpdir / ".whatever" / "test_notfound.py")
ensure_file(tmpdir / ".bzr" / "test_notfound.py")
ensure_file(tmpdir / "normal" / "test_found.py")
for x in Path(str(tmpdir)).rglob("test_*.py"):
x.write_text("def test_hello(): pass", "utf-8")
result = pytester.runpytest("--collect-only")
s = result.stdout.str()
assert "test_notfound" not in s
assert "test_found" in s
@pytest.mark.parametrize(
"fname",
(
"activate",
"activate.csh",
"activate.fish",
"Activate",
"Activate.bat",
"Activate.ps1",
),
)
def test_ignored_virtualenvs(self, pytester: Pytester, fname: str) -> None:
bindir = "Scripts" if sys.platform.startswith("win") else "bin"
ensure_file(pytester.path / "virtual" / bindir / fname)
testfile = ensure_file(pytester.path / "virtual" / "test_invenv.py")
testfile.write_text("def test_hello(): pass")
# by default, ignore tests inside a virtualenv
result = pytester.runpytest()
result.stdout.no_fnmatch_line("*test_invenv*")
# allow test collection if user insists
result = pytester.runpytest("--collect-in-virtualenv")
assert "test_invenv" in result.stdout.str()
# allow test collection if user directly passes in the directory
result = pytester.runpytest("virtual")
assert "test_invenv" in result.stdout.str()
@pytest.mark.parametrize(
"fname",
(
"activate",
"activate.csh",
"activate.fish",
"Activate",
"Activate.bat",
"Activate.ps1",
),
)
def test_ignored_virtualenvs_norecursedirs_precedence(
self, pytester: Pytester, fname: str
) -> None:
bindir = "Scripts" if sys.platform.startswith("win") else "bin"
# norecursedirs takes priority
ensure_file(pytester.path / ".virtual" / bindir / fname)
testfile = ensure_file(pytester.path / ".virtual" / "test_invenv.py")
testfile.write_text("def test_hello(): pass")
result = pytester.runpytest("--collect-in-virtualenv")
result.stdout.no_fnmatch_line("*test_invenv*")
# ...unless the virtualenv is explicitly given on the CLI
result = pytester.runpytest("--collect-in-virtualenv", ".virtual")
assert "test_invenv" in result.stdout.str()
@pytest.mark.parametrize(
"fname",
(
"activate",
"activate.csh",
"activate.fish",
"Activate",
"Activate.bat",
"Activate.ps1",
),
)
def test__in_venv(self, testdir: Testdir, fname: str) -> None:
"""Directly test the virtual env detection function"""
bindir = "Scripts" if sys.platform.startswith("win") else "bin"
# no bin/activate, not a virtualenv
base_path = testdir.tmpdir.mkdir("venv")
assert _in_venv(base_path) is False
# with bin/activate, totally a virtualenv
base_path.ensure(bindir, fname)
assert _in_venv(base_path) is True
def test_custom_norecursedirs(self, pytester: Pytester) -> None:
pytester.makeini(
"""
[pytest]
norecursedirs = mydir xyz*
"""
)
tmpdir = pytester.path
ensure_file(tmpdir / "mydir" / "test_hello.py").write_text("def test_1(): pass")
ensure_file(tmpdir / "xyz123" / "test_2.py").write_text("def test_2(): 0/0")
ensure_file(tmpdir / "xy" / "test_ok.py").write_text("def test_3(): pass")
rec = pytester.inline_run()
rec.assertoutcome(passed=1)
rec = pytester.inline_run("xyz123/test_2.py")
rec.assertoutcome(failed=1)
def test_testpaths_ini(self, pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
pytester.makeini(
"""
[pytest]
testpaths = gui uts
"""
)
tmpdir = pytester.path
ensure_file(tmpdir / "env" / "test_1.py").write_text("def test_env(): pass")
ensure_file(tmpdir / "gui" / "test_2.py").write_text("def test_gui(): pass")
ensure_file(tmpdir / "uts" / "test_3.py").write_text("def test_uts(): pass")
# executing from rootdir only tests from `testpaths` directories
# are collected
items, reprec = pytester.inline_genitems("-v")
assert [x.name for x in items] == ["test_gui", "test_uts"]
# check that explicitly passing directories in the command-line
# collects the tests
for dirname in ("env", "gui", "uts"):
items, reprec = pytester.inline_genitems(tmpdir.joinpath(dirname))
assert [x.name for x in items] == ["test_%s" % dirname]
# changing cwd to each subdirectory and running pytest without
# arguments collects the tests in that directory normally
for dirname in ("env", "gui", "uts"):
monkeypatch.chdir(pytester.path.joinpath(dirname))
items, reprec = pytester.inline_genitems()
assert [x.name for x in items] == ["test_%s" % dirname]
class TestCollectPluginHookRelay:
def test_pytest_collect_file(self, testdir: Testdir) -> None:
wascalled = []
class Plugin:
def pytest_collect_file(self, path):
if not path.basename.startswith("."):
# Ignore hidden files, e.g. .testmondata.
wascalled.append(path)
testdir.makefile(".abc", "xyz")
pytest.main(testdir.tmpdir, plugins=[Plugin()])
assert len(wascalled) == 1
assert wascalled[0].ext == ".abc"
class TestPrunetraceback:
def test_custom_repr_failure(self, pytester: Pytester) -> None:
p = pytester.makepyfile(
"""
import not_exists
"""
)
pytester.makeconftest(
"""
import pytest
def pytest_collect_file(path, parent):
return MyFile.from_parent(fspath=path, parent=parent)
class MyError(Exception):
pass
class MyFile(pytest.File):
def collect(self):
raise MyError()
def repr_failure(self, excinfo):
if excinfo.errisinstance(MyError):
return "hello world"
return pytest.File.repr_failure(self, excinfo)
"""
)
result = pytester.runpytest(p)
result.stdout.fnmatch_lines(["*ERROR collecting*", "*hello world*"])
@pytest.mark.xfail(reason="other mechanism for adding to reporting needed")
def test_collect_report_postprocessing(self, pytester: Pytester) -> None:
p = pytester.makepyfile(
"""
import not_exists
"""
)
pytester.makeconftest(
"""
import pytest
@pytest.hookimpl(hookwrapper=True)
def pytest_make_collect_report():
outcome = yield
rep = outcome.get_result()
rep.headerlines += ["header1"]
outcome.force_result(rep)
"""
)
result = pytester.runpytest(p)
result.stdout.fnmatch_lines(["*ERROR collecting*", "*header1*"])
class TestCustomConftests:
def test_ignore_collect_path(self, pytester: Pytester) -> None:
pytester.makeconftest(
"""
def pytest_ignore_collect(path, config):
return path.basename.startswith("x") or \
path.basename == "test_one.py"
"""
)
sub = pytester.mkdir("xy123")
ensure_file(sub / "test_hello.py").write_text("syntax error")
sub.joinpath("conftest.py").write_text("syntax error")
pytester.makepyfile("def test_hello(): pass")
pytester.makepyfile(test_one="syntax error")
result = pytester.runpytest("--fulltrace")
assert result.ret == 0
result.stdout.fnmatch_lines(["*1 passed*"])
def test_ignore_collect_not_called_on_argument(self, pytester: Pytester) -> None:
pytester.makeconftest(
"""
def pytest_ignore_collect(path, config):
return True
"""
)
p = pytester.makepyfile("def test_hello(): pass")
result = pytester.runpytest(p)
assert result.ret == 0
result.stdout.fnmatch_lines(["*1 passed*"])
result = pytester.runpytest()
assert result.ret == ExitCode.NO_TESTS_COLLECTED
result.stdout.fnmatch_lines(["*collected 0 items*"])
def test_collectignore_exclude_on_option(self, pytester: Pytester) -> None:
pytester.makeconftest(
"""
collect_ignore = ['hello', 'test_world.py']
def pytest_addoption(parser):
parser.addoption("--XX", action="store_true", default=False)
def pytest_configure(config):
if config.getvalue("XX"):
collect_ignore[:] = []
"""
)
pytester.mkdir("hello")
pytester.makepyfile(test_world="def test_hello(): pass")
result = pytester.runpytest()
assert result.ret == ExitCode.NO_TESTS_COLLECTED
result.stdout.no_fnmatch_line("*passed*")
result = pytester.runpytest("--XX")
assert result.ret == 0
assert "passed" in result.stdout.str()
def test_collectignoreglob_exclude_on_option(self, pytester: Pytester) -> None:
pytester.makeconftest(
"""
collect_ignore_glob = ['*w*l[dt]*']
def pytest_addoption(parser):
parser.addoption("--XX", action="store_true", default=False)
def pytest_configure(config):
if config.getvalue("XX"):
collect_ignore_glob[:] = []
"""
)
pytester.makepyfile(test_world="def test_hello(): pass")
pytester.makepyfile(test_welt="def test_hallo(): pass")
result = pytester.runpytest()
assert result.ret == ExitCode.NO_TESTS_COLLECTED
result.stdout.fnmatch_lines(["*collected 0 items*"])
result = pytester.runpytest("--XX")
assert result.ret == 0
result.stdout.fnmatch_lines(["*2 passed*"])
def test_pytest_fs_collect_hooks_are_seen(self, pytester: Pytester) -> None:
pytester.makeconftest(
"""
import pytest
class MyModule(pytest.Module):
pass
def pytest_collect_file(path, parent):
if path.ext == ".py":
return MyModule.from_parent(fspath=path, parent=parent)
"""
)
pytester.mkdir("sub")
pytester.makepyfile("def test_x(): pass")
result = pytester.runpytest("--co")
result.stdout.fnmatch_lines(["*MyModule*", "*test_x*"])
def test_pytest_collect_file_from_sister_dir(self, pytester: Pytester) -> None:
sub1 = pytester.mkpydir("sub1")
sub2 = pytester.mkpydir("sub2")
conf1 = pytester.makeconftest(
"""
import pytest
class MyModule1(pytest.Module):
pass
def pytest_collect_file(path, parent):
if path.ext == ".py":
return MyModule1.from_parent(fspath=path, parent=parent)
"""
)
conf1.replace(sub1.joinpath(conf1.name))
conf2 = pytester.makeconftest(
"""
import pytest
class MyModule2(pytest.Module):
pass
def pytest_collect_file(path, parent):
if path.ext == ".py":
return MyModule2.from_parent(fspath=path, parent=parent)
"""
)
conf2.replace(sub2.joinpath(conf2.name))
p = pytester.makepyfile("def test_x(): pass")
shutil.copy(p, sub1.joinpath(p.name))
shutil.copy(p, sub2.joinpath(p.name))
result = pytester.runpytest("--co")
result.stdout.fnmatch_lines(["*MyModule1*", "*MyModule2*", "*test_x*"])
class TestSession:
def test_collect_topdir(self, pytester: Pytester) -> None:
p = pytester.makepyfile("def test_func(): pass")
id = "::".join([p.name, "test_func"])
# XXX migrate to collectonly? (see below)
config = pytester.parseconfig(id)
topdir = pytester.path
rcol = Session.from_config(config)
assert topdir == rcol.fspath
# rootid = rcol.nodeid
# root2 = rcol.perform_collect([rcol.nodeid], genitems=False)[0]
# assert root2 == rcol, rootid
colitems = rcol.perform_collect([rcol.nodeid], genitems=False)
assert len(colitems) == 1
assert colitems[0].fspath == p
def get_reported_items(self, hookrec: HookRecorder) -> List[Item]:
"""Return pytest.Item instances reported by the pytest_collectreport hook"""
calls = hookrec.getcalls("pytest_collectreport")
return [
x
for call in calls
for x in call.report.result
if isinstance(x, pytest.Item)
]
def test_collect_protocol_single_function(self, pytester: Pytester) -> None:
p = pytester.makepyfile("def test_func(): pass")
id = "::".join([p.name, "test_func"])
items, hookrec = pytester.inline_genitems(id)
(item,) = items
assert item.name == "test_func"
newid = item.nodeid
assert newid == id
pprint.pprint(hookrec.calls)
topdir = pytester.path # noqa
hookrec.assert_contains(
[
("pytest_collectstart", "collector.fspath == topdir"),
("pytest_make_collect_report", "collector.fspath == topdir"),
("pytest_collectstart", "collector.fspath == p"),
("pytest_make_collect_report", "collector.fspath == p"),
("pytest_pycollect_makeitem", "name == 'test_func'"),
("pytest_collectreport", "report.result[0].name == 'test_func'"),
]
)
# ensure we are reporting the collection of the single test item (#2464)
assert [x.name for x in self.get_reported_items(hookrec)] == ["test_func"]
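# Note: the node ids used above have the form "<file name>::<test name>",
# e.g. (hypothetical) "test_spam.py::test_func"; class-based tests add further
# "::"-separated segments, as exercised in test_collect_protocol_method below.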
def test_collect_protocol_method(self, pytester: Pytester) -> None:
p = pytester.makepyfile(
"""
class TestClass(object):
def test_method(self):
pass
"""
)
normid = p.name + "::TestClass::test_method"
for id in [p.name, p.name + "::TestClass", normid]:
items, hookrec = pytester.inline_genitems(id)
assert len(items) == 1
assert items[0].name == "test_method"
newid = items[0].nodeid
assert newid == normid
# ensure we are reporting the collection of the single test item (#2464)
assert [x.name for x in self.get_reported_items(hookrec)] == ["test_method"]
def test_collect_custom_nodes_multi_id(self, pytester: Pytester) -> None:
p = pytester.makepyfile("def test_func(): pass")
pytester.makeconftest(
"""
import pytest
class SpecialItem(pytest.Item):
def runtest(self):
return # ok
class SpecialFile(pytest.File):
def collect(self):
return [SpecialItem.from_parent(name="check", parent=self)]
def pytest_collect_file(path, parent):
if path.basename == %r:
return SpecialFile.from_parent(fspath=path, parent=parent)
"""
% p.name
)
id = p.name
items, hookrec = pytester.inline_genitems(id)
pprint.pprint(hookrec.calls)
assert len(items) == 2
hookrec.assert_contains(
[
("pytest_collectstart", "collector.fspath == collector.session.fspath"),
(
"pytest_collectstart",
"collector.__class__.__name__ == 'SpecialFile'",
),
("pytest_collectstart", "collector.__class__.__name__ == 'Module'"),
("pytest_pycollect_makeitem", "name == 'test_func'"),
("pytest_collectreport", "report.nodeid.startswith(p.name)"),
]
)
assert len(self.get_reported_items(hookrec)) == 2
def test_collect_subdir_event_ordering(self, pytester: Pytester) -> None:
p = pytester.makepyfile("def test_func(): pass")
aaa = pytester.mkpydir("aaa")
test_aaa = aaa.joinpath("test_aaa.py")
p.replace(test_aaa)
items, hookrec = pytester.inline_genitems()
assert len(items) == 1
pprint.pprint(hookrec.calls)
hookrec.assert_contains(
[
("pytest_collectstart", "collector.fspath == test_aaa"),
("pytest_pycollect_makeitem", "name == 'test_func'"),
("pytest_collectreport", "report.nodeid.startswith('aaa/test_aaa.py')"),
]
)
def test_collect_two_commandline_args(self, pytester: Pytester) -> None:
p = pytester.makepyfile("def test_func(): pass")
aaa = pytester.mkpydir("aaa")
bbb = pytester.mkpydir("bbb")
test_aaa = aaa.joinpath("test_aaa.py")
shutil.copy(p, test_aaa)
test_bbb = bbb.joinpath("test_bbb.py")
p.replace(test_bbb)
id = "."
items, hookrec = pytester.inline_genitems(id)
assert len(items) == 2
pprint.pprint(hookrec.calls)
hookrec.assert_contains(
[
("pytest_collectstart", "collector.fspath == test_aaa"),
("pytest_pycollect_makeitem", "name == 'test_func'"),
("pytest_collectreport", "report.nodeid == 'aaa/test_aaa.py'"),
("pytest_collectstart", "collector.fspath == test_bbb"),
("pytest_pycollect_makeitem", "name == 'test_func'"),
("pytest_collectreport", "report.nodeid == 'bbb/test_bbb.py'"),
]
)
def test_serialization_byid(self, pytester: Pytester) -> None:
pytester.makepyfile("def test_func(): pass")
items, hookrec = pytester.inline_genitems()
assert len(items) == 1
(item,) = items
items2, hookrec = pytester.inline_genitems(item.nodeid)
(item2,) = items2
assert item2.name == item.name
assert item2.fspath == item.fspath
def test_find_byid_without_instance_parents(self, pytester: Pytester) -> None:
p = pytester.makepyfile(
"""
class TestClass(object):
def test_method(self):
pass
"""
)
arg = p.name + "::TestClass::test_method"
items, hookrec = pytester.inline_genitems(arg)
assert len(items) == 1
(item,) = items
assert item.nodeid.endswith("TestClass::test_method")
# ensure we are reporting the collection of the single test item (#2464)
assert [x.name for x in self.get_reported_items(hookrec)] == ["test_method"]
class Test_getinitialnodes:
def test_global_file(self, pytester: Pytester) -> None:
tmpdir = pytester.path
x = ensure_file(tmpdir / "x.py")
with tmpdir.cwd():
config = pytester.parseconfigure(x)
col = pytester.getnode(config, x)
assert isinstance(col, pytest.Module)
assert col.name == "x.py"
assert col.parent is not None
assert col.parent.parent is None
for parent in col.listchain():
assert parent.config is config
def test_pkgfile(self, pytester: Pytester) -> None:
"""Verify nesting when a module is within a package.
The parent chain should match: Module<x.py> -> Package<subdir> -> Session.
Session's parent should always be None.
"""
tmpdir = pytester.path
subdir = tmpdir.joinpath("subdir")
x = ensure_file(subdir / "x.py")
ensure_file(subdir / "__init__.py")
with subdir.cwd():
config = pytester.parseconfigure(x)
col = pytester.getnode(config, x)
assert col is not None
assert col.name == "x.py"
assert isinstance(col, pytest.Module)
assert isinstance(col.parent, pytest.Package)
assert isinstance(col.parent.parent, pytest.Session)
# session is batman (has no parents)
assert col.parent.parent.parent is None
for parent in col.listchain():
assert parent.config is config
class Test_genitems:
def test_check_collect_hashes(self, pytester: Pytester) -> None:
p = pytester.makepyfile(
"""
def test_1():
pass
def test_2():
pass
"""
)
shutil.copy(p, p.parent / (p.stem + "2" + ".py"))
items, reprec = pytester.inline_genitems(p.parent)
assert len(items) == 4
for numi, i in enumerate(items):
for numj, j in enumerate(items):
if numj != numi:
assert hash(i) != hash(j)
assert i != j
def test_example_items1(self, pytester: Pytester) -> None:
p = pytester.makepyfile(
"""
import pytest
def testone():
pass
class TestX(object):
def testmethod_one(self):
pass
class TestY(TestX):
@pytest.mark.parametrize("arg0", [".["])
def testmethod_two(self, arg0):
pass
"""
)
items, reprec = pytester.inline_genitems(p)
assert len(items) == 4
assert items[0].name == "testone"
assert items[1].name == "testmethod_one"
assert items[2].name == "testmethod_one"
assert items[3].name == "testmethod_two[.[]"
# let's also test getmodpath here
assert items[0].getmodpath() == "testone" # type: ignore[attr-defined]
assert items[1].getmodpath() == "TestX.testmethod_one" # type: ignore[attr-defined]
assert items[2].getmodpath() == "TestY.testmethod_one" # type: ignore[attr-defined]
# PR #6202: Fix incorrect result of getmodpath method. (Resolves issue #6189)
assert items[3].getmodpath() == "TestY.testmethod_two[.[]" # type: ignore[attr-defined]
s = items[0].getmodpath(stopatmodule=False) # type: ignore[attr-defined]
assert s.endswith("test_example_items1.testone")
print(s)
def test_class_and_functions_discovery_using_glob(self, pytester: Pytester) -> None:
"""Test that Python_classes and Python_functions config options work
as prefixes and glob-like patterns (#600)."""
pytester.makeini(
"""
[pytest]
python_classes = *Suite Test
python_functions = *_test test
"""
)
p = pytester.makepyfile(
"""
class MyTestSuite(object):
def x_test(self):
pass
class TestCase(object):
def test_y(self):
pass
"""
)
items, reprec = pytester.inline_genitems(p)
ids = [x.getmodpath() for x in items] # type: ignore[attr-defined]
assert ids == ["MyTestSuite.x_test", "TestCase.test_y"]
def test_matchnodes_two_collections_same_file(pytester: Pytester) -> None:
pytester.makeconftest(
"""
import pytest
def pytest_configure(config):
config.pluginmanager.register(Plugin2())
class Plugin2(object):
def pytest_collect_file(self, path, parent):
if path.ext == ".abc":
return MyFile2.from_parent(fspath=path, parent=parent)
def pytest_collect_file(path, parent):
if path.ext == ".abc":
return MyFile1.from_parent(fspath=path, parent=parent)
class MyFile1(pytest.File):
def collect(self):
yield Item1.from_parent(name="item1", parent=self)
class MyFile2(pytest.File):
def collect(self):
yield Item2.from_parent(name="item2", parent=self)
class Item1(pytest.Item):
def runtest(self):
pass
class Item2(pytest.Item):
def runtest(self):
pass
"""
)
p = pytester.makefile(".abc", "")
result = pytester.runpytest()
assert result.ret == 0
result.stdout.fnmatch_lines(["*2 passed*"])
res = pytester.runpytest("%s::item2" % p.name)
res.stdout.fnmatch_lines(["*1 passed*"])
class TestNodekeywords:
def test_no_under(self, pytester: Pytester) -> None:
modcol = pytester.getmodulecol(
"""
def test_pass(): pass
def test_fail(): assert 0
"""
)
values = list(modcol.keywords)
assert modcol.name in values
for x in values:
assert not x.startswith("_")
assert modcol.name in repr(modcol.keywords)
def test_issue345(self, pytester: Pytester) -> None:
pytester.makepyfile(
"""
def test_should_not_be_selected():
assert False, 'I should not have been selected to run'
def test___repr__():
pass
"""
)
reprec = pytester.inline_run("-k repr")
reprec.assertoutcome(passed=1, failed=0)
def test_keyword_matching_is_case_insensitive_by_default(
self, pytester: Pytester
) -> None:
"""Check that selection via -k EXPRESSION is case-insensitive.
Since markers are also added to the node keywords, they too can
be matched without having to think about case sensitivity.
"""
pytester.makepyfile(
"""
import pytest
def test_sPeCiFiCToPiC_1():
assert True
class TestSpecificTopic_2:
def test(self):
assert True
@pytest.mark.sPeCiFiCToPic_3
def test():
assert True
@pytest.mark.sPeCiFiCToPic_4
class Test:
def test(self):
assert True
def test_failing_5():
assert False, "This should not match"
"""
)
num_matching_tests = 4
for expression in ("specifictopic", "SPECIFICTOPIC", "SpecificTopic"):
reprec = pytester.inline_run("-k " + expression)
reprec.assertoutcome(passed=num_matching_tests, failed=0)
COLLECTION_ERROR_PY_FILES = dict(
test_01_failure="""
def test_1():
assert False
""",
test_02_import_error="""
import asdfasdfasdf
def test_2():
assert True
""",
test_03_import_error="""
import asdfasdfasdf
def test_3():
assert True
""",
test_04_success="""
def test_4():
assert True
""",
)
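# Fixture modules for the collection-error tests below: test_01 fails at run
# time, test_02 and test_03 fail at import time (collection errors), and
# test_04 passes.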
def test_exit_on_collection_error(pytester: Pytester) -> None:
"""Verify that all collection errors are collected and no tests executed"""
pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)
res = pytester.runpytest()
assert res.ret == 2
res.stdout.fnmatch_lines(
[
"collected 2 items / 2 errors",
"*ERROR collecting test_02_import_error.py*",
"*No module named *asdfa*",
"*ERROR collecting test_03_import_error.py*",
"*No module named *asdfa*",
]
)
def test_exit_on_collection_with_maxfail_smaller_than_n_errors(
pytester: Pytester,
) -> None:
"""
    Verify collection is aborted once maxfail errors are encountered, ignoring
    further modules which would cause more collection errors.
"""
pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)
res = pytester.runpytest("--maxfail=1")
assert res.ret == 1
res.stdout.fnmatch_lines(
[
"collected 1 item / 1 error",
"*ERROR collecting test_02_import_error.py*",
"*No module named *asdfa*",
"*! stopping after 1 failures !*",
"*= 1 error in *",
]
)
res.stdout.no_fnmatch_line("*test_03*")
def test_exit_on_collection_with_maxfail_bigger_than_n_errors(
pytester: Pytester,
) -> None:
"""
    Verify the test run aborts due to collection errors even if the maxfail
    count of errors was not reached.
"""
pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)
res = pytester.runpytest("--maxfail=4")
assert res.ret == 2
res.stdout.fnmatch_lines(
[
"collected 2 items / 2 errors",
"*ERROR collecting test_02_import_error.py*",
"*No module named *asdfa*",
"*ERROR collecting test_03_import_error.py*",
"*No module named *asdfa*",
"*! Interrupted: 2 errors during collection !*",
"*= 2 errors in *",
]
)
def test_continue_on_collection_errors(pytester: Pytester) -> None:
"""
    Verify tests are executed even when collection errors occur, provided the
    --continue-on-collection-errors flag is set.
"""
pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)
res = pytester.runpytest("--continue-on-collection-errors")
assert res.ret == 1
res.stdout.fnmatch_lines(
["collected 2 items / 2 errors", "*1 failed, 1 passed, 2 errors*"]
)
def test_continue_on_collection_errors_maxfail(pytester: Pytester) -> None:
"""
    Verify tests are executed even when collection errors occur and that maxfail
    is honoured (including the collection error count).
    4 tests: 2 collection errors + 1 failure + 1 success.
    test_4 is never executed because the run uses --maxfail=3, so it is
    interrupted after the 2 collection errors + 1 failure.
"""
pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)
res = pytester.runpytest("--continue-on-collection-errors", "--maxfail=3")
assert res.ret == 1
res.stdout.fnmatch_lines(["collected 2 items / 2 errors", "*1 failed, 2 errors*"])
def test_fixture_scope_sibling_conftests(pytester: Pytester) -> None:
"""Regression test case for https://github.com/pytest-dev/pytest/issues/2836"""
foo_path = pytester.mkdir("foo")
foo_path.joinpath("conftest.py").write_text(
textwrap.dedent(
"""\
import pytest
@pytest.fixture
def fix():
return 1
"""
)
)
foo_path.joinpath("test_foo.py").write_text("def test_foo(fix): assert fix == 1")
# Tests in `food/` should not see the conftest fixture from `foo/`
food_path = pytester.mkpydir("food")
food_path.joinpath("test_food.py").write_text("def test_food(fix): assert fix == 1")
res = pytester.runpytest()
assert res.ret == 1
res.stdout.fnmatch_lines(
[
"*ERROR at setup of test_food*",
"E*fixture 'fix' not found",
"*1 passed, 1 error*",
]
)
def test_collect_init_tests(pytester: Pytester) -> None:
"""Check that we collect files from __init__.py files when they patch the 'python_files' (#3773)"""
p = pytester.copy_example("collect/collect_init_tests")
result = pytester.runpytest(p, "--collect-only")
result.stdout.fnmatch_lines(
[
"collected 2 items",
"<Package tests>",
" <Module __init__.py>",
" <Function test_init>",
" <Module test_foo.py>",
" <Function test_foo>",
]
)
result = pytester.runpytest("./tests", "--collect-only")
result.stdout.fnmatch_lines(
[
"collected 2 items",
"<Package tests>",
" <Module __init__.py>",
" <Function test_init>",
" <Module test_foo.py>",
" <Function test_foo>",
]
)
# Ignores duplicates with "." and pkginit (#4310).
result = pytester.runpytest("./tests", ".", "--collect-only")
result.stdout.fnmatch_lines(
[
"collected 2 items",
"<Package tests>",
" <Module __init__.py>",
" <Function test_init>",
" <Module test_foo.py>",
" <Function test_foo>",
]
)
# Same as before, but different order.
result = pytester.runpytest(".", "tests", "--collect-only")
result.stdout.fnmatch_lines(
[
"collected 2 items",
"<Package tests>",
" <Module __init__.py>",
" <Function test_init>",
" <Module test_foo.py>",
" <Function test_foo>",
]
)
result = pytester.runpytest("./tests/test_foo.py", "--collect-only")
result.stdout.fnmatch_lines(
["<Package tests>", " <Module test_foo.py>", " <Function test_foo>"]
)
result.stdout.no_fnmatch_line("*test_init*")
result = pytester.runpytest("./tests/__init__.py", "--collect-only")
result.stdout.fnmatch_lines(
["<Package tests>", " <Module __init__.py>", " <Function test_init>"]
)
result.stdout.no_fnmatch_line("*test_foo*")
def test_collect_invalid_signature_message(pytester: Pytester) -> None:
"""Check that we issue a proper message when we can't determine the signature of a test
function (#4026).
"""
pytester.makepyfile(
"""
import pytest
class TestCase:
@pytest.fixture
def fix():
pass
"""
)
result = pytester.runpytest()
result.stdout.fnmatch_lines(
["Could not determine arguments of *.fix *: invalid method signature"]
)
def test_collect_handles_raising_on_dunder_class(pytester: Pytester) -> None:
"""Handle proxy classes like Django's LazySettings that might raise on
``isinstance`` (#4266).
"""
pytester.makepyfile(
"""
class ImproperlyConfigured(Exception):
pass
class RaisesOnGetAttr(object):
def raises(self):
raise ImproperlyConfigured
__class__ = property(raises)
raises = RaisesOnGetAttr()
def test_1():
pass
"""
)
result = pytester.runpytest()
result.stdout.fnmatch_lines(["*1 passed in*"])
assert result.ret == 0
def test_collect_with_chdir_during_import(pytester: Pytester) -> None:
subdir = pytester.mkdir("sub")
pytester.path.joinpath("conftest.py").write_text(
textwrap.dedent(
"""
import os
os.chdir(%r)
"""
% (str(subdir),)
)
)
pytester.makepyfile(
"""
def test_1():
import os
assert os.getcwd() == %r
"""
% (str(subdir),)
)
with pytester.path.cwd():
result = pytester.runpytest()
result.stdout.fnmatch_lines(["*1 passed in*"])
assert result.ret == 0
# Handles relative testpaths.
pytester.makeini(
"""
[pytest]
testpaths = .
"""
)
with pytester.path.cwd():
result = pytester.runpytest("--collect-only")
result.stdout.fnmatch_lines(["collected 1 item"])
def test_collect_pyargs_with_testpaths(
pytester: Pytester, monkeypatch: MonkeyPatch
) -> None:
testmod = pytester.mkdir("testmod")
# NOTE: __init__.py is not collected since it does not match python_files.
testmod.joinpath("__init__.py").write_text("def test_func(): pass")
testmod.joinpath("test_file.py").write_text("def test_func(): pass")
root = pytester.mkdir("root")
root.joinpath("pytest.ini").write_text(
textwrap.dedent(
"""
[pytest]
addopts = --pyargs
testpaths = testmod
"""
)
)
monkeypatch.setenv("PYTHONPATH", str(pytester.path), prepend=os.pathsep)
with root.cwd():
result = pytester.runpytest_subprocess()
result.stdout.fnmatch_lines(["*1 passed in*"])
def test_collect_symlink_file_arg(pytester: Pytester) -> None:
"""Collect a direct symlink works even if it does not match python_files (#4325)."""
real = pytester.makepyfile(
real="""
def test_nodeid(request):
assert request.node.nodeid == "symlink.py::test_nodeid"
"""
)
symlink = pytester.path.joinpath("symlink.py")
symlink_or_skip(real, symlink)
result = pytester.runpytest("-v", symlink)
result.stdout.fnmatch_lines(["symlink.py::test_nodeid PASSED*", "*1 passed in*"])
assert result.ret == 0
def test_collect_symlink_out_of_tree(pytester: Pytester) -> None:
"""Test collection of symlink via out-of-tree rootdir."""
sub = pytester.mkdir("sub")
real = sub.joinpath("test_real.py")
real.write_text(
textwrap.dedent(
"""
def test_nodeid(request):
# Should not contain sub/ prefix.
assert request.node.nodeid == "test_real.py::test_nodeid"
"""
),
)
out_of_tree = pytester.mkdir("out_of_tree")
symlink_to_sub = out_of_tree.joinpath("symlink_to_sub")
symlink_or_skip(sub, symlink_to_sub)
os.chdir(sub)
result = pytester.runpytest("-vs", "--rootdir=%s" % sub, symlink_to_sub)
result.stdout.fnmatch_lines(
[
# Should not contain "sub/"!
"test_real.py::test_nodeid PASSED"
]
)
assert result.ret == 0
def test_collect_symlink_dir(pytester: Pytester) -> None:
"""A symlinked directory is collected."""
dir = pytester.mkdir("dir")
dir.joinpath("test_it.py").write_text("def test_it(): pass", "utf-8")
    symlink_or_skip(dir, pytester.path.joinpath("symlink_dir"))
result = pytester.runpytest()
result.assert_outcomes(passed=2)
def test_collectignore_via_conftest(pytester: Pytester) -> None:
"""collect_ignore in parent conftest skips importing child (issue #4592)."""
tests = pytester.mkpydir("tests")
tests.joinpath("conftest.py").write_text("collect_ignore = ['ignore_me']")
ignore_me = tests.joinpath("ignore_me")
ignore_me.mkdir()
ignore_me.joinpath("__init__.py").touch()
ignore_me.joinpath("conftest.py").write_text("assert 0, 'should_not_be_called'")
result = pytester.runpytest()
assert result.ret == ExitCode.NO_TESTS_COLLECTED
def test_collect_pkg_init_and_file_in_args(pytester: Pytester) -> None:
subdir = pytester.mkdir("sub")
init = subdir.joinpath("__init__.py")
init.write_text("def test_init(): pass")
p = subdir.joinpath("test_file.py")
p.write_text("def test_file(): pass")
# NOTE: without "-o python_files=*.py" this collects test_file.py twice.
# This changed/broke with "Add package scoped fixtures #2283" (2b1410895)
# initially (causing a RecursionError).
result = pytester.runpytest("-v", str(init), str(p))
result.stdout.fnmatch_lines(
[
"sub/test_file.py::test_file PASSED*",
"sub/test_file.py::test_file PASSED*",
"*2 passed in*",
]
)
result = pytester.runpytest("-v", "-o", "python_files=*.py", str(init), str(p))
result.stdout.fnmatch_lines(
[
"sub/__init__.py::test_init PASSED*",
"sub/test_file.py::test_file PASSED*",
"*2 passed in*",
]
)
def test_collect_pkg_init_only(pytester: Pytester) -> None:
subdir = pytester.mkdir("sub")
init = subdir.joinpath("__init__.py")
init.write_text("def test_init(): pass")
result = pytester.runpytest(str(init))
result.stdout.fnmatch_lines(["*no tests ran in*"])
result = pytester.runpytest("-v", "-o", "python_files=*.py", str(init))
result.stdout.fnmatch_lines(["sub/__init__.py::test_init PASSED*", "*1 passed in*"])
@pytest.mark.parametrize("use_pkg", (True, False))
def test_collect_sub_with_symlinks(use_pkg: bool, pytester: Pytester) -> None:
"""Collection works with symlinked files and broken symlinks"""
sub = pytester.mkdir("sub")
if use_pkg:
sub.joinpath("__init__.py").touch()
sub.joinpath("test_file.py").write_text("def test_file(): pass")
# Create a broken symlink.
symlink_or_skip("test_doesnotexist.py", sub.joinpath("test_broken.py"))
# Symlink that gets collected.
symlink_or_skip("test_file.py", sub.joinpath("test_symlink.py"))
result = pytester.runpytest("-v", str(sub))
result.stdout.fnmatch_lines(
[
"sub/test_file.py::test_file PASSED*",
"sub/test_symlink.py::test_file PASSED*",
"*2 passed in*",
]
)
def test_collector_respects_tbstyle(pytester: Pytester) -> None:
p1 = pytester.makepyfile("assert 0")
result = pytester.runpytest(p1, "--tb=native")
assert result.ret == ExitCode.INTERRUPTED
result.stdout.fnmatch_lines(
[
"*_ ERROR collecting test_collector_respects_tbstyle.py _*",
"Traceback (most recent call last):",
' File "*/test_collector_respects_tbstyle.py", line 1, in <module>',
" assert 0",
"AssertionError: assert 0",
"*! Interrupted: 1 error during collection !*",
"*= 1 error in *",
]
)
def test_does_not_eagerly_collect_packages(pytester: Pytester) -> None:
pytester.makepyfile("def test(): pass")
pydir = pytester.mkpydir("foopkg")
pydir.joinpath("__init__.py").write_text("assert False")
result = pytester.runpytest()
assert result.ret == ExitCode.OK
def test_does_not_put_src_on_path(pytester: Pytester) -> None:
# `src` is not on sys.path so it should not be importable
ensure_file(pytester.path / "src/nope/__init__.py")
pytester.makepyfile(
"import pytest\n"
"def test():\n"
" with pytest.raises(ImportError):\n"
" import nope\n"
)
result = pytester.runpytest()
assert result.ret == ExitCode.OK
def test_fscollector_from_parent(testdir: Testdir, request: FixtureRequest) -> None:
"""Ensure File.from_parent can forward custom arguments to the constructor.
Context: https://github.com/pytest-dev/pytest-cpp/pull/47
"""
class MyCollector(pytest.File):
def __init__(self, fspath, parent, x):
super().__init__(fspath, parent)
self.x = x
@classmethod
def from_parent(cls, parent, *, fspath, x):
return super().from_parent(parent=parent, fspath=fspath, x=x)
collector = MyCollector.from_parent(
parent=request.session, fspath=testdir.tmpdir / "foo", x=10
)
assert collector.x == 10
class TestImportModeImportlib:
def test_collect_duplicate_names(self, pytester: Pytester) -> None:
"""--import-mode=importlib can import modules with same names that are not in packages."""
pytester.makepyfile(
**{
"tests_a/test_foo.py": "def test_foo1(): pass",
"tests_b/test_foo.py": "def test_foo2(): pass",
}
)
result = pytester.runpytest("-v", "--import-mode=importlib")
result.stdout.fnmatch_lines(
[
"tests_a/test_foo.py::test_foo1 *",
"tests_b/test_foo.py::test_foo2 *",
"* 2 passed in *",
]
)
def test_conftest(self, pytester: Pytester) -> None:
"""Directory containing conftest modules are not put in sys.path as a side-effect of
importing them."""
tests_dir = pytester.path.joinpath("tests")
pytester.makepyfile(
**{
"tests/conftest.py": "",
"tests/test_foo.py": """
import sys
def test_check():
assert r"{tests_dir}" not in sys.path
""".format(
tests_dir=tests_dir
),
}
)
result = pytester.runpytest("-v", "--import-mode=importlib")
result.stdout.fnmatch_lines(["* 1 passed in *"])
def setup_conftest_and_foo(self, pytester: Pytester) -> None:
"""Setup a tests folder to be used to test if modules in that folder can be imported
due to side-effects of --import-mode or not."""
pytester.makepyfile(
**{
"tests/conftest.py": "",
"tests/foo.py": """
def foo(): return 42
""",
"tests/test_foo.py": """
def test_check():
from foo import foo
assert foo() == 42
""",
}
)
def test_modules_importable_as_side_effect(self, pytester: Pytester) -> None:
"""In import-modes `prepend` and `append`, we are able to import modules from folders
containing conftest.py files due to the side effect of changing sys.path."""
self.setup_conftest_and_foo(pytester)
result = pytester.runpytest("-v", "--import-mode=prepend")
result.stdout.fnmatch_lines(["* 1 passed in *"])
def test_modules_not_importable_as_side_effect(self, pytester: Pytester) -> None:
"""In import-mode `importlib`, modules in folders containing conftest.py are not
        importable, as we don't change sys.path or sys.modules as a side effect of importing
the conftest.py file.
"""
self.setup_conftest_and_foo(pytester)
result = pytester.runpytest("-v", "--import-mode=importlib")
result.stdout.fnmatch_lines(
[
"*ModuleNotFoundError: No module named 'foo'",
"tests?test_foo.py:2: ModuleNotFoundError",
"* 1 failed in *",
]
)
def test_does_not_crash_on_error_from_decorated_function(pytester: Pytester) -> None:
"""Regression test for an issue around bad exception formatting due to
assertion rewriting mangling lineno's (#4984)."""
pytester.makepyfile(
"""
@pytest.fixture
def a(): return 4
"""
)
result = pytester.runpytest()
# Not INTERNAL_ERROR
assert result.ret == ExitCode.INTERRUPTED
def test_does_not_crash_on_recursive_symlink(pytester: Pytester) -> None:
"""Regression test for an issue around recursive symlinks (#7951)."""
symlink_or_skip("recursive", pytester.path.joinpath("recursive"))
pytester.makepyfile(
"""
def test_foo(): assert True
"""
)
result = pytester.runpytest()
assert result.ret == ExitCode.OK
assert result.parseoutcomes() == {"passed": 1}
|
|
from __future__ import unicode_literals
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.db.models import AutoField
from django.forms import ValidationError, ModelForm
from django.http import HttpResponseRedirect
from django.shortcuts import redirect
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User as AuthUser
from mezzanine.conf import settings
from mezzanine.core.forms import DynamicInlineAdminForm
from mezzanine.core.models import (Orderable, SitePermission,
CONTENT_STATUS_PUBLISHED)
from mezzanine.utils.urls import admin_url
from mezzanine.utils.models import get_user_model
if settings.USE_MODELTRANSLATION:
from django.utils.datastructures import SortedDict
from django.utils.translation import activate, get_language
from modeltranslation.admin import (TranslationAdmin,
TranslationInlineModelAdmin)
class BaseTranslationModelAdmin(TranslationAdmin):
"""
Mimic modeltranslation's TabbedTranslationAdmin but uses a
custom tabbed_translation_fields.js
"""
class Media:
js = (
"modeltranslation/js/force_jquery.js",
"mezzanine/js/%s" % settings.JQUERY_UI_FILENAME,
"mezzanine/js/admin/tabbed_translation_fields.js",
)
css = {
"all": ("mezzanine/css/admin/tabbed_translation_fields.css",),
}
else:
class BaseTranslationModelAdmin(admin.ModelAdmin):
"""
Abstract class used to handle the switch between translation
and no-translation class logic. We define the basic structure
for the Media class so we can extend it consistently regardless
of whether or not modeltranslation is used.
"""
class Media:
css = {"all": ()}
def getInlineBaseClass(cls):
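    """
    Return ``cls`` itself, or a subclass mixing in modeltranslation's
    ``TranslationInlineModelAdmin`` when ``USE_MODELTRANSLATION`` is enabled.
    """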
if settings.USE_MODELTRANSLATION:
class InlineBase(TranslationInlineModelAdmin, cls):
"""
Abstract class that mimics django-modeltranslation's
Translation{Tabular,Stacked}Inline. Used as a placeholder
for future improvement.
"""
pass
return InlineBase
return cls
User = get_user_model()
class DisplayableAdminForm(ModelForm):
def clean_content(form):
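        """
        Require the content field when the status is set to published.
        """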
status = form.cleaned_data.get("status")
content = form.cleaned_data.get("content")
if status == CONTENT_STATUS_PUBLISHED and not content:
raise ValidationError(_("This field is required if status "
"is set to published."))
return content
class DisplayableAdmin(BaseTranslationModelAdmin):
"""
Admin class for subclasses of the abstract ``Displayable`` model.
"""
list_display = ("title", "status", "admin_link")
list_display_links = ("title",)
list_editable = ("status",)
list_filter = ("status", "keywords__keyword")
date_hierarchy = "publish_date"
radio_fields = {"status": admin.HORIZONTAL}
fieldsets = (
(None, {
"fields": ["title", "status", ("publish_date", "expiry_date")],
}),
(_("Meta data"), {
"fields": ["_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"],
"classes": ("collapse-closed",)
}),
)
form = DisplayableAdminForm
def __init__(self, *args, **kwargs):
super(DisplayableAdmin, self).__init__(*args, **kwargs)
try:
self.search_fields = list(set(list(self.search_fields) + list(
self.model.objects.get_search_fields().keys())))
except AttributeError:
pass
class BaseDynamicInlineAdmin(object):
"""
Admin inline that uses JS to inject an "Add another" link which
when clicked, dynamically reveals another fieldset. Also handles
adding the ``_order`` field and its widget for models that
subclass ``Orderable``.
"""
form = DynamicInlineAdminForm
extra = 20
def __init__(self, *args, **kwargs):
super(BaseDynamicInlineAdmin, self).__init__(*args, **kwargs)
if issubclass(self.model, Orderable):
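            # Build the field list so that "_order" always ends up appended
            # after the user-editable fields.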
fields = self.fields
            if not fields:
                fields = self.model._meta.fields
                exclude = self.exclude or []
                fields = [f.name for f in fields if f.editable and
                          f.name not in exclude and not isinstance(f, AutoField)]
if "_order" in fields:
del fields[fields.index("_order")]
fields.append("_order")
self.fields = fields
class TabularDynamicInlineAdmin(BaseDynamicInlineAdmin,
                                getInlineBaseClass(admin.TabularInline)):
template = "admin/includes/dynamic_inline_tabular.html"
class StackedDynamicInlineAdmin(BaseDynamicInlineAdmin,
                                getInlineBaseClass(admin.StackedInline)):
template = "admin/includes/dynamic_inline_stacked.html"
def __init__(self, *args, **kwargs):
"""
Stacked dynamic inlines won't work without grappelli
installed, as the JavaScript in dynamic_inline.js isn't
able to target each of the inlines to set the value of
the order field.
"""
grappelli_name = getattr(settings, "PACKAGE_NAME_GRAPPELLI")
if grappelli_name not in settings.INSTALLED_APPS:
error = "StackedDynamicInlineAdmin requires Grappelli installed."
raise Exception(error)
super(StackedDynamicInlineAdmin, self).__init__(*args, **kwargs)
class OwnableAdmin(admin.ModelAdmin):
"""
Admin class for models that subclass the abstract ``Ownable``
model. Handles limiting the change list to objects owned by the
logged in user, as well as setting the owner of newly created
objects to the logged in user.
Remember that this will include the ``user`` field in the required
fields for the admin change form which may not be desirable. The
best approach to solve this is to define a ``fieldsets`` attribute
    that excludes the ``user`` field or simply add ``user`` to your
admin excludes: ``exclude = ('user',)``
"""
def save_form(self, request, form, change):
"""
Set the object's owner as the logged in user.
"""
obj = form.save(commit=False)
if obj.user_id is None:
obj.user = request.user
return super(OwnableAdmin, self).save_form(request, form, change)
def queryset(self, request):
"""
Filter the change list by currently logged in user if not a
superuser. We also skip filtering if the model for this admin
class has been added to the sequence in the setting
``OWNABLE_MODELS_ALL_EDITABLE``, which contains models in the
format ``app_label.object_name``, and allows models subclassing
        ``Ownable`` to be excluded from filtering, e.g. ownership should
not imply permission to edit.
"""
opts = self.model._meta
model_name = ("%s.%s" % (opts.app_label, opts.object_name)).lower()
models_all_editable = settings.OWNABLE_MODELS_ALL_EDITABLE
models_all_editable = [m.lower() for m in models_all_editable]
qs = super(OwnableAdmin, self).queryset(request)
if request.user.is_superuser or model_name in models_all_editable:
return qs
return qs.filter(user__id=request.user.id)
class SingletonAdmin(admin.ModelAdmin):
"""
Admin class for models that should only contain a single instance
in the database. Redirect all views to the change view when the
instance exists, and to the add view when it doesn't.
"""
def handle_save(self, request, response):
"""
Handles redirect back to the dashboard when save is clicked
(eg not save and continue editing), by checking for a redirect
response, which only occurs if the form is valid.
"""
form_valid = isinstance(response, HttpResponseRedirect)
if request.POST.get("_save") and form_valid:
return redirect("admin:index")
return response
def add_view(self, *args, **kwargs):
"""
Redirect to the change view if the singleton instance exists.
"""
try:
singleton = self.model.objects.get()
except (self.model.DoesNotExist, self.model.MultipleObjectsReturned):
kwargs.setdefault("extra_context", {})
kwargs["extra_context"]["singleton"] = True
response = super(SingletonAdmin, self).add_view(*args, **kwargs)
return self.handle_save(args[0], response)
return redirect(admin_url(self.model, "change", singleton.id))
def changelist_view(self, *args, **kwargs):
"""
Redirect to the add view if no records exist or the change
view if the singleton instance exists.
"""
try:
singleton = self.model.objects.get()
except self.model.MultipleObjectsReturned:
return super(SingletonAdmin, self).changelist_view(*args, **kwargs)
except self.model.DoesNotExist:
return redirect(admin_url(self.model, "add"))
return redirect(admin_url(self.model, "change", singleton.id))
def change_view(self, *args, **kwargs):
"""
If only the singleton instance exists, pass ``True`` for
``singleton`` into the template which will use CSS to hide
the "save and add another" button.
"""
kwargs.setdefault("extra_context", {})
kwargs["extra_context"]["singleton"] = self.model.objects.count() == 1
response = super(SingletonAdmin, self).change_view(*args, **kwargs)
return self.handle_save(args[0], response)
###########################################
# Site Permissions Inlines for User Admin #
###########################################
class SitePermissionInline(admin.TabularInline):
model = SitePermission
max_num = 1
can_delete = False
class SitePermissionUserAdmin(UserAdmin):
inlines = [SitePermissionInline]
# only register if User hasn't been overridden
if User == AuthUser:
admin.site.unregister(User)
admin.site.register(User, SitePermissionUserAdmin)
|
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: See license.txt
import frappe, json, os
from frappe.utils import strip, cint
from frappe.translate import (set_default_language, get_dict, send_translations)
from frappe.geo.country_info import get_country_info
from frappe.utils.password import update_password
from werkzeug.useragents import UserAgent
from . import install_fixtures
def get_setup_stages(args):
# App setup stage functions should not include frappe.db.commit
# That is done by frappe after successful completion of all stages
stages = [
{
'status': 'Updating global settings',
'fail_msg': 'Failed to update global settings',
'tasks': [
{
'fn': update_global_settings,
'args': args,
'fail_msg': 'Failed to update global settings'
}
]
}
]
stages += get_stages_hooks(args) + get_setup_complete_hooks(args)
stages.append({
# post executing hooks
'status': 'Wrapping up',
'fail_msg': 'Failed to complete setup',
'tasks': [
{
'fn': run_post_setup_complete,
'args': args,
'fail_msg': 'Failed to complete setup'
}
]
})
return stages
@frappe.whitelist()
def setup_complete(args):
"""Calls hooks for `setup_wizard_complete`, sets home page as `desktop`
and clears cache. If wizard breaks, calls `setup_wizard_exception` hook"""
# Setup complete: do not throw an exception, let the user continue to desk
if cint(frappe.db.get_single_value('System Settings', 'setup_complete')):
return {'status': 'ok'}
args = parse_args(args)
stages = get_setup_stages(args)
try:
frappe.flags.in_setup_wizard = True
current_task = None
for idx, stage in enumerate(stages):
frappe.publish_realtime('setup_task', {"progress": [idx, len(stages)],
"stage_status": stage.get('status')}, user=frappe.session.user)
for task in stage.get('tasks'):
current_task = task
task.get('fn')(task.get('args'))
except Exception:
handle_setup_exception(args)
return {'status': 'fail', 'fail': current_task.get('fail_msg')}
else:
run_setup_success(args)
return {'status': 'ok'}
finally:
frappe.flags.in_setup_wizard = False
def update_global_settings(args):
if args.language and args.language != "English":
set_default_language(get_language_code(args.lang))
frappe.db.commit()
frappe.clear_cache()
update_system_settings(args)
update_user_name(args)
def run_post_setup_complete(args):
disable_future_access()
frappe.db.commit()
frappe.clear_cache()
def run_setup_success(args):
for hook in frappe.get_hooks("setup_wizard_success"):
frappe.get_attr(hook)(args)
install_fixtures.install()
def get_stages_hooks(args):
stages = []
for method in frappe.get_hooks("setup_wizard_stages"):
stages += frappe.get_attr(method)(args)
return stages
def get_setup_complete_hooks(args):
stages = []
for method in frappe.get_hooks("setup_wizard_complete"):
stages.append({
'status': 'Executing method',
'fail_msg': 'Failed to execute method',
'tasks': [
{
'fn': frappe.get_attr(method),
'args': args,
'fail_msg': 'Failed to execute method'
}
]
})
return stages
def handle_setup_exception(args):
frappe.db.rollback()
if args:
traceback = frappe.get_traceback()
print(traceback)
for hook in frappe.get_hooks("setup_wizard_exception"):
frappe.get_attr(hook)(traceback, args)
def update_system_settings(args):
number_format = get_country_info(args.get("country")).get("number_format", "#,###.##")
# replace these as float number formats, as they have 0 precision
# and are currency number formats and not for floats
if number_format=="#.###":
number_format = "#.###,##"
elif number_format=="#,###":
number_format = "#,###.##"
system_settings = frappe.get_doc("System Settings", "System Settings")
system_settings.update({
"country": args.get("country"),
"language": get_language_code(args.get("language")),
"time_zone": args.get("timezone"),
"float_precision": 3,
'date_format': frappe.db.get_value("Country", args.get("country"), "date_format"),
'time_format': frappe.db.get_value("Country", args.get("country"), "time_format"),
'number_format': number_format,
'enable_scheduler': 1 if not frappe.flags.in_test else 0,
'backup_limit': 3 # Default for downloadable backups
})
system_settings.save()
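# update_user_name() either creates the setup user (when an email is supplied
# and no such User exists yet) or updates the session user's name, then
# attaches the optional profile image and grants the remaining roles via
# add_all_roles_to().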
def update_user_name(args):
first_name, last_name = args.get('full_name', ''), ''
if ' ' in first_name:
first_name, last_name = first_name.split(' ', 1)
if args.get("email"):
if frappe.db.exists('User', args.get('email')):
# running again
return
args['name'] = args.get("email")
_mute_emails, frappe.flags.mute_emails = frappe.flags.mute_emails, True
doc = frappe.get_doc({
"doctype":"User",
"email": args.get("email"),
"first_name": first_name,
"last_name": last_name
})
doc.flags.no_welcome_mail = True
doc.insert()
frappe.flags.mute_emails = _mute_emails
update_password(args.get("email"), args.get("password"))
elif first_name:
args.update({
"name": frappe.session.user,
"first_name": first_name,
"last_name": last_name
})
frappe.db.sql("""update `tabUser` SET first_name=%(first_name)s,
last_name=%(last_name)s WHERE name=%(name)s""", args)
if args.get("attach_user"):
attach_user = args.get("attach_user").split(",")
if len(attach_user)==3:
filename, filetype, content = attach_user
_file = frappe.get_doc({
"doctype": "File",
"file_name": filename,
"attached_to_doctype": "User",
"attached_to_name": args.get("name"),
"content": content,
"decode": True})
_file.save()
fileurl = _file.file_url
frappe.db.set_value("User", args.get("name"), "user_image", fileurl)
if args.get('name'):
add_all_roles_to(args.get("name"))
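# parse_args() normalises the wizard payload: it falls back to form_dict when
# no args are passed, decodes JSON strings, wraps the result in frappe._dict
# and strips surrounding whitespace from string values.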
def parse_args(args):
if not args:
args = frappe.local.form_dict
if isinstance(args, str):
args = json.loads(args)
args = frappe._dict(args)
# strip the whitespace
for key, value in args.items():
if isinstance(value, str):
args[key] = strip(value)
return args
def add_all_roles_to(name):
user = frappe.get_doc("User", name)
for role in frappe.db.sql("""select name from tabRole"""):
if role[0] not in ["Administrator", "Guest", "All", "Customer", "Supplier", "Partner", "Employee"]:
d = user.append("roles")
d.role = role[0]
user.save()
def disable_future_access():
frappe.db.set_default('desktop:home_page', 'workspace')
frappe.db.set_value('System Settings', 'System Settings', 'setup_complete', 1)
frappe.db.set_value('System Settings', 'System Settings', 'is_first_startup', 1)
# Enable onboarding after install
frappe.db.set_value('System Settings', 'System Settings', 'enable_onboarding', 1)
if not frappe.flags.in_test:
# remove all roles and add 'Administrator' to prevent future access
page = frappe.get_doc('Page', 'setup-wizard')
page.roles = []
page.append('roles', {'role': 'Administrator'})
page.flags.do_not_update_json = True
page.flags.ignore_permissions = True
page.save()
@frappe.whitelist()
def load_messages(language):
"""Load translation messages for given language from all `setup_wizard_requires`
javascript files"""
frappe.clear_cache()
set_default_language(get_language_code(language))
frappe.db.commit()
m = get_dict("page", "setup-wizard")
for path in frappe.get_hooks("setup_wizard_requires"):
# common folder `assets` served from `sites/`
js_file_path = os.path.abspath(frappe.get_site_path("..", *path.strip("/").split("/")))
m.update(get_dict("jsfile", js_file_path))
m.update(get_dict("boot"))
send_translations(m)
return frappe.local.lang
@frappe.whitelist()
def load_languages():
language_codes = frappe.db.sql('select language_code, language_name from tabLanguage order by name', as_dict=True)
codes_to_names = {}
for d in language_codes:
codes_to_names[d.language_code] = d.language_name
return {
"default_language": frappe.db.get_value('Language', frappe.local.lang, 'language_name') or frappe.local.lang,
"languages": sorted(frappe.db.sql_list('select language_name from tabLanguage order by name')),
"codes_to_names": codes_to_names
}
@frappe.whitelist()
def load_country():
from frappe.sessions import get_geo_ip_country
return get_geo_ip_country(frappe.local.request_ip) if frappe.local.request_ip else None
@frappe.whitelist()
def load_user_details():
return {
"full_name": frappe.cache().hget("full_name", "signup"),
"email": frappe.cache().hget("email", "signup")
}
@frappe.whitelist()
def reset_is_first_startup():
frappe.db.set_value('System Settings', 'System Settings', 'is_first_startup', 0)
def prettify_args(args):
# remove attachments
for key, val in args.items():
if isinstance(val, str) and "data:image" in val:
filename = val.split("data:image", 1)[0].strip(", ")
size = round((len(val) * 3 / 4) / 1048576.0, 2)
args[key] = "Image Attached: '{0}' of size {1} MB".format(filename, size)
pretty_args = []
for key in sorted(args):
pretty_args.append("{} = {}".format(key, args[key]))
return pretty_args
def email_setup_wizard_exception(traceback, args):
if not frappe.local.conf.setup_wizard_exception_email:
return
pretty_args = prettify_args(args)
if frappe.local.request:
user_agent = UserAgent(frappe.local.request.headers.get('User-Agent', ''))
else:
user_agent = frappe._dict()
message = """
#### Traceback
<pre>{traceback}</pre>
---
#### Setup Wizard Arguments
<pre>{args}</pre>
---
#### Request Headers
<pre>{headers}</pre>
---
#### Basic Information
- **Site:** {site}
- **User:** {user}
- **Browser:** {user_agent.platform} {user_agent.browser} version: {user_agent.version} language: {user_agent.language}
- **Browser Languages**: `{accept_languages}`""".format(
site=frappe.local.site,
traceback=traceback,
args="\n".join(pretty_args),
user=frappe.session.user,
user_agent=user_agent,
headers=frappe.local.request.headers,
accept_languages=", ".join(frappe.local.request.accept_languages.values()))
frappe.sendmail(recipients=frappe.local.conf.setup_wizard_exception_email,
sender=frappe.session.user,
subject="Setup failed: {}".format(frappe.local.site),
message=message,
delayed=False)
def log_setup_wizard_exception(traceback, args):
with open('../logs/setup-wizard.log', 'w+') as setup_log:
setup_log.write(traceback)
setup_log.write(json.dumps(args))
def get_language_code(lang):
return frappe.db.get_value('Language', {'language_name':lang})
def enable_twofactor_all_roles():
all_role = frappe.get_doc('Role',{'role_name':'All'})
all_role.two_factor_auth = True
all_role.save(ignore_permissions=True)
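# make_records() inserts a list of document dicts, skipping records whose
# '__condition' callable returns False and ignoring a DuplicateEntryError
# raised for the exact same document.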
def make_records(records, debug=False):
from frappe import _dict
from frappe.modules import scrub
if debug:
print("make_records: in DEBUG mode")
# LOG every success and failure
for record in records:
doctype = record.get("doctype")
condition = record.get('__condition')
if condition and not condition():
continue
doc = frappe.new_doc(doctype)
doc.update(record)
# ignore mandatory for root
parent_link_field = ("parent_" + scrub(doc.doctype))
if doc.meta.get_field(parent_link_field) and not doc.get(parent_link_field):
doc.flags.ignore_mandatory = True
try:
doc.insert(ignore_permissions=True)
except frappe.DuplicateEntryError as e:
# print("Failed to insert duplicate {0} {1}".format(doctype, doc.name))
# pass DuplicateEntryError and continue
if e.args and e.args[0]==doc.doctype and e.args[1]==doc.name:
# make sure DuplicateEntryError is for the exact same doc and not a related doc
frappe.clear_messages()
else:
raise
except Exception as e:
exception = record.get('__exception')
if exception:
config = _dict(exception)
if isinstance(e, config.exception):
config.handler()
else:
show_document_insert_error()
else:
show_document_insert_error()
def show_document_insert_error():
print("Document Insert Error")
print(frappe.get_traceback())
|
|
import csv
import json
import datetime
import math
class System:
    def __init__(self, data_dir, date):
        self.data_dir = data_dir  # needed by readCSV() below
        json_file_loc = data_dir + "ff_config.json"
print ("File opened: " + json_file_loc)
jf = open(json_file_loc, "r")
self.js = json.loads(jf.read())
jf.close()
print ("File closed: " + json_file_loc)
self.stops = self.readCSV("stops.txt", self.loadStops)
self.trips = self.readCSV("trips.txt", self.loadTrips)
self.readCSV("stop_times.txt", self.loadStopTimes)
# self.loadStopTimes mutates self.stops and self.trips, and
# creates self.services, self.paths, self.paths_ua, and self.segments
self.days = sortCalendar(data_dir + "calendar.txt", date)
self.ss = map(assignCategory(self.js, self.days), self.segments)
def readCSV(self, file_name, function):
"""Reads a CSV file, creates a DictReader, and runs a given function.
Args:
file_name: The str name of the file, including '.txt' (or '.csv').
function: The function to be run, which accepts a single argument
(the csv.DictReader generated from the CSV)
Returns:
The value of function(csv.DictReader(file)).
Side effects:
The side effects of 'function'.
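        Example:
            self.readCSV("stops.txt", self.loadStops)  # -> dict of Stop objects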
"""
file_loc = self.data_dir + file_name
f = open(file_loc, "r")
print ("File opened: " + file_loc)
r = csv.DictReader(f)
value = function(r)
f.close()
print ("File closed: " + file_loc)
return value
def loadStops(self, reader):
"""Takes the stops from a DictReader and adds them to 'self.stops'.
Args:
reader: A csv.DictReader containing dicts of stop information.
Returns:
A dict where keys = stop_ids, values = Stop objects.
"""
return dict(
(s_dict["stop_id"], Stop(s_dict)) for s_dict in reader
)
def loadTrips(self, reader):
"""Takes the trips from a DictReader and adds them to 'self.trips'.
Args:
reader: A csv.DictReader containing dicts of trip information.
Returns:
A dict where keys = trip_ids, values = Trip objects.
"""
return dict(
(t_dict["trip_id"], Trip(t_dict)) for t_dict in reader
)
def loadStopTimes(self, reader):
# TODO: Clean up this function
for st_dict in reader:
st_dict["departure_time"] = fixTime(st_dict["departure_time"])
st_dict["trip_obj"] = self.trips[st_dict["trip_id"]]
st_obj = StopTime(st_dict)
self.trips[st_dict["trip_id"]].addStopTime(st_obj)
self.stops[st_dict["stop_id"]].addStopTime(st_obj)
print ("Trips aggregated.")
self.services = set(t.getStopSeq() for t in self.trips.values())
# Takes self.trips and returns a set of stop sequence tuples
print ("Services aggregated.")
self.paths = {} # Keys = path tuples (stop0, stop1), values = dicts
for serv in self.services:
for s in range(len(serv)-1):
path = (serv[s], serv[s+1])
if path not in self.paths:
self.paths[path] = {}
# Keys = services (stop sequence tuples),
# values = number of times that service travels this path
if serv not in self.paths[path]:
self.paths[path][serv] = 1
else:
self.paths[path][serv] += 1
# This service travels this path multiple times on a trip
print ("Paths compiled.")
self.segments = [] # List of segments
self.paths_ua = set(self.paths)
# paths_ua = set of the paths that haven't been assigned to a Segment
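        # Walk each service's stop sequence; a Segment ends whenever the next
        # path is served by a different set of services or has already been
        # assigned to an earlier Segment.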
for serv in self.services:
current_seg = None
path_services = None
for s in range(len(serv)-1):
path = (serv[s], serv[s+1])
stop_obj = self.stops[serv[s]]
if path not in self.paths_ua: # Path has already been assigned
if current_seg:
current_seg.setLastStop(stop_obj)
self.segments.append(current_seg)
current_seg = None
path_services = None
elif self.paths[path] == path_services: # Continue Segment
current_seg.addStop(stop_obj)
self.paths_ua.remove(path)
else: # Path has a different set of services
# End current Segment:
if current_seg:
current_seg.setLastStop(stop_obj)
self.segments.append(current_seg)
# Start new Segment:
path_services = self.paths[path]
current_seg = Segment(stop_obj, path_services)
self.paths_ua.remove(path)
if len(self.paths_ua) > 0:
raise Exception("Not all paths have been assigned to a Segment.")
print ("Segments compiled.")
return
def saveGeoJSON(self, new_file_name):
print ("Generating GeoJSON export.")
geoj = {
"type": "FeatureCollection",
"features": [s.getJSON() for s in self.ss]
}
print ("Saving file: " + new_file_name + " ...")
nf = open(new_file_name, "w")
json.dump(geoj, nf, indent=4, sort_keys=True)
nf.close()
print ("Saved file: " + new_file_name)
return
class StopTime:
def __init__(self, stop_time_dict):
self.departure_time = stop_time_dict["departure_time"]
self.trip_id = stop_time_dict["trip_id"]
self.trip_obj = stop_time_dict["trip_obj"]
self.stop_id = stop_time_dict["stop_id"]
self.service_id = self.trip_obj.getServID()
self.service = self.trip_obj.getStopSeq()
def getTime(self):
return self.departure_time
def getServID(self):
return self.service_id
def getTripID(self):
return self.trip_id
def getTripObj(self):
return self.trip_obj
def getStopID(self):
return self.stop_id
def getService(self):
return self.service
class Trip:
def __init__(self, trip_dict):
self.trip_id = trip_dict["trip_id"]
self.service_id = trip_dict["service_id"]
self.shape_id = trip_dict["shape_id"]
self.route_id = trip_dict["route_id"]
self.stop_times = []
def addStopTime(self, stop_time):
self.stop_times.append(stop_time)
return self
def getTripID(self):
return self.trip_id
def getServID(self):
return self.service_id
def getShapeID(self):
return self.shape_id
def getRouteID(self):
return self.route_id
def getStopTimes(self):
return self.stop_times
def getStopSeq(self):
return tuple(x.getStopID() for x in self.stop_times)
    def getService(self):
        # A trip's "service" is its stop sequence (see StopTime.getService).
        return self.getStopSeq()
class Service:
def __init__(self):
self.stop_seq = None
self.trips = None
def getStopSeq(self):
return self.stop_seq
def getTrips(self):
return self.trips
class Route:
pass
class Stop:
def __init__(self, stop_dict):
self.stop_id = str(stop_dict["stop_id"])
self.stop_lon = float(stop_dict["stop_lon"])
self.stop_lat = float(stop_dict["stop_lat"])
self.stop_name = stop_dict["stop_name"]
self.stop_desc = stop_dict["stop_desc"]
self.trip_times = {} # keys = time points, values = Trips
self.stop_times = [] # list of StopTime objects
def __repr__(self):
return ("Stop " + self.stop_id)
def addStopTime(self, stop_time):
self.stop_times.append(stop_time)
return self
def getStopTimes(self, filter_func=None):
if filter_func:
return filter(filter_func, self.stop_times)
else:
return self.stop_times
def getStopID(self):
return self.stop_id
def getLonLat(self):
return [self.stop_lon, self.stop_lat]
class Segment:
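    """A run of consecutive stops whose consecutive paths are served by the
    same set of services; exported as one GeoJSON LineString feature with a
    frequency category attached."""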
def __init__(self, init_stop, services):
# Note: All stops are Stop objects, not stop_ids
self.init_stop = init_stop
self.last_stop = None
self.stops = [init_stop]
self.services = services
self.category = None
def setCategory(self, category):
self.category = category
return self
def addStop(self, stop):
self.stops.append(stop)
return self
def setLastStop(self, last_stop):
self.addStop(last_stop)
self.last_stop = last_stop
return self
def getInitStop(self):
return self.init_stop
def getLastStop(self):
return self.last_stop
def getStops(self):
return self.stops
def getServices(self):
return self.services
def getJSON(self):
return {
"type": "Feature",
"geometry": {
"type": "LineString",
"coordinates": [s.getLonLat() for s in self.stops]
},
"properties": {
"category": self.category,
"init_stop": self.init_stop.getStopID(),
"last_stop": self.last_stop.getStopID()
}
}
def assignCategory(js, days):
"""Returns a function that assigns a frequency category to a Segment."""
def assignCategoryFunc(segment):
# Not really necessary, but here for safety:
if not isinstance(segment, Segment):
raise Exception("Argument 'segment' must be a Segment.")
return segment.setCategory(findCategory(segment, js, days))
return assignCategoryFunc
def findCategory(segment, js, days):
"""Finds the appropriate frequency pattern for a given Segment.
Proceeds through the JSON contents in order, so if the JSON config file
    (ff_config.json) contains multiple frequency categories, they should be
in order of decreasing frequency, as this function will return the first
category whose standards are met for that segment.
"""
for pattern in js:
if checkPattern(segment, pattern, days):
return pattern["name"]
return None
def checkPattern(segment, pattern, days):
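    """Return True if both endpoint stops of 'segment' satisfy every rule in
    'pattern' for all service-id combinations applying on each rule's days."""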
def checkStop(stop):
def checkRule(r):
def checkCombo(c):
# Get stop times for this day and time range:
times = stop.getStopTimes(
lambda st: (bool(st.getServID() in c)) &
# (st.getService() in segment.getServices()) &
(r[u"start_time"] < st.getTime() < r[u"end_time"])
)
times = sorted(times, key=lambda st: st.getTime())
times = [convertTime(st.getTime()) for st in times]
# Create fake stop times to represent the start and end times
times.insert(0, start)
times.append(end)
# Check if there are enough total trips in the day:
if len(times) < min_trips-3: # -3 in case of weird edge cases
return False
# Go through all stop times and check headways:
errors = 0
for i in range(len(times)-1):
if (times[i+1] - times[i]) > headway:
if (times[i+1] - times[i]) <= (headway + error_mins):
errors += 1
else:
return False
if errors > error_times:
return False
return True
# Get unique combinations of service_ids for the days in this rule:
combos = set(tuple(days[x.encode("ascii")]) for x in r[u"days"])
# Calculate allowable error:
# (Note: expects start and end times as strings in "HHMM" format)
start = datetime.timedelta(
hours=int(r["start_time"].encode("ascii")[0:2]),
minutes=int(r["start_time"].encode("ascii")[2:4])
)
end = datetime.timedelta(
hours=int(r["end_time"].encode("ascii")[0:2]),
minutes=int(r["end_time"].encode("ascii")[2:4])
)
duration = end - start
headway = datetime.timedelta(minutes=pattern[u"headway"])
min_trips = int(duration.total_seconds()/headway.total_seconds())
error_mins = datetime.timedelta(minutes=pattern[u"error_mins"])
error_times = int(math.ceil(pattern[u"error_pct"]*0.01*min_trips))
for c in combos:
if not checkCombo(c):
return False
return True
for rule in pattern[u"rules"]:
if not checkRule(rule):
return False
return True
# Check the init stop across all days before checking the last stop,
# because it's unlikely the two will have different results for checkStop,
# so we might as well try to return a False value as soon as possible.
if not checkStop(segment.getInitStop()):
return False
elif not checkStop(segment.getLastStop()):
return False
return True
def sortCalendar(cal_file_loc, date):
"""Takes the calendar file (calendar.txt) and matches service patterns
with days of the week.
Args:
cal_file_loc: String location of the calendar file
date: Int/string date (YYYYMMDD) to be used to find the applicable
service period
Returns:
A dict where keys = days, values = sets of effective service IDs
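        e.g. {"monday": {"WEEKDAY"}, ..., "sunday": {"SUNDAY"}} (service IDs
        shown are illustrative; actual values come from the feed)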
"""
cf = open(cal_file_loc, "r")
print ("File opened: " + cal_file_loc)
cr = csv.DictReader(cf)
def inDateRange(x):
return (int(x["start_date"]) < int(date) < int(x["end_date"]))
cl = filter(inDateRange, list(cr))
days = {
"monday": set(),
"tuesday": set(),
"wednesday": set(),
"thursday": set(),
"friday": set(),
"saturday": set(),
"sunday": set()
}
for serv in cl:
for i in serv:
if i[-3:] == "day": # if key is a day of the week
if serv[i] == "1": # if this service_id applies on this day
days[i].add(serv["service_id"])
cf.close()
print ("File closed: " + cal_file_loc)
return days
def fixTime(time):
"""Takes a str time and converts the hour to a two-digit value if needed.
Needed because GTFS allows AM (morning) times to have a one- or two-digit
hour, but sorting string times requires two-digit hours to work properly.
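    For example, fixTime("7:15:00") returns "07:15:00", while "17:15:00" is
    returned unchanged.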
"""
if time.find(":") == 2:
return time
elif time.find(":") == 1:
return "0" + time
else:
raise Exception("Time must begin with a one- or two-digit hour.")
def convertTime(time):
"""Converts a str time ("HH:MM:SS") to a datetime.timedelta object."""
h, m, s = time.split(":")
return datetime.timedelta(hours=int(h), minutes=int(m), seconds=int(s))
if __name__ == "__main__":
system = System("data/spokane/", 20150808)
system.saveGeoJSON("data/spokane/frequency.geojson")
|
|
#!/usr/bin/env python
# This document is part of Acronym
# https://github.com/geowurster/Acronym
# =================================================================================== #
#
# New BSD License
#
# Copyright (c) 2014, Kevin D. Wurster
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * The names of its contributors may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# =================================================================================== #
"""Buffer geometries"""
from __future__ import unicode_literals
import collections
import inspect
import os
from os.path import *
from ..components import *
from ... import settings
try:
from osgeo import ogr
from osgeo import osr
except ImportError:
import ogr
import osr
ogr.UseExceptions()
osr.UseExceptions()
#/* ======================================================================= */#
#/* Document level information
#/* ======================================================================= */#
__all__ = ['print_help', 'main']
SUBCOMMAND_NAME = basename(inspect.getfile(inspect.currentframe())).rsplit('.')[0].replace('subcommand_', '')
#/* ======================================================================= */#
#/* Define print_help() function
#/* ======================================================================= */#
def print_help():
"""
Detailed help information
Returns
-------
1 for exit code purposes
"""
global SUBCOMMAND_NAME
# TODO: Populate help
vprint("""
Help: {0}
------{1}
{2}
""".format(SUBCOMMAND_NAME, '-' * len(SUBCOMMAND_NAME), main.__doc__))
#/* ======================================================================= */#
#/* Define print_usage() function
#/* ======================================================================= */#
def print_usage():
"""
Commandline usage information
Returns
-------
1 for exit code purposes
"""
global SUBCOMMAND_NAME
vprint("""
{0} [-of driver ][-overwrite] [-update] [-dsco NAME=VAL[,N=V]] [-lco NAME=VAL[,N=V]]
{1} -i infile layer [layer ...] -o outfile newlayer [newlayer ...]
""".format(SUBCOMMAND_NAME, ' ' * len(SUBCOMMAND_NAME)))
return 1
#/* ======================================================================= */#
#/* Define print_long_usage() function
#/* ======================================================================= */#
def print_long_usage():
"""
Detailed commandline usage
Returns
-------
1 for exit code purposes
"""
print_usage()
vprint("""Options:
-of -output-format OGR supported driver
[default: {0}]
-i -input Input datasource
-o -output Output datasource
layer Specify specific input layers to be processed. By
default all layers are processed
newlayer Rename an input layer or specify a target output
layer for use with append. If any input layers
are renamed, all output layers must be specified
-overwrite Overwrite entire output datasource
-update Overwrite output layer or create if it doesn't
already exist
-dsco -ds-creation-options Datasource creation options for output driver
Ignored if using -append
-lco -lyr-creation-options Layer creation options for output driver
Ignored if using -append
""".format(settings.DEFAULT_VECTOR_DRIVER))
return 1
#/* ======================================================================= */#
#/* Define main() function
#/* ======================================================================= */#
def main(args):
"""
Buffer vector geometries
"""
#/* ----------------------------------------------------------------------- */#
#/* Print usage
#/* ----------------------------------------------------------------------- */#
    if not args:
return print_usage()
#/* ----------------------------------------------------------------------- */#
#/* Defaults
#/* ----------------------------------------------------------------------- */#
output_driver_name = settings.DEFAULT_VECTOR_DRIVER
output_lco = []
output_dsco = []
overwrite_mode = False
update_mode = False
append_mode = False
#/* ----------------------------------------------------------------------- */#
#/* Containers
#/* ----------------------------------------------------------------------- */#
layers_to_copy = []
new_layer_names = []
input_datasource = None
output_datasource = None
#/* ----------------------------------------------------------------------- */#
#/* Parse arguments
#/* ----------------------------------------------------------------------- */#
i = 0
arg = None
arg_error = False
while i < len(args):
try:
arg = args[i]
# Help arguments
            if arg in ('--help-info', '-help-info', '--helpinfo', '-helpinfo', '-h', '--h'):
return print_help_info()
elif arg in ('--help', '-help', '-h'):
return print_help()
elif arg in ('--usage', '-usage', '-u'):
return print_usage()
elif arg in ('--long-usage', '-long-usage', '-lu'):
return print_long_usage()
elif arg in ('--version', '-version'):
return print_version()
elif arg in ('--short-version', '-short-version', '-sv'):
return print_short_version()
elif arg in ('--license', '-license'):
return print_license()
# I/O
elif arg in ('-i', '-input', '--input'):
i += 2
input_datasource = args[i - 1]
while i < len(args) and args[i][0] != '-':
layers_to_copy += args[i].split(',')
i += 1
elif arg in ('-o', '-output', '--output'):
i += 2
output_datasource = args[i - 1]
while i < len(args) and args[i][0] != '-':
new_layer_names += args[i].split(',')
i += 1
elif arg in ('-overwrite', '--overwrite'):
i += 1
overwrite_mode = True
elif arg in ('-update', '--update'):
i += 1
update_mode = True
# OGR Options
elif arg in ('-of', '-output-format', '--output-format'):
i += 2
output_driver_name = args[i - 1]
elif arg in ('-lco', '-lyr-creation-options', '--lyr-creation-options',
'-layer-creation-options', '--layer-creation-options'):
i += 1
while i < len(args) and args[i][0] != '-':
output_lco += args[i].split(',')
i += 1
elif arg in ('-dsco', '-ds-creation-options', '--ds-creation-options',
'-datasource-creation-options', '--datasource-creation-options'):
i += 1
while i < len(args) and args[i][0] != '-':
output_dsco += args[i].split(',')
i += 1
# Unrecognized arguments
else:
i += 1
arg_error = True
vprint("ERROR: Unrecognized argument: %s" % arg)
# This catches several conditions:
# 1. The last argument is a flag that requires parameters but the user did not supply the parameter
# 2. The arg parser did not properly consume all parameters for an argument
# 3. The arg parser did not properly iterate the 'i' variable
# 4. An argument split on '=' doesn't have anything after '=' - e.g. '--output-file='
except (IndexError, ValueError):
i += 1
arg_error = True
vprint("ERROR: An argument has invalid parameters: %s" % arg)
#/* ----------------------------------------------------------------------- */#
#/* Transform parameters
#/* ----------------------------------------------------------------------- */#
    if output_datasource and ('~' in output_datasource or os.sep in output_datasource):
        output_datasource = abspath(expanduser(output_datasource))
    if input_datasource and ('~' in input_datasource or os.sep in input_datasource):
        input_datasource = abspath(expanduser(input_datasource))
#/* ----------------------------------------------------------------------- */#
#/* Validate parameters
#/* ----------------------------------------------------------------------- */#
bail = False
# Check arguments
if arg_error:
bail = True
vprint("ERROR: Did not successfully parse arguments")
# Check input datasource
try:
_input_ds = ogr.Open(input_datasource)
except RuntimeError:
_input_ds = None
if not input_datasource:
bail = True
vprint("ERROR: Need an input datasource")
elif _input_ds is None:
bail = True
vprint("ERROR: Can't open input datasource: %s" % input_datasource)
else:
# Check layers to process
# Define layers to process if none were specified
input_layer_names = []
for _layer_idx in range(_input_ds.GetLayerCount()):
_layer = _input_ds.GetLayer(_layer_idx)
input_layer_names.append(_layer.GetName())
if not layers_to_copy:
layers_to_copy = input_layer_names
else:
for layer_name in layers_to_copy:
if layer_name not in input_layer_names:
bail = True
vprint("ERROR: Layer to process does not exist in input datasource: %s" % layer_name)
# Check input and output layers
# This check only runs if the input datasource checks get far enough to compare the layers to process
# If no new layer names are specified, new layers will be named the same as the input
if not new_layer_names:
new_layer_names = layers_to_copy
        if len(layers_to_copy) != len(new_layer_names):
bail = True
vprint("ERROR: Number of new layer names does not equal the number of layers to process")
vprint(" Layers to process: %s" % ', '.join(layers_to_copy))
vprint(" New layer names: %s" % ', '.join(new_layer_names))
# Check output datasource
if not output_datasource:
bail = True
vprint("ERROR: Need an output datasource")
# Check write modes
_count = collections.Counter([overwrite_mode, update_mode, append_mode])
if True in _count and _count[True] > 1:
bail = True
vprint("ERROR: Cannot combine update, and append")
# Exit if something did not pass validation
if bail:
return 1
#/* ----------------------------------------------------------------------- */#
#/* Prep OGR objects
#/* ----------------------------------------------------------------------- */#
vprint("Prepping data ...")
# OGR only accepts strings, not unicode
output_driver_name = str(output_driver_name)
input_datasource = str(input_datasource)
output_datasource = str(output_datasource)
layers_to_copy = [str(i) for i in layers_to_copy]
new_layer_names = [str(i) for i in new_layer_names]
# Prep input objects
input_ds = ogr.Open(input_datasource)
# Overwrite an existing datasource
if overwrite_mode:
output_drv = ogr.GetDriverByName(output_driver_name)
try:
            output_drv.DeleteDataSource(output_datasource)
except RuntimeError:
pass
output_ds = output_drv.CreateDataSource(output_datasource, options=output_dsco)
# Update an existing datasource
elif update_mode:
output_ds = ogr.Open(output_datasource, 1)
if not output_ds:
vprint("ERROR: Trying to update or append to a datasource that does not exist: %s" % output_datasource)
return 1
# Creating a new datasource
else:
output_drv = ogr.GetDriverByName(output_driver_name)
try:
output_ds = output_drv.CreateDataSource(output_datasource, options=output_dsco)
except RuntimeError as e:
vprint("ERROR: Can't create datasource - try appending or updating: %s" % output_datasource)
vprint(" %s" % e)
output_drv = None
return 1
# Make sure that output layers exist when appending
output_ds_layers = [output_ds.GetLayer(l_idx).GetName() for l_idx in range(output_ds.GetLayerCount())]
if append_mode:
for nln in new_layer_names:
if nln not in output_ds_layers:
bail = True
vprint("ERROR: Trying to append to a layer that doesn't exist: %s" % nln)
if bail:
return 1
#/* ----------------------------------------------------------------------- */#
#/* Process data
#/* ----------------------------------------------------------------------- */#
    for layer_name, new_name in zip(layers_to_copy, new_layer_names):
        input_layer = input_ds.GetLayerByName(layer_name)
        # Updating layer - delete existing
        if update_mode:
            try:
                output_ds.DeleteLayer(new_name)
            except (RuntimeError, AttributeError, TypeError):
                # Output layer doesn't exist or can't be deleted by name - just copy it
                pass
        # Add the layer
        output_ds.CopyLayer(input_layer, new_name, options=output_lco)
        vprint("Copied %s -> %s" % (layer_name, new_name))
#/* ----------------------------------------------------------------------- */#
#/* Cleanup and return
#/* ----------------------------------------------------------------------- */#
input_layer = None
output_ds = None
input_ds = None
output_drv = None
vprint("Done")
return 0
|
|
# Copyright 2009-2010 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Master-Slave connection to Mongo.
Performs all writes to Master instance and distributes reads among all
slaves. Reads are tried on each slave in turn until the read succeeds
or all slaves have failed.
"""
from pymongo import helpers
from pymongo import ReadPreference
from pymongo.common import BaseObject
from pymongo.connection import Connection
from pymongo.database import Database
from pymongo.errors import AutoReconnect
class MasterSlaveConnection(BaseObject):
"""A master-slave connection to Mongo.
"""
def __init__(self, master, slaves=[], document_class=dict, tz_aware=False):
"""Create a new Master-Slave connection.
The resultant connection should be interacted with using the same
mechanisms as a regular `Connection`. The `Connection` instances used
to create this `MasterSlaveConnection` can themselves make use of
connection pooling, etc. 'Connection' instances used as slaves should
be created with the read_preference option set to
:attr:`~pymongo.ReadPreference.SECONDARY`. Safe options are
inherited from `master` and can be changed in this instance.
Raises TypeError if `master` is not an instance of `Connection` or
        slaves is not a list of at least one `Connection` instance.
:Parameters:
- `master`: `Connection` instance for the writable Master
- `slaves` (optional): list of `Connection` instances for the
read-only slaves
- `document_class` (optional): default class to use for
documents returned from queries on this connection
- `tz_aware` (optional): if ``True``,
:class:`~datetime.datetime` instances returned as values
in a document by this :class:`MasterSlaveConnection` will be timezone
aware (otherwise they will be naive)
"""
if not isinstance(master, Connection):
raise TypeError("master must be a Connection instance")
if not isinstance(slaves, list) or len(slaves) == 0:
raise TypeError("slaves must be a list of length >= 1")
for slave in slaves:
if not isinstance(slave, Connection):
raise TypeError("slave %r is not an instance of Connection" %
slave)
super(MasterSlaveConnection,
self).__init__(read_preference=ReadPreference.SECONDARY,
safe=master.safe,
**(master.get_lasterror_options()))
self.__in_request = False
self.__master = master
self.__slaves = slaves
self.__document_class = document_class
self.__tz_aware = tz_aware
@property
def master(self):
return self.__master
@property
def slaves(self):
return self.__slaves
def get_document_class(self):
return self.__document_class
def set_document_class(self, klass):
self.__document_class = klass
document_class = property(get_document_class, set_document_class,
doc="""Default class to use for documents
returned on this connection.""")
@property
def tz_aware(self):
return self.__tz_aware
def disconnect(self):
"""Disconnect from MongoDB.
Disconnecting will call disconnect on all master and slave
connections.
.. seealso:: Module :mod:`~pymongo.connection`
.. versionadded:: 1.10.1
"""
self.__master.disconnect()
for slave in self.__slaves:
slave.disconnect()
def set_cursor_manager(self, manager_class):
"""Set the cursor manager for this connection.
Helper to set cursor manager for each individual `Connection` instance
that make up this `MasterSlaveConnection`.
"""
self.__master.set_cursor_manager(manager_class)
for slave in self.__slaves:
slave.set_cursor_manager(manager_class)
# _connection_to_use is a hack that we need to include to make sure
# that killcursor operations can be sent to the same instance on which
# the cursor actually resides...
def _send_message(self, message, safe=False, _connection_to_use=None):
"""Say something to Mongo.
Sends a message on the Master connection. This is used for inserts,
updates, and deletes.
Raises ConnectionFailure if the message cannot be sent. Returns the
request id of the sent message.
:Parameters:
          - `message`: the message to send
- `safe`: perform a getLastError after sending the message
"""
if _connection_to_use is None or _connection_to_use == -1:
return self.__master._send_message(message, safe)
return self.__slaves[_connection_to_use]._send_message(message, safe)
# _connection_to_use is a hack that we need to include to make sure
# that getmore operations can be sent to the same instance on which
# the cursor actually resides...
def _send_message_with_response(self, message, _connection_to_use=None,
_must_use_master=False, **kwargs):
"""Receive a message from Mongo.
Sends the given message and returns a (connection_id, response) pair.
:Parameters:
          - `message`: the message to send
"""
if _connection_to_use is not None:
if _connection_to_use == -1:
return (-1,
self.__master._send_message_with_response(message,
**kwargs))
else:
return (_connection_to_use,
self.__slaves[_connection_to_use]
._send_message_with_response(message, **kwargs))
# _must_use_master is set for commands, which must be sent to the
# master instance. any queries in a request must be sent to the
# master since that is where writes go.
if _must_use_master or self.__in_request:
return (-1, self.__master._send_message_with_response(message,
**kwargs))
# Iterate through the slaves randomly until we have success. Raise
# reconnect if they all fail.
for connection_id in helpers.shuffled(xrange(len(self.__slaves))):
try:
slave = self.__slaves[connection_id]
return (connection_id,
slave._send_message_with_response(message, **kwargs))
except AutoReconnect:
pass
raise AutoReconnect("failed to connect to slaves")
def start_request(self):
"""Start a "request".
Start a sequence of operations in which order matters. Note
that all operations performed within a request will be sent
using the Master connection.
"""
self.__in_request = True
def end_request(self):
"""End the current "request".
See documentation for `Connection.end_request`.
"""
self.__in_request = False
self.__master.end_request()
def __cmp__(self, other):
if isinstance(other, MasterSlaveConnection):
return cmp((self.__master, self.__slaves),
(other.__master, other.__slaves))
return NotImplemented
def __repr__(self):
return "MasterSlaveConnection(%r, %r)" % (self.__master, self.__slaves)
def __getattr__(self, name):
"""Get a database by name.
Raises InvalidName if an invalid database name is used.
:Parameters:
- `name`: the name of the database to get
"""
return Database(self, name)
def __getitem__(self, name):
"""Get a database by name.
Raises InvalidName if an invalid database name is used.
:Parameters:
- `name`: the name of the database to get
"""
return self.__getattr__(name)
def close_cursor(self, cursor_id, connection_id):
"""Close a single database cursor.
Raises TypeError if cursor_id is not an instance of (int, long). What
closing the cursor actually means depends on this connection's cursor
manager.
:Parameters:
- `cursor_id`: cursor id to close
- `connection_id`: id of the `Connection` instance where the cursor
was opened
"""
if connection_id == -1:
return self.__master.close_cursor(cursor_id)
return self.__slaves[connection_id].close_cursor(cursor_id)
def database_names(self):
"""Get a list of all database names.
"""
return self.__master.database_names()
def drop_database(self, name_or_database):
"""Drop a database.
:Parameters:
- `name_or_database`: the name of a database to drop or the object
itself
"""
return self.__master.drop_database(name_or_database)
def __iter__(self):
return self
def next(self):
raise TypeError("'MasterSlaveConnection' object is not iterable")
def _cached(self, database_name, collection_name, index_name):
return self.__master._cached(database_name,
collection_name, index_name)
def _cache_index(self, database_name, collection_name, index_name, ttl):
return self.__master._cache_index(database_name, collection_name,
index_name, ttl)
def _purge_index(self, database_name,
collection_name=None, index_name=None):
return self.__master._purge_index(database_name,
collection_name,
index_name)
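# Minimal usage sketch (hosts, ports, and database/collection names below are
# hypothetical; assumes this module is importable as
# pymongo.master_slave_connection and that the listed mongod instances exist):
#
#   from pymongo.connection import Connection
#   from pymongo.master_slave_connection import MasterSlaveConnection
#
#   master = Connection("master.example.com", 27017)
#   slaves = [Connection("slave1.example.com", 27017,
#                        read_preference=ReadPreference.SECONDARY),
#             Connection("slave2.example.com", 27017,
#                        read_preference=ReadPreference.SECONDARY)]
#   conn = MasterSlaveConnection(master, slaves)
#   conn.test.things.insert({"x": 1}, safe=True)   # writes go to the master
#   doc = conn.test.things.find_one()              # reads are spread over slaves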
|
|
"""
Benchmark different solvers of the same CSC univariate or multivariate problem.
This script needs the following packages:
pip install pandas pyfftw
pip install alphacsc/other/sporco
- Use bench_methods_run.py to run the benchmark.
The results are saved in alphacsc/figures.
- Use bench_methods_plot.py to plot the results.
The figures are saved in alphacsc/figures.
"""
from __future__ import print_function
import os
import time
import itertools
import numpy as np
import pandas as pd
import scipy.sparse as sp
from joblib import Parallel, delayed
import alphacsc.other.heide_csc as CSC
from sporco.admm.cbpdndl import ConvBPDNDictLearn
from alphacsc.update_d import update_d_block
from alphacsc.learn_d_z import learn_d_z
from alphacsc.learn_d_z_multi import learn_d_z_multi
from alphacsc.datasets.mne_data import load_data
from alphacsc.init_dict import init_dictionary
from alphacsc.utils.dictionary import get_uv
START = time.time()
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(30, 38)
##############################
# Parameters of the simulation
##############################
verbose = 1
# base string for the save names.
base_name = 'run_0'
# n_jobs for the parallel running of single core methods
n_jobs = 1
# number of random states
n_states = 1
# loop over parameters
n_times_atom_list = [32]
n_atoms_list = [2]
n_channel_list = [1]
reg_list = [10.]
######################################
# Functions compared in the benchmark
######################################
def run_admm(X, ds_init, reg, n_iter, random_state, label, max_it_d=10,
max_it_z=10):
# admm with the following differences
# - positivity constraints
# - different init
# - d step and z step are swapped
tol = np.float64(1e-3)
size_kernel = ds_init.shape
assert size_kernel[1] % 2 == 1
[d, z, Dz, list_obj_val, times_admm] = CSC.learn_conv_sparse_coder(
X, size_kernel, max_it=n_iter, tol=tol, random_state=random_state,
lambda_prior=reg, ds_init=ds_init, verbose=verbose, max_it_d=max_it_d,
max_it_z=max_it_z)
# z.shape = (n_trials, n_atoms, n_times + 2 * n_times_atom)
z = z[:, :, 2 * n_times_atom:-2 * n_times_atom]
z = z.swapaxes(0, 1)
# z.shape = (n_atoms, n_trials, n_times - 2 * n_times_atom)
return list_obj_val, np.cumsum(times_admm)[::2], d, z
def run_cbpdn(X, ds_init, reg, n_iter, random_state, label):
# Use only one thread in fft for fair comparison
import sporco.linalg
sporco.linalg.pyfftw_threads = 1
if X.ndim == 2: # univariate CSC
ds_init = np.swapaxes(ds_init, 0, 1)[:, None, :]
X = np.swapaxes(X, 0, 1)[:, None, :]
single_channel = True
else: # multivariate CSC
ds_init = np.swapaxes(ds_init, 0, 2)
X = np.swapaxes(X, 0, 2)
single_channel = False
options = {
'Verbose': verbose > 0,
'StatusHeader': False,
'MaxMainIter': n_iter,
'CBPDN': dict(NonNegCoef=True),
'CCMOD': dict(ZeroMean=False),
'DictSize': ds_init.shape,
}
    # Wohlberg / convolutional basis pursuit dictionary learning
opt = ConvBPDNDictLearn.Options(options)
cbpdn = ConvBPDNDictLearn(ds_init, X, reg, opt, dimN=1)
results = cbpdn.solve()
times = np.cumsum(cbpdn.getitstat().Time)
d_hat, pobj = results
if single_channel: # univariate CSC
d_hat = d_hat.squeeze().T
n_atoms, n_times_atom = d_hat.shape
else:
d_hat = d_hat.squeeze().swapaxes(0, 2)
n_atoms, n_channels, n_times_atom = d_hat.shape
z_hat = cbpdn.getcoef().squeeze().swapaxes(0, 2)
times = np.concatenate([[0], times])
# z_hat.shape = (n_atoms, n_trials, n_times)
z_hat = z_hat[:, :, :-n_times_atom + 1]
# z_hat.shape = (n_atoms, n_trials, n_times_valid)
return pobj, times, d_hat, z_hat
def run_fista(X, ds_init, reg, n_iter, random_state, label):
assert X.ndim == 2
n_atoms, n_times_atom = ds_init.shape
pobj, times, d_hat, z_hat = learn_d_z(
X, n_atoms, n_times_atom, func_d=update_d_block, solver_z='fista',
solver_z_kwargs=dict(max_iter=2), reg=reg, n_iter=n_iter,
random_state=random_state, ds_init=ds_init, n_jobs=1, verbose=verbose)
return pobj[::2], np.cumsum(times)[::2], d_hat, z_hat
def run_l_bfgs(X, ds_init, reg, n_iter, random_state, label, factr_d=1e7,
factr_z=1e14):
assert X.ndim == 2
n_atoms, n_times_atom = ds_init.shape
pobj, times, d_hat, z_hat = learn_d_z(
X, n_atoms, n_times_atom,
func_d=update_d_block, solver_z='l-bfgs', solver_z_kwargs=dict(
factr=factr_z), reg=reg, n_iter=n_iter, solver_d_kwargs=dict(
factr=factr_d), random_state=random_state, ds_init=ds_init,
n_jobs=1, verbose=verbose)
return pobj[::2], np.cumsum(times)[::2], d_hat, z_hat
def run_multichannel_gcd(X, ds_init, reg, n_iter, random_state, label):
if X.ndim == 2:
n_atoms, n_times_atom = ds_init.shape
ds_init = np.c_[np.ones((n_atoms, 1)), ds_init]
X = X[:, None, :]
else:
n_atoms, n_channels, n_times_atom = ds_init.shape
ds_init = get_uv(ds_init) # project init to rank 1
solver_z_kwargs = dict(max_iter=2, tol=1e-3)
pobj, times, d_hat, z_hat, reg = learn_d_z_multi(
X, n_atoms, n_times_atom, solver_d='alternate_adaptive',
solver_z="lgcd", uv_constraint='separate', eps=-np.inf,
solver_z_kwargs=solver_z_kwargs, reg=reg, solver_d_kwargs=dict(
max_iter=100), n_iter=n_iter, random_state=random_state,
raise_on_increase=False, D_init=ds_init, n_jobs=1, verbose=verbose)
# remove the ds init duration
times[0] = 0
return pobj[::2], np.cumsum(times)[::2], d_hat, z_hat
def run_multichannel_gcd_fullrank(X, ds_init, reg, n_iter, random_state,
label):
assert X.ndim == 3
n_atoms, n_channels, n_times_atom = ds_init.shape
solver_z_kwargs = dict(max_iter=2, tol=1e-3)
pobj, times, d_hat, z_hat, reg = learn_d_z_multi(
X, n_atoms, n_times_atom, solver_d='fista', solver_z="lgcd",
uv_constraint='separate', eps=-np.inf, solver_z_kwargs=solver_z_kwargs,
reg=reg, solver_d_kwargs=dict(max_iter=100), n_iter=n_iter,
random_state=random_state, raise_on_increase=False, D_init=ds_init,
n_jobs=1, verbose=verbose, rank1=False)
# remove the ds init duration
times[0] = 0
return pobj[::2], np.cumsum(times)[::2], d_hat, z_hat
def colorify(message, color=BLUE):
"""Change color of the standard output"""
return ("\033[1;%dm" % color) + message + "\033[0m"
#########################################
# List of functions used in the benchmark
#########################################
n_iter = 100
methods_univariate = [
[run_cbpdn, 'Garcia-Cardona et al (2017)', n_iter * 2],
[run_fista, 'Jas et al (2017) FISTA', n_iter],
[run_l_bfgs, 'Jas et al (2017) LBFGS', n_iter],
[run_multichannel_gcd, 'Proposed (univariate)', n_iter],
]
n_iter_multi = 20
methods_multivariate = [
[run_cbpdn, 'Wohlberg (2016)', n_iter_multi * 2],
[run_multichannel_gcd_fullrank, 'Proposed (multivariate)', n_iter_multi],
[run_multichannel_gcd, 'Proposed (multichannel)', n_iter_multi],
]
###################################
# Calling function of the benchmark
###################################
def one_run(X, X_shape, random_state, method, n_atoms, n_times_atom, reg):
assert X.shape == X_shape
func, label, n_iter = method
current_time = time.time() - START
msg = ('%s - %s: started at T=%.0f sec' % (random_state, label,
current_time))
print(colorify(msg, BLUE))
if len(X_shape) == 2:
n_trials, n_times = X.shape
n_channels = 1
X_init = X[:, None, :]
else:
n_trials, n_channels, n_times = X.shape
X_init = X
# use the same init for all methods
ds_init = init_dictionary(X_init, n_atoms, n_times_atom, D_init='chunk',
rank1=False, uv_constraint='separate',
D_init_params=dict(), random_state=random_state)
if len(X_shape) == 2:
ds_init = ds_init[:, 0, :]
# run the selected algorithm with one iter to remove compilation overhead
_, _, _, _ = func(X, ds_init, reg, 1, random_state, label)
# run the selected algorithm
pobj, times, d_hat, z_hat = func(X, ds_init, reg, n_iter, random_state,
label)
# store z_hat in a sparse matrix to reduce size
for z in z_hat:
z[z < 1e-3] = 0
z_hat = [sp.csr_matrix(z) for z in z_hat]
duration = time.time() - START - current_time
current_time = time.time() - START
msg = ('%s - %s: done in %.0f sec at T=%.0f sec' %
(random_state, label, duration, current_time))
print(colorify(msg, GREEN))
return (random_state, label, np.asarray(pobj), np.asarray(times),
np.asarray(d_hat), np.asarray(z_hat), n_atoms, n_times_atom,
n_trials, n_times, n_channels, reg)
#################################################
# Iteration over parameter settings and functions
#################################################
if __name__ == '__main__':
out_iterator = itertools.product(n_times_atom_list, n_atoms_list,
n_channel_list, reg_list)
for params in out_iterator:
n_times_atom, n_atoms, n_channels, reg = params
msg = 'n_times_atom, n_atoms, n_channels, reg = ' + str(params)
print(colorify(msg, RED))
print(colorify('-' * len(msg), RED))
save_name = base_name + str(params)
save_name = os.path.join('figures', save_name)
all_results = []
X, info = load_data(
dataset='somato', epoch=False, n_jobs=n_jobs, n_trials=2
)
if n_channels == 1:
X = X[:, 0, :] # take only one channel
elif n_channels is not None:
X = X[:, :n_channels, :]
assert X.shape[0] > 1 # we need at least two trials for sporco
X_shape = X.shape
if n_channels == 1:
methods = methods_univariate
else:
methods = methods_multivariate
iterator = itertools.product(methods, range(n_states))
if n_jobs == 1:
results = [
one_run(X, X_shape, random_state, method, n_atoms,
n_times_atom, reg)
for method, random_state in iterator
]
else:
# run the methods for different random_state
delayed_one_run = delayed(one_run)
results = Parallel(n_jobs=n_jobs)(delayed_one_run(
X, X_shape, random_state, method, n_atoms, n_times_atom,
reg) for method, random_state in iterator)
all_results.extend(results)
all_results_df = pd.DataFrame(
all_results, columns='random_state label pobj times d_hat '
'z_hat n_atoms n_times_atom n_trials n_times n_channels reg'.
split(' '))
all_results_df.to_pickle(save_name + '.pkl')
print('-- End of the script --')
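    # Hypothetical follow-up (the plotting itself lives in bench_methods_plot.py,
    # per the module docstring): each pickle written above can be reloaded and
    # grouped by method label, e.g.
    #
    #   df = pd.read_pickle(save_name + '.pkl')  # save_name as constructed above
    #   for label, group in df.groupby('label'):
    #       pass  # e.g. plot each run's pobj against its cumulative times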
|
|
from docutils.nodes import figure, caption, Text, reference, raw, SkipNode, Element, topic
from sphinx.roles import XRefRole
from inlineav import av_dgm, av_ss
import json
# Element classes
class page_ref(reference):
pass
class num_ref(reference):
pass
figids_1={}
table_doc={}
def loadTable():
try:
table=open('table.json')
data = json.load(table)
table.close()
return data
except IOError:
        print 'ERROR: No table.json file.'
        return {}
# Visit/depart functions
def skip_page_ref(self, node):
raise SkipNode
def latex_visit_page_ref(self, node):
self.body.append("\\pageref{%s:%s}" % (node['refdoc'], node['reftarget']))
raise SkipNode
def latex_visit_num_ref(self, node):
fields = node['reftarget'].split('#')
if len(fields) > 1:
label, target = fields
ref_link = '%s:%s' % (node['refdoc'], target)
latex = "\\hyperref[%s]{%s \\ref*{%s}}" % (ref_link, label, ref_link)
self.body.append(latex)
else:
self.body.append('\\ref{%s:%s}' % (node['refdoc'], fields[0]))
raise SkipNode
def html_visit_num_ref(self, node):
fields = node['reftarget'].split('#')
json_data = loadTable()
if len(fields) > 1:
label, target = fields
target_file = ''
chapter = ''
if target in json_data:
chapter = json_data[target].rsplit('.',1)[0]
for name_l, idx in json_data.iteritems():
if idx == chapter:
target_file = name_l
        if node['refdoc'] == target_file:  # target file and current file are the same
link = "%s.html#%s" %(node['refdoc'], target.lower())
else:
link = "%s.html#%s" %(target_file, target.lower())
html = '<a href="%s">%s</a>' %(link, json_data[target][:-1])
self.body.append(html)
else:
print 'WARNING: Missing object reference %s' %target
else:
self.body.append('<a href="%s.html">%s</a>' % (node['refdoc'], fields[0]))
raise SkipNode
def doctree_read(app, doctree):
# first generate figure numbers for each figure
env = app.builder.env
json_data = loadTable()
i = getattr(env, 'i', 1)
figids = getattr(env, 'figids', {})
figid_docname_map = getattr(env, 'figid_docname_map', {})
module = ''
num_module = 0
_table = 1
_exple = 1
_thrm = 1
for figure_info in doctree.traverse(Element): # figure):
if app.builder.name != 'latex' and app.config.number_figures:
if env.docname != module:
i = 1
_table = 1
_exple = 1
_thrm = 1
if isinstance( figure_info, figure):
if env.docname in json_data:
module = env.docname
num_module = json_data[env.docname]
for cap in figure_info.traverse(caption):
cap[0] = Text(" %s %s.%d: %s" % (app.config.figure_caption_prefix, num_module, i, cap[0]))
figids_1[env.docname]= '%s.%d' %(num_module, i)
for id in figure_info['ids']:
figids[id] = i
figid_docname_map[id] = env.docname
i += 1
if isinstance( figure_info, av_dgm ):
module = env.docname
i += 1
if isinstance( figure_info, av_ss ) and len(figure_info.attributes['ids']) > 0:
module = env.docname
i += 1
if isinstance( figure_info, topic):
numbered_label = ''
if env.docname in json_data:
module = env.docname
num_module = json_data[env.docname]
if module not in app.config.expleid:
app.config.expleid[module] = {}
if len(figure_info.attributes['ids']) > 0:
for label in figure_info.attributes['ids']:
xrefs = ''
if label in json_data:
xrefs = json_data[label]
if '#' in xrefs:
xrefs = xrefs[:-1]
numbered_label = ' %s' %xrefs
break
if 'example' in figure_info.children[0].children[0].lower():
title = str(figure_info.children[0].children[0]) + numbered_label
figure_info.children[0].children[0] = Text(title)
title = 'Example %s.%d ' %(num_module,_exple)
figure_info.children[0].children[0] = Text(title)
#_exple += 1
for mod in app.config.expleid:
if mod == module:
expl_dict = app.config.expleid[mod]
for id in figure_info['ids']:
expl_dict[id] = _exple
figids[id] = _exple
figid_docname_map[id] = env.docname
_exple += 1
if 'table' in figure_info.children[0].children[0].lower():
title = str(figure_info.children[0].children[0]) + numbered_label
figure_info.children[0].children[0] = Text(title)
title = 'Table %s.%d %s' %(num_module,_table,str(figure_info.children[0].children[0]).split('Table')[1])
figure_info.children[0].children[0] = Text(title)
_table += 1
if 'theorem' in figure_info.children[0].children[0].lower():
title = str(figure_info.children[0].children[0]) + numbered_label
figure_info.children[0].children[0] = Text(title)
title = 'Theorem %s.%d %s' %(num_module,_thrm,str(figure_info.children[0].children[0]).split('Theorem')[1])
figure_info.children[0].children[0] = Text(title)
_thrm += 1
env.figid_docname_map = figid_docname_map
env.i = i
env.figids = figids
def doctree_resolved(app, doctree, docname):
# replace numfig nodes with links
figids = app.builder.env.figids
if app.builder.name != 'latex':
for ref_info in doctree.traverse(num_ref):
if '#' in ref_info['reftarget']:
label, target = ref_info['reftarget'].split('#')
labelfmt = label + " %d"
else:
labelfmt = '%d'
target = ref_info['reftarget']
if target not in figids:
continue
if app.builder.name == 'html':
target_doc = app.builder.env.figid_docname_map[target]
link = "%s#%s" % (app.builder.get_relative_uri(docname, target_doc),
target)
html = '<a href="%s">%s</a>' % (link, labelfmt %(figids[target]))
ref_info.replace_self(raw(html, html, format='html'))
else:
ref_info.replace_self(Text(labelfmt % (figids[target])))
def setup(app):
app.add_config_value('number_figures', True, True)
app.add_config_value('figure_caption_prefix', "Figure", True)
app.add_config_value('expleid', {}, True)
app.add_node(page_ref,
text=(skip_page_ref, None),
html=(skip_page_ref, None),
latex=(latex_visit_page_ref, None))
app.connect('doctree-read', doctree_read)
app.connect('doctree-resolved', doctree_resolved)
app.add_role('page', XRefRole(nodeclass=page_ref))
app.add_node(num_ref,
html=(html_visit_num_ref, None))
app.add_role('num', XRefRole(nodeclass=num_ref))
# app.connect('doctree-read', doctree_read)
# app.connect('doctree-resolved', doctree_resolved)
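# Hypothetical conf.py sketch (the label 'sorting-fig' and the module name
# 'numref' are illustrative; the config values and roles are the ones
# registered in setup() above). Assuming this file is importable as 'numref':
#
#   extensions = ['numref']
#   number_figures = True
#   figure_caption_prefix = "Figure"
#
# and in reStructuredText:
#
#   See :num:`Figure #sorting-fig` for the diagram, or :page:`sorting-fig`
#   for a page reference in the LaTeX/PDF build.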
|
|
"""
Example: scikits.statsmodels.WLS
The example is extended to look at the meaning of rsquared in WLS and at
outliers, and it compares with RLM and a short bootstrap.
"""
import numpy as np
import scikits.statsmodels.api as sm
import matplotlib.pyplot as plt
data = sm.datasets.ccard.load()
data.exog = sm.add_constant(data.exog)
ols_fit = sm.OLS(data.endog, data.exog).fit()
# perhaps the residuals from this fit depend on the square of income
incomesq = data.exog[:,2]
plt.scatter(incomesq, ols_fit.resid)
plt.grid()
# If we think that the variance is proportional to income**2
# we would want to weight the regression by income
# the weights argument in WLS weights the regression by its square root
# and since income enters the equation, if we have income/income
# it becomes the constant, so we would want to perform
# this type of regression without an explicit constant in the design
#data.exog = data.exog[:,:-1]
wls_fit = sm.WLS(data.endog, data.exog[:,:-1], weights=1/incomesq).fit()
# This however, leads to difficulties in interpreting the post-estimation
# statistics. Statsmodels does not yet handle this elegantly, but
# the following may be more appropriate
# explained sum of squares
ess = wls_fit.uncentered_tss - wls_fit.ssr
# rsquared
rsquared = ess/wls_fit.uncentered_tss
# mean squared error of the model
mse_model = ess/(wls_fit.df_model + 1) # add back the dof of the constant
# f statistic
fvalue = mse_model/wls_fit.mse_resid
# adjusted r-squared
rsquared_adj = 1 -(wls_fit.nobs)/(wls_fit.df_resid)*(1-rsquared)
#Trying to figure out what's going on in this example
#----------------------------------------------------
#JP: I need to look at this again. Even if I exclude the weight variable
# from the regressors and keep the constant in then the reported rsquared
# stays small. Below also compared using squared or sqrt of weight variable.
# TODO: need to add 45 degree line to graphs
wls_fit3 = sm.WLS(data.endog, data.exog[:,(0,1,3,4)], weights=1/incomesq).fit()
print wls_fit3.summary()
print 'corrected rsquared',
print (wls_fit3.uncentered_tss - wls_fit3.ssr)/wls_fit3.uncentered_tss
plt.figure()
plt.title('WLS dropping heteroscedasticity variable from regressors')
plt.plot(data.endog, wls_fit3.fittedvalues, 'o')
plt.xlim([0,2000])
plt.ylim([0,2000])
print 'raw correlation of endog and fittedvalues'
print np.corrcoef(data.endog, wls_fit.fittedvalues)
print 'raw correlation coefficient of endog and fittedvalues squared'
print np.corrcoef(data.endog, wls_fit.fittedvalues)[0,1]**2
# compare with robust regression,
# heteroscedasticity correction downweights the outliers
rlm_fit = sm.RLM(data.endog, data.exog).fit()
plt.figure()
plt.title('using robust for comparison')
plt.plot(data.endog, rlm_fit.fittedvalues, 'o')
plt.xlim([0,2000])
plt.ylim([0,2000])
#What is going on? A more systematic look at the data
#----------------------------------------------------
# two helper functions
def getrsq(fitresult):
'''calculates rsquared residual, total and explained sums of squares
Parameters
----------
fitresult : instance of Regression Result class, or tuple of (resid, endog) arrays
regression residuals and endogenous variable
Returns
-------
rsquared
residual sum of squares
(centered) total sum of squares
explained sum of squares (for centered)
'''
if hasattr(fitresult, 'resid') and hasattr(fitresult, 'model'):
resid = fitresult.resid
endog = fitresult.model.endog
nobs = fitresult.nobs
else:
resid = fitresult[0]
endog = fitresult[1]
nobs = resid.shape[0]
rss = np.dot(resid, resid)
tss = np.var(endog)*nobs
return 1-rss/tss, rss, tss, tss-rss
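# For instance (mirroring how getrsq is used further below), it accepts either
# a fitted results instance or a (resid, endog) tuple:
#
#   r2, rss, tss, ess = getrsq(ols_fit)
#   r2_w, _, _, _ = getrsq((wls_fit.wresid, wls_fit.model.wendog))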
def index_trim_outlier(resid, k):
'''returns indices to residual array with k outliers removed
Parameters
----------
resid : array_like, 1d
data vector, usually residuals of a regression
k : int
number of outliers to remove
Returns
-------
trimmed_index : array, 1d
index array with k outliers removed
outlier_index : array, 1d
index array of k outliers
Notes
-----
Outliers are defined as the k observations with the largest
absolute values.
'''
sort_index = np.argsort(np.abs(resid))
# index of non-outlier
trimmed_index = np.sort(sort_index[:-k])
outlier_index = np.sort(sort_index[-k:])
return trimmed_index, outlier_index
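# For instance (as done below for the OLS fit), drop the two largest-magnitude
# residuals and refit on the remaining observations; 'ols_fit_trimmed' is an
# illustrative name:
#
#   keep, outl = index_trim_outlier(ols_fit.resid, 2)
#   ols_fit_trimmed = sm.OLS(data.endog[keep], data.exog[keep, :]).fit()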
#Comparing estimation results for ols, rlm and wls with and without outliers
#---------------------------------------------------------------------------
#ols_test_fit = sm.OLS(data.endog, data.exog).fit()
olskeep, olsoutl = index_trim_outlier(ols_fit.resid, 2)
print 'ols outliers', olsoutl, ols_fit.resid[olsoutl]
ols_fit_rm2 = sm.OLS(data.endog[olskeep], data.exog[olskeep,:]).fit()
rlm_fit_rm2 = sm.RLM(data.endog[olskeep], data.exog[olskeep,:]).fit()
#weights = 1/incomesq
results = [ols_fit, ols_fit_rm2, rlm_fit, rlm_fit_rm2]
#Note: I think incomesq is already square
for weights in [1/incomesq, 1/incomesq**2, np.sqrt(incomesq)]:
print '\nComparison OLS and WLS with and without outliers'
wls_fit0 = sm.WLS(data.endog, data.exog, weights=weights).fit()
wls_fit_rm2 = sm.WLS(data.endog[olskeep], data.exog[olskeep,:],
weights=weights[olskeep]).fit()
wlskeep, wlsoutl = index_trim_outlier(ols_fit.resid, 2)
print '2 outliers candidates and residuals'
print wlsoutl, wls_fit.resid[olsoutl]
# redundant because ols and wls outliers are the same:
##wls_fit_rm2_ = sm.WLS(data.endog[wlskeep], data.exog[wlskeep,:],
## weights=1/incomesq[wlskeep]).fit()
print 'outliers ols, wls:', olsoutl, wlsoutl
print 'rsquared'
print 'ols vs ols rm2', ols_fit.rsquared, ols_fit_rm2.rsquared
print 'wls vs wls rm2', wls_fit0.rsquared, wls_fit_rm2.rsquared #, wls_fit_rm2_.rsquared
print 'compare R2_resid versus R2_wresid'
print 'ols minus 2', getrsq(ols_fit_rm2)[0],
print getrsq((ols_fit_rm2.wresid, ols_fit_rm2.model.wendog))[0]
print 'wls ', getrsq(wls_fit)[0],
print getrsq((wls_fit.wresid, wls_fit.model.wendog))[0]
print 'wls minus 2', getrsq(wls_fit_rm2)[0],
# next is same as wls_fit_rm2.rsquared for cross checking
print getrsq((wls_fit_rm2.wresid, wls_fit_rm2.model.wendog))[0]
#print getrsq(wls_fit_rm2_)[0],
#print getrsq((wls_fit_rm2_.wresid, wls_fit_rm2_.model.wendog))[0]
results.extend([wls_fit0, wls_fit_rm2])
print ' ols ols_rm2 rlm rlm_rm2 wls (lin) wls_rm2 (lin) wls (squ) wls_rm2 (squ) wls (sqrt) wls_rm2 (sqrt)'
print 'Parameter estimates'
print np.column_stack([r.params for r in results])
print 'R2 original data, next line R2 weighted data'
print np.column_stack([getattr(r, 'rsquared', None) for r in results])
print 'Standard errors'
print np.column_stack([getattr(r, 'bse', None) for r in results])
print 'Heteroscedasticity robust standard errors (with ols)'
print 'with outliers'
print np.column_stack([getattr(ols_fit, se, None) for se in ['HC0_se', 'HC1_se', 'HC2_se', 'HC3_se']])
'''
ols ols_rm2 rlm rlm_rm2 wls (lin) wls_rm2 (lin) wls (squ) wls_rm2 (squ) wls (sqrt) wls_rm2 (sqrt)
Parameter estimates
[[ -3.08181404 -5.06103843 -4.98510966 -5.34410309 -2.69418516 -3.1305703 -1.43815462 -1.58893054 -3.57074829 -6.80053364]
[ 234.34702702 115.08753715 129.85391456 109.01433492 158.42697752 128.38182357 60.95113284 100.25000841 254.82166855 103.75834726]
[ -14.99684418 -5.77558429 -6.46204829 -4.77409191 -7.24928987 -7.41228893 6.84943071 -3.34972494 -16.40524256 -4.5924465 ]
[ 27.94090839 85.46566835 89.91389709 95.85086459 60.44877369 79.7759146 55.9884469 60.97199734 -3.8085159 84.69170048]
[-237.1465136 39.51639838 -15.50014814 31.39771833 -114.10886935 -40.04207242 -6.41976501 -38.83583228 -260.72084271 117.20540179]]
R2 original data, next line R2 weighted data
[[ 0.24357792 0.31745994 0.19220308 0.30527648 0.22861236 0.3112333 0.06573949 0.29366904 0.24114325 0.31218669]]
[[ 0.24357791 0.31745994 None None 0.05936888 0.0679071 0.06661848 0.12769654 0.35326686 0.54681225]]
-> R2 with weighted data is jumping all over
standard errors
[[ 5.51471653 3.31028758 2.61580069 2.39537089 3.80730631 2.90027255 2.71141739 2.46959477 6.37593755 3.39477842]
[ 80.36595035 49.35949263 38.12005692 35.71722666 76.39115431 58.35231328 87.18452039 80.30086861 86.99568216 47.58202096]
[ 7.46933695 4.55366113 3.54293763 3.29509357 9.72433732 7.41259156 15.15205888 14.10674821 7.18302629 3.91640711]
[ 82.92232357 50.54681754 39.33262384 36.57639175 58.55088753 44.82218676 43.11017757 39.31097542 96.4077482 52.57314209]
[ 199.35166485 122.1287718 94.55866295 88.3741058 139.68749646 106.89445525 115.79258539 105.99258363 239.38105863 130.32619908]]
robust standard errors (with ols)
with outliers
HC0_se HC1_se HC2_se HC3_se'
[[ 3.30166123 3.42264107 3.4477148 3.60462409]
[ 88.86635165 92.12260235 92.08368378 95.48159869]
[ 6.94456348 7.19902694 7.19953754 7.47634779]
[ 92.18777672 95.56573144 95.67211143 99.31427277]
[ 212.9905298 220.79495237 221.08892661 229.57434782]]
removing 2 outliers
[[ 2.57840843 2.67574088 2.68958007 2.80968452]
[ 36.21720995 37.58437497 37.69555106 39.51362437]
[ 3.1156149 3.23322638 3.27353882 3.49104794]
[ 50.09789409 51.98904166 51.89530067 53.79478834]
[ 94.27094886 97.82958699 98.25588281 102.60375381]]
'''
# a quick bootstrap analysis
# --------------------------
#
#(I didn't check whether this is fully correct statistically)
nobs, nvar = data.exog.shape
niter = 2000
bootres = np.zeros((niter, nvar*2))
for it in range(niter):
rind = np.random.randint(nobs, size=nobs)
endog = data.endog[rind]
exog = data.exog[rind,:]
res = sm.OLS(endog, exog).fit()
bootres[it, :nvar] = res.params
bootres[it, nvar:] = res.bse
np.set_printoptions(linewidth=200)
print 'Bootstrap Results of parameters and parameter standard deviation OLS'
print 'Parameter estimates'
print 'median', np.median(bootres[:,:5], 0)
print 'mean ', np.mean(bootres[:,:5], 0)
print 'std ', np.std(bootres[:,:5], 0)
print 'Standard deviation of parameter estimates'
print 'median', np.median(bootres[:,5:], 0)
print 'mean ', np.mean(bootres[:,5:], 0)
print 'std ', np.std(bootres[:,5:], 0)
plt.figure()
for i in range(4):
plt.subplot(2,2,i+1)
plt.hist(bootres[:,i],50)
plt.title('var%d'%i)
plt.figtext(0.5, 0.935, 'OLS Bootstrap',
ha='center', color='black', weight='bold', size='large')
data_endog = data.endog[olskeep]
data_exog = data.exog[olskeep,:]
incomesq_rm2 = incomesq[olskeep]
nobs, nvar = data_exog.shape
niter = 500 # a bit slow
bootreswls = np.zeros((niter, nvar*2))
for it in range(niter):
rind = np.random.randint(nobs, size=nobs)
endog = data_endog[rind]
exog = data_exog[rind,:]
    res = sm.WLS(endog, exog, weights=1/incomesq_rm2[rind]).fit()
bootreswls[it, :nvar] = res.params
bootreswls[it, nvar:] = res.bse
print 'Bootstrap Results of parameters and parameter standard deviation',
print 'WLS removed 2 outliers from sample'
print 'Parameter estimates'
print 'median', np.median(bootreswls[:,:5], 0)
print 'mean ', np.mean(bootreswls[:,:5], 0)
print 'std ', np.std(bootreswls[:,:5], 0)
print 'Standard deviation of parameter estimates'
print 'median', np.median(bootreswls[:,5:], 0)
print 'mean ', np.mean(bootreswls[:,5:], 0)
print 'std ', np.std(bootreswls[:,5:], 0)
plt.figure()
for i in range(4):
plt.subplot(2,2,i+1)
plt.hist(bootreswls[:,i],50)
plt.title('var%d'%i)
plt.figtext(0.5, 0.935, 'WLS rm2 Bootstrap',
ha='center', color='black', weight='bold', size='large')
#plt.show()
#plt.close('all')
'''
The following are random variables, not fixed by a seed.
Bootstrap Results of parameters and parameter standard deviation
OLS
Parameter estimates
median [ -3.26216383 228.52546429 -14.57239967 34.27155426 -227.02816597]
mean [ -2.89855173 234.37139359 -14.98726881 27.96375666 -243.18361746]
std [ 3.78704907 97.35797802 9.16316538 94.65031973 221.79444244]
Standard deviation of parameter estimates
median [ 5.44701033 81.96921398 7.58642431 80.64906783 200.19167735]
mean [ 5.44840542 86.02554883 8.56750041 80.41864084 201.81196849]
std [ 1.43425083 29.74806562 4.22063268 19.14973277 55.34848348]
Bootstrap Results of parameters and parameter standard deviation
WLS removed 2 outliers from sample
Parameter estimates
median [ -3.95876112 137.10419042 -9.29131131 88.40265447 -44.21091869]
mean [ -3.67485724 135.42681207 -8.7499235 89.74703443 -46.38622848]
std [ 2.96908679 56.36648967 7.03870751 48.51201918 106.92466097]
Standard deviation of parameter estimates
median [ 2.89349748 59.19454402 6.70583332 45.40987953 119.05241283]
mean [ 2.97600894 60.14540249 6.92102065 45.66077486 121.35519673]
std [ 0.55378808 11.77831934 1.69289179 7.4911526 23.72821085]
Conclusion: problem with outliers and possibly heteroscedasticity
-----------------------------------------------------------------
in bootstrap results
* bse in OLS underestimates the standard deviation of the parameters
compared to standard deviation in bootstrap
* OLS heteroscedasticity corrected standard errors for the original
data (above) are close to bootstrap std
* using WLS with 2 outliers removed has a relatively good match between
the mean or median bse and the std of the parameter estimates in the
bootstrap
We could also include rsquared in bootstrap, and do it also for RLM.
The problems could also mean that the linearity assumption is violated,
e.g. try non-linear transformation of exog variables, but linear
in parameters.
for statsmodels
* In this case rsquared for original data looks less random/arbitrary.
* Don't change definition of rsquared from centered tss to uncentered
tss when calculating rsquared in WLS if the original exog contains
a constant. The increase in rsquared because of a change in definition
will be very misleading.
* Whether there is a constant in the transformed exog, wexog, or not,
might affect also the degrees of freedom calculation, but I haven't
checked this. I would guess that the df_model should stay the same,
but needs to be verified with a textbook.
* df_model has to be adjusted if the original data does not have a
constant, e.g. when regressing an endog on a single exog variable
without constant. This case might require also a redefinition of
the rsquare and f statistic for the regression anova to use the
uncentered tss.
This can be done through a keyword parameter to model.__init__ or
through autodetection with hasconst = (exog.var(0) < 1e-10).any()
I'm not sure about fixed effects with a full dummy set but
without a constant. In this case autodetection wouldn't work this
way. Also, I'm not sure whether a ddof keyword parameter can also
handle the hasconst case.
'''
|
|
from collections import defaultdict
import datetime
from google.appengine.ext import db
from google.appengine.ext import ndb
import pickle_util
import synchronized_counter
# We are explicit here about which model properties are indexed and
# which aren't (even when we're just repeating the default behavior),
# to be maximally clear. We keep indexed properties to a minimum to
# reduce put()-time. (The cost is you can't pass an unindexed
# property to filter().)
# If you use a datastore model to uniquely identify each user,
# let it inherit from this class, like so...
#
#       class UserData(GAEBingoIdentityModel):
#
# ...this will let gae_bingo automatically take care of persisting ab_test
# identities from unregistered users to logged in users.
class GAEBingoIdentityModel(db.Model):
gae_bingo_identity = db.StringProperty(indexed=False)
class ConversionTypes():
# Binary conversions are counted at most once per user
Binary = "binary"
# Counting conversions increment each time
Counting = "counting"
@staticmethod
def get_all_as_list():
return [ConversionTypes.Binary, ConversionTypes.Counting]
def __setattr__(self, attr, value):
pass
class _GAEBingoExperiment(db.Model):
# This is used for a db-query in fetch_for_experiment()
name = db.StringProperty(indexed=True)
# Not necessarily unique. Experiments "monkeys" and "monkeys (2)" both have
# canonical_name "monkeys"
# This isn't used for db-querying in code, but can be for one-offs.
canonical_name = db.StringProperty(indexed=True)
family_name = db.StringProperty(indexed=False)
conversion_name = db.StringProperty(indexed=False)
conversion_type = db.StringProperty(
indexed=False,
default=ConversionTypes.Binary,
choices=set(ConversionTypes.get_all_as_list()))
# Experiments can be live (running), stopped (not running, not archived),
# or archived (not running, permanently archived).
# Stopped experiments aren't collecting data, but they exist and can be
# used to "short-circuit" an alternative by showing it to all users even
# before the code is appropriately modified to do so.
live = db.BooleanProperty(indexed=False, default=True)
# This is used for a db-query in cache.py:load_from_datastore()
archived = db.BooleanProperty(indexed=True, default=False)
dt_started = db.DateTimeProperty(indexed=False, auto_now_add=True)
short_circuit_pickled_content = db.BlobProperty(indexed=False)
@property
def stopped(self):
return not (self.archived or self.live)
@property
def short_circuit_content(self):
if self.short_circuit_pickled_content:
return pickle_util.load(self.short_circuit_pickled_content)
else:
return None
def set_short_circuit_content(self, value):
self.short_circuit_pickled_content = pickle_util.dump(value)
@property
def pretty_name(self):
return self.name.capitalize().replace("_", " ")
@property
def pretty_conversion_name(self):
return self.conversion_name.capitalize().replace("_", " ")
@property
def pretty_canonical_name(self):
return self.canonical_name.capitalize().replace("_", " ")
@property
def conversion_group(self):
if "_" in self.conversion_name:
group = "_".join(self.conversion_name.split("_")[:-1])
return group.capitalize().replace("_", " ")
else:
return self.conversion_name
@property
def hashable_name(self):
return self.family_name if self.family_name else self.canonical_name
@property
def age_desc(self):
if self.archived:
return "Ran %s UTC" % self.dt_started.strftime('%Y-%m-%d at %H:%M:%S')
days_running = (datetime.datetime.now() - self.dt_started).days
if days_running < 1:
return "Less than a day old"
else:
return "%s day%s old" % (days_running, ("" if days_running == 1 else "s"))
@property
def y_axis_title(self):
if self.conversion_type == ConversionTypes.Counting:
"Average Conversions per Participant"
else:
"Conversions (%)"
@property
def participants_key(self):
return "%s:participants" % self.name
@property
def conversions_key(self):
return "%s:conversions" % self.name
def reset_counters(self):
"""Reset the participants and conversions accumulating counters."""
synchronized_counter.SynchronizedCounter.delete_multi(
[self.participants_key, self.conversions_key])
class _GAEBingoAlternative(db.Model):
number = db.IntegerProperty(indexed=False)
experiment_name = db.StringProperty(indexed=False)
pickled_content = db.BlobProperty(indexed=False)
conversions = db.IntegerProperty(indexed=False, default=0)
participants = db.IntegerProperty(indexed=False, default=0)
live = db.BooleanProperty(indexed=False, default=True)
# This is used for a db-query in cache.py:load_from_datastore()
archived = db.BooleanProperty(indexed=True, default=False)
weight = db.IntegerProperty(indexed=False, default=1)
@staticmethod
def key_for_experiment_name_and_number(experiment_name, number):
return "_gae_alternative:%s:%s" % (experiment_name, number)
@property
def content(self):
return pickle_util.load(self.pickled_content)
@property
def pretty_content(self):
return str(self.content).capitalize()
@property
def conversion_rate(self):
if self.participants > 0:
return float(self.conversions) / float(self.participants)
return 0
@property
def pretty_conversion_rate(self):
return "%4.2f%%" % (self.conversion_rate * 100)
@property
def participants_key(self):
return "%s:participants" % self.experiment_name
@property
def conversions_key(self):
return "%s:conversions" % self.experiment_name
@ndb.tasklet
def increment_participants_async(self):
"""Increment a memcache.incr-backed counter to keep track of
participants in a scalable fashion.
It's possible that the cached _GAEBingoAlternative entities will fall a
bit behind due to concurrency issues, but the memcache.incr'd version
should stay up-to-date and be persisted.
Returns:
True if participants was successfully incremented, False otherwise.
"""
incremented = (yield
synchronized_counter.SynchronizedCounter.incr_async(
self.participants_key, self.number))
raise ndb.Return(incremented)
@ndb.tasklet
def increment_conversions_async(self):
"""Increment a memcache.incr-backed counter to keep track of
conversions in a scalable fashion.
It's possible that the cached _GAEBingoAlternative entities will fall a
bit behind due to concurrency issues, but the memcache.incr'd version
should stay up-to-date and be persisted.
Returns:
True if conversions was successfully incremented, False otherwise.
"""
incremented = (yield
synchronized_counter.SynchronizedCounter.incr_async(
self.conversions_key, self.number))
raise ndb.Return(incremented)
def latest_participants_count(self):
running_count = synchronized_counter.SynchronizedCounter.get(
self.participants_key, self.number)
return self.participants + running_count
def latest_conversions_count(self):
running_count = synchronized_counter.SynchronizedCounter.get(
self.conversions_key, self.number)
return self.conversions + running_count
class _GAEBingoSnapshotLog(db.Model):
"""A snapshot of bingo metrics for a given experiment alternative.
This is always created with the _GAEBingoExperiment as the entity parent.
"""
alternative_number = db.IntegerProperty(indexed=False)
conversions = db.IntegerProperty(indexed=False, default=0)
participants = db.IntegerProperty(indexed=False, default=0)
# This is used for a db-query in fetch_for_experiment().
time_recorded = db.DateTimeProperty(indexed=True, auto_now_add=True)
@staticmethod
def fetch_for_experiment(name, limit=100):
"""Retrieves the most recent snapshots for a given experiment.
Arguments:
name -- the name of the experiment (not canonical name).
e.g. "Homepage layout v2point3 (answer_added_binary)"
limit -- number of snapshots across all the alternatives to fetch
(note it could be that some alternatives have one more than
others, depending on the distribution.)
Returns:
A dict of snapshots, indexed by alternative_number.
"""
exp = _GAEBingoExperiment.all().filter("name =", name).get()
if not exp:
return {}
results = (_GAEBingoSnapshotLog.all()
.ancestor(exp)
.order("-time_recorded")
.fetch(limit))
groups = defaultdict(list)
for s in results:
groups[s.alternative_number].append(s)
return groups
class _GAEBingoExperimentNotes(db.Model):
"""Notes and list of emotions associated w/ results of an experiment."""
# arbitrary user-supplied notes
notes = db.TextProperty(indexed=False)
# list of choices from selection of emotions, such as "happy" and "surprised"
pickled_emotions = db.BlobProperty(indexed=False)
@staticmethod
def key_for_experiment(experiment):
"""Return the key for this experiment's notes."""
return "_gae_bingo_notes:%s" % experiment.name
@staticmethod
def get_for_experiment(experiment):
"""Return GAEBingoExperimentNotes, if it exists, for the experiment."""
return _GAEBingoExperimentNotes.get_by_key_name(
_GAEBingoExperimentNotes.key_for_experiment(experiment),
parent=experiment)
@staticmethod
def save(experiment, notes, emotions):
"""Save notes and emo list, associating with specified experiment."""
notes = _GAEBingoExperimentNotes(
key_name = _GAEBingoExperimentNotes.key_for_experiment(experiment),
parent = experiment,
notes = notes,
pickled_emotions = pickle_util.dump(emotions))
notes.put()
@property
def emotions(self):
"""Return unpickled list of emotions tied to these notes."""
if self.pickled_emotions:
return pickle_util.load(self.pickled_emotions)
else:
return None
class _GAEBingoIdentityRecord(db.Model):
identity = db.StringProperty(indexed=False)
# Stores a pickled BingoIdentityCache object.
pickled = db.BlobProperty(indexed=False)
# A timestamp for keeping track when this record was last updated.
# Used (well, potentially used) by analytics.git:src/fetch_entities.py.
backup_timestamp = db.DateTimeProperty(indexed=True, auto_now=True)
@staticmethod
def key_for_identity(identity):
return "_gae_bingo_identity_record:%s" % identity
@staticmethod
def load(identity):
gae_bingo_identity_record = (
_GAEBingoIdentityRecord.get_by_key_name(
_GAEBingoIdentityRecord.key_for_identity(identity)))
if gae_bingo_identity_record:
return pickle_util.load(gae_bingo_identity_record.pickled)
return None
def create_experiment_and_alternatives(experiment_name, canonical_name, alternative_params = None, conversion_name = None, conversion_type = ConversionTypes.Binary, family_name = None):
if not experiment_name:
raise Exception("gae_bingo experiments must be named.")
conversion_name = conversion_name or experiment_name
if not alternative_params:
# Default to simple True/False testing
alternative_params = [True, False]
# Generate a random key name for this experiment so it doesn't collide with
# any past experiments of the same name. All other entities, such as
# alternatives, snapshots, and notes, will then use this entity as their
# parent.
experiment = _GAEBingoExperiment(
key_name = "_gae_experiment:%s" % experiment_name,
name = experiment_name,
canonical_name = canonical_name,
family_name = family_name,
conversion_name = conversion_name,
conversion_type = conversion_type,
live = True,
)
alternatives = []
is_dict = isinstance(alternative_params, dict)
for i, content in enumerate(alternative_params):
alternatives.append(
_GAEBingoAlternative(
key_name = _GAEBingoAlternative.key_for_experiment_name_and_number(experiment_name, i),
parent = experiment,
experiment_name = experiment.name,
number = i,
pickled_content = pickle_util.dump(content),
live = True,
weight = alternative_params[content] if is_dict else 1,
)
)
return experiment, alternatives
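# --- Illustrative sketch (not part of the original module) ---------------
# create_experiment_and_alternatives() accepts alternative_params either as
# a list (every alternative gets weight 1) or as a dict mapping content to
# weight. A minimal pure-Python sketch of that weighting logic, without the
# App Engine models:
def _example_alternative_weights(alternative_params=None):
    if not alternative_params:
        alternative_params = [True, False]  # default to simple A/B testing
    is_dict = isinstance(alternative_params, dict)
    weights = {}
    for i, content in enumerate(alternative_params):
        weights[i] = alternative_params[content] if is_dict else 1
    return weights
# _example_alternative_weights(["a", "b"]) -> {0: 1, 1: 1}
# With a dict such as {"a": 3, "b": 1}, each weight value is preserved; the
# mapping from index to weight follows dict iteration order, exactly as in
# the function above.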
|
|
#! /usr/bin/env python
"""
myfunctions.py
My functions module containing commonly used functions
- Math
-- adjAvSmooth(dataarray, N=10)
-- weibullPlot(dataarray)
-- numInt(function, a, b, step)
-- numDiff(y, x)
-- numDifference(y)
-- mean_sterr(x)
- Array manipulation
-- findNearest(arr, val)
-- outputMultiList(data)
-- resized(arr, s)
- File import
-- paImport(datafile, path, ext_cut=6)
-- paImportLV(datafile, path, ext_cut=7)
-- paImportIV(datafile, path, ext_cut=6)
-- paramImport(paramfile, path, param_no=3)
-- paImportImpSpec(datafile, path, ext_cut=9)
-- csvImport(datafile, path, headerlength)
-- csvBiasStressImport(datafile, path)
- File output
-- dataOutput(filename, path, datalist, format='%.1f\t %e\t %e\t %e\n')
-- dataOutputHead(filename, path, datalist, headerlist, format_d='%.1f\t %e\t %e\t %e\n', format_h='%s\n')
-- dataOutputGen(filename, path, datalist)
-- quickPlot(filename, path, datalist, xlabel="x", ylabel="y", xrange=["auto", "auto"], yrange=["auto", "auto"], yscale="linear", xscale="linear", col=["r","b"])
Created by Jeremy Smith on 2015-06-05
Modified 2017-03-20
j.smith.03@cantab.net
Version 3.1
"""
import sys
import os
import numpy as np
from matplotlib.ticker import ScalarFormatter
from matplotlib.figure import Figure
from matplotlib.backends.backend_pdf import FigureCanvasPdf
import seaborn
from scipy.signal import medfilt
__author__ = "Jeremy Smith"
__version__ = "3.1"
EPS0 = 8.85418782e-12
QELEC = 1.60217662e-19
HBAR = 1.0545718e-34
MELEC = 9.10938356e-31
KBOLZ = 1.38064852e-23
FARA = 96485.3399
def adjAvSmooth(dataarray, N=10):
"""Applies Median Filter then Smooths N Times with Adjacent Averaging and Fixed End-points"""
lp = dataarray[-1]
dataarray = medfilt(dataarray)
dataarray[-1] = lp
for i in range(N):
dplus1 = np.roll(dataarray, 1)
dplus1[0] = dplus1[1]
dminus1 = np.roll(dataarray, -1)
dminus1[-1] = dminus1[-2]
dataarray = (dataarray + 0.5*dplus1 + 0.5*dminus1)/2.0
return dataarray
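# --- Illustrative usage sketch (not part of the original module) ---------
# adjAvSmooth() median-filters the input and then N times replaces every
# point with 0.5*point + 0.25*left + 0.25*right, replicating the edge
# values so the ends are not rolled around. A quick check on a noisy ramp:
def _example_adjavsmooth():
    ramp = np.linspace(0.0, 1.0, 50)
    noisy = ramp + 0.05 * np.random.randn(50)
    smooth = adjAvSmooth(noisy, N=5)
    # smooth follows the ramp with visibly reduced point-to-point scatter
    return noisy, smooth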
def weibullPlot(dataarray):
"""Calculates Weibull Plot Data from Input Array"""
n = len(dataarray)
datasorted = np.sort(abs(np.array(dataarray)))
ecdf = []
for i in range(n):
ecdf.append(float(len(np.where(datasorted <= datasorted[i])[0]))/n)
ecdf = np.array(ecdf)
weibull = np.log(-np.log(1 - ecdf[:-1]))
return np.log(datasorted)[:-1], weibull, datasorted, ecdf
def numInt(function, a, b, step):
"""Numerical Integration of a Function with x=a and x=b Limits"""
x = np.array([float(x)*step for x in range(int(a/step), int(b/step)+1)])
y = function(x)
trpsum = 0
for i, yi in enumerate(y[:-1]):
trap = (x[i+1]-x[i])*(y[i+1]+yi)/2
trpsum += trap
return trpsum
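# --- Illustrative usage sketch (not part of the original module) ---------
# numInt() is a plain trapezoidal rule: it samples the function on a grid
# from a to b with the given step and sums the trapezoid areas. Checking
# it against an integral with a known closed form:
def _example_numint():
    approx = numInt(np.sin, 0.0, np.pi, 0.001)  # exact value is 2.0
    assert abs(approx - 2.0) < 1e-3
    return approx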
def numDiff(y, x):
"""Numerical Differentiation using Two-point Finite Difference"""
grad = [0]
for i, yi in enumerate(y[:-2]):
g = (y[i+2] - yi)/(x[i+2] - x[i])
grad.append(g)
grad.append(0)
return grad
def numDifference(y):
"""Takes First Difference Between Adjacent Points"""
diff = []
for i, yi in enumerate(y[:-1]):
d = y[i+1] - yi
diff.append(d)
diff.append(0)
return diff
def mean_sterr(x):
"""Mean and Standard Error Function"""
n, mean, std = len(x), 0, 0
for a in x:
mean = mean + a
mean = mean/float(n)
for a in x:
std = std + (a - mean)**2
std = np.sqrt(std/float(n - 1))
return mean, std/np.sqrt(n)
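# --- Illustrative usage sketch (not part of the original module) ---------
# mean_sterr() returns (mean, standard error), where the standard error is
# the sample standard deviation (ddof=1) divided by sqrt(n). Cross-checking
# against numpy:
def _example_mean_sterr():
    x = [2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0]
    mean, sterr = mean_sterr(x)
    assert abs(mean - np.mean(x)) < 1e-12
    assert abs(sterr - np.std(x, ddof=1) / np.sqrt(len(x))) < 1e-12
    return mean, sterr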
def findNearest(arr, val):
"""Finds Nearest Element in Array to val"""
i = (np.abs(arr - val)).argmin()
return i, arr[i]
def resized(arr, s):
"""Returns resized array padded with zeros"""
tmparr = np.copy(arr)
tmparr.resize(s)
return tmparr
def outputMultiList(data):
"""Converts Single List of Output Data to Muliple Lists for Each VG"""
dtnum = data["VGS"].count(data["VGS"][0])
vds = data["VDS"][:dtnum]
data2 = {"VDS": vds}
for i in range(len(data["VGS"])/dtnum):
data2["IDS" + str(i+1)] = data["IDS"][i*dtnum:(i+1)*dtnum]
return data2
def paImport(datafile, path, ext_cut=6):
"""Importer for Keithley PA Files"""
device_name = datafile[:-ext_cut].strip()
print device_name
data = {}
with open(os.path.join(path, datafile), 'r') as dfile:
headers = dfile.readline().strip().split('\t')
for h in headers:
data[h] = []
for line in dfile:
splitline = line.strip().split('\t')
if len(splitline) == 1:
continue
for i, a in enumerate(splitline):
if "#REF" in a:
a = 0
data[headers[i]].append(float(a))
return data, device_name
def paramImport(paramfile, path, param_no=3):
"""Importer for Device Parameter File"""
params = []
for i in range(param_no):
params.append({})
with open(os.path.join(path, paramfile), 'r') as pfile:
for line in pfile:
splitline = line.strip().split('\t')
name, values = splitline[0], splitline[1:]
for i in range(param_no):
params[i][name] = float(values[i])
return params
def paImportIV(datafile, path, ext_cut=6):
"""Importer for LabView Format IV Files"""
device_name = datafile[:-ext_cut].strip()
headers = ["Vbias", "Imeas"]
print device_name
data = {}
with open(os.path.join(path, datafile), 'r') as dfile:
for h in headers:
data[h] = []
dfile.readline()
for line in dfile:
a = line.strip().split('\t')
if float(a[0]) == 0:
continue
if len(a) == 1:
continue
for i in range(len(a)):
data[headers[i]].append(float(a[i]))
return data, device_name
def paImportLV(datafile, path, ext_cut=7):
"""Importer for LabView Format Files"""
device_name = datafile[:-ext_cut].strip()
file_type = datafile[-ext_cut+1:-4].strip()
if file_type == "oo":
headers = ["VDS", "IDS", "VGS"]
else:
headers = ["VDS", "IDS", "VGS", "IGS"]
data = {}
for h in headers:
data[h] = []
with open(os.path.join(path, datafile), 'r') as dfile:
dfile.readline()
for line in dfile:
splitline = line.strip().split("\t")
if len(splitline) == 1:
continue
for i, a in enumerate(splitline):
data[headers[i]].append(float(a))
return data, device_name, file_type
def paImportImpSpec(datafile, path, ext_cut=9):
"""Importer for Impedance Spec Format Files"""
device_name = datafile[:-ext_cut].strip()
file_type = datafile[-ext_cut+1:-4].strip()
if file_type == "freq":
headers = ["Freq", "ReZ", "ImZ", "T"]
elif file_type == "bias":
headers = ["Vbias", "ReZ", "ImZ", "T"]
else:
print "No File Type Tag"
return
data = {}
for h in headers:
data[h] = []
with open(os.path.join(path, datafile), 'r') as dfile:
for line in dfile:
splitline = line.strip().split("\t")
if len(splitline) == 1:
continue
if "NaN" in splitline:
continue
if float(splitline[2]) == 0:
continue
for i, a in enumerate(splitline):
data[headers[i]].append(float(a))
return data, device_name, file_type
def csvImport(datafile, path, headerlength):
"""Importer for B1500 csv Files"""
header = []
data = {}
with open(os.path.join(path, datafile), 'r') as dfile:
for i in range(headerlength):
splitline = dfile.readline().strip().split(',')
header.append(splitline)
colhead = dfile.readline().strip().split(',') # Column headers
for h in colhead:
if h == '':
continue
data[h] = []
for line in dfile:
splitline = line.strip().split(',')
if len(splitline) == 1:
continue
for i, a in enumerate(splitline):
if a == '':
continue
data[colhead[i]].append(float(a))
return data, header
def csvBiasStressImport(datafile, path):
"""Importer for Bias Stress Test csv Files from EasyExpert"""
datalist_transfer = []
datalist_stress = []
data_transfer = {}
data_stress = {}
datatype = None
with open(os.path.join(path, datafile), 'r') as dfile:
for line in dfile:
splitline = line.strip().split(', ')
if splitline[0] == 'SetupTitle':
if splitline[1] == 'I/V-t Sampling':
datatype = 1
elif splitline[1] == 'I/V Sweep':
datatype = 2
else:
datatype = None
continue
if datatype is None:
continue
if splitline[0] == 'DataName':
headerlist = splitline[1:]
if datatype == 1:
datalist_stress.append(data_stress)
data_stress = {}
for h in headerlist:
data_stress[h] = []
if datatype == 2:
datalist_transfer.append(data_transfer)
data_transfer = {}
for h in headerlist:
data_transfer[h] = []
if splitline[0] == 'DataValue':
if datatype == 1:
for i, a in enumerate(splitline[1:]):
data_stress[headerlist[i]].append(float(a))
if datatype == 2:
for i, a in enumerate(splitline[1:]):
data_transfer[headerlist[i]].append(float(a))
datalist_stress.append(data_stress)
datalist_transfer.append(data_transfer)
return datalist_transfer[:0:-1], datalist_stress[:0:-1]
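# --- Illustrative note (not part of the original module) -----------------
# csvBiasStressImport() appends the current (initially empty) dict at the
# start of every block and the finished dict at the end of the file, so
# element 0 of each list is always an empty placeholder. The slice [:0:-1]
# walks the list backwards and stops before index 0, i.e. it reverses the
# list while dropping that placeholder:
def _example_reverse_drop_first():
    runs = [{}, {'run': 1}, {'run': 2}, {'run': 3}]
    assert runs[:0:-1] == [{'run': 3}, {'run': 2}, {'run': 1}]
    return runs[:0:-1]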
def dataOutput(filename, path, datalist, format="%.1f\t %e\t %e\t %e\n"):
"""Writes Output to File in Results Folder"""
formatlist = format.split(" ")
if len(formatlist) != len(datalist):
print "FORMAT ERROR"
return
if "results" not in os.listdir(path):
os.mkdir(os.path.join(path, "results"))
with open(os.path.join(path, "results", filename), 'w') as outfile:
for i in range(len(datalist[0])):
for cnum, c in enumerate(datalist):
outfile.write(formatlist[cnum] %c[i])
return
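# --- Illustrative note (not part of the original module) -----------------
# dataOutput() splits the format argument on single spaces and applies one
# format token per column of datalist, so the number of space-separated
# tokens must match the number of columns. One row rendered by hand:
def _example_format_tokens():
    fmt = "%.1f\t %e\t %e\t %e\n"
    tokens = fmt.split(" ")
    assert tokens == ["%.1f\t", "%e\t", "%e\t", "%e\n"]
    row = [1.0, 2e-3, 3e-3, 4e-3]
    line = "".join(tok % val for tok, val in zip(tokens, row))
    return line  # "1.0\t2.000000e-03\t3.000000e-03\t4.000000e-03\n"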
def dataOutputHead(filename, path, datalist, headerlist, format_d="%.1f\t %e\t %e\t %e\n", format_h="%s\n"):
"""Writes Output to File in Results Folder and Includes Header"""
formatlist_d = format_d.split(" ")
formatlist_h = format_h.split(" ")
if len(formatlist_d) != len(datalist):
print "DATA FORMAT ERROR"
return
if len(formatlist_h) != len(headerlist):
print "HEADER FORMAT ERROR"
return
if "results" not in os.listdir(path):
os.mkdir(os.path.join(path, "results"))
with open(os.path.join(path, "results", filename), 'w') as outfile:
for i in range(len(headerlist[0])):
for cnum, c in enumerate(headerlist):
outfile.write(formatlist_h[cnum] %c[i])
outfile.write('\n')
for i in range(len(datalist[0])):
for cnum, c in enumerate(datalist):
outfile.write(formatlist_d[cnum] %c[i])
return
def dataOutputGen(filename, path, datalist):
"""Writes Output to File in Results Folder from 1D or 2D Arrays"""
datalist = np.array(datalist)
if len(datalist.shape) not in (1, 2):
print "1D or 2D data array only"
return
if "results" not in os.listdir(path):
os.mkdir(os.path.join(path, "results"))
with open(os.path.join(path, "results", filename), 'w') as outfile:
for row in datalist:
if len(datalist.shape) == 1:
outfile.write("{:s}\n".format(str(row)))
else:
for col in row:
outfile.write("{:s}, ".format(str(col)))
outfile.write('\n')
return
def quickPlot(filename, path, datalist, xlabel="x", ylabel="y", xrange=["auto", "auto"], yrange=["auto", "auto"], yscale="linear", xscale="linear", col=["r", "b"]):
"""Plots Data to .pdf File in Plots Folder Using matplotlib"""
if "plots" not in os.listdir(path):
os.mkdir(os.path.join(path, "plots"))
coltab = col*10
seaborn.set_context("notebook", rc={"lines.linewidth": 1.0})
formatter = ScalarFormatter(useMathText=True)
formatter.set_scientific(True)
formatter.set_powerlimits((-2, 3))
fig = Figure(figsize=(6, 6))
ax = fig.add_subplot(111)
for i, ydata in enumerate(datalist[1:]):
ax.plot(datalist[0], ydata, c=coltab[i])
ax.set_title(filename)
ax.set_yscale(yscale)
ax.set_xscale(xscale)
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
if xrange[0] != "auto":
ax.set_xlim(xmin=xrange[0])
if xrange[1] != "auto":
ax.set_xlim(xmax=xrange[1])
if yrange[0] != "auto":
ax.set_ylim(ymin=yrange[0])
if yrange[1] != "auto":
ax.set_ylim(ymax=yrange[1])
if yscale == "linear":
ax.yaxis.set_major_formatter(formatter)
ax.xaxis.set_major_formatter(formatter)
canvas = FigureCanvasPdf(fig)
canvas.print_figure(os.path.join(path, "plots", filename+".pdf"))
return
|
|
import os
import pandas as pd
import numpy as np
import plotly.plotly as py
import plotly.tools as plotly_tools
import plotly.graph_objs as go
import plotly.offline as offline
import matplotlib.pyplot as plt
import matplotlib as mpl
def readJson(cur_dir):
rmsd = {}
week_num = ''
protocol = ''
for sub in os.listdir(cur_dir):
if sub.endswith('.txt') or sub.endswith('.tsv'):
continue
li = []
target = cur_dir + '/' + sub
if not week_num:
try:
with open(target + '/visual.txt', 'r') as visual:
line = next(visual).split()
week_num = line[0]
protocol = line[1]
except IOError as e:
print(e)
continue
try:
with open(target + '/rmsd.txt', 'r') as data:
for s in data.readlines():
li.append(float(s[7:]))
except IOError as e:
print(e)
continue
rmsd[sub] = li
return rmsd, week_num, protocol
def box_plot(rmsd, week_num):
x_data = list(rmsd.keys())
y_data = []
for x in x_data:
data = rmsd.get(x)
y_data.append(data)
y_best_rmsd = []
y_first_rmsd = []
for y in y_data:
min_rmsd = min(y)
first_rmsd = y[0]
y_best_rmsd.append(min_rmsd)
y_first_rmsd.append(first_rmsd)
N = len(x_data)
colors = ['hsl('+str(h)+',50%'+',50%)' for h in np.linspace(0, 360, N)]
traces = []
for xd, yd, ybest, yfirst, cls in zip(x_data, y_data, y_best_rmsd, y_first_rmsd, colors):
traces.append(go.Box(
y=yd,
name=xd,
boxpoints='all',
jitter=1,
whiskerwidth=1,
pointpos = -2,
fillcolor=cls,
marker=dict(size=3,),
line=dict(width=1.5),))
traces.append(go.Scatter(
showlegend = False,
legendgroup = 'Best RMSD',
y = ybest,
x = xd,
name = xd + ' Best RMSD',
fillcolor=cls,
marker = dict(size = 15, symbol = 'square-open', ), ))
traces.append(go.Scatter(
showlegend = False,
legendgroup = 'First Pose RMSD',
y = yfirst,
x = xd,
name = xd + ' First Pose RMSD',
fillcolor = cls,
marker = dict(size = 15, symbol = 'star', ),))
layout = go.Layout(title='RMSD for all Targets in Week ' + str(week_num),
yaxis=dict(autorange=True,showgrid=True,zeroline=True,dtick=5,
gridcolor='rgb(255, 255, 255)', gridwidth=1,
zerolinecolor='rgb(255, 255, 255)',zerolinewidth=2,),
margin=dict(l=40,r=30,b=80,t=100,),
paper_bgcolor='rgb(243, 243, 243)',
plot_bgcolor='rgb(243, 243, 243)',
showlegend=False)
fig = go.Figure(data=traces, layout=layout)
return fig
def bar_plot(averages, week_num):
x_data = list(averages.keys())
trace1 = go.Bar(
x = x_data,
y = [x[0] for x in averages.values() if x],
name='Best RMSD'
)
trace2 = go.Bar(
x = x_data,
y = [x[1] for x in averages.values() if x],
name='First Pose RMSD'
)
data = [trace1, trace2]
layout = go.Layout(title='Best and First Pose RMSD for all Targets in Week ' + str(week_num),
barmode='group'
)
fig = go.Figure(data=data, layout=layout)
return fig
def generate_reports():
py.sign_in("juz19", "gns0PM7FQ368i6A8tNOZ")
try:
if not os.path.exists('challengedata'):
os.makedirs('challengedata')
except OSError as e:
print('Failed to create directory: challengedata. ' + str(e))
return
averages = {}
url_by_week = {}
for sub in os.listdir(os.getcwd()+'/challengedata'):
if sub.startswith('celpp_week'):
rmsd, week_num, protocol = readJson(os.path.join('challengedata', sub))
print(week_num)
if not rmsd:
continue
averages[week_num] = stats(rmsd)
url_by_week[week_num] = py.plot(box_plot(rmsd, week_num), filename='Box Plot - '+week_num.split('_')[1][4:], auto_open=False)
html_string = '''
<html>
<head>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.1/css/bootstrap.min.css">
<style>body{ margin:0 100; background:whitesmoke; }</style>
</head>
<body>
<h1>Week '''+week_num.split('_')[1][4:]+''' Visualization of RMSD for Smina</h1>
<!-- *** Section 1 *** --->
<h2>Section 1: RMSDs for All Targets in Week '''+week_num.split('_')[1][4:]+'''</h2>
<iframe width="1000" height="550" frameborder="0" seamless="seamless" scrolling="no" \
src="''' + url_by_week[week_num] + '''.embed?width=800&height=550"></iframe>
</body>
</html>'''
try:
if not os.path.exists('visual'):
os.makedirs('visual')
if not os.path.exists('visual/'+week_num):
os.makedirs('visual/'+week_num)
f = open('visual/'+week_num+'/report.html','w')
f.write(html_string)
f.close()
except (IOError, OSError) as e:
print('Failed to create report.html. ' + str(e))
break
generate_summary(averages, week_num, url_by_week)
def generate_summary(averages, week_num, url_by_week):
summary_url = py.plot(bar_plot(averages, week_num) ,filename='Best and First Plot', auto_open=False)
buttons = ''
for week, url in url_by_week.items():
buttons += '''<button onclick='location.href="'''+ url +'''"'>'''+week.split('_')[1][4:]+'''</button>'''
html_string = '''
<html>
<head>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.1/css/bootstrap.min.css">
<style>body{ margin:0 100; background:whitesmoke; }</style>
</head>
<body>
<h1>Visualization Summary of RMSD for Smina</h1>
<iframe width="1000" height="550" frameborder="0" seamless="seamless" scrolling="no" \
src="''' + summary_url + '''.embed?width=800&height=550"></iframe><br>
'''+buttons+'''
</body>
</html>'''
try:
f = open('summary_report.html','w')
f.write(html_string)
f.close()
except IOError as e:
print('Failed to create summary_report.html. ' + str(e))
def stats(rmsd):
li = [x for x in rmsd.values() if np.inf not in x]
if not li:
return []
ave_first = sum([x[0] for x in li])/len(li)
ave_best = sum([min(x) for x in li])/len(li)
return [round(ave_best, 6), round(ave_first, 6)]
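# --- Illustrative usage sketch (not part of the original script) ---------
# stats() drops any target whose RMSD list contains np.inf, then averages
# the best (minimum) RMSD and the first-pose RMSD over the remaining
# targets, returning [average best, average first]. With hypothetical data:
def _example_stats():
    rmsd = {
        'target_a': [4.0, 2.0, 3.0],
        'target_b': [6.0, 5.0, 1.0],
        'target_c': [np.inf, 2.0],  # skipped: contains inf
    }
    assert stats(rmsd) == [1.5, 5.0]
    return stats(rmsd)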
generate_reports()
|
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 OpenStack Foundation
# Copyright 2012 Nebula, Inc.
# Copyright (c) 2012 X.commerce, a business unit of eBay Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import logging
from django.conf import settings
from django.utils.functional import cached_property # noqa
from django.utils.translation import ugettext_lazy as _
from novaclient import exceptions as nova_exceptions
from novaclient.v1_1 import client as nova_client
from novaclient.v1_1.contrib import instance_action as nova_instance_action
from novaclient.v1_1.contrib import list_extensions as nova_list_extensions
from novaclient.v1_1 import security_group_rules as nova_rules
from novaclient.v1_1 import security_groups as nova_security_groups
from novaclient.v1_1 import servers as nova_servers
from horizon import conf
from horizon.utils import functions as utils
from horizon.utils.memoized import memoized # noqa
from openstack_dashboard.api import base
from openstack_dashboard.api import network_base
LOG = logging.getLogger(__name__)
# API static values
INSTANCE_ACTIVE_STATE = 'ACTIVE'
VOLUME_STATE_AVAILABLE = "available"
DEFAULT_QUOTA_NAME = 'default'
class VNCConsole(base.APIDictWrapper):
"""Wrapper for the "console" dictionary.
Returned by the novaclient.servers.get_vnc_console method.
"""
_attrs = ['url', 'type']
class SPICEConsole(base.APIDictWrapper):
"""Wrapper for the "console" dictionary.
Returned by the novaclient.servers.get_spice_console method.
"""
_attrs = ['url', 'type']
class RDPConsole(base.APIDictWrapper):
"""Wrapper for the "console" dictionary.
Returned by the novaclient.servers.get_rdp_console method.
"""
_attrs = ['url', 'type']
class Server(base.APIResourceWrapper):
"""Simple wrapper around novaclient.server.Server.
Preserves the request info so image name can later be retrieved.
"""
_attrs = ['addresses', 'attrs', 'id', 'image', 'links',
'metadata', 'name', 'private_ip', 'public_ip', 'status', 'uuid',
'image_name', 'VirtualInterfaces', 'flavor', 'key_name', 'fault',
'tenant_id', 'user_id', 'created', 'OS-EXT-STS:power_state',
'OS-EXT-STS:task_state', 'OS-EXT-SRV-ATTR:instance_name',
'OS-EXT-SRV-ATTR:host', 'OS-EXT-AZ:availability_zone',
'OS-DCF:diskConfig']
def __init__(self, apiresource, request):
super(Server, self).__init__(apiresource)
self.request = request
# TODO(gabriel): deprecate making a call to Glance as a fallback.
@property
def image_name(self):
import glanceclient.exc as glance_exceptions # noqa
from openstack_dashboard.api import glance # noqa
if not self.image:
return "-"
if hasattr(self.image, 'name'):
return self.image.name
if 'name' in self.image:
return self.image['name']
else:
try:
image = glance.image_get(self.request, self.image['id'])
return image.name
except glance_exceptions.ClientException:
return "-"
@property
def internal_name(self):
return getattr(self, 'OS-EXT-SRV-ATTR:instance_name', "")
@property
def availability_zone(self):
return getattr(self, 'OS-EXT-AZ:availability_zone', "")
class Hypervisor(base.APIDictWrapper):
"""Simple wrapper around novaclient.hypervisors.Hypervisor."""
_attrs = ['manager', '_loaded', '_info', 'hypervisor_hostname', 'id',
'servers']
@property
def servers(self):
# if hypervisor doesn't have servers, the attribute is not present
servers = []
try:
servers = self._apidict.servers
except Exception:
pass
return servers
class NovaUsage(base.APIResourceWrapper):
"""Simple wrapper around contrib/simple_usage.py."""
_attrs = ['start', 'server_usages', 'stop', 'tenant_id',
'total_local_gb_usage', 'total_memory_mb_usage',
'total_vcpus_usage', 'total_hours']
def get_summary(self):
return {'instances': self.total_active_instances,
'memory_mb': self.memory_mb,
'vcpus': getattr(self, "total_vcpus_usage", 0),
'vcpu_hours': self.vcpu_hours,
'local_gb': self.local_gb,
'disk_gb_hours': self.disk_gb_hours}
@property
def total_active_instances(self):
return sum(1 for s in self.server_usages if s['ended_at'] is None)
@property
def vcpus(self):
return sum(s['vcpus'] for s in self.server_usages
if s['ended_at'] is None)
@property
def vcpu_hours(self):
return getattr(self, "total_hours", 0)
@property
def local_gb(self):
return sum(s['local_gb'] for s in self.server_usages
if s['ended_at'] is None)
@property
def memory_mb(self):
return sum(s['memory_mb'] for s in self.server_usages
if s['ended_at'] is None)
@property
def disk_gb_hours(self):
return getattr(self, "total_local_gb_usage", 0)
class SecurityGroup(base.APIResourceWrapper):
"""Wrapper around novaclient.security_groups.SecurityGroup.
Wraps its rules in SecurityGroupRule objects and allows access to them.
"""
_attrs = ['id', 'name', 'description', 'tenant_id']
@cached_property
def rules(self):
"""Wraps transmitted rule info in the novaclient rule class."""
manager = nova_rules.SecurityGroupRuleManager(None)
rule_objs = [nova_rules.SecurityGroupRule(manager, rule)
for rule in self._apiresource.rules]
return [SecurityGroupRule(rule) for rule in rule_objs]
class SecurityGroupRule(base.APIResourceWrapper):
"""Wrapper for individual rules in a SecurityGroup."""
_attrs = ['id', 'ip_protocol', 'from_port', 'to_port', 'ip_range', 'group']
def __unicode__(self):
if 'name' in self.group:
vals = {'from': self.from_port,
'to': self.to_port,
'group': self.group['name']}
return _('ALLOW %(from)s:%(to)s from %(group)s') % vals
else:
vals = {'from': self.from_port,
'to': self.to_port,
'cidr': self.ip_range['cidr']}
return _('ALLOW %(from)s:%(to)s from %(cidr)s') % vals
# The following attributes are defined to keep compatibility with Neutron
@property
def ethertype(self):
return None
@property
def direction(self):
return 'ingress'
class SecurityGroupManager(network_base.SecurityGroupManager):
backend = 'nova'
def __init__(self, request):
self.request = request
self.client = novaclient(request)
def list(self):
return [SecurityGroup(g) for g
in self.client.security_groups.list()]
def get(self, sg_id):
return SecurityGroup(self.client.security_groups.get(sg_id))
def create(self, name, desc):
return SecurityGroup(self.client.security_groups.create(name, desc))
def update(self, sg_id, name, desc):
return SecurityGroup(self.client.security_groups.update(sg_id,
name, desc))
def delete(self, security_group_id):
self.client.security_groups.delete(security_group_id)
def rule_create(self, parent_group_id,
direction=None, ethertype=None,
ip_protocol=None, from_port=None, to_port=None,
cidr=None, group_id=None):
# Nova Security Group API does not use direction and ethertype fields.
sg = self.client.security_group_rules.create(parent_group_id,
ip_protocol,
from_port,
to_port,
cidr,
group_id)
return SecurityGroupRule(sg)
def rule_delete(self, security_group_rule_id):
self.client.security_group_rules.delete(security_group_rule_id)
def list_by_instance(self, instance_id):
"""Gets security groups of an instance."""
# TODO(gabriel): This needs to be moved up to novaclient, and should
# be removed once novaclient supports this call.
security_groups = []
nclient = self.client
resp, body = nclient.client.get('/servers/%s/os-security-groups'
% instance_id)
if body:
# Wrap data in SG objects as novaclient would.
sg_objs = [
nova_security_groups.SecurityGroup(
nclient.security_groups, sg, loaded=True)
for sg in body.get('security_groups', [])]
# Then wrap novaclient's object with our own. Yes, sadly wrapping
# with two layers of objects is necessary.
security_groups = [SecurityGroup(sg) for sg in sg_objs]
return security_groups
def update_instance_security_group(self, instance_id,
new_security_group_ids):
try:
all_groups = self.list()
except Exception:
raise Exception(_("Couldn't get security group list."))
wanted_groups = set([sg.name for sg in all_groups
if sg.id in new_security_group_ids])
try:
current_groups = self.list_by_instance(instance_id)
except Exception:
raise Exception(_("Couldn't get current security group "
"list for instance %s.")
% instance_id)
current_group_names = set([sg.name for sg in current_groups])
groups_to_add = wanted_groups - current_group_names
groups_to_remove = current_group_names - wanted_groups
num_groups_to_modify = len(groups_to_add | groups_to_remove)
try:
for group in groups_to_add:
self.client.servers.add_security_group(instance_id, group)
num_groups_to_modify -= 1
for group in groups_to_remove:
self.client.servers.remove_security_group(instance_id, group)
num_groups_to_modify -= 1
except nova_exceptions.ClientException as err:
LOG.error(_("Failed to modify %(num_groups_to_modify)d instance "
"security groups: %(err)s") %
dict(num_groups_to_modify=num_groups_to_modify,
err=err))
# reraise novaclient.exceptions.ClientException, but with
# a sanitized error message so we don't risk exposing
# sensitive information to the end user. This has to be
# novaclient.exceptions.ClientException, not just
# Exception, since the former is recognized as a
# "recoverable" exception by horizon, and therefore the
# error message is passed along to the end user, while
# Exception is swallowed alive by horizon and a generic
# error message is given to the end user
raise nova_exceptions.ClientException(
err.code,
_("Failed to modify %d instance security groups") %
num_groups_to_modify)
return True
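# --- Illustrative sketch (not part of the original module) ---------------
# update_instance_security_group() reduces the update to two set
# differences: groups that are wanted but not yet attached are added, and
# groups that are attached but no longer wanted are removed. The same
# logic on plain name strings:
def _example_group_diff(wanted_names, current_names):
    wanted = set(wanted_names)
    current = set(current_names)
    groups_to_add = wanted - current
    groups_to_remove = current - wanted
    return groups_to_add, groups_to_remove
# _example_group_diff(['web', 'ssh'], ['ssh', 'icmp']) -> ({'web'}, {'icmp'})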
class FlavorExtraSpec(object):
def __init__(self, flavor_id, key, val):
self.flavor_id = flavor_id
self.id = key
self.key = key
self.value = val
class FloatingIp(base.APIResourceWrapper):
_attrs = ['id', 'ip', 'fixed_ip', 'port_id', 'instance_id',
'instance_type', 'pool']
def __init__(self, fip):
fip.__setattr__('port_id', fip.instance_id)
fip.__setattr__('instance_type',
'compute' if fip.instance_id else None)
super(FloatingIp, self).__init__(fip)
class FloatingIpPool(base.APIDictWrapper):
def __init__(self, pool):
pool_dict = {'id': pool.name,
'name': pool.name}
super(FloatingIpPool, self).__init__(pool_dict)
class FloatingIpTarget(base.APIDictWrapper):
def __init__(self, server):
server_dict = {'name': '%s (%s)' % (server.name, server.id),
'id': server.id}
super(FloatingIpTarget, self).__init__(server_dict)
class FloatingIpManager(network_base.FloatingIpManager):
def __init__(self, request):
self.request = request
self.client = novaclient(request)
def list_pools(self):
return [FloatingIpPool(pool)
for pool in self.client.floating_ip_pools.list()]
def list(self):
return [FloatingIp(fip)
for fip in self.client.floating_ips.list()]
def get(self, floating_ip_id):
return FloatingIp(self.client.floating_ips.get(floating_ip_id))
def allocate(self, pool):
return FloatingIp(self.client.floating_ips.create(pool=pool))
def release(self, floating_ip_id):
self.client.floating_ips.delete(floating_ip_id)
def associate(self, floating_ip_id, port_id):
# In Nova implied port_id is instance_id
server = self.client.servers.get(port_id)
fip = self.client.floating_ips.get(floating_ip_id)
self.client.servers.add_floating_ip(server.id, fip.ip)
def disassociate(self, floating_ip_id, port_id):
fip = self.client.floating_ips.get(floating_ip_id)
server = self.client.servers.get(fip.instance_id)
self.client.servers.remove_floating_ip(server.id, fip.ip)
def list_targets(self):
return [FloatingIpTarget(s) for s in self.client.servers.list()]
def get_target_id_by_instance(self, instance_id, target_list=None):
return instance_id
def list_target_id_by_instance(self, instance_id, target_list=None):
return [instance_id, ]
def is_simple_associate_supported(self):
return conf.HORIZON_CONFIG["simple_ip_management"]
def is_supported(self):
return True
@memoized
def novaclient(request):
insecure = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
cacert = getattr(settings, 'OPENSTACK_SSL_CACERT', None)
LOG.debug('novaclient connection created using token "%s" and url "%s"' %
(request.user.token.id, base.url_for(request, 'compute')))
c = nova_client.Client(request.user.username,
request.user.token.id,
project_id=request.user.tenant_id,
auth_url=base.url_for(request, 'compute'),
insecure=insecure,
cacert=cacert,
http_log_debug=settings.DEBUG)
c.client.auth_token = request.user.token.id
c.client.management_url = base.url_for(request, 'compute')
return c
def server_vnc_console(request, instance_id, console_type='novnc'):
return VNCConsole(novaclient(request).servers.get_vnc_console(instance_id,
console_type)['console'])
def server_spice_console(request, instance_id, console_type='spice-html5'):
return SPICEConsole(novaclient(request).servers.get_spice_console(
instance_id, console_type)['console'])
def server_rdp_console(request, instance_id, console_type='rdp-html5'):
return RDPConsole(novaclient(request).servers.get_rdp_console(
instance_id, console_type)['console'])
def flavor_create(request, name, memory, vcpu, disk, flavorid='auto',
ephemeral=0, swap=0, metadata=None, is_public=True):
flavor = novaclient(request).flavors.create(name, memory, vcpu, disk,
flavorid=flavorid,
ephemeral=ephemeral,
swap=swap, is_public=is_public)
if metadata:
flavor_extra_set(request, flavor.id, metadata)
return flavor
def flavor_delete(request, flavor_id):
novaclient(request).flavors.delete(flavor_id)
def flavor_get(request, flavor_id):
return novaclient(request).flavors.get(flavor_id)
@memoized
def flavor_list(request, is_public=True):
"""Get the list of available instance sizes (flavors)."""
return novaclient(request).flavors.list(is_public=is_public)
@memoized
def flavor_access_list(request, flavor=None):
"""Get the list of access instance sizes (flavors)."""
return novaclient(request).flavor_access.list(flavor=flavor)
def add_tenant_to_flavor(request, flavor, tenant):
"""Add a tenant to the given flavor access list."""
return novaclient(request).flavor_access.add_tenant_access(
flavor=flavor, tenant=tenant)
def remove_tenant_from_flavor(request, flavor, tenant):
"""Remove a tenant from the given flavor access list."""
return novaclient(request).flavor_access.remove_tenant_access(
flavor=flavor, tenant=tenant)
def flavor_get_extras(request, flavor_id, raw=False):
"""Get flavor extra specs."""
flavor = novaclient(request).flavors.get(flavor_id)
extras = flavor.get_keys()
if raw:
return extras
return [FlavorExtraSpec(flavor_id, key, value) for
key, value in extras.items()]
def flavor_extra_delete(request, flavor_id, keys):
"""Unset the flavor extra spec keys."""
flavor = novaclient(request).flavors.get(flavor_id)
return flavor.unset_keys(keys)
def flavor_extra_set(request, flavor_id, metadata):
"""Set the flavor extra spec keys."""
flavor = novaclient(request).flavors.get(flavor_id)
if not metadata:  # an empty dict is not a way to delete keys; use flavor_extra_delete
return None
return flavor.set_keys(metadata)
def snapshot_create(request, instance_id, name):
return novaclient(request).servers.create_image(instance_id, name)
def keypair_create(request, name):
return novaclient(request).keypairs.create(name)
def keypair_import(request, name, public_key):
return novaclient(request).keypairs.create(name, public_key)
def keypair_delete(request, keypair_id):
novaclient(request).keypairs.delete(keypair_id)
def keypair_list(request):
return novaclient(request).keypairs.list()
def server_create(request, name, image, flavor, key_name, user_data,
security_groups, block_device_mapping=None,
block_device_mapping_v2=None, nics=None,
availability_zone=None, instance_count=1, admin_pass=None,
disk_config=None, config_drive=None, meta=None):
return Server(novaclient(request).servers.create(
name, image, flavor, userdata=user_data,
security_groups=security_groups,
key_name=key_name, block_device_mapping=block_device_mapping,
block_device_mapping_v2=block_device_mapping_v2,
nics=nics, availability_zone=availability_zone,
min_count=instance_count, admin_pass=admin_pass,
disk_config=disk_config, config_drive=config_drive,
meta=meta), request)
def server_delete(request, instance):
novaclient(request).servers.delete(instance)
def server_get(request, instance_id):
return Server(novaclient(request).servers.get(instance_id), request)
def server_list(request, search_opts=None, all_tenants=False):
page_size = utils.get_page_size(request)
c = novaclient(request)
paginate = False
if search_opts is None:
search_opts = {}
elif 'paginate' in search_opts:
paginate = search_opts.pop('paginate')
if paginate:
search_opts['limit'] = page_size + 1
if all_tenants:
search_opts['all_tenants'] = True
else:
search_opts['project_id'] = request.user.tenant_id
servers = [Server(s, request)
for s in c.servers.list(True, search_opts)]
has_more_data = False
if paginate and len(servers) > page_size:
servers.pop(-1)
has_more_data = True
elif paginate and len(servers) == getattr(settings, 'API_RESULT_LIMIT',
1000):
has_more_data = True
return (servers, has_more_data)
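# --- Illustrative sketch (not part of the original module) ---------------
# server_list() detects whether another page exists by requesting
# page_size + 1 rows: if the extra row comes back it is popped off and
# has_more_data is set. The same idea on a plain list:
def _example_paginate(items, page_size):
    window = items[:page_size + 1]  # what the API call would return
    has_more = len(window) > page_size
    if has_more:
        window.pop(-1)  # drop the sentinel row
    return window, has_more
# _example_paginate(list(range(25)), 10) -> (first 10 items, True)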
def server_console_output(request, instance_id, tail_length=None):
"""Gets console output of an instance."""
return novaclient(request).servers.get_console_output(instance_id,
length=tail_length)
def server_pause(request, instance_id):
novaclient(request).servers.pause(instance_id)
def server_unpause(request, instance_id):
novaclient(request).servers.unpause(instance_id)
def server_suspend(request, instance_id):
novaclient(request).servers.suspend(instance_id)
def server_resume(request, instance_id):
novaclient(request).servers.resume(instance_id)
def server_reboot(request, instance_id, soft_reboot=False):
hardness = nova_servers.REBOOT_HARD
if soft_reboot:
hardness = nova_servers.REBOOT_SOFT
novaclient(request).servers.reboot(instance_id, hardness)
def server_rebuild(request, instance_id, image_id, password=None,
disk_config=None):
return novaclient(request).servers.rebuild(instance_id, image_id,
password, disk_config)
def server_update(request, instance_id, name):
return novaclient(request).servers.update(instance_id, name=name)
def server_migrate(request, instance_id):
novaclient(request).servers.migrate(instance_id)
def server_live_migrate(request, instance_id, host, block_migration=False,
disk_over_commit=False):
novaclient(request).servers.live_migrate(instance_id, host,
block_migration,
disk_over_commit)
def server_resize(request, instance_id, flavor, disk_config=None, **kwargs):
novaclient(request).servers.resize(instance_id, flavor,
disk_config, **kwargs)
def server_confirm_resize(request, instance_id):
novaclient(request).servers.confirm_resize(instance_id)
def server_revert_resize(request, instance_id):
novaclient(request).servers.revert_resize(instance_id)
def server_start(request, instance_id):
novaclient(request).servers.start(instance_id)
def server_stop(request, instance_id):
novaclient(request).servers.stop(instance_id)
def tenant_quota_get(request, tenant_id):
return base.QuotaSet(novaclient(request).quotas.get(tenant_id))
def tenant_quota_update(request, tenant_id, **kwargs):
novaclient(request).quotas.update(tenant_id, **kwargs)
def default_quota_get(request, tenant_id):
return base.QuotaSet(novaclient(request).quotas.defaults(tenant_id))
def default_quota_update(request, **kwargs):
novaclient(request).quota_classes.update(DEFAULT_QUOTA_NAME, **kwargs)
def usage_get(request, tenant_id, start, end):
return NovaUsage(novaclient(request).usage.get(tenant_id, start, end))
def usage_list(request, start, end):
return [NovaUsage(u) for u in
novaclient(request).usage.list(start, end, True)]
def virtual_interfaces_list(request, instance_id):
return novaclient(request).virtual_interfaces.list(instance_id)
def get_x509_credentials(request):
return novaclient(request).certs.create()
def get_x509_root_certificate(request):
return novaclient(request).certs.get()
def get_password(request, instance_id, private_key=None):
return novaclient(request).servers.get_password(instance_id, private_key)
def instance_volume_attach(request, volume_id, instance_id, device):
return novaclient(request).volumes.create_server_volume(instance_id,
volume_id,
device)
def instance_volume_detach(request, instance_id, att_id):
return novaclient(request).volumes.delete_server_volume(instance_id,
att_id)
def instance_volumes_list(request, instance_id):
from openstack_dashboard.api import cinder
volumes = novaclient(request).volumes.get_server_volumes(instance_id)
for volume in volumes:
volume_data = cinder.cinderclient(request).volumes.get(volume.id)
volume.name = cinder.Volume(volume_data).name
return volumes
def hypervisor_list(request):
return novaclient(request).hypervisors.list()
def hypervisor_stats(request):
return novaclient(request).hypervisors.statistics()
def hypervisor_search(request, query, servers=True):
return novaclient(request).hypervisors.search(query, servers)
def evacuate_host(request, host, target=None, on_shared_storage=False):
# TODO(jmolle): This should be changed to use the nova atomic host_evacuate API.
hypervisors = novaclient(request).hypervisors.search(host, True)
response = []
err_code = None
for hypervisor in hypervisors:
hyper = Hypervisor(hypervisor)
# if hypervisor doesn't have servers, the attribute is not present
for server in hyper.servers:
try:
novaclient(request).servers.evacuate(server['uuid'],
target,
on_shared_storage)
except nova_exceptions.ClientException as err:
err_code = err.code
msg = _("Name: %(name)s ID: %(uuid)s")
msg = msg % {'name': server['name'], 'uuid': server['uuid']}
response.append(msg)
if err_code:
msg = _('Failed to evacuate instances: %s') % ', '.join(response)
raise nova_exceptions.ClientException(err_code, msg)
return True
def tenant_absolute_limits(request, reserved=False):
limits = novaclient(request).limits.get(reserved=reserved).absolute
limits_dict = {}
for limit in limits:
if limit.value < 0:
# Workaround for nova bug 1370867 that absolute_limits
# returns negative value for total.*Used instead of 0.
# For such case, replace negative values with 0.
if limit.name.startswith('total') and limit.name.endswith('Used'):
limits_dict[limit.name] = 0
else:
# -1 is used to represent unlimited quotas
limits_dict[limit.name] = float("inf")
else:
limits_dict[limit.name] = limit.value
return limits_dict
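# --- Illustrative sketch (not part of the original module) ---------------
# tenant_absolute_limits() maps nova's negative sentinel values onto
# something the dashboard can display: a negative total*Used counter is
# treated as 0 (nova bug 1370867), while any other negative limit means
# "unlimited" and becomes float('inf'). The same normalization on a plain
# dict of hypothetical limit names:
def _example_normalize_limits(raw_limits):
    normalized = {}
    for name, value in raw_limits.items():
        if value < 0:
            if name.startswith('total') and name.endswith('Used'):
                normalized[name] = 0
            else:
                normalized[name] = float('inf')
        else:
            normalized[name] = value
    return normalized
# _example_normalize_limits({'maxTotalInstances': -1, 'totalCoresUsed': -3})
#   -> {'maxTotalInstances': inf, 'totalCoresUsed': 0}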
def availability_zone_list(request, detailed=False):
return novaclient(request).availability_zones.list(detailed=detailed)
def service_list(request, binary=None):
return novaclient(request).services.list(binary=binary)
def aggregate_details_list(request):
result = []
c = novaclient(request)
for aggregate in c.aggregates.list():
result.append(c.aggregates.get_details(aggregate.id))
return result
def aggregate_create(request, name, availability_zone=None):
return novaclient(request).aggregates.create(name, availability_zone)
def aggregate_delete(request, aggregate_id):
return novaclient(request).aggregates.delete(aggregate_id)
def aggregate_get(request, aggregate_id):
return novaclient(request).aggregates.get(aggregate_id)
def aggregate_update(request, aggregate_id, values):
return novaclient(request).aggregates.update(aggregate_id, values)
def aggregate_set_metadata(request, aggregate_id, metadata):
return novaclient(request).aggregates.set_metadata(aggregate_id, metadata)
def host_list(request):
return novaclient(request).hosts.list()
def add_host_to_aggregate(request, aggregate_id, host):
return novaclient(request).aggregates.add_host(aggregate_id, host)
def remove_host_from_aggregate(request, aggregate_id, host):
return novaclient(request).aggregates.remove_host(aggregate_id, host)
@memoized
def list_extensions(request):
return nova_list_extensions.ListExtManager(novaclient(request)).show_all()
@memoized
def extension_supported(extension_name, request):
"""Determine if nova supports a given extension name.
Example values for the extension_name include AdminActions, ConsoleOutput,
etc.
"""
extensions = list_extensions(request)
for extension in extensions:
if extension.name == extension_name:
return True
return False
def can_set_server_password():
features = getattr(settings, 'OPENSTACK_HYPERVISOR_FEATURES', {})
return features.get('can_set_password', False)
def instance_action_list(request, instance_id):
return nova_instance_action.InstanceActionManager(
novaclient(request)).list(instance_id)
|
|
###############################################################################
##
## Copyright (C) 2014-2015, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
from __future__ import division
import copy
import hashlib
from auto_gen import DBVistrail as _DBVistrail
from auto_gen import DBAdd, DBChange, DBDelete, DBAbstraction, DBGroup, \
DBModule
from id_scope import IdScope
class DBVistrail(_DBVistrail):
def __init__(self, *args, **kwargs):
_DBVistrail.__init__(self, *args, **kwargs)
self.idScope = IdScope(remap={DBAdd.vtType: 'operation',
DBChange.vtType: 'operation',
DBDelete.vtType: 'operation',
DBAbstraction.vtType: DBModule.vtType,
DBGroup.vtType: DBModule.vtType})
self.idScope.setBeginId('action', 1)
self.db_objects = {}
# keep a reference to the current logging information here
self.db_log_filename = None
self.log = None
def __copy__(self):
return DBVistrail.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = _DBVistrail.do_copy(self, new_ids, id_scope, id_remap)
cp.__class__ = DBVistrail
cp.idScope = copy.copy(self.idScope)
cp.db_objects = copy.copy(self.db_objects)
cp.db_log_filename = self.db_log_filename
if self.log is not None:
cp.log = copy.copy(self.log)
else:
cp.log = None
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBVistrail()
new_obj = _DBVistrail.update_version(old_obj, trans_dict, new_obj)
new_obj.update_id_scope()
if hasattr(old_obj, 'db_log_filename'):
new_obj.db_log_filename = old_obj.db_log_filename
if hasattr(old_obj, 'log'):
new_obj.log = old_obj.log
return new_obj
def update_id_scope(self):
def getOldObjId(operation):
if operation.vtType == 'change':
return operation.db_oldObjId
return operation.db_objectId
def getNewObjId(operation):
if operation.vtType == 'change':
return operation.db_newObjId
return operation.db_objectId
for action in self.db_actions:
self.idScope.updateBeginId('action', action.db_id+1)
if action.db_session is not None:
self.idScope.updateBeginId('session', action.db_session + 1)
for operation in action.db_operations:
self.idScope.updateBeginId('operation', operation.db_id+1)
if operation.vtType == 'add' or operation.vtType == 'change':
# update ids of data
self.idScope.updateBeginId(operation.db_what,
getNewObjId(operation)+1)
if operation.db_data is None:
if operation.vtType == 'change':
operation.db_objectId = operation.db_oldObjId
self.db_add_object(operation.db_data)
for annotation in action.db_annotations:
self.idScope.updateBeginId('annotation', annotation.db_id+1)
def db_add_object(self, obj):
self.db_objects[(obj.vtType, obj.db_id)] = obj
def db_get_object(self, type, id):
return self.db_objects.get((type, id), None)
def db_update_object(self, obj, **kwargs):
# want to swap out old object with a new version
# need this for updating aliases...
# hack it using setattr...
real_obj = self.db_objects[(obj.vtType, obj.db_id)]
for (k, v) in kwargs.iteritems():
if hasattr(real_obj, k):
setattr(real_obj, k, v)
def update_checkout_version(self, app=''):
checkout_key = "__checkout_version_"
action_key = checkout_key + app
tag_key = action_key + '_taghash'
annotation_key = action_key + '_annotationhash'
action_annotation_key = action_key + '_actionannotationhash'
# delete previous checkout annotations
deletekeys = [action_key,tag_key,annotation_key,action_annotation_key]
for key in deletekeys:
while self.db_has_annotation_with_key(key):
a = self.db_get_annotation_by_key(key)
self.db_delete_annotation(a)
# annotation hash - requires annotations to be clean
value = self.hashAnnotations()
self.set_annotation(annotation_key, value)
# action annotation hash
value = self.hashActionAnnotations()
self.set_annotation(action_annotation_key, value)
# last action id hash
if len(self.db_actions) == 0:
value = 0
else:
value = max(v.db_id for v in self.db_actions)
self.set_annotation(action_key, value)
# tag hash
self.set_annotation(tag_key, self.hashTags())
def hashTags(self):
tagKeys = [tag.db_id for tag in self.db_tags]
tagKeys.sort()
m = hashlib.md5()
for k in tagKeys:
m.update(str(k))
m.update(self.db_get_tag_by_id(k).db_name)
return m.hexdigest()
def hashAnnotations(self):
annotations = {}
for annotation in self.db_annotations:
if annotation._db_key not in annotations:
annotations[annotation._db_key] = []
if annotation._db_value not in annotations[annotation._db_key]:
annotations[annotation._db_key].append(annotation._db_value)
keys = annotations.keys()
keys.sort()
m = hashlib.md5()
for k in keys:
m.update(k)
annotations[k].sort()
for v in annotations[k]:
m.update(v)
return m.hexdigest()
def hashActionAnnotations(self):
action_annotations = {}
for id, annotations in [[action.db_id, action.db_annotations] for action in self.db_actions]:
for annotation in annotations:
index = (str(id), annotation.db_key)
if index not in action_annotations:
action_annotations[index] = []
if annotation.db_value not in action_annotations[index]:
action_annotations[index].append(annotation.db_value)
keys = action_annotations.keys()
keys.sort()
m = hashlib.md5()
for k in keys:
m.update(k[0] + k[1])
action_annotations[k].sort()
for v in action_annotations[k]:
m.update(v)
return m.hexdigest()
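# --- Illustrative sketch (not part of the original class) ----------------
# hashTags/hashAnnotations/hashActionAnnotations build a deterministic
# digest by sorting keys and values before feeding them to md5, so the
# result does not depend on the order in which annotations are stored. A
# standalone version of that idea:
def _example_stable_hash(annotations):
    """annotations: dict mapping key -> list of values (order irrelevant)."""
    import hashlib
    m = hashlib.md5()
    for key in sorted(annotations):
        m.update(key.encode('utf-8'))
        for value in sorted(annotations[key]):
            m.update(value.encode('utf-8'))
    return m.hexdigest()
# _example_stable_hash({'b': ['2', '1'], 'a': ['x']}) equals
# _example_stable_hash({'a': ['x'], 'b': ['1', '2']})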
|
|
#!/usr/bin/env python
#
# Copyright 2013, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
import logging
import time
import unittest
from vtproto import topodata_pb2
from vtdb import keyrange
from vtdb import keyrange_constants
from vtdb import vtgate_client
import environment
import tablet
import utils
# source keyspace, with 4 tables
source_master = tablet.Tablet()
source_replica = tablet.Tablet()
source_rdonly1 = tablet.Tablet()
source_rdonly2 = tablet.Tablet()
# destination keyspace, with just two tables
destination_master = tablet.Tablet()
destination_replica = tablet.Tablet()
destination_rdonly1 = tablet.Tablet()
destination_rdonly2 = tablet.Tablet()
def setUpModule():
try:
environment.topo_server().setup()
setup_procs = [
source_master.init_mysql(),
source_replica.init_mysql(),
source_rdonly1.init_mysql(),
source_rdonly2.init_mysql(),
destination_master.init_mysql(),
destination_replica.init_mysql(),
destination_rdonly1.init_mysql(),
destination_rdonly2.init_mysql(),
]
utils.Vtctld().start()
utils.wait_procs(setup_procs)
except:
tearDownModule()
raise
def tearDownModule():
utils.required_teardown()
if utils.options.skip_teardown:
return
if utils.vtgate:
utils.vtgate.kill()
teardown_procs = [
source_master.teardown_mysql(),
source_replica.teardown_mysql(),
source_rdonly1.teardown_mysql(),
source_rdonly2.teardown_mysql(),
destination_master.teardown_mysql(),
destination_replica.teardown_mysql(),
destination_rdonly1.teardown_mysql(),
destination_rdonly2.teardown_mysql(),
]
utils.wait_procs(teardown_procs, raise_on_error=False)
environment.topo_server().teardown()
utils.kill_sub_processes()
utils.remove_tmp_files()
source_master.remove_tree()
source_replica.remove_tree()
source_rdonly1.remove_tree()
source_rdonly2.remove_tree()
destination_master.remove_tree()
destination_replica.remove_tree()
destination_rdonly1.remove_tree()
destination_rdonly2.remove_tree()
class TestVerticalSplit(unittest.TestCase):
def setUp(self):
self.insert_index = 0
self._init_keyspaces_and_tablets()
utils.VtGate().start(cache_ttl='0s')
# create the schema on the source keyspace, add some values
self._create_source_schema()
self._insert_initial_values()
def tearDown(self):
# kill everything
tablet.kill_tablets([source_master, source_replica, source_rdonly1,
source_rdonly2, destination_master,
destination_replica, destination_rdonly1,
destination_rdonly2])
utils.vtgate.kill()
def _init_keyspaces_and_tablets(self):
utils.run_vtctl(['CreateKeyspace', 'source_keyspace'])
utils.run_vtctl(
['CreateKeyspace', '--served_from',
'master:source_keyspace,replica:source_keyspace,rdonly:'
'source_keyspace',
'destination_keyspace'])
source_master.start_vttablet(
wait_for_state=None, target_tablet_type='replica',
init_keyspace='source_keyspace', init_shard='0')
source_replica.start_vttablet(
wait_for_state=None, target_tablet_type='replica',
init_keyspace='source_keyspace', init_shard='0')
source_rdonly1.start_vttablet(
wait_for_state=None, target_tablet_type='rdonly',
init_keyspace='source_keyspace', init_shard='0')
source_rdonly2.start_vttablet(
wait_for_state=None, target_tablet_type='rdonly',
init_keyspace='source_keyspace', init_shard='0')
destination_master.start_vttablet(
wait_for_state=None, target_tablet_type='replica',
init_keyspace='destination_keyspace', init_shard='0')
destination_replica.start_vttablet(
wait_for_state=None, target_tablet_type='replica',
init_keyspace='destination_keyspace', init_shard='0')
destination_rdonly1.start_vttablet(
wait_for_state=None, target_tablet_type='rdonly',
init_keyspace='destination_keyspace', init_shard='0')
destination_rdonly2.start_vttablet(
wait_for_state=None, target_tablet_type='rdonly',
init_keyspace='destination_keyspace', init_shard='0')
# wait for the tablets
all_setup_tablets = [
source_master, source_replica, source_rdonly1, source_rdonly2,
destination_master, destination_replica, destination_rdonly1,
destination_rdonly2]
for t in all_setup_tablets:
t.wait_for_vttablet_state('NOT_SERVING')
# check SrvKeyspace
self._check_srv_keyspace('ServedFrom(master): source_keyspace\n'
'ServedFrom(rdonly): source_keyspace\n'
'ServedFrom(replica): source_keyspace\n')
# reparent to make the tablets work
utils.run_vtctl(['InitShardMaster', '-force', 'source_keyspace/0',
source_master.tablet_alias], auto_log=True)
utils.run_vtctl(['InitShardMaster', '-force', 'destination_keyspace/0',
destination_master.tablet_alias], auto_log=True)
for t in all_setup_tablets:
t.wait_for_vttablet_state('SERVING')
def _create_source_schema(self):
create_table_template = '''create table %s(
id bigint not null,
msg varchar(64),
primary key (id),
index by_msg (msg)
) Engine=InnoDB'''
create_view_template = 'create view %s(id, msg) as select id, msg from %s'
for t in ['moving1', 'moving2', 'staying1', 'staying2']:
utils.run_vtctl(['ApplySchema',
'-sql=' + create_table_template % (t),
'source_keyspace'],
auto_log=True)
utils.run_vtctl(['ApplySchema',
'-sql=' + create_view_template % ('view1', 'moving1'),
'source_keyspace'],
auto_log=True)
for t in [source_master, source_replica, source_rdonly1, source_rdonly2]:
utils.run_vtctl(['ReloadSchema', t.tablet_alias])
# Add a table to the destination keyspace which should be ignored.
utils.run_vtctl(['ApplySchema',
'-sql=' + create_table_template % 'extra1',
'destination_keyspace'],
auto_log=True)
for t in [destination_master, destination_replica,
destination_rdonly1, destination_rdonly2]:
utils.run_vtctl(['ReloadSchema', t.tablet_alias])
def _insert_initial_values(self):
self.moving1_first = self._insert_values('moving1', 100)
self.moving2_first = self._insert_values('moving2', 100)
staying1_first = self._insert_values('staying1', 100)
staying2_first = self._insert_values('staying2', 100)
self._check_values(source_master, 'vt_source_keyspace', 'moving1',
self.moving1_first, 100)
self._check_values(source_master, 'vt_source_keyspace', 'moving2',
self.moving2_first, 100)
self._check_values(source_master, 'vt_source_keyspace', 'staying1',
staying1_first, 100)
self._check_values(source_master, 'vt_source_keyspace', 'staying2',
staying2_first, 100)
self._check_values(source_master, 'vt_source_keyspace', 'view1',
self.moving1_first, 100)
# Insert data directly because vtgate would redirect us.
destination_master.mquery(
'vt_destination_keyspace',
"insert into %s (id, msg) values(%d, 'value %d')" % ('extra1', 1, 1),
write=True)
self._check_values(destination_master, 'vt_destination_keyspace', 'extra1',
1, 1)
def _vtdb_conn(self):
protocol, addr = utils.vtgate.rpc_endpoint(python=True)
return vtgate_client.connect(protocol, addr, 30.0)
# insert some values in the source master db, return the first id used
def _insert_values(self, table, count):
result = self.insert_index
conn = self._vtdb_conn()
cursor = conn.cursor(
tablet_type='master', keyspace='source_keyspace',
keyranges=[keyrange.KeyRange(keyrange_constants.NON_PARTIAL_KEYRANGE)],
writable=True)
for _ in xrange(count):
conn.begin()
cursor.execute("insert into %s (id, msg) values(%d, 'value %d')" % (
table, self.insert_index, self.insert_index), {})
conn.commit()
self.insert_index += 1
conn.close()
return result
def _check_values(self, t, dbname, table, first, count):
logging.debug(
'Checking %d values from %s/%s starting at %d', count, dbname,
table, first)
rows = t.mquery(
dbname, 'select id, msg from %s where id>=%d order by id limit %d' %
(table, first, count))
self.assertEqual(count, len(rows), 'got wrong number of rows: %d != %d' %
(len(rows), count))
for i in xrange(count):
self.assertEqual(first + i, rows[i][0], 'invalid id[%d]: %d != %d' %
(i, first + i, rows[i][0]))
self.assertEqual('value %d' % (first + i), rows[i][1],
"invalid msg[%d]: 'value %d' != '%s'" %
(i, first + i, rows[i][1]))
def _check_values_timeout(self, t, dbname, table, first, count,
timeout=30):
while True:
try:
self._check_values(t, dbname, table, first, count)
return
except Exception: # pylint: disable=broad-except
timeout -= 1
if timeout == 0:
raise
logging.debug('Sleeping for 1s waiting for data in %s/%s', dbname,
table)
time.sleep(1)
def _check_srv_keyspace(self, expected):
cell = 'test_nj'
keyspace = 'destination_keyspace'
ks = utils.run_vtctl_json(['GetSrvKeyspace', cell, keyspace])
result = ''
if 'served_from' in ks and ks['served_from']:
a = []
for served_from in sorted(ks['served_from']):
tt = topodata_pb2.TabletType.Name(served_from['tablet_type']).lower()
if tt == 'batch':
tt = 'rdonly'
a.append('ServedFrom(%s): %s\n' % (tt, served_from['keyspace']))
for line in sorted(a):
result += line
logging.debug('Cell %s keyspace %s has data:\n%s', cell, keyspace, result)
self.assertEqual(
expected, result,
'Mismatch in srv keyspace for cell %s keyspace %s, expected:\n'
'%s\ngot:\n%s' % (
cell, keyspace, expected, result))
self.assertNotIn('sharding_column_name', ks,
'Got a sharding_column_name in SrvKeyspace: %s' %
str(ks))
self.assertNotIn('sharding_column_type', ks,
'Got a sharding_column_type in SrvKeyspace: %s' %
str(ks))
def _check_blacklisted_tables(self, t, expected):
status = t.get_status()
if expected:
self.assertIn('BlacklistedTables: %s' % ' '.join(expected), status)
else:
self.assertNotIn('BlacklistedTables', status)
    # check whether we can or cannot access the tables
for table in ['moving1', 'moving2']:
if expected and 'moving.*' in expected:
# table is blacklisted, should get the error
_, stderr = utils.run_vtctl(['VtTabletExecute', '-json',
'-keyspace', t.keyspace,
'-shard', t.shard,
t.tablet_alias,
'select count(1) from %s' % table],
expect_fail=True)
self.assertIn(
'retry: Query disallowed due to rule: enforce blacklisted tables',
stderr)
else:
# table is not blacklisted, should just work
qr = t.execute('select count(1) from %s' % table)
logging.debug('Got %s rows from table %s on tablet %s',
qr['rows'][0][0], table, t.tablet_alias)
def _check_client_conn_redirection(
self, destination_ks, servedfrom_db_types,
moved_tables=None):
# check that the ServedFrom indirection worked correctly.
if moved_tables is None:
moved_tables = []
conn = self._vtdb_conn()
for db_type in servedfrom_db_types:
for tbl in moved_tables:
try:
rows = conn._execute(
'select * from %s' % tbl, {}, tablet_type=db_type,
keyspace_name=destination_ks,
keyranges=[keyrange.KeyRange(
keyrange_constants.NON_PARTIAL_KEYRANGE)])
logging.debug(
'Select on %s.%s returned %d rows', db_type, tbl, len(rows))
except Exception, e: # pylint: disable=broad-except
self.fail('Execute failed w/ exception %s' % str(e))
def _check_stats(self):
v = utils.vtgate.get_vars()
self.assertEqual(
v['VttabletCall']['Histograms']['Execute.source_keyspace.0.replica'][
'Count'],
2
, 'unexpected value for VttabletCall('
'Execute.source_keyspace.0.replica) inside %s' % str(v))
# Verify master reads done by self._check_client_conn_redirection().
self.assertEqual(
v['VtgateApi']['Histograms'][
'ExecuteKeyRanges.destination_keyspace.master']['Count'],
6,
'unexpected value for VtgateApi('
'ExecuteKeyRanges.destination_keyspace.master) inside %s' % str(v))
self.assertEqual(
len(v['VtgateApiErrorCounts']), 0,
'unexpected errors for VtgateApiErrorCounts inside %s' % str(v))
def test_vertical_split(self):
# the worker will do everything. We test with source_reader_count=10
# (down from default=20) as connection pool is not big enough for 20.
    # min_table_size_for_split is set to 1 to force a split even on the
# small table we have.
utils.run_vtctl(['CopySchemaShard', '--tables', 'moving.*,view1',
source_rdonly1.tablet_alias, 'destination_keyspace/0'],
auto_log=True)
utils.run_vtworker(['--cell', 'test_nj',
'--command_display_interval', '10ms',
'VerticalSplitClone',
'--tables', 'moving.*,view1',
'--source_reader_count', '10',
'--min_table_size_for_split', '1',
'destination_keyspace/0'],
auto_log=True)
# One of the two source rdonly tablets went spare after the clone.
# Force a healthcheck on both to get them back to "rdonly".
for t in [source_rdonly1, source_rdonly2]:
utils.run_vtctl(['RunHealthCheck', t.tablet_alias, 'rdonly'])
# check values are present
self._check_values(destination_master, 'vt_destination_keyspace', 'moving1',
self.moving1_first, 100)
self._check_values(destination_master, 'vt_destination_keyspace', 'moving2',
self.moving2_first, 100)
self._check_values(destination_master, 'vt_destination_keyspace', 'view1',
self.moving1_first, 100)
    # check the binlog player is running
destination_master.wait_for_binlog_player_count(1)
# add values to source, make sure they're replicated
moving1_first_add1 = self._insert_values('moving1', 100)
_ = self._insert_values('staying1', 100)
moving2_first_add1 = self._insert_values('moving2', 100)
self._check_values_timeout(destination_master, 'vt_destination_keyspace',
'moving1', moving1_first_add1, 100)
self._check_values_timeout(destination_master, 'vt_destination_keyspace',
'moving2', moving2_first_add1, 100)
# use vtworker to compare the data
for t in [destination_rdonly1, destination_rdonly2]:
utils.run_vtctl(['RunHealthCheck', t.tablet_alias, 'rdonly'])
logging.debug('Running vtworker VerticalSplitDiff')
utils.run_vtworker(['-cell', 'test_nj', 'VerticalSplitDiff',
'destination_keyspace/0'], auto_log=True)
# One of each source and dest rdonly tablet went spare after the diff.
# Force a healthcheck on all four to get them back to "rdonly".
for t in [source_rdonly1, source_rdonly2,
destination_rdonly1, destination_rdonly2]:
utils.run_vtctl(['RunHealthCheck', t.tablet_alias, 'rdonly'])
utils.pause('Good time to test vtworker for diffs')
# get status for destination master tablet, make sure we have it all
destination_master_status = destination_master.get_status()
self.assertIn('Binlog player state: Running', destination_master_status)
self.assertIn('moving.*', destination_master_status)
self.assertIn(
'<td><b>All</b>: 1000<br><b>Query</b>: 700<br>'
'<b>Transaction</b>: 300<br></td>', destination_master_status)
self.assertIn('</html>', destination_master_status)
# check query service is off on destination master, as filtered
# replication is enabled. Even health check should not interfere.
destination_master_vars = utils.get_vars(destination_master.port)
self.assertEqual(destination_master_vars['TabletStateName'], 'NOT_SERVING')
# check we can't migrate the master just yet
utils.run_vtctl(['MigrateServedFrom', 'destination_keyspace/0', 'master'],
expect_fail=True)
# migrate rdonly only in test_ny cell, make sure nothing is migrated
# in test_nj
utils.run_vtctl(['MigrateServedFrom', '--cells=test_ny',
'destination_keyspace/0', 'rdonly'],
auto_log=True)
self._check_srv_keyspace('ServedFrom(master): source_keyspace\n'
'ServedFrom(rdonly): source_keyspace\n'
'ServedFrom(replica): source_keyspace\n')
self._check_blacklisted_tables(source_master, None)
self._check_blacklisted_tables(source_replica, None)
self._check_blacklisted_tables(source_rdonly1, None)
self._check_blacklisted_tables(source_rdonly2, None)
# migrate test_nj only, using command line manual fix command,
# and restore it back.
keyspace_json = utils.run_vtctl_json(
['GetKeyspace', 'destination_keyspace'])
found = False
for ksf in keyspace_json['served_froms']:
if ksf['tablet_type'] == topodata_pb2.RDONLY:
found = True
self.assertEqual(ksf['cells'], ['test_nj'])
self.assertTrue(found)
utils.run_vtctl(['SetKeyspaceServedFrom', '-source=source_keyspace',
'-remove', '-cells=test_nj', 'destination_keyspace',
'rdonly'], auto_log=True)
keyspace_json = utils.run_vtctl_json(
['GetKeyspace', 'destination_keyspace'])
found = False
for ksf in keyspace_json['served_froms']:
if ksf['tablet_type'] == topodata_pb2.RDONLY:
found = True
self.assertFalse(found)
utils.run_vtctl(['SetKeyspaceServedFrom', '-source=source_keyspace',
'destination_keyspace', 'rdonly'],
auto_log=True)
keyspace_json = utils.run_vtctl_json(
['GetKeyspace', 'destination_keyspace'])
found = False
for ksf in keyspace_json['served_froms']:
if ksf['tablet_type'] == topodata_pb2.RDONLY:
found = True
self.assertNotIn('cells', ksf)
self.assertTrue(found)
# now serve rdonly from the destination shards
utils.run_vtctl(['MigrateServedFrom', 'destination_keyspace/0', 'rdonly'],
auto_log=True)
self._check_srv_keyspace('ServedFrom(master): source_keyspace\n'
'ServedFrom(replica): source_keyspace\n')
self._check_blacklisted_tables(source_master, None)
self._check_blacklisted_tables(source_replica, None)
self._check_blacklisted_tables(source_rdonly1, ['moving.*', 'view1'])
self._check_blacklisted_tables(source_rdonly2, ['moving.*', 'view1'])
self._check_client_conn_redirection(
'destination_keyspace',
['master', 'replica'], ['moving1', 'moving2'])
# then serve replica from the destination shards
utils.run_vtctl(['MigrateServedFrom', 'destination_keyspace/0', 'replica'],
auto_log=True)
self._check_srv_keyspace('ServedFrom(master): source_keyspace\n')
self._check_blacklisted_tables(source_master, None)
self._check_blacklisted_tables(source_replica, ['moving.*', 'view1'])
self._check_blacklisted_tables(source_rdonly1, ['moving.*', 'view1'])
self._check_blacklisted_tables(source_rdonly2, ['moving.*', 'view1'])
self._check_client_conn_redirection(
'destination_keyspace',
['master'], ['moving1', 'moving2'])
# move replica back and forth
utils.run_vtctl(['MigrateServedFrom', '-reverse',
'destination_keyspace/0', 'replica'], auto_log=True)
self._check_srv_keyspace('ServedFrom(master): source_keyspace\n'
'ServedFrom(replica): source_keyspace\n')
self._check_blacklisted_tables(source_master, None)
self._check_blacklisted_tables(source_replica, None)
self._check_blacklisted_tables(source_rdonly1, ['moving.*', 'view1'])
self._check_blacklisted_tables(source_rdonly2, ['moving.*', 'view1'])
utils.run_vtctl(['MigrateServedFrom', 'destination_keyspace/0', 'replica'],
auto_log=True)
self._check_srv_keyspace('ServedFrom(master): source_keyspace\n')
self._check_blacklisted_tables(source_master, None)
self._check_blacklisted_tables(source_replica, ['moving.*', 'view1'])
self._check_blacklisted_tables(source_rdonly1, ['moving.*', 'view1'])
self._check_blacklisted_tables(source_rdonly2, ['moving.*', 'view1'])
self._check_client_conn_redirection(
'destination_keyspace',
['master'], ['moving1', 'moving2'])
# then serve master from the destination shards
utils.run_vtctl(['MigrateServedFrom', 'destination_keyspace/0', 'master'],
auto_log=True)
self._check_srv_keyspace('')
self._check_blacklisted_tables(source_master, ['moving.*', 'view1'])
self._check_blacklisted_tables(source_replica, ['moving.*', 'view1'])
self._check_blacklisted_tables(source_rdonly1, ['moving.*', 'view1'])
self._check_blacklisted_tables(source_rdonly2, ['moving.*', 'view1'])
# check the binlog player is gone now
destination_master.wait_for_binlog_player_count(0)
# check the stats are correct
self._check_stats()
    # now remove the tables on the source shard. The blacklisted tables
    # in the source shard won't match any table; make sure that works.
utils.run_vtctl(['ApplySchema',
'-sql=drop view view1',
'source_keyspace'],
auto_log=True)
for t in ['moving1', 'moving2']:
utils.run_vtctl(['ApplySchema',
'-sql=drop table %s' % (t),
'source_keyspace'],
auto_log=True)
for t in [source_master, source_replica, source_rdonly1, source_rdonly2]:
utils.run_vtctl(['ReloadSchema', t.tablet_alias])
qr = source_master.execute('select count(1) from staying1')
self.assertEqual(len(qr['rows']), 1,
'cannot read staying1: got %s' % str(qr))
# test SetShardTabletControl
self._verify_vtctl_set_shard_tablet_control()
def _verify_vtctl_set_shard_tablet_control(self):
"""Test that manually editing the blacklisted tables works correctly.
TODO(mberlin): This is more an integration test and should be moved to the
Go codebase eventually.
"""
# check 'vtctl SetShardTabletControl' command works as expected:
# clear the rdonly entry:
utils.run_vtctl(['SetShardTabletControl', '--remove', 'source_keyspace/0',
'rdonly'], auto_log=True)
self._assert_tablet_controls([topodata_pb2.MASTER, topodata_pb2.REPLICA])
# re-add rdonly:
utils.run_vtctl(['SetShardTabletControl', '--tables=moving.*,view1',
'source_keyspace/0', 'rdonly'], auto_log=True)
self._assert_tablet_controls([topodata_pb2.MASTER, topodata_pb2.REPLICA,
topodata_pb2.RDONLY])
# and then clear all entries:
utils.run_vtctl(['SetShardTabletControl', '--remove', 'source_keyspace/0',
'rdonly'], auto_log=True)
utils.run_vtctl(['SetShardTabletControl', '--remove', 'source_keyspace/0',
'replica'], auto_log=True)
utils.run_vtctl(['SetShardTabletControl', '--remove', 'source_keyspace/0',
'master'], auto_log=True)
shard_json = utils.run_vtctl_json(['GetShard', 'source_keyspace/0'])
self.assertNotIn('tablet_controls', shard_json)
def _assert_tablet_controls(self, expected_dbtypes):
shard_json = utils.run_vtctl_json(['GetShard', 'source_keyspace/0'])
self.assertEqual(len(shard_json['tablet_controls']), len(expected_dbtypes))
expected_dbtypes_set = set(expected_dbtypes)
for tc in shard_json['tablet_controls']:
self.assertIn(tc['tablet_type'], expected_dbtypes_set)
self.assertEqual(['moving.*', 'view1'], tc['blacklisted_tables'])
expected_dbtypes_set.remove(tc['tablet_type'])
self.assertEqual(0, len(expected_dbtypes_set),
'Not all expected db types were blacklisted')
if __name__ == '__main__':
utils.main()
|
|
from cloudmesh.config.cm_config import get_mongo_db, get_mongo_dbname_from_collection, DBConnFactory
from cloudmesh.management.cloudmeshobject import CloudmeshObject
from cloudmesh_base.ConfigDict import ConfigDict
from cloudmesh_base.locations import config_file
from mongoengine import *
from tabulate import tabulate
import datetime
import json
import sys
import pprint
import yaml
STATUS = ('pending', 'approved', 'blocked', 'denied')
def IMPLEMENT():
print "IMPLEMENT ME"
'''
def generate_password_hash(password):
# maybe using passlib https://pypi.python.org/pypi/passlib
salt = uuid.uuid4().hex
hashed_password = hashlib.sha512(password + salt).hexdigest()
return hashed_password'''
def read_user(filename):
'''
reads user data from a yaml file
    :param filename: The file name
:type filename: String of the path
'''
stream = open(filename, 'r')
data = yaml.load(stream)
user = User(
status=data["status"],
username=data["username"],
title=data["title"],
firstname=data["firstname"],
lastname=data["lastname"],
email=data["email"],
url=data["url"],
citizenship=data["citizenship"],
bio=data["bio"],
password=data["password"],
userid=data["userid"],
phone=data["phone"],
projects=data["projects"],
institution=data["institution"],
department=data["department"],
address=data["address"],
country=data["country"],
advisor=data["advisor"],
message=data["message"],
)
return user
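# A minimal sketch of the yaml layout that read_user() expects; the keys mirror
# the data[...] lookups above and the values here are only placeholders:
#
#   status: pending
#   username: jdoe
#   firstname: Jane
#   lastname: Doe
#   email: jdoe@example.com
#   ...             (remaining keys follow the same pattern)
#   message: []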
class User(CloudmeshObject):
"""
This class is used to represent a Cloudmesh User
"""
dbname = get_mongo_dbname_from_collection("manage")
if dbname:
meta = {'db_alias': dbname}
#
# defer the connection to where the object is instantiated
# get_mongo_db("manage", DBConnFactory.TYPE_MONGOENGINE)
"""
User fields
"""
username = StringField(required=True)
email = EmailField(required=True)
password = StringField(required=True)
confirm = StringField(required=True)
title = StringField(required=True)
firstname = StringField(required=True)
lastname = StringField(required=True)
phone = StringField(required=True)
url = StringField(required=True)
citizenship = StringField(required=True)
bio = StringField(required=True)
institution = StringField(required=True)
institutionrole = StringField(required=True)
department = StringField(required=True)
address = StringField(required=True)
advisor = StringField(required=True)
country = StringField(required=True)
"""
Hidden fields
"""
status = StringField(required=True, default='pending')
userid = UUIDField()
projects = StringField()
"""
Message received from either reviewers,
committee or other users. It is a list because
there might be more than one message
"""
message = ListField(StringField())
def order(self):
"""
Order the attributes to be printed in the display
method
"""
try:
return [
("username", self.username),
("status", self.status),
("title", self.title),
("firstname", self.firstname),
("lastname", self.lastname),
("email", self.email),
("url", self.url),
("citizenship", self.citizenship),
("bio", self.bio),
("password", self.password),
("phone", self.phone),
("projects", self.projects),
("institution", self.institution),
("department", self.department),
("address", self.address),
("country", self.country),
("advisor", self.advisor),
("date_modified", self.date_modified),
("date_created", self.date_created),
("date_approved", self.date_approved),
("date_deactivated", self.date_deactivated),
]
except:
return None
@classmethod
def hidden(cls):
"""
Hidden attributes
"""
return [
"userid",
"active",
"message",
]
# def save(self,db):
# db.put({"firname":user.firname,...})
    def is_active(self):
        '''
        checks whether the user is active, i.e. the user is marked as
        active and the deactivation date has not passed yet
        '''
        return self.active and (datetime.datetime.now() < self.date_deactivate)
@classmethod
def set_password(cls, password):
'''
not implemented
:param password:
:type password:
'''
# self.password_hash = generate_password_hash(password)
pass
@classmethod
def check_password(cls, password):
'''
not implemented
:param password:
:type password:
'''
# return check_password_hash(self.password_hash, password)
pass
    def json(self):
        '''
        returns a dict representation of the user that can be serialized to json
        '''
d = {}
for (field, value) in self.order():
try:
d[field] = value
except:
pass
return d
    def yaml(self):
        '''
        returns a yaml representation of the user
        '''
        return self.__str__(fields=True, all=True)
"""
def __str__(self, fields=False, all=False):
content = ""
for (field, value) in self.order():
try:
if not (value is None or value == "") or all:
if fields:
content = content + field + ": "
content = content + value + "\n"
except:
pass
return content
"""
class Users(object):
"""
convenience object to manage several users
"""
def __init__(self):
config = ConfigDict(filename=config_file("/cloudmesh_server.yaml"))
port = config['cloudmesh']['server']['mongo']['port']
# db = connect('manage', port=port)
self.users = User.objects()
dbname = get_mongo_dbname_from_collection("manage")
if dbname:
meta = {'db_alias': dbname}
# get_mongo_db("manage", DBConnFactory.TYPE_MONGOENGINE)
    def objects(self):
        """
        returns the users
        """
        return self.users
@classmethod
def get_unique_username(cls, proposal):
"""
        gets a unique username from a proposal. This is achieved by appending a number at the end if the proposal is already taken.
:param proposal: the proposed username
:type proposal: String
"""
new_proposal = proposal.lower()
num = 1
username = User.objects(username=new_proposal)
while username.count() > 0:
new_proposal = proposal + str(num)
username = User.objects(username=new_proposal)
num = num + 1
return new_proposal
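        # Example: for the proposal "gregor" the loop checks "gregor", then
        # "gregor1", "gregor2", ... and returns the first candidate that is
        # not yet present in User.objects().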
@classmethod
def add(cls, user):
"""
adds a user
:param user: the username
:type user: String
"""
user.username = cls.get_unique_username(user.username)
user.set_date_deactivate()
if cls.validate_email(user.email):
user.save()
else:
print "ERROR: a user with the e-mail `{0}` already exists".format(user.email)
@classmethod
def delete_user(cls, user_name=None):
if user_name:
try:
user = User.objects(username=user_name)
if user:
user.delete()
else:
print "Error: User with the name '{0}' does not exist.".format(user_name)
except:
print "Oops! Something went wrong while trying to remove a user", sys.exc_info()[0]
else:
            print "Error: Please specify the user to be removed"
    @classmethod
    def amend_user_status(cls, user_name=None, status=None):
        if user_name:
            try:
                user = User.objects(username=user_name).first()
                if user:
                    user.status = status
                    user.save()
            except:
                print "Oops! Something went wrong while trying to amend user status", sys.exc_info()[0]
        else:
            print "Error: Please specify the user to be amended"
@classmethod
def validate_email(cls, email):
"""
        verifies that the given e-mail address is not already used by an existing user.
        :param email: the e-mail address to check
        :type email: String
:rtype: Boolean
"""
user = User.objects(email=email)
valid = user.count() == 0
return valid
@classmethod
def find(cls, email=None):
"""
returns the users based on the given query.
If no email is specified all users are returned.
If the email is specified we search for the user with the given e-mail.
:param email: email
:type email: email address
"""
if email is None:
return User.objects()
else:
found = User.objects(email=email)
if found.count() > 0:
                return found[0]
else:
return None
@classmethod
def find_user(cls, username):
"""
returns a user based on the username
        :param username: the username to look for
        :type username: String
        """
        return User.objects(username=username)
@classmethod
    def clear(cls):
        """removes all users from the mongo db"""
for user in User.objects:
user.delete()
@classmethod
def list_users(cls, disp_fmt=None, username=None):
req_fields = ["username", "title", "firstname", "lastname",
"email", "phone", "url", "citizenship",
"institution", "institutionrole", "department",
"advisor", "address", "status"]
        try:
            if username is None:
                user_json = User.objects.only(*req_fields).to_json()
            else:
                user_json = User.objects(username=username).only(*req_fields).to_json()
            user_dict = json.loads(user_json)
            if disp_fmt != 'json':
                cls.display(user_dict, username)
            else:
                cls.display_json(user_dict, username)
        except:
            print "Oops.. Something went wrong in the list users method", sys.exc_info()[0]
@classmethod
def display(cls, user_dicts=None, user_name=None):
if bool(user_dicts):
values = []
for entry in user_dicts:
items = []
headers = []
for key, value in entry.iteritems():
items.append(value)
headers.append(key.replace('_', ' ').title())
values.append(items)
table_fmt = "orgtbl"
table = tabulate(values, headers, table_fmt)
separator = ''
try:
                separator = table.split("\n")[1].replace("|", "+")
except:
separator = "-" * 50
print separator
print table
print separator
else:
if user_name:
print "Error: No user in the system with name '{0}'".format(user_name)
@classmethod
def display_json(cls, user_dict=None, user_name=None):
if bool(user_dict):
# pprint.pprint(user_json)
print json.dumps(user_dict, indent=4)
else:
if user_name:
print "Error: No user in the system with name '{0}'".format(user_name)
def verified_email_domain(email):
"""
    Returns True if the e-mail address is in one of the accepted domains
    (currently a hardcoded list).
    :param email: the e-mail address to check
    :type email: String
"""
domains = ["indiana.edu"]
for domain in domains:
        if email.endswith(domain):
return True
return False
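# Usage sketch for the domain check above (only the hardcoded domains list in
# verified_email_domain is accepted):
#
#   verified_email_domain("jane@indiana.edu")   # -> True
#   verified_email_domain("jane@example.com")   # -> False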
|
|
import datetime
import os
import time
import presets
import torch
import torch.utils.data
import torchvision
import utils
from coco_utils import get_coco
from torch import nn
try:
from torchvision import prototype
except ImportError:
prototype = None
def get_dataset(dir_path, name, image_set, transform):
def sbd(*args, **kwargs):
return torchvision.datasets.SBDataset(*args, mode="segmentation", **kwargs)
paths = {
"voc": (dir_path, torchvision.datasets.VOCSegmentation, 21),
"voc_aug": (dir_path, sbd, 21),
"coco": (dir_path, get_coco, 21),
}
p, ds_fn, num_classes = paths[name]
ds = ds_fn(p, image_set=image_set, transforms=transform)
return ds, num_classes
def get_transform(train, args):
if train:
return presets.SegmentationPresetTrain(base_size=520, crop_size=480)
elif not args.prototype:
return presets.SegmentationPresetEval(base_size=520)
else:
if args.weights:
weights = prototype.models.get_weight(args.weights)
return weights.transforms()
else:
return prototype.transforms.SemanticSegmentationEval(resize_size=520)
def criterion(inputs, target):
losses = {}
for name, x in inputs.items():
losses[name] = nn.functional.cross_entropy(x, target, ignore_index=255)
if len(losses) == 1:
return losses["out"]
return losses["out"] + 0.5 * losses["aux"]
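# The criterion above sums the per-head cross entropy losses: with an aux
# classifier the returned value is losses["out"] + 0.5 * losses["aux"], e.g.
# an out loss of 1.2 and an aux loss of 0.8 give a total loss of 1.6.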
def evaluate(model, data_loader, device, num_classes):
model.eval()
confmat = utils.ConfusionMatrix(num_classes)
metric_logger = utils.MetricLogger(delimiter=" ")
header = "Test:"
with torch.inference_mode():
for image, target in metric_logger.log_every(data_loader, 100, header):
image, target = image.to(device), target.to(device)
output = model(image)
output = output["out"]
confmat.update(target.flatten(), output.argmax(1).flatten())
confmat.reduce_from_all_processes()
return confmat
def train_one_epoch(model, criterion, optimizer, data_loader, lr_scheduler, device, epoch, print_freq, scaler=None):
model.train()
metric_logger = utils.MetricLogger(delimiter=" ")
metric_logger.add_meter("lr", utils.SmoothedValue(window_size=1, fmt="{value}"))
header = f"Epoch: [{epoch}]"
for image, target in metric_logger.log_every(data_loader, print_freq, header):
image, target = image.to(device), target.to(device)
with torch.cuda.amp.autocast(enabled=scaler is not None):
output = model(image)
loss = criterion(output, target)
optimizer.zero_grad()
if scaler is not None:
scaler.scale(loss).backward()
scaler.step(optimizer)
scaler.update()
else:
loss.backward()
optimizer.step()
lr_scheduler.step()
metric_logger.update(loss=loss.item(), lr=optimizer.param_groups[0]["lr"])
def main(args):
if args.prototype and prototype is None:
raise ImportError("The prototype module couldn't be found. Please install the latest torchvision nightly.")
if not args.prototype and args.weights:
raise ValueError("The weights parameter works only in prototype mode. Please pass the --prototype argument.")
if args.output_dir:
utils.mkdir(args.output_dir)
utils.init_distributed_mode(args)
print(args)
device = torch.device(args.device)
dataset, num_classes = get_dataset(args.data_path, args.dataset, "train", get_transform(True, args))
dataset_test, _ = get_dataset(args.data_path, args.dataset, "val", get_transform(False, args))
if args.distributed:
train_sampler = torch.utils.data.distributed.DistributedSampler(dataset)
test_sampler = torch.utils.data.distributed.DistributedSampler(dataset_test)
else:
train_sampler = torch.utils.data.RandomSampler(dataset)
test_sampler = torch.utils.data.SequentialSampler(dataset_test)
data_loader = torch.utils.data.DataLoader(
dataset,
batch_size=args.batch_size,
sampler=train_sampler,
num_workers=args.workers,
collate_fn=utils.collate_fn,
drop_last=True,
)
data_loader_test = torch.utils.data.DataLoader(
dataset_test, batch_size=1, sampler=test_sampler, num_workers=args.workers, collate_fn=utils.collate_fn
)
if not args.prototype:
model = torchvision.models.segmentation.__dict__[args.model](
pretrained=args.pretrained,
num_classes=num_classes,
aux_loss=args.aux_loss,
)
else:
model = prototype.models.segmentation.__dict__[args.model](
weights=args.weights, num_classes=num_classes, aux_loss=args.aux_loss
)
model.to(device)
if args.distributed:
model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)
model_without_ddp = model
if args.distributed:
model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu])
model_without_ddp = model.module
params_to_optimize = [
{"params": [p for p in model_without_ddp.backbone.parameters() if p.requires_grad]},
{"params": [p for p in model_without_ddp.classifier.parameters() if p.requires_grad]},
]
if args.aux_loss:
params = [p for p in model_without_ddp.aux_classifier.parameters() if p.requires_grad]
params_to_optimize.append({"params": params, "lr": args.lr * 10})
optimizer = torch.optim.SGD(params_to_optimize, lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay)
scaler = torch.cuda.amp.GradScaler() if args.amp else None
iters_per_epoch = len(data_loader)
main_lr_scheduler = torch.optim.lr_scheduler.LambdaLR(
optimizer, lambda x: (1 - x / (iters_per_epoch * (args.epochs - args.lr_warmup_epochs))) ** 0.9
)
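    # The lambda above implements a polynomial ("poly") decay: the base lr is
    # scaled by (1 - x / total_iters) ** 0.9, so halfway through training the
    # factor is roughly 0.5 ** 0.9 ~= 0.54 of the initial learning rate.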
if args.lr_warmup_epochs > 0:
warmup_iters = iters_per_epoch * args.lr_warmup_epochs
args.lr_warmup_method = args.lr_warmup_method.lower()
if args.lr_warmup_method == "linear":
warmup_lr_scheduler = torch.optim.lr_scheduler.LinearLR(
optimizer, start_factor=args.lr_warmup_decay, total_iters=warmup_iters
)
elif args.lr_warmup_method == "constant":
warmup_lr_scheduler = torch.optim.lr_scheduler.ConstantLR(
optimizer, factor=args.lr_warmup_decay, total_iters=warmup_iters
)
else:
raise RuntimeError(
f"Invalid warmup lr method '{args.lr_warmup_method}'. Only linear and constant are supported."
)
lr_scheduler = torch.optim.lr_scheduler.SequentialLR(
optimizer, schedulers=[warmup_lr_scheduler, main_lr_scheduler], milestones=[warmup_iters]
)
else:
lr_scheduler = main_lr_scheduler
if args.resume:
checkpoint = torch.load(args.resume, map_location="cpu")
model_without_ddp.load_state_dict(checkpoint["model"], strict=not args.test_only)
if not args.test_only:
optimizer.load_state_dict(checkpoint["optimizer"])
lr_scheduler.load_state_dict(checkpoint["lr_scheduler"])
args.start_epoch = checkpoint["epoch"] + 1
if args.amp:
scaler.load_state_dict(checkpoint["scaler"])
if args.test_only:
confmat = evaluate(model, data_loader_test, device=device, num_classes=num_classes)
print(confmat)
return
start_time = time.time()
for epoch in range(args.start_epoch, args.epochs):
if args.distributed:
train_sampler.set_epoch(epoch)
train_one_epoch(model, criterion, optimizer, data_loader, lr_scheduler, device, epoch, args.print_freq, scaler)
confmat = evaluate(model, data_loader_test, device=device, num_classes=num_classes)
print(confmat)
checkpoint = {
"model": model_without_ddp.state_dict(),
"optimizer": optimizer.state_dict(),
"lr_scheduler": lr_scheduler.state_dict(),
"epoch": epoch,
"args": args,
}
if args.amp:
checkpoint["scaler"] = scaler.state_dict()
utils.save_on_master(checkpoint, os.path.join(args.output_dir, f"model_{epoch}.pth"))
utils.save_on_master(checkpoint, os.path.join(args.output_dir, "checkpoint.pth"))
total_time = time.time() - start_time
total_time_str = str(datetime.timedelta(seconds=int(total_time)))
print(f"Training time {total_time_str}")
def get_args_parser(add_help=True):
import argparse
parser = argparse.ArgumentParser(description="PyTorch Segmentation Training", add_help=add_help)
parser.add_argument("--data-path", default="/datasets01/COCO/022719/", type=str, help="dataset path")
parser.add_argument("--dataset", default="coco", type=str, help="dataset name")
parser.add_argument("--model", default="fcn_resnet101", type=str, help="model name")
    parser.add_argument("--aux-loss", action="store_true", help="auxiliary loss")
parser.add_argument("--device", default="cuda", type=str, help="device (Use cuda or cpu Default: cuda)")
parser.add_argument(
"-b", "--batch-size", default=8, type=int, help="images per gpu, the total batch size is $NGPU x batch_size"
)
parser.add_argument("--epochs", default=30, type=int, metavar="N", help="number of total epochs to run")
parser.add_argument(
"-j", "--workers", default=16, type=int, metavar="N", help="number of data loading workers (default: 16)"
)
parser.add_argument("--lr", default=0.01, type=float, help="initial learning rate")
parser.add_argument("--momentum", default=0.9, type=float, metavar="M", help="momentum")
parser.add_argument(
"--wd",
"--weight-decay",
default=1e-4,
type=float,
metavar="W",
help="weight decay (default: 1e-4)",
dest="weight_decay",
)
parser.add_argument("--lr-warmup-epochs", default=0, type=int, help="the number of epochs to warmup (default: 0)")
parser.add_argument("--lr-warmup-method", default="linear", type=str, help="the warmup method (default: linear)")
parser.add_argument("--lr-warmup-decay", default=0.01, type=float, help="the decay for lr")
parser.add_argument("--print-freq", default=10, type=int, help="print frequency")
parser.add_argument("--output-dir", default=".", type=str, help="path to save outputs")
parser.add_argument("--resume", default="", type=str, help="path of checkpoint")
parser.add_argument("--start-epoch", default=0, type=int, metavar="N", help="start epoch")
parser.add_argument(
"--test-only",
dest="test_only",
help="Only test the model",
action="store_true",
)
parser.add_argument(
"--pretrained",
dest="pretrained",
help="Use pre-trained models from the modelzoo",
action="store_true",
)
# distributed training parameters
parser.add_argument("--world-size", default=1, type=int, help="number of distributed processes")
parser.add_argument("--dist-url", default="env://", type=str, help="url used to set up distributed training")
# Prototype models only
parser.add_argument(
"--prototype",
dest="prototype",
        help="Use prototype model builders instead of those from the main area",
action="store_true",
)
parser.add_argument("--weights", default=None, type=str, help="the weights enum name to load")
# Mixed precision training parameters
parser.add_argument("--amp", action="store_true", help="Use torch.cuda.amp for mixed precision training")
return parser
if __name__ == "__main__":
args = get_args_parser().parse_args()
main(args)
|
|
"""Flask web views for Caravel"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
import logging
import re
import sys
import time
import traceback
from datetime import datetime
import functools
import pandas as pd
import sqlalchemy as sqla
from flask import (
g, request, redirect, flash, Response, render_template, Markup)
from flask_appbuilder import ModelView, CompactCRUDMixin, BaseView, expose
from flask_appbuilder.actions import action
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_appbuilder.security.decorators import has_access, has_access_api
from flask_babel import gettext as __
from flask_babel import lazy_gettext as _
from flask_appbuilder.models.sqla.filters import BaseFilter
from sqlalchemy import create_engine, select, text
from sqlalchemy.sql.expression import TextAsFrom
from werkzeug.routing import BaseConverter
from wtforms.validators import ValidationError
import caravel
from caravel import appbuilder, db, models, viz, utils, app, sm, ascii_art
config = app.config
log_this = models.Log.log_this
can_access = utils.can_access
class BaseCaravelView(BaseView):
def can_access(self, permission_name, view_name):
return utils.can_access(appbuilder.sm, permission_name, view_name)
def get_error_msg():
if config.get("SHOW_STACKTRACE"):
error_msg = traceback.format_exc()
else:
error_msg = "FATAL ERROR \n"
error_msg += (
"Stacktrace is hidden. Change the SHOW_STACKTRACE "
"configuration setting to enable it")
return error_msg
def api(f):
"""
A decorator to label an endpoint as an API. Catches uncaught exceptions and
    returns the response in JSON format
"""
def wraps(self, *args, **kwargs):
try:
return f(self, *args, **kwargs)
except Exception as e:
logging.exception(e)
resp = Response(
json.dumps({
'message': get_error_msg()
}),
status=500,
mimetype="application/json")
return resp
return functools.update_wrapper(wraps, f)
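# Note on usage: @api is applied above @has_access_api and @expose (see the
# checkbox and activity_per_day endpoints below), so it is the outermost
# wrapper and turns any uncaught exception into a JSON 500 response.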
def check_ownership(obj, raise_if_false=True):
"""Meant to be used in `pre_update` hooks on models to enforce ownership
    Admins have all access, and other users need to be referenced on either
the created_by field that comes with the ``AuditMixin``, or in a field
named ``owners`` which is expected to be a one-to-many with the User
model. It is meant to be used in the ModelView's pre_update hook in
which raising will abort the update.
"""
if not obj:
return False
roles = (r.name for r in get_user_roles())
if 'Admin' in roles:
return True
session = db.create_scoped_session()
orig_obj = session.query(obj.__class__).filter_by(id=obj.id).first()
    owner_names = (user.username for user in getattr(orig_obj, 'owners', []))
if (
hasattr(orig_obj, 'created_by') and
orig_obj.created_by and
orig_obj.created_by.username == g.user.username):
return True
if (
hasattr(orig_obj, 'owners') and
g.user and
hasattr(g.user, 'username') and
g.user.username in owner_names):
return True
if raise_if_false:
raise utils.CaravelSecurityException(
"You don't have the rights to alter [{}]".format(obj))
else:
return False
def get_user_roles():
if g.user.is_anonymous():
return [appbuilder.sm.find_role('Public')]
return g.user.roles
class CaravelFilter(BaseFilter):
def get_perms(self):
perms = []
for role in get_user_roles():
for perm_view in role.permissions:
if perm_view.permission.name == 'datasource_access':
perms.append(perm_view.view_menu.name)
return perms
class FilterSlice(CaravelFilter):
def apply(self, query, func): # noqa
if any([r.name in ('Admin', 'Alpha') for r in get_user_roles()]):
return query
qry = query.filter(self.model.perm.in_(self.get_perms()))
return qry
class FilterDashboard(CaravelFilter):
def apply(self, query, func): # noqa
if any([r.name in ('Admin', 'Alpha') for r in get_user_roles()]):
return query
Slice = models.Slice # noqa
Dash = models.Dashboard # noqa
slice_ids_qry = (
db.session
.query(Slice.id)
.filter(Slice.perm.in_(self.get_perms()))
)
query = query.filter(
Dash.id.in_(
db.session.query(Dash.id)
.distinct()
.join(Dash.slices)
.filter(Slice.id.in_(slice_ids_qry))
)
)
return query
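    # In other words: non Admin/Alpha users only see dashboards that contain
    # at least one slice whose datasource permission is in self.get_perms().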
def validate_json(form, field): # noqa
try:
json.loads(field.data)
except Exception as e:
logging.exception(e)
raise ValidationError("json isn't valid")
def generate_download_headers(extension):
filename = datetime.now().strftime("%Y%m%d_%H%M%S")
content_disp = "attachment; filename={}.{}".format(filename, extension)
headers = {
"Content-Disposition": content_disp,
}
return headers
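# Example of the header produced above: a csv export started on
# 2016-01-01 12:00:00 would be sent with
#   Content-Disposition: attachment; filename=20160101_120000.csv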
class DeleteMixin(object):
@action(
"muldelete", "Delete", "Delete all Really?", "fa-trash", single=False)
def muldelete(self, items):
self.datamodel.delete_all(items)
self.update_redirect()
return redirect(self.get_redirect())
class CaravelModelView(ModelView):
page_size = 500
class TableColumnInlineView(CompactCRUDMixin, CaravelModelView): # noqa
datamodel = SQLAInterface(models.TableColumn)
can_delete = False
edit_columns = [
'column_name', 'verbose_name', 'description', 'groupby', 'filterable',
'table', 'count_distinct', 'sum', 'min', 'max', 'expression',
'is_dttm', 'python_date_format', 'database_expression']
add_columns = edit_columns
list_columns = [
'column_name', 'type', 'groupby', 'filterable', 'count_distinct',
'sum', 'min', 'max', 'is_dttm']
page_size = 500
description_columns = {
'is_dttm': (_(
"Whether to make this column available as a "
"[Time Granularity] option, column has to be DATETIME or "
"DATETIME-like")),
'expression': utils.markdown(
"a valid SQL expression as supported by the underlying backend. "
"Example: `substr(name, 1, 1)`", True),
'python_date_format': utils.markdown(Markup(
"The pattern of timestamp format, use "
"<a href='https://docs.python.org/2/library/"
"datetime.html#strftime-strptime-behavior'>"
"python datetime string pattern</a> "
"expression. If time is stored in epoch "
"format, put `epoch_s` or `epoch_ms`. Leave `Database Expression` "
"below empty if timestamp is stored in "
"String or Integer(epoch) type"), True),
        'database_expression': utils.markdown(
            "The database expression to cast internal datetime "
            "constants to database date/timestamp type according to the DBAPI. "
            "The expression should follow the pattern of "
            "%Y-%m-%d %H:%M:%S, based on different DBAPI. "
            "The string should be a python string formatter \n"
            "`Ex: TO_DATE('{}', 'YYYY-MM-DD HH24:MI:SS')` for Oracle. "
            "Caravel uses a default expression based on the DB URI if this "
            "field is blank.", True),
}
label_columns = {
'column_name': _("Column"),
'verbose_name': _("Verbose Name"),
'description': _("Description"),
'groupby': _("Groupable"),
'filterable': _("Filterable"),
'table': _("Table"),
'count_distinct': _("Count Distinct"),
'sum': _("Sum"),
'min': _("Min"),
'max': _("Max"),
'expression': _("Expression"),
'is_dttm': _("Is temporal"),
'python_date_format': _("Datetime Format"),
'database_expression': _("Database Expression")
}
appbuilder.add_view_no_menu(TableColumnInlineView)
class DruidColumnInlineView(CompactCRUDMixin, CaravelModelView): # noqa
datamodel = SQLAInterface(models.DruidColumn)
edit_columns = [
'column_name', 'description', 'datasource', 'groupby',
'count_distinct', 'sum', 'min', 'max']
list_columns = [
'column_name', 'type', 'groupby', 'filterable', 'count_distinct',
'sum', 'min', 'max']
can_delete = False
page_size = 500
label_columns = {
'column_name': _("Column"),
'type': _("Type"),
'datasource': _("Datasource"),
'groupby': _("Groupable"),
'filterable': _("Filterable"),
'count_distinct': _("Count Distinct"),
'sum': _("Sum"),
'min': _("Min"),
'max': _("Max"),
}
def post_update(self, col):
col.generate_metrics()
appbuilder.add_view_no_menu(DruidColumnInlineView)
class SqlMetricInlineView(CompactCRUDMixin, CaravelModelView): # noqa
datamodel = SQLAInterface(models.SqlMetric)
list_columns = ['metric_name', 'verbose_name', 'metric_type']
edit_columns = [
'metric_name', 'description', 'verbose_name', 'metric_type',
'expression', 'table', 'is_restricted']
description_columns = {
'expression': utils.markdown(
"a valid SQL expression as supported by the underlying backend. "
"Example: `count(DISTINCT userid)`", True),
'is_restricted': _("Whether the access to this metric is restricted "
"to certain roles. Only roles with the permission "
"'metric access on XXX (the name of this metric)' "
"are allowed to access this metric"),
}
add_columns = edit_columns
page_size = 500
label_columns = {
'metric_name': _("Metric"),
'description': _("Description"),
'verbose_name': _("Verbose Name"),
'metric_type': _("Type"),
'expression': _("SQL Expression"),
'table': _("Table"),
}
def post_add(self, metric):
utils.init_metrics_perm(caravel, [metric])
def post_update(self, metric):
utils.init_metrics_perm(caravel, [metric])
appbuilder.add_view_no_menu(SqlMetricInlineView)
class DruidMetricInlineView(CompactCRUDMixin, CaravelModelView): # noqa
datamodel = SQLAInterface(models.DruidMetric)
list_columns = ['metric_name', 'verbose_name', 'metric_type']
edit_columns = [
'metric_name', 'description', 'verbose_name', 'metric_type', 'json',
'datasource', 'is_restricted']
add_columns = edit_columns
page_size = 500
validators_columns = {
'json': [validate_json],
}
description_columns = {
'metric_type': utils.markdown(
"use `postagg` as the metric type if you are defining a "
"[Druid Post Aggregation]"
"(http://druid.io/docs/latest/querying/post-aggregations.html)",
True),
'is_restricted': _("Whether the access to this metric is restricted "
"to certain roles. Only roles with the permission "
"'metric access on XXX (the name of this metric)' "
"are allowed to access this metric"),
}
label_columns = {
'metric_name': _("Metric"),
'description': _("Description"),
'verbose_name': _("Verbose Name"),
'metric_type': _("Type"),
'json': _("JSON"),
'datasource': _("Druid Datasource"),
}
def post_add(self, metric):
utils.init_metrics_perm(caravel, [metric])
def post_update(self, metric):
utils.init_metrics_perm(caravel, [metric])
appbuilder.add_view_no_menu(DruidMetricInlineView)
class DatabaseView(CaravelModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.Database)
list_columns = ['database_name', 'sql_link', 'creator', 'changed_on_']
add_columns = [
'database_name', 'sqlalchemy_uri', 'cache_timeout', 'extra']
search_exclude_columns = ('password',)
edit_columns = add_columns
add_template = "caravel/models/database/add.html"
edit_template = "caravel/models/database/edit.html"
base_order = ('changed_on', 'desc')
description_columns = {
'sqlalchemy_uri': (
"Refer to the SqlAlchemy docs for more information on how "
"to structure your URI here: "
"http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html"),
'extra': utils.markdown(
"JSON string containing extra configuration elements. "
"The ``engine_params`` object gets unpacked into the "
"[sqlalchemy.create_engine]"
"(http://docs.sqlalchemy.org/en/latest/core/engines.html#"
"sqlalchemy.create_engine) call, while the ``metadata_params`` "
"gets unpacked into the [sqlalchemy.MetaData]"
"(http://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html"
"#sqlalchemy.schema.MetaData) call. ", True),
}
label_columns = {
'database_name': _("Database"),
'sql_link': _("SQL link"),
'creator': _("Creator"),
'changed_on_': _("Last Changed"),
'sqlalchemy_uri': _("SQLAlchemy URI"),
'cache_timeout': _("Cache Timeout"),
'extra': _("Extra"),
}
def pre_add(self, db):
conn = sqla.engine.url.make_url(db.sqlalchemy_uri)
db.password = conn.password
conn.password = "X" * 10 if conn.password else None
db.sqlalchemy_uri = str(conn) # hides the password
def pre_update(self, db):
self.pre_add(db)
appbuilder.add_view(
DatabaseView,
"Databases",
label=__("Databases"),
icon="fa-database",
category="Sources",
category_label=__("Sources"),
category_icon='fa-database',)
class TableModelView(CaravelModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.SqlaTable)
list_columns = [
'table_link', 'database', 'sql_link', 'is_featured',
'changed_by_', 'changed_on_']
order_columns = [
'table_link', 'database', 'sql_link', 'is_featured', 'changed_on_']
add_columns = [
'table_name', 'database', 'schema',
'default_endpoint', 'offset', 'cache_timeout']
edit_columns = [
'table_name', 'is_featured', 'database', 'schema',
'description', 'owner',
'main_dttm_col', 'default_endpoint', 'offset', 'cache_timeout']
related_views = [TableColumnInlineView, SqlMetricInlineView]
base_order = ('changed_on', 'desc')
description_columns = {
'offset': "Timezone offset (in hours) for this datasource",
'schema': (
"Schema, as used only in some databases like Postgres, Redshift "
"and DB2"),
'description': Markup(
"Supports <a href='https://daringfireball.net/projects/markdown/'>"
"markdown</a>"),
}
label_columns = {
'table_link': _("Table"),
'changed_by_': _("Changed By"),
'database': _("Database"),
'changed_on_': _("Last Changed"),
'sql_link': _("SQL Editor"),
'is_featured': _("Is Featured"),
'schema': _("Schema"),
'default_endpoint': _("Default Endpoint"),
'offset': _("Offset"),
'cache_timeout': _("Cache Timeout"),
}
def post_add(self, table):
table_name = table.table_name
try:
table.fetch_metadata()
except Exception as e:
logging.exception(e)
flash(
"Table [{}] doesn't seem to exist, "
"couldn't fetch metadata".format(table_name),
"danger")
utils.merge_perm(sm, 'datasource_access', table.perm)
def post_update(self, table):
self.post_add(table)
appbuilder.add_view(
TableModelView,
"Tables",
label=__("Tables"),
category="Sources",
category_label=__("Sources"),
icon='fa-table',)
appbuilder.add_separator("Sources")
class DruidClusterModelView(CaravelModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.DruidCluster)
add_columns = [
'cluster_name',
'coordinator_host', 'coordinator_port', 'coordinator_endpoint',
'broker_host', 'broker_port', 'broker_endpoint',
]
edit_columns = add_columns
list_columns = ['cluster_name', 'metadata_last_refreshed']
label_columns = {
'cluster_name': _("Cluster"),
'coordinator_host': _("Coordinator Host"),
'coordinator_port': _("Coordinator Port"),
'coordinator_endpoint': _("Coordinator Endpoint"),
'broker_host': _("Broker Host"),
'broker_port': _("Broker Port"),
'broker_endpoint': _("Broker Endpoint"),
}
if config['DRUID_IS_ACTIVE']:
appbuilder.add_view(
DruidClusterModelView,
name="Druid Clusters",
label=__("Druid Clusters"),
icon="fa-cubes",
category="Sources",
category_label=__("Sources"),
category_icon='fa-database',)
class SliceModelView(CaravelModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.Slice)
add_template = "caravel/add_slice.html"
can_add = False
list_columns = [
'slice_link', 'viz_type', 'datasource_link', 'creator', 'modified']
edit_columns = [
'slice_name', 'description', 'viz_type', 'druid_datasource',
'table', 'owners', 'dashboards', 'params', 'cache_timeout']
base_order = ('changed_on', 'desc')
description_columns = {
'description': Markup(
"The content here can be displayed as widget headers in the "
"dashboard view. Supports "
"<a href='https://daringfireball.net/projects/markdown/'>"
"markdown</a>"),
'params': _(
"These parameters are generated dynamically when clicking "
"the save or overwrite button in the explore view. This JSON "
"object is exposed here for reference and for power users who may "
"want to alter specific parameters."),
'cache_timeout': _(
"Duration (in seconds) of the caching timeout for this slice."
),
}
base_filters = [['id', FilterSlice, lambda: []]]
label_columns = {
'cache_timeout': _("Cache Timeout"),
'creator': _("Creator"),
'dashboards': _("Dashboards"),
'datasource_link': _("Datasource"),
'description': _("Description"),
'modified': _("Last Modified"),
'owners': _("Owners"),
'params': _("Parameters"),
'slice_link': _("Slice"),
'slice_name': _("Name"),
'table': _("Table"),
'viz_type': _("Visualization Type"),
}
def pre_update(self, obj):
check_ownership(obj)
def pre_delete(self, obj):
check_ownership(obj)
appbuilder.add_view(
SliceModelView,
"Slices",
label=__("Slices"),
icon="fa-bar-chart",
category="",
category_icon='',)
class SliceAsync(SliceModelView): # noqa
list_columns = [
'slice_link', 'viz_type',
'creator', 'modified', 'icons']
label_columns = {
'icons': ' ',
'slice_link': _('Slice'),
'viz_type': _('Visualization Type'),
}
appbuilder.add_view_no_menu(SliceAsync)
class SliceAddView(SliceModelView): # noqa
list_columns = [
'slice_link', 'viz_type',
'owners', 'modified', 'data', 'changed_on']
label_columns = {
'icons': ' ',
'slice_link': _('Slice'),
'viz_type': _('Visualization Type'),
}
appbuilder.add_view_no_menu(SliceAddView)
class DashboardModelView(CaravelModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.Dashboard)
list_columns = ['dashboard_link', 'creator', 'modified']
edit_columns = [
'dashboard_title', 'slug', 'slices', 'owners', 'position_json', 'css',
'json_metadata']
show_columns = edit_columns + ['table_names']
add_columns = edit_columns
base_order = ('changed_on', 'desc')
description_columns = {
'position_json': _(
"This json object describes the positioning of the widgets in "
"the dashboard. It is dynamically generated when adjusting "
"the widgets size and positions by using drag & drop in "
"the dashboard view"),
'css': _(
"The css for individual dashboards can be altered here, or "
"in the dashboard view where changes are immediately "
"visible"),
'slug': _("To get a readable URL for your dashboard"),
'json_metadata': _(
"This JSON object is generated dynamically when clicking "
"the save or overwrite button in the dashboard view. It "
"is exposed here for reference and for power users who may "
"want to alter specific parameters."),
'owners': _("Owners is a list of users who can alter the dashboard."),
}
base_filters = [['slice', FilterDashboard, lambda: []]]
label_columns = {
'dashboard_link': _("Dashboard"),
'dashboard_title': _("Title"),
'slug': _("Slug"),
'slices': _("Slices"),
'owners': _("Owners"),
'creator': _("Creator"),
'modified': _("Modified"),
'position_json': _("Position JSON"),
'css': _("CSS"),
'json_metadata': _("JSON Metadata"),
'table_names': _("Underlying Tables"),
}
def pre_add(self, obj):
obj.slug = obj.slug.strip() or None
if obj.slug:
obj.slug = obj.slug.replace(" ", "-")
            obj.slug = re.sub(r'[^\w\-]+', '', obj.slug)
def pre_update(self, obj):
check_ownership(obj)
self.pre_add(obj)
def pre_delete(self, obj):
check_ownership(obj)
appbuilder.add_view(
DashboardModelView,
"Dashboards",
label=__("Dashboards"),
icon="fa-dashboard",
category="",
category_icon='',)
class DashboardModelViewAsync(DashboardModelView): # noqa
list_columns = ['dashboard_link', 'creator', 'modified', 'dashboard_title']
label_columns = {
'dashboard_link': 'Dashboard',
}
appbuilder.add_view_no_menu(DashboardModelViewAsync)
class LogModelView(CaravelModelView):
datamodel = SQLAInterface(models.Log)
list_columns = ('user', 'action', 'dttm')
edit_columns = ('user', 'action', 'dttm', 'json')
base_order = ('dttm', 'desc')
label_columns = {
'user': _("User"),
'action': _("Action"),
'dttm': _("dttm"),
'json': _("JSON"),
}
appbuilder.add_view(
LogModelView,
"Action Log",
label=__("Action Log"),
category="Security",
category_label=__("Security"),
icon="fa-list-ol")
class DruidDatasourceModelView(CaravelModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.DruidDatasource)
list_columns = [
'datasource_link', 'cluster', 'changed_by_', 'changed_on_', 'offset']
order_columns = [
'datasource_link', 'changed_on_', 'offset']
related_views = [DruidColumnInlineView, DruidMetricInlineView]
edit_columns = [
'datasource_name', 'cluster', 'description', 'owner',
'is_featured', 'is_hidden', 'default_endpoint', 'offset',
'cache_timeout']
add_columns = edit_columns
page_size = 500
base_order = ('datasource_name', 'asc')
description_columns = {
'offset': _("Timezone offset (in hours) for this datasource"),
'description': Markup(
"Supports <a href='"
"https://daringfireball.net/projects/markdown/'>markdown</a>"),
}
label_columns = {
'datasource_link': _("Data Source"),
'cluster': _("Cluster"),
'description': _("Description"),
'owner': _("Owner"),
'is_featured': _("Is Featured"),
'is_hidden': _("Is Hidden"),
'default_endpoint': _("Default Endpoint"),
'offset': _("Time Offset"),
'cache_timeout': _("Cache Timeout"),
}
def post_add(self, datasource):
datasource.generate_metrics()
utils.merge_perm(sm, 'datasource_access', datasource.perm)
def post_update(self, datasource):
self.post_add(datasource)
if config['DRUID_IS_ACTIVE']:
appbuilder.add_view(
DruidDatasourceModelView,
"Druid Datasources",
label=__("Druid Datasources"),
category="Sources",
category_label=__("Sources"),
icon="fa-cube")
@app.route('/health')
def health():
return "OK"
@app.route('/ping')
def ping():
return "OK"
class R(BaseCaravelView):
"""used for short urls"""
@log_this
@expose("/<url_id>")
def index(self, url_id):
url = db.session.query(models.Url).filter_by(id=url_id).first()
if url:
return redirect('/' + url.url)
else:
flash("URL to nowhere...", "danger")
return redirect('/')
@log_this
@expose("/shortner/", methods=['POST', 'GET'])
def shortner(self):
url = request.form.get('data')
obj = models.Url(url=url)
db.session.add(obj)
db.session.commit()
return("{request.headers[Host]}/r/{obj.id}".format(
request=request, obj=obj))
@expose("/msg/")
    def msg(self):
        """Redirects to the specified url while flashing a message"""
flash(Markup(request.args.get("msg")), "info")
return redirect(request.args.get("url"))
appbuilder.add_view_no_menu(R)
class Caravel(BaseCaravelView):
"""The base views for Caravel!"""
@has_access
@expose("/explore/<datasource_type>/<datasource_id>/")
@expose("/datasource/<datasource_type>/<datasource_id>/") # Legacy url
@log_this
def explore(self, datasource_type, datasource_id):
error_redirect = '/slicemodelview/list/'
datasource_class = models.SqlaTable \
if datasource_type == "table" else models.DruidDatasource
datasources = (
db.session
.query(datasource_class)
.all()
)
datasources = sorted(datasources, key=lambda ds: ds.full_name)
datasource = [ds for ds in datasources if int(datasource_id) == ds.id]
datasource = datasource[0] if datasource else None
slice_id = request.args.get("slice_id")
slc = None
if slice_id:
slc = (
db.session.query(models.Slice)
.filter_by(id=slice_id)
.first()
)
if not datasource:
flash(__("The datasource seems to have been deleted"), "alert")
return redirect(error_redirect)
slice_add_perm = self.can_access('can_add', 'SliceModelView')
slice_edit_perm = check_ownership(slc, raise_if_false=False)
slice_download_perm = self.can_access('can_download', 'SliceModelView')
all_datasource_access = self.can_access(
'all_datasource_access', 'all_datasource_access')
datasource_access = self.can_access(
'datasource_access', datasource.perm)
if not (all_datasource_access or datasource_access):
flash(__("You don't seem to have access to this datasource"), "danger")
return redirect(error_redirect)
action = request.args.get('action')
if action in ('saveas', 'overwrite'):
return self.save_or_overwrite_slice(
request.args, slc, slice_add_perm, slice_edit_perm)
viz_type = request.args.get("viz_type")
if not viz_type and datasource.default_endpoint:
return redirect(datasource.default_endpoint)
if not viz_type:
viz_type = "table"
try:
obj = viz.viz_types[viz_type](
datasource,
form_data=request.args,
slice_=slc)
except Exception as e:
flash(str(e), "danger")
return redirect(error_redirect)
if request.args.get("json") == "true":
status = 200
if config.get("DEBUG"):
# Allows for nice debugger stack traces in debug mode
payload = obj.get_json()
else:
try:
payload = obj.get_json()
except Exception as e:
logging.exception(e)
payload = str(e)
status = 500
resp = Response(
payload,
status=status,
mimetype="application/json")
return resp
elif request.args.get("csv") == "true":
payload = obj.get_csv()
return Response(
payload,
status=200,
headers=generate_download_headers("csv"),
mimetype="application/csv")
else:
if request.args.get("standalone") == "true":
template = "caravel/standalone.html"
else:
template = "caravel/explore.html"
            try:
                resp = self.render_template(
                    template, viz=obj, slice=slc, datasources=datasources,
                    can_add=slice_add_perm, can_edit=slice_edit_perm,
                    can_download=slice_download_perm,
                    userid=g.user.get_id() if g.user else '')
            except Exception as e:
                if config.get("DEBUG"):
                    raise
                return Response(
                    str(e),
                    status=500,
                    mimetype="application/json")
            return resp
def save_or_overwrite_slice(
self, args, slc, slice_add_perm, slice_edit_perm):
"""Save or overwrite a slice"""
slice_name = args.get('slice_name')
action = args.get('action')
        # TODO use form processing from wtforms
d = args.to_dict(flat=False)
del d['action']
del d['previous_viz_type']
as_list = ('metrics', 'groupby', 'columns', 'all_columns', 'mapbox_label')
for k in d:
v = d.get(k)
if k in as_list and not isinstance(v, list):
d[k] = [v] if v else []
if k not in as_list and isinstance(v, list):
d[k] = v[0]
table_id = druid_datasource_id = None
datasource_type = args.get('datasource_type')
if datasource_type in ('datasource', 'druid'):
druid_datasource_id = args.get('datasource_id')
elif datasource_type == 'table':
table_id = args.get('datasource_id')
        if action == 'saveas':
slc = models.Slice(owners=[g.user] if g.user else [])
slc.params = json.dumps(d, indent=4, sort_keys=True)
slc.datasource_name = args.get('datasource_name')
slc.viz_type = args.get('viz_type')
slc.druid_datasource_id = druid_datasource_id
slc.table_id = table_id
slc.datasource_type = datasource_type
slc.slice_name = slice_name
        if action == 'saveas' and slice_add_perm:
self.save_slice(slc)
elif action == 'overwrite' and slice_edit_perm:
self.overwrite_slice(slc)
# Adding slice to a dashboard if requested
dash = None
if request.args.get('add_to_dash') == 'existing':
dash = (
db.session.query(models.Dashboard)
.filter_by(id=int(request.args.get('save_to_dashboard_id')))
.one()
)
flash(
"Slice [{}] was added to dashboard [{}]".format(
slc.slice_name,
dash.dashboard_title),
"info")
elif request.args.get('add_to_dash') == 'new':
dash = models.Dashboard(
dashboard_title=request.args.get('new_dashboard_name'),
owners=[g.user] if g.user else [])
flash(
"Dashboard [{}] just got created and slice [{}] was added "
"to it".format(
dash.dashboard_title,
slc.slice_name),
"info")
if dash and slc not in dash.slices:
dash.slices.append(slc)
db.session.commit()
if request.args.get('goto_dash') == 'true':
return redirect(dash.url)
else:
return redirect(slc.slice_url)
def save_slice(self, slc):
session = db.session()
msg = "Slice [{}] has been saved".format(slc.slice_name)
session.add(slc)
session.commit()
flash(msg, "info")
def overwrite_slice(self, slc):
can_update = check_ownership(slc, raise_if_false=False)
if not can_update:
flash("You cannot overwrite [{}]".format(slc), "danger")
else:
session = db.session()
session.merge(slc)
session.commit()
msg = "Slice [{}] has been overwritten".format(slc.slice_name)
flash(msg, "info")
@api
@has_access_api
@expose("/checkbox/<model_view>/<id_>/<attr>/<value>", methods=['GET'])
def checkbox(self, model_view, id_, attr, value):
"""endpoint for checking/unchecking any boolean in a sqla model"""
views = sys.modules[__name__]
model_view_cls = getattr(views, model_view)
model = model_view_cls.datamodel.obj
obj = db.session.query(model).filter_by(id=id_).first()
if obj:
setattr(obj, attr, value == 'true')
db.session.commit()
return Response("OK", mimetype="application/json")
@api
@has_access_api
@expose("/activity_per_day")
def activity_per_day(self):
"""endpoint to power the calendar heatmap on the welcome page"""
Log = models.Log # noqa
qry = (
db.session
.query(
Log.dt,
sqla.func.count())
.group_by(Log.dt)
.all()
)
payload = {str(time.mktime(dt.timetuple())): ccount for dt, ccount in qry if dt}
return Response(json.dumps(payload), mimetype="application/json")
@api
@has_access_api
@expose("/save_dash/<dashboard_id>/", methods=['GET', 'POST'])
def save_dash(self, dashboard_id):
"""Save a dashboard's metadata"""
data = json.loads(request.form.get('data'))
positions = data['positions']
slice_ids = [int(d['slice_id']) for d in positions]
session = db.session()
Dash = models.Dashboard # noqa
dash = session.query(Dash).filter_by(id=dashboard_id).first()
check_ownership(dash, raise_if_false=True)
dash.slices = [o for o in dash.slices if o.id in slice_ids]
positions = sorted(data['positions'], key=lambda x: int(x['slice_id']))
dash.position_json = json.dumps(positions, indent=4, sort_keys=True)
md = dash.metadata_dejson
if 'filter_immune_slices' not in md:
md['filter_immune_slices'] = []
md['expanded_slices'] = data['expanded_slices']
dash.json_metadata = json.dumps(md, indent=4)
dash.css = data['css']
session.merge(dash)
session.commit()
session.close()
return "SUCCESS"
@api
@has_access_api
@expose("/add_slices/<dashboard_id>/", methods=['POST'])
def add_slices(self, dashboard_id):
"""Add and save slices to a dashboard"""
data = json.loads(request.form.get('data'))
session = db.session()
Slice = models.Slice # noqa
dash = session.query(models.Dashboard).filter_by(id=dashboard_id).first()
check_ownership(dash, raise_if_false=True)
new_slices = session.query(Slice).filter(Slice.id.in_(data['slice_ids']))
dash.slices += new_slices
session.merge(dash)
session.commit()
session.close()
return "SLICES ADDED"
@api
@has_access_api
@expose("/testconn", methods=["POST", "GET"])
def testconn(self):
"""Tests a sqla connection"""
try:
uri = request.json.get('uri')
connect_args = (
request.json
.get('extras', {})
.get('engine_params', {})
.get('connect_args', {}))
engine = create_engine(uri, connect_args=connect_args)
engine.connect()
return json.dumps(engine.table_names(), indent=4)
except Exception:
return Response(
traceback.format_exc(),
status=500,
mimetype="application/json")
@expose("/favstar/<class_name>/<obj_id>/<action>/")
def favstar(self, class_name, obj_id, action):
session = db.session()
FavStar = models.FavStar # noqa
count = 0
favs = session.query(FavStar).filter_by(
class_name=class_name, obj_id=obj_id, user_id=g.user.get_id()).all()
if action == 'select':
if not favs:
session.add(
FavStar(
class_name=class_name, obj_id=obj_id, user_id=g.user.get_id(),
dttm=datetime.now()))
count = 1
elif action == 'unselect':
for fav in favs:
session.delete(fav)
else:
count = len(favs)
session.commit()
return Response(
json.dumps({'count': count}),
mimetype="application/json")
@has_access
@expose("/slice/<slice_id>/")
def slice(self, slice_id):
"""Redirects a request for a slice id to its corresponding URL"""
session = db.session()
qry = session.query(models.Slice).filter_by(id=int(slice_id))
slc = qry.first()
if slc:
return redirect(slc.slice_url)
else:
flash("The specified slice could not be found", "danger")
return redirect('/slicemodelview/list/')
@has_access
@expose("/dashboard/<dashboard_id>/")
def dashboard(self, dashboard_id):
"""Server side rendering for a dashboard"""
session = db.session()
qry = session.query(models.Dashboard)
if dashboard_id.isdigit():
qry = qry.filter_by(id=int(dashboard_id))
else:
qry = qry.filter_by(slug=dashboard_id)
templates = session.query(models.CssTemplate).all()
dash = qry.first()
# Hack to log the dashboard_id properly, even when getting a slug
@log_this
def dashboard(**kwargs): # noqa
pass
dashboard(dashboard_id=dash.id)
return self.render_template(
"caravel/dashboard.html", dashboard=dash,
user_id=g.user.get_id(),
templates=templates,
dash_save_perm=self.can_access('can_save_dash', 'Caravel'),
dash_edit_perm=check_ownership(dash, raise_if_false=False))
@has_access
@expose("/sql/<database_id>/")
@log_this
def sql(self, database_id):
if (
not self.can_access(
'all_datasource_access', 'all_datasource_access')):
flash(
"This view requires the `all_datasource_access` "
"permission", "danger")
return redirect("/tablemodelview/list/")
mydb = db.session.query(
models.Database).filter_by(id=database_id).first()
engine = mydb.get_sqla_engine()
tables = engine.table_names()
table_name = request.args.get('table_name')
return self.render_template(
"caravel/sql.html",
tables=tables,
table_name=table_name,
db=mydb)
@has_access
@expose("/table/<database_id>/<table_name>/")
@log_this
def table(self, database_id, table_name):
mydb = db.session.query(
models.Database).filter_by(id=database_id).first()
cols = mydb.get_columns(table_name)
df = pd.DataFrame([(c['name'], c['type']) for c in cols])
df.columns = ['col', 'type']
tbl_cls = (
"dataframe table table-striped table-bordered "
"table-condensed sql_results").split(' ')
return self.render_template(
"caravel/ajah.html",
content=df.to_html(
index=False,
na_rep='',
classes=tbl_cls))
@has_access
@expose("/select_star/<database_id>/<table_name>/")
@log_this
def select_star(self, database_id, table_name):
mydb = db.session.query(
models.Database).filter_by(id=database_id).first()
t = mydb.get_table(table_name)
fields = ", ".join(
[c.name for c in t.columns] or "*")
s = "SELECT\n{}\nFROM {}".format(fields, table_name)
return self.render_template(
"caravel/ajah.html",
content=s
)
@has_access
@expose("/runsql/", methods=['POST', 'GET'])
@log_this
def runsql(self):
"""Runs arbitrary sql and returns and html table"""
session = db.session()
limit = 1000
data = json.loads(request.form.get('data'))
sql = data.get('sql')
database_id = data.get('database_id')
mydb = session.query(models.Database).filter_by(id=database_id).first()
if (
not self.can_access(
'all_datasource_access', 'all_datasource_access')):
raise utils.CaravelSecurityException(_(
"This view requires the `all_datasource_access` permission"))
content = ""
if mydb:
eng = mydb.get_sqla_engine()
if limit:
sql = sql.strip().strip(';')
qry = (
select('*')
.select_from(TextAsFrom(text(sql), ['*']).alias('inner_qry'))
.limit(limit)
)
sql = str(qry.compile(eng, compile_kwargs={"literal_binds": True}))
try:
df = pd.read_sql_query(sql=sql, con=eng)
content = df.to_html(
index=False,
na_rep='',
classes=(
"dataframe table table-striped table-bordered "
"table-condensed sql_results").split(' '))
except Exception as e:
content = (
'<div class="alert alert-danger">'
"{}</div>"
                ).format(str(e))
session.commit()
return content
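    # For reference, a sketch of the rewrite runsql() performs before executing:
    # the posted statement is stripped of a trailing ';' and wrapped in a
    # limited subquery, so "SELECT name FROM users" is compiled to roughly
    #
    #     SELECT * FROM (SELECT name FROM users) AS inner_qry LIMIT 1000
    #
    # (the exact SQL text depends on the dialect of the target engine).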
@has_access
@expose("/refresh_datasources/")
def refresh_datasources(self):
"""endpoint that refreshes druid datasources metadata"""
session = db.session()
for cluster in session.query(models.DruidCluster).all():
cluster_name = cluster.cluster_name
try:
cluster.refresh_datasources()
except Exception as e:
flash(
"Error while processing cluster '{}'\n{}".format(
cluster_name, str(e)),
"danger")
logging.exception(e)
return redirect('/druidclustermodelview/list/')
cluster.metadata_last_refreshed = datetime.now()
flash(
"Refreshed metadata from cluster "
"[" + cluster.cluster_name + "]",
'info')
session.commit()
return redirect("/druiddatasourcemodelview/list/")
@app.errorhandler(500)
def show_traceback(self):
error_msg = get_error_msg()
return render_template(
'caravel/traceback.html',
error_msg=error_msg,
title=ascii_art.stacktrace,
art=ascii_art.error), 500
@has_access
@expose("/welcome")
def welcome(self):
"""Personalized welcome page"""
return self.render_template('caravel/welcome.html', utils=utils)
appbuilder.add_view_no_menu(Caravel)
if config['DRUID_IS_ACTIVE']:
appbuilder.add_link(
_("Refresh Druid Metadata"),
href='/caravel/refresh_datasources/',
category='Sources',
category_label=__("Sources"),
category_icon='fa-database',
icon="fa-cog")
class CssTemplateModelView(CaravelModelView, DeleteMixin):
datamodel = SQLAInterface(models.CssTemplate)
list_columns = ['template_name']
edit_columns = ['template_name', 'css']
add_columns = edit_columns
appbuilder.add_separator("Sources")
appbuilder.add_view(
CssTemplateModelView,
"CSS Templates",
label=__("CSS Templates"),
icon="fa-css3",
category="Sources",
category_label=__("Sources"),
category_icon='')
# ---------------------------------------------------------------------
# Redirecting URL from previous names
class RegexConverter(BaseConverter):
def __init__(self, url_map, *items):
super(RegexConverter, self).__init__(url_map)
self.regex = items[0]
app.url_map.converters['regex'] = RegexConverter
@app.route('/<regex("panoramix\/.*"):url>')
def panoramix(url): # noqa
return redirect(request.full_path.replace('panoramix', 'caravel'))
@app.route('/<regex("dashed\/.*"):url>')
def dashed(url): # noqa
return redirect(request.full_path.replace('dashed', 'caravel'))
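# Example of the legacy redirects above (the path is illustrative): a request
# to /panoramix/dashboard/1/?foo=bar is redirected to
# /caravel/dashboard/1/?foo=bar, and /dashed/... is handled the same way;
# request.full_path preserves the query string.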
# ---------------------------------------------------------------------
|
|
"""Placeholder module, that's where the smart things happen."""
from pages.widgets_registry import get_widget
from pages import settings
from pages.models import Content, Media
from pages.widgets import ImageInput, FileInput
from pages.utils import slugify
from django import forms
from django.core.mail import send_mail
from django import template
from django.template import TemplateSyntaxError
from django.core.files.storage import default_storage
from django.forms import Textarea, ImageField, CharField, FileField
from django.forms import TextInput
from django.conf import settings as global_settings
from django.utils.translation import ugettext_lazy as _
from django.utils.safestring import mark_safe
from django.utils.text import unescape_string_literal
from django.template.loader import render_to_string
from django.template import RequestContext
from django.core.files.uploadedfile import UploadedFile
import logging
import os
import time
import copy
import uuid
logging.basicConfig()
logger = logging.getLogger("pages")
PLACEHOLDER_ERROR = _("[Placeholder %(name)s had syntax error: %(error)s]")
def parse_placeholder(parser, token):
"""Parse the `PlaceholderNode` parameters.
Return a tuple with the name and parameters."""
params = {}
bits = token.split_contents()
count = len(bits)
error_string = '%r tag requires at least one argument' % bits[0]
if count <= 1:
raise TemplateSyntaxError(error_string)
try:
name = unescape_string_literal(bits[1])
except ValueError:
name = bits[1]
remaining = bits[2:]
simple_options = ['parsed', 'inherited', 'untranslated', 'shared', 'block']
param_options = ['as', 'on', 'with', 'section']
all_options = simple_options + param_options
while remaining:
bit = remaining[0]
if bit not in all_options:
raise TemplateSyntaxError(
"%r is not an correct option for a placeholder" % bit)
if bit in param_options:
if len(remaining) < 2:
raise TemplateSyntaxError(
"Placeholder option '%s' need a parameter" % bit)
if bit == 'as':
params['as_varname'] = remaining[1]
if bit == 'with':
params['widget'] = remaining[1]
if bit == 'on':
params['page'] = remaining[1]
if bit == 'section':
params['section'] = unescape_string_literal(remaining[1])
remaining = remaining[2:]
elif bit == 'parsed':
params['parsed'] = True
remaining = remaining[1:]
elif bit == 'inherited':
params['inherited'] = True
remaining = remaining[1:]
elif bit == 'untranslated':
params['untranslated'] = True
remaining = remaining[1:]
elif bit == 'shared':
params['shared'] = True
remaining = remaining[1:]
elif bit == 'block':
remaining = remaining[1:]
nodelist = parser.parse(('endplaceholder',))
parser.delete_first_token()
params['nodelist'] = nodelist
return name, params
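# A few illustrative usages of the template tag this parser backs (assuming the
# tag is registered as `placeholder`, as in django-page-cms; the placeholder
# names, the `some_page` variable and the `RichTextarea` widget name are
# examples only):
#
#     {% placeholder "body" parsed %}
#     {% placeholder "teaser" on some_page as teaser_content %}
#     {% placeholder "news" with RichTextarea untranslated %}
#     {% placeholder "footer" shared block %}Default footer{% endplaceholder %}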
class PlaceholderNode(template.Node):
"""This template node is used to output and save page content and
dynamically generate input fields in the admin.
:param name: the name of the placeholder you want to show/create
:param page: the optional page object
:param widget: the widget you want to use in the admin interface. Take
a look into :mod:`pages.widgets` to see which widgets
are available.
:param parsed: if the ``parsed`` word is given, the content of the
placeholder is evaluated as template code, within the current
context.
:param as_varname: if ``as_varname`` is defined, no value will be
returned. A variable will be created in the context
with the defined name.
:param inherited: inherit content from parent's pages.
:param untranslated: the placeholder's content is the same for
every language.
"""
field = CharField
widget = TextInput
def __init__(
self, name, page=None, widget=None, parsed=False,
as_varname=None, inherited=False, untranslated=False,
has_revision=True, section=None, shared=False, nodelist=None):
"""Gather parameters for the `PlaceholderNode`.
        These values must be thread safe and must not change between calls."""
self.page = page or 'current_page'
self.name = name
self.ctype = name.replace(" ", "_")
if widget:
self.widget = widget
self.parsed = parsed
self.inherited = inherited
self.untranslated = untranslated
self.as_varname = as_varname
self.section = section
self.shared = shared
self.nodelist = nodelist or [] # should be an iterable
self.found_in_block = None
def get_widget(self, page, language, fallback=Textarea):
"""Given the name of a placeholder return a `Widget` subclass
like Textarea or TextInput."""
if isinstance(self.widget, str):
widget = get_widget(self.widget)
else:
widget = self.widget
try:
return widget(page=page, language=language)
except:
pass
return widget()
def get_extra_data(self, data):
"""Get eventual extra data for this placeholder from the
admin form. This method is called when the Page is
saved in the admin and passed to the placeholder save
method."""
result = {}
for key in list(data.keys()):
if key.startswith(self.ctype + '-'):
new_key = key.replace(self.ctype + '-', '')
result[new_key] = data[key]
return result
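    # Illustrative example: for a placeholder whose ctype is 'body', admin form
    # data such as {'body-delete': 'on', 'title': 'x'} is reduced by
    # get_extra_data() to {'delete': 'on'}. FilePlaceholderNode.save() below
    # looks for the 'delete', 'revision' and 'selected' keys produced this way.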
def get_field(self, page, language, initial=None):
"""The field that will be shown within the admin."""
if self.parsed:
help_text = _('Note: This field is evaluated as template code.')
else:
help_text = ''
widget = self.get_widget(page, language)
label = _(self.name.replace("_", " ")).capitalize()
return self.field(
widget=widget, initial=initial, label=label,
help_text=help_text, required=False)
def save(self, page, language, data, change, extra_data=None):
"""Actually save the placeholder data into the Content object."""
# if this placeholder is untranslated, we save everything
# in the default language
if self.untranslated:
language = settings.PAGE_DEFAULT_LANGUAGE
if self.shared:
page = None
# the page is being changed
if change:
            # we need to create a new Content object if revisions are enabled
if(settings.PAGE_CONTENT_REVISION and self.name
not in settings.PAGE_CONTENT_REVISION_EXCLUDE_LIST):
Content.objects.create_content_if_changed(
page,
language,
self.ctype,
data
)
else:
Content.objects.set_or_create_content(
page,
language,
self.ctype,
data
)
# the page is being added
else:
Content.objects.set_or_create_content(
page,
language,
self.ctype,
data
)
def get_content(self, page_obj, lang, lang_fallback=True):
if self.untranslated:
lang = settings.PAGE_DEFAULT_LANGUAGE
lang_fallback = False
if self.shared:
return Content.objects.get_content(
None, lang, self.ctype, lang_fallback)
content = Content.objects.get_content(
page_obj, lang, self.ctype, lang_fallback)
if self.inherited and not content:
for ancestor in page_obj.get_ancestors():
content = Content.objects.get_content(
ancestor, lang,
self.ctype, lang_fallback)
if content:
break
return content
def get_lang(self, context):
if self.untranslated:
lang = settings.PAGE_DEFAULT_LANGUAGE
else:
lang = context.get('lang', settings.PAGE_DEFAULT_LANGUAGE)
return lang
def get_content_from_context(self, context):
if self.untranslated:
lang_fallback = False
else:
lang_fallback = True
if self.shared:
return self.get_content(
None,
self.get_lang(context),
lang_fallback)
if self.page not in context:
return ''
# current_page can be set to None
if not context[self.page]:
return ''
return self.get_content(
context[self.page],
self.get_lang(context),
lang_fallback)
def get_render_content(self, context):
if self.nodelist:
with context.push():
context['content'] = self.get_content_from_context(context)
output = self.nodelist.render(context)
return mark_safe(output)
return mark_safe(self.get_content_from_context(context))
def render_parsed(self, context, content):
try:
content_template = template.Template(content, name=self.name)
new_content = mark_safe(content_template.render(context))
except TemplateSyntaxError as error:
if global_settings.DEBUG:
new_content = PLACEHOLDER_ERROR % {
'name': self.name,
'error': error,
}
else:
new_content = ''
return new_content
def edit_tag(self):
return u"""<!--placeholder ;{};-->""".format(self.name)
def render(self, context):
"""Output the content of the `PlaceholdeNode` as a template."""
content = self.get_render_content(context)
request = context.get('request')
render_edit_tag = False
if request and request.user.is_staff and request.COOKIES.get('enable_edit_mode'):
render_edit_tag = True
if not content:
if not render_edit_tag:
return ''
return self.edit_tag()
if self.parsed:
content = self.render_parsed(context, content)
if self.as_varname is None:
if not render_edit_tag:
return content
return content + self.edit_tag()
context[self.as_varname] = content
return ''
def __repr__(self):
return "<Placeholder Node: %s>" % self.name
def get_filename(page, content_type, data):
"""
    Generate a stable filename based on the original filename of the upload.
"""
avoid_collision = uuid.uuid4().hex[:8]
name_parts = data.name.split('.')
if len(name_parts) > 1:
name = slugify('.'.join(name_parts[:-1]), allow_unicode=True)
ext = slugify(name_parts[-1])
name = name + '.' + ext
else:
name = slugify(data.name)
filename = os.path.join(
settings.PAGE_UPLOAD_ROOT,
'page_' + str(page.id),
content_type + '-' + avoid_collision + '-' + name
)
return filename
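# Illustrative result (values are made up): for page.id == 42, content type
# 'body' and an upload named "My Photo.jpg", get_filename() returns something
# like
#
#     <PAGE_UPLOAD_ROOT>/page_42/body-1a2b3c4d-my-photo.jpg
#
# where "1a2b3c4d" stands in for the random 8-character collision-avoidance
# token.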
class FilePlaceholderNode(PlaceholderNode):
"""A `PlaceholderNode` that saves one file on disk.
    The `PAGE_UPLOAD_ROOT` setting defines where the file is saved.
"""
def get_field(self, page, language, initial=None):
help_text = ""
widget = FileInput(page, language)
return FileField(
widget=widget,
initial=initial,
help_text=help_text,
required=False
)
def save(self, page, language, data, change, extra_data=None):
if self.shared:
page = None
        if extra_data and 'delete' in extra_data:
            return super(FilePlaceholderNode, self).save(
                page, language, "", change)
        if extra_data and 'revision' in extra_data:
            return super(FilePlaceholderNode, self).save(
                page, language, extra_data['revision'], change)
        if extra_data and 'selected' in extra_data and extra_data['selected']:
            return super(FilePlaceholderNode, self).save(
                page, language, extra_data['selected'], change)
filename = ''
if change and data:
# the image URL is posted if not changed
if not isinstance(data, UploadedFile):
return
filename = get_filename(page, self.ctype, data)
filename = default_storage.save(filename, data)
media = Media(url=filename)
media.save()
return super(FilePlaceholderNode, self).save(
page,
language,
filename,
change
)
class ImagePlaceholderNode(FilePlaceholderNode):
"""A `PlaceholderNode` that saves one image on disk.
    The `PAGE_UPLOAD_ROOT` setting defines where the image is saved.
"""
def get_field(self, page, language, initial=None):
help_text = ""
widget = ImageInput(page, language)
return ImageField(
widget=widget,
initial=initial,
help_text=help_text,
required=False
)
class ContactForm(forms.Form):
"""
Simple contact form
"""
email = forms.EmailField(label=_('Your email'))
subject = forms.CharField(
label=_('Subject'), max_length=150)
message = forms.CharField(
widget=forms.Textarea(), label=_('Your message'))
class ContactPlaceholderNode(PlaceholderNode):
"""A contact `PlaceholderNode` example."""
def render(self, context):
request = context.get('request', None)
if not request:
raise ValueError('request not available in the context.')
if request.method == 'POST':
form = ContactForm(request.POST)
if form.is_valid():
data = form.cleaned_data
recipients = [adm[1] for adm in global_settings.ADMINS]
try:
send_mail(
data['subject'], data['message'],
data['email'], recipients, fail_silently=False)
return _("Your email has been sent. Thank you.")
except:
return _("An error as occured: your email has not been sent.")
else:
form = ContactForm()
renderer = render_to_string(
'pages/contact.html', {'form': form}, RequestContext(request))
return mark_safe(renderer)
class JsonPlaceholderNode(PlaceholderNode):
"""
    A `PlaceholderNode` that tries to return a deserialized JSON object
in the template.
"""
def get_render_content(self, context):
import json
content = self.get_content_from_context(context)
try:
return json.loads(str(content))
except:
logger.warning("JsonPlaceholderNode: coudn't decode json")
return content
class MarkdownPlaceholderNode(PlaceholderNode):
"""
    A `PlaceholderNode` that returns HTML rendered from Markdown content
"""
widget = Textarea
def render(self, context):
"""Render markdown."""
import markdown
content = self.get_content_from_context(context)
return markdown.markdown(content)
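# A minimal sketch of a custom placeholder built on the classes above; the
# class name and behaviour are illustrative additions, not part of the
# original module.
class UpperCasePlaceholderNode(PlaceholderNode):
    """Example `PlaceholderNode` subclass that upper-cases its content."""

    def get_render_content(self, context):
        # Reuse the content lookup from PlaceholderNode, then transform it.
        content = self.get_content_from_context(context)
        return mark_safe(str(content).upper())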
|
|
"""
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import os
import logging
from datetime import timedelta
from django.urls import reverse
from urllib.parse import quote, unquote_plus
from minio import Minio
from gwells.settings.base import get_env_variable
logger = logging.getLogger(__name__)
class MinioClient():
""" Load a minio client to handle public and/or private file requests
Requires environment variables:
S3_HOST: hostname for public document storage
S3_ROOT_BUCKET: public Wells document storage bucket
        S3_AQUIFER_BUCKET: public Aquifers document storage bucket
MINIO_ACCESS_KEY: private storage account
MINIO_SECRET_KEY: private storage secret
S3_PRIVATE_HOST: private storage host (must be specified even if same as public storage)
S3_PRIVATE_BUCKET: private storage bucket
The optional "request" param can be set to the request that requires the minio client.
This allows generation of full URIs including domain name.
This is only required for generating private, local links.
e.g.:
def get(self, request, **kwargs):
client = MinioClient(request)
"""
def __init__(self, request=None, disable_public=False, disable_private=False):
self.request = request
if not disable_public:
self.public_host = get_env_variable('S3_HOST', strict=True)
self.public_bucket = get_env_variable(
'S3_ROOT_BUCKET', strict=True)
self.public_aquifers_bucket = get_env_variable('S3_AQUIFER_BUCKET', default_value="aquifer-docs")
self.public_drillers_bucket = get_env_variable('S3_REGISTRANT_BUCKET', default_value="driller-docs")
self.public_access_key = get_env_variable(
'S3_PUBLIC_ACCESS_KEY', warn=False)
self.public_secret_key = get_env_variable(
'S3_PUBLIC_SECRET_KEY', warn=False)
self.use_secure = int(get_env_variable(
'S3_USE_SECURE', 1, warn=False))
self.public_client = Minio(
self.public_host,
access_key=self.public_access_key,
secret_key=self.public_secret_key,
secure=self.use_secure
)
if not disable_private:
self.private_client = self.create_private_client()
self.disable_private = disable_private
def create_private_client(self):
self.private_access_key = get_env_variable('MINIO_ACCESS_KEY')
self.private_secret_key = get_env_variable('MINIO_SECRET_KEY')
self.private_host = get_env_variable('S3_PRIVATE_HOST')
self.private_bucket = get_env_variable('S3_PRIVATE_BUCKET')
self.private_aquifers_bucket = get_env_variable('S3_PRIVATE_AQUIFER_BUCKET', default_value="aquifer-docs")
self.private_drillers_bucket = get_env_variable('S3_PRIVATE_REGISTRANT_BUCKET', default_value="driller-docs")
return Minio(
self.private_host,
access_key=self.private_access_key,
secret_key=self.private_secret_key,
secure=self.use_secure
)
def get_private_file(self, object_name: str, bucket_name):
""" Generates a link to a private document with name "object_name" (name includes prefixes) """
return self.private_client.presigned_get_object(
bucket_name,
unquote_plus(object_name),
expires=timedelta(minutes=12))
def create_url(self, obj, host, bucket_name, private=False):
"""Generate a URL for a file/document
obj: the file object returned by Minio.list_objects()
host: the host where the file was found
        bucket_name: the bucket the file belongs to (used for private files)
        private: permissions for private files are handled externally; when
            private=True, a presigned external link is generated instead.
"""
if private:
return self.get_private_file(obj.object_name, bucket_name)
return 'https://{}/{}/{}'.format(
host,
quote(obj.bucket_name),
quote(unquote_plus(obj.object_name))
)
def create_url_list(self, objects, host, bucket_name, private=False):
"""Generate a list of documents with name and url"""
urls = list(
map(
lambda document: {
'url': self.create_url(document, host, bucket_name, private),
# split on last occurrence of '/' and return last item (supports any or no prefixes)
'name': unquote_plus(document.object_name).rsplit('/', 1)[-1]
}, objects)
)
return urls
def get_bucket_folder(self, document_id, resource='well'):
"""Helper function to determine the folder for a given resource"""
if resource == 'well':
            folder = '{:0<6}'.format('{:0>2}'.format(document_id // 10000))
elif resource == 'aquifer':
            folder = '{:0<5}'.format('{:0>3}'.format(document_id // 100))
elif resource == 'driller':
folder = ""
else:
folder = ""
return folder
def get_prefix(self, document_id, resource='well'):
"""Helper function to determine the prefix for a given resource"""
folder = self.get_bucket_folder(document_id, resource)
if resource == 'well':
prefix = str(folder + '/WTN ' + str(document_id) + '_')
elif resource == 'aquifer':
prefix = str(folder + '/AQ_' + str('{:0<5}'.format('{:0>5}'.format(document_id))) + '_')
elif resource == 'driller':
prefix = "P_%s" % str(document_id)
else:
prefix = ""
return prefix
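    # Worked examples for the two helpers above (document ids are made up):
    #   get_bucket_folder(23456, 'well')  -> '020000'  (23456 // 10000 == 2)
    #   get_prefix(23456, 'well')         -> '020000/WTN 23456_'
    #   get_bucket_folder(123, 'aquifer') -> '00100'   (123 // 100 == 1)
    #   get_prefix(123, 'aquifer')        -> '00100/AQ_00123_'
    #   get_prefix(456, 'driller')        -> 'P_456'   (no folder component)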
def format_object_name(self, object_name: str, document_id: int, resource='well'):
"""Wrapper function for getting an object name, with path and prefix, for an object and resource type"""
return self.get_prefix(document_id, resource) + object_name
def get_documents(self, document_id: int, resource='well', include_private=False):
"""Retrieves a list of available documents for a well or aquifer"""
# prefix well tag numbers with a 6 digit "folder" id
# e.g. WTA 23456 goes into prefix 020000/
prefix = self.get_prefix(document_id, resource)
if resource == 'well':
public_bucket = self.public_bucket
elif resource == 'aquifer':
public_bucket = self.public_aquifers_bucket
elif resource == 'driller':
public_bucket = self.public_drillers_bucket
objects = {}
# provide all requests with a "public" collection of documents
if self.public_client:
pub_objects = []
try:
pub_objects = self.create_url_list(
self.public_client.list_objects(
public_bucket, prefix=prefix, recursive=True),
self.public_host, public_bucket)
except:
logger.error(
"Could not retrieve files from public file server")
objects['public'] = pub_objects
# authenticated requests also receive a "private" collection
if include_private and not self.disable_private:
if resource == 'well':
private_bucket = self.private_bucket
elif resource == 'aquifer':
private_bucket = self.private_aquifers_bucket
elif resource == 'driller':
private_bucket = self.private_drillers_bucket
priv_objects = []
try:
priv_objects = self.create_url_list(
self.private_client.list_objects(
private_bucket, prefix=prefix, recursive=True),
self.private_host, private_bucket, private=True)
except:
logger.error(
"Could not retrieve files from private file server", exc_info=sys.exc_info())
objects['private'] = priv_objects
return objects
def get_presigned_put_url(self, object_name, bucket_name=None, private=False):
"""Retrieves the a presigned URL for putting objects into an S3 source"""
if private:
if bucket_name is None:
bucket_name = self.private_bucket
key = self.private_client.presigned_put_object(
bucket_name, object_name, expires=timedelta(minutes=5))
else:
if bucket_name is None:
bucket_name = self.public_bucket
key = self.public_client.presigned_put_object(
bucket_name, object_name, expires=timedelta(minutes=5))
return key
def delete_document(self, object_name, bucket_name=None, private=False):
if private:
if bucket_name is None:
bucket_name = self.private_bucket
self.private_client.remove_object(bucket_name, object_name)
else:
if bucket_name is None:
bucket_name = self.public_bucket
self.public_client.remove_object(bucket_name, object_name)
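# Minimal usage sketch (assumes the S3_* / MINIO_* environment variables listed
# in the class docstring are configured; the well tag number is made up):
#
#     client = MinioClient(request=None, disable_private=True)
#     docs = client.get_documents(113169, resource='well')
#     for doc in docs['public']:
#         print(doc['name'], doc['url'])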
|
|
# Python implementation of the MySQL client-server protocol
# http://dev.mysql.com/doc/internals/en/client-server-protocol.html
# Error codes:
# http://dev.mysql.com/doc/refman/5.5/en/error-messages-client.html
from __future__ import print_function
from ._compat import PY2, range_type, text_type, str_type, JYTHON, IRONPYTHON
DEBUG = False
import errno
from functools import partial
import hashlib
import io
import os
import socket
import struct
import sys
try:
import ssl
SSL_ENABLED = True
except ImportError:
ssl = None
SSL_ENABLED = False
if PY2:
import ConfigParser as configparser
else:
import configparser
try:
import getpass
DEFAULT_USER = getpass.getuser()
del getpass
except ImportError:
DEFAULT_USER = None
from .charset import MBLENGTH, charset_by_name, charset_by_id
from .cursors import Cursor
from .constants import CLIENT, COMMAND, FIELD_TYPE, SERVER_STATUS
from .util import byte2int, int2byte
from .converters import escape_item, encoders, decoders, escape_string
from .err import (
raise_mysql_exception, Warning, Error,
InterfaceError, DataError, DatabaseError, OperationalError,
IntegrityError, InternalError, NotSupportedError, ProgrammingError)
_py_version = sys.version_info[:2]
# socket.makefile() in Python 2 is not usable here because it is very
# inefficient and has poor timeout behavior.
# XXX: ._socketio doesn't work under IronPython.
if _py_version == (2, 7) and not IRONPYTHON:
# read method of file-like returned by sock.makefile() is very slow.
# So we copy io-based one from Python 3.
from ._socketio import SocketIO
def _makefile(sock, mode):
return io.BufferedReader(SocketIO(sock, mode))
elif _py_version == (2, 6):
# Python 2.6 doesn't have fast io module.
# So we make original one.
class SockFile(object):
def __init__(self, sock):
self._sock = sock
def read(self, n):
read = self._sock.recv(n)
if len(read) == n:
return read
while True:
data = self._sock.recv(n-len(read))
if not data:
return read
read += data
if len(read) == n:
return read
def _makefile(sock, mode):
assert mode == 'rb'
return SockFile(sock)
else:
# socket.makefile in Python 3 is nice.
def _makefile(sock, mode):
return sock.makefile(mode)
TEXT_TYPES = set([
FIELD_TYPE.BIT,
FIELD_TYPE.BLOB,
FIELD_TYPE.LONG_BLOB,
FIELD_TYPE.MEDIUM_BLOB,
FIELD_TYPE.STRING,
FIELD_TYPE.TINY_BLOB,
FIELD_TYPE.VAR_STRING,
FIELD_TYPE.VARCHAR])
sha_new = partial(hashlib.new, 'sha1')
NULL_COLUMN = 251
UNSIGNED_CHAR_COLUMN = 251
UNSIGNED_SHORT_COLUMN = 252
UNSIGNED_INT24_COLUMN = 253
UNSIGNED_INT64_COLUMN = 254
UNSIGNED_CHAR_LENGTH = 1
UNSIGNED_SHORT_LENGTH = 2
UNSIGNED_INT24_LENGTH = 3
UNSIGNED_INT64_LENGTH = 8
DEFAULT_CHARSET = 'latin1'
MAX_PACKET_LEN = 2**24-1
def dump_packet(data):
def is_ascii(data):
if 65 <= byte2int(data) <= 122:
if isinstance(data, int):
return chr(data)
return data
return '.'
try:
print("packet length:", len(data))
print("method call[1]:", sys._getframe(1).f_code.co_name)
print("method call[2]:", sys._getframe(2).f_code.co_name)
print("method call[3]:", sys._getframe(3).f_code.co_name)
print("method call[4]:", sys._getframe(4).f_code.co_name)
print("method call[5]:", sys._getframe(5).f_code.co_name)
print("-" * 88)
except ValueError:
pass
dump_data = [data[i:i+16] for i in range_type(0, min(len(data), 256), 16)]
for d in dump_data:
print(' '.join(map(lambda x: "{:02X}".format(byte2int(x)), d)) +
' ' * (16 - len(d)) + ' ' * 2 +
' '.join(map(lambda x: "{}".format(is_ascii(x)), d)))
print("-" * 88)
print()
def _scramble(password, message):
if not password:
return b'\0'
if DEBUG: print('password=' + password)
stage1 = sha_new(password).digest()
stage2 = sha_new(stage1).digest()
s = sha_new()
s.update(message)
s.update(stage2)
result = s.digest()
return _my_crypt(result, stage1)
def _my_crypt(message1, message2):
length = len(message1)
result = struct.pack('B', length)
for i in range_type(length):
x = (struct.unpack('B', message1[i:i+1])[0] ^
struct.unpack('B', message2[i:i+1])[0])
result += struct.pack('B', x)
return result
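# Put differently: for MySQL's native password authentication the reply sent to
# the server is SHA1(password) XOR SHA1(challenge + SHA1(SHA1(password))),
# length-prefixed by _my_crypt(). Illustrative call (values are made up; the
# server normally supplies a 20-byte challenge):
#
#     token = _scramble(b'secret', b'01234567890123456789')
#     assert len(token) == 21  # 1 length byte + 20 XOR-ed digest bytes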
# old_passwords support ported from libmysql/password.c
SCRAMBLE_LENGTH_323 = 8
class RandStruct_323(object):
def __init__(self, seed1, seed2):
self.max_value = 0x3FFFFFFF
self.seed1 = seed1 % self.max_value
self.seed2 = seed2 % self.max_value
def my_rnd(self):
self.seed1 = (self.seed1 * 3 + self.seed2) % self.max_value
self.seed2 = (self.seed1 + self.seed2 + 33) % self.max_value
return float(self.seed1) / float(self.max_value)
def _scramble_323(password, message):
hash_pass = _hash_password_323(password)
hash_message = _hash_password_323(message[:SCRAMBLE_LENGTH_323])
hash_pass_n = struct.unpack(">LL", hash_pass)
hash_message_n = struct.unpack(">LL", hash_message)
rand_st = RandStruct_323(hash_pass_n[0] ^ hash_message_n[0],
hash_pass_n[1] ^ hash_message_n[1])
outbuf = io.BytesIO()
for _ in range_type(min(SCRAMBLE_LENGTH_323, len(message))):
outbuf.write(int2byte(int(rand_st.my_rnd() * 31) + 64))
extra = int2byte(int(rand_st.my_rnd() * 31))
out = outbuf.getvalue()
outbuf = io.BytesIO()
for c in out:
outbuf.write(int2byte(byte2int(c) ^ byte2int(extra)))
return outbuf.getvalue()
def _hash_password_323(password):
nr = 1345345333
add = 7
nr2 = 0x12345671
for c in [byte2int(x) for x in password if x not in (' ', '\t')]:
nr^= (((nr & 63)+add)*c)+ (nr << 8) & 0xFFFFFFFF
nr2= (nr2 + ((nr2 << 8) ^ nr)) & 0xFFFFFFFF
add= (add + c) & 0xFFFFFFFF
r1 = nr & ((1 << 31) - 1) # kill sign bits
r2 = nr2 & ((1 << 31) - 1)
# pack
return struct.pack(">LL", r1, r2)
def pack_int24(n):
return struct.pack('<I', n)[:3]
def unpack_uint16(n):
return struct.unpack('<H', n[0:2])[0]
def unpack_int24(n):
return struct.unpack('<I', n + b'\0')[0]
def unpack_int32(n):
return struct.unpack('<I', n)[0]
def unpack_int64(n):
return struct.unpack('<Q', n)[0]
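# Round-trip example for the 3-byte helpers above (the value is arbitrary):
#
#     pack_int24(0xABCDEF)          == b'\xef\xcd\xab'
#     unpack_int24(b'\xef\xcd\xab') == 0xABCDEF
#
# pack_int24 keeps the three low-order little-endian bytes; unpack_int24 pads a
# zero byte back on before unpacking as an unsigned 32-bit integer.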
class MysqlPacket(object):
"""Representation of a MySQL response packet.
Provides an interface for reading/parsing the packet results.
"""
__slots__ = ('_position', '_data')
def __init__(self, data, encoding):
self._position = 0
self._data = data
def get_all_data(self):
return self._data
def read(self, size):
"""Read the first 'size' bytes in packet and advance cursor past them."""
result = self._data[self._position:(self._position+size)]
if len(result) != size:
error = ('Result length not requested length:\n'
'Expected=%s. Actual=%s. Position: %s. Data Length: %s'
% (size, len(result), self._position, len(self._data)))
if DEBUG:
print(error)
self.dump()
raise AssertionError(error)
self._position += size
return result
def read_all(self):
"""Read all remaining data in the packet.
(Subsequent read() will return errors.)
"""
result = self._data[self._position:]
self._position = None # ensure no subsequent read()
return result
def advance(self, length):
"""Advance the cursor in data buffer 'length' bytes."""
new_position = self._position + length
if new_position < 0 or new_position > len(self._data):
raise Exception('Invalid advance amount (%s) for cursor. '
'Position=%s' % (length, new_position))
self._position = new_position
def rewind(self, position=0):
"""Set the position of the data buffer cursor to 'position'."""
if position < 0 or position > len(self._data):
raise Exception("Invalid position to rewind cursor to: %s." % position)
self._position = position
def get_bytes(self, position, length=1):
"""Get 'length' bytes starting at 'position'.
Position is start of payload (first four packet header bytes are not
included) starting at index '0'.
No error checking is done. If requesting outside end of buffer
an empty string (or string shorter than 'length') may be returned!
"""
return self._data[position:(position+length)]
def read_length_encoded_integer(self):
"""Read a 'Length Coded Binary' number from the data buffer.
Length coded numbers can be anywhere from 1 to 9 bytes depending
on the value of the first byte.
"""
c = ord(self.read(1))
if c == NULL_COLUMN:
return None
if c < UNSIGNED_CHAR_COLUMN:
return c
elif c == UNSIGNED_SHORT_COLUMN:
return unpack_uint16(self.read(UNSIGNED_SHORT_LENGTH))
elif c == UNSIGNED_INT24_COLUMN:
return unpack_int24(self.read(UNSIGNED_INT24_LENGTH))
elif c == UNSIGNED_INT64_COLUMN:
return unpack_int64(self.read(UNSIGNED_INT64_LENGTH))
def read_length_coded_string(self):
"""Read a 'Length Coded String' from the data buffer.
A 'Length Coded String' consists first of a length coded
(unsigned, positive) integer represented in 1-9 bytes followed by
that many bytes of binary data. (For example "cat" would be "3cat".)
"""
length = self.read_length_encoded_integer()
if length is None:
return None
return self.read(length)
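    # For reference, the length-encoded integer forms handled above, keyed by
    # the first byte of the value:
    #   0x00-0xFA : the value itself (single byte)
    #   0xFB      : NULL column
    #   0xFC      : value in the following 2 bytes
    #   0xFD      : value in the following 3 bytes
    #   0xFE      : value in the following 8 bytes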
def is_ok_packet(self):
return self._data[0:1] == b'\0'
def is_eof_packet(self):
# http://dev.mysql.com/doc/internals/en/generic-response-packets.html#packet-EOF_Packet
# Caution: \xFE may be LengthEncodedInteger.
# If \xFE is LengthEncodedInteger header, 8bytes followed.
return len(self._data) < 9 and self._data[0:1] == b'\xfe'
def is_resultset_packet(self):
field_count = ord(self._data[0:1])
return 1 <= field_count <= 250
def is_load_local_packet(self):
return self._data[0:1] == b'\xfb'
def is_error_packet(self):
return self._data[0:1] == b'\xff'
def check_error(self):
if self.is_error_packet():
self.rewind()
self.advance(1) # field_count == error (we already know that)
errno = unpack_uint16(self.read(2))
if DEBUG: print("errno =", errno)
raise_mysql_exception(self._data)
def dump(self):
dump_packet(self._data)
class FieldDescriptorPacket(MysqlPacket):
"""A MysqlPacket that represents a specific column's metadata in the result.
Parsing is automatically done and the results are exported via public
attributes on the class such as: db, table_name, name, length, type_code.
"""
def __init__(self, data, encoding):
MysqlPacket.__init__(self, data, encoding)
self.__parse_field_descriptor(encoding)
def __parse_field_descriptor(self, encoding):
"""Parse the 'Field Descriptor' (Metadata) packet.
This is compatible with MySQL 4.1+ (not compatible with MySQL 4.0).
"""
self.catalog = self.read_length_coded_string()
self.db = self.read_length_coded_string()
self.table_name = self.read_length_coded_string().decode(encoding)
self.org_table = self.read_length_coded_string().decode(encoding)
self.name = self.read_length_coded_string().decode(encoding)
self.org_name = self.read_length_coded_string().decode(encoding)
self.advance(1) # non-null filler
self.charsetnr = struct.unpack('<H', self.read(2))[0]
self.length = struct.unpack('<I', self.read(4))[0]
self.type_code = byte2int(self.read(1))
self.flags = struct.unpack('<H', self.read(2))[0]
self.scale = byte2int(self.read(1)) # "decimals"
self.advance(2) # filler (always 0x00)
# 'default' is a length coded binary and is still in the buffer?
# not used for normal result sets...
def description(self):
"""Provides a 7-item tuple compatible with the Python PEP249 DB Spec."""
return (
self.name,
self.type_code,
None, # TODO: display_length; should this be self.length?
self.get_column_length(), # 'internal_size'
self.get_column_length(), # 'precision' # TODO: why!?!?
self.scale,
self.flags % 2 == 0)
def get_column_length(self):
if self.type_code == FIELD_TYPE.VAR_STRING:
mblen = MBLENGTH.get(self.charsetnr, 1)
return self.length // mblen
return self.length
def __str__(self):
return ('%s %r.%r.%r, type=%s, flags=%x'
% (self.__class__, self.db, self.table_name, self.name,
self.type_code, self.flags))
class OKPacketWrapper(object):
"""
OK Packet Wrapper. It uses an existing packet object, and wraps
around it, exposing useful variables while still providing access
to the original packet objects variables and methods.
"""
def __init__(self, from_packet):
if not from_packet.is_ok_packet():
raise ValueError('Cannot create ' + str(self.__class__.__name__) +
' object from invalid packet type')
self.packet = from_packet
self.packet.advance(1)
self.affected_rows = self.packet.read_length_encoded_integer()
self.insert_id = self.packet.read_length_encoded_integer()
self.server_status = struct.unpack('<H', self.packet.read(2))[0]
self.warning_count = struct.unpack('<H', self.packet.read(2))[0]
self.message = self.packet.read_all()
self.has_next = self.server_status & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS
def __getattr__(self, key):
return getattr(self.packet, key)
class EOFPacketWrapper(object):
"""
EOF Packet Wrapper. It uses an existing packet object, and wraps
around it, exposing useful variables while still providing access
to the original packet objects variables and methods.
"""
def __init__(self, from_packet):
if not from_packet.is_eof_packet():
raise ValueError(
"Cannot create '{0}' object from invalid packet type".format(
self.__class__))
self.packet = from_packet
from_packet.advance(1)
self.warning_count = struct.unpack('<h', from_packet.read(2))[0]
self.server_status = struct.unpack('<h', self.packet.read(2))[0]
if DEBUG: print("server_status=", self.server_status)
self.has_next = self.server_status & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS
def __getattr__(self, key):
return getattr(self.packet, key)
class LoadLocalPacketWrapper(object):
"""
Load Local Packet Wrapper. It uses an existing packet object, and wraps
around it, exposing useful variables while still providing access
to the original packet objects variables and methods.
"""
def __init__(self, from_packet):
if not from_packet.is_load_local_packet():
raise ValueError(
"Cannot create '{0}' object from invalid packet type".format(
self.__class__))
self.packet = from_packet
self.filename = self.packet.get_all_data()[1:]
if DEBUG: print("filename=", self.filename)
def __getattr__(self, key):
return getattr(self.packet, key)
class Connection(object):
"""
Representation of a socket with a mysql server.
The proper way to get an instance of this class is to call
connect().
"""
socket = None
def __init__(self, host="localhost", user=None, password="",
database=None, port=3306, unix_socket=None,
charset='', sql_mode=None,
read_default_file=None, conv=decoders, use_unicode=None,
client_flag=0, cursorclass=Cursor, init_command=None,
connect_timeout=None, ssl=None, read_default_group=None,
compress=None, named_pipe=None, no_delay=False,
autocommit=False, db=None, passwd=None, local_infile=False):
"""
Establish a connection to the MySQL database. Accepts several
arguments:
host: Host where the database server is located
user: Username to log in as
password: Password to use.
database: Database to use, None to not use a particular one.
port: MySQL port to use, default is usually OK.
unix_socket: Optionally, you can use a unix socket rather than TCP/IP.
charset: Charset you want to use.
sql_mode: Default SQL_MODE to use.
read_default_file:
Specifies my.cnf file to read these parameters from under the [client] section.
conv:
Decoders dictionary to use instead of the default one.
This is used to provide custom marshalling of types. See converters.
use_unicode:
Whether or not to default to unicode strings.
This option defaults to true for Py3k.
client_flag: Custom flags to send to MySQL. Find potential values in constants.CLIENT.
cursorclass: Custom cursor class to use.
init_command: Initial SQL statement to run when connection is established.
connect_timeout: Timeout before throwing an exception when connecting.
ssl:
A dict of arguments similar to mysql_ssl_set()'s parameters.
For now the capath and cipher arguments are not supported.
read_default_group: Group to read from in the configuration file.
        compress: Not supported
named_pipe: Not supported
no_delay: Disable Nagle's algorithm on the socket
autocommit: Autocommit mode. None means use server default. (default: False)
local_infile: Boolean to enable the use of LOAD DATA LOCAL command. (default: False)
db: Alias for database. (for compatibility to MySQLdb)
passwd: Alias for password. (for compatibility to MySQLdb)
"""
if use_unicode is None and sys.version_info[0] > 2:
use_unicode = True
if db is not None and database is None:
database = db
if passwd is not None and not password:
password = passwd
if compress or named_pipe:
raise NotImplementedError("compress and named_pipe arguments are not supported")
if local_infile:
client_flag |= CLIENT.LOCAL_FILES
if ssl and ('capath' in ssl or 'cipher' in ssl):
raise NotImplementedError('ssl options capath and cipher are not supported')
self.ssl = False
if ssl:
if not SSL_ENABLED:
raise NotImplementedError("ssl module not found")
self.ssl = True
client_flag |= CLIENT.SSL
for k in ('key', 'cert', 'ca'):
v = None
if k in ssl:
v = ssl[k]
setattr(self, k, v)
if read_default_group and not read_default_file:
if sys.platform.startswith("win"):
read_default_file = "c:\\my.ini"
else:
read_default_file = "/etc/my.cnf"
if read_default_file:
if not read_default_group:
read_default_group = "client"
cfg = configparser.RawConfigParser()
cfg.read(os.path.expanduser(read_default_file))
def _config(key, default):
try:
return cfg.get(read_default_group, key)
except Exception:
return default
user = _config("user", user)
password = _config("password", password)
host = _config("host", host)
database = _config("database", database)
unix_socket = _config("socket", unix_socket)
port = int(_config("port", port))
charset = _config("default-character-set", charset)
self.host = host
self.port = port
self.user = user or DEFAULT_USER
self.password = password or ""
self.db = database
self.no_delay = no_delay
self.unix_socket = unix_socket
if charset:
self.charset = charset
self.use_unicode = True
else:
self.charset = DEFAULT_CHARSET
self.use_unicode = False
if use_unicode is not None:
self.use_unicode = use_unicode
self.encoding = charset_by_name(self.charset).encoding
client_flag |= CLIENT.CAPABILITIES | CLIENT.MULTI_STATEMENTS
if self.db:
client_flag |= CLIENT.CONNECT_WITH_DB
self.client_flag = client_flag
self.cursorclass = cursorclass
self.connect_timeout = connect_timeout
self._result = None
self._affected_rows = 0
self.host_info = "Not connected"
#: specified autocommit mode. None means use server default.
self.autocommit_mode = autocommit
self.encoders = encoders # Need for MySQLdb compatibility.
self.decoders = conv
self.sql_mode = sql_mode
self.init_command = init_command
self._connect()
def close(self):
''' Send the quit message and close the socket '''
if self.socket is None:
raise Error("Already closed")
send_data = struct.pack('<i', 1) + int2byte(COMMAND.COM_QUIT)
try:
self._write_bytes(send_data)
except Exception:
pass
finally:
sock = self.socket
self.socket = None
self._rfile = None
sock.close()
@property
def open(self):
return self.socket is not None
def __del__(self):
if self.socket:
try:
self.socket.close()
except:
pass
self.socket = None
self._rfile = None
def autocommit(self, value):
self.autocommit_mode = bool(value)
current = self.get_autocommit()
if value != current:
self._send_autocommit_mode()
def get_autocommit(self):
return bool(self.server_status &
SERVER_STATUS.SERVER_STATUS_AUTOCOMMIT)
def _read_ok_packet(self):
pkt = self._read_packet()
if not pkt.is_ok_packet():
raise OperationalError(2014, "Command Out of Sync")
ok = OKPacketWrapper(pkt)
self.server_status = ok.server_status
return ok
def _send_autocommit_mode(self):
''' Set whether or not to commit after every execute() '''
self._execute_command(COMMAND.COM_QUERY, "SET AUTOCOMMIT = %s" %
self.escape(self.autocommit_mode))
self._read_ok_packet()
def begin(self):
"""Begin transaction."""
self._execute_command(COMMAND.COM_QUERY, "BEGIN")
self._read_ok_packet()
def commit(self):
''' Commit changes to stable storage '''
self._execute_command(COMMAND.COM_QUERY, "COMMIT")
self._read_ok_packet()
def rollback(self):
''' Roll back the current transaction '''
self._execute_command(COMMAND.COM_QUERY, "ROLLBACK")
self._read_ok_packet()
def show_warnings(self):
"""SHOW WARNINGS"""
self._execute_command(COMMAND.COM_QUERY, "SHOW WARNINGS")
result = MySQLResult(self)
result.read()
return result.rows
def select_db(self, db):
'''Set current db'''
self._execute_command(COMMAND.COM_INIT_DB, db)
self._read_ok_packet()
def escape(self, obj):
''' Escape whatever value you pass to it '''
if isinstance(obj, str_type):
return "'" + self.escape_string(obj) + "'"
return escape_item(obj, self.charset)
def literal(self, obj):
'''Alias for escape()'''
return self.escape(obj)
def escape_string(self, s):
if (self.server_status &
SERVER_STATUS.SERVER_STATUS_NO_BACKSLASH_ESCAPES):
return s.replace("'", "''")
return escape_string(s)
def cursor(self, cursor=None):
''' Create a new cursor to execute queries with '''
if cursor:
return cursor(self)
return self.cursorclass(self)
def __enter__(self):
''' Context manager that returns a Cursor '''
return self.cursor()
def __exit__(self, exc, value, traceback):
''' On successful exit, commit. On exception, rollback. '''
if exc:
self.rollback()
else:
self.commit()
# The following methods are INTERNAL USE ONLY (called from Cursor)
def query(self, sql, unbuffered=False):
#if DEBUG:
# print("DEBUG: sending query:", sql)
if isinstance(sql, text_type) and not (JYTHON or IRONPYTHON):
sql = sql.encode(self.encoding)
self._execute_command(COMMAND.COM_QUERY, sql)
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
return self._affected_rows
def next_result(self, unbuffered=False):
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
return self._affected_rows
def affected_rows(self):
return self._affected_rows
def kill(self, thread_id):
arg = struct.pack('<I', thread_id)
self._execute_command(COMMAND.COM_PROCESS_KILL, arg)
return self._read_ok_packet()
def ping(self, reconnect=True):
''' Check if the server is alive '''
if self.socket is None:
if reconnect:
self._connect()
reconnect = False
else:
raise Error("Already closed")
try:
self._execute_command(COMMAND.COM_PING, "")
return self._read_ok_packet()
except Exception:
if reconnect:
self._connect()
return self.ping(False)
else:
raise
def set_charset(self, charset):
# Make sure charset is supported.
encoding = charset_by_name(charset).encoding
self._execute_command(COMMAND.COM_QUERY, "SET NAMES %s" % self.escape(charset))
self._read_packet()
self.charset = charset
self.encoding = encoding
def _connect(self):
sock = None
try:
if self.unix_socket and self.host in ('localhost', '127.0.0.1'):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.settimeout(self.connect_timeout)
sock.connect(self.unix_socket)
self.host_info = "Localhost via UNIX socket"
if DEBUG: print('connected using unix_socket')
else:
while True:
try:
sock = socket.create_connection(
(self.host, self.port), self.connect_timeout)
break
except (OSError, IOError) as e:
if e.errno == errno.EINTR:
continue
raise
self.host_info = "socket %s:%d" % (self.host, self.port)
if DEBUG: print('connected using socket')
sock.settimeout(None)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
if self.no_delay:
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
self.socket = sock
self._rfile = _makefile(sock, 'rb')
self._get_server_information()
self._request_authentication()
if self.sql_mode is not None:
c = self.cursor()
c.execute("SET sql_mode=%s", (self.sql_mode,))
if self.init_command is not None:
c = self.cursor()
c.execute(self.init_command)
self.commit()
if self.autocommit_mode is not None:
self.autocommit(self.autocommit_mode)
except Exception as e:
self._rfile = None
if sock is not None:
try:
sock.close()
except socket.error:
pass
raise OperationalError(
2003, "Can't connect to MySQL server on %r (%s)" % (self.host, e))
def _read_packet(self, packet_type=MysqlPacket):
"""Read an entire "mysql packet" in its entirety from the network
and return a MysqlPacket type that represents the results.
"""
buff = b''
while True:
packet_header = self._read_bytes(4)
if DEBUG: dump_packet(packet_header)
packet_length_bin = packet_header[:3]
#TODO: check sequence id
# packet_number
byte2int(packet_header[3])
bin_length = packet_length_bin + b'\0' # pad little-endian number
bytes_to_read = struct.unpack('<I', bin_length)[0]
recv_data = self._read_bytes(bytes_to_read)
if DEBUG: dump_packet(recv_data)
buff += recv_data
if bytes_to_read < MAX_PACKET_LEN:
break
packet = packet_type(buff, self.encoding)
packet.check_error()
return packet
def _read_bytes(self, num_bytes):
while True:
try:
data = self._rfile.read(num_bytes)
break
except (IOError, OSError) as e:
if e.errno == errno.EINTR:
continue
raise OperationalError(
2013,
"Lost connection to MySQL server during query (%s)" % (e,))
if len(data) < num_bytes:
raise OperationalError(
2013, "Lost connection to MySQL server during query")
return data
def _write_bytes(self, data):
try:
self.socket.sendall(data)
except IOError as e:
raise OperationalError(2006, "MySQL server has gone away (%r)" % (e,))
def _read_query_result(self, unbuffered=False):
if unbuffered:
try:
result = MySQLResult(self)
result.init_unbuffered_query()
except:
result.unbuffered_active = False
result.connection = None
raise
else:
result = MySQLResult(self)
result.read()
self._result = result
if result.server_status is not None:
self.server_status = result.server_status
return result.affected_rows
def insert_id(self):
if self._result:
return self._result.insert_id
else:
return 0
def _execute_command(self, command, sql):
if not self.socket:
raise InterfaceError("(0, '')")
# If the last query was unbuffered, make sure it finishes before
# sending new commands
if self._result is not None and self._result.unbuffered_active:
self._result._finish_unbuffered_query()
if isinstance(sql, text_type):
sql = sql.encode(self.encoding)
chunk_size = min(MAX_PACKET_LEN, len(sql) + 1) # +1 is for command
prelude = struct.pack('<i', chunk_size) + int2byte(command)
self._write_bytes(prelude + sql[:chunk_size-1])
if DEBUG: dump_packet(prelude + sql)
if chunk_size < MAX_PACKET_LEN:
return
seq_id = 1
sql = sql[chunk_size-1:]
while True:
chunk_size = min(MAX_PACKET_LEN, len(sql))
prelude = struct.pack('<i', chunk_size)[:3]
data = prelude + int2byte(seq_id%256) + sql[:chunk_size]
self._write_bytes(data)
if DEBUG: dump_packet(data)
sql = sql[chunk_size:]
if not sql and chunk_size < MAX_PACKET_LEN:
break
seq_id += 1
def _request_authentication(self):
self.client_flag |= CLIENT.CAPABILITIES
if self.server_version.startswith('5'):
self.client_flag |= CLIENT.MULTI_RESULTS
if self.user is None:
raise ValueError("Did not specify a username")
charset_id = charset_by_name(self.charset).id
if isinstance(self.user, text_type):
self.user = self.user.encode(self.encoding)
data_init = (struct.pack('<i', self.client_flag) + struct.pack("<I", 1) +
int2byte(charset_id) + int2byte(0)*23)
next_packet = 1
if self.ssl:
data = pack_int24(len(data_init)) + int2byte(next_packet) + data_init
next_packet += 1
if DEBUG: dump_packet(data)
self._write_bytes(data)
self.socket = ssl.wrap_socket(self.socket, keyfile=self.key,
certfile=self.cert,
ssl_version=ssl.PROTOCOL_TLSv1,
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=self.ca)
self._rfile = _makefile(self.socket, 'rb')
data = data_init + self.user + b'\0' + \
_scramble(self.password.encode('latin1'), self.salt)
if self.db:
if isinstance(self.db, text_type):
self.db = self.db.encode(self.encoding)
data += self.db + int2byte(0)
data = pack_int24(len(data)) + int2byte(next_packet) + data
next_packet += 2
if DEBUG: dump_packet(data)
self._write_bytes(data)
auth_packet = self._read_packet()
# if old_passwords is enabled the packet will be 1 byte long and
# have the octet 254
if auth_packet.is_eof_packet():
# send legacy handshake
data = _scramble_323(self.password.encode('latin1'), self.salt) + b'\0'
data = pack_int24(len(data)) + int2byte(next_packet) + data
self._write_bytes(data)
auth_packet = self._read_packet()
# _mysql support
def thread_id(self):
return self.server_thread_id[0]
def character_set_name(self):
return self.charset
def get_host_info(self):
return self.host_info
def get_proto_info(self):
return self.protocol_version
def _get_server_information(self):
i = 0
packet = self._read_packet()
data = packet.get_all_data()
if DEBUG: dump_packet(data)
self.protocol_version = byte2int(data[i:i+1])
i += 1
server_end = data.find(int2byte(0), i)
self.server_version = data[i:server_end].decode('latin1')
i = server_end + 1
self.server_thread_id = struct.unpack('<I', data[i:i+4])
i += 4
self.salt = data[i:i+8]
i += 9 # 8 + 1(filler)
self.server_capabilities = struct.unpack('<H', data[i:i+2])[0]
i += 2
if len(data) >= i + 6:
lang, stat, cap_h, salt_len = struct.unpack('<BHHB', data[i:i+6])
i += 6
self.server_language = lang
self.server_charset = charset_by_id(lang).name
self.server_status = stat
if DEBUG: print("server_status: %x" % stat)
self.server_capabilities |= cap_h << 16
if DEBUG: print("salt_len:", salt_len)
salt_len = max(12, salt_len - 9)
# reserved
i += 10
if len(data) >= i + salt_len:
# salt_len includes auth_plugin_data_part_1 and filler
self.salt += data[i:i+salt_len]
        # TODO: the AUTH PLUGIN NAME may appear here.
def get_server_info(self):
return self.server_version
Warning = Warning
Error = Error
InterfaceError = InterfaceError
DatabaseError = DatabaseError
DataError = DataError
OperationalError = OperationalError
IntegrityError = IntegrityError
InternalError = InternalError
ProgrammingError = ProgrammingError
NotSupportedError = NotSupportedError
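# A minimal, hedged sketch (illustration only, never called by the driver) of
# the packet framing that _read_packet() and _execute_command() implement: a
# 3-byte little-endian payload length followed by a 1-byte sequence id. The
# payload value and the function name are placeholders; the module's own
# struct and int2byte helpers are reused.
def _example_packet_header(payload=b'\x03SELECT 1'):
    sequence_id = 0
    # Encoding: pack the length as 4 little-endian bytes and keep only 3.
    header = struct.pack('<I', len(payload))[:3] + int2byte(sequence_id)
    # Decoding mirrors _read_packet(): pad the 3 length bytes back to 4 bytes
    # so they can be unpacked as an unsigned 32-bit integer.
    length = struct.unpack('<I', header[:3] + b'\0')[0]
    assert length == len(payload)
    return header + payload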
class MySQLResult(object):
def __init__(self, connection):
self.connection = connection
self.affected_rows = None
self.insert_id = None
self.server_status = None
self.warning_count = 0
self.message = None
self.field_count = 0
self.description = None
self.rows = None
self.has_next = None
self.unbuffered_active = False
def __del__(self):
if self.unbuffered_active:
self._finish_unbuffered_query()
def read(self):
try:
first_packet = self.connection._read_packet()
if first_packet.is_ok_packet():
self._read_ok_packet(first_packet)
elif first_packet.is_load_local_packet():
self._read_load_local_packet(first_packet)
else:
self._read_result_packet(first_packet)
finally:
            self.connection = None  # release reference to kill cyclic reference.
def init_unbuffered_query(self):
self.unbuffered_active = True
first_packet = self.connection._read_packet()
if first_packet.is_ok_packet():
self._read_ok_packet(first_packet)
self.unbuffered_active = False
self.connection = None
else:
self.field_count = first_packet.read_length_encoded_integer()
self._get_descriptions()
# Apparently, MySQLdb picks this number because it's the maximum
# value of a 64bit unsigned integer. Since we're emulating MySQLdb,
# we set it to this instead of None, which would be preferred.
self.affected_rows = 18446744073709551615
def _read_ok_packet(self, first_packet):
ok_packet = OKPacketWrapper(first_packet)
self.affected_rows = ok_packet.affected_rows
self.insert_id = ok_packet.insert_id
self.server_status = ok_packet.server_status
self.warning_count = ok_packet.warning_count
self.message = ok_packet.message
self.has_next = ok_packet.has_next
def _read_load_local_packet(self, first_packet):
load_packet = LoadLocalPacketWrapper(first_packet)
sender = LoadLocalFile(load_packet.filename, self.connection)
sender.send_data()
ok_packet = self.connection._read_packet()
if not ok_packet.is_ok_packet():
raise OperationalError(2014, "Commands Out of Sync")
self._read_ok_packet(ok_packet)
def _check_packet_is_eof(self, packet):
if packet.is_eof_packet():
eof_packet = EOFPacketWrapper(packet)
self.warning_count = eof_packet.warning_count
self.has_next = eof_packet.has_next
return True
return False
def _read_result_packet(self, first_packet):
self.field_count = first_packet.read_length_encoded_integer()
self._get_descriptions()
self._read_rowdata_packet()
def _read_rowdata_packet_unbuffered(self):
# Check if in an active query
if not self.unbuffered_active:
return
# EOF
packet = self.connection._read_packet()
if self._check_packet_is_eof(packet):
self.unbuffered_active = False
self.connection = None
self.rows = None
return
row = self._read_row_from_packet(packet)
self.affected_rows = 1
        self.rows = (row,)  # rows should be a tuple of rows for MySQL-python compatibility.
return row
def _finish_unbuffered_query(self):
# After much reading on the MySQL protocol, it appears that there is,
# in fact, no way to stop MySQL from sending all the data after
# executing a query, so we just spin, and wait for an EOF packet.
while self.unbuffered_active:
packet = self.connection._read_packet()
if self._check_packet_is_eof(packet):
self.unbuffered_active = False
self.connection = None # release reference to kill cyclic reference.
def _read_rowdata_packet(self):
"""Read a rowdata packet for each data row in the result set."""
rows = []
while True:
packet = self.connection._read_packet()
if self._check_packet_is_eof(packet):
self.connection = None # release reference to kill cyclic reference.
break
rows.append(self._read_row_from_packet(packet))
self.affected_rows = len(rows)
self.rows = tuple(rows)
def _read_row_from_packet(self, packet):
use_unicode = self.connection.use_unicode
row = []
for field in self.fields:
data = packet.read_length_coded_string()
if data is not None:
field_type = field.type_code
if use_unicode:
if field_type in TEXT_TYPES:
charset = charset_by_id(field.charsetnr)
                        if not charset.is_binary:  # use_unicode was already checked above
                            # TEXT columns with charset=binary are really BINARY types.
                            data = data.decode(charset.encoding)
else:
data = data.decode()
converter = self.connection.decoders.get(field_type)
if DEBUG: print("DEBUG: field={}, converter={}".format(field, converter))
if DEBUG: print("DEBUG: DATA = ", data)
if converter is not None:
data = converter(data)
row.append(data)
return tuple(row)
def _get_descriptions(self):
"""Read a column descriptor packet for each column in the result."""
self.fields = []
description = []
for i in range_type(self.field_count):
field = self.connection._read_packet(FieldDescriptorPacket)
self.fields.append(field)
description.append(field.description())
eof_packet = self.connection._read_packet()
assert eof_packet.is_eof_packet(), 'Protocol error, expecting EOF'
self.description = tuple(description)
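# A minimal, hedged usage sketch from the caller's side (connection parameters
# and table name are placeholders): a plain cursor buffers the whole result via
# MySQLResult.read(), while pymysql.cursors.SSCursor drives the unbuffered path
# (init_unbuffered_query / _read_rowdata_packet_unbuffered) above.
def _example_buffered_vs_unbuffered():
    import pymysql
    import pymysql.cursors
    conn = pymysql.connect(host='localhost', user='user', passwd='secret', db='test')
    try:
        # Buffered: every row is read from the socket and kept in memory.
        cur = conn.cursor()
        cur.execute("SELECT id FROM big_table")
        rows = cur.fetchall()
        cur.close()
        # Unbuffered: rows are pulled from the socket one packet at a time.
        # The result must still be fully drained (or the cursor closed) before
        # issuing the next command, as _finish_unbuffered_query() explains.
        cur = conn.cursor(pymysql.cursors.SSCursor)
        cur.execute("SELECT id FROM big_table")
        for row in cur:
            pass
        cur.close()
    finally:
        conn.close()
    return rows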
class LoadLocalFile(object):
def __init__(self, filename, connection):
self.filename = filename
self.connection = connection
def send_data(self):
"""Send data packets from the local file to the server"""
if not self.connection.socket:
raise InterfaceError("(0, '')")
# sequence id is 2 as we already sent a query packet
seq_id = 2
try:
with open(self.filename, 'rb') as open_file:
chunk_size = MAX_PACKET_LEN
prelude = b""
packet = b""
packet_size = 0
while True:
chunk = open_file.read(chunk_size)
if not chunk:
break
                    packet = struct.pack('<i', len(chunk))[:3] + int2byte(seq_id % 256)
format_str = '!{0}s'.format(len(chunk))
packet += struct.pack(format_str, chunk)
self.connection._write_bytes(packet)
seq_id += 1
except IOError:
raise OperationalError(1017, "Can't find file '{0}'".format(self.filename))
finally:
# send the empty packet to signify we are done sending data
            packet = struct.pack('<i', 0)[:3] + int2byte(seq_id % 256)
self.connection._write_bytes(packet)
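# A minimal, hedged usage sketch of the statement that triggers the
# LoadLocalFile path above; the file path and table name are placeholders, and
# recent PyMySQL releases additionally require local_infile=True (plus a
# matching server setting) for this to be permitted.
def _example_load_data_local():
    import pymysql
    conn = pymysql.connect(host='localhost', user='user', passwd='secret', db='test')
    try:
        cur = conn.cursor()
        cur.execute("LOAD DATA LOCAL INFILE '/tmp/rows.csv' "
                    "INTO TABLE test_table FIELDS TERMINATED BY ','")
        conn.commit()
        cur.close()
    finally:
        conn.close()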
# g:khuno_ignore='E226,E301,E701'
"""Utilities for input validation"""
# Authors: Olivier Grisel and Gael Varoquaux and others (please update me)
# License: BSD 3
import warnings
import numbers
import numpy as np
from scipy import sparse
from .fixes import safe_copy
def _assert_all_finite(X):
"""Like assert_all_finite, but only for ndarray."""
if X.dtype.char in np.typecodes['AllFloat'] and not np.isfinite(X.sum()) \
and not np.isfinite(X).all():
raise ValueError("Array contains NaN or infinity.")
def assert_all_finite(X):
"""Throw a ValueError if X contains NaN or infinity.
Input MUST be an np.ndarray instance or a scipy.sparse matrix."""
    # First try an O(n) time, O(1) space solution for the common case in which
    # everything is finite; fall back to the O(n) space np.isfinite check to
    # prevent false positives caused by overflow in the sum.
_assert_all_finite(X.data if sparse.issparse(X) else X)
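# A small, hedged illustration (values chosen for the example) of why the fast
# path alone is not sufficient: a sum can overflow to inf even though every
# element is finite, which is exactly the false positive that the
# np.isfinite(X).all() fallback above avoids.
def _example_sum_overflow():
    X = np.array([1e308, 1e308])       # every entry is finite
    assert not np.isfinite(X.sum())    # but the sum overflows to inf
    assert np.isfinite(X).all()        # the element-wise fallback still passes
    _assert_all_finite(X)              # hence no ValueError is raised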
def safe_asarray(X, dtype=None, order=None):
"""Convert X to an array or sparse matrix.
Prevents copying X when possible; sparse matrices are passed through."""
if sparse.issparse(X):
assert_all_finite(X.data)
else:
X = np.asarray(X, dtype, order)
assert_all_finite(X)
return X
def as_float_array(X, copy=True):
"""Converts an array-like to an array of floats
The new dtype will be np.float32 or np.float64, depending on the original
type. The function can create a copy or modify the argument depending
on the argument copy.
Parameters
----------
X : {array-like, sparse matrix}
copy : bool, optional
If True, a copy of X will be created. If False, a copy may still be
returned if X's dtype is not a floating point type.
Returns
-------
XT : {array, sparse matrix}
        An array with dtype np.float32 or np.float64.
"""
if isinstance(X, np.matrix) or (not isinstance(X, np.ndarray)
and not sparse.issparse(X)):
return safe_asarray(X, dtype=np.float64)
elif sparse.issparse(X) and X.dtype in [np.float32, np.float64]:
return X.copy() if copy else X
elif X.dtype in [np.float32, np.float64]: # is numpy array
return X.copy('F' if X.flags['F_CONTIGUOUS'] else 'C') if copy else X
else:
return X.astype(np.float32 if X.dtype == np.int32 else np.float64)
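# A brief, hedged usage sketch: integer input is promoted to a floating point
# dtype, while float input is passed through (or copied, depending on `copy`).
def _example_as_float_array():
    X_int = np.arange(6, dtype=np.int32).reshape(2, 3)
    assert as_float_array(X_int).dtype == np.float32       # int32 -> float32
    X64 = np.ones((2, 2), dtype=np.float64)
    assert as_float_array(X64, copy=False) is X64           # no copy requested
    assert as_float_array(X64, copy=True) is not X64        # explicit copy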
def array2d(X, dtype=None, order=None, copy=False):
"""Returns at least 2-d array with data from X"""
if sparse.issparse(X):
raise TypeError('A sparse matrix was passed, but dense data '
'is required. Use X.toarray() to convert to dense.')
X_2d = np.asarray(np.atleast_2d(X), dtype=dtype, order=order)
_assert_all_finite(X_2d)
if X is X_2d and copy:
X_2d = safe_copy(X_2d)
return X_2d
def _atleast2d_or_sparse(X, dtype, order, copy, sparse_class, convmethod):
if sparse.issparse(X):
# Note: order is ignored because CSR matrices hold data in 1-d arrays
if dtype is None or X.dtype == dtype:
X = getattr(X, convmethod)()
else:
X = sparse_class(X, dtype=dtype)
_assert_all_finite(X.data)
else:
X = array2d(X, dtype=dtype, order=order, copy=copy)
_assert_all_finite(X)
return X
def atleast2d_or_csc(X, dtype=None, order=None, copy=False):
"""Like numpy.atleast_2d, but converts sparse matrices to CSC format.
Also, converts np.matrix to np.ndarray.
"""
return _atleast2d_or_sparse(X, dtype, order, copy, sparse.csc_matrix,
"tocsc")
def atleast2d_or_csr(X, dtype=None, order=None, copy=False):
"""Like numpy.atleast_2d, but converts sparse matrices to CSR format
Also, converts np.matrix to np.ndarray.
"""
return _atleast2d_or_sparse(X, dtype, order, copy, sparse.csr_matrix,
"tocsr")
def _num_samples(x):
"""Return number of samples in array-like x."""
if not hasattr(x, '__len__') and not hasattr(x, 'shape'):
raise TypeError("Expected sequence or array-like, got %r" % x)
return x.shape[0] if hasattr(x, 'shape') else len(x)
def check_arrays(*arrays, **options):
"""Checked that all arrays have consistent first dimensions
Parameters
----------
*arrays : sequence of arrays or scipy.sparse matrices with same shape[0]
Python lists or tuples occurring in arrays are converted to 1D numpy
arrays.
    sparse_format : 'csr', 'csc' or 'dense', None by default
        If not None, any scipy.sparse matrix is converted to the Compressed
        Sparse Row or Compressed Sparse Column representation. If 'dense', an
        error is raised when a sparse array is passed.
copy : boolean, False by default
If copy is True, ensure that returned arrays are copies of the original
(if not already converted to another format earlier in the process).
check_ccontiguous : boolean, False by default
Check that the arrays are C contiguous
dtype : a numpy dtype instance, None by default
Enforce a specific dtype.
"""
sparse_format = options.pop('sparse_format', None)
if sparse_format not in (None, 'csr', 'csc', 'dense'):
raise ValueError('Unexpected sparse format: %r' % sparse_format)
copy = options.pop('copy', False)
check_ccontiguous = options.pop('check_ccontiguous', False)
dtype = options.pop('dtype', None)
if options:
raise TypeError("Unexpected keyword arguments: %r" % options.keys())
if len(arrays) == 0:
return None
n_samples = _num_samples(arrays[0])
checked_arrays = []
for array in arrays:
array_orig = array
if array is None:
# special case: ignore optional y=None kwarg pattern
checked_arrays.append(array)
continue
size = _num_samples(array)
if size != n_samples:
raise ValueError("Found array with dim %d. Expected %d" % (
size, n_samples))
if sparse.issparse(array):
if sparse_format == 'csr':
array = array.tocsr()
elif sparse_format == 'csc':
array = array.tocsc()
elif sparse_format == 'dense':
raise TypeError('A sparse matrix was passed, but dense data '
'is required. Use X.todense() to convert to dense.')
if check_ccontiguous:
array.data = np.ascontiguousarray(array.data, dtype=dtype)
else:
array.data = np.asarray(array.data, dtype=dtype)
_assert_all_finite(array.data)
else:
if check_ccontiguous:
array = np.ascontiguousarray(array, dtype=dtype)
else:
array = np.asarray(array, dtype=dtype)
_assert_all_finite(array)
if copy and array is array_orig:
array = array.copy()
checked_arrays.append(array)
return checked_arrays
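# A brief, hedged usage sketch: arrays whose first dimensions match are
# converted and returned; a length mismatch raises ValueError.
def _example_check_arrays():
    X = [[0.0, 1.0], [2.0, 3.0]]
    y = [0, 1]
    X_checked, y_checked = check_arrays(X, y)
    assert X_checked.shape[0] == y_checked.shape[0] == 2
    try:
        check_arrays(X, [0, 1, 2])          # 2 samples vs. 3 samples
    except ValueError:
        pass                                # inconsistent dimensions are rejected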
def warn_if_not_float(X, estimator='This algorithm'):
"""Warning utility function to check that data type is floating point"""
if not isinstance(estimator, basestring):
estimator = estimator.__class__.__name__
if X.dtype.kind != 'f':
warnings.warn("%s assumes floating point values as input, "
"got %s" % (estimator, X.dtype))
def check_random_state(seed):
"""Turn seed into a np.random.RandomState instance
If seed is None, return the RandomState singleton used by np.random.
If seed is an int, return a new RandomState instance seeded with seed.
If seed is already a RandomState instance, return it.
Otherwise raise ValueError.
"""
if seed is None or seed is np.random:
return np.random.mtrand._rand
if isinstance(seed, numbers.Integral):
return np.random.RandomState(seed)
if isinstance(seed, np.random.RandomState):
return seed
raise ValueError('%r cannot be used to seed a numpy.random.RandomState'
' instance' % seed)
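# A brief, hedged usage sketch of the three accepted seed types.
def _example_check_random_state():
    rng = check_random_state(0)                    # int -> fresh RandomState
    assert isinstance(rng, np.random.RandomState)
    assert check_random_state(rng) is rng          # RandomState passes through
    assert check_random_state(None) is np.random.mtrand._rand  # global state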
"""Queues"""
__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'QueueFull', 'QueueEmpty']
import collections
import heapq
from . import compat
from . import events
from . import locks
from .coroutines import coroutine
class QueueEmpty(Exception):
"""Exception raised when Queue.get_nowait() is called on a Queue object
which is empty.
"""
pass
class QueueFull(Exception):
"""Exception raised when the Queue.put_nowait() method is called on a Queue
object which is full.
"""
pass
class Queue:
"""A queue, useful for coordinating producer and consumer coroutines.
If maxsize is less than or equal to zero, the queue size is infinite. If it
is an integer greater than 0, then "yield from put()" will block when the
queue reaches maxsize, until an item is removed by get().
Unlike the standard library Queue, you can reliably know this Queue's size
with qsize(), since your single-threaded asyncio application won't be
interrupted between calling qsize() and doing an operation on the Queue.
"""
def __init__(self, maxsize=0, *, loop=None):
if loop is None:
self._loop = events.get_event_loop()
else:
self._loop = loop
self._maxsize = maxsize
# Futures.
self._getters = collections.deque()
# Futures.
self._putters = collections.deque()
self._unfinished_tasks = 0
self._finished = locks.Event(loop=self._loop)
self._finished.set()
self._init(maxsize)
# These three are overridable in subclasses.
def _init(self, maxsize):
self._queue = collections.deque()
def _get(self):
return self._queue.popleft()
def _put(self, item):
self._queue.append(item)
# End of the overridable methods.
def _wakeup_next(self, waiters):
# Wake up the next waiter (if any) that isn't cancelled.
while waiters:
waiter = waiters.popleft()
if not waiter.done():
waiter.set_result(None)
break
def __repr__(self):
return '<{} at {:#x} {}>'.format(
type(self).__name__, id(self), self._format())
def __str__(self):
return '<{} {}>'.format(type(self).__name__, self._format())
def _format(self):
result = 'maxsize={!r}'.format(self._maxsize)
if getattr(self, '_queue', None):
result += ' _queue={!r}'.format(list(self._queue))
if self._getters:
result += ' _getters[{}]'.format(len(self._getters))
if self._putters:
result += ' _putters[{}]'.format(len(self._putters))
if self._unfinished_tasks:
result += ' tasks={}'.format(self._unfinished_tasks)
return result
def qsize(self):
"""Number of items in the queue."""
return len(self._queue)
@property
def maxsize(self):
"""Number of items allowed in the queue."""
return self._maxsize
def empty(self):
"""Return True if the queue is empty, False otherwise."""
return not self._queue
def full(self):
"""Return True if there are maxsize items in the queue.
Note: if the Queue was initialized with maxsize=0 (the default),
then full() is never True.
"""
if self._maxsize <= 0:
return False
else:
return self.qsize() >= self._maxsize
@coroutine
def put(self, item):
"""Put an item into the queue.
Put an item into the queue. If the queue is full, wait until a free
slot is available before adding item.
This method is a coroutine.
"""
while self.full():
putter = self._loop.create_future()
self._putters.append(putter)
try:
yield from putter
except:
putter.cancel() # Just in case putter is not done yet.
if not self.full() and not putter.cancelled():
# We were woken up by get_nowait(), but can't take
# the call. Wake up the next in line.
self._wakeup_next(self._putters)
raise
return self.put_nowait(item)
def put_nowait(self, item):
"""Put an item into the queue without blocking.
If no free slot is immediately available, raise QueueFull.
"""
if self.full():
raise QueueFull
self._put(item)
self._unfinished_tasks += 1
self._finished.clear()
self._wakeup_next(self._getters)
@coroutine
def get(self):
"""Remove and return an item from the queue.
        If the queue is empty, wait until an item is available.
This method is a coroutine.
"""
while self.empty():
getter = self._loop.create_future()
self._getters.append(getter)
try:
yield from getter
except:
getter.cancel() # Just in case getter is not done yet.
if not self.empty() and not getter.cancelled():
# We were woken up by put_nowait(), but can't take
# the call. Wake up the next in line.
self._wakeup_next(self._getters)
raise
return self.get_nowait()
def get_nowait(self):
"""Remove and return an item from the queue.
Return an item if one is immediately available, else raise QueueEmpty.
"""
if self.empty():
raise QueueEmpty
item = self._get()
self._wakeup_next(self._putters)
return item
def task_done(self):
"""Indicate that a formerly enqueued task is complete.
Used by queue consumers. For each get() used to fetch a task,
a subsequent call to task_done() tells the queue that the processing
on the task is complete.
If a join() is currently blocking, it will resume when all items have
been processed (meaning that a task_done() call was received for every
item that had been put() into the queue).
Raises ValueError if called more times than there were items placed in
the queue.
"""
if self._unfinished_tasks <= 0:
raise ValueError('task_done() called too many times')
self._unfinished_tasks -= 1
if self._unfinished_tasks == 0:
self._finished.set()
@coroutine
def join(self):
"""Block until all items in the queue have been gotten and processed.
The count of unfinished tasks goes up whenever an item is added to the
queue. The count goes down whenever a consumer calls task_done() to
indicate that the item was retrieved and all work on it is complete.
When the count of unfinished tasks drops to zero, join() unblocks.
"""
if self._unfinished_tasks > 0:
yield from self._finished.wait()
class PriorityQueue(Queue):
"""A subclass of Queue; retrieves entries in priority order (lowest first).
Entries are typically tuples of the form: (priority number, data).
"""
def _init(self, maxsize):
self._queue = []
def _put(self, item, heappush=heapq.heappush):
heappush(self._queue, item)
def _get(self, heappop=heapq.heappop):
return heappop(self._queue)
class LifoQueue(Queue):
"""A subclass of Queue that retrieves most recently added entries first."""
def _init(self, maxsize):
self._queue = []
def _put(self, item):
self._queue.append(item)
def _get(self):
return self._queue.pop()
if not compat.PY35:
JoinableQueue = Queue
"""Deprecated alias for Queue."""
__all__.append('JoinableQueue')
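# A minimal, hedged usage sketch (never called here) of the producer/consumer
# pattern this Queue coordinates, written in the pre-3.5 coroutine style the
# module itself supports; the item count and maxsize are arbitrary placeholders.
def _example_producer_consumer():
    loop = events.get_event_loop()
    queue = Queue(maxsize=2, loop=loop)

    @coroutine
    def producer():
        for i in range(5):
            yield from queue.put(i)        # blocks while the queue is full

    @coroutine
    def consumer():
        while True:
            item = yield from queue.get()  # blocks while the queue is empty
            # ... process item ...
            queue.task_done()              # lets join() account for this item

    consumer_task = loop.create_task(consumer())
    loop.run_until_complete(producer())
    loop.run_until_complete(queue.join())  # wait until every item is processed
    consumer_task.cancel()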