Dataset Viewer
Columns: repository_path (string, length 17–197) | code (string, length 1–2.53M)
---|---|
./data/paxtonhare/MarkLogic-Sublime/ml/ml_utils.py | import sublime
import os
import re
from .ml_settings import MlSettings
SETTINGS_FILE = "MarkLogic.sublime-settings"
class MlUtils:
    """Utility helpers for the MarkLogic Sublime plugin: debug logging,
    resource loading, and lightweight parsing of XQuery sources."""

    # Matches XQuery module imports, capturing the optional namespace
    # prefix (group 3) and the "at" file URI (group 4).
    __module_import_regex__ = re.compile(r"import[\r\n\s]+module\s+((namespace\s+)?([^\s]+)\s*=\s*)?.*?at[\r\n\s]*['\"]([^'\"]+)['\"];?", re.M | re.DOTALL)

    @staticmethod
    def log(log_me):
        """Print a tagged debug message when plugin debugging is enabled."""
        if MlSettings.debug():
            print("[MarkLogic]\t%s" % log_me)

    @staticmethod
    def load_resource(name):
        """Load a packaged resource, falling back to reading the file from
        disk on Sublime builds without sublime.load_resource."""
        if hasattr(sublime, 'load_resource'):
            return sublime.load_resource(name)
        else:
            # name starts with "Packages/"; strip that prefix (9 chars)
            # and read relative to the on-disk packages directory.
            with open(os.path.join(sublime.packages_path(), name[9:])) as f:
                return f.read()

    @staticmethod
    def is_server_side_js(view):
        """Return True when the selection scores as server-side JavaScript."""
        return view.score_selector(view.sel()[0].a, 'source.serverside-js') > 0

    @staticmethod
    def get_namespace(s):
        """Return (prefix, uri) for an XQuery library module declaration
        found in *s*, or (None, None) when there is none.  XQuery comments
        are stripped before matching."""
        ns_str = r"""\s*xquery[^'\"]+(['\"])[^'\"]+?\1;?\s+module\s+namespace\s+([^\s]+)\s+=\s+(['\"])([^'\"]+)?\3"""
        ns_re = re.compile(ns_str)
        sans_comments = re.sub(r"\(:.*?:\)", "", s)
        match = ns_re.search(sans_comments)
        # (debug print removed; use MlUtils.log for diagnostics)
        if match:
            return (match.group(2), match.group(4))
        else:
            return (None, None)

    @staticmethod
    def get_function_defs(file_name, buffer, ns_prefix, show_private):
        """Parse XQuery function declarations out of *buffer*.

        :param file_name: name of the file being parsed (kept for
            interface compatibility; not used here).
        :param buffer: XQuery source text.
        :param ns_prefix: prefix used to qualify returned names; when
            empty, names are returned without a prefix.
        :param show_private: when False, %private functions are skipped.
        :returns: list of (function_name, [parameter, ...]) tuples.
        """
        functions = []
        if show_private:
            private_re = ""
        else:
            private_re = "(?<!%private)"
        function_str = r"""%s # optional bit to exclude private functions
\s+
function[\s]+
(?!namespace) # bail if it's a function namespace decl
((?:[\-_a-zA-Z0-9]+:)?[\-_a-zA-Z0-9]+) #function name part
\s*
\( # paren before parameters
\s*([^{]*)\s* # all the parameters
\) # paren after parameters
""" % private_re
        function_re = re.compile(function_str, re.S | re.M | re.X)
        # BUG FIX: findall()'s second positional argument is a *start
        # offset* (pos), not a flags mask; the flags are already compiled
        # into the pattern above.  Passing the flag mask here made the
        # scan silently skip the first characters of the buffer.
        for match in function_re.findall(buffer):
            if ns_prefix and ns_prefix != '':
                func = re.sub(r"([^:]+:)?([^:]+)", "%s:\\2" % ns_prefix, match[0])
            else:
                func = re.sub(r"([^:]+:)?([^:]+)", "\\2", match[0])
            params = []
            # collapse whitespace before each $param, then drop a trailing
            # ") as <type>" return clause if one leaked into the capture
            pre_params = re.sub(r"[\r\n\s]+\$", "$", match[1])
            pre_params = re.sub(r"\)[\r\n\s]+as.*$", "", pre_params)
            if len(pre_params) > 0:
                params = re.split(r",", pre_params)
            functions.append((func, params))
        return functions

    @staticmethod
    def get_imported_files(file_name, buffer):
        """Resolve XQuery module imports in *buffer* to existing files.

        :returns: list of (path, namespace_prefix) tuples for imports
            whose target exists under a configured search path (absolute
            URIs) or next to *file_name* (relative URIs).
        """
        files = []
        search_paths = MlSettings().get_search_paths()
        if search_paths:
            # BUG FIX: as in get_function_defs, do not pass flags to
            # findall() -- its second argument is a start offset.
            for match in MlUtils.__module_import_regex__.findall(buffer):
                ns_prefix = match[2]
                uri = match[3]
                for search_path in search_paths:
                    if uri[0] == '/':
                        # absolute URI: resolve against the search path
                        f = os.path.join(search_path, uri[1:])
                    else:
                        # relative URI: resolve against the importing file
                        f = os.path.join(os.path.dirname(file_name), uri)
                    if os.path.exists(f):
                        files.append((f, ns_prefix))
        return files
|
./data/zchee/deoplete-go/rplugin/python3/deoplete/sources/deoplete_go.py | import os
import re
import subprocess
import sys
from .base import Base
from deoplete.util import charpos2bytepos
from deoplete.util import error
try:
current_dir = os.path.dirname(os.path.abspath(__file__))
ujson_dir = os.path.dirname(current_dir)
sys.path.insert(0, ujson_dir)
from ujson import loads
except ImportError:
from json import loads
class Source(Base):
    """Deoplete completion source backed by the gocode daemon."""

    def __init__(self, vim):
        Base.__init__(self, vim)
        self.name = 'go'
        self.mark = '[Go]'
        self.filetypes = ['go']
        # Trigger completion after `ident.` / `).` / `].` chains.
        self.input_pattern = r'(?:\b[^\W\d]\w*|[\]\)])\.(?:[^\W\d]\w*)?'
        self.rank = 500
        self.gocode_binary = self.vim.vars['deoplete#sources#go#gocode_binary']
        self.package_dot = self.vim.vars['deoplete#sources#go#package_dot']
        self.sort_class = self.vim.vars['deoplete#sources#go#sort_class']
        self.debug_enabled = self.vim.vars.get('deoplete#sources#go#debug', 0)

    def get_complete_position(self, context):
        """Return the column where the completed word starts, or -1.

        Matches either the trailing identifier or (after a double quote)
        an import-path fragment.
        """
        m = re.search(r'\w*$|(?<=")[./\-\w]*$', context['input'])
        return m.start() if m else -1

    def gather_candidates(self, context):
        """Run gocode on the current buffer and shape its JSON output
        into deoplete candidate dicts, optionally grouped by class."""
        line = self.vim.current.window.cursor[0]
        column = context['complete_position']
        buf = self.vim.current.buffer
        # Byte offset of the cursor in the buffer, as gocode expects.
        offset = self.vim.call('line2byte', line) + \
            charpos2bytepos(self.vim, context['input'][: column], column) - 1
        source = '\n'.join(buf).encode()
        # Feed the full buffer to gocode over stdin.
        process = subprocess.Popen([self.GoCodeBinary(),
                                    '-f=json',
                                    'autocomplete',
                                    buf.name,
                                    str(offset)],
                                   stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   start_new_session=True)
        process.stdin.write(source)
        stdout_data, stderr_data = process.communicate()
        result = loads(stdout_data.decode())
        try:
            # gocode's JSON is [offset, [candidates...]].
            if result[1][0]['class'] == 'PANIC':
                error(self.vim, 'gocode panicked')
                return []
            if self.sort_class:
                # TODO(zchee): Why not work with this?
                # class_dict = {}.fromkeys(self.sort_class, [])
                class_dict = {
                    'package': [],
                    'func': [],
                    'type': [],
                    'var': [],
                    'const': [],
                }
            out = []
            sep = ' '
            for complete in result[1]:
                word = complete['name']
                info = complete['type']
                _class = complete['class']
                # Drop the redundant " func" from "name func(...)" abbrs.
                abbr = str(word + sep + info).replace(' func', '', 1)
                kind = _class
                if _class == 'package' and self.package_dot:
                    word += '.'
                candidates = dict(word=word,
                                  abbr=abbr,
                                  kind=kind,
                                  info=info,
                                  menu=self.mark,
                                  dup=1
                                  )
                if not self.sort_class or _class == 'import':
                    out.append(candidates)
                else:
                    class_dict[_class].append(candidates)
            # append with sort by complete['class']
            if self.sort_class:
                for c in self.sort_class:
                    for x in class_dict[c]:
                        out.append(x)
            return out
        except Exception:
            # Malformed/empty gocode output: offer no candidates.
            return []

    def GoCodeBinary(self):
        """Return the configured gocode path, falling back to $PATH."""
        try:
            if os.path.isfile(self.gocode_binary):
                return self.gocode_binary
            else:
                raise
        except Exception:
            return self.FindBinaryPath('gocode')

    def FindBinaryPath(self, cmd):
        """Resolve *cmd* to an executable path, searching $PATH when the
        command carries no directory component; reports an error (and
        returns its result, presumably None) when not found."""
        def is_exec(fpath):
            return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
        fpath, fname = os.path.split(cmd)
        if fpath:
            if is_exec(cmd):
                return cmd
        else:
            for path in os.environ["PATH"].split(os.pathsep):
                path = path.strip('"')
                binary = os.path.join(path, cmd)
                if is_exec(binary):
                    return binary
        return error(self.vim, 'gocode binary not found')
|
./data/edoburu/django-fluent-contents/fluent_contents/plugins/sharedcontent/utils.py | from django.contrib.sites.models import Site
# Separate function for Django 1.7 migrations
def get_current_site():
    """Return the current Site (standalone so Django 1.7 migrations can
    reference it by import path -- see module comment)."""
    current = Site.objects.get_current()
    return current
def get_current_site_id():
    """Return the primary key of the current Site."""
    site = Site.objects.get_current()
    return site.pk
|
./data/jmcnamara/XlsxWriter/examples/chart_gradient.py | #######################################################################
#
# An example of creating an Excel charts with gradient fills using
# Python and XlsxWriter.
#
# Copyright 2013-2016, John McNamara, jmcnamara@cpan.org
#
import xlsxwriter
workbook = xlsxwriter.Workbook('chart_gradient.xlsx')
worksheet = workbook.add_worksheet()
bold = workbook.add_format({'bold': 1})

# Worksheet data that the chart series reference.
headings = ['Number', 'Batch 1', 'Batch 2']
data = [
    [2, 3, 4, 5, 6, 7],
    [10, 40, 50, 20, 10, 50],
    [30, 60, 70, 50, 40, 30],
]

worksheet.write_row('A1', headings, bold)
for cell, column_values in zip(('A2', 'B2', 'C2'), data):
    worksheet.write_column(cell, column_values)

# Column chart with one gradient-filled series per data batch.
chart = workbook.add_chart({'type': 'column'})
for series_name, series_values, series_colors in (
        ('=Sheet1!$B$1', '=Sheet1!$B$2:$B$7', ['#963735', '#F1DCDB']),
        ('=Sheet1!$C$1', '=Sheet1!$C$2:$C$7', ['#E36C0A', '#FCEADA'])):
    chart.add_series({
        'name': series_name,
        'categories': '=Sheet1!$A$2:$A$7',
        'values': series_values,
        'gradient': {'colors': series_colors},
    })

# Gradient fill for the plot area itself.
chart.set_plotarea({
    'gradient': {'colors': ['#FFEFD1', '#F0EBD5', '#B69F66']}
})

# Axis labels, no legend.
chart.set_x_axis({'name': 'Test number'})
chart.set_y_axis({'name': 'Sample length (mm)'})
chart.set_legend({'none': True})

worksheet.insert_chart('E2', chart)
workbook.close()
|
./data/adblockplus/gyp/pylib/gyp/generator/gypsh.py | # Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypsh output module
gypsh is a GYP shell. It's not really a generator per se. All it does is
fire up an interactive Python session with a few local variables set to the
variables passed to the generator. Like gypd, it's intended as a debugging
aid, to facilitate the exploration of .gyp structures after being processed
by the input module.
The expected usage is "gyp -f gypsh -D OS=desired_os".
"""
import code
import sys
# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
_generator_identity_variables = [
    'EXECUTABLE_PREFIX',
    'EXECUTABLE_SUFFIX',
    'INTERMEDIATE_DIR',
    'PRODUCT_DIR',
    'RULE_INPUT_ROOT',
    'RULE_INPUT_DIRNAME',
    'RULE_INPUT_EXT',
    'RULE_INPUT_NAME',
    'RULE_INPUT_PATH',
    'SHARED_INTERMEDIATE_DIR',
]

# Each identity variable expands to an unexpanded reference to itself, so
# processed .gyp structures still show the '<(VAR)' markers verbatim.
generator_default_variables = dict(
    (identity_var, '<(%s)' % identity_var)
    for identity_var in _generator_identity_variables)
def GenerateOutput(target_list, target_dicts, data, params):
  """Drop into an interactive Python session with the processed GYP data.

  Exposes target_list, target_dicts and data as locals in the session;
  params is accepted for generator-interface compatibility but unused.
  """
  # Renamed from 'locals' to avoid shadowing the locals() builtin.
  session_locals = {
        'target_list': target_list,
        'target_dicts': target_dicts,
        'data': data,
  }
  # Use a banner that looks like the stock Python one and like what
  # code.interact uses by default, but tack on something to indicate what
  # locals are available, and identify gypsh.
  banner = 'Python %s on %s\nlocals.keys() = %s\ngypsh' % \
           (sys.version, sys.platform, repr(sorted(session_locals.keys())))
  code.interact(banner, local=session_locals)
|
./data/aldebaran/qibuild/python/qibuild/actions/list_configs.py | ## Copyright (c) 2012-2016 Aldebaran Robotics. All rights reserved.
## Use of this source code is governed by a BSD-style license that can be
## found in the COPYING file.
"""List all the known configs """
import operator
from qisys import ui
import qisys.parsers
import qibuild.worktree
def configure_parser(parser):
    """Attach the standard qisys worktree arguments to *parser*."""
    qisys.parsers.worktree_parser(parser)
def do(args):
    """List the known build configs, flagging the worktree's default."""
    worktree = qisys.parsers.get_worktree(args, raises=False)
    qibuild_cfg = qibuild.config.QiBuildConfig()
    qibuild_cfg.read()
    # dict.values() is a view on Python 3 and has no .sort(); sorted()
    # builds a fresh list and works identically on Python 2.
    configs = sorted(qibuild_cfg.configs.values(),
                     key=operator.attrgetter("name"))
    ui.info("Known configs")
    for config in configs:
        ui.info("*", config)
    default_config = None
    if worktree:
        build_worktree = qibuild.worktree.BuildWorkTree(worktree)
        default_config = build_worktree.default_config
    if default_config:
        ui.info("Worktree in", build_worktree.root,
                "is using", default_config, "as a default config")
|
./data/mogui/pyorient/setup.py | #!/usr/bin/env python
# Copyright 2012 Niko Usai <usai.niko@gmail.com>, http://mogui.it
#
# this file is part of pyorient
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup

# Read the long description up front with a context manager so the file
# handle is closed deterministically instead of leaking until GC.
with open('README.rst') as readme:
    long_description = readme.read()

setup(name='pyorient',
      version='1.4.9',
      author='Niko Usai <mogui83@gmail.com>, Domenico Lupinetti <ostico@gmail.com>',
      description='OrientDB native client library',
      long_description=long_description,
      license='LICENSE',
      packages=[
          'pyorient',
          'pyorient.messages',
          'pyorient.ogm',
      ]
      )
|
./data/akheron/cpython/Lib/test/pydocfodder.py | """Something just to look at via pydoc."""
import types
# pydoc test fixture: root of the A/B/C/D diamond ("classic" classes under
# Python 2).  Each method's docstring names every class that defines it, so
# pydoc's method grouping can be checked -- do not alter the docstrings.
class A_classic:
    "A classic class."
    def A_method(self):
        "Method defined in A."
    def AB_method(self):
        "Method defined in A and B."
    def AC_method(self):
        "Method defined in A and C."
    def AD_method(self):
        "Method defined in A and D."
    def ABC_method(self):
        "Method defined in A, B and C."
    def ABD_method(self):
        "Method defined in A, B and D."
    def ACD_method(self):
        "Method defined in A, C and D."
    def ABCD_method(self):
        "Method defined in A, B, C and D."
# pydoc test fixture: left branch of the classic diamond (overrides the
# *B* methods of A_classic and adds B-only ones).
class B_classic(A_classic):
    "A classic class, derived from A_classic."
    def AB_method(self):
        "Method defined in A and B."
    def ABC_method(self):
        "Method defined in A, B and C."
    def ABD_method(self):
        "Method defined in A, B and D."
    def ABCD_method(self):
        "Method defined in A, B, C and D."
    def B_method(self):
        "Method defined in B."
    def BC_method(self):
        "Method defined in B and C."
    def BD_method(self):
        "Method defined in B and D."
    def BCD_method(self):
        "Method defined in B, C and D."
# pydoc test fixture: right branch of the classic diamond (overrides the
# *C* methods of A_classic and adds C-only ones).
class C_classic(A_classic):
    "A classic class, derived from A_classic."
    def AC_method(self):
        "Method defined in A and C."
    def ABC_method(self):
        "Method defined in A, B and C."
    def ACD_method(self):
        "Method defined in A, C and D."
    def ABCD_method(self):
        "Method defined in A, B, C and D."
    def BC_method(self):
        "Method defined in B and C."
    def BCD_method(self):
        "Method defined in B, C and D."
    def C_method(self):
        "Method defined in C."
    def CD_method(self):
        "Method defined in C and D."
# pydoc test fixture: bottom of the classic diamond, inheriting from both
# branches to exercise MRO display.
class D_classic(B_classic, C_classic):
    "A classic class, derived from B_classic and C_classic."
    def AD_method(self):
        "Method defined in A and D."
    def ABD_method(self):
        "Method defined in A, B and D."
    def ACD_method(self):
        "Method defined in A, C and D."
    def ABCD_method(self):
        "Method defined in A, B, C and D."
    def BD_method(self):
        "Method defined in B and D."
    def BCD_method(self):
        "Method defined in B, C and D."
    def CD_method(self):
        "Method defined in C and D."
    def D_method(self):
        "Method defined in D."
# pydoc test fixture: new-style counterpart of A_classic, additionally
# carrying a classmethod, staticmethod, property and type alias so pydoc's
# rendering of each attribute kind can be checked.
class A_new(object):
    "A new-style class."
    def A_method(self):
        "Method defined in A."
    def AB_method(self):
        "Method defined in A and B."
    def AC_method(self):
        "Method defined in A and C."
    def AD_method(self):
        "Method defined in A and D."
    def ABC_method(self):
        "Method defined in A, B and C."
    def ABD_method(self):
        "Method defined in A, B and D."
    def ACD_method(self):
        "Method defined in A, C and D."
    def ABCD_method(self):
        "Method defined in A, B, C and D."
    def A_classmethod(cls, x):
        "A class method defined in A."
    # Old-school decoration (pre-@decorator syntax) kept deliberately.
    A_classmethod = classmethod(A_classmethod)
    def A_staticmethod():
        "A static method defined in A."
    A_staticmethod = staticmethod(A_staticmethod)
    def _getx(self):
        "A property getter function."
    def _setx(self, value):
        "A property setter function."
    def _delx(self):
        "A property deleter function."
    A_property = property(fdel=_delx, fget=_getx, fset=_setx,
                          doc="A sample property defined in A.")
    A_int_alias = int
# pydoc test fixture: new-style left branch of the diamond.
class B_new(A_new):
    "A new-style class, derived from A_new."
    def AB_method(self):
        "Method defined in A and B."
    def ABC_method(self):
        "Method defined in A, B and C."
    def ABD_method(self):
        "Method defined in A, B and D."
    def ABCD_method(self):
        "Method defined in A, B, C and D."
    def B_method(self):
        "Method defined in B."
    def BC_method(self):
        "Method defined in B and C."
    def BD_method(self):
        "Method defined in B and D."
    def BCD_method(self):
        "Method defined in B, C and D."
# pydoc test fixture: new-style right branch of the diamond.
class C_new(A_new):
    "A new-style class, derived from A_new."
    def AC_method(self):
        "Method defined in A and C."
    def ABC_method(self):
        "Method defined in A, B and C."
    def ACD_method(self):
        "Method defined in A, C and D."
    def ABCD_method(self):
        "Method defined in A, B, C and D."
    def BC_method(self):
        "Method defined in B and C."
    def BCD_method(self):
        "Method defined in B, C and D."
    def C_method(self):
        "Method defined in C."
    def CD_method(self):
        "Method defined in C and D."
# pydoc test fixture: bottom of the new-style diamond (C3 MRO display).
class D_new(B_new, C_new):
    """A new-style class, derived from B_new and C_new.
    """
    def AD_method(self):
        "Method defined in A and D."
    def ABD_method(self):
        "Method defined in A, B and D."
    def ACD_method(self):
        "Method defined in A, C and D."
    def ABCD_method(self):
        "Method defined in A, B, C and D."
    def BD_method(self):
        "Method defined in B and D."
    def BCD_method(self):
        "Method defined in B, C and D."
    def CD_method(self):
        "Method defined in C and D."
    def D_method(self):
        "Method defined in D."
# pydoc test fixture: property accessors that are callable class instances
# rather than plain functions/methods (regression test for SF bug 472347).
class FunkyProperties(object):
    """From SF bug 472347, by Roeland Rengelink.
    Property getters etc may not be vanilla functions or methods,
    and this used to make GUI pydoc blow up.
    """
    def __init__(self):
        # backing store read/written through the descriptor helpers below
        self.desc = {'x':0}
    # Each helper is a callable object bound to one attribute name.
    class get_desc:
        def __init__(self, attr):
            self.attr = attr
        def __call__(self, inst):
            print('Get called', self, inst)
            return inst.desc[self.attr]
    class set_desc:
        def __init__(self, attr):
            self.attr = attr
        def __call__(self, inst, val):
            print('Set called', self, inst, val)
            inst.desc[self.attr] = val
    class del_desc:
        def __init__(self, attr):
            self.attr = attr
        def __call__(self, inst):
            print('Del called', self, inst)
            del inst.desc[self.attr]
    x = property(get_desc('x'), set_desc('x'), del_desc('x'), 'prop x')
# Synthesize a child module object so pydoc's package-summary logic has a
# submodule to list under this module.
submodule = types.ModuleType(__name__ + '.submodule',
    """A submodule, which should appear in its parent's summary""")
|
./data/anhstudios/swganh/data/scripts/templates/object/draft_schematic/community_crafting/component/shared_lightweight_turret_hardware.py | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    # Autogenerated template factory (see file header -- manual edits may
    # be lost): builds the Intangible draft-schematic object for the
    # lightweight turret hardware component.  *kernel* is accepted by the
    # template interface but unused here.
    result = Intangible()
    result.template = "object/draft_schematic/community_crafting/component/shared_lightweight_turret_hardware.iff"
    result.attribute_template_id = -1
    result.stfName("string_id_table","")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return result
./data/pyjs/pyjs/pyjswidgets/pyjamas/Canvas2D.browser.py | class Canvas(Widget):
def isEmulation(self):
    """Return true when the excanvas VML emulation layer
    (G_vmlCanvasManager) is providing the canvas implementation.

    Body is inline JavaScript compiled by pyjamas; the string content is
    the runtime behavior and must not be edited casually.
    """
    JS("""
    return (typeof $wnd['G_vmlCanvasManager'] != "undefined");
    """)
def init(self):
    """One-time canvas setup (inline JavaScript, pyjamas-compiled).

    Under the excanvas (VML) emulation this patches the element so
    getContext works; in all browsers it wraps createPattern/drawImage
    and stores the 2D context on this['context'].
    """
    JS("""
    var el = this['canvas'];
    if (typeof $wnd['G_vmlCanvasManager'] != "undefined") {
        var parent = el['parent'];
        el = $wnd['G_vmlCanvasManager']['fixElement_'](el);
        el['getContext'] = function () {
            if (this['context_']) {
                return this['context_'];
            }
            return this['context_'] = new $wnd['CanvasRenderingContext2D'](el);
        };
        el['attachEvent']("onpropertychange", function (e) {
            // we need to watch changes to width and height
            switch (e['propertyName']) {
                case "width":
                case "height":
                    // coord size changed?
                    break;
            }
        });
        // if style['height'] is set
        var attrs = el['attributes'];
        if (attrs['width'] && attrs['width']['specified']) {
            // TODO: use runtimeStyle and coordsize
            // el['getContext']()['setWidth_'](attrs['width']['nodeValue']);
            el['style']['width'] = attrs['width']['nodeValue'] + "px";
        }
        if (attrs['height'] && attrs['height']['specified']) {
            // TODO: use runtimeStyle and coordsize
            // el['getContext']()['setHeight_'](attrs['height']['nodeValue']);
            el['style']['height'] = attrs['height']['nodeValue'] + "px";
        }
    }
    var ctx = el['getContext']("2d");
    ctx['_createPattern'] = ctx['createPattern'];
    ctx['createPattern'] = function(img, rep) {
        // Next line breaks things for Chrome
        //if (!(img instanceof Image)) img = img['getElement']();
        return this['_createPattern'](img, rep);
    }
    ctx['_drawImage'] = ctx['drawImage'];
    ctx['drawImage'] = function() {
        var a=arguments;
        // Next line breaks things for Chrome
        //if (!(a[0] instanceof Image)) a[0] = a[0]['getElement']();
        if (a['length']==9) return this['_drawImage'](a[0], a[1], a[2], a[3], a[4], a[5], a[6], a[7], a[8]);
        else if (a['length']==5) return this['_drawImage'](a[0], a[1], a[2], a[3], a[4]);
        return this['_drawImage'](a[0], a[1], a[2]);
    }
    this['context'] = ctx;
    """)
|
./data/ekristen/unboxed/unboxed/__init__.py | from telnet import *
from logo import *
from reboot import *
__apps__ = ['telnet', 'logo', 'reboot']
|
./data/rawdigits/bouncer/example/redis-loaders/request_time_per_host.py | #!/usr/bin/python
import socket
import time
import redis
import shared
# Local Redis instance that receives the aggregated counters.
r = redis.StrictRedis(host='localhost', port=6379, db=0)
# Batches flushes of the aggregates; args presumably a time window and a
# max event count -- TODO confirm against shared.BatchCounter.
b = shared.BatchCounter(5,10000)
agg = shared.AggregatorConnector()
# uuid -> {"time": request start, "host": host} for in-flight requests.
metric = {}
# host -> most recent request duration, flushed to Redis in batches.
metric2 = {}
# Aggregation bucket size in seconds.
granularity = 60
def process_data(data):
    """Track per-host request durations from bouncer events.

    'request' events record a start time keyed by uuid; the matching
    'end' event computes the elapsed time for that uuid's host.  The
    per-host aggregates are flushed to Redis in batches.
    """
    global metric
    if data['type'] == 'request':
        # assign these inside the if because there are tons of connect events
        request_time = data['time']
        uuid = data['uuid']
        try:
            metric[uuid] = {"time": request_time, "host": data['host']}
        except KeyError:
            # narrowed from a bare except: only a missing 'host' key can
            # realistically fail here; keep the loader running regardless
            print("this shouldn't happen")
    elif data['type'] == 'end':
        request_time = data['time']
        uuid = data['uuid']
        # 'in' instead of the deprecated dict.has_key()
        if uuid in metric:
            metric2[metric[uuid]["host"]] = request_time - metric[uuid]["time"]
            metric.pop(uuid)
    if b.check():
        # floor to the start of the current granularity window; '//' keeps
        # integer semantics on both Python 2 and Python 3
        now = (int(time.time()) // granularity) * granularity
        for k, v in metric2.items():
            r.zincrby(str(now)+"-hosttime", k, v)
        metric2.clear()
# Main loop: stream decoded JSON events from the aggregator forever.
while True:
    for d in agg.json_read():
        process_data(d)
|
./data/HewlettPackard/python-ilorest-library/src/ilorest/ris/validation.py | ###
# Copyright 2016 Hewlett Packard Enterprise, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
# -*- coding: utf-8 -*-
"""RIS Schema classes"""
# ---------Imports---------
import os
import re
import sys
import json
import locale
import zipfile
import logging
import textwrap
import validictory
from .sharedtypes import JSONEncoder
from ilorest.rest.v1_helper import (RisObject)
# ---------End of imports---------
# ---------Debug logger---------
LOGGER = logging.getLogger(__name__)
# ---------End of debug logger---------
class ValidationError(Exception):
    """Base error for schema/registry validation failures."""
class SchemaValidationError(ValidationError):
    """Schema validation failure (e.g. schema files cannot be located)."""
class RegistryValidationError(ValidationError):
    """Registry validation failure.

    Optionally carries the registry entry and selector that were in
    effect when the failure occurred.
    """

    def __init__(self, msg, regentry=None, selector=None):
        super(RegistryValidationError, self).__init__(msg)
        # attribute selector in effect at failure time (may be None)
        self.sel = selector
        # registry entry that failed validation (may be None)
        self.reg = regentry
class UnknownValidatorError(Exception):
    """Raised when we find an attribute type that we don't know how to
    validate."""
    # BUG FIX: the original split this text across two adjacent string
    # literals on separate statements, so only the first half became
    # __doc__ and the second was a dead no-op statement.
class ValidationManager(object):
    # NOTE(review): only the first string below becomes __doc__; the
    # second literal is a no-op statement -- these were likely meant to
    # be a single docstring.
    """Keep track of all the schemas and registries and provides helpers"""
    """ to simplify validation """
def __init__(self, local_path, bios_local_path, romfamily=None, \
             biosversion=None, iloversion=None, monolith=None):
    """Resolve schema/registry locations and load them.

    :param local_path: directory holding the iLO schemas (may be None to
        use defaults / extract from the bundled zip).
    :param bios_local_path: directory holding the BIOS schemas (may be
        None, handled like local_path).
    :param romfamily: the current system's ROM family.
    :param biosversion: the current system BIOS version.
    :param iloversion: the current system iLO version (parsed as float).
    :param monolith: full data model retrieved from server; for iLO >=
        2.10 schemas are read from it via REST/Redfish paths instead of
        local files.
    """
    super(ValidationManager, self).__init__()
    defaultilopath = None
    defaultbiospath = None
    schemamainfolder = None
    # Older iLO (< 2.10): schemas live on the local filesystem, possibly
    # inside a zip that must be extracted first.
    if float(iloversion) < 2.10:
        if os.name == 'nt':
            defaultilopath = r".\hp-rest-classes-ilo4"
            defaultbiospath = r".\hp-rest-classes-bios"
            schemamainfolder = os.path.dirname(sys.executable)
        else:
            defaultilopath = "/usr/share/hprest/hp-rest-classes-ilo4"
            defaultbiospath = "/usr/share/hprest/hp-rest-classes-bios"
            schemamainfolder = "/usr/share/hprest/"
        # iLO schema location defaults
        if not local_path:
            if not os.path.isdir(defaultilopath):
                ilozip = self.getiloziplocation(schemamainfolder, iloversion)
                if ilozip and os.path.exists(ilozip):
                    with zipfile.ZipFile(os.path.join(schemamainfolder,
                                                      ilozip), "r") as zfile:
                        zfile.extractall(os.path.join(schemamainfolder,
                                                      "hp-rest-classes-ilo4"))
                    local_path = os.path.join(schemamainfolder,
                                              u'hp-rest-classes-ilo4')
                else:
                    raise SchemaValidationError(
                        u'No valid iLO schema zip file found.\n'
                        'Please refer to our documentation for '
                        'further instructions on downloading the'
                        ' appropriate schemas.')
            else:
                local_path = defaultilopath
        else:
            if not os.path.isdir(local_path):
                raise SchemaValidationError(u"iLO schema directory '%s' "
                                            "doesn't exist" % local_path)
        # bios schema location defaults
        if not bios_local_path:
            if not os.path.isdir(defaultbiospath):
                bioszip = self.getbiosziplocation(romfamily,
                                                  schemamainfolder, biosversion)
                if bioszip and os.path.exists(bioszip):
                    with zipfile.ZipFile(
                            os.path.join(schemamainfolder, bioszip),
                            "r") as zfile:
                        zfile.extractall(os.path.join(schemamainfolder,
                                                      "hp-rest-classes-bios"))
                    bios_local_path = os.path.join(schemamainfolder,
                                                   u'hp-rest-classes-bios')
                else:
                    raise SchemaValidationError(u'No valid BIOS schema '
                                                'zip file found.\nPlease refer to our '
                                                'documentation for further instructions '
                                                'on downloading the appropriate schemas.')
            else:
                bios_local_path = defaultbiospath
        else:
            if not os.path.isdir(bios_local_path):
                raise SchemaValidationError(u"Bios schema directory '%s' "
                                            "doesn't exist" % bios_local_path)
    else:
        # Newer iLO: read schemas/registries from the server data model.
        if monolith.is_redfish:
            local_path = "/redfish/v1/Schemas/"
            bios_local_path = "/redfish/v1/Registries/"
        else:
            local_path = "/rest/v1/Schemas"
            bios_local_path = "/rest/v1/Registries"
    # iLO schema and registry lists
    self._schema_locations = list()
    self._classes = list()
    self._registry_locations = list()
    self._classes_registry = list()
    # iLO schema and registry lists
    self._bios_schema_locations = list()
    self._bios_classes = list()
    self._bios_registry_locations = list()
    self._bios_classes_registry = list()
    # iLO and base error messages
    self._ilo_messages = list()
    self._base_messages = list()
    self._hpcommon_messages = list()
    self._iloevents_messages = list()
    # error
    self._errors = list()
    # strings for v1/redfish: [collection path, member-list key]
    if monolith.is_redfish:
        self._schemaid = ["/redfish/v1/schemas", "Members"]
        self._regid = ["/redfish/v1/registries", "Members"]
    else:
        self._schemaid = ["/rest/v1/schemas", "Items"]
        self._regid = ["/rest/v1/registries", "Items"]
    # Register the resolved locations (loads their contents).
    if local_path:
        self.add_location(schema_path=local_path, monolith=monolith)
        self.add_location(registry_path=local_path, monolith=monolith)
    if bios_local_path:
        self.add_location(schema_path=bios_local_path, biossection=True, \
                          monolith=monolith)
        self.add_location(registry_path=bios_local_path, biossection=True, \
                          monolith=monolith)
def getbiosziplocation(self, romfamily, schemadir, biosversion):
"""Helper function for BIOS zip location from schema directory
:param romfamily: the current systems rom family.
:type romfamily: str.
:param schemadir: the current configuration schema directory.
:type schemadir: str.
:param biosversion: the current system BIOS version.
:type biosversion: str.
"""
foundfile = None
currentver = None
tempstr = "hp-rest-classes-bios-" + romfamily + "-" + biosversion
for _, _, filenames in os.walk(schemadir):
for filename in filenames:
if tempstr in filename:
regentry = re.compile('%s(.*?).zip' % tempstr)
mentry = regentry.search(filename)
if mentry and currentver:
if currentver < mentry.group(1):
foundfile = filename
currentver = mentry.group(1)
elif mentry and not currentver:
foundfile = filename
currentver = mentry.group(1)
if foundfile:
return os.path.join(schemadir, foundfile)
else:
return None
def getiloziplocation(self, schemadir, iloversion):
"""Helper function for iLO zip location from schema directory
:param schemadir: the current configuration schema directory.
:type schemadir: str.
:param iloversion: the current system iLO version.
:type iloversion: str.
"""
if float(iloversion) < 2.10:
iloversion = u'2.00'
tempstr = "hp-rest-classes-ilo4-" + iloversion.replace(".", "")
for _, _, filenames in os.walk(schemadir):
for filename in filenames:
if tempstr in filename:
return os.path.join(schemadir, filename)
return None
def add_location(self, schema_path=None, registry_path=None,
biossection=False, monolith=None):
"""Add schema_path and registry_path to the list of locations to"""
""" search for schemas and registries
:param schema_path: directory or URL where schemas are located.
:type schema_path: str.
:param registry_path: directory or URL where registries are located.
:type registry_path: str.
:param biossection: flag to determine if within BIOS section.
:type biossection: str.
:param monolith: full data model retrieved from server.
:type monolith: dict.
"""
if schema_path:
if not biossection:
self._schema_locations.append(schema_path)
self._update_location_map(monolith=monolith)
else:
self._bios_schema_locations.append(schema_path)
self._update_location_map(biossection=True, monolith=monolith)
elif registry_path:
if not biossection:
self._registry_locations.append(registry_path)
self._update_location_map(registries=True, monolith=monolith)
else:
self._bios_registry_locations.append(registry_path)
self._update_location_map(biossection=True, registries=True, \
monolith=monolith)
else:
raise ValueError(u"'schema_path' and 'registry_path' " \
"are undefined")
def _update_location_map(self, biossection=False, registries=False,
                         monolith=None):
    """Searches locations to build a map of type to filename

    :param biossection: flag to determine if within BIOS section.
    :type biossection: str.
    :param registries: flag to determine if within registries section.
    :type registries: boolean.
    :param monolith: full data model retrieved from server.
    :type monolith: dict.
    """
    locationslist = list()
    pathjoinstr = None
    # Pick the location list (and the on-disk subfolder name) matching
    # the schema/registry and iLO/BIOS combination being refreshed.
    if not registries:
        pathjoinstr = "Schemas"
        if not biossection:
            locationslist = self._schema_locations
        else:
            locationslist = self._bios_schema_locations
    else:
        pathjoinstr = "Registries"
        if not biossection:
            locationslist = self._registry_locations
        else:
            locationslist = self._bios_registry_locations
    for location in locationslist:
        if monolith:
            # Server-provided data model: read the types directly out of it.
            self.new_load_file(monolith, root=location, \
                biossection=biossection, registries=registries)
        elif self._is_local(location):
            # need to set the executable bit on all SCEXEs
            for root, _, filenames in os.walk(os.path.join(location,
                                                           pathjoinstr)):
                for filename in filenames:
                    fqpath = os.path.abspath(os.path.join(\
                        os.path.normpath(root), filename))
                    if self.load_file(fqpath, root=location, \
                        biossection=biossection, registries=registries):
                        LOGGER.info("Loaded schema mapping '%s'", fqpath)
def new_load_file(self, monolith, root=None, biossection=False, \
                  registries=False):
    """Loads the types from monolith.

    :param monolith: full data model retrieved from server.
    :type monolith: dict.
    :param root: pointer to the root of the load.
    :type root: class obj.
    :param biossection: flag to determine if within BIOS section.
    :type biossection: str.
    :param registries: flag to determine if within registries section.
    :type registries: boolean.
    """
    classesdataholder = []
    for itemtype in monolith.types:
        # NOTE(review): 'and' binds tighter than 'or', so the
        # u'Instances' membership check only guards the "Collection."
        # branch; "#SchemaFileCollection." matches unconditionally --
        # confirm this is intended.
        if itemtype.startswith("#SchemaFileCollection.") or \
                itemtype.startswith("Collection.") and \
                u'Instances' in monolith.types[itemtype]:
            for instance in monolith.types[itemtype][u'Instances']:
                if self._schemaid[0] in instance.resp.request.path.\
                        lower() or self._regid[0] in \
                        instance.resp.request.path.lower():
                    if not registries and self._schemaid[0] in \
                            instance.resp.request.path.lower():
                        # Merge member lists across schema collection
                        # pages into the first page's dict.
                        if classesdataholder:
                            if self._schemaid[1] in instance.resp.dict:
                                classesdataholder[0][self._schemaid[1]].\
                                    extend(instance.resp.dict\
                                           [self._schemaid[1]])
                        else:
                            classesdataholder.append(instance.resp.dict)
                    elif registries and self._regid[0] in \
                            instance.resp.request.path.lower():
                        if classesdataholder:
                            if monolith.is_redfish:
                                classesdataholder[0][self._regid[1]].\
                                    extend(instance.resp.dict\
                                           [self._regid[1]])
                        else:
                            classesdataholder.append(instance.resp.dict)
    if classesdataholder:
        classesdataholder = classesdataholder[0]
    try:
        if monolith._typestring in classesdataholder and ('Collection.' in \
                classesdataholder[monolith._typestring] or \
                ('#SchemaFileCollection.' in \
                classesdataholder[monolith._typestring] \
                and monolith.is_redfish)):
            newclass = Classes.parse(classesdataholder)
            newclass.set_root(root)
            # Route the parsed collection to the list matching the
            # schema/registry and iLO/BIOS combination.
            if not registries:
                if not biossection:
                    self._classes.append(newclass)
                else:
                    self._bios_classes.append(newclass)
            else:
                if not biossection:
                    self._classes_registry.append(newclass)
                else:
                    self._bios_classes_registry.append(newclass)
    except BaseException:
        # best-effort: unparseable collections are silently skipped
        pass
    else:
        pass
def load_file(self, filepath, root=None, biossection=False,
registries=False, datareturn=False):
"""Loads the types from filepath.
:param filepath: path to a file to load, local or URL.
:type filepath: str.
:param root: root path used to reconstruct full file paths.
:type root: str.
:param biossection: flag to determine if within BIOS section.
:type biossection: str.
:param registries: flag to determine if within registries section.
:type registries: boolean.
:param datareturn: flag to determine if the raw data should be returned.
:type datareturn: boolean.
"""
result = False
if os.path.isfile(filepath):
try:
filehand = open(filepath, 'r')
data = json.load(filehand)
if datareturn:
return data
if u'Type' in data and data[u'Type'] == 'Collection.1.0.0':
if biossection and registries:
itemsreturn = self.bios_helper_function(data, root)
data["Items"] = itemsreturn
newclass = Classes.parse(data)
newclass.set_root(root)
if not registries:
if not biossection:
self._classes.append(newclass)
else:
self._bios_classes.append(newclass)
else:
if not biossection:
self._classes_registry.append(newclass)
else:
self._bios_classes_registry.append(newclass)
result = True
except BaseException:
pass
else:
pass
finally:
filehand.close()
return result
def bios_helper_function(self, data, root):
"""Helper function for BIOS schemas
:param data: current retrieved data for BIOS.
:type data: str.
:param root: root path used to reconstruct full file paths.
:type root: str.
"""
folderentries = data["links"]
datareturn = list()
for entry in folderentries["Member"]:
joinstr = entry["href"]
if os.name == 'nt' and joinstr[0] == "/":
joinstr = joinstr.replace("/", "\\")[1:]
elif joinstr[0] == "/":
joinstr = joinstr[1:]
for root, _, filenames in os.walk(os.path.join(root, joinstr)):
for filename in filenames:
fqpath = os.path.abspath(os.path.join(\
os.path.normpath(root), filename))
datareturn.append(self.load_file(fqpath, root=root, \
biossection=True, registries=True, datareturn=True))
LOGGER.info("Loaded schema mapping '%s'", fqpath)
return datareturn
def validate(self, item, selector=None, currdict=None, monolith=None,
newarg=None, checkall=False, regloc=None):
"""Search for matching schemas and attribute registries and"""
""" ensure that item is valid.
:param item: the item to be validated.
:type item: str.
:param selector: the type selection for the get operation.
:type selector: str.
:param currdict: current selection dictionary.
:type currdict: dict.
:param monolith: full data model retrieved from server.
:type monolith: dict.
:param newargs: list of multi level properties to be modified.
:type newargs: list.
:param checkall: flag to determine if check all should be enabled.
:type checkall: boolean.
:param regloc: path to registry location.
:type regloc: str.
"""
if regloc:
attrreg = RepoRegistryEntry(regloc)
else:
attrreg = self.find_schema(schname=item[monolith._typestring])
if attrreg:
tempvalue = attrreg.validate(item, self._errors, selector=selector,
currdict=currdict, monolith=monolith,
newarg=newarg, checkall=checkall)
if tempvalue is True:
return False
elif tempvalue:
self._errors = tempvalue
return True
def bios_validate(self, item, regname, selector=None, currdict=None,
checkall=False, monolith=None):
"""BIOS Search for matching schemas and attribute registries and"""
""" ensure that item is valid
:param item: the item to be validated.
:type item: str.
:param regname: string containing the registry name.
:type regname: str.
:param selector: the type selection for the get operation.
:type selector: str.
:param currdict: current selection dictionary.
:type currdict: dict.
:param checkall: flag to determine if check all should be enabled.
:type checkall: boolean.
:param monolith: full data model retrieved from server.
:type monolith: dict.
"""
attrreg = self.find_bios_registry(regname=regname)
if attrreg:
tempvalue = attrreg.validate_bios_version(item, self._errors, \
selector=selector, currdict=currdict, \
checkall=checkall, monolith=monolith)
if tempvalue == 'readonly':
return tempvalue
elif tempvalue == 'unique':
return tempvalue
elif tempvalue:
self._errors = tempvalue
return True
def bios_info(self, item, regname, selector):
"""BIOS Search for matching schemas and attribute registries and"""
""" ensure that item is valid
:param item: the item to be validated.
:type item: str.
:param regname: string containing the registry name.
:type regname: str.
:param selector: the type selection for the get operation.
:type selector: str.
"""
attrreg = self.find_bios_registry(regname=regname)
if attrreg:
if attrreg.validate_bios_version(item, self._errors, \
selector=selector):
return False
return True
def find_schema(self, schname):
"""Searches through all locations and returns the first schema"""
""" found for the provided type
:param schname: string containing the schema name.
:type schname: str.
"""
for cls in self._classes:
found = cls.find_schema(schname=schname)
if found:
return found
return None
def find_registry(self, regname):
"""Searches through all locations and returns the first registry"""
""" found for the provided type
:param regname: string containing the registry name.
:type regname: str.
"""
for cls in self._classes_registry:
found = cls.find_registry(regname=regname)
if found:
return found
return None
def find_bios_registry(self, regname):
"""Searches through all locations and returns the first schema found"""
""" for the provided type
:param regname: string containing the registry name.
:type regname: str.
"""
for cls in self._bios_classes_registry:
found = cls.find_bios_registry(regname=regname)
if found:
return found
return None
def get_errors(self):
"""Return a list of errors encountered"""
return self._errors
def _is_local(self, path):
"""Determine if path is a local file or remote
:param path: The path to examine.
:type path: str.
"""
if u'://' in path:
return False
return True
class Classes(RisObject):
    """Represents an entry in the Classes registry.

    Wraps a parsed schema/registry collection and resolves individual
    schema or registry entries from it.  Note the four finders use
    deliberately different match rules: exact equality, prefix, and
    substring -- see each method.
    """
    def __init__(self, item):
        super(Classes, self).__init__(item)
        # base path used to resolve relative schema file locations
        self._root = None
    def set_root(self, newroot):
        """Set new root
        :param newroot: new root to be set.
        :type newroot: str.
        """
        self._root = newroot
    def find_schema(self, schname):
        """Returns iLO schemas (exact, case-insensitive name match).
        :param schname: string containing the schema name.
        :type schname: str.
        """
        result = None
        if hasattr(self, 'Items') and isinstance(self.Items, list):
            # legacy (rest) layout: entries carry a 'Schema' key
            for entry in self.Items:
                if entry and u'Schema' in entry and entry[u'Schema'].lower() \
                                                            == schname.lower():
                    regentry = RepoRegistryEntry.parse(entry)
                    regentry.set_root(self._root)
                    result = regentry
                    break
        elif hasattr(self, 'Members') and isinstance(self.Members, list):
            # redfish layout: compare the last dotted component of the
            # name against the second-to-last @odata.id path segment
            schname = schname.split('.')[-1]
            for entry in self.Members:
                schlink = entry[u'@odata.id'].split('/')
                schlink = schlink[len(schlink)-2]
                if schname.lower() == schlink.lower():
                    result = entry
                    break
        return result
    def find_registry(self, regname):
        """Returns iLO registries (case-insensitive *prefix* match).
        :param regname: string containing the registry name.
        :type regname: str.
        """
        result = None
        if hasattr(self, 'Items') and isinstance(self.Items, list):
            for entry in self.Items:
                if entry and (u'Schema' in entry and
                        entry[u'Schema'].lower().startswith(regname.lower())):
                    regentry = RepoRegistryEntry.parse(entry)
                    regentry.set_root(self._root)
                    result = regentry
                    break
        elif hasattr(self, 'Members') and isinstance(self.Members, list):
            # redfish layout: same @odata.id segment comparison as
            # find_schema, but for registry resources
            regname = regname.split('.')[-1]
            for entry in self.Members:
                reglink = entry[u'@odata.id'].split('/')
                reglink = reglink[len(reglink)-2]
                if regname.lower() == reglink.lower():
                    result = entry
                    break
        return result
    def find_bios_schema(self, schname):
        """Returns BIOS schemas (exact, case-insensitive name match;
        Items layout only).
        :param schname: string containing the schema name.
        :type schname: str.
        """
        result = None
        if hasattr(self, 'Items') and isinstance(self.Items, list):
            for entry in self.Items:
                if (u'Schema' in entry and entry[u'Schema'].lower() ==
                                                            schname.lower()):
                    regentry = RepoRegistryEntry.parse(entry)
                    regentry.set_root(self._root)
                    result = regentry
                    break
        return result
    def find_bios_registry(self, regname):
        """Returns BIOS registries (case-insensitive *substring* match;
        Items layout only).
        :param regname: string containing the registry name.
        :type regname: str.
        """
        result = None
        if hasattr(self, 'Items') and isinstance(self.Items, list):
            for entry in self.Items:
                if entry and (u'Schema' in entry and regname.lower() in \
                                                    entry[u'Schema'].lower()):
                    regentry = RepoRegistryEntry.parse(entry)
                    regentry.set_root(self._root)
                    result = regentry
                    break
        return result
class RepoBaseEntry(RisObject):
    """Base class for schema/registry location entries.

    Holds the root path used to resolve relative location references and
    provides the common location-file reader.
    """
    def __init__(self, d):
        super(RepoBaseEntry, self).__init__(d)
        # base path used to resolve relative Uri references
        self._root = None

    def set_root(self, newroot):
        """Set new root

        :param newroot: new root to be set.
        :type newroot: str.
        """
        self._root = newroot

    def _read_location_file(self, currloc, errlist):
        """Read the schema/registry file referenced by a location entry.

        :param currloc: location entry; must expose a ``Uri`` member with
            an ``extref`` path relative to the configured root.
        :type currloc: dict.
        :param errlist: list containing found errors.
        :type errlist: list.
        :returns: the file contents, or None when unavailable.
        """
        result = None

        if u'Uri' in currloc:
            root = os.path.normpath(self._root)
            xref = os.path.normpath(currloc.Uri.extref).lstrip(os.path.sep)
            fqpath = os.path.join(root, xref)

            if not os.path.isfile(fqpath):
                # message text kept verbatim ("location" looks like a typo
                # for "locate" -- confirm before changing user output)
                errlist.append(SchemaValidationError(
                    u"Unable to location ArchiveUri '%s'" % fqpath))
            elif fqpath.endswith('.json'):
                # close the handle deterministically; the original
                # open(...).read() leaked it
                with open(fqpath) as jsonfile:
                    result = jsonfile.read()

        return result
class RepoRegistryEntry(RepoBaseEntry):
    """Represents an entry in the Classes registry.

    Resolves registry/schema models either from a live server monolith or
    from a local location file, then validates selection dictionaries
    against them.
    """
    def __init__(self, d):
        super(RepoRegistryEntry, self).__init__(d)
    def validate(self, tdict, errlist=None, selector=None, currdict=None, \
                                checkall=False, monolith=None, newarg=None):
        """Load the schema file and validate tdict against it
        :param tdict: the dictionary to test against.
        :type tdict: dict.
        :param errlist: list containing found errors.
        :type errlist: list.
        :param selector: the type selection for the get operation.
        :type selector: str.
        :param currdict: current selection dictionary.
        :type currdict: dict.
        :param checkall: flag to determine if check all should be enabled.
        :type checkall: boolean.
        :param monolith: full data model retrieved from server.
        :type monolith: dict.
        :param newarg: list of multi level properties to be modified.
        :type newarg: list.
        :returns: True when the selector is read-only, the (non-empty)
            error list when problems were found, otherwise None.
        """
        if not errlist:
            errlist = list()

        reg = self.get_registry_model(errlist=errlist, currdict=currdict, \
                                            monolith=monolith, newarg=newarg)

        if reg and not checkall:
            try:
                # a read-only selector short-circuits validation entirely
                if reg[selector].readonly:
                    return True
            except BaseException:
                pass
            else:
                pass

            results = reg.validate_attribute_values(tdict)
            errlist.extend(results)
        elif checkall and selector is None:
            results = reg.validate_attribute_values(tdict)
            errlist.extend(results)
        else:
            # NOTE(review): also reached when reg exists but checkall is
            # set with a selector -- message may be misleading there
            errlist.append(RegistryValidationError(u'Unable to locate ' \
                                                           'registry model'))

        if errlist:
            return errlist

    def validate_bios_version(self, tdict, errlist=None, selector=None, \
                                checkall=False, currdict=None, monolith=None):
        """BIOS VERSION. Load the schema file and validate tdict against it
        :param tdict: the dictionary to test against.
        :type tdict: dict.
        :param errlist: list containing found errors.
        :type errlist: list.
        :param selector: the type selection for the get operation.
        :type selector: str.
        :param currdict: current selection dictionary.
        :type currdict: dict.
        :param checkall: flag to determine if check all should be enabled.
        :type checkall: boolean.
        :param monolith: full data model retrieved from server.
        :type monolith: dict.
        :returns: 'readonly' or 'unique' for protected selectors, the
            error list when problems were found, otherwise None.
        """
        if not errlist:
            errlist = list()

        reg = self.get_registry_model_bios_version(errlist=errlist, \
                                         currdict=currdict, monolith=monolith)

        if reg and not checkall:
            for item in reg.Attributes:
                if not item["Name"] == selector:
                    continue

                # validate that selector isn't read-only or a unique property
                if item["ReadOnly"] is True:
                    return 'readonly'

                try:
                    if item["IsSystemUniqueProperty"] is True:
                        return 'unique'
                except BaseException:
                    continue
                else:
                    continue

            results = reg.validate_att_val_bios(tdict)
            errlist.extend(results)
        elif checkall and selector is None:
            results = reg.validate_att_val_bios(tdict)
            errlist.extend(results)
        else:
            errlist.append(RegistryValidationError(u'Unable to locate ' \
                                                           'registry model'))

        if errlist:
            return errlist

    def validate_deprecated(self, tdict, errlist=None):
        """Load the schema file and validate tdict against it
        (legacy location layout keyed by language code).
        :param tdict: the dictionary to test against.
        :type tdict: list.
        :param errlist: list containing found errors.
        :type errlist: list.
        """
        if not errlist:
            errlist = list()

        if not hasattr(self, u'Location'):
            errlist.append(RegistryValidationError(u'Location property does' \
                                                               ' not exist'))
            return errlist

        currloc = None
        defloc = None
        # NOTE(review): 'TBD' never matches a real language code, so the
        # u'default' location is effectively always used -- confirm intent
        langcode = 'TBD'

        for loc in self.Location:
            for loclang in loc.keys():
                if loclang.lower() == langcode.lower():
                    currloc = loc[loclang]
                    break
                elif loclang.lower() == u'default':
                    defloc = loc[loclang]

        if not currloc:
            # use default location if lang doesn't match
            currloc = defloc

        if not currloc:
            errlist.append(RegistryValidationError(u'Unable to determine' \
                                                               ' location'))
            return

        location_file = self._read_location_file(currloc, errlist=errlist)

        if not location_file:
            errlist.append(RegistryValidationError(u'Location data is empty'))
        else:
            jsonreg = json.loads(location_file)

            if u'Registry' in jsonreg:
                if u'Type' in jsonreg and jsonreg[u'Type'] == \
                                            u'HpPropertiesRegistrySchema.1.0.0':
                    reg = HpPropertiesRegistry.parse(jsonreg[u'Registry'])
                    results = reg.validate_attribute_values(tdict)
                    errlist.extend(results)

    def get_registry_model(self, currdict=None, monolith=None, errlist=None, \
              skipcommit=False, searchtype=None, newarg=None, latestschema=None):
        """Load the schema file and find the registry model if available
        :param currdict: current selection dictionary.
        :type currdict: dict.
        :param monolith: full data model retrieved from server.
        :type monolith: dict.
        :param errlist: list containing found errors.
        :type errlist: list.
        :param skipcommit: flag to determine if commit should be skipped.
        :type skipcommit: boolean.
        :param searchtype: classifier for the current search.
        :type searchtype: str.
        :param newarg: list of multi level properties to be modified.
        :type newarg: list.
        :param latestschema: flag to determine if we should use smart schema.
        :type latestschema: boolean.
        """
        if not errlist:
            errlist = list()

        if not hasattr(self, u'Location'):
            errlist.append(RegistryValidationError(
                u'Location property does not exist'))
            return None

        currloc = None
        # NOTE(review): defloc is the bare string "en", not a location
        # dict -- the fallback only works for the monolith path; a
        # location-file read with it would find no u'Uri' member
        defloc = "en"
        langcode = list(locale.getdefaultlocale())
        if not langcode[0]:
            langcode[0] = "en"

        for loc in self.Location:
            locationlanguage = loc["Language"].lower()
            locationlanguage = locationlanguage.replace("-", "_")
            if locationlanguage in langcode[0].lower():
                currloc = loc
                break

        if not currloc:
            # use default location if lang doesn't match
            currloc = defloc

        if not currloc:
            errlist.append(RegistryValidationError(u'Unable to determine ' \
                                                               'location'))
            return None

        if not searchtype:
            searchtype = "ob"

        location_file = None

        if currdict and monolith:
            # search the live monolith for a matching schema instance
            for itemtype in monolith.types:
                if itemtype.lower().startswith(searchtype.lower()) and \
                                    u'Instances' in monolith.types[itemtype]:
                    for instance in monolith.types[itemtype][u'Instances']:
                        try:
                            if monolith.is_redfish:
                                currtype = currdict[instance._typestring].\
                                                                split('#')[-1]
                                currtype = currtype.split('.')[0] + '.'
                            else:
                                currtype = currdict[instance._typestring]

                            if latestschema:
                                # smart-schema: compare only the base name
                                currtype = currdict[instance._typestring].\
                                                                split('.')[:1]
                                insttype = instance.resp.dict["title"].\
                                                                split('.')[:1]

                                if currtype == insttype or currtype == \
                                                    instance.resp.dict[\
                                                    "oldtitle"].split('.')[:1]:
                                    location_file = instance.resp.dict
                                    break
                            elif searchtype == "ob" and instance.resp.dict[\
                                        "title"].startswith(currtype) or \
                                        "oldtitle" in instance.resp.dict.\
                                        keys() and currdict[instance._typestring\
                                        ] == instance.resp.dict["oldtitle"]:
                                location_file = instance.resp.dict
                                break
                            elif searchtype != "ob" and \
                                            currdict[instance._typestring] \
                                            in instance.resp.dict["RegistryPrefix"]:
                                location_file = instance.resp.dict
                                break
                        except BaseException:
                            pass
                        else:
                            pass

                if location_file:
                    break
        else:
            location_file = self._read_location_file(currloc, errlist=errlist)

        if not location_file:
            errlist.append(RegistryValidationError(u'Location data is empty'))
        else:
            if currdict and monolith:
                # round-trip through JSON to strip project types
                jsonreg = json.loads(json.dumps(location_file, indent=2, \
                                                            cls=JSONEncoder))
            else:
                jsonreg = json.loads(location_file)

            if skipcommit:
                return jsonreg["Messages"]

            if u'properties' in jsonreg:
                regitem = jsonreg[u'properties']
                reg = HpPropertiesRegistry.parse(regitem)

                if newarg:
                    # walk down the nested property path in newarg,
                    # resolving patternProperties by regex-matching the
                    # next path component (Py2 iterkeys throughout)
                    regcopy = reg
                    for arg in newarg[:-1]:
                        try:
                            if 'properties' in regcopy[arg].iterkeys() \
                                    and ('patternProperties' in \
                                                regcopy[arg].iterkeys()):
                                regcopy[arg]['properties'].update(\
                                            regcopy[arg]['patternProperties'])
                                regcopy = regcopy[arg]["properties"]

                                for pattern in regcopy.iterkeys():
                                    test = re.compile(pattern)
                                    nextarg = newarg[newarg.index(arg)+1]
                                    match = test.match(nextarg)

                                    if match:
                                        regcopy[nextarg] = regcopy.pop(pattern)
                                        break
                            elif 'oneOf' in regcopy[arg]:
                                # pick the oneOf alternative that contains
                                # the next path component
                                oneof = regcopy[arg]['oneOf']
                                for item in oneof:
                                    regcopy = item['properties']
                                    if not arg == newarg[-1]:
                                        try:
                                            nextitem = newarg[newarg.index(arg)+1]
                                            regcopy[nextitem]
                                            break
                                        except Exception:
                                            continue
                            else:
                                regcopy = regcopy[arg]["properties"]
                        except Exception:
                            # fallback: arg itself only exists as a
                            # patternProperties match
                            try:
                                regcopy = regcopy[arg]['patternProperties']
                                for pattern in regcopy.iterkeys():
                                    test = re.compile(pattern)
                                    nextarg = newarg[newarg.index(arg)+1]
                                    match = test.match(nextarg)

                                    if match:
                                        patterninfo = regcopy.pop(pattern)
                                        regcopy[nextarg] = patterninfo
                            except BaseException:
                                return None
                    reg = regcopy
                return reg
        return None

    def get_registry_model_bios_version(self, currdict=None, monolith=None, \
                                                                errlist=None):
        """BIOS VERSION Load the schema file and find the registry model
        if available.
        :param currdict: current selection dictionary.
        :type currdict: dict.
        :param monolith: full data model retrieved from server.
        :type monolith: dict.
        :param errlist: list containing found errors.
        :type errlist: list.
        """
        if not errlist:
            errlist = list()

        if not hasattr(self, u'Location'):
            errlist.append(RegistryValidationError(
                u'Location property does not exist'))
            return None

        currloc = None
        # see get_registry_model: "en" is only a sentinel fallback here
        defloc = "en"
        langcode = list(locale.getdefaultlocale())
        if not langcode[0]:
            langcode[0] = "en"

        for loc in self.Location:
            locationlanguage = loc["Language"].lower()
            locationlanguage = locationlanguage.replace("-", "_")
            if locationlanguage in langcode[0].lower():
                currloc = loc
                break

        if not currloc:
            # use default location if lang doesn't match
            currloc = defloc

        if not currloc:
            errlist.append(RegistryValidationError(
                u'Unable to determine location'))
            return None

        location_file = None

        if currdict and monolith:
            # take the first BIOS attribute registry instance found
            for itemtype in monolith.types:
                if "HpBiosAttributeRegistrySchema." in itemtype and \
                                    u'Instances' in monolith.types[itemtype]:
                    for instance in monolith.types[itemtype][u'Instances']:
                        location_file = instance.resp.dict
                        break

                if location_file:
                    break
        else:
            location_file = self._read_location_file(currloc, errlist=errlist)

        if not location_file:
            errlist.append(RegistryValidationError(u'Location data is empty'))
        else:
            if currdict and monolith:
                jsonreg = json.loads(json.dumps(location_file, indent=2, \
                                                            cls=JSONEncoder))
            else:
                jsonreg = json.loads(location_file)

            if u'RegistryEntries' in jsonreg:
                regitem = jsonreg[u'RegistryEntries']
                reg = HpPropertiesRegistry.parse(regitem)
                return reg
        return None
class RepoSchemaEntry(RepoBaseEntry):
    """Represents an entry in the Classes registry.

    Variant of RepoBaseEntry whose location files live inside a zip
    archive (ArchiveUri + ArchiveFile).
    """
    def __init__(self, item):
        super(RepoSchemaEntry, self).__init__(item)
        self._root = None

    def set_root(self, newroot):
        """Set new root

        :param newroot: new root to be set.
        :type newroot: str.
        """
        self._root = newroot

    def _read_location_file(self, currloc, errlist):
        """Read a schema file out of the archive referenced by a location.

        :param currloc: location entry; needs ArchiveUri and ArchiveFile.
        :type currloc: dict.
        :param errlist: list containing found errors.
        :type errlist: list.
        :returns: the member file contents, or None when unavailable.
        """
        result = None

        if u'ArchiveUri' in currloc and u'ArchiveFile' in currloc:
            fqpath = os.path.join(
                self._root, currloc.ArchiveUri.xref.lstrip(os.path.sep))

            if not os.path.isfile(fqpath):
                # message text kept verbatim ("location" looks like a typo
                # for "locate" -- confirm before changing user output)
                errlist.append(SchemaValidationError(u"Unable to location " \
                                                 "ArchiveUri '%s'" % fqpath))
            elif fqpath.endswith('.zip'):
                archive_file = currloc.ArchiveFile

                archive_fh = zipfile.ZipFile(fqpath)
                try:
                    for member in archive_fh.infolist():
                        if member.filename.lower() == archive_file.lower():
                            jsonsch_fh = archive_fh.open(member)
                            try:
                                result = jsonsch_fh.read()
                            finally:
                                jsonsch_fh.close()
                            # stop at the first match; the original kept
                            # scanning (and had already closed the zip)
                            break
                finally:
                    # always release the archive; the original leaked it
                    # when no member matched
                    archive_fh.close()

        return result

    def validate(self, tdict, errlist=None):
        """Load the schema file and validate tdict against it

        :param tdict: the dictionary to test against.
        :type tdict: list.
        :param errlist: list containing found errors.
        :type errlist: list.
        """
        if not errlist:
            errlist = list()

        result = list()

        if not hasattr(self, u'Location'):
            result.append(SchemaValidationError(u'Location property does ' \
                                                               'not exist'))
            return result

        currloc = None
        defloc = None
        # 'TBD' never matches; the u'default' entry is effectively used
        langcode = 'TBD'

        for loc in self.Location:
            for loclang in loc.keys():
                if loclang.lower() == langcode.lower():
                    currloc = loc[loclang]
                    break
                elif loclang.lower() == u'default':
                    defloc = loc[loclang]

        if not currloc:
            # use default location if lang doesn't match
            currloc = defloc

        if not currloc:
            result.append(SchemaValidationError(
                u'Unable to determine location'))
            return

        location_file = self._read_location_file(currloc, errlist=result)

        if not location_file:
            result.append(SchemaValidationError(u'Location data is empty'))
        else:
            jsonsch = json.loads(location_file)
            # third-party JSON-schema validation; raises on mismatch
            validictory.validate(tdict, jsonsch)
class HpPropertiesRegistry(RisObject):
    """Models the HpPropertiesRegistry file.

    Provides value validation and validator lookup for both iLO-style
    (keyed by property name) and BIOS-style (Attributes list) registries.
    """
    def __init__(self, d):
        super(HpPropertiesRegistry, self).__init__(d)
    def validate_attribute_values(self, tdict):
        """Look for tdict in attribute list and attempt to validate its value
        :param tdict: the dictionary to test against.
        :type tdict: list.
        :returns: list of RegistryValidationError found.
        """
        result = list()

        for tkey in tdict:
            try:
                # only entries that carry a 'type' member are validated
                if self[tkey] and hasattr(self[tkey], "type"):
                    temp = self.validate_attribute(self[tkey], tdict[tkey], \
                                                                        tkey)

                    for err in temp:
                        if isinstance(err, RegistryValidationError):
                            if err.reg:
                                err.sel = tkey

                    result.extend(temp)
            except Exception:
                # unknown keys are silently skipped
                pass

        return result

    def validate_att_val_bios(self, tdict):
        """Look for tdict in the BIOS Attributes list and attempt to
        validate its value.
        :param tdict: the dictionary to test against.
        :type tdict: list.
        :returns: list of RegistryValidationError found.
        """
        result = list()

        for tkey in tdict:
            for item in self.Attributes:
                try:
                    if item["Name"] == tkey and hasattr(item, "Type"):
                        temp = self.validate_attribute(item, tdict[tkey], tkey)

                        for err in temp:
                            if isinstance(err, RegistryValidationError):
                                if err.reg:
                                    err.sel = tkey

                        result.extend(temp)
                        break
                except Exception:
                    pass

        return result

    def get_validator(self, attrname, newargs=None, oneof=None):
        """Returns attribute validator type
        :param attrname: attribute name to be used for validation.
        :type attrname: str.
        :param newargs: list of multi level properties to be modified.
        :type newargs: list.
        :param oneof: special string for "oneof" options within validation.
        :type oneof: list.
        :returns: a parsed validator instance, or None.
        """
        # NOTE: 'self' is deliberately rebound to walk into nested
        # registries (oneOf alternatives and multi-level properties)
        if oneof:
            self = oneof

        if newargs:
            for arg in newargs:
                try:
                    self = self['properties']
                except Exception:
                    pass

                if not hasattr(self, arg):
                    return None
                elif not arg == newargs[-1]:
                    self = self[arg]

        if not hasattr(self, attrname):
            return None

        validator = None

        # first matching validator type wins
        if EnumValidator.is_type(self[attrname]):
            validator = EnumValidator.parse(self[attrname])
        elif StringValidator.is_type(self[attrname]):
            validator = StringValidator.parse(self[attrname])
        elif ObjectValidator.is_type(self[attrname]):
            validator = ObjectValidator.parse(self[attrname])
        elif IntegerValidator.is_type(self[attrname]):
            validator = IntegerValidator.parse(self[attrname])
        elif BoolValidator.is_type(self[attrname]):
            validator = BoolValidator.parse(self[attrname])
        elif PasswordValidator.is_type(self[attrname]):
            validator = PasswordValidator.parse(self[attrname])
        elif u'oneOf' in self[attrname].keys():
            # recurse into each oneOf alternative until one resolves
            for item in self[attrname]['oneOf']:
                validator = self.get_validator(attrname, newargs, \
                                        HpPropertiesRegistry({attrname:item}))
                if validator:
                    break
        return validator

    def get_validator_bios(self, attrname):
        """Returns attribute validator type (BIOS Attributes layout)
        :param attrname: attribute name to be used for validation.
        :type attrname: str.
        :returns: a parsed validator for the first matching attribute,
            or None when the name is not present.
        """
        for item in self.Attributes:
            if item["Name"] == attrname:
                validator = None

                if EnumValidator.is_type(item):
                    validator = EnumValidator.parse(item)
                elif StringValidator.is_type(item):
                    validator = StringValidator.parse(item)
                elif IntegerValidator.is_type(item):
                    validator = IntegerValidator.parse(item)
                elif BoolValidator.is_type(item):
                    validator = BoolValidator.parse(item)
                elif ObjectValidator.is_type(item):
                    validator = ObjectValidator.parse(item)
                elif PasswordValidator.is_type(item):
                    validator = PasswordValidator.parse(item)

                return validator

        return None

    def validate_attribute(self, attrentry, attrval, name):
        """Function to validate attribute against iLO schema
        :param attrentry: attribute registry entry to validate against.
        :type attrentry: dict.
        :param attrval: attribute value to be used for validation.
        :type attrval: str.
        :param name: clean name for outputting.
        :type name: str.
        :raises UnknownValidatorError: when no validator type matches.
        :returns: list of validation errors (empty when valid).
        """
        result = list()
        validator = None

        if EnumValidator.is_type(attrentry):
            validator = EnumValidator.parse(attrentry)
        elif StringValidator.is_type(attrentry):
            validator = StringValidator.parse(attrentry)
        elif IntegerValidator.is_type(attrentry):
            validator = IntegerValidator.parse(attrentry)
        elif BoolValidator.is_type(attrentry):
            validator = BoolValidator.parse(attrentry)
        elif ObjectValidator.is_type(attrentry):
            validator = ObjectValidator.parse(attrentry)
        elif PasswordValidator.is_type(attrentry):
            validator = PasswordValidator.parse(attrentry)
        else:
            raise UnknownValidatorError(attrentry)

        if validator:
            result.extend(validator.validate(attrval, name))
        return result
class BaseValidator(RisObject):
    """Common base for all schema attribute validators.

    Concrete subclasses must provide their own ``validate``.
    """
    def __init__(self, d):
        super(BaseValidator, self).__init__(d)

    def validate(self):
        """Overridable function for validation """
        message = u'You must override this method in your derived class'
        raise RuntimeError(message)
class EnumValidator(BaseValidator):
    """Enum validator class"""
    def __init__(self, d):
        super(EnumValidator, self).__init__(d)
    @staticmethod
    def is_type(attrentry):
        """Validate that the type is enumeration
        :param attrentry: attribute entry containing type information.
        :type attrentry: dict.
        :returns: True when the entry describes an enumeration.
        """
        # lowercase 'type' is the iLO/JSON-schema layout; uppercase
        # 'Type' is the BIOS registry layout
        if u'type' in attrentry:
            if isinstance(attrentry[u'type'], list):
                for item in attrentry[u'type']:
                    if item.lower() == u'enumeration':
                        return True
                    elif u'enum' in attrentry and item.lower() == u'string':
                        return True
            elif u'enum' in attrentry and attrentry[u'type'] == "array":
                # Py2-only iteritems; array of strings with enum values
                for key, value in attrentry[u'items'].iteritems():
                    if key.lower() == "type" and value.lower() == u'string':
                        return True
            else:
                if attrentry[u'type'].lower() == u'enumeration':
                    return True
                elif u'enum' in attrentry and attrentry[u'type'].lower() == \
                                                                    u'string':
                    return True
        elif u'Type' in attrentry:
            if attrentry[u'Type'].lower() == u'enumeration':
                return True

        return False
    def validate(self, newval, name):
        """Validate against iLO schema
        :param newval: new value to be used for validation.
        :type newval: str.
        :param name: clean name for outputting.
        :type name: str.
        :returns: list with one error when newval is not a valid choice.
        """
        result = list()

        try:
            # iLO layout: flat 'enum' list of strings
            for possibleval in self.enum:
                if possibleval.lower() == newval.lower():
                    return result
        except Exception:
            # BIOS layout: 'Value' list of {ValueName: ...} entries
            for possibleval in self.Value:
                if possibleval.ValueName.lower() == str(newval).lower():
                    return result

        result.append(RegistryValidationError(u"'%s' is not a valid setting "
                                              "for '%s'" % (newval, name),
                                              regentry=self))
        return result
    def print_help(self, name, out=sys.stdout):
        """Info command helper function for print outs
        :param name: clean name for outputting.
        :type name: str.
        :param out: output type for verbosity.
        :type out: output type.
        """
        wrapper = textwrap.TextWrapper()
        wrapper.initial_indent = ' ' * 4
        wrapper.subsequent_indent = ' ' * 4

        out.write(u'\nNAME\n')
        out.write('%s' % wrapper.fill('%s' % name))
        out.write('\n')

        # '%(key)s' % self relies on RisObject's mapping behavior
        if u'DisplayName' in self:
            out.write(u'\nDISPLAY NAME\n')
            out.write('%s' % wrapper.fill('%(DisplayName)s' % self))
            out.write('\n')

        if u'description' in self:
            out.write(u'\nDESCRIPTION\n')
            out.write('%s' % wrapper.fill('%(description)s' % self))
            out.write('\n')

        if u'HelpText' in self:
            out.write(u'\nHELP TEXT\n')
            out.write('%s' % wrapper.fill('%(HelpText)s' % self))
            out.write('\n')

        if u'WarningText' in self:
            out.write(u'\n************************************************\n')
            out.write(u'\nWARNING\n')
            out.write('%s' % wrapper.fill('%(WarningText)s' % self))
            out.write(u'\n\n**********************************************\n')
            out.write('\n')

        if u'type' in self and isinstance(self[u'type'], list):
            out.write(u'\nTYPE\n')
            for item in self[u'type']:
                out.write('%s\n' % wrapper.fill('%s' % item))
            out.write('\n')
        elif u'type' in self:
            out.write(u'\nTYPE\n')
            out.write('%s' % wrapper.fill('%(type)s' % self))
            out.write('\n')
        elif u'Type' in self:
            out.write(u'\nTYPE\n')
            out.write('%s' % wrapper.fill('%(Type)s' % self))
            out.write('\n')

        if u'ReadOnly' in self:
            out.write(u'\nREAD-ONLY\n')
            out.write('%s' % wrapper.fill('%(ReadOnly)s' % self))
            out.write('\n')
        elif u'readonly' in self:
            out.write(u'\nREAD-ONLY\n')
            out.write('%s' % wrapper.fill('%(readonly)s' % self))
            out.write('\n')

        out.write(u'\nPOSSIBLE VALUES\n')
        try:
            # iLO layout first, falling back to the BIOS 'Value' list
            for possibleval in self.enum:
                out.write('    %s\n' % possibleval)
        except Exception:
            for possibleval in self.Value:
                out.write('    %(ValueName)s\n' % possibleval)
        out.write('\n')
class BoolValidator(BaseValidator):
    """Bool validator class"""
    def __init__(self, d):
        super(BoolValidator, self).__init__(d)
    @staticmethod
    def is_type(attrentry):
        """Validate that the type is boolean
        :param attrentry: attribute entry containing data to be validated.
        :type attrentry: dict.
        :returns: True when the entry describes a boolean.
        """
        # lowercase 'type' is the iLO/JSON-schema layout; uppercase
        # 'Type' is the BIOS registry layout
        if u'type' in attrentry:
            if isinstance(attrentry[u'type'], list):
                for item in attrentry[u'type']:
                    if item.lower() == u'boolean':
                        return True
            elif attrentry[u'type'] == "array":
                # Py2-only iteritems; array of booleans
                for key, value in attrentry[u'items'].iteritems():
                    if key.lower() == "type" and value.lower() == u'boolean':
                        return True
            else:
                if attrentry[u'type'].lower() == u'boolean':
                    return True
        elif u'Type' in attrentry:
            if attrentry[u'Type'].lower() == u'boolean':
                return True

        return False
    def validate(self, newval, name):
        """Validate against iLO schema
        :param newval: new value to be used for validation.
        :type newval: str.
        :param name: clean name for outputting.
        :type name: str.
        :returns: list with one error unless newval is literally True or
            False (identity check -- truthy values do not pass).
        """
        result = list()
        if newval is False or newval is True:
            return result

        result.append(
            RegistryValidationError(
                u"'%s' is not a valid setting for '%s'" % (newval, name),
                regentry=self
            )
        )
        return result
    def print_help(self, name, out=sys.stdout):
        """Info command helper function for print outs
        :param name: clean name for outputting.
        :type name: str.
        :param out: output type for verbosity.
        :type out: output type.
        """
        wrapper = textwrap.TextWrapper()
        wrapper.initial_indent = ' ' * 4
        wrapper.subsequent_indent = ' ' * 4

        out.write(u'\nNAME\n')
        out.write('%s' % wrapper.fill('%s' % name))
        out.write('\n')

        # '%(key)s' % self relies on RisObject's mapping behavior
        if u'DisplayName' in self:
            out.write(u'\nDISPLAY NAME\n')
            out.write('%s' % wrapper.fill('%(DisplayName)s' % self))
            out.write('\n')

        if u'description' in self:
            out.write(u'\nDESCRIPTION\n')
            out.write('%s' % wrapper.fill('%(description)s' % self))
            out.write('\n')

        if u'HelpText' in self:
            out.write(u'\nHELP TEXT\n')
            out.write('%s' % wrapper.fill('%(HelpText)s' % self))
            out.write('\n')

        if u'WarningText' in self:
            out.write(u'\n************************************************\n')
            out.write(u'\nWARNING\n')
            out.write('%s' % wrapper.fill('%(WarningText)s' % self))
            out.write(u'\n\n**********************************************\n')
            out.write('\n')

        if u'type' in self and isinstance(self[u'type'], list):
            out.write(u'\nTYPE\n')
            for item in self[u'type']:
                out.write('%s\n' % wrapper.fill('%s' % item))
            out.write('\n')
        elif u'type' in self:
            out.write(u'\nTYPE\n')
            out.write('%s' % wrapper.fill('%(type)s' % self))
            out.write('\n')
        elif u'Type' in self:
            out.write(u'\nTYPE\n')
            out.write('%s' % wrapper.fill('%(Type)s' % self))
            out.write('\n')

        if u'ReadOnly' in self:
            out.write(u'\nREAD-ONLY\n')
            out.write('%s' % wrapper.fill('%(ReadOnly)s' % self))
            out.write('\n')
        elif u'readonly' in self:
            out.write(u'\nREAD-ONLY\n')
            out.write('%s' % wrapper.fill('%(readonly)s' % self))
            out.write('\n')

        out.write(u'\nPOSSIBLE VALUES\n')
        out.write('    True or False\n')
        out.write('\n')
class StringValidator(BaseValidator):
    """Validator for string-typed iLO registry attributes.

    Instances are dict-like (via BaseValidator) and carry the registry
    entry's constraint fields such as MinLength/MaxLength/ValueExpression.
    """
    def __init__(self, d):
        """Constructor.

        :param d: registry entry data backing this validator.
        :type d: dict.
        """
        super(StringValidator, self).__init__(d)

    @staticmethod
    def is_type(attrentry):
        """Return True if the registry entry describes a string type.

        Handles the three layouts seen in registry data: a list of type
        names under 'type', an "array" entry with an 'items' sub-dict,
        and a plain scalar under 'type' or 'Type'.

        :param attrentry: attribute entry containing data to be validated.
        :type attrentry: dict.
        """
        if u'type' in attrentry:
            if isinstance(attrentry[u'type'], list):
                for item in attrentry[u'type']:
                    if item.lower() == u'string':
                        return True
            elif attrentry[u'type'] == "array":
                # items() instead of the Python-2-only iteritems() so this
                # also runs under Python 3 (items() works on both).
                for key, value in attrentry[u'items'].items():
                    if key.lower() == "type" and u'string' in value:
                        return True
            else:
                if attrentry[u'type'].lower() == u'string':
                    return True
        elif u'Type' in attrentry:
            if attrentry[u'Type'].lower() == u'string':
                return True
        return False

    def validate(self, newval, name):
        """Validate newval against this entry's iLO schema constraints.

        :param newval: new value to be used for validation.
        :type newval: str.
        :param name: clean name for outputting.
        :type name: str.
        :returns: list of RegistryValidationError; empty when valid.
        """
        result = list()
        if u'MinLength' in self:
            if len(newval) < int(self[u'MinLength']):
                result.append(RegistryValidationError(
                    u"'%s' must be at least '%s' characters long" %
                    (self.Name, int(self[u'MinLength'])), regentry=self))
        if u'MaxLength' in self:
            if len(newval) > int(self[u'MaxLength']):
                result.append(RegistryValidationError(
                    u"'%s' must be less than '%s' characters long" %
                    (self.Name, int(self[u'MaxLength'])), regentry=self))
        if u'ValueExpression' in self:
            if self[u'ValueExpression']:
                # Pattern is only anchored at the start (re.match semantics).
                pat = re.compile(self[u'ValueExpression'])
                if newval and not pat.match(newval):
                    result.append(RegistryValidationError(
                        u"'%(Name)s' must match the regular expression "
                        "'%(ValueExpression)s'" % (self), regentry=self))
        return result

    def print_help(self, name, out=sys.stdout):
        """Write a human-readable help summary for this string setting.

        :param name: clean name for outputting.
        :type name: str.
        :param out: stream the help text is written to.
        :type out: file-like object.
        """
        wrapper = textwrap.TextWrapper()
        wrapper.initial_indent = ' ' * 4
        wrapper.subsequent_indent = ' ' * 4
        out.write(u'\nNAME\n')
        out.write('%s' % wrapper.fill('%s' % name))
        out.write('\n')
        if u'DisplayName' in self:
            out.write(u'\nDISPLAY NAME\n')
            out.write('%s' % wrapper.fill('%(DisplayName)s' % self))
            out.write('\n')
        if u'description' in self:
            out.write(u'\nDESCRIPTION\n')
            out.write('%s' % wrapper.fill('%(description)s' % self))
            out.write('\n')
        if u'HelpText' in self:
            out.write(u'\nHELP TEXT\n')
            out.write('%s' % wrapper.fill('%(HelpText)s' % self))
            out.write('\n')
        if u'WarningText' in self:
            out.write(u'\n************************************************\n')
            out.write(u'\nWARNING\n')
            out.write('%s' % wrapper.fill('%(WarningText)s' % self))
            out.write(u'\n\n**********************************************\n')
            out.write('\n')
        if u'type' in self and isinstance(self[u'type'], list):
            out.write(u'\nTYPE\n')
            for item in self[u'type']:
                out.write('%s\n' % wrapper.fill('%s' % item))
            out.write('\n')
        elif u'type' in self:
            out.write(u'\nTYPE\n')
            out.write('%s' % wrapper.fill('%(type)s' % self))
            out.write('\n')
        elif u'Type' in self:
            out.write(u'\nTYPE\n')
            out.write('%s' % wrapper.fill('%(Type)s' % self))
            out.write('\n')
        if u'MinLength' in self:
            out.write(u'\nMIN LENGTH\n')
            out.write('%s' % wrapper.fill('%(MinLength)s' % self))
            out.write('\n')
        if u'MaxLength' in self:
            out.write(u'\nMAX LENGTH\n')
            out.write('%s' % wrapper.fill('%(MaxLength)s' % self))
            out.write('\n')
        if u'ReadOnly' in self:
            out.write(u'\nREAD-ONLY\n')
            out.write('%s' % wrapper.fill('%(ReadOnly)s' % self))
            out.write('\n')
        elif u'readonly' in self:
            out.write(u'\nREAD-ONLY\n')
            out.write('%s' % wrapper.fill('%(readonly)s' % self))
            out.write('\n')
class IntegerValidator(BaseValidator):
    """Validator for integer-typed iLO registry attributes."""
    def __init__(self, d):
        """Constructor.

        :param d: registry entry data backing this validator.
        :type d: dict.
        """
        super(IntegerValidator, self).__init__(d)

    @staticmethod
    def is_type(attrentry):
        """Return True if the registry entry describes an integer type.

        :param attrentry: attribute entry containing data to be validated.
        :type attrentry: dict.
        """
        if u'type' in attrentry:
            if isinstance(attrentry[u'type'], list):
                for item in attrentry[u'type']:
                    if item.lower() == u'integer' or item.lower() == u'number':
                        return True
            elif attrentry[u'type'] == "array":
                # items() instead of the Python-2-only iteritems().
                for key, value in attrentry[u'items'].items():
                    if key.lower() == "type":
                        # Fixed typo: previously compared against u'interger',
                        # so the array branch could never match integers.
                        if value.lower() == u'integer' or value.lower() == \
                                                                    u'number':
                            return True
            else:
                # Fixed redundant double .lower().lower() on the second test.
                if attrentry[u'type'].lower() == u'integer' or \
                                    attrentry[u'type'].lower() == u'number':
                    return True
        elif u'Type' in attrentry:
            if attrentry[u'Type'].lower() == u'integer':
                return True
        return False

    def validate(self, newval, name):
        """Validate newval against this entry's iLO schema constraints.

        :param newval: new value to be used for validation.
        :type newval: str.
        :param name: clean name for outputting.
        :type name: str.
        :returns: list of RegistryValidationError; empty when valid.
        """
        result = list()
        # Check the textual form first so a non-numeric value produces a
        # validation error instead of int() raising ValueError.  The
        # original pattern r'0-9+' could never match an integer, and it was
        # applied to the int itself rather than its string form.
        pat = re.compile(r'^-?[0-9]+$')
        if not pat.match(str(newval)):
            result.append(
                RegistryValidationError(
                    u"'%(Name)s' must be an integer value'" % (self),
                    regentry=self
                )
            )
            return result
        intval = int(newval)
        if u'LowerBound' in self:
            if intval < int(self[u'LowerBound']):
                result.append(RegistryValidationError(u"'%s' must be greater" \
                              " than or equal to '%s'" % (self.Name, \
                              int(self[u'LowerBound'])), regentry=self))
        if u'UpperBound' in self:
            if intval > int(self[u'UpperBound']):
                # Fixed: the message previously reported the LowerBound
                # value while checking against the UpperBound.
                result.append(RegistryValidationError(u"'%s' must be less " \
                              "than or equal to '%s'" % (self.Name, \
                              int(self[u'UpperBound'])), regentry=self))
        return result

    def print_help(self, name, out=sys.stdout):
        """Write a human-readable help summary for this integer setting.

        :param name: clean name for outputting.
        :type name: str.
        :param out: stream the help text is written to.
        :type out: file-like object.
        """
        wrapper = textwrap.TextWrapper()
        wrapper.initial_indent = ' ' * 4
        wrapper.subsequent_indent = ' ' * 4
        out.write(u'\nNAME\n')
        out.write('%s' % wrapper.fill('%s' % name))
        out.write('\n')
        if u'DisplayName' in self:
            out.write(u'\nDISPLAY NAME\n')
            out.write('%s' % wrapper.fill('%(DisplayName)s' % self))
            out.write('\n')
        if u'description' in self:
            out.write(u'\nDESCRIPTION\n')
            out.write('%s' % wrapper.fill('%(description)s' % self))
            out.write('\n')
        if u'HelpText' in self:
            out.write(u'\nHELP TEXT\n')
            out.write('%s' % wrapper.fill('%(HelpText)s' % self))
            out.write('\n')
        if u'WarningText' in self:
            out.write(u'\n************************************************\n')
            out.write(u'\nWARNING\n')
            out.write('%s' % wrapper.fill('%(WarningText)s' % self))
            out.write(u'\n\n**********************************************\n')
            out.write('\n')
        if u'type' in self and isinstance(self[u'type'], list):
            out.write(u'\nTYPE\n')
            for item in self[u'type']:
                out.write('%s\n' % wrapper.fill('%s' % item))
            out.write('\n')
        elif u'type' in self:
            out.write(u'\nTYPE\n')
            out.write('%s' % wrapper.fill('%(type)s' % self))
            out.write('\n')
        elif u'Type' in self:
            out.write(u'\nTYPE\n')
            out.write('%s' % wrapper.fill('%(Type)s' % self))
            out.write('\n')
        if u'ReadOnly' in self:
            out.write(u'\nREAD-ONLY\n')
            out.write('%s' % wrapper.fill('%(ReadOnly)s' % self))
            out.write('\n')
        elif u'readonly' in self:
            out.write(u'\nREAD-ONLY\n')
            out.write('%s' % wrapper.fill('%(readonly)s' % self))
            out.write('\n')
class ObjectValidator(BaseValidator):
    """Validator for object-typed iLO registry attributes."""
    def __init__(self, d):
        """Constructor.

        :param d: registry entry data backing this validator.
        :type d: dict.
        """
        super(ObjectValidator, self).__init__(d)

    @staticmethod
    def is_type(attrentry):
        """Return True if the registry entry describes an object type.

        :param attrentry: attribute entry containing data to be validated.
        :type attrentry: dict.
        """
        if u'type' in attrentry:
            if isinstance(attrentry[u'type'], list):
                for item in attrentry[u'type']:
                    if item.lower() == u'object':
                        return True
            elif attrentry[u'type'] == "array":
                # items() instead of the Python-2-only iteritems().
                for key, value in attrentry[u'items'].items():
                    if key.lower() == "type" and value.lower() == u'object':
                        return True
                    elif key.lower() == "anyof":
                        # 'anyOf' entries may not have the expected shape;
                        # skip malformed candidates rather than failing.
                        try:
                            if value[0][u'type'] == u'object':
                                return True
                        except Exception:
                            continue
            else:
                if attrentry[u'type'].lower() == u'object':
                    return True
        elif u'Type' in attrentry:
            if attrentry[u'Type'].lower() == u'object':
                return True
        return False

    def validate(self, newval, name):
        """Validate newval against the iLO schema.

        No object-level constraints are implemented yet, so every value
        is currently accepted.

        :param newval: new value to be used for validation.
        :type newval: str.
        :param name: clean name for outputting.
        :type name: str.
        :returns: empty list (no validation performed).
        """
        # TODO: add validation logic for object-typed entries.
        result = list()
        return result

    def print_help(self, name, out=sys.stdout):
        """Write a human-readable help summary for this object setting.

        :param name: clean name for outputting.
        :type name: str.
        :param out: stream the help text is written to.
        :type out: file-like object.
        """
        wrapper = textwrap.TextWrapper()
        wrapper.initial_indent = ' ' * 4
        wrapper.subsequent_indent = ' ' * 4
        out.write(u'\nNAME\n')
        out.write('%s' % wrapper.fill('%s' % name))
        out.write('\n')
        if u'DisplayName' in self:
            out.write(u'\nDISPLAY NAME\n')
            out.write('%s' % wrapper.fill('%(DisplayName)s' % self))
            out.write('\n')
        if u'description' in self:
            out.write(u'\nDESCRIPTION\n')
            out.write('%s' % wrapper.fill('%(description)s' % self))
            out.write('\n')
        if u'HelpText' in self:
            out.write(u'\nHELP TEXT\n')
            out.write('%s' % wrapper.fill('%(HelpText)s' % self))
            out.write('\n')
        if u'WarningText' in self:
            out.write(u'\n************************************************\n')
            out.write(u'\nWARNING\n')
            out.write('%s' % wrapper.fill('%(WarningText)s' % self))
            out.write(u'\n\n**********************************************\n')
            out.write('\n')
        if u'type' in self and isinstance(self[u'type'], list):
            out.write(u'\nTYPE\n')
            for item in self[u'type']:
                out.write('%s\n' % wrapper.fill('%s' % item))
            out.write('\n')
        elif u'type' in self:
            out.write(u'\nTYPE\n')
            out.write('%s' % wrapper.fill('%(type)s' % self))
            out.write('\n')
        elif u'Type' in self:
            out.write(u'\nTYPE\n')
            out.write('%s' % wrapper.fill('%(Type)s' % self))
            out.write('\n')
        if u'ReadOnly' in self:
            out.write(u'\nREAD-ONLY\n')
            out.write('%s' % wrapper.fill('%(ReadOnly)s' % self))
            out.write('\n')
        elif u'readonly' in self:
            out.write(u'\nREAD-ONLY\n')
            out.write('%s' % wrapper.fill('%(readonly)s' % self))
            out.write('\n')
class PasswordValidator(BaseValidator):
    """Validator for password-typed iLO registry attributes."""
    def __init__(self, d):
        """Constructor.

        :param d: registry entry data backing this validator.
        :type d: dict.
        """
        super(PasswordValidator, self).__init__(d)

    @staticmethod
    def is_type(attrentry):
        """Return True if the registry entry describes a password type.

        :param attrentry: attribute entry containing data to be validated.
        :type attrentry: dict.
        """
        if u'type' in attrentry:
            if isinstance(attrentry[u'type'], list):
                for item in attrentry[u'type']:
                    if item.lower() == u'password':
                        return True
            elif attrentry[u'type'] == "array":
                # items() instead of the Python-2-only iteritems().
                for key, value in attrentry[u'items'].items():
                    if key.lower() == "type" and value.lower() == u'password':
                        return True
            else:
                if attrentry[u'type'].lower() == u'password':
                    return True
        elif u'Type' in attrentry:
            if attrentry[u'Type'].lower() == u'password':
                return True
        return False

    def validate(self, newval, name):
        """Validate newval against this entry's iLO schema constraints.

        :param newval: new value to be used for validation (None is
            treated as "no password supplied" and accepted).
        :type newval: str or None.
        :param name: clean name for outputting.
        :type name: str.
        :returns: list of RegistryValidationError; empty when valid.
        """
        result = list()
        if newval is None:
            return result
        if u'MinLength' in self:
            if len(newval) < int(self[u'MinLength']):
                result.append(RegistryValidationError(u"'%s' must be at least" \
                              " '%s' characters long" % (self.Name, \
                              int(self[u'MinLength'])), regentry=self))
        if u'MaxLength' in self:
            if len(newval) > int(self[u'MaxLength']):
                result.append(RegistryValidationError(u"'%s' must be less " \
                              "than '%s' characters long" % (self.Name, \
                              int(self[u'MaxLength'])), regentry=self))
        if u'ValueExpression' in self:
            if self[u'ValueExpression']:
                # Pattern is only anchored at the start (re.match semantics).
                pat = re.compile(self[u'ValueExpression'])
                if newval and not pat.match(newval):
                    result.append(RegistryValidationError(u"'%(Name)s' must " \
                              "match the regular expression '%(Value" \
                              "Expression)s'" % (self), regentry=self))
        return result

    def print_help(self, name, out=sys.stdout):
        """Write a human-readable help summary for this password setting.

        :param name: clean name for outputting.
        :type name: str.
        :param out: stream the help text is written to.
        :type out: file-like object.
        """
        wrapper = textwrap.TextWrapper()
        wrapper.initial_indent = ' ' * 4
        wrapper.subsequent_indent = ' ' * 4
        out.write(u'\nNAME\n')
        out.write('%s' % wrapper.fill('%s' % name))
        out.write('\n')
        if u'DisplayName' in self:
            out.write(u'\nDISPLAY NAME\n')
            out.write('%s' % wrapper.fill('%(DisplayName)s' % self))
            out.write('\n')
        if u'description' in self:
            out.write(u'\nDESCRIPTION\n')
            out.write('%s' % wrapper.fill('%(description)s' % self))
            out.write('\n')
        if u'HelpText' in self:
            out.write(u'\nHELP TEXT\n')
            out.write('%s' % wrapper.fill('%(HelpText)s' % self))
            out.write('\n')
        if u'WarningText' in self:
            out.write(u'\n************************************************\n')
            out.write(u'\nWARNING\n')
            out.write('%s' % wrapper.fill('%(WarningText)s' % self))
            out.write(u'\n\n**********************************************\n')
            out.write('\n')
        if u'type' in self and isinstance(self[u'type'], list):
            out.write(u'\nTYPE\n')
            for item in self[u'type']:
                out.write('%s\n' % wrapper.fill('%s' % item))
            out.write('\n')
        elif u'type' in self:
            out.write(u'\nTYPE\n')
            out.write('%s' % wrapper.fill('%(type)s' % self))
            out.write('\n')
        elif u'Type' in self:
            out.write(u'\nTYPE\n')
            out.write('%s' % wrapper.fill('%(Type)s' % self))
            out.write('\n')
        if u'MinLength' in self:
            out.write(u'\nMIN LENGTH\n')
            out.write('%s' % wrapper.fill('%(MinLength)s' % self))
            out.write('\n')
        if u'MaxLength' in self:
            out.write(u'\nMAX LENGTH\n')
            out.write('%s' % wrapper.fill('%(MaxLength)s' % self))
            out.write('\n')
        if u'ReadOnly' in self:
            out.write(u'\nREAD-ONLY\n')
            out.write('%s' % wrapper.fill('%(ReadOnly)s' % self))
            out.write('\n')
        elif u'readonly' in self:
            out.write(u'\nREAD-ONLY\n')
            out.write('%s' % wrapper.fill('%(readonly)s' % self))
            out.write('\n')
|
./data/anhstudios/swganh/data/scripts/templates/object/tangible/furniture/decorative/shared_foodcart.py | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
	"""Return the Tangible template for the decorative food cart.

	:param kernel: engine kernel passed by the template loader; not used
		by this template.
	"""
	result = Tangible()
	# Client resource (.iff) backing this object.
	result.template = "object/tangible/furniture/decorative/shared_foodcart.iff"
	result.attribute_template_id = 6
	# Localized name: STF file "frn_n", entry "foodcart".
	result.stfName("frn_n","foodcart")
	#### BEGIN MODIFICATIONS ####
	#### END MODIFICATIONS ####
	return result
./data/anhstudios/swganh/data/scripts/templates/object/tangible/sign/player/shared_house_address.py | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
	"""Return the Tangible template for the player house address sign.

	:param kernel: engine kernel passed by the template loader; not used
		by this template.
	"""
	result = Tangible()
	# Client resource (.iff) backing this object.
	result.template = "object/tangible/sign/player/shared_house_address.iff"
	# -1: no attribute template associated with this object.
	result.attribute_template_id = -1
	# Localized name: STF file "sign_name", entry "sign".
	result.stfName("sign_name","sign")
	#### BEGIN MODIFICATIONS ####
	#### END MODIFICATIONS ####
	return result
./data/facebook/augmented-traffic-control/atc/atcd/tests/test_AtcdThriftHandlerTask.py | #
# Copyright (c) 2014, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
#
'''
from sparts.tests.base import SingleTaskTestCase
from atcd.AtcdThriftHandlerTask import AtcdThriftHandlerTask
class AtcdThriftHandlerTaskTest(SingleTaskTestCase):
TASK = AtcdThriftHandlerTask
def setUp(self):
super(AtcdThriftHandlerTaskTest, self).setUp()
def test_nothing(self):
self.assertTrue(True)
'''
|
./data/google/simian/src/tests/simian/auth/x509_test.py | #!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""x509 module tests."""
import types
from google.apputils import app
from google.apputils import basetest
import mox
import stubout
from pyasn1.type import univ
from simian.auth import x509
class Error(Exception):
  """Base exception type for errors raised by this test module."""
class X509ModuleTest(mox.MoxTestBase):
  """Tests for the module-level PEM/base64 loading helpers in x509.

  These tests use mox record/replay: stubbed calls must occur in the
  exact order recorded before ReplayAll().
  """
  def setUp(self):
    mox.MoxTestBase.setUp(self)
    self.stubs = stubout.StubOutForTesting()
  def tearDown(self):
    self.mox.UnsetStubs()
    self.stubs.UnsetAll()
  def testLoadPemGeneric(self):
    """Test LoadPemGeneric() with a well-formed PEM body."""
    header = 'BEGIN'
    footer = 'END'
    # NOTE: 'input' shadows the builtin; harmless within these tests.
    input = '\n\n\n-----BEGIN-----\nhello\n-----END-----\n\n\n'
    expected = [
        '-----BEGIN-----',
        'hello',
        '-----END-----',
    ]
    self.assertEqual(expected, x509.LoadPemGeneric(input, header, footer))
  def testLoadPemGenericWhenInfo(self):
    """Test LoadPemGeneric() drops 'Proc-Type:'-style info lines."""
    header = 'BEGIN'
    footer = 'END'
    input = ('\n\n\n-----BEGIN-----\n'
             'Proc-Type: foo\nhello\n-----END-----\n\n\n')
    expected = [
        '-----BEGIN-----',
        'hello',
        '-----END-----',
    ]
    self.assertEqual(expected, x509.LoadPemGeneric(input, header, footer))
  def testLoadPemGenericWhenSpaces(self):
    """Test LoadPemGeneric() trims surrounding whitespace on lines."""
    header = 'BEGIN'
    footer = 'END'
    input = ' \n\n\n-----BEGIN----- \nhello \n-----END----- \n\n\n '
    expected = [
        '-----BEGIN-----',
        'hello',
        '-----END-----',
    ]
    self.assertEqual(expected, x509.LoadPemGeneric(input, header, footer))
  def testLoadPemGenericWhenSpacesNoLastNewline(self):
    """Test LoadPemGeneric() when the footer has no trailing newline."""
    header = 'BEGIN'
    footer = 'END'
    input = ' \n\n\n-----BEGIN----- \nhello \n-----END-----'
    expected = [
        '-----BEGIN-----',
        'hello',
        '-----END-----',
    ]
    self.assertEqual(expected, x509.LoadPemGeneric(input, header, footer))
  def testLoadPemGenericWhenMissingHeader(self):
    """Test LoadPemGeneric() raises when the header is not found."""
    header = 'BEGIN BLAH'
    footer = 'END BLAH'
    input = '\n\n\n-----BEGIN-----\nhello\n-----END-----\n\n\n'
    self.assertRaises(
        x509.HeaderMissingPEMFormatError, x509.LoadPemGeneric,
        input, header, footer)
  def testLoadPemGenericWhenMissingFooter(self):
    """Test LoadPemGeneric() raises when the footer is not found."""
    header = 'BEGIN'
    footer = 'END BLAH'
    input = '\n\n\n-----BEGIN-----\nhello\n-----END-----\n\n\n'
    self.assertRaises(
        x509.FooterMissingPEMFormatError, x509.LoadPemGeneric,
        input, header, footer)
  def testLoadPemGenericWhenTooFewLines(self):
    """Test LoadPemGeneric() raises when the PEM has no body/footer."""
    header = 'BEGIN'
    footer = 'END BLAH'
    input = '\n\n\n-----BEGIN-----\n\n\n\n'
    self.assertRaises(
        x509.PEMFormatError, x509.LoadPemGeneric, input, header, footer)
  def testLoadCertificateFromPEM(self):
    """Test LoadCertificateFromPEM()."""
    header = 'BEGIN CERTIFICATE'
    footer = 'END CERTIFICATE'
    pem_input = 'pem_input'
    pem_output = ['---header---', 'base64', '---footer---']
    self.mox.StubOutWithMock(x509, 'LoadPemGeneric')
    self.mox.StubOutWithMock(x509, 'LoadCertificateFromBase64')
    # Only the middle (base64 body) line is handed to the b64 loader.
    x509.LoadPemGeneric(pem_input, header, footer).AndReturn(pem_output)
    x509.LoadCertificateFromBase64('base64').AndReturn('ok')
    self.mox.ReplayAll()
    self.assertEqual(x509.LoadCertificateFromPEM(pem_input), 'ok')
    self.mox.VerifyAll()
  def testLoadRSAPrivateKeyFromPEM(self):
    """Test LoadRSAPrivateKeyFromPEM()."""
    header = 'BEGIN RSA PRIVATE KEY'
    footer = 'END RSA PRIVATE KEY'
    pem_input = 'pem_input'
    pem_output = ['---header---', 'base64', '---footer---']
    self.mox.StubOutWithMock(x509, 'LoadPemGeneric')
    self.mox.StubOutWithMock(
        x509.tlslite_bridge, 'parsePEMKey')
    # The full PEM (header + body + footer) is re-joined for tlslite.
    x509.LoadPemGeneric(pem_input, header, footer).AndReturn(pem_output)
    x509.tlslite_bridge.parsePEMKey(
        '\n'.join(pem_output)).AndReturn('ok')
    self.mox.ReplayAll()
    self.assertEqual(x509.LoadRSAPrivateKeyFromPEM(pem_input), 'ok')
    self.mox.VerifyAll()
  def testLoadRSAPrivateKeyFromPEMWhenSyntaxError(self):
    """Test LoadRSAPrivateKeyFromPEM() wraps tlslite SyntaxError."""
    header = 'BEGIN RSA PRIVATE KEY'
    footer = 'END RSA PRIVATE KEY'
    pem_input = 'pem_input'
    pem_output = ['---header---', 'base64', '---footer---']
    self.mox.StubOutWithMock(x509, 'LoadPemGeneric')
    self.mox.StubOutWithMock(
        x509.tlslite_bridge, 'parsePEMKey')
    x509.LoadPemGeneric(pem_input, header, footer).AndReturn(pem_output)
    x509.tlslite_bridge.parsePEMKey(
        '\n'.join(pem_output)).AndRaise(SyntaxError)
    self.mox.ReplayAll()
    self.assertRaises(
        x509.RSAPrivateKeyPEMFormatError,
        x509.LoadRSAPrivateKeyFromPEM, pem_input)
    self.mox.VerifyAll()
  def testLoadCertificateFromBase64(self):
    """Test LoadCertificateFromBase64()."""
    self.mox.StubOutWithMock(x509.base64, 'b64decode')
    self.mox.StubOutWithMock(x509, 'BASE64_RE')
    x509.BASE64_RE.search('b64str').AndReturn(True)
    x509.base64.b64decode('b64str').AndReturn('binary')
    mock_x509 = self.mox.CreateMockAnything()
    self.stubs.Set(x509, 'X509Certificate', mock_x509)
    mock_x509().AndReturn(mock_x509)
    mock_x509.LoadFromByteString('binary').AndReturn(None)
    self.mox.ReplayAll()
    self.assertEqual(
        mock_x509,
        x509.LoadCertificateFromBase64('b64str'))
    self.mox.VerifyAll()
  def testLoadCertificateFromBase64WhenBase64CharacterCheckFail(self):
    """Test LoadCertificateFromBase64() when input has non-base64 chars."""
    self.mox.StubOutWithMock(x509.base64, 'b64decode')
    self.mox.StubOutWithMock(x509, 'BASE64_RE')
    x509.BASE64_RE.search('b64str').AndReturn(None)
    self.mox.ReplayAll()
    self.assertRaises(
        x509.PEMFormatError,
        x509.LoadCertificateFromBase64, 'b64str')
    self.mox.VerifyAll()
  def testLoadCertificateFromBase64WhenBase64DecodeFail(self):
    """Test LoadCertificateFromBase64() when base64 decoding fails."""
    self.mox.StubOutWithMock(x509.base64, 'b64decode')
    self.mox.StubOutWithMock(x509, 'BASE64_RE')
    x509.BASE64_RE.search('b64str').AndReturn(True)
    x509.base64.b64decode('b64str').AndRaise(TypeError)
    self.mox.ReplayAll()
    self.assertRaises(
        x509.PEMFormatError,
        x509.LoadCertificateFromBase64, 'b64str')
    self.mox.VerifyAll()
class BaseDataObjectTest(mox.MoxTestBase):
  """Test BaseDataObject class."""
  def setUp(self):
    mox.MoxTestBase.setUp(self)
    self.stubs = stubout.StubOutForTesting()
    self.bdo = x509.BaseDataObject()
  def tearDown(self):
    self.mox.UnsetStubs()
    self.stubs.UnsetAll()
  def testGetDataDict(self):
    """Test _GetDataDict() raises on the abstract base class."""
    try:
      self.bdo._GetDataDict()
      self.fail('NotImplementedError not raised')
    except NotImplementedError:
      pass
  def testCreateGetMethod(self):
    """Test CreateGetMethod() generates a working GetFoo accessor."""
    mock_dataobj = self.mox.CreateMockAnything()
    mock_dataobj._GetDataDict().AndReturn({'foo': 123})
    # Intercepts the setattr call to verify the generated method's name
    # and that, when bound, it returns the underlying dict value.
    def mock_setattr(cls, key, value):
      self.assertEquals(key, 'GetFoo')
      self.assertTrue(type(value) is types.FunctionType)
      self.assertEqual(123, value(mock_dataobj))
    self.mox.ReplayAll()
    x509.BaseDataObject.CreateGetMethod('Foo', 'foo', setattr_=mock_setattr)
    self.mox.VerifyAll()
class X509CertificateTest(mox.MoxTestBase):
def setUp(self):
mox.MoxTestBase.setUp(self)
self.stubs = stubout.StubOutForTesting()
self.x = x509.X509Certificate()
self._cert_reset = {
'serial_num': None,
'issuer': None,
'subject': None,
'valid_notbefore': None,
'valid_notafter': None,
'fields_data': None,
'sig_data': None,
'sig_algorithm': None,
'entire_cert_data': None,
'public_key': None,
'may_act_as_ca': None,
'key_usage': None,
'subject_alt_name': None,
}
def tearDown(self):
self.mox.UnsetStubs()
self.stubs.UnsetAll()
def _CheckSaneCertFields(self, d):
"""Check that output dict keys are defined in _cert_reset.
Args:
d: dict, output from a _Get*FromSequence method
"""
for k in d:
self.assertTrue(k in self._cert_reset, 'Key %s is invalid in _cert' % k)
def testInit(self):
"""Test __init__()."""
self.mox.StubOutWithMock(x509.X509Certificate, 'Reset')
x509.X509Certificate.Reset().AndReturn(None)
self.mox.ReplayAll()
unused = x509.X509Certificate()
self.mox.VerifyAll()
def testReset(self):
"""Test Reset()."""
self.x.Reset()
self.assertEqual(self.x._cert, self._cert_reset)
def testCreateGetMethods(self):
"""Test the autogenerated methods from CreateGetMethod()."""
names = [
'Issuer',
'Subject',
'DatetimeNotValidBefore',
'DatetimeNotValidAfter',
'FieldsData',
'SignatureData',
'SignatureAlgorithm',
'SerialNumber',
'EntireCertData',
'PublicKey',
'MayActAsCA',
'KeyUsage',
'SubjectAltName',
]
for name in names:
self.assertTrue(
hasattr(self.x, 'Get%s' % name), 'has method Get%s' % name)
self.assertTrue(
type(getattr(self.x, 'Get%s' % name)) is types.MethodType,
'Get%s is a method' % name)
def testGetDataDict(self):
"""Test _GetDataDict()."""
self.assertEqual(self.x._cert, self.x._GetDataDict())
def testCertTimestampToDatetime(self):
"""Test _CertTimestampToDatetime()."""
self.mox.StubOutWithMock(x509.time, 'strptime')
self.mox.StubOutWithMock(x509.datetime, 'datetime', True)
time_ary = (1981, 1, 11, 0, 0, 0, 0, 'bla')
x509.time.strptime('ts', self.x.TIMESTAMP_FMT).AndReturn(time_ary)
x509.datetime.datetime(*time_ary[0:7]).AndReturn('datetime')
self.mox.ReplayAll()
self.assertEqual('datetime', self.x._CertTimestampToDatetime('ts'))
self.mox.VerifyAll()
def testStrToArray(self):
"""Test _StrToArray()."""
self.mox.StubOutWithMock(x509.array, 'array', True)
x509.array.array('B', 's').AndReturn('ary')
self.mox.ReplayAll()
self.assertEqual('ary', self.x._StrToArray('s'))
self.mox.VerifyAll()
def testCertTimestampToDatetimeWhenBadTimestamp(self):
"""Test _CertTimestampToDatetime()."""
self.mox.StubOutWithMock(x509.time, 'strptime')
x509.time.strptime('ts', self.x.TIMESTAMP_FMT).AndRaise(ValueError)
self.mox.ReplayAll()
self.assertRaises(
x509.CertificateValueError,
self.x._CertTimestampToDatetime, 'ts')
self.mox.VerifyAll()
def testGetV3ExtensionFieldsFromSequenceWhenOIDKeyUsage(self):
"""Test _GetV3ExtensionFieldsFromSequence()."""
self.mox.StubOutWithMock(x509.der_decoder, 'decode', True)
e_key_usage = univ.OctetString('\x03e_key_usage')
d_key_usage = ((1, 0, 1),)
x509.der_decoder.decode(e_key_usage).AndReturn(d_key_usage)
seq = (
('junk', ('value', 'value')),
(x509.OID_X509V3_KEY_USAGE, e_key_usage),
)
output = {
'key_usage': (
x509.X509V3_KEY_USAGE_BIT_FIELDS[0],
x509.X509V3_KEY_USAGE_BIT_FIELDS[2],
),
}
self.mox.ReplayAll()
self.assertEqual(
output,
self.x._GetV3ExtensionFieldsFromSequence(seq))
self.mox.VerifyAll()
def testGetV3ExtensionFieldsFromSequenceWhenOIDKeyUsageBadParse(self):
"""Test _GetV3ExtensionFieldsFromSequence()."""
e_key_usage = univ.OctetString('e_key_usage')
d_key_usage = ((1, 0, 1),)
seq = (
('junk', ('value', 'value')),
(x509.OID_X509V3_KEY_USAGE, e_key_usage),
)
self.mox.ReplayAll()
self.assertRaises(
x509.CertificateParseError,
self.x._GetV3ExtensionFieldsFromSequence,
seq)
self.mox.VerifyAll()
def testGetV3ExtensionFieldsFromSequenceWhenOIDBasicConstraint(self):
"""Test _GetV3ExtensionFieldsFromSequence()."""
self.mox.StubOutWithMock(x509.der_decoder, 'decode', True)
e_basic_const = univ.OctetString('e_basic_const')
d_basic_const = ((True,), '')
x509.der_decoder.decode(e_basic_const).AndReturn(d_basic_const)
seq = (
('junk', ('value', 'value')),
(x509.OID_X509V3_BASIC_CONSTRAINTS, e_basic_const),
)
output = {
'may_act_as_ca': True,
}
self.mox.ReplayAll()
self.assertEqual(
output,
self.x._GetV3ExtensionFieldsFromSequence(seq))
self.mox.VerifyAll()
def testGetV3ExtensionFieldsFromSequenceWhenOIDBasicConstraintForm2(self):
"""Test _GetV3ExtensionFieldsFromSequence()."""
self.mox.StubOutWithMock(x509.der_decoder, 'decode', True)
e_basic_const = univ.OctetString('e_basic_const')
d_basic_const = ((True,), '')
x509.der_decoder.decode(e_basic_const).AndReturn(d_basic_const)
seq = (
('junk', ('value', 'value')),
(x509.OID_X509V3_BASIC_CONSTRAINTS, True, e_basic_const),
)
output = {
'may_act_as_ca': True,
}
self.mox.ReplayAll()
self.assertEqual(
output,
self.x._GetV3ExtensionFieldsFromSequence(seq))
self.mox.VerifyAll()
def testGetV3ExtensionFieldsFromSequenceWhenOIDBasicConstraintBadForm(self):
"""Test _GetV3ExtensionFieldsFromSequence()."""
self.mox.StubOutWithMock(x509.der_decoder, 'decode', True)
e_basic_const = univ.OctetString('e_basic_const')
d_basic_const = ((True,), '')
seq = (
('junk', ('value', 'value')),
(x509.OID_X509V3_BASIC_CONSTRAINTS, True, e_basic_const, 'what', 'ugh'),
)
output = {
'may_act_as_ca': True,
}
self.mox.ReplayAll()
self.assertRaises(
x509.CertificateParseError,
self.x._GetV3ExtensionFieldsFromSequence,
seq)
self.mox.VerifyAll()
def testGetV3ExtensionFieldsFromSequenceWhenOIDBasicConstraintPaths(self):
"""Test _GetV3ExtensionFieldsFromSequence()."""
self.mox.StubOutWithMock(x509.der_decoder, 'decode', True)
e_basic_const = univ.OctetString('e_basic_const')
d_basic_const = ((True,), ['unsupported path data'])
x509.der_decoder.decode(e_basic_const).AndReturn(d_basic_const)
seq = (
('junk', ('value', 'value')),
(x509.OID_X509V3_BASIC_CONSTRAINTS, e_basic_const),
)
output = {
'may_act_as_ca': True,
}
self.mox.ReplayAll()
self.assertRaises(
x509.CertificateParseError,
self.x._GetV3ExtensionFieldsFromSequence,
seq)
self.mox.VerifyAll()
def testGetV3ExtensionFieldsFromSequenceWhenOIDSubjectAltName(self):
"""Test _GetV3ExtensionFieldsFromSequence()."""
self.mox.StubOutWithMock(x509.der_decoder, 'decode', True)
e_mspn = univ.OctetString('\x30mspn der encoded')
d_mspn = (
(x509.OID_MS_NT_PRINCIPAL_NAME, 'foo'),
)
x509.der_decoder.decode(e_mspn).AndReturn(d_mspn)
seq = (
('junk', ('value', 'value')),
(x509.OID_X509V3_SUBJECT_ALT_NAME, e_mspn),
)
output = {
'subject_alt_name': 'X_MS_NT_Principal_Name=foo',
}
self.mox.ReplayAll()
self.assertEqual(
output,
self.x._GetV3ExtensionFieldsFromSequence(seq))
self.mox.VerifyAll()
def testGetV3ExtensionFieldsFromSequenceWhenOIDSubjectAltNameBadForm(self):
"""Test _GetV3ExtensionFieldsFromSequence()."""
e_mspn = univ.OctetString('mspn der encoded wrong encapsulation')
d_mspn = (
(x509.OID_MS_NT_PRINCIPAL_NAME, 'foo'),
)
seq = (
('junk', ('value', 'value')),
(x509.OID_X509V3_SUBJECT_ALT_NAME, e_mspn),
)
output = {
'subject_alt_name': 'X_MS_NT_Principal_Name=foo',
}
self.mox.ReplayAll()
self.assertRaises(
x509.CertificateParseError,
self.x._GetV3ExtensionFieldsFromSequence,
seq)
self.mox.VerifyAll()
def testGetV3ExtensionFieldsFromSequenceWhenOIDSubjectAltNameUnknownOID(self):
"""Test _GetV3ExtensionFieldsFromSequence()."""
self.mox.StubOutWithMock(x509.der_decoder, 'decode', True)
unknown_oid = (1, 2, 3)
e_mspn = univ.OctetString('\x30mspn der encoded')
d_mspn = (
(unknown_oid, 'foo'),
)
x509.der_decoder.decode(e_mspn).AndReturn(d_mspn)
seq = (
('junk', ('value', 'value')),
(x509.OID_X509V3_SUBJECT_ALT_NAME, e_mspn),
)
self.mox.ReplayAll()
self.assertRaises(
x509.CertificateParseError,
self.x._GetV3ExtensionFieldsFromSequence,
seq)
self.mox.VerifyAll()
def testAttributeValueToString(self):
"""Test _AttributeValueToString()."""
value = 'newyork'
expected = 'newyork'
self.assertEqual(value, expected)
result = self.x._AttributeValueToString(value)
self.assertEqual(expected, result)
def testAttributeValueToStringWhenLeadingBadCharsSpace(self):
"""Test _AttributeValueToString()."""
value = ' new york'
expected = '\\ new york'
result = self.x._AttributeValueToString(value)
self.assertEqual(expected, result)
def testAttributeValueToStringWhenLeadingBadCharsHash(self):
"""Test _AttributeValueToString()."""
value = '#new york'
expected = '\\#new york'
result = self.x._AttributeValueToString(value)
self.assertEqual(expected, result)
def testAttributeValueToStringWhenTrailingBadCharsSpace(self):
"""Test _AttributeValueToString()."""
value = 'new york '
expected = 'new york\\ '
result = self.x._AttributeValueToString(value)
self.assertEqual(expected, result)
def testAttributeValueToStringWhenContainsNull(self):
"""Test _AttributeValueToString()."""
value = 'new%syork' % chr(00)
expected = 'new\\00york'
result = self.x._AttributeValueToString(value)
self.assertEqual(expected, result)
def testAttributeValueToStringPreventIndexRegression(self):
"""Test _AttributeValueToString()."""
value = ',newyork'
expected = '\\,newyork'
result = self.x._AttributeValueToString(value)
self.assertEqual(expected, result)
def testAttributeValueToStringWhenCharsNeedingEscaping(self):
"""Test _AttributeValueToString()."""
chars = ['"', '+', ',', ';', '<', '>', '\\']
for c in chars:
value = 'new%syork' % c
expected = 'new\\%syork' % c
result = self.x._AttributeValueToString(value)
self.assertEqual(expected, result)
def testAttributeValueToStringWhenMultipleAdjacentTransformsNeeded(self):
"""Test _AttributeValueToString()."""
value = ' new,york;; '
expected = '\\ new\\,york\\;\\;\\ '
result = self.x._AttributeValueToString(value)
self.assertEqual(expected, result)
value = '#new,york;\x00, '
expected = '\\#new\\,york\\;\\00\\,\\ '
result = self.x._AttributeValueToString(value)
self.assertEqual(expected, result)
def testAssembleDNSequence(self):
    """Test _AssembleDNSequence(): known OIDs assemble into 'CN=foo,OU=bar'."""
    value = (
        ((x509.OID_ID['CN'], 'foo'),),
        ((x509.OID_ID['OU'], 'bar'),),
    )
    self.mox.StubOutWithMock(self.x, '_AttributeValueToString')
    # Each attribute value must pass through the escaping helper exactly once.
    self.x._AttributeValueToString('foo').AndReturn('foo')
    self.x._AttributeValueToString('bar').AndReturn('bar')
    self.mox.ReplayAll()
    self.assertEqual(self.x._AssembleDNSequence(value), 'CN=foo,OU=bar')
    self.mox.VerifyAll()

def testAssembleDNSequenceWhenUnknownOID(self):
    """Test _AssembleDNSequence(): unrecognized OID raises CertificateParseError."""
    bad_oid = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)
    value = (
        ((bad_oid, 'foo'),),
        ((x509.OID_ID['OU'], 'bar'),),
    )
    self.assertRaises(
        x509.CertificateParseError,
        self.x._AssembleDNSequence,
        value)

def testAssembleDNSequenceWhenBadStructure(self):
    """Test _AssembleDNSequence(): an under-nested pair raises CertificateParseError."""
    value = (
        (x509.OID_ID['CN'], 'foo'),  # bad structure
        ((x509.OID_ID['OU'], 'bar'),),
    )
    self.assertRaises(
        x509.CertificateParseError,
        self.x._AssembleDNSequence,
        value)
def testGetFieldsFromSequence(self):
    """Test _GetFieldsFromSequence(): happy path for a full v3 cert sequence."""
    sig_alg_seq = ('a','b')
    sig_alg = 'sigalg'
    before_ts = self.mox.CreateMockAnything()
    after_ts = self.mox.CreateMockAnything()
    mock_utctime = self.mox.CreateMockAnything()
    self.stubs.Set(x509.pyasn1.type.useful, 'UTCTime', mock_utctime)
    # Both validity timestamps must look like pyasn1 UTCTime instances.
    mock_utctime().AndReturn(mock_utctime)
    mock_utctime().AndReturn(mock_utctime)
    before_ts.isSameTypeWith(mock_utctime).AndReturn(True)
    after_ts.isSameTypeWith(mock_utctime).AndReturn(True)
    serial_num = 12345
    v3ext = {
        'may_act_as_ca': 123,
        'key_usage': (1, 2, 3),
        'subject_alt_name': 'subj alt name',
    }
    # Positional layout of the tbsCertificate sequence under test.
    seq = (
        x509.X509_CERT_VERSION_3,
        serial_num,
        sig_alg_seq,
        (((x509.OID_ID['CN'], 'issuer'),),),
        (before_ts, after_ts),
        (((x509.OID_ID['CN'], 'subject'),),),
        'public key',
        'x509v3 extensions',
    )
    seq_encoded = 'raw bytes'
    before_dt = 'before_dt'
    after_dt = 'after_dt'
    self.mox.StubOutWithMock(self.x, '_GetSignatureAlgorithmFromSequence')
    self.mox.StubOutWithMock(self.x, '_CertTimestampToDatetime')
    self.mox.StubOutWithMock(self.x, '_GetV3ExtensionFieldsFromSequence')
    self.mox.StubOutWithMock(self.x, '_AssembleDNSequence')
    self.mox.StubOutWithMock(x509.der_encoder, 'encode', True)
    self.x._GetSignatureAlgorithmFromSequence(
        sig_alg_seq).AndReturn(sig_alg)
    self.x._AssembleDNSequence(seq[3]).AndReturn('CN=issuer')
    self.x._CertTimestampToDatetime(before_ts).AndReturn(before_dt)
    self.x._CertTimestampToDatetime(after_ts).AndReturn(after_dt)
    self.x._AssembleDNSequence(seq[5]).AndReturn('CN=subject')
    self.x._GetV3ExtensionFieldsFromSequence(seq[7]).AndReturn(v3ext)
    x509.der_encoder.encode(seq).AndReturn(seq_encoded)
    self.mox.ReplayAll()
    output = self.x._GetFieldsFromSequence(seq)
    self._CheckSaneCertFields(output)
    self.assertEqual(
        output, {
            'serial_num': serial_num,
            'issuer': u'CN=issuer',
            'subject': u'CN=subject',
            'valid_notbefore': before_dt,
            'valid_notafter': after_dt,
            'fields_data': seq_encoded,
            'sig_algorithm': sig_alg,
            'may_act_as_ca': v3ext['may_act_as_ca'],
            'key_usage': v3ext['key_usage'],
            'subject_alt_name': v3ext['subject_alt_name'],
        })
    self.mox.VerifyAll()
def testGetFieldsFromSequenceWhenSeqShort(self):
    """Test _GetFieldsFromSequence(): a truncated sequence is rejected."""
    serial_num = 12345
    seq = (
        x509.X509_CERT_VERSION_3,
        serial_num,
    )  # fails (length of entire sequence too short)
    self.mox.ReplayAll()
    self.assertRaises(
        x509.CertificateParseError,
        self.x._GetFieldsFromSequence, seq)
    self.mox.VerifyAll()

def testGetFieldsFromSequenceWhenWrongVersion(self):
    """Test _GetFieldsFromSequence(): a non-v3 version value is rejected."""
    seq = (
        x509.X509_CERT_VERSION_3 * 2,  # fails
        1,
        2,
        3,
        4,
        5,
        6,
    )
    self.mox.ReplayAll()
    self.assertRaises(
        x509.CertificateParseError,
        self.x._GetFieldsFromSequence, seq)
    self.mox.VerifyAll()
def testGetFieldsFromSequenceWhenValidityNotBeforeFail(self):
    """Test _GetFieldsFromSequence(): non-UTCTime notBefore raises parse error."""
    sig_alg_seq = ('a','b')
    sig_alg = 'sigalg'
    before_ts = self.mox.CreateMockAnything()
    after_ts = self.mox.CreateMockAnything()
    mock_utctime = self.mox.CreateMockAnything()
    self.stubs.Set(x509.pyasn1.type.useful, 'UTCTime', mock_utctime)
    mock_utctime().AndReturn(mock_utctime)
    before_ts.isSameTypeWith(mock_utctime).AndReturn(False)  # fails
    serial_num = 12345
    bad_oid_cn = (9) * 10  # NOTE(review): unused; (9)*10 is the int 90, not a tuple
    seq = (
        x509.X509_CERT_VERSION_3,
        serial_num,
        sig_alg_seq,
        (((x509.OID_ID['CN'], 'issuer'),),),
        (before_ts, after_ts),
        (((x509.OID_ID['CN'], 'subject'),),),
        'public key',
        'x509v3 extensions',
    )
    self.mox.StubOutWithMock(self.x, '_GetSignatureAlgorithmFromSequence')
    self.mox.StubOutWithMock(self.x, '_AssembleDNSequence')
    self.x._GetSignatureAlgorithmFromSequence(
        sig_alg_seq).AndReturn(sig_alg)
    self.x._AssembleDNSequence(seq[3]).AndReturn('CN=issuer')
    self.mox.ReplayAll()
    self.assertRaises(
        x509.CertificateParseError,
        self.x._GetFieldsFromSequence, seq)
    self.mox.VerifyAll()

def testGetFieldsFromSequenceWhenValidityNotAfterFail(self):
    """Test _GetFieldsFromSequence(): non-UTCTime notAfter raises parse error."""
    sig_alg_seq = ('a','b')
    sig_alg = 'sigalg'
    before_ts = self.mox.CreateMockAnything()
    after_ts = self.mox.CreateMockAnything()
    mock_utctime = self.mox.CreateMockAnything()
    self.stubs.Set(x509.pyasn1.type.useful, 'UTCTime', mock_utctime)
    mock_utctime().AndReturn(mock_utctime)
    mock_utctime().AndReturn(mock_utctime)
    before_ts.isSameTypeWith(mock_utctime).AndReturn(True)
    after_ts.isSameTypeWith(mock_utctime).AndReturn(False)  # fails
    serial_num = 12345
    bad_oid_cn = (9) * 10  # NOTE(review): unused; (9)*10 is the int 90, not a tuple
    seq = (
        x509.X509_CERT_VERSION_3,
        serial_num,
        sig_alg_seq,
        (((x509.OID_ID['CN'], 'issuer'),),),
        (before_ts, after_ts),
        (((x509.OID_ID['CN'], 'subject'),),),
        'public key',
        'x509v3 extensions',
    )
    self.mox.StubOutWithMock(self.x, '_GetSignatureAlgorithmFromSequence')
    self.mox.StubOutWithMock(self.x, '_AssembleDNSequence')
    self.x._GetSignatureAlgorithmFromSequence(
        sig_alg_seq).AndReturn(sig_alg)
    self.x._AssembleDNSequence(seq[3]).AndReturn('CN=issuer')
    self.mox.ReplayAll()
    self.assertRaises(
        x509.CertificateParseError,
        self.x._GetFieldsFromSequence, seq)
    self.mox.VerifyAll()
def testGetFieldsFromSequenceWhenX509V3Missing(self):
    """Test _GetFieldsFromSequence(): a 7-item seq (no v3 extensions) still parses."""
    sig_alg_seq = ('a','b')
    sig_alg = 'sigalg'
    before_ts = self.mox.CreateMockAnything()
    after_ts = self.mox.CreateMockAnything()
    mock_utctime = self.mox.CreateMockAnything()
    self.stubs.Set(x509.pyasn1.type.useful, 'UTCTime', mock_utctime)
    mock_utctime().AndReturn(mock_utctime)
    mock_utctime().AndReturn(mock_utctime)
    before_ts.isSameTypeWith(mock_utctime).AndReturn(True)
    after_ts.isSameTypeWith(mock_utctime).AndReturn(True)
    serial_num = 12345
    # Deliberately unused: no extension fields may appear in the output below.
    v3ext = { 'may_act_as_ca': 123, 'key_usage': (1, 2, 3) }
    seq = (
        x509.X509_CERT_VERSION_3,
        serial_num,
        sig_alg_seq,
        (((x509.OID_ID['CN'], 'issuer'),),),
        (before_ts, after_ts),
        (((x509.OID_ID['CN'], 'subject'),),),
        'public key',
    )
    seq_encoded = 'raw bytes'
    before_dt = 'before_dt'
    after_dt = 'after_dt'
    self.mox.StubOutWithMock(self.x, '_GetSignatureAlgorithmFromSequence')
    self.mox.StubOutWithMock(self.x, '_CertTimestampToDatetime')
    self.mox.StubOutWithMock(self.x, '_AssembleDNSequence')
    self.mox.StubOutWithMock(x509.der_encoder, 'encode', True)
    self.x._GetSignatureAlgorithmFromSequence(
        sig_alg_seq).AndReturn(sig_alg)
    self.x._AssembleDNSequence(seq[3]).AndReturn('CN=issuer')
    self.x._CertTimestampToDatetime(before_ts).AndReturn(before_dt)
    self.x._CertTimestampToDatetime(after_ts).AndReturn(after_dt)
    self.x._AssembleDNSequence(seq[5]).AndReturn('CN=subject')
    x509.der_encoder.encode(seq).AndReturn(seq_encoded)
    self.mox.ReplayAll()
    output = self.x._GetFieldsFromSequence(seq)
    self._CheckSaneCertFields(output)
    self.assertEqual(
        output, {
            'serial_num': serial_num,
            'issuer': 'CN=issuer',
            'subject': 'CN=subject',
            'valid_notbefore': before_dt,
            'valid_notafter': after_dt,
            'fields_data': seq_encoded,
            'sig_algorithm': sig_alg,
        })
    self.mox.VerifyAll()
def testGetSignatureAlgorithmFromSequence(self):
    """Test _GetSignatureAlgorithmFromSequence(): a known algorithm is accepted."""
    alg = self.x.SIGNATURE_ALGORITHMS[0]
    seq = (alg, '')
    output = self.x._GetSignatureAlgorithmFromSequence(seq)
    self._CheckSaneCertFields(output)
    self.assertEqual(output['sig_algorithm'], alg)

def testGetSignatureAlgorithmFromSequenceWhenBadOID(self):
    """Test _GetSignatureAlgorithmFromSequence(): unknown OID raises value error."""
    alg = (5, 4, 3, 2, 1)  # fake OID
    self.assertFalse(alg in self.x.SIGNATURE_ALGORITHMS)
    seq = (alg, '')
    self.assertRaises(
        x509.CertificateValueError,
        self.x._GetSignatureAlgorithmFromSequence, seq)

def testGetSignatureAlgorithmFromSequenceWhenJunkSeq(self):
    """Test _GetSignatureAlgorithmFromSequence(): over-long sequence raises."""
    alg = self.x.SIGNATURE_ALGORITHMS[0]
    seq = (alg, '', '', '')
    self.assertRaises(
        x509.CertificateParseError,
        self.x._GetSignatureAlgorithmFromSequence, seq)

def testGetSignatureAlgorithmFromSequenceWhenJunk(self):
    """Test _GetSignatureAlgorithmFromSequence(): non-sequence input raises."""
    seq = True
    self.assertRaises(
        x509.CertificateParseError,
        self.x._GetSignatureAlgorithmFromSequence, seq)
def testGetSignatureFromSequence(self):
    """Test _GetSignatureFromSequence(): 1024 valid bits yield a 128-byte sig."""
    bits = 1024
    good_seq = [1] * bits
    # Floor division keeps the repeat count an int: under Python 3 the old
    # `bits / 8` is a float, and float * str raises TypeError. Behavior is
    # identical under Python 2, where / on ints already truncated.
    good_sig = (bits // 8) * 'x'
    self.mox.StubOutWithMock(x509.der_encoder, 'encode', True)
    # The parser is expected to keep only the trailing sig-sized bytes.
    x509.der_encoder.encode(good_seq).AndReturn('junkJunkJUNK%s' % good_sig)
    self.mox.ReplayAll()
    output = self.x._GetSignatureFromSequence(good_seq)
    self._CheckSaneCertFields(output)
    self.assertEqual(output['sig_data'], good_sig)
    self.mox.VerifyAll()
def testGetSignatureFromSequenceWhenShortSeq(self):
    """Test _GetSignatureFromSequence(): too few bits raises parse error."""
    short_seq = [1] * 5
    self.mox.ReplayAll()
    self.assertRaises(
        x509.CertificateParseError,
        self.x._GetSignatureFromSequence, short_seq)
    self.mox.VerifyAll()

def testGetSignatureFromSequenceWhenNonBinarySeq(self):
    """Test _GetSignatureFromSequence(): values other than 0/1 raise parse error."""
    non_binary_seq = [2] * 2048
    self.mox.ReplayAll()
    self.assertRaises(
        x509.CertificateParseError,
        self.x._GetSignatureFromSequence, non_binary_seq)
    self.mox.VerifyAll()

def testGetSignatureFromSequenceWhenJunkInput(self):
    """Test _GetSignatureFromSequence(): non-integer items raise parse error."""
    junk_seq = ['a'] * 1024
    self.mox.ReplayAll()
    self.assertRaises(
        x509.CertificateParseError,
        self.x._GetSignatureFromSequence, junk_seq)
    self.mox.VerifyAll()
def testGetCertSequencesFromTopSequence(self):
    """Test GetCertSequencesFromTopSequence(): merges the three sub-parsers' dicts."""
    seq = ((0, 1, 2),)
    self.mox.StubOutWithMock(self.x, '_GetFieldsFromSequence')
    self.mox.StubOutWithMock(self.x, '_GetSignatureAlgorithmFromSequence')
    self.mox.StubOutWithMock(self.x, '_GetSignatureFromSequence')
    # Each element of the inner sequence feeds one dedicated parser.
    self.x._GetFieldsFromSequence(seq[0][0]).AndReturn({'a':1})
    self.x._GetSignatureAlgorithmFromSequence(seq[0][1]).AndReturn({'b':1})
    self.x._GetSignatureFromSequence(seq[0][2]).AndReturn({'c':1})
    self.mox.ReplayAll()
    o = self.x._GetCertSequencesFromTopSequence(seq)
    self.assertEqual(o, {'a':1, 'b':1, 'c':1})
    self.mox.VerifyAll()

def testGetCertSequencesFromTopSequenceWhenBadTuple(self):
    """Test _GetCertSequencesFromTopSequence(): empty or non-tuple input raises."""
    seq = ()
    self.assertRaises(
        x509.CertificateParseError,
        self.x._GetCertSequencesFromTopSequence,
        seq)
    seq = 'not a tuple'
    self.assertRaises(
        x509.CertificateParseError,
        self.x._GetCertSequencesFromTopSequence,
        seq)
def testGetPublicKeyFromByteString(self):
    """Test _GetPublicKeyFromByteString(): wraps tlslite's parsed publicKey."""
    bytes = 'bytes'
    publickey = 'publickey'
    self.mox.StubOutClassWithMocks(x509.tlslite_bridge, 'X509')
    mock_tls509 = x509.tlslite_bridge.X509()
    mock_tls509.parseBinary(bytes).AndReturn(None)
    mock_tls509.publicKey = publickey
    self.mox.ReplayAll()
    self.assertEqual(
        {'public_key': publickey},
        self.x._GetPublicKeyFromByteString(bytes))
    self.mox.VerifyAll()

def testLoadFromByteString(self):
    """Test LoadFromByteString(): resets, then merges sequence and key fields."""
    self.x.Reset()
    base_cert = self.x._cert
    self.mox.StubOutWithMock(x509.der_decoder, 'decode', True)
    self.mox.StubOutWithMock(self.x, '_GetCertSequencesFromTopSequence')
    self.mox.StubOutWithMock(self.x, '_GetPublicKeyFromByteString')
    self.mox.StubOutWithMock(self.x, 'Reset')
    bytes = 'bytes'
    seq = 'seq'
    certseq = {'certseq': 1}
    pubkey = {'pubkey': 1}
    # Expected final state: the post-Reset base dict plus every parsed part.
    cert = { 'entire_byte_string': bytes }
    cert.update(base_cert)
    cert.update(certseq)
    cert.update(pubkey)
    x509.der_decoder.decode(bytes).AndReturn(seq)
    self.x._GetCertSequencesFromTopSequence(seq).AndReturn(certseq)
    self.x._GetPublicKeyFromByteString(bytes).AndReturn(pubkey)
    self.x.Reset().AndReturn(None)
    self.mox.ReplayAll()
    self.x.LoadFromByteString(bytes)
    self.assertEqual(self.x._cert, cert)
    self.mox.VerifyAll()

def testLoadFromByteStringWhenPyAsn1Error(self):
    """Test LoadFromByteString(): PyAsn1Error maps to CertificateASN1FormatError."""
    self.mox.StubOutWithMock(x509.der_decoder, 'decode', True)
    bytes = 'bytes'
    x509.der_decoder.decode(bytes).AndRaise(x509.pyasn1.error.PyAsn1Error)
    self.mox.ReplayAll()
    self.assertRaises(
        x509.CertificateASN1FormatError,
        self.x.LoadFromByteString, bytes)
    self.mox.VerifyAll()
def testCheckValidityWhenObtainUtc(self):
    """Test CheckValidity(): uses datetime.utcnow() when no timestamp is given."""
    mock_datetime = self.mox.CreateMock(x509.datetime.datetime)
    self.stubs.Set(x509.datetime, 'datetime', mock_datetime)
    # utcnow()=2 falls inside [0, 5], so no exception is expected.
    mock_datetime.utcnow().AndReturn(2)
    self.x._cert['valid_notafter'] = 5
    self.x._cert['valid_notbefore'] = 0
    self.mox.ReplayAll()
    self.x.CheckValidity()
    self.mox.VerifyAll()

def testCheckValidityWhenTooNew(self):
    """Test CheckValidity(): a timestamp past valid_notafter raises."""
    self.x._cert['valid_notafter'] = 1
    self.mox.ReplayAll()
    self.assertRaises(
        x509.CertificateError,
        self.x.CheckValidity,
        2)
    self.mox.VerifyAll()

def testCheckValidityWhenTooOld(self):
    """Test CheckValidity(): a timestamp before valid_notbefore raises."""
    self.x._cert['valid_notafter'] = 10
    self.x._cert['valid_notbefore'] = 5
    self.mox.ReplayAll()
    self.assertRaises(
        x509.CertificateError,
        self.x.CheckValidity,
        2)
    self.mox.VerifyAll()
def testCheckIssuerWhenNoIssuerSupplied(self):
    """Test CheckIssuer(): falls back to the configured required issuer."""
    self.x._required_issuer = 'required'
    self.x._cert['issuer'] = 'required'
    self.mox.ReplayAll()
    self.x.CheckIssuer()
    self.mox.VerifyAll()

def testCheckIssuerWhenFailed(self):
    """Test CheckIssuer(): an explicitly supplied, mismatching issuer raises."""
    self.x._required_issuer = None
    self.x._cert['issuer'] = 'required'
    self.mox.ReplayAll()
    self.assertRaises(
        x509.CertificateValueError,
        self.x.CheckIssuer, 'some other issuer')
    self.mox.VerifyAll()

def testCheckIssuerWhenNoRequirement(self):
    """Test CheckIssuer(): no required issuer configured means no check."""
    self.x._required_issuer = None
    self.x._cert['issuer'] = 'no one cares'
    self.mox.ReplayAll()
    self.x.CheckIssuer()
    self.mox.VerifyAll()
def testCheckAll(self):
    """Test CheckAll(): delegates to CheckValidity() then CheckIssuer()."""
    self.mox.StubOutWithMock(self.x, 'CheckValidity')
    self.mox.StubOutWithMock(self.x, 'CheckIssuer')
    self.x.CheckValidity().AndReturn(None)
    self.x.CheckIssuer().AndReturn(None)
    self.mox.ReplayAll()
    self.x.CheckAll()
    self.mox.VerifyAll()

def testSetRequiredIssuer(self):
    """Test SetRequiredIssuer(): stores the issuer for later CheckIssuer() calls."""
    self.x.SetRequiredIssuer('required')
    self.assertEqual(self.x._required_issuer, 'required')
def testIsSignedBy(self):
    """Test IsSignedBy(): verifies our signature with the other cert's key."""
    self.mox.StubOutWithMock(self.x, '_StrToArray')
    self.mox.StubOutWithMock(self.x, 'GetSignatureData')
    self.mox.StubOutWithMock(self.x, 'GetFieldsData')
    mock_othercert = self.mox.CreateMockAnything()
    # The signing cert must be CA-capable before its key is consulted.
    mock_othercert.GetMayActAsCA().AndReturn(True)
    mock_othercert.GetPublicKey().AndReturn(mock_othercert)  # lazy re-use
    self.x.GetSignatureData().AndReturn('sigdata')
    self.x.GetFieldsData().AndReturn('fieldsdata')
    self.x._StrToArray('sigdata').AndReturn('arysigdata')
    self.x._StrToArray('fieldsdata').AndReturn('aryfieldsdata')
    mock_othercert.hashAndVerify('arysigdata', 'aryfieldsdata').AndReturn(True)
    self.mox.ReplayAll()
    self.assertTrue(self.x.IsSignedBy(mock_othercert))
    self.mox.VerifyAll()

def testIsSignedByWhenOtherCertNotCA(self):
    """Test IsSignedBy(): a non-CA signing cert raises CertificateValueError."""
    mock_othercert = self.mox.CreateMockAnything()
    mock_othercert.GetMayActAsCA().AndReturn(False)
    self.mox.ReplayAll()
    self.assertRaises(
        x509.CertificateValueError,
        self.x.IsSignedBy, mock_othercert)
    self.mox.VerifyAll()
def main(unused_argv):
    """Entry point handed to app.run(); argv is consumed by the app framework."""
    basetest.main()


if __name__ == '__main__':
    app.run()
|
./data/nitishsrivastava/deepnet/cudamat/gpu_lock2.py | #!/usr/bin/python
"""
A simple discretionary locking system for /dev/nvidia devices.
Iain Murray, November 2009, January 2010.
-- Additions -- Charlie Tang, Jan, 2011:
added display of GPU usages
-- Charlie Tang, July, 2011:
improved statistics displaying
"""
import os
import os.path
from xml.dom import Node
from xml.dom.minidom import parseString
from subprocess import Popen, PIPE, STDOUT
_dev_prefix = '/dev/nvidia'
# Get ID's of NVIDIA boards. Should do this through a CUDA call, but this is
# a quick and dirty way that works for now:
def board_ids():
    """Returns integer board ids available on this machine."""
    #from glob import glob
    #board_devs = glob(_dev_prefix + '[0-9]*')
    #return range(len(board_devs))
    # NOTE(review): relies on a site-specific helper binary; the commented-out
    # glob of /dev/nvidia* above is the portable alternative -- confirm before
    # using this outside the original cluster.
    p = Popen(['/u/tang/bin/get_num_gpu_boards'], stdout=PIPE)
    nBoards = int(p.stdout.read())
    return range(nBoards)
def _lock_file(id):
"""lock file from integer id"""
# /tmp is cleared on reboot on many systems, but it doesn't have to be
if os.path.exists('/dev/shm'):
# /dev/shm on linux machines is a RAM disk, so is definitely cleared
return '/dev/shm/gpu_lock_%d' % id
else:
return '/tmp/gpu_lock_%d' % id
def owner_of_lock(id):
    """Username that has locked the device id. (Empty string if no lock)."""
    import pwd
    try:
        statinfo = os.lstat(_lock_file(id))
        return pwd.getpwuid(statinfo.st_uid).pw_name
    except (OSError, KeyError):
        # OSError: no lock file exists; KeyError: uid missing from the passwd
        # database. The previous bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit, hiding real problems.
        return ""
def _obtain_lock(id):
    """Attempts to lock id, returning success as True/False."""
    try:
        # On POSIX systems symlink creation is atomic, so this should be a
        # robust locking operation:
        os.symlink('/dev/null', _lock_file(id))
        return True
    except OSError:
        # Typically EEXIST: somebody else already holds this lock. The old
        # bare `except:` also masked unrelated errors and KeyboardInterrupt.
        return False
def _launch_reaper(id, pid):
    """Start a process that will free a lock when process pid terminates"""
    from subprocess import Popen, PIPE
    me = __file__
    if me.endswith('.pyc'):
        # Hand the helper the .py source, not the compiled file.
        me = me[:-1]
    myloc = os.path.dirname(me)
    if not myloc:
        myloc = os.getcwd()
    reaper_cmd = os.path.join(myloc, 'run_on_me_or_pid_quit')
    # The helper blocks until `pid` exits, then re-runs this script with
    # `--free <id>` so the lock is released automatically.
    Popen([reaper_cmd, str(pid), me, '--free', str(id)],
          stdout=open('/dev/null', 'w'))
def obtain_lock_id(pid=None):
    """
    Finds a free id, locks it and returns integer id, or -1 if none free.

    A process is spawned that will free the lock automatically when the
    process pid (by default the current python process) terminates.
    """
    id = -1
    id = obtain_lock_id_to_hog()
    try:
        if id >= 0:
            if pid is None:
                pid = os.getpid()
            _launch_reaper(id, pid)
    except Exception:
        # If the reaper could not be spawned the lock would never be freed
        # automatically, so release it and report failure. (The previous
        # bare `except:` also swallowed KeyboardInterrupt/SystemExit.)
        free_lock(id)
        id = -1
    return id
def obtain_lock_id_to_hog():
    """
    Finds a free id, locks it and returns integer id, or -1 if none free.

    * Lock must be freed manually *
    """
    # First board whose lock we can grab wins; -1 when every board is taken.
    claimed = (board_id for board_id in board_ids() if _obtain_lock(board_id))
    return next(claimed, -1)
def free_lock(id):
    """Attempts to free lock id, returning success as True/False."""
    try:
        filename = _lock_file(id)
        # On POSIX systems os.rename is an atomic operation, so this is the safe
        # way to delete a lock:
        os.rename(filename, filename + '.redundant')
        os.remove(filename + '.redundant')
        return True
    except OSError:
        # Lock file absent, or not ours to remove. The previous bare
        # `except:` also masked KeyboardInterrupt and unrelated failures.
        return False
def nvidia_gpu_stats():
    """Parse `nvidia-smi -x -a` XML into per-GPU stat lists.

    Returns [uselist, memlist, fanlist, templist] -- four parallel lists with
    one entry per GPU. On any failure (parse error, missing tags) returns
    -9999 placeholder entries of the same outer shape.
    """
    p = Popen(['nvidia-smi', '-x', '-a'], stdout=PIPE)
    output = p.stdout.read().lstrip()
    try:
        doc = parseString(output)
        gpucounter = 0
        templist = []
        memlist = []
        uselist = []
        fanlist = []
        doc2 = doc.getElementsByTagName("nvidia_smi_log")[0]
        gpulist = doc2.getElementsByTagName("gpu")
        for gpu in gpulist:
            temp = gpu.getElementsByTagName('temperature')[0]
            temp2 = temp.getElementsByTagName('gpu_temp')[0]
            templist.append(str(temp2.firstChild.toxml()))
            mem = gpu.getElementsByTagName('memory_usage')[0]
            memtot = mem.getElementsByTagName('total')[0]
            memused = mem.getElementsByTagName('used')[0]
            memfree = mem.getElementsByTagName('free')[0]
            memtot_str = str(memtot.firstChild.toxml())
            memused_str = str(memused.firstChild.toxml())
            memfree_str = str(memfree.firstChild.toxml())
            # Assumes a 3-character unit suffix (e.g. ' MB') on each value --
            # TODO confirm against the nvidia-smi XML schema in use.
            memtot_float = float(memtot_str[:-3])
            memused_float = float(memused_str[:-3])
            memfree_float = float(memfree_str[:-3])
            memlist.append('%03.f' % memused_float + '+%03.f' % memfree_float + '=%03.f' % memtot_float + 'Mb')
            use = gpu.getElementsByTagName('gpu_util')[0]
            uselist.append(str(use.firstChild.toxml()))
            fan = gpu.getElementsByTagName('fan_speed')[0]
            fanlist.append(str(fan.firstChild.toxml()))
            gpucounter += 1
        return [uselist, memlist, fanlist, templist]
    except:
        # Deliberate best-effort: any failure yields sentinel values rather
        # than crashing callers that only want a status display.
        return [ [-9999] * len(board_ids()) ] *4
# If run as a program: command-line front end for the locking functions.
# (Python 2 script: uses print statements throughout.)
if __name__ == "__main__":
    div = ' ' + "-" * 90
    import sys
    me = sys.argv[0]

    # Report
    if '--id' in sys.argv:
        # Lock a board and tie the lock's lifetime to a pid (default: parent).
        if len(sys.argv) > 2:
            try:
                pid = int(sys.argv[2])
                assert(os.path.exists('/proc/%d' % pid))
            except:
                print 'Usage: %s --id [pid_to_wait_on]' % me
                print 'The optional process id must exist if specified.'
                print 'Otherwise the id of the parent process is used.'
                sys.exit(1)
        else:
            pid = os.getppid()
        print obtain_lock_id(pid)
    elif '--ids' in sys.argv:
        # Try to lock one specific board id.
        try:
            id = int(sys.argv[2])
        except:
            print 'Usage: %s --ids [specific gpu id]' % me
            sys.exit(1)
        if _obtain_lock(id):
            print id
        else:
            # `print - 1` is print(-1): the failure sentinel.
            print - 1
    elif '--id-to-hog' in sys.argv:
        print obtain_lock_id_to_hog()
    elif '--free' in sys.argv:
        try:
            id = int(sys.argv[2])
        except:
            print 'Usage: %s --free <id>' % me
            sys.exit(1)
        if free_lock(id):
            print "Lock freed"
        else:
            owner = owner_of_lock(id)
            if owner:
                print "Failed to free lock id=%d owned by %s" % (id, owner)
            else:
                print "Failed to free lock, but it wasn't actually set?"
    elif '--noverbose' in sys.argv:
        stats = nvidia_gpu_stats()
        print div
        # NOTE(review): "%s" % 'abc' prints "abc board users:" -- looks like a
        # leftover placeholder for 'NVIDIA'. Also stats[2] is the fan list but
        # is labeled Temp below -- confirm intended indices.
        print "%s board users:" % 'abc'
        print div
        for id in board_ids():
            print " Board %d {Use:%s; Mem:%s; Temp:%s}: %s" % (id, stats[0][id], stats[1][id], stats[2][id], owner_of_lock(id))
        print div + '\n'
    else:
        # No recognized flag: print usage plus a full status report.
        stats = nvidia_gpu_stats()
        print div
        print ' Usage instructions:\n'
        print ' To obtain and lock an id: %s --id' % me
        print ' The lock is automatically freed when the parent terminates'
        print
        print " To get an id that won't be freed: %s --id-to-hog <id>" % me
        print " To get a specific id: %s --ids <id>" % me
        print
        print " You *must* manually free these ids: %s --free <id>\n" % me
        print ' More info: http://www.cs.toronto.edu/~murray/code/gpu_monitoring/'
        print ' Report any problems to: tang@cs.toronto.edu'
        print '\n' + div
        print " NVIDIA board users:"
        print div
        for id in board_ids():
            print " Board %d {Use:%s; Mem(used+free=total): %s; Fan:%s; Temp:%s}: %s" % (id, stats[0][id], stats[1][id], stats[2][id], stats[3][id], owner_of_lock(id))
        print div + '\n'
|
./data/daviddrysdale/python-phonenumbers/python/phonenumbers/data/region_TD.py | """Auto-generated file, do not edit by hand. TD metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Phone-number metadata for region TD (Chad, country calling code 235).
# This literal is produced by the metadata generator (see the module
# docstring) -- regenerate it rather than editing by hand.
PHONE_METADATA_TD = PhoneMetadata(id='TD', country_code=235, international_prefix='00|16',
    general_desc=PhoneNumberDesc(national_number_pattern='[2679]\\d{7}', possible_number_pattern='\\d{8}'),
    fixed_line=PhoneNumberDesc(national_number_pattern='22(?:[3789]0|5[0-5]|6[89])\\d{4}', possible_number_pattern='\\d{8}', example_number='22501234'),
    mobile=PhoneNumberDesc(national_number_pattern='(?:6[023568]\\d|77\\d|9\\d{2})\\d{5}', possible_number_pattern='\\d{8}', example_number='63012345'),
    toll_free=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    personal_number=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    preferred_international_prefix='00',
    number_format=[NumberFormat(pattern='(\\d{2})(\\d{2})(\\d{2})(\\d{2})', format='\\1 \\2 \\3 \\4')])
|
./data/operasoftware/sentrycli/sentrycli/__init__.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
__version__ = '1.4.1'
|
./data/saltstack/salt/tests/unit/states/lxc_test.py | # -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Jayesh Kariya <jayeshk@saltstack.com>`
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from salttesting import skipIf, TestCase
from salttesting.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import Salt Libs
from salt.states import lxc
import salt.utils
lxc.__salt__ = {}
lxc.__opts__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class LxcTestCase(TestCase):
    '''
    Test cases for salt.states.lxc

    Each test patches ``lxc.__salt__`` with MagicMocks whose ``side_effect``
    queues drive successive ``lxc.exists``/``lxc.state`` calls, so the order
    of the assertions below matters.
    '''
    # 'present' function tests: 1

    def test_present(self):
        '''
        Test to verify the named container if it exist.
        '''
        name = 'web01'
        ret = {'name': name,
               'result': False,
               'comment': '',
               'changes': {}}
        # One queued value per lxc.present() call made below.
        mock = MagicMock(side_effect=[False, True, True, True, True, True,
                                      True])
        mock_t = MagicMock(side_effect=[None, True, 'frozen', 'frozen',
                                        'stopped', 'running', 'running'])
        with patch.dict(lxc.__salt__, {'lxc.exists': mock,
                                       'lxc.state': mock_t}):
            comt = ("Clone source 'True' does not exist")
            ret.update({'comment': comt})
            self.assertDictEqual(lxc.present(name, clone_from=True), ret)

            with patch.dict(lxc.__opts__, {'test': True}):
                comt = ("Container 'web01' will be cloned from True")
                ret.update({'comment': comt, 'result': None})
                self.assertDictEqual(lxc.present(name, clone_from=True), ret)

                comt = ("Container 'web01' already exists")
                ret.update({'comment': comt, 'result': True})
                self.assertDictEqual(lxc.present(name, clone_from=True), ret)

                comt = ("Container 'web01' would be unfrozen")
                ret.update({'comment': comt, 'result': None})
                self.assertDictEqual(lxc.present(name, running=True,
                                                 clone_from=True), ret)

                comt = ('Container \'{0}\' would be stopped'.format(name))
                ret.update({'comment': comt, 'result': None})
                self.assertDictEqual(lxc.present(name, running=False,
                                                 clone_from=True), ret)

                comt = ("Container 'web01' already exists and is stopped")
                ret.update({'comment': comt, 'result': True})
                self.assertDictEqual(lxc.present(name, running=False,
                                                 clone_from=True), ret)

            with patch.dict(lxc.__opts__, {'test': False}):
                comt = ("Container 'web01' already exists")
                ret.update({'comment': comt, 'result': True})
                self.assertDictEqual(lxc.present(name, clone_from=True), ret)

    # 'absent' function tests: 1

    def test_absent(self):
        '''
        Test to ensure a container is not present, destroying it if present.
        '''
        name = 'web01'
        ret = {'name': name,
               'result': True,
               'comment': '',
               'changes': {}}
        mock = MagicMock(side_effect=[False, True, True])
        mock_des = MagicMock(return_value={'state': True})
        with patch.dict(lxc.__salt__, {'lxc.exists': mock,
                                       'lxc.destroy': mock_des}):
            comt = ('Container \'{0}\' does not exist'.format(name))
            ret.update({'comment': comt})
            self.assertDictEqual(lxc.absent(name), ret)

            with patch.dict(lxc.__opts__, {'test': True}):
                comt = ('Container \'{0}\' would be destroyed'.format(name))
                ret.update({'comment': comt, 'result': None})
                self.assertDictEqual(lxc.absent(name), ret)

            with patch.dict(lxc.__opts__, {'test': False}):
                comt = ('Container \'{0}\' was destroyed'.format(name))
                ret.update({'comment': comt, 'result': True,
                            'changes': {'state': True}})
                self.assertDictEqual(lxc.absent(name), ret)

    # 'running' function tests: 1

    def test_running(self):
        '''
        Test to ensure that a container is running.
        '''
        name = 'web01'
        ret = {'name': name,
               'result': False,
               'comment': '',
               'changes': {}}
        mock = MagicMock(return_value={'state': {'new': 'stop'}})
        mock_t = MagicMock(side_effect=[None, 'running', 'stopped', 'start'])
        with patch.dict(lxc.__salt__, {'lxc.exists': mock,
                                       'lxc.state': mock_t,
                                       'lxc.start': mock}):
            comt = ('Container \'{0}\' does not exist'.format(name))
            ret.update({'comment': comt})
            self.assertDictEqual(lxc.running(name), ret)

            comt = ("Container 'web01' is already running")
            ret.update({'comment': comt, 'result': True})
            self.assertDictEqual(lxc.running(name), ret)

            with patch.dict(lxc.__opts__, {'test': True}):
                comt = ("Container 'web01' would be started")
                ret.update({'comment': comt, 'result': None})
                self.assertDictEqual(lxc.running(name), ret)

            with patch.dict(lxc.__opts__, {'test': False}):
                comt = ("Unable to start container 'web01'")
                ret.update({'comment': comt, 'result': False, 'changes':
                            {'state': {'new': 'stop', 'old': 'start'}}})
                self.assertDictEqual(lxc.running(name), ret)

    # 'frozen' function tests: 1

    def test_frozen(self):
        '''
        Test to ensure that a container is frozen.
        '''
        name = 'web01'
        ret = {'name': name,
               'result': True,
               'comment': '',
               'changes': {}}
        mock = MagicMock(return_value={'state': {'new': 'stop'}})
        mock_t = MagicMock(side_effect=['frozen', 'stopped', 'stopped'])
        with patch.dict(lxc.__salt__, {'lxc.freeze': mock,
                                       'lxc.state': mock_t}):
            comt = ('Container \'{0}\' is already frozen'.format(name))
            ret.update({'comment': comt})
            self.assertDictEqual(lxc.frozen(name), ret)

            with patch.dict(lxc.__opts__, {'test': True}):
                comt = ("Container 'web01' would be started and frozen")
                ret.update({'comment': comt, 'result': None})
                self.assertDictEqual(lxc.frozen(name), ret)

            with patch.dict(lxc.__opts__, {'test': False}):
                comt = ("Unable to start and freeze container 'web01'")
                ret.update({'comment': comt, 'result': False, 'changes':
                            {'state': {'new': 'stop', 'old': 'stopped'}}})
                self.assertDictEqual(lxc.frozen(name), ret)

    # 'stopped' function tests: 1

    def test_stopped(self):
        '''
        Test to ensure that a container is stopped.
        '''
        name = 'web01'
        ret = {'name': name,
               'result': False,
               'comment': '',
               'changes': {}}
        mock = MagicMock(return_value={'state': {'new': 'stop'}})
        mock_t = MagicMock(side_effect=[None, 'stopped', 'frozen', 'frozen'])
        with patch.dict(lxc.__salt__, {'lxc.stop': mock,
                                       'lxc.state': mock_t}):
            comt = ('Container \'{0}\' does not exist'.format(name))
            ret.update({'comment': comt})
            self.assertDictEqual(lxc.stopped(name), ret)

            comt = ('Container \'{0}\' is already stopped'.format(name))
            ret.update({'comment': comt, 'result': True})
            self.assertDictEqual(lxc.stopped(name), ret)

            with patch.dict(lxc.__opts__, {'test': True}):
                comt = ("Container 'web01' would be stopped")
                ret.update({'comment': comt, 'result': None})
                self.assertDictEqual(lxc.stopped(name), ret)

            with patch.dict(lxc.__opts__, {'test': False}):
                comt = ("Unable to stop container 'web01'")
                ret.update({'comment': comt, 'result': False, 'changes':
                            {'state': {'new': 'stop', 'old': 'frozen'}}})
                self.assertDictEqual(lxc.stopped(name), ret)

    # 'created' function tests: 1

    def test_created(self):
        '''
        Test to execute create func.
        '''
        name = 'web01'
        ret = {'name': name,
               'result': False,
               'comment': '',
               'changes': {}}
        mock = MagicMock(return_value=False)
        with patch.dict(lxc.__salt__, {'lxc.exists': mock}):
            # warn_until is stubbed out: this state is a deprecated alias.
            with patch.object(salt.utils, 'warn_until', MagicMock()):
                comt = ("Clone source 'True' does not exist")
                ret.update({'comment': comt})
                self.assertDictEqual(lxc.created(name, clone_from=True), ret)

    # 'started' function tests: 1

    def test_started(self):
        '''
        Test to execute started func.
        '''
        name = 'web01'
        ret = {'name': name,
               'result': False,
               'comment': '',
               'changes': {}}
        mock = MagicMock(return_value=None)
        with patch.dict(lxc.__salt__, {'lxc.state': mock}):
            with patch.object(salt.utils, 'warn_until', MagicMock()):
                comt = ("Container 'web01' does not exist")
                ret.update({'comment': comt})
                self.assertDictEqual(lxc.started(name), ret)

    # 'cloned' function tests: 1

    def test_cloned(self):
        '''
        Test to execute cloned func.
        '''
        name = 'web01'
        ret = {'name': name,
               'result': False,
               'comment': '',
               'changes': {}}
        mock = MagicMock(return_value=False)
        with patch.dict(lxc.__salt__, {'lxc.exists': mock}):
            with patch.object(salt.utils, 'warn_until', MagicMock()):
                comt = ("Clone source 'True' does not exist")
                ret.update({'comment': comt})
                self.assertDictEqual(lxc.cloned(name, True), ret)

    # 'set_pass' function tests: 1

    def test_set_pass(self):
        '''
        Test to execute set_pass func.
        '''
        comment = ('The lxc.set_pass state is no longer supported. Please see '
                   'the LXC states documentation for further information.')
        ret = {'name': 'web01',
               'comment': comment,
               'result': False,
               'changes': {}}

        self.assertDictEqual(lxc.set_pass('web01'), ret)

    # 'edited_conf' function tests: 1

    def test_edited_conf(self):
        '''
        Test to edit LXC configuration options
        '''
        name = 'web01'
        comment = ('{0} lxc.conf will be edited'.format(name))
        ret = {'name': name,
               'result': True,
               'comment': comment,
               'changes': {}}
        with patch.object(salt.utils, 'warn_until', MagicMock()):
            with patch.dict(lxc.__opts__, {'test': True}):
                self.assertDictEqual(lxc.edited_conf(name), ret)

            with patch.dict(lxc.__opts__, {'test': False}):
                mock = MagicMock(return_value={})
                with patch.dict(lxc.__salt__, {'lxc.update_lxc_conf': mock}):
                    self.assertDictEqual(lxc.edited_conf(name),
                                         {'name': 'web01'})
if __name__ == '__main__':
    # Salt's test runner; needs_daemon=False because only mocks are used.
    from integration import run_tests
    run_tests(LxcTestCase, needs_daemon=False)
|
./data/dukestats/gpustats/old/build_cython.py | #/usr/bin/env python
from distutils.extension import Extension
from numpy.distutils.core import setup
from Cython.Distutils import build_ext
import numpy
def get_cuda_include():
    """Return the directory containing the CUDA headers.

    Honors the ``CUDA_HOME`` (or ``CUDA_PATH``) environment variable when
    set, so non-default CUDA installs work; otherwise falls back to the
    conventional install prefix ``/usr/local/cuda`` (the previous
    hard-coded behavior).
    """
    import os
    cuda_home = os.environ.get('CUDA_HOME') or os.environ.get('CUDA_PATH')
    if cuda_home:
        return os.path.join(cuda_home, 'include')
    return '/usr/local/cuda/include'
# Cython extension module: links against the prebuilt gpustats library in
# the current directory and needs both the NumPy and CUDA headers on the
# include path.
pyx_ext = Extension('testmod', ['cytest.pyx'],
                    include_dirs=[numpy.get_include(),
                                  get_cuda_include()],
                    library_dirs=['.'],
                    libraries=['gpustats'])

# build_ext from Cython.Distutils translates the .pyx source to C before
# the normal distutils compile step runs.
setup(name='testmod', description='',
      ext_modules=[pyx_ext],
      cmdclass = {
          'build_ext' : build_ext
      })
|
./data/kmike/opencorpora-tools/opencorpora/compat.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import sys
import functools
import itertools
PY3 = sys.version_info[0] == 3

if PY3:
    # Python 3: map is already lazy; text/bytes are native types.
    imap = map
    string_types = (str,)
    text_type = str
    binary_type = bytes
    integer_types = (int,)
else:
    # Python 2 equivalents of the names above.
    imap = itertools.imap
    string_types = (basestring,)
    text_type = unicode
    binary_type = str
    integer_types = (int, long)

# Prefer the C-accelerated ElementTree where it exists (removed in 3.9).
try:
    from xml.etree import cElementTree as ElementTree
except ImportError:
    from xml.etree import ElementTree

# OrderedDict entered the stdlib in 2.7; older interpreters use the
# standalone backport package.
try:
    from collections import OrderedDict
except ImportError:
    from ordereddict import OrderedDict

# urlopen lives in urllib.request on Python 3 and urllib2 on Python 2.
try:
    from urllib.request import urlopen
except ImportError:
    from urllib2 import urlopen

# cPickle is the fast C pickle implementation on Python 2.
try:
    import cPickle as pickle
except ImportError:
    import pickle
def utf8_for_PY2(func):
    """On Python 2, wrap ``func`` so its unicode result is returned UTF-8
    encoded; on Python 3 the function is returned unchanged."""
    if PY3:
        return func

    @functools.wraps(func)
    def encoded(*args, **kwargs):
        return func(*args, **kwargs).encode('utf8')

    return encoded
./data/osrf/rosbook/code/basics/src/fancy_action_server.py | #! /usr/bin/env python
# BEGIN ALL
#! /usr/bin/env python
import rospy
import time
import actionlib
# BEGIN PART_1
from basics.msg import TimerAction, TimerGoal, TimerResult, TimerFeedback
# END PART_1
def do_timer(goal):
    """Action execution callback: wait for goal.time_to_wait seconds,
    publishing feedback roughly once per second, honouring preemption,
    and aborting waits longer than 60 seconds.
    """
    start_time = time.time()
    # BEGIN PART_2
    # Number of feedback messages published so far; reported in the result.
    update_count = 0
    # END PART_2
    # BEGIN PART_3
    # Reject unreasonably long waits up front with an aborted result.
    if goal.time_to_wait.to_sec() > 60.0:
        result = TimerResult()
        result.time_elapsed = rospy.Duration.from_sec(time.time() - start_time)
        result.updates_sent = update_count
        server.set_aborted(result, "Timer aborted due to too-long wait")
        return
    # END PART_3
    # BEGIN PART_4
    while (time.time() - start_time) < goal.time_to_wait.to_sec():
        # END PART_4
        # BEGIN PART_5
        # A preempt request ends the timer early with a preempted result.
        if server.is_preempt_requested():
            result = TimerResult()
            result.time_elapsed = rospy.Duration.from_sec(time.time() - start_time)
            result.updates_sent = update_count
            server.set_preempted(result, "Timer preempted")
            return
        # END PART_5
        # BEGIN PART_6
        # Publish progress feedback once per loop iteration.
        feedback = TimerFeedback()
        feedback.time_elapsed = rospy.Duration.from_sec(time.time() - start_time)
        feedback.time_remaining = goal.time_to_wait - feedback.time_elapsed
        server.publish_feedback(feedback)
        update_count += 1
        # END PART_6
        # BEGIN PART_7
        time.sleep(1.0)
        # END PART_7
    # BEGIN PART_8
    result = TimerResult()
    result.time_elapsed = rospy.Duration.from_sec(time.time() - start_time)
    result.updates_sent = update_count
    server.set_succeeded(result, "Timer completed successfully")
    # END PART_8
# Register the node, create the action server (auto_start=False so goals
# cannot arrive before construction completes), then hand control to ROS.
rospy.init_node('timer_action_server')
server = actionlib.SimpleActionServer('timer', TimerAction, do_timer, False)
server.start()
rospy.spin()
# END ALL
|
./data/ella/ella/ella/core/migrations/0004_add_field_category_app_data_add_field_publishable_app_data.py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Add the ``app_data`` field to Category and Publishable."""

    def forwards(self, orm):
        """Create the app_data columns and backfill empty values."""
        # Adding field 'Category.app_data'
        db.add_column('core_category', 'app_data', self.gf('app_data.fields.AppDataField')(default='{}'), keep_default=True)

        # Normalise pre-existing empty-string values to the empty JSON
        # object; skipped when South is only simulating (--dry-run).
        if not db.dry_run:
            orm['core.Category'].objects.filter(app_data='').update(app_data='{}')

        # Adding field 'Publishable.app_data'
        db.add_column('core_publishable', 'app_data', self.gf('app_data.fields.AppDataField')(default='{}'), keep_default=True)

        if not db.dry_run:
            orm['core.Publishable'].objects.filter(app_data='').update(app_data='{}')

    def backwards(self, orm):
        """Drop the app_data columns again (data in them is lost)."""
        # Deleting field 'Category.app_data'
        db.delete_column('core_category', 'app_data')

        # Deleting field 'Publishable.app_data'
        db.delete_column('core_publishable', 'app_data')

    # South's frozen ORM snapshot of every model this migration touches;
    # generated by South, do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'core.author': {
            'Meta': {'object_name': 'Author'},
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
            'text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
        },
        'core.category': {
            'Meta': {'unique_together': "(('site', 'tree_path'),)", 'object_name': 'Category'},
            'app_data': ('app_data.fields.AppDataField', [], {'default': "'{}'", 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'tree_parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Category']", 'null': 'True', 'blank': 'True'}),
            'tree_path': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'core.dependency': {
            'Meta': {'object_name': 'Dependency'},
            'dependent_ct': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'depends_on_set'", 'to': "orm['contenttypes.ContentType']"}),
            'dependent_id': ('django.db.models.fields.IntegerField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'target_ct': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependency_for_set'", 'to': "orm['contenttypes.ContentType']"}),
            'target_id': ('django.db.models.fields.IntegerField', [], {})
        },
        'core.listing': {
            'Meta': {'object_name': 'Listing'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Category']"}),
            'commercial': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'publish_from': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'publish_to': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'publishable': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Publishable']"})
        },
        'core.publishable': {
            'Meta': {'object_name': 'Publishable'},
            'app_data': ('app_data.fields.AppDataField', [], {'default': "'{}'", 'blank': 'True'}),
            'authors': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.Author']", 'symmetrical': 'False'}),
            'category': ('ella.core.cache.fields.CachedForeignKey', [], {'to': "orm['core.Category']"}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'photo': ('ella.core.cache.fields.CachedForeignKey', [], {'to': "orm['photos.Photo']", 'null': 'True', 'blank': 'True'}),
            'publish_from': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(3000, 1, 1, 0, 0, 0, 2)', 'db_index': 'True'}),
            'publish_to': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'}),
            'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Source']", 'null': 'True', 'blank': 'True'}),
            'static': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'core.related': {
            'Meta': {'object_name': 'Related'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'publishable': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Publishable']"}),
            'related_ct': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'related_id': ('django.db.models.fields.IntegerField', [], {})
        },
        'core.source': {
            'Meta': {'object_name': 'Source'},
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        },
        'photos.photo': {
            'Meta': {'object_name': 'Photo'},
            'authors': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'photo_set'", 'symmetrical': 'False', 'to': "orm['core.Author']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'height': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            'important_bottom': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'important_left': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'important_right': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'important_top': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'}),
            'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Source']", 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'width': ('django.db.models.fields.PositiveIntegerField', [], {})
        },
        'sites.site': {
            'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        }
    }

    complete_apps = ['core']
|
./data/openstack/sahara/sahara/plugins/vanilla/confighints_helper.py | # Copyright (c) 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.service.edp.oozie.workflow_creator import workflow_factory
from sahara.utils import xmlutils
def get_possible_hive_config_from(file_name):
    """Build the dict of possible configs and params for a Hive job."""
    hive_config = {}
    hive_config['configs'] = xmlutils.load_hadoop_xml_defaults(file_name)
    hive_config['params'] = {}
    return hive_config
def get_possible_mapreduce_config_from(file_name):
    """Build the dict of possible configs for a MapReduce job."""
    # MapReduce jobs accept the Pig config defaults plus the extra
    # configs known to the workflow factory.
    mr_config = {
        'configs': get_possible_pig_config_from(file_name).get('configs'),
    }
    mr_config['configs'] += workflow_factory.get_possible_mapreduce_configs()
    return mr_config
def get_possible_pig_config_from(file_name):
    """Build the dict of possible configs, args and params for a Pig job."""
    pig_config = dict(
        configs=xmlutils.load_hadoop_xml_defaults(file_name),
        args=[],
        params={},
    )
    return pig_config
|
./data/openstack/oslo.log/doc/source/examples/usage_context.py | # Copyright (c) 2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""A usage example of Oslo Logging with context
This example requires the following package to be installed.
$ pip install oslo.log
Additional Oslo packages installed include oslo.config, oslo.context,
oslo.i18n, osli.serialization and oslo.utils.
More information about Oslo Logging can be found at:
http://docs.openstack.org/developer/oslo.log/usage.html
http://docs.openstack.org/developer/oslo.context/usage.html
"""
from oslo_config import cfg
from oslo_context import context
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
DOMAIN = 'demo'
def prepare():
    """Prepare Oslo Logging (2 or 3 steps)

    Use of Oslo Logging involves the following:

    * logging.register_options
    * logging.set_defaults (optional)
    * logging.setup
    """

    # Required step to register common, logging and generic configuration
    # variables
    logging.register_options(CONF)

    # Optional step to set new defaults if necessary for
    # * logging_context_format_string
    # * default_log_levels
    #
    # These variables default to respectively:
    #
    #  import oslo_log
    #  oslo_log._options.DEFAULT_LOG_LEVELS
    #  oslo_log._options.log_opts[0].default
    #
    # NOTE(review): the extra entries below quiet the dogpile and routes
    # libraries down to INFO on top of oslo.log's built-in defaults.
    extra_log_level_defaults = [
        'dogpile=INFO',
        'routes=INFO'
    ]

    logging.set_defaults(
        default_log_levels=logging.get_default_log_levels() +
        extra_log_level_defaults)

    # Required setup based on configuration and domain
    logging.setup(CONF, DOMAIN)
if __name__ == '__main__':
    prepare()

    LOG.info("Welcome to Oslo Logging")
    LOG.info("Without context")
    # NOTE(review): the RequestContext instance is deliberately not kept;
    # constructing it appears intended to register it as the current
    # context so the following log line is context-aware — confirm with
    # the oslo.context documentation.
    context.RequestContext(user='6ce90b4d',
                           tenant='d6134462',
                           domain='a6b9360e')
    LOG.info("With context")
|
./data/hazelcast/hazelcast-python-client/hazelcast/protocol/codec/executor_service_cancel_on_partition_codec.py | from hazelcast.serialization.bits import *
from hazelcast.protocol.client_message import ClientMessage
from hazelcast.protocol.custom_codec import *
from hazelcast.util import ImmutableLazyDataList
from hazelcast.protocol.codec.executor_service_message_type import *
# Protocol metadata for the ExecutorService.cancelOnPartition call:
# request message type, expected response type code, and whether the
# client may safely retry the request.
REQUEST_TYPE = EXECUTORSERVICE_CANCELONPARTITION
RESPONSE_TYPE = 101
RETRYABLE = False
def calculate_size(uuid, partition_id, interrupt):
    """Calculate the request payload size in bytes."""
    # Variable-length uuid string plus one fixed-size int (partition id)
    # and one fixed-size bool (interrupt flag).
    return (calculate_size_str(uuid)
            + INT_SIZE_IN_BYTES
            + BOOLEAN_SIZE_IN_BYTES)
def encode_request(uuid, partition_id, interrupt):
    """Encode the parameters into a ClientMessage request frame."""
    message = ClientMessage(
        payload_size=calculate_size(uuid, partition_id, interrupt))
    message.set_message_type(REQUEST_TYPE)
    message.set_retryable(RETRYABLE)
    # Payload fields are appended in protocol order: uuid, partition, flag.
    message.append_str(uuid)
    message.append_int(partition_id)
    message.append_bool(interrupt)
    message.update_frame_length()
    return message
def decode_response(client_message, to_object=None):
    """Decode the response from a client message.

    The cancel-on-partition response carries a single boolean payload;
    ``to_object`` is accepted for codec-interface uniformity but unused.
    """
    return {'response': client_message.read_bool()}
|
End of preview. Expand
in Data Studio
Dataset Card for "Py150-processed"
Dataset Creation
The original dataset is at https://www.sri.inf.ethz.ch/py150.
Citation Information
@article{raychev2016probabilistic, title={Probabilistic model for code with decision trees}, author={Raychev, Veselin and Bielik, Pavol and Vechev, Martin}, journal={ACM SIGPLAN Notices}, volume={51}, number={10}, pages={731--747}, year={2016}, publisher={ACM New York, NY, USA} }
- Downloads last month
- 59