| id (string, 23-25 chars) | content (string, 1.16k-88k chars) | max_stars_repo_path (string, 12-48 chars) |
|---|---|---|
codereval_python_data_201
|
Helper method to send a document via POST.
Additional ``*args`` and ``**kwargs`` will be passed on to ``requests.post``.
:arg url: Full url to send to, including protocol
:arg data: Dictionary (will be form-encoded), bytes, or file-like object to send in the body
:arg timeout: Seconds to wait for response (defaults to 10)
:arg method: Method to use, defaults to post
:returns: Tuple of status code (int or None) and error (exception class instance or None)
def send_document(url, data, timeout=10, method="post", *args, **kwargs):
"""Helper method to send a document via POST.
Additional ``*args`` and ``**kwargs`` will be passed on to ``requests.post``.
:arg url: Full url to send to, including protocol
:arg data: Dictionary (will be form-encoded), bytes, or file-like object to send in the body
:arg timeout: Seconds to wait for response (defaults to 10)
:arg method: Method to use, defaults to post
:returns: Tuple of status code (int or None) and error (exception class instance or None)
"""
logger.debug("send_document: url=%s, data=%s, timeout=%s, method=%s", url, data, timeout, method)
if not method:
method = "post"
headers = CaseInsensitiveDict({
'User-Agent': USER_AGENT,
})
if "headers" in kwargs:
# Update from kwargs
headers.update(kwargs.get("headers"))
kwargs.update({
"data": data, "timeout": timeout, "headers": headers
})
request_func = getattr(requests, method)
try:
response = request_func(url, *args, **kwargs)
logger.debug("send_document: response status code %s", response.status_code)
return response.status_code, None
# TODO support rate limit 429 code
except RequestException as ex:
logger.debug("send_document: exception %s", ex)
return None, ex
import calendar
import datetime
import logging
import re
import socket
from typing import Optional, Dict
from urllib.parse import quote
from uuid import uuid4
import requests
from requests.exceptions import RequestException, HTTPError, SSLError
from requests.exceptions import ConnectionError
from requests.structures import CaseInsensitiveDict
from federation import __version__
logger = logging.getLogger("federation")
USER_AGENT = "python/federation/%s" % __version__
def fetch_content_type(url: str) -> Optional[str]:
"""
Fetch the HEAD of the remote url to determine the content type.
"""
try:
response = requests.head(url, headers={'user-agent': USER_AGENT}, timeout=10)
except RequestException as ex:
logger.warning("fetch_content_type - %s when fetching url %s", ex, url)
else:
return response.headers.get('Content-Type')
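# A minimal usage sketch; the URL here is illustrative only. The helper returns
# the Content-Type header from a HEAD response, or None if the request fails.
example_type = fetch_content_type("https://example.com/avatar.png")  # e.g. "image/png" or None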
def fetch_document(url=None, host=None, path="/", timeout=10, raise_ssl_errors=True, extra_headers=None, **kwargs):
"""Helper method to fetch remote document.
Must be given either the ``url`` or ``host``.
If ``url`` is given, only that will be tried without falling back to http from https.
If ``host`` given, `path` will be added to it. Will fall back to http on non-success status code.
:arg url: Full url to fetch, including protocol
:arg host: Domain part only without path or protocol
:arg path: Path without domain (defaults to "/")
:arg timeout: Seconds to wait for response (defaults to 10)
:arg raise_ssl_errors: Pass False if you want to try HTTP even for sites with SSL errors (default True)
:arg extra_headers: Optional extra headers dictionary to add to requests
:arg kwargs: Extra keyword arguments passed on to requests.get
:returns: Tuple of document (str or None), status code (int or None) and error (an exception class instance or None)
:raises ValueError: If neither url nor host are given as parameters
"""
if not url and not host:
raise ValueError("Need url or host.")
logger.debug("fetch_document: url=%s, host=%s, path=%s, timeout=%s, raise_ssl_errors=%s",
url, host, path, timeout, raise_ssl_errors)
headers = {'user-agent': USER_AGENT}
if extra_headers:
headers.update(extra_headers)
if url:
# Use url since it was given
logger.debug("fetch_document: trying %s", url)
try:
response = requests.get(url, timeout=timeout, headers=headers, **kwargs)
logger.debug("fetch_document: found document, code %s", response.status_code)
response.raise_for_status()
return response.text, response.status_code, None
except RequestException as ex:
logger.debug("fetch_document: exception %s", ex)
return None, None, ex
# Build url with some little sanitizing
host_string = host.replace("http://", "").replace("https://", "").strip("/")
path_string = path if path.startswith("/") else "/%s" % path
url = "https://%s%s" % (host_string, path_string)
logger.debug("fetch_document: trying %s", url)
try:
response = requests.get(url, timeout=timeout, headers=headers)
logger.debug("fetch_document: found document, code %s", response.status_code)
response.raise_for_status()
return response.text, response.status_code, None
except (HTTPError, SSLError, ConnectionError) as ex:
if isinstance(ex, SSLError) and raise_ssl_errors:
logger.debug("fetch_document: exception %s", ex)
return None, None, ex
# Try http then
url = url.replace("https://", "http://")
logger.debug("fetch_document: trying %s", url)
try:
response = requests.get(url, timeout=timeout, headers=headers)
logger.debug("fetch_document: found document, code %s", response.status_code)
response.raise_for_status()
return response.text, response.status_code, None
except RequestException as ex:
logger.debug("fetch_document: exception %s", ex)
return None, None, ex
except RequestException as ex:
logger.debug("fetch_document: exception %s", ex)
return None, None, ex
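# A minimal usage sketch; the host and path are illustrative only. When called
# with ``host``, the helper tries HTTPS first and falls back to plain HTTP on
# connection or (optionally) SSL errors.
doc, status, error = fetch_document(host="example.com", path="/.well-known/nodeinfo")
# doc is the response body or None, status the HTTP code or None, and error
# the raised RequestException or None.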
def fetch_host_ip(host: str) -> str:
"""
Fetch ip by host
"""
try:
ip = socket.gethostbyname(host)
except socket.gaierror:
return ''
return ip
def fetch_file(url: str, timeout: int = 30, extra_headers: Dict = None) -> str:
"""
Download a file with a temporary name and return the name.
"""
headers = {'user-agent': USER_AGENT}
if extra_headers:
headers.update(extra_headers)
response = requests.get(url, timeout=timeout, headers=headers, stream=True)
response.raise_for_status()
name = f"/tmp/{str(uuid4())}"
with open(name, "wb") as f:
for chunk in response.iter_content(chunk_size=8192):
f.write(chunk)
return name
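# A minimal usage sketch; the URL is illustrative only. The response is streamed
# into a uniquely named file under /tmp and the path is returned; non-2xx
# responses raise via raise_for_status().
downloaded_path = fetch_file("https://example.com/photo.jpg", timeout=60)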
def parse_http_date(date):
"""
Parse a date format as specified by HTTP RFC7231 section 7.1.1.1.
The three formats allowed by the RFC are accepted, even if only the first
one is still in widespread use.
Return an integer expressed in seconds since the epoch, in UTC.
Implementation copied from Django.
https://github.com/django/django/blob/master/django/utils/http.py#L157
License: BSD 3-clause
"""
MONTHS = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
__D = r'(?P<day>\d{2})'
__D2 = r'(?P<day>[ \d]\d)'
__M = r'(?P<mon>\w{3})'
__Y = r'(?P<year>\d{4})'
__Y2 = r'(?P<year>\d{2})'
__T = r'(?P<hour>\d{2}):(?P<min>\d{2}):(?P<sec>\d{2})'
RFC1123_DATE = re.compile(r'^\w{3}, %s %s %s %s GMT$' % (__D, __M, __Y, __T))
RFC850_DATE = re.compile(r'^\w{6,9}, %s-%s-%s %s GMT$' % (__D, __M, __Y2, __T))
ASCTIME_DATE = re.compile(r'^\w{3} %s %s %s %s$' % (__M, __D2, __T, __Y))
# email.utils.parsedate() does the job for RFC1123 dates; unfortunately
# RFC7231 makes it mandatory to support RFC850 dates too. So we roll
# our own RFC-compliant parsing.
for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE:
m = regex.match(date)
if m is not None:
break
else:
raise ValueError("%r is not in a valid HTTP date format" % date)
try:
year = int(m.group('year'))
if year < 100:
if year < 70:
year += 2000
else:
year += 1900
month = MONTHS.index(m.group('mon').lower()) + 1
day = int(m.group('day'))
hour = int(m.group('hour'))
min = int(m.group('min'))
sec = int(m.group('sec'))
result = datetime.datetime(year, month, day, hour, min, sec)
return calendar.timegm(result.utctimetuple())
except Exception as exc:
raise ValueError("%r is not a valid date" % date) from exc
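# A short worked example: the three date formats allowed by RFC 7231 all parse
# to the same epoch timestamp.
rfc1123_ts = parse_http_date("Sun, 06 Nov 1994 08:49:37 GMT")
rfc850_ts = parse_http_date("Sunday, 06-Nov-94 08:49:37 GMT")
asctime_ts = parse_http_date("Sun Nov  6 08:49:37 1994")
assert rfc1123_ts == rfc850_ts == asctime_ts == 784111777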
def send_document(url, data, timeout=10, method="post", *args, **kwargs):
"""Helper method to send a document via POST.
Additional ``*args`` and ``**kwargs`` will be passed on to ``requests.post``.
:arg url: Full url to send to, including protocol
:arg data: Dictionary (will be form-encoded), bytes, or file-like object to send in the body
:arg timeout: Seconds to wait for response (defaults to 10)
:arg method: Method to use, defaults to post
:returns: Tuple of status code (int or None) and error (exception class instance or None)
"""
logger.debug("send_document: url=%s, data=%s, timeout=%s, method=%s", url, data, timeout, method)
if not method:
method = "post"
headers = CaseInsensitiveDict({
'User-Agent': USER_AGENT,
})
if "headers" in kwargs:
# Update from kwargs
headers.update(kwargs.get("headers"))
kwargs.update({
"data": data, "timeout": timeout, "headers": headers
})
request_func = getattr(requests, method)
try:
response = request_func(url, *args, **kwargs)
logger.debug("send_document: response status code %s", response.status_code)
return response.status_code, None
# TODO support rate limit 429 code
except RequestException as ex:
logger.debug("send_document: exception %s", ex)
return None, ex
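# A minimal usage sketch; the URL and extra header are illustrative only. Extra
# headers passed via kwargs are merged on top of the default User-Agent header.
status, error = send_document(
    "https://example.com/receive/public",
    data={"payload": "test"},
    headers={"X-Example": "1"},
)
# status is the HTTP status code or None; error is the raised RequestException
# or None.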
def try_retrieve_webfinger_document(handle: str) -> Optional[str]:
"""
Try to retrieve an RFC7033 webfinger document. Does not raise if it fails.
"""
try:
host = handle.split("@")[1]
except AttributeError:
logger.warning("retrieve_webfinger_document: invalid handle given: %s", handle)
return None
document, code, exception = fetch_document(
host=host, path="/.well-known/webfinger?resource=acct:%s" % quote(handle),
)
if exception:
logger.debug("retrieve_webfinger_document: failed to fetch webfinger document: %s, %s", code, exception)
return document
|
federation/utils/network.py
|
codereval_python_data_202
|
insert a value of a nested key into a dictionary
to insert value for a nested key, all ancestor keys should be given as
method's arguments
example:
dict_insert({}, 'val', *'key1.key2'.split('.'))
:param dic: a dictionary object to insert the nested key value into
:param val: a value to insert to the given dictionary
:param key: first key in a chain of key that will store the value
:param keys: sub keys in the keys chain
def dict_insert(dic, val, key, *keys):
"""insert a value of a nested key into a dictionary
to insert value for a nested key, all ancestor keys should be given as
method's arguments
example:
dict_insert({}, 'val', *'key1.key2'.split('.'))
:param dic: a dictionary object to insert the nested key value into
:param val: a value to insert to the given dictionary
:param key: first key in a chain of key that will store the value
:param keys: sub keys in the keys chain
"""
if dic is None:
return
if not keys:
if isinstance(dic.get(key, None), dict) and isinstance(val, dict):
dict_merge(dic[key], val)
else:
dic[key] = val
return
dict_insert(dic.setdefault(key, {}), val, *keys)
"""This module provides helper methods for dict merging and dict insertion. """
from infrared.core.utils import logger
LOG = logger.LOG
def dict_insert(dic, val, key, *keys):
"""insert a value of a nested key into a dictionary
to insert value for a nested key, all ancestor keys should be given as
method's arguments
example:
dict_insert({}, 'val', *'key1.key2'.split('.'))
:param dic: a dictionary object to insert the nested key value into
:param val: a value to insert to the given dictionary
:param key: first key in a chain of key that will store the value
:param keys: sub keys in the keys chain
"""
if dic is None:
return
if not keys:
if isinstance(dic.get(key, None), dict) and isinstance(val, dict):
dict_merge(dic[key], val)
else:
dic[key] = val
return
dict_insert(dic.setdefault(key, {}), val, *keys)
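# A short worked example: inserting under a chain of keys builds the
# intermediate dictionaries as needed.
example = {}
dict_insert(example, 'val', *'key1.key2'.split('.'))
assert example == {'key1': {'key2': 'val'}}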
class ConflictResolver(object):
"""Resolves conflicts while merging dicts. """
@staticmethod
def none_resolver(first, second, key):
"""Replaces value in first dict only if it is None.
Appends second value into the first in type is list.
"""
# tyr to merge lists first
if isinstance(first[key], list):
if isinstance(second[key], list):
first[key].extend(second[key])
elif second[key] is not None:
first[key].append(second[key])
if key not in first or first[key] is None:
first[key] = second[key]
@staticmethod
def greedy_resolver(first, second, key):
"""Replace always first with the value from second """
first[key] = second[key]
@staticmethod
def unique_append_list_resolver(first, second, key):
"""Merges first and second lists """
if isinstance(first[key], list) and isinstance(second[key], list):
for item in second[key]:
if item not in first[key]:
first[key].append(item)
else:
return ConflictResolver.greedy_resolver(first, second, key)
def dict_merge(first, second,
conflict_resolver=ConflictResolver.greedy_resolver):
"""Merge `second` dict into `first`.
:param first: Modified dict
:param second: Modifier dict
:param conflict_resolver: Function that resolves a merge between 2 values
when one of them isn't a dict
"""
for key in second:
if key in first:
if isinstance(first[key], dict) and isinstance(second[key], dict):
dict_merge(first[key], second[key],
conflict_resolver=conflict_resolver)
else:
# replace first value with the value from second
conflict_resolver(first, second, key)
else:
try:
first[key] = second[key]
except TypeError as e:
LOG.error("dict_merge(%s, %s) failed on: %s" % (first, second, key))
raise e
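# A short worked example: nested dicts are merged recursively, and the chosen
# conflict resolver decides what happens when non-dict values collide.
first = {'a': {'x': 1}, 'items': [1]}
second = {'a': {'y': 2}, 'items': [2]}
dict_merge(first, second,
           conflict_resolver=ConflictResolver.unique_append_list_resolver)
assert first == {'a': {'x': 1, 'y': 2}, 'items': [1, 2]}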
|
infrared/core/utils/dict_utils.py
|
codereval_python_data_203
|
Create a new IniType complex type
def list_of_file_names(settings_dirs, spec_option):
"""Create a new IniType complex type
"""
return cli.ListOfFileNames("ListOfFileNames", settings_dirs, None,
spec_option)
import os
import pytest
from infrared.core.cli import cli
from infrared.core.utils import exceptions
@pytest.fixture
def list_value_type():
"""
Create a new list value complex type
"""
return cli.ListValue("test", [os.getcwd(), ], 'cmd', None)
@pytest.fixture
def dict_type():
"""Create a new IniType complex type
"""
return cli.Dict("TestDict", None, None, None)
@pytest.fixture
def nested_dict():
"""Create a new NestedDict complex type
"""
return cli.NestedDict("TestNestedDict", None, None, None)
@pytest.fixture
def nested_list():
"""Create a new NestedList complex type
"""
return cli.NestedList("TestNestedList", None, None, None)
@pytest.fixture
def flag_type():
"""Create a new Flag complex type
"""
return cli.Flag("test", None, None, None)
@pytest.mark.parametrize(
"test_value,expected", [
("item1,item2", ["item1", "item2"]),
("item1", ["item1", ]),
("item1,item2,item3,", ["item1", "item2", "item3", ''])])
def test_list_value_resolve(list_value_type, test_value, expected):
"""
Verifies the string value can be resolved to the list.
"""
assert expected == list_value_type.resolve(test_value)
@pytest.mark.parametrize("input_value, expected_return", [
(['k1=v1'], {'k1': 'v1'}),
(['l1.s1.k1=v1'], {'l1': {'s1': {'k1': 'v1'}}}),
([' s1.k1=v1 '], {'s1': {'k1': 'v1'}}),
(['s1.k1=v1', 's1.k2=v2', 's2.k3=v3'],
{'s1': {'k1': 'v1', 'k2': 'v2'}, 's2': {'k3': 'v3'}}),
('k1=v1', {'k1': 'v1'}),
('s1.k1=v1', {'s1': {'k1': 'v1'}}),
(' s1.k1=v1 ', {'s1': {'k1': 'v1'}}),
('s1.k1=v1,s1.k2=v2,s2.k3=v3',
{'s1': {'k1': 'v1', 'k2': 'v2'}, 's2': {'k3': 'v3'}}),
('s1.k1=v1, s1.l2.k2=v2, s2.k3=v3',
{'s1': {'k1': 'v1', 'l2': {'k2': 'v2'}}, 's2': {'k3': 'v3'}}),
])
def test_nested_dict_resolve(input_value, expected_return, nested_dict):
"""Verifies the return value of 'resolve' method in 'NestedDict' Complex type
"""
assert nested_dict.resolve(input_value) == expected_return
@pytest.mark.parametrize("input_value, expected_return", [
(["k1=v1","k2=v2"], [{"k1":"v1"},{"k2":"v2"}])
])
def test_nested_list_resolve(input_value, expected_return, nested_list):
"""Verifies the return value of 'resolve' method in 'NestedList'
Complex type"""
assert nested_list.resolve(input_value) == expected_return
@pytest.mark.parametrize("input_value, expected_return", [
(['k1=v1'], {'k1': 'v1'}),
(['l1.s1.k1=v1'], {'l1.s1.k1': 'v1'}),
([' s1.k1=v1 '], {'s1.k1': 'v1'}),
(['s1.k1=v1', 's1.k2=v2', 's2.k3=v3'],
{'s1.k1': 'v1', 's1.k2': 'v2', 's2.k3': 'v3'}),
('k1=v1', {'k1': 'v1'}),
])
def test_dict_type_resolve(input_value, expected_return, dict_type):
"""Verifies the return value of 'resolve' method in 'IniType' Complex type
"""
assert dict_type.resolve(input_value) == expected_return
@pytest.mark.parametrize("input_value, expected_return", [
('test', True),
])
def test_flag_type_resolve(input_value, expected_return, flag_type):
"""Verifies the return value of 'resolve' method in 'Flag' Complex type
"""
assert flag_type.resolve(input_value) == expected_return
@pytest.fixture(scope="module")
def file_root_dir(tmpdir_factory):
"""Prepares the testing dirs for file tests"""
root_dir = tmpdir_factory.mktemp('complex_file_dir')
for file_path in ['file1.yml',
'arg/name/file2',
'defaults/arg/name/file.yml',
'defaults/arg/name/file2',
'vars/arg/name/file1.yml',
'vars/arg/name/file3.yml',
'vars/arg/name/nested/file4.yml']:
root_dir.join(file_path).ensure()
return root_dir
@pytest.fixture(scope="module")
def dir_root_dir(tmpdir_factory):
"""Prepares the testing dirs for dir tests"""
root_dir = tmpdir_factory.mktemp('complex_dir')
for dir_path in ['dir0/1.file',
'arg/name/dir1/1.file',
'vars/arg/name/dir2/1.file',
'defaults/arg/name/dir3/1.file']:
# creating a file will create a dir
root_dir.join(dir_path).ensure()
return root_dir
def create_file_type(root_dir, type_class):
return type_class("arg-name",
(root_dir.join('vars').strpath,
root_dir.join('defaults').strpath),
None,
None)
@pytest.fixture
def file_type(file_root_dir, request):
return create_file_type(file_root_dir, request.param)
@pytest.fixture
def dir_type(dir_root_dir, request):
return create_file_type(dir_root_dir, request.param)
@pytest.mark.parametrize('file_type', [cli.FileType], indirect=True)
def test_file_type_resolve(file_root_dir, file_type, monkeypatch):
"""Verifies the file complex type"""
# change cwd to the temp dir
monkeypatch.setattr("os.getcwd", lambda: file_root_dir.strpath)
assert file_type.resolve('file1') == file_root_dir.join(
'file1.yml').strpath
assert file_type.resolve('file2') == file_root_dir.join(
'arg/name/file2').strpath
with pytest.raises(exceptions.IRFileNotFoundException):
file_type.resolve('file.yml')
@pytest.mark.parametrize('file_type', [cli.VarFileType], indirect=True)
def test_var_file_type_resolve(file_root_dir, file_type, monkeypatch):
"""Verifies the file complex type"""
# change cwd to the temp dir
monkeypatch.setattr("os.getcwd", lambda: file_root_dir.strpath)
assert file_type.resolve('file1') == file_root_dir.join(
'file1.yml').strpath
assert file_type.resolve(
os.path.abspath('file1')) == file_root_dir.join('file1.yml').strpath
assert file_type.resolve('file2') == file_root_dir.join(
'arg/name/file2').strpath
assert file_type.resolve('file.yml') == file_root_dir.join(
'defaults/arg/name/file.yml').strpath
assert file_type.resolve('file3') == file_root_dir.join(
'vars/arg/name/file3.yml').strpath
assert file_type.resolve('nested/file4.yml') == file_root_dir.join(
'vars/arg/name/nested/file4.yml').strpath
with pytest.raises(exceptions.IRFileNotFoundException):
file_type.resolve('file5')
@pytest.mark.parametrize('file_type', [cli.ListFileType], indirect=True)
def test_list_of_var_files(file_root_dir, file_type, monkeypatch):
"""Verifies the list of files"""
monkeypatch.setattr("os.getcwd", lambda: file_root_dir.strpath)
assert file_type.resolve('file1') == [
file_root_dir.join('file1.yml').strpath]
assert file_type.resolve('file1,file2') == [
file_root_dir.join('file1.yml').strpath,
file_root_dir.join('arg/name/file2').strpath]
assert file_type.resolve('file3.yml,vars/arg/name/file3') == [
file_root_dir.join('vars/arg/name/file3.yml').strpath,
file_root_dir.join('vars/arg/name/file3.yml').strpath]
@pytest.mark.parametrize('dir_type', [cli.VarDirType], indirect=True)
def test_dir_type_resolve(dir_root_dir, dir_type, monkeypatch):
"""Verifies the file complex type"""
# change cwd to the temp dir
monkeypatch.setattr("os.getcwd", lambda: dir_root_dir.strpath)
assert dir_type.resolve('dir0') == dir_root_dir.join(
'dir0/').strpath
assert dir_type.resolve('dir1') == dir_root_dir.join(
'arg/name/dir1/').strpath
assert dir_type.resolve('dir2') == dir_root_dir.join(
'vars/arg/name/dir2/').strpath
assert dir_type.resolve('dir3') == dir_root_dir.join(
'defaults/arg/name/dir3/').strpath
with pytest.raises(exceptions.IRFileNotFoundException):
dir_type.resolve('dir4')
def list_of_file_names(settings_dirs, spec_option):
"""Create a new IniType complex type
"""
return cli.ListOfFileNames("ListOfFileNames", settings_dirs, None,
spec_option)
def test_list_of_file_names_values_auto_propagation():
expected = ["task1", "task2", "task3"]
settings_dirs = ["", "", 'tests/example']
spec_option = {'lookup_dir': 'post_tasks'}
complex_action = list_of_file_names(settings_dirs, spec_option)
allowed_values = complex_action.get_allowed_values()
assert sorted(expected) == sorted(allowed_values)
def test_list_of_file_names_resolve():
expected = ["task2", "task3"]
settings_dirs = ["", "", 'tests/example/']
spec_option = {'lookup_dir': 'post_tasks'}
value = "task2,task3"
complex_action = list_of_file_names(settings_dirs, spec_option)
values = complex_action.resolve(value)
print(values)
assert sorted(expected) == sorted(values)
|
tests/test_complex_types.py
|
codereval_python_data_204
|
Gets the ansible config manager.
@classmethod
def ansible_config_manager(cls):
"""Gets the ansible config manager. """
return cls._get_service(ServiceName.ANSIBLE_CONFIG_MANAGER)
"""Service locator for the IR services
Stores and resolves all the dependencies for the services.
"""
import os
import sys
from infrared.core.services import ansible_config
from infrared.core.services import execution_logger
from infrared.core.services import plugins
from infrared.core.services import workspaces
from infrared.core.utils import logger
LOG = logger.LOG
class ServiceName(object):
"""Holds the supported services names. """
WORKSPACE_MANAGER = "workspace_manager"
PLUGINS_MANAGER = "plugins_manager"
ANSIBLE_CONFIG_MANAGER = "ansible_config_manager"
EXECUTION_LOGGER_MANAGER = "execution_logger_manager"
class CoreSettings(object):
"""Holds the main settings for the infrared. """
def __init__(self, workspaces_base_folder=None,
plugins_conf_file=None,
install_plugin_at_start=True,
plugins_base_folder=None):
"""Constructor.
:param workspaces_base_folder: folder where the
workspace will be stored
:param plugins_conf_file: location of the plugins.ini file with the
list of all plugins and types.
:param install_plugin_at_start: specifies whether all the plugins
should be installed on ir start. Skip installation may be
required for unit tests, for example.
"""
self.infrared_home = os.path.abspath(os.environ.get(
"IR_HOME", os.path.join(os.path.expanduser("~"), '.infrared')))
# todo(obaranov) replace .workspaces to workspaces and .plugins.ini to
# todo(obaranov) plugins.ini once IR is packaged as pip
self.plugins_conf_file = plugins_conf_file or os.path.join(
self.infrared_home, '.plugins.ini')
self.workspaces_base_folder = workspaces_base_folder or os.path.join(
self.infrared_home, '.workspaces')
self.install_plugin_at_start = install_plugin_at_start
self.plugins_base_folder = plugins_base_folder or os.path.join(
self.infrared_home, 'plugins')
class CoreServices(object):
"""Holds and configures all the required for core services. """
_SERVICES = {}
@classmethod
def setup(cls, core_settings=None):
"""Creates configuration from file or from defaults.
:param core_settings: the instance of the CoreSettings class with the
desired settings. If None is provided then the default settings
will be used.
"""
if core_settings is None:
core_settings = CoreSettings()
# create workspace manager
if ServiceName.WORKSPACE_MANAGER not in cls._SERVICES:
cls.register_service(ServiceName.WORKSPACE_MANAGER,
workspaces.WorkspaceManager(
core_settings.workspaces_base_folder))
# create plugins manager
if ServiceName.PLUGINS_MANAGER not in cls._SERVICES:
# A temporary WH to skip all plugins installation on first InfraRed
# command if the command is 'infrared plugin add'.
# Should be removed together with auto plugins installation
# mechanism.
skip_plugins_install = {'plugin', 'add'}.issubset(sys.argv)
cls.register_service(
ServiceName.PLUGINS_MANAGER, plugins.InfraredPluginManager(
plugins_conf=core_settings.plugins_conf_file,
install_plugins=(core_settings.install_plugin_at_start and
not skip_plugins_install),
plugins_dir=core_settings.plugins_base_folder))
# create ansible config manager
if ServiceName.ANSIBLE_CONFIG_MANAGER not in cls._SERVICES:
cls.register_service(ServiceName.ANSIBLE_CONFIG_MANAGER,
ansible_config.AnsibleConfigManager(
core_settings.infrared_home))
# create execution logger manager
if ServiceName.EXECUTION_LOGGER_MANAGER not in cls._SERVICES:
# get ansible manager
ansible_manager = CoreServices.ansible_config_manager()
# build log file path
log_file = \
os.path.join(core_settings.infrared_home, 'ir-commands.log')
cls.register_service(ServiceName.EXECUTION_LOGGER_MANAGER,
execution_logger.ExecutionLoggerManager(
ansible_manager.ansible_config_path,
log_file=log_file))
@classmethod
def register_service(cls, service_name, service):
"""Protect the _SERVICES dict"""
CoreServices._SERVICES[service_name] = service
@classmethod
def _get_service(cls, name):
if name not in cls._SERVICES:
cls.setup()
return cls._SERVICES[name]
@classmethod
def workspace_manager(cls):
"""Gets the workspace manager. """
return cls._get_service(ServiceName.WORKSPACE_MANAGER)
@classmethod
def plugins_manager(cls):
"""Gets the plugin manager. """
return cls._get_service(ServiceName.PLUGINS_MANAGER)
@classmethod
def ansible_config_manager(cls):
"""Gets the ansible config manager. """
return cls._get_service(ServiceName.ANSIBLE_CONFIG_MANAGER)
@classmethod
def execution_logger_manager(cls):
"""Gets the execution logger manager. """
return cls._get_service(ServiceName.EXECUTION_LOGGER_MANAGER)
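# A minimal usage sketch: callers configure the locator once and then pull the
# individual managers from it. Note that setup() creates state under IR_HOME
# (by default ~/.infrared), so this is illustrative rather than side-effect free.
settings = CoreSettings(install_plugin_at_start=False)
CoreServices.setup(settings)
workspace_manager = CoreServices.workspace_manager()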
|
infrared/core/services/__init__.py
|
codereval_python_data_205
|
Gets the workspace manager.
@classmethod
def workspace_manager(cls):
"""Gets the workspace manager. """
return cls._get_service(ServiceName.WORKSPACE_MANAGER)
"""Service locator for the IR services
Stores and resolves all the dependencies for the services.
"""
import os
import sys
from infrared.core.services import ansible_config
from infrared.core.services import execution_logger
from infrared.core.services import plugins
from infrared.core.services import workspaces
from infrared.core.utils import logger
LOG = logger.LOG
class ServiceName(object):
"""Holds the supported services names. """
WORKSPACE_MANAGER = "workspace_manager"
PLUGINS_MANAGER = "plugins_manager"
ANSIBLE_CONFIG_MANAGER = "ansible_config_manager"
EXECUTION_LOGGER_MANAGER = "execution_logger_manager"
class CoreSettings(object):
"""Holds the main settings for the infrared. """
def __init__(self, workspaces_base_folder=None,
plugins_conf_file=None,
install_plugin_at_start=True,
plugins_base_folder=None):
"""Constructor.
:param workspaces_base_folder: folder where the
workspace will be stored
:param plugins_conf_file: location of the plugins.ini file with the
list of all plugins and types.
:param install_plugin_at_start: specifies whether all the plugins
should be installed on ir start. Skip installation may be
required for unit tests, for example.
"""
self.infrared_home = os.path.abspath(os.environ.get(
"IR_HOME", os.path.join(os.path.expanduser("~"), '.infrared')))
# todo(obaranov) replace .workspaces to workspaces and .plugins.ini to
# todo(obaranov) plugins.ini once IR is packaged as pip
self.plugins_conf_file = plugins_conf_file or os.path.join(
self.infrared_home, '.plugins.ini')
self.workspaces_base_folder = workspaces_base_folder or os.path.join(
self.infrared_home, '.workspaces')
self.install_plugin_at_start = install_plugin_at_start
self.plugins_base_folder = plugins_base_folder or os.path.join(
self.infrared_home, 'plugins')
class CoreServices(object):
"""Holds and configures all the required for core services. """
_SERVICES = {}
@classmethod
def setup(cls, core_settings=None):
"""Creates configuration from file or from defaults.
:param core_settings: the instance of the CoreSettings class with the
desired settings. If None is provided then the default settings
will be used.
"""
if core_settings is None:
core_settings = CoreSettings()
# create workspace manager
if ServiceName.WORKSPACE_MANAGER not in cls._SERVICES:
cls.register_service(ServiceName.WORKSPACE_MANAGER,
workspaces.WorkspaceManager(
core_settings.workspaces_base_folder))
# create plugins manager
if ServiceName.PLUGINS_MANAGER not in cls._SERVICES:
# A temporary WH to skip all plugins installation on first InfraRed
# command if the command is 'infrared plugin add'.
# Should be removed together with auto plugins installation
# mechanism.
skip_plugins_install = {'plugin', 'add'}.issubset(sys.argv)
cls.register_service(
ServiceName.PLUGINS_MANAGER, plugins.InfraredPluginManager(
plugins_conf=core_settings.plugins_conf_file,
install_plugins=(core_settings.install_plugin_at_start and
not skip_plugins_install),
plugins_dir=core_settings.plugins_base_folder))
# create ansible config manager
if ServiceName.ANSIBLE_CONFIG_MANAGER not in cls._SERVICES:
cls.register_service(ServiceName.ANSIBLE_CONFIG_MANAGER,
ansible_config.AnsibleConfigManager(
core_settings.infrared_home))
# create execution logger manager
if ServiceName.EXECUTION_LOGGER_MANAGER not in cls._SERVICES:
# get ansible manager
ansible_manager = CoreServices.ansible_config_manager()
# build log file path
log_file = \
os.path.join(core_settings.infrared_home, 'ir-commands.log')
cls.register_service(ServiceName.EXECUTION_LOGGER_MANAGER,
execution_logger.ExecutionLoggerManager(
ansible_manager.ansible_config_path,
log_file=log_file))
@classmethod
def register_service(cls, service_name, service):
"""Protect the _SERVICES dict"""
CoreServices._SERVICES[service_name] = service
@classmethod
def _get_service(cls, name):
if name not in cls._SERVICES:
cls.setup()
return cls._SERVICES[name]
@classmethod
def workspace_manager(cls):
"""Gets the workspace manager. """
return cls._get_service(ServiceName.WORKSPACE_MANAGER)
@classmethod
def plugins_manager(cls):
"""Gets the plugin manager. """
return cls._get_service(ServiceName.PLUGINS_MANAGER)
@classmethod
def ansible_config_manager(cls):
"""Gets the ansible config manager. """
return cls._get_service(ServiceName.ANSIBLE_CONFIG_MANAGER)
@classmethod
def execution_logger_manager(cls):
"""Gets the execution logger manager. """
return cls._get_service(ServiceName.EXECUTION_LOGGER_MANAGER)
|
infrared/core/services/__init__.py
|
codereval_python_data_206
|
Gets the plugin manager.
@classmethod
def plugins_manager(cls):
"""Gets the plugin manager. """
return cls._get_service(ServiceName.PLUGINS_MANAGER)
"""Service locator for the IR services
Stores and resolves all the dependencies for the services.
"""
import os
import sys
from infrared.core.services import ansible_config
from infrared.core.services import execution_logger
from infrared.core.services import plugins
from infrared.core.services import workspaces
from infrared.core.utils import logger
LOG = logger.LOG
class ServiceName(object):
"""Holds the supported services names. """
WORKSPACE_MANAGER = "workspace_manager"
PLUGINS_MANAGER = "plugins_manager"
ANSIBLE_CONFIG_MANAGER = "ansible_config_manager"
EXECUTION_LOGGER_MANAGER = "execution_logger_manager"
class CoreSettings(object):
"""Holds the main settings for the infrared. """
def __init__(self, workspaces_base_folder=None,
plugins_conf_file=None,
install_plugin_at_start=True,
plugins_base_folder=None):
"""Constructor.
:param workspaces_base_folder: folder where the
workspace will be stored
:param plugins_conf_file: location of the plugins.ini file with the
list of all plugins and types.
:param install_plugin_at_start: specifies whether all the plugins
should be installed on ir start. Skip installation may be
required for unit tests, for example.
"""
self.infrared_home = os.path.abspath(os.environ.get(
"IR_HOME", os.path.join(os.path.expanduser("~"), '.infrared')))
# todo(obaranov) replace .workspaces to workspaces and .plugins.ini to
# todo(obaranov) plugins.ini once IR is packaged as pip
self.plugins_conf_file = plugins_conf_file or os.path.join(
self.infrared_home, '.plugins.ini')
self.workspaces_base_folder = workspaces_base_folder or os.path.join(
self.infrared_home, '.workspaces')
self.install_plugin_at_start = install_plugin_at_start
self.plugins_base_folder = plugins_base_folder or os.path.join(
self.infrared_home, 'plugins')
class CoreServices(object):
"""Holds and configures all the required for core services. """
_SERVICES = {}
@classmethod
def setup(cls, core_settings=None):
"""Creates configuration from file or from defaults.
:param core_settings: the instance of the CoreSettings class with the
desired settings. If None is provided then the default settings
will be used.
"""
if core_settings is None:
core_settings = CoreSettings()
# create workspace manager
if ServiceName.WORKSPACE_MANAGER not in cls._SERVICES:
cls.register_service(ServiceName.WORKSPACE_MANAGER,
workspaces.WorkspaceManager(
core_settings.workspaces_base_folder))
# create plugins manager
if ServiceName.PLUGINS_MANAGER not in cls._SERVICES:
# A temporary WH to skip all plugins installation on first InfraRed
# command if the command is 'infrared plugin add'.
# Should be removed together with auto plugins installation
# mechanism.
skip_plugins_install = {'plugin', 'add'}.issubset(sys.argv)
cls.register_service(
ServiceName.PLUGINS_MANAGER, plugins.InfraredPluginManager(
plugins_conf=core_settings.plugins_conf_file,
install_plugins=(core_settings.install_plugin_at_start and
not skip_plugins_install),
plugins_dir=core_settings.plugins_base_folder))
# create ansible config manager
if ServiceName.ANSIBLE_CONFIG_MANAGER not in cls._SERVICES:
cls.register_service(ServiceName.ANSIBLE_CONFIG_MANAGER,
ansible_config.AnsibleConfigManager(
core_settings.infrared_home))
# create execution logger manager
if ServiceName.EXECUTION_LOGGER_MANAGER not in cls._SERVICES:
# get ansible manager
ansible_manager = CoreServices.ansible_config_manager()
# build log file path
log_file = \
os.path.join(core_settings.infrared_home, 'ir-commands.log')
cls.register_service(ServiceName.EXECUTION_LOGGER_MANAGER,
execution_logger.ExecutionLoggerManager(
ansible_manager.ansible_config_path,
log_file=log_file))
@classmethod
def register_service(cls, service_name, service):
"""Protect the _SERVICES dict"""
CoreServices._SERVICES[service_name] = service
@classmethod
def _get_service(cls, name):
if name not in cls._SERVICES:
cls.setup()
return cls._SERVICES[name]
@classmethod
def workspace_manager(cls):
"""Gets the workspace manager. """
return cls._get_service(ServiceName.WORKSPACE_MANAGER)
@classmethod
def plugins_manager(cls):
"""Gets the plugin manager. """
return cls._get_service(ServiceName.PLUGINS_MANAGER)
@classmethod
def ansible_config_manager(cls):
"""Gets the ansible config manager. """
return cls._get_service(ServiceName.ANSIBLE_CONFIG_MANAGER)
@classmethod
def execution_logger_manager(cls):
"""Gets the execution logger manager. """
return cls._get_service(ServiceName.EXECUTION_LOGGER_MANAGER)
|
infrared/core/services/__init__.py
|
codereval_python_data_207
|
validates that spec (YAML) content has all required fields
:param spec_content: content of spec file
:raise IRValidatorException: when mandatory data
is missing in spec file
:return: Dictionary with data loaded from a spec (YAML) file
@classmethod
def validate_from_content(cls, spec_content=None):
"""validates that spec (YAML) content has all required fields
:param spec_content: content of spec file
:raise IRValidatorException: when mandatory data
is missing in spec file
:return: Dictionary with data loaded from a spec (YAML) file
"""
if spec_content is None:
raise IRValidatorException(
"Plugin spec content is missing")
spec_dict = yaml.safe_load(spec_content)
if not isinstance(spec_dict, dict):
raise IRValidatorException(
"Spec file is empty or corrupted: {}".format(spec_content))
# check if new spec file structure
try:
if "config" in spec_dict:
jsonschema.validate(spec_dict,
cls.SCHEMA_WITH_CONFIG)
else:
jsonschema.validate(spec_dict,
cls.SCHEMA_WITHOUT_CONFIG)
except jsonschema.exceptions.ValidationError as error:
raise IRValidatorException(
"{} in file:\n{}".format(error.message, spec_content))
subparsers_key = "subparsers"
if ("description" not in spec_dict and "description"
not in list(spec_dict[subparsers_key].values())[0]):
raise IRValidatorException(
"Required key 'description' is missing for supbarser '{}' in "
"spec file: {}".format(
list(spec_dict[subparsers_key].keys())[0], spec_content))
return spec_dict
import jsonschema
import os
from six.moves import configparser
import yaml
from infrared.core.utils.exceptions import IRValidatorException
from infrared.core.utils.logger import LOG as logger
class Validator(object):
@classmethod
def validate_from_file(cls, yaml_file=None):
"""Loads & validates that a YAML file has all required fields
:param yaml_file: Path to YAML file
:raise IRValidatorException: when mandatory data is missing in file
:return: Dictionary with data loaded from a YAML file
"""
if yaml_file is None:
raise IRValidatorException(
"YAML file is missing")
if not os.path.isfile(yaml_file):
raise IRValidatorException(
"The YAML file doesn't exist: {}".format(yaml_file))
with open(yaml_file) as fp:
spec_dict = cls.validate_from_content(fp.read())
return spec_dict
@classmethod
def validate_from_content(cls, file_content=None):
"""validates that YAML content has all required fields
:param file_content: content of the YAML file
:raise IRValidatorException: when mandatory data is missing in file
:return: Dictionary with data loaded from a YAML file
"""
raise NotImplementedError
class SpecValidator(Validator):
"""Class for validating a plugin spec.
It checks that a plugin spec (YAML) has all required fields.
"""
CONFIG_PART_SCHEMA = {
"type": "object",
"properties": {
"plugin_type": {"type": "string", "minLength": 1},
"entry_point": {"type": "string", "minLength": 1},
"roles_path": {"type": "string", "minLength": 1},
},
"additionalProperties": False,
"required": ["plugin_type"]
}
SUBPARSER_PART_SCHEMA = {
"type": "object",
"minProperties": 1,
"maxProperties": 1,
"patternProperties": {
"^(?!(?:all)$).+$": {
"type": "object",
}
},
"additionalProperties": False
}
SCHEMA_WITH_CONFIG = {
"type": "object",
"properties": {
"description": {"type": "string", "minLength": 1},
"config": CONFIG_PART_SCHEMA,
"subparsers": SUBPARSER_PART_SCHEMA
},
"additionalProperties": False,
"required": ["config", "subparsers"]
}
SCHEMA_WITHOUT_CONFIG = {
"type": "object",
"properties": {
"plugin_type": {"type": "string", "minLength": 1},
"entry_point": {"type": "string", "minLength": 1},
"roles_path": {"type": "string", "minLength": 1},
"description": {"type": "string", "minLength": 1},
"subparsers": SUBPARSER_PART_SCHEMA
},
"additionalProperties": False,
"required": ["plugin_type", "subparsers"]
}
@classmethod
def validate_from_content(cls, spec_content=None):
"""validates that spec (YAML) content has all required fields
:param spec_content: content of spec file
:raise IRValidatorException: when mandatory data
is missing in spec file
:return: Dictionary with data loaded from a spec (YAML) file
"""
if spec_content is None:
raise IRValidatorException(
"Plugin spec content is missing")
spec_dict = yaml.safe_load(spec_content)
if not isinstance(spec_dict, dict):
raise IRValidatorException(
"Spec file is empty or corrupted: {}".format(spec_content))
# check if new spec file structure
try:
if "config" in spec_dict:
jsonschema.validate(spec_dict,
cls.SCHEMA_WITH_CONFIG)
else:
jsonschema.validate(spec_dict,
cls.SCHEMA_WITHOUT_CONFIG)
except jsonschema.exceptions.ValidationError as error:
raise IRValidatorException(
"{} in file:\n{}".format(error.message, spec_content))
subparsers_key = "subparsers"
if ("description" not in spec_dict and "description"
not in list(spec_dict[subparsers_key].values())[0]):
raise IRValidatorException(
"Required key 'description' is missing for supbarser '{}' in "
"spec file: {}".format(
list(spec_dict[subparsers_key].keys())[0], spec_content))
return spec_dict
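# A short worked example, assuming the "new" spec structure where plugin
# settings live under a top-level "config" key; the plugin names and values are
# illustrative only.
EXAMPLE_SPEC = """
config:
    plugin_type: install
    entry_point: main.yml
subparsers:
    example:
        description: Example plugin
"""
example_spec_dict = SpecValidator.validate_from_content(EXAMPLE_SPEC)
assert example_spec_dict['config']['plugin_type'] == 'install'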
class RegistryValidator(Validator):
SCHEMA_REGISTRY = {
"type": "object",
"patternProperties": {
"^.+$": {
"type": "object",
"properties": {
"src": {"type": "string", "minLength": 1},
"src_path": {"type": "string", "minLength": 1},
"rev": {"type": "string", "minLength": 1},
"desc": {"type": "string", "minLength": 1},
"type": {"type": "string", "minLength": 1},
},
"additionalProperties": False,
"required": ["src", "desc", "type"]
}
},
"additionalProperties": False,
}
@classmethod
def validate_from_content(cls, file_content=None):
"""validates that Registry YAML content has all required fields
:param file_content: content of the Registry YAML file
:raise IRValidatorException: when mandatory data is missing in Registry
:return: Dictionary with data loaded from a Registry YAML file
"""
if file_content is None:
raise IRValidatorException(
"Registry YAML content is missing")
registry_dict = yaml.safe_load(file_content)
if not isinstance(registry_dict, dict):
raise IRValidatorException(
"Registry file is empty or corrupted: {}".format(file_content))
try:
# validate schema
jsonschema.validate(registry_dict,
cls.SCHEMA_REGISTRY)
except jsonschema.exceptions.ValidationError as error:
raise IRValidatorException(
"{} in file:\n{}".format(error.message, file_content))
return registry_dict
class AnsibleConfigValidator(Validator):
ANSIBLE_CONFIG_OPTIONS = {
'defaults': {
'host_key_checking': {
'type': 'bool',
'comparison': 'eq',
'expected_value': False,
'critical': True
},
'forks': {
'type': 'int',
'comparison': 'gt',
'expected_value': 500,
'critical': False
},
'timeout': {
'type': 'int',
'comparison': 'gt',
'expected_value': 30,
'critical': False
}
}
}
@classmethod
def validate_from_file(cls, yaml_file=None):
config = configparser.RawConfigParser()
config.read(yaml_file)
config_dict = cls._convert_config_to_dict(config)
for section, option_details in cls.ANSIBLE_CONFIG_OPTIONS.items():
for opt_name, opt_params in option_details.items():
try:
config_value = config_dict[section][opt_name]
cls._validate_config_option(yaml_file,
opt_name,
opt_params['type'],
opt_params['comparison'],
opt_params['expected_value'],
config_value,
opt_params['critical'])
except KeyError:
cls._handle_missing_value(yaml_file, section, opt_name,
opt_params['expected_value'],
opt_params['critical'])
@classmethod
def validate_from_content(cls, file_content=None):
pass
@classmethod
def _validate_config_option(cls, yaml_file, opt_name, opt_type,
comparison, exp_value, cur_value, critical):
if opt_type == 'int':
cur_value = int(cur_value)
if opt_type == 'bool':
if cur_value == 'True':
cur_value = True
else:
cur_value = False
if comparison == 'eq':
if cur_value != exp_value:
cls._handle_wrong_value(yaml_file, opt_name, exp_value,
cur_value, critical)
if comparison == 'gt':
if cur_value < exp_value:
cls._handle_wrong_value(yaml_file, opt_name, exp_value,
cur_value, critical)
@classmethod
def _handle_wrong_value(cls, yaml_file, option_name, exp_value,
cur_value, critical):
msg = "There is an issue with Ansible configuration in " \
"{}. Expected value for the option '{}' is '{}', " \
"current value is '{}'".format(yaml_file, option_name,
exp_value, cur_value)
if critical:
raise IRValidatorException(msg)
else:
logger.warn(msg)
@classmethod
def _handle_missing_value(cls, yaml_file, section, option_name,
exp_value, critical):
msg = "There is an issue with Ansible configuration in" \
" {}. Option '{}' with value of '{}' not found in" \
" section '{}'".format(yaml_file, option_name,
exp_value, section)
if critical:
raise IRValidatorException(msg)
else:
logger.warn(msg)
@staticmethod
def _convert_config_to_dict(config):
config_dict = {}
for section in config.sections():
if section not in config_dict:
config_dict[section] = {}
for option in config.options(section):
option_value = config.get(section, option)
try:
option_value = int(option_value)
except ValueError:
pass
config_dict[section][option] = option_value
return config_dict
|
infrared/core/utils/validators.py
|
codereval_python_data_208
|
Loads & validates that a YAML file has all required fields
:param yaml_file: Path to YAML file
:raise IRValidatorException: when mandatory data is missing in file
:return: Dictionary with data loaded from a YAML file
@classmethod
def validate_from_file(cls, yaml_file=None):
"""Loads & validates that a YAML file has all required fields
:param yaml_file: Path to YAML file
:raise IRValidatorException: when mandatory data is missing in file
:return: Dictionary with data loaded from a YAML file
"""
if yaml_file is None:
raise IRValidatorException(
"YAML file is missing")
if not os.path.isfile(yaml_file):
raise IRValidatorException(
"The YAML file doesn't exist: {}".format(yaml_file))
with open(yaml_file) as fp:
spec_dict = cls.validate_from_content(fp.read())
return spec_dict
import jsonschema
import os
from six.moves import configparser
import yaml
from infrared.core.utils.exceptions import IRValidatorException
from infrared.core.utils.logger import LOG as logger
class Validator(object):
@classmethod
def validate_from_file(cls, yaml_file=None):
"""Loads & validates that a YAML file has all required fields
:param yaml_file: Path to YAML file
:raise IRValidatorException: when mandatory data is missing in file
:return: Dictionary with data loaded from a YAML file
"""
if yaml_file is None:
raise IRValidatorException(
"YAML file is missing")
if not os.path.isfile(yaml_file):
raise IRValidatorException(
"The YAML file doesn't exist: {}".format(yaml_file))
with open(yaml_file) as fp:
spec_dict = cls.validate_from_content(fp.read())
return spec_dict
@classmethod
def validate_from_content(cls, file_content=None):
"""validates that YAML content has all required fields
:param file_content: content of the YAML file
:raise IRValidatorException: when mandatory data is missing in file
:return: Dictionary with data loaded from a YAML file
"""
raise NotImplementedError
class SpecValidator(Validator):
"""Class for validating a plugin spec.
It checks that a plugin spec (YAML) has all required fields.
"""
CONFIG_PART_SCHEMA = {
"type": "object",
"properties": {
"plugin_type": {"type": "string", "minLength": 1},
"entry_point": {"type": "string", "minLength": 1},
"roles_path": {"type": "string", "minLength": 1},
},
"additionalProperties": False,
"required": ["plugin_type"]
}
SUBPARSER_PART_SCHEMA = {
"type": "object",
"minProperties": 1,
"maxProperties": 1,
"patternProperties": {
"^(?!(?:all)$).+$": {
"type": "object",
}
},
"additionalProperties": False
}
SCHEMA_WITH_CONFIG = {
"type": "object",
"properties": {
"description": {"type": "string", "minLength": 1},
"config": CONFIG_PART_SCHEMA,
"subparsers": SUBPARSER_PART_SCHEMA
},
"additionalProperties": False,
"required": ["config", "subparsers"]
}
SCHEMA_WITHOUT_CONFIG = {
"type": "object",
"properties": {
"plugin_type": {"type": "string", "minLength": 1},
"entry_point": {"type": "string", "minLength": 1},
"roles_path": {"type": "string", "minLength": 1},
"description": {"type": "string", "minLength": 1},
"subparsers": SUBPARSER_PART_SCHEMA
},
"additionalProperties": False,
"required": ["plugin_type", "subparsers"]
}
@classmethod
def validate_from_content(cls, spec_content=None):
"""validates that spec (YAML) content has all required fields
:param spec_content: content of spec file
:raise IRValidatorException: when mandatory data
is missing in spec file
:return: Dictionary with data loaded from a spec (YAML) file
"""
if spec_content is None:
raise IRValidatorException(
"Plugin spec content is missing")
spec_dict = yaml.safe_load(spec_content)
if not isinstance(spec_dict, dict):
raise IRValidatorException(
"Spec file is empty or corrupted: {}".format(spec_content))
# check if new spec file structure
try:
if "config" in spec_dict:
jsonschema.validate(spec_dict,
cls.SCHEMA_WITH_CONFIG)
else:
jsonschema.validate(spec_dict,
cls.SCHEMA_WITHOUT_CONFIG)
except jsonschema.exceptions.ValidationError as error:
raise IRValidatorException(
"{} in file:\n{}".format(error.message, spec_content))
subparsers_key = "subparsers"
if ("description" not in spec_dict and "description"
not in list(spec_dict[subparsers_key].values())[0]):
raise IRValidatorException(
"Required key 'description' is missing for supbarser '{}' in "
"spec file: {}".format(
list(spec_dict[subparsers_key].keys())[0], spec_content))
return spec_dict
class RegistryValidator(Validator):
SCHEMA_REGISTRY = {
"type": "object",
"patternProperties": {
"^.+$": {
"type": "object",
"properties": {
"src": {"type": "string", "minLength": 1},
"src_path": {"type": "string", "minLength": 1},
"rev": {"type": "string", "minLength": 1},
"desc": {"type": "string", "minLength": 1},
"type": {"type": "string", "minLength": 1},
},
"additionalProperties": False,
"required": ["src", "desc", "type"]
}
},
"additionalProperties": False,
}
@classmethod
def validate_from_content(cls, file_content=None):
"""validates that Registry YAML content has all required fields
:param file_content: content of the Registry YAML file
:raise IRValidatorException: when mandatory data is missing in Registry
:return: Dictionary with data loaded from a Registry YAML file
"""
if file_content is None:
raise IRValidatorException(
"Registry YAML content is missing")
registry_dict = yaml.safe_load(file_content)
if not isinstance(registry_dict, dict):
raise IRValidatorException(
"Registry file is empty or corrupted: {}".format(file_content))
try:
# validate schema
jsonschema.validate(registry_dict,
cls.SCHEMA_REGISTRY)
except jsonschema.exceptions.ValidationError as error:
raise IRValidatorException(
"{} in file:\n{}".format(error.message, file_content))
return registry_dict
class AnsibleConfigValidator(Validator):
ANSIBLE_CONFIG_OPTIONS = {
'defaults': {
'host_key_checking': {
'type': 'bool',
'comparison': 'eq',
'expected_value': False,
'critical': True
},
'forks': {
'type': 'int',
'comparison': 'gt',
'expected_value': 500,
'critical': False
},
'timeout': {
'type': 'int',
'comparison': 'gt',
'expected_value': 30,
'critical': False
}
}
}
@classmethod
def validate_from_file(cls, yaml_file=None):
config = configparser.RawConfigParser()
config.read(yaml_file)
config_dict = cls._convert_config_to_dict(config)
for section, option_details in cls.ANSIBLE_CONFIG_OPTIONS.items():
for opt_name, opt_params in option_details.items():
try:
config_value = config_dict[section][opt_name]
cls._validate_config_option(yaml_file,
opt_name,
opt_params['type'],
opt_params['comparison'],
opt_params['expected_value'],
config_value,
opt_params['critical'])
except KeyError:
cls._handle_missing_value(yaml_file, section, opt_name,
opt_params['expected_value'],
opt_params['critical'])
@classmethod
def validate_from_content(cls, file_content=None):
pass
@classmethod
def _validate_config_option(cls, yaml_file, opt_name, opt_type,
comparison, exp_value, cur_value, critical):
if opt_type == 'int':
cur_value = int(cur_value)
if opt_type == 'bool':
if cur_value == 'True':
cur_value = True
else:
cur_value = False
if comparison == 'eq':
if cur_value != exp_value:
cls._handle_wrong_value(yaml_file, opt_name, exp_value,
cur_value, critical)
if comparison == 'gt':
if cur_value < exp_value:
cls._handle_wrong_value(yaml_file, opt_name, exp_value,
cur_value, critical)
@classmethod
def _handle_wrong_value(cls, yaml_file, option_name, exp_value,
cur_value, critical):
msg = "There is an issue with Ansible configuration in " \
"{}. Expected value for the option '{}' is '{}', " \
"current value is '{}'".format(yaml_file, option_name,
exp_value, cur_value)
if critical:
raise IRValidatorException(msg)
else:
logger.warn(msg)
@classmethod
def _handle_missing_value(cls, yaml_file, section, option_name,
exp_value, critical):
msg = "There is an issue with Ansible configuration in" \
" {}. Option '{}' with value of '{}' not found in" \
" section '{}'".format(yaml_file, option_name,
exp_value, section)
if critical:
raise IRValidatorException(msg)
else:
logger.warn(msg)
@staticmethod
def _convert_config_to_dict(config):
config_dict = {}
for section in config.sections():
if section not in config_dict:
config_dict[section] = {}
for option in config.options(section):
option_value = config.get(section, option)
try:
option_value = int(option_value)
except ValueError:
pass
config_dict[section][option] = option_value
return config_dict
|
infrared/core/utils/validators.py
|
codereval_python_data_209
|
Resolves the include dict directive in the spec files.
def _include_groups(self, parser_dict):
"""Resolves the include dict directive in the spec files. """
for group in parser_dict.get('include_groups', []):
# ensure we have that group
grp_dict = next(
(grp for grp in self.spec_dict.get('shared_groups', [])
if grp['title'] == group),
None)
if grp_dict is None:
raise SpecParserException(
"Unable to include group '{}' in '{}' parser. "
"Group was not found!".format(
group,
parser_dict['name']))
for option in grp_dict.get('options', {}).values():
option['is_shared_group_option'] = True
parser_groups_list = parser_dict.get('groups', [])
parser_groups_list.append(deepcopy(grp_dict))
parser_dict['groups'] = parser_groups_list
from copy import deepcopy
from infrared.core.utils.exceptions import SpecParserException
OptionState = dict(
UNRECOGNIZED='unrecognized',
IS_SET='is set',
NOT_SET='is not set'
)
class SpecDictHelper(object):
"""Controls the spec dicts and provides useful methods to get spec info."""
def __init__(self, spec_dict):
self.spec_dict = spec_dict
# make structure of the dict flat
# 1. handle include_groups directive in main parser
parser_dict = self.spec_dict
self._include_groups(parser_dict)
# 2. Include groups for all subparsers
for subparser_name, subparser_dict in parser_dict.get(
'subparsers', {}).items():
self._include_groups(subparser_dict)
def iterate_parsers(self):
"""Iterates over the main parsers and subparsers. """
for subparser_name, subparser_dict in self.spec_dict.get(
'subparsers', {}).items():
yield dict(name=subparser_name, **subparser_dict)
def iterate_option_specs(self):
"""Iterates over all the option specs.
Returns pair of parser and option on every iteration.
"""
for parser in self.iterate_parsers():
for spec_option in self._get_all_options_spec(parser):
yield parser, spec_option
@staticmethod
def _get_all_options_spec(parser_dict):
"""Gets all the options specification as the list of dicts. """
result = []
for group in parser_dict.get('groups', []):
for option_name, option_dict in group.get('options', {}).items():
result.append(dict(name=option_name, **option_dict))
for option_name, option_dict in parser_dict.get('options', {}).items():
result.append(dict(name=option_name, **option_dict))
return result
def get_parser_option_specs(self, command_name):
"""Gets all the options for the specified command
:param command_name: the command name (main, virsh, ospd, etc...)
:return: the list of all command options
"""
options = []
for parser in self.iterate_parsers():
if parser['name'] == command_name:
options = self._get_all_options_spec(parser)
break
return options
def get_option_spec(self, command_name, argument_name):
"""Gets the specification for the specified option name. """
options = self.get_parser_option_specs(command_name)
return next((opt for opt in options
if opt['name'] == argument_name), {})
def get_option_state(self, command_name, option_name, args):
"""Gets the option state.
:param command_name: The command name
:param option_name: The option name to analyze
:param args: The received arguments.
"""
option_spec = self.get_option_spec(command_name, option_name)
if not option_spec:
res = OptionState['UNRECOGNIZED']
elif option_name not in args.get(command_name, {}):
res = OptionState['NOT_SET']
else:
option_value = args[command_name][option_name]
if option_spec.get('action', '') in ['store_true'] \
and option_value is False:
res = OptionState['NOT_SET']
else:
res = OptionState['IS_SET']
return res
def _include_groups(self, parser_dict):
"""Resolves the include dict directive in the spec files. """
for group in parser_dict.get('include_groups', []):
# ensure we have that group
grp_dict = next(
(grp for grp in self.spec_dict.get('shared_groups', [])
if grp['title'] == group),
None)
if grp_dict is None:
raise SpecParserException(
"Unable to include group '{}' in '{}' parser. "
"Group was not found!".format(
group,
parser_dict['name']))
for option in grp_dict.get('options', {}).values():
option['is_shared_group_option'] = True
parser_groups_list = parser_dict.get('groups', [])
parser_groups_list.append(deepcopy(grp_dict))
parser_dict['groups'] = parser_groups_list
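# A short worked example: a shared group referenced through "include_groups" is
# copied into the subparser's own "groups" list; the group and option names are
# illustrative only.
example_spec = {
    'shared_groups': [
        {'title': 'common', 'options': {'verbose': {'action': 'store_true'}}},
    ],
    'subparsers': {
        'deploy': {'include_groups': ['common'], 'options': {}},
    },
}
spec_helper = SpecDictHelper(example_spec)
assert example_spec['subparsers']['deploy']['groups'][0]['title'] == 'common'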
|
infrared/core/inspector/helper.py
|
codereval_python_data_210
|
Resolve arguments' values from spec and other sources.
def get_spec_defaults(self):
"""Resolve arguments' values from spec and other sources. """
def spec_default_getter(option):
"""Getter function to retrieve the default value from spec.
:param option: argument name
"""
# first try to get environment variable with IR_ prefix
default_value = SpecParser.get_env_option(option['name'])
if default_value is not None:
LOG.info(
"[environ] Loading '{0}' default value"
" '{1}' from the environment variable".format(
option['name'], default_value))
elif option.get('default', None) is not None:
default_value = option['default']
elif option.get('action', None) in ['store_true']:
default_value = False
return default_value
return self._get_defaults(spec_default_getter)
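A minimal standalone sketch (hypothetical option dicts, not infrared code) of the precedence implemented by spec_default_getter: an IR_-prefixed environment variable wins, then the spec 'default' key, then False for 'store_true' actions:
import os

def resolve_default(option):
    # Mirrors spec_default_getter's precedence, shown in isolation.
    env_value = os.environ.get('IR_' + option['name'].upper().replace('-', '_'))
    if env_value is not None:
        return env_value
    if option.get('default') is not None:
        return option['default']
    if option.get('action') == 'store_true':
        return False
    return None

os.environ['IR_SSL_CERT'] = '/tmp/cert.pem'   # overrides the spec default
assert resolve_default({'name': 'ssl-cert', 'default': '/etc/cert.pem'}) == '/tmp/cert.pem'
assert resolve_default({'name': 'port', 'default': 8080}) == 8080
assert resolve_default({'name': 'debug', 'action': 'store_true'}) is False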
import collections
import os
from six.moves import configparser
from string import Template
import yaml
from infrared.core.cli.cli import CliParser
from infrared.core.cli.cli import COMPLEX_TYPES
from infrared.core.inspector import helper
from infrared.core.utils import dict_utils
from infrared.core.utils import exceptions
from infrared.core.utils import logger
LOG = logger.LOG
class SpecParser(object):
"""Parses input arguments from different sources (cli, answers file). """
@classmethod
def from_plugin(cls, subparser, plugin, base_groups):
"""Reads spec & vars from plugin and constructs the parser instance
:param subparser: argparse.subparser to extend
:param plugin: InfraredPlugin object
:param base_groups: dict, included groups
:return: SpecParser object based on given plugin spec & vars
"""
spec_dict = base_groups or {}
with open(plugin.spec) as stream:
spec = yaml.safe_load(stream) or {}
dict_utils.dict_merge(
base_groups,
spec,
dict_utils.ConflictResolver.unique_append_list_resolver)
# The "try-excpet" block here is for adding spec file path if it
# includes an unsupported option type
try:
return SpecParser(subparser, spec_dict, plugin.vars_dir,
plugin.defaults_dir, plugin.path)
except exceptions.IRUnsupportedSpecOptionType as ex:
ex.message += ' in file: {}'.format(plugin.spec)
raise ex
def __init__(self, subparser, spec_dict, vars_dir, defaults_dir,
plugin_path):
"""Constructor.
:param subparser: argparse.subparser to extend
:param spec_dict: dict with CLI description
:param vars_dir: Path to plugin's vars dir
:param defaults_dir: Path to plugin's defaults dir
"""
self.vars = vars_dir
self.defaults = defaults_dir
self.plugin_path = plugin_path
self.spec_helper = helper.SpecDictHelper(spec_dict)
# create parser
self.parser = CliParser.create_parser(self, subparser)
def add_shared_groups(self, list_of_groups):
"""Adds the user defined shared groups
:param list_of_groups: list, of group dicts
"""
shared_groups = self.spec_helper.spec_dict.get('shared_groups', [])
        shared_groups.extend(list_of_groups)
self.spec_helper.spec_dict['shared_groups'] = shared_groups
def _get_defaults(self, default_getter_func):
"""Resolve arguments' values from cli or answers file.
:param default_getter_func: callable. will be called for all the
available options in spec file.
"""
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
default_value = default_getter_func(option)
if default_value is not None:
sub = parser['name']
result[sub][option['name']] = default_value
return result
def get_spec_defaults(self):
"""Resolve arguments' values from spec and other sources. """
def spec_default_getter(option):
"""Getter function to retrieve the default value from spec.
:param option: argument name
"""
# first try to get environment variable with IR_ prefix
default_value = SpecParser.get_env_option(option['name'])
if default_value is not None:
LOG.info(
"[environ] Loading '{0}' default value"
" '{1}' from the environment variable".format(
option['name'], default_value))
elif option.get('default', None) is not None:
default_value = option['default']
elif option.get('action', None) in ['store_true']:
default_value = False
return default_value
return self._get_defaults(spec_default_getter)
@staticmethod
def get_env_option(name):
"""Try get """
return os.environ.get('IR_' + name.upper().replace('-', '_'))
def get_deprecated_args(self):
"""Returning dict with options which deprecate others. """
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
if option.get('deprecates') is not None:
result[option.get('deprecates')] = option.get('name')
return result
@staticmethod
def parse_env_variable_from_file(value):
if isinstance(value, str):
t = Template(value)
try:
value = t.substitute(os.environ)
except KeyError as undefined_var:
raise exceptions.IRAnswersFileEnvVarNotDefined(undefined_var)
return value
def get_answers_file_args(self, cli_args):
"""Resolve arguments' values from answers INI file. """
file_result = {}
args_to_remove = []
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
file_result[parser_name] = file_result.get(parser_name, {})
if option_spec and option_spec.get(
'action', '') == 'read-answers':
# Iterate over arguments supplied by file
for parsed_arg in parser_dict[arg_name]:
# Supplied arguments' value can be a list
if isinstance(parser_dict[arg_name][parsed_arg], list):
i = 0
                        # Iterate over argument values list
for parsed_value in parser_dict[arg_name][parsed_arg]:
parser_dict[arg_name][parsed_arg][i] = \
SpecParser.parse_env_variable_from_file(parsed_value)
i += 1
else:
parser_dict[arg_name][parsed_arg] = \
SpecParser.parse_env_variable_from_file(parser_dict[arg_name][parsed_arg])
# we have config option. saving it.
self._convert_non_cli_args(
parser_name, parser_dict[arg_name])
dict_utils.dict_merge(
file_result[parser_name],
parser_dict[arg_name])
# remove from cli args
args_to_remove.append((parser_name, arg_name))
# remove parser dict outside loop to avoid iteration dict modification
for parser_name, arg_name in args_to_remove:
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in cli_args and spec_parser['name'] == parser_name:
parser_dict = cli_args[spec_parser['name']]
parser_dict.pop(arg_name)
break
return file_result
def generate_answers_file(self, cli_args, spec_defaults):
"""Generates answers INI file
        :param cli_args: dict, cli arguments.
:param spec_defaults: the default values.
"""
def put_option(config, parser_name, option_name, value):
for opt_help in option.get('help', '').split('\n'):
help_opt = '# ' + opt_help
# add help comment
if config.has_option(parser_name, help_opt):
config.remove_option(parser_name, help_opt)
config.set(
parser_name, help_opt)
if config.has_option(parser_name, option_name):
value = config.get(parser_name, option_name)
config.remove_option(parser_name, option_name)
config.set(
parser_name,
option_name,
str(value))
file_generated = False
# load generate answers file for all the parsers
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
if option_spec and option_spec.get(
'action', '') == 'generate-answers':
options_to_save = \
self.spec_helper.get_parser_option_specs(parser_name)
out_answers = configparser.ConfigParser(allow_no_value=True)
if not out_answers.has_section(parser_name):
out_answers.add_section(parser_name)
for option in options_to_save:
opt_name = option['name']
if opt_name in parser_dict:
put_option(
out_answers,
parser_name,
opt_name,
parser_dict[opt_name])
elif opt_name in spec_defaults[parser_name]:
put_option(
out_answers,
parser_name,
opt_name,
spec_defaults[parser_name][opt_name])
elif option.get('required', False):
put_option(
out_answers,
parser_name,
'# ' + opt_name,
"Required argument. "
"Edit with one of the allowed values OR "
"override with "
"CLI: --{}=<option>".format(opt_name))
# write to file
with open(arg_value, 'w') as answers_file:
out_answers.write(answers_file)
file_generated = True
return file_generated
def resolve_custom_types(self, args):
"""Transforms the arguments with custom types
:param args: the list of received arguments.
"""
for parser_name, parser_dict in args.items():
spec_complex_options = [opt for opt in
self.spec_helper.get_parser_option_specs(
parser_name) if
opt.get('type', None) in COMPLEX_TYPES]
for spec_option in spec_complex_options:
option_name = spec_option['name']
if option_name in parser_dict:
# we have custom type to resolve
type_name = spec_option['type']
option_value = parser_dict[option_name]
action = self.create_complex_argumet_type(
parser_name,
type_name,
option_name,
spec_option)
# resolving value
parser_dict[option_name] = action.resolve(option_value)
def create_complex_argumet_type(self, subcommand, type_name, option_name,
spec_option):
"""Build the complex argument type
:param subcommand: the command name
:param type_name: the complex type name
:param option_name: the option name
:param spec_option: option's specifications
:return: the complex type instance
"""
complex_action = COMPLEX_TYPES.get(
type_name, None)
if complex_action is None:
raise exceptions.SpecParserException(
"Unknown complex type: {}".format(type_name))
return complex_action(
option_name,
(self.vars, self.defaults, self.plugin_path),
subcommand,
spec_option)
def parse_args(self, arg_parser, args=None):
"""Parses all the arguments (cli, answers file)
        :return: None, if ``--generate-answers-file`` is in arg_parser
        :return: (dict, dict, dict):
            * nested arguments dict (arguments to pass to the playbooks)
            * control arguments dict (arguments to control the IR logic)
            * custom arguments dict (custom ansible variables to pass as-is)
"""
spec_defaults = self.get_spec_defaults()
cli_args = CliParser.parse_cli_input(arg_parser, args)
file_args = self.get_answers_file_args(cli_args)
# generate answers file and exit
if self.generate_answers_file(cli_args, spec_defaults):
LOG.warning("Answers file generated. Exiting.")
# print warnings when something was overridden from non-cli source.
self.validate_arg_sources(cli_args, file_args,
spec_defaults)
# print warnings for deprecated
self.validate_arg_deprecation(cli_args, file_args)
# now filter defaults to have only parser defined in cli
defaults = dict((key, spec_defaults[key])
for key in cli_args.keys() if
key in spec_defaults)
# copy cli args with the same name to all parser groups
self._merge_duplicated_cli_args(cli_args)
self._merge_duplicated_cli_args(file_args)
dict_utils.dict_merge(defaults, file_args)
dict_utils.dict_merge(defaults, cli_args)
self.validate_requires_args(defaults)
self.validate_length_args(defaults)
self.validate_choices_args(defaults)
self.validate_min_max_args(defaults)
# now resolve complex types.
self.resolve_custom_types(defaults)
nested, control, custom = \
self.get_nested_custom_and_control_args(defaults)
return nested, control, custom
def validate_arg_deprecation(self, cli_args, answer_file_args):
"""Validates and prints the deprecated arguments.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
"""
for deprecated, deprecates in self.get_deprecated_args().items():
for input_args in (answer_file_args.items(), cli_args.items()):
for command, command_dict in input_args:
if deprecated in command_dict:
if deprecates in command_dict:
raise exceptions.IRDeprecationException(
"[{}] Argument '{}' deprecates '{}',"
" please use only the new one.".format(
command, deprecated, deprecates))
if deprecated in answer_file_args[command]:
answer_file_args[command][deprecates] = \
answer_file_args[command][deprecated]
if deprecated in cli_args[command]:
cli_args[command][deprecates] = \
cli_args[command][deprecated]
LOG.warning(
"[{}] Argument '{}' was deprecated,"
" please use '{}'.".format(
command, deprecated, deprecates))
@staticmethod
def validate_arg_sources(cli_args, answer_file_args, spec_defaults):
"""Validates and prints the arguments' source.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
:param spec_defaults: the default values from spec files
"""
def show_diff(diff, command_name, cmd_dict, source_name):
if diff:
for arg_name in diff:
value = cmd_dict[arg_name]
LOG.info(
"[{}] Argument '{}' was set to"
" '{}' from the {} source.".format(
command_name, arg_name, value, source_name))
for command, command_dict in cli_args.items():
file_dict = answer_file_args.get(command, {})
file_diff = set(file_dict.keys()) - set(command_dict.keys())
show_diff(file_diff, command, file_dict, 'answers file')
def_dict = spec_defaults.get(command, {})
default_diff = set(def_dict.keys()) - set(
command_dict.keys()) - file_diff
show_diff(default_diff, command, def_dict, 'spec defaults')
def _get_conditionally_required_args(self, command_name, options_spec,
args):
"""List arguments with ``required_when`` condition matched.
:param command_name: the command name.
:param options_spec: the list of command spec options.
:param args: the received input arguments
:return: list, list of argument names with matched ``required_when``
condition
"""
opts_names = [option_spec['name'] for option_spec in options_spec]
missing_args = []
for option_spec in options_spec:
option_results = []
if option_spec and 'required_when' in option_spec:
req_when_args = [option_spec['required_when']] \
if not type(option_spec['required_when']) is list \
else option_spec['required_when']
# validate conditions
for req_when_arg in req_when_args:
splited_args_list = req_when_arg.split()
for idx, req_arg in enumerate(splited_args_list):
if req_arg in opts_names:
splited_args_list[idx] = \
args.get(command_name, {}).get(req_arg.strip())
if splited_args_list[idx] is None:
option_results.append(False)
break
splited_args_list[idx] = str(splited_args_list[idx])
if (splited_args_list[idx] not in ['and', 'or'] and
not any(
(c in '<>=') for c in splited_args_list[idx])):
splited_args_list[idx] = "'{0}'".format(
yaml.safe_load(splited_args_list[idx]))
else:
option_results.append(
eval(' '.join(splited_args_list)))
if all(option_results) and \
self.spec_helper.get_option_state(
command_name,
option_spec['name'],
args) == helper.OptionState['NOT_SET']:
missing_args.append(option_spec['name'])
return missing_args
def validate_requires_args(self, args):
"""Check if all the required arguments have been provided. """
silent_args = self.get_silent_args(args)
def validate_parser(parser_name, expected_options, parser_args):
"""Helper method to resolve dict_merge. """
result = collections.defaultdict(list)
condition_req_args = self._get_conditionally_required_args(
parser_name, expected_options, args)
for option in expected_options:
name = option['name']
# check required options.
if (option.get('required', False) and
name not in parser_args or
option['name'] in condition_req_args) and \
name not in silent_args:
result[parser_name].append(name)
return result
res = {}
for command_data in self.spec_helper.iterate_parsers():
cmd_name = command_data['name']
if cmd_name in args:
dict_utils.dict_merge(
res,
validate_parser(
cmd_name,
self.spec_helper.get_parser_option_specs(cmd_name),
args[cmd_name]))
missing_args = dict((cmd_name, args)
for cmd_name, args in res.items() if len(args) > 0)
if missing_args:
raise exceptions.IRRequiredArgsMissingException(missing_args)
def validate_length_args(self, args):
"""Check if value of arguments is not longer than length specified.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'length' not in spec_option:
                    # skip options that do not contain length
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve length
length = spec_option['length']
option_value = parser_dict[option_name]
if len(option_value) > int(length):
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
length
))
if invalid_options:
# raise exception with all arguments that exceed length
raise exceptions.IRInvalidLengthException(invalid_options)
def validate_choices_args(self, args):
"""Check if value of choice arguments is one of the available choices.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'choices' not in spec_option:
                    # skip options that do not contain choices
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve choices
choices = spec_option['choices']
option_value = parser_dict[option_name]
if option_value not in choices:
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
choices
))
if invalid_options:
            # raise exception with all arguments that contain invalid choices
raise exceptions.IRInvalidChoiceException(invalid_options)
def validate_min_max_args(self, args):
"""Check if value of arguments is between minimum and maximum values.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if all([key not in spec_option
for key in ('maximum', 'minimum')]):
                    # skip options that do not contain minimum or maximum
continue
option_name = spec_option['name']
if option_name in parser_dict:
option_value = parser_dict[option_name]
min_value = spec_option.get('minimum')
max_value = spec_option.get('maximum')
# handle empty values in spec files which load as None
min_value = '' if 'minimum' in spec_option \
and min_value is None else min_value
max_value = '' if 'maximum' in spec_option \
and max_value is None else max_value
values = {
"value": option_value,
"maximum": max_value,
"minimum": min_value
}
# make sure that values are numbers
is_all_values_numbers = True
for name, num in values.items():
if num is not None \
and (isinstance(num, bool) or
not isinstance(num, (int, float))):
invalid_options.append((
option_name,
name,
"number",
type(num).__name__
))
is_all_values_numbers = False
if not is_all_values_numbers:
# don't continue to min max checks since some of the
# values are not numbers
continue
# check bigger than minimum
if min_value is not None and option_value < min_value:
invalid_options.append((
option_name,
"minimum",
min_value,
option_value
))
# check smaller than maximum
if max_value is not None and option_value > max_value:
invalid_options.append((
option_name,
"maximum",
max_value,
option_value
))
if invalid_options:
            # raise exception with all arguments outside the allowed min/max range
raise exceptions.IRInvalidMinMaxRangeException(invalid_options)
def get_silent_args(self, args):
"""list of silenced argument
:param args: The received arguments.
:return: list, slienced argument names
"""
silent_args_names = []
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if arg_spec and 'silent' in arg_spec and \
self.spec_helper.get_option_state(
parser_name,
arg_name,
args) == helper.OptionState['IS_SET']:
silent_args_names.extend(arg_spec['silent'])
return list(set(silent_args_names))
def get_nested_custom_and_control_args(self, args):
"""Split input arguments to control nested and custom.
Controls arguments: control the IR behavior. These arguments
will not be put into the spec yml file
Nested arguments: are used by the Ansible playbooks and will be put
into the spec yml file.
Custom arguments: Custom ansible variables to be used instead of the
normal nested usage.
:param args: the collected list of args.
:return: (dict, dict): flat dicts (control_args, nested_args)
"""
# returns flat dicts
nested = {}
control_args = {}
custom_args = {}
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if all([arg_spec, arg_spec.get('type', None),
arg_spec.get('type', None) in
[ctype_name for ctype_name, klass in
COMPLEX_TYPES.items() if klass.is_nested]
]) or ('is_shared_group_option' not in arg_spec):
if arg_name in nested:
LOG.warning(
"Duplicated nested argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, nested[arg_name]))
elif arg_name in custom_args:
LOG.warning(
"Duplicated custom argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, custom_args[arg_name]))
else:
if "ansible_variable" in arg_spec:
custom_args[arg_spec["ansible_variable"]] = arg_value
else:
nested[arg_name] = arg_value
else:
if arg_name in control_args:
LOG.warning(
"Duplicated control argument found: '{}'. Using "
"old value: '{}'".format(
arg_name, control_args[arg_name]))
else:
control_args[arg_name] = arg_value
return nested, control_args, custom_args
def _iterate_received_arguments(self, args):
"""Iterator helper method over all the received arguments
:return: yields tuple:
(spec name, spec dict,
argument name, argument value, argument spec)
"""
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in args:
parser_dict = args[spec_parser['name']]
for arg_name, arg_val in parser_dict.items():
arg_spec = self.spec_helper.get_option_spec(
spec_parser['name'], arg_name)
yield (spec_parser['name'], parser_dict,
arg_name, arg_val, arg_spec)
def _convert_non_cli_args(self, parser_name, values_dict):
"""Casts arguments to correct types by modifying values_dict param.
By default all the values are strings.
:param parser_name: The command name, e.g. main, virsh, ospd, etc
        :param values_dict: The dict with the arguments
"""
for opt_name, opt_value in values_dict.items():
file_option_spec = self.spec_helper.get_option_spec(
parser_name, opt_name)
if file_option_spec.get('type', None) in ['int', ] or \
file_option_spec.get('action', None) in ['count', ]:
values_dict[opt_name] = int(opt_value)
def _merge_duplicated_cli_args(self, cli_args):
"""Merge duplicated arguments to all the parsers
        This is needed to handle control args shared among several parsers,
for example, verbose, inventory
"""
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(cli_args):
for parser_name2, parser_dict2 in cli_args.items():
if all([parser_name2, parser_name != parser_name2,
arg_name not in parser_dict2]):
if self.spec_helper.get_option_spec(parser_name2,
arg_name):
parser_dict2[arg_name] = arg_value
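A short standalone sketch (stdlib only, hypothetical variable names and values) of the string.Template substitution that parse_env_variable_from_file performs on answers-file values; an undefined variable raises KeyError, which the parser wraps in IRAnswersFileEnvVarNotDefined:
import os
from string import Template

os.environ['IR_IMAGE_URL'] = 'http://example.com/rhel.qcow2'
value = Template('--image=${IR_IMAGE_URL}').substitute(os.environ)
assert value == '--image=http://example.com/rhel.qcow2'

try:
    Template('$SOME_UNDEFINED_VAR').substitute(os.environ)
except KeyError as undefined_var:
    # inspector.py would re-raise this as IRAnswersFileEnvVarNotDefined
    print('undefined answers-file variable:', undefined_var)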
|
infrared/core/inspector/inspector.py
|
codereval_python_data_211
|
Returning dict with options which deprecate others.
def get_deprecated_args(self):
"""Returning dict with options which deprecate others. """
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
if option.get('deprecates') is not None:
result[option.get('deprecates')] = option.get('name')
return result
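A minimal sketch (hypothetical option specs, not infrared code) of the mapping this method builds: keys are the deprecated option names, values are the options that replace them:
option_specs = [
    {'name': 'images-task', 'deprecates': 'images-url'},
    {'name': 'version'},
]

deprecated_map = {}
for option in option_specs:
    if option.get('deprecates') is not None:
        # old (deprecated) name -> new name, as in get_deprecated_args
        deprecated_map[option['deprecates']] = option['name']

assert deprecated_map == {'images-url': 'images-task'}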
import collections
import os
from six.moves import configparser
from string import Template
import yaml
from infrared.core.cli.cli import CliParser
from infrared.core.cli.cli import COMPLEX_TYPES
from infrared.core.inspector import helper
from infrared.core.utils import dict_utils
from infrared.core.utils import exceptions
from infrared.core.utils import logger
LOG = logger.LOG
class SpecParser(object):
"""Parses input arguments from different sources (cli, answers file). """
@classmethod
def from_plugin(cls, subparser, plugin, base_groups):
"""Reads spec & vars from plugin and constructs the parser instance
:param subparser: argparse.subparser to extend
:param plugin: InfraredPlugin object
:param base_groups: dict, included groups
:return: SpecParser object based on given plugin spec & vars
"""
spec_dict = base_groups or {}
with open(plugin.spec) as stream:
spec = yaml.safe_load(stream) or {}
dict_utils.dict_merge(
base_groups,
spec,
dict_utils.ConflictResolver.unique_append_list_resolver)
# The "try-excpet" block here is for adding spec file path if it
# includes an unsupported option type
try:
return SpecParser(subparser, spec_dict, plugin.vars_dir,
plugin.defaults_dir, plugin.path)
except exceptions.IRUnsupportedSpecOptionType as ex:
ex.message += ' in file: {}'.format(plugin.spec)
raise ex
def __init__(self, subparser, spec_dict, vars_dir, defaults_dir,
plugin_path):
"""Constructor.
:param subparser: argparse.subparser to extend
:param spec_dict: dict with CLI description
:param vars_dir: Path to plugin's vars dir
:param defaults_dir: Path to plugin's defaults dir
"""
self.vars = vars_dir
self.defaults = defaults_dir
self.plugin_path = plugin_path
self.spec_helper = helper.SpecDictHelper(spec_dict)
# create parser
self.parser = CliParser.create_parser(self, subparser)
def add_shared_groups(self, list_of_groups):
"""Adds the user defined shared groups
:param list_of_groups: list, of group dicts
"""
shared_groups = self.spec_helper.spec_dict.get('shared_groups', [])
        shared_groups.extend(list_of_groups)
self.spec_helper.spec_dict['shared_groups'] = shared_groups
def _get_defaults(self, default_getter_func):
"""Resolve arguments' values from cli or answers file.
:param default_getter_func: callable. will be called for all the
available options in spec file.
"""
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
default_value = default_getter_func(option)
if default_value is not None:
sub = parser['name']
result[sub][option['name']] = default_value
return result
def get_spec_defaults(self):
"""Resolve arguments' values from spec and other sources. """
def spec_default_getter(option):
"""Getter function to retrieve the default value from spec.
:param option: argument name
"""
# first try to get environment variable with IR_ prefix
default_value = SpecParser.get_env_option(option['name'])
if default_value is not None:
LOG.info(
"[environ] Loading '{0}' default value"
" '{1}' from the environment variable".format(
option['name'], default_value))
elif option.get('default', None) is not None:
default_value = option['default']
elif option.get('action', None) in ['store_true']:
default_value = False
return default_value
return self._get_defaults(spec_default_getter)
@staticmethod
def get_env_option(name):
"""Try get """
return os.environ.get('IR_' + name.upper().replace('-', '_'))
def get_deprecated_args(self):
"""Returning dict with options which deprecate others. """
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
if option.get('deprecates') is not None:
result[option.get('deprecates')] = option.get('name')
return result
@staticmethod
def parse_env_variable_from_file(value):
if isinstance(value, str):
t = Template(value)
try:
value = t.substitute(os.environ)
except KeyError as undefined_var:
raise exceptions.IRAnswersFileEnvVarNotDefined(undefined_var)
return value
def get_answers_file_args(self, cli_args):
"""Resolve arguments' values from answers INI file. """
file_result = {}
args_to_remove = []
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
file_result[parser_name] = file_result.get(parser_name, {})
if option_spec and option_spec.get(
'action', '') == 'read-answers':
# Iterate over arguments supplied by file
for parsed_arg in parser_dict[arg_name]:
# Supplied arguments' value can be a list
if isinstance(parser_dict[arg_name][parsed_arg], list):
i = 0
                        # Iterate over argument values list
for parsed_value in parser_dict[arg_name][parsed_arg]:
parser_dict[arg_name][parsed_arg][i] = \
SpecParser.parse_env_variable_from_file(parsed_value)
i += 1
else:
parser_dict[arg_name][parsed_arg] = \
SpecParser.parse_env_variable_from_file(parser_dict[arg_name][parsed_arg])
# we have config option. saving it.
self._convert_non_cli_args(
parser_name, parser_dict[arg_name])
dict_utils.dict_merge(
file_result[parser_name],
parser_dict[arg_name])
# remove from cli args
args_to_remove.append((parser_name, arg_name))
# remove parser dict outside loop to avoid iteration dict modification
for parser_name, arg_name in args_to_remove:
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in cli_args and spec_parser['name'] == parser_name:
parser_dict = cli_args[spec_parser['name']]
parser_dict.pop(arg_name)
break
return file_result
def generate_answers_file(self, cli_args, spec_defaults):
"""Generates answers INI file
        :param cli_args: dict, cli arguments.
:param spec_defaults: the default values.
"""
def put_option(config, parser_name, option_name, value):
for opt_help in option.get('help', '').split('\n'):
help_opt = '# ' + opt_help
# add help comment
if config.has_option(parser_name, help_opt):
config.remove_option(parser_name, help_opt)
config.set(
parser_name, help_opt)
if config.has_option(parser_name, option_name):
value = config.get(parser_name, option_name)
config.remove_option(parser_name, option_name)
config.set(
parser_name,
option_name,
str(value))
file_generated = False
# load generate answers file for all the parsers
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
if option_spec and option_spec.get(
'action', '') == 'generate-answers':
options_to_save = \
self.spec_helper.get_parser_option_specs(parser_name)
out_answers = configparser.ConfigParser(allow_no_value=True)
if not out_answers.has_section(parser_name):
out_answers.add_section(parser_name)
for option in options_to_save:
opt_name = option['name']
if opt_name in parser_dict:
put_option(
out_answers,
parser_name,
opt_name,
parser_dict[opt_name])
elif opt_name in spec_defaults[parser_name]:
put_option(
out_answers,
parser_name,
opt_name,
spec_defaults[parser_name][opt_name])
elif option.get('required', False):
put_option(
out_answers,
parser_name,
'# ' + opt_name,
"Required argument. "
"Edit with one of the allowed values OR "
"override with "
"CLI: --{}=<option>".format(opt_name))
# write to file
with open(arg_value, 'w') as answers_file:
out_answers.write(answers_file)
file_generated = True
return file_generated
def resolve_custom_types(self, args):
"""Transforms the arguments with custom types
:param args: the list of received arguments.
"""
for parser_name, parser_dict in args.items():
spec_complex_options = [opt for opt in
self.spec_helper.get_parser_option_specs(
parser_name) if
opt.get('type', None) in COMPLEX_TYPES]
for spec_option in spec_complex_options:
option_name = spec_option['name']
if option_name in parser_dict:
# we have custom type to resolve
type_name = spec_option['type']
option_value = parser_dict[option_name]
action = self.create_complex_argumet_type(
parser_name,
type_name,
option_name,
spec_option)
# resolving value
parser_dict[option_name] = action.resolve(option_value)
def create_complex_argumet_type(self, subcommand, type_name, option_name,
spec_option):
"""Build the complex argument type
:param subcommand: the command name
:param type_name: the complex type name
:param option_name: the option name
:param spec_option: option's specifications
:return: the complex type instance
"""
complex_action = COMPLEX_TYPES.get(
type_name, None)
if complex_action is None:
raise exceptions.SpecParserException(
"Unknown complex type: {}".format(type_name))
return complex_action(
option_name,
(self.vars, self.defaults, self.plugin_path),
subcommand,
spec_option)
def parse_args(self, arg_parser, args=None):
"""Parses all the arguments (cli, answers file)
        :return: None, if ``--generate-answers-file`` is in arg_parser
        :return: (dict, dict, dict):
            * nested arguments dict (arguments to pass to the playbooks)
            * control arguments dict (arguments to control the IR logic)
            * custom arguments dict (custom ansible variables to pass as-is)
"""
spec_defaults = self.get_spec_defaults()
cli_args = CliParser.parse_cli_input(arg_parser, args)
file_args = self.get_answers_file_args(cli_args)
# generate answers file and exit
if self.generate_answers_file(cli_args, spec_defaults):
LOG.warning("Answers file generated. Exiting.")
# print warnings when something was overridden from non-cli source.
self.validate_arg_sources(cli_args, file_args,
spec_defaults)
# print warnings for deprecated
self.validate_arg_deprecation(cli_args, file_args)
# now filter defaults to have only parser defined in cli
defaults = dict((key, spec_defaults[key])
for key in cli_args.keys() if
key in spec_defaults)
# copy cli args with the same name to all parser groups
self._merge_duplicated_cli_args(cli_args)
self._merge_duplicated_cli_args(file_args)
dict_utils.dict_merge(defaults, file_args)
dict_utils.dict_merge(defaults, cli_args)
self.validate_requires_args(defaults)
self.validate_length_args(defaults)
self.validate_choices_args(defaults)
self.validate_min_max_args(defaults)
# now resolve complex types.
self.resolve_custom_types(defaults)
nested, control, custom = \
self.get_nested_custom_and_control_args(defaults)
return nested, control, custom
def validate_arg_deprecation(self, cli_args, answer_file_args):
"""Validates and prints the deprecated arguments.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
"""
for deprecated, deprecates in self.get_deprecated_args().items():
for input_args in (answer_file_args.items(), cli_args.items()):
for command, command_dict in input_args:
if deprecated in command_dict:
if deprecates in command_dict:
raise exceptions.IRDeprecationException(
"[{}] Argument '{}' deprecates '{}',"
" please use only the new one.".format(
command, deprecated, deprecates))
if deprecated in answer_file_args[command]:
answer_file_args[command][deprecates] = \
answer_file_args[command][deprecated]
if deprecated in cli_args[command]:
cli_args[command][deprecates] = \
cli_args[command][deprecated]
LOG.warning(
"[{}] Argument '{}' was deprecated,"
" please use '{}'.".format(
command, deprecated, deprecates))
@staticmethod
def validate_arg_sources(cli_args, answer_file_args, spec_defaults):
"""Validates and prints the arguments' source.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
:param spec_defaults: the default values from spec files
"""
def show_diff(diff, command_name, cmd_dict, source_name):
if diff:
for arg_name in diff:
value = cmd_dict[arg_name]
LOG.info(
"[{}] Argument '{}' was set to"
" '{}' from the {} source.".format(
command_name, arg_name, value, source_name))
for command, command_dict in cli_args.items():
file_dict = answer_file_args.get(command, {})
file_diff = set(file_dict.keys()) - set(command_dict.keys())
show_diff(file_diff, command, file_dict, 'answers file')
def_dict = spec_defaults.get(command, {})
default_diff = set(def_dict.keys()) - set(
command_dict.keys()) - file_diff
show_diff(default_diff, command, def_dict, 'spec defaults')
def _get_conditionally_required_args(self, command_name, options_spec,
args):
"""List arguments with ``required_when`` condition matched.
:param command_name: the command name.
:param options_spec: the list of command spec options.
:param args: the received input arguments
:return: list, list of argument names with matched ``required_when``
condition
"""
opts_names = [option_spec['name'] for option_spec in options_spec]
missing_args = []
for option_spec in options_spec:
option_results = []
if option_spec and 'required_when' in option_spec:
req_when_args = [option_spec['required_when']] \
if not type(option_spec['required_when']) is list \
else option_spec['required_when']
# validate conditions
for req_when_arg in req_when_args:
splited_args_list = req_when_arg.split()
for idx, req_arg in enumerate(splited_args_list):
if req_arg in opts_names:
splited_args_list[idx] = \
args.get(command_name, {}).get(req_arg.strip())
if splited_args_list[idx] is None:
option_results.append(False)
break
splited_args_list[idx] = str(splited_args_list[idx])
if (splited_args_list[idx] not in ['and', 'or'] and
not any(
(c in '<>=') for c in splited_args_list[idx])):
splited_args_list[idx] = "'{0}'".format(
yaml.safe_load(splited_args_list[idx]))
else:
option_results.append(
eval(' '.join(splited_args_list)))
if all(option_results) and \
self.spec_helper.get_option_state(
command_name,
option_spec['name'],
args) == helper.OptionState['NOT_SET']:
missing_args.append(option_spec['name'])
return missing_args
def validate_requires_args(self, args):
"""Check if all the required arguments have been provided. """
silent_args = self.get_silent_args(args)
def validate_parser(parser_name, expected_options, parser_args):
"""Helper method to resolve dict_merge. """
result = collections.defaultdict(list)
condition_req_args = self._get_conditionally_required_args(
parser_name, expected_options, args)
for option in expected_options:
name = option['name']
# check required options.
if (option.get('required', False) and
name not in parser_args or
option['name'] in condition_req_args) and \
name not in silent_args:
result[parser_name].append(name)
return result
res = {}
for command_data in self.spec_helper.iterate_parsers():
cmd_name = command_data['name']
if cmd_name in args:
dict_utils.dict_merge(
res,
validate_parser(
cmd_name,
self.spec_helper.get_parser_option_specs(cmd_name),
args[cmd_name]))
missing_args = dict((cmd_name, args)
for cmd_name, args in res.items() if len(args) > 0)
if missing_args:
raise exceptions.IRRequiredArgsMissingException(missing_args)
def validate_length_args(self, args):
"""Check if value of arguments is not longer than length specified.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'length' not in spec_option:
                    # skip options that do not contain length
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve length
length = spec_option['length']
option_value = parser_dict[option_name]
if len(option_value) > int(length):
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
length
))
if invalid_options:
# raise exception with all arguments that exceed length
raise exceptions.IRInvalidLengthException(invalid_options)
def validate_choices_args(self, args):
"""Check if value of choice arguments is one of the available choices.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'choices' not in spec_option:
                    # skip options that do not contain choices
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve choices
choices = spec_option['choices']
option_value = parser_dict[option_name]
if option_value not in choices:
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
choices
))
if invalid_options:
            # raise exception with all arguments that contain invalid choices
raise exceptions.IRInvalidChoiceException(invalid_options)
def validate_min_max_args(self, args):
"""Check if value of arguments is between minimum and maximum values.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if all([key not in spec_option
for key in ('maximum', 'minimum')]):
                    # skip options that do not contain minimum or maximum
continue
option_name = spec_option['name']
if option_name in parser_dict:
option_value = parser_dict[option_name]
min_value = spec_option.get('minimum')
max_value = spec_option.get('maximum')
# handle empty values in spec files which load as None
min_value = '' if 'minimum' in spec_option \
and min_value is None else min_value
max_value = '' if 'maximum' in spec_option \
and max_value is None else max_value
values = {
"value": option_value,
"maximum": max_value,
"minimum": min_value
}
# make sure that values are numbers
is_all_values_numbers = True
for name, num in values.items():
if num is not None \
and (isinstance(num, bool) or
not isinstance(num, (int, float))):
invalid_options.append((
option_name,
name,
"number",
type(num).__name__
))
is_all_values_numbers = False
if not is_all_values_numbers:
# don't continue to min max checks since some of the
# values are not numbers
continue
# check bigger than minimum
if min_value is not None and option_value < min_value:
invalid_options.append((
option_name,
"minimum",
min_value,
option_value
))
# check smaller than maximum
if max_value is not None and option_value > max_value:
invalid_options.append((
option_name,
"maximum",
max_value,
option_value
))
if invalid_options:
            # raise exception with all arguments outside the allowed min/max range
raise exceptions.IRInvalidMinMaxRangeException(invalid_options)
def get_silent_args(self, args):
"""list of silenced argument
:param args: The received arguments.
:return: list, slienced argument names
"""
silent_args_names = []
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if arg_spec and 'silent' in arg_spec and \
self.spec_helper.get_option_state(
parser_name,
arg_name,
args) == helper.OptionState['IS_SET']:
silent_args_names.extend(arg_spec['silent'])
return list(set(silent_args_names))
def get_nested_custom_and_control_args(self, args):
"""Split input arguments to control nested and custom.
Controls arguments: control the IR behavior. These arguments
will not be put into the spec yml file
Nested arguments: are used by the Ansible playbooks and will be put
into the spec yml file.
Custom arguments: Custom ansible variables to be used instead of the
normal nested usage.
:param args: the collected list of args.
:return: (dict, dict): flat dicts (control_args, nested_args)
"""
# returns flat dicts
nested = {}
control_args = {}
custom_args = {}
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if all([arg_spec, arg_spec.get('type', None),
arg_spec.get('type', None) in
[ctype_name for ctype_name, klass in
COMPLEX_TYPES.items() if klass.is_nested]
]) or ('is_shared_group_option' not in arg_spec):
if arg_name in nested:
LOG.warning(
"Duplicated nested argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, nested[arg_name]))
elif arg_name in custom_args:
LOG.warning(
"Duplicated custom argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, custom_args[arg_name]))
else:
if "ansible_variable" in arg_spec:
custom_args[arg_spec["ansible_variable"]] = arg_value
else:
nested[arg_name] = arg_value
else:
if arg_name in control_args:
LOG.warning(
"Duplicated control argument found: '{}'. Using "
"old value: '{}'".format(
arg_name, control_args[arg_name]))
else:
control_args[arg_name] = arg_value
return nested, control_args, custom_args
def _iterate_received_arguments(self, args):
"""Iterator helper method over all the received arguments
:return: yields tuple:
(spec name, spec dict,
argument name, argument value, argument spec)
"""
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in args:
parser_dict = args[spec_parser['name']]
for arg_name, arg_val in parser_dict.items():
arg_spec = self.spec_helper.get_option_spec(
spec_parser['name'], arg_name)
yield (spec_parser['name'], parser_dict,
arg_name, arg_val, arg_spec)
def _convert_non_cli_args(self, parser_name, values_dict):
"""Casts arguments to correct types by modifying values_dict param.
By default all the values are strings.
:param parser_name: The command name, e.g. main, virsh, ospd, etc
        :param values_dict: The dict with the arguments
"""
for opt_name, opt_value in values_dict.items():
file_option_spec = self.spec_helper.get_option_spec(
parser_name, opt_name)
if file_option_spec.get('type', None) in ['int', ] or \
file_option_spec.get('action', None) in ['count', ]:
values_dict[opt_name] = int(opt_value)
def _merge_duplicated_cli_args(self, cli_args):
"""Merge duplicated arguments to all the parsers
        This is needed to handle control args shared among several parsers,
for example, verbose, inventory
"""
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(cli_args):
for parser_name2, parser_dict2 in cli_args.items():
if all([parser_name2, parser_name != parser_name2,
arg_name not in parser_dict2]):
if self.spec_helper.get_option_spec(parser_name2,
arg_name):
parser_dict2[arg_name] = arg_value
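A small standalone sketch (hypothetical section and option names; assumes six is installed, as inspector.py already requires) of the INI layout produced by generate_answers_file with configparser: one section per subcommand, '#'-prefixed no-value keys carrying help text or required-argument placeholders:
from six.moves import configparser

out_answers = configparser.ConfigParser(allow_no_value=True)
out_answers.add_section('virsh')
out_answers.set('virsh', '# hypervisor host to provision on')   # help comment line
out_answers.set('virsh', 'host', 'hypervisor.example.com')      # resolved value

with open('/tmp/answers-example.ini', 'w') as answers_file:
    out_answers.write(answers_file)
# /tmp/answers-example.ini now contains:
# [virsh]
# # hypervisor host to provision on
# host = hypervisor.example.com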
|
infrared/core/inspector/inspector.py
|
codereval_python_data_212
|
Validates and prints the deprecated arguments.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
def validate_arg_deprecation(self, cli_args, answer_file_args):
"""Validates and prints the deprecated arguments.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
"""
for deprecated, deprecates in self.get_deprecated_args().items():
for input_args in (answer_file_args.items(), cli_args.items()):
for command, command_dict in input_args:
if deprecated in command_dict:
if deprecates in command_dict:
raise exceptions.IRDeprecationException(
"[{}] Argument '{}' deprecates '{}',"
" please use only the new one.".format(
command, deprecated, deprecates))
if deprecated in answer_file_args[command]:
answer_file_args[command][deprecates] = \
answer_file_args[command][deprecated]
if deprecated in cli_args[command]:
cli_args[command][deprecates] = \
cli_args[command][deprecated]
LOG.warning(
"[{}] Argument '{}' was deprecated,"
" please use '{}'.".format(
command, deprecated, deprecates))
return cli_args
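A minimal sketch (hypothetical arguments, not infrared code) of the propagation this method performs once a deprecated-to-new mapping is known: the value supplied under the old name is copied to the new name in each argument source, and a warning is logged:
deprecated_map = {'images-url': 'images-task'}   # old name -> replacing name

cli_args = {'virsh': {'images-url': 'http://example.com/rhel.qcow2'}}
answer_file_args = {'virsh': {}}

for old_name, new_name in deprecated_map.items():
    for source in (cli_args, answer_file_args):
        for command, command_dict in source.items():
            if old_name in command_dict:
                # copy the value to the new argument name, as the method does
                command_dict[new_name] = command_dict[old_name]

assert cli_args['virsh']['images-task'] == 'http://example.com/rhel.qcow2'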
import collections
import os
from six.moves import configparser
from string import Template
import yaml
from infrared.core.cli.cli import CliParser
from infrared.core.cli.cli import COMPLEX_TYPES
from infrared.core.inspector import helper
from infrared.core.utils import dict_utils
from infrared.core.utils import exceptions
from infrared.core.utils import logger
LOG = logger.LOG
class SpecParser(object):
"""Parses input arguments from different sources (cli, answers file). """
@classmethod
def from_plugin(cls, subparser, plugin, base_groups):
"""Reads spec & vars from plugin and constructs the parser instance
:param subparser: argparse.subparser to extend
:param plugin: InfraredPlugin object
:param base_groups: dict, included groups
:return: SpecParser object based on given plugin spec & vars
"""
spec_dict = base_groups or {}
with open(plugin.spec) as stream:
spec = yaml.safe_load(stream) or {}
dict_utils.dict_merge(
base_groups,
spec,
dict_utils.ConflictResolver.unique_append_list_resolver)
# The "try-excpet" block here is for adding spec file path if it
# includes an unsupported option type
try:
return SpecParser(subparser, spec_dict, plugin.vars_dir,
plugin.defaults_dir, plugin.path)
except exceptions.IRUnsupportedSpecOptionType as ex:
ex.message += ' in file: {}'.format(plugin.spec)
raise ex
def __init__(self, subparser, spec_dict, vars_dir, defaults_dir,
plugin_path):
"""Constructor.
:param subparser: argparse.subparser to extend
:param spec_dict: dict with CLI description
:param vars_dir: Path to plugin's vars dir
:param defaults_dir: Path to plugin's defaults dir
"""
self.vars = vars_dir
self.defaults = defaults_dir
self.plugin_path = plugin_path
self.spec_helper = helper.SpecDictHelper(spec_dict)
# create parser
self.parser = CliParser.create_parser(self, subparser)
def add_shared_groups(self, list_of_groups):
"""Adds the user defined shared groups
:param list_of_groups: list, of group dicts
"""
shared_groups = self.spec_helper.spec_dict.get('shared_groups', [])
        shared_groups.extend(list_of_groups)
self.spec_helper.spec_dict['shared_groups'] = shared_groups
def _get_defaults(self, default_getter_func):
"""Resolve arguments' values from cli or answers file.
:param default_getter_func: callable. will be called for all the
available options in spec file.
"""
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
default_value = default_getter_func(option)
if default_value is not None:
sub = parser['name']
result[sub][option['name']] = default_value
return result
def get_spec_defaults(self):
"""Resolve arguments' values from spec and other sources. """
def spec_default_getter(option):
"""Getter function to retrieve the default value from spec.
:param option: argument name
"""
# first try to get environment variable with IR_ prefix
default_value = SpecParser.get_env_option(option['name'])
if default_value is not None:
LOG.info(
"[environ] Loading '{0}' default value"
" '{1}' from the environment variable".format(
option['name'], default_value))
elif option.get('default', None) is not None:
default_value = option['default']
elif option.get('action', None) in ['store_true']:
default_value = False
return default_value
return self._get_defaults(spec_default_getter)
@staticmethod
def get_env_option(name):
"""Try get """
return os.environ.get('IR_' + name.upper().replace('-', '_'))
def get_deprecated_args(self):
"""Returning dict with options which deprecate others. """
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
if option.get('deprecates') is not None:
result[option.get('deprecates')] = option.get('name')
return result
@staticmethod
def parse_env_variable_from_file(value):
if isinstance(value, str):
t = Template(value)
try:
value = t.substitute(os.environ)
except KeyError as undefined_var:
raise exceptions.IRAnswersFileEnvVarNotDefined(undefined_var)
return value
def get_answers_file_args(self, cli_args):
"""Resolve arguments' values from answers INI file. """
file_result = {}
args_to_remove = []
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
file_result[parser_name] = file_result.get(parser_name, {})
if option_spec and option_spec.get(
'action', '') == 'read-answers':
# Iterate over arguments supplied by file
for parsed_arg in parser_dict[arg_name]:
# Supplied arguments' value can be a list
if isinstance(parser_dict[arg_name][parsed_arg], list):
i = 0
                        # Iterate over argument values list
for parsed_value in parser_dict[arg_name][parsed_arg]:
parser_dict[arg_name][parsed_arg][i] = \
SpecParser.parse_env_variable_from_file(parsed_value)
i += 1
else:
parser_dict[arg_name][parsed_arg] = \
SpecParser.parse_env_variable_from_file(parser_dict[arg_name][parsed_arg])
# we have config option. saving it.
self._convert_non_cli_args(
parser_name, parser_dict[arg_name])
dict_utils.dict_merge(
file_result[parser_name],
parser_dict[arg_name])
# remove from cli args
args_to_remove.append((parser_name, arg_name))
# remove parser dict outside loop to avoid iteration dict modification
for parser_name, arg_name in args_to_remove:
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in cli_args and spec_parser['name'] == parser_name:
parser_dict = cli_args[spec_parser['name']]
parser_dict.pop(arg_name)
break
return file_result
def generate_answers_file(self, cli_args, spec_defaults):
"""Generates answers INI file
        :param cli_args: dict, cli arguments.
:param spec_defaults: the default values.
"""
def put_option(config, parser_name, option_name, value):
for opt_help in option.get('help', '').split('\n'):
help_opt = '# ' + opt_help
# add help comment
if config.has_option(parser_name, help_opt):
config.remove_option(parser_name, help_opt)
config.set(
parser_name, help_opt)
if config.has_option(parser_name, option_name):
value = config.get(parser_name, option_name)
config.remove_option(parser_name, option_name)
config.set(
parser_name,
option_name,
str(value))
file_generated = False
# load generate answers file for all the parsers
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
if option_spec and option_spec.get(
'action', '') == 'generate-answers':
options_to_save = \
self.spec_helper.get_parser_option_specs(parser_name)
out_answers = configparser.ConfigParser(allow_no_value=True)
if not out_answers.has_section(parser_name):
out_answers.add_section(parser_name)
for option in options_to_save:
opt_name = option['name']
if opt_name in parser_dict:
put_option(
out_answers,
parser_name,
opt_name,
parser_dict[opt_name])
elif opt_name in spec_defaults[parser_name]:
put_option(
out_answers,
parser_name,
opt_name,
spec_defaults[parser_name][opt_name])
elif option.get('required', False):
put_option(
out_answers,
parser_name,
'# ' + opt_name,
"Required argument. "
"Edit with one of the allowed values OR "
"override with "
"CLI: --{}=<option>".format(opt_name))
# write to file
with open(arg_value, 'w') as answers_file:
out_answers.write(answers_file)
file_generated = True
return file_generated
def resolve_custom_types(self, args):
"""Transforms the arguments with custom types
:param args: the list of received arguments.
"""
for parser_name, parser_dict in args.items():
spec_complex_options = [opt for opt in
self.spec_helper.get_parser_option_specs(
parser_name) if
opt.get('type', None) in COMPLEX_TYPES]
for spec_option in spec_complex_options:
option_name = spec_option['name']
if option_name in parser_dict:
# we have custom type to resolve
type_name = spec_option['type']
option_value = parser_dict[option_name]
action = self.create_complex_argumet_type(
parser_name,
type_name,
option_name,
spec_option)
# resolving value
parser_dict[option_name] = action.resolve(option_value)
def create_complex_argumet_type(self, subcommand, type_name, option_name,
spec_option):
"""Build the complex argument type
:param subcommand: the command name
:param type_name: the complex type name
:param option_name: the option name
:param spec_option: option's specifications
:return: the complex type instance
"""
complex_action = COMPLEX_TYPES.get(
type_name, None)
if complex_action is None:
raise exceptions.SpecParserException(
"Unknown complex type: {}".format(type_name))
return complex_action(
option_name,
(self.vars, self.defaults, self.plugin_path),
subcommand,
spec_option)
def parse_args(self, arg_parser, args=None):
"""Parses all the arguments (cli, answers file)
        :return: None, if ``--generate-answers-file`` is in arg_parser
        :return: (dict, dict, dict):
            * nested arguments dict (arguments to pass to the playbooks)
            * control arguments dict (arguments to control the IR logic)
            * custom arguments dict (custom ansible variables to pass as-is)
"""
spec_defaults = self.get_spec_defaults()
cli_args = CliParser.parse_cli_input(arg_parser, args)
file_args = self.get_answers_file_args(cli_args)
# generate answers file and exit
if self.generate_answers_file(cli_args, spec_defaults):
LOG.warning("Answers file generated. Exiting.")
# print warnings when something was overridden from non-cli source.
self.validate_arg_sources(cli_args, file_args,
spec_defaults)
# print warnings for deprecated
self.validate_arg_deprecation(cli_args, file_args)
# now filter defaults to have only parser defined in cli
defaults = dict((key, spec_defaults[key])
for key in cli_args.keys() if
key in spec_defaults)
# copy cli args with the same name to all parser groups
self._merge_duplicated_cli_args(cli_args)
self._merge_duplicated_cli_args(file_args)
dict_utils.dict_merge(defaults, file_args)
dict_utils.dict_merge(defaults, cli_args)
self.validate_requires_args(defaults)
self.validate_length_args(defaults)
self.validate_choices_args(defaults)
self.validate_min_max_args(defaults)
# now resolve complex types.
self.resolve_custom_types(defaults)
nested, control, custom = \
self.get_nested_custom_and_control_args(defaults)
return nested, control, custom
def validate_arg_deprecation(self, cli_args, answer_file_args):
"""Validates and prints the deprecated arguments.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
"""
for deprecated, deprecates in self.get_deprecated_args().items():
for input_args in (answer_file_args.items(), cli_args.items()):
for command, command_dict in input_args:
if deprecated in command_dict:
if deprecates in command_dict:
raise exceptions.IRDeprecationException(
"[{}] Argument '{}' deprecates '{}',"
" please use only the new one.".format(
command, deprecated, deprecates))
if deprecated in answer_file_args[command]:
answer_file_args[command][deprecates] = \
answer_file_args[command][deprecated]
if deprecated in cli_args[command]:
cli_args[command][deprecates] = \
cli_args[command][deprecated]
LOG.warning(
"[{}] Argument '{}' was deprecated,"
" please use '{}'.".format(
command, deprecated, deprecates))
@staticmethod
def validate_arg_sources(cli_args, answer_file_args, spec_defaults):
"""Validates and prints the arguments' source.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
:param spec_defaults: the default values from spec files
"""
def show_diff(diff, command_name, cmd_dict, source_name):
if diff:
for arg_name in diff:
value = cmd_dict[arg_name]
LOG.info(
"[{}] Argument '{}' was set to"
" '{}' from the {} source.".format(
command_name, arg_name, value, source_name))
for command, command_dict in cli_args.items():
file_dict = answer_file_args.get(command, {})
file_diff = set(file_dict.keys()) - set(command_dict.keys())
show_diff(file_diff, command, file_dict, 'answers file')
def_dict = spec_defaults.get(command, {})
default_diff = set(def_dict.keys()) - set(
command_dict.keys()) - file_diff
show_diff(default_diff, command, def_dict, 'spec defaults')
def _get_conditionally_required_args(self, command_name, options_spec,
args):
"""List arguments with ``required_when`` condition matched.
:param command_name: the command name.
:param options_spec: the list of command spec options.
:param args: the received input arguments
:return: list, list of argument names with matched ``required_when``
condition
"""
opts_names = [option_spec['name'] for option_spec in options_spec]
missing_args = []
for option_spec in options_spec:
option_results = []
if option_spec and 'required_when' in option_spec:
req_when_args = [option_spec['required_when']] \
if not type(option_spec['required_when']) is list \
else option_spec['required_when']
# validate conditions
for req_when_arg in req_when_args:
splited_args_list = req_when_arg.split()
for idx, req_arg in enumerate(splited_args_list):
if req_arg in opts_names:
splited_args_list[idx] = \
args.get(command_name, {}).get(req_arg.strip())
if splited_args_list[idx] is None:
option_results.append(False)
break
splited_args_list[idx] = str(splited_args_list[idx])
if (splited_args_list[idx] not in ['and', 'or'] and
not any(
(c in '<>=') for c in splited_args_list[idx])):
splited_args_list[idx] = "'{0}'".format(
yaml.safe_load(splited_args_list[idx]))
else:
option_results.append(
eval(' '.join(splited_args_list)))
if all(option_results) and \
self.spec_helper.get_option_state(
command_name,
option_spec['name'],
args) == helper.OptionState['NOT_SET']:
missing_args.append(option_spec['name'])
return missing_args
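_get_conditionally_required_args turns a ``required_when`` string into an eval-able Python expression: option names are replaced with the received values, bare words are quoted, and the joined expression is evaluated. A minimal sketch of that transformation, assuming PyYAML is available (the module already imports yaml); the option name and values are illustrative:

# Sketch only: mirrors the tokenize/substitute/quote/eval steps above.
import yaml

received = {"deploy-mode": "ovb"}
condition = "deploy-mode == ovb"

tokens = condition.split()
for idx, token in enumerate(tokens):
    if token in received:
        tokens[idx] = str(received[token])
    if tokens[idx] not in ["and", "or"] and \
            not any(c in "<>=" for c in tokens[idx]):
        tokens[idx] = "'{0}'".format(yaml.safe_load(tokens[idx]))

expression = " ".join(tokens)
print(expression)        # 'ovb' == 'ovb'
print(eval(expression))  # True (eval is used here only to mirror the original)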
def validate_requires_args(self, args):
"""Check if all the required arguments have been provided. """
silent_args = self.get_silent_args(args)
def validate_parser(parser_name, expected_options, parser_args):
"""Helper method to resolve dict_merge. """
result = collections.defaultdict(list)
condition_req_args = self._get_conditionally_required_args(
parser_name, expected_options, args)
for option in expected_options:
name = option['name']
# check required options.
if (option.get('required', False) and
name not in parser_args or
option['name'] in condition_req_args) and \
name not in silent_args:
result[parser_name].append(name)
return result
res = {}
for command_data in self.spec_helper.iterate_parsers():
cmd_name = command_data['name']
if cmd_name in args:
dict_utils.dict_merge(
res,
validate_parser(
cmd_name,
self.spec_helper.get_parser_option_specs(cmd_name),
args[cmd_name]))
missing_args = dict((cmd_name, args)
for cmd_name, args in res.items() if len(args) > 0)
if missing_args:
raise exceptions.IRRequiredArgsMissingException(missing_args)
def validate_length_args(self, args):
"""Check if value of arguments is not longer than length specified.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'length' not in spec_option:
# skip options that do not contain length
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve length
length = spec_option['length']
option_value = parser_dict[option_name]
if len(option_value) > int(length):
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
length
))
if invalid_options:
# raise exception with all arguments that exceed length
raise exceptions.IRInvalidLengthException(invalid_options)
def validate_choices_args(self, args):
"""Check if value of choice arguments is one of the available choices.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'choices' not in spec_option:
# skip options that do not contain choices
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve choices
choices = spec_option['choices']
option_value = parser_dict[option_name]
if option_value not in choices:
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
choices
))
if invalid_options:
# raise exception with all arguments that contain invalid choices
raise exceptions.IRInvalidChoiceException(invalid_options)
def validate_min_max_args(self, args):
"""Check if value of arguments is between minimum and maximum values.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if all([key not in spec_option
for key in ('maximum', 'minimum')]):
# skip options that do not contain minimum or maximum
continue
option_name = spec_option['name']
if option_name in parser_dict:
option_value = parser_dict[option_name]
min_value = spec_option.get('minimum')
max_value = spec_option.get('maximum')
# handle empty values in spec files which load as None
min_value = '' if 'minimum' in spec_option \
and min_value is None else min_value
max_value = '' if 'maximum' in spec_option \
and max_value is None else max_value
values = {
"value": option_value,
"maximum": max_value,
"minimum": min_value
}
# make sure that values are numbers
is_all_values_numbers = True
for name, num in values.items():
if num is not None \
and (isinstance(num, bool) or
not isinstance(num, (int, float))):
invalid_options.append((
option_name,
name,
"number",
type(num).__name__
))
is_all_values_numbers = False
if not is_all_values_numbers:
# don't continue to min max checks since some of the
# values are not numbers
continue
# check bigger than minimum
if min_value is not None and option_value < min_value:
invalid_options.append((
option_name,
"minimum",
min_value,
option_value
))
# check smaller than maximum
if max_value is not None and option_value > max_value:
invalid_options.append((
option_name,
"maximum",
max_value,
option_value
))
if invalid_options:
# raise exception with all arguments whose values are out of range
raise exceptions.IRInvalidMinMaxRangeException(invalid_options)
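The min/max validation above boils down to a per-option range check whose failures are collected as (name, bound, limit, value) tuples. A small sketch with an illustrative spec and value:

# Sketch of the range check performed for a single option.
spec = {"name": "cpus", "minimum": 1, "maximum": 8}
received = {"cpus": 12}

invalid = []
value = received[spec["name"]]
if spec.get("minimum") is not None and value < spec["minimum"]:
    invalid.append((spec["name"], "minimum", spec["minimum"], value))
if spec.get("maximum") is not None and value > spec["maximum"]:
    invalid.append((spec["name"], "maximum", spec["maximum"], value))

print(invalid)  # [('cpus', 'maximum', 8, 12)]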
def get_silent_args(self, args):
"""list of silenced argument
:param args: The received arguments.
:return: list, slienced argument names
"""
silent_args_names = []
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if arg_spec and 'silent' in arg_spec and \
self.spec_helper.get_option_state(
parser_name,
arg_name,
args) == helper.OptionState['IS_SET']:
silent_args_names.extend(arg_spec['silent'])
return list(set(silent_args_names))
def get_nested_custom_and_control_args(self, args):
"""Split input arguments to control nested and custom.
Controls arguments: control the IR behavior. These arguments
will not be put into the spec yml file
Nested arguments: are used by the Ansible playbooks and will be put
into the spec yml file.
Custom arguments: Custom ansible variables to be used instead of the
normal nested usage.
:param args: the collected list of args.
:return: (dict, dict): flat dicts (control_args, nested_args)
"""
# returns flat dicts
nested = {}
control_args = {}
custom_args = {}
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if all([arg_spec, arg_spec.get('type', None),
arg_spec.get('type', None) in
[ctype_name for ctype_name, klass in
COMPLEX_TYPES.items() if klass.is_nested]
]) or ('is_shared_group_option' not in arg_spec):
if arg_name in nested:
LOG.warning(
"Duplicated nested argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, nested[arg_name]))
elif arg_name in custom_args:
LOG.warning(
"Duplicated custom argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, custom_args[arg_name]))
else:
if "ansible_variable" in arg_spec:
custom_args[arg_spec["ansible_variable"]] = arg_value
else:
nested[arg_name] = arg_value
else:
if arg_name in control_args:
LOG.warning(
"Duplicated control argument found: '{}'. Using "
"old value: '{}'".format(
arg_name, control_args[arg_name]))
else:
control_args[arg_name] = arg_value
return nested, control_args, custom_args
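Ignoring the nested complex-type clause, the split above comes down to: options marked as shared-group options become control args, an ``ansible_variable`` key redirects the value into custom args, and everything else is nested. A simplified sketch with made-up option specs:

# Sketch only: simplified nested / control / custom classification.
received = {"inventory": "hosts", "version": "17.1", "registry-tag": "latest"}
specs = {
    "inventory": {"is_shared_group_option": True},
    "version": {},
    "registry-tag": {"ansible_variable": "container_tag"},
}

nested, control, custom = {}, {}, {}
for name, value in received.items():
    spec = specs[name]
    if "is_shared_group_option" not in spec:
        if "ansible_variable" in spec:
            custom[spec["ansible_variable"]] = value
        else:
            nested[name] = value
    else:
        control[name] = value

print(nested)   # {'version': '17.1'}
print(control)  # {'inventory': 'hosts'}
print(custom)   # {'container_tag': 'latest'}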
def _iterate_received_arguments(self, args):
"""Iterator helper method over all the received arguments
:return: yields tuple:
(spec name, spec dict,
argument name, argument value, argument spec)
"""
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in args:
parser_dict = args[spec_parser['name']]
for arg_name, arg_val in parser_dict.items():
arg_spec = self.spec_helper.get_option_spec(
spec_parser['name'], arg_name)
yield (spec_parser['name'], parser_dict,
arg_name, arg_val, arg_spec)
def _convert_non_cli_args(self, parser_name, values_dict):
"""Casts arguments to correct types by modifying values_dict param.
By default all the values are strings.
:param parser_name: The command name, e.g. main, virsh, ospd, etc
:param values_dict: The dict with arguments
"""
for opt_name, opt_value in values_dict.items():
file_option_spec = self.spec_helper.get_option_spec(
parser_name, opt_name)
if file_option_spec.get('type', None) in ['int', ] or \
file_option_spec.get('action', None) in ['count', ]:
values_dict[opt_name] = int(opt_value)
def _merge_duplicated_cli_args(self, cli_args):
"""Merge duplicated arguments to all the parsers
This is need to handle control args, shared among several parsers.
for example, verbose, inventory
"""
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(cli_args):
for parser_name2, parser_dict2 in cli_args.items():
if all([parser_name2, parser_name != parser_name2,
arg_name not in parser_dict2]):
if self.spec_helper.get_option_spec(parser_name2,
arg_name):
parser_dict2[arg_name] = arg_value
|
infrared/core/inspector/inspector.py
|
codereval_python_data_213
|
Gets all the options for the specified command
:param command_name: the command name (main, virsh, ospd, etc...)
:return: the list of all command options
def get_parser_option_specs(self, command_name):
"""Gets all the options for the specified command
:param command_name: the command name (main, virsh, ospd, etc...)
:return: the list of all command options
"""
options = []
for parser in self.iterate_parsers():
if parser['name'] == command_name:
options = self._get_all_options_spec(parser)
break
return options
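A minimal usage sketch of get_parser_option_specs, assuming the infrared package is importable; the spec fragment below (one 'virsh' subparser with one group and one top-level option) is made up purely for illustration:

# Sketch only: feed SpecDictHelper a tiny spec dict and list one parser's options.
from infrared.core.inspector import helper

spec = {
    "subparsers": {
        "virsh": {
            "groups": [
                {"title": "topology",
                 "options": {"topology-nodes": {"help": "Nodes list"}}},
            ],
            "options": {"dry-run": {"action": "store_true"}},
        }
    }
}

spec_helper = helper.SpecDictHelper(spec)
print(spec_helper.get_parser_option_specs("virsh"))
# roughly: [{'name': 'topology-nodes', 'help': 'Nodes list'},
#           {'name': 'dry-run', 'action': 'store_true'}]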
from copy import deepcopy
from infrared.core.utils.exceptions import SpecParserException
OptionState = dict(
UNRECOGNIZED='unrecognized',
IS_SET='is set',
NOT_SET='is not set'
)
class SpecDictHelper(object):
"""Controls the spec dicts and provides useful methods to get spec info."""
def __init__(self, spec_dict):
self.spec_dict = spec_dict
# make structure of the dict flat
# 1. handle include_groups directive in main parser
parser_dict = self.spec_dict
self._include_groups(parser_dict)
# 2. Include groups for all subparsers
for subparser_name, subparser_dict in parser_dict.get(
'subparsers', {}).items():
self._include_groups(subparser_dict)
def iterate_parsers(self):
"""Iterates over the main parsers and subparsers. """
for subparser_name, subparser_dict in self.spec_dict.get(
'subparsers', {}).items():
yield dict(name=subparser_name, **subparser_dict)
def iterate_option_specs(self):
"""Iterates over all the option specs.
Returns pair of parser and option on every iteration.
"""
for parser in self.iterate_parsers():
for spec_option in self._get_all_options_spec(parser):
yield parser, spec_option
@staticmethod
def _get_all_options_spec(parser_dict):
"""Gets all the options specification as the list of dicts. """
result = []
for group in parser_dict.get('groups', []):
for option_name, option_dict in group.get('options', {}).items():
result.append(dict(name=option_name, **option_dict))
for option_name, option_dict in parser_dict.get('options', {}).items():
result.append(dict(name=option_name, **option_dict))
return result
def get_parser_option_specs(self, command_name):
"""Gets all the options for the specified command
:param command_name: the command name (main, virsh, ospd, etc...)
:return: the list of all command options
"""
options = []
for parser in self.iterate_parsers():
if parser['name'] == command_name:
options = self._get_all_options_spec(parser)
break
return options
def get_option_spec(self, command_name, argument_name):
"""Gets the specification for the specified option name. """
options = self.get_parser_option_specs(command_name)
return next((opt for opt in options
if opt['name'] == argument_name), {})
def get_option_state(self, command_name, option_name, args):
"""Gets the option state.
:param command_name: The command name
:param option_name: The option name to analyze
:param args: The received arguments.
"""
option_spec = self.get_option_spec(command_name, option_name)
if not option_spec:
res = OptionState['UNRECOGNIZED']
elif option_name not in args.get(command_name, {}):
res = OptionState['NOT_SET']
else:
option_value = args[command_name][option_name]
if option_spec.get('action', '') in ['store_true'] \
and option_value is False:
res = OptionState['NOT_SET']
else:
res = OptionState['IS_SET']
return res
def _include_groups(self, parser_dict):
"""Resolves the include dict directive in the spec files. """
for group in parser_dict.get('include_groups', []):
# ensure we have that group
grp_dict = next(
(grp for grp in self.spec_dict.get('shared_groups', [])
if grp['title'] == group),
None)
if grp_dict is None:
raise SpecParserException(
"Unable to include group '{}' in '{}' parser. "
"Group was not found!".format(
group,
parser_dict['name']))
for option in grp_dict.get('options', {}).values():
option['is_shared_group_option'] = True
parser_groups_list = parser_dict.get('groups', [])
parser_groups_list.append(deepcopy(grp_dict))
parser_dict['groups'] = parser_groups_list
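_include_groups resolves an ``include_groups`` entry by locating the shared group by title, flagging its options as shared, and appending a deep copy to the parser's groups. A standalone sketch of that resolution with an illustrative spec fragment:

# Sketch only: mirrors the include_groups resolution above.
from copy import deepcopy

shared_groups = [
    {"title": "common", "options": {"verbose": {"action": "store_true"}}},
]
parser = {"name": "virsh", "include_groups": ["common"], "groups": []}

for wanted in parser.get("include_groups", []):
    grp = next((g for g in shared_groups if g["title"] == wanted), None)
    if grp is None:
        raise ValueError("Unable to include group '{}'".format(wanted))
    for option in grp["options"].values():
        option["is_shared_group_option"] = True
    parser["groups"].append(deepcopy(grp))

print(parser["groups"][0]["options"]["verbose"])
# {'action': 'store_true', 'is_shared_group_option': True}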
|
infrared/core/inspector/helper.py
|
codereval_python_data_214
|
Gets the specification for the specified option name.
def get_option_spec(self, command_name, argument_name):
"""Gets the specification for the specified option name. """
options = self.get_parser_option_specs(command_name)
return next((opt for opt in options
if opt['name'] == argument_name), {})
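get_option_spec is a linear scan over the command's option specs that falls back to an empty dict when nothing matches. A tiny standalone sketch of that lookup with an illustrative option list:

# Sketch only: the next(..., {}) lookup idiom used by get_option_spec.
options = [{"name": "topology", "help": "Topology string"},
           {"name": "dry-run", "action": "store_true"}]


def find_option(option_specs, argument_name):
    return next((opt for opt in option_specs
                 if opt["name"] == argument_name), {})


print(find_option(options, "topology"))  # {'name': 'topology', 'help': 'Topology string'}
print(find_option(options, "missing"))   # {}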
from copy import deepcopy
from infrared.core.utils.exceptions import SpecParserException
OptionState = dict(
UNRECOGNIZED='unrecognized',
IS_SET='is set',
NOT_SET='is not set'
)
class SpecDictHelper(object):
"""Controls the spec dicts and provides useful methods to get spec info."""
def __init__(self, spec_dict):
self.spec_dict = spec_dict
# make structure of the dict flat
# 1. handle include_groups directive in main parser
parser_dict = self.spec_dict
self._include_groups(parser_dict)
# 2. Include groups for all subparsers
for subparser_name, subparser_dict in parser_dict.get(
'subparsers', {}).items():
self._include_groups(subparser_dict)
def iterate_parsers(self):
"""Iterates over the main parsers and subparsers. """
for subparser_name, subparser_dict in self.spec_dict.get(
'subparsers', {}).items():
yield dict(name=subparser_name, **subparser_dict)
def iterate_option_specs(self):
"""Iterates over all the option specs.
Returns pair of parser and option on every iteration.
"""
for parser in self.iterate_parsers():
for spec_option in self._get_all_options_spec(parser):
yield parser, spec_option
@staticmethod
def _get_all_options_spec(parser_dict):
"""Gets all the options specification as the list of dicts. """
result = []
for group in parser_dict.get('groups', []):
for option_name, option_dict in group.get('options', {}).items():
result.append(dict(name=option_name, **option_dict))
for option_name, option_dict in parser_dict.get('options', {}).items():
result.append(dict(name=option_name, **option_dict))
return result
def get_parser_option_specs(self, command_name):
"""Gets all the options for the specified command
:param command_name: the command name (main, virsh, ospd, etc...)
:return: the list of all command options
"""
options = []
for parser in self.iterate_parsers():
if parser['name'] == command_name:
options = self._get_all_options_spec(parser)
break
return options
def get_option_spec(self, command_name, argument_name):
"""Gets the specification for the specified option name. """
options = self.get_parser_option_specs(command_name)
return next((opt for opt in options
if opt['name'] == argument_name), {})
def get_option_state(self, command_name, option_name, args):
"""Gets the option state.
:param command_name: The command name
:param option_name: The option name to analyze
:param args: The received arguments.
"""
option_spec = self.get_option_spec(command_name, option_name)
if not option_spec:
res = OptionState['UNRECOGNIZED']
elif option_name not in args.get(command_name, {}):
res = OptionState['NOT_SET']
else:
option_value = args[command_name][option_name]
if option_spec.get('action', '') in ['store_true'] \
and option_value is False:
res = OptionState['NOT_SET']
else:
res = OptionState['IS_SET']
return res
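get_option_state distinguishes three cases: unknown options, known options that were not supplied (including store_true flags left at False), and options that were actually set. A standalone sketch of the same decision tree; the specs and values below are illustrative:

# Sketch only: toy re-implementation of the option-state decision above.
OPTION_STATE = {"UNRECOGNIZED": "unrecognized",
                "IS_SET": "is set",
                "NOT_SET": "is not set"}


def toy_option_state(option_spec, option_name, command_args):
    if not option_spec:
        return OPTION_STATE["UNRECOGNIZED"]
    if option_name not in command_args:
        return OPTION_STATE["NOT_SET"]
    value = command_args[option_name]
    if option_spec.get("action", "") in ["store_true"] and value is False:
        return OPTION_STATE["NOT_SET"]
    return OPTION_STATE["IS_SET"]


print(toy_option_state({}, "missing", {}))                                    # unrecognized
print(toy_option_state({"action": "store_true"}, "debug", {"debug": False}))  # is not set
print(toy_option_state({"help": "x"}, "topology", {"topology": "all"}))       # is set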
def _include_groups(self, parser_dict):
"""Resolves the include dict directive in the spec files. """
for group in parser_dict.get('include_groups', []):
# ensure we have that group
grp_dict = next(
(grp for grp in self.spec_dict.get('shared_groups', [])
if grp['title'] == group),
None)
if grp_dict is None:
raise SpecParserException(
"Unable to include group '{}' in '{}' parser. "
"Group was not found!".format(
group,
parser_dict['name']))
for option in grp_dict.get('options', {}).values():
option['is_shared_group_option'] = True
parser_groups_list = parser_dict.get('groups', [])
parser_groups_list.append(deepcopy(grp_dict))
parser_dict['groups'] = parser_groups_list
|
infrared/core/inspector/helper.py
|
codereval_python_data_215
|
List of silenced arguments.
:param args: The received arguments.
:return: list, silenced argument names
def get_silent_args(self, args):
"""list of silenced argument
:param args: The received arguments.
:return: list, slienced argument names
"""
silent_args_names = []
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if arg_spec and 'silent' in arg_spec and \
self.spec_helper.get_option_state(
parser_name,
arg_name,
args) == helper.OptionState['IS_SET']:
silent_args_names.extend(arg_spec['silent'])
return list(set(silent_args_names))
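The 'silent' mechanism above works as follows: when an option that carries a ``silent`` list is actually set, the option names it lists are collected and later excluded from the required-arguments check. A short sketch with made-up option names:

# Sketch only: collecting silenced option names.
specs = {"image-url": {"silent": ["image-file"]},
         "image-file": {"required": True}}
received = {"image-url": "http://example.com/image.qcow2"}

silent = []
for name, spec in specs.items():
    if "silent" in spec and name in received:
        silent.extend(spec["silent"])

print(sorted(set(silent)))  # ['image-file']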
import collections
import os
from six.moves import configparser
from string import Template
import yaml
from infrared.core.cli.cli import CliParser
from infrared.core.cli.cli import COMPLEX_TYPES
from infrared.core.inspector import helper
from infrared.core.utils import dict_utils
from infrared.core.utils import exceptions
from infrared.core.utils import logger
LOG = logger.LOG
class SpecParser(object):
"""Parses input arguments from different sources (cli, answers file). """
@classmethod
def from_plugin(cls, subparser, plugin, base_groups):
"""Reads spec & vars from plugin and constructs the parser instance
:param subparser: argparse.subparser to extend
:param plugin: InfraredPlugin object
:param base_groups: dict, included groups
:return: SpecParser object based on given plugin spec & vars
"""
spec_dict = base_groups or {}
with open(plugin.spec) as stream:
spec = yaml.safe_load(stream) or {}
dict_utils.dict_merge(
base_groups,
spec,
dict_utils.ConflictResolver.unique_append_list_resolver)
# The "try-excpet" block here is for adding spec file path if it
# includes an unsupported option type
try:
return SpecParser(subparser, spec_dict, plugin.vars_dir,
plugin.defaults_dir, plugin.path)
except exceptions.IRUnsupportedSpecOptionType as ex:
ex.message += ' in file: {}'.format(plugin.spec)
raise ex
def __init__(self, subparser, spec_dict, vars_dir, defaults_dir,
plugin_path):
"""Constructor.
:param subparser: argparse.subparser to extend
:param spec_dict: dict with CLI description
:param vars_dir: Path to plugin's vars dir
:param defaults_dir: Path to plugin's defaults dir
:param plugin_path: Path to the plugin
"""
self.vars = vars_dir
self.defaults = defaults_dir
self.plugin_path = plugin_path
self.spec_helper = helper.SpecDictHelper(spec_dict)
# create parser
self.parser = CliParser.create_parser(self, subparser)
def add_shared_groups(self, list_of_groups):
"""Adds the user defined shared groups
:param list_of_groups: list, of group dicts
"""
shared_groups = self.spec_helper.spec_dict.get('shared_groups', [])
shared_groups.extend(list_of_groups)
self.spec_helper.spec_dict['shared_groups'] = shared_groups
def _get_defaults(self, default_getter_func):
"""Resolve arguments' values from cli or answers file.
:param default_getter_func: callable. will be called for all the
available options in spec file.
"""
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
default_value = default_getter_func(option)
if default_value is not None:
sub = parser['name']
result[sub][option['name']] = default_value
return result
def get_spec_defaults(self):
"""Resolve arguments' values from spec and other sources. """
def spec_default_getter(option):
"""Getter function to retrieve the default value from spec.
:param option: argument name
"""
# first try to get environment variable with IR_ prefix
default_value = SpecParser.get_env_option(option['name'])
if default_value is not None:
LOG.info(
"[environ] Loading '{0}' default value"
" '{1}' from the environment variable".format(
option['name'], default_value))
elif option.get('default', None) is not None:
default_value = option['default']
elif option.get('action', None) in ['store_true']:
default_value = False
return default_value
return self._get_defaults(spec_default_getter)
@staticmethod
def get_env_option(name):
"""Try get """
return os.environ.get('IR_' + name.upper().replace('-', '_'))
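get_env_option maps an option name to an IR_-prefixed, upper-cased environment variable, so ``host-address`` is read from IR_HOST_ADDRESS. A short sketch; the variable value set below is illustrative:

# Sketch only: the option-name to environment-variable mapping.
import os

os.environ["IR_HOST_ADDRESS"] = "192.0.2.10"


def env_default(option_name):
    return os.environ.get("IR_" + option_name.upper().replace("-", "_"))


print(env_default("host-address"))  # 192.0.2.10
print(env_default("not-set"))       # None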
def get_deprecated_args(self):
"""Returning dict with options which deprecate others. """
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
if option.get('deprecates') is not None:
result[option.get('deprecates')] = option.get('name')
return result
@staticmethod
def parse_env_variable_from_file(value):
if isinstance(value, str):
t = Template(value)
try:
value = t.substitute(os.environ)
except KeyError as undefined_var:
raise exceptions.IRAnswersFileEnvVarNotDefined(undefined_var)
return value
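parse_env_variable_from_file expands $VARIABLES in answers-file values from the process environment via string.Template; an undefined variable raises KeyError, which the caller converts into IRAnswersFileEnvVarNotDefined. A short sketch with illustrative variable names:

# Sketch only: Template-based environment substitution.
import os
from string import Template

os.environ["OS_VERSION"] = "17.1"
os.environ.pop("SOME_UNDEFINED_VAR", None)

print(Template("rhos-$OS_VERSION").substitute(os.environ))  # rhos-17.1

try:
    Template("$SOME_UNDEFINED_VAR").substitute(os.environ)
except KeyError as missing:
    print("undefined variable:", missing)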
def get_answers_file_args(self, cli_args):
"""Resolve arguments' values from answers INI file. """
file_result = {}
args_to_remove = []
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
file_result[parser_name] = file_result.get(parser_name, {})
if option_spec and option_spec.get(
'action', '') == 'read-answers':
# Iterate over arguments supplied by file
for parsed_arg in parser_dict[arg_name]:
# Supplied arguments' value can be a list
if isinstance(parser_dict[arg_name][parsed_arg], list):
i = 0
# Iterate over the argument values list
for parsed_value in parser_dict[arg_name][parsed_arg]:
parser_dict[arg_name][parsed_arg][i] = \
SpecParser.parse_env_variable_from_file(parsed_value)
i += 1
else:
parser_dict[arg_name][parsed_arg] = \
SpecParser.parse_env_variable_from_file(parser_dict[arg_name][parsed_arg])
# we have config option. saving it.
self._convert_non_cli_args(
parser_name, parser_dict[arg_name])
dict_utils.dict_merge(
file_result[parser_name],
parser_dict[arg_name])
# remove from cli args
args_to_remove.append((parser_name, arg_name))
# remove parser dict outside loop to avoid iteration dict modification
for parser_name, arg_name in args_to_remove:
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in cli_args and spec_parser['name'] == parser_name:
parser_dict = cli_args[spec_parser['name']]
parser_dict.pop(arg_name)
break
return file_result
def generate_answers_file(self, cli_args, spec_defaults):
"""Generates answers INI file
:param cli_args: list, cli arguments.
:param spec_defaults: the default values.
"""
def put_option(config, parser_name, option_name, value):
for opt_help in option.get('help', '').split('\n'):
help_opt = '# ' + opt_help
# add help comment
if config.has_option(parser_name, help_opt):
config.remove_option(parser_name, help_opt)
config.set(
parser_name, help_opt)
if config.has_option(parser_name, option_name):
value = config.get(parser_name, option_name)
config.remove_option(parser_name, option_name)
config.set(
parser_name,
option_name,
str(value))
file_generated = False
# generate the answers file for all the parsers
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
if option_spec and option_spec.get(
'action', '') == 'generate-answers':
options_to_save = \
self.spec_helper.get_parser_option_specs(parser_name)
out_answers = configparser.ConfigParser(allow_no_value=True)
if not out_answers.has_section(parser_name):
out_answers.add_section(parser_name)
for option in options_to_save:
opt_name = option['name']
if opt_name in parser_dict:
put_option(
out_answers,
parser_name,
opt_name,
parser_dict[opt_name])
elif opt_name in spec_defaults[parser_name]:
put_option(
out_answers,
parser_name,
opt_name,
spec_defaults[parser_name][opt_name])
elif option.get('required', False):
put_option(
out_answers,
parser_name,
'# ' + opt_name,
"Required argument. "
"Edit with one of the allowed values OR "
"override with "
"CLI: --{}=<option>".format(opt_name))
# write to file
with open(arg_value, 'w') as answers_file:
out_answers.write(answers_file)
file_generated = True
return file_generated
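generate_answers_file writes one INI section per command, storing help text as '# ...' options with no value (hence allow_no_value=True). A rough sketch of the kind of file it produces, using the stdlib configparser directly where the real code goes through six.moves; the section and option names are illustrative:

# Sketch only: an answers-file-style INI with no-value help comments.
import sys
import configparser

answers = configparser.ConfigParser(allow_no_value=True)
answers.add_section("virsh")
answers.set("virsh", "# Topology to deploy")
answers.set("virsh", "topology", "1_controller,1_compute")
answers.write(sys.stdout)
# [virsh]
# # topology to deploy
# topology = 1_controller,1_compute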
def resolve_custom_types(self, args):
"""Transforms the arguments with custom types
:param args: the list of received arguments.
"""
for parser_name, parser_dict in args.items():
spec_complex_options = [opt for opt in
self.spec_helper.get_parser_option_specs(
parser_name) if
opt.get('type', None) in COMPLEX_TYPES]
for spec_option in spec_complex_options:
option_name = spec_option['name']
if option_name in parser_dict:
# we have custom type to resolve
type_name = spec_option['type']
option_value = parser_dict[option_name]
action = self.create_complex_argumet_type(
parser_name,
type_name,
option_name,
spec_option)
# resolving value
parser_dict[option_name] = action.resolve(option_value)
def create_complex_argumet_type(self, subcommand, type_name, option_name,
spec_option):
"""Build the complex argument type
:param subcommand: the command name
:param type_name: the complex type name
:param option_name: the option name
:param spec_option: option's specifications
:return: the complex type instance
"""
complex_action = COMPLEX_TYPES.get(
type_name, None)
if complex_action is None:
raise exceptions.SpecParserException(
"Unknown complex type: {}".format(type_name))
return complex_action(
option_name,
(self.vars, self.defaults, self.plugin_path),
subcommand,
spec_option)
def parse_args(self, arg_parser, args=None):
"""Parses all the arguments (cli, answers file)
:return: None, if ``--generate-answers-file`` in arg_arg_parser
:return: (dict, dict):
* command arguments dict (arguments to control the IR logic)
* nested arguments dict (arguments to pass to the playbooks)
"""
spec_defaults = self.get_spec_defaults()
cli_args = CliParser.parse_cli_input(arg_parser, args)
file_args = self.get_answers_file_args(cli_args)
# generate answers file and exit
if self.generate_answers_file(cli_args, spec_defaults):
LOG.warning("Answers file generated. Exiting.")
# print warnings when something was overridden from non-cli source.
self.validate_arg_sources(cli_args, file_args,
spec_defaults)
# print warnings for deprecated
self.validate_arg_deprecation(cli_args, file_args)
# now filter defaults to have only parser defined in cli
defaults = dict((key, spec_defaults[key])
for key in cli_args.keys() if
key in spec_defaults)
# copy cli args with the same name to all parser groups
self._merge_duplicated_cli_args(cli_args)
self._merge_duplicated_cli_args(file_args)
dict_utils.dict_merge(defaults, file_args)
dict_utils.dict_merge(defaults, cli_args)
self.validate_requires_args(defaults)
self.validate_length_args(defaults)
self.validate_choices_args(defaults)
self.validate_min_max_args(defaults)
# now resolve complex types.
self.resolve_custom_types(defaults)
nested, control, custom = \
self.get_nested_custom_and_control_args(defaults)
return nested, control, custom
def validate_arg_deprecation(self, cli_args, answer_file_args):
"""Validates and prints the deprecated arguments.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
"""
for deprecated, deprecates in self.get_deprecated_args().items():
for input_args in (answer_file_args.items(), cli_args.items()):
for command, command_dict in input_args:
if deprecated in command_dict:
if deprecates in command_dict:
raise exceptions.IRDeprecationException(
"[{}] Argument '{}' deprecates '{}',"
" please use only the new one.".format(
command, deprecated, deprecates))
if deprecated in answer_file_args[command]:
answer_file_args[command][deprecates] = \
answer_file_args[command][deprecated]
if deprecated in cli_args[command]:
cli_args[command][deprecates] = \
cli_args[command][deprecated]
LOG.warning(
"[{}] Argument '{}' was deprecated,"
" please use '{}'.".format(
command, deprecated, deprecates))
@staticmethod
def validate_arg_sources(cli_args, answer_file_args, spec_defaults):
"""Validates and prints the arguments' source.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
:param spec_defaults: the default values from spec files
"""
def show_diff(diff, command_name, cmd_dict, source_name):
if diff:
for arg_name in diff:
value = cmd_dict[arg_name]
LOG.info(
"[{}] Argument '{}' was set to"
" '{}' from the {} source.".format(
command_name, arg_name, value, source_name))
for command, command_dict in cli_args.items():
file_dict = answer_file_args.get(command, {})
file_diff = set(file_dict.keys()) - set(command_dict.keys())
show_diff(file_diff, command, file_dict, 'answers file')
def_dict = spec_defaults.get(command, {})
default_diff = set(def_dict.keys()) - set(
command_dict.keys()) - file_diff
show_diff(default_diff, command, def_dict, 'spec defaults')
def _get_conditionally_required_args(self, command_name, options_spec,
args):
"""List arguments with ``required_when`` condition matched.
:param command_name: the command name.
:param options_spec: the list of command spec options.
:param args: the received input arguments
:return: list, list of argument names with matched ``required_when``
condition
"""
opts_names = [option_spec['name'] for option_spec in options_spec]
missing_args = []
for option_spec in options_spec:
option_results = []
if option_spec and 'required_when' in option_spec:
req_when_args = [option_spec['required_when']] \
if not type(option_spec['required_when']) is list \
else option_spec['required_when']
# validate conditions
for req_when_arg in req_when_args:
splited_args_list = req_when_arg.split()
for idx, req_arg in enumerate(splited_args_list):
if req_arg in opts_names:
splited_args_list[idx] = \
args.get(command_name, {}).get(req_arg.strip())
if splited_args_list[idx] is None:
option_results.append(False)
break
splited_args_list[idx] = str(splited_args_list[idx])
if (splited_args_list[idx] not in ['and', 'or'] and
not any(
(c in '<>=') for c in splited_args_list[idx])):
splited_args_list[idx] = "'{0}'".format(
yaml.safe_load(splited_args_list[idx]))
else:
option_results.append(
eval(' '.join(splited_args_list)))
if all(option_results) and \
self.spec_helper.get_option_state(
command_name,
option_spec['name'],
args) == helper.OptionState['NOT_SET']:
missing_args.append(option_spec['name'])
return missing_args
def validate_requires_args(self, args):
"""Check if all the required arguments have been provided. """
silent_args = self.get_silent_args(args)
def validate_parser(parser_name, expected_options, parser_args):
"""Helper method to resolve dict_merge. """
result = collections.defaultdict(list)
condition_req_args = self._get_conditionally_required_args(
parser_name, expected_options, args)
for option in expected_options:
name = option['name']
# check required options.
if (option.get('required', False) and
name not in parser_args or
option['name'] in condition_req_args) and \
name not in silent_args:
result[parser_name].append(name)
return result
res = {}
for command_data in self.spec_helper.iterate_parsers():
cmd_name = command_data['name']
if cmd_name in args:
dict_utils.dict_merge(
res,
validate_parser(
cmd_name,
self.spec_helper.get_parser_option_specs(cmd_name),
args[cmd_name]))
missing_args = dict((cmd_name, args)
for cmd_name, args in res.items() if len(args) > 0)
if missing_args:
raise exceptions.IRRequiredArgsMissingException(missing_args)
def validate_length_args(self, args):
"""Check if value of arguments is not longer than length specified.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'length' not in spec_option:
# skip options that do not contain length
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve length
length = spec_option['length']
option_value = parser_dict[option_name]
if len(option_value) > int(length):
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
length
))
if invalid_options:
# raise exception with all arguments that exceed length
raise exceptions.IRInvalidLengthException(invalid_options)
def validate_choices_args(self, args):
"""Check if value of choice arguments is one of the available choices.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'choices' not in spec_option:
# skip options that do not contain choices
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve choices
choices = spec_option['choices']
option_value = parser_dict[option_name]
if option_value not in choices:
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
choices
))
if invalid_options:
# raise exception with all arguments that contain invalid choices
raise exceptions.IRInvalidChoiceException(invalid_options)
def validate_min_max_args(self, args):
"""Check if value of arguments is between minimum and maximum values.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if all([key not in spec_option
for key in ('maximum', 'minimum')]):
# skip options that do not contain minimum or maximum
continue
option_name = spec_option['name']
if option_name in parser_dict:
option_value = parser_dict[option_name]
min_value = spec_option.get('minimum')
max_value = spec_option.get('maximum')
# handle empty values in spec files which load as None
min_value = '' if 'minimum' in spec_option \
and min_value is None else min_value
max_value = '' if 'maximum' in spec_option \
and max_value is None else max_value
values = {
"value": option_value,
"maximum": max_value,
"minimum": min_value
}
# make sure that values are numbers
is_all_values_numbers = True
for name, num in values.items():
if num is not None \
and (isinstance(num, bool) or
not isinstance(num, (int, float))):
invalid_options.append((
option_name,
name,
"number",
type(num).__name__
))
is_all_values_numbers = False
if not is_all_values_numbers:
# don't continue to min max checks since some of the
# values are not numbers
continue
# check bigger than minimum
if min_value is not None and option_value < min_value:
invalid_options.append((
option_name,
"minimum",
min_value,
option_value
))
# check smaller than maximum
if max_value is not None and option_value > max_value:
invalid_options.append((
option_name,
"maximum",
max_value,
option_value
))
if invalid_options:
# raise exception with all arguments whose values are out of range
raise exceptions.IRInvalidMinMaxRangeException(invalid_options)
def get_silent_args(self, args):
"""list of silenced argument
:param args: The received arguments.
:return: list, slienced argument names
"""
silent_args_names = []
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if arg_spec and 'silent' in arg_spec and \
self.spec_helper.get_option_state(
parser_name,
arg_name,
args) == helper.OptionState['IS_SET']:
silent_args_names.extend(arg_spec['silent'])
return list(set(silent_args_names))
def get_nested_custom_and_control_args(self, args):
"""Split input arguments to control nested and custom.
Controls arguments: control the IR behavior. These arguments
will not be put into the spec yml file
Nested arguments: are used by the Ansible playbooks and will be put
into the spec yml file.
Custom arguments: Custom ansible variables to be used instead of the
normal nested usage.
:param args: the collected list of args.
:return: (dict, dict): flat dicts (control_args, nested_args)
"""
# returns flat dicts
nested = {}
control_args = {}
custom_args = {}
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if all([arg_spec, arg_spec.get('type', None),
arg_spec.get('type', None) in
[ctype_name for ctype_name, klass in
COMPLEX_TYPES.items() if klass.is_nested]
]) or ('is_shared_group_option' not in arg_spec):
if arg_name in nested:
LOG.warning(
"Duplicated nested argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, nested[arg_name]))
elif arg_name in custom_args:
LOG.warning(
"Duplicated custom argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, custom_args[arg_name]))
else:
if "ansible_variable" in arg_spec:
custom_args[arg_spec["ansible_variable"]] = arg_value
else:
nested[arg_name] = arg_value
else:
if arg_name in control_args:
LOG.warning(
"Duplicated control argument found: '{}'. Using "
"old value: '{}'".format(
arg_name, control_args[arg_name]))
else:
control_args[arg_name] = arg_value
return nested, control_args, custom_args
def _iterate_received_arguments(self, args):
"""Iterator helper method over all the received arguments
:return: yields tuple:
(spec name, spec dict,
argument name, argument value, argument spec)
"""
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in args:
parser_dict = args[spec_parser['name']]
for arg_name, arg_val in parser_dict.items():
arg_spec = self.spec_helper.get_option_spec(
spec_parser['name'], arg_name)
yield (spec_parser['name'], parser_dict,
arg_name, arg_val, arg_spec)
def _convert_non_cli_args(self, parser_name, values_dict):
"""Casts arguments to correct types by modifying values_dict param.
By default all the values are strings.
:param parser_name: The command name, e.g. main, virsh, ospd, etc
:param values_dict: The dict with arguments
"""
for opt_name, opt_value in values_dict.items():
file_option_spec = self.spec_helper.get_option_spec(
parser_name, opt_name)
if file_option_spec.get('type', None) in ['int', ] or \
file_option_spec.get('action', None) in ['count', ]:
values_dict[opt_name] = int(opt_value)
def _merge_duplicated_cli_args(self, cli_args):
"""Merge duplicated arguments to all the parsers
This is need to handle control args, shared among several parsers.
for example, verbose, inventory
"""
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(cli_args):
for parser_name2, parser_dict2 in cli_args.items():
if all([parser_name2, parser_name != parser_name2,
arg_name not in parser_dict2]):
if self.spec_helper.get_option_spec(parser_name2,
arg_name):
parser_dict2[arg_name] = arg_value
|
infrared/core/inspector/inspector.py
|
codereval_python_data_216
|
Check if all the required arguments have been provided.
def validate_requires_args(self, args):
"""Check if all the required arguments have been provided. """
silent_args = self.get_silent_args(args)
def validate_parser(parser_name, expected_options, parser_args):
"""Helper method to resolve dict_merge. """
result = collections.defaultdict(list)
condition_req_args = self._get_conditionally_required_args(
parser_name, expected_options, args)
for option in expected_options:
name = option['name']
# check required options.
if (option.get('required', False) and
name not in parser_args or
option['name'] in condition_req_args) and \
name not in silent_args:
result[parser_name].append(name)
return result
res = {}
for command_data in self.spec_helper.iterate_parsers():
cmd_name = command_data['name']
if cmd_name in args:
dict_utils.dict_merge(
res,
validate_parser(
cmd_name,
self.spec_helper.get_parser_option_specs(cmd_name),
args[cmd_name]))
missing_args = dict((cmd_name, args)
for cmd_name, args in res.items() if len(args) > 0)
if missing_args:
raise exceptions.IRRequiredArgsMissingException(missing_args)
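The required-arguments check collects, per command, every option marked required that was neither supplied nor silenced; the resulting dict is what IRRequiredArgsMissingException reports. A standalone sketch with illustrative specs and input:

# Sketch only: collecting missing required options for one command.
import collections

expected = [{"name": "images-task", "required": True},
            {"name": "topology", "required": True},
            {"name": "dry-run"}]
received = {"topology": "1_controller"}
silent = set()

missing = collections.defaultdict(list)
for option in expected:
    name = option["name"]
    if option.get("required", False) and name not in received and name not in silent:
        missing["virsh"].append(name)

print(dict(missing))  # {'virsh': ['images-task']}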
import collections
import os
from six.moves import configparser
from string import Template
import yaml
from infrared.core.cli.cli import CliParser
from infrared.core.cli.cli import COMPLEX_TYPES
from infrared.core.inspector import helper
from infrared.core.utils import dict_utils
from infrared.core.utils import exceptions
from infrared.core.utils import logger
LOG = logger.LOG
class SpecParser(object):
"""Parses input arguments from different sources (cli, answers file). """
@classmethod
def from_plugin(cls, subparser, plugin, base_groups):
"""Reads spec & vars from plugin and constructs the parser instance
:param subparser: argparse.subparser to extend
:param plugin: InfraredPlugin object
:param base_groups: dict, included groups
:return: SpecParser object based on given plugin spec & vars
"""
spec_dict = base_groups or {}
with open(plugin.spec) as stream:
spec = yaml.safe_load(stream) or {}
dict_utils.dict_merge(
base_groups,
spec,
dict_utils.ConflictResolver.unique_append_list_resolver)
# The "try-excpet" block here is for adding spec file path if it
# includes an unsupported option type
try:
return SpecParser(subparser, spec_dict, plugin.vars_dir,
plugin.defaults_dir, plugin.path)
except exceptions.IRUnsupportedSpecOptionType as ex:
ex.message += ' in file: {}'.format(plugin.spec)
raise ex
def __init__(self, subparser, spec_dict, vars_dir, defaults_dir,
plugin_path):
"""Constructor.
:param subparser: argparse.subparser to extend
:param spec_dict: dict with CLI description
:param vars_dir: Path to plugin's vars dir
:param defaults_dir: Path to plugin's defaults dir
:param plugin_path: Path to the plugin
"""
self.vars = vars_dir
self.defaults = defaults_dir
self.plugin_path = plugin_path
self.spec_helper = helper.SpecDictHelper(spec_dict)
# create parser
self.parser = CliParser.create_parser(self, subparser)
def add_shared_groups(self, list_of_groups):
"""Adds the user defined shared groups
:param list_of_groups: list, of group dicts
"""
shared_groups = self.spec_helper.spec_dict.get('shared_groups', [])
shared_groups.extend(list_of_groups)
self.spec_helper.spec_dict['shared_groups'] = shared_groups
def _get_defaults(self, default_getter_func):
"""Resolve arguments' values from cli or answers file.
:param default_getter_func: callable. will be called for all the
available options in spec file.
"""
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
default_value = default_getter_func(option)
if default_value is not None:
sub = parser['name']
result[sub][option['name']] = default_value
return result
def get_spec_defaults(self):
"""Resolve arguments' values from spec and other sources. """
def spec_default_getter(option):
"""Getter function to retrieve the default value from spec.
:param option: argument name
"""
# first try to get environment variable with IR_ prefix
default_value = SpecParser.get_env_option(option['name'])
if default_value is not None:
LOG.info(
"[environ] Loading '{0}' default value"
" '{1}' from the environment variable".format(
option['name'], default_value))
elif option.get('default', None) is not None:
default_value = option['default']
elif option.get('action', None) in ['store_true']:
default_value = False
return default_value
return self._get_defaults(spec_default_getter)
@staticmethod
def get_env_option(name):
"""Try get """
return os.environ.get('IR_' + name.upper().replace('-', '_'))
def get_deprecated_args(self):
"""Returning dict with options which deprecate others. """
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
if option.get('deprecates') is not None:
result[option.get('deprecates')] = option.get('name')
return result
@staticmethod
def parse_env_variable_from_file(value):
if isinstance(value, str):
t = Template(value)
try:
value = t.substitute(os.environ)
except KeyError as undefined_var:
raise exceptions.IRAnswersFileEnvVarNotDefined(undefined_var)
return value
def get_answers_file_args(self, cli_args):
"""Resolve arguments' values from answers INI file. """
file_result = {}
args_to_remove = []
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
file_result[parser_name] = file_result.get(parser_name, {})
if option_spec and option_spec.get(
'action', '') == 'read-answers':
# Iterate over arguments supplied by file
for parsed_arg in parser_dict[arg_name]:
# Supplied arguments' value can be a list
if isinstance(parser_dict[arg_name][parsed_arg], list):
i = 0
# Iterate over the argument values list
for parsed_value in parser_dict[arg_name][parsed_arg]:
parser_dict[arg_name][parsed_arg][i] = \
SpecParser.parse_env_variable_from_file(parsed_value)
i += 1
else:
parser_dict[arg_name][parsed_arg] = \
SpecParser.parse_env_variable_from_file(parser_dict[arg_name][parsed_arg])
# we have config option. saving it.
self._convert_non_cli_args(
parser_name, parser_dict[arg_name])
dict_utils.dict_merge(
file_result[parser_name],
parser_dict[arg_name])
# remove from cli args
args_to_remove.append((parser_name, arg_name))
# remove parser dict outside loop to avoid iteration dict modification
for parser_name, arg_name in args_to_remove:
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in cli_args and spec_parser['name'] == parser_name:
parser_dict = cli_args[spec_parser['name']]
parser_dict.pop(arg_name)
break
return file_result
def generate_answers_file(self, cli_args, spec_defaults):
"""Generates answers INI file
:param cli_args: list, cli arguments.
:param spec_defaults: the default values.
"""
def put_option(config, parser_name, option_name, value):
for opt_help in option.get('help', '').split('\n'):
help_opt = '# ' + opt_help
# add help comment
if config.has_option(parser_name, help_opt):
config.remove_option(parser_name, help_opt)
config.set(
parser_name, help_opt)
if config.has_option(parser_name, option_name):
value = config.get(parser_name, option_name)
config.remove_option(parser_name, option_name)
config.set(
parser_name,
option_name,
str(value))
file_generated = False
# generate the answers file for all the parsers
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
if option_spec and option_spec.get(
'action', '') == 'generate-answers':
options_to_save = \
self.spec_helper.get_parser_option_specs(parser_name)
out_answers = configparser.ConfigParser(allow_no_value=True)
if not out_answers.has_section(parser_name):
out_answers.add_section(parser_name)
for option in options_to_save:
opt_name = option['name']
if opt_name in parser_dict:
put_option(
out_answers,
parser_name,
opt_name,
parser_dict[opt_name])
elif opt_name in spec_defaults[parser_name]:
put_option(
out_answers,
parser_name,
opt_name,
spec_defaults[parser_name][opt_name])
elif option.get('required', False):
put_option(
out_answers,
parser_name,
'# ' + opt_name,
"Required argument. "
"Edit with one of the allowed values OR "
"override with "
"CLI: --{}=<option>".format(opt_name))
# write to file
with open(arg_value, 'w') as answers_file:
out_answers.write(answers_file)
file_generated = True
return file_generated
def resolve_custom_types(self, args):
"""Transforms the arguments with custom types
:param args: the list of received arguments.
"""
for parser_name, parser_dict in args.items():
spec_complex_options = [opt for opt in
self.spec_helper.get_parser_option_specs(
parser_name) if
opt.get('type', None) in COMPLEX_TYPES]
for spec_option in spec_complex_options:
option_name = spec_option['name']
if option_name in parser_dict:
# we have custom type to resolve
type_name = spec_option['type']
option_value = parser_dict[option_name]
action = self.create_complex_argumet_type(
parser_name,
type_name,
option_name,
spec_option)
# resolving value
parser_dict[option_name] = action.resolve(option_value)
def create_complex_argumet_type(self, subcommand, type_name, option_name,
spec_option):
"""Build the complex argument type
:param subcommand: the command name
:param type_name: the complex type name
:param option_name: the option name
:param spec_option: option's specifications
:return: the complex type instance
"""
complex_action = COMPLEX_TYPES.get(
type_name, None)
if complex_action is None:
raise exceptions.SpecParserException(
"Unknown complex type: {}".format(type_name))
return complex_action(
option_name,
(self.vars, self.defaults, self.plugin_path),
subcommand,
spec_option)
def parse_args(self, arg_parser, args=None):
"""Parses all the arguments (cli, answers file)
:return: None, if ``--generate-answers-file`` in arg_arg_parser
:return: (dict, dict):
* command arguments dict (arguments to control the IR logic)
* nested arguments dict (arguments to pass to the playbooks)
"""
spec_defaults = self.get_spec_defaults()
cli_args = CliParser.parse_cli_input(arg_parser, args)
file_args = self.get_answers_file_args(cli_args)
# generate answers file and exit
if self.generate_answers_file(cli_args, spec_defaults):
LOG.warning("Answers file generated. Exiting.")
# print warnings when something was overridden from non-cli source.
self.validate_arg_sources(cli_args, file_args,
spec_defaults)
# print warnings for deprecated
self.validate_arg_deprecation(cli_args, file_args)
# now filter defaults to have only parser defined in cli
defaults = dict((key, spec_defaults[key])
for key in cli_args.keys() if
key in spec_defaults)
# copy cli args with the same name to all parser groups
self._merge_duplicated_cli_args(cli_args)
self._merge_duplicated_cli_args(file_args)
dict_utils.dict_merge(defaults, file_args)
dict_utils.dict_merge(defaults, cli_args)
self.validate_requires_args(defaults)
self.validate_length_args(defaults)
self.validate_choices_args(defaults)
self.validate_min_max_args(defaults)
# now resolve complex types.
self.resolve_custom_types(defaults)
nested, control, custom = \
self.get_nested_custom_and_control_args(defaults)
return nested, control, custom
def validate_arg_deprecation(self, cli_args, answer_file_args):
"""Validates and prints the deprecated arguments.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
"""
for deprecated, deprecates in self.get_deprecated_args().items():
for input_args in (answer_file_args.items(), cli_args.items()):
for command, command_dict in input_args:
if deprecated in command_dict:
if deprecates in command_dict:
raise exceptions.IRDeprecationException(
"[{}] Argument '{}' deprecates '{}',"
" please use only the new one.".format(
command, deprecated, deprecates))
if deprecated in answer_file_args[command]:
answer_file_args[command][deprecates] = \
answer_file_args[command][deprecated]
if deprecated in cli_args[command]:
cli_args[command][deprecates] = \
cli_args[command][deprecated]
LOG.warning(
"[{}] Argument '{}' was deprecated,"
" please use '{}'.".format(
command, deprecated, deprecates))
@staticmethod
def validate_arg_sources(cli_args, answer_file_args, spec_defaults):
"""Validates and prints the arguments' source.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
:param spec_defaults: the default values from spec files
"""
def show_diff(diff, command_name, cmd_dict, source_name):
if diff:
for arg_name in diff:
value = cmd_dict[arg_name]
LOG.info(
"[{}] Argument '{}' was set to"
" '{}' from the {} source.".format(
command_name, arg_name, value, source_name))
for command, command_dict in cli_args.items():
file_dict = answer_file_args.get(command, {})
file_diff = set(file_dict.keys()) - set(command_dict.keys())
show_diff(file_diff, command, file_dict, 'answers file')
def_dict = spec_defaults.get(command, {})
default_diff = set(def_dict.keys()) - set(
command_dict.keys()) - file_diff
show_diff(default_diff, command, def_dict, 'spec defaults')
def _get_conditionally_required_args(self, command_name, options_spec,
args):
"""List arguments with ``required_when`` condition matched.
:param command_name: the command name.
:param options_spec: the list of command spec options.
:param args: the received input arguments
:return: list, list of argument names with matched ``required_when``
condition
"""
opts_names = [option_spec['name'] for option_spec in options_spec]
missing_args = []
for option_spec in options_spec:
option_results = []
if option_spec and 'required_when' in option_spec:
req_when_args = [option_spec['required_when']] \
if not type(option_spec['required_when']) is list \
else option_spec['required_when']
# validate conditions
for req_when_arg in req_when_args:
splited_args_list = req_when_arg.split()
for idx, req_arg in enumerate(splited_args_list):
if req_arg in opts_names:
splited_args_list[idx] = \
args.get(command_name, {}).get(req_arg.strip())
if splited_args_list[idx] is None:
option_results.append(False)
break
splited_args_list[idx] = str(splited_args_list[idx])
if (splited_args_list[idx] not in ['and', 'or'] and
not any(
(c in '<>=') for c in splited_args_list[idx])):
splited_args_list[idx] = "'{0}'".format(
yaml.safe_load(splited_args_list[idx]))
else:
option_results.append(
eval(' '.join(splited_args_list)))
if all(option_results) and \
self.spec_helper.get_option_state(
command_name,
option_spec['name'],
args) == helper.OptionState['NOT_SET']:
missing_args.append(option_spec['name'])
return missing_args
def validate_requires_args(self, args):
"""Check if all the required arguments have been provided. """
silent_args = self.get_silent_args(args)
def validate_parser(parser_name, expected_options, parser_args):
"""Helper method to resolve dict_merge. """
result = collections.defaultdict(list)
condition_req_args = self._get_conditionally_required_args(
parser_name, expected_options, args)
for option in expected_options:
name = option['name']
# check required options.
if (option.get('required', False) and
name not in parser_args or
option['name'] in condition_req_args) and \
name not in silent_args:
result[parser_name].append(name)
return result
res = {}
for command_data in self.spec_helper.iterate_parsers():
cmd_name = command_data['name']
if cmd_name in args:
dict_utils.dict_merge(
res,
validate_parser(
cmd_name,
self.spec_helper.get_parser_option_specs(cmd_name),
args[cmd_name]))
missing_args = dict((cmd_name, args)
for cmd_name, args in res.items() if len(args) > 0)
if missing_args:
raise exceptions.IRRequiredArgsMissingException(missing_args)
def validate_length_args(self, args):
"""Check if value of arguments is not longer than length specified.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'length' not in spec_option:
# skip options that do not contain length
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve length
length = spec_option['length']
option_value = parser_dict[option_name]
if len(option_value) > int(length):
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
length
))
if invalid_options:
# raise exception with all arguments that exceed length
raise exceptions.IRInvalidLengthException(invalid_options)
def validate_choices_args(self, args):
"""Check if value of choice arguments is one of the available choices.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'choices' not in spec_option:
# skip options that do not contain choices
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve choices
choices = spec_option['choices']
option_value = parser_dict[option_name]
if option_value not in choices:
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
choices
))
if invalid_options:
# raise exception with all arguments that contain invalid choices
raise exceptions.IRInvalidChoiceException(invalid_options)
def validate_min_max_args(self, args):
"""Check if value of arguments is between minimum and maximum values.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if all([key not in spec_option
for key in ('maximum', 'minimum')]):
# skip options that do not contain minimum or maximum
continue
option_name = spec_option['name']
if option_name in parser_dict:
option_value = parser_dict[option_name]
min_value = spec_option.get('minimum')
max_value = spec_option.get('maximum')
# handle empty values in spec files which load as None
min_value = '' if 'minimum' in spec_option \
and min_value is None else min_value
max_value = '' if 'maximum' in spec_option \
and max_value is None else max_value
values = {
"value": option_value,
"maximum": max_value,
"minimum": min_value
}
# make sure that values are numbers
is_all_values_numbers = True
for name, num in values.items():
if num is not None \
and (isinstance(num, bool) or
not isinstance(num, (int, float))):
invalid_options.append((
option_name,
name,
"number",
type(num).__name__
))
is_all_values_numbers = False
if not is_all_values_numbers:
# don't continue to min max checks since some of the
# values are not numbers
continue
# check bigger than minimum
if min_value is not None and option_value < min_value:
invalid_options.append((
option_name,
"minimum",
min_value,
option_value
))
# check smaller than maximum
if max_value is not None and option_value > max_value:
invalid_options.append((
option_name,
"maximum",
max_value,
option_value
))
if invalid_options:
# raise exception with all arguments that violate the min/max constraints
raise exceptions.IRInvalidMinMaxRangeException(invalid_options)
def get_silent_args(self, args):
"""list of silenced argument
:param args: The received arguments.
:return: list, silenced argument names
"""
silent_args_names = []
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if arg_spec and 'silent' in arg_spec and \
self.spec_helper.get_option_state(
parser_name,
arg_name,
args) == helper.OptionState['IS_SET']:
silent_args_names.extend(arg_spec['silent'])
return list(set(silent_args_names))
def get_nested_custom_and_control_args(self, args):
"""Split input arguments to control nested and custom.
Control arguments: control the IR behavior. These arguments
will not be put into the spec yml file
Nested arguments: are used by the Ansible playbooks and will be put
into the spec yml file.
Custom arguments: Custom ansible variables to be used instead of the
normal nested usage.
:param args: the collected list of args.
:return: (dict, dict, dict): flat dicts (nested_args, control_args, custom_args)
"""
# returns flat dicts
nested = {}
control_args = {}
custom_args = {}
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if all([arg_spec, arg_spec.get('type', None),
arg_spec.get('type', None) in
[ctype_name for ctype_name, klass in
COMPLEX_TYPES.items() if klass.is_nested]
]) or ('is_shared_group_option' not in arg_spec):
if arg_name in nested:
LOG.warning(
"Duplicated nested argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, nested[arg_name]))
elif arg_name in custom_args:
LOG.warning(
"Duplicated custom argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, custom_args[arg_name]))
else:
if "ansible_variable" in arg_spec:
custom_args[arg_spec["ansible_variable"]] = arg_value
else:
nested[arg_name] = arg_value
else:
if arg_name in control_args:
LOG.warning(
"Duplicated control argument found: '{}'. Using "
"old value: '{}'".format(
arg_name, control_args[arg_name]))
else:
control_args[arg_name] = arg_value
return nested, control_args, custom_args
def _iterate_received_arguments(self, args):
"""Iterator helper method over all the received arguments
:return: yields tuple:
(spec name, spec dict,
argument name, argument value, argument spec)
"""
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in args:
parser_dict = args[spec_parser['name']]
for arg_name, arg_val in parser_dict.items():
arg_spec = self.spec_helper.get_option_spec(
spec_parser['name'], arg_name)
yield (spec_parser['name'], parser_dict,
arg_name, arg_val, arg_spec)
def _convert_non_cli_args(self, parser_name, values_dict):
"""Casts arguments to correct types by modifying values_dict param.
By default all the values are strings.
:param parser_name: The command name, e.g. main, virsh, ospd, etc
:param values_dict: The dict with the arguments
"""
for opt_name, opt_value in values_dict.items():
file_option_spec = self.spec_helper.get_option_spec(
parser_name, opt_name)
if file_option_spec.get('type', None) in ['int', ] or \
file_option_spec.get('action', None) in ['count', ]:
values_dict[opt_name] = int(opt_value)
def _merge_duplicated_cli_args(self, cli_args):
"""Merge duplicated arguments to all the parsers
This is needed to handle control args shared among several parsers,
for example: verbose, inventory
"""
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(cli_args):
for parser_name2, parser_dict2 in cli_args.items():
if all([parser_name2, parser_name != parser_name2,
arg_name not in parser_dict2]):
if self.spec_helper.get_option_spec(parser_name2,
arg_name):
parser_dict2[arg_name] = arg_value
|
infrared/core/inspector/inspector.py
|
codereval_python_data_217
|
List arguments with ``required_when`` condition matched.
:param command_name: the command name.
:param options_spec: the list of command spec options.
:param args: the received input arguments
:return: list, list of argument names with matched ``required_when``
condition
def _get_conditionally_required_args(self, command_name, options_spec,
args):
"""List arguments with ``required_when`` condition matched.
:param command_name: the command name.
:param options_spec: the list of command spec options.
:param args: the received input arguments
:return: list, list of argument names with matched ``required_when``
condition
"""
opts_names = [option_spec['name'] for option_spec in options_spec]
missing_args = []
for option_spec in options_spec:
option_results = []
if option_spec and 'required_when' in option_spec:
req_when_args = [option_spec['required_when']] \
if not type(option_spec['required_when']) is list \
else option_spec['required_when']
# validate conditions
for req_when_arg in req_when_args:
splited_args_list = req_when_arg.split()
for idx, req_arg in enumerate(splited_args_list):
if req_arg in opts_names:
splited_args_list[idx] = \
args.get(command_name, {}).get(req_arg.strip())
if splited_args_list[idx] is None:
option_results.append(False)
break
splited_args_list[idx] = str(splited_args_list[idx])
if (splited_args_list[idx] not in ['and', 'or'] and
not any(
(c in '<>=') for c in splited_args_list[idx])):
splited_args_list[idx] = "'{0}'".format(
yaml.safe_load(splited_args_list[idx]))
else:
option_results.append(
eval(' '.join(splited_args_list)))
if all(option_results) and \
self.spec_helper.get_option_state(
command_name,
option_spec['name'],
args) == helper.OptionState['NOT_SET']:
missing_args.append(option_spec['name'])
return missing_args
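A minimal standalone sketch of the ``required_when`` evaluation above, using hypothetical option names and values; the helper only mirrors the tokenise/quote/eval logic of the method and is not part of the real SpecParser API.
import yaml
def condition_matches(expression, received_args):
    """Return True when a 'required_when' style expression holds for received_args."""
    tokens = expression.split()
    for idx, token in enumerate(tokens):
        if token in received_args:
            value = received_args[token]
            if value is None:
                # option referenced by the condition was never set
                return False
            tokens[idx] = str(value)
        if tokens[idx] not in ('and', 'or') and not any(c in '<>=' for c in tokens[idx]):
            # quote bare words so eval() compares them as strings
            tokens[idx] = "'{0}'".format(yaml.safe_load(tokens[idx]))
    # eval() is used here only to mirror the original implementation
    return eval(' '.join(tokens))
# condition_matches("images-task == rpm", {"images-task": "rpm"})     -> True
# condition_matches("images-task == rpm", {"images-task": "import"})  -> False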
import collections
import os
from six.moves import configparser
from string import Template
import yaml
from infrared.core.cli.cli import CliParser
from infrared.core.cli.cli import COMPLEX_TYPES
from infrared.core.inspector import helper
from infrared.core.utils import dict_utils
from infrared.core.utils import exceptions
from infrared.core.utils import logger
LOG = logger.LOG
class SpecParser(object):
"""Parses input arguments from different sources (cli, answers file). """
@classmethod
def from_plugin(cls, subparser, plugin, base_groups):
"""Reads spec & vars from plugin and constructs the parser instance
:param subparser: argparse.subparser to extend
:param plugin: InfraredPlugin object
:param base_groups: dict, included groups
:return: SpecParser object based on given plugin spec & vars
"""
spec_dict = base_groups or {}
with open(plugin.spec) as stream:
spec = yaml.safe_load(stream) or {}
dict_utils.dict_merge(
base_groups,
spec,
dict_utils.ConflictResolver.unique_append_list_resolver)
# The "try-except" block here is for adding the spec file path if it
# includes an unsupported option type
try:
return SpecParser(subparser, spec_dict, plugin.vars_dir,
plugin.defaults_dir, plugin.path)
except exceptions.IRUnsupportedSpecOptionType as ex:
ex.message += ' in file: {}'.format(plugin.spec)
raise ex
def __init__(self, subparser, spec_dict, vars_dir, defaults_dir,
plugin_path):
"""Constructor.
:param subparser: argparse.subparser to extend
:param spec_dict: dict with CLI description
:param vars_dir: Path to plugin's vars dir
:param defaults_dir: Path to plugin's defaults dir
"""
self.vars = vars_dir
self.defaults = defaults_dir
self.plugin_path = plugin_path
self.spec_helper = helper.SpecDictHelper(spec_dict)
# create parser
self.parser = CliParser.create_parser(self, subparser)
def add_shared_groups(self, list_of_groups):
"""Adds the user defined shared groups
:param list_of_groups: list, of group dicts
"""
shared_groups = self.spec_helper.spec_dict.get('shared_groups', [])
shared_groups.extend(list_of_groups)
self.spec_helper.spec_dict['shared_groups'] = shared_groups
def _get_defaults(self, default_getter_func):
"""Resolve arguments' values from cli or answers file.
:param default_getter_func: callable. will be called for all the
available options in spec file.
"""
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
default_value = default_getter_func(option)
if default_value is not None:
sub = parser['name']
result[sub][option['name']] = default_value
return result
def get_spec_defaults(self):
"""Resolve arguments' values from spec and other sources. """
def spec_default_getter(option):
"""Getter function to retrieve the default value from spec.
:param option: argument name
"""
# first try to get environment variable with IR_ prefix
default_value = SpecParser.get_env_option(option['name'])
if default_value is not None:
LOG.info(
"[environ] Loading '{0}' default value"
" '{1}' from the environment variable".format(
option['name'], default_value))
elif option.get('default', None) is not None:
default_value = option['default']
elif option.get('action', None) in ['store_true']:
default_value = False
return default_value
return self._get_defaults(spec_default_getter)
@staticmethod
def get_env_option(name):
"""Try to get the option value from the IR_<OPTION-NAME> environment variable. """
return os.environ.get('IR_' + name.upper().replace('-', '_'))
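# Example (hypothetical option name): the lookup above maps an option such
# as 'build-timeout' to the environment variable 'IR_BUILD_TIMEOUT', so
# exporting IR_BUILD_TIMEOUT=30 makes get_env_option('build-timeout')
# return the string '30'.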
def get_deprecated_args(self):
"""Return a dict mapping deprecated option names to the options that deprecate them. """
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
if option.get('deprecates') is not None:
result[option.get('deprecates')] = option.get('name')
return result
@staticmethod
def parse_env_variable_from_file(value):
if isinstance(value, str):
t = Template(value)
try:
value = t.substitute(os.environ)
except KeyError as undefined_var:
raise exceptions.IRAnswersFileEnvVarNotDefined(undefined_var)
return value
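# Example (hypothetical variable name): string.Template substitutes $VAR or
# ${VAR} references from os.environ, so an answers-file value such as
# '${NODE_COUNT}-nodes' becomes '3-nodes' when NODE_COUNT=3 is exported,
# while an undefined variable raises IRAnswersFileEnvVarNotDefined through
# the KeyError handler above.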
def get_answers_file_args(self, cli_args):
"""Resolve arguments' values from answers INI file. """
file_result = {}
args_to_remove = []
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
file_result[parser_name] = file_result.get(parser_name, {})
if option_spec and option_spec.get(
'action', '') == 'read-answers':
# Iterate over arguments supplied by file
for parsed_arg in parser_dict[arg_name]:
# Supplied arguments' value can be a list
if isinstance(parser_dict[arg_name][parsed_arg], list):
i = 0
# Iterate over the argument values list
for parsed_value in parser_dict[arg_name][parsed_arg]:
parser_dict[arg_name][parsed_arg][i] = \
SpecParser.parse_env_variable_from_file(parsed_value)
i += 1
else:
parser_dict[arg_name][parsed_arg] = \
SpecParser.parse_env_variable_from_file(parser_dict[arg_name][parsed_arg])
# we have config option. saving it.
self._convert_non_cli_args(
parser_name, parser_dict[arg_name])
dict_utils.dict_merge(
file_result[parser_name],
parser_dict[arg_name])
# remove from cli args
args_to_remove.append((parser_name, arg_name))
# remove parser dict outside loop to avoid iteration dict modification
for parser_name, arg_name in args_to_remove:
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in cli_args and spec_parser['name'] == parser_name:
parser_dict = cli_args[spec_parser['name']]
parser_dict.pop(arg_name)
break
return file_result
def generate_answers_file(self, cli_args, spec_defaults):
"""Generates answers INI file
:param cli_args: list, cli arguments.
:param spec_defaults: the default values.
"""
def put_option(config, parser_name, option_name, value):
for opt_help in option.get('help', '').split('\n'):
help_opt = '# ' + opt_help
# add help comment
if config.has_option(parser_name, help_opt):
config.remove_option(parser_name, help_opt)
config.set(
parser_name, help_opt)
if config.has_option(parser_name, option_name):
value = config.get(parser_name, option_name)
config.remove_option(parser_name, option_name)
config.set(
parser_name,
option_name,
str(value))
file_generated = False
# generate the answers file for any parser that requested it
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
if option_spec and option_spec.get(
'action', '') == 'generate-answers':
options_to_save = \
self.spec_helper.get_parser_option_specs(parser_name)
out_answers = configparser.ConfigParser(allow_no_value=True)
if not out_answers.has_section(parser_name):
out_answers.add_section(parser_name)
for option in options_to_save:
opt_name = option['name']
if opt_name in parser_dict:
put_option(
out_answers,
parser_name,
opt_name,
parser_dict[opt_name])
elif opt_name in spec_defaults[parser_name]:
put_option(
out_answers,
parser_name,
opt_name,
spec_defaults[parser_name][opt_name])
elif option.get('required', False):
put_option(
out_answers,
parser_name,
'# ' + opt_name,
"Required argument. "
"Edit with one of the allowed values OR "
"override with "
"CLI: --{}=<option>".format(opt_name))
# write to file
with open(arg_value, 'w') as answers_file:
out_answers.write(answers_file)
file_generated = True
return file_generated
def resolve_custom_types(self, args):
"""Transforms the arguments with custom types
:param args: the list of received arguments.
"""
for parser_name, parser_dict in args.items():
spec_complex_options = [opt for opt in
self.spec_helper.get_parser_option_specs(
parser_name) if
opt.get('type', None) in COMPLEX_TYPES]
for spec_option in spec_complex_options:
option_name = spec_option['name']
if option_name in parser_dict:
# we have custom type to resolve
type_name = spec_option['type']
option_value = parser_dict[option_name]
action = self.create_complex_argumet_type(
parser_name,
type_name,
option_name,
spec_option)
# resolving value
parser_dict[option_name] = action.resolve(option_value)
def create_complex_argumet_type(self, subcommand, type_name, option_name,
spec_option):
"""Build the complex argument type
:param subcommand: the command name
:param type_name: the complex type name
:param option_name: the option name
:param spec_option: option's specifications
:return: the complex type instance
"""
complex_action = COMPLEX_TYPES.get(
type_name, None)
if complex_action is None:
raise exceptions.SpecParserException(
"Unknown complex type: {}".format(type_name))
return complex_action(
option_name,
(self.vars, self.defaults, self.plugin_path),
subcommand,
spec_option)
def parse_args(self, arg_parser, args=None):
"""Parses all the arguments (cli, answers file)
:return: None, if ``--generate-answers-file`` is in arg_parser
:return: (dict, dict, dict):
* nested arguments dict (arguments to pass to the playbooks)
* command arguments dict (arguments to control the IR logic)
* custom ansible variables dict (overrides of the nested usage)
"""
spec_defaults = self.get_spec_defaults()
cli_args = CliParser.parse_cli_input(arg_parser, args)
file_args = self.get_answers_file_args(cli_args)
# generate answers file and exit
if self.generate_answers_file(cli_args, spec_defaults):
LOG.warning("Answers file generated. Exiting.")
# print warnings when something was overridden from non-cli source.
self.validate_arg_sources(cli_args, file_args,
spec_defaults)
# print warnings for deprecated
self.validate_arg_deprecation(cli_args, file_args)
# now filter defaults to have only parser defined in cli
defaults = dict((key, spec_defaults[key])
for key in cli_args.keys() if
key in spec_defaults)
# copy cli args with the same name to all parser groups
self._merge_duplicated_cli_args(cli_args)
self._merge_duplicated_cli_args(file_args)
dict_utils.dict_merge(defaults, file_args)
dict_utils.dict_merge(defaults, cli_args)
self.validate_requires_args(defaults)
self.validate_length_args(defaults)
self.validate_choices_args(defaults)
self.validate_min_max_args(defaults)
# now resolve complex types.
self.resolve_custom_types(defaults)
nested, control, custom = \
self.get_nested_custom_and_control_args(defaults)
return nested, control, custom
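# Note on the precedence implied by the dict_merge calls above: answers-file
# values override spec defaults, and CLI values override both, because
# ``defaults`` is merged with ``file_args`` first and with ``cli_args`` last.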
def validate_arg_deprecation(self, cli_args, answer_file_args):
"""Validates and prints the deprecated arguments.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
"""
for deprecated, deprecates in self.get_deprecated_args().items():
for input_args in (answer_file_args.items(), cli_args.items()):
for command, command_dict in input_args:
if deprecated in command_dict:
if deprecates in command_dict:
raise exceptions.IRDeprecationException(
"[{}] Argument '{}' deprecates '{}',"
" please use only the new one.".format(
command, deprecated, deprecates))
if deprecated in answer_file_args[command]:
answer_file_args[command][deprecates] = \
answer_file_args[command][deprecated]
if deprecated in cli_args[command]:
cli_args[command][deprecates] = \
cli_args[command][deprecated]
LOG.warning(
"[{}] Argument '{}' was deprecated,"
" please use '{}'.".format(
command, deprecated, deprecates))
@staticmethod
def validate_arg_sources(cli_args, answer_file_args, spec_defaults):
"""Validates and prints the arguments' source.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
:param spec_defaults: the default values from spec files
"""
def show_diff(diff, command_name, cmd_dict, source_name):
if diff:
for arg_name in diff:
value = cmd_dict[arg_name]
LOG.info(
"[{}] Argument '{}' was set to"
" '{}' from the {} source.".format(
command_name, arg_name, value, source_name))
for command, command_dict in cli_args.items():
file_dict = answer_file_args.get(command, {})
file_diff = set(file_dict.keys()) - set(command_dict.keys())
show_diff(file_diff, command, file_dict, 'answers file')
def_dict = spec_defaults.get(command, {})
default_diff = set(def_dict.keys()) - set(
command_dict.keys()) - file_diff
show_diff(default_diff, command, def_dict, 'spec defaults')
def _get_conditionally_required_args(self, command_name, options_spec,
args):
"""List arguments with ``required_when`` condition matched.
:param command_name: the command name.
:param options_spec: the list of command spec options.
:param args: the received input arguments
:return: list, list of argument names with matched ``required_when``
condition
"""
opts_names = [option_spec['name'] for option_spec in options_spec]
missing_args = []
for option_spec in options_spec:
option_results = []
if option_spec and 'required_when' in option_spec:
req_when_args = [option_spec['required_when']] \
if not type(option_spec['required_when']) is list \
else option_spec['required_when']
# validate conditions
for req_when_arg in req_when_args:
splited_args_list = req_when_arg.split()
for idx, req_arg in enumerate(splited_args_list):
if req_arg in opts_names:
splited_args_list[idx] = \
args.get(command_name, {}).get(req_arg.strip())
if splited_args_list[idx] is None:
option_results.append(False)
break
splited_args_list[idx] = str(splited_args_list[idx])
if (splited_args_list[idx] not in ['and', 'or'] and
not any(
(c in '<>=') for c in splited_args_list[idx])):
splited_args_list[idx] = "'{0}'".format(
yaml.safe_load(splited_args_list[idx]))
else:
option_results.append(
eval(' '.join(splited_args_list)))
if all(option_results) and \
self.spec_helper.get_option_state(
command_name,
option_spec['name'],
args) == helper.OptionState['NOT_SET']:
missing_args.append(option_spec['name'])
return missing_args
def validate_requires_args(self, args):
"""Check if all the required arguments have been provided. """
silent_args = self.get_silent_args(args)
def validate_parser(parser_name, expected_options, parser_args):
"""Helper method to resolve dict_merge. """
result = collections.defaultdict(list)
condition_req_args = self._get_conditionally_required_args(
parser_name, expected_options, args)
for option in expected_options:
name = option['name']
# check required options.
if (option.get('required', False) and
name not in parser_args or
option['name'] in condition_req_args) and \
name not in silent_args:
result[parser_name].append(name)
return result
res = {}
for command_data in self.spec_helper.iterate_parsers():
cmd_name = command_data['name']
if cmd_name in args:
dict_utils.dict_merge(
res,
validate_parser(
cmd_name,
self.spec_helper.get_parser_option_specs(cmd_name),
args[cmd_name]))
missing_args = dict((cmd_name, args)
for cmd_name, args in res.items() if len(args) > 0)
if missing_args:
raise exceptions.IRRequiredArgsMissingException(missing_args)
def validate_length_args(self, args):
"""Check if value of arguments is not longer than length specified.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'length' not in spec_option:
# skip options that do not contain length
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve length
length = spec_option['length']
option_value = parser_dict[option_name]
if len(option_value) > int(length):
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
length
))
if invalid_options:
# raise exception with all arguments that exceed length
raise exceptions.IRInvalidLengthException(invalid_options)
def validate_choices_args(self, args):
"""Check if value of choice arguments is one of the available choices.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'choices' not in spec_option:
# skip options that do not contain choices
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve choices
choices = spec_option['choices']
option_value = parser_dict[option_name]
if option_value not in choices:
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
choices
))
if invalid_options:
# raise exception with all arguments that contain invalid choices
raise exceptions.IRInvalidChoiceException(invalid_options)
def validate_min_max_args(self, args):
"""Check if value of arguments is between minimum and maximum values.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if all([key not in spec_option
for key in ('maximum', 'minimum')]):
# skip options that do not contain minimum or maximum
continue
option_name = spec_option['name']
if option_name in parser_dict:
option_value = parser_dict[option_name]
min_value = spec_option.get('minimum')
max_value = spec_option.get('maximum')
# handle empty values in spec files which load as None
min_value = '' if 'minimum' in spec_option \
and min_value is None else min_value
max_value = '' if 'maximum' in spec_option \
and max_value is None else max_value
values = {
"value": option_value,
"maximum": max_value,
"minimum": min_value
}
# make sure that values are numbers
is_all_values_numbers = True
for name, num in values.items():
if num is not None \
and (isinstance(num, bool) or
not isinstance(num, (int, float))):
invalid_options.append((
option_name,
name,
"number",
type(num).__name__
))
is_all_values_numbers = False
if not is_all_values_numbers:
# don't continue to min max checks since some of the
# values are not numbers
continue
# check bigger than minimum
if min_value is not None and option_value < min_value:
invalid_options.append((
option_name,
"minimum",
min_value,
option_value
))
# check smaller than maximum
if max_value is not None and option_value > max_value:
invalid_options.append((
option_name,
"maximum",
max_value,
option_value
))
if invalid_options:
# raise exception with all arguments that violate the min/max constraints
raise exceptions.IRInvalidMinMaxRangeException(invalid_options)
def get_silent_args(self, args):
"""list of silenced argument
:param args: The received arguments.
:return: list, silenced argument names
"""
silent_args_names = []
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if arg_spec and 'silent' in arg_spec and \
self.spec_helper.get_option_state(
parser_name,
arg_name,
args) == helper.OptionState['IS_SET']:
silent_args_names.extend(arg_spec['silent'])
return list(set(silent_args_names))
def get_nested_custom_and_control_args(self, args):
"""Split input arguments to control nested and custom.
Control arguments: control the IR behavior. These arguments
will not be put into the spec yml file
Nested arguments: are used by the Ansible playbooks and will be put
into the spec yml file.
Custom arguments: Custom ansible variables to be used instead of the
normal nested usage.
:param args: the collected list of args.
:return: (dict, dict, dict): flat dicts (nested_args, control_args, custom_args)
"""
# returns flat dicts
nested = {}
control_args = {}
custom_args = {}
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if all([arg_spec, arg_spec.get('type', None),
arg_spec.get('type', None) in
[ctype_name for ctype_name, klass in
COMPLEX_TYPES.items() if klass.is_nested]
]) or ('is_shared_group_option' not in arg_spec):
if arg_name in nested:
LOG.warning(
"Duplicated nested argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, nested[arg_name]))
elif arg_name in custom_args:
LOG.warning(
"Duplicated custom argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, custom_args[arg_name]))
else:
if "ansible_variable" in arg_spec:
custom_args[arg_spec["ansible_variable"]] = arg_value
else:
nested[arg_name] = arg_value
else:
if arg_name in control_args:
LOG.warning(
"Duplicated control argument found: '{}'. Using "
"old value: '{}'".format(
arg_name, control_args[arg_name]))
else:
control_args[arg_name] = arg_value
return nested, control_args, custom_args
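# Illustration of the split above (hypothetical specs): an argument whose
# spec contains 'is_shared_group_option' and is not of a nested complex type
# goes to control_args; otherwise it goes to custom_args when the spec
# defines 'ansible_variable', and to nested in every other case.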
def _iterate_received_arguments(self, args):
"""Iterator helper method over all the received arguments
:return: yields tuple:
(spec name, spec dict,
argument name, argument value, argument spec)
"""
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in args:
parser_dict = args[spec_parser['name']]
for arg_name, arg_val in parser_dict.items():
arg_spec = self.spec_helper.get_option_spec(
spec_parser['name'], arg_name)
yield (spec_parser['name'], parser_dict,
arg_name, arg_val, arg_spec)
def _convert_non_cli_args(self, parser_name, values_dict):
"""Casts arguments to correct types by modifying values_dict param.
By default all the values are strings.
:param parser_name: The command name, e.g. main, virsh, ospd, etc
:param values_dict: The dict with the arguments
"""
for opt_name, opt_value in values_dict.items():
file_option_spec = self.spec_helper.get_option_spec(
parser_name, opt_name)
if file_option_spec.get('type', None) in ['int', ] or \
file_option_spec.get('action', None) in ['count', ]:
values_dict[opt_name] = int(opt_value)
def _merge_duplicated_cli_args(self, cli_args):
"""Merge duplicated arguments to all the parsers
This is needed to handle control args shared among several parsers,
for example: verbose, inventory
"""
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(cli_args):
for parser_name2, parser_dict2 in cli_args.items():
if all([parser_name2, parser_name != parser_name2,
arg_name not in parser_dict2]):
if self.spec_helper.get_option_spec(parser_name2,
arg_name):
parser_dict2[arg_name] = arg_value
|
infrared/core/inspector/inspector.py
|
codereval_python_data_218
|
Check if value of arguments is not longer than length specified.
:param args: The received arguments.
def validate_length_args(self, args):
"""Check if value of arguments is not longer than length specified.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'length' not in spec_option:
# skip options that do not contain length
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve length
length = spec_option['length']
option_value = parser_dict[option_name]
if len(option_value) > int(length):
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
length
))
return invalid_options
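A minimal standalone sketch of the length check above, using a hypothetical option spec and value rather than a real plugin spec:
spec_option = {'name': 'prefix', 'length': 8}          # hypothetical option spec
received = {'prefix': 'averylongprefixvalue'}          # hypothetical received value
invalid_options = []
if len(received[spec_option['name']]) > int(spec_option['length']):
    invalid_options.append((spec_option['name'],
                            received[spec_option['name']],
                            spec_option['length']))
# invalid_options -> [('prefix', 'averylongprefixvalue', 8)]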
import collections
import os
from six.moves import configparser
from string import Template
import yaml
from infrared.core.cli.cli import CliParser
from infrared.core.cli.cli import COMPLEX_TYPES
from infrared.core.inspector import helper
from infrared.core.utils import dict_utils
from infrared.core.utils import exceptions
from infrared.core.utils import logger
LOG = logger.LOG
class SpecParser(object):
"""Parses input arguments from different sources (cli, answers file). """
@classmethod
def from_plugin(cls, subparser, plugin, base_groups):
"""Reads spec & vars from plugin and constructs the parser instance
:param subparser: argparse.subparser to extend
:param plugin: InfraredPlugin object
:param base_groups: dict, included groups
:return: SpecParser object based on given plugin spec & vars
"""
spec_dict = base_groups or {}
with open(plugin.spec) as stream:
spec = yaml.safe_load(stream) or {}
dict_utils.dict_merge(
base_groups,
spec,
dict_utils.ConflictResolver.unique_append_list_resolver)
# The "try-except" block here is for adding the spec file path if it
# includes an unsupported option type
try:
return SpecParser(subparser, spec_dict, plugin.vars_dir,
plugin.defaults_dir, plugin.path)
except exceptions.IRUnsupportedSpecOptionType as ex:
ex.message += ' in file: {}'.format(plugin.spec)
raise ex
def __init__(self, subparser, spec_dict, vars_dir, defaults_dir,
plugin_path):
"""Constructor.
:param subparser: argparse.subparser to extend
:param spec_dict: dict with CLI description
:param vars_dir: Path to plugin's vars dir
:param defaults_dir: Path to plugin's defaults dir
"""
self.vars = vars_dir
self.defaults = defaults_dir
self.plugin_path = plugin_path
self.spec_helper = helper.SpecDictHelper(spec_dict)
# create parser
self.parser = CliParser.create_parser(self, subparser)
def add_shared_groups(self, list_of_groups):
"""Adds the user defined shared groups
:param list_of_groups: list, of group dicts
"""
shared_groups = self.spec_helper.spec_dict.get('shared_groups', [])
shared_groups.extend(list_of_groups)
self.spec_helper.spec_dict['shared_groups'] = shared_groups
def _get_defaults(self, default_getter_func):
"""Resolve arguments' values from cli or answers file.
:param default_getter_func: callable. will be called for all the
available options in spec file.
"""
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
default_value = default_getter_func(option)
if default_value is not None:
sub = parser['name']
result[sub][option['name']] = default_value
return result
def get_spec_defaults(self):
"""Resolve arguments' values from spec and other sources. """
def spec_default_getter(option):
"""Getter function to retrieve the default value from spec.
:param option: argument name
"""
# first try to get environment variable with IR_ prefix
default_value = SpecParser.get_env_option(option['name'])
if default_value is not None:
LOG.info(
"[environ] Loading '{0}' default value"
" '{1}' from the environment variable".format(
option['name'], default_value))
elif option.get('default', None) is not None:
default_value = option['default']
elif option.get('action', None) in ['store_true']:
default_value = False
return default_value
return self._get_defaults(spec_default_getter)
@staticmethod
def get_env_option(name):
"""Try to get the option value from the IR_<OPTION-NAME> environment variable. """
return os.environ.get('IR_' + name.upper().replace('-', '_'))
def get_deprecated_args(self):
"""Return a dict mapping deprecated option names to the options that deprecate them. """
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
if option.get('deprecates') is not None:
result[option.get('deprecates')] = option.get('name')
return result
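# Example (hypothetical option spec): an option defined as
#   {'name': 'registry-mirror', 'deprecates': 'mirror'}
# produces the entry {'mirror': 'registry-mirror'}, i.e. the deprecated
# name maps to the option that replaces it.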
@staticmethod
def parse_env_variable_from_file(value):
if isinstance(value, str):
t = Template(value)
try:
value = t.substitute(os.environ)
except KeyError as undefined_var:
raise exceptions.IRAnswersFileEnvVarNotDefined(undefined_var)
return value
def get_answers_file_args(self, cli_args):
"""Resolve arguments' values from answers INI file. """
file_result = {}
args_to_remove = []
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
file_result[parser_name] = file_result.get(parser_name, {})
if option_spec and option_spec.get(
'action', '') == 'read-answers':
# Iterate over arguments supplied by file
for parsed_arg in parser_dict[arg_name]:
# Supplied arguments' value can be a list
if isinstance(parser_dict[arg_name][parsed_arg], list):
i = 0
# Iterate over the argument values list
for parsed_value in parser_dict[arg_name][parsed_arg]:
parser_dict[arg_name][parsed_arg][i] = \
SpecParser.parse_env_variable_from_file(parsed_value)
i += 1
else:
parser_dict[arg_name][parsed_arg] = \
SpecParser.parse_env_variable_from_file(parser_dict[arg_name][parsed_arg])
# we have config option. saving it.
self._convert_non_cli_args(
parser_name, parser_dict[arg_name])
dict_utils.dict_merge(
file_result[parser_name],
parser_dict[arg_name])
# remove from cli args
args_to_remove.append((parser_name, arg_name))
# remove parser dict outside loop to avoid iteration dict modification
for parser_name, arg_name in args_to_remove:
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in cli_args and spec_parser['name'] == parser_name:
parser_dict = cli_args[spec_parser['name']]
parser_dict.pop(arg_name)
break
return file_result
def generate_answers_file(self, cli_args, spec_defaults):
"""Generates answers INI file
:param cli_args: list, cli arguments.
:param spec_defaults: the default values.
"""
def put_option(config, parser_name, option_name, value):
for opt_help in option.get('help', '').split('\n'):
help_opt = '# ' + opt_help
# add help comment
if config.has_option(parser_name, help_opt):
config.remove_option(parser_name, help_opt)
config.set(
parser_name, help_opt)
if config.has_option(parser_name, option_name):
value = config.get(parser_name, option_name)
config.remove_option(parser_name, option_name)
config.set(
parser_name,
option_name,
str(value))
file_generated = False
# generate the answers file for any parser that requested it
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
if option_spec and option_spec.get(
'action', '') == 'generate-answers':
options_to_save = \
self.spec_helper.get_parser_option_specs(parser_name)
out_answers = configparser.ConfigParser(allow_no_value=True)
if not out_answers.has_section(parser_name):
out_answers.add_section(parser_name)
for option in options_to_save:
opt_name = option['name']
if opt_name in parser_dict:
put_option(
out_answers,
parser_name,
opt_name,
parser_dict[opt_name])
elif opt_name in spec_defaults[parser_name]:
put_option(
out_answers,
parser_name,
opt_name,
spec_defaults[parser_name][opt_name])
elif option.get('required', False):
put_option(
out_answers,
parser_name,
'# ' + opt_name,
"Required argument. "
"Edit with one of the allowed values OR "
"override with "
"CLI: --{}=<option>".format(opt_name))
# write to file
with open(arg_value, 'w') as answers_file:
out_answers.write(answers_file)
file_generated = True
return file_generated
def resolve_custom_types(self, args):
"""Transforms the arguments with custom types
:param args: the list of received arguments.
"""
for parser_name, parser_dict in args.items():
spec_complex_options = [opt for opt in
self.spec_helper.get_parser_option_specs(
parser_name) if
opt.get('type', None) in COMPLEX_TYPES]
for spec_option in spec_complex_options:
option_name = spec_option['name']
if option_name in parser_dict:
# we have custom type to resolve
type_name = spec_option['type']
option_value = parser_dict[option_name]
action = self.create_complex_argumet_type(
parser_name,
type_name,
option_name,
spec_option)
# resolving value
parser_dict[option_name] = action.resolve(option_value)
def create_complex_argumet_type(self, subcommand, type_name, option_name,
spec_option):
"""Build the complex argument type
:param subcommand: the command name
:param type_name: the complex type name
:param option_name: the option name
:param spec_option: option's specifications
:return: the complex type instance
"""
complex_action = COMPLEX_TYPES.get(
type_name, None)
if complex_action is None:
raise exceptions.SpecParserException(
"Unknown complex type: {}".format(type_name))
return complex_action(
option_name,
(self.vars, self.defaults, self.plugin_path),
subcommand,
spec_option)
def parse_args(self, arg_parser, args=None):
"""Parses all the arguments (cli, answers file)
:return: None, if ``--generate-answers-file`` is in arg_parser
:return: (dict, dict, dict):
* nested arguments dict (arguments to pass to the playbooks)
* command arguments dict (arguments to control the IR logic)
* custom ansible variables dict (overrides of the nested usage)
"""
spec_defaults = self.get_spec_defaults()
cli_args = CliParser.parse_cli_input(arg_parser, args)
file_args = self.get_answers_file_args(cli_args)
# generate answers file and exit
if self.generate_answers_file(cli_args, spec_defaults):
LOG.warning("Answers file generated. Exiting.")
# print warnings when something was overridden from non-cli source.
self.validate_arg_sources(cli_args, file_args,
spec_defaults)
# print warnings for deprecated
self.validate_arg_deprecation(cli_args, file_args)
# now filter defaults to have only parser defined in cli
defaults = dict((key, spec_defaults[key])
for key in cli_args.keys() if
key in spec_defaults)
# copy cli args with the same name to all parser groups
self._merge_duplicated_cli_args(cli_args)
self._merge_duplicated_cli_args(file_args)
dict_utils.dict_merge(defaults, file_args)
dict_utils.dict_merge(defaults, cli_args)
self.validate_requires_args(defaults)
self.validate_length_args(defaults)
self.validate_choices_args(defaults)
self.validate_min_max_args(defaults)
# now resolve complex types.
self.resolve_custom_types(defaults)
nested, control, custom = \
self.get_nested_custom_and_control_args(defaults)
return nested, control, custom
def validate_arg_deprecation(self, cli_args, answer_file_args):
"""Validates and prints the deprecated arguments.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
"""
for deprecated, deprecates in self.get_deprecated_args().items():
for input_args in (answer_file_args.items(), cli_args.items()):
for command, command_dict in input_args:
if deprecated in command_dict:
if deprecates in command_dict:
raise exceptions.IRDeprecationException(
"[{}] Argument '{}' deprecates '{}',"
" please use only the new one.".format(
command, deprecated, deprecates))
if deprecated in answer_file_args[command]:
answer_file_args[command][deprecates] = \
answer_file_args[command][deprecated]
if deprecated in cli_args[command]:
cli_args[command][deprecates] = \
cli_args[command][deprecated]
LOG.warning(
"[{}] Argument '{}' was deprecated,"
" please use '{}'.".format(
command, deprecated, deprecates))
@staticmethod
def validate_arg_sources(cli_args, answer_file_args, spec_defaults):
"""Validates and prints the arguments' source.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
:param spec_defaults: the default values from spec files
"""
def show_diff(diff, command_name, cmd_dict, source_name):
if diff:
for arg_name in diff:
value = cmd_dict[arg_name]
LOG.info(
"[{}] Argument '{}' was set to"
" '{}' from the {} source.".format(
command_name, arg_name, value, source_name))
for command, command_dict in cli_args.items():
file_dict = answer_file_args.get(command, {})
file_diff = set(file_dict.keys()) - set(command_dict.keys())
show_diff(file_diff, command, file_dict, 'answers file')
def_dict = spec_defaults.get(command, {})
default_diff = set(def_dict.keys()) - set(
command_dict.keys()) - file_diff
show_diff(default_diff, command, def_dict, 'spec defaults')
def _get_conditionally_required_args(self, command_name, options_spec,
args):
"""List arguments with ``required_when`` condition matched.
:param command_name: the command name.
:param options_spec: the list of command spec options.
:param args: the received input arguments
:return: list, list of argument names with matched ``required_when``
condition
"""
opts_names = [option_spec['name'] for option_spec in options_spec]
missing_args = []
for option_spec in options_spec:
option_results = []
if option_spec and 'required_when' in option_spec:
req_when_args = [option_spec['required_when']] \
if not type(option_spec['required_when']) is list \
else option_spec['required_when']
# validate conditions
for req_when_arg in req_when_args:
splited_args_list = req_when_arg.split()
for idx, req_arg in enumerate(splited_args_list):
if req_arg in opts_names:
splited_args_list[idx] = \
args.get(command_name, {}).get(req_arg.strip())
if splited_args_list[idx] is None:
option_results.append(False)
break
splited_args_list[idx] = str(splited_args_list[idx])
if (splited_args_list[idx] not in ['and', 'or'] and
not any(
(c in '<>=') for c in splited_args_list[idx])):
splited_args_list[idx] = "'{0}'".format(
yaml.safe_load(splited_args_list[idx]))
else:
option_results.append(
eval(' '.join(splited_args_list)))
if all(option_results) and \
self.spec_helper.get_option_state(
command_name,
option_spec['name'],
args) == helper.OptionState['NOT_SET']:
missing_args.append(option_spec['name'])
return missing_args
def validate_requires_args(self, args):
"""Check if all the required arguments have been provided. """
silent_args = self.get_silent_args(args)
def validate_parser(parser_name, expected_options, parser_args):
"""Helper method to resolve dict_merge. """
result = collections.defaultdict(list)
condition_req_args = self._get_conditionally_required_args(
parser_name, expected_options, args)
for option in expected_options:
name = option['name']
# check required options.
if (option.get('required', False) and
name not in parser_args or
option['name'] in condition_req_args) and \
name not in silent_args:
result[parser_name].append(name)
return result
res = {}
for command_data in self.spec_helper.iterate_parsers():
cmd_name = command_data['name']
if cmd_name in args:
dict_utils.dict_merge(
res,
validate_parser(
cmd_name,
self.spec_helper.get_parser_option_specs(cmd_name),
args[cmd_name]))
missing_args = dict((cmd_name, args)
for cmd_name, args in res.items() if len(args) > 0)
if missing_args:
raise exceptions.IRRequiredArgsMissingException(missing_args)
def validate_length_args(self, args):
"""Check if value of arguments is not longer than length specified.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'length' not in spec_option:
# skip options that do not contain length
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve length
length = spec_option['length']
option_value = parser_dict[option_name]
if len(option_value) > int(length):
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
length
))
if invalid_options:
# raise exception with all arguments that exceed length
raise exceptions.IRInvalidLengthException(invalid_options)
def validate_choices_args(self, args):
"""Check if value of choice arguments is one of the available choices.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'choices' not in spec_option:
# skip options that do not contain choices
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve choices
choices = spec_option['choices']
option_value = parser_dict[option_name]
if option_value not in choices:
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
choices
))
if invalid_options:
# raise exception with all arguments that contain invalid choices
raise exceptions.IRInvalidChoiceException(invalid_options)
def validate_min_max_args(self, args):
"""Check if value of arguments is between minimum and maximum values.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if all([key not in spec_option
for key in ('maximum', 'minimum')]):
# skip options that do not contain minimum or maximum
continue
option_name = spec_option['name']
if option_name in parser_dict:
option_value = parser_dict[option_name]
min_value = spec_option.get('minimum')
max_value = spec_option.get('maximum')
# handle empty values in spec files which load as None
min_value = '' if 'minimum' in spec_option \
and min_value is None else min_value
max_value = '' if 'maximum' in spec_option \
and max_value is None else max_value
values = {
"value": option_value,
"maximum": max_value,
"minimum": min_value
}
# make sure that values are numbers
is_all_values_numbers = True
for name, num in values.items():
if num is not None \
and (isinstance(num, bool) or
not isinstance(num, (int, float))):
invalid_options.append((
option_name,
name,
"number",
type(num).__name__
))
is_all_values_numbers = False
if not is_all_values_numbers:
# don't continue to min max checks since some of the
# values are not numbers
continue
# check bigger than minimum
if min_value is not None and option_value < min_value:
invalid_options.append((
option_name,
"minimum",
min_value,
option_value
))
# check smaller than maximum
if max_value is not None and option_value > max_value:
invalid_options.append((
option_name,
"maximum",
max_value,
option_value
))
if invalid_options:
# raise exception with all arguments that violate the min/max constraints
raise exceptions.IRInvalidMinMaxRangeException(invalid_options)
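# Example (hypothetical spec): with {'name': 'workers', 'minimum': 1,
# 'maximum': 8}, a received value of 12 is reported as
# ('workers', 'maximum', 8, 12), while a non-numeric value such as 'many'
# is reported as ('workers', 'value', 'number', 'str') and skips the range
# checks entirely.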
def get_silent_args(self, args):
"""list of silenced argument
:param args: The received arguments.
:return: list, silenced argument names
"""
silent_args_names = []
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if arg_spec and 'silent' in arg_spec and \
self.spec_helper.get_option_state(
parser_name,
arg_name,
args) == helper.OptionState['IS_SET']:
silent_args_names.extend(arg_spec['silent'])
return list(set(silent_args_names))
def get_nested_custom_and_control_args(self, args):
"""Split input arguments to control nested and custom.
Control arguments: control the IR behavior. These arguments
will not be put into the spec yml file
Nested arguments: are used by the Ansible playbooks and will be put
into the spec yml file.
Custom arguments: Custom ansible variables to be used instead of the
normal nested usage.
:param args: the collected list of args.
:return: (dict, dict, dict): flat dicts (nested_args, control_args, custom_args)
"""
# returns flat dicts
nested = {}
control_args = {}
custom_args = {}
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if all([arg_spec, arg_spec.get('type', None),
arg_spec.get('type', None) in
[ctype_name for ctype_name, klass in
COMPLEX_TYPES.items() if klass.is_nested]
]) or ('is_shared_group_option' not in arg_spec):
if arg_name in nested:
LOG.warning(
"Duplicated nested argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, nested[arg_name]))
elif arg_name in custom_args:
LOG.warning(
"Duplicated custom argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, custom_args[arg_name]))
else:
if "ansible_variable" in arg_spec:
custom_args[arg_spec["ansible_variable"]] = arg_value
else:
nested[arg_name] = arg_value
else:
if arg_name in control_args:
LOG.warning(
"Duplicated control argument found: '{}'. Using "
"old value: '{}'".format(
arg_name, control_args[arg_name]))
else:
control_args[arg_name] = arg_value
return nested, control_args, custom_args
def _iterate_received_arguments(self, args):
"""Iterator helper method over all the received arguments
:return: yields tuple:
(spec name, spec dict,
argument name, argument value, argument spec)
"""
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in args:
parser_dict = args[spec_parser['name']]
for arg_name, arg_val in parser_dict.items():
arg_spec = self.spec_helper.get_option_spec(
spec_parser['name'], arg_name)
yield (spec_parser['name'], parser_dict,
arg_name, arg_val, arg_spec)
def _convert_non_cli_args(self, parser_name, values_dict):
"""Casts arguments to correct types by modifying values_dict param.
By default all the values are strings.
:param parser_name: The command name, e.g. main, virsh, ospd, etc
:param values_dict: The dict with the arguments
"""
for opt_name, opt_value in values_dict.items():
file_option_spec = self.spec_helper.get_option_spec(
parser_name, opt_name)
if file_option_spec.get('type', None) in ['int', ] or \
file_option_spec.get('action', None) in ['count', ]:
values_dict[opt_name] = int(opt_value)
def _merge_duplicated_cli_args(self, cli_args):
"""Merge duplicated arguments to all the parsers
This is needed to handle control args shared among several parsers,
for example: verbose, inventory
"""
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(cli_args):
for parser_name2, parser_dict2 in cli_args.items():
if all([parser_name2, parser_name != parser_name2,
arg_name not in parser_dict2]):
if self.spec_helper.get_option_spec(parser_name2,
arg_name):
parser_dict2[arg_name] = arg_value
|
infrared/core/inspector/inspector.py
|
codereval_python_data_219
|
Check if value of choice arguments is one of the available choices.
:param args: The received arguments.
def validate_choices_args(self, args):
"""Check if value of choice arguments is one of the available choices.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'choices' not in spec_option:
# skip options that do not contain choices
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve choices
choices = spec_option['choices']
option_value = parser_dict[option_name]
if option_value not in choices:
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
choices
))
return invalid_options
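A minimal standalone sketch of the choices check above, using a hypothetical option spec and value rather than a real plugin spec:
spec_option = {'name': 'network-backend', 'choices': ['ovs', 'ovn']}  # hypothetical
received = {'network-backend': 'linuxbridge'}                         # hypothetical
invalid_options = []
if received[spec_option['name']] not in spec_option['choices']:
    invalid_options.append((spec_option['name'],
                            received[spec_option['name']],
                            spec_option['choices']))
# invalid_options -> [('network-backend', 'linuxbridge', ['ovs', 'ovn'])]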
import collections
import os
from six.moves import configparser
from string import Template
import yaml
from infrared.core.cli.cli import CliParser
from infrared.core.cli.cli import COMPLEX_TYPES
from infrared.core.inspector import helper
from infrared.core.utils import dict_utils
from infrared.core.utils import exceptions
from infrared.core.utils import logger
LOG = logger.LOG
class SpecParser(object):
"""Parses input arguments from different sources (cli, answers file). """
@classmethod
def from_plugin(cls, subparser, plugin, base_groups):
"""Reads spec & vars from plugin and constructs the parser instance
:param subparser: argparse.subparser to extend
:param plugin: InfraredPlugin object
:param base_groups: dict, included groups
:return: SpecParser object based on given plugin spec & vars
"""
spec_dict = base_groups or {}
with open(plugin.spec) as stream:
spec = yaml.safe_load(stream) or {}
dict_utils.dict_merge(
base_groups,
spec,
dict_utils.ConflictResolver.unique_append_list_resolver)
# The "try-except" block here is for adding the spec file path if it
# includes an unsupported option type
try:
return SpecParser(subparser, spec_dict, plugin.vars_dir,
plugin.defaults_dir, plugin.path)
except exceptions.IRUnsupportedSpecOptionType as ex:
ex.message += ' in file: {}'.format(plugin.spec)
raise ex
def __init__(self, subparser, spec_dict, vars_dir, defaults_dir,
plugin_path):
"""Constructor.
:param subparser: argparse.subparser to extend
:param spec_dict: dict with CLI description
:param vars_dir: Path to plugin's vars dir
:param defaults_dir: Path to plugin's defaults dir
"""
self.vars = vars_dir
self.defaults = defaults_dir
self.plugin_path = plugin_path
self.spec_helper = helper.SpecDictHelper(spec_dict)
# create parser
self.parser = CliParser.create_parser(self, subparser)
def add_shared_groups(self, list_of_groups):
"""Adds the user defined shared groups
:param list_of_groups: list, of group dicts
"""
shared_groups = self.spec_helper.spec_dict.get('shared_groups', [])
shared_groups.extend(list_of_groups)
self.spec_helper.spec_dict['shared_groups'] = shared_groups
def _get_defaults(self, default_getter_func):
"""Resolve arguments' values from cli or answers file.
:param default_getter_func: callable. will be called for all the
available options in spec file.
"""
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
default_value = default_getter_func(option)
if default_value is not None:
sub = parser['name']
result[sub][option['name']] = default_value
return result
def get_spec_defaults(self):
"""Resolve arguments' values from spec and other sources. """
def spec_default_getter(option):
"""Getter function to retrieve the default value from spec.
:param option: argument name
"""
# first try to get environment variable with IR_ prefix
default_value = SpecParser.get_env_option(option['name'])
if default_value is not None:
LOG.info(
"[environ] Loading '{0}' default value"
" '{1}' from the environment variable".format(
option['name'], default_value))
elif option.get('default', None) is not None:
default_value = option['default']
elif option.get('action', None) in ['store_true']:
default_value = False
return default_value
return self._get_defaults(spec_default_getter)
@staticmethod
def get_env_option(name):
"""Try to get the option value from the IR_<OPTION-NAME> environment variable. """
return os.environ.get('IR_' + name.upper().replace('-', '_'))
def get_deprecated_args(self):
"""Return a dict mapping deprecated option names to the options that deprecate them. """
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
if option.get('deprecates') is not None:
result[option.get('deprecates')] = option.get('name')
return result
@staticmethod
def parse_env_variable_from_file(value):
if isinstance(value, str):
t = Template(value)
try:
value = t.substitute(os.environ)
except KeyError as undefined_var:
raise exceptions.IRAnswersFileEnvVarNotDefined(undefined_var)
return value
def get_answers_file_args(self, cli_args):
"""Resolve arguments' values from answers INI file. """
file_result = {}
args_to_remove = []
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
file_result[parser_name] = file_result.get(parser_name, {})
if option_spec and option_spec.get(
'action', '') == 'read-answers':
# Iterate over arguments supplied by file
for parsed_arg in parser_dict[arg_name]:
# Supplied arguments' value can be a list
if isinstance(parser_dict[arg_name][parsed_arg], list):
i = 0
# Iterate over the argument values list
for parsed_value in parser_dict[arg_name][parsed_arg]:
parser_dict[arg_name][parsed_arg][i] = \
SpecParser.parse_env_variable_from_file(parsed_value)
i += 1
else:
parser_dict[arg_name][parsed_arg] = \
SpecParser.parse_env_variable_from_file(parser_dict[arg_name][parsed_arg])
# we have config option. saving it.
self._convert_non_cli_args(
parser_name, parser_dict[arg_name])
dict_utils.dict_merge(
file_result[parser_name],
parser_dict[arg_name])
# remove from cli args
args_to_remove.append((parser_name, arg_name))
# remove parser dict outside loop to avoid iteration dict modification
for parser_name, arg_name in args_to_remove:
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in cli_args and spec_parser['name'] == parser_name:
parser_dict = cli_args[spec_parser['name']]
parser_dict.pop(arg_name)
break
return file_result
def generate_answers_file(self, cli_args, spec_defaults):
"""Generates answers INI file
:param cli_args: list, cli arguments.
:param spec_defaults: the default values.
"""
def put_option(config, parser_name, option_name, value):
for opt_help in option.get('help', '').split('\n'):
help_opt = '# ' + opt_help
# add help comment
if config.has_option(parser_name, help_opt):
config.remove_option(parser_name, help_opt)
config.set(
parser_name, help_opt)
if config.has_option(parser_name, option_name):
value = config.get(parser_name, option_name)
config.remove_option(parser_name, option_name)
config.set(
parser_name,
option_name,
str(value))
file_generated = False
# generate the answers file for all the parsers
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
if option_spec and option_spec.get(
'action', '') == 'generate-answers':
options_to_save = \
self.spec_helper.get_parser_option_specs(parser_name)
out_answers = configparser.ConfigParser(allow_no_value=True)
if not out_answers.has_section(parser_name):
out_answers.add_section(parser_name)
for option in options_to_save:
opt_name = option['name']
if opt_name in parser_dict:
put_option(
out_answers,
parser_name,
opt_name,
parser_dict[opt_name])
elif opt_name in spec_defaults[parser_name]:
put_option(
out_answers,
parser_name,
opt_name,
spec_defaults[parser_name][opt_name])
elif option.get('required', False):
put_option(
out_answers,
parser_name,
'# ' + opt_name,
"Required argument. "
"Edit with one of the allowed values OR "
"override with "
"CLI: --{}=<option>".format(opt_name))
# write to file
with open(arg_value, 'w') as answers_file:
out_answers.write(answers_file)
file_generated = True
return file_generated
def resolve_custom_types(self, args):
"""Transforms the arguments with custom types
:param args: the list of received arguments.
"""
for parser_name, parser_dict in args.items():
spec_complex_options = [opt for opt in
self.spec_helper.get_parser_option_specs(
parser_name) if
opt.get('type', None) in COMPLEX_TYPES]
for spec_option in spec_complex_options:
option_name = spec_option['name']
if option_name in parser_dict:
# we have custom type to resolve
type_name = spec_option['type']
option_value = parser_dict[option_name]
action = self.create_complex_argumet_type(
parser_name,
type_name,
option_name,
spec_option)
# resolving value
parser_dict[option_name] = action.resolve(option_value)
def create_complex_argumet_type(self, subcommand, type_name, option_name,
spec_option):
"""Build the complex argument type
:param subcommand: the command name
:param type_name: the complex type name
:param option_name: the option name
:param spec_option: option's specifications
:return: the complex type instance
"""
complex_action = COMPLEX_TYPES.get(
type_name, None)
if complex_action is None:
raise exceptions.SpecParserException(
"Unknown complex type: {}".format(type_name))
return complex_action(
option_name,
(self.vars, self.defaults, self.plugin_path),
subcommand,
spec_option)
def parse_args(self, arg_parser, args=None):
"""Parses all the arguments (cli, answers file)
:return: None, if ``--generate-answers-file`` is in arg_parser
:return: (dict, dict, dict):
* nested arguments dict (arguments to pass to the playbooks)
* control arguments dict (arguments to control the IR logic)
* custom arguments dict (custom ansible variables)
"""
spec_defaults = self.get_spec_defaults()
cli_args = CliParser.parse_cli_input(arg_parser, args)
file_args = self.get_answers_file_args(cli_args)
# generate answers file and exit
if self.generate_answers_file(cli_args, spec_defaults):
LOG.warning("Answers file generated. Exiting.")
# print warnings when something was overridden from non-cli source.
self.validate_arg_sources(cli_args, file_args,
spec_defaults)
# print warnings for deprecated
self.validate_arg_deprecation(cli_args, file_args)
# now filter defaults to have only parser defined in cli
defaults = dict((key, spec_defaults[key])
for key in cli_args.keys() if
key in spec_defaults)
# copy cli args with the same name to all parser groups
self._merge_duplicated_cli_args(cli_args)
self._merge_duplicated_cli_args(file_args)
dict_utils.dict_merge(defaults, file_args)
dict_utils.dict_merge(defaults, cli_args)
self.validate_requires_args(defaults)
self.validate_length_args(defaults)
self.validate_choices_args(defaults)
self.validate_min_max_args(defaults)
# now resolve complex types.
self.resolve_custom_types(defaults)
nested, control, custom = \
self.get_nested_custom_and_control_args(defaults)
return nested, control, custom
def validate_arg_deprecation(self, cli_args, answer_file_args):
"""Validates and prints the deprecated arguments.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
"""
for deprecated, deprecates in self.get_deprecated_args().items():
for input_args in (answer_file_args.items(), cli_args.items()):
for command, command_dict in input_args:
if deprecated in command_dict:
if deprecates in command_dict:
raise exceptions.IRDeprecationException(
"[{}] Argument '{}' deprecates '{}',"
" please use only the new one.".format(
command, deprecated, deprecates))
if deprecated in answer_file_args[command]:
answer_file_args[command][deprecates] = \
answer_file_args[command][deprecated]
if deprecated in cli_args[command]:
cli_args[command][deprecates] = \
cli_args[command][deprecated]
LOG.warning(
"[{}] Argument '{}' was deprecated,"
" please use '{}'.".format(
command, deprecated, deprecates))
@staticmethod
def validate_arg_sources(cli_args, answer_file_args, spec_defaults):
"""Validates and prints the arguments' source.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
:param spec_defaults: the default values from spec files
"""
def show_diff(diff, command_name, cmd_dict, source_name):
if diff:
for arg_name in diff:
value = cmd_dict[arg_name]
LOG.info(
"[{}] Argument '{}' was set to"
" '{}' from the {} source.".format(
command_name, arg_name, value, source_name))
for command, command_dict in cli_args.items():
file_dict = answer_file_args.get(command, {})
file_diff = set(file_dict.keys()) - set(command_dict.keys())
show_diff(file_diff, command, file_dict, 'answers file')
def_dict = spec_defaults.get(command, {})
default_diff = set(def_dict.keys()) - set(
command_dict.keys()) - file_diff
show_diff(default_diff, command, def_dict, 'spec defaults')
def _get_conditionally_required_args(self, command_name, options_spec,
args):
"""List arguments with ``required_when`` condition matched.
:param command_name: the command name.
:param options_spec: the list of command spec options.
:param args: the received input arguments
:return: list, list of argument names with matched ``required_when``
condition
"""
opts_names = [option_spec['name'] for option_spec in options_spec]
missing_args = []
for option_spec in options_spec:
option_results = []
if option_spec and 'required_when' in option_spec:
req_when_args = [option_spec['required_when']] \
if not type(option_spec['required_when']) is list \
else option_spec['required_when']
# validate conditions
for req_when_arg in req_when_args:
splited_args_list = req_when_arg.split()
for idx, req_arg in enumerate(splited_args_list):
if req_arg in opts_names:
splited_args_list[idx] = \
args.get(command_name, {}).get(req_arg.strip())
if splited_args_list[idx] is None:
option_results.append(False)
break
splited_args_list[idx] = str(splited_args_list[idx])
if (splited_args_list[idx] not in ['and', 'or'] and
not any(
(c in '<>=') for c in splited_args_list[idx])):
splited_args_list[idx] = "'{0}'".format(
yaml.safe_load(splited_args_list[idx]))
else:
option_results.append(
eval(' '.join(splited_args_list)))
if all(option_results) and \
self.spec_helper.get_option_state(
command_name,
option_spec['name'],
args) == helper.OptionState['NOT_SET']:
missing_args.append(option_spec['name'])
return missing_args
def validate_requires_args(self, args):
"""Check if all the required arguments have been provided. """
silent_args = self.get_silent_args(args)
def validate_parser(parser_name, expected_options, parser_args):
"""Helper method to resolve dict_merge. """
result = collections.defaultdict(list)
condition_req_args = self._get_conditionally_required_args(
parser_name, expected_options, args)
for option in expected_options:
name = option['name']
# check required options.
if (option.get('required', False) and
name not in parser_args or
option['name'] in condition_req_args) and \
name not in silent_args:
result[parser_name].append(name)
return result
res = {}
for command_data in self.spec_helper.iterate_parsers():
cmd_name = command_data['name']
if cmd_name in args:
dict_utils.dict_merge(
res,
validate_parser(
cmd_name,
self.spec_helper.get_parser_option_specs(cmd_name),
args[cmd_name]))
missing_args = dict((cmd_name, args)
for cmd_name, args in res.items() if len(args) > 0)
if missing_args:
raise exceptions.IRRequiredArgsMissingException(missing_args)
def validate_length_args(self, args):
"""Check if value of arguments is not longer than length specified.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'length' not in spec_option:
# skip options that do not contain length
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve length
length = spec_option['length']
option_value = parser_dict[option_name]
if len(option_value) > int(length):
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
length
))
if invalid_options:
# raise exception with all arguments that exceed length
raise exceptions.IRInvalidLengthException(invalid_options)
def validate_choices_args(self, args):
"""Check if value of choice arguments is one of the available choices.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'choices' not in spec_option:
# skip options that do not contain choices
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve choices
choices = spec_option['choices']
option_value = parser_dict[option_name]
if option_value not in choices:
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
choices
))
if invalid_options:
# raise exception with all arguments that contain invalid choices
raise exceptions.IRInvalidChoiceException(invalid_options)
def validate_min_max_args(self, args):
"""Check if value of arguments is between minimum and maximum values.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if all([key not in spec_option
for key in ('maximum', 'minimum')]):
# skip options that do not contain minimum or maximum
continue
option_name = spec_option['name']
if option_name in parser_dict:
option_value = parser_dict[option_name]
min_value = spec_option.get('minimum')
max_value = spec_option.get('maximum')
# handle empty values in spec files which load as None
min_value = '' if 'minimum' in spec_option \
and min_value is None else min_value
max_value = '' if 'maximum' in spec_option \
and max_value is None else max_value
values = {
"value": option_value,
"maximum": max_value,
"minimum": min_value
}
# make sure that values are numbers
is_all_values_numbers = True
for name, num in values.items():
if num is not None \
and (isinstance(num, bool) or
not isinstance(num, (int, float))):
invalid_options.append((
option_name,
name,
"number",
type(num).__name__
))
is_all_values_numbers = False
if not is_all_values_numbers:
# don't continue to min max checks since some of the
# values are not numbers
continue
# check bigger than minimum
if min_value is not None and option_value < min_value:
invalid_options.append((
option_name,
"minimum",
min_value,
option_value
))
# check smaller than maximum
if max_value is not None and option_value > max_value:
invalid_options.append((
option_name,
"maximum",
max_value,
option_value
))
if invalid_options:
# raise exception with all arguments that are outside the allowed range
raise exceptions.IRInvalidMinMaxRangeException(invalid_options)
def get_silent_args(self, args):
"""list of silenced argument
:param args: The received arguments.
:return: list, slienced argument names
"""
silent_args_names = []
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if arg_spec and 'silent' in arg_spec and \
self.spec_helper.get_option_state(
parser_name,
arg_name,
args) == helper.OptionState['IS_SET']:
silent_args_names.extend(arg_spec['silent'])
return list(set(silent_args_names))
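# Illustrative sketch (assumed option names): if the spec for "image-url"
# declares silent: ["image-file"], then whenever "image-url" is explicitly
# set, the name "image-file" is returned here and later excluded from the
# required-argument checks in validate_requires_args.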
def get_nested_custom_and_control_args(self, args):
"""Split input arguments to control nested and custom.
Controls arguments: control the IR behavior. These arguments
will not be put into the spec yml file
Nested arguments: are used by the Ansible playbooks and will be put
into the spec yml file.
Custom arguments: Custom ansible variables to be used instead of the
normal nested usage.
:param args: the collected list of args.
:return: (dict, dict): flat dicts (control_args, nested_args)
"""
# returns flat dicts
nested = {}
control_args = {}
custom_args = {}
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if all([arg_spec, arg_spec.get('type', None),
arg_spec.get('type', None) in
[ctype_name for ctype_name, klass in
COMPLEX_TYPES.items() if klass.is_nested]
]) or ('is_shared_group_option' not in arg_spec):
if arg_name in nested:
LOG.warning(
"Duplicated nested argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, nested[arg_name]))
elif arg_name in custom_args:
LOG.warning(
"Duplicated custom argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, custom_args[arg_name]))
else:
if "ansible_variable" in arg_spec:
custom_args[arg_spec["ansible_variable"]] = arg_value
else:
nested[arg_name] = arg_value
else:
if arg_name in control_args:
LOG.warning(
"Duplicated control argument found: '{}'. Using "
"old value: '{}'".format(
arg_name, control_args[arg_name]))
else:
control_args[arg_name] = arg_value
return nested, control_args, custom_args
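# Illustrative sketch of the split above (assumed arguments): an option
# that is not a shared-group option (or whose complex type is nested) goes
# to `nested`, unless its spec carries "ansible_variable", in which case
# the value lands in `custom_args` under that variable name; all other
# shared options (e.g. verbosity, inventory) go to `control_args`.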
def _iterate_received_arguments(self, args):
"""Iterator helper method over all the received arguments
:return: yields tuple:
(spec name, spec dict,
argument name, argument value, argument spec)
"""
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in args:
parser_dict = args[spec_parser['name']]
for arg_name, arg_val in parser_dict.items():
arg_spec = self.spec_helper.get_option_spec(
spec_parser['name'], arg_name)
yield (spec_parser['name'], parser_dict,
arg_name, arg_val, arg_spec)
def _convert_non_cli_args(self, parser_name, values_dict):
"""Casts arguments to correct types by modifying values_dict param.
By default all the values are strings.
:param parser_name: The command name, e.g. main, virsh, ospd, etc
:param values_dict: The dict with the arguments
"""
for opt_name, opt_value in values_dict.items():
file_option_spec = self.spec_helper.get_option_spec(
parser_name, opt_name)
if file_option_spec.get('type', None) in ['int', ] or \
file_option_spec.get('action', None) in ['count', ]:
values_dict[opt_name] = int(opt_value)
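# Illustrative sketch (assumed option): for a spec option "cpus" of type
# "int", an answers-file value of "4" (a string) is converted in place to
# the integer 4; options with action "count" are converted the same way.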
def _merge_duplicated_cli_args(self, cli_args):
"""Merge duplicated arguments to all the parsers
This is needed to handle control args shared among several parsers,
for example verbose and inventory.
"""
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(cli_args):
for parser_name2, parser_dict2 in cli_args.items():
if all([parser_name2, parser_name != parser_name2,
arg_name not in parser_dict2]):
if self.spec_helper.get_option_spec(parser_name2,
arg_name):
parser_dict2[arg_name] = arg_value
|
infrared/core/inspector/inspector.py
|
codereval_python_data_220
|
Check if value of arguments is between minimum and maximum values.
:param args: The received arguments.
def validate_min_max_args(self, args):
"""Check if value of arguments is between minimum and maximum values.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if all([key not in spec_option
for key in ('maximum', 'minimum')]):
# skip options that do not contain minimum or maximum
continue
option_name = spec_option['name']
if option_name in parser_dict:
option_value = parser_dict[option_name]
min_value = spec_option.get('minimum')
max_value = spec_option.get('maximum')
# handle empty values in spec files which load as None
min_value = '' if 'minimum' in spec_option \
and min_value is None else min_value
max_value = '' if 'maximum' in spec_option \
and max_value is None else max_value
values = {
"value": option_value,
"maximum": max_value,
"minimum": min_value
}
# make sure that values are numbers
is_all_values_numbers = True
for name, num in values.items():
if num is not None \
and (isinstance(num, bool) or
not isinstance(num, (int, float))):
invalid_options.append((
option_name,
name,
"number",
type(num).__name__
))
is_all_values_numbers = False
if not is_all_values_numbers:
# don't continue to min max checks since some of the
# values are not numbers
continue
# check bigger than minimum
if min_value is not None and option_value < min_value:
invalid_options.append((
option_name,
"minimum",
min_value,
option_value
))
# check smaller than maximum
if max_value is not None and option_value > max_value:
invalid_options.append((
option_name,
"maximum",
max_value,
option_value
))
return invalid_options
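# Illustrative sketch (assumed spec/values, not part of the original code):
# with a spec option {"name": "cpus", "minimum": 4} and received args
# {"virsh": {"cpus": 2}}, the checks above append ("cpus", "minimum", 4, 2)
# to invalid_options and return it; a non-numeric value such as "two"
# instead yields ("cpus", "value", "number", "str").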
import collections
import os
from six.moves import configparser
from string import Template
import yaml
from infrared.core.cli.cli import CliParser
from infrared.core.cli.cli import COMPLEX_TYPES
from infrared.core.inspector import helper
from infrared.core.utils import dict_utils
from infrared.core.utils import exceptions
from infrared.core.utils import logger
LOG = logger.LOG
class SpecParser(object):
"""Parses input arguments from different sources (cli, answers file). """
@classmethod
def from_plugin(cls, subparser, plugin, base_groups):
"""Reads spec & vars from plugin and constructs the parser instance
:param subparser: argparse.subparser to extend
:param plugin: InfraredPlugin object
:param base_groups: dict, included groups
:return: SpecParser object based on given plugin spec & vars
"""
spec_dict = base_groups or {}
with open(plugin.spec) as stream:
spec = yaml.safe_load(stream) or {}
dict_utils.dict_merge(
base_groups,
spec,
dict_utils.ConflictResolver.unique_append_list_resolver)
# The "try-excpet" block here is for adding spec file path if it
# includes an unsupported option type
try:
return SpecParser(subparser, spec_dict, plugin.vars_dir,
plugin.defaults_dir, plugin.path)
except exceptions.IRUnsupportedSpecOptionType as ex:
ex.message += ' in file: {}'.format(plugin.spec)
raise ex
def __init__(self, subparser, spec_dict, vars_dir, defaults_dir,
plugin_path):
"""Constructor.
:param subparser: argparse.subparser to extend
:param spec_dict: dict with CLI description
:param vars_dir: Path to plugin's vars dir
:param defaults_dir: Path to plugin's defaults dir
"""
self.vars = vars_dir
self.defaults = defaults_dir
self.plugin_path = plugin_path
self.spec_helper = helper.SpecDictHelper(spec_dict)
# create parser
self.parser = CliParser.create_parser(self, subparser)
def add_shared_groups(self, list_of_groups):
"""Adds the user defined shared groups
:param list_of_groups: list, of group dicts
"""
shared_groups = self.spec_helper.spec_dict.get('shared_groups', [])
shared_groups.extend(list_of_groups)
self.spec_helper.spec_dict['shared_groups'] = shared_groups
def _get_defaults(self, default_getter_func):
"""Resolve arguments' values from cli or answers file.
:param default_getter_func: callable. will be called for all the
available options in spec file.
"""
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
default_value = default_getter_func(option)
if default_value is not None:
sub = parser['name']
result[sub][option['name']] = default_value
return result
def get_spec_defaults(self):
"""Resolve arguments' values from spec and other sources. """
def spec_default_getter(option):
"""Getter function to retrieve the default value from spec.
:param option: argument name
"""
# first try to get environment variable with IR_ prefix
default_value = SpecParser.get_env_option(option['name'])
if default_value is not None:
LOG.info(
"[environ] Loading '{0}' default value"
" '{1}' from the environment variable".format(
option['name'], default_value))
elif option.get('default', None) is not None:
default_value = option['default']
elif option.get('action', None) in ['store_true']:
default_value = False
return default_value
return self._get_defaults(spec_default_getter)
@staticmethod
def get_env_option(name):
"""Try get """
return os.environ.get('IR_' + name.upper().replace('-', '_'))
def get_deprecated_args(self):
"""Returning dict with options which deprecate others. """
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
if option.get('deprecates') is not None:
result[option.get('deprecates')] = option.get('name')
return result
@staticmethod
def parse_env_variable_from_file(value):
if isinstance(value, str):
t = Template(value)
try:
value = t.substitute(os.environ)
except KeyError as undefined_var:
raise exceptions.IRAnswersFileEnvVarNotDefined(undefined_var)
return value
def get_answers_file_args(self, cli_args):
"""Resolve arguments' values from answers INI file. """
file_result = {}
args_to_remove = []
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
file_result[parser_name] = file_result.get(parser_name, {})
if option_spec and option_spec.get(
'action', '') == 'read-answers':
# Iterate over arguments supplied by file
for parsed_arg in parser_dict[arg_name]:
# Supplied arguments' value can be a list
if isinstance(parser_dict[arg_name][parsed_arg], list):
i = 0
# Iterate over the argument's list of values
for parsed_value in parser_dict[arg_name][parsed_arg]:
parser_dict[arg_name][parsed_arg][i] = \
SpecParser.parse_env_variable_from_file(parsed_value)
i += 1
else:
parser_dict[arg_name][parsed_arg] = \
SpecParser.parse_env_variable_from_file(parser_dict[arg_name][parsed_arg])
# we have config option. saving it.
self._convert_non_cli_args(
parser_name, parser_dict[arg_name])
dict_utils.dict_merge(
file_result[parser_name],
parser_dict[arg_name])
# remove from cli args
args_to_remove.append((parser_name, arg_name))
# remove from the parser dict outside the loop to avoid modifying the dict during iteration
for parser_name, arg_name in args_to_remove:
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in cli_args and spec_parser['name'] == parser_name:
parser_dict = cli_args[spec_parser['name']]
parser_dict.pop(arg_name)
break
return file_result
def generate_answers_file(self, cli_args, spec_defaults):
"""Generates answers INI file
:param cli_args: list, cli arguments.
:param spec_defaults: the default values.
"""
def put_option(config, parser_name, option_name, value):
for opt_help in option.get('help', '').split('\n'):
help_opt = '# ' + opt_help
# add help comment
if config.has_option(parser_name, help_opt):
config.remove_option(parser_name, help_opt)
config.set(
parser_name, help_opt)
if config.has_option(parser_name, option_name):
value = config.get(parser_name, option_name)
config.remove_option(parser_name, option_name)
config.set(
parser_name,
option_name,
str(value))
file_generated = False
# generate the answers file for all the parsers
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
if option_spec and option_spec.get(
'action', '') == 'generate-answers':
options_to_save = \
self.spec_helper.get_parser_option_specs(parser_name)
out_answers = configparser.ConfigParser(allow_no_value=True)
if not out_answers.has_section(parser_name):
out_answers.add_section(parser_name)
for option in options_to_save:
opt_name = option['name']
if opt_name in parser_dict:
put_option(
out_answers,
parser_name,
opt_name,
parser_dict[opt_name])
elif opt_name in spec_defaults[parser_name]:
put_option(
out_answers,
parser_name,
opt_name,
spec_defaults[parser_name][opt_name])
elif option.get('required', False):
put_option(
out_answers,
parser_name,
'# ' + opt_name,
"Required argument. "
"Edit with one of the allowed values OR "
"override with "
"CLI: --{}=<option>".format(opt_name))
# write to file
with open(arg_value, 'w') as answers_file:
out_answers.write(answers_file)
file_generated = True
return file_generated
def resolve_custom_types(self, args):
"""Transforms the arguments with custom types
:param args: the list of received arguments.
"""
for parser_name, parser_dict in args.items():
spec_complex_options = [opt for opt in
self.spec_helper.get_parser_option_specs(
parser_name) if
opt.get('type', None) in COMPLEX_TYPES]
for spec_option in spec_complex_options:
option_name = spec_option['name']
if option_name in parser_dict:
# we have custom type to resolve
type_name = spec_option['type']
option_value = parser_dict[option_name]
action = self.create_complex_argumet_type(
parser_name,
type_name,
option_name,
spec_option)
# resolving value
parser_dict[option_name] = action.resolve(option_value)
def create_complex_argumet_type(self, subcommand, type_name, option_name,
spec_option):
"""Build the complex argument type
:param subcommand: the command name
:param type_name: the complex type name
:param option_name: the option name
:param spec_option: option's specifications
:return: the complex type instance
"""
complex_action = COMPLEX_TYPES.get(
type_name, None)
if complex_action is None:
raise exceptions.SpecParserException(
"Unknown complex type: {}".format(type_name))
return complex_action(
option_name,
(self.vars, self.defaults, self.plugin_path),
subcommand,
spec_option)
def parse_args(self, arg_parser, args=None):
"""Parses all the arguments (cli, answers file)
:return: None, if ``--generate-answers-file`` is in arg_parser
:return: (dict, dict, dict):
* nested arguments dict (arguments to pass to the playbooks)
* control arguments dict (arguments to control the IR logic)
* custom arguments dict (custom ansible variables)
"""
spec_defaults = self.get_spec_defaults()
cli_args = CliParser.parse_cli_input(arg_parser, args)
file_args = self.get_answers_file_args(cli_args)
# generate answers file and exit
if self.generate_answers_file(cli_args, spec_defaults):
LOG.warning("Answers file generated. Exiting.")
# print warnings when something was overridden from non-cli source.
self.validate_arg_sources(cli_args, file_args,
spec_defaults)
# print warnings for deprecated
self.validate_arg_deprecation(cli_args, file_args)
# now filter defaults to have only parser defined in cli
defaults = dict((key, spec_defaults[key])
for key in cli_args.keys() if
key in spec_defaults)
# copy cli args with the same name to all parser groups
self._merge_duplicated_cli_args(cli_args)
self._merge_duplicated_cli_args(file_args)
dict_utils.dict_merge(defaults, file_args)
dict_utils.dict_merge(defaults, cli_args)
self.validate_requires_args(defaults)
self.validate_length_args(defaults)
self.validate_choices_args(defaults)
self.validate_min_max_args(defaults)
# now resolve complex types.
self.resolve_custom_types(defaults)
nested, control, custom = \
self.get_nested_custom_and_control_args(defaults)
return nested, control, custom
def validate_arg_deprecation(self, cli_args, answer_file_args):
"""Validates and prints the deprecated arguments.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
"""
for deprecated, deprecates in self.get_deprecated_args().items():
for input_args in (answer_file_args.items(), cli_args.items()):
for command, command_dict in input_args:
if deprecated in command_dict:
if deprecates in command_dict:
raise exceptions.IRDeprecationException(
"[{}] Argument '{}' deprecates '{}',"
" please use only the new one.".format(
command, deprecated, deprecates))
if deprecated in answer_file_args[command]:
answer_file_args[command][deprecates] = \
answer_file_args[command][deprecated]
if deprecated in cli_args[command]:
cli_args[command][deprecates] = \
cli_args[command][deprecated]
LOG.warning(
"[{}] Argument '{}' was deprecated,"
" please use '{}'.".format(
command, deprecated, deprecates))
@staticmethod
def validate_arg_sources(cli_args, answer_file_args, spec_defaults):
"""Validates and prints the arguments' source.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
:param spec_defaults: the default values from spec files
"""
def show_diff(diff, command_name, cmd_dict, source_name):
if diff:
for arg_name in diff:
value = cmd_dict[arg_name]
LOG.info(
"[{}] Argument '{}' was set to"
" '{}' from the {} source.".format(
command_name, arg_name, value, source_name))
for command, command_dict in cli_args.items():
file_dict = answer_file_args.get(command, {})
file_diff = set(file_dict.keys()) - set(command_dict.keys())
show_diff(file_diff, command, file_dict, 'answers file')
def_dict = spec_defaults.get(command, {})
default_diff = set(def_dict.keys()) - set(
command_dict.keys()) - file_diff
show_diff(default_diff, command, def_dict, 'spec defaults')
def _get_conditionally_required_args(self, command_name, options_spec,
args):
"""List arguments with ``required_when`` condition matched.
:param command_name: the command name.
:param options_spec: the list of command spec options.
:param args: the received input arguments
:return: list, list of argument names with matched ``required_when``
condition
"""
opts_names = [option_spec['name'] for option_spec in options_spec]
missing_args = []
for option_spec in options_spec:
option_results = []
if option_spec and 'required_when' in option_spec:
req_when_args = [option_spec['required_when']] \
if not type(option_spec['required_when']) is list \
else option_spec['required_when']
# validate conditions
for req_when_arg in req_when_args:
splited_args_list = req_when_arg.split()
for idx, req_arg in enumerate(splited_args_list):
if req_arg in opts_names:
splited_args_list[idx] = \
args.get(command_name, {}).get(req_arg.strip())
if splited_args_list[idx] is None:
option_results.append(False)
break
splited_args_list[idx] = str(splited_args_list[idx])
if (splited_args_list[idx] not in ['and', 'or'] and
not any(
(c in '<>=') for c in splited_args_list[idx])):
splited_args_list[idx] = "'{0}'".format(
yaml.safe_load(splited_args_list[idx]))
else:
option_results.append(
eval(' '.join(splited_args_list)))
if all(option_results) and \
self.spec_helper.get_option_state(
command_name,
option_spec['name'],
args) == helper.OptionState['NOT_SET']:
missing_args.append(option_spec['name'])
return missing_args
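# Illustrative sketch (assumed spec): for an option declaring
# required_when: "deployment-files == virt", option names in the condition
# are replaced by their received values, plain operands are quoted, and the
# resulting expression is eval()'d; if it evaluates to True and the option
# itself was not set, its name is reported as missing.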
def validate_requires_args(self, args):
"""Check if all the required arguments have been provided. """
silent_args = self.get_silent_args(args)
def validate_parser(parser_name, expected_options, parser_args):
"""Helper method to resolve dict_merge. """
result = collections.defaultdict(list)
condition_req_args = self._get_conditionally_required_args(
parser_name, expected_options, args)
for option in expected_options:
name = option['name']
# check required options.
if (option.get('required', False) and
name not in parser_args or
option['name'] in condition_req_args) and \
name not in silent_args:
result[parser_name].append(name)
return result
res = {}
for command_data in self.spec_helper.iterate_parsers():
cmd_name = command_data['name']
if cmd_name in args:
dict_utils.dict_merge(
res,
validate_parser(
cmd_name,
self.spec_helper.get_parser_option_specs(cmd_name),
args[cmd_name]))
missing_args = dict((cmd_name, args)
for cmd_name, args in res.items() if len(args) > 0)
if missing_args:
raise exceptions.IRRequiredArgsMissingException(missing_args)
def validate_length_args(self, args):
"""Check if value of arguments is not longer than length specified.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'length' not in spec_option:
# skip options that do not contain length
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve length
length = spec_option['length']
option_value = parser_dict[option_name]
if len(option_value) > int(length):
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
length
))
if invalid_options:
# raise exception with all arguments that exceed length
raise exceptions.IRInvalidLengthException(invalid_options)
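# Illustrative sketch (assumed option): a spec option {"name": "prefix",
# "length": 3} with the received value "abcd" produces the invalid-option
# tuple ("prefix", "abcd", 3), and IRInvalidLengthException is raised with
# that list.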
def validate_choices_args(self, args):
"""Check if value of choice arguments is one of the available choices.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'choices' not in spec_option:
# skip options that do not contain choices
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve choices
choices = spec_option['choices']
option_value = parser_dict[option_name]
if option_value not in choices:
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
choices
))
if invalid_options:
# raise exception with all arguments that contain invalid choices
raise exceptions.IRInvalidChoiceException(invalid_options)
def validate_min_max_args(self, args):
"""Check if value of arguments is between minimum and maximum values.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if all([key not in spec_option
for key in ('maximum', 'minimum')]):
# skip options that do not contain minimum or maximum
continue
option_name = spec_option['name']
if option_name in parser_dict:
option_value = parser_dict[option_name]
min_value = spec_option.get('minimum')
max_value = spec_option.get('maximum')
# handle empty values in spec files which load as None
min_value = '' if 'minimum' in spec_option \
and min_value is None else min_value
max_value = '' if 'maximum' in spec_option \
and max_value is None else max_value
values = {
"value": option_value,
"maximum": max_value,
"minimum": min_value
}
# make sure that values are numbers
is_all_values_numbers = True
for name, num in values.items():
if num is not None \
and (isinstance(num, bool) or
not isinstance(num, (int, float))):
invalid_options.append((
option_name,
name,
"number",
type(num).__name__
))
is_all_values_numbers = False
if not is_all_values_numbers:
# don't continue to min max checks since some of the
# values are not numbers
continue
# check bigger than minimum
if min_value is not None and option_value < min_value:
invalid_options.append((
option_name,
"minimum",
min_value,
option_value
))
# check smaller than maximum
if max_value is not None and option_value > max_value:
invalid_options.append((
option_name,
"maximum",
max_value,
option_value
))
if invalid_options:
# raise exception with all arguments that are outside the allowed range
raise exceptions.IRInvalidMinMaxRangeException(invalid_options)
def get_silent_args(self, args):
"""list of silenced argument
:param args: The received arguments.
:return: list, slienced argument names
"""
silent_args_names = []
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if arg_spec and 'silent' in arg_spec and \
self.spec_helper.get_option_state(
parser_name,
arg_name,
args) == helper.OptionState['IS_SET']:
silent_args_names.extend(arg_spec['silent'])
return list(set(silent_args_names))
def get_nested_custom_and_control_args(self, args):
"""Split input arguments to control nested and custom.
Controls arguments: control the IR behavior. These arguments
will not be put into the spec yml file
Nested arguments: are used by the Ansible playbooks and will be put
into the spec yml file.
Custom arguments: Custom ansible variables to be used instead of the
normal nested usage.
:param args: the collected list of args.
:return: (dict, dict): flat dicts (control_args, nested_args)
"""
# returns flat dicts
nested = {}
control_args = {}
custom_args = {}
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if all([arg_spec, arg_spec.get('type', None),
arg_spec.get('type', None) in
[ctype_name for ctype_name, klass in
COMPLEX_TYPES.items() if klass.is_nested]
]) or ('is_shared_group_option' not in arg_spec):
if arg_name in nested:
LOG.warning(
"Duplicated nested argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, nested[arg_name]))
elif arg_name in custom_args:
LOG.warning(
"Duplicated custom argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, custom_args[arg_name]))
else:
if "ansible_variable" in arg_spec:
custom_args[arg_spec["ansible_variable"]] = arg_value
else:
nested[arg_name] = arg_value
else:
if arg_name in control_args:
LOG.warning(
"Duplicated control argument found: '{}'. Using "
"old value: '{}'".format(
arg_name, control_args[arg_name]))
else:
control_args[arg_name] = arg_value
return nested, control_args, custom_args
def _iterate_received_arguments(self, args):
"""Iterator helper method over all the received arguments
:return: yields tuple:
(spec name, spec dict,
argument name, argument value, argument spec)
"""
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in args:
parser_dict = args[spec_parser['name']]
for arg_name, arg_val in parser_dict.items():
arg_spec = self.spec_helper.get_option_spec(
spec_parser['name'], arg_name)
yield (spec_parser['name'], parser_dict,
arg_name, arg_val, arg_spec)
def _convert_non_cli_args(self, parser_name, values_dict):
"""Casts arguments to correct types by modifying values_dict param.
By default all the values are strings.
:param parser_name: The command name, e.g. main, virsh, ospd, etc
:param values_dict: The dict with the arguments
"""
for opt_name, opt_value in values_dict.items():
file_option_spec = self.spec_helper.get_option_spec(
parser_name, opt_name)
if file_option_spec.get('type', None) in ['int', ] or \
file_option_spec.get('action', None) in ['count', ]:
values_dict[opt_name] = int(opt_value)
def _merge_duplicated_cli_args(self, cli_args):
"""Merge duplicated arguments to all the parsers
This is needed to handle control args shared among several parsers,
for example verbose and inventory.
"""
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(cli_args):
for parser_name2, parser_dict2 in cli_args.items():
if all([parser_name2, parser_name != parser_name2,
arg_name not in parser_dict2]):
if self.spec_helper.get_option_spec(parser_name2,
arg_name):
parser_dict2[arg_name] = arg_value
|
infrared/core/inspector/inspector.py
|
codereval_python_data_221
|
Build the complex argument type
:param subcommand: the command name
:param type_name: the complex type name
:param option_name: the option name
:param spec_option: option's specifications
:return: the complex type instance
def create_complex_argumet_type(self, subcommand, type_name, option_name,
spec_option):
"""Build the complex argument type
:param subcommand: the command name
:param type_name: the complex type name
:param option_name: the option name
:param spec_option: option's specifications
:return: the complex type instance
"""
complex_action = COMPLEX_TYPES.get(
type_name, None)
if complex_action is None:
raise exceptions.SpecParserException(
"Unknown complex type: {}".format(type_name))
return complex_action(
option_name,
(self.vars, self.defaults, self.plugin_path),
subcommand,
spec_option)
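# Illustrative sketch (the type name "ListValue" is assumed here): a spec
# option of that type resolves to the class registered under the same key
# in COMPLEX_TYPES and is instantiated as
# klass(option_name, (vars_dir, defaults_dir, plugin_path), subcommand,
# spec_option); an unregistered type name raises
# SpecParserException("Unknown complex type: ...").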
import collections
import os
from six.moves import configparser
from string import Template
import yaml
from infrared.core.cli.cli import CliParser
from infrared.core.cli.cli import COMPLEX_TYPES
from infrared.core.inspector import helper
from infrared.core.utils import dict_utils
from infrared.core.utils import exceptions
from infrared.core.utils import logger
LOG = logger.LOG
class SpecParser(object):
"""Parses input arguments from different sources (cli, answers file). """
@classmethod
def from_plugin(cls, subparser, plugin, base_groups):
"""Reads spec & vars from plugin and constructs the parser instance
:param subparser: argparse.subparser to extend
:param plugin: InfraredPlugin object
:param base_groups: dict, included groups
:return: SpecParser object based on given plugin spec & vars
"""
spec_dict = base_groups or {}
with open(plugin.spec) as stream:
spec = yaml.safe_load(stream) or {}
dict_utils.dict_merge(
base_groups,
spec,
dict_utils.ConflictResolver.unique_append_list_resolver)
# The "try-excpet" block here is for adding spec file path if it
# includes an unsupported option type
try:
return SpecParser(subparser, spec_dict, plugin.vars_dir,
plugin.defaults_dir, plugin.path)
except exceptions.IRUnsupportedSpecOptionType as ex:
ex.message += ' in file: {}'.format(plugin.spec)
raise ex
def __init__(self, subparser, spec_dict, vars_dir, defaults_dir,
plugin_path):
"""Constructor.
:param subparser: argparse.subparser to extend
:param spec_dict: dict with CLI description
:param vars_dir: Path to plugin's vars dir
:param defaults_dir: Path to plugin's defaults dir
"""
self.vars = vars_dir
self.defaults = defaults_dir
self.plugin_path = plugin_path
self.spec_helper = helper.SpecDictHelper(spec_dict)
# create parser
self.parser = CliParser.create_parser(self, subparser)
def add_shared_groups(self, list_of_groups):
"""Adds the user defined shared groups
:param list_of_groups: list, of group dicts
"""
shared_groups = self.spec_helper.spec_dict.get('shared_groups', [])
shared_groups.extend(list_of_groups)
self.spec_helper.spec_dict['shared_groups'] = shared_groups
def _get_defaults(self, default_getter_func):
"""Resolve arguments' values from cli or answers file.
:param default_getter_func: callable. will be called for all the
available options in spec file.
"""
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
default_value = default_getter_func(option)
if default_value is not None:
sub = parser['name']
result[sub][option['name']] = default_value
return result
def get_spec_defaults(self):
"""Resolve arguments' values from spec and other sources. """
def spec_default_getter(option):
"""Getter function to retrieve the default value from spec.
:param option: argument name
"""
# first try to get environment variable with IR_ prefix
default_value = SpecParser.get_env_option(option['name'])
if default_value is not None:
LOG.info(
"[environ] Loading '{0}' default value"
" '{1}' from the environment variable".format(
option['name'], default_value))
elif option.get('default', None) is not None:
default_value = option['default']
elif option.get('action', None) in ['store_true']:
default_value = False
return default_value
return self._get_defaults(spec_default_getter)
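# Illustrative sketch of the precedence implemented above: for an option
# "version", an exported IR_VERSION wins over the spec's "default" field,
# which in turn wins over the implicit False used for store_true options;
# options whose default resolves to None are omitted from the result.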
@staticmethod
def get_env_option(name):
"""Try get """
return os.environ.get('IR_' + name.upper().replace('-', '_'))
def get_deprecated_args(self):
"""Returning dict with options which deprecate others. """
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
if option.get('deprecates') is not None:
result[option.get('deprecates')] = option.get('name')
return result
@staticmethod
def parse_env_variable_from_file(value):
if isinstance(value, str):
t = Template(value)
try:
value = t.substitute(os.environ)
except KeyError as undefined_var:
raise exceptions.IRAnswersFileEnvVarNotDefined(undefined_var)
return value
def get_answers_file_args(self, cli_args):
"""Resolve arguments' values from answers INI file. """
file_result = {}
args_to_remove = []
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
file_result[parser_name] = file_result.get(parser_name, {})
if option_spec and option_spec.get(
'action', '') == 'read-answers':
# Iterate over arguments supplied by file
for parsed_arg in parser_dict[arg_name]:
# Supplied arguments' value can be a list
if isinstance(parser_dict[arg_name][parsed_arg], list):
i = 0
# Iterate over the argument's list of values
for parsed_value in parser_dict[arg_name][parsed_arg]:
parser_dict[arg_name][parsed_arg][i] = \
SpecParser.parse_env_variable_from_file(parsed_value)
i += 1
else:
parser_dict[arg_name][parsed_arg] = \
SpecParser.parse_env_variable_from_file(parser_dict[arg_name][parsed_arg])
# we have config option. saving it.
self._convert_non_cli_args(
parser_name, parser_dict[arg_name])
dict_utils.dict_merge(
file_result[parser_name],
parser_dict[arg_name])
# remove from cli args
args_to_remove.append((parser_name, arg_name))
# remove from the parser dict outside the loop to avoid modifying the dict during iteration
for parser_name, arg_name in args_to_remove:
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in cli_args and spec_parser['name'] == parser_name:
parser_dict = cli_args[spec_parser['name']]
parser_dict.pop(arg_name)
break
return file_result
def generate_answers_file(self, cli_args, spec_defaults):
"""Generates answers INI file
:param cli_args: list, cli arguments.
:param spec_defaults: the default values.
"""
def put_option(config, parser_name, option_name, value):
for opt_help in option.get('help', '').split('\n'):
help_opt = '# ' + opt_help
# add help comment
if config.has_option(parser_name, help_opt):
config.remove_option(parser_name, help_opt)
config.set(
parser_name, help_opt)
if config.has_option(parser_name, option_name):
value = config.get(parser_name, option_name)
config.remove_option(parser_name, option_name)
config.set(
parser_name,
option_name,
str(value))
file_generated = False
# generate the answers file for all the parsers
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
if option_spec and option_spec.get(
'action', '') == 'generate-answers':
options_to_save = \
self.spec_helper.get_parser_option_specs(parser_name)
out_answers = configparser.ConfigParser(allow_no_value=True)
if not out_answers.has_section(parser_name):
out_answers.add_section(parser_name)
for option in options_to_save:
opt_name = option['name']
if opt_name in parser_dict:
put_option(
out_answers,
parser_name,
opt_name,
parser_dict[opt_name])
elif opt_name in spec_defaults[parser_name]:
put_option(
out_answers,
parser_name,
opt_name,
spec_defaults[parser_name][opt_name])
elif option.get('required', False):
put_option(
out_answers,
parser_name,
'# ' + opt_name,
"Required argument. "
"Edit with one of the allowed values OR "
"override with "
"CLI: --{}=<option>".format(opt_name))
# write to file
with open(arg_value, 'w') as answers_file:
out_answers.write(answers_file)
file_generated = True
return file_generated
def resolve_custom_types(self, args):
"""Transforms the arguments with custom types
:param args: the list of received arguments.
"""
for parser_name, parser_dict in args.items():
spec_complex_options = [opt for opt in
self.spec_helper.get_parser_option_specs(
parser_name) if
opt.get('type', None) in COMPLEX_TYPES]
for spec_option in spec_complex_options:
option_name = spec_option['name']
if option_name in parser_dict:
# we have custom type to resolve
type_name = spec_option['type']
option_value = parser_dict[option_name]
action = self.create_complex_argumet_type(
parser_name,
type_name,
option_name,
spec_option)
# resolving value
parser_dict[option_name] = action.resolve(option_value)
def create_complex_argumet_type(self, subcommand, type_name, option_name,
spec_option):
"""Build the complex argument type
:param subcommand: the command name
:param type_name: the complex type name
:param option_name: the option name
:param spec_option: option's specifications
:return: the complex type instance
"""
complex_action = COMPLEX_TYPES.get(
type_name, None)
if complex_action is None:
raise exceptions.SpecParserException(
"Unknown complex type: {}".format(type_name))
return complex_action(
option_name,
(self.vars, self.defaults, self.plugin_path),
subcommand,
spec_option)
def parse_args(self, arg_parser, args=None):
"""Parses all the arguments (cli, answers file)
:return: None, if ``--generate-answers-file`` is in arg_parser
:return: (dict, dict, dict):
* nested arguments dict (arguments to pass to the playbooks)
* control arguments dict (arguments to control the IR logic)
* custom arguments dict (custom ansible variables)
"""
spec_defaults = self.get_spec_defaults()
cli_args = CliParser.parse_cli_input(arg_parser, args)
file_args = self.get_answers_file_args(cli_args)
# generate answers file and exit
if self.generate_answers_file(cli_args, spec_defaults):
LOG.warning("Answers file generated. Exiting.")
# print warnings when something was overridden from non-cli source.
self.validate_arg_sources(cli_args, file_args,
spec_defaults)
# print warnings for deprecated
self.validate_arg_deprecation(cli_args, file_args)
# now filter defaults to have only parser defined in cli
defaults = dict((key, spec_defaults[key])
for key in cli_args.keys() if
key in spec_defaults)
# copy cli args with the same name to all parser groups
self._merge_duplicated_cli_args(cli_args)
self._merge_duplicated_cli_args(file_args)
dict_utils.dict_merge(defaults, file_args)
dict_utils.dict_merge(defaults, cli_args)
self.validate_requires_args(defaults)
self.validate_length_args(defaults)
self.validate_choices_args(defaults)
self.validate_min_max_args(defaults)
# now resolve complex types.
self.resolve_custom_types(defaults)
nested, control, custom = \
self.get_nested_custom_and_control_args(defaults)
return nested, control, custom
def validate_arg_deprecation(self, cli_args, answer_file_args):
"""Validates and prints the deprecated arguments.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
"""
for deprecated, deprecates in self.get_deprecated_args().items():
for input_args in (answer_file_args.items(), cli_args.items()):
for command, command_dict in input_args:
if deprecated in command_dict:
if deprecates in command_dict:
raise exceptions.IRDeprecationException(
"[{}] Argument '{}' deprecates '{}',"
" please use only the new one.".format(
command, deprecated, deprecates))
if deprecated in answer_file_args[command]:
answer_file_args[command][deprecates] = \
answer_file_args[command][deprecated]
if deprecated in cli_args[command]:
cli_args[command][deprecates] = \
cli_args[command][deprecated]
LOG.warning(
"[{}] Argument '{}' was deprecated,"
" please use '{}'.".format(
command, deprecated, deprecates))
@staticmethod
def validate_arg_sources(cli_args, answer_file_args, spec_defaults):
"""Validates and prints the arguments' source.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
:param spec_defaults: the default values from spec files
"""
def show_diff(diff, command_name, cmd_dict, source_name):
if diff:
for arg_name in diff:
value = cmd_dict[arg_name]
LOG.info(
"[{}] Argument '{}' was set to"
" '{}' from the {} source.".format(
command_name, arg_name, value, source_name))
for command, command_dict in cli_args.items():
file_dict = answer_file_args.get(command, {})
file_diff = set(file_dict.keys()) - set(command_dict.keys())
show_diff(file_diff, command, file_dict, 'answers file')
def_dict = spec_defaults.get(command, {})
default_diff = set(def_dict.keys()) - set(
command_dict.keys()) - file_diff
show_diff(default_diff, command, def_dict, 'spec defaults')
def _get_conditionally_required_args(self, command_name, options_spec,
args):
"""List arguments with ``required_when`` condition matched.
:param command_name: the command name.
:param options_spec: the list of command spec options.
:param args: the received input arguments
:return: list, list of argument names with matched ``required_when``
condition
"""
opts_names = [option_spec['name'] for option_spec in options_spec]
missing_args = []
for option_spec in options_spec:
option_results = []
if option_spec and 'required_when' in option_spec:
req_when_args = [option_spec['required_when']] \
if not type(option_spec['required_when']) is list \
else option_spec['required_when']
# validate conditions
for req_when_arg in req_when_args:
splited_args_list = req_when_arg.split()
for idx, req_arg in enumerate(splited_args_list):
if req_arg in opts_names:
splited_args_list[idx] = \
args.get(command_name, {}).get(req_arg.strip())
if splited_args_list[idx] is None:
option_results.append(False)
break
splited_args_list[idx] = str(splited_args_list[idx])
if (splited_args_list[idx] not in ['and', 'or'] and
not any(
(c in '<>=') for c in splited_args_list[idx])):
splited_args_list[idx] = "'{0}'".format(
yaml.safe_load(splited_args_list[idx]))
else:
option_results.append(
eval(' '.join(splited_args_list)))
if all(option_results) and \
self.spec_helper.get_option_state(
command_name,
option_spec['name'],
args) == helper.OptionState['NOT_SET']:
missing_args.append(option_spec['name'])
return missing_args
def validate_requires_args(self, args):
"""Check if all the required arguments have been provided. """
silent_args = self.get_silent_args(args)
def validate_parser(parser_name, expected_options, parser_args):
"""Helper method to resolve dict_merge. """
result = collections.defaultdict(list)
condition_req_args = self._get_conditionally_required_args(
parser_name, expected_options, args)
for option in expected_options:
name = option['name']
# check required options.
if (option.get('required', False) and
name not in parser_args or
option['name'] in condition_req_args) and \
name not in silent_args:
result[parser_name].append(name)
return result
res = {}
for command_data in self.spec_helper.iterate_parsers():
cmd_name = command_data['name']
if cmd_name in args:
dict_utils.dict_merge(
res,
validate_parser(
cmd_name,
self.spec_helper.get_parser_option_specs(cmd_name),
args[cmd_name]))
missing_args = dict((cmd_name, args)
for cmd_name, args in res.items() if len(args) > 0)
if missing_args:
raise exceptions.IRRequiredArgsMissingException(missing_args)
def validate_length_args(self, args):
"""Check if value of arguments is not longer than length specified.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'length' not in spec_option:
# skip options that do not contain length
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve length
length = spec_option['length']
option_value = parser_dict[option_name]
if len(option_value) > int(length):
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
length
))
if invalid_options:
# raise exception with all arguments that exceed length
raise exceptions.IRInvalidLengthException(invalid_options)
def validate_choices_args(self, args):
"""Check if value of choice arguments is one of the available choices.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'choices' not in spec_option:
# skip options that do not contain choices
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve choices
choices = spec_option['choices']
option_value = parser_dict[option_name]
if option_value not in choices:
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
choices
))
if invalid_options:
# raise exception with all arguments that contain invalid choices
raise exceptions.IRInvalidChoiceException(invalid_options)
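# Illustrative sketch (assumed option): a spec option "topology" with
# choices: ["ha", "noha"] and a received value "nonha" adds the tuple
# ("topology", "nonha", ["ha", "noha"]) to invalid_options, and
# IRInvalidChoiceException is raised with the full list.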
def validate_min_max_args(self, args):
"""Check if value of arguments is between minimum and maximum values.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if all([key not in spec_option
for key in ('maximum', 'minimum')]):
# skip options that do not contain minimum or maximum
continue
option_name = spec_option['name']
if option_name in parser_dict:
option_value = parser_dict[option_name]
min_value = spec_option.get('minimum')
max_value = spec_option.get('maximum')
# handle empty values in spec files which load as None
min_value = '' if 'minimum' in spec_option \
and min_value is None else min_value
max_value = '' if 'maximum' in spec_option \
and max_value is None else max_value
values = {
"value": option_value,
"maximum": max_value,
"minimum": min_value
}
# make sure that values are numbers
is_all_values_numbers = True
for name, num in values.items():
if num is not None \
and (isinstance(num, bool) or
not isinstance(num, (int, float))):
invalid_options.append((
option_name,
name,
"number",
type(num).__name__
))
is_all_values_numbers = False
if not is_all_values_numbers:
# don't continue to min max checks since some of the
# values are not numbers
continue
# check bigger than minimum
if min_value is not None and option_value < min_value:
invalid_options.append((
option_name,
"minimum",
min_value,
option_value
))
# check smaller than maximum
if max_value is not None and option_value > max_value:
invalid_options.append((
option_name,
"maximum",
max_value,
option_value
))
if invalid_options:
# raise exception with all arguments that are out of the allowed range
raise exceptions.IRInvalidMinMaxRangeException(invalid_options)
def get_silent_args(self, args):
"""list of silenced argument
:param args: The received arguments.
:return: list, slienced argument names
"""
silent_args_names = []
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if arg_spec and 'silent' in arg_spec and \
self.spec_helper.get_option_state(
parser_name,
arg_name,
args) == helper.OptionState['IS_SET']:
silent_args_names.extend(arg_spec['silent'])
return list(set(silent_args_names))
def get_nested_custom_and_control_args(self, args):
"""Split input arguments to control nested and custom.
Controls arguments: control the IR behavior. These arguments
will not be put into the spec yml file
Nested arguments: are used by the Ansible playbooks and will be put
into the spec yml file.
Custom arguments: Custom ansible variables to be used instead of the
normal nested usage.
:param args: the collected list of args.
:return: (dict, dict, dict): flat dicts (nested_args, control_args, custom_args)
"""
# returns flat dicts
nested = {}
control_args = {}
custom_args = {}
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if all([arg_spec, arg_spec.get('type', None),
arg_spec.get('type', None) in
[ctype_name for ctype_name, klass in
COMPLEX_TYPES.items() if klass.is_nested]
]) or ('is_shared_group_option' not in arg_spec):
if arg_name in nested:
LOG.warning(
"Duplicated nested argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, nested[arg_name]))
elif arg_name in custom_args:
LOG.warning(
"Duplicated custom argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, custom_args[arg_name]))
else:
if "ansible_variable" in arg_spec:
custom_args[arg_spec["ansible_variable"]] = arg_value
else:
nested[arg_name] = arg_value
else:
if arg_name in control_args:
LOG.warning(
"Duplicated control argument found: '{}'. Using "
"old value: '{}'".format(
arg_name, control_args[arg_name]))
else:
control_args[arg_name] = arg_value
return nested, control_args, custom_args
def _iterate_received_arguments(self, args):
"""Iterator helper method over all the received arguments
:return: yields tuple:
(spec name, spec dict,
argument name, argument value, argument spec)
"""
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in args:
parser_dict = args[spec_parser['name']]
for arg_name, arg_val in parser_dict.items():
arg_spec = self.spec_helper.get_option_spec(
spec_parser['name'], arg_name)
yield (spec_parser['name'], parser_dict,
arg_name, arg_val, arg_spec)
def _convert_non_cli_args(self, parser_name, values_dict):
"""Casts arguments to correct types by modifying values_dict param.
By default all the values are strings.
:param parser_name: The command name, e.g. main, virsh, ospd, etc
:param values_dict: The dict with the arguments
"""
for opt_name, opt_value in values_dict.items():
file_option_spec = self.spec_helper.get_option_spec(
parser_name, opt_name)
if file_option_spec.get('type', None) in ['int', ] or \
file_option_spec.get('action', None) in ['count', ]:
values_dict[opt_name] = int(opt_value)
def _merge_duplicated_cli_args(self, cli_args):
"""Merge duplicated arguments to all the parsers
This is needed to handle control args shared among several parsers,
for example: verbose, inventory
"""
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(cli_args):
for parser_name2, parser_dict2 in cli_args.items():
if all([parser_name2, parser_name != parser_name2,
arg_name not in parser_dict2]):
if self.spec_helper.get_option_spec(parser_name2,
arg_name):
parser_dict2[arg_name] = arg_value
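# Illustrative sketch (hypothetical parsers and options, not part of
# inspector.py): a rough picture of what _merge_duplicated_cli_args does,
# assuming two parsers that both declare a shared 'verbose' option:
#
#   cli_args before: {'virsh': {'verbose': 2, 'host': 'h1'}, 'ospd': {}}
#   cli_args after:  {'virsh': {'verbose': 2, 'host': 'h1'},
#                     'ospd':  {'verbose': 2}}
#
# The copy only happens when spec_helper.get_option_spec() confirms that the
# other parser also defines the option, so parser-specific options such as
# 'host' are not propagated.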
|
infrared/core/inspector/inspector.py
|
codereval_python_data_222
|
Split input arguments to control nested and custom.
Controls arguments: control the IR behavior. These arguments
will not be put into the spec yml file
Nested arguments: are used by the Ansible playbooks and will be put
into the spec yml file.
Custom arguments: Custom ansible variables to be used instead of the
normal nested usage.
:param args: the collected list of args.
:return: (dict, dict, dict): flat dicts (nested_args, control_args, custom_args)
def get_nested_custom_and_control_args(self, args):
"""Split input arguments to control nested and custom.
Controls arguments: control the IR behavior. These arguments
will not be put into the spec yml file
Nested arguments: are used by the Ansible playbooks and will be put
into the spec yml file.
Custom arguments: Custom ansible variables to be used instead of the
normal nested usage.
:param args: the collected list of args.
:return: (dict, dict, dict): flat dicts (nested_args, control_args, custom_args)
"""
# returns flat dicts
nested = {}
control_args = {}
custom_args = {}
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if all([arg_spec, arg_spec.get('type', None),
arg_spec.get('type', None) in
[ctype_name for ctype_name, klass in
COMPLEX_TYPES.items() if klass.is_nested]
]) or ('is_shared_group_option' not in arg_spec):
if arg_name in nested:
LOG.warning(
"Duplicated nested argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, nested[arg_name]))
elif arg_name in custom_args:
LOG.warning(
"Duplicated custom argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, custom_args[arg_name]))
else:
if "ansible_variable" in arg_spec:
custom_args[arg_spec["ansible_variable"]] = arg_value
else:
nested[arg_name] = arg_value
else:
if arg_name in control_args:
LOG.warning(
"Duplicated control argument found: '{}'. Using "
"old value: '{}'".format(
arg_name, control_args[arg_name]))
else:
control_args[arg_name] = arg_value
return nested, control_args, custom_args
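# Illustrative sketch (hypothetical option specs): roughly how the split
# above classifies received arguments:
#   'image-url'  - plugin option with a nested complex type           -> nested
#   'custom-var' - plugin option with 'ansible_variable': 'my_var'    -> custom
#   'dry-run'    - shared-group option with a plain (non-nested) type -> control
# Given {'image-url': 'http://x/img.qcow2', 'custom-var': 'x', 'dry-run': True},
# the method would return roughly:
#   nested  == {'image-url': 'http://x/img.qcow2'}
#   control == {'dry-run': True}
#   custom  == {'my_var': 'x'}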
import collections
import os
from six.moves import configparser
from string import Template
import yaml
from infrared.core.cli.cli import CliParser
from infrared.core.cli.cli import COMPLEX_TYPES
from infrared.core.inspector import helper
from infrared.core.utils import dict_utils
from infrared.core.utils import exceptions
from infrared.core.utils import logger
LOG = logger.LOG
class SpecParser(object):
"""Parses input arguments from different sources (cli, answers file). """
@classmethod
def from_plugin(cls, subparser, plugin, base_groups):
"""Reads spec & vars from plugin and constructs the parser instance
:param subparser: argparse.subparser to extend
:param plugin: InfraredPlugin object
:param base_groups: dict, included groups
:return: SpecParser object based on given plugin spec & vars
"""
spec_dict = base_groups or {}
with open(plugin.spec) as stream:
spec = yaml.safe_load(stream) or {}
dict_utils.dict_merge(
base_groups,
spec,
dict_utils.ConflictResolver.unique_append_list_resolver)
# The "try-excpet" block here is for adding spec file path if it
# includes an unsupported option type
try:
return SpecParser(subparser, spec_dict, plugin.vars_dir,
plugin.defaults_dir, plugin.path)
except exceptions.IRUnsupportedSpecOptionType as ex:
ex.message += ' in file: {}'.format(plugin.spec)
raise ex
def __init__(self, subparser, spec_dict, vars_dir, defaults_dir,
plugin_path):
"""Constructor.
:param subparser: argparse.subparser to extend
:param spec_dict: dict with CLI description
:param vars_dir: Path to plugin's vars dir
:param defaults_dir: Path to plugin's defaults dir
"""
self.vars = vars_dir
self.defaults = defaults_dir
self.plugin_path = plugin_path
self.spec_helper = helper.SpecDictHelper(spec_dict)
# create parser
self.parser = CliParser.create_parser(self, subparser)
def add_shared_groups(self, list_of_groups):
"""Adds the user defined shared groups
:param list_of_groups: list, of group dicts
"""
shared_groups = self.spec_helper.spec_dict.get('shared_groups', [])
shared_groups.extend(list_of_groups)
self.spec_helper.spec_dict['shared_groups'] = shared_groups
def _get_defaults(self, default_getter_func):
"""Resolve arguments' values from cli or answers file.
:param default_getter_func: callable. will be called for all the
available options in spec file.
"""
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
default_value = default_getter_func(option)
if default_value is not None:
sub = parser['name']
result[sub][option['name']] = default_value
return result
def get_spec_defaults(self):
"""Resolve arguments' values from spec and other sources. """
def spec_default_getter(option):
"""Getter function to retrieve the default value from spec.
:param option: argument name
"""
# first try to get environment variable with IR_ prefix
default_value = SpecParser.get_env_option(option['name'])
if default_value is not None:
LOG.info(
"[environ] Loading '{0}' default value"
" '{1}' from the environment variable".format(
option['name'], default_value))
elif option.get('default', None) is not None:
default_value = option['default']
elif option.get('action', None) in ['store_true']:
default_value = False
return default_value
return self._get_defaults(spec_default_getter)
@staticmethod
def get_env_option(name):
"""Try get """
return os.environ.get('IR_' + name.upper().replace('-', '_'))
def get_deprecated_args(self):
"""Returning dict with options which deprecate others. """
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
if option.get('deprecates') is not None:
result[option.get('deprecates')] = option.get('name')
return result
@staticmethod
def parse_env_variable_from_file(value):
if isinstance(value, str):
t = Template(value)
try:
value = t.substitute(os.environ)
except KeyError as undefined_var:
raise exceptions.IRAnswersFileEnvVarNotDefined(undefined_var)
return value
def get_answers_file_args(self, cli_args):
"""Resolve arguments' values from answers INI file. """
file_result = {}
args_to_remove = []
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
file_result[parser_name] = file_result.get(parser_name, {})
if option_spec and option_spec.get(
'action', '') == 'read-answers':
# Iterate over arguments supplied by file
for parsed_arg in parser_dict[arg_name]:
# Supplied arguments' value can be a list
if isinstance(parser_dict[arg_name][parsed_arg], list):
i = 0
# Iterate over the argument values list
for parsed_value in parser_dict[arg_name][parsed_arg]:
parser_dict[arg_name][parsed_arg][i] = \
SpecParser.parse_env_variable_from_file(parsed_value)
i += 1
else:
parser_dict[arg_name][parsed_arg] = \
SpecParser.parse_env_variable_from_file(parser_dict[arg_name][parsed_arg])
# we have config option. saving it.
self._convert_non_cli_args(
parser_name, parser_dict[arg_name])
dict_utils.dict_merge(
file_result[parser_name],
parser_dict[arg_name])
# remove from cli args
args_to_remove.append((parser_name, arg_name))
# remove parser dict outside loop to avoid iteration dict modification
for parser_name, arg_name in args_to_remove:
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in cli_args and spec_parser['name'] == parser_name:
parser_dict = cli_args[spec_parser['name']]
parser_dict.pop(arg_name)
break
return file_result
def generate_answers_file(self, cli_args, spec_defaults):
"""Generates answers INI file
:param cli_args: list, cli arguments.
:param spec_defaults: the default values.
"""
def put_option(config, parser_name, option_name, value):
for opt_help in option.get('help', '').split('\n'):
help_opt = '# ' + opt_help
# add help comment
if config.has_option(parser_name, help_opt):
config.remove_option(parser_name, help_opt)
config.set(
parser_name, help_opt)
if config.has_option(parser_name, option_name):
value = config.get(parser_name, option_name)
config.remove_option(parser_name, option_name)
config.set(
parser_name,
option_name,
str(value))
file_generated = False
# generate an answers file for every parser that requested it
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
if option_spec and option_spec.get(
'action', '') == 'generate-answers':
options_to_save = \
self.spec_helper.get_parser_option_specs(parser_name)
out_answers = configparser.ConfigParser(allow_no_value=True)
if not out_answers.has_section(parser_name):
out_answers.add_section(parser_name)
for option in options_to_save:
opt_name = option['name']
if opt_name in parser_dict:
put_option(
out_answers,
parser_name,
opt_name,
parser_dict[opt_name])
elif opt_name in spec_defaults[parser_name]:
put_option(
out_answers,
parser_name,
opt_name,
spec_defaults[parser_name][opt_name])
elif option.get('required', False):
put_option(
out_answers,
parser_name,
'# ' + opt_name,
"Required argument. "
"Edit with one of the allowed values OR "
"override with "
"CLI: --{}=<option>".format(opt_name))
# write to file
with open(arg_value, 'w') as answers_file:
out_answers.write(answers_file)
file_generated = True
return file_generated
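# Illustrative sketch (hypothetical plugin, options and values): a generated
# answers file is a plain INI file with one section per parser, e.g.
#
#   [virsh]
#   # Provisioner image to use
#   image = rhel-8.qcow2
#   # topology = Required argument. Edit with one of the allowed values OR
#                override with CLI: --topology=<option>
#
# It can be fed back on a later run via the option whose action is
# 'read-answers'.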
def resolve_custom_types(self, args):
"""Transforms the arguments with custom types
:param args: the list of received arguments.
"""
for parser_name, parser_dict in args.items():
spec_complex_options = [opt for opt in
self.spec_helper.get_parser_option_specs(
parser_name) if
opt.get('type', None) in COMPLEX_TYPES]
for spec_option in spec_complex_options:
option_name = spec_option['name']
if option_name in parser_dict:
# we have custom type to resolve
type_name = spec_option['type']
option_value = parser_dict[option_name]
action = self.create_complex_argumet_type(
parser_name,
type_name,
option_name,
spec_option)
# resolving value
parser_dict[option_name] = action.resolve(option_value)
def create_complex_argumet_type(self, subcommand, type_name, option_name,
spec_option):
"""Build the complex argument type
:param subcommand: the command name
:param type_name: the complex type name
:param option_name: the option name
:param spec_option: option's specifications
:return: the complex type instance
"""
complex_action = COMPLEX_TYPES.get(
type_name, None)
if complex_action is None:
raise exceptions.SpecParserException(
"Unknown complex type: {}".format(type_name))
return complex_action(
option_name,
(self.vars, self.defaults, self.plugin_path),
subcommand,
spec_option)
def parse_args(self, arg_parser, args=None):
"""Parses all the arguments (cli, answers file)
:return: None, if ``--generate-answers-file`` is in arg_parser
:return: (dict, dict):
* command arguments dict (arguments to control the IR logic)
* nested arguments dict (arguments to pass to the playbooks)
"""
spec_defaults = self.get_spec_defaults()
cli_args = CliParser.parse_cli_input(arg_parser, args)
file_args = self.get_answers_file_args(cli_args)
# generate answers file and exit
if self.generate_answers_file(cli_args, spec_defaults):
LOG.warning("Answers file generated. Exiting.")
# print warnings when something was overridden from non-cli source.
self.validate_arg_sources(cli_args, file_args,
spec_defaults)
# print warnings for deprecated
self.validate_arg_deprecation(cli_args, file_args)
# now filter defaults to have only parser defined in cli
defaults = dict((key, spec_defaults[key])
for key in cli_args.keys() if
key in spec_defaults)
# copy cli args with the same name to all parser groups
self._merge_duplicated_cli_args(cli_args)
self._merge_duplicated_cli_args(file_args)
dict_utils.dict_merge(defaults, file_args)
dict_utils.dict_merge(defaults, cli_args)
self.validate_requires_args(defaults)
self.validate_length_args(defaults)
self.validate_choices_args(defaults)
self.validate_min_max_args(defaults)
# now resolve complex types.
self.resolve_custom_types(defaults)
nested, control, custom = \
self.get_nested_custom_and_control_args(defaults)
return nested, control, custom
def validate_arg_deprecation(self, cli_args, answer_file_args):
"""Validates and prints the deprecated arguments.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
"""
for deprecated, deprecates in self.get_deprecated_args().items():
for input_args in (answer_file_args.items(), cli_args.items()):
for command, command_dict in input_args:
if deprecated in command_dict:
if deprecates in command_dict:
raise exceptions.IRDeprecationException(
"[{}] Argument '{}' deprecates '{}',"
" please use only the new one.".format(
command, deprecated, deprecates))
if deprecated in answer_file_args[command]:
answer_file_args[command][deprecates] = \
answer_file_args[command][deprecated]
if deprecated in cli_args[command]:
cli_args[command][deprecates] = \
cli_args[command][deprecated]
LOG.warning(
"[{}] Argument '{}' was deprecated,"
" please use '{}'.".format(
command, deprecated, deprecates))
@staticmethod
def validate_arg_sources(cli_args, answer_file_args, spec_defaults):
"""Validates and prints the arguments' source.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
:param spec_defaults: the default values from spec files
"""
def show_diff(diff, command_name, cmd_dict, source_name):
if diff:
for arg_name in diff:
value = cmd_dict[arg_name]
LOG.info(
"[{}] Argument '{}' was set to"
" '{}' from the {} source.".format(
command_name, arg_name, value, source_name))
for command, command_dict in cli_args.items():
file_dict = answer_file_args.get(command, {})
file_diff = set(file_dict.keys()) - set(command_dict.keys())
show_diff(file_diff, command, file_dict, 'answers file')
def_dict = spec_defaults.get(command, {})
default_diff = set(def_dict.keys()) - set(
command_dict.keys()) - file_diff
show_diff(default_diff, command, def_dict, 'spec defaults')
def _get_conditionally_required_args(self, command_name, options_spec,
args):
"""List arguments with ``required_when`` condition matched.
:param command_name: the command name.
:param options_spec: the list of command spec options.
:param args: the received input arguments
:return: list, list of argument names with matched ``required_when``
condition
"""
opts_names = [option_spec['name'] for option_spec in options_spec]
missing_args = []
for option_spec in options_spec:
option_results = []
if option_spec and 'required_when' in option_spec:
req_when_args = [option_spec['required_when']] \
if not type(option_spec['required_when']) is list \
else option_spec['required_when']
# validate conditions
for req_when_arg in req_when_args:
splited_args_list = req_when_arg.split()
for idx, req_arg in enumerate(splited_args_list):
if req_arg in opts_names:
splited_args_list[idx] = \
args.get(command_name, {}).get(req_arg.strip())
if splited_args_list[idx] is None:
option_results.append(False)
break
splited_args_list[idx] = str(splited_args_list[idx])
if (splited_args_list[idx] not in ['and', 'or'] and
not any(
(c in '<>=') for c in splited_args_list[idx])):
splited_args_list[idx] = "'{0}'".format(
yaml.safe_load(splited_args_list[idx]))
else:
option_results.append(
eval(' '.join(splited_args_list)))
if all(option_results) and \
self.spec_helper.get_option_state(
command_name,
option_spec['name'],
args) == helper.OptionState['NOT_SET']:
missing_args.append(option_spec['name'])
return missing_args
def validate_requires_args(self, args):
"""Check if all the required arguments have been provided. """
silent_args = self.get_silent_args(args)
def validate_parser(parser_name, expected_options, parser_args):
"""Helper method to resolve dict_merge. """
result = collections.defaultdict(list)
condition_req_args = self._get_conditionally_required_args(
parser_name, expected_options, args)
for option in expected_options:
name = option['name']
# check required options.
if (option.get('required', False) and
name not in parser_args or
option['name'] in condition_req_args) and \
name not in silent_args:
result[parser_name].append(name)
return result
res = {}
for command_data in self.spec_helper.iterate_parsers():
cmd_name = command_data['name']
if cmd_name in args:
dict_utils.dict_merge(
res,
validate_parser(
cmd_name,
self.spec_helper.get_parser_option_specs(cmd_name),
args[cmd_name]))
missing_args = dict((cmd_name, args)
for cmd_name, args in res.items() if len(args) > 0)
if missing_args:
raise exceptions.IRRequiredArgsMissingException(missing_args)
def validate_length_args(self, args):
"""Check if value of arguments is not longer than length specified.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'length' not in spec_option:
# skip options that do not define length
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve length
length = spec_option['length']
option_value = parser_dict[option_name]
if len(option_value) > int(length):
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
length
))
if invalid_options:
# raise exception with all arguments that exceed length
raise exceptions.IRInvalidLengthException(invalid_options)
def validate_choices_args(self, args):
"""Check if value of choice arguments is one of the available choices.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'choices' not in spec_option:
# skip options that do not define choices
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve choices
choices = spec_option['choices']
option_value = parser_dict[option_name]
if option_value not in choices:
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
choices
))
if invalid_options:
# raise exception with all arguments that contain invalid choices
raise exceptions.IRInvalidChoiceException(invalid_options)
def validate_min_max_args(self, args):
"""Check if value of arguments is between minimum and maximum values.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if all([key not in spec_option
for key in ('maximum', 'minimum')]):
# skip options that do not define minimum or maximum
continue
option_name = spec_option['name']
if option_name in parser_dict:
option_value = parser_dict[option_name]
min_value = spec_option.get('minimum')
max_value = spec_option.get('maximum')
# handle empty values in spec files which load as None
min_value = '' if 'minimum' in spec_option \
and min_value is None else min_value
max_value = '' if 'maximum' in spec_option \
and max_value is None else max_value
values = {
"value": option_value,
"maximum": max_value,
"minimum": min_value
}
# make sure that values are numbers
is_all_values_numbers = True
for name, num in values.items():
if num is not None \
and (isinstance(num, bool) or
not isinstance(num, (int, float))):
invalid_options.append((
option_name,
name,
"number",
type(num).__name__
))
is_all_values_numbers = False
if not is_all_values_numbers:
# don't continue to min max checks since some of the
# values are not numbers
continue
# check bigger than minimum
if min_value is not None and option_value < min_value:
invalid_options.append((
option_name,
"minimum",
min_value,
option_value
))
# check smaller than maximum
if max_value is not None and option_value > max_value:
invalid_options.append((
option_name,
"maximum",
max_value,
option_value
))
if invalid_options:
# raise exception with all arguments that are out of the allowed range
raise exceptions.IRInvalidMinMaxRangeException(invalid_options)
def get_silent_args(self, args):
"""list of silenced argument
:param args: The received arguments.
:return: list, slienced argument names
"""
silent_args_names = []
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if arg_spec and 'silent' in arg_spec and \
self.spec_helper.get_option_state(
parser_name,
arg_name,
args) == helper.OptionState['IS_SET']:
silent_args_names.extend(arg_spec['silent'])
return list(set(silent_args_names))
def get_nested_custom_and_control_args(self, args):
"""Split input arguments to control nested and custom.
Controls arguments: control the IR behavior. These arguments
will not be put into the spec yml file
Nested arguments: are used by the Ansible playbooks and will be put
into the spec yml file.
Custom arguments: Custom ansible variables to be used instead of the
normal nested usage.
:param args: the collected list of args.
:return: (dict, dict, dict): flat dicts (nested_args, control_args, custom_args)
"""
# returns flat dicts
nested = {}
control_args = {}
custom_args = {}
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if all([arg_spec, arg_spec.get('type', None),
arg_spec.get('type', None) in
[ctype_name for ctype_name, klass in
COMPLEX_TYPES.items() if klass.is_nested]
]) or ('is_shared_group_option' not in arg_spec):
if arg_name in nested:
LOG.warning(
"Duplicated nested argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, nested[arg_name]))
elif arg_name in custom_args:
LOG.warning(
"Duplicated custom argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, custom_args[arg_name]))
else:
if "ansible_variable" in arg_spec:
custom_args[arg_spec["ansible_variable"]] = arg_value
else:
nested[arg_name] = arg_value
else:
if arg_name in control_args:
LOG.warning(
"Duplicated control argument found: '{}'. Using "
"old value: '{}'".format(
arg_name, control_args[arg_name]))
else:
control_args[arg_name] = arg_value
return nested, control_args, custom_args
def _iterate_received_arguments(self, args):
"""Iterator helper method over all the received arguments
:return: yields tuple:
(spec name, spec dict,
argument name, argument value, argument spec)
"""
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in args:
parser_dict = args[spec_parser['name']]
for arg_name, arg_val in parser_dict.items():
arg_spec = self.spec_helper.get_option_spec(
spec_parser['name'], arg_name)
yield (spec_parser['name'], parser_dict,
arg_name, arg_val, arg_spec)
def _convert_non_cli_args(self, parser_name, values_dict):
"""Casts arguments to correct types by modifying values_dict param.
By default all the values are strings.
:param parser_name: The command name, e.g. main, virsh, ospd, etc
:param values_dict: The dict with the arguments
"""
for opt_name, opt_value in values_dict.items():
file_option_spec = self.spec_helper.get_option_spec(
parser_name, opt_name)
if file_option_spec.get('type', None) in ['int', ] or \
file_option_spec.get('action', None) in ['count', ]:
values_dict[opt_name] = int(opt_value)
def _merge_duplicated_cli_args(self, cli_args):
"""Merge duplicated arguments to all the parsers
This is needed to handle control args shared among several parsers,
for example: verbose, inventory
"""
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(cli_args):
for parser_name2, parser_dict2 in cli_args.items():
if all([parser_name2, parser_name != parser_name2,
arg_name not in parser_dict2]):
if self.spec_helper.get_option_spec(parser_name2,
arg_name):
parser_dict2[arg_name] = arg_value
|
infrared/core/inspector/inspector.py
|
codereval_python_data_223
|
Extend ``vars_dict`` with ``extra-vars``
:param vars_dict: Dictionary to merge extra-vars into
:param extra_vars: List of extra-vars
@staticmethod
def merge_extra_vars(vars_dict, extra_vars=None):
"""Extend ``vars_dict`` with ``extra-vars``
:param vars_dict: Dictionary to merge extra-vars into
:param extra_vars: List of extra-vars
"""
for extra_var in extra_vars or []:
if extra_var.startswith('@'):
with open(extra_var[1:]) as f_obj:
loaded_yml = yaml.safe_load(f_obj)
dict_utils.dict_merge(
vars_dict,
loaded_yml,
conflict_resolver=dict_utils.ConflictResolver.
unique_append_list_resolver)
else:
if '=' not in extra_var:
raise exceptions.IRExtraVarsException(extra_var)
key, value = extra_var.split("=", 1)
if value.startswith('@'):
with open(value[1:]) as f_obj:
loaded_yml = yaml.safe_load(f_obj)
tmp_dict = {}
dict_utils.dict_insert(tmp_dict, loaded_yml, *key.split("."))
dict_utils.dict_merge(
vars_dict,
tmp_dict,
conflict_resolver=dict_utils.ConflictResolver.
unique_append_list_resolver)
else:
dict_utils.dict_insert(vars_dict, value, *key.split("."))
return vars_dict
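# Usage sketch (hypothetical values; VarsDictManager is defined in the
# context below). Inline pairs support dotted nesting, and '@file' entries
# are loaded from YAML:
#
#   VarsDictManager.merge_extra_vars(
#       {}, ['provision.nodes=3', 'deploy.version=17.1', '@extra.yml'])
#
# would return roughly:
#   {'provision': {'nodes': '3'}, 'deploy': {'version': '17.1'},
#    ...plus whatever extra.yml contains...}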
import yaml
from infrared.core.utils import dict_utils
from infrared.core.utils import exceptions
class VarsDictManager(object):
@staticmethod
def generate_settings(entry_point,
nested_args,
delimiter='-'):
"""Unifies all input into a single dict of Ansible extra-vars
:param entry_point: All input will be nested under this key
:param nested_args: dict. these values will be nested
example:
{
foo-bar: value1,
foo2: value2
foo-another-bar: value3
}
:param delimiter: character to split keys by.
:return: dict. nested input with keys split by the delimiter
>>> VarsDictManager.generate_settings(
... 'entry_point', {'foo-bar': 'value1',
... 'foo2': 'value2',
... 'foo-another-bar': 'value3'})
{'entry_point': {'foo': {'bar': 'value1', 'another':\
{'bar': 'value3'}}, 'foo2': 'value2'}}
"""
vars_dict = {entry_point: {}}
try:
for _name, argument in nested_args.items():
dict_utils.dict_insert(vars_dict[entry_point],
argument,
*_name.split(delimiter))
# handle errors here and provide more output for user if required
except exceptions.IRKeyNotFoundException as key_exception:
if key_exception and key_exception.key.startswith("private."):
raise exceptions.IRPrivateSettingsMissingException(
key_exception.key)
else:
raise
return vars_dict
@staticmethod
def merge_extra_vars(vars_dict, extra_vars=None):
"""Extend ``vars_dict`` with ``extra-vars``
:param vars_dict: Dictionary to merge extra-vars into
:param extra_vars: List of extra-vars
"""
for extra_var in extra_vars or []:
if extra_var.startswith('@'):
with open(extra_var[1:]) as f_obj:
loaded_yml = yaml.safe_load(f_obj)
dict_utils.dict_merge(
vars_dict,
loaded_yml,
conflict_resolver=dict_utils.ConflictResolver.
unique_append_list_resolver)
else:
if '=' not in extra_var:
raise exceptions.IRExtraVarsException(extra_var)
key, value = extra_var.split("=", 1)
if value.startswith('@'):
with open(value[1:]) as f_obj:
loaded_yml = yaml.safe_load(f_obj)
tmp_dict = {}
dict_utils.dict_insert(tmp_dict, loaded_yml, *key.split("."))
dict_utils.dict_merge(
vars_dict,
tmp_dict,
conflict_resolver=dict_utils.ConflictResolver.
unique_append_list_resolver)
else:
dict_utils.dict_insert(vars_dict, value, *key.split("."))
|
infrared/core/settings.py
|
codereval_python_data_224
|
Wraps the 'ansible-playbook' CLI.
:param ir_workspace: An Infrared Workspace object represents the active
workspace
:param ir_plugin: An InfraredPlugin object of the current plugin
:param playbook_path: the playbook to invoke
:param verbose: Ansible verbosity level
:param extra_vars: dict. Passed to Ansible as extra-vars
:param ansible_args: list of ansible-playbook arguments to plumb down
directly to Ansible.
def ansible_playbook(ir_workspace, ir_plugin, playbook_path, verbose=None,
extra_vars=None, ansible_args=None):
"""Wraps the 'ansible-playbook' CLI.
:param ir_workspace: An Infrared Workspace object represents the active
workspace
:param ir_plugin: An InfraredPlugin object of the current plugin
:param playbook_path: the playbook to invoke
:param verbose: Ansible verbosity level
:param extra_vars: dict. Passed to Ansible as extra-vars
:param ansible_args: list of ansible-playbook arguments to plumb down
directly to Ansible.
"""
ansible_args = ansible_args or []
LOG.debug("Additional ansible args: {}".format(ansible_args))
# hack for verbosity
from ansible.utils.display import Display
display = Display(verbosity=verbose)
import __main__ as main
setattr(main, "display", display)
# TODO(yfried): Use proper ansible API instead of emulating CLI
cli_args = ['execute',
playbook_path,
'--inventory', ir_workspace.inventory]
# infrared should not change ansible verbosity unless user specifies that
if verbose:
cli_args.append('-' + 'v' * int(verbose))
cli_args.extend(ansible_args)
results = _run_playbook(cli_args, vars_dict=extra_vars or {},
ir_workspace=ir_workspace, ir_plugin=ir_plugin)
if results:
LOG.error('Playbook "%s" failed!' % playbook_path)
return results
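# Usage sketch (hypothetical stand-in objects; real callers pass an Infrared
# Workspace and an InfraredPlugin, which expose the same attributes used
# above):
#
#   from types import SimpleNamespace
#   ws = SimpleNamespace(inventory='hosts', path='/tmp/ws')
#   plugin = SimpleNamespace(name='example')
#   rc = ansible_playbook(ws, plugin, 'main.yml', verbose=1,
#                         extra_vars={'provision': {'nodes': 3}},
#                         ansible_args=['--tags', 'deploy'])
#
# rc follows the ansible-playbook exit codes listed in _run_playbook
# (0 on success, non-zero on failure).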
from datetime import datetime
from distutils.util import strtobool
import errno
import json
import os
import re
import sys
import tempfile
from infrared.core.utils import logger
import yaml
LOG = logger.LOG
class NoAnsiFile(object):
re_ansi = re.compile(r'\x1b[^m]*m')
def __init__(self, fd):
self.fd = fd
def write(self, data):
no_ansi_data = self.re_ansi.sub('', data)
self.fd.write(no_ansi_data)
def close(self):
self.fd.close()
def flush(self):
self.fd.flush()
class IRStdFd(object):
pass
class IRStdoutFd(IRStdFd):
def __init__(self, print_stdout=True):
self.print_stdout = print_stdout
self.org_stdout = sys.stdout
sys.stdout = self
def write(self, data):
if self.print_stdout:
sys.__stdout__.write(data)
sys.__stdout__.flush()
for fd in IRSTDFDManager.fds:
if not isinstance(fd, IRStdFd):
fd.write(data)
fd.flush()
@staticmethod
def flush():
sys.__stdout__.flush()
@staticmethod
def close():
sys.stdout = sys.__stdout__
@staticmethod
def fileno():
return sys.__stdout__.fileno()
class IRStderrFd(IRStdFd):
def __init__(self, print_stderr=True):
self.print_stderr = print_stderr
self.org_stderr = sys.stderr
sys.stderr = self
def write(self, data):
if self.print_stderr:
sys.__stderr__.write(data)
sys.__stderr__.flush()
for fd in IRSTDFDManager.fds:
if not isinstance(fd, IRStdFd):
fd.write(data)
fd.flush()
@staticmethod
def flush():
sys.__stderr__.flush()
@staticmethod
def close():
sys.stderr = sys.__stderr__
class IRSTDFDManager(object):
fds = set()
def __init__(self, stdout=True, stderr=True, *fds):
self.stdout = stdout
self.stderr = stderr
for fd in fds:
self.add(fd)
self.add(IRStdoutFd(print_stdout=self.stdout))
self.add(IRStderrFd(print_stderr=self.stderr))
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def write(self, data):
for fd in self.fds:
fd.write(data)
fd.flush()
def flush(self):
for fd in self.fds:
fd.flush()
def close(self):
for fd in self.fds:
fd.close()
def add(self, fd):
self.fds.add(fd)
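# Illustrative usage (hypothetical log path, not part of the original
# module): tee console output to an extra file with ANSI escape codes
# stripped from the file copy.
#
#   with IRSTDFDManager(stdout=True, stderr=True) as mgr:
#       mgr.add(NoAnsiFile(open('/tmp/run.log', 'w')))
#       print('hello')   # goes to the console and, ANSI-stripped, to the file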
def ansible_playbook(ir_workspace, ir_plugin, playbook_path, verbose=None,
extra_vars=None, ansible_args=None):
"""Wraps the 'ansible-playbook' CLI.
:param ir_workspace: An Infrared Workspace object represents the active
workspace
:param ir_plugin: An InfraredPlugin object of the current plugin
:param playbook_path: the playbook to invoke
:param verbose: Ansible verbosity level
:param extra_vars: dict. Passed to Ansible as extra-vars
:param ansible_args: list of ansible-playbook arguments to plumb down
directly to Ansible.
"""
ansible_args = ansible_args or []
LOG.debug("Additional ansible args: {}".format(ansible_args))
# hack for verbosity
from ansible.utils.display import Display
display = Display(verbosity=verbose)
import __main__ as main
setattr(main, "display", display)
# TODO(yfried): Use proper ansible API instead of emulating CLI
cli_args = ['execute',
playbook_path,
'--inventory', ir_workspace.inventory]
# infrared should not change ansible verbosity unless user specifies that
if verbose:
cli_args.append('-' + 'v' * int(verbose))
cli_args.extend(ansible_args)
results = _run_playbook(cli_args, vars_dict=extra_vars or {},
ir_workspace=ir_workspace, ir_plugin=ir_plugin)
if results:
LOG.error('Playbook "%s" failed!' % playbook_path)
return results
def _run_playbook(cli_args, vars_dict, ir_workspace, ir_plugin):
"""Runs ansible cli with vars dict
:param vars_dict: dict, Will be passed as Ansible extra-vars
:param cli_args: the list of command line arguments
:param ir_workspace: An Infrared Workspace object represents the active
workspace
:param ir_plugin: An InfraredPlugin object of the current plugin
:return: ansible results
"""
# TODO(yfried): use ansible vars object instead of tmpfile
# NOTE(oanufrii): !!!this import should be exactly here!!!
# Ansible uses 'display' singleton from '__main__' and
# gets it on module level. Since we monkeypatch our
# '__main__' in the 'ansible_playbook' function, the import of
# PlaybookCLI should happen after that to get the patched
# '__main__'. Otherwise ansible gets unpatched '__main__'
# and creates new 'display' object with default (0)
# verbosity.
# NOTE(afazekas): GlobalCLIArgs gets its value only once per invocation,
# and since it has a singleton decorator it would remember the old
# arguments across different tests, so we drop the singleton behaviour
# by swapping in the plain CLIArgs class.
try:
from ansible.utils import context_objects
context_objects.GlobalCLIArgs = context_objects.CLIArgs
except ImportError:
# older version
pass
from ansible.cli.playbook import PlaybookCLI
from ansible.errors import AnsibleOptionsError
from ansible.errors import AnsibleParserError
with tempfile.NamedTemporaryFile(
mode='w+', prefix="ir-settings-", delete=True) as tmp:
tmp.write(yaml.safe_dump(vars_dict, default_flow_style=False))
# make sure created file is readable.
tmp.flush()
cli_args.extend(['--extra-vars', "@" + tmp.name])
if not bool(strtobool(os.environ.get('IR_NO_EXTRAS', 'no'))):
ir_extras = {
'infrared': {
'python': {
'executable': sys.executable,
'version': {
'full': sys.version.split()[0],
'major': sys.version_info.major,
'minor': sys.version_info.minor,
'micro': sys.version_info.micro,
}
}
}
}
cli_args.extend(['--extra-vars', str(ir_extras)])
cli = PlaybookCLI(cli_args)
LOG.debug('Starting ansible cli with args: {}'.format(cli_args[1:]))
try:
cli.parse()
stdout = not bool(
strtobool(os.environ.get('IR_ANSIBLE_NO_STDOUT', 'no')))
stderr = not bool(
strtobool(os.environ.get('IR_ANSIBLE_NO_STDERR', 'no')))
ansible_outputs_dir = \
os.path.join(ir_workspace.path, 'ansible_outputs')
ansible_vars_dir = \
os.path.join(ir_workspace.path, 'ansible_vars')
timestamp = datetime.utcnow().strftime("%Y-%m-%d_%H-%M-%S.%f")
filename_template = \
"ir_{timestamp}_{plugin_name}{postfix}.{file_ext}"
for _dir in (ansible_outputs_dir, ansible_vars_dir):
try:
os.makedirs(_dir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
if bool(strtobool(os.environ.get('IR_GEN_VARS_JSON', 'no'))):
filename = filename_template.format(
timestamp=timestamp,
plugin_name=ir_plugin.name,
postfix='',
file_ext='json'
)
vars_file = os.path.join(ansible_vars_dir, filename)
with open(vars_file, 'w') as fp:
json.dump(vars_dict, fp, indent=4, sort_keys=True)
with IRSTDFDManager(stdout=stdout, stderr=stderr) as fd_manager:
if bool(strtobool(os.environ.get(
'IR_ANSIBLE_LOG_OUTPUT', 'no'))):
filename = filename_template.format(
timestamp=timestamp,
plugin_name=ir_plugin.name,
postfix='',
file_ext='log'
)
log_file = os.path.join(ansible_outputs_dir, filename)
fd_manager.add(open(log_file, 'w'))
if bool(strtobool(os.environ.get(
'IR_ANSIBLE_LOG_OUTPUT_NO_ANSI', 'no'))):
filename = filename_template.format(
timestamp=timestamp,
plugin_name=ir_plugin.name,
postfix='_no_ansi',
file_ext='log'
)
log_file = os.path.join(ansible_outputs_dir, filename)
fd_manager.add(NoAnsiFile(open(log_file, 'w')))
# Return the result:
# 0: Success
# 1: "Error"
# 2: Host failed
# 3: Unreachable
# 4: Parser Error
# 5: Options error
return cli.run()
except (AnsibleParserError, AnsibleOptionsError) as error:
LOG.error('{}: {}'.format(type(error), error))
raise error
|
infrared/core/execute.py
|
codereval_python_data_225
|
Runs ansible cli with vars dict
:param vars_dict: dict, Will be passed as Ansible extra-vars
:param cli_args: the list of command line arguments
:param ir_workspace: An Infrared Workspace object represents the active
workspace
:param ir_plugin: An InfraredPlugin object of the current plugin
:return: ansible results
def _run_playbook(cli_args, vars_dict, ir_workspace, ir_plugin):
"""Runs ansible cli with vars dict
:param vars_dict: dict, Will be passed as Ansible extra-vars
:param cli_args: the list of command line arguments
:param ir_workspace: An Infrared Workspace object represents the active
workspace
:param ir_plugin: An InfraredPlugin object of the current plugin
:return: ansible results
"""
# TODO(yfried): use ansible vars object instead of tmpfile
# NOTE(oanufrii): !!!this import should be exactly here!!!
# Ansible uses 'display' singleton from '__main__' and
# gets it on module level. Since we monkeypatch our
# '__main__' in the 'ansible_playbook' function, the import of
# PlaybookCLI should happen after that to get the patched
# '__main__'. Otherwise ansible gets unpatched '__main__'
# and creates new 'display' object with default (0)
# verbosity.
# NOTE(afazekas): GlobalCLIArgs gets its value only once per invocation,
# and since it has a singleton decorator it would remember the old
# arguments across different tests, so we drop the singleton behaviour
# by swapping in the plain CLIArgs class.
try:
from ansible.utils import context_objects
context_objects.GlobalCLIArgs = context_objects.CLIArgs
except ImportError:
# older version
pass
from ansible.cli.playbook import PlaybookCLI
from ansible.errors import AnsibleOptionsError
from ansible.errors import AnsibleParserError
with tempfile.NamedTemporaryFile(
mode='w+', prefix="ir-settings-", delete=True) as tmp:
tmp.write(yaml.safe_dump(vars_dict, default_flow_style=False))
# make sure created file is readable.
tmp.flush()
cli_args.extend(['--extra-vars', "@" + tmp.name])
if not bool(strtobool(os.environ.get('IR_NO_EXTRAS', 'no'))):
ir_extras = {
'infrared': {
'python': {
'executable': sys.executable,
'version': {
'full': sys.version.split()[0],
'major': sys.version_info.major,
'minor': sys.version_info.minor,
'micro': sys.version_info.micro,
}
}
}
}
cli_args.extend(['--extra-vars', str(ir_extras)])
cli = PlaybookCLI(cli_args)
LOG.debug('Starting ansible cli with args: {}'.format(cli_args[1:]))
try:
cli.parse()
stdout = not bool(
strtobool(os.environ.get('IR_ANSIBLE_NO_STDOUT', 'no')))
stderr = not bool(
strtobool(os.environ.get('IR_ANSIBLE_NO_STDERR', 'no')))
ansible_outputs_dir = \
os.path.join(ir_workspace.path, 'ansible_outputs')
ansible_vars_dir = \
os.path.join(ir_workspace.path, 'ansible_vars')
timestamp = datetime.utcnow().strftime("%Y-%m-%d_%H-%M-%S.%f")
filename_template = \
"ir_{timestamp}_{plugin_name}{postfix}.{file_ext}"
for _dir in (ansible_outputs_dir, ansible_vars_dir):
try:
os.makedirs(_dir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
if bool(strtobool(os.environ.get('IR_GEN_VARS_JSON', 'no'))):
filename = filename_template.format(
timestamp=timestamp,
plugin_name=ir_plugin.name,
postfix='',
file_ext='json'
)
vars_file = os.path.join(ansible_vars_dir, filename)
with open(vars_file, 'w') as fp:
json.dump(vars_dict, fp, indent=4, sort_keys=True)
with IRSTDFDManager(stdout=stdout, stderr=stderr) as fd_manager:
if bool(strtobool(os.environ.get(
'IR_ANSIBLE_LOG_OUTPUT', 'no'))):
filename = filename_template.format(
timestamp=timestamp,
plugin_name=ir_plugin.name,
postfix='',
file_ext='log'
)
log_file = os.path.join(ansible_outputs_dir, filename)
fd_manager.add(open(log_file, 'w'))
if bool(strtobool(os.environ.get(
'IR_ANSIBLE_LOG_OUTPUT_NO_ANSI', 'no'))):
filename = filename_template.format(
timestamp=timestamp,
plugin_name=ir_plugin.name,
postfix='_no_ansi',
file_ext='log'
)
log_file = os.path.join(ansible_outputs_dir, filename)
fd_manager.add(NoAnsiFile(open(log_file, 'w')))
# Return the result:
# 0: Success
# 1: "Error"
# 2: Host failed
# 3: Unreachable
# 4: Parser Error
# 5: Options error
return cli.run()
except (AnsibleParserError, AnsibleOptionsError) as error:
LOG.error('{}: {}'.format(type(error), error))
raise error
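# Environment knobs read by _run_playbook above (illustrative values):
#
#   IR_NO_EXTRAS=yes                   skip injecting the 'infrared.python' extra vars
#   IR_ANSIBLE_NO_STDOUT=yes           suppress Ansible stdout on the console
#   IR_ANSIBLE_NO_STDERR=yes           suppress Ansible stderr on the console
#   IR_GEN_VARS_JSON=yes               dump the extra-vars dict to <workspace>/ansible_vars/*.json
#   IR_ANSIBLE_LOG_OUTPUT=yes          tee the run output to <workspace>/ansible_outputs/*.log
#   IR_ANSIBLE_LOG_OUTPUT_NO_ANSI=yes  also write an ANSI-stripped copy of the log
#
# All of them default to 'no' and are parsed with strtobool.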
from datetime import datetime
from distutils.util import strtobool
import errno
import json
import os
import re
import sys
import tempfile
from infrared.core.utils import logger
import yaml
LOG = logger.LOG
class NoAnsiFile(object):
re_ansi = re.compile(r'\x1b[^m]*m')
def __init__(self, fd):
self.fd = fd
def write(self, data):
no_ansi_data = self.re_ansi.sub('', data)
self.fd.write(no_ansi_data)
def close(self):
self.fd.close()
def flush(self):
self.fd.flush()
class IRStdFd(object):
pass
class IRStdoutFd(IRStdFd):
def __init__(self, print_stdout=True):
self.print_stdout = print_stdout
self.org_stdout = sys.stdout
sys.stdout = self
def write(self, data):
if self.print_stdout:
sys.__stdout__.write(data)
sys.__stdout__.flush()
for fd in IRSTDFDManager.fds:
if not isinstance(fd, IRStdFd):
fd.write(data)
fd.flush()
@staticmethod
def flush():
sys.__stdout__.flush()
@staticmethod
def close():
sys.stdout = sys.__stdout__
@staticmethod
def fileno():
return sys.__stdout__.fileno()
class IRStderrFd(IRStdFd):
def __init__(self, print_stderr=True):
self.print_stderr = print_stderr
self.org_stderr = sys.stderr
sys.stderr = self
def write(self, data):
if self.print_stderr:
sys.__stderr__.write(data)
sys.__stderr__.flush()
for fd in IRSTDFDManager.fds:
if not isinstance(fd, IRStdFd):
fd.write(data)
fd.flush()
@staticmethod
def flush():
sys.__stderr__.flush()
@staticmethod
def close():
sys.stderr = sys.__stderr__
class IRSTDFDManager(object):
fds = set()
def __init__(self, stdout=True, stderr=True, *fds):
self.stdout = stdout
self.stderr = stderr
for fd in fds:
self.add(fd)
self.add(IRStdoutFd(print_stdout=self.stdout))
self.add(IRStderrFd(print_stderr=self.stderr))
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def write(self, data):
for fd in self.fds:
fd.write(data)
fd.flush()
def flush(self):
for fd in self.fds:
fd.flush()
def close(self):
for fd in self.fds:
fd.close()
def add(self, fd):
self.fds.add(fd)
def ansible_playbook(ir_workspace, ir_plugin, playbook_path, verbose=None,
extra_vars=None, ansible_args=None):
"""Wraps the 'ansible-playbook' CLI.
:param ir_workspace: An Infrared Workspace object represents the active
workspace
:param ir_plugin: An InfraredPlugin object of the current plugin
:param playbook_path: the playbook to invoke
:param verbose: Ansible verbosity level
:param extra_vars: dict. Passed to Ansible as extra-vars
:param ansible_args: list of ansible-playbook arguments to plumb down
directly to Ansible.
"""
ansible_args = ansible_args or []
LOG.debug("Additional ansible args: {}".format(ansible_args))
# hack for verbosity
from ansible.utils.display import Display
display = Display(verbosity=verbose)
import __main__ as main
setattr(main, "display", display)
# TODO(yfried): Use proper ansible API instead of emulating CLI
cli_args = ['execute',
playbook_path,
'--inventory', ir_workspace.inventory]
# infrared should not change ansible verbosity unless user specifies that
if verbose:
cli_args.append('-' + 'v' * int(verbose))
cli_args.extend(ansible_args)
results = _run_playbook(cli_args, vars_dict=extra_vars or {},
ir_workspace=ir_workspace, ir_plugin=ir_plugin)
if results:
LOG.error('Playbook "%s" failed!' % playbook_path)
return results
def _run_playbook(cli_args, vars_dict, ir_workspace, ir_plugin):
"""Runs ansible cli with vars dict
:param vars_dict: dict, Will be passed as Ansible extra-vars
:param cli_args: the list of command line arguments
:param ir_workspace: An Infrared Workspace object represents the active
workspace
:param ir_plugin: An InfraredPlugin object of the current plugin
:return: ansible results
"""
# TODO(yfried): use ansible vars object instead of tmpfile
# NOTE(oanufrii): !!!this import should be exactly here!!!
# Ansible uses 'display' singleton from '__main__' and
# gets it on module level. Since we monkeypatch our
# '__main__' in the 'ansible_playbook' function, the import of
# PlaybookCLI should happen after that to get the patched
# '__main__'. Otherwise ansible gets unpatched '__main__'
# and creates new 'display' object with default (0)
# verbosity.
# NOTE(afazekas): GlobalCLIArgs gets its value only once per invocation,
# and since it has a singleton decorator it would remember the old
# arguments across different tests, so we drop the singleton behaviour
# by swapping in the plain CLIArgs class.
try:
from ansible.utils import context_objects
context_objects.GlobalCLIArgs = context_objects.CLIArgs
except ImportError:
# older version
pass
from ansible.cli.playbook import PlaybookCLI
from ansible.errors import AnsibleOptionsError
from ansible.errors import AnsibleParserError
with tempfile.NamedTemporaryFile(
mode='w+', prefix="ir-settings-", delete=True) as tmp:
tmp.write(yaml.safe_dump(vars_dict, default_flow_style=False))
# make sure created file is readable.
tmp.flush()
cli_args.extend(['--extra-vars', "@" + tmp.name])
if not bool(strtobool(os.environ.get('IR_NO_EXTRAS', 'no'))):
ir_extras = {
'infrared': {
'python': {
'executable': sys.executable,
'version': {
'full': sys.version.split()[0],
'major': sys.version_info.major,
'minor': sys.version_info.minor,
'micro': sys.version_info.micro,
}
}
}
}
cli_args.extend(['--extra-vars', str(ir_extras)])
cli = PlaybookCLI(cli_args)
LOG.debug('Starting ansible cli with args: {}'.format(cli_args[1:]))
try:
cli.parse()
stdout = not bool(
strtobool(os.environ.get('IR_ANSIBLE_NO_STDOUT', 'no')))
stderr = not bool(
strtobool(os.environ.get('IR_ANSIBLE_NO_STDERR', 'no')))
ansible_outputs_dir = \
os.path.join(ir_workspace.path, 'ansible_outputs')
ansible_vars_dir = \
os.path.join(ir_workspace.path, 'ansible_vars')
timestamp = datetime.utcnow().strftime("%Y-%m-%d_%H-%M-%S.%f")
filename_template = \
"ir_{timestamp}_{plugin_name}{postfix}.{file_ext}"
for _dir in (ansible_outputs_dir, ansible_vars_dir):
try:
os.makedirs(_dir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
if bool(strtobool(os.environ.get('IR_GEN_VARS_JSON', 'no'))):
filename = filename_template.format(
timestamp=timestamp,
plugin_name=ir_plugin.name,
postfix='',
file_ext='json'
)
vars_file = os.path.join(ansible_vars_dir, filename)
with open(vars_file, 'w') as fp:
json.dump(vars_dict, fp, indent=4, sort_keys=True)
with IRSTDFDManager(stdout=stdout, stderr=stderr) as fd_manager:
if bool(strtobool(os.environ.get(
'IR_ANSIBLE_LOG_OUTPUT', 'no'))):
filename = filename_template.format(
timestamp=timestamp,
plugin_name=ir_plugin.name,
postfix='',
file_ext='log'
)
log_file = os.path.join(ansible_outputs_dir, filename)
fd_manager.add(open(log_file, 'w'))
if bool(strtobool(os.environ.get(
'IR_ANSIBLE_LOG_OUTPUT_NO_ANSI', 'no'))):
filename = filename_template.format(
timestamp=timestamp,
plugin_name=ir_plugin.name,
postfix='_no_ansi',
file_ext='log'
)
log_file = os.path.join(ansible_outputs_dir, filename)
fd_manager.add(NoAnsiFile(open(log_file, 'w')))
# Return the result:
# 0: Success
# 1: "Error"
# 2: Host failed
# 3: Unreachable
# 4: Parser Error
# 5: Options error
return cli.run()
except (AnsibleParserError, AnsibleOptionsError) as error:
LOG.error('{}: {}'.format(type(error), error))
raise error
|
infrared/core/execute.py
|
codereval_python_data_226
|
Casts arguments to correct types by modifying values_dict param.
By default all the values are strings.
:param parser_name: The command name, e.g. main, virsh, ospd, etc
:param values_dict: The dict with the arguments
def _convert_non_cli_args(self, parser_name, values_dict):
"""Casts arguments to correct types by modifying values_dict param.
By default all the values are strings.
:param parser_name: The command name, e.g. main, virsh, ospd, etc
:param values_dict: The dict with the arguments
"""
for opt_name, opt_value in values_dict.items():
file_option_spec = self.spec_helper.get_option_spec(
parser_name, opt_name)
if file_option_spec.get('type', None) in ['int', ] or \
file_option_spec.get('action', None) in ['count', ]:
values_dict[opt_name] = int(opt_value)
return values_dict
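# Illustrative sketch (hypothetical spec): the cast applied above turns
# answers-file strings into ints for options whose spec declares type 'int'
# or action 'count'.
#
#   spec:   {'nodes': {'type': 'int'}, 'verbose': {'action': 'count'}, 'image': {}}
#   before: {'nodes': '3', 'verbose': '2', 'image': 'rhel-8.qcow2'}
#   after:  {'nodes': 3,   'verbose': 2,   'image': 'rhel-8.qcow2'}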
import collections
import os
from six.moves import configparser
from string import Template
import yaml
from infrared.core.cli.cli import CliParser
from infrared.core.cli.cli import COMPLEX_TYPES
from infrared.core.inspector import helper
from infrared.core.utils import dict_utils
from infrared.core.utils import exceptions
from infrared.core.utils import logger
LOG = logger.LOG
class SpecParser(object):
"""Parses input arguments from different sources (cli, answers file). """
@classmethod
def from_plugin(cls, subparser, plugin, base_groups):
"""Reads spec & vars from plugin and constructs the parser instance
:param subparser: argparse.subparser to extend
:param plugin: InfraredPlugin object
:param base_groups: dict, included groups
:return: SpecParser object based on given plugin spec & vars
"""
spec_dict = base_groups or {}
with open(plugin.spec) as stream:
spec = yaml.safe_load(stream) or {}
dict_utils.dict_merge(
base_groups,
spec,
dict_utils.ConflictResolver.unique_append_list_resolver)
# The "try-excpet" block here is for adding spec file path if it
# includes an unsupported option type
try:
return SpecParser(subparser, spec_dict, plugin.vars_dir,
plugin.defaults_dir, plugin.path)
except exceptions.IRUnsupportedSpecOptionType as ex:
ex.message += ' in file: {}'.format(plugin.spec)
raise ex
def __init__(self, subparser, spec_dict, vars_dir, defaults_dir,
plugin_path):
"""Constructor.
:param subparser: argparse.subparser to extend
:param spec_dict: dict with CLI description
:param vars_dir: Path to plugin's vars dir
:param defaults_dir: Path to plugin's defaults dir
"""
self.vars = vars_dir
self.defaults = defaults_dir
self.plugin_path = plugin_path
self.spec_helper = helper.SpecDictHelper(spec_dict)
# create parser
self.parser = CliParser.create_parser(self, subparser)
def add_shared_groups(self, list_of_groups):
"""Adds the user defined shared groups
:param list_of_groups: list, of group dicts
"""
shared_groups = self.spec_helper.spec_dict.get('shared_groups', [])
shared_groups.extend(list_of_groups)
self.spec_helper.spec_dict['shared_groups'] = shared_groups
def _get_defaults(self, default_getter_func):
"""Resolve arguments' values from cli or answers file.
:param default_getter_func: callable. will be called for all the
available options in spec file.
"""
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
default_value = default_getter_func(option)
if default_value is not None:
sub = parser['name']
result[sub][option['name']] = default_value
return result
def get_spec_defaults(self):
"""Resolve arguments' values from spec and other sources. """
def spec_default_getter(option):
"""Getter function to retrieve the default value from spec.
:param option: argument name
"""
# first try to get environment variable with IR_ prefix
default_value = SpecParser.get_env_option(option['name'])
if default_value is not None:
LOG.info(
"[environ] Loading '{0}' default value"
" '{1}' from the environment variable".format(
option['name'], default_value))
elif option.get('default', None) is not None:
default_value = option['default']
elif option.get('action', None) in ['store_true']:
default_value = False
return default_value
return self._get_defaults(spec_default_getter)
@staticmethod
def get_env_option(name):
"""Try get """
return os.environ.get('IR_' + name.upper().replace('-', '_'))
def get_deprecated_args(self):
"""Returning dict with options which deprecate others. """
result = collections.defaultdict(dict)
for parser, option in self.spec_helper.iterate_option_specs():
if option.get('deprecates') is not None:
result[option.get('deprecates')] = option.get('name')
return result
@staticmethod
def parse_env_variable_from_file(value):
if isinstance(value, str):
t = Template(value)
try:
value = t.substitute(os.environ)
except KeyError as undefined_var:
raise exceptions.IRAnswersFileEnvVarNotDefined(undefined_var)
return value
def get_answers_file_args(self, cli_args):
"""Resolve arguments' values from answers INI file. """
file_result = {}
args_to_remove = []
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
file_result[parser_name] = file_result.get(parser_name, {})
if option_spec and option_spec.get(
'action', '') == 'read-answers':
# Iterate over arguments supplied by file
for parsed_arg in parser_dict[arg_name]:
# Supplied arguments' value can be a list
if isinstance(parser_dict[arg_name][parsed_arg], list):
i = 0
                        # Iterate over the argument's list of values
for parsed_value in parser_dict[arg_name][parsed_arg]:
parser_dict[arg_name][parsed_arg][i] = \
SpecParser.parse_env_variable_from_file(parsed_value)
i += 1
else:
parser_dict[arg_name][parsed_arg] = \
SpecParser.parse_env_variable_from_file(parser_dict[arg_name][parsed_arg])
# we have config option. saving it.
self._convert_non_cli_args(
parser_name, parser_dict[arg_name])
dict_utils.dict_merge(
file_result[parser_name],
parser_dict[arg_name])
# remove from cli args
args_to_remove.append((parser_name, arg_name))
# remove parser dict outside loop to avoid iteration dict modification
for parser_name, arg_name in args_to_remove:
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in cli_args and spec_parser['name'] == parser_name:
parser_dict = cli_args[spec_parser['name']]
parser_dict.pop(arg_name)
break
return file_result
def generate_answers_file(self, cli_args, spec_defaults):
"""Generates answers INI file
:param cli_args: list, cli arguments.
:param spec_defaults: the default values.
"""
def put_option(config, parser_name, option_name, value):
for opt_help in option.get('help', '').split('\n'):
help_opt = '# ' + opt_help
# add help comment
if config.has_option(parser_name, help_opt):
config.remove_option(parser_name, help_opt)
config.set(
parser_name, help_opt)
if config.has_option(parser_name, option_name):
value = config.get(parser_name, option_name)
config.remove_option(parser_name, option_name)
config.set(
parser_name,
option_name,
str(value))
file_generated = False
    # generate the answers file for all the parsers
for (parser_name, parser_dict, arg_name, arg_value,
option_spec) in self._iterate_received_arguments(cli_args):
if option_spec and option_spec.get(
'action', '') == 'generate-answers':
options_to_save = \
self.spec_helper.get_parser_option_specs(parser_name)
out_answers = configparser.ConfigParser(allow_no_value=True)
if not out_answers.has_section(parser_name):
out_answers.add_section(parser_name)
for option in options_to_save:
opt_name = option['name']
if opt_name in parser_dict:
put_option(
out_answers,
parser_name,
opt_name,
parser_dict[opt_name])
elif opt_name in spec_defaults[parser_name]:
put_option(
out_answers,
parser_name,
opt_name,
spec_defaults[parser_name][opt_name])
elif option.get('required', False):
put_option(
out_answers,
parser_name,
'# ' + opt_name,
"Required argument. "
"Edit with one of the allowed values OR "
"override with "
"CLI: --{}=<option>".format(opt_name))
# write to file
with open(arg_value, 'w') as answers_file:
out_answers.write(answers_file)
file_generated = True
return file_generated
def resolve_custom_types(self, args):
"""Transforms the arguments with custom types
:param args: the list of received arguments.
"""
for parser_name, parser_dict in args.items():
spec_complex_options = [opt for opt in
self.spec_helper.get_parser_option_specs(
parser_name) if
opt.get('type', None) in COMPLEX_TYPES]
for spec_option in spec_complex_options:
option_name = spec_option['name']
if option_name in parser_dict:
# we have custom type to resolve
type_name = spec_option['type']
option_value = parser_dict[option_name]
action = self.create_complex_argumet_type(
parser_name,
type_name,
option_name,
spec_option)
# resolving value
parser_dict[option_name] = action.resolve(option_value)
def create_complex_argumet_type(self, subcommand, type_name, option_name,
spec_option):
"""Build the complex argument type
:param subcommand: the command name
:param type_name: the complex type name
:param option_name: the option name
:param spec_option: option's specifications
:return: the complex type instance
"""
complex_action = COMPLEX_TYPES.get(
type_name, None)
if complex_action is None:
raise exceptions.SpecParserException(
"Unknown complex type: {}".format(type_name))
return complex_action(
option_name,
(self.vars, self.defaults, self.plugin_path),
subcommand,
spec_option)
def parse_args(self, arg_parser, args=None):
"""Parses all the arguments (cli, answers file)
        :return: None, if ``--generate-answers-file`` is in arg_parser
:return: (dict, dict):
* command arguments dict (arguments to control the IR logic)
* nested arguments dict (arguments to pass to the playbooks)
"""
spec_defaults = self.get_spec_defaults()
cli_args = CliParser.parse_cli_input(arg_parser, args)
file_args = self.get_answers_file_args(cli_args)
# generate answers file and exit
if self.generate_answers_file(cli_args, spec_defaults):
LOG.warning("Answers file generated. Exiting.")
# print warnings when something was overridden from non-cli source.
self.validate_arg_sources(cli_args, file_args,
spec_defaults)
# print warnings for deprecated
self.validate_arg_deprecation(cli_args, file_args)
# now filter defaults to have only parser defined in cli
defaults = dict((key, spec_defaults[key])
for key in cli_args.keys() if
key in spec_defaults)
# copy cli args with the same name to all parser groups
self._merge_duplicated_cli_args(cli_args)
self._merge_duplicated_cli_args(file_args)
dict_utils.dict_merge(defaults, file_args)
dict_utils.dict_merge(defaults, cli_args)
self.validate_requires_args(defaults)
self.validate_length_args(defaults)
self.validate_choices_args(defaults)
self.validate_min_max_args(defaults)
# now resolve complex types.
self.resolve_custom_types(defaults)
nested, control, custom = \
self.get_nested_custom_and_control_args(defaults)
return nested, control, custom
def validate_arg_deprecation(self, cli_args, answer_file_args):
"""Validates and prints the deprecated arguments.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
"""
for deprecated, deprecates in self.get_deprecated_args().items():
for input_args in (answer_file_args.items(), cli_args.items()):
for command, command_dict in input_args:
if deprecated in command_dict:
if deprecates in command_dict:
raise exceptions.IRDeprecationException(
"[{}] Argument '{}' deprecates '{}',"
" please use only the new one.".format(
command, deprecated, deprecates))
if deprecated in answer_file_args[command]:
answer_file_args[command][deprecates] = \
answer_file_args[command][deprecated]
if deprecated in cli_args[command]:
cli_args[command][deprecates] = \
cli_args[command][deprecated]
LOG.warning(
"[{}] Argument '{}' was deprecated,"
" please use '{}'.".format(
command, deprecated, deprecates))
@staticmethod
def validate_arg_sources(cli_args, answer_file_args, spec_defaults):
"""Validates and prints the arguments' source.
:param cli_args: the dict of arguments from cli
:param answer_file_args: the dict of arguments from files
:param spec_defaults: the default values from spec files
"""
def show_diff(diff, command_name, cmd_dict, source_name):
if diff:
for arg_name in diff:
value = cmd_dict[arg_name]
LOG.info(
"[{}] Argument '{}' was set to"
" '{}' from the {} source.".format(
command_name, arg_name, value, source_name))
for command, command_dict in cli_args.items():
file_dict = answer_file_args.get(command, {})
file_diff = set(file_dict.keys()) - set(command_dict.keys())
show_diff(file_diff, command, file_dict, 'answers file')
def_dict = spec_defaults.get(command, {})
default_diff = set(def_dict.keys()) - set(
command_dict.keys()) - file_diff
show_diff(default_diff, command, def_dict, 'spec defaults')
def _get_conditionally_required_args(self, command_name, options_spec,
args):
"""List arguments with ``required_when`` condition matched.
:param command_name: the command name.
:param options_spec: the list of command spec options.
:param args: the received input arguments
:return: list, list of argument names with matched ``required_when``
condition
"""
opts_names = [option_spec['name'] for option_spec in options_spec]
missing_args = []
for option_spec in options_spec:
option_results = []
if option_spec and 'required_when' in option_spec:
req_when_args = [option_spec['required_when']] \
if not type(option_spec['required_when']) is list \
else option_spec['required_when']
# validate conditions
for req_when_arg in req_when_args:
splited_args_list = req_when_arg.split()
for idx, req_arg in enumerate(splited_args_list):
if req_arg in opts_names:
splited_args_list[idx] = \
args.get(command_name, {}).get(req_arg.strip())
if splited_args_list[idx] is None:
option_results.append(False)
break
splited_args_list[idx] = str(splited_args_list[idx])
if (splited_args_list[idx] not in ['and', 'or'] and
not any(
(c in '<>=') for c in splited_args_list[idx])):
splited_args_list[idx] = "'{0}'".format(
yaml.safe_load(splited_args_list[idx]))
else:
option_results.append(
eval(' '.join(splited_args_list)))
if all(option_results) and \
self.spec_helper.get_option_state(
command_name,
option_spec['name'],
args) == helper.OptionState['NOT_SET']:
missing_args.append(option_spec['name'])
return missing_args
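    # A minimal, hypothetical sketch of what a single ``required_when``
    # condition such as "version == 13" is reduced to by the loop above: the
    # referenced option name is replaced by its received value, every token
    # without a comparison operator is re-quoted (the original also normalises
    # tokens through yaml.safe_load), and the resulting string comparison is
    # evaluated. The option name and value here are illustrative only.
    @staticmethod
    def _required_when_condition_example():
        received = {'my-command': {'version': 13}}
        tokens = 'version == 13'.split()
        tokens[0] = str(received['my-command']['version'])
        tokens = [token if token in ('and', 'or') or
                  any(c in '<>=' for c in token)
                  else "'{0}'".format(token) for token in tokens]
        # tokens is now ["'13'", '==', "'13'"] and the condition holds
        return eval(' '.join(tokens))  # eval mirrors the original logic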
def validate_requires_args(self, args):
"""Check if all the required arguments have been provided. """
silent_args = self.get_silent_args(args)
def validate_parser(parser_name, expected_options, parser_args):
"""Helper method to resolve dict_merge. """
result = collections.defaultdict(list)
condition_req_args = self._get_conditionally_required_args(
parser_name, expected_options, args)
for option in expected_options:
name = option['name']
# check required options.
if (option.get('required', False) and
name not in parser_args or
option['name'] in condition_req_args) and \
name not in silent_args:
result[parser_name].append(name)
return result
res = {}
for command_data in self.spec_helper.iterate_parsers():
cmd_name = command_data['name']
if cmd_name in args:
dict_utils.dict_merge(
res,
validate_parser(
cmd_name,
self.spec_helper.get_parser_option_specs(cmd_name),
args[cmd_name]))
missing_args = dict((cmd_name, args)
for cmd_name, args in res.items() if len(args) > 0)
if missing_args:
raise exceptions.IRRequiredArgsMissingException(missing_args)
def validate_length_args(self, args):
"""Check if value of arguments is not longer than length specified.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'length' not in spec_option:
                    # skip options that do not contain length
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve length
length = spec_option['length']
option_value = parser_dict[option_name]
if len(option_value) > int(length):
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
length
))
if invalid_options:
# raise exception with all arguments that exceed length
raise exceptions.IRInvalidLengthException(invalid_options)
def validate_choices_args(self, args):
"""Check if value of choice arguments is one of the available choices.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if 'choices' not in spec_option:
                    # skip options that do not contain choices
continue
option_name = spec_option['name']
if option_name in parser_dict:
# resolve choices
choices = spec_option['choices']
option_value = parser_dict[option_name]
if option_value not in choices:
# found invalid option, append to list of invalid opts
invalid_options.append((
option_name,
option_value,
choices
))
if invalid_options:
            # raise exception with all arguments that contain invalid choices
raise exceptions.IRInvalidChoiceException(invalid_options)
def validate_min_max_args(self, args):
"""Check if value of arguments is between minimum and maximum values.
:param args: The received arguments.
"""
invalid_options = []
for parser_name, parser_dict in args.items():
for spec_option in \
self.spec_helper.get_parser_option_specs(parser_name):
if all([key not in spec_option
for key in ('maximum', 'minimum')]):
                    # skip options that contain neither minimum nor maximum
continue
option_name = spec_option['name']
if option_name in parser_dict:
option_value = parser_dict[option_name]
min_value = spec_option.get('minimum')
max_value = spec_option.get('maximum')
# handle empty values in spec files which load as None
min_value = '' if 'minimum' in spec_option \
and min_value is None else min_value
max_value = '' if 'maximum' in spec_option \
and max_value is None else max_value
values = {
"value": option_value,
"maximum": max_value,
"minimum": min_value
}
# make sure that values are numbers
is_all_values_numbers = True
for name, num in values.items():
if num is not None \
and (isinstance(num, bool) or
not isinstance(num, (int, float))):
invalid_options.append((
option_name,
name,
"number",
type(num).__name__
))
is_all_values_numbers = False
if not is_all_values_numbers:
# don't continue to min max checks since some of the
# values are not numbers
continue
# check bigger than minimum
if min_value is not None and option_value < min_value:
invalid_options.append((
option_name,
"minimum",
min_value,
option_value
))
# check smaller than maximum
if max_value is not None and option_value > max_value:
invalid_options.append((
option_name,
"maximum",
max_value,
option_value
))
if invalid_options:
            # raise exception with all arguments outside the min/max range
raise exceptions.IRInvalidMinMaxRangeException(invalid_options)
def get_silent_args(self, args):
"""list of silenced argument
:param args: The received arguments.
        :return: list, silenced argument names
"""
silent_args_names = []
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if arg_spec and 'silent' in arg_spec and \
self.spec_helper.get_option_state(
parser_name,
arg_name,
args) == helper.OptionState['IS_SET']:
silent_args_names.extend(arg_spec['silent'])
return list(set(silent_args_names))
def get_nested_custom_and_control_args(self, args):
"""Split input arguments to control nested and custom.
Controls arguments: control the IR behavior. These arguments
will not be put into the spec yml file
Nested arguments: are used by the Ansible playbooks and will be put
into the spec yml file.
Custom arguments: Custom ansible variables to be used instead of the
normal nested usage.
:param args: the collected list of args.
        :return: (dict, dict, dict): flat dicts (nested_args, control_args, custom_args)
"""
# returns flat dicts
nested = {}
control_args = {}
custom_args = {}
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(args):
if all([arg_spec, arg_spec.get('type', None),
arg_spec.get('type', None) in
[ctype_name for ctype_name, klass in
COMPLEX_TYPES.items() if klass.is_nested]
]) or ('is_shared_group_option' not in arg_spec):
if arg_name in nested:
LOG.warning(
"Duplicated nested argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, nested[arg_name]))
elif arg_name in custom_args:
LOG.warning(
"Duplicated custom argument found:'{}'. "
"Using old value: '{}'".format(
arg_name, custom_args[arg_name]))
else:
if "ansible_variable" in arg_spec:
custom_args[arg_spec["ansible_variable"]] = arg_value
else:
nested[arg_name] = arg_value
else:
if arg_name in control_args:
LOG.warning(
"Duplicated control argument found: '{}'. Using "
"old value: '{}'".format(
arg_name, control_args[arg_name]))
else:
control_args[arg_name] = arg_value
return nested, control_args, custom_args
def _iterate_received_arguments(self, args):
"""Iterator helper method over all the received arguments
:return: yields tuple:
(spec name, spec dict,
argument name, argument value, argument spec)
"""
for spec_parser in self.spec_helper.iterate_parsers():
if spec_parser['name'] in args:
parser_dict = args[spec_parser['name']]
for arg_name, arg_val in parser_dict.items():
arg_spec = self.spec_helper.get_option_spec(
spec_parser['name'], arg_name)
yield (spec_parser['name'], parser_dict,
arg_name, arg_val, arg_spec)
def _convert_non_cli_args(self, parser_name, values_dict):
"""Casts arguments to correct types by modifying values_dict param.
By default all the values are strings.
:param parser_name: The command name, e.g. main, virsh, ospd, etc
        :param values_dict: The dict with the arguments
"""
for opt_name, opt_value in values_dict.items():
file_option_spec = self.spec_helper.get_option_spec(
parser_name, opt_name)
if file_option_spec.get('type', None) in ['int', ] or \
file_option_spec.get('action', None) in ['count', ]:
values_dict[opt_name] = int(opt_value)
def _merge_duplicated_cli_args(self, cli_args):
"""Merge duplicated arguments to all the parsers
        This is needed to handle control args shared among several parsers,
for example, verbose, inventory
"""
for (parser_name, parser_dict, arg_name, arg_value,
arg_spec) in self._iterate_received_arguments(cli_args):
for parser_name2, parser_dict2 in cli_args.items():
if all([parser_name2, parser_name != parser_name2,
arg_name not in parser_dict2]):
if self.spec_helper.get_option_spec(parser_name2,
arg_name):
parser_dict2[arg_name] = arg_value
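# A minimal, hypothetical sketch of what _merge_duplicated_cli_args achieves:
# an argument received under one sub-parser (e.g. ``verbose``) is copied to
# every other sub-parser that also declares it but did not receive it. The
# parser and option names below are illustrative only.
def _merge_duplicated_args_example():
    declared = {'virsh': {'verbose', 'topology'}, 'tripleo': {'verbose'}}
    received = {'virsh': {'verbose': 3, 'topology': '1_controller'},
                'tripleo': {}}
    for parser_name, parser_dict in received.items():
        for arg_name, arg_value in list(parser_dict.items()):
            for other_name, other_dict in received.items():
                if (other_name != parser_name and
                        arg_name not in other_dict and
                        arg_name in declared[other_name]):
                    other_dict[arg_name] = arg_value
    # 'verbose' was copied to the tripleo group, 'topology' was not
    assert received['tripleo'] == {'verbose': 3}
    return received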
|
infrared/core/inspector/inspector.py
|
codereval_python_data_227
|
Creates a flat dict from the plugin spec
:param plugin_dir: A path to the plugin's dir
:return: A flattened dictionary containing the plugin's properties
def get_plugin_spec_flatten_dict(plugin_dir):
"""Creates a flat dict from the plugin spec
:param plugin_dir: A path to the plugin's dir
    :return: A flattened dictionary containing the plugin's properties
"""
with open(os.path.join(plugin_dir, PLUGIN_SPEC)) as fp:
spec_yaml = yaml.safe_load(fp)
plugin_name = list(spec_yaml['subparsers'].keys())[0]
plugin_description = spec_yaml['description'] \
if "description" in spec_yaml \
else spec_yaml['subparsers'][plugin_name]['description']
plugin_type = spec_yaml["config"]["plugin_type"] \
if "config" in spec_yaml \
else spec_yaml["plugin_type"]
plugin_spec_dict = dict(
name=plugin_name,
dir=plugin_dir,
description=plugin_description,
type=plugin_type
)
return plugin_spec_dict
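# A minimal, hypothetical usage sketch of the helper above: a plugin.spec with
# a 'config' section is written to a temporary directory and then flattened.
# The plugin name, type and description are illustrative only; the file name
# 'plugin.spec' matches the PLUGIN_SPEC constant used by the helper.
def _example_flatten_plugin_spec():
    import os
    import tempfile
    import yaml
    plugin_dir = tempfile.mkdtemp(prefix='ir-example-')
    spec = {
        'config': {'plugin_type': 'provision', 'entry_point': 'main.yml'},
        'subparsers': {'sample_plugin': {'description': 'Example plugin'}},
    }
    with open(os.path.join(plugin_dir, 'plugin.spec'), 'w') as fp:
        yaml.safe_dump(spec, fp)
    flat = get_plugin_spec_flatten_dict(plugin_dir)
    assert flat == {'name': 'sample_plugin',
                    'dir': plugin_dir,
                    'description': 'Example plugin',
                    'type': 'provision'}
    return flat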
from six.moves import configparser
import os
import git
import yaml
import shutil
import sys
import tarfile
import tempfile
import filecmp
import pytest
from infrared.core.utils.exceptions import IRPluginExistsException, \
IRUnsupportedPluginType
from infrared.core.utils.exceptions import IRFailedToAddPlugin
from infrared.core.utils.exceptions import IRValidatorException
from infrared.core.utils.exceptions import IRFailedToRemovePlugin
from infrared.core.utils.exceptions import IRFailedToUpdatePlugin
from infrared.core.utils.exceptions import IRUnsupportedSpecOptionType
from infrared.core.utils.dict_utils import dict_insert
import infrared.core.services.plugins
from infrared.core.services.plugins import InfraredPluginManager
from infrared.core.services.plugins import InfraredPlugin
from infrared.core.utils.validators import SpecValidator, RegistryValidator
from infrared.core.services import CoreServices, ServiceName
PLUGIN_SPEC = 'plugin.spec'
SAMPLE_PLUGINS_DIR = 'tests/example/plugins'
SUPPORTED_TYPES_DICT = dict(
supported_types=dict(
supported_type1='Tools of supported_type1',
supported_type2='Tools of supported_type2',
provision='Provisioning plugins',
install='Installing plugins',
test='Testing plugins'
)
)
@pytest.fixture()
def plugins_conf_fixture(tmpdir):
"""Creates temporary IR
:param tmpdir: builtin pytest fixtures to create temporary files & dirs
:return: plugins conf file as a LocalPath object (py.path)
"""
# Creates temporary plugins conf file
lp_dir = tmpdir.mkdir('test_tmp_dir')
lp_file = lp_dir.join('.plugins.ini')
try:
yield lp_file
finally:
lp_dir.remove()
@pytest.fixture()
def plugin_manager_fixture(plugins_conf_fixture):
"""Creates a PluginManager fixture
Creates a fixture which returns a PluginManager object based on
temporary plugins conf with default values(sections - provision, install &
test)
:param plugins_conf_fixture: fixture that returns a path of a temporary
plugins conf
"""
lp_file = plugins_conf_fixture
def plugin_manager_helper(plugins_conf_dict=None):
if plugins_conf_dict is None:
plugins_conf_dict = {}
plugins_conf_dict.update(SUPPORTED_TYPES_DICT)
with lp_file.open(mode='w') as fp:
config = configparser.ConfigParser()
for section, section_data in plugins_conf_dict.items():
config.add_section(section)
for option, value in section_data.items():
config.set(section, option, value)
config.write(fp)
CoreServices.register_service(
ServiceName.PLUGINS_MANAGER, InfraredPluginManager(
lp_file.strpath,
os.path.join(lp_file.dirname, "plugins")))
return CoreServices.plugins_manager()
yield plugin_manager_helper
@pytest.fixture()
def git_plugin_manager_fixture(tmpdir, plugin_manager_fixture):
"""Yields an IRPluginManager obj configured with git plugin
Just like plugin_manager_fixture but also create two temporary directories
that will be used to mimic local and remote git repos of an InfraRed's
plugin. The IRPluginManager that will be returned, will be configured with
this InfraRed git plugin.
:param tmpdir: builtin pytest fixtures to create temporary files & dirs
:param plugin_manager_fixture: Fixture object which yields
InfraredPluginManger object
"""
plugin_tar_gz = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'example/plugins/git_plugin/git_plugin_repo.tar.gz')
plugin_repo_dir = tmpdir.mkdir('plugin_repo_dir')
plugin_install_dir = tmpdir.mkdir('plugin_install_dir')
t_file = tarfile.open(plugin_tar_gz)
t_file.extractall(path=str(plugin_repo_dir))
repo = git.Repo.clone_from(
url=str(plugin_repo_dir),
to_path=str(plugin_install_dir))
repo.git.config('user.name', 'dummy-user')
repo.git.config('user.email', 'dummy@email.com')
plugin_spec_dict = get_plugin_spec_flatten_dict(str(plugin_install_dir))
try:
plugin_manager = plugin_manager_fixture({
plugin_spec_dict['type']: {
plugin_spec_dict['name']: str(plugin_install_dir)}
})
yield plugin_manager
finally:
plugin_repo_dir.remove()
plugin_install_dir.remove()
def get_plugin_spec_flatten_dict(plugin_dir):
"""Creates a flat dict from the plugin spec
:param plugin_dir: A path to the plugin's dir
    :return: A flattened dictionary containing the plugin's properties
"""
with open(os.path.join(plugin_dir, PLUGIN_SPEC)) as fp:
spec_yaml = yaml.safe_load(fp)
plugin_name = list(spec_yaml['subparsers'].keys())[0]
plugin_description = spec_yaml['description'] \
if "description" in spec_yaml \
else spec_yaml['subparsers'][plugin_name]['description']
plugin_type = spec_yaml["config"]["plugin_type"] \
if "config" in spec_yaml \
else spec_yaml["plugin_type"]
plugin_spec_dict = dict(
name=plugin_name,
dir=plugin_dir,
description=plugin_description,
type=plugin_type
)
return plugin_spec_dict
def plugin_in_conf(plugins_conf, plugin_type, plugin_name):
"""Checks if a plugin exists in a conf file
:param plugins_conf: A path to the plugins conf file
:param plugin_type: The plugin's type
:param plugin_name: The Plugin's name
:return: True if plugin is in the conf file, otherwise False
"""
config = configparser.ConfigParser()
with open(plugins_conf) as fp:
if (sys.version_info > (3, 2)):
config.read_file(fp)
else:
config.readfp(fp)
return config.has_option(plugin_type, plugin_name)
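# A minimal, hypothetical round-trip sketch for the helper above: a plugins
# conf with a single 'provision' plugin is written with configparser and then
# queried through plugin_in_conf. The section, plugin name and path are
# illustrative only.
def _example_plugin_in_conf_roundtrip():
    fd, conf_path = tempfile.mkstemp(suffix='.ini')
    os.close(fd)
    config = configparser.ConfigParser()
    config.add_section('provision')
    config.set('provision', 'virsh', '/path/to/plugins/virsh')
    with open(conf_path, 'w') as fp:
        config.write(fp)
    try:
        assert plugin_in_conf(conf_path, 'provision', 'virsh')
        assert not plugin_in_conf(conf_path, 'provision', 'missing_plugin')
    finally:
        os.unlink(conf_path)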
def test_add_plugin(plugin_manager_fixture):
"""Tests the ability to add plugins
:param plugin_manager_fixture: Fixture object which yields
InfraredPluginManger object
"""
plugin_manager = plugin_manager_fixture()
for plugin_dir, plugins_cnt in (
('type1_plugin1', 1), # Add a plugin
('type1_plugin2', 2), # Add a plugin - same type
('type2_plugin1', 3)): # Add a plugin - different type
plugin_dict = get_plugin_spec_flatten_dict(
os.path.join(SAMPLE_PLUGINS_DIR, plugin_dir))
plugin_manager.add_plugin(plugin_dict['dir'])
assert plugin_dict['name'] in plugin_manager.PLUGINS_DICT,\
"Plugin wasn't added to the plugins manager."
assert plugin_in_conf(
plugins_conf=plugin_manager.config_file,
plugin_type=plugin_dict['type'],
plugin_name=plugin_dict['name']), \
"Plugin wasn't added to conf file."
assert len(plugin_manager.PLUGINS_DICT) == plugins_cnt
def test_load_plugin(plugin_manager_fixture):
"""Test that an existing plugin can be loaded and it's properties
:param plugin_manager_fixture: Fixture object which yields
InfraredPluginManger object
"""
plugin_dir = 'type1_plugin1'
plugin_dict = get_plugin_spec_flatten_dict(
os.path.join(os.path.abspath(SAMPLE_PLUGINS_DIR), plugin_dir))
plugin_manager = plugin_manager_fixture({
plugin_dict['type']: {
plugin_dict['name']: plugin_dict['dir']}
})
plugin = plugin_manager.get_plugin(plugin_name=plugin_dict['name'])
assert type(plugin) is InfraredPlugin, "Failed to add a plugin"
assert plugin.name == plugin_dict['name'], "Wrong plugin name"
assert plugin.description == plugin_dict['description'], \
'Wrong plugin description'
def test_entry_point(plugin_manager_fixture):
"""Test that spec file has a valid entry point
:param plugin_manager_fixture: Fixture object which yields
InfraredPluginManger object
"""
plugin_dir = 'plugin_with_entry_point'
plugin_dict = get_plugin_spec_flatten_dict(
os.path.join(os.path.abspath(SAMPLE_PLUGINS_DIR), plugin_dir))
plugin_manager = plugin_manager_fixture({
plugin_dict['type']: {
plugin_dict['name']: plugin_dict['dir']}
})
plugin = plugin_manager.get_plugin(plugin_name=plugin_dict['name'])
assert plugin.playbook == os.path.join(plugin_dict['dir'], "example.yml")
def test_add_plugin_with_same_name(plugin_manager_fixture):
"""Tests that it not possible to add a plugin with a name that already
exists
:param plugin_manager_fixture: Fixture object which yields
InfraredPluginManger object
"""
plugin_dir = 'type1_plugin1'
plugin_dict = get_plugin_spec_flatten_dict(
os.path.join(SAMPLE_PLUGINS_DIR, plugin_dir))
plugin_manager = plugin_manager_fixture({
plugin_dict['type']: {
plugin_dict['name']: plugin_dict['dir']}
})
plugins_cfg_mtime_before_add = os.path.getmtime(plugin_manager.config_file)
plugins_cnt_before_try = len(plugin_manager.PLUGINS_DICT)
with pytest.raises(IRPluginExistsException):
plugin_manager.add_plugin(plugin_dict['dir'])
assert plugins_cnt_before_try == len(plugin_manager.PLUGINS_DICT)
assert os.path.getmtime(
plugin_manager.config_file) == plugins_cfg_mtime_before_add, \
"Plugins configuration file has been modified."
def test_add_plugin_unsupported_type(plugin_manager_fixture):
"""Test that it's not possible to add a plugin from unsupported type
:param plugin_manager_fixture: Fixture object which yields
InfraredPluginManger object
"""
plugin_manager = plugin_manager_fixture()
plugin_dict = get_plugin_spec_flatten_dict(
os.path.join(SAMPLE_PLUGINS_DIR, 'unsupported_plugin'))
plugins_cfg_mtime_before_add = os.path.getmtime(plugin_manager.config_file)
plugins_cnt_before_try = len(plugin_manager.PLUGINS_DICT)
with pytest.raises(IRUnsupportedPluginType):
plugin_manager.add_plugin(plugin_dict['dir'])
assert not plugin_in_conf(
plugins_conf=plugin_manager.config_file,
plugin_type=plugin_dict['type'],
plugin_name=plugin_dict['name']), \
"Plugin was added to conf file."
assert plugins_cnt_before_try == len(plugin_manager.PLUGINS_DICT)
assert os.path.getmtime(
plugin_manager.config_file) == plugins_cfg_mtime_before_add, \
"Plugins configuration file has been modified."
def test_remove_plugin(plugin_manager_fixture):
""" Tests the ability to remove a plugin
:param plugin_manager_fixture: Fixture object which yields
InfraredPluginManger object
"""
plugins_conf = {}
for plugin_dir in ('type1_plugin1', 'type1_plugin2', 'type2_plugin1'):
plugin_dict = get_plugin_spec_flatten_dict(
os.path.join(os.path.abspath(SAMPLE_PLUGINS_DIR), plugin_dir))
dict_insert(plugins_conf,
plugin_dict['dir'],
plugin_dict['type'],
plugin_dict['name'],)
plugin_manager = plugin_manager_fixture(plugins_conf)
for plugin_dir, plugins_cnt in (
('type1_plugin1', 2),
('type2_plugin1', 1),
('type1_plugin2', 0)):
plugin_dict = get_plugin_spec_flatten_dict(
os.path.join(SAMPLE_PLUGINS_DIR, plugin_dir))
assert plugin_dict['name'] in plugin_manager.PLUGINS_DICT, \
"Can't remove unexisting plugin"
plugin_manager.remove_plugin(plugin_dict['name'])
with pytest.raises(KeyError):
plugin_manager.get_plugin(plugin_name=plugin_dict['name'])
assert not plugin_in_conf(
plugins_conf=plugin_manager.config_file,
plugin_type=plugin_dict['type'],
plugin_name=plugin_dict['name']), \
"Plugin wasn't removed from conf file."
assert len(plugin_manager.PLUGINS_DICT) == plugins_cnt
def test_remove_unexisting_plugin(plugin_manager_fixture):
"""Tests the behavior of removing unexisting plugin
Checks that no exception is being raised and no changes in
InfraredPluginManager dict and configuration file
:param plugin_manager_fixture: Fixture object which yields
InfraredPluginManger object
"""
plugin_manager = plugin_manager_fixture()
plugins_cfg_mtime_before_add = os.path.getmtime(plugin_manager.config_file)
plugins_cnt_before_try = len(plugin_manager.PLUGINS_DICT)
with pytest.raises(IRFailedToRemovePlugin):
plugin_manager.remove_plugin('unexisting_plugin')
assert plugins_cnt_before_try == len(plugin_manager.PLUGINS_DICT)
assert os.path.getmtime(
plugin_manager.config_file) == plugins_cfg_mtime_before_add, \
"Plugins configuration file has been modified."
@pytest.mark.parametrize("input_args, plugins_conf", [
("plugin list", None),
("plugin add tests/example/plugins/type1_plugin1", None),
("plugin remove type1_plugin1", dict(
supported_type1=dict(
type1_plugin1='tests/example/plugins/type1_plugin1'))),
("plugin add "
"tests/example/plugins/type1_plugin1 "
"tests/example/plugins/type1_plugin2", None),
("plugin remove type1_plugin1 type1_plugin2", dict(
supported_type1=dict(
type1_plugin1='tests/example/plugins/type1_plugin1',
type1_plugin2='tests/example/plugins/type1_plugin2'))),
])
def test_plugin_cli(plugin_manager_fixture, input_args, plugins_conf):
"""Tests that plugin CLI works
:param plugin_manager_fixture: Fixture object which yields
InfraredPluginManger object
:param input_args: infrared's testing arguments
:param plugins_conf: Plugins conf data as a dictionary
"""
plugin_manager_fixture(plugins_conf)
from infrared.main import main as ir_main
rc = ir_main(input_args.split())
assert rc == 0, \
"Return code ({}) != 0, cmd='infrared {}'".format(rc, input_args)
def test_add_plugin_no_spec(plugin_manager_fixture):
"""Tests that it's not possible to add plugin without a spec file
:param plugin_manager_fixture: Fixture object which yields
InfraredPluginManger object
"""
plugin_dir = os.path.join(SAMPLE_PLUGINS_DIR, 'plugin_without_spec')
plugin_manager = plugin_manager_fixture({})
plugins_cfg_mtime_before_add = os.path.getmtime(plugin_manager.config_file)
plugins_cnt_before_try = len(plugin_manager.PLUGINS_DICT)
with pytest.raises(IRValidatorException):
plugin_manager.add_plugin(plugin_dir)
assert plugins_cnt_before_try == len(plugin_manager.PLUGINS_DICT)
assert os.path.getmtime(
plugin_manager.config_file) == plugins_cfg_mtime_before_add, \
"Plugins configuration file has been modified."
@pytest.mark.parametrize("description, plugin_spec", [
('no_description', {
'plugin_type': 'supported_type',
'subparsers': {
'sample_plugin1:': {}}}),
('no_type', {
'description': 'some plugin description',
'subparsers': {
'sample_plugin1:': {}}}),
('no_value', {
'plugin_type': '',
'subparsers': {
'sample_plugin1:': {}}}),
('no_subparsers_key', {
'plugin_type': 'supported_type',
'description': 'some plugin description'}),
('no_subparsers_value', {
'plugin_type': 'supported_type',
'description': 'some plugin description',
'subparsers': ''}),
('no_entry_point_value',{
'plugin_type': 'supported_type',
'entry_point': '',
'subparsers': {
'sample_plugin1:': {}}}),
('no_entry_point_value_in_config',{
'config': {
"plugin_type": 'supported_type',
"entry_point": '',
},
'subparsers': {
'sample_plugin1:': {}}}),
('no_type_in_config', {
'config': {
},
'description': 'some plugin description',
'subparsers': {
'sample_plugin1:': {}}}),
])
def test_add_plugin_corrupted_spec(tmpdir_factory, description, plugin_spec):
"""Tests that it's not possible to add a plugin with invalid spec file
:param tmpdir_factory: pytest builtin fixture for creating temp dirs
:param description: test description (adds a description in pytest run)
:param plugin_spec: dictionary with data for spec file
:return:
"""
lp_dir = tmpdir_factory.mktemp('test_tmp_dir')
lp_file = lp_dir.join('plugin.spec')
with open(lp_file.strpath, 'w') as fp:
yaml.dump(plugin_spec, fp, default_flow_style=True)
try:
with pytest.raises(IRValidatorException):
SpecValidator.validate_from_file(lp_file.strpath)
finally:
lp_dir.remove()
def test_plugin_with_unsupporetd_option_type_in_spec(plugin_manager_fixture):
"""Tests that the user get a proper error
:param plugin_manager_fixture: Fixture object which yields
InfraredPluginManger object
"""
plugin_dir = os.path.join(SAMPLE_PLUGINS_DIR,
'plugin_with_unsupported_option_type_in_spec')
plugin_dict = get_plugin_spec_flatten_dict(plugin_dir)
plugin_manager = plugin_manager_fixture()
plugin_manager.add_plugin(plugin_dir)
from infrared.main import main as ir_main
with pytest.raises(IRUnsupportedSpecOptionType):
ir_main([plugin_dict['name'], '--help'])
def test_add_plugin_from_git(plugin_manager_fixture, mocker):
plugin_manager = plugin_manager_fixture()
mock_git = mocker.patch("infrared.core.services.plugins.git.Repo")
mock_os = mocker.patch("infrared.core.services.plugins.os")
mock_os.path.exists.return_value = False
mock_os.listdir.return_value = ["sample_plugin"]
mock_tempfile = mocker.patch("infrared.core.services.plugins.tempfile")
mock_shutil = mocker.patch("infrared.core.services.plugins.shutil")
plugin_dict = get_plugin_spec_flatten_dict(
os.path.join(SAMPLE_PLUGINS_DIR, 'type1_plugin1'))
mock_os.path.join.return_value = os.path.join(plugin_dict["dir"],
PLUGIN_SPEC)
# add_plugin call
plugin_manager.add_plugin(
"https://sample_github.null/plugin_repo.git", rev="test",
skip_roles=True)
mock_tempfile.mkdtemp.assert_called_once()
mock_git.clone_from.assert_called_with(
url='https://sample_github.null/plugin_repo.git',
to_path=mock_os.path.join.return_value,
kill_after_timeout=300)
mock_os.join.has_call(SAMPLE_PLUGINS_DIR, mock_os.listdir.return_value[0])
mock_os.join.has_call(mock_tempfile.mkdtemp.return_value,
mock_os.listdir.return_value[0])
mock_shutil.rmtree.assert_called_with(mock_os.path.join.return_value)
def test_add_plugin_from_git_dirname_from_spec(plugin_manager_fixture, mocker):
"""
Validate that we take the folder name from the spec plugin name
instead of the git repo name
:param plugin_manager_fixture: Fixture object which yields
InfraredPluginManger object
:param mocker: mocker fixture
"""
def clone_from_side_effect(url, to_path, **kwargs):
"""
Define a side effect function to override the
original behaviour of clone_from
"""
shutil.copytree(src=plugin_dict["dir"], dst=to_path)
plugin_manager = plugin_manager_fixture()
mock_git = mocker.patch("infrared.core.services.plugins.git.Repo")
# use side effect to use copytree instead of original clone
mock_git.clone_from.side_effect = clone_from_side_effect
mock_os_path_exists = mocker.patch(
"infrared.core.services.plugins.os.path.exists")
# set to false in order to enter the git section
# in if/else inside add_plugin func
mock_os_path_exists.return_value = False
mock_tempfile = mocker.patch("infrared.core.services.plugins.tempfile")
mock_tempfile.mkdtemp.return_value = tempfile.mkdtemp(prefix="ir-")
mock_shutil = mocker.patch("infrared.core.services.plugins.shutil")
plugin_dict = get_plugin_spec_flatten_dict(
os.path.abspath(os.path.join(SAMPLE_PLUGINS_DIR, 'type1_plugin1')))
# add_plugin call
with pytest.raises(IRFailedToAddPlugin):
plugin_manager.add_plugin(
"https://sample_github.null/plugin_repo.git")
mock_shutil.rmtree.assert_called_with(os.path.join(
mock_tempfile.mkdtemp.return_value, "plugin_repo"))
# clean tmp folder
shutil.rmtree(mock_tempfile.mkdtemp.return_value)
# check it was cloned with the temp name
mock_git.clone_from.assert_called_with(
url='https://sample_github.null/plugin_repo.git',
to_path=os.path.join(
mock_tempfile.mkdtemp.return_value, "plugin_repo"),
kill_after_timeout=300)
# check that it was copied with the plugin name and not repo name
mock_shutil.copytree.assert_called_with(
os.path.join(mock_tempfile.mkdtemp.return_value, "plugin_repo"),
os.path.join(plugin_manager.plugins_dir, plugin_dict["name"]))
def test_add_plugin_from_git_exception(plugin_manager_fixture, mocker):
plugin_manager = plugin_manager_fixture()
mock_git = mocker.patch("infrared.core.services.plugins.git")
mock_git.Repo.clone_from.side_effect = git.exc.GitCommandError(
"some_git_cmd", 1)
mock_git.exc.GitCommandError = git.exc.GitCommandError
mock_tempfile = mocker.patch("infrared.core.services.plugins.tempfile")
mock_shutil = mocker.patch("infrared.core.services.plugins.shutil")
mock_os = mocker.patch("infrared.core.services.plugins.os")
mock_os.path.exists.return_value = False
# add_plugin call
with pytest.raises(IRFailedToAddPlugin):
plugin_manager.add_plugin(
"https://sample_github.null/plugin_repo.git")
mock_shutil.rmtree.assert_called_with(mock_tempfile.mkdtemp.return_value)
def validate_plugins_presence_in_conf(
plugin_manager, plugins_dict, present=True):
"""Validate presence of plugins in the configuration file
:param plugin_manager: InfraredPluginManager object
:param plugins_dict: Dict of plugins
{plugin_name: plugin_dir_path, ...}
:param present: Whether all plugins in the dict should be present in the
plugins configuration file or not.
"""
assert present in (True, False), \
"'absent' accept only Boolean values, got: '{}'".format(str(present))
with open(plugin_manager.config_file) as config_file:
plugins_cfg = configparser.ConfigParser()
if (sys.version_info > (3, 2)):
plugins_cfg.read_file(config_file)
else:
plugins_cfg.readfp(config_file)
for plugin_path in plugins_dict.values():
plugin = InfraredPlugin(plugin_path['src'])
if present:
assert plugins_cfg.has_option(plugin.type, plugin.name), \
"Plugin '{}' was suppose to be in the plugins " \
"configuration file".format(plugin.name)
else:
assert not plugins_cfg.has_option(plugin.type, plugin.name), \
"Plugin '{}' wasn't suppose to be in the plugins " \
"configuration file".format(plugin.name)
def test_plugin_add_all(plugin_manager_fixture):
"""Tests the add and remove all plugins functioning
:param plugin_manager_fixture: Fixture object which yields
InfraredPluginManger object
"""
tests_plugins = (
'provision_plugin1', 'provision_plugin2',
'install_plugin1', 'install_plugin2',
'test_plugin1', 'test_plugin2'
)
tests_plugins_dir = 'tests/example/plugins/add_remove_all_plugins/'
plugins_registry = \
dict((pname, {'src': os.path.join(tests_plugins_dir, pname)})
for pname in tests_plugins)
plugin_manager = plugin_manager_fixture()
# Validates that plugins aren't in configuration file from the beginning
validate_plugins_presence_in_conf(
plugin_manager, plugins_registry, present=False)
# Validates all plugins are in the configuration file
plugin_manager.add_all_available(plugins_registry=plugins_registry)
validate_plugins_presence_in_conf(
plugin_manager, plugins_registry, present=True)
# Validates all plugins are no longer in the configuration file
plugin_manager.remove_all()
validate_plugins_presence_in_conf(
plugin_manager, plugins_registry, present=False)
def test_git_plugin_update(git_plugin_manager_fixture):
"""Tests the git plugin update functionality
Tests the following:
1. Plugin update without new changes
2. Plugin update to an older commit
3. No update when there are local changes
4. Switch back to master after checking out old revision
5. Switch to revision that does not exists
:param git_plugin_manager_fixture: Fixture object which yields
InfraredPluginManger object with git plugin installed
"""
gpm = git_plugin_manager_fixture
repo = git.Repo(gpm.get_plugin('git_plugin').path)
commits_list = repo.git.rev_list('HEAD').splitlines()
assert len(commits_list) > 1, \
"Can perform the test without at least two commits"
# Plugin update without new changes
assert gpm.update_plugin('git_plugin') is None, \
"Failed to pull changes from remote with up-to-date local branch"
# Plugin update to an older commit
gpm.update_plugin(plugin_name='git_plugin', revision=commits_list[-1])
assert commits_list[-1] == repo.git.rev_parse('HEAD'), \
"Failed to Update plugin to: {}".format(commits_list[-1])
# No update when there are local changes
file_name = os.path.join(repo.working_dir, 'test.txt')
# create new file and add it to git to create local changes
with open(file_name, 'w') as f:
f.write('test')
repo.git.add([file_name])
with pytest.raises(IRFailedToUpdatePlugin):
gpm.update_plugin(plugin_name='git_plugin')
assert commits_list[-1] == repo.git.rev_parse('HEAD'), \
"Plugin wasn't supposed to be changed when update failed..."
# Switch back to master after checking out old revision
gpm.update_plugin(plugin_name='git_plugin',
revision='master', hard_reset=True)
assert commits_list[0] == repo.git.rev_parse('HEAD'), \
"Plugin haven't been updated from '{}' to '{}'".format(
commits_list[-1], commits_list[0])
# Switch to revision that does not exists
branch_before = repo.active_branch
with pytest.raises(IRFailedToUpdatePlugin):
gpm.update_plugin(plugin_name='git_plugin', revision='not_exists_rev')
assert branch_before == repo.active_branch, \
"Plugin's revision wasn't supposed to change"
@pytest.mark.parametrize("description, registry_yaml", [
('no_type', {
'some_plugin_name': {
'src': '/path/to/plugin',
'desc': 'some plugin description'
}
}),
('no_desc', {
'some_plugin_name': {
'src': '/path/to/plugin',
'type': 'supported_type'
}
}),
('no_src', {
'some_plugin_name': {
'desc': 'some plugin description',
'type': 'supported_type'
}
}),
('empty_revision', {
'some_plugin_name': {
'src': '/path/to/plugin',
'type': 'supported_type',
'desc': 'some plugin description',
'rev': ''
}
}),
('empty_src_path', {
'some_plugin_name': {
'src': '/path/to/plugin',
'type': 'supported_type',
'desc': 'some plugin description',
'src_path': ''
}
}),
('empty_plugin_key', {
'': {
'src': '/path/to/plugin',
'type': 'supported_type',
'desc': 'some plugin description',
'src_path': ''
}
}),
('additional_not_allowed_param', {
'': {
'src': '/path/to/plugin',
'type': 'supported_type',
'desc': 'some plugin description',
'src_path': '/relative/path',
'rev': 'some_rev',
'not_allowed_additional_key': 'some_value'
}
}),
])
def test_import_plugins_corrupted_registry(tmpdir_factory, description,
registry_yaml):
"""
Tests that it's not possible to import plugins with invalid registry file
:param tmpdir_factory: pytest builtin fixture for creating temp dirs
:param description: test description (adds a description in pytest run)
:param registry_yaml: dictionary with data for registry file
:return:
"""
lp_dir = tmpdir_factory.mktemp('test_tmp_dir')
lp_file = lp_dir.join('registry.yaml')
with open(lp_file.strpath, 'w') as fp:
yaml.dump(registry_yaml, fp, default_flow_style=True)
try:
with pytest.raises(IRValidatorException):
RegistryValidator.validate_from_file(lp_file.strpath)
finally:
lp_dir.remove()
def test_import_plugins_from_registry(tmpdir, plugin_manager_fixture):
"""
Test that plugins import actually imports the plugins specified in the
registry file supplied
:param tmpdir: pytest builtin fixture for creating temp dirs
:param plugin_manager_fixture: Fixture object which yields
"""
plugin_manager = plugin_manager_fixture()
plugins_registry = os.path.join(SAMPLE_PLUGINS_DIR, "registry_example.yml")
with open(plugins_registry) as fp:
registry_yaml = yaml.safe_load(fp)
# prepare tmp library folder to hold the dependencies
tmp_pluginss_dir = str(tmpdir.mkdir("tmp_pluginss_dir"))
plugin_manager.plugins_dir = tmp_pluginss_dir
# Validates that plugins aren't in configuration file from the beginning
validate_plugins_presence_in_conf(
plugin_manager, registry_yaml, present=False)
# import all plugins from registry
plugin_manager.import_plugins(plugins_registry)
# check that plugins were copied to the plugins directory
assert os.path.isdir(os.path.join(
tmp_pluginss_dir, 'type1_plugin1'))
assert os.path.isdir(os.path.join(
tmp_pluginss_dir, 'type2_plugin1'))
assert os.path.isdir(os.path.join(
tmp_pluginss_dir, 'type1_plugin2'))
# Validates all plugins are in the configuration file
validate_plugins_presence_in_conf(
plugin_manager, registry_yaml, present=True)
def test_add_plugin_with_src_path(plugin_manager_fixture, mocker):
"""
Validates that add plugin copies the whole directory and only reference
to the plugin inside the directory
:param plugin_manager_fixture: Fixture object which yields
InfraredPluginManger object
:param mocker: mocker fixture
"""
def clone_from_side_effect(url, to_path, **kwargs):
"""
Define a side effect function to override the
original behaviour of clone_from
"""
shutil.copytree(src=plugin_src, dst=to_path)
return to_path
plugin_manager = plugin_manager_fixture()
mock_git = mocker.patch("infrared.core.services.plugins.git.Repo")
# use side effect to use copytree instead of original clone
mock_git.clone_from.side_effect = clone_from_side_effect
plugin_src = os.path.abspath(os.path.join(SAMPLE_PLUGINS_DIR,
"plugin_with_src_path"))
# add_plugin call
plugin_manager.add_plugin(
plugin_source="https://sample_github.null/plugin_repo.git",
plugin_src_path="infrared_plugin")
plugin = plugin_manager.get_plugin("plugin_with_src_path")
expected_plugin_path = os.path.join(plugin_manager.plugins_dir,
"plugin_with_src_path")
expected_plugin_src_path = \
os.path.join(expected_plugin_path, "infrared_plugin")
assert expected_plugin_src_path == plugin.path, \
"Plugin path is not as expected"
# compare the dirs before and after to make sure we copied it entirely
dirs_cmp = filecmp.dircmp(plugin_src, expected_plugin_path)
assert dirs_cmp.right_list == dirs_cmp.left_list, \
"Plugin directory is does not contain the original files from " \
"the original plugin source."
|
tests/test_plugins.py
|
codereval_python_data_228
|
Set the environment variable for config path, if it is undefined.
def inject_config(self):
"""Set the environment variable for config path, if it is undefined."""
if os.environ.get('ANSIBLE_CONFIG', '') == '':
os.environ['ANSIBLE_CONFIG'] = self.ansible_config_path
return os.environ['ANSIBLE_CONFIG']
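# A minimal, hypothetical sketch of the "only if undefined" behaviour above:
# an already exported ANSIBLE_CONFIG is left untouched, while an empty or
# missing one is replaced by the manager's path. The paths are illustrative.
def _example_inject_config_behaviour():
    import os
    manager_path = '/home/user/.infrared/ansible.cfg'
    # unset -> the manager's config path is exported
    os.environ.pop('ANSIBLE_CONFIG', None)
    if os.environ.get('ANSIBLE_CONFIG', '') == '':
        os.environ['ANSIBLE_CONFIG'] = manager_path
    assert os.environ['ANSIBLE_CONFIG'] == manager_path
    # already set -> the user's choice wins
    os.environ['ANSIBLE_CONFIG'] = '/custom/ansible.cfg'
    if os.environ.get('ANSIBLE_CONFIG', '') == '':
        os.environ['ANSIBLE_CONFIG'] = manager_path
    assert os.environ['ANSIBLE_CONFIG'] == '/custom/ansible.cfg'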
from collections import OrderedDict
import os
from six.moves import configparser
from infrared.core.utils import logger
from infrared.core.utils.validators import AnsibleConfigValidator
LOG = logger.LOG
class AnsibleConfigManager(object):
def __init__(self, infrared_home):
"""Constructor.
        :param infrared_home: A path to the infrared home directory
"""
self.ansible_config_path = self._get_ansible_conf_path(infrared_home)
config_validator = AnsibleConfigValidator()
if not os.path.isfile(self.ansible_config_path):
self._create_ansible_config(infrared_home)
else:
config_validator.validate_from_file(self.ansible_config_path)
@staticmethod
def _get_ansible_conf_path(infrared_home):
"""Get path to Ansible config.
Check for Ansible config in specific locations and return the first
located.
:param infrared_home: infrared's home directory
:return: the first located Ansible config
"""
locations_list = [
os.path.join(os.getcwd(), 'ansible.cfg'),
os.path.join(infrared_home, 'ansible.cfg'),
os.path.join(os.path.expanduser('~'), '.ansible.cfg')
]
env_var_path = os.environ.get('ANSIBLE_CONFIG', '')
if env_var_path != '':
return env_var_path
for location in locations_list:
if os.path.isfile(location):
return location
return os.path.join(infrared_home, 'ansible.cfg')
def _create_ansible_config(self, infrared_home):
"""Create ansible config file """
infrared_common_path = os.path.realpath(__file__ + '/../../../common')
default_ansible_settings = dict(
defaults=OrderedDict([
('host_key_checking', 'False'),
('forks', 500),
('timeout', 30),
('force_color', 1),
('show_custom_stats', 'True'),
('callback_plugins', infrared_common_path + '/callback_plugins'),
('filter_plugins', infrared_common_path + '/filter_plugins'),
('library', infrared_common_path + '/modules'),
('roles', infrared_common_path + '/roles'),
('collections_paths', infrared_home + '/.ansible/collections'),
('local_tmp', infrared_home + '/.ansible/tmp'),
]),
ssh_connection=OrderedDict([
('pipelining', 'True'),
('retries', 2),
]),
galaxy=OrderedDict([
('cache_dir', infrared_home + '/.ansible/galaxy_cache'),
('token_path', infrared_home + '/.ansible/galaxy_token'),
]),
)
LOG.warning("Ansible conf ('{}') not found, creating it with "
"default data".format(self.ansible_config_path))
with open(self.ansible_config_path, 'w') as fp:
config = configparser.ConfigParser()
for section, section_data in default_ansible_settings.items():
if not config.has_section(section):
config.add_section(section)
for option, value in section_data.items():
config.set(section, option, str(value))
config.write(fp)
def inject_config(self):
"""Set the environment variable for config path, if it is undefined."""
if os.environ.get('ANSIBLE_CONFIG', '') == '':
os.environ['ANSIBLE_CONFIG'] = self.ansible_config_path
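# A minimal, hypothetical sketch of the kind of file _create_ansible_config
# writes: sections are emitted in order and every value is stringified. Only
# two options are shown; the expected INI text is given in the comment.
def _example_default_ansible_cfg_text():
    import io
    config = configparser.ConfigParser()
    for section, section_data in (
            ('defaults', OrderedDict([('host_key_checking', 'False'),
                                      ('forks', 500)])),
            ('ssh_connection', OrderedDict([('pipelining', 'True')]))):
        config.add_section(section)
        for option, value in section_data.items():
            config.set(section, option, str(value))
    buf = io.StringIO()
    config.write(buf)
    # [defaults]
    # host_key_checking = False
    # forks = 500
    #
    # [ssh_connection]
    # pipelining = True
    return buf.getvalue()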
|
infrared/core/services/ansible_config.py
|
codereval_python_data_229
|
Adds the spec cli options to the main entry point.
:param root_subparsers: the root subparsers object to extend.
def extend_cli(self, root_subparsers):
workspace_plugin = root_subparsers.add_parser(
self.name,
help=self.kwargs["description"],
**self.kwargs)
workspace_subparsers = workspace_plugin.add_subparsers(dest="command0")
# create
create_parser = workspace_subparsers.add_parser(
'create', help='Creates a new workspace')
create_parser.add_argument("name", help="Workspace name")
# checkout
checkout_parser = workspace_subparsers.add_parser(
'checkout',
help='Switches workspace to the specified workspace')
checkout_parser.add_argument(
"name",
help="Workspace name").completer = completers.workspace_list
checkout_parser.add_argument(
"-c", "--create", action='store_true', dest="checkout_create",
help="Creates a workspace if not exists and "
"switches to it")
# inventory
inventory_parser = workspace_subparsers.add_parser(
'inventory',
help="prints workspace's inventory file")
inventory_parser.add_argument(
"name", help="Workspace name",
nargs="?").completer = completers.workspace_list
# list
wrkspc_list_parser = workspace_subparsers.add_parser(
'list', help='Lists all the workspaces')
wrkspc_list_parser.add_argument(
"--active", action='store_true', dest='print_active',
help="Prints the active workspace only")
# delete
delete_parser = workspace_subparsers.add_parser(
'delete', help='Deletes workspaces')
delete_parser.add_argument(
'name', nargs='+',
help="Workspace names").completer = completers.workspace_list
# cleanup
cleanup_parser = workspace_subparsers.add_parser(
'cleanup', help='Removes all the files from workspace')
cleanup_parser.add_argument(
"name",
help="Workspace name").completer = completers.workspace_list
# import settings
importer_parser = workspace_subparsers.add_parser(
'import', help='Import deployment configs.')
importer_parser.add_argument("filename", help="Archive file name or URL.")
importer_parser.add_argument(
"-n", "--name", dest="workspacename",
help="Workspace name to import with. "
"If not specified - file name will be used.")
# export settings
exporter_parser = workspace_subparsers.add_parser(
'export', help='Export deployment configurations.')
exporter_parser.add_argument(
"-n", "--name", dest="workspacename",
help="Workspace name. If not sepecified - active "
"workspace will be used.").completer = completers.workspace_list
exporter_parser.add_argument("-f", "--filename", dest="filename",
help="Archive file name.")
exporter_parser.add_argument("-K", "--copy-keys", dest="copykeys",
action="store_true",
help="Silently copy ssh keys "
"to workspace.")
# node list
nodelist_parser = workspace_subparsers.add_parser(
'node-list',
help='List nodes, managed by workspace')
nodelist_parser.add_argument(
"-n", "--name",
help="Workspace name").completer = completers.workspace_list
nodelist_parser.add_argument(
"-g", "--group",
help="List nodes in specific group"
).completer = completers.group_list
nodelist_parser.add_argument(
"-f", "--format", choices=['fancy', 'json'], default='fancy',
help="Output format")
# group list
grouplist_parser = workspace_subparsers.add_parser(
'group-list',
help='List groups, managed by workspace')
grouplist_parser.add_argument(
"-n", "--name",
help="Workspace name").completer = completers.workspace_list
from __future__ import print_function
import argcomplete
import json
import os
from pbr import version
import pkg_resources as pkg
import sys
def inject_common_paths():
"""Discover the path to the common/ directory provided by infrared core."""
def override_conf_path(common_path, envvar, specific_dir):
conf_path = os.environ.get(envvar, '')
additional_conf_path = os.path.join(common_path, specific_dir)
if conf_path:
full_conf_path = ':'.join([additional_conf_path, conf_path])
else:
full_conf_path = additional_conf_path
os.environ[envvar] = full_conf_path
version_info = version.VersionInfo('infrared')
common_path = pkg.resource_filename(version_info.package,
'common')
override_conf_path(common_path, 'ANSIBLE_ROLES_PATH', 'roles')
override_conf_path(common_path, 'ANSIBLE_FILTER_PLUGINS', 'filter_plugins')
override_conf_path(common_path, 'ANSIBLE_CALLBACK_PLUGINS',
'callback_plugins')
override_conf_path(common_path, 'ANSIBLE_LIBRARY', 'library')
# This needs to be called here because as soon as an ansible class is loaded
# the code in constants.py is triggered. That code reads the configuration
# settings from all sources (ansible.cfg, environment variables, etc).
# If the first include to ansible modules is moved deeper in the InfraRed
# code (or on demand), then this call can be moved as well in that place.
inject_common_paths()
from infrared import api # noqa
import infrared.bash_completers as completers # noqa
from infrared.core.services import CoreServices # noqa
from infrared.core.services.plugins import PLUGINS_REGISTRY # noqa
from infrared.core.utils import exceptions # noqa
from infrared.core.utils import interactive_ssh # noqa
from infrared.core.utils import logger # noqa
from infrared.core.utils.print_formats import fancy_table # noqa
LOG = logger.LOG
class WorkspaceManagerSpec(api.SpecObject):
"""The workspace manager CLI. """
def __init__(self, name, *args, **kwargs):
super(WorkspaceManagerSpec, self).__init__(name, **kwargs)
self.workspace_manager = CoreServices.workspace_manager()
def extend_cli(self, root_subparsers):
workspace_plugin = root_subparsers.add_parser(
self.name,
help=self.kwargs["description"],
**self.kwargs)
workspace_subparsers = workspace_plugin.add_subparsers(dest="command0")
# create
create_parser = workspace_subparsers.add_parser(
'create', help='Creates a new workspace')
create_parser.add_argument("name", help="Workspace name")
# checkout
checkout_parser = workspace_subparsers.add_parser(
'checkout',
help='Switches workspace to the specified workspace')
checkout_parser.add_argument(
"name",
help="Workspace name").completer = completers.workspace_list
checkout_parser.add_argument(
"-c", "--create", action='store_true', dest="checkout_create",
help="Creates a workspace if not exists and "
"switches to it")
# inventory
inventory_parser = workspace_subparsers.add_parser(
'inventory',
help="prints workspace's inventory file")
inventory_parser.add_argument(
"name", help="Workspace name",
nargs="?").completer = completers.workspace_list
# list
wrkspc_list_parser = workspace_subparsers.add_parser(
'list', help='Lists all the workspaces')
wrkspc_list_parser.add_argument(
"--active", action='store_true', dest='print_active',
help="Prints the active workspace only")
# delete
delete_parser = workspace_subparsers.add_parser(
'delete', help='Deletes workspaces')
delete_parser.add_argument(
'name', nargs='+',
help="Workspace names").completer = completers.workspace_list
# cleanup
cleanup_parser = workspace_subparsers.add_parser(
'cleanup', help='Removes all the files from workspace')
cleanup_parser.add_argument(
"name",
help="Workspace name").completer = completers.workspace_list
# import settings
importer_parser = workspace_subparsers.add_parser(
'import', help='Import deployment configs.')
importer_parser.add_argument("filename", help="Archive file name or URL.")
importer_parser.add_argument(
"-n", "--name", dest="workspacename",
help="Workspace name to import with. "
"If not specified - file name will be used.")
# export settings
exporter_parser = workspace_subparsers.add_parser(
'export', help='Export deployment configurations.')
exporter_parser.add_argument(
"-n", "--name", dest="workspacename",
help="Workspace name. If not sepecified - active "
"workspace will be used.").completer = completers.workspace_list
exporter_parser.add_argument("-f", "--filename", dest="filename",
help="Archive file name.")
exporter_parser.add_argument("-K", "--copy-keys", dest="copykeys",
action="store_true",
help="Silently copy ssh keys "
"to workspace.")
# node list
nodelist_parser = workspace_subparsers.add_parser(
'node-list',
help='List nodes, managed by workspace')
nodelist_parser.add_argument(
"-n", "--name",
help="Workspace name").completer = completers.workspace_list
nodelist_parser.add_argument(
"-g", "--group",
help="List nodes in specific group"
).completer = completers.group_list
nodelist_parser.add_argument(
"-f", "--format", choices=['fancy', 'json'], default='fancy',
help="Output format")
# group list
grouplist_parser = workspace_subparsers.add_parser(
'group-list',
help='List groups, managed by workspace')
grouplist_parser.add_argument(
"-n", "--name",
help="Workspace name").completer = completers.workspace_list
def spec_handler(self, parser, args):
"""Handles all the plugin manager commands
:param parser: the infrared parser object.
:param args: the list of arguments received from cli.
"""
pargs = parser.parse_args(args)
subcommand = pargs.command0
if subcommand == 'create':
self._create_workspace(pargs.name)
elif subcommand == 'checkout':
self._checkout_workspace(pargs.name, pargs.checkout_create)
elif subcommand == 'inventory':
self._fetch_inventory(pargs.name)
elif subcommand == 'list':
if pargs.print_active:
print(self.workspace_manager.get_active_workspace().name)
else:
                headers = ("Name", "Active")
                workspaces = sorted(workspace.name for workspace in
                                    self.workspace_manager.list())
print(fancy_table(
headers,
*[(workspace, ' ' * (len(headers[-1]) // 2) + "*" if
self.workspace_manager.is_active(workspace) else "")
for workspace in workspaces]))
elif subcommand == 'delete':
for workspace_name in pargs.name:
self.workspace_manager.delete(workspace_name)
print("Workspace '{}' deleted".format(workspace_name))
elif subcommand == 'cleanup':
self.workspace_manager.cleanup(pargs.name)
elif subcommand == 'export':
self.workspace_manager.export_workspace(
pargs.workspacename, pargs.filename, pargs.copykeys)
elif subcommand == 'import':
self.workspace_manager.import_workspace(
pargs.filename, pargs.workspacename)
elif subcommand == 'node-list':
nodes = self.workspace_manager.node_list(pargs.name, pargs.group)
if pargs.format == 'json':
nodes_dict = [
{'name': name, 'address': address, 'groups': groups}
for name, address, groups in nodes]
print(json.dumps({'nodes': nodes_dict}))
else:
print(fancy_table(
("Name", "Address", "Groups"),
                    *nodes))
elif subcommand == "group-list":
groups = self.workspace_manager.group_list(pargs.name)
print(fancy_table(
("Name", "Nodes"), *[group_name for group_name in groups]))
def _create_workspace(self, name):
"""Creates a workspace
:param name: Name of the workspace to create
"""
self.workspace_manager.create(name)
print("Workspace '{}' has been added".format(name))
def _checkout_workspace(self, name, create=False):
"""Checkouts (activate) a workspace
:param name: The name of the workspace to checkout
:param create: if set to true will create a new workspace
before checking out to it
"""
if create:
self._create_workspace(name)
self.workspace_manager.activate(name)
print("Now using workspace: '{}'".format(name))
def _fetch_inventory(self, name):
"""fetch inventory file for workspace.
if no active workspace found - create a new workspace
"""
if name:
wkspc = self.workspace_manager.get(name)
else:
wkspc = self.workspace_manager.get_active_workspace()
if not wkspc:
raise exceptions.IRNoActiveWorkspaceFound()
print(wkspc.inventory)
class PluginManagerSpec(api.SpecObject):
def __init__(self, name, *args, **kwargs):
super(PluginManagerSpec, self).__init__(name, *args, **kwargs)
self.plugin_manager = CoreServices.plugins_manager()
def extend_cli(self, root_subparsers):
plugin_parser = root_subparsers.add_parser(
self.name,
help=self.kwargs["description"],
**self.kwargs)
plugin_subparsers = plugin_parser.add_subparsers(dest="command0")
# Add plugin
add_parser = plugin_subparsers.add_parser(
'add', help='Add a plugin')
add_parser.add_argument("src", nargs='+',
help="Plugin Source (name/path/git URL)\n'all'"
" will install all available plugins")
add_parser.add_argument("--revision", help="git branch/tag/revision"
" sourced plugins. Ignored for"
"'plugin add all' command.")
add_parser.add_argument("--src-path",
help="Relative path within the repository "
"where infrared plugin can be found.\n"
"(Required with --link-roles")
add_parser.add_argument("--link-roles", action='store_true',
help="Auto creates symbolic 'roles' directory "
"in the path provided with '--src-path' "
"which points to the 'roles' directory "
"inside the project's root dir if exists,"
" otherwise to the project's root dir "
"itself.")
add_parser.add_argument("--skip-roles", action='store_true',
help="Skip the from file roles installation. "
"(Don't install Ansible roles from "
"'requirements.yml' or "
"'requirements.yaml' file)")
# Remove plugin
remove_parser = plugin_subparsers.add_parser(
"remove",
help="Remove a plugin, 'all' will remove all installed plugins")
remove_parser.add_argument(
"name", nargs='+',
help="Plugin name").completer = completers.plugin_list
# List command
list_parser = plugin_subparsers.add_parser(
'list', help='List all the available plugins')
list_parser.add_argument(
"--available", action='store_true',
help="Prints all available plugins in addition "
"to installed plugins")
list_parser.add_argument(
"--versions", action='store_true',
help="Prints version of each installed plugins")
# Update plugin
update_parser = plugin_subparsers.add_parser(
"update",
help="Update a Git-based plugin")
update_parser.add_argument(
"name",
help="Name of the plugin to update")
update_parser.add_argument(
"revision", nargs='?', default='latest',
help="Revision number to checkout (if not given, will only pull "
"changes from the remote)")
update_parser.add_argument(
'--skip_reqs', '-s', action='store_true',
help="Skips plugin's requirements installation")
update_parser.add_argument(
'--hard-reset', action='store_true',
help="Drop all local changes using hard "
"reset (changes will be stashed")
plugin_subparsers.add_parser(
"freeze", help="Run through installed plugins. For git sourced "
"one writes its current revision to plugins registry.")
# search all plugins from github organization
plugin_subparsers.add_parser(
'search', help='Search and list all the available plugins from '
"rhos-infra organization on GitHub")
        # import plugins from a registry YAML file
        import_parser = plugin_subparsers.add_parser(
            'import', help='Install plugins from a registry YAML file')
        import_parser.add_argument("src",
                                   help="The registry YAML file source")
def spec_handler(self, parser, args):
"""Handles all the plugin manager commands
:param parser: the infrared parser object.
:param args: the list of arguments received from cli.
"""
pargs = parser.parse_args(args)
subcommand = pargs.command0
if subcommand == 'list':
self._list_plugins(pargs.available, pargs.versions)
elif subcommand == 'add':
if 'all' in pargs.src:
self.plugin_manager.add_all_available()
self._list_plugins(print_available=False, print_version=False)
else:
if len(pargs.src) > 1 and (pargs.revision or pargs.src_path):
raise exceptions.IRFailedToAddPlugin(
"'--revision' works with one plugin source only.")
for _plugin in pargs.src:
self.plugin_manager.add_plugin(
_plugin, rev=pargs.revision,
plugin_src_path=pargs.src_path,
skip_roles=pargs.skip_roles,
link_roles=pargs.link_roles)
elif subcommand == 'remove':
if 'all' in pargs.name:
self.plugin_manager.remove_all()
self._list_plugins(print_available=False, print_version=False)
else:
for _plugin in pargs.name:
self.plugin_manager.remove_plugin(_plugin)
elif subcommand == 'freeze':
self.plugin_manager.freeze()
elif subcommand == 'update':
self.plugin_manager.update_plugin(
pargs.name, pargs.revision, pargs.skip_reqs, pargs.hard_reset)
elif subcommand == 'search':
self._search_plugins()
elif subcommand == 'import':
self.plugin_manager.import_plugins(pargs.src)
def _list_plugins(self, print_available=False, print_version=False):
"""Print a list of installed & available plugins"""
table_rows = []
table_headers = ["Type", "Name"]
installed_mark = ' ' * (len('Installed') // 2) + '*'
plugins_dict = \
self.plugin_manager.get_all_plugins() \
if print_available \
else self.plugin_manager.get_installed_plugins()
for plugins_type, plugins in plugins_dict.items():
installed_plugins_list = \
self.plugin_manager.get_installed_plugins(plugins_type).keys()
plugins_names = list(plugins.keys())
plugins_names.sort()
if print_available:
all_plugins_list = []
for plugin_name in plugins_names:
all_plugins_list.append(plugin_name)
installed_plugins_mark_list = \
[installed_mark if plugin_name in installed_plugins_list
else '' for plugin_name in all_plugins_list]
plugins_descs = \
[PLUGINS_REGISTRY.get(plugin, {}).get('desc', '')
for plugin in all_plugins_list]
row = [plugins_type, '\n'.join(all_plugins_list),
'\n'.join(installed_plugins_mark_list),
'\n'.join(plugins_descs)]
if print_version:
plugins_version = [
self.plugin_manager.get_plugin_version(plugin_name)
if plugin_name in installed_plugins_list else ''
for plugin_name in all_plugins_list]
row.append('\n'.join(plugins_version))
else:
row = [
plugins_type,
'\n'.join(installed_plugins_list)]
if print_version:
plugins_version = [self.plugin_manager.get_plugin_version(
plugin_name) for plugin_name in installed_plugins_list]
row.append('\n'.join(plugins_version))
table_rows.append(row)
if print_available:
table_headers.append("Installed")
table_headers.append("Description")
if print_version:
table_headers.append("Version")
print(fancy_table(table_headers, *table_rows))
def _search_plugins(self):
"""Search git organizations and print a list of available plugins """
table_rows = []
table_headers = ["Type", "Name", "Description", "Source"]
plugins_dict = \
self.plugin_manager.get_all_git_plugins()
for plugins_type, plugins in plugins_dict.items():
# prepare empty lists
all_plugins_list = []
plugins_descs = []
plugins_sources = []
            for plugin_name in sorted(plugins.keys()):
# get all plugin names
all_plugins_list.append(plugin_name)
# get all plugin descriptions
plugins_descs.append(plugins[plugin_name]["desc"])
# get all plugins sources
plugins_sources.append(plugins[plugin_name]["src"])
table_rows.append([
plugins_type,
'\n'.join(all_plugins_list),
'\n'.join(plugins_descs),
'\n'.join(plugins_sources)])
print(fancy_table(table_headers, *table_rows))
class SSHSpec(api.SpecObject):
def __init__(self, name, *args, **kwargs):
super(SSHSpec, self).__init__(name, *args, **kwargs)
def extend_cli(self, root_subparsers):
issh_parser = root_subparsers.add_parser(
self.name,
help=self.kwargs["description"],
**self.kwargs)
issh_parser.add_argument("node_name", help="Node name. "
"Ex.: controller-0"
).completer = completers.node_list
issh_parser.add_argument("remote_command", nargs="?", help="Run "
"provided command line on remote host and "
"return its output.")
def spec_handler(self, parser, args):
"""Handles the ssh command
:param parser: the infrared parser object.
:param args: the list of arguments received from cli.
"""
pargs = parser.parse_args(args)
return interactive_ssh.ssh_to_host(
pargs.node_name, remote_command=pargs.remote_command)
def main(args=None):
CoreServices.setup()
# inject ansible config file
CoreServices.ansible_config_manager().inject_config()
specs_manager = api.SpecManager()
# Init Managers
specs_manager.register_spec(
WorkspaceManagerSpec('workspace',
description="Workspace manager. "
"Allows to create and use an "
"isolated environment for plugins "
"execution."))
specs_manager.register_spec(
PluginManagerSpec('plugin',
description="Plugin management"))
specs_manager.register_spec(
SSHSpec(
'ssh',
description="Interactive ssh session to node from inventory."))
# register all plugins
for plugin in CoreServices.plugins_manager().PLUGINS_DICT.values():
specs_manager.register_spec(api.InfraredPluginsSpec(plugin))
argcomplete.autocomplete(specs_manager.parser)
return specs_manager.run_specs(args) or 0
if __name__ == '__main__':
sys.exit(int(main() or 0))
|
infrared/main.py
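
The spec classes in infrared/main.py all follow the same shape: extend_cli registers an argparse subparser tree and spec_handler dispatches on the parsed subcommand. Below is a minimal, self-contained sketch of that pattern using only the standard library; the MiniSpec class and its subcommands are invented for illustration and are not infrared's actual api.SpecObject.

import argparse


class MiniSpec:
    """Toy stand-in for a SpecObject-style CLI extension."""

    def __init__(self, name, description):
        self.name = name
        self.description = description

    def extend_cli(self, root_subparsers):
        # Register this spec's subcommands under the shared root parser.
        parser = root_subparsers.add_parser(self.name, help=self.description)
        subparsers = parser.add_subparsers(dest="command0")
        create = subparsers.add_parser('create', help='Create an item')
        create.add_argument("name", help="Item name")
        subparsers.add_parser('list', help='List items')

    def spec_handler(self, pargs):
        # Dispatch on the parsed subcommand, mirroring the classes above.
        if pargs.command0 == 'create':
            print("creating '{}'".format(pargs.name))
        elif pargs.command0 == 'list':
            print("listing items")


if __name__ == '__main__':
    root = argparse.ArgumentParser()
    root_subparsers = root.add_subparsers(dest="spec")
    spec = MiniSpec('workspace', 'Toy workspace manager')
    spec.extend_cli(root_subparsers)
    spec.spec_handler(root.parse_args(['workspace', 'create', 'demo']))

infrared additionally wires argcomplete completers onto individual arguments (e.g. completers.workspace_list), which this sketch omits.
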
|
codereval_python_data_230
|
Remove root from path, throw exception on failure.
def strip_root(path, root):
"""Remove root from path, throw exception on failure."""
root = root.rstrip(os.sep) # ditch any trailing path separator
if os.path.commonprefix((path, root)) == root:
return os.path.relpath(path, start=root)
raise Exception("Path %s is not in root %s" % (path, root))
"""Base class for Dispositor objects."""
import os
import os.path
from urllib.parse import quote_plus, unquote_plus
class Dispositor:
"""Base class for disposition handlers -- let's call them Dispositors."""
def strip_root(self, path, root):
"""Remove root from path, throw exception on failure."""
root = root.rstrip(os.sep) # ditch any trailing path separator
if os.path.commonprefix((path, root)) == root:
return os.path.relpath(path, start=root)
raise Exception("Path %s is not in root %s" % (path, root))
def is_valid(self, identifier): # pylint: disable=unused-argument
"""Return True if identifier is valid, always True in this base implementation."""
return True
def encode(self, identifier):
"""Encode identifier to get rid of unsafe chars."""
return quote_plus(identifier)
def decode(self, identifier):
"""Decode identifier to put back unsafe chars."""
return unquote_plus(identifier)
def identifier_to_path(self, identifier):
"""Convert identifier to path relative to some root."""
raise Exception("No yet implemented")
def relative_path_to_identifier(self, path):
"""Convert relative path to identifier."""
raise Exception("No yet implemented")
def path_to_identifier(self, path, root=None):
"""Convert path relative to root to identifier."""
if root is not None:
path = self.strip_root(path, root)
return self.relative_path_to_identifier(path)
|
ocfl/dispositor.py
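
A sketch of how a concrete subclass could fill in the two hooks that raise "Not yet implemented" above. The FlatDispositor name and its one-directory-per-encoded-identifier layout are invented for this illustration and are not part of the ocfl package; the import assumes the module shown above is installed.

import os

from ocfl.dispositor import Dispositor  # the base class defined above


class FlatDispositor(Dispositor):
    """Hypothetical layout: one path segment per encoded identifier."""

    def identifier_to_path(self, identifier):
        # Encode unsafe characters and use the result as the directory name.
        return self.encode(identifier)

    def relative_path_to_identifier(self, path):
        # Reverse of identifier_to_path: decode the single path segment.
        return self.decode(path.rstrip(os.sep))


if __name__ == "__main__":
    d = FlatDispositor()
    print(d.identifier_to_path("urn:example:1"))              # urn%3Aexample%3A1
    print(d.path_to_identifier("/store/urn%3Aexample%3A1",
                               root="/store"))                # urn:example:1
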
|