Refactor module execution
diff --git a/cfg_checker/__init__.py b/cfg_checker/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cfg_checker/__init__.py
diff --git a/cfg_checker/cfg_check.py b/cfg_checker/cfg_check.py
new file mode 100644
index 0000000..9260f30
--- /dev/null
+++ b/cfg_checker/cfg_check.py
@@ -0,0 +1,112 @@
+import argparse
+import os
+import sys
+
+import reporter
+from cfg_checker.common import utils, const
+from cfg_checker.common import config, logger, logger_cli, pkg_dir
+from cfg_checker.clients import salt
+
+from cfg_checker.pkg_check import CloudPackageChecker
+
# NOTE(review): this rebinds the `pkg_dir` imported from
# cfg_checker.common above (which points at the repo root) to the
# cfg_checker package directory itself -- confirm which one callers
# of this module actually expect
pkg_dir = os.path.dirname(__file__)
pkg_dir = os.path.normpath(pkg_dir)
+
+
class MyParser(argparse.ArgumentParser):
    """ArgumentParser that prints the full help text on errors.

    The stock argparse handler prints only a short usage string; this
    variant shows the complete help (all sub-commands) and then exits
    with the conventional status code 2.
    """
    def error(self, message):
        sys.stderr.write('Error: {0}\n\n'.format(message))
        self.print_help()
        # argparse relies on error() not returning; without this exit
        # parsing would continue with a half-populated namespace and
        # crash later in the dispatch code
        sys.exit(2)
+
+
+
def help_message():
    """Print usage examples for the cfg_checker CLI.

    Uses the print() call form so the module stays valid on both
    Python 2 (parenthesized expression) and Python 3; the original
    `print\"\"\"...\"\"\"` statement is a SyntaxError on Python 3.
    """
    print("""
    Please, use following examples to generate info reports:\n
    cfg_checker packages report\n
    cfg_checker network check\n
    cfg_checker network report\n
    """)
+
def pkg_check(args):
    """Handler for `packages report`: build the package versions HTML."""
    report_file = args.file
    # connect to salt and gather minion inventory
    checker = CloudPackageChecker()
    # query every active node for its installed packages
    checker.collect_installed_packages()
    # candidate-vs-installed diff is not wired in yet:
    # checker.collect_packages()
    # render the collected data into the requested file
    checker.create_html_report(report_file)
+
+
def net_check(args):
    """Handler stub for `network check`; not implemented yet."""
    print("This is net check routine")
    return None
+
+
def net_report(args):
    """Handler stub for `network report`; not implemented yet.

    The original printed the net *check* message here -- a copy/paste
    slip; the text now names the report routine.
    """
    print("This is net report routine")
+
+
def config_check_entrypoint():
    """CLI entry point: build the parser tree and dispatch the command.

    Sub-commands:
        packages report  -> pkg_check
        network check    -> net_check
        network report   -> net_report
    """
    parser = MyParser(prog="Cloud configuration checker")
    subparsers = parser.add_subparsers(dest='command')
    # packages
    pkg_parser = subparsers.add_parser(
        'packages',
        help="Package versions check (Candidate vs Installed)"
    )
    pkg_subparsers = pkg_parser.add_subparsers(dest='type')

    pkg_report_parser = pkg_subparsers.add_parser(
        'report',
        help="Report package versions to HTML file"
    )
    pkg_report_parser.add_argument(
        '-f',
        '--file',
        help="HTML filename to save report"
    )
    # was missing: without this default the dispatch below raised
    # AttributeError for `packages report`
    pkg_report_parser.set_defaults(func=pkg_check)

    # networking
    net_parser = subparsers.add_parser(
        'network',
        help="Network infrastructure checks"
    )
    net_subparsers = net_parser.add_subparsers(dest='type')

    net_check_parser = net_subparsers.add_parser(
        'check',
        help="Do network check and print the result"
    )
    net_check_parser.set_defaults(func=net_check)

    net_report_parser = net_subparsers.add_parser(
        'report',
        help="Generate network check report"
    )
    net_report_parser.add_argument(
        '-f',
        '--file',
        help="HTML filename to save report"
    )
    net_report_parser.set_defaults(func=net_report)

    # parse arguments
    args = parser.parse_args()

    # no sub-command resolved to a handler: show the examples instead
    # of crashing on the missing `func` attribute
    if not hasattr(args, 'func'):
        help_message()
        return

    # Execute the command
    result = args.func(args)

    logger.debug(result)


if __name__ == '__main__':
    config_check_entrypoint()
diff --git a/cfg_checker/clients/__init__.py b/cfg_checker/clients/__init__.py
new file mode 100644
index 0000000..3e1f55c
--- /dev/null
+++ b/cfg_checker/clients/__init__.py
@@ -0,0 +1,29 @@
+from cfg_checker.common.salt_utils import SaltRemote
+from cfg_checker.common import logger
+
+# instance of the salt client
+salt = None
+
+
def get_salt_remote(config):
    """Singleton-like creation of instance

    Arguments:
        config {base_config} -- an instance to base_config
            with creds and params (kept for interface compatibility)

    Returns:
        SaltRemote -- instance of salt client
    """

    global salt
    logger.info("Creating salt remote instance")
    # create it once
    if salt is None:
        # SaltRemote reads its connection parameters from the shared
        # cfg_checker.common config itself; passing `config` to the
        # constructor raised TypeError (its __init__ takes no args)
        salt = SaltRemote()
        # do most expensive operation with no strict timeout possible
        # all nodes that answer ping
        salt.nodes_active = salt.get_active_nodes()

    # return once required
    return salt
diff --git a/cfg_checker/common/__init__.py b/cfg_checker/common/__init__.py
new file mode 100644
index 0000000..297ace6
--- /dev/null
+++ b/cfg_checker/common/__init__.py
@@ -0,0 +1,13 @@
import os

# explicit relative imports: the implicit form (`import const`) only
# works on Python 2 and can shadow same-named top-level modules
from . import const
from .settings import pkg_dir, config
from .other import Utils
from .log import logger, logger_cli

# single shared helper instance; `const`, `config`, `logger`,
# `logger_cli` and `pkg_dir` are re-exported as-is for
# `from cfg_checker.common import ...` users (the old `x = x`
# self-assignments were redundant)
utils = Utils()

__all__ = ['utils', 'const', 'config', 'logger', 'logger_cli', 'pkg_dir']
diff --git a/cfg_checker/common/const.py b/cfg_checker/common/const.py
new file mode 100644
index 0000000..4142ea7
--- /dev/null
+++ b/cfg_checker/common/const.py
@@ -0,0 +1,32 @@
+"""Constants that is not to be changed and used in all other files
+"""
+
+from __future__ import print_function, absolute_import
+
+import itertools
+
# auto-numbered node status codes: NODE_DOWN == 0, NODE_UP == 1
_cnt = itertools.count()
NODE_DOWN = next(_cnt)
NODE_UP = next(_cnt)

# drop the counter so it is not exported with the module namespace
del _cnt

# maps the letter prefix of a node's short hostname (e.g. "ctl" from
# "ctl01") to its cloud role; consumed by Utils.validate_name and
# the package checker's role lookup
all_roles_map = {
    "apt": "repository",
    "bmk": "validation",
    "cfg": "master",
    "cid": "cicd",
    "cmn": "storage_monitor",
    "cmp": "compute",
    "ctl": "openstack_controller",
    "dbs": "database",
    "gtw": "openstack_gateway",
    "kvm": "foundation",
    "log": "stacklight_logger",
    "mon": "monitoring",
    "msg": "messaging",
    "mtr": "stacklight_metering",
    "osd": "storage_node",
    "prx": "proxy",
    "rgw": "storage_rados"
}
diff --git a/cfg_checker/common/exception.py b/cfg_checker/common/exception.py
new file mode 100644
index 0000000..bc7cefe
--- /dev/null
+++ b/cfg_checker/common/exception.py
@@ -0,0 +1,21 @@
# NOTE: the Python-2-only `from exceptions import Exception` import was
# dropped; `Exception` is a builtin on both Python 2 and 3, so the
# import was redundant there and an ImportError on Python 3.


class CheckerBaseExceptions(Exception):
    """Root of the cfg_checker exception hierarchy."""
    pass


class CheckerException(CheckerBaseExceptions):
    """Generic checker error carrying a prettified `message`."""
    def __init__(self, message, *args, **kwargs):
        super(CheckerException, self).__init__(message, *args, **kwargs)
        # get the trace
        # TODO: get and log traceback

        # prettify message
        self.message = "CheckerException: {}".format(message)


class ConfigException(CheckerException):
    """Raised for configuration loading/validation errors."""
    def __init__(self, message, *args, **kwargs):
        super(ConfigException, self).__init__(message, *args, **kwargs)
        self.message = "Configuration error: {}".format(message)
diff --git a/cfg_checker/common/log.py b/cfg_checker/common/log.py
new file mode 100644
index 0000000..af52cac
--- /dev/null
+++ b/cfg_checker/common/log.py
@@ -0,0 +1,95 @@
+import os
+import logging
+
+pkg_dir = os.path.dirname(__file__)
+pkg_dir = os.path.join(pkg_dir, os.pardir, os.pardir)
+pkg_dir = os.path.normpath(pkg_dir)
+pkg_dir = os.path.abspath(pkg_dir)
+
+
def color_me(color):
    """Return a function wrapping a message in the given ANSI color.

    `color` is an offset into the standard 8-color table (30 + color).
    """
    RESET_SEQ = "\033[0m"
    COLOR_SEQ = "\033[1;%dm"

    color_seq = COLOR_SEQ % (30 + color)

    def closure(msg):
        return color_seq + msg + RESET_SEQ
    return closure


class ColoredFormatter(logging.Formatter):
    """Formatter that colorizes the level name for console output."""
    BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)

    colors = {
        'INFO': color_me(WHITE),
        'WARNING': color_me(YELLOW),
        'DEBUG': color_me(BLUE),
        'CRITICAL': color_me(YELLOW),
        'ERROR': color_me(RED)
    }

    def __init__(self, msg, use_color=True, datefmt=None):
        logging.Formatter.__init__(self, msg, datefmt=datefmt)
        self.use_color = use_color

    def format(self, record):
        # work on a copy of the record dict: other handlers/formatters
        # must see the original, uncolored levelname
        orig = record.__dict__
        record.__dict__ = record.__dict__.copy()
        levelname = record.levelname

        # pad to 8 chars so the colored level column lines up
        prn_name = levelname + ' ' * (8 - len(levelname))
        if levelname in self.colors:
            record.levelname = self.colors[levelname](prn_name)
        else:
            record.levelname = prn_name

        # super doesn't work here in 2.6 O_o
        res = logging.Formatter.format(self, record)

        # restore record, as it will be used by other formatters
        record.__dict__ = orig
        return res


def setup_loggers(name, def_level=logging.DEBUG, log_fname=None):
    """Create the file logger and the colored console logger.

    Arguments:
        name -- base logger name; the console logger is `<name>.cli`
        def_level -- level for the console stream handler
        log_fname -- optional log file path; when None no file handler
            is attached (previously `None` itself was passed to
            addHandler, which broke every later call on that logger)

    Returns:
        (logger, logger_cli) tuple
    """
    # Stream Handler for the console logger
    sh = logging.StreamHandler()
    sh.setLevel(def_level)
    colored_formatter = ColoredFormatter('%(message)s', datefmt="%H:%M:%S")
    sh.setFormatter(colored_formatter)

    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    # File handler -- only attach one when a filename was supplied and
    # the logger was not configured by an earlier call
    if log_fname is not None and len(logger.handlers) == 0:
        fh = logging.FileHandler(log_fname)
        log_format = '%(asctime)s - %(levelname)8s - %(name)-15s - %(message)s'
        fh.setFormatter(logging.Formatter(log_format, datefmt="%H:%M:%S"))
        fh.setLevel(logging.DEBUG)
        logger.addHandler(fh)

    logger_cli = logging.getLogger(name + ".cli")
    logger_cli.setLevel(logging.DEBUG)
    if len(logger_cli.handlers) == 0:
        logger_cli.addHandler(sh)

    return logger, logger_cli
+
# init instances of logger to be used by all other modules
# the log file lives in the package root; its name (relative to
# pkg_dir) can be overridden via the LOGFILE environment variable
logger, logger_cli = setup_loggers(
    'cfg_checker',
    log_fname=os.path.join(
        pkg_dir,
        os.getenv('LOGFILE', 'cfg_checker.log')
    )
)
diff --git a/cfg_checker/common/other.py b/cfg_checker/common/other.py
new file mode 100644
index 0000000..97030bb
--- /dev/null
+++ b/cfg_checker/common/other.py
@@ -0,0 +1,99 @@
+import os
+import re
+
+from cfg_checker.common.const import all_roles_map
+
+from cfg_checker.common.exception import ConfigException
+
+pkg_dir = os.path.dirname(__file__)
+pkg_dir = os.path.join(pkg_dir, os.pardir, os.pardir)
+pkg_dir = os.path.normpath(pkg_dir)
+pkg_dir = os.path.abspath(pkg_dir)
+
+
class Utils(object):
    """Helpers for node-name validation and node list loading."""

    @staticmethod
    def validate_name(fqdn, message=False):
        """
        Function that tries to validate node name.
        Checks if code contains letters, has '.' in it,
        roles map contains code's role

        :param fqdn: node FQDN name to supply for the check
        :param message: True if validate should return error check message
        :return: False if checks failed, True if all checks passed
            (or a (valid, message) tuple when message=True)
        """
        # track validity explicitly: the previous version returned the
        # hard-coded True in the message=False case even when a check
        # had failed
        _valid = True
        _message = "Validation passed"

        def _result():
            return (_valid, _message) if message else _valid

        # node role code checks
        _code = re.findall("[a-zA-Z]+", fqdn.split('.')[0])
        if len(_code) > 0:
            if _code[0] in all_roles_map:
                return _result()
            else:
                # log warning here
                _message = "Node code is unknown, '{}'. " \
                           "Please, update map".format(_code)
                _valid = False
        else:
            # log warning here
            _message = "Node name is invalid, '{}'".format(fqdn)
            _valid = False

        # put other checks here

        # output result
        return _result()

    @staticmethod
    def node_string_to_list(node_string):
        """Split a node list string on ',' (or on spaces when there
        is no comma); None yields an empty list."""
        if node_string is None:
            return []
        if ',' in node_string:
            return node_string.split(',')
        return node_string.split(' ')

    def get_node_code(self, fqdn):
        """Return the role code of `fqdn`; raise ConfigException when
        the name does not validate."""
        # validate
        _isvalid, _message = self.validate_name(fqdn, message=True)
        _code = re.findall("[a-zA-Z]+", fqdn.split('.')[0])
        # check if it is valid and raise if not
        if _isvalid:
            return _code[0]
        else:
            raise ConfigException(_message)

    def get_nodes_list(self, env, nodes_list):
        """Load node names from the `env` string, or from the
        `nodes_list` file under pkg_dir when `env` is None.

        Only names passing validate_name() are returned.
        """
        _list = []
        if env is None:
            # nothing supplied, use the one in repo
            try:
                if not nodes_list:
                    return []
                with open(os.path.join(pkg_dir, nodes_list)) as _f:
                    _list.extend(_f.read().splitlines())
            except IOError as e:
                raise ConfigException("Error while loading file, '{}': "
                                      "{}".format(e.filename, e.strerror))
        else:
            _list.extend(self.node_string_to_list(env))

        # keep only names that validate; with validate_name fixed the
        # invalid entries are actually filtered out now
        return [_name for _name in _list if self.validate_name(_name)]


utils = Utils()
diff --git a/cfg_checker/common/salt_utils.py b/cfg_checker/common/salt_utils.py
new file mode 100644
index 0000000..ba1233b
--- /dev/null
+++ b/cfg_checker/common/salt_utils.py
@@ -0,0 +1,371 @@
+"""
+Module to handle interaction with salt
+"""
+import os
+import requests
+import time
+
+from cfg_checker.common import logger, config
+
+
def list_to_target_string(node_list, separator):
    """Join node names into a salt compound target string,
    e.g. ['a', 'b'] with 'and not' -> 'a and not b'."""
    return (' ' + separator + ' ').join(node_list)
+
+
class SaltRest(object):
    """Minimal REST client for a salt-api endpoint.

    Connection parameters are read from the shared module-level
    `config` at class-creation time.
    """
    _host = config.salt_host
    _port = config.salt_port
    uri = "http://" + config.salt_host + ":" + config.salt_port
    # auth state filled by _login(): token payload + session cookies
    _auth = {}

    # NOTE: this class-level dict doubles as the default `headers`
    # argument of get()/post() below; _login() writes the token into
    # it in place, so every later request is authenticated
    default_headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'X-Auth-Token': None
    }

    def __init__(self):
        # authenticate eagerly; raises EnvironmentError on failure
        self._token = self._login()
        self.last_response = None

    def get(self, path='', headers=default_headers, cookies=None):
        # GET `path` relative to the API root, logging the full request
        _path = os.path.join(self.uri, path)
        logger.debug("GET '{}'\nHeaders: '{}'\nCookies: {}".format(
            _path,
            headers,
            cookies
        ))
        return requests.get(
            _path,
            headers=headers,
            cookies=cookies
        )

    def post(self, data, path='', headers=default_headers, cookies=None):
        # POST JSON `data` to `path`; the salt password is masked in
        # the debug log for login requests
        if data is None:
            data = {}
        _path = os.path.join(self.uri, path)
        if path == 'login':
            _data = str(data).replace(config.salt_pass, "*****")
        else:
            _data = data
        logger.debug("POST '{}'\nHeaders: '{}'\nCookies: {}\nBody: {}".format(
            _path,
            headers,
            cookies,
            _data
        ))
        return requests.post(
            os.path.join(self.uri, path),
            headers=headers,
            json=data,
            cookies=cookies
        )

    def _login(self):
        """Authenticate against /login; cache token + cookies and
        return the token string. Raises EnvironmentError when the API
        refuses the credentials."""
        login_payload = {
            'username': config.salt_user,
            'password': config.salt_pass,
            'eauth': 'pam'
        }

        logger.debug("Logging in to salt master...")
        _response = self.post(login_payload, path='login')

        if _response.ok:
            self._auth['response'] = _response.json()['return'][0]
            self._auth['cookies'] = _response.cookies
            # store the token in the shared default headers so all
            # subsequent requests carry it
            self.default_headers['X-Auth-Token'] = \
                self._auth['response']['token']
            return self._auth['response']['token']
        else:
            raise EnvironmentError(
                "HTTP:{}, Not authorized?".format(_response.status_code)
            )

    def salt_request(self, fn, *args, **kwargs):
        """Issue a request via the method named `fn` ('get'/'post')
        and return the parsed 'return' payload.

        Re-authenticates when the cached token is within 5 minutes of
        expiry ('expire' is presumably epoch seconds -- confirm
        against the salt-api version in use).
        """
        # if token will expire in 5 min, re-login
        if self._auth['response']['expire'] < time.time() + 300:
            self._auth['response']['X-Auth-Token'] = self._login()

        _method = getattr(self, fn)
        _response = _method(*args, **kwargs)
        self.last_response = _response
        # log at most 1 KiB of the body to keep the debug log readable
        _content = "..."
        _len = len(_response.content)
        if _len < 1024:
            _content = _response.content
        logger.debug(
            "Response (HTTP {}/{}), {}: {}".format(
                _response.status_code,
                _response.reason,
                _len,
                _content
            )
        )
        if _response.ok:
            return _response.json()['return']
        else:
            raise EnvironmentError(
                "Salt Error: HTTP:{}, '{}'".format(
                    _response.status_code,
                    _response.reason
                )
            )
+
+
class SaltRemote(SaltRest):
    """Typed wrappers for common salt-api calls
    (LocalClient / runner / wheel)."""

    def __init__(self, config=None):
        # `config` is accepted (and currently unused) so that callers
        # such as clients.get_salt_remote(config) do not crash with a
        # TypeError; connection parameters are read from the shared
        # module-level config by SaltRest.
        super(SaltRemote, self).__init__()

    def cmd(
        self,
        tgt,
        fun,
        param=None,
        client='local',
        kwarg=None,
        expr_form=None,
        tgt_type=None,
        timeout=None
    ):
        """Execute a salt execution-module function via LocalClient.

        Returns the first (and only) element of the API's 'return'
        list; raises EnvironmentError on unexpected response shape.
        """
        _timeout = timeout if timeout is not None else config.salt_timeout
        _payload = {
            'fun': fun,
            'tgt': tgt,
            'client': client,
            'timeout': _timeout
        }

        if expr_form:
            _payload['expr_form'] = expr_form
        if tgt_type:
            _payload['tgt_type'] = tgt_type
        if param:
            _payload['arg'] = param
        if kwarg:
            _payload['kwarg'] = kwarg

        _response = self.salt_request('post', [_payload])
        if isinstance(_response, list):
            return _response[0]
        else:
            raise EnvironmentError(
                "Unexpected response from from salt-api/LocalClient: "
                "{}".format(_response)
            )

    def run(self, fun, kwarg=None):
        """Execute a runner function ('salt-run' equivalent)."""
        _payload = {
            'client': 'runner',
            'fun': fun,
            'timeout': config.salt_timeout
        }

        if kwarg:
            _payload['kwarg'] = kwarg

        _response = self.salt_request('post', [_payload])
        if isinstance(_response, list):
            return _response[0]
        else:
            raise EnvironmentError(
                "Unexpected response from from salt-api/RunnerClient: "
                "{}".format(_response)
            )

    def wheel(self, fun, arg=None, kwarg=None):
        """Execute a wheel function (key management etc.)."""
        _payload = {
            'client': 'wheel',
            'fun': fun,
            'timeout': config.salt_timeout
        }

        if arg:
            _payload['arg'] = arg
        if kwarg:
            _payload['kwarg'] = kwarg

        _response = self.salt_request('post', _payload)['data']
        if _response['success']:
            return _response
        else:
            raise EnvironmentError(
                "Salt Error: '{}'".format(_response['return']))

    def pillar_request(self, node_target, pillar_submodule, argument):
        """Run `pillar.<submodule>` on the target.

        example cli: 'salt "ctl01*" pillar.keys rsyslog'
        """
        _type = "compound"
        if isinstance(node_target, list):
            _type = "list"
        return self.cmd(
            node_target,
            "pillar." + pillar_submodule,
            argument,
            expr_form=_type
        )

    def pillar_keys(self, node_target, argument):
        return self.pillar_request(node_target, 'keys', argument)

    def pillar_get(self, node_target, argument):
        return self.pillar_request(node_target, 'get', argument)

    def pillar_data(self, node_target, argument):
        return self.pillar_request(node_target, 'data', argument)

    def pillar_raw(self, node_target, argument):
        return self.pillar_request(node_target, 'raw', argument)

    def list_minions(self):
        """
        Fails in salt version 2016.3.8
        api returns dict of minions with grains
        """
        return self.salt_request('get', 'minions')

    def list_keys(self):
        """
        Fails in salt version 2016.3.8
        api should return dict:
        {
            'local': [],
            'minions': [],
            'minions_denied': [],
            'minions_pre': [],
            'minions_rejected': [],
        }
        """
        return self.salt_request('get', path='keys')

    def get_status(self):
        """
        'runner' client is the equivalent of 'salt-run'
        Returns the up/down status map from manage.status
        """
        return self.run(
            'manage.status',
            kwarg={'timeout': 10}
        )

    def get_active_nodes(self):
        """Ping all nodes (minus config.skip_nodes) and return the
        responders."""
        if config.skip_nodes:
            logger.info("Nodes to be skipped: {0}".format(config.skip_nodes))
            return self.cmd(
                '* and not ' + list_to_target_string(
                    config.skip_nodes,
                    'and not'
                ),
                'test.ping',
                expr_form='compound')
        else:
            return self.cmd('*', 'test.ping')

    def get_monitoring_ip(self, param_name):
        salt_output = self.cmd(
            'docker:client:stack:monitoring',
            'pillar.get',
            param=param_name,
            expr_form='pillar')
        # first (only) minion's value; `salt_output.keys()[0]` breaks
        # on Python 3 where keys() is a view
        return salt_output[next(iter(salt_output))]

    def f_touch_master(self, path, makedirs=True):
        """Touch a file on the master (cfg01*)."""
        _kwarg = {
            "makedirs": makedirs
        }
        salt_output = self.cmd(
            "cfg01*",
            "file.touch",
            param=path,
            kwarg=_kwarg
        )
        return salt_output[next(iter(salt_output))]

    def f_append_master(self, path, strings_list, makedirs=True):
        """Write `strings_list` as lines into `path` on the master."""
        _kwarg = {
            "makedirs": makedirs
        }
        _args = [path]
        _args.extend(strings_list)
        salt_output = self.cmd(
            "cfg01*",
            "file.write",
            param=_args,
            kwarg=_kwarg
        )
        return salt_output[next(iter(salt_output))]

    def mkdir(self, target, path, tgt_type=None):
        salt_output = self.cmd(
            target,
            "file.mkdir",
            param=path,
            expr_form=tgt_type
        )
        return salt_output

    def f_manage_file(self, target_path, source,
                      sfn='', ret='{}',
                      source_hash={},
                      user='root', group='root', backup_mode='755',
                      show_diff='base',
                      contents='', makedirs=True):
        """
        REST variation of file.get_managed
        CLI execution goes like this (10 agrs):
        salt cfg01\* file.manage_file /root/test_scripts/pkg_versions.py
            '' '{}' /root/diff_pkg_version.py
            '{hash_type: 'md5', 'hsum': <md5sum>}' root root '755' base ''
            makedirs=True
        param: name - target file placement when managed
        param: source - source for the file

        NOTE(review): the `source_hash={}` mutable default is never
        mutated or even used (a local _source_hash is sent instead) --
        candidate for cleanup once callers are audited.
        """
        _source_hash = {
            "hash_type": "md5",
            "hsum": 000
        }
        _arg = [
            target_path,
            sfn,
            ret,
            source,
            _source_hash,
            user,
            group,
            backup_mode,
            show_diff,
            contents
        ]
        _kwarg = {
            "makedirs": makedirs
        }
        salt_output = self.cmd(
            "cfg01*",
            "file.manage_file",
            param=_arg,
            kwarg=_kwarg
        )
        return salt_output[next(iter(salt_output))]

    def cache_file(self, target, source_path):
        salt_output = self.cmd(
            target,
            "cp.cache_file",
            param=source_path
        )
        return salt_output[next(iter(salt_output))]

    def get_file(self, target, source_path, target_path, tgt_type=None):
        return self.cmd(
            target,
            "cp.get_file",
            param=[source_path, target_path],
            expr_form=tgt_type
        )

    @staticmethod
    def compound_string_from_list(nodes_list):
        """Join node names into a salt compound 'or' target."""
        return " or ".join(nodes_list)
diff --git a/cfg_checker/common/settings.py b/cfg_checker/common/settings.py
new file mode 100644
index 0000000..cea1f0c
--- /dev/null
+++ b/cfg_checker/common/settings.py
@@ -0,0 +1,107 @@
+import os
+
+from exception import ConfigException
+from log import logger
+from other import utils
+
+pkg_dir = os.path.dirname(__file__)
+pkg_dir = os.path.join(pkg_dir, os.pardir, os.pardir)
+pkg_dir = os.path.normpath(pkg_dir)
+pkg_dir = os.path.abspath(pkg_dir)
+
+_default_work_folder = os.path.normpath(pkg_dir)
+
+
class CheckerConfiguration(object):
    """Runtime configuration: loads etc/<env>.env into os.environ and
    then reads all settings from environment variables."""

    def _init_values(self):
        """Load values from environment variables or put default ones
        """

        self.name = "CheckerConfig"
        self.working_folder = os.environ.get(
            'CFG_TESTS_WORK_DIR',
            _default_work_folder
        )
        self.date_format = "%Y-%m-%d %H:%M:%S.%f%z"
        self.default_tz = "UTC"

        self.salt_host = os.environ.get('SALT_URL', None)
        self.salt_port = os.environ.get('SALT_PORT', '6969')
        self.salt_user = os.environ.get('SALT_USER', 'salt')
        self.salt_pass = os.environ.get('SALT_PASSWORD', None)
        # env values are strings; keep the timeout numeric either way
        self.salt_timeout = int(os.environ.get('SALT_TIMEOUT', 30))
        self.salt_file_root = os.environ.get('SALT_FILE_ROOT', None)
        self.salt_scripts_folder = os.environ.get(
            'SALT_SCRIPTS_FOLDER',
            'cfg_checker_scripts'
        )
        self.all_nodes = utils.get_nodes_list(
            os.environ.get('CFG_ALL_NODES', None),
            os.environ.get('SALT_NODE_LIST_FILE', None)
        )
        self.skip_nodes = utils.node_string_to_list(os.environ.get(
            'CFG_SKIP_NODES',
            None
        ))

    @staticmethod
    def _init_env(env_name=None):
        """Load etc/<env_name>.env into os.environ.

        Keyword Arguments:
            env_name {str} -- environment name to search configuration
                files in etc/<env_name>.env (default: {None} -> 'default')

        Raises:
            ConfigException -- on IO error when loading env file
            ConfigException -- on env file failed validation
        """
        # load env file and init os.environ with its values
        _env_name = env_name if env_name is not None else 'default'
        _config_path = os.path.join(pkg_dir, 'etc', _env_name + '.env')
        if os.path.isfile(_config_path):
            with open(_config_path) as _f:
                _list = _f.read().splitlines()
            logger.debug("Loading env vars from '{}'".format(_config_path))
        else:
            raise ConfigException(
                "Failed to load enviroment vars from '{}'".format(
                    _config_path
                )
            )
        # collect ALL malformed lines before raising: the accumulator
        # used to be re-created on every loop iteration, so only the
        # last line's error could ever be reported
        _errors = []
        for index, _line in enumerate(_list):
            # skip comments and blank lines
            if not _line.strip() or _line.strip().startswith('#'):
                continue
            # a line must contain a key/value separator; values may
            # legitimately contain '=' themselves (URLs, tokens), so
            # split only on the first occurrence
            if '=' not in _line:
                _errors.append("Line {}: {}".format(index, _line))
            else:
                _key, _value = _line.split('=', 1)
                os.environ[_key] = _value
        # if there were errors, report them
        if _errors:
            raise ConfigException(
                "Environment file failed validation in lines: {}".format(
                    "\n".join(_errors)
                )
            )
        else:
            logger.debug("Loaded total of '{}' vars".format(len(_list)))

    def __init__(self):
        """Base configuration class. Only values that are common for all scripts
        """
        _env = os.getenv('SALT_ENV', None)
        self._init_env(_env)
        self._init_values()


config = CheckerConfiguration()
diff --git a/cfg_checker/pkg_check.py b/cfg_checker/pkg_check.py
new file mode 100644
index 0000000..34907c5
--- /dev/null
+++ b/cfg_checker/pkg_check.py
@@ -0,0 +1,189 @@
+import json
+import os
+import sys
+
+from copy import deepcopy
+
+import reporter
+
+from cfg_checker.common import utils, const
+from cfg_checker.common import config, logger, logger_cli, pkg_dir
+from cfg_checker.common import salt_utils
+
+node_tmpl = {
+ 'role': '',
+ 'node_group': '',
+ 'status': const.NODE_DOWN,
+ 'pillars': {},
+ 'grains': {}
+}
+
+
class CloudPackageChecker(object):
    """Collects installed-package data from all salt minions and
    renders it into an HTML report."""

    # shared checker configuration (module-level config instance)
    _config = config

    def __init__(self):
        logger_cli.info("Collecting nodes for package check")
        # simple salt rest client
        self.salt = salt_utils.SaltRemote()

        # Keys for all nodes
        # this is not working in scope of 2016.8.3, will overide with list
        # cls.node_keys = cls.salt.list_keys()

        logger_cli.info("Collecting node names existing in the cloud")
        # was `base_config.all_nodes`: `base_config` is not defined in
        # this module -- the imported `config` is the right object
        self.node_keys = {
            'minions': config.all_nodes
        }

        # all that answer ping
        _active = self.salt.get_active_nodes()
        logger_cli.info("Nodes responded: {}".format(_active))
        # just inventory for faster interaction
        # iterate through all accepted nodes and create a dict for it
        self.nodes = {}
        for _name in self.node_keys['minions']:
            _nc = utils.get_node_code(_name)
            _rmap = const.all_roles_map
            _role = _rmap[_nc] if _nc in _rmap else 'unknown'
            _status = const.NODE_UP if _name in _active else const.NODE_DOWN

            self.nodes[_name] = deepcopy(node_tmpl)
            self.nodes[_name]['node_group'] = _nc
            self.nodes[_name]['role'] = _role
            self.nodes[_name]['status'] = _status

        logger_cli.info("{} nodes collected".format(len(self.nodes)))

    def collect_installed_packages(self):
        """
        Collect installed packages on each node
        sets 'installed' dict property in the class

        :return: none
        """
        logger_cli.info("Collecting installed packages")
        # form an all nodes compound string to use in salt
        _active_nodes_string = self.salt.compound_string_from_list(
            [nd for nd in self.nodes
             if self.nodes[nd]['status'] == const.NODE_UP]
        )
        # Prepare script
        _script_filename = "pkg_versions.py"
        # was `PKG_DIR` (undefined); the module imports `pkg_dir`
        _p = os.path.join(pkg_dir, 'scripts', _script_filename)
        with open(_p, 'rt') as fd:
            _script = fd.read().splitlines()
        _storage_path = os.path.join(
            config.salt_file_root, config.salt_scripts_folder
        )
        logger_cli.info(
            "Uploading script {} to master's file cache folder: '{}'".format(
                _script_filename,
                _storage_path
            )
        )
        _result = self.salt.mkdir("cfg01*", _storage_path)
        # Form cache, source and target path
        _cache_path = os.path.join(_storage_path, _script_filename)
        _source_path = os.path.join(
            'salt://',
            config.salt_scripts_folder,
            _script_filename
        )
        _target_path = os.path.join(
            '/root',
            config.salt_scripts_folder,
            _script_filename
        )

        logger.debug("Creating file in cache '{}'".format(_cache_path))
        _result = self.salt.f_touch_master(_cache_path)
        _result = self.salt.f_append_master(_cache_path, _script)
        # command salt to copy file to minions
        logger.debug("Creating script target folder '{}'".format(_cache_path))
        _result = self.salt.mkdir(
            _active_nodes_string,
            os.path.join(
                '/root',
                config.salt_scripts_folder
            ),
            tgt_type="compound"
        )
        logger_cli.info("Running script to all active nodes")
        _result = self.salt.get_file(
            _active_nodes_string,
            _source_path,
            _target_path,
            tgt_type="compound"
        )
        # execute pkg collecting script
        logger.debug("Running script to all nodes")
        # handle results for each node
        _result = self.salt.cmd(
            _active_nodes_string,
            'cmd.run',
            param='python {}'.format(_target_path),
            expr_form="compound"
        )
        for key in self.nodes.keys():
            # due to much data to be passed from salt, it is happening in order
            if key in _result:
                _text = _result[key]
                # strip any leading noise before the JSON payload
                _dict = json.loads(_text[_text.find('{'):])
                self.nodes[key]['packages'] = _dict
            else:
                self.nodes[key]['packages'] = {}
            logger_cli.info("{} has {} packages installed".format(
                key,
                len(self.nodes[key]['packages'])
            ))

    def collect_packages(self):
        """
        Check package versions in repos vs installed

        :return: no return values, all date put to dict in place
        """
        _all_packages = {}
        # `iteritems` is Python-2-only; `items` works on both
        for node_name, node_value in self.nodes.items():
            for package_name in node_value['packages']:
                if package_name not in _all_packages:
                    _all_packages[package_name] = {}
                _all_packages[package_name][node_name] = node_value

        # TODO: process data for per-package basis

        self.all_packages = _all_packages

    def create_html_report(self, filename):
        """
        Create static html showing packages diff per node

        :return: buff with html
        """
        _report = reporter.ReportToFile(
            reporter.HTMLPackageCandidates(),
            filename
        )
        _report({
            "nodes": self.nodes,
            "diffs": {}
        })
+
+
if __name__ == '__main__':
    # ad-hoc manual run: collect installed packages from all minions
    # and write the report next to the current working directory
    # init connection to salt and collect minion data
    cl = CloudPackageChecker()

    # collect data on installed packages
    cl.collect_installed_packages()

    # diff installed and candidates
    # cl.collect_packages()

    # report it
    cl.create_html_report("./pkg_versions.html")

    sys.exit(0)
diff --git a/cfg_checker/reclass_cmp.py b/cfg_checker/reclass_cmp.py
new file mode 100644
index 0000000..d644679
--- /dev/null
+++ b/cfg_checker/reclass_cmp.py
@@ -0,0 +1,312 @@
+"""Model Comparer:
+- yaml parser
+- class tree comparison
+"""
+import itertools
+import os
+import yaml
+
+import reporter
+from cfg_checker.common import logger, logger_cli
+
+
# NOTE: the original `global` statements here were no-ops (`global` only has
# an effect inside a function body); these are plain module-level constants.

# Prefix used when composing the comparison report file name.
prefix_name = "emk"
# Name and root path of the first (left-hand) reclass model to compare.
model_name_1 = "dev"
model_path_1 = "/Users/savex/proj/mediakind/reclass-dev"
# Name and root path of the second (right-hand) reclass model to compare.
model_name_2 = "stg"
model_path_2 = "/Users/savex/proj/mediakind/reclass-stg"
+
+
class ModelComparer(object):
    """Collection of functions to compare model data.

    Loads two reclass model trees from disk into ``self.models`` and
    produces a per-YAML-file diff report between them.
    """
    # NOTE(review): class-level mutable attribute — every instance shares this
    # dict; presumably only one comparer is ever created. Verify before
    # instantiating more than one.
    models = {}

    @staticmethod
    def load_yaml_class(fname):
        """Loads a yaml from the file and forms a tree item

        Arguments:
            fname {string} -- full path to the yaml file

        Returns the parsed mapping (empty dict for an empty file), or None
        when the YAML fails to parse (the error is logged).
        Raises a generic Exception when the file cannot be read.
        """
        _yaml = {}
        try:
            _size = 0
            with open(fname, 'r') as f:
                # NOTE(review): yaml.load without an explicit Loader can
                # construct arbitrary objects on untrusted input; reclass
                # files are assumed trusted here — consider yaml.safe_load.
                _yaml = yaml.load(f)
                # file position after the full read == file size, for logging
                _size = f.tell()
            # TODO: do smth with the data
            if not _yaml:
                logger_cli.warning("WARN: empty file '{}'".format(fname))
                _yaml = {}
            else:
                logger.debug("...loaded YAML '{}' ({}b)".format(fname, _size))
            return _yaml
        except yaml.YAMLError as exc:
            # parse error: log it and fall through (implicitly returns None)
            logger_cli.error(exc)
        except IOError as e:
            # NOTE: e.message is Python 2 only — this module targets py2
            logger_cli.error(
                "Error loading file '{}': {}".format(fname, e.message)
            )
            raise Exception("CRITICAL: Failed to load YAML data: {}".format(
                e.message + e.strerror
            ))

    def load_model_tree(self, name, root_path="/srv/salt/reclass"):
        """Walks supplied path for the YAML filed and loads the tree

        Arguments:
            name {string} -- key the loaded tree is stored under in
                ``self.models``
            root_folder_path {string} -- Path to Model's root folder. Optional
        """
        logger_cli.info("Loading reclass tree from '{}'".format(root_path))
        # prepare the file tree to walk
        raw_tree = {}
        # Credits to Andrew Clark@MIT. Original code is here:
        # http://code.activestate.com/recipes/577879-create-a-nested-dictionary-from-oswalk/
        root_path = root_path.rstrip(os.sep)
        # index of the first char after the parent folder's separator
        start = root_path.rfind(os.sep) + 1
        root_key = root_path.rsplit(os.sep, 1)[1]
        # Look Ma! I am walking the file tree with no recursion!
        for path, dirs, files in os.walk(root_path):
            # if this is a hidden folder, ignore it
            _filders_list = path[start:].split(os.sep)
            if any(item.startswith(".") for item in _filders_list):
                continue
            # cut absolute part of the path and split folder names
            folders = path[start:].split(os.sep)
            subdir = {}
            # create generator of files that are not hidden
            _exts = ('.yml', '.yaml')
            _subfiles = (file for file in files
                         if file.endswith(_exts) and not file.startswith('.'))
            for _file in _subfiles:
                # the full file name (extension included) is the node key
                _subnode = _file
                # load all YAML class data into the tree
                subdir[_subnode] = self.load_yaml_class(
                    os.path.join(path, _file)
                )
                try:
                    # Save original filepath, just in case
                    subdir[_subnode]["_source"] = os.path.join(
                        path[start:],
                        _file
                    )
                except Exception:
                    # load_yaml_class returned a non-mapping (e.g. None on a
                    # parse error) — record it and keep walking
                    logger.warning(
                        "Non-yaml file detected: {}".format(_file)
                    )
            # creating dict structure out of folder list. Pure python magic
            # NOTE(review): `reduce` is a builtin only on Python 2
            parent = reduce(dict.get, folders[:-1], raw_tree)
            parent[folders[-1]] = subdir
        # save it as a single data object
        self.models[name] = raw_tree[root_key]
        return True

    def generate_model_report_tree(self):
        """Use two loaded models to generate comparison table with
        values groupped by YAML files

        Returns a dict keyed by colon-separated tree paths; each entry
        carries raw/str value pairs plus the class file and param path.
        """
        def find_changes(dict1, dict2, path=""):
            # recursive diff of two mappings; report keys are
            # colon-separated paths into the model tree
            _report = {}
            for k in dict1.keys():
                # yamls might load values as non-str types
                if not isinstance(k, str):
                    _new_path = path + ":" + str(k)
                else:
                    _new_path = path + ":" + k
                # ignore _source key
                if k == "_source":
                    continue
                # check if this is an env name cluster entry
                if dict2 is not None and \
                        k == model_name_1 and \
                        model_name_2 in dict2.keys():
                    # the env-specific subtrees are compared to each other
                    k1 = model_name_1
                    k2 = model_name_2
                    if type(dict1[k1]) is dict:
                        if path == "":
                            _new_path = k1
                        _child_report = find_changes(
                            dict1[k1],
                            dict2[k2],
                            _new_path
                        )
                        _report.update(_child_report)
                elif dict2 is None or k not in dict2:
                    # no key in dict2
                    _report[_new_path] = {
                        "type": "value",
                        "raw_values": [dict1[k], "N/A"],
                        "str_values": [
                            "{}".format(dict1[k]),
                            "n/a"
                        ]
                    }
                    logger.info(
                        "{}: {}, {}".format(_new_path, dict1[k], "N/A")
                    )
                else:
                    if type(dict1[k]) is dict:
                        # both sides have a mapping here: recurse
                        if path == "":
                            _new_path = k
                        _child_report = find_changes(
                            dict1[k],
                            dict2[k],
                            _new_path
                        )
                        _report.update(_child_report)
                    elif type(dict1[k]) is list and type(dict2[k]) is list:
                        # use ifilterfalse to compare lists of dicts
                        # NOTE(review): itertools.ifilterfalse is py2-only
                        try:
                            _removed = list(
                                itertools.ifilterfalse(
                                    lambda x: x in dict2[k],
                                    dict1[k]
                                )
                            )
                            _added = list(
                                itertools.ifilterfalse(
                                    lambda x: x in dict1[k],
                                    dict2[k]
                                )
                            )
                        except TypeError as e:
                            # debug routine,
                            # should not happen, due to list check above
                            logger.error(
                                "Caught lambda type mismatch: {}".format(
                                    e.message
                                )
                            )
                            logger_cli.warning(
                                "Types mismatch for correct compare: "
                                "{}, {}".format(
                                    type(dict1[k]),
                                    type(dict2[k])
                                )
                            )
                            _removed = None
                            _added = None
                        _original = ["= {}".format(item) for item in dict1[k]]
                        if _removed or _added:
                            # render removed/added items diff-style (-/+)
                            _removed_str_lst = ["- {}".format(item)
                                                for item in _removed]
                            _added_str_lst = ["+ {}".format(item)
                                              for item in _added]
                            _report[_new_path] = {
                                "type": "list",
                                "raw_values": [
                                    dict1[k],
                                    _removed_str_lst + _added_str_lst
                                ],
                                "str_values": [
                                    "{}".format('\n'.join(_original)),
                                    "{}\n{}".format(
                                        '\n'.join(_removed_str_lst),
                                        '\n'.join(_added_str_lst)
                                    )
                                ]
                            }
                            logger.info(
                                "{}:\n"
                                "{} original items total".format(
                                    _new_path,
                                    len(dict1[k])
                                )
                            )
                            if _removed:
                                logger.info(
                                    "{}".format('\n'.join(_removed_str_lst))
                                )
                            if _added:
                                logger.info(
                                    "{}".format('\n'.join(_added_str_lst))
                                )
                    else:
                        # in case of type mismatch
                        # considering it as not equal
                        d1 = dict1
                        d2 = dict2
                        val1 = d1[k] if isinstance(d1, dict) else d1
                        val2 = d2[k] if isinstance(d2, dict) else d2
                        try:
                            match = val1 == val2
                        except TypeError as e:
                            logger.warning(
                                "One of the values is not a dict: "
                                "{}, {}".format(
                                    str(dict1),
                                    str(dict2)
                                ))
                            match = False
                        if not match:
                            _report[_new_path] = {
                                "type": "value",
                                "raw_values": [val1, val2],
                                "str_values": [
                                    "{}".format(val1),
                                    "{}".format(val2)
                                ]
                            }
                            logger.info("{}: {}, {}".format(
                                _new_path,
                                val1,
                                val2
                            ))
            return _report
        # tmp report for keys
        diff_report = find_changes(
            self.models[model_name_1],
            self.models[model_name_2]
        )
        # prettify the report
        for key in diff_report.keys():
            # break the key in two parts: class file path and param path
            _ext = ".yml"
            if ".yaml" in key:
                _ext = ".yaml"
            _split = key.split(_ext)
            _file_path = _split[0]
            _param_path = "none"
            if len(_split) > 1:
                _param_path = _split[1]
            diff_report[key].update({
                "class_file": _file_path + _ext,
                "param": _param_path,
            })

        diff_report["diff_names"] = [model_name_1, model_name_2]
        return diff_report
+
+
# temporary executing the parser as a main prog
if __name__ == '__main__':
    # load both model trees, then diff them
    comparer = ModelComparer()
    comparer.load_model_tree(model_name_1, model_path_1)
    comparer.load_model_tree(model_name_2, model_path_2)
    diffs = comparer.generate_model_report_tree()

    # compose "<prefix>-<name1>-vs-<name2>.html" and render the report
    report_file = "{}-{}-vs-{}.html".format(
        prefix_name,
        model_name_1,
        model_name_2
    )
    report = reporter.ReportToFile(
        reporter.HTMLModelCompare(),
        report_file
    )
    logger_cli.info("...generating report to {}".format(report_file))
    report({
        "nodes": {},
        "diffs": diffs
    })
diff --git a/cfg_checker/reporter.py b/cfg_checker/reporter.py
new file mode 100644
index 0000000..3b21894
--- /dev/null
+++ b/cfg_checker/reporter.py
@@ -0,0 +1,178 @@
+import jinja2
+import six
+import abc
+import os
+
+from cfg_checker.common import const
+
# Repository root: one level up from the directory holding this module
# (the 'templates' folder is resolved relative to it).
pkg_dir = os.path.normpath(
    os.path.join(os.path.dirname(__file__), os.pardir)
)
+
+
def shortname(node_fqdn):
    """Return the host part of *node_fqdn* — everything before the first dot."""
    _host, _sep, _domain = node_fqdn.partition(".")
    return _host
+
+
def is_equal(pkg_dict):
    """Tell whether a package's installed version matches its candidate."""
    _installed = pkg_dict['installed']
    _candidate = pkg_dict['candidate']
    return _installed == _candidate
+
+
def is_active(node_dict):
    """Tell whether the node's recorded status marks it as up."""
    # compare the stored status against the "node is up" constant
    _status = node_dict['status']
    return _status == const.NODE_UP
+
+
def line_breaks(text):
    """Convert newline characters in *text* to HTML ``<br />`` tags."""
    return "<br />".join(text.split("\n"))
+
+
@six.add_metaclass(abc.ABCMeta)
class _Base(object):
    """Abstract base for report generators.

    Holds a shared jinja2 environment pointing at the package's
    ``templates`` directory; concrete reports implement ``__call__``.
    """

    def __init__(self):
        # one jinja2 environment per report instance;
        # subclasses register their filters on it before rendering
        self.jinja2_env = self.init_jinja2_env()

    @abc.abstractmethod
    def __call__(self, payload):
        # render `payload` into the report's output; must be overridden
        pass

    @staticmethod
    def init_jinja2_env():
        # templates live in '<pkg_dir>/templates'; trim_blocks/lstrip_blocks
        # keep rendered output free of jinja2 block indentation artifacts
        return jinja2.Environment(
            loader=jinja2.FileSystemLoader(os.path.join(pkg_dir, 'templates')),
            trim_blocks=True,
            lstrip_blocks=True)
+
+
class _TMPLBase(_Base):
    """Template-driven report base.

    Merges the payload into a common data skeleton, lets subclasses
    enrich it, computes totals, registers the template filters and
    renders the jinja2 template named by ``tmpl``.
    """

    @abc.abstractproperty
    def tmpl(self):
        # name of the jinja2 template file, e.g. "pkg_versions_tmpl.j2"
        pass

    @staticmethod
    def _count_totals(data):
        # global counter: number of nodes present in the payload
        data['counters']['total_nodes'] = len(data['nodes'])

    def __call__(self, payload):
        # start from the shared skeleton and merge the payload in
        data = self.common_data()
        data.update({
            "nodes": payload['nodes'],
            "diffs": payload['diffs']
        })

        # template-specific enrichment, then the global counters
        self._extend_data(data)
        self._count_totals(data)

        # register the filters the templates rely on
        _filters = {
            'shortname': shortname,
            'is_equal': is_equal,
            'is_active': is_active,
            'linebreaks': line_breaks,
        }
        for _name, _func in _filters.items():
            self.jinja2_env.filters[_name] = _func

        # render!
        return self.jinja2_env.get_template(self.tmpl).render(data)

    def common_data(self):
        # data skeleton shared by all reports
        return {
            'counters': {},
            'salt_info': {}
        }

    def _extend_data(self, data):
        # hook for subclasses; the default adds nothing
        pass
+
+
# Package versions report
class HTMLPackageCandidates(_TMPLBase):
    """HTML report of installed vs candidate package versions per node."""

    # jinja2 template used to render this report
    tmpl = "pkg_versions_tmpl.j2"

    @staticmethod
    def is_fail_uniq(p_dict, p_name, nodes, node_name):
        """Check whether other nodes of the same group show the same failure.

        Returns True when at least one other node in the node's group has
        this package installed at the same candidate version.
        """
        # look up package fail for nodes with similar role
        _tgroup = nodes[node_name]['node_group']
        # filter all nodes with the same role, excluding this node
        _nodes_list = filter(
            lambda nd: nodes[nd]['node_group'] == _tgroup and nd != node_name,
            nodes
        )
        # lookup same package
        _fail_uniq = False
        for _node_name in _nodes_list:
            # check if there is a package present on node
            _nd = nodes[_node_name]['packages']
            if p_name not in _nd:
                continue
            # if both packages have same version and differ from candidate
            if p_dict['candidate'] == _nd[p_name]['candidate'] \
                    and _nd[p_name]['candidate'] == _nd[p_name]['installed']:
                # it is not uniq, mark and break
                # (fix: break was missing although the comment promised it)
                _fail_uniq = True
                break
        return _fail_uniq

    def _extend_data(self, data):
        """Annotate each package with equality flags and fill the counters."""
        _all_pkg = 0
        # .items() instead of py2-only .iteritems(): same pairs, py3-friendly
        for key, value in data['nodes'].items():
            # add count of packages for this node to total
            # (fix: count the node's packages, not the node dict's attributes)
            _all_pkg += len(value['packages'])

            # count differences
            data['counters'][key] = {}
            data['counters'][key]['packages'] = len(value['packages'])
            data['counters'][key]['package_diff'] = 0
            for pkg_name, pkg_value in value['packages'].items():
                if pkg_value['installed'] != pkg_value['candidate']:
                    # mismatch: flag it and record whether it is node-specific
                    pkg_value['is_equal'] = False
                    pkg_value['fail_uniq'] = self.is_fail_uniq(
                        pkg_value,
                        pkg_name,
                        data['nodes'],
                        key
                    )
                    data['counters'][key]['package_diff'] += 1
                else:
                    pkg_value['is_equal'] = True
                    pkg_value['fail_uniq'] = False

        data['counters']['total_packages'] = _all_pkg
+
+
# Model comparison report
class HTMLModelCompare(_TMPLBase):
    """HTML report of the differences found between two reclass models."""

    # jinja2 template used to render this report
    tmpl = "model_tree_cmp_tmpl.j2"

    def _extend_data(self, data):
        # the pair of model names travels inside the diffs mapping;
        # move it into its own slot so templates can address it directly
        data["names"] = data["diffs"].pop("diff_names")
        # counters - number of differing entries between the two models
        data['counters']['mdl_diff'] = len(data["diffs"])
+
+
class ReportToFile(object):
    """Callable that renders a report and writes it out.

    ``target`` may be a path string (opened for writing) or an
    already-open file-like object (written to directly).
    """

    def __init__(self, report, target):
        # `report` is a callable producing the rendered report text
        self.report = report
        self.target = target

    def __call__(self, payload):
        # render first, then dispatch on the target's type
        _rendered = self.report(payload)
        if isinstance(self.target, six.string_types):
            self._wrapped_dump(_rendered)
        else:
            self._dump(_rendered, self.target)

    def _wrapped_dump(self, payload):
        # target is a path: open it in text mode and write the report
        with open(self.target, 'wt') as target:
            self._dump(payload, target)

    @staticmethod
    def _dump(payload, target):
        # lowest-level write, shared by both dispatch branches
        target.write(payload)