Network check fixes
- Proper network mapping
- Proper reclass lookup
- VIP detection
- Simple error gathering
- IP shown as 'exploded', i.e. in CIDR format
- MTU matching and detection
- Errors class for handling errors, including codes and indices
- Summary and detailed errors view
- Flake8 refactoring
Change-Id: I8ee37d345bdc21c7ad930bf8305acd28f8c121c8
Related-PROD: PROD-28199
diff --git a/cfg_checker/cfg_check.py b/cfg_checker/cfg_check.py
index 6b64804..00e40d2 100644
--- a/cfg_checker/cfg_check.py
+++ b/cfg_checker/cfg_check.py
@@ -2,11 +2,10 @@
import os
import sys
import traceback
+from logging import DEBUG, INFO
-from logging import INFO, DEBUG
-
-from cfg_checker.common.exception import CheckerException
from cfg_checker.common import config, logger, logger_cli
+from cfg_checker.common.exception import CheckerException
pkg_dir = os.path.dirname(__file__)
@@ -18,6 +17,7 @@
'reclass': ['list', 'diff']
}
+
class MyParser(argparse.ArgumentParser):
def error(self, message):
sys.stderr.write('Error: {0}\n\n'.format(message))
@@ -36,14 +36,14 @@
def config_check_entrypoint():
"""
- Main entry point. Uses nested parsers structure
+ Main entry point. Uses nested parsers structure
with a default function to execute the comand
:return: - no return value
"""
# Main entrypoint
parser = MyParser(prog="# Mirantis Cloud configuration checker")
-
+
# Parsers (each parser can have own arguments)
# - subparsers (command)
# |- pkg_parser
@@ -109,6 +109,12 @@
help="Do network check and print the result"
)
+ net_check_parser.add_argument(
+ '--detailed',
+ action="store_true", default=False,
+ help="Print error details after summary"
+ )
+
net_report_parser = net_subparsers.add_parser(
'report',
help="Generate network check report"
@@ -119,7 +125,7 @@
metavar='network_html_filename',
help="HTML filename to save report"
)
-
+
# reclass
reclass_parser = subparsers.add_parser(
'reclass',
@@ -157,18 +163,16 @@
help="HTML filename to save report"
)
-
-
- #parse arguments
+ # parse arguments
try:
args = parser.parse_args()
- except TypeError as e:
+ except TypeError:
logger_cli.info("\n# Please, check arguments")
return
# Pass externally configured values
config.ssh_uses_sudo = args.sudo
-
+
# Handle options
if args.debug:
logger_cli.setLevel(DEBUG)
@@ -191,14 +195,18 @@
else:
# form function name to call
_method_name = "do_" + args.type
- _target_module = __import__("cfg_checker.modules."+args.command, fromlist=[""])
+ _target_module = __import__(
+ "cfg_checker.modules."+args.command,
+ fromlist=[""]
+ )
_method = getattr(_target_module, _method_name)
-
+
# Execute the command
result = _method(args)
logger.debug(result)
+
def cli_main():
try:
config_check_entrypoint()
@@ -216,5 +224,6 @@
))
))
+
if __name__ == '__main__':
cli_main()
diff --git a/cfg_checker/clients/__init__.py b/cfg_checker/clients/__init__.py
index 3e1f55c..88992f7 100644
--- a/cfg_checker/clients/__init__.py
+++ b/cfg_checker/clients/__init__.py
@@ -1,5 +1,5 @@
-from cfg_checker.common.salt_utils import SaltRemote
from cfg_checker.common import logger
+from cfg_checker.common.salt_utils import SaltRemote
# instance of the salt client
salt = None
diff --git a/cfg_checker/common/__init__.py b/cfg_checker/common/__init__.py
index 297ace6..427bc76 100644
--- a/cfg_checker/common/__init__.py
+++ b/cfg_checker/common/__init__.py
@@ -1,13 +1,14 @@
-import os
import const
-from settings import pkg_dir, config
-from other import Utils
from log import logger, logger_cli
+from other import Utils
+
+from settings import config
+
utils = Utils()
const = const
logger = logger
logger_cli = logger_cli
-config = config
\ No newline at end of file
+config = config
diff --git a/cfg_checker/common/const.py b/cfg_checker/common/const.py
index 8ca0d8c..f1f69ae 100644
--- a/cfg_checker/common/const.py
+++ b/cfg_checker/common/const.py
@@ -1,7 +1,7 @@
"""Constants that is not to be changed and used in all other files
"""
-from __future__ import print_function, absolute_import
+from __future__ import absolute_import, print_function
import itertools
diff --git a/cfg_checker/common/exception.py b/cfg_checker/common/exception.py
index 8e8b818..52aab2d 100644
--- a/cfg_checker/common/exception.py
+++ b/cfg_checker/common/exception.py
@@ -31,3 +31,9 @@
def __init__(self, message, *args, **kwargs):
super(InvalidReturnException, self).__init__(message, *args, **kwargs)
self.message = "# Unexpected return value: {}".format(message)
+
+
+class ErrorMappingException(CheckerException):
+ def __init__(self, message, *args, **kwargs):
+ super(ErrorMappingException, self).__init__(message, *args, **kwargs)
+ self.message = "# Unexpected error mapping/type: {}".format(message)
diff --git a/cfg_checker/common/log.py b/cfg_checker/common/log.py
index 9b29ba4..6edac2f 100644
--- a/cfg_checker/common/log.py
+++ b/cfg_checker/common/log.py
@@ -1,5 +1,5 @@
-import os
import logging
+import os
pkg_dir = os.path.dirname(__file__)
pkg_dir = os.path.join(pkg_dir, os.pardir, os.pardir)
@@ -85,6 +85,7 @@
return logger, logger_cli
+
# init instances of logger to be used by all other modules
logger, logger_cli = setup_loggers(
'cfg_checker',
diff --git a/cfg_checker/common/other.py b/cfg_checker/common/other.py
index 47cb865..1d34776 100644
--- a/cfg_checker/common/other.py
+++ b/cfg_checker/common/other.py
@@ -3,7 +3,6 @@
import subprocess
from cfg_checker.common.const import all_roles_map
-
from cfg_checker.common.exception import ConfigException
pkg_dir = os.path.dirname(__file__)
diff --git a/cfg_checker/common/salt_utils.py b/cfg_checker/common/salt_utils.py
index b913531..8b1b47f 100644
--- a/cfg_checker/common/salt_utils.py
+++ b/cfg_checker/common/salt_utils.py
@@ -3,12 +3,13 @@
"""
import json
import os
-import requests
import time
-from cfg_checker.common import logger, logger_cli, config
+from cfg_checker.common import config, logger, logger_cli
+from cfg_checker.common.exception import InvalidReturnException, SaltException
from cfg_checker.common.other import shell
-from cfg_checker.common.exception import SaltException, InvalidReturnException
+
+import requests
def _extract_password(_raw):
@@ -17,11 +18,11 @@
else:
try:
_json = json.loads(_raw)
- except ValueError as e:
+ except ValueError:
raise SaltException(
"# Return value is not a json: '{}'".format(_raw)
)
-
+
return _json["local"]
@@ -41,7 +42,7 @@
_ssh_cmd.append(config.ssh_host)
if config.ssh_uses_sudo:
_ssh_cmd.append("sudo")
-
+
_ssh_cmd.append(_salt_cmd)
_ssh_cmd = " ".join(_ssh_cmd)
logger_cli.debug("...calling salt: '{}'".format(_ssh_cmd))
@@ -53,6 +54,7 @@
else:
return _extract_password(_result)
+
def get_local_password():
"""Calls salt locally to get password from the pillar
@@ -86,7 +88,13 @@
self._token = self._login()
self.last_response = None
- def get(self, path='', headers=default_headers, cookies=None, timeout=None):
+ def get(
+ self,
+ path='',
+ headers=default_headers,
+ cookies=None,
+ timeout=None
+ ):
_path = os.path.join(self.uri, path)
logger.debug("# GET '{}'\nHeaders: '{}'\nCookies: {}".format(
_path,
@@ -108,12 +116,14 @@
_data = str(data).replace(self._pass, "*****")
else:
_data = data
- logger.debug("# POST '{}'\nHeaders: '{}'\nCookies: {}\nBody: {}".format(
- _path,
- headers,
- cookies,
- _data
- ))
+ logger.debug(
+ "# POST '{}'\nHeaders: '{}'\nCookies: {}\nBody: {}".format(
+ _path,
+ headers,
+ cookies,
+ _data
+ )
+ )
return requests.post(
os.path.join(self.uri, path),
headers=headers,
@@ -289,7 +299,7 @@
"""
try:
_r = self.salt_request('get', 'minions', timeout=10)
- except requests.exceptions.ReadTimeout as e:
+ except requests.exceptions.ReadTimeout:
logger_cli.debug("... timeout waiting list minions from Salt API")
_r = None
return _r[0] if _r else None
@@ -322,7 +332,7 @@
def get_active_nodes(self):
"""Used when other minion list metods fail
-
+
:return: json result from salt test.ping
"""
if config.skip_nodes:
@@ -336,7 +346,7 @@
expr_form='compound')
else:
_r = self.cmd('*', 'test.ping')
- # Return all nodes that responded
+ # Return all nodes that responded
return [node for node in _r.keys() if _r[node]]
def get_monitoring_ip(self, param_name):
@@ -391,7 +401,7 @@
"""
REST variation of file.get_managed
CLI execution goes like this (10 agrs):
- salt cfg01\* file.manage_file /root/test_scripts/pkg_versions.py
+ salt cfg01\\* file.manage_file /root/test_scripts/pkg_versions.py
'' '{}' /root/diff_pkg_version.py
'{hash_type: 'md5', 'hsum': <md5sum>}' root root '755' base ''
makedirs=True
diff --git a/cfg_checker/common/settings.py b/cfg_checker/common/settings.py
index 8d8111c..33e7c25 100644
--- a/cfg_checker/common/settings.py
+++ b/cfg_checker/common/settings.py
@@ -1,7 +1,9 @@
import os
from exception import ConfigException
-from log import logger, logger_cli
+
+from log import logger_cli
+
from other import utils
pkg_dir = os.path.dirname(__file__)
@@ -74,7 +76,11 @@
if os.path.isfile(_config_path):
with open(_config_path) as _f:
_list = _f.read().splitlines()
- logger_cli.info("# Loading env vars from '{}'".format(_config_path))
+ logger_cli.info(
+ "# Loading env vars from '{}'".format(
+ _config_path
+ )
+ )
else:
raise ConfigException(
"# Failed to load enviroment vars from '{}'".format(
@@ -103,7 +109,11 @@
)
)
else:
- logger_cli.debug("-> ...loaded total of '{}' vars".format(len(_list)))
+ logger_cli.debug(
+ "-> ...loaded total of '{}' vars".format(
+ len(_list)
+ )
+ )
self.salt_env = _env_name
def __init__(self):
diff --git a/cfg_checker/helpers/console_utils.py b/cfg_checker/helpers/console_utils.py
index 33e1a39..5c32506 100644
--- a/cfg_checker/helpers/console_utils.py
+++ b/cfg_checker/helpers/console_utils.py
@@ -1,4 +1,3 @@
-from time import sleep
import sys
@@ -9,7 +8,7 @@
self.bar_size = bar_size
def write_progress(self, index, note=''):
- #calc index and percent values
+ # calc index and percent values
_percent = (100 * index) / self.total
_index = (self.bar_size * index) / self.total
# clear the line
@@ -24,7 +23,7 @@
note
))
sys.stdout.flush()
-
+
@staticmethod
def newline():
sys.stdout.write('\n')
diff --git a/cfg_checker/helpers/errors.py b/cfg_checker/helpers/errors.py
new file mode 100644
index 0000000..b124315
--- /dev/null
+++ b/cfg_checker/helpers/errors.py
@@ -0,0 +1,123 @@
+from cfg_checker.common import logger
+from cfg_checker.common.exception import ErrorMappingException
+
+
+class ErrorIndex(object):
+ _area_code = ""
+ _delimiter = ""
+ _index = 0
+ _errors = {}
+ _types = {
+ 0: "Unknown error"
+ }
+
+    def __init__(self, area_code, delimiter='-'):
+        self._area_code, self._delimiter = area_code, delimiter
+        self._errors, self._types = {}, {0: "Unknown error"}
+        self._index = 1
+
+ def _format_error_code(self, index):
+ _t = "{:02d}".format(self._errors[index]['type'])
+ _i = "{:04d}".format(index)
+ _fmt = self._delimiter.join([self._area_code, _t, _i])
+ return _fmt
+
+ def _format_error(self, index):
+ # error code
+ _code = self._format_error_code(index)
+ # prepare data as string list
+ _d = self._errors[index]['data']
+ _data = ["{}: {}".format(_k, _v) for _k, _v in _d.iteritems()]
+ # format message
+ _msg = "### {}: {}\n{}".format(
+ _code,
+ self._get_error_type_text(self._errors[index]['type']),
+ "\n".join(_data)
+ )
+ return _msg
+
+ def _get_error_type_text(self, err_type):
+ if err_type not in self._types:
+ raise ErrorMappingException(
+ "type code {} not found".format(err_type)
+ )
+ else:
+ return self._types[err_type]
+
+ def get_error_code(self, index):
+ if index in self._errors.keys():
+            return self._format_error_code(index)
+ else:
+ raise ErrorMappingException(
+ "no error found for index {}".format(index)
+ )
+
+ def add_error_type(self, err_type, message):
+ if err_type in self._types:
+ raise ErrorMappingException(
+ "type code {} reserved for {}".format(
+ err_type,
+ self._types[err_type]
+ )
+ )
+ else:
+ self._types[err_type] = message
+
+ def add_error(self, err_type, **kwargs):
+ # check error type
+ if err_type not in self._types.keys():
+ logger.error(
+ "Error type not listed: '{}'; unknown used".format(err_type)
+ )
+ err_type = 0
+ _err = {
+ "type": err_type,
+ "data": kwargs
+ }
+ self._errors[self._index] = _err
+ self._index += 1
+
+ def get_errors_total(self):
+ return self._index-1
+
+ def get_indices(self):
+ return self._errors.keys()
+
+ def get_error(self, index):
+ if index in self._errors.keys():
+ return self._format_error(index)
+ else:
+ return "Unknown error index of {}".format(index)
+
+ def get_summary(self, print_zeros=True):
+ # create summary with counts per error type
+ _list = []
+ for _type in self._types.keys():
+ _len = len(
+ filter(
+ lambda i: self._errors[i]['type'] == _type,
+ self._errors
+ )
+ )
+ if _len:
+ _num_str = "{:5d}".format(_len)
+ elif print_zeros:
+ _num_str = "{:>5s}".format("-")
+ else:
+ continue
+ _list.append(
+ "{}: {}".format(
+ _num_str,
+ self._types[_type]
+ )
+ )
+
+ return "\n".join(_list)
+
+ def get_errors_as_list(self):
+ # create list of strings with error messages
+ _list = []
+        for _idx in range(1, self._index):
+ _list.append("{}".format(self.get_error(_idx)))
+
+ return _list
diff --git a/cfg_checker/modules/network/__init__.py b/cfg_checker/modules/network/__init__.py
index 6217b1a..8f4a037 100644
--- a/cfg_checker/modules/network/__init__.py
+++ b/cfg_checker/modules/network/__init__.py
@@ -1,7 +1,8 @@
+from cfg_checker.common import logger_cli
+from cfg_checker.helpers import args_utils
+
import checker
-from cfg_checker.helpers import args_utils
-from cfg_checker.common import logger_cli
def _prepare_check():
_checker_class = checker.NetworkChecker()
@@ -9,19 +10,23 @@
_checker_class.collect_network_info()
return _checker_class
+
def do_check(args):
logger_cli.info("# Network check to console")
netChecker = _prepare_check()
netChecker.print_network_report()
- return
+ netChecker.print_summary()
+
+ if args.detailed:
+ netChecker.print_error_details()
def do_report(args):
logger_cli.info("# Network report")
_filename = args_utils.get_arg(args, 'html')
-
+
netChecker = _prepare_check()
netChecker.create_html_report(_filename)
diff --git a/cfg_checker/modules/network/checker.py b/cfg_checker/modules/network/checker.py
index b0056c8..25060a6 100644
--- a/cfg_checker/modules/network/checker.py
+++ b/cfg_checker/modules/network/checker.py
@@ -1,28 +1,63 @@
-import json
-import os
-import sys
import ipaddress
+import json
-from copy import deepcopy
+from cfg_checker.common import logger_cli
+from cfg_checker.modules.network.network_errors import NetworkErrors
+from cfg_checker.nodes import SaltNodes
from cfg_checker.reports import reporter
-from cfg_checker.common import utils, const
-from cfg_checker.common import config, logger, logger_cli, pkg_dir
-from cfg_checker.common import salt_utils
-from cfg_checker.nodes import SaltNodes, node_tmpl
class NetworkChecker(SaltNodes):
- @staticmethod
- def _map_network_for_host(host, if_class, net_list, data):
- if if_class.network in net_list.keys():
- # There is a network
- net_list[if_class.network][host] = data
+ def __init__(self):
+ super(NetworkChecker, self).__init__()
+ self.errors = NetworkErrors()
+
+ # adding net data to tree
+ def _add_data(self, _list, _n, _h, _d):
+ if _n not in _list:
+ _list[_n] = {}
+ _list[_n][_h] = [_d]
+ elif _h not in _list[_n]:
+ # there is no such host, just create it
+ _list[_n][_h] = [_d]
else:
- # create subnet key
- net_list[if_class.network] = {}
- # add the host to the dict
- net_list[if_class.network][host] = data
+ # there is such host... this is an error
+ self.errors.add_error(
+ self.errors.NET_DUPLICATE_IF,
+ host=_h,
+ dup_if=_d['name']
+ )
+ _list[_n][_h].append(_d)
+
+ # TODO: refactor map creation. Build one map instead of two separate
+ def _map_network_for_host(self, host, if_class, net_list, data):
+ # filter networks for this IF IP
+ _nets = [n for n in net_list.keys() if if_class.ip in n]
+ _masks = [n.netmask for n in _nets]
+ if len(_nets) > 1:
+ # There a multiple network found for this IP, Error
+ self.errors.add_error(
+ self.errors.NET_SUBNET_INTERSECT,
+ host=host,
+                networks="; ".join([str(_n) for _n in _nets])
+ )
+ # check mask match
+ if len(_nets) > 0 and if_class.netmask not in _masks:
+ self.errors.add_error(
+ self.errors.NET_MASK_MISMATCH,
+ host=host,
+ if_name=data['name'],
+ if_cidr=if_class.exploded,
+ if_mapped_networks=", ".join([str(_n) for _n in _nets])
+ )
+
+ if len(_nets) < 1:
+ self._add_data(net_list, if_class.network, host, data)
+ else:
+ # add all data
+ for net in _nets:
+ self._add_data(net_list, net, host, data)
return net_list
@@ -33,8 +68,11 @@
:return: none
"""
logger_cli.info("# Mapping node runtime network data")
- _result = self.execute_script_on_active_nodes("ifs_data.py", args=["json"])
-
+ _result = self.execute_script_on_active_nodes(
+ "ifs_data.py",
+ args=["json"]
+ )
+ self.stage = "Runtime"
for key in self.nodes.keys():
# check if we are to work with this node
if not self.is_node_available(key):
@@ -54,6 +92,7 @@
))
logger_cli.info("-> done collecting networks data")
+ # TODO: Mimic reclass structure for easy compare
logger_cli.info("### Building network tree")
# match interfaces by IP subnets
_all_nets = {}
@@ -66,26 +105,35 @@
if net_name in ['lo']:
# skip the localhost
continue
- _ip4s = net_data['ipv4']
- for _ip_str in _ip4s.keys():
- # create interface class
+ # get data and make sure that wide mask goes first
+ _ip4s = sorted(
+ net_data['ipv4'],
+ key=lambda s: s[s.index('/'):]
+ )
+ for _ip_str in _ip4s:
+ # create interface class
_if = ipaddress.IPv4Interface(_ip_str)
- net_data['name'] = net_name
- net_data['if'] = _if
-
- _all_nets = self._map_network_for_host(
- host,
- _if,
- _all_nets,
- net_data
- )
+ if 'name' not in net_data:
+ net_data['name'] = net_name
+ if 'ifs' not in net_data:
+ net_data['ifs'] = [_if]
+ # map it
+ _all_nets = self._map_network_for_host(
+ host,
+ _if,
+ _all_nets,
+ net_data
+ )
+ else:
+ # data is already there, just add VIP
+ net_data['ifs'].append(_if)
# save collected info
self.all_nets = _all_nets
-
def collect_reclass_networks(self):
logger_cli.info("# Mapping reclass networks")
+ self.stage = "Reclass"
# Get networks from reclass and mark them
_reclass_nets = {}
# Get required pillars
@@ -101,9 +149,11 @@
if 'interface' in _pillar:
_pillar = _pillar['interface']
else:
- logger_cli.info("...skipping node '{}', no IF section in reclass".format(
- node
- ))
+ logger_cli.info(
+ "... node '{}' skipped, no IF section in reclass".format(
+ node
+ )
+ )
continue
for _if_name, _if_data in _pillar.iteritems():
if 'address' in _if_data:
@@ -111,7 +161,7 @@
_if_data['address'] + '/' + _if_data['netmask']
)
_if_data['name'] = _if_name
- _if_data['if'] = _if
+ _if_data['ifs'] = [_if]
_reclass_nets = self._map_network_for_host(
node,
@@ -122,7 +172,6 @@
self.reclass_nets = _reclass_nets
-
def print_network_report(self):
"""
Create text report for CLI
@@ -132,7 +181,8 @@
_all_nets = self.all_nets.keys()
logger_cli.info("# Reclass networks")
logger_cli.info(
- " {0:17} {1:25}: {2:19} {3:5}{4:10} {5}{6} {7} / {8} / {9}".format(
+ " {0:17} {1:25}: "
+ "{2:19} {3:5}{4:10} {5}{6} {7} / {8} / {9}".format(
"Hostname",
"IF",
"IP",
@@ -145,7 +195,8 @@
"rcGate"
)
)
-
+ # TODO: Move matching to separate function
+ self.stage = "Matching"
_reclass = [n for n in _all_nets if n in self.reclass_nets]
for network in _reclass:
# shortcuts
@@ -154,7 +205,7 @@
names = sorted(self.all_nets[network].keys())
for hostname in names:
if not self.is_node_available(hostname, log=False):
- logger_cli.info(
+ logger_cli.info(
" {0:17} {1}".format(
hostname.split('.')[0],
"... no data for the node"
@@ -167,8 +218,8 @@
if not _route:
_gate = "no route!"
else:
- _gate = _route['gateway'] if _route['gateway'] else "empty"
-
+ _gate = _route['gateway'] if _route['gateway'] else "-"
+
# get the default gateway
if 'default' in _routes:
_d_gate = ipaddress.IPv4Address(
@@ -179,45 +230,107 @@
_d_gate_str = _d_gate if _d_gate else "No default gateway!"
_a = self.all_nets[network][hostname]
- # Check if reclass has such network
- if hostname in self.reclass_nets[network]:
- _r = self.reclass_nets[network][hostname]
- else:
- # Supply empty dict if there is no reclass gathered
- _r = {}
-
- # Take gateway parameter for this IF
- # from corresponding reclass record
- # TODO: Update gateway search mechanism
- if not self.is_node_available(hostname):
- _r_gate = "-"
- elif _a['if'].network not in self.reclass_nets:
- _r_gate = "no IF in reclass!"
- elif not hostname in self.reclass_nets[_a['if'].network]:
- _r_gate = "no IF on node in reclass!"
- else:
- _rd = self.reclass_nets[_a['if'].network][hostname]
- _r_gate = _rd['gateway'] if 'gateway' in _rd else "empty"
+ for _host in _a:
+ for _if in _host['ifs']:
+ # get proper reclass
+ _ip_str = str(_if.exploded)
+ _r = {}
+                    for _item in self.reclass_nets[network].get(hostname, []):
+ for _item_ifs in _item['ifs']:
+ if _ip_str == str(_item_ifs.exploded):
+ _r = _item
- if not 'enabled' in _r:
- _enabled = "no record!"
- else:
- _enabled = "(enabled)" if _r['enabled'] else "(disabled)"
- _text = "{0:25}: {1:19} {2:5}{3:10} {4:4}{5:10} {6} / {7} / {8}".format(
- _a['name'],
- str(_a['if'].ip),
- _a['mtu'],
- '('+str(_r['mtu'])+')' if 'mtu' in _r else '(unset!)',
- _a['state'],
- _enabled,
- _gate,
- _d_gate_str,
- _r_gate
- )
- logger_cli.info(
- " {0:17} {1}".format(hostname.split('.')[0], _text)
- )
-
+ # check if node is UP
+ if not self.is_node_available(hostname):
+ _r_gate = "-"
+ # get proper network from reclass
+ else:
+ # Lookup match for the ip
+ _r_gate = "no IF in reclass!"
+ # get all networks with this hostname
+ _rn = self.reclass_nets
+ _nets = filter(
+ lambda n: hostname in _rn[n].keys(),
+ self.reclass_nets
+ )
+ _rd = None
+ for _item in _nets:
+ # match ip
+ _r_dat = self.reclass_nets[_item][hostname]
+ for _r_ifs in _r_dat:
+ for _r_if in _r_ifs['ifs']:
+ if _if.ip == _r_if.ip:
+ _rd = _r_ifs
+ break
+ if _rd:
+ _gs = 'gateway'
+ _e = "empty"
+ _r_gate = _rd[_gs] if _gs in _rd else _e
+ break
+
+ # IF status in reclass
+ if 'enabled' not in _r:
+ _enabled = "no record!"
+ else:
+ _e = "enabled"
+ _d = "disabled"
+ _enabled = "("+_e+")" if _r[_e] else "("+_d+")"
+
+ _name = _host['name']
+ _rc_mtu = _r['mtu'] if 'mtu' in _r else None
+
+ # Check if this is a VIP
+ if _if.network.prefixlen == 32:
+ _name = " "*20
+ _ip_str += " VIP"
+ _rc_mtu = "(-)"
+ _enabled = "(-)"
+ _r_gate = "-"
+
+ # Check if this is a default MTU
+ elif _host['mtu'] == '1500':
+ # reclass is empty if MTU is untended to be 1500
+ _rc_mtu = "(-)"
+ elif _rc_mtu:
+ # if there is an MTU value, match it
+ if _host['mtu'] != str(_rc_mtu):
+ self.errors.add_error(
+ self.errors.NET_MTU_MISMATCH,
+ host=hostname,
+ if_name=_name,
+ if_cidr=_ip_str,
+ reclass_mtu=_rc_mtu,
+ runtime_mtu=_host['mtu']
+ )
+ else:
+ # there is no MTU value in reclass
+ self.errors.add_error(
+ self.errors.NET_MTU_EMPTY,
+ host=hostname,
+ if_name=_name,
+ if_cidr=_ip_str,
+ if_mtu=_host['mtu']
+ )
+
+                    _text = "{0:25}: {1:19} {2:5}{3:10} {4:4}{5:10} {6} " \
+ "/ {7} / {8}".format(
+ _name,
+ _ip_str,
+ _host['mtu'],
+ str(_rc_mtu) if _rc_mtu else "(No!)",
+ _host['state'],
+ _enabled,
+ _gate,
+ _d_gate_str,
+ _r_gate
+ )
+ logger_cli.info(
+ " {0:17} {1}".format(
+ hostname.split('.')[0],
+ _text
+ )
+ )
+
logger_cli.info("\n# Other networks")
_other = [n for n in _all_nets if n not in self.reclass_nets]
for network in _other:
@@ -225,17 +338,41 @@
names = sorted(self.all_nets[network].keys())
for hostname in names:
- _text = "{0:25}: {1:19} {2:5} {3:4}".format(
- self.all_nets[network][hostname]['name'],
- str(self.all_nets[network][hostname]['if'].ip),
- self.all_nets[network][hostname]['mtu'],
- self.all_nets[network][hostname]['state']
- )
- logger_cli.info(
- " {0:17} {1}".format(hostname.split('.')[0], _text)
- )
+ for _n in self.all_nets[network][hostname]:
+ _ifs = [str(ifs.ip) for ifs in _n['ifs']]
+ _text = "{0:25}: {1:19} {2:5} {3:4}".format(
+ _n['name'],
+ ", ".join(_ifs),
+ _n['mtu'],
+ _n['state']
+ )
+ logger_cli.info(
+ " {0:17} {1}".format(hostname.split('.')[0], _text)
+ )
-
+ def print_summary(self):
+ _total_errors = self.errors.get_errors_total()
+ # Summary
+ logger_cli.info(
+ "\n{:=^8s}\n{:^8s}\n{:=^8s}".format(
+ "=",
+ "Totals",
+ "="
+ )
+ )
+ logger_cli.info(self.errors.get_summary(print_zeros=False))
+ logger_cli.info('-'*20)
+ logger_cli.info("{:5d} total errors found\n".format(_total_errors))
+
+ def print_error_details(self):
+ # Detailed errors
+ if self.errors.get_errors_total() > 0:
+ logger_cli.info("\n# Errors")
+ for _msg in self.errors.get_errors_as_list():
+ logger_cli.info("{}\n".format(_msg))
+ else:
+ logger_cli.info("-> No errors\n")
+
def create_html_report(self, filename):
"""
Create static html showing network schema-like report
diff --git a/cfg_checker/modules/network/network_errors.py b/cfg_checker/modules/network/network_errors.py
new file mode 100644
index 0000000..2ec3d31
--- /dev/null
+++ b/cfg_checker/modules/network/network_errors.py
@@ -0,0 +1,42 @@
+import itertools
+
+from cfg_checker.helpers.errors import ErrorIndex
+
+
+_c = itertools.count(1)
+
+
+class NetworkErrors(ErrorIndex):
+ # error type codes here
+ NET_MTU_MISMATCH = next(_c)
+ NET_MTU_EMPTY = next(_c)
+ NET_DUPLICATE_IF = next(_c)
+ NET_SUBNET_INTERSECT = next(_c)
+ NET_MASK_MISMATCH = next(_c)
+
+ def __init__(self):
+ super(NetworkErrors, self).__init__("NET")
+
+ self.add_error_type(
+ self.NET_MTU_MISMATCH,
+ "MTU mismatch on runtime interface and in reclass"
+ )
+ self.add_error_type(
+ self.NET_MTU_EMPTY,
+ "MTU value is not 1500 on runtime and empty in reclass"
+ )
+ self.add_error_type(
+ self.NET_DUPLICATE_IF,
+ "Duplicate interface specified"
+ )
+ self.add_error_type(
+ self.NET_SUBNET_INTERSECT,
+ "Subnets intersection detected"
+ )
+ self.add_error_type(
+ self.NET_MASK_MISMATCH,
+ "IFs mask settings for the same subnet is not the same"
+ )
+
+
+del _c
diff --git a/cfg_checker/modules/packages/__init__.py b/cfg_checker/modules/packages/__init__.py
index 774e674..5e717d6 100644
--- a/cfg_checker/modules/packages/__init__.py
+++ b/cfg_checker/modules/packages/__init__.py
@@ -1,7 +1,7 @@
-import checker
-
from cfg_checker.helpers import args_utils
+import checker
+
def do_report(args):
"""Create package versions report, HTML
diff --git a/cfg_checker/modules/packages/checker.py b/cfg_checker/modules/packages/checker.py
index 8a3456d..3225f70 100644
--- a/cfg_checker/modules/packages/checker.py
+++ b/cfg_checker/modules/packages/checker.py
@@ -1,18 +1,12 @@
import json
-import os
-#import sys
-from copy import deepcopy
-
+from cfg_checker.common import const, logger_cli
from cfg_checker.common.exception import ConfigException
-from cfg_checker.common import utils, const
-from cfg_checker.common import config, logger, logger_cli, pkg_dir
-from cfg_checker.common import salt_utils
from cfg_checker.helpers.console_utils import Progress
-from cfg_checker.nodes import SaltNodes, node_tmpl
+from cfg_checker.nodes import SaltNodes
from cfg_checker.reports import reporter
-from versions import PkgVersions, DebianVersion, VersionCmpResult
+from versions import DebianVersion, PkgVersions, VersionCmpResult
class CloudPackageChecker(SaltNodes):
@@ -99,7 +93,6 @@
_eo += _val['results'].keys().count(const.VERSION_ERR)
_do += _val['results'].keys().count(const.VERSION_DOWN)
-
_progress.newline()
_data['errors'] = {
@@ -133,13 +126,15 @@
_text = _result[key]
try:
_dict = json.loads(_text[_text.find('{'):])
- except ValueError as e:
+ except ValueError:
logger_cli.info("... no JSON for '{}'".format(
key
))
- logger_cli.debug("ERROR:\n{}\n".format(_text[:_text.find('{')]))
+ logger_cli.debug(
+ "ERROR:\n{}\n".format(_text[:_text.find('{')])
+ )
_dict = {}
-
+
self.nodes[key]['packages'] = _dict
else:
self.nodes[key]['packages'] = {}
@@ -157,8 +152,10 @@
"""
# Preload OpenStack release versions
_desc = PkgVersions()
-
- logger_cli.info("# Cross-comparing: Installed vs Candidates vs Release")
+
+ logger_cli.info(
+ "# Cross-comparing: Installed vs Candidates vs Release"
+ )
_progress = Progress(len(self.nodes.keys()))
_progress_index = 0
_total_processed = 0
@@ -196,10 +193,10 @@
# no description - no library :)
_vers = {}
_pkg_desc = _desc.dummy_desc
-
+
# get specific set for this OS release if present
if _os in _vers:
- _v = _vers[_os]
+ _v = _vers[_os]
elif 'any' in _vers:
_v = _vers['any']
else:
@@ -212,13 +209,13 @@
"results": {},
"r": _release,
}
-
+
_cmp = VersionCmpResult(
_ver_ins,
_ver_can,
_all_packages[_name]['r']
)
-
+
# shortcut to results
_res = _all_packages[_name]['results']
# update status
@@ -240,7 +237,6 @@
self._packages = _all_packages
_progress.newline()
-
def create_report(self, filename, rtype, full=None):
"""
diff --git a/cfg_checker/modules/packages/versions.py b/cfg_checker/modules/packages/versions.py
index 10f65dc..9352dd6 100644
--- a/cfg_checker/modules/packages/versions.py
+++ b/cfg_checker/modules/packages/versions.py
@@ -1,7 +1,7 @@
import csv
import os
-from cfg_checker.common import config, logger, logger_cli, pkg_dir, const
+from cfg_checker.common import config, const, logger_cli, pkg_dir
class PkgVersions(object):
@@ -33,15 +33,16 @@
_app = row[2]
_repo = row[3]
# if release cell empty - use keyword 'any'
- _os_release = row[4] if len(row[4]) > 0 else 'any'
+ _os_release = row[4] if len(row[4]) > 0 else 'any'
# prepare versions dict
_l = self._labels
- _versions = {_l[i]:row[5+i] for i in range(0, len(row[5:]))}
-
+ _versions = {_l[i]: row[5+i] for i in range(0, len(row[5:]))}
+
if _pkg in self._list:
if _os_release in self._list[_pkg]["versions"]:
- # all pkg/os_releases should be uniq. If found, latest one used
+ # all pkg/os_releases should be uniq.
+ # If found, latest one used
logger_cli.info(
"-> WARNING: Duplicate package info found "
"'{}' (line {})".format(
@@ -59,17 +60,17 @@
"versions": {}
}
})
-
+
# and finally, update the versions for this release
self._list[_pkg]["versions"].update({
_os_release: _versions
})
-
+
def __getitem__(self, pkg_name):
- if pkg_name in self._list:
+ if pkg_name in self._list:
return self._list[pkg_name]
else:
- #return self._dummy_desc
+ # return self._dummy_desc
return None
@@ -95,7 +96,7 @@
_ord_map = [ord(ch) not in _chars for ch in version_fragment]
# if there is nothing to extract, return at once
if not any([_s in version_fragment for _s in _symbols]) \
- and not any(_ord_map):
+ and not any(_ord_map):
# no revisions
return version_fragment, ""
else:
@@ -114,7 +115,7 @@
_main = version_fragment[:_indices[0]]
_rev = version_fragment[_indices[0]:]
return _main, _rev
-
+
def __init__(self, version_string):
# save
if len(version_string) < 1:
@@ -139,7 +140,7 @@
self.upstream, self.upstream_rev = self.split_revision(_m)
self.debian, self.debian_rev = self.split_revision(_d)
self.version = version_string
-
+
# Following functions is a freestyle python mimic of apt's upstream, enjoy
# https://github.com/chaos/apt/blob/master/apt/apt-pkg/deb/debversion.cc#L42
# mimic produced in order not to pull any packages or call external code
@@ -160,7 +161,7 @@
return _num
_li += 1
_ri += 1
-
+
# diff found? lens equal?
if not _diff and _lL != _rL:
# lens not equal? Longer - later
@@ -168,7 +169,7 @@
else:
# equal
return 0
-
+
def _cmp_num(self, lf, rf):
# split fragments into lists
_lhf = lf.split('.') if '.' in lf else list(lf)
@@ -178,14 +179,14 @@
_rhf = [int(n) for n in _rhf if len(n)]
return self._cmp_fragment(_lhf, _rhf)
-
+
def _cmp_lex(self, lf, rf):
# cast each item into its ORD value
_lhf = [ord(n) for n in lf]
_rhf = [ord(n) for n in rf]
- return self._cmp_fragment(_lhf, _rhf)
- # end of cmps
+ return self._cmp_fragment(_lhf, _rhf)
+ # end of cmps
# main part compared using splitted numbers
# if equal, revision is compared using lexical comparizon
@@ -217,7 +218,7 @@
return True
else:
return False
-
+
def update_parts(self, target, status):
# updating parts of version statuses
if self._cmp_num(self.epoch, target.epoch) != 0:
@@ -226,13 +227,13 @@
self.epoch_status = const.VERSION_OK
if self._cmp_num(self.upstream, target.upstream) != 0 \
- or self._cmp_lex(self.upstream_rev, target.upstream_rev) != 0:
+ or self._cmp_lex(self.upstream_rev, target.upstream_rev) != 0:
self.upstream_status = status
else:
self.upstream_status = const.VERSION_OK
if self._cmp_lex(self.debian, target.debian) != 0 \
- or self._cmp_lex(self.debian_rev, target.debian_rev) != 0:
+ or self._cmp_lex(self.debian_rev, target.debian_rev) != 0:
self.debian_status = status
else:
self.debian_status = const.VERSION_OK
@@ -245,13 +246,12 @@
source = None
target = None
-
def __init__(self, i, c, r):
# compare three versions and write a result
self.source = i
self.status = const.VERSION_NA
self.action = const.ACT_NA
-
+
# Check if there is a release version present
if r and len(r.version) > 0 and r.version != 'n/a':
# I < C, installed version is older
@@ -333,7 +333,7 @@
elif i == c:
self.status = const.VERSION_OK
self.action = const.ACT_NA
-
+
# and we need to update per-part status
self.source.update_parts(self.target, self.status)
@@ -342,4 +342,4 @@
if _t.debian and _t.debian > _s.debian:
return True
else:
- return false
+ return False
diff --git a/cfg_checker/modules/reclass/__init__.py b/cfg_checker/modules/reclass/__init__.py
index 2546ec3..adae6df 100644
--- a/cfg_checker/modules/reclass/__init__.py
+++ b/cfg_checker/modules/reclass/__init__.py
@@ -1,21 +1,22 @@
import os
-import comparer
-import validator
-
from cfg_checker.common import logger_cli
from cfg_checker.helpers import args_utils
from cfg_checker.reports import reporter
+import comparer
+
+import validator
+
def do_list(args):
logger_cli.info("# Reclass list")
_arg_path = args_utils.get_arg(args, 'models_path')
logger_cli.info("-> Current path is: {}".format(_arg_path))
_path = args_utils.get_path_arg(_arg_path)
-
- logger_cli.info("# ...models path is '{}'".format(args.models_path))
-
+
+ logger_cli.info("# ...models path is '{}'".format(_path))
+
models = {}
for _folder in os.listdir(args.models_path):
# validate item as a model
@@ -24,15 +25,15 @@
_folder
)
_validated = validator.basic_model_validation_by_path(_model_path)
-
+
if not _validated:
logger_cli.info("-> '{}' not a valid model".format(_folder))
continue
else:
models[_folder] = _model_path
-
+
logger_cli.info("-> '{}' at '{}'".format(_folder, _model_path))
-
+
# TODO: collect info about the model
return
@@ -44,7 +45,7 @@
# checking folder params
_model1 = args_utils.get_path_arg(args.model1)
_model2 = args_utils.get_path_arg(args.model2)
-
+
# Do actual compare using hardcoded model names
mComparer = comparer.ModelComparer()
@@ -52,7 +53,7 @@
mComparer.model_path_1 = _model1
mComparer.model_name_2 = os.path.split(_model2)[1]
mComparer.model_path_2 = _model2
-
+
mComparer.load_model_tree(
mComparer.model_name_1,
mComparer.model_path_1
diff --git a/cfg_checker/modules/reclass/comparer.py b/cfg_checker/modules/reclass/comparer.py
index b0b7b37..6591d16 100644
--- a/cfg_checker/modules/reclass/comparer.py
+++ b/cfg_checker/modules/reclass/comparer.py
@@ -4,13 +4,14 @@
"""
import itertools
import os
+
+from cfg_checker.common import logger, logger_cli
+from cfg_checker.reports import reporter
+
import yaml
-from cfg_checker.reports import reporter
-from cfg_checker.common import logger, logger_cli
-
-def get_element(element_path, input_data):
+def get_element(element_path, input_data):
paths = element_path.split(":")
data = input_data
for i in range(0, len(paths)):
@@ -18,7 +19,7 @@
return data
-def pop_element(element_path, input_data):
+def pop_element(element_path, input_data):
paths = element_path.split(":")
data = input_data
# Search for last dict
@@ -38,7 +39,7 @@
"03_cluster": "classes:cluster",
"04_other": "classes"
}
-
+
models = {}
models_path = "/srv/salt/reclass"
model_name_1 = "source"
@@ -123,7 +124,7 @@
# creating dict structure out of folder list. Pure python magic
parent = reduce(dict.get, folders[:-1], raw_tree)
parent[folders[-1]] = subdir
-
+
self.models[name] = {}
# Brake in according to pathes
_parts = self._model_parts.keys()
@@ -133,7 +134,7 @@
self._model_parts[_parts[ii]],
raw_tree[root_key]
)
-
+
# save it as a single data object
self.models[name]["rc_diffs"] = raw_tree[root_key]
return True
@@ -223,8 +224,7 @@
if _removed or _added:
_removed_str_lst = ["- {}".format(item)
for item in _removed]
- _added_str_lst = ["+ {}".format(item)
- for item in _added]
+ _added_str_lst = ["+ {}".format(i) for i in _added]
_report[_new_path] = {
"type": "list",
"raw_values": [
@@ -287,7 +287,6 @@
))
return _report
-
def generate_model_report_tree(self):
"""Use two loaded models to generate comparison table with
values are groupped by YAML files
diff --git a/cfg_checker/modules/reclass/validator.py b/cfg_checker/modules/reclass/validator.py
index e7d7f06..8fc65a5 100644
--- a/cfg_checker/modules/reclass/validator.py
+++ b/cfg_checker/modules/reclass/validator.py
@@ -2,6 +2,7 @@
from cfg_checker.common import logger_cli
+
def basic_model_validation_by_path(path):
logger_cli.debug("\t...validating '{}' as a model".format(path))
_checks = []
@@ -20,7 +21,7 @@
_has_nodes = os.path.isdir(os.path.join(path, "nodes"))
logger_cli.debug("\t- has nodes? -> {}".format(_has_nodes))
_checks.append(_has_nodes)
-
+
logger_cli.debug("\t-> {}".format(
all(_checks)
))
diff --git a/cfg_checker/nodes.py b/cfg_checker/nodes.py
index 776c8b2..d518e75 100644
--- a/cfg_checker/nodes.py
+++ b/cfg_checker/nodes.py
@@ -1,12 +1,9 @@
-import json
import os
-import sys
+from copy import deepcopy
-from copy import deepcopy
-
-from cfg_checker.common import utils, const
-from cfg_checker.common import config, logger, logger_cli, pkg_dir
-from cfg_checker.common import salt_utils
+from cfg_checker.common import config, const
+from cfg_checker.common import logger, logger_cli, pkg_dir
+from cfg_checker.common import salt_utils, utils
node_tmpl = {
'role': '',
@@ -22,7 +19,7 @@
logger_cli.info("# Collecting nodes")
# simple salt rest client
self.salt = salt_utils.SaltRemote()
-
+
# Keys for all nodes
# this is not working in scope of 2016.8.3, will overide with list
logger_cli.debug("...collecting node names existing in the cloud")
@@ -36,18 +33,22 @@
self.node_keys = {
'minions': _keys['minions']
}
- except Exception as e:
+ except Exception:
_keys = None
self.node_keys = None
-
+
# List of minions with grains
_minions = self.salt.list_minions()
if _minions:
- logger_cli.info("-> api reported {} active minions".format(len(_minions)))
+ logger_cli.info(
+ "-> api reported {} active minions".format(len(_minions))
+ )
elif not self.node_keys:
# this is the last resort
_minions = config.load_nodes_list()
- logger_cli.info("-> {} nodes loaded from list file".format(len(_minions)))
+ logger_cli.info(
+ "-> {} nodes loaded from list file".format(len(_minions))
+ )
else:
_minions = self.node_keys['minions']
@@ -87,7 +88,7 @@
lambda nd: self.nodes[nd]['role'] == const.all_roles_map['cfg'],
self.nodes
)[0]
-
+
# OpenStack versions
self.mcp_release = self.salt.pillar_get(
self.master_node,
@@ -98,7 +99,6 @@
"_param:openstack_version"
)[self.master_node]
-
def skip_node(self, node):
# Add node to skip list
# Fro example if it is fails to comply with the rules
@@ -119,7 +119,9 @@
:return: no return value, data pulished internally
"""
- logger_cli.debug("...collecting node pillars for '{}'".format(pillar_path))
+ logger_cli.debug(
+ "...collecting node pillars for '{}'".format(pillar_path)
+ )
_result = self.salt.pillar_get(self.active_nodes_compound, pillar_path)
self.not_responded = []
for node, data in self.nodes.iteritems():
@@ -152,7 +154,7 @@
self.not_responded.append(node)
else:
_data[_pillar_keys[-1]] = _result[node]
-
+
def execute_script_on_active_nodes(self, script_filename, args=[]):
# Prepare script
_p = os.path.join(pkg_dir, 'scripts', script_filename)
@@ -162,12 +164,13 @@
config.salt_file_root, config.salt_scripts_folder
)
logger_cli.debug(
- "...Uploading script {} to master's file cache folder: '{}'".format(
+ "...Uploading script {} "
+ "to master's file cache folder: '{}'".format(
script_filename,
_storage_path
)
)
- _result = self.salt.mkdir("cfg01*", _storage_path)
+ self.salt.mkdir("cfg01*", _storage_path)
# Form cache, source and target path
_cache_path = os.path.join(_storage_path, script_filename)
_source_path = os.path.join(
@@ -182,11 +185,15 @@
)
logger_cli.debug("...creating file in cache '{}'".format(_cache_path))
- _result = self.salt.f_touch_master(_cache_path)
- _result = self.salt.f_append_master(_cache_path, _script)
+ self.salt.f_touch_master(_cache_path)
+ self.salt.f_append_master(_cache_path, _script)
# command salt to copy file to minions
- logger_cli.debug("...creating script target folder '{}'".format(_cache_path))
- _result = self.salt.mkdir(
+ logger_cli.debug(
+ "...creating script target folder '{}'".format(
+ _cache_path
+ )
+ )
+ self.salt.mkdir(
self.active_nodes_compound,
os.path.join(
'/root',
@@ -195,14 +202,15 @@
tgt_type="compound"
)
logger_cli.info("-> Running script to all active nodes")
- _result = self.salt.get_file(
+ logger.debug("... syncing file to nodes")
+ self.salt.get_file(
self.active_nodes_compound,
_source_path,
_target_path,
tgt_type="compound"
)
# execute pkg collecting script
- logger.debug("Running script to all nodes")
+ logger.debug("... running script")
# handle results for each node
_script_arguments = " ".join(args) if args else ""
self.not_responded = []
@@ -214,7 +222,7 @@
)
# all false returns means that there is no response
- self.not_responded = [_n for _n in _r.keys() if not _r[_n]]
+ self.not_responded = [_n for _n in _r.keys() if not _r[_n]]
return _r
def is_node_available(self, node, log=True):
@@ -228,4 +236,3 @@
return False
else:
return True
-
diff --git a/cfg_checker/reports/reporter.py b/cfg_checker/reports/reporter.py
index ec120f3..08015cf 100644
--- a/cfg_checker/reports/reporter.py
+++ b/cfg_checker/reports/reporter.py
@@ -1,12 +1,13 @@
import abc
-import jinja2
import os
-import six
import time
from cfg_checker.common import const
-from cfg_checker.common import logger, logger_cli
-from cfg_checker.helpers.console_utils import Progress
+from cfg_checker.common import logger_cli
+
+import jinja2
+
+import six
pkg_dir = os.path.dirname(__file__)
pkg_dir = os.path.join(pkg_dir, os.pardir, os.pardir)
@@ -161,7 +162,7 @@
# move names into separate place
data["names"] = data["diffs"].pop("diff_names")
data["tabs"] = data.pop("diffs")
-
+
# counters - mdl_diff
for _tab in data["tabs"].keys():
data['counters'][_tab] = len(data["tabs"][_tab]["diffs"].keys())
diff --git a/scripts/ifs_data.py b/scripts/ifs_data.py
index 7fbba28..e182fb1 100644
--- a/scripts/ifs_data.py
+++ b/scripts/ifs_data.py
@@ -1,7 +1,7 @@
-import re
-import sys
-import subprocess
import json
+import re
+import subprocess
+import sys
def shell(command):
@@ -116,7 +116,7 @@
_ifs[_if_name]['ipv4'][_ip] = {}
_ifs[_if_name]['ipv4'][_ip]['brd'] = _brd
_ifs[_if_name]['ipv4'][_ip]['other'] = _options
-
+
# Collect routes data and try to match it with network
# Compile regexp for detecting default route
_routes = {
diff --git a/scripts/pkg_versions.py b/scripts/pkg_versions.py
index be02aa8..66a46aa 100644
--- a/scripts/pkg_versions.py
+++ b/scripts/pkg_versions.py
@@ -1,7 +1,6 @@
-import sys
-import subprocess
import json
-
+import subprocess
+import sys
from multiprocessing.dummy import Pool
diff --git a/scripts/sniffer.py b/scripts/sniffer.py
index a9f9901..de18eb2 100644
--- a/scripts/sniffer.py
+++ b/scripts/sniffer.py
@@ -1,4 +1,4 @@
# This will be the basic sniffer script
# Its purpose is to sniff for a specific packet and return its headers-n-data
-pass
\ No newline at end of file
+pass
diff --git a/setup.py b/setup.py
index 2661496..91b1e80 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
import glob
import os
-from setuptools import setup, find_packages
+from setuptools import find_packages, setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()
@@ -9,7 +9,7 @@
DATA = [
('etc', [f for f in glob.glob(os.path.join('etc', '*'))]),
('templates', [f for f in glob.glob(os.path.join('templates', '*'))]),
-# ('res', [f for f in glob.glob(os.path.join('res', '*'))])
+ # ('res', [f for f in glob.glob(os.path.join('res', '*'))])
]
dependencies = [