import json
import os
import sys
import ipaddress

from copy import deepcopy

import reporter

from cfg_checker.common import utils, const
from cfg_checker.common import config, logger, logger_cli, pkg_dir
from cfg_checker.common import salt_utils
from cfg_checker.nodes import SaltNodes, node_tmpl


class NetworkChecker(SaltNodes):
    def collect_network_info(self):
        """
        Collects info on the network using the ifs_data.py script

        :return: none
        """
        logger_cli.info("### Collecting network data")
        _result = self.execute_script("ifs_data.py", args=["json"])

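        # _result maps each node (minion) name to the raw stdout of the
        # ifs_data.py run; nodes missing from it produced no usable output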
        for key in self.nodes.keys():
            # salt returns a lot of data per node, so process nodes one by one
            if key in _result:
                _text = _result[key]
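                # the script output may be prefixed with extra text,
                # so parse the JSON starting from the first '{'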
                _dict = json.loads(_text[_text.find('{'):])
                self.nodes[key]['networks'] = _dict
            else:
                self.nodes[key]['networks'] = {}
            logger_cli.debug("# {} has {} networks".format(
                key,
                len(self.nodes[key]['networks'].keys())
            ))
        logger_cli.info("-> Done collecting networks data")

        # dump collected data to speed up coding
        # with open('dump.json', 'w+') as ff:
        #     ff.write(json.dumps(self.nodes))

        # load dump data
        # with open('dump.json', 'r') as ff:
        #     _nodes = json.loads(ff.read())

        logger_cli.info("### Building network tree")
        # group interface addresses by the IPv4 subnets they belong to
        _all_nets = {}
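        # _all_nets: IPv4Network -> hostname -> {'text': <summary line>,
        #                                        'if_data': <interface data>}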
        for host, node_data in self.nodes.items():
            for net_name, net_data in node_data['networks'].items():
                # get ips and calculate subnets
                if net_name == 'lo':
                    continue
                _ip4s = net_data['ipv4']
                for _ip_str in _ip4s.keys():
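                    # _ip_str is assumed to be in 'address/prefix' form,
                    # so IPv4Interface can derive both .ip and .network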
                    _if = ipaddress.IPv4Interface(_ip_str)
                    if not any(_if.ip in net for net in _all_nets.keys()):
                        # the IP does not fit into any existing network
                        if _if.network not in _all_nets.keys():
                            _all_nets[_if.network] = {}

                        _all_nets[_if.network][host] = {}
                        _all_nets[_if.network][host]['text'] = \
                            "{0:30}: {1:19} {2:5} {3:4}".format(
                                net_name,
                                str(_if.ip),
                                net_data['mtu'],
                                net_data['state']
                            )
                        _all_nets[_if.network][host]['if_data'] = net_data
                    else:
                        # the IP fits into one of the existing networks
                        for _net in _all_nets.keys():
                            if _if.ip in _net:
                                if host not in _all_nets[_net]:
                                    _all_nets[_net][host] = {}
                                _all_nets[_net][host]['text'] = \
                                    "{0:30}: {1:19} {2:5} {3:4}".format(
                                        net_name,
                                        str(_if.ip),
                                        net_data['mtu'],
                                        net_data['state']
                                    )
                                _all_nets[_net][host]['if_data'] = \
                                    net_data

        # save collected info
        self.all_networks = _all_nets

        # Get networks from reclass
        # TODO:

        return

    def print_network_report(self):
        """
        Create text report for CLI

        :return: none
        """
        for network, nodes in self.all_networks.items():
            logger_cli.info("-> {}".format(str(network)))
            names = sorted(nodes.keys())

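            # one line per host: short hostname followed by the
            # interface summary built in collect_network_info()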
            for hostname in names:
                logger_cli.info(
                    "\t{0:10} {1}".format(
                        hostname.split('.')[0],
                        nodes[hostname]['text']
                    )
                )

    def create_html_report(self, filename):
        """
        Create a static HTML report showing a network schema-like view

        :return: none
        """
        logger_cli.info("### Generating report to '{}'".format(filename))
        _report = reporter.ReportToFile(
            reporter.HTMLNetworkReport(),
            filename
        )
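        # report payload: per-node data (including the collected
        # 'networks') and an empty 'diffs' section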
        _report({
            "nodes": self.nodes,
            "diffs": {}
        })
        logger_cli.info("-> Done")


if __name__ == '__main__':
    # init connection to salt and collect minion data
    cl = NetworkChecker()

    # collect network data from the nodes
    cl.collect_network_info()

    # diff installed and candidates
    # cl.collect_packages()

    # report it
    cl.create_html_report("./pkg_versions.html")

    sys.exit(0)