"""Model Comparer:
- yaml parser
- class tree comparison
"""
import itertools
# import json
import os
import yaml

import reporter
from ci_checker.common import utils
from ci_checker.common import base_config, logger, logger_cli, PKG_DIR


class ModelComparer(object):
    """Collection of functions to compare model data.
    """
    models = {}

    @staticmethod
    def load_yaml_class(fname):
        """Loads a yaml from the file and forms a tree item

        Arguments:
            fname {string} -- full path to the yaml file
        """
        _yaml = {}
        try:
            _size = 0
            with open(fname, 'r') as f:
                _yaml = yaml.load(f)
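                # NOTE: plain yaml.load() is the pre-PyYAML-5.1 style;
                # yaml.safe_load() would be the safer choice here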
                _size = f.tell()
            # TODO: do smth with the data
            if not _yaml:
                logger_cli.warning("WARN: empty file '{}'".format(fname))
                _yaml = {}
            else:
                logger.debug("...loaded YAML '{}' ({}b)".format(fname, _size))
            return _yaml
        except yaml.YAMLError as exc:
            logger_cli.error(exc)
            _error = exc
        except IOError as e:
            logger_cli.error(
                "Error loading file '{}': {}".format(fname, e.message)
            )
            _error = e
        raise Exception("CRITICAL: Failed to load YAML data: {}".format(
            _error
        ))

    def load_model_tree(self, name, root_path="/srv/salt/reclass"):
        """Walks the supplied path for YAML files and loads the tree

        Arguments:
            name {string} -- model name used as a key in self.models
            root_path {string} -- path to the model's root folder. Optional
        """
        logger_cli.info("Loading reclass tree from '{}'".format(root_path))
        # prepare the file tree to walk
        raw_tree = {}
        # Credits to Andrew Clark@MIT. Original code is here:
        # http://code.activestate.com/recipes/577879-create-a-nested-dictionary-from-oswalk/
        root_path = root_path.rstrip(os.sep)
        start = root_path.rfind(os.sep) + 1
        root_key = root_path.rsplit(os.sep, 1)[1]
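        # e.g. with the default root_path "/srv/salt/reclass",
        # 'start' indexes just past "/srv/salt/" and root_key is "reclass"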
        # Look Ma! I am walking the file tree with no recursion!
        for path, dirs, files in os.walk(root_path):
            # if this is a hidden folder, ignore it
            _folders_list = path[start:].split(os.sep)
            if any(item.startswith(".") for item in _folders_list):
                continue
            # cut absolute part of the path and split folder names
            folders = path[start:].split(os.sep)
            subdir = {}
            # create generator of files that are not hidden
            _subfiles = (file for file in files if not file.startswith("."))
            for _file in _subfiles:
                # use the file name as the node key; reclass files are '.yml'
                _subnode = _file
                # load all YAML class data into the tree
                subdir[_subnode] = self.load_yaml_class(
                    os.path.join(path, _file)
                )
                # Save original filepath, just in case
                subdir[_subnode]["_source"] = os.path.join(path[start:], _file)
            # creating dict structure out of folder list. Pure python magic
            parent = reduce(dict.get, folders[:-1], raw_tree)
            parent[folders[-1]] = subdir
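            # NOTE: reduce() descends raw_tree through the parent folders,
            # which earlier (top-down) os.walk() iterations already created;
            # in Python 3 this would need functools.reduce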
        # save it as a single data object
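        # (a nested dict keyed by folder and file names, with parsed YAML
        # content and a "_source" path at each file leaf)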
        self.models[name] = raw_tree[root_key]
        return True

    def generate_model_report_tree(self):
        """Use all loaded models to generate a comparison table with
        values grouped by YAML files
        """
        def find_changes(dict1, dict2, path=""):
            _report = {}
            for k in dict1.keys():
                _new_path = path + ":" + k
                if k == "_source":
                    continue
                if k not in dict2:
                    # no key in dict2
                    _report[_new_path] = [dict1[k], "N/A"]
                    logger_cli.info(
                        "{}: {}, {}".format(_new_path, dict1[k], "N/A")
                    )
                else:
                    if type(dict1[k]) is dict:
                        if path == "":
                            _new_path = k
                        _child_report = find_changes(
                            dict1[k],
                            dict2[k],
                            _new_path
                        )
                        _report.update(_child_report)
                    elif type(dict1[k]) is list:
                        # use ifilterfalse to compare lists of dicts
                        _removed = list(
                            itertools.ifilterfalse(
                                lambda x: x in dict2[k],
                                dict1[k]
                            )
                        )
                        _added = list(
                            itertools.ifilterfalse(
                                lambda x: x in dict1[k],
                                dict2[k]
                            )
                        )
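                        # ifilterfalse (filterfalse in Python 3) is used here
                        # instead of set difference because the list items
                        # can be unhashable dicts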
                        if _removed or _added:
                            _removed_str_lst = ["- {}".format(item)
                                                for item in _removed]
                            _added_str_lst = ["+ {}".format(item)
                                              for item in _added]
                            _report[_new_path] = [
                                dict1[k],
                                _removed_str_lst + _added_str_lst
                            ]
                            logger_cli.info(
                                "{}:\n"
                                "{} original items total".format(
                                    _new_path,
                                    len(dict1[k])
                                )
                            )
                            if _removed:
                                logger_cli.info(
                                    "{}".format('\n'.join(_removed_str_lst))
                                )
                            if _added:
                                logger_cli.info(
                                    "{}".format('\n'.join(_added_str_lst))
                                )
                    else:
                        if dict1[k] != dict2[k]:
                            _report[_new_path] = [dict1[k], dict2[k]]
                            logger_cli.info("{}: {}, {}".format(
                                _new_path,
                                dict1[k],
                                dict2[k]
                            ))
            return _report
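        # The returned report maps colon-separated key paths to a pair:
        # [value_in_first_model, value_in_second_model_or_diff_lines].
        # Keys present only in the second model are not reported, since
        # find_changes() iterates over the first model's keys only.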
        # tmp report for keys
        diff_report = find_changes(
            self.models["inspur_Aug"],
            self.models['inspur_Dec']
        )
        return diff_report


# temporary: run the parser as a standalone program
if __name__ == '__main__':
    mComparer = ModelComparer()
    mComparer.load_model_tree(
        'inspur_Aug',
        '/Users/savex/proj/inspur_hc/reclass_cmp/reclass-20180810'
    )
    mComparer.load_model_tree(
        'inspur_Dec',
        '/Users/savex/proj/inspur_hc/reclass_cmp/reclass-20181210'
    )
    diffs = mComparer.generate_model_report_tree()

    report = reporter.ReportToFile(
        reporter.HTMLModelCompare(),
        './mdl_diff.html'
    )
    report(mdl_diff=diffs)
    # with open("./gen_tree.json", "w+") as _out:
    #     _out.write(json.dumps(mComparer.generate_model_report_tree))