nodegenerator: Py3 compatibility
This change makes nodegenerator work under both Python 2 and
Python 3: dict.iteritems()/itervalues() and iterator .next() calls are
replaced with their six equivalents, the missing unicode builtin is
shimmed on Python 3, dict.values() is wrapped in list() where the
result is mutated afterwards, and YAML is loaded with yaml.safe_load().
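
For reference, a minimal sketch of the patterns applied here (it
assumes six and PyYAML are installed; the sample mapping, values and
variable names are illustrative only, not taken from nodegenerator):

    import sys

    import six
    import yaml

    if sys.version_info[0] == 3:
        unicode = lambda x: x  # Py3 has no unicode builtin; str is already text

    # safe_load parses plain data without constructing arbitrary Python objects.
    data = yaml.safe_load("{name: node01, count: 3}")

    # dict.iteritems()/itervalues() are gone in Python 3; the six helpers
    # return an efficient iterator on both major versions.
    for key, value in six.iteritems(data):
        print('%s=%s' % (unicode(key), value))

    # dict.values() returns a view in Python 3, so wrap it in list()
    # wherever the result is mutated afterwards.
    all_values = list(data.values())
    all_values.extend(['extra'])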
PROD-26064
Change-Id: I4841d91bc98212883c91a23bec18760271980ab6
diff --git a/reclass/nodegenerator.py b/reclass/nodegenerator.py
index 43c451d..ca69339 100644
--- a/reclass/nodegenerator.py
+++ b/reclass/nodegenerator.py
@@ -6,9 +6,13 @@
import os
from pprint import pprint as pp
import re
+import six
import sys
import yaml
+if sys.version_info[0] == 3:
+ unicode = lambda x: x
+
##
# A hack needed to save order of fields in output. The only reason it is here
@@ -77,7 +81,7 @@
elif isinstance(value, collections.Iterable):
return any(has_subst(x) for x in value)
elif isinstance(value, dict):
- return any(has_subst(k) or has_subst(v) for k, v in value.iteritems())
+ return any(has_subst(k) or has_subst(v) for k, v in six.iteritems(value))
return False
@@ -94,7 +98,7 @@
elif type(entity) is list:
return [subster(x) for x in entity]
elif isinstance(entity, dict):
- return dict((subster(k), subster(v)) for k, v in entity.iteritems())
+ return dict((subster(k), subster(v)) for k, v in six.iteritems(entity))
return entity
@@ -112,7 +116,7 @@
# TODO: remove side effects.
def update_dict(x, y, block_override=False):
- for key, value in y.iteritems():
+ for key, value in six.iteritems(y):
if key in x and x[key] != value:
if (type(x[key]) == type(value) and type(value) == str
and block_override):
@@ -143,6 +147,7 @@
[update_dict(result, e) for e in exps]
return result
+
external = {}
def get_configs(base, cluster):
configs_to_process, out = [], []
@@ -153,21 +158,21 @@
if fname == 'init.yml':
if contains(config, ITRAIT):
with open(config, 'r') as f:
- external.update(get_system(yaml.load(f)))
+ external.update(get_system(yaml.safe_load(f)))
configs_to_process.append(config)
# NOTE: this is a special case left here for the time being.
elif fname == 'nodes.yml': # TODO: refactor it.
nodes_definition = config
else:
with open(config, 'r') as f:
- data = yaml.load(f)
+ data = yaml.safe_load(f)
if data is None:
continue
if get_params(data):
configs_to_process.append(config)
for config in configs_to_process:
with open(config, 'r') as f:
- data = yaml.load(f)
+ data = yaml.safe_load(f)
data['src'] = [config]
out.append(data)
return out, nodes_definition
@@ -180,7 +185,7 @@
def innerread(x, params, content):
with open(x, 'r') as f:
- data = yaml.load(f)
+ data = yaml.safe_load(f)
params = {} if params is None else params
data = {} if data is None else data
update_dict(params, get_params(data), True)
@@ -199,7 +204,7 @@
for x in map(fixname, storage_cnames):
node_content = {}
data = innerread(x, params, node_content)
- for nodename, nodecontent in node_content.iteritems():
+ for nodename, nodecontent in six.iteritems(node_content):
if out[nodename].get('src') is not None:
out[nodename]['src'].append(x)
else:
@@ -269,7 +274,7 @@
def update_count(string, value, padding):
return string.replace('<<count>>', str(value).zfill(padding))
out = {}
- for k, v in from_node.iteritems():
+ for k, v in six.iteritems(from_node):
if type(v) is str:
outv = update_count(v, count, count_padding)
elif type(v) is list:
@@ -278,13 +283,13 @@
# TODO: refactor.
elif type(v) is dict:
outv = copy.deepcopy(v)
- for ik, iv in v.iteritems():
+ for ik, iv in six.iteritems(v):
if type(iv) is dict:
# apparently key is always 'value'
- for iiv in iv.itervalues():
+ for iiv in six.itervalues(iv):
ranges = re.findall(_iprange, iiv)
if ranges:
- addr = ip_ranges[ranges[0]].next()
+ addr = six.next(ip_ranges[ranges[0]])
outv[ik] = str(addr.exploded)
out[k] = outv
return out
@@ -295,7 +300,7 @@
local_node['params'].update(repeat['params'])
# Process ranges. Ranges look like the only repeatable entity, so
# it is not generalized yet. NOTE: check other models.
- ip_ranges = repeat['ip_ranges'].iteritems()
+ ip_ranges = six.iteritems(repeat['ip_ranges'])
ip_ranges = dict((ip_rn, get_ip(ip_r)) for ip_rn, ip_r in ip_ranges)
# Generate counted nodes.
for count in range(repeat['start'], repeat['start'] + repeat['count']):
@@ -309,11 +314,11 @@
return 'repeat' in node.keys()
with open(nodes_definition, 'r') as f:
- nodes_init = yaml.load(f)['parameters']['reclass']['storage']['node']
+ nodes_init = yaml.safe_load(f)['parameters']['reclass']['storage']['node']
basic_nodes = copy.deepcopy(basic_nodes) # preserve state.
basic_nodes_extended, extra_nodes = {}, []
- for nname, node in basic_nodes.iteritems():
+ for nname, node in six.iteritems(basic_nodes):
if needs_expansion(node):
continue
extension = nodes_init.get(nname, {}).get('classes', [])
@@ -327,7 +332,7 @@
for node in nodes_init.values():
if needs_expansion(node):
extra_nodes.extend(do_count_expansion(node))
- all_nodes = basic_nodes_extended.values()
+ all_nodes = list(basic_nodes_extended.values())
all_nodes.extend(extra_nodes)
return all_nodes