Add 'reclass-create-inventory-context' command
diff --git a/reclass_tools/cli.py b/reclass_tools/cli.py
index 1bd80b7..becff4e 100644
--- a/reclass_tools/cli.py
+++ b/reclass_tools/cli.py
@@ -128,6 +128,7 @@
params = parser.parse_args(args)
vcp_node_names = reclass_models.vcp_list(domain=params.domain)
+ #print('\n'.join(sorted(vcp_node_names)))
print('\n'.join(sorted(('{0}.{1}'.format(name, domain) for name, domain in vcp_node_names))))
@@ -152,3 +153,26 @@
print(yaml.dump(current_underlay_context, default_flow_style=False))
+
+def render_dir(args=None):
+ try:
+ from reclass_tools import create_inventory
+ except ImportError:
+ print("Please run this tool on the salt-master node with installed 'reclass'")
+ return
+
+ parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter,
+                                     description="Render a cookiecutter-based template directory using several different context files")
+ parser.add_argument('--template-dir', '-t', dest='template_dir',
+                        help=('Cookiecutter-based template directory'))
+ parser.add_argument('--output-dir', '-o', dest='output_dir',
+ help=('Path to the directory where the rendered template will be placed'))
+ parser.add_argument('--context', '-c', dest='contexts',
+                        help=('Path(s) to one or more YAML or JSON context files used to render the template'),
+ nargs='+')
+
+ params = parser.parse_args(args)
+
+ create_inventory.render_dir(template_dir=params.template_dir, output_dir=params.output_dir, contexts=params.contexts)
+
+
diff --git a/reclass_tools/create_inventory.py b/reclass_tools/create_inventory.py
index 26ad89e..ce0d7a6 100644
--- a/reclass_tools/create_inventory.py
+++ b/reclass_tools/create_inventory.py
@@ -40,12 +40,17 @@
vcp_list = reclass_models.vcp_list(domain=domain, inventory=inventory)
reclass_storage = reclass_models.reclass_storage(domain=domain, inventory=inventory)
- current_underlay_context = {
- 'current_clusters': {
- }
- }
+ if domain is None:
+ raise Exception("Please specify a domain name from: \n{}".format('\n'.join(reclass_storage.keys())))
- for domain, storage_nodes in reclass_storage.items():
+ #current_underlay_context = {
+ # 'current_clusters': {
+ # }
+ #}
+
+ for storage_domain, storage_nodes in reclass_storage.items():
+ if storage_domain != domain:
+ continue
current_cluster_nodes = {}
for storage_node_name, storage_node in storage_nodes.items():
@@ -73,8 +78,14 @@
if reclass_key:
helpers.create_nested_key(current_cluster_nodes[inventory_node_name], path=key_path, value=reclass_key)
- current_underlay_context['current_clusters'][domain] = {
- 'nodes': current_cluster_nodes
+ #current_underlay_context['current_clusters'][domain] = {
+ # 'nodes': current_cluster_nodes
+ #}
+ current_underlay_context = {
+ 'cookiecutter': {
+ 'cluster_name': storage_domain,
+ 'nodes': current_cluster_nodes,
+ }
}
return current_underlay_context
@@ -106,7 +117,7 @@
# ..
-def render_environment_class():
+def render_dir(template_dir, output_dir, contexts):
+    """Cookiecutter enhancement to use several source JSON files
:param template_dir: directory with templates to render
@@ -126,15 +137,36 @@
#ipdb> output_dir
#'/root/my_new_deployment/'
- repo_dir = '/root/cookiecutter-templates/cluster_product/openstack'
+ print(template_dir)
+ print(output_dir)
+ print(contexts)
+ #return
+ #repo_dir = '/root/cookiecutter-templates/cluster_product/openstack'
overwrite_if_exists = True
- output_dir = '/root/my_new_deployment/'
- context = {'cookiecutter': {'openstack_telemetry_node02_hostname': 'mdb02' }}
+ #output_dir = '/root/my_new_deployment/'
+ #context = {'cookiecutter': {'openstack_telemetry_node02_hostname': 'mdb02' }}
+
+ merged_context = {}
+ for fcon in contexts:
+ if fcon.endswith('.yaml'):
+ context = helpers.yaml_read(fcon)
+ elif fcon.endswith('.json'):
+ context = helpers.json_read(fcon)
+ else:
+ print("Error: Please use YAML or JSON files for contexts")
+ return # should be exit 1
+
+
+ #merged_context.update(context)
+ #merged_context = dict(chain(merged_context.items(), context.items()))
+ merged_context = helpers.merge_nested_objects(merged_context, context)
+
+ #print(yaml.dump(merged_context, default_flow_style=False))
try:
generate.generate_files(
- repo_dir=repo_dir,
- context=context,
+ repo_dir=template_dir,
+ context=merged_context,
overwrite_if_exists=overwrite_if_exists,
output_dir=output_dir
)
@@ -146,7 +178,6 @@
context_str = yaml.dump(
undefined_err.context,
- indent=4,
default_flow_style=False
)
print('='*15 + ' Context: '+ '='*15 + '\n{}'.format(context_str) + '='*40)
diff --git a/reclass_tools/helpers.py b/reclass_tools/helpers.py
index 75e3185..322ac71 100644
--- a/reclass_tools/helpers.py
+++ b/reclass_tools/helpers.py
@@ -1,3 +1,7 @@
+import os
+import json
+import yaml
+
def get_nested_key(data, path=None):
if type(path) is not list:
@@ -42,3 +46,134 @@
path = path[:-1]
+def yaml_read(yaml_file):
+ if os.path.isfile(yaml_file):
+ with open(yaml_file, 'r') as f:
+            return yaml.safe_load(f)
+ else:
+ print("\'{}\' is not a file!".format(yaml_file))
+
+
+def json_read(yaml_file):
+ if os.path.isfile(yaml_file):
+ with open(yaml_file, 'r') as f:
+ return json.load(f)
+ else:
+ print("\'{}\' is not a file!".format(yaml_file))
+
+
+def merge_nested_objects(obj_1, obj_2):
+ """Merge two objects with optional key overwrites
+
+ Original : https://stackoverflow.com/a/17860173
+ - Merges dicts and lists
+ - If a dict key has the suffix '__overwrite__' and boolean value,
+ then the key is assumed as a special keyword for merging:
+ <key>__overwrite__: True # Overwrite the existing <key> content with <key> from obj_2
+ <key>__overwrite__: False # Keep the existing <key> content from obj_1
+
+
+ Case #1: Merge dicts and lists, overwrite other types with latest value
+
+ dict_a = {
+ 'host': '1.1.1.1',
+ 'ssh': {
+ 'login': 'user'
+ }
+ }
+
+ dict_b = {
+ 'host': '2.2.2.2',
+ 'ssh': {
+ 'password': 'pass'
+ }
+ }
+
+ print(merge_nested_objects(dict_a, dict_b))
+ {
+ 'host': '2.2.2.2',
+ 'ssh': {
+ 'login': 'user',
+ 'password': 'pass',
+ }
+ }
+
+ Case #2: Use <key>__overwrite__: True to remove previous key content
+
+ dict_a = {
+ 'host': '1.1.1.1'
+ 'ssh': {
+ 'login': 'user'
+ }
+ }
+
+ dict_b = {
+ 'ssh__overwrite__': True
+ 'ssh': {
+ 'password': 'pass'
+ }
+ }
+
+ print(merge_nested_objects(dict_a, dict_b))
+ {
+ 'host': '1.1.1.1',
+ 'ssh': {
+ 'password': 'pass',
+ }
+ }
+
+ Case #3: Use <key>__overwrite__: False to skip merging key if already exists
+
+ dict_a = {
+ 'host': '1.1.1.1'
+ 'ssh': {
+ 'login': 'user'
+ }
+ }
+
+ dict_b = {
+ 'host__overwrite__': False
+ 'host': '2.2.2.2'
+ 'ssh': {
+ 'login__overwrite__': False
+ 'login': 'new_user'
+ 'password': 'pass'
+ }
+ }
+
+ print(merge_nested_objects(dict_a, dict_b))
+ {
+ 'host': '1.1.1.1',
+ 'ssh': {
+ 'login': 'user',
+ 'password': 'pass'
+ }
+ }
+
+
+ """
+ # Merge two dicts
+ if isinstance(obj_1, dict) and isinstance(obj_2, dict):
+ result = {}
+        for key, value in obj_1.items():
+ if key not in obj_2:
+ result[key] = value
+ else:
+ overwrite_key = key + '__overwrite__'
+                if overwrite_key in obj_2 and obj_2[overwrite_key] is True:
+ result[key] = obj_2[key]
+                elif overwrite_key in obj_2 and obj_2[overwrite_key] is False:
+ result[key] = value
+ else:
+ result[key] = merge_nested_objects(value, obj_2[key])
+        for key, value in obj_2.items():
+ if key not in obj_1:
+ result[key] = value
+ return result
+
+ # Add two lists
+ if isinstance(obj_1, list) and isinstance(obj_2, list):
+ return obj_1 + obj_2
+
+ # Overwrite a value with new one
+ return obj_2
diff --git a/reclass_tools/walk_models.py b/reclass_tools/walk_models.py
index d57aef5..7017722 100644
--- a/reclass_tools/walk_models.py
+++ b/reclass_tools/walk_models.py
@@ -32,12 +32,12 @@
yield (log)
-def yaml_read(yaml_file):
- if os.path.isfile(yaml_file):
- with open(yaml_file, 'r') as f:
- return yaml.load(f)
- else:
- print("\'{}\' is not a file!".format(yaml_file))
+#def yaml_read(yaml_file):
+# if os.path.isfile(yaml_file):
+# with open(yaml_file, 'r') as f:
+# return yaml.load(f)
+# else:
+# print("\'{}\' is not a file!".format(yaml_file))
class OpenFile(object):
@@ -110,7 +110,7 @@
for path in paths:
for log in walkfiles(path, verbose):
if log.fname.endswith('.yml'):
- model = yaml_read(log.fname)
+ model = helpers.yaml_read(log.fname)
if model is not None:
# Collect all params from the models
_param = helpers.get_nested_key(model, ['parameters', '_param'])
@@ -140,7 +140,7 @@
for path in paths:
for fyml in walkfiles(path, verbose=verbose):
if fyml.fname.endswith('.yml'):
- model = yaml_read(fyml.fname)
+ model = helpers.yaml_read(fyml.fname)
if model is not None:
# Clear linux.network.interfaces