Merge pull request #1 from epcim/pr/fix/fix_raise_UndefinedVariableError

fix raise of UndefinedVariableError, improve err output from Salt
diff --git a/MANIFEST.in b/MANIFEST.in
index 1c1accc..268de42 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -7,6 +7,7 @@
 prune reclass/datatypes/tests
 prune reclass/storage/tests
 prune reclass/utils/tests
+prune reclass/values/tests
 # Exclude "source only" content
 prune doc
 prune examples
diff --git a/reclass/__init__.py b/reclass/__init__.py
index 7cd6c30..83b962f 100644
--- a/reclass/__init__.py
+++ b/reclass/__init__.py
@@ -15,8 +15,10 @@
     storage_class = StorageBackendLoader(storage_type).load()
     return MemcacheProxy(storage_class(nodes_uri, classes_uri, **kwargs))
 
+def get_path_mangler(storage_type,**kwargs):
+    return StorageBackendLoader(storage_type).path_mangler()
 
-def output(data, fmt, pretty_print=False):
+def output(data, fmt, pretty_print=False, no_refs=False):
     output_class = OutputLoader(fmt).load()
     outputter = output_class()
-    return outputter.dump(data, pretty_print=pretty_print)
+    return outputter.dump(data, pretty_print=pretty_print, no_refs=no_refs)
diff --git a/reclass/adapters/ansible.py b/reclass/adapters/ansible.py
index cbf5f17..6794a0d 100755
--- a/reclass/adapters/ansible.py
+++ b/reclass/adapters/ansible.py
@@ -27,6 +27,7 @@
         ansible_dir = os.path.abspath(os.path.dirname(sys.argv[0]))
 
         defaults = {'inventory_base_uri': ansible_dir,
+                    'no_refs' : False,
                     'pretty_print' : True,
                     'output' : 'json',
                     'applications_postfix': '_hosts'
@@ -54,10 +55,9 @@
                               add_options_cb=add_ansible_options_group,
                               defaults=defaults)
 
-        storage = get_storage(options.storage_type, options.nodes_uri,
-                              options.classes_uri)
+        storage = get_storage(options.storage_type, options.nodes_uri, options.classes_uri)
         class_mappings = defaults.get('class_mappings')
-        reclass = Core(storage, class_mappings)
+        reclass = Core(storage, class_mappings, default_environment=None)
 
         if options.mode == MODE_NODEINFO:
             data = reclass.nodeinfo(options.hostname)
@@ -81,7 +81,7 @@
 
             data = groups
 
-        print output(data, options.output, options.pretty_print)
+        print output(data, options.output, options.pretty_print, options.no_refs)
 
     except ReclassException, e:
         e.exit_with_message(sys.stderr)
diff --git a/reclass/adapters/salt.py b/reclass/adapters/salt.py
index 1b45823..beca9e5 100755
--- a/reclass/adapters/salt.py
+++ b/reclass/adapters/salt.py
@@ -9,11 +9,10 @@
 
 import os, sys, posix
 
-from reclass import get_storage, output
+from reclass import get_storage, output, get_path_mangler
 from reclass.core import Core
 from reclass.errors import ReclassException
-from reclass.config import find_and_read_configfile, get_options, \
-        path_mangler
+from reclass.config import find_and_read_configfile, get_options
 from reclass.constants import MODE_NODEINFO
 from reclass.defaults import *
 from reclass.version import *
@@ -26,14 +25,13 @@
                class_mappings=None,
                propagate_pillar_data_to_reclass=False):
 
-    nodes_uri, classes_uri = path_mangler(inventory_base_uri,
-                                          nodes_uri, classes_uri)
-    storage = get_storage(storage_type, nodes_uri, classes_uri,
-                          default_environment='base')
+    path_mangler = get_path_mangler(storage_type)
+    nodes_uri, classes_uri = path_mangler(inventory_base_uri, nodes_uri, classes_uri)
+    storage = get_storage(storage_type, nodes_uri, classes_uri)
     input_data = None
     if propagate_pillar_data_to_reclass:
         input_data = pillar
-    reclass = Core(storage, class_mappings, input_data=input_data)
+    reclass = Core(storage, class_mappings, input_data=input_data, default_environment='base')
 
     data = reclass.nodeinfo(minion_id)
     params = data.get('parameters', {})
@@ -47,14 +45,12 @@
 
 def top(minion_id, storage_type=OPT_STORAGE_TYPE,
         inventory_base_uri=OPT_INVENTORY_BASE_URI, nodes_uri=OPT_NODES_URI,
-        classes_uri=OPT_CLASSES_URI,
-        class_mappings=None):
+        classes_uri=OPT_CLASSES_URI, class_mappings=None):
 
-    nodes_uri, classes_uri = path_mangler(inventory_base_uri,
-                                          nodes_uri, classes_uri)
-    storage = get_storage(storage_type, nodes_uri, classes_uri,
-                          default_environment='base')
-    reclass = Core(storage, class_mappings, input_data=None)
+    path_mangler = get_path_mangler(storage_type)
+    nodes_uri, classes_uri = path_mangler(inventory_base_uri, nodes_uri, classes_uri)
+    storage = get_storage(storage_type, nodes_uri, classes_uri)
+    reclass = Core(storage, class_mappings, input_data=None, default_environment='base')
 
     # if the minion_id is not None, then return just the applications for the
     # specific minion, otherwise return the entire top data (which we need for
@@ -81,6 +77,7 @@
     try:
         inventory_dir = os.path.abspath(os.path.dirname(sys.argv[0]))
         defaults = {'pretty_print' : True,
+                    'no_refs' : False,
                     'output' : 'yaml',
                     'inventory_base_uri': inventory_dir
                    }
@@ -111,7 +108,7 @@
                        classes_uri=options.classes_uri,
                        class_mappings=class_mappings)
 
-        print output(data, options.output, options.pretty_print)
+        print output(data, options.output, options.pretty_print, options.no_refs)
 
     except ReclassException, e:
         e.exit_with_message(sys.stderr)
diff --git a/reclass/cli.py b/reclass/cli.py
index 5666e16..15df811 100644
--- a/reclass/cli.py
+++ b/reclass/cli.py
@@ -19,25 +19,23 @@
 
 def main():
     try:
-        defaults = {'pretty_print' : OPT_PRETTY_PRINT,
+        defaults = {'no_refs' : OPT_NO_REFS,
+                    'pretty_print' : OPT_PRETTY_PRINT,
                     'output' : OPT_OUTPUT
                    }
         defaults.update(find_and_read_configfile())
-        options = get_options(RECLASS_NAME, VERSION, DESCRIPTION,
-                              defaults=defaults)
 
-        storage = get_storage(options.storage_type, options.nodes_uri,
-                              options.classes_uri, default_environment='base')
+        options = get_options(RECLASS_NAME, VERSION, DESCRIPTION, defaults=defaults)
+        storage = get_storage(options.storage_type, options.nodes_uri, options.classes_uri)
         class_mappings = defaults.get('class_mappings')
-        reclass = Core(storage, class_mappings)
+        reclass = Core(storage, class_mappings, default_environment='base')
 
         if options.mode == MODE_NODEINFO:
             data = reclass.nodeinfo(options.nodename)
-
         else:
             data = reclass.inventory()
 
-        print output(data, options.output, options.pretty_print)
+        print output(data, options.output, options.pretty_print, options.no_refs)
 
     except ReclassException, e:
         e.exit_with_message(sys.stderr)
diff --git a/reclass/config.py b/reclass/config.py
index 17d0dc6..2653586 100644
--- a/reclass/config.py
+++ b/reclass/config.py
@@ -11,7 +11,8 @@
 
 import errors
 from defaults import *
-from constants import MODE_NODEINFO, MODE_INVENTORY
+from constants import MODE_NODEINFO, MODE_INVENTORY
+from reclass import get_path_mangler
 
 def make_db_options_group(parser, defaults={}):
     ret = optparse.OptionGroup(parser, 'Database options',
@@ -20,15 +21,14 @@
                    default=defaults.get('storage_type', OPT_STORAGE_TYPE),
                    help='the type of storage backend to use [%default]')
     ret.add_option('-b', '--inventory-base-uri', dest='inventory_base_uri',
-                   default=defaults.get('inventory_base_uri',
-                                        OPT_INVENTORY_BASE_URI),
+                   default=defaults.get('inventory_base_uri', OPT_INVENTORY_BASE_URI),
                    help='the base URI to prepend to nodes and classes [%default]'),
     ret.add_option('-u', '--nodes-uri', dest='nodes_uri',
                    default=defaults.get('nodes_uri', OPT_NODES_URI),
                    help='the URI to the nodes storage [%default]'),
     ret.add_option('-c', '--classes-uri', dest='classes_uri',
                    default=defaults.get('classes_uri', OPT_CLASSES_URI),
-                   help='the URI to the classes storage [%default]')
+                   help='the URI to the classes storage [%default]'),
     return ret
 
 
@@ -38,10 +38,12 @@
     ret.add_option('-o', '--output', dest='output',
                    default=defaults.get('output', OPT_OUTPUT),
                    help='output format (yaml or json) [%default]')
-    ret.add_option('-y', '--pretty-print', dest='pretty_print',
-                   action="store_true",
+    ret.add_option('-y', '--pretty-print', dest='pretty_print', action="store_true",
                    default=defaults.get('pretty_print', OPT_PRETTY_PRINT),
                    help='try to make the output prettier [%default]')
+    ret.add_option('-r', '--no-refs', dest='no_refs', action="store_true",
+                   default=defaults.get('no_refs', OPT_NO_REFS),
+                   help='output all key values; do not use yaml references [%default]')
     return ret
 
 
@@ -128,30 +130,6 @@
     return parser, option_checker
 
 
-def path_mangler(inventory_base_uri, nodes_uri, classes_uri):
-
-    if inventory_base_uri is None:
-        # if inventory_base is not given, default to current directory
-        inventory_base_uri = os.getcwd()
-
-    nodes_uri = nodes_uri or 'nodes'
-    classes_uri = classes_uri or 'classes'
-
-    def _path_mangler_inner(path):
-        ret = os.path.join(inventory_base_uri, path)
-        ret = os.path.expanduser(ret)
-        return os.path.abspath(ret)
-
-    n, c = map(_path_mangler_inner, (nodes_uri, classes_uri))
-    if n == c:
-        raise errors.DuplicateUriError(n, c)
-    common = os.path.commonprefix((n, c))
-    if common == n or common == c:
-        raise errors.UriOverlapError(n, c)
-
-    return n, c
-
-
 def get_options(name, version, description,
                             inventory_shortopt='-i',
                             inventory_longopt='--inventory',
@@ -175,9 +153,8 @@
     options, args = parser.parse_args()
     checker(options, args)
 
-    options.nodes_uri, options.classes_uri = \
-            path_mangler(options.inventory_base_uri, options.nodes_uri,
-                         options.classes_uri)
+    path_mangler = get_path_mangler(options.storage_type)
+    options.nodes_uri, options.classes_uri = path_mangler(options.inventory_base_uri, options.nodes_uri, options.classes_uri)
 
     return options
 
diff --git a/reclass/core.py b/reclass/core.py
index 76bd0a8..3cf3721 100644
--- a/reclass/core.py
+++ b/reclass/core.py
@@ -7,21 +7,25 @@
 # Released under the terms of the Artistic Licence 2.0
 #
 
+import copy
 import time
-#import types
 import re
-#import sys
 import fnmatch
 import shlex
-from reclass.datatypes import Entity, Classes, Parameters
+import string
+import yaml
+from reclass.output.yaml_outputter import ExplicitDumper
+from reclass.datatypes import Entity, Classes, Parameters, Exports
 from reclass.errors import MappingFormatError, ClassNotFound
+from reclass.defaults import AUTOMATIC_RECLASS_PARAMETERS
 
 class Core(object):
 
-    def __init__(self, storage, class_mappings, input_data=None):
+    def __init__(self, storage, class_mappings, input_data=None, default_environment=None):
         self._storage = storage
         self._class_mappings = class_mappings
         self._input_data = input_data
+        self._default_environment = default_environment
 
     @staticmethod
     def _get_timestamp():
@@ -81,23 +85,26 @@
         p = Parameters(self._input_data)
         return Entity(parameters=p, name='input data')
 
-    def _recurse_entity(self, entity, merge_base=None, seen=None, nodename=None):
+    def _recurse_entity(self, entity, merge_base=None, seen=None, nodename=None, environment=None):
         if seen is None:
             seen = {}
 
+        if environment is None:
+            environment = self._default_environment
+
         if merge_base is None:
             merge_base = Entity(name='empty (@{0})'.format(nodename))
 
         for klass in entity.classes.as_list():
             if klass not in seen:
                 try:
-                    class_entity = self._storage.get_class(klass)
+                    class_entity = self._storage.get_class(klass, environment)
                 except ClassNotFound, e:
                     e.set_nodename(nodename)
                     raise e
 
                 descent = self._recurse_entity(class_entity, seen=seen,
-                                               nodename=nodename)
+                                               nodename=nodename, environment=environment)
                 # on every iteration, we merge the result of the recursive
                 # descent into what we have so far…
                 merge_base.merge(descent)
@@ -109,17 +116,41 @@
         merge_base.merge(entity)
         return merge_base
 
-    def _nodeinfo(self, nodename):
+    def _get_automatic_parameters(self, nodename, environment):
+        if AUTOMATIC_RECLASS_PARAMETERS:
+            return Parameters({ '_reclass_': { 'name': { 'full': nodename, 'short': string.split(nodename, '.')[0] },
+                                               'environment': environment } })
+        else:
+            return Parameters()
+
+    def _get_inventory(self):
+        inventory = {}
+        for nodename in self._storage.enumerate_nodes():
+            node = self._node_entity(nodename)
+            node.interpolate_exports()
+            inventory[nodename] = node.exports.as_dict()
+        return inventory
+
+    def _node_entity(self, nodename):
         node_entity = self._storage.get_node(nodename)
+        if node_entity.environment == None:
+            node_entity.environment = self._default_environment
         base_entity = Entity(name='base')
         base_entity.merge(self._get_class_mappings_entity(node_entity.name))
         base_entity.merge(self._get_input_data_entity())
+        base_entity.merge_parameters(self._get_automatic_parameters(nodename, node_entity.environment))
         seen = {}
-        merge_base = self._recurse_entity(base_entity, seen=seen,
-                                          nodename=base_entity.name)
-        ret = self._recurse_entity(node_entity, merge_base, seen=seen,
-                                   nodename=node_entity.name)
-        ret.interpolate()
+        merge_base = self._recurse_entity(base_entity, seen=seen, nodename=base_entity.name,
+                                          environment=node_entity.environment)
+        return self._recurse_entity(node_entity, merge_base, seen=seen, nodename=node_entity.name,
+                                    environment=node_entity.environment)
+
+    def _nodeinfo(self, nodename, inventory):
+        ret = self._node_entity(nodename)
+        ret.initialise_interpolation()
+        if ret.parameters.has_inv_query() and inventory is None:
+            inventory = self._get_inventory()
+        ret.interpolate(nodename, inventory)
         return ret
 
     def _nodeinfo_as_dict(self, nodename, entity):
@@ -133,12 +164,18 @@
         return ret
 
     def nodeinfo(self, nodename):
-        return self._nodeinfo_as_dict(nodename, self._nodeinfo(nodename))
+        return self._nodeinfo_as_dict(nodename, self._nodeinfo(nodename, None))
 
     def inventory(self):
+        query_nodes = set()
         entities = {}
+        inventory = self._get_inventory()
         for n in self._storage.enumerate_nodes():
-            entities[n] = self._nodeinfo(n)
+            entities[n] = self._nodeinfo(n, inventory)
+            if entities[n].parameters.has_inv_query():
+                query_nodes.add(n)
+        for n in query_nodes:
+            entities[n] = self._nodeinfo(n, inventory)
 
         nodes = {}
         applications = {}
diff --git a/reclass/datatypes/__init__.py b/reclass/datatypes/__init__.py
index 20f7551..b506de0 100644
--- a/reclass/datatypes/__init__.py
+++ b/reclass/datatypes/__init__.py
@@ -9,4 +9,5 @@
 from applications import Applications
 from classes import Classes
 from entity import Entity
+from exports import Exports
 from parameters import Parameters
diff --git a/reclass/datatypes/entity.py b/reclass/datatypes/entity.py
index 573a28c..8ad66c0 100644
--- a/reclass/datatypes/entity.py
+++ b/reclass/datatypes/entity.py
@@ -8,6 +8,7 @@
 #
 from classes import Classes
 from applications import Applications
+from exports import Exports
 from parameters import Parameters
 
 class Entity(object):
@@ -17,23 +18,34 @@
     uri of the Entity that is being merged.
     '''
     def __init__(self, classes=None, applications=None, parameters=None,
-                 uri=None, name=None, environment=None):
+                 exports=None, uri=None, name=None, environment=None):
         if classes is None: classes = Classes()
         self._set_classes(classes)
         if applications is None: applications = Applications()
         self._set_applications(applications)
         if parameters is None: parameters = Parameters()
+        if exports is None: exports = Exports()
         self._set_parameters(parameters)
+        self._set_exports(exports)
         self._uri = uri or ''
         self._name = name or ''
-        self._environment = environment or ''
+        self._environment = environment
 
     name = property(lambda s: s._name)
+    short_name = property(lambda s: s._short_name)
     uri = property(lambda s: s._uri)
-    environment = property(lambda s: s._environment)
     classes = property(lambda s: s._classes)
     applications = property(lambda s: s._applications)
     parameters = property(lambda s: s._parameters)
+    exports = property(lambda s: s._exports)
+
+    @property
+    def environment(self):
+        return self._environment
+
+    @environment.setter
+    def environment(self, value):
+        self._environment = value
 
     def _set_classes(self, classes):
         if not isinstance(classes, Classes):
@@ -53,22 +65,43 @@
                             'instance of type %s' % type(parameters))
         self._parameters = parameters
 
+    def _set_exports(self, exports):
+        if not isinstance(exports, Exports):
+            raise TypeError('Entity.exports cannot be set to '\
+                            'instance of type %s' % type(exports))
+        self._exports = exports
+
     def merge(self, other):
         self._classes.merge_unique(other._classes)
         self._applications.merge_unique(other._applications)
         self._parameters.merge(other._parameters)
+        self._exports.merge(other._exports)
         self._name = other.name
         self._uri = other.uri
-        self._environment = other.environment
+        if other.environment != None:
+            self._environment = other.environment
 
-    def interpolate(self):
-        self._parameters.interpolate()
+    def merge_parameters(self, params):
+        self._parameters.merge(params)
+
+    def interpolate(self, nodename, inventory):
+        self._parameters.interpolate(inventory)
+        self.interpolate_exports()
+
+    def initialise_interpolation(self):
+        self._parameters.initialise_interpolation()
+        self._exports.initialise_interpolation()
+
+    def interpolate_exports(self):
+        self.initialise_interpolation()
+        self._exports.interpolate_from_external(self._parameters)
 
     def __eq__(self, other):
         return isinstance(other, type(self)) \
                 and self._applications == other._applications \
                 and self._classes == other._classes \
                 and self._parameters == other._parameters \
+                and self._exports == other._exports \
                 and self._name == other._name \
                 and self._uri == other._uri
 
@@ -76,16 +109,15 @@
         return not self.__eq__(other)
 
     def __repr__(self):
-        return "%s(%r, %r, %r, uri=%r, name=%r)" % (self.__class__.__name__,
-                                                    self.classes,
-                                                    self.applications,
-                                                    self.parameters,
-                                                    self.uri,
-                                                    self.name)
+        return "%s(%r, %r, %r, %r, uri=%r, name=%r, environment=%r)" % (
+                   self.__class__.__name__, self.classes, self.applications,
+                   self.parameters, self.exports, self.uri, self.name,
+                   self.environment)
 
     def as_dict(self):
         return {'classes': self._classes.as_list(),
                 'applications': self._applications.as_list(),
                 'parameters': self._parameters.as_dict(),
+                'exports': self._exports.as_dict(),
                 'environment': self._environment
                }
diff --git a/reclass/datatypes/exports.py b/reclass/datatypes/exports.py
new file mode 100644
index 0000000..33095ae
--- /dev/null
+++ b/reclass/datatypes/exports.py
@@ -0,0 +1,46 @@
+#
+# -*- coding: utf-8 -*-
+#
+# This file is part of reclass (http://github.com/madduck/reclass)
+#
+from parameters import Parameters
+from reclass.errors import UndefinedVariableError
+
+class Exports(Parameters):
+
+    def __init__(self, mapping=None, delimiter=None, options=None):
+        super(Exports, self).__init__(mapping, delimiter, options)
+
+    def __repr__(self):
+        return '%s(%r, %r)' % (self.__class__.__name__, self._base,
+                               self.delimiter)
+
+    def delete_key(self, key):
+        self._base.pop(key, None)
+        self._unrendered.pop(key, None)
+
+    def overwrite(self, other):
+        overdict = {'~' + key: value for key, value in other.iteritems()}
+        self.merge(overdict)
+
+    def interpolate_from_external(self, external):
+        self._initialise_interpolate()
+        external._initialise_interpolate()
+        while len(self._unrendered) > 0:
+            path, v = self._unrendered.iteritems().next()
+            value = path.get_value(self._base)
+            external._interpolate_references(path, value, None)
+            new = self._interpolate_render_from_external(external._base, path, value)
+            path.set_value(self._base, new)
+            del self._unrendered[path]
+
+    def _interpolate_render_from_external(self, context, path, value):
+        try:
+            new = value.render(context, None, self._options)
+        except UndefinedVariableError as e:
+            raise UndefinedVariableError(e.var, path)
+        if isinstance(new, dict):
+            self._render_simple_dict(new, path)
+        elif isinstance(new, list):
+            self._render_simple_list(new, path)
+        return new
diff --git a/reclass/datatypes/parameters.py b/reclass/datatypes/parameters.py
index a39324e..cc0c74c 100644
--- a/reclass/datatypes/parameters.py
+++ b/reclass/datatypes/parameters.py
@@ -6,12 +6,17 @@
 # Copyright © 2007–14 martin f. krafft <madduck@madduck.net>
 # Released under the terms of the Artistic Licence 2.0
 #
+
+import copy
+import sys
 import types
-from reclass.defaults import PARAMETER_INTERPOLATION_DELIMITER,\
-                             PARAMETER_DICT_KEY_OVERRIDE_PREFIX
+from collections import namedtuple
+from reclass.defaults import *
 from reclass.utils.dictpath import DictPath
-from reclass.utils.refvalue import RefValue
-from reclass.errors import InfiniteRecursionError, UndefinedVariableError
+from reclass.values.mergeoptions import MergeOptions
+from reclass.values.value import Value
+from reclass.values.valuelist import ValueList
+from reclass.errors import InfiniteRecursionError, UndefinedVariableError, InterpolationError
 
 class Parameters(object):
     '''
@@ -39,16 +44,23 @@
     DEFAULT_PATH_DELIMITER = PARAMETER_INTERPOLATION_DELIMITER
     DICT_KEY_OVERRIDE_PREFIX = PARAMETER_DICT_KEY_OVERRIDE_PREFIX
 
-    def __init__(self, mapping=None, delimiter=None):
+    def __init__(self, mapping=None, delimiter=None, options=None):
         if delimiter is None:
             delimiter = Parameters.DEFAULT_PATH_DELIMITER
+        if options is None:
+            options = MergeOptions()
         self._delimiter = delimiter
         self._base = {}
-        self._occurrences = {}
+        self._unrendered = None
+        self._escapes_handled = {}
+        self._has_inv_query = False
+        self._options = options
+        self._keep_overrides = False
         if mapping is not None:
-            # we initialise by merging, otherwise the list of references might
-            # not be updated
-            self.merge(mapping, initmerge=True)
+            # we initialise by merging
+            self._keep_overrides = True
+            self.merge(mapping)
+            self._keep_overrides = False
 
     delimiter = property(lambda self: self._delimiter)
 
@@ -57,7 +69,7 @@
 
     def __repr__(self):
         return '%s(%r, %r)' % (self.__class__.__name__, self._base,
-                               self.delimiter)
+                               self._delimiter)
 
     def __eq__(self, other):
         return isinstance(other, type(self)) \
@@ -67,60 +79,46 @@
     def __ne__(self, other):
         return not self.__eq__(other)
 
+    def has_inv_query(self):
+        return self._has_inv_query
+
     def as_dict(self):
         return self._base.copy()
 
-    def _update_scalar(self, cur, new, path):
-        if isinstance(cur, RefValue) and path in self._occurrences:
-            # If the current value already holds a RefValue, we better forget
-            # the occurrence, or else interpolate() will later overwrite
-            # unconditionally. If the new value is a RefValue, the occurrence
-            # will be added again further on
-            del self._occurrences[path]
-
-        if self.delimiter is None or not isinstance(new, (types.StringTypes,
-                                                          RefValue)):
-            # either there is no delimiter defined (and hence no references
-            # are being used), or the new value is not a string (and hence
-            # cannot be turned into a RefValue), and not a RefValue. We can
-            # shortcut and just return the new scalar
-            return new
-
-        elif isinstance(new, RefValue):
-            # the new value is (already) a RefValue, so we need not touch it
-            # at all
-            ret = new
-
+    def _wrap_value(self, value):
+        if isinstance(value, dict):
+            return self._wrap_dict(value)
+        elif isinstance(value, list):
+            return self._wrap_list(value)
+        elif isinstance(value, (Value, ValueList)):
+            return value
         else:
-            # the new value is a string, let's see if it contains references,
-            # by way of wrapping it in a RefValue and querying the result
-            ret = RefValue(new, self.delimiter)
-            if not ret.has_references():
-                # do not replace with RefValue instance if there are no
-                # references, i.e. discard the RefValue in ret, just return
-                # the new value
-                return new
+            return Value(value, self._delimiter)
 
-        # So we now have a RefValue. Let's, keep a reference to the instance
-        # we just created, in a dict indexed by the dictionary path, instead
-        # of just a list. The keys are required to resolve dependencies during
-        # interpolation
-        self._occurrences[path] = ret
-        return ret
+    def _wrap_list(self, source):
+        return [ self._wrap_value(v) for v in source ]
 
-    def _extend_list(self, cur, new, path):
-        if isinstance(cur, list):
-            ret = cur
-            offset = len(cur)
+    def _wrap_dict(self, source):
+        return { k: self._wrap_value(v) for k, v in source.iteritems() }
+
+    def _update_value(self, cur, new, path):
+        if isinstance(cur, Value):
+            values = ValueList(cur)
+        elif isinstance(cur, ValueList):
+            values = cur
         else:
-            ret = [cur]
-            offset = 1
+            values = ValueList(Value(cur))
 
-        for i in xrange(len(new)):
-            ret.append(self._merge_recurse(None, new[i], path.new_subpath(offset + i)))
-        return ret
+        if isinstance(new, Value):
+            values.append(new)
+        elif isinstance(new, ValueList):
+            values.extend(new)
+        else:
+            values.append(Value(new))
 
-    def _merge_dict(self, cur, new, path, initmerge):
+        return values
+
+    def _merge_dict(self, cur, new, path):
         """Merge a dictionary with another dictionary.
 
         Iterate over keys in new. If this is not an initialization merge and
@@ -139,31 +137,16 @@
 
         """
 
-        if isinstance(cur, dict):
-            ret = cur
-        else:
-            # nothing sensible to do
-            raise TypeError('Cannot merge dict into {0} '
-                            'objects'.format(type(cur)))
-
-        if self.delimiter is None:
-            # a delimiter of None indicates that there is no value
-            # processing to be done, and since there is no current
-            # value, we do not need to walk the new dictionary:
-            ret.update(new)
-            return ret
-
+        ret = cur
         ovrprfx = Parameters.DICT_KEY_OVERRIDE_PREFIX
-
         for key, newvalue in new.iteritems():
-            if key.startswith(ovrprfx) and not initmerge:
+            if key.startswith(ovrprfx) and not self._keep_overrides:
                 ret[key.lstrip(ovrprfx)] = newvalue
             else:
-                ret[key] = self._merge_recurse(ret.get(key), newvalue,
-                                            path.new_subpath(key), initmerge)
+                ret[key] = self._merge_recurse(ret.get(key), newvalue, path.new_subpath(key))
         return ret
 
-    def _merge_recurse(self, cur, new, path=None, initmerge=False):
+    def _merge_recurse(self, cur, new, path=None):
         """Merge a parameter with another parameter.
 
         Iterate over keys in new. Call _merge_dict, _extend_list, or
@@ -182,23 +165,15 @@
 
         """
 
-        if path is None:
-            path = DictPath(self.delimiter)
 
-        if isinstance(new, dict):
-            if cur is None:
-                cur = {}
-            return self._merge_dict(cur, new, path, initmerge)
-
-        elif isinstance(new, list):
-            if cur is None:
-                cur = []
-            return self._extend_list(cur, new, path)
-
+        if cur is None:
+            return new
+        elif isinstance(new, dict) and isinstance(cur, dict):
+            return self._merge_dict(cur, new, path)
         else:
-            return self._update_scalar(cur, new, path)
+            return self._update_value(cur, new, path)
 
-    def merge(self, other, initmerge=False):
+    def merge(self, other):
         """Merge function (public edition).
 
         Call _merge_recurse on self with either another Parameter object or a
@@ -212,65 +187,125 @@
 
         """
 
+        self._unrendered = None
         if isinstance(other, dict):
-            self._base = self._merge_recurse(self._base, other,
-                                             None, initmerge)
-
+            wrapped = self._wrap_dict(other)
         elif isinstance(other, self.__class__):
-            self._base = self._merge_recurse(self._base, other._base,
-                                             None, initmerge)
-
+            wrapped = self._wrap_dict(other._base)
         else:
             raise TypeError('Cannot merge %s objects into %s' % (type(other),
                             self.__class__.__name__))
+        self._base = self._merge_recurse(self._base, wrapped, DictPath(self._delimiter))
 
-    def has_unresolved_refs(self):
-        return len(self._occurrences) > 0
+    def _render_simple_container(self, container, key, value, path):
+            if isinstance(value, ValueList):
+                if value.is_complex():
+                    self._unrendered[path.new_subpath(key)] = True
+                    if value.has_inv_query():
+                        self._has_inv_query = True
+                    return
+                else:
+                    value = value.merge(self._options)
+            if isinstance(value, Value) and value.is_container():
+                value = value.contents()
+            if isinstance(value, dict):
+                self._render_simple_dict(value, path.new_subpath(key))
+                container[key] = value
+            elif isinstance(value, list):
+                self._render_simple_list(value, path.new_subpath(key))
+                container[key] = value
+            elif isinstance(value, Value):
+                if value.is_complex():
+                    self._unrendered[path.new_subpath(key)] = True
+                    if value.has_inv_query():
+                        self._has_inv_query = True
+                else:
+                    container[key] = value.render(None, None, self._options)
 
-    def interpolate(self):
-        while self.has_unresolved_refs():
+    def _render_simple_dict(self, dictionary, path):
+        for key, value in dictionary.iteritems():
+            self._render_simple_container(dictionary, key, value, path)
+
+    def _render_simple_list(self, item_list, path):
+        for n, value in enumerate(item_list):
+            self._render_simple_container(item_list, n, value, path)
+
+    def interpolate(self, inventory=None):
+        self._initialise_interpolate()
+        while len(self._unrendered) > 0:
             # we could use a view here, but this is simple enough:
             # _interpolate_inner removes references from the refs hash after
             # processing them, so we cannot just iterate the dict
-            path, refvalue = self._occurrences.iteritems().next()
-            self._interpolate_inner(path, refvalue)
+            path, v = self._unrendered.iteritems().next()
+            self._interpolate_inner(path, inventory)
 
-    def _interpolate_inner(self, path, refvalue):
-        self._occurrences[path] = True  # mark as seen
-        for ref in refvalue.get_references():
-            path_from_ref = DictPath(self.delimiter, ref)
-            try:
-                refvalue_inner = self._occurrences[path_from_ref]
+    def initialise_interpolation(self):
+        self._unrendered = None
+        self._initialise_interpolate()
 
-                # If there is no reference, then this will throw a KeyError,
-                # look further down where this is caught and execution passed
-                # to the next iteration of the loop
-                #
-                # If we get here, then the ref references another parameter,
-                # requiring us to recurse, dereferencing first those refs that
-                # are most used and are thus at the leaves of the dependency
-                # tree.
+    def _initialise_interpolate(self):
+        if self._unrendered is None:
+            self._unrendered = {}
+            self._has_inv_query = False
+            self._render_simple_dict(self._base, DictPath(self._delimiter))
 
-                if refvalue_inner is True:
-                    # every call to _interpolate_inner replaces the value of
-                    # the saved occurrences of a reference with True.
-                    # Therefore, if we encounter True instead of a refvalue,
-                    # it means that we have already processed it and are now
-                    # faced with a cyclical reference.
-                    raise InfiniteRecursionError(path, ref)
-                self._interpolate_inner(path_from_ref, refvalue_inner)
+    def _interpolate_inner(self, path, inventory):
+        value = path.get_value(self._base)
+        if not isinstance(value, (Value, ValueList)):
+            # lists and dicts are only deep-copied when merged together, so a
+            # value held inside a referenced list or dict may already have been
+            # rendered by an earlier call to _interpolate_inner
+            del self._unrendered[path]
+            return
+        self._unrendered[path] = False
+        self._interpolate_references(path, value, inventory)
+        new = self._interpolate_render_value(path, value, inventory)
+        path.set_value(self._base, new)
+        del self._unrendered[path]
 
-            except KeyError as e:
-                # not actually an error, but we are done resolving all
-                # dependencies of the current ref, so move on
-                continue
-
+    def _interpolate_render_value(self, path, value, inventory):
         try:
-            new = refvalue.render(self._base)
-            path.set_value(self._base, new)
-
-            # finally, remove the reference from the occurrences cache
-            del self._occurrences[path]
+            new = value.render(self._base, inventory, self._options)
         except UndefinedVariableError as e:
             raise UndefinedVariableError(e.var, path)
 
+        if isinstance(new, dict):
+            self._render_simple_dict(new, path)
+        elif isinstance(new, list):
+            self._render_simple_list(new, path)
+        return new
+
+    def _interpolate_references(self, path, value, inventory):
+        all_refs = False
+        while not all_refs:
+            for ref in value.get_references():
+                path_from_ref = DictPath(self._delimiter, ref)
+
+                if path_from_ref in self._unrendered:
+                    if self._unrendered[path_from_ref] is False:
+                        # _interpolate_inner sets self._unrendered[path] to
+                        # False while it is processing that path, and deletes
+                        # the entry once rendering completes. Finding False
+                        # here means the path is still mid-processing, i.e. we
+                        # have looped back to it: a cyclical reference.
+                        raise InfiniteRecursionError(path, ref)
+                    else:
+                        self._interpolate_inner(path_from_ref, inventory)
+                else:
+                    # ensure ancestor keys are already dereferenced
+                    ancestor = DictPath(self._delimiter)
+                    for k in path_from_ref.key_parts():
+                        ancestor = ancestor.new_subpath(k)
+                        if ancestor in self._unrendered:
+                            self._interpolate_inner(ancestor, inventory)
+            if value.allRefs():
+                all_refs = True
+            else:
+                # not all references in the value could be resolved previously,
+                # so recalculate them against the current context; if that
+                # yields additional references, the enclosing loop processes
+                # them. If the count is unchanged, we are stuck: raise an error.
+                old = len(value.get_references())
+                value.assembleRefs(self._base)
+                if old == len(value.get_references()):
+                    raise InterpolationError('Bad reference count, path:' + repr(path))
diff --git a/reclass/datatypes/tests/test_entity.py b/reclass/datatypes/tests/test_entity.py
index 17ec9e8..8f693f1 100644
--- a/reclass/datatypes/tests/test_entity.py
+++ b/reclass/datatypes/tests/test_entity.py
@@ -6,7 +6,7 @@
 # Copyright © 2007–14 martin f. krafft <madduck@madduck.net>
 # Released under the terms of the Artistic Licence 2.0
 #
-from reclass.datatypes import Entity, Classes, Parameters, Applications
+from reclass.datatypes import Entity, Classes, Parameters, Applications, Exports
 import unittest
 try:
     import unittest.mock as mock
@@ -14,12 +14,12 @@
     import mock
 
 @mock.patch.multiple('reclass.datatypes', autospec=True, Classes=mock.DEFAULT,
-                     Applications=mock.DEFAULT,
-                     Parameters=mock.DEFAULT)
+                     Applications=mock.DEFAULT, Parameters=mock.DEFAULT,
+                     Exports=mock.DEFAULT)
 class TestEntity(unittest.TestCase):
 
-    def _make_instances(self, Classes, Applications, Parameters):
-        return Classes(), Applications(), Parameters()
+    def _make_instances(self, Classes, Applications, Parameters, Exports):
+        return Classes(), Applications(), Parameters(), Exports()
 
     def test_constructor_default(self, **mocks):
         # Actually test the real objects by calling the default constructor,
@@ -30,19 +30,22 @@
         self.assertIsInstance(e.classes, Classes)
         self.assertIsInstance(e.applications, Applications)
         self.assertIsInstance(e.parameters, Parameters)
+        self.assertIsInstance(e.exports, Exports)
 
     def test_constructor_empty(self, **types):
         instances = self._make_instances(**types)
         e = Entity(*instances)
         self.assertEqual(e.name, '')
         self.assertEqual(e.uri, '')
-        cl, al, pl = [getattr(i, '__len__') for i in instances]
+        cl, al, pl, ex = [getattr(i, '__len__') for i in instances]
         self.assertEqual(len(e.classes), cl.return_value)
         cl.assert_called_once_with()
         self.assertEqual(len(e.applications), al.return_value)
         al.assert_called_once_with()
         self.assertEqual(len(e.parameters), pl.return_value)
         pl.assert_called_once_with()
+        self.assertEqual(len(e.exports), pl.return_value)
+        ex.assert_called_once_with()
 
     def test_constructor_empty_named(self, **types):
         name = 'empty'
@@ -147,10 +150,50 @@
         comp['classes'] = instances[0].as_list()
         comp['applications'] = instances[1].as_list()
         comp['parameters'] = instances[2].as_dict()
+        comp['exports'] = instances[3].as_dict()
         comp['environment'] = 'test'
         d = entity.as_dict()
         self.assertDictEqual(d, comp)
 
+class TestEntityNoMock(unittest.TestCase):
+
+    def test_exports_with_refs(self):
+        inventory = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}}
+        node3_exports = Exports({'a': '${a}', 'b': '${b}'})
+        node3_parameters = Parameters({'name': 'node3', 'a': '${c}', 'b': 5})
+        node3_parameters.merge({'c': 3})
+        node3_entity = Entity(None, None, node3_parameters, node3_exports)
+        node3_entity.interpolate_exports()
+        inventory['node3'] = node3_entity.exports.as_dict()
+        r = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}, 'node3': {'a': 3, 'b': 5}}
+        self.assertDictEqual(inventory, r)
+
+    def test_reference_to_an_export(self):
+        inventory = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}}
+        node3_exports = Exports({'a': '${a}', 'b': '${b}'})
+        node3_parameters = Parameters({'name': 'node3', 'ref': '${exp}', 'a': '${c}', 'b': 5})
+        node3_parameters.merge({'c': 3, 'exp': '$[ exports:a ]'})
+        node3_entity = Entity(None, None, node3_parameters, node3_exports)
+        node3_entity.interpolate_exports()
+        inventory['node3'] = node3_entity.exports.as_dict()
+        node3_entity.interpolate('node3', inventory)
+        res_inv = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}, 'node3': {'a': 3, 'b': 5}}
+        res_params = {'a': 3, 'c': 3, 'b': 5, 'name': 'node3', 'exp': {'node1': 1, 'node3': 3, 'node2': 3}, 'ref': {'node1': 1, 'node3': 3, 'node2': 3}}
+        self.assertDictEqual(node3_parameters.as_dict(), res_params)
+        self.assertDictEqual(inventory, res_inv)
+
+    def test_exports_with_nested_references(self):
+        inventory = {'node1': {'alpha': {'a': 1, 'b': 2}}, 'node2': {'alpha': {'a': 3, 'b': 4}}}
+        node3_exports = Exports({'alpha': '${alpha}'})
+        node3_parameters = Parameters({'name': 'node3', 'alpha': {'a': '${one}', 'b': '${two}'}, 'beta': '$[ exports:alpha ]', 'one': '111', 'two': '${three}', 'three': '123'})
+        node3_entity = Entity(None, None, node3_parameters, node3_exports)
+        res_params = {'beta': {'node1': {'a': 1, 'b': 2}, 'node3': {'a': '111', 'b': '123'}, 'node2': {'a': 3, 'b': 4}}, 'name': 'node3', 'alpha': {'a': '111', 'b': '123'}, 'three': '123', 'two': '123', 'one': '111'}
+        res_inv = {'node1': {'alpha': {'a': 1, 'b': 2}}, 'node2': {'alpha': {'a': 3, 'b': 4}}, 'node3': {'alpha': {'a': '111', 'b': '123'}}}
+        node3_entity.interpolate_exports()
+        inventory['node3'] = node3_entity.exports.as_dict()
+        node3_entity.interpolate('node3', inventory)
+        self.assertDictEqual(node3_parameters.as_dict(), res_params)
+        self.assertDictEqual(inventory, res_inv)
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/reclass/datatypes/tests/test_exports.py b/reclass/datatypes/tests/test_exports.py
new file mode 100644
index 0000000..e8c1c7c
--- /dev/null
+++ b/reclass/datatypes/tests/test_exports.py
@@ -0,0 +1,52 @@
+#
+# -*- coding: utf-8 -*-
+#
+# This file is part of reclass (http://github.com/madduck/reclass)
+#
+
+from reclass.datatypes import Exports, Parameters
+import unittest
+try:
+    import unittest.mock as mock
+except ImportError:
+    import mock
+
+class TestExportsNoMock(unittest.TestCase):
+
+    def test_overwrite_method(self):
+        e = Exports({'alpha': { 'one': 1, 'two': 2}})
+        d = {'alpha': { 'three': 3, 'four': 4}}
+        e.overwrite(d)
+        e.initialise_interpolation()
+        self.assertEqual(e.as_dict(), d)
+
+    def test_value_expr_exports(self):
+        e = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}}
+        p = Parameters({'exp': '$[ exports:a ]'})
+        r = {'exp': {'node1': 1, 'node2': 3}}
+        p.interpolate(e)
+        self.assertEqual(p.as_dict(), r)
+
+    def test_if_expr_exports(self):
+        e = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}}
+        p = Parameters({'exp': '$[ exports:a if exports:b == 4 ]'})
+        r = {'exp': {'node2': 3}}
+        p.interpolate(e)
+        self.assertEqual(p.as_dict(), r)
+
+    def test_if_expr_exports_with_refs(self):
+        e = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}}
+        p = Parameters({'exp': '$[ exports:a if exports:b == self:test_value ]', 'test_value': 2})
+        r = {'exp': {'node1': 1}, 'test_value': 2}
+        p.interpolate(e)
+        self.assertEqual(p.as_dict(), r)
+
+    def test_list_if_expr_exports(self):
+        e = {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 3}, 'node3': {'a': 3, 'b': 2}}
+        p = Parameters({'exp': '$[ if exports:b == 2 ]'})
+        r = {'exp': ['node1', 'node3']}
+        p.interpolate(e)
+        self.assertEqual(p.as_dict(), r)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/reclass/datatypes/tests/test_parameters.py b/reclass/datatypes/tests/test_parameters.py
index 5100639..376922e 100644
--- a/reclass/datatypes/tests/test_parameters.py
+++ b/reclass/datatypes/tests/test_parameters.py
@@ -7,8 +7,9 @@
 # Released under the terms of the Artistic Licence 2.0
 #
 from reclass.datatypes import Parameters
-from reclass.defaults import PARAMETER_INTERPOLATION_SENTINELS
+from reclass.defaults import REFERENCE_SENTINELS, ESCAPE_CHARACTER
 from reclass.errors import InfiniteRecursionError
+from reclass.values.mergeoptions import MergeOptions
 import unittest
 try:
     import unittest.mock as mock
@@ -25,6 +26,8 @@
         p._base = mock.MagicMock(spec_set=dict, wraps=base)
         p._base.__repr__ = mock.MagicMock(autospec=dict.__repr__,
                                           return_value=repr(base))
+        p._base.__getitem__.side_effect = base.__getitem__
+        p._base.__setitem__.side_effect = base.__setitem__
         return p, p._base
 
     def test_len_empty(self):
@@ -114,6 +117,7 @@
 
     def test_get_dict(self):
         p, b = self._construct_mocked_params(SIMPLE)
+        p.initialise_interpolation()
         self.assertDictEqual(p.as_dict(), SIMPLE)
 
     def test_merge_scalars(self):
@@ -121,6 +125,7 @@
         mergee = {'five':5,'four':4,'None':None,'tuple':(1,2,3)}
         p2, b2 = self._construct_mocked_params(mergee)
         p1.merge(p2)
+        p1.initialise_interpolation()
         for key, value in mergee.iteritems():
             # check that each key, value in mergee resulted in a get call and
             # a __setitem__ call against b1 (the merge target)
@@ -134,12 +139,14 @@
         p1.interpolate()
         self.assertEqual(p1.as_dict()['b'], mock.sentinel.goal)
 
+
 class TestParametersNoMock(unittest.TestCase):
 
     def test_merge_scalars(self):
         p = Parameters(SIMPLE)
         mergee = {'five':5,'four':4,'None':None,'tuple':(1,2,3)}
         p.merge(mergee)
+        p.initialise_interpolation()
         goal = SIMPLE.copy()
         goal.update(mergee)
         self.assertDictEqual(p.as_dict(), goal)
@@ -148,6 +155,7 @@
         p = Parameters(SIMPLE)
         mergee = {'two':5,'four':4,'three':None,'one':(1,2,3)}
         p.merge(mergee)
+        p.initialise_interpolation()
         goal = SIMPLE.copy()
         goal.update(mergee)
         self.assertDictEqual(p.as_dict(), goal)
@@ -158,24 +166,32 @@
         p1 = Parameters(dict(list=l1[:]))
         p2 = Parameters(dict(list=l2))
         p1.merge(p2)
+        p1.initialise_interpolation()
         self.assertListEqual(p1.as_dict()['list'], l1+l2)
 
     def test_merge_list_into_scalar(self):
         l = ['foo', 1, 2]
-        p1 = Parameters(dict(key=l[0]))
+        options = MergeOptions()
+        options.allow_list_over_scalar = True
+        p1 = Parameters(dict(key=l[0]), options=options)
         p1.merge(Parameters(dict(key=l[1:])))
+        p1.initialise_interpolation()
         self.assertListEqual(p1.as_dict()['key'], l)
 
     def test_merge_scalar_over_list(self):
         l = ['foo', 1, 2]
-        p1 = Parameters(dict(key=l[:2]))
+        options = MergeOptions()
+        options.allow_scalar_over_list = True
+        p1 = Parameters(dict(key=l[:2]), options=options)
         p1.merge(Parameters(dict(key=l[2])))
+        p1.initialise_interpolation()
         self.assertEqual(p1.as_dict()['key'], l[2])
 
     def test_merge_dicts(self):
         mergee = {'five':5,'four':4,'None':None,'tuple':(1,2,3)}
         p = Parameters(dict(dict=SIMPLE))
         p.merge(Parameters(dict(dict=mergee)))
+        p.initialise_interpolation()
         goal = SIMPLE.copy()
         goal.update(mergee)
         self.assertDictEqual(p.as_dict(), dict(dict=goal))
@@ -184,6 +200,7 @@
         mergee = {'two':5,'four':4,'three':None,'one':(1,2,3)}
         p = Parameters(dict(dict=SIMPLE))
         p.merge(Parameters(dict(dict=mergee)))
+        p.initialise_interpolation()
         goal = SIMPLE.copy()
         goal.update(mergee)
         self.assertDictEqual(p.as_dict(), dict(dict=goal))
@@ -198,22 +215,27 @@
                 'two': ['gamma']}
         p = Parameters(dict(dict=base))
         p.merge(Parameters(dict(dict=mergee)))
+        p.initialise_interpolation()
         self.assertDictEqual(p.as_dict(), dict(dict=goal))
 
     def test_merge_dict_into_scalar(self):
         p = Parameters(dict(base='foo'))
         with self.assertRaises(TypeError):
             p.merge(Parameters(dict(base=SIMPLE)))
+            p.interpolate()
 
     def test_merge_scalar_over_dict(self):
-        p = Parameters(dict(base=SIMPLE))
+        options = MergeOptions()
+        options.allow_scalar_over_dict = True
+        p = Parameters(dict(base=SIMPLE), options=options)
         mergee = {'base':'foo'}
         p.merge(Parameters(mergee))
+        p.initialise_interpolation()
         self.assertDictEqual(p.as_dict(), mergee)
 
     def test_interpolate_single(self):
         v = 42
-        d = {'foo': 'bar'.join(PARAMETER_INTERPOLATION_SENTINELS),
+        d = {'foo': 'bar'.join(REFERENCE_SENTINELS),
              'bar': v}
         p = Parameters(d)
         p.interpolate()
@@ -221,7 +243,7 @@
 
     def test_interpolate_multiple(self):
         v = '42'
-        d = {'foo': 'bar'.join(PARAMETER_INTERPOLATION_SENTINELS) + 'meep'.join(PARAMETER_INTERPOLATION_SENTINELS),
+        d = {'foo': 'bar'.join(REFERENCE_SENTINELS) + 'meep'.join(REFERENCE_SENTINELS),
              'bar': v[0],
              'meep': v[1]}
         p = Parameters(d)
@@ -230,8 +252,8 @@
 
     def test_interpolate_multilevel(self):
         v = 42
-        d = {'foo': 'bar'.join(PARAMETER_INTERPOLATION_SENTINELS),
-             'bar': 'meep'.join(PARAMETER_INTERPOLATION_SENTINELS),
+        d = {'foo': 'bar'.join(REFERENCE_SENTINELS),
+             'bar': 'meep'.join(REFERENCE_SENTINELS),
              'meep': v}
         p = Parameters(d)
         p.interpolate()
@@ -239,7 +261,7 @@
 
     def test_interpolate_list(self):
         l = [41,42,43]
-        d = {'foo': 'bar'.join(PARAMETER_INTERPOLATION_SENTINELS),
+        d = {'foo': 'bar'.join(REFERENCE_SENTINELS),
              'bar': l}
         p = Parameters(d)
         p.interpolate()
@@ -247,11 +269,232 @@
 
     def test_interpolate_infrecursion(self):
         v = 42
-        d = {'foo': 'bar'.join(PARAMETER_INTERPOLATION_SENTINELS),
-             'bar': 'foo'.join(PARAMETER_INTERPOLATION_SENTINELS)}
+        d = {'foo': 'bar'.join(REFERENCE_SENTINELS),
+             'bar': 'foo'.join(REFERENCE_SENTINELS)}
         p = Parameters(d)
         with self.assertRaises(InfiniteRecursionError):
             p.interpolate()
 
+    def test_nested_references(self):
+        d = {'a': '${${z}}', 'b': 2, 'z': 'b'}
+        r = {'a': 2, 'b': 2, 'z': 'b'}
+        p = Parameters(d)
+        p.interpolate()
+        self.assertEqual(p.as_dict(), r)
+
+    def test_nested_deep_references(self):
+        d = {'one': { 'a': 1, 'b': '${one:${one:c}}', 'c': 'a' } }
+        r = {'one': { 'a': 1, 'b': 1, 'c': 'a'} }
+        p = Parameters(d)
+        p.interpolate()
+        self.assertEqual(p.as_dict(), r)
+
+    def test_stray_occurrence_overwrites_during_interpolation(self):
+        p1 = Parameters({'r' : 1, 'b': '${r}'})
+        p2 = Parameters({'b' : 2})
+        p1.merge(p2)
+        p1.interpolate()
+        self.assertEqual(p1.as_dict()['b'], 2)
+
+    def test_referenced_dict_deep_overwrite(self):
+        p1 = Parameters({'alpha': {'one': {'a': 1, 'b': 2} } })
+        p2 = Parameters({'beta': '${alpha}'})
+        p3 = Parameters({'alpha': {'one': {'c': 3, 'd': 4} },
+                         'beta':  {'one': {'a': 99} } })
+        r = {'alpha': {'one': {'a':1, 'b': 2, 'c': 3, 'd':4} },
+             'beta': {'one': {'a':99, 'b': 2, 'c': 3, 'd':4} } }
+        p1.merge(p2)
+        p1.merge(p3)
+        p1.interpolate()
+        self.assertEqual(p1.as_dict(), r)
+
+    def test_complex_reference_overwriting(self):
+        p1 = Parameters({'one': 'abc_123_${two}_${three}', 'two': 'XYZ', 'four': 4})
+        p2 = Parameters({'one': 'QWERTY_${three}_${four}', 'three': '999'})
+        r = {'one': 'QWERTY_999_4', 'two': 'XYZ', 'three': '999', 'four': 4}
+        p1.merge(p2)
+        p1.interpolate()
+        self.assertEqual(p1.as_dict(), r)
+
+    def test_nested_reference_with_overwriting(self):
+        p1 = Parameters({'one': {'a': 1, 'b': 2, 'z': 'a'},
+                         'two': '${one:${one:z}}' })
+        p2 = Parameters({'one': {'z': 'b'} })
+        r = {'one': {'a': 1, 'b':2, 'z': 'b'}, 'two': 2}
+        p1.merge(p2)
+        p1.interpolate()
+        self.assertEqual(p1.as_dict(), r)
+
+    def test_merge_referenced_lists(self):
+        p1 = Parameters({'one': [ 1, 2, 3 ], 'two': [ 4, 5, 6 ], 'three': '${one}'})
+        p2 = Parameters({'three': '${two}'})
+        r = {'one': [ 1, 2, 3 ], 'two': [ 4, 5, 6], 'three': [ 1, 2, 3, 4, 5, 6 ]}
+        p1.merge(p2)
+        p1.interpolate()
+        self.assertEqual(p1.as_dict(), r)
+
+    def test_merge_referenced_dicts(self):
+        p1 = Parameters({'one': {'a': 1, 'b': 2}, 'two': {'c': 3, 'd': 4}, 'three': '${one}'})
+        p2 = Parameters({'three': '${two}'})
+        r = {'one': {'a': 1, 'b': 2}, 'two': {'c': 3, 'd': 4}, 'three': {'a': 1, 'b': 2, 'c': 3, 'd': 4}}
+        p1.merge(p2)
+        p1.interpolate()
+        self.assertEqual(p1.as_dict(), r)
+
+    def test_deep_refs_in_referenced_dicts(self):
+        p = Parameters({'A': '${C:a}', 'B': {'a': 1, 'b': 2}, 'C': '${B}'})
+        r = {'A': 1, 'B': {'a': 1, 'b': 2}, 'C': {'a': 1, 'b': 2}}
+        p.interpolate()
+        self.assertEqual(p.as_dict(), r)
+
+    def test_overwrite_none(self):
+        p1 = Parameters({'A': None, 'B': None, 'C': None, 'D': None, 'E': None, 'F': None})
+        p2 = Parameters({'A': 'abc', 'B': [1, 2, 3], 'C': {'a': 'aaa', 'b': 'bbb'}, 'D': '${A}', 'E': '${B}', 'F': '${C}'})
+        r = {'A': 'abc', 'B': [1, 2, 3], 'C': {'a': 'aaa', 'b': 'bbb'}, 'D': 'abc', 'E': [1, 2, 3], 'F': {'a': 'aaa', 'b': 'bbb'}}
+        p1.merge(p2)
+        p1.interpolate()
+        self.assertEqual(p1.as_dict(), r)
+
+    def test_interpolate_escaping(self):
+        v = 'bar'.join(REFERENCE_SENTINELS)
+        d = {'foo': ESCAPE_CHARACTER + 'bar'.join(REFERENCE_SENTINELS),
+             'bar': 'unused'}
+        p = Parameters(d)
+        p.initialise_interpolation()
+        self.assertEqual(p.as_dict()['foo'], v)
+
+    def test_interpolate_double_escaping(self):
+        v = ESCAPE_CHARACTER + 'meep'
+        d = {'foo': ESCAPE_CHARACTER + ESCAPE_CHARACTER + 'bar'.join(REFERENCE_SENTINELS),
+             'bar': 'meep'}
+        p = Parameters(d)
+        p.interpolate()
+        self.assertEqual(p.as_dict()['foo'], v)
+
+    def test_interpolate_escaping_backwards_compatibility(self):
+        """In all following cases, escaping should not happen and the escape character
+        needs to be printed as-is, to ensure backwards compatibility to older versions."""
+        v = ' '.join([
+            # Escape character followed by unescapable character
+            '1', ESCAPE_CHARACTER,
+            # Escape character followed by escape character
+            '2', ESCAPE_CHARACTER + ESCAPE_CHARACTER,
+            # Escape character followed by interpolation end sentinel
+            '3', ESCAPE_CHARACTER + REFERENCE_SENTINELS[1],
+            # Escape character at the end of the string
+            '4', ESCAPE_CHARACTER
+            ])
+        d = {'foo': v}
+        p = Parameters(d)
+        p.initialise_interpolation()
+        self.assertEqual(p.as_dict()['foo'], v)
+
+    def test_escape_close_in_ref(self):
+        p1 = Parameters({'one}': 1, 'two': '${one\\}}'})
+        r = {'one}': 1, 'two': 1}
+        p1.interpolate()
+        self.assertEqual(p1.as_dict(), r)
+
+    def test_double_escape_in_ref(self):
+        d = {'one\\': 1, 'two': '${one\\\\}'}
+        p1 = Parameters(d)
+        r = {'one\\': 1, 'two': 1}
+        p1.interpolate()
+        self.assertEqual(p1.as_dict(), r)
+
+    def test_merging_for_multiple_nodes(self):
+        p1 = Parameters({ 'alpha': { 'one': 111 }})
+        p2 = Parameters({ 'beta': {'two': '${alpha:one}' }})
+        p3 = Parameters({ 'beta': {'two': 222 }})
+        n1 = Parameters({ 'name': 'node1'})
+        r1 = { 'alpha': { 'one': 111 }, 'beta': { 'two': 111 }, 'name': 'node1' }
+        r2 = { 'alpha': { 'one': 111 }, 'beta': { 'two': 222 }, 'name': 'node2' }
+        n1.merge(p1)
+        n1.merge(p2)
+        n1.interpolate()
+        n2 = Parameters({'name': 'node2'})
+        n2.merge(p1)
+        n2.merge(p2)
+        n2.merge(p3)
+        n2.interpolate()
+        self.assertEqual(n1.as_dict(), r1)
+        self.assertEqual(n2.as_dict(), r2)
+
+    def test_list_merging_for_multiple_nodes(self):
+        p1 = Parameters({ 'alpha': { 'one': [1, 2] }})
+        p2 = Parameters({ 'beta': {'two': '${alpha:one}' }})
+        p3 = Parameters({ 'beta': {'two': [3] }})
+        n1 = Parameters({ 'name': 'node1'})
+        r1 = { 'alpha': { 'one': [1, 2] }, 'beta': { 'two': [1, 2] }, 'name': 'node1' }
+        r2 = { 'alpha': { 'one': [1, 2] }, 'beta': { 'two': [1, 2, 3] }, 'name': 'node2' }
+        n1.merge(p1)
+        n1.merge(p2)
+        n1.interpolate()
+        n2 = Parameters({'name': 'node2'})
+        n2.merge(p1)
+        n2.merge(p2)
+        n2.merge(p3)
+        n2.interpolate()
+        self.assertEqual(n1.as_dict(), r1)
+        self.assertEqual(n2.as_dict(), r2)
+
+    def test_dict_merging_for_multiple_nodes(self):
+        p1 = Parameters({ 'alpha': { 'one': { 'a': 'aa', 'b': 'bb' }}})
+        p2 = Parameters({ 'beta': {'two': '${alpha:one}' }})
+        p3 = Parameters({ 'beta': {'two': {'c': 'cc' }}})
+        n1 = Parameters({ 'name': 'node1'})
+        r1 = { 'alpha': { 'one': {'a': 'aa', 'b': 'bb'} }, 'beta': { 'two': {'a': 'aa', 'b': 'bb'} }, 'name': 'node1' }
+        r2 = { 'alpha': { 'one': {'a': 'aa', 'b': 'bb'} }, 'beta': { 'two': {'a': 'aa', 'b': 'bb', 'c': 'cc'} }, 'name': 'node2' }
+        n1.merge(p1)
+        n1.merge(p2)
+        n1.interpolate()
+        n2 = Parameters({'name': 'node2'})
+        n2.merge(p1)
+        n2.merge(p2)
+        n2.merge(p3)
+        n2.interpolate()
+        self.assertEqual(n1.as_dict(), r1)
+        self.assertEqual(n2.as_dict(), r2)
+
+    def test_list_merging_with_refs_for_multiple_nodes(self):
+        p1 = Parameters({ 'alpha': { 'one': [1, 2], 'two': [3, 4] }})
+        p2 = Parameters({ 'beta': { 'three': '${alpha:one}' }})
+        p3 = Parameters({ 'beta': { 'three': '${alpha:two}' }})
+        p4 = Parameters({ 'beta': { 'three': '${alpha:one}' }})
+        n1 = Parameters({ 'name': 'node1' })
+        r1 = {'alpha': {'one': [1, 2], 'two': [3, 4]}, 'beta': {'three': [1, 2]}, 'name': 'node1'}
+        r2 = {'alpha': {'one': [1, 2], 'two': [3, 4]}, 'beta': {'three': [1, 2, 3, 4, 1, 2]}, 'name': 'node2'}
+        n2 = Parameters({ 'name': 'node2' })
+        n2.merge(p1)
+        n2.merge(p2)
+        n2.merge(p3)
+        n2.merge(p4)
+        n2.interpolate()
+        n1.merge(p1)
+        n1.merge(p2)
+        n1.interpolate()
+        self.assertEqual(n1.as_dict(), r1)
+        self.assertEqual(n2.as_dict(), r2)
+
+    def test_nested_refs_with_multiple_nodes(self):
+        p1 = Parameters({ 'alpha': { 'one': 1, 'two': 2 } })
+        p2 = Parameters({ 'beta': { 'three': 'one' } })
+        p3 = Parameters({ 'beta': { 'three': 'two' } })
+        p4 = Parameters({ 'beta': { 'four': '${alpha:${beta:three}}' } })
+        n1 = Parameters({ 'name': 'node1' })
+        r1 = {'alpha': {'one': 1, 'two': 2}, 'beta': {'three': 'one', 'four': 1}, 'name': 'node1'}
+        r2 = {'alpha': {'one': 1, 'two': 2}, 'beta': {'three': 'two', 'four': 2}, 'name': 'node2'}
+        n1.merge(p1)
+        n1.merge(p4)
+        n1.merge(p2)
+        n1.interpolate()
+        n2 = Parameters({ 'name': 'node2' })
+        n2.merge(p1)
+        n2.merge(p4)
+        n2.merge(p3)
+        n2.interpolate()
+        self.assertEqual(n1.as_dict(), r1)
+        self.assertEqual(n2.as_dict(), r2)
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/reclass/defaults.py b/reclass/defaults.py
index fb04c83..557d511 100644
--- a/reclass/defaults.py
+++ b/reclass/defaults.py
@@ -15,6 +15,7 @@
 OPT_NODES_URI = 'nodes'
 OPT_CLASSES_URI = 'classes'
 OPT_PRETTY_PRINT = True
+OPT_NO_REFS = False
 OPT_OUTPUT = 'yaml'
 
 CONFIG_FILE_SEARCH_PATH = [os.getcwd(),
@@ -24,6 +25,15 @@
                           ]
 CONFIG_FILE_NAME = RECLASS_NAME + '-config.yml'
 
-PARAMETER_INTERPOLATION_SENTINELS = ('${', '}')
+REFERENCE_SENTINELS = ('${', '}')
+EXPORT_SENTINELS = ('$[', ']')
 PARAMETER_INTERPOLATION_DELIMITER = ':'
 PARAMETER_DICT_KEY_OVERRIDE_PREFIX = '~'
+ESCAPE_CHARACTER = '\\'
+
+MERGE_ALLOW_SCALAR_OVER_DICT = False
+MERGE_ALLOW_SCALAR_OVER_LIST = False
+MERGE_ALLOW_LIST_OVER_SCALAR = False
+MERGE_ALLOW_DICT_OVER_SCALAR = False
+
+AUTOMATIC_RECLASS_PARAMETERS = True
diff --git a/reclass/errors.py b/reclass/errors.py
index 9228985..d38ed84 100644
--- a/reclass/errors.py
+++ b/reclass/errors.py
@@ -10,7 +10,7 @@
 import posix, sys
 import traceback
 
-from reclass.defaults import PARAMETER_INTERPOLATION_SENTINELS
+from reclass.defaults import REFERENCE_SENTINELS
 
 class ReclassException(Exception):
 
@@ -138,7 +138,7 @@
     context = property(lambda self: self._context)
 
     def _get_message(self):
-        msg = "Cannot resolve " + self._var.join(PARAMETER_INTERPOLATION_SENTINELS)
+        msg = "Cannot resolve " + self._var.join(REFERENCE_SENTINELS)
         if self._context:
             msg += ' in the context of %s' % self._context
         return msg
@@ -151,7 +151,7 @@
 
     def __init__(self, string, end_sentinel):
         super(IncompleteInterpolationError, self).__init__(msg=None)
-        self._ref = string.join(PARAMETER_INTERPOLATION_SENTINELS)
+        self._ref = string.join(REFERENCE_SENTINELS)
         self._end_sentinel = end_sentinel
 
     def _get_message(self):
@@ -164,7 +164,7 @@
     def __init__(self, path, ref):
         super(InfiniteRecursionError, self).__init__(msg=None)
         self._path = path
-        self._ref = ref.join(PARAMETER_INTERPOLATION_SENTINELS)
+        self._ref = ref.join(REFERENCE_SENTINELS)
 
     def _get_message(self):
         msg = "Infinite recursion while resolving {0} at {1}"
@@ -214,3 +214,22 @@
               "definition in '{3}'. Nodes can only be defined once " \
               "per inventory."
         return msg.format(self._storage, self._name, self._uris[1], self._uris[0])
+
+
+class ParseError(ReclassException):
+
+    def __init__(self, msg, line, col, lineno, rc=posix.EX_DATAERR):
+        super(ParseError, self).__init__(rc=rc, msg=None)
+        self._err = msg
+        self._line = line
+        self._col = col
+        self._lineno = lineno
+
+    def _get_message(self):
+        msg = "Parse error: {0} : {1} at char {2}, line {3}"
+        return msg.format(self._line, self._err, self._col - 1, self._lineno)
+
+class ExpressionError(ReclassException):
+
+    def __init__(self, msg, rc=posix.EX_DATAERR):
+        super(ExpressionError, self).__init__(rc=rc, msg=msg)
diff --git a/reclass/output/json_outputter.py b/reclass/output/json_outputter.py
index dab86ed..8c79039 100644
--- a/reclass/output/json_outputter.py
+++ b/reclass/output/json_outputter.py
@@ -11,7 +11,7 @@
 
 class Outputter(OutputterBase):
 
-    def dump(self, data, pretty_print=False):
+    def dump(self, data, pretty_print=False, no_refs=False):
         separators = (',', ': ') if pretty_print else (',', ':')
         indent = 2 if pretty_print else None
         return json.dumps(data, indent=indent, separators=separators)
diff --git a/reclass/output/yaml_outputter.py b/reclass/output/yaml_outputter.py
index 2c70cc3..9a0d098 100644
--- a/reclass/output/yaml_outputter.py
+++ b/reclass/output/yaml_outputter.py
@@ -11,5 +11,16 @@
 
 class Outputter(OutputterBase):
 
-    def dump(self, data, pretty_print=False):
-        return yaml.dump(data, default_flow_style=not pretty_print)
+    def dump(self, data, pretty_print=False, no_refs=False):
+        if (no_refs):
+            return yaml.dump(data, default_flow_style=not pretty_print, Dumper=ExplicitDumper)
+        else:
+            return yaml.dump(data, default_flow_style=not pretty_print)
+
+class ExplicitDumper(yaml.SafeDumper):
+    """
+    A dumper that will never emit aliases.
+    """
+
+    def ignore_aliases(self, data):
+        return True
diff --git a/reclass/storage/__init__.py b/reclass/storage/__init__.py
index 8ae2408..3990b91 100644
--- a/reclass/storage/__init__.py
+++ b/reclass/storage/__init__.py
@@ -25,3 +25,7 @@
     def enumerate_nodes(self):
         msg = "Storage class '{0}' does not implement node enumeration."
         raise NotImplementedError(msg.format(self.name))
+
+    def path_mangler(self):
+        msg = "Storage class '{0}' does not implement path_mangler."
+        raise NotImplementedError(msg.format(self.name))
diff --git a/reclass/storage/common.py b/reclass/storage/common.py
new file mode 100644
index 0000000..6a77fc8
--- /dev/null
+++ b/reclass/storage/common.py
@@ -0,0 +1,22 @@
+import os
+
+class NameMangler:
+    @staticmethod
+    def nodes(relpath, name):
+        # nodes are identified just by their basename, so
+        # no mangling required
+        return relpath, name
+
+    @staticmethod
+    def classes(relpath, name):
+        if relpath == '.' or relpath == '':
+            # './' is converted to None
+            return None, name
+        parts = relpath.split(os.path.sep)
+        if name != 'init':
+            # "init" is the directory index, so only append the basename
+            # to the path parts for all other filenames. This has the
+            # effect that data in file "foo/init.yml" will be registered
+            # as data for class "foo", not "foo.init"
+            parts.append(name)
+        return relpath, '.'.join(parts)
diff --git a/reclass/storage/loader.py b/reclass/storage/loader.py
index 399e7fd..77fdecb 100644
--- a/reclass/storage/loader.py
+++ b/reclass/storage/loader.py
@@ -23,3 +23,9 @@
                                  '"{1}"'.format(self._name, klassname))
 
         return klass
+
+    def path_mangler(self, name='path_mangler'):
+        function = getattr(self._module, name, None)
+        if function is None:
+            raise AttributeError('Storage backend class {0} does not export "{1}"'.format(self._name, name))
+        return function
diff --git a/reclass/storage/memcache_proxy.py b/reclass/storage/memcache_proxy.py
index 7d9ab5e..6c898a2 100644
--- a/reclass/storage/memcache_proxy.py
+++ b/reclass/storage/memcache_proxy.py
@@ -30,30 +30,27 @@
 
     name = property(lambda self: self._real_storage.name)
 
-    @staticmethod
-    def _cache_proxy(name, cache, getter):
-        try:
-            ret = cache[name]
-
-        except KeyError, e:
-            ret = getter(name)
-            cache[name] = ret
-
-        return ret
-
     def get_node(self, name):
         if not self._cache_nodes:
             return self._real_storage.get_node(name)
+        try:
+            return self._nodes_cache[name]
+        except KeyError, e:
+            ret = self._real_storage.get_node(name)
+            self._nodes_cache[name] = ret
+        return ret
 
-        return MemcacheProxy._cache_proxy(name, self._nodes_cache,
-                                          self._real_storage.get_node)
-
-    def get_class(self, name):
+    def get_class(self, name, environment):
         if not self._cache_classes:
-            return self._real_storage.get_class(name)
-
-        return MemcacheProxy._cache_proxy(name, self._classes_cache,
-                                          self._real_storage.get_class)
+            return self._real_storage.get_class(name, environment)
+        try:
+            return self._classes_cache[environment][name]
+        except KeyError, e:
+            if environment not in self._classes_cache:
+                self._classes_cache[environment] = dict()
+            ret = self._real_storage.get_class(name, environment)
+            self._classes_cache[environment][name] = ret
+        return ret
 
     def enumerate_nodes(self):
         if not self._cache_nodelist:
diff --git a/reclass/storage/mixed/__init__.py b/reclass/storage/mixed/__init__.py
new file mode 100644
index 0000000..d9983fd
--- /dev/null
+++ b/reclass/storage/mixed/__init__.py
@@ -0,0 +1,58 @@
+#
+# -*- coding: utf-8 -*-
+#
+# This file is part of reclass
+
+import collections
+import copy
+
+import reclass.errors
+from reclass import get_storage
+from reclass.storage import NodeStorageBase
+
+def path_mangler(inventory_base_uri, nodes_uri, classes_uri):
+    if nodes_uri == classes_uri:
+        raise reclass.errors.DuplicateUriError(nodes_uri, classes_uri)
+    return nodes_uri, classes_uri
+
+STORAGE_NAME = 'mixed'
+
+class ExternalNodeStorage(NodeStorageBase):
+
+    MixedUri = collections.namedtuple('MixedURI', 'storage_type options')
+
+    def __init__(self, nodes_uri, classes_uri):
+        super(ExternalNodeStorage, self).__init__(STORAGE_NAME)
+
+        self._nodes_uri = self._uri(nodes_uri)
+        self._nodes_storage = get_storage(self._nodes_uri.storage_type, self._nodes_uri.options, None)
+        self._classes_default_uri = self._uri(classes_uri)
+        self._classes_default_storage = get_storage(self._classes_default_uri.storage_type, None, self._classes_default_uri.options)
+
+        self._classes_storage = dict()
+        if 'env_overrides' in classes_uri:
+            for override in classes_uri['env_overrides']:
+                for env, options in override.iteritems():
+                        uri = copy.deepcopy(classes_uri)
+                        uri.update(options)
+                        uri = self._uri(uri)
+                        self._classes_storage[env] = get_storage(uri.storage_type, None, uri.options)
+
+    def _uri(self, uri):
+        ret = copy.deepcopy(uri)
+        ret['storage_type'] = uri['storage_type']
+        if 'env_overrides' in ret:
+            del ret['env_overrides']
+        if uri['storage_type'] == 'yaml_fs':
+            ret = ret['uri']
+        return self.MixedUri(uri['storage_type'], ret)
+
+    def get_node(self, name):
+        return self._nodes_storage.get_node(name)
+
+    def get_class(self, name, environment):
+        storage = self._classes_storage.get(environment, self._classes_default_storage)
+        return storage.get_class(name, environment=environment)
+
+    def enumerate_nodes(self):
+        return self._nodes_storage.enumerate_nodes()
diff --git a/reclass/storage/tests/test_memcache_proxy.py b/reclass/storage/tests/test_memcache_proxy.py
index 066c27e..6764251 100644
--- a/reclass/storage/tests/test_memcache_proxy.py
+++ b/reclass/storage/tests/test_memcache_proxy.py
@@ -47,23 +47,23 @@
         p = MemcacheProxy(self._storage, cache_classes=False)
         NAME = 'foo'; NAME2 = 'bar'; RET = 'baz'
         self._storage.get_class.return_value = RET
-        self.assertEqual(p.get_class(NAME), RET)
-        self.assertEqual(p.get_class(NAME), RET)
-        self.assertEqual(p.get_class(NAME2), RET)
-        self.assertEqual(p.get_class(NAME2), RET)
-        expected = [mock.call(NAME), mock.call(NAME),
-                    mock.call(NAME2), mock.call(NAME2)]
+        self.assertEqual(p.get_class(NAME, None), RET)
+        self.assertEqual(p.get_class(NAME, None), RET)
+        self.assertEqual(p.get_class(NAME2, None), RET)
+        self.assertEqual(p.get_class(NAME2, None), RET)
+        expected = [mock.call(NAME, None), mock.call(NAME, None),
+                    mock.call(NAME2, None), mock.call(NAME2, None)]
         self.assertListEqual(self._storage.get_class.call_args_list, expected)
 
     def test_classes_caching(self):
         p = MemcacheProxy(self._storage, cache_classes=True)
         NAME = 'foo'; NAME2 = 'bar'; RET = 'baz'
         self._storage.get_class.return_value = RET
-        self.assertEqual(p.get_class(NAME), RET)
-        self.assertEqual(p.get_class(NAME), RET)
-        self.assertEqual(p.get_class(NAME2), RET)
-        self.assertEqual(p.get_class(NAME2), RET)
-        expected = [mock.call(NAME), mock.call(NAME2)] # called once each
+        self.assertEqual(p.get_class(NAME, None), RET)
+        self.assertEqual(p.get_class(NAME, None), RET)
+        self.assertEqual(p.get_class(NAME2, None), RET)
+        self.assertEqual(p.get_class(NAME2, None), RET)
+        expected = [mock.call(NAME, None), mock.call(NAME2, None)] # called once each
         self.assertListEqual(self._storage.get_class.call_args_list, expected)
 
     def test_nodelist_no_caching(self):
diff --git a/reclass/storage/tests/test_yamldata.py b/reclass/storage/tests/test_yamldata.py
new file mode 100644
index 0000000..d8129ce
--- /dev/null
+++ b/reclass/storage/tests/test_yamldata.py
@@ -0,0 +1,37 @@
+#
+# -*- coding: utf-8 -*-
+#
+# This file is part of reclass (http://github.com/madduck/reclass)
+#
+
+from reclass.storage.yamldata import YamlData
+
+import unittest
+
+class TestYamlData(unittest.TestCase):
+
+    def setUp(self):
+        lines = [ 'classes:',
+                  '  - testdir.test1',
+                  '  - testdir.test2',
+                  '  - test3',
+                  '',
+                  'environment: base',
+                  '',
+                  'parameters:',
+                  '  _TEST_:',
+                  '    alpha: 1',
+                  '    beta: two' ]
+        self.data = '\n'.join(lines)
+        self.yamldict = { 'classes': [ 'testdir.test1', 'testdir.test2', 'test3' ],
+                          'environment': 'base',
+                          'parameters': { '_TEST_': { 'alpha': 1, 'beta': 'two' } }
+                        }
+
+    def test_yaml_from_string(self):
+        res = YamlData.from_string(self.data, 'testpath')
+        self.assertEqual(res.uri, 'testpath')
+        self.assertEqual(res.get_data(), self.yamldict)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/reclass/storage/yaml_fs/__init__.py b/reclass/storage/yaml_fs/__init__.py
index 5a13050..35e4f05 100644
--- a/reclass/storage/yaml_fs/__init__.py
+++ b/reclass/storage/yaml_fs/__init__.py
@@ -8,8 +8,11 @@
 #
 import os, sys
 import fnmatch
+import yaml
+from reclass.output.yaml_outputter import ExplicitDumper
 from reclass.storage import NodeStorageBase
-from yamlfile import YamlFile
+from reclass.storage.common import NameMangler
+from reclass.storage.yamldata import YamlData
 from directory import Directory
 from reclass.datatypes import Entity
 import reclass.errors
@@ -21,34 +24,42 @@
     #print >>sys.stderr, msg
     pass
 
+def path_mangler(inventory_base_uri, nodes_uri, classes_uri):
+
+    if inventory_base_uri is None:
+        # if inventory_base is not given, default to current directory
+        inventory_base_uri = os.getcwd()
+
+    nodes_uri = nodes_uri or 'nodes'
+    classes_uri = classes_uri or 'classes'
+
+    def _path_mangler_inner(path):
+        ret = os.path.join(inventory_base_uri, path)
+        ret = os.path.expanduser(ret)
+        return os.path.abspath(ret)
+
+    n, c = map(_path_mangler_inner, (nodes_uri, classes_uri))
+    if n == c:
+        raise reclass.errors.DuplicateUriError(n, c)
+    common = os.path.commonprefix((n, c))
+    if common == n or common == c:
+        raise reclass.errors.UriOverlapError(n, c)
+
+    return n, c
+
+
 class ExternalNodeStorage(NodeStorageBase):
 
-    def __init__(self, nodes_uri, classes_uri, default_environment=None):
+    def __init__(self, nodes_uri, classes_uri):
         super(ExternalNodeStorage, self).__init__(STORAGE_NAME)
 
-        def name_mangler(relpath, name):
-            # nodes are identified just by their basename, so
-            # no mangling required
-            return relpath, name
-        self._nodes_uri = nodes_uri
-        self._nodes = self._enumerate_inventory(nodes_uri, name_mangler)
+        if nodes_uri is not None:
+            self._nodes_uri = nodes_uri
+            self._nodes = self._enumerate_inventory(nodes_uri, NameMangler.nodes)
 
-        def name_mangler(relpath, name):
-            if relpath == '.':
-                # './' is converted to None
-                return None, name
-            parts = relpath.split(os.path.sep)
-            if name != 'init':
-                # "init" is the directory index, so only append the basename
-                # to the path parts for all other filenames. This has the
-                # effect that data in file "foo/init.yml" will be registered
-                # as data for class "foo", not "foo.init"
-                parts.append(name)
-            return relpath, '.'.join(parts)
-        self._classes_uri = classes_uri
-        self._classes = self._enumerate_inventory(classes_uri, name_mangler)
-
-        self._default_environment = default_environment
+        if classes_uri is not None:
+            self._classes_uri = classes_uri
+            self._classes = self._enumerate_inventory(classes_uri, NameMangler.classes)
 
     nodes_uri = property(lambda self: self._nodes_uri)
     classes_uri = property(lambda self: self._classes_uri)
@@ -84,16 +95,16 @@
             name = os.path.splitext(relpath)[0]
         except KeyError, e:
             raise reclass.errors.NodeNotFound(self.name, name, self.nodes_uri)
-        entity = YamlFile(path).get_entity(name, self._default_environment)
+        entity = YamlData.from_file(path).get_entity(name)
         return entity
 
-    def get_class(self, name, nodename=None):
+    def get_class(self, name, environment):
         vvv('GET CLASS {0}'.format(name))
         try:
             path = os.path.join(self.classes_uri, self._classes[name])
         except KeyError, e:
             raise reclass.errors.ClassNotFound(self.name, name, self.classes_uri)
-        entity = YamlFile(path).get_entity(name)
+        entity = YamlData.from_file(path).get_entity(name)
         return entity
 
     def enumerate_nodes(self):
diff --git a/reclass/storage/yaml_fs/yamlfile.py b/reclass/storage/yaml_fs/yamlfile.py
deleted file mode 100644
index 717a911..0000000
--- a/reclass/storage/yaml_fs/yamlfile.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#
-# -*- coding: utf-8 -*-
-#
-# This file is part of reclass (http://github.com/madduck/reclass)
-#
-# Copyright © 2007–14 martin f. krafft <madduck@madduck.net>
-# Released under the terms of the Artistic Licence 2.0
-#
-from reclass import datatypes
-import yaml
-import os
-from reclass.errors import NotFoundError
-
-class YamlFile(object):
-
-    def __init__(self, path):
-        ''' Initialise a yamlfile object '''
-        if not os.path.isfile(path):
-            raise NotFoundError('No such file: %s' % path)
-        if not os.access(path, os.R_OK):
-            raise NotFoundError('Cannot open: %s' % path)
-        self._path = path
-        self._data = dict()
-        self._read()
-    path = property(lambda self: self._path)
-
-    def _read(self):
-        fp = file(self._path)
-        data = yaml.safe_load(fp)
-        if data is not None:
-            self._data = data
-        fp.close()
-
-    def get_entity(self, name=None, default_environment=None):
-        classes = self._data.get('classes')
-        if classes is None:
-            classes = []
-        classes = datatypes.Classes(classes)
-
-        applications = self._data.get('applications')
-        if applications is None:
-            applications = []
-        applications = datatypes.Applications(applications)
-
-        parameters = self._data.get('parameters')
-        if parameters is None:
-            parameters = {}
-        parameters = datatypes.Parameters(parameters)
-
-        env = self._data.get('environment', default_environment)
-
-        if name is None:
-            name = self._path
-
-        return datatypes.Entity(classes, applications, parameters,
-                                name=name, environment=env,
-                                uri='yaml_fs://{0}'.format(self._path))
-
-    def __repr__(self):
-        return '<{0} {1}, {2}>'.format(self.__class__.__name__, self._path,
-                                       self._data.keys())
diff --git a/reclass/storage/yaml_git/__init__.py b/reclass/storage/yaml_git/__init__.py
new file mode 100644
index 0000000..614d481
--- /dev/null
+++ b/reclass/storage/yaml_git/__init__.py
@@ -0,0 +1,267 @@
+#
+# -*- coding: utf-8 -*-
+#
+# This file is part of reclass
+
+import collections
+import distutils.version
+import fnmatch
+import os
+
+# Squelch warning on centos7 due to upgrading cffi
+# see https://github.com/saltstack/salt/pull/39871
+import warnings
+with warnings.catch_warnings():
+    warnings.simplefilter('ignore')
+    import pygit2
+
+import reclass.errors
+from reclass.storage import NodeStorageBase
+from reclass.storage.common import NameMangler
+from reclass.storage.yamldata import YamlData
+
+FILE_EXTENSION = '.yml'
+STORAGE_NAME = 'yaml_git'
+
+def path_mangler(inventory_base_uri, nodes_uri, classes_uri):
+    if nodes_uri == classes_uri:
+        raise reclass.errors.DuplicateUriError(nodes_uri, classes_uri)
+    return nodes_uri, classes_uri
+
+
+GitMD = collections.namedtuple('GitMD', ['name', 'path', 'id'], verbose=False, rename=False)
+
+
+class GitURI(object):
+
+    def __init__(self, dictionary):
+        self.repo = None
+        self.branch = None
+        self.root = None
+        self.cache_dir = None
+        self.pubkey = None
+        self.privkey = None
+        self.password = None
+        self.update(dictionary)
+
+    def update(self, dictionary):
+        if 'repo' in dictionary: self.repo = dictionary['repo']
+        if 'branch' in dictionary: self.branch = dictionary['branch']
+        if 'cache_dir' in dictionary: self.cache_dir = dictionary['cache_dir']
+        if 'pubkey' in dictionary: self.pubkey = dictionary['pubkey']
+        if 'privkey' in dictionary: self.privkey = dictionary['privkey']
+        if 'password' in dictionary: self.password = dictionary['password']
+        if 'root' in dictionary:
+            if dictionary['root'] is None:
+                self.root = None
+            else:
+                self.root = dictionary['root'].replace('/', '.')
+
+    def __repr__(self):
+        return '<{0}: {1} {2} {3}>'.format(self.__class__.__name__, self.repo, self.branch, self.root)
+
+
+class GitRepo(object):
+
+    def __init__(self, uri):
+        self.transport, _, self.url = uri.repo.partition('://')
+        self.name = self.url.replace('/', '_')
+        self.credentials = None
+        self.remotecallbacks = None
+        if uri.cache_dir is None:
+            self.cache_dir = '{0}/{1}/{2}'.format(os.path.expanduser("~"), '.reclass/cache/git', self.name)
+        else:
+            self.cache_dir = '{0}/{1}'.format(uri.cache_dir, self.name)
+
+        self._init_repo(uri)
+        self._fetch()
+        self.branches = self.repo.listall_branches()
+        self.files = self.files_in_repo()
+
+    def _init_repo(self, uri):
+        if os.path.exists(self.cache_dir):
+            self.repo = pygit2.Repository(self.cache_dir)
+        else:
+            os.makedirs(self.cache_dir)
+            self.repo = pygit2.init_repository(self.cache_dir, bare=True)
+
+        if not self.repo.remotes:
+            self.repo.create_remote('origin', self.url)
+
+        if 'ssh' in self.transport:
+            if '@' in self.url:
+                user, _, _ = self.url.partition('@')
+            else:
+                user = 'gitlab'
+
+            if uri.pubkey is not None:
+                creds = pygit2.Keypair(user, uri.pubkey, uri.privkey, uri.password)
+            else:
+                creds = pygit2.KeypairFromAgent(user)
+
+            pygit2_version = pygit2.__version__
+            if distutils.version.LooseVersion(pygit2_version) >= distutils.version.LooseVersion('0.23.2'):
+                self.remotecallbacks = pygit2.RemoteCallbacks(credentials=creds)
+                self.credentials = None
+            else:
+                self.remotecallbacks = None
+                self.credentials = creds
+
+    def _fetch(self):
+        origin = self.repo.remotes[0]
+        fetch_kwargs = {}
+        if self.remotecallbacks is not None:
+            fetch_kwargs['callbacks'] = self.remotecallbacks
+        if self.credentials is not None:
+            origin.credentials = self.credentials
+        fetch_results = origin.fetch(**fetch_kwargs)
+
+        remote_branches = self.repo.listall_branches(pygit2.GIT_BRANCH_REMOTE)
+        local_branches = self.repo.listall_branches()
+        for remote_branch_name in remote_branches:
+            _, _, local_branch_name = remote_branch_name.partition('/')
+            remote_branch = self.repo.lookup_branch(remote_branch_name, pygit2.GIT_BRANCH_REMOTE)
+            if local_branch_name not in local_branches:
+                local_branch = self.repo.create_branch(local_branch_name, self.repo[remote_branch.target.hex])
+                local_branch.upstream = remote_branch
+            else:
+                local_branch = self.repo.lookup_branch(local_branch_name)
+                if local_branch.target != remote_branch.target:
+                    local_branch.set_target(remote_branch.target)
+
+        local_branches = self.repo.listall_branches()
+        for local_branch_name in local_branches:
+            remote_branch_name = '{0}/{1}'.format(origin.name, local_branch_name)
+            if remote_branch_name not in remote_branches:
+                local_branch = self.repo.lookup_branch(local_branch_name)
+                local_branch.delete()
+
+    def get(self, id):
+        return self.repo.get(id)
+
+    def files_in_tree(self, tree, path):
+        files = []
+        for entry in tree:
+            if entry.filemode == pygit2.GIT_FILEMODE_TREE:
+                subtree = self.repo.get(entry.id)
+                if path == '':
+                    subpath = entry.name
+                else:
+                    subpath = '/'.join([path, entry.name])
+                files.extend(self.files_in_tree(subtree, subpath))
+            else:
+                if path == '':
+                   relpath = entry.name
+                else:
+                   relpath = '/'.join([path, entry.name])
+                files.append(GitMD(entry.name, relpath, entry.id))
+        return files
+
+    def files_in_branch(self, branch):
+        tree = self.repo.revparse_single(branch).tree
+        return self.files_in_tree(tree, '')
+
+    def files_in_repo(self):
+        ret = {}
+        for bname in self.branches:
+            branch = {}
+            files = self.files_in_branch(bname)
+            for file in files:
+                if fnmatch.fnmatch(file.name, '*{0}'.format(FILE_EXTENSION)):
+                    name = os.path.splitext(file.name)[0]
+                    relpath = os.path.dirname(file.path)
+                    relpath, name = NameMangler.classes(relpath, name)
+                    if name in branch:
+                        raise reclass.errors.DuplicateNodeNameError(self.name + ' - ' + bname, name, branch[name].path, file.path)
+                    else:
+                        branch[name] = file
+            ret[bname] = branch
+        return ret
+
+    def nodes(self, branch, subdir):
+        ret = {}
+        for name, file in self.files[branch].iteritems():
+            if subdir is None or name.startswith(subdir):
+                node_name = os.path.splitext(file.name)[0]
+                if node_name in ret:
+                    raise reclass.errors.DuplicateNodeNameError(self.name, name, ret[node_name].path, file.path)
+                else:
+                    ret[node_name] = file
+        return ret
+
+class ExternalNodeStorage(NodeStorageBase):
+
+    def __init__(self, nodes_uri, classes_uri):
+        super(ExternalNodeStorage, self).__init__(STORAGE_NAME)
+        self._repos = dict()
+
+        if nodes_uri is not None:
+            self._nodes_uri = GitURI({ 'branch': 'master' })
+            self._nodes_uri.update(nodes_uri)
+            self._load_repo(self._nodes_uri)
+            self._nodes = self._repos[self._nodes_uri.repo].nodes(self._nodes_uri.branch, self._nodes_uri.root)
+
+        if classes_uri is not None:
+            self._classes_default_uri = GitURI({ 'branch': '__env__' })
+            self._classes_default_uri.update(classes_uri)
+            self._load_repo(self._classes_default_uri)
+
+            self._classes_uri = []
+            if 'env_overrides' in classes_uri:
+                for override in classes_uri['env_overrides']:
+                    for env, options in override.iteritems():
+                        uri = GitURI(vars(self._classes_default_uri))
+                        uri.update({ 'branch': env })
+                        uri.update(options)
+                        self._classes_uri.append((env, uri))
+                        self._load_repo(uri)
+
+            self._classes_uri.append(('*', self._classes_default_uri))
+
+    nodes_uri = property(lambda self: self._nodes_uri)
+    classes_uri = property(lambda self: self._classes_uri)
+
+    def get_node(self, name):
+        file = self._nodes[name]
+        blob = self._repos[self._nodes_uri.repo].get(file.id)
+        entity = YamlData.from_string(blob.data, 'git_fs://{0}#{1}/{2}'.format(self._nodes_uri.repo, self._nodes_uri.branch, file.path)).get_entity(name)
+        return entity
+
+    def get_class(self, name, environment):
+        uri = self._env_to_uri(environment)
+        if uri.root is not None:
+            name = '{0}.{1}'.format(uri.root, name)
+        if uri.repo not in self._repos:
+            raise reclass.errors.NotFoundError("Repo " + uri.repo + " unknown or missing")
+        if uri.branch not in self._repos[uri.repo].files:
+            raise reclass.errors.NotFoundError("Branch " + uri.branch + " missing from " + uri.repo)
+        if name not in self._repos[uri.repo].files[uri.branch]:
+            raise reclass.errors.NotFoundError("File " + name + " missing from " + uri.repo + " branch " + uri.branch)
+        file = self._repos[uri.repo].files[uri.branch][name]
+        blob = self._repos[uri.repo].get(file.id)
+        entity = YamlData.from_string(blob.data, 'git_fs://{0}#{1}/{2}'.format(uri.repo, uri.branch, file.path)).get_entity(name)
+        return entity
+
+    def enumerate_nodes(self):
+        return self._nodes.keys()
+
+    def _load_repo(self, uri):
+        if uri.repo not in self._repos:
+            self._repos[uri.repo] = GitRepo(uri)
+
+    def _env_to_uri(self, environment):
+        ret = None
+        if environment is None:
+            ret = self._classes_default_uri
+        else:
+            for env, uri in self._classes_uri:
+                if env == environment:
+                    ret = uri
+                    break
+        if ret is None:
+            ret = self._classes_default_uri
+        if ret.branch == '__env__':
+            ret.branch = environment
+        if ret.branch == None:
+            ret.branch = 'master'
+        return ret
diff --git a/reclass/storage/yamldata.py b/reclass/storage/yamldata.py
new file mode 100644
index 0000000..b28db06
--- /dev/null
+++ b/reclass/storage/yamldata.py
@@ -0,0 +1,84 @@
+#
+# -*- coding: utf-8 -*-
+#
+# This file is part of reclass (http://github.com/madduck/reclass)
+#
+# Copyright © 2007–14 martin f. krafft <madduck@madduck.net>
+# Released under the terms of the Artistic Licence 2.0
+#
+from reclass import datatypes
+import yaml
+import os
+from reclass.errors import NotFoundError
+
+class YamlData(object):
+
+    @classmethod
+    def from_file(cls, path):
+        ''' Initialise yaml data from a local file '''
+        if not os.path.isfile(path):
+            raise NotFoundError('No such file: %s' % path)
+        if not os.access(path, os.R_OK):
+            raise NotFoundError('Cannot open: %s' % path)
+        y = cls('yaml_fs://{0}'.format(path))
+        fp = file(path)
+        data = yaml.safe_load(fp)
+        if data is not None:
+            y._data = data
+        fp.close()
+        return y
+
+    @classmethod
+    def from_string(cls, string, uri):
+        ''' Initialise yaml data from a string '''
+        y = cls(uri)
+        data = yaml.safe_load(string)
+        if data is not None:
+            y._data = data
+        return y
+
+    def __init__(self, uri):
+        self._uri = uri
+        self._data = dict()
+
+    uri = property(lambda self: self._uri)
+
+    def get_data(self):
+        return self._data
+
+    def get_entity(self, name=None):
+        classes = self._data.get('classes')
+        if classes is None:
+            classes = []
+        classes = datatypes.Classes(classes)
+
+        applications = self._data.get('applications')
+        if applications is None:
+            applications = []
+        applications = datatypes.Applications(applications)
+
+        parameters = self._data.get('parameters')
+        if parameters is None:
+            parameters = {}
+        parameters = datatypes.Parameters(parameters)
+
+        exports = self._data.get('exports')
+        if exports is None:
+            exports = {}
+        exports = datatypes.Exports(exports)
+
+        if name is None:
+            name = self._uri
+
+        env = self._data.get('environment', None)
+
+        return datatypes.Entity(classes, applications, parameters, exports,
+                                name=name, environment=env, uri=self.uri)
+
+    def __str__(self):
+        return '<{0} {1}, {2}>'.format(self.__class__.__name__, self._uri,
+                                       self._data)
+
+    def __repr__(self):
+        return '<{0} {1}, {2}>'.format(self.__class__.__name__, self._uri,
+                                       self._data.keys())
diff --git a/reclass/utils/dictpath.py b/reclass/utils/dictpath.py
index db95e66..0d23c96 100644
--- a/reclass/utils/dictpath.py
+++ b/reclass/utils/dictpath.py
@@ -59,12 +59,12 @@
         if contents is None:
             self._parts = []
         else:
-            if isinstance(contents, types.StringTypes):
+            if isinstance(contents, list):
+                self._parts = contents
+            elif isinstance(contents, types.StringTypes):
                 self._parts = self._split_string(contents)
             elif isinstance(contents, tuple):
                 self._parts = list(contents)
-            elif isinstance(contents, list):
-                self._parts = contents
             else:
                 raise TypeError('DictPath() takes string or list, '\
                                 'not %s' % type(contents))
@@ -112,14 +112,42 @@
     def _escape_string(self, string):
         return string.replace(self._delim, '\\' + self._delim)
 
+    def has_ancestors(self):
+        return len(self._parts) > 1
+
+    def key_parts(self):
+        if self.has_ancestors():
+            return self._parts[0:-1]
+        else:
+            return []
+
     def new_subpath(self, key):
-        try:
-            return DictPath(self._delim, self._parts + [self._escape_string(key)])
-        except AttributeError as e:
-            return DictPath(self._delim, self._parts + [key])
+        return DictPath(self._delim, self._parts + [key])
 
     def get_value(self, base):
         return self._get_innermost_container(base)[self._get_key()]
 
     def set_value(self, base, value):
         self._get_innermost_container(base)[self._get_key()] = value
+
+    def drop_first(self):
+        del self._parts[0]
+        return self
+
+    def exists_in(self, container):
+        item = container
+        for i in self._parts:
+            if isinstance(item, (dict, list)):
+                if i in item:
+                    if isinstance(item, dict):
+                        item = item[i]
+                    elif isinstance(item, list):
+                        item = item[int(i)]
+                else:
+                    return False
+            else:
+                if item == self._parts[-1]:
+                    return True
+                else:
+                    return False
+        return True
diff --git a/reclass/utils/refvalue.py b/reclass/utils/refvalue.py
deleted file mode 100644
index b8e730b..0000000
--- a/reclass/utils/refvalue.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#
-# -*- coding: utf-8 -*-
-#
-# This file is part of reclass (http://github.com/madduck/reclass)
-#
-# Copyright © 2007–14 martin f. krafft <madduck@madduck.net>
-# Released under the terms of the Artistic Licence 2.0
-#
-
-import re
-
-from reclass.utils.dictpath import DictPath
-from reclass.defaults import PARAMETER_INTERPOLATION_SENTINELS, \
-        PARAMETER_INTERPOLATION_DELIMITER
-from reclass.errors import IncompleteInterpolationError, \
-        UndefinedVariableError
-
-_SENTINELS = [re.escape(s) for s in PARAMETER_INTERPOLATION_SENTINELS]
-_RE = '{0}\s*(.+?)\s*{1}'.format(*_SENTINELS)
-
-class RefValue(object):
-    '''
-    Isolates references in string values
-
-    RefValue can be used to isolate and eventually expand references to other
-    parameters in strings. Those references can then be iterated and rendered
-    in the context of a dictionary to resolve those references.
-
-    RefValue always gets constructed from a string, because templating
-    — essentially this is what's going on — is necessarily always about
-    strings. Therefore, generally, the rendered value of a RefValue instance
-    will also be a string.
-
-    Nevertheless, as this might not be desirable, RefValue will return the
-    referenced variable without casting it to a string, if the templated
-    string contains nothing but the reference itself.
-
-    For instance:
-
-      mydict = {'favcolour': 'yellow', 'answer': 42, 'list': [1,2,3]}
-      RefValue('My favourite colour is ${favolour}').render(mydict)
-      → 'My favourite colour is yellow'      # a string
-
-      RefValue('The answer is ${answer}').render(mydict)
-      → 'The answer is 42'                   # a string
-
-      RefValue('${answer}').render(mydict)
-      → 42                                   # an int
-
-      RefValue('${list}').render(mydict)
-      → [1,2,3]                              # an list
-
-    The markers used to identify references are set in reclass.defaults, as is
-    the default delimiter.
-    '''
-
-    INTERPOLATION_RE = re.compile(_RE)
-
-    def __init__(self, string, delim=PARAMETER_INTERPOLATION_DELIMITER):
-        self._strings = []
-        self._refs = []
-        self._delim = delim
-        self._parse(string)
-
-    def _parse(self, string):
-        parts = RefValue.INTERPOLATION_RE.split(string)
-        self._refs = parts[1:][::2]
-        self._strings = parts[0:][::2]
-        self._check_strings(string)
-
-    def _check_strings(self, orig):
-        for s in self._strings:
-            pos = s.find(PARAMETER_INTERPOLATION_SENTINELS[0])
-            if pos >= 0:
-                raise IncompleteInterpolationError(orig,
-                                                   PARAMETER_INTERPOLATION_SENTINELS[1])
-
-    def _resolve(self, ref, context):
-        path = DictPath(self._delim, ref)
-        try:
-            return path.get_value(context)
-        except KeyError as e:
-            raise UndefinedVariableError(ref)
-
-    def has_references(self):
-        return len(self._refs) > 0
-
-    def get_references(self):
-        return self._refs
-
-    def _assemble(self, resolver):
-        if not self.has_references():
-            return self._strings[0]
-
-        if self._strings == ['', '']:
-            # preserve the type of the referenced variable
-            return resolver(self._refs[0])
-
-        # reassemble the string by taking a string and str(ref) pairwise
-        ret = ''
-        for i in range(0, len(self._refs)):
-            ret += self._strings[i] + str(resolver(self._refs[i]))
-        if len(self._strings) > len(self._refs):
-            # and finally append a trailing string, if any
-            ret += self._strings[-1]
-        return ret
-
-    def render(self, context):
-        resolver = lambda s: self._resolve(s, context)
-        return self._assemble(resolver)
-
-    def __repr__(self):
-        do_not_resolve = lambda s: s.join(PARAMETER_INTERPOLATION_SENTINELS)
-        return 'RefValue(%r, %r)' % (self._assemble(do_not_resolve),
-                                     self._delim)
diff --git a/reclass/values/__init__.py b/reclass/values/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/reclass/values/__init__.py
diff --git a/reclass/values/compitem.py b/reclass/values/compitem.py
new file mode 100644
index 0000000..ea342a5
--- /dev/null
+++ b/reclass/values/compitem.py
@@ -0,0 +1,49 @@
+#
+# -*- coding: utf-8 -*-
+#
+# This file is part of reclass
+#
+
+from item import Item
+
+class CompItem(Item):
+
+    def __init__(self, items):
+        self.type = Item.COMPOSITE
+        self._items = items
+        self._refs = []
+        self._allRefs = False
+        self.assembleRefs()
+
+    def assembleRefs(self, context={}):
+        self._refs = []
+        self._allRefs = True
+        for item in self._items:
+            if item.has_references():
+                item.assembleRefs(context)
+                self._refs.extend(item.get_references())
+                if item.allRefs() is False:
+                    self._allRefs = False
+
+    def contents(self):
+        return self._items
+
+    def allRefs(self):
+        return self._allRefs
+
+    def has_references(self):
+        return len(self._refs) > 0
+
+    def get_references(self):
+        return self._refs
+
+    def render(self, context, inventory):
+        # Preserve type if only one item
+        if len(self._items) == 1:
+            return self._items[0].render(context, inventory)
+        # Multiple items
+        strings = [ str(i.render(context, inventory)) for i in self._items ]
+        return "".join(strings)
+
+    def __repr__(self):
+        return 'CompItem(%r)' % self._items
diff --git a/reclass/values/dictitem.py b/reclass/values/dictitem.py
new file mode 100644
index 0000000..bc58f67
--- /dev/null
+++ b/reclass/values/dictitem.py
@@ -0,0 +1,33 @@
+#
+# -*- coding: utf-8 -*-
+#
+# This file is part of reclass
+#
+
+from item import Item
+
+class DictItem(Item):
+
+    def __init__(self, item):
+        self.type = Item.DICTIONARY
+        self._dict = item
+
+    def contents(self):
+        return self._dict
+
+    def is_container(self):
+        return True
+
+    def merge_over(self, item, options):
+        if item.type == Item.SCALAR:
+            if item.contents() is None or options.allow_dict_over_scalar:
+                return self
+            else:
+                raise TypeError('allow dict over scalar = False: cannot merge %s onto %s' % (repr(self), repr(item)))
+        raise TypeError('Cannot merge %s over %s' % (repr(self), repr(item)))
+
+    def render(self, context, inventory):
+        return self._dict
+
+    def __repr__(self):
+        return 'DictItem(%r)' % self._dict
diff --git a/reclass/values/invitem.py b/reclass/values/invitem.py
new file mode 100644
index 0000000..bd887b7
--- /dev/null
+++ b/reclass/values/invitem.py
@@ -0,0 +1,246 @@
+#
+# -*- coding: utf-8 -*-
+#
+# This file is part of reclass
+#
+
+import copy
+import pyparsing as pp
+
+from item import Item
+from reclass.utils.dictpath import DictPath
+from reclass.errors import ExpressionError, ParseError, UndefinedVariableError
+
+_OBJ = 'OBJ'
+_TEST = 'TEST'
+_LIST_TEST = 'LIST_TEST'
+
+_VALUE = 'VALUE'
+_IF = 'IF'
+
+_EQUAL = '=='
+_NOT_EQUAL = '!='
+
+class InvItem(Item):
+
+    def _get_parser():
+
+        def _object(string, location, tokens):
+            token = tokens[0]
+            tokens[0] = (_OBJ, token)
+
+        def _integer(string, location, tokens):
+            try:
+                token = int(tokens[0])
+            except ValueError:
+                token = tokens[0]
+            tokens[0] = (_OBJ, token)
+
+        def _number(string, location, tokens):
+            try:
+                token = float(tokens[0])
+            except ValueError:
+                token = tokens[0]
+            tokens[0] = (_OBJ, token)
+
+        def _test(string, location, tokens):
+            token = tokens[0]
+            tokens[0] = (_TEST, token)
+
+        def _if(string, location, tokens):
+            token = tokens[0]
+            tokens[0] = (_IF, token)
+
+        def _expr_var(string, location, tokens):
+            token = tokens[0]
+            tokens[0] = (_VALUE, token)
+
+        def _expr_test(string, location, tokens):
+            token = tokens[0]
+            tokens[0] = (_TEST, token)
+
+        def _expr_list_test(string, location, tokens):
+            token = tokens[0]
+            tokens[0] = (_LIST_TEST, token)
+
+        white_space = pp.White().suppress()
+        end = pp.StringEnd()
+        operator = (pp.Literal(_EQUAL) | pp.Literal(_NOT_EQUAL)).setParseAction(_test)
+        begin_if = pp.CaselessLiteral(_IF, ).setParseAction(_if)
+        obj = pp.Word(pp.printables).setParseAction(_object)
+        integer = pp.Word('0123456789-').setParseAction(_integer)
+        number = pp.Word('0123456789-.').setParseAction(_number)
+        item = integer | number | obj
+        expr_var = pp.Group(obj + pp.Optional(white_space) + end).setParseAction(_expr_var)
+        expr_test = pp.Group(obj + white_space + begin_if + white_space + item + white_space + operator + white_space + item).setParseAction(_expr_test)
+        expr_list_test = pp.Group(begin_if + white_space + item + white_space + operator + white_space + item).setParseAction(_expr_list_test)
+        expr = pp.Optional(white_space) + (expr_test | expr_var | expr_list_test)
+        return expr
+
+    _parser = _get_parser()
+
+    def __init__(self, item, delimiter):
+        self.type = Item.INV_QUERY
+        self._delimiter = delimiter
+        self._expr_type = None
+        self._refs = []
+        self._expr = []
+        self._parse_expression(item.render(None, None))
+
+    def _parse_expression(self, expr):
+        try:
+            tokens = InvItem._parser.parseString(expr).asList()
+        except pp.ParseException as e:
+            raise ParseError(e.msg, e.line, e.col, e.lineno)
+
+        if len(tokens) == 1:
+            self._expr_type = tokens[0][0]
+            self._expr = list(tokens[0][1])
+        else:
+            raise ExpressionError('Failed to parse %s' % str(expr))
+
+        if self._expr_type == _TEST:
+            export, parameter, value = self._get_vars(self._expr[2][1], None, None, None)
+            export, parameter, value = self._get_vars(self._expr[4][1], export, parameter, value)
+            if parameter is not None:
+                path = parameter
+                path.drop_first()
+                self._refs.append(str(path))
+        elif self._expr_type == _LIST_TEST:
+            export, parameter, value = self._get_vars(self._expr[1][1], None, None, None)
+            export, parameter, value = self._get_vars(self._expr[3][1], export, parameter, value)
+            if parameter is not None:
+                path = parameter
+                path.drop_first()
+                self._refs.append(str(path))
+
+    def assembleRefs(self, context):
+        return
+
+    def contents(self):
+        return self._expr
+
+    def has_inv_query(self):
+        return True
+
+    def has_references(self):
+        return len(self._refs) > 0
+
+    def get_references(self):
+        return self._refs
+
+    def _resolve(self, path, dictionary):
+        try:
+            return path.get_value(dictionary)
+        except KeyError as e:
+            raise UndefinedVariableError(str(path))
+
+    def _value_expression(self, inventory):
+        results = {}
+        path = DictPath(self._delimiter, self._expr[0][1]).drop_first()
+        for node, items in inventory.iteritems():
+            if path.exists_in(items):
+                results[node] = copy.deepcopy(self._resolve(path, items))
+        return results
+
+    def _test_expression(self, context, inventory):
+        export_path = None
+        parameter_path = None
+        parameter_value = None
+        test = None
+        value_path = DictPath(self._delimiter, self._expr[0][1])
+
+        if self._expr[3][1] == _EQUAL:
+            test = _EQUAL
+        elif self._expr[3][1] == _NOT_EQUAL:
+            test = _NOT_EQUAL
+
+        export_path, parameter_path, parameter_value = self._get_vars(self._expr[2][1], export_path, parameter_path, parameter_value)
+        export_path, parameter_path, parameter_value = self._get_vars(self._expr[4][1], export_path, parameter_path, parameter_value)
+
+        if parameter_path is not None:
+            parameter_path.drop_first()
+            parameter_value = self._resolve(parameter_path, context)
+
+        if export_path is None or parameter_value is None or test is None or value_path is None:
+            raise ExpressionError('Failed to render %s' % str(self))
+
+        export_path.drop_first()
+        value_path.drop_first()
+
+        results = {}
+        for node, items in inventory.iteritems():
+            if export_path.exists_in(items):
+                export_value = self._resolve(export_path, items)
+                test_passed = False
+                if test == _EQUAL and export_value == parameter_value:
+                    test_passed = True
+                elif test == _NOT_EQUAL and export_value != parameter_value:
+                    test_passed = True
+                if test_passed:
+                    results[node] = copy.deepcopy(self._resolve(value_path, items))
+        return results
+
+    def _list_test_expression(self, context, inventory):
+        export_path = None
+        parameter_path = None
+        parameter_value = None
+        test = None
+
+        if self._expr[2][1] == _EQUAL:
+            test = _EQUAL
+        elif self._expr[2][1] == _NOT_EQUAL:
+            test = _NOT_EQUAL
+
+        export_path, parameter_path, parameter_value = self._get_vars(self._expr[1][1], export_path, parameter_path, parameter_value)
+        export_path, parameter_path, parameter_value = self._get_vars(self._expr[3][1], export_path, parameter_path, parameter_value)
+
+        if parameter_path is not None:
+            parameter_path.drop_first()
+            parameter_value = self._resolve(parameter_path, context)
+
+        if export_path is None or parameter_value is None or test is None:
+            raise ExpressionError('Failed to render %s' % str(self))
+
+        export_path.drop_first()
+
+        results = []
+        for node, items in inventory.iteritems():
+            if export_path.exists_in(items):
+                export_value = self._resolve(export_path, items)
+                test_passed = False
+                if test == _EQUAL and export_value == parameter_value:
+                    test_passed = True
+                elif test == _NOT_EQUAL and export_value != parameter_value:
+                    test_passed = True
+                if test_passed:
+                    results.append(node)
+        return results
+
+    def _get_vars(self, var, export, parameter, value):
+        if isinstance(var, str):
+            path = DictPath(self._delimiter, var)
+            if path.path[0].lower() == 'exports':
+                export = path
+            elif path.path[0].lower() == 'self':
+                parameter = path
+            else:
+                value = var
+        else:
+            value = var
+        return export, parameter, value
+
+    def render(self, context, inventory):
+        if self._expr_type == _VALUE:
+            return self._value_expression(inventory)
+        elif self._expr_type == _TEST:
+            return self._test_expression(context, inventory)
+        elif self._expr_type == _LIST_TEST:
+            return self._list_test_expression(context, inventory)
+        raise ExpressionError('Failed to render %s' % str(self))
+
+    def __str__(self):
+        return ' '.join(str(j) for i,j in self._expr)
+
+    def __repr__(self):
+        return 'InvItem(%r)' % self._expr
diff --git a/reclass/values/item.py b/reclass/values/item.py
new file mode 100644
index 0000000..1d29ab1
--- /dev/null
+++ b/reclass/values/item.py
@@ -0,0 +1,44 @@
+#
+# -*- coding: utf-8 -*-
+#
+# This file is part of reclass
+#
+
+from reclass.utils.dictpath import DictPath
+from reclass.errors import UndefinedVariableError
+
+class Item(object):
+
+    COMPOSITE = 1
+    DICTIONARY = 2
+    INV_QUERY = 3
+    LIST = 4
+    REFERENCE = 5
+    SCALAR = 6
+
+    def allRefs(self):
+        return True
+
+    def has_references(self):
+        return False
+
+    def has_inv_query(self):
+        return False
+
+    def is_container(self):
+        return False
+
+    def is_complex(self):
+        return (self.has_references() | self.has_inv_query())
+
+    def contents(self):
+        msg = "Item class {0} does not implement contents()"
+        raise NotImplementedError(msg.format(self.__class__.__name__))
+
+    def merge_over(self, item, options):
+        msg = "Item class {0} does not implement merge_over()"
+        raise NotImplementedError(msg.format(self.__class__.__name__))
+
+    def render(self, context, exports):
+        msg = "Item class {0} does not implement render()"
+        raise NotImplementedError(msg.format(self.__class__.__name__))
diff --git a/reclass/values/listitem.py b/reclass/values/listitem.py
new file mode 100644
index 0000000..ede8251
--- /dev/null
+++ b/reclass/values/listitem.py
@@ -0,0 +1,39 @@
+#
+# -*- coding: utf-8 -*-
+#
+# This file is part of reclass
+#
+
+from item import Item
+
+class ListItem(Item):
+
+    def __init__(self, item):
+        self.type = Item.LIST
+        self._list = item
+
+    def contents(self):
+        return self._list
+
+    def is_container(self):
+        return True
+
+    def render(self, context, inventory):
+        return self._list
+
+    def merge_over(self, item, options):
+        if item.type == Item.LIST:
+            item._list.extend(self._list)
+            return item
+        elif item.type == Item.SCALAR:
+            if item.contents() is None:
+                return self
+            elif options.allow_list_over_scalar:
+                self._list.insert(0, item.contents())
+                return self
+            else:
+                raise TypeError('allow list over scalar = False: cannot merge %s onto %s' % (repr(self), repr(item)))
+        raise TypeError('Cannot merge %s over %s' % (repr(self), repr(item)))
+
+    def __repr__(self):
+        return 'ListItem(%r)' % (self._list)
diff --git a/reclass/values/mergeoptions.py b/reclass/values/mergeoptions.py
new file mode 100644
index 0000000..c5a7e59
--- /dev/null
+++ b/reclass/values/mergeoptions.py
@@ -0,0 +1,8 @@
+from reclass.defaults import *
+
+class MergeOptions(object):
+    def __init__ (self):
+        self.allow_scalar_over_dict = MERGE_ALLOW_SCALAR_OVER_DICT
+        self.allow_scalar_over_list = MERGE_ALLOW_SCALAR_OVER_LIST
+        self.allow_list_over_scalar = MERGE_ALLOW_LIST_OVER_SCALAR
+        self.allow_dict_over_scalar = MERGE_ALLOW_DICT_OVER_SCALAR
diff --git a/reclass/values/parser.py b/reclass/values/parser.py
new file mode 100644
index 0000000..c15c8d1
--- /dev/null
+++ b/reclass/values/parser.py
@@ -0,0 +1,153 @@
+#
+# -*- coding: utf-8 -*-
+#
+# This file is part of reclass
+#
+
+import pyparsing as pp
+
+from compitem import CompItem
+from invitem import InvItem
+from refitem import RefItem
+from scaitem import ScaItem
+
+from reclass.defaults import ESCAPE_CHARACTER, REFERENCE_SENTINELS, EXPORT_SENTINELS
+from reclass.errors import ParseError
+
+_STR = 1
+_REF = 2
+_INV = 3
+
+_ESCAPE = ESCAPE_CHARACTER
+_DOUBLE_ESCAPE = _ESCAPE + _ESCAPE
+
+_REF_OPEN = REFERENCE_SENTINELS[0]
+_REF_CLOSE = REFERENCE_SENTINELS[1]
+_REF_CLOSE_FIRST = _REF_CLOSE[0]
+_REF_ESCAPE_OPEN = _ESCAPE + _REF_OPEN
+_REF_ESCAPE_CLOSE = _ESCAPE + _REF_CLOSE
+_REF_DOUBLE_ESCAPE_OPEN = _DOUBLE_ESCAPE + _REF_OPEN
+_REF_DOUBLE_ESCAPE_CLOSE = _DOUBLE_ESCAPE + _REF_CLOSE
+_REF_EXCLUDES = _ESCAPE + _REF_OPEN + _REF_CLOSE
+
+_INV_OPEN = EXPORT_SENTINELS[0]
+_INV_CLOSE = EXPORT_SENTINELS[1]
+_INV_CLOSE_FIRST = _INV_CLOSE[0]
+_INV_ESCAPE_OPEN = _ESCAPE + _INV_OPEN
+_INV_ESCAPE_CLOSE = _ESCAPE + _INV_CLOSE
+_INV_DOUBLE_ESCAPE_OPEN = _DOUBLE_ESCAPE + _INV_OPEN
+_INV_DOUBLE_ESCAPE_CLOSE = _DOUBLE_ESCAPE + _INV_CLOSE
+_INV_EXCLUDES = _ESCAPE + _INV_OPEN + _INV_CLOSE
+
+_EXCLUDES = _ESCAPE + _REF_OPEN + _REF_CLOSE + _INV_OPEN + _INV_CLOSE
+
+def _string(string, location, tokens):
+    token = tokens[0]
+    tokens[0] = (_STR, token)
+
+def _reference(string, location, tokens):
+    token = list(tokens[0])
+    tokens[0] = (_REF, token)
+
+def _invquery(string, location, tokens):
+    token = list(tokens[0])
+    tokens[0] = (_INV, token)
+
+def _get_parser():
+    double_escape = pp.Combine(pp.Literal(_DOUBLE_ESCAPE) + pp.MatchFirst([pp.FollowedBy(_REF_OPEN), pp.FollowedBy(_REF_CLOSE),
+                               pp.FollowedBy(_INV_OPEN), pp.FollowedBy(_INV_CLOSE)])).setParseAction(pp.replaceWith(_ESCAPE))
+
+    ref_open = pp.Literal(_REF_OPEN).suppress()
+    ref_close = pp.Literal(_REF_CLOSE).suppress()
+    ref_not_open = ~pp.Literal(_REF_OPEN) + ~pp.Literal(_REF_ESCAPE_OPEN) + ~pp.Literal(_REF_DOUBLE_ESCAPE_OPEN)
+    ref_not_close = ~pp.Literal(_REF_CLOSE) + ~pp.Literal(_REF_ESCAPE_CLOSE) + ~pp.Literal(_REF_DOUBLE_ESCAPE_CLOSE)
+    ref_escape_open = pp.Literal(_REF_ESCAPE_OPEN).setParseAction(pp.replaceWith(_REF_OPEN))
+    ref_escape_close = pp.Literal(_REF_ESCAPE_CLOSE).setParseAction(pp.replaceWith(_REF_CLOSE))
+    ref_text = pp.CharsNotIn(_REF_EXCLUDES) | pp.CharsNotIn(_REF_CLOSE_FIRST, exact=1)
+    ref_content = pp.Combine(pp.OneOrMore(ref_not_open + ref_not_close + ref_text))
+    ref_string = pp.MatchFirst([double_escape, ref_escape_open, ref_escape_close, ref_content]).setParseAction(_string)
+    ref_item = pp.Forward()
+    ref_items = pp.OneOrMore(ref_item)
+    reference = (ref_open + pp.Group(ref_items) + ref_close).setParseAction(_reference)
+    ref_item << (reference | ref_string)
+
+    inv_open = pp.Literal(_INV_OPEN).suppress()
+    inv_close = pp.Literal(_INV_CLOSE).suppress()
+    inv_not_open = ~pp.Literal(_INV_OPEN) + ~pp.Literal(_INV_ESCAPE_OPEN) + ~pp.Literal(_INV_DOUBLE_ESCAPE_OPEN)
+    inv_not_close = ~pp.Literal(_INV_CLOSE) + ~pp.Literal(_INV_ESCAPE_CLOSE) + ~pp.Literal(_INV_DOUBLE_ESCAPE_CLOSE)
+    inv_escape_open = pp.Literal(_INV_ESCAPE_OPEN).setParseAction(pp.replaceWith(_INV_OPEN))
+    inv_escape_close = pp.Literal(_INV_ESCAPE_CLOSE).setParseAction(pp.replaceWith(_INV_CLOSE))
+    inv_text = pp.CharsNotIn(_INV_CLOSE_FIRST)
+    inv_content = pp.Combine(pp.OneOrMore(inv_not_close + inv_text))
+    inv_string = pp.MatchFirst([double_escape, inv_escape_open, inv_escape_close, inv_content]).setParseAction(_string)
+    inv_items = pp.OneOrMore(inv_string)
+    export = (inv_open + pp.Group(inv_items) + inv_close).setParseAction(_invquery)
+
+    text = pp.CharsNotIn(_EXCLUDES) | pp.CharsNotIn('', exact=1)
+    content = pp.Combine(pp.OneOrMore(ref_not_open + inv_not_open + text))
+    string = pp.MatchFirst([double_escape, ref_escape_open, inv_escape_open, content]).setParseAction(_string)
+
+    item = reference | export | string
+    line = pp.OneOrMore(item) + pp.StringEnd()
+    return line
+
+def _get_simple_ref_parser():
+    string = pp.CharsNotIn(_EXCLUDES).setParseAction(_string)
+    ref_open = pp.Literal(_REF_OPEN).suppress()
+    ref_close = pp.Literal(_REF_CLOSE).suppress()
+    reference = (ref_open + pp.Group(string) + ref_close).setParseAction(_reference)
+    line = pp.StringStart() + pp.Optional(string) + reference + pp.Optional(string) + pp.StringEnd()
+    return line
+
+
+class Parser(object):
+
+    _parser = _get_parser()
+    _simple_ref_parser = _get_simple_ref_parser()
+
+    def parse(self, value, delimiter):
+        self._delimiter = delimiter
+        dollars = value.count('$')
+        if dollars == 0:
+            # speed up: only use pyparsing if there is a $ in the string
+            return ScaItem(value)
+        elif dollars == 1:
+            # speed up: try a simple reference
+            try:
+                tokens = self._simple_ref_parser.leaveWhitespace().parseString(value).asList()
+            except pp.ParseException as e:
+                # fall back on the full parser
+                try:
+                    tokens = self._parser.leaveWhitespace().parseString(value).asList()
+                except pp.ParseException as e:
+                    raise ParseError(e.msg, e.line, e.col, e.lineno)
+        else:
+            # use the full parser
+            try:
+                tokens = self._parser.leaveWhitespace().parseString(value).asList()
+            except pp.ParseException as e:
+                raise ParseError(e.msg, e.line, e.col, e.lineno)
+
+        items = self._create_items(tokens)
+        if len(items) == 1:
+            return items[0]
+        else:
+            return CompItem(items)
+
+    _create_dict = { _STR: (lambda s, v: ScaItem(v)),
+                     _REF: (lambda s, v: s._create_ref(v)),
+                     _INV: (lambda s, v: s._create_inv(v)) }
+
+    def _create_items(self, tokens):
+        return [ self._create_dict[t](self, v) for t, v in tokens ]
+
+    def _create_ref(self, tokens):
+        items = [ self._create_dict[t](self, v) for t, v in tokens ]
+        return RefItem(items, self._delimiter)
+
+    def _create_inv(self, tokens):
+        items = [ ScaItem(v) for t, v in tokens ]
+        if len(items) == 1:
+            return InvItem(items[0], self._delimiter)
+        else:
+            return InvItem(CompItem(items), self._delimiter)
diff --git a/reclass/values/refitem.py b/reclass/values/refitem.py
new file mode 100644
index 0000000..b97780a
--- /dev/null
+++ b/reclass/values/refitem.py
@@ -0,0 +1,63 @@
+#
+# -*- coding: utf-8 -*-
+#
+# This file is part of reclass
+#
+
+from item import Item
+from reclass.utils.dictpath import DictPath
+from reclass.errors import UndefinedVariableError
+
+class RefItem(Item):
+
+    def __init__(self, items, delimiter):
+        self.type = Item.REFERENCE
+        self._delimiter = delimiter
+        self._items = items
+        self._refs = []
+        self._allRefs = False
+        self.assembleRefs()
+
+    def assembleRefs(self, context={}):
+        self._refs = []
+        self._allRefs = True
+        for item in self._items:
+            if item.has_references():
+                item.assembleRefs(context)
+                self._refs.extend(item.get_references())
+                if item.allRefs() == False:
+                    self._allRefs = False
+        try:
+            strings = [ str(i.render(context, None)) for i in self._items ]
+            value = "".join(strings)
+            self._refs.append(value)
+        except UndefinedVariableError as e:
+            self._allRefs = False
+
+    def contents(self):
+        return self._items
+
+    def allRefs(self):
+        return self._allRefs
+
+    def has_references(self):
+        return len(self._refs) > 0
+
+    def get_references(self):
+        return self._refs
+
+    def _resolve(self, ref, context):
+        path = DictPath(self._delimiter, ref)
+        try:
+            return path.get_value(context)
+        except KeyError as e:
+            raise UndefinedVariableError(ref)
+
+    def render(self, context, inventory):
+        if len(self._items) == 1:
+            return self._resolve(self._items[0].render(context, inventory), context)
+        strings = [ str(i.render(context, inventory)) for i in self._items ]
+        return self._resolve("".join(strings), context)
+
+    def __repr__(self):
+        return 'RefItem(%r)' % self._items
diff --git a/reclass/values/scaitem.py b/reclass/values/scaitem.py
new file mode 100644
index 0000000..151e123
--- /dev/null
+++ b/reclass/values/scaitem.py
@@ -0,0 +1,37 @@
+#
+# -*- coding: utf-8 -*-
+#
+# This file is part of reclass
+#
+
+from item import Item
+
+class ScaItem(Item):
+
+    def __init__(self, value):
+        self.type = Item.SCALAR
+        self._value = value
+
+    def contents(self):
+        return self._value
+
+    def merge_over(self, item, options):
+        if item.type == Item.SCALAR:
+            return self
+        elif item.type == Item.LIST:
+            if options.allow_scalar_over_list:
+                return self
+            else:
+                raise TypeError('allow scalar over list = False: cannot merge %s over %s' % (repr(self), repr(item)))
+        elif item.type == Item.DICTIONARY:
+            if options.allow_scalar_over_dict:
+                return self
+            else:
+                raise TypeError('allow scalar over dict = False: cannot merge %s over %s' % (repr(self), repr(item)))
+        raise TypeError('Cannot merge %s over %s' % (repr(self), repr(item)))
+
+    def render(self, context, inventory):
+        return self._value
+
+    def __repr__(self):
+        return 'ScaItem({0!r})'.format(self._value)
diff --git a/reclass/values/tests/__init__.py b/reclass/values/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/reclass/values/tests/__init__.py
diff --git a/reclass/utils/tests/test_refvalue.py b/reclass/values/tests/test_value.py
similarity index 75%
rename from reclass/utils/tests/test_refvalue.py
rename to reclass/values/tests/test_value.py
index 23d7e7b..8211dcd 100644
--- a/reclass/utils/tests/test_refvalue.py
+++ b/reclass/values/tests/test_value.py
@@ -7,16 +7,18 @@
 # Released under the terms of the Artistic Licence 2.0
 #
 
-from reclass.utils.refvalue import RefValue
-from reclass.defaults import PARAMETER_INTERPOLATION_SENTINELS, \
+import pyparsing as pp
+
+from reclass.values.value import Value
+from reclass.defaults import REFERENCE_SENTINELS, \
         PARAMETER_INTERPOLATION_DELIMITER
 from reclass.errors import UndefinedVariableError, \
-        IncompleteInterpolationError
+        IncompleteInterpolationError, ParseError
 import unittest
 
 def _var(s):
-    return '%s%s%s' % (PARAMETER_INTERPOLATION_SENTINELS[0], s,
-                       PARAMETER_INTERPOLATION_SENTINELS[1])
+    return '%s%s%s' % (REFERENCE_SENTINELS[0], s,
+                       REFERENCE_SENTINELS[1])
 
 CONTEXT = {'favcolour':'yellow',
            'motd':{'greeting':'Servus!',
@@ -31,18 +33,18 @@
 def _poor_mans_template(s, var, value):
     return s.replace(_var(var), value)
 
-class TestRefValue(unittest.TestCase):
+class TestValue(unittest.TestCase):
 
     def test_simple_string(self):
         s = 'my cat likes to hide in boxes'
-        tv = RefValue(s)
+        tv = Value(s)
         self.assertFalse(tv.has_references())
-        self.assertEquals(tv.render(CONTEXT), s)
+        self.assertEquals(tv.render(CONTEXT, None), s)
 
     def _test_solo_ref(self, key):
         s = _var(key)
-        tv = RefValue(s)
-        res = tv.render(CONTEXT)
+        tv = Value(s)
+        res = tv.render(CONTEXT, None)
         self.assertTrue(tv.has_references())
         self.assertEqual(res, CONTEXT[key])
 
@@ -63,65 +65,65 @@
 
     def test_single_subst_bothends(self):
         s = 'I like ' + _var('favcolour') + ' and I like it'
-        tv = RefValue(s)
+        tv = Value(s)
         self.assertTrue(tv.has_references())
-        self.assertEqual(tv.render(CONTEXT),
+        self.assertEqual(tv.render(CONTEXT, None),
                          _poor_mans_template(s, 'favcolour',
                                              CONTEXT['favcolour']))
 
     def test_single_subst_start(self):
         s = _var('favcolour') + ' is my favourite colour'
-        tv = RefValue(s)
+        tv = Value(s)
         self.assertTrue(tv.has_references())
-        self.assertEqual(tv.render(CONTEXT),
+        self.assertEqual(tv.render(CONTEXT, None),
                          _poor_mans_template(s, 'favcolour',
                                              CONTEXT['favcolour']))
 
     def test_single_subst_end(self):
         s = 'I like ' + _var('favcolour')
-        tv = RefValue(s)
+        tv = Value(s)
         self.assertTrue(tv.has_references())
-        self.assertEqual(tv.render(CONTEXT),
+        self.assertEqual(tv.render(CONTEXT, None),
                          _poor_mans_template(s, 'favcolour',
                                              CONTEXT['favcolour']))
 
     def test_deep_subst_solo(self):
         var = PARAMETER_INTERPOLATION_DELIMITER.join(('motd', 'greeting'))
         s = _var(var)
-        tv = RefValue(s)
+        tv = Value(s)
         self.assertTrue(tv.has_references())
-        self.assertEqual(tv.render(CONTEXT),
+        self.assertEqual(tv.render(CONTEXT, None),
                          _poor_mans_template(s, var,
                                              CONTEXT['motd']['greeting']))
 
     def test_multiple_subst(self):
         greet = PARAMETER_INTERPOLATION_DELIMITER.join(('motd', 'greeting'))
         s = _var(greet) + ' I like ' + _var('favcolour') + '!'
-        tv = RefValue(s)
+        tv = Value(s)
         self.assertTrue(tv.has_references())
         want = _poor_mans_template(s, greet, CONTEXT['motd']['greeting'])
         want = _poor_mans_template(want, 'favcolour', CONTEXT['favcolour'])
-        self.assertEqual(tv.render(CONTEXT), want)
+        self.assertEqual(tv.render(CONTEXT, None), want)
 
     def test_multiple_subst_flush(self):
         greet = PARAMETER_INTERPOLATION_DELIMITER.join(('motd', 'greeting'))
         s = _var(greet) + ' I like ' + _var('favcolour')
-        tv = RefValue(s)
+        tv = Value(s)
         self.assertTrue(tv.has_references())
         want = _poor_mans_template(s, greet, CONTEXT['motd']['greeting'])
         want = _poor_mans_template(want, 'favcolour', CONTEXT['favcolour'])
-        self.assertEqual(tv.render(CONTEXT), want)
+        self.assertEqual(tv.render(CONTEXT, None), want)
 
     def test_undefined_variable(self):
         s = _var('no_such_variable')
-        tv = RefValue(s)
+        tv = Value(s)
         with self.assertRaises(UndefinedVariableError):
-            tv.render(CONTEXT)
+            tv.render(CONTEXT, None)
 
     def test_incomplete_variable(self):
-        s = PARAMETER_INTERPOLATION_SENTINELS[0] + 'incomplete'
-        with self.assertRaises(IncompleteInterpolationError):
-            tv = RefValue(s)
+        s = REFERENCE_SENTINELS[0] + 'incomplete'
+        with self.assertRaises(ParseError):
+            tv = Value(s)
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/reclass/values/value.py b/reclass/values/value.py
new file mode 100644
index 0000000..355aab2
--- /dev/null
+++ b/reclass/values/value.py
@@ -0,0 +1,61 @@
+#
+# -*- coding: utf-8 -*-
+#
+# This file is part of reclass
+#
+
+from parser import Parser
+from dictitem import DictItem
+from listitem import ListItem
+from scaitem import ScaItem
+from reclass.defaults import PARAMETER_INTERPOLATION_DELIMITER
+
+class Value(object):
+
+    _parser = Parser()
+
+    def __init__(self, value, delimiter=PARAMETER_INTERPOLATION_DELIMITER):
+        self._delimiter = delimiter
+        if isinstance(value, str):
+            self._item = self._parser.parse(value, delimiter)
+        elif isinstance(value, list):
+            self._item = ListItem(value)
+        elif isinstance(value, dict):
+            self._item = DictItem(value)
+        else:
+            self._item = ScaItem(value)
+
+    def is_container(self):
+        return self._item.is_container()
+
+    def allRefs(self):
+        return self._item.allRefs()
+
+    def has_references(self):
+        return self._item.has_references()
+
+    def has_inv_query(self):
+        return self._item.has_inv_query()
+
+    def is_complex(self):
+        return self._item.is_complex()
+
+    def get_references(self):
+        return self._item.get_references()
+
+    def assembleRefs(self, context):
+        if self._item.has_references():
+            self._item.assembleRefs(context)
+
+    def render(self, context, inventory, options=None):
+        return self._item.render(context, inventory)
+
+    def contents(self):
+        return self._item.contents()
+
+    def merge_over(self, value, options):
+        self._item = self._item.merge_over(value._item, options)
+        return self
+
+    def __repr__(self):
+        return 'Value(%r)' % self._item
diff --git a/reclass/values/valuelist.py b/reclass/values/valuelist.py
new file mode 100644
index 0000000..38f782c
--- /dev/null
+++ b/reclass/values/valuelist.py
@@ -0,0 +1,101 @@
+#
+# -*- coding: utf-8 -*-
+#
+# This file is part of reclass
+#
+
+import copy
+
+class ValueList(object):
+
+    def __init__(self, value):
+        self._refs = []
+        self._allRefs = True
+        self._values = [ value ]
+        self._has_inv_query = False
+        self._update()
+
+    def append(self, value):
+        self._values.append(value)
+        self._update()
+
+    def extend(self, values):
+        self._values.extend(values._values)
+        self._update()
+
+    def _update(self):
+        self._has_inv_query = False
+        self.assembleRefs()
+        self._check_for_inv_query()
+
+    def has_references(self):
+        return len(self._refs) > 0
+
+    def has_inv_query(self):
+        return self._has_inv_query
+
+    def is_complex(self):
+        return (self.has_references() | self.has_inv_query())
+
+    def get_references(self):
+        return self._refs
+
+    def allRefs(self):
+        return self._allRefs
+
+    def _check_for_inv_query(self):
+        self._has_inv_query = False
+        for value in self._values:
+            if value.has_inv_query():
+                self._has_inv_query = True
+
+    def assembleRefs(self, context={}):
+        self._refs = []
+        self._allRefs = True
+        for value in self._values:
+            value.assembleRefs(context)
+            if value.has_references():
+                self._refs.extend(value.get_references())
+            if value.allRefs() is False:
+                self._allRefs = False
+
+    def merge(self, options):
+        output = None
+        for n, value in enumerate(self._values):
+            if output is None:
+                output = value
+            else:
+                output = value.merge_over(output, options)
+        return output
+
+    def render(self, context, inventory, options):
+        from reclass.datatypes.parameters import Parameters
+
+        output = None
+        deepCopied = False
+        for n, value in enumerate(self._values):
+            if output is None:
+                output = self._values[n].render(context, inventory)
+                deepCopied = False
+            else:
+                new = value.render(context, inventory)
+                if isinstance(output, dict) and isinstance(new, dict):
+                    p1 = Parameters(output, value._delimiter)
+                    p2 = Parameters(new, value._delimiter)
+                    p1.merge(p2)
+                    output = p1.as_dict()
+                    continue
+                elif isinstance(output, list) and isinstance(new, list):
+                    if not deepCopied:
+                        output = copy.deepcopy(output)
+                        deepCopied = True
+                    output.extend(new)
+                    continue
+                elif isinstance(output, (dict, list)) or isinstance(new, (dict, list)):
+                    raise TypeError('Cannot merge %s over %s' % (repr(self._values[n]), repr(self._values[n-1])))
+                else:
+                    output = new
+        return output
+
+    def __repr__(self):
+        return 'ValueList(%r)' % self._values
diff --git a/reclass/version.py b/reclass/version.py
index a2aa99a..fb3e039 100644
--- a/reclass/version.py
+++ b/reclass/version.py
@@ -7,12 +7,12 @@
 # Released under the terms of the Artistic Licence 2.0
 #
 RECLASS_NAME = 'reclass'
-DESCRIPTION = 'merge data by recursive descent down an ancestry hierarchy'
-VERSION = '1.4.1'
-AUTHOR = 'martin f. krafft'
-AUTHOR_EMAIL = 'reclass@pobox.madduck.net'
-MAINTAINER = 'Jason Ritzke (@Rtzq0)'
-MAINTAINER_EMAIL = 'jasonritzke@4loopz.com'
-COPYRIGHT = 'Copyright © 2007–14 ' + AUTHOR
+DESCRIPTION = 'merge data by recursive descent down an ancestry hierarchy (forked extended version)'
+VERSION = '1.5'
+AUTHOR = 'martin f. krafft / Andrew Pickford'
+AUTHOR_EMAIL = 'andrewp@nikhef.nl'
+MAINTAINER = 'Andrew Pickford'
+MAINTAINER_EMAIL = ''
+COPYRIGHT = 'Copyright © 2007–14 martin f. krafft, extensions © 2017 Andrew Pickford'
 LICENCE = 'Artistic Licence 2.0'
-URL = 'https://github.com/madduck/reclass'
+URL = 'https://github.com/AndrewPickford/reclass'
diff --git a/requirements.txt b/requirements.txt
index c3726e8..ea72e95 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1 +1,3 @@
+pyparsing
 pyyaml
+pygit2
diff --git a/setup.cfg b/setup.cfg
index d645be7..2f5e543 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -3,3 +3,6 @@
 # 3. If at all possible, it is good practice to do this. If you cannot, you
 # will need to generate wheels for each Python version that you support.
 universal=0
+
+[install]
+prefix: /usr