Initial commit

add first helper: reclass-dump-params
diff --git a/reclass_tools/__init__.py b/reclass_tools/__init__.py
new file mode 100644
index 0000000..0418604
--- /dev/null
+++ b/reclass_tools/__init__.py
@@ -0,0 +1,38 @@
+import os
+import time
+import logging.config
+
+
+LOGGER_SETTINGS = {
+    'version': 1,
+    'disable_existing_loggers': False,
+    'loggers': {
+        'reclass_tools': {
+            'level': 'DEBUG',
+            'handlers': ['console_output'],
+        },
+        'paramiko': {'level': 'WARNING'},
+        'iso8601': {'level': 'WARNING'},
+        'keystoneauth': {'level': 'WARNING'},
+    },
+    'handlers': {
+        'console_output': {
+            'class': 'logging.StreamHandler',
+            'level': 'INFO',
+            'formatter': 'default',
+            'stream': 'ext://sys.stdout',
+        },
+    },
+    'formatters': {
+        'default': {
+            'format': '%(asctime)s - %(levelname)s - %(filename)s:'
+                      '%(lineno)d -- %(message)s',
+            'datefmt': '%Y-%m-%d %H:%M:%S',
+        },
+    },
+}
+
+logging.config.dictConfig(LOGGER_SETTINGS)
+# set logging timezone to GMT
+logging.Formatter.converter = time.gmtime
+logger = logging.getLogger(__name__)
diff --git a/reclass_tools/cli.py b/reclass_tools/cli.py
new file mode 100644
index 0000000..b9ad985
--- /dev/null
+++ b/reclass_tools/cli.py
@@ -0,0 +1,56 @@
+#    Copyright 2013 - 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+from __future__ import print_function
+
+import argparse
+import os
+import sys
+import yaml
+
+from reclass_tools import walk_models
+
+
+def execute(params):
+
+    results = walk_models.get_all_reclass_params(
+        params.paths,
+        identity_files=params.identity_files,
+        verbose=params.verbose)
+
+    print(yaml.dump(results))
+
+
+def dump_params(args=None):
+    if args is None:
+        args = sys.argv[1:]
+
+    parser = argparse.ArgumentParser(
+        formatter_class=argparse.RawTextHelpFormatter,
+        description="")
+    parser.add_argument('-i', dest='identity_files',
+                        help=('For SSH connections, selects a file from which \n'
+                              'the identity (private key) for public key \n'
+                              'authentication is read. It is possible to have \n'
+                              'multiple -i options.'),
+                        action='append')
+    parser.add_argument('--verbose', dest='verbose', action='store_const',
+                        const=True, default=False,
+                        help='Show verbose output.')
+    parser.add_argument('paths', help='Paths to search for *.yml files.', nargs='+')
+
+    if len(args) == 0:
+        args = ['-h']
+
+    params = parser.parse_args(args)
+    execute(params)
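+
+
+# Usage sketch (illustrative only, not wired into the package):
+# dump_params() can also be driven programmatically; the model path below
+# is a hypothetical example.
+#
+#     from reclass_tools import cli
+#     cli.dump_params(['--verbose', '/srv/salt/reclass/classes'])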
diff --git a/reclass_tools/helpers/__init__.py b/reclass_tools/helpers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/reclass_tools/helpers/__init__.py
diff --git a/reclass_tools/helpers/decorators.py b/reclass_tools/helpers/decorators.py
new file mode 100644
index 0000000..be79ec1
--- /dev/null
+++ b/reclass_tools/helpers/decorators.py
@@ -0,0 +1,318 @@
+#    Copyright 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+from __future__ import unicode_literals
+
+import collections
+import functools
+import inspect
+import logging
+import sys
+import threading
+import time
+
+import six
+
+from reclass_tools import logger
+
+
+def threaded(name=None, started=False, daemon=False):
+    """Make function or method threaded with passing arguments
+
+    If decorator added not as function, name is generated from function name.
+
+    :type name: str
+    :type started: bool
+    :type daemon: bool
+    """
+
+    def real_decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            """Thread generator for function
+
+            :rtype: Thread
+            """
+            if name is None:
+                func_name = 'Threaded {}'.format(func.__name__)
+            else:
+                func_name = name
+            thread = threading.Thread(
+                target=func,
+                name=func_name,
+                args=args,
+                kwargs=kwargs)
+            if daemon:
+                thread.daemon = True
+            if started:
+                thread.start()
+            return thread
+        return wrapper
+
+    if name is not None and callable(name):
+        func, name = name, None
+        return real_decorator(func)
+
+    return real_decorator
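+
+# Usage sketch (illustrative only; the function and queue names below are
+# hypothetical):
+#
+#     @threaded(started=True, daemon=True)
+#     def background_poll(queue):
+#         ...  # body runs in a daemon thread
+#
+#     thread = background_poll(some_queue)  # returns the started Thread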
+
+
+def retry(exception, count=10, delay=1):
+    """Retry decorator
+
+    Retries the decorated function with the same parameters when
+    :exception: is raised.
+
+    :type exception: class
+    :param exception: exception class
+    :type count: int
+    :param count: retry count
+    :type delay: int
+    :param delay: delay between retries in seconds
+    :rtype: function
+    """
+    def decorator(func):
+        if inspect.ismethod(func):
+            full_name = '{}:{}.{}'.format(
+                inspect.getmodule(func.im_class).__name__,
+                func.im_class.__name__,
+                func.__name__)
+        elif inspect.isfunction(func):
+            full_name = '{}.{}'.format(
+                inspect.getmodule(func).__name__,
+                func.__name__)
+        else:
+            raise Exception(
+                'Wrong func parameter type {!r}'.format(func))
+
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            i = 0
+            while True:
+                try:
+                    return func(*args, **kwargs)
+                except exception as e:
+                    i += 1
+                    if i >= count:
+                        raise
+
+                    logger.debug(
+                        'Exception {!r} while running {!r}. '
+                        'Waiting {} seconds.'.format(e, func.__name__, delay),
+                        exc_info=True)  # logs traceback
+                    time.sleep(delay)
+
+                    arg_str = ', '.join((
+                        ', '.join(map(repr, args)),
+                        ', '.join('{}={!r}'.format(k, v)
+                                  for k, v in kwargs.items()),
+                    ))
+                    logger.debug('Retrying {}({})'.format(full_name, arg_str))
+
+        return wrapper
+
+    return decorator
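+
+# Usage sketch (illustrative only; the exception type and counts are
+# arbitrary examples):
+#
+#     @retry(IOError, count=3, delay=2)
+#     def fetch_inventory(url):
+#         ...  # re-run up to 3 times, sleeping 2 seconds between attempts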
+
+
+# pylint: disable=no-member
+def get_arg_names(func):
+    """get argument names for function
+
+    :param func: func
+    :return: list of function argnames
+    :rtype: list
+
+    >>> def tst_1():
+    ...     pass
+
+    >>> get_arg_names(tst_1)
+    []
+
+    >>> def tst_2(arg):
+    ...     pass
+
+    >>> get_arg_names(tst_2)
+    ['arg']
+    """
+    # noinspection PyUnresolvedReferences
+    if six.PY2:
+        spec = inspect.getargspec(func=func)
+        args = spec.args[:]
+        if spec.varargs:
+            args.append(spec.varargs)
+        if spec.keywords:
+            args.append(spec.keywords)
+        return args
+    return list(inspect.signature(obj=func).parameters.keys())
+
+
+def _getcallargs(func, *positional, **named):
+    """get real function call arguments without calling function
+
+    :rtype: dict
+    """
+    # noinspection PyUnresolvedReferences
+    if sys.version_info[0:2] < (3, 5):  # apply_defaults is py35 feature
+        orig_args = inspect.getcallargs(func, *positional, **named)
+        # Construct OrderedDict as Py3
+        arguments = collections.OrderedDict(
+            [(key, orig_args[key]) for key in get_arg_names(func)]
+        )
+        return arguments
+    sig = inspect.signature(func).bind(*positional, **named)
+    sig.apply_defaults()  # bind() does not apply default values on its own
+    return sig.arguments
+# pylint:enable=no-member
+
+
+def _simple(item):
+    """Check for nested iterations: True, if not"""
+    return not isinstance(item, (list, set, tuple, dict))
+
+
+_formatters = {
+    'simple': "{spc:<{indent}}{val!r}".format,
+    'text': "{spc:<{indent}}{prefix}'''{string}'''".format,
+    'dict': "\n{spc:<{indent}}{key!r:{size}}: {val},".format,
+    }
+
+
+def pretty_repr(src, indent=0, no_indent_start=False, max_indent=20):
+    """Make human readable repr of object
+
+    :param src: object to process
+    :type src: object
+    :param indent: start indentation; each nested level adds 4
+    :type indent: int
+    :param no_indent_start: do not indent open bracket and simple parameters
+    :type no_indent_start: bool
+    :param max_indent: maximal indent before classic repr() call
+    :type max_indent: int
+    :return: formatted string
+    """
+    if _simple(src) or indent >= max_indent:
+        indent = 0 if no_indent_start else indent
+        if isinstance(src, (six.binary_type, six.text_type)):
+            if isinstance(src, six.binary_type):
+                string = src.decode(
+                    encoding='utf-8',
+                    errors='backslashreplace'
+                )
+                prefix = 'b'
+            else:
+                string = src
+                prefix = 'u'
+            return _formatters['text'](
+                spc='',
+                indent=indent,
+                prefix=prefix,
+                string=string
+            )
+        return _formatters['simple'](
+            spc='',
+            indent=indent,
+            val=src
+        )
+    if isinstance(src, dict):
+        prefix, suffix = '{', '}'
+        result = ''
+        max_len = max(len(repr(key)) for key in src) if src else 0
+        for key, val in src.items():
+            result += _formatters['dict'](
+                spc='',
+                indent=indent + 4,
+                size=max_len,
+                key=key,
+                val=pretty_repr(val, indent + 8, no_indent_start=True)
+            )
+        return (
+            '\n{start:>{indent}}'.format(
+                start=prefix,
+                indent=indent + 1
+            ) +
+            result +
+            '\n{end:>{indent}}'.format(end=suffix, indent=indent + 1)
+        )
+    if isinstance(src, list):
+        prefix, suffix = '[', ']'
+    elif isinstance(src, tuple):
+        prefix, suffix = '(', ')'
+    else:
+        prefix, suffix = '{', '}'
+    result = ''
+    for elem in src:
+        if _simple(elem):
+            result += '\n'
+        result += pretty_repr(elem, indent + 4) + ','
+    return (
+        '\n{start:>{indent}}'.format(
+            start=prefix,
+            indent=indent + 1) +
+        result +
+        '\n{end:>{indent}}'.format(end=suffix, indent=indent + 1)
+    )
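+
+# Usage sketch (illustrative only): pretty_repr() renders nested structures
+# as an indented, multi-line string, which logwrap() below uses to log call
+# arguments and results. The dict contents are made up.
+#
+#     print(pretty_repr({'name': 'ctl01', 'roles': ['infra', 'openstack']}))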
+
+
+def logwrap(log=logger, log_level=logging.DEBUG, exc_level=logging.ERROR):
+    """Log function calls
+
+    :type log: logging.Logger
+    :type log_level: int
+    :type exc_level: int
+    :rtype: callable
+    """
+    def real_decorator(func):
+        @functools.wraps(func)
+        def wrapped(*args, **kwargs):
+            call_args = _getcallargs(func, *args, **kwargs)
+            args_repr = ""
+            if len(call_args) > 0:
+                args_repr = "\n    " + "\n    ".join((
+                    "{key!r}={val},".format(
+                        key=key,
+                        val=pretty_repr(val, indent=8, no_indent_start=True)
+                    )
+                    for key, val in call_args.items())
+                ) + '\n'
+            log.log(
+                level=log_level,
+                msg="Calling: \n{name!r}({arguments})".format(
+                    name=func.__name__,
+                    arguments=args_repr
+                )
+            )
+            try:
+                result = func(*args, **kwargs)
+                log.log(
+                    level=log_level,
+                    msg="Done: {name!r} with result:\n{result}".format(
+                        name=func.__name__,
+                        result=pretty_repr(result))
+                )
+            except BaseException:
+                log.log(
+                    level=exc_level,
+                    msg="Failed: \n{name!r}({arguments})".format(
+                        name=func.__name__,
+                        arguments=args_repr,
+                    ),
+                    exc_info=True
+                )
+                raise
+            return result
+        return wrapped
+
+    if not isinstance(log, logging.Logger):
+        func, log = log, logger
+        return real_decorator(func)
+
+    return real_decorator
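+
+
+# Usage sketch (illustrative only; the functions below are hypothetical):
+#
+#     @logwrap
+#     def get_nodes(path):
+#         ...  # arguments and the result are logged at DEBUG level
+#
+#     @logwrap(log=logger, log_level=logging.INFO)
+#     def remove_key(key, path):
+#         ...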
diff --git a/reclass_tools/helpers/exec_result.py b/reclass_tools/helpers/exec_result.py
new file mode 100644
index 0000000..3dc6245
--- /dev/null
+++ b/reclass_tools/helpers/exec_result.py
@@ -0,0 +1,379 @@
+#    Copyright 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+from __future__ import unicode_literals
+
+import json
+import threading
+
+import yaml
+
+from reclass_tools.helpers import proc_enums
+from reclass_tools import logger
+
+
+deprecated_aliases = {
+    'stdout_str',
+    'stderr_str',
+    'stdout_json',
+    'stdout_yaml'
+}
+
+
+class ExecResult(object):
+    __slots__ = [
+        '__cmd', '__stdout', '__stderr', '__exit_code',
+        '__stdout_str', '__stderr_str', '__stdout_brief', '__stderr_brief',
+        '__stdout_json', '__stdout_yaml',
+        '__lock'
+    ]
+
+    def __init__(self, cmd, stdout=None, stderr=None,
+                 exit_code=proc_enums.ExitCodes.EX_INVALID):
+        """Command execution result read from fifo
+
+        :type cmd: str
+        :type stdout: list
+        :type stderr: list
+        :type exit_code: ExitCodes
+        """
+        self.__lock = threading.RLock()
+
+        self.__cmd = cmd
+        self.__stdout = stdout if stdout is not None else []
+        self.__stderr = stderr if stderr is not None else []
+
+        self.__exit_code = None
+        self.exit_code = exit_code
+
+        # By default is none:
+        self.__stdout_str = None
+        self.__stderr_str = None
+        self.__stdout_brief = None
+        self.__stderr_brief = None
+
+        self.__stdout_json = None
+        self.__stdout_yaml = None
+
+    @property
+    def lock(self):
+        """Lock object for thread-safe operation
+
+        :rtype: RLock
+        """
+        return self.__lock
+
+    @staticmethod
+    def _get_bytearray_from_array(src):
+        """Get bytearray from array of bytes blocks
+
+        :type src: list(bytes)
+        :rtype: bytearray
+        """
+        return bytearray(b''.join(src))
+
+    @staticmethod
+    def _get_str_from_bin(src):
+        """Join data in list to the string, with python 2&3 compatibility.
+
+        :type src: bytearray
+        :rtype: str
+        """
+        return src.strip().decode(
+            encoding='utf-8',
+            errors='backslashreplace'
+        )
+
+    @classmethod
+    def _get_brief(cls, data):
+        """Get brief output: 7 lines maximum (3 first + ... + 3 last)
+
+        :type data: list(bytes)
+        :rtype: str
+        """
+        src = data if len(data) <= 7 else data[:3] + [b'...\n'] + data[-3:]
+        return cls._get_str_from_bin(
+            cls._get_bytearray_from_array(src)
+        )
+
+    @property
+    def cmd(self):
+        """Executed command
+
+        :rtype: str
+        """
+        return self.__cmd
+
+    @property
+    def stdout(self):
+        """Stdout output as list of binaries
+
+        :rtype: list(bytes)
+        """
+        return self.__stdout
+
+    @stdout.setter
+    def stdout(self, new_val):
+        """Stdout output as list of binaries
+
+        :type new_val: list(bytes)
+        :raises: TypeError
+        """
+        if not isinstance(new_val, (list, type(None))):
+            raise TypeError('stdout should be list only!')
+        with self.lock:
+            self.__stdout_str = None
+            self.__stdout_brief = None
+            self.__stdout_json = None
+            self.__stdout_yaml = None
+            self.__stdout = new_val
+
+    @property
+    def stderr(self):
+        """Stderr output as list of binaries
+
+        :rtype: list(bytes)
+        """
+        return self.__stderr
+
+    @stderr.setter
+    def stderr(self, new_val):
+        """Stderr output as list of binaries
+
+        :type new_val: list(bytes)
+        :raises: TypeError
+        """
+        if not isinstance(new_val, (list, type(None))):
+            raise TypeError('stderr should be list only!')
+        with self.lock:
+            self.__stderr_str = None
+            self.__stderr_brief = None
+            self.__stderr = new_val
+
+    @property
+    def stdout_bin(self):
+        """Stdout in binary format
+
+        Sometimes binary objects need to be logged too (example: Session),
+        and for debug purposes we can use this as the data source.
+        :rtype: bytearray
+        """
+        with self.lock:
+            return self._get_bytearray_from_array(self.stdout)
+
+    @property
+    def stderr_bin(self):
+        """Stderr in binary format
+
+        :rtype: bytearray
+        """
+        with self.lock:
+            return self._get_bytearray_from_array(self.stderr)
+
+    @property
+    def stdout_str(self):
+        """Stdout output as string
+
+        :rtype: str
+        """
+        with self.lock:
+            if self.__stdout_str is None:
+                self.__stdout_str = self._get_str_from_bin(self.stdout_bin)
+            return self.__stdout_str
+
+    @property
+    def stderr_str(self):
+        """Stderr output as string
+
+        :rtype: str
+        """
+        with self.lock:
+            if self.__stderr_str is None:
+                self.__stderr_str = self._get_str_from_bin(self.stderr_bin)
+            return self.__stderr_str
+
+    @property
+    def stdout_brief(self):
+        """Brief stdout output (mostly for exceptions)
+
+        :rtype: str
+        """
+        with self.lock:
+            if self.__stdout_brief is None:
+                self.__stdout_brief = self._get_brief(self.stdout)
+            return self.__stdout_brief
+
+    @property
+    def stderr_brief(self):
+        """Brief stderr output (mostly for exceptions)
+
+        :rtype: str
+        """
+        with self.lock:
+            if self.__stderr_brief is None:
+                self.__stderr_brief = self._get_brief(self.stderr)
+            return self.__stderr_brief
+
+    @property
+    def exit_code(self):
+        """Return(exit) code of command
+
+        :rtype: int
+        """
+        return self.__exit_code
+
+    @exit_code.setter
+    def exit_code(self, new_val):
+        """Return(exit) code of command
+
+        :type new_val: int
+        """
+        if not isinstance(new_val, (int, proc_enums.ExitCodes)):
+            raise TypeError('Exit code is strictly int')
+        with self.lock:
+            if isinstance(new_val, int) and \
+                    new_val in proc_enums.ExitCodes.__members__.values():
+                new_val = proc_enums.ExitCodes(new_val)
+            self.__exit_code = new_val
+
+    def __deserialize(self, fmt):
+        """Deserialize stdout as data format
+
+        :type fmt: str
+        :rtype: object
+        :raises: TypeError, NotImplementedError
+        """
+        try:
+            if fmt == 'json':
+                return json.loads(self.stdout_str, encoding='utf-8')
+            elif fmt == 'yaml':
+                return yaml.safe_load(self.stdout_str)
+        except BaseException:
+            tmpl = (
+                " stdout is not valid {fmt}:\n"
+                '{{stdout!r}}\n'.format(
+                    fmt=fmt))
+            logger.exception(self.cmd + tmpl.format(stdout=self.stdout_str))
+            raise TypeError(
+                self.cmd + tmpl.format(stdout=self.stdout_brief))
+        msg = '{fmt} deserialize target is not implemented'.format(fmt=fmt)
+        logger.error(msg)
+        raise NotImplementedError(msg)
+
+    @property
+    def stdout_json(self):
+        """JSON from stdout
+
+        :rtype: object
+        """
+        with self.lock:
+            if self.__stdout_json is None:
+                # noinspection PyTypeChecker
+                self.__stdout_json = self.__deserialize(fmt='json')
+            return self.__stdout_json
+
+    @property
+    def stdout_yaml(self):
+        """YAML from stdout
+
+        :rtype: Union(list, dict, None)
+        """
+        with self.lock:
+            if self.__stdout_yaml is None:
+                # noinspection PyTypeChecker
+                self.__stdout_yaml = self.__deserialize(fmt='yaml')
+            return self.__stdout_yaml
+
+    def __dir__(self):
+        return [
+            'cmd', 'stdout', 'stderr', 'exit_code',
+            'stdout_bin', 'stderr_bin',
+            'stdout_str', 'stderr_str', 'stdout_brief', 'stderr_brief',
+            'stdout_json', 'stdout_yaml',
+            'lock'
+        ]
+
+    def __getitem__(self, item):
+        if item in dir(self):
+            return getattr(self, item)
+        raise IndexError(
+            '"{item}" not found in {dir}'.format(
+                item=item, dir=dir(self)
+            )
+        )
+
+    def __setitem__(self, key, value):
+        rw = ['stdout', 'stderr', 'exit_code']
+        if key in rw:
+            setattr(self, key, value)
+            return
+        if key in deprecated_aliases:
+            logger.warning(
+                '{key} is read-only and calculated automatically'.format(
+                    key=key
+                )
+            )
+            return
+        if key in dir(self):
+            raise RuntimeError(
+                '{key} is read-only!'.format(key=key)
+            )
+        raise IndexError(
+            '{key} not found in {dir}'.format(
+                key=key, dir=rw
+            )
+        )
+
+    def __repr__(self):
+        return (
+            '{cls}(cmd={cmd!r}, stdout={stdout}, stderr={stderr}, '
+            'exit_code={exit_code!s})'.format(
+                cls=self.__class__.__name__,
+                cmd=self.cmd,
+                stdout=self.stdout,
+                stderr=self.stderr,
+                exit_code=self.exit_code
+            ))
+
+    def __str__(self):
+        return (
+            "{cls}(\n\tcmd={cmd!r},"
+            "\n\t stdout=\n'{stdout_brief}',"
+            "\n\tstderr=\n'{stderr_brief}', "
+            '\n\texit_code={exit_code!s}\n)'.format(
+                cls=self.__class__.__name__,
+                cmd=self.cmd,
+                stdout_brief=self.stdout_brief,
+                stderr_brief=self.stderr_brief,
+                exit_code=self.exit_code
+            )
+        )
+
+    def __eq__(self, other):
+        return all(
+            (
+                getattr(self, val) == getattr(other, val)
+                for val in ['cmd', 'stdout', 'stderr', 'exit_code']
+            )
+        )
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __hash__(self):
+        return hash(
+            (
+                self.__class__, self.cmd, self.stdout_str, self.stderr_str,
+                self.exit_code
+            ))
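+
+
+# Usage sketch (illustrative only; command and output are made up):
+#
+#     res = ExecResult(cmd='uname -r', stdout=[b'4.4.0-generic\n'],
+#                      exit_code=0)
+#     res.stdout_str  # -> '4.4.0-generic'
+#     res.exit_code   # -> ExitCodes.EX_OK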
diff --git a/reclass_tools/helpers/proc_enums.py b/reclass_tools/helpers/proc_enums.py
new file mode 100644
index 0000000..73518fc
--- /dev/null
+++ b/reclass_tools/helpers/proc_enums.py
@@ -0,0 +1,124 @@
+#    Copyright 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+import enum
+
+
+@enum.unique
+class SigNum(enum.IntEnum):
+    SIGHUP = 1  # Hangup (POSIX).
+    SIGINT = 2  # Interrupt (ANSI).
+    SIGQUIT = 3  # Quit (POSIX).
+    SIGILL = 4  # Illegal instruction (ANSI).
+    SIGTRAP = 5  # Trace trap (POSIX).
+    SIGABRT = 6  # Abort (ANSI).
+    SIGBUS = 7  # BUS error (4.2 BSD).
+    SIGFPE = 8  # Floating-point exception (ANSI).
+    SIGKILL = 9  # Kill, unblockable (POSIX).
+    SIGUSR1 = 10  # User-defined signal 1 (POSIX).
+    SIGSEGV = 11  # Segmentation violation (ANSI).
+    SIGUSR2 = 12  # User-defined signal 2 (POSIX).
+    SIGPIPE = 13  # Broken pipe (POSIX).
+    SIGALRM = 14  # Alarm clock (POSIX).
+    SIGTERM = 15  # Termination (ANSI).
+    SIGSTKFLT = 16  # Stack fault.
+    SIGCHLD = 17  # Child status has changed (POSIX).
+    SIGCONT = 18  # Continue (POSIX).
+    SIGSTOP = 19  # Stop, unblockable (POSIX).
+    SIGTSTP = 20  # Keyboard stop (POSIX).
+    SIGTTIN = 21  # Background read from tty (POSIX).
+    SIGTTOU = 22  # Background write to tty (POSIX).
+    SIGURG = 23  # Urgent condition on socket (4.2 BSD).
+    SIGXCPU = 24  # CPU limit exceeded (4.2 BSD).
+    SIGXFSZ = 25  # File size limit exceeded (4.2 BSD).
+    SIGVTALRM = 26  # Virtual alarm clock (4.2 BSD).
+    SIGPROF = 27  # Profiling alarm clock (4.2 BSD).
+    SIGWINCH = 28  # Window size change (4.3 BSD, Sun).
+    SIGPOLL = 29  # Pollable event occurred (System V)
+    SIGPWR = 30  # Power failure restart (System V).
+    SIGSYS = 31  # Bad system call.
+
+    def __str__(self):
+        return "{name}<{value:d}(0x{value:02X})>".format(
+            name=self.name,
+            value=self.value
+        )
+
+
+@enum.unique
+class ExitCodes(enum.IntEnum):
+    EX_OK = 0  # successful termination
+
+    EX_INVALID = 0xDEADBEEF  # uint32 debug value. Impossible for POSIX
+
+    EX_ERROR = 1  # general failure
+    EX_BUILTIN = 2  # Misuse of shell builtins (according to Bash)
+
+    EX_USAGE = 64  # command line usage error
+    EX_DATAERR = 65  # data format error
+    EX_NOINPUT = 66  # cannot open input
+    EX_NOUSER = 67  # addressee unknown
+    EX_NOHOST = 68  # host name unknown
+    EX_UNAVAILABLE = 69  # service unavailable
+    EX_SOFTWARE = 70  # internal software error
+    EX_OSERR = 71  # system error (e.g., can't fork)
+    EX_OSFILE = 72  # critical OS file missing
+    EX_CANTCREAT = 73  # can't create (user) output file
+    EX_IOERR = 74  # input/output error
+    EX_TEMPFAIL = 75  # temp failure; user is invited to retry
+    EX_PROTOCOL = 76  # remote error in protocol
+    EX_NOPERM = 77  # permission denied
+    EX_CONFIG = 78  # configuration error
+
+    EX_NOEXEC = 126  # If a command is found but is not executable
+    EX_NOCMD = 127  # If a command is not found
+
+    # Signal exits:
+    EX_SIGHUP = 128 + SigNum.SIGHUP
+    EX_SIGINT = 128 + SigNum.SIGINT
+    EX_SIGQUIT = 128 + SigNum.SIGQUIT
+    EX_SIGILL = 128 + SigNum.SIGILL
+    EX_SIGTRAP = 128 + SigNum.SIGTRAP
+    EX_SIGABRT = 128 + SigNum.SIGABRT
+    EX_SIGBUS = 128 + SigNum.SIGBUS
+    EX_SIGFPE = 128 + SigNum.SIGFPE
+    EX_SIGKILL = 128 + SigNum.SIGKILL
+    EX_SIGUSR1 = 128 + SigNum.SIGUSR1
+    EX_SIGSEGV = 128 + SigNum.SIGSEGV
+    EX_SIGUSR2 = 128 + SigNum.SIGUSR2
+    EX_SIGPIPE = 128 + SigNum.SIGPIPE
+    EX_SIGALRM = 128 + SigNum.SIGALRM
+    EX_SIGTERM = 128 + SigNum.SIGTERM
+    EX_SIGSTKFLT = 128 + SigNum.SIGSTKFLT
+    EX_SIGCHLD = 128 + SigNum.SIGCHLD
+    EX_SIGCONT = 128 + SigNum.SIGCONT
+    EX_SIGSTOP = 128 + SigNum.SIGSTOP
+    EX_SIGTSTP = 128 + SigNum.SIGTSTP
+    EX_SIGTTIN = 128 + SigNum.SIGTTIN
+    EX_SIGTTOU = 128 + SigNum.SIGTTOU
+    EX_SIGURG = 128 + SigNum.SIGURG
+    EX_SIGXCPU = 128 + SigNum.SIGXCPU
+    EX_SIGXFSZ = 128 + SigNum.SIGXFSZ
+    EX_SIGVTALRM = 128 + SigNum.SIGVTALRM
+    EX_SIGPROF = 128 + SigNum.SIGPROF
+    EX_SIGWINCH = 128 + SigNum.SIGWINCH
+    EX_SIGPOLL = 128 + SigNum.SIGPOLL
+    EX_SIGPWR = 128 + SigNum.SIGPWR
+    EX_SIGSYS = 128 + SigNum.SIGSYS
+
+    def __str__(self):
+        return "{name}<{value:d}(0x{value:02X})>".format(
+            name=self.name,
+            value=self.value
+        )
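+
+
+# Illustrative examples of the helper enums (values follow the sysexits.h
+# and shell conventions encoded above):
+#
+#     ExitCodes(0)               # -> ExitCodes.EX_OK
+#     str(ExitCodes.EX_NOCMD)    # -> 'EX_NOCMD<127(0x7F)>'
+#     int(ExitCodes.EX_SIGTERM)  # -> 143 (128 + SIGTERM)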
diff --git a/reclass_tools/helpers/ssh_client.py b/reclass_tools/helpers/ssh_client.py
new file mode 100644
index 0000000..da3655d
--- /dev/null
+++ b/reclass_tools/helpers/ssh_client.py
@@ -0,0 +1,1147 @@
+#    Copyright 2013 - 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+from __future__ import unicode_literals
+
+import base64
+import os
+import posixpath
+import stat
+import sys
+import threading
+import time
+import warnings
+
+import paramiko
+import six
+
+from reclass_tools.helpers import decorators
+from reclass_tools.helpers import exec_result
+from reclass_tools.helpers import proc_enums
+from reclass_tools import logger
+
+
+def get_private_keys(home, identity_files=None):
+    if not identity_files:
+        identity_files = ['.ssh/id_rsa']
+    keys = []
+    for i in identity_files:
+        with open(os.path.join(home, i)) as f:
+            keys.append(paramiko.RSAKey.from_private_key(f))
+    return keys
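+
+# Usage sketch (illustrative only; the identity file path is an assumption):
+#
+#     keys = get_private_keys(os.path.expanduser('~'),
+#                             identity_files=['.ssh/id_rsa'])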
+
+
+class SSHAuth(object):
+    __slots__ = ['__username', '__password', '__key', '__keys']
+
+    def __init__(
+            self,
+            username=None, password=None, key=None, keys=None):
+        """SSH authorisation object
+
+        Used to authorize SSHClient.
+        Single SSHAuth object is associated with single host:port.
+        Password and keys are private; other data is read-only.
+
+        :type username: str
+        :type password: str
+        :type key: paramiko.RSAKey
+        :type keys: list
+        """
+        self.__username = username
+        self.__password = password
+        self.__key = key
+        self.__keys = [None]
+        if key is not None:
+            # noinspection PyTypeChecker
+            self.__keys.append(key)
+        if keys is not None:
+            for key in keys:
+                if key not in self.__keys:
+                    self.__keys.append(key)
+
+    @property
+    def username(self):
+        """Username for auth
+
+        :rtype: str
+        """
+        return self.__username
+
+    @staticmethod
+    def __get_public_key(key):
+        """Internal method for get public key from private
+
+        :type key: paramiko.RSAKey
+        """
+        if key is None:
+            return None
+        return '{0} {1}'.format(key.get_name(), key.get_base64())
+
+    @property
+    def public_key(self):
+        """public key for stored private key if presents else None
+
+        :rtype: str
+        """
+        return self.__get_public_key(self.__key)
+
+    def enter_password(self, tgt):
+        """Enter password to STDIN
+
+        Note: required for 'sudo' call
+
+        :type tgt: file
+        :rtype: str
+        """
+        # noinspection PyTypeChecker
+        return tgt.write('{}\n'.format(self.__password))
+
+    def connect(self, client, hostname=None, port=22, log=True):
+        """Connect SSH client object using credentials
+
+        :type client:
+            paramiko.client.SSHClient
+            paramiko.transport.Transport
+        :type log: bool
+        :raises paramiko.AuthenticationException
+        """
+        kwargs = {
+            'username': self.username,
+            'password': self.__password}
+        if hostname is not None:
+            kwargs['hostname'] = hostname
+            kwargs['port'] = port
+
+        keys = [self.__key]
+        keys.extend([k for k in self.__keys if k != self.__key])
+
+        for key in keys:
+            kwargs['pkey'] = key
+            try:
+                client.connect(**kwargs)
+                if self.__key != key:
+                    self.__key = key
+                    logger.debug(
+                        'Main key has been updated, public key is: \n'
+                        '{}'.format(self.public_key))
+                return
+            except paramiko.PasswordRequiredException:
+                if self.__password is None:
+                    logger.exception('No password has been set!')
+                    raise
+                else:
+                    logger.critical(
+                        'Unexpected PasswordRequiredException, '
+                        'when password is set!')
+                    raise
+            except paramiko.AuthenticationException:
+                continue
+        msg = 'Connection using stored authentication info failed!'
+        if log:
+            logger.exception(msg)
+        raise paramiko.AuthenticationException(msg)
+
+    def __hash__(self):
+        return hash((
+            self.__class__,
+            self.username,
+            self.__password,
+            tuple(self.__keys)
+        ))
+
+    def __eq__(self, other):
+        return hash(self) == hash(other)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __deepcopy__(self, memo):
+        return self.__class__(
+            username=self.username,
+            password=self.__password,
+            key=self.__key,
+            keys=list(self.__keys)
+        )
+
+    def copy(self):
+        return self.__class__(
+            username=self.username,
+            password=self.__password,
+            key=self.__key,
+            keys=self.__keys
+        )
+
+    def __repr__(self):
+        _key = (
+            None if self.__key is None else
+            '<private for pub: {}>'.format(self.public_key)
+        )
+        _keys = []
+        for k in self.__keys:
+            if k == self.__key:
+                continue
+            # noinspection PyTypeChecker
+            _keys.append(
+                '<private for pub: {}>'.format(
+                    self.__get_public_key(key=k)) if k is not None else None)
+
+        return (
+            '{cls}(username={username}, '
+            'password=<*masked*>, key={key}, keys={keys})'.format(
+                cls=self.__class__.__name__,
+                username=self.username,
+                key=_key,
+                keys=_keys)
+        )
+
+    def __str__(self):
+        return (
+            '{cls} for {username}'.format(
+                cls=self.__class__.__name__,
+                username=self.username,
+            )
+        )
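+
+
+# Usage sketch (illustrative only; credentials and the key path are
+# hypothetical):
+#
+#     key = paramiko.RSAKey.from_private_key_file('/home/user/.ssh/id_rsa')
+#     auth = SSHAuth(username='root', password='r00tme', key=key)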
+
+
+class _MemorizedSSH(type):
+    """Memorize metaclass for SSHClient
+
+    This class implements caching and managing of SSHClient connections.
+    The class is not in public scope: all required interfaces are accessible
+      through SSHClient classmethods.
+
+    Main flow is:
+      SSHClient() -> check for cached connection and
+        - If the same connection exists: check that it is alive, reconnect
+          if required and return it
+        - If a connection exists with different credentials: delete it and
+          continue processing (create a new connection and cache it on
+          success)
+      * Note: each invocation of an SSHClient instance resets the current
+        directory to the root of the current user's home dir ("cd ~").
+        This is necessary to avoid unpredictable behavior when the same
+        connection is used from different places.
+        If you need to enter some directory and execute a command there,
+        please use the following approach:
+        cmd1 = "cd <some dir> && <command1>"
+        cmd2 = "cd <some dir> && <command2>"
+
+    Closing cached connections is allowed per-client and for all stored ones:
+      the connection will be closed, but still kept in the cache for a faster
+      reconnect.
+
+    Clearing the cache is strictly not recommended:
+      from that moment all open connections have to be managed manually,
+      and duplicates are possible.
+    """
+    __cache = {}
+
+    def __call__(
+            cls,
+            host, port=22,
+            username=None, password=None, private_keys=None,
+            auth=None
+    ):
+        """Main memorize method: check for cached instance and return it
+
+        :type host: str
+        :type port: int
+        :type username: str
+        :type password: str
+        :type private_keys: list
+        :type auth: SSHAuth
+        :rtype: SSHClient
+        """
+        if (host, port) in cls.__cache:
+            key = host, port
+            if auth is None:
+                auth = SSHAuth(
+                    username=username, password=password, keys=private_keys)
+            if hash((cls, host, port, auth)) == hash(cls.__cache[key]):
+                ssh = cls.__cache[key]
+                # noinspection PyBroadException
+                try:
+                    ssh.execute('cd ~', timeout=5)
+                except BaseException:  # Note: Do not change to lower level!
+                    logger.debug('Reconnect {}'.format(ssh))
+                    ssh.reconnect()
+                return ssh
+            if sys.getrefcount(cls.__cache[key]) == 2:
+                # If we have only cache reference and temporary getrefcount
+                # reference: close connection before deletion
+                logger.debug('Closing {} as unused'.format(cls.__cache[key]))
+                cls.__cache[key].close()
+            del cls.__cache[key]
+        # noinspection PyArgumentList
+        return super(
+            _MemorizedSSH, cls).__call__(
+            host=host, port=port,
+            username=username, password=password, private_keys=private_keys,
+            auth=auth)
+
+    @classmethod
+    def record(mcs, ssh):
+        """Record SSH client to cache
+
+        :type ssh: SSHClient
+        """
+        mcs.__cache[(ssh.hostname, ssh.port)] = ssh
+
+    @classmethod
+    def clear_cache(mcs):
+        """Clear cached connections for initialize new instance on next call"""
+        n_count = 3 if six.PY3 else 4
+        # PY3: cache, ssh, temporary
+        # PY4: cache, values mapping, ssh, temporary
+        for ssh in mcs.__cache.values():
+            if sys.getrefcount(ssh) == n_count:
+                logger.debug('Closing {} as unused'.format(ssh))
+                ssh.close()
+        mcs.__cache = {}
+
+    @classmethod
+    def close_connections(mcs, hostname=None):
+        """Close connections for selected or all cached records
+
+        :type hostname: str
+        """
+        if hostname is None:
+            keys = [key for key, ssh in mcs.__cache.items() if ssh.is_alive]
+        else:
+            keys = [
+                (host, port)
+                for (host, port), ssh
+                in mcs.__cache.items() if host == hostname and ssh.is_alive]
+        # raise ValueError(keys)
+        for key in keys:
+            mcs.__cache[key].close()
+
+
+class SSHClient(six.with_metaclass(_MemorizedSSH, object)):
+    __slots__ = [
+        '__hostname', '__port', '__auth', '__ssh', '__sftp', 'sudo_mode',
+        '__lock'
+    ]
+
+    class __get_sudo(object):
+        """Context manager for call commands with sudo"""
+        def __init__(self, ssh, enforce=None):
+            """Context manager for call commands with sudo
+
+            :type ssh: SSHClient
+            :type enforce: bool
+            """
+            self.__ssh = ssh
+            self.__sudo_status = ssh.sudo_mode
+            self.__enforce = enforce
+
+        def __enter__(self):
+            self.__sudo_status = self.__ssh.sudo_mode
+            if self.__enforce is not None:
+                self.__ssh.sudo_mode = self.__enforce
+
+        def __exit__(self, exc_type, exc_val, exc_tb):
+            self.__ssh.sudo_mode = self.__sudo_status
+
+    # noinspection PyPep8Naming
+    class get_sudo(__get_sudo):
+        """Context manager for call commands with sudo"""
+
+        def __init__(self, ssh, enforce=True):
+            warnings.warn(
+                'SSHClient.get_sudo(SSHClient()) is deprecated in favor of '
+                'SSHClient().sudo(enforce=...), which is much more powerful.')
+            super(self.__class__, self).__init__(ssh=ssh, enforce=enforce)
+
+    def __hash__(self):
+        return hash((
+            self.__class__,
+            self.hostname,
+            self.port,
+            self.auth))
+
+    def __init__(
+            self,
+            host, port=22,
+            username=None, password=None, private_keys=None,
+            auth=None
+    ):
+        """SSHClient helper
+
+        :type host: str
+        :type port: int
+        :type username: str
+        :type password: str
+        :type private_keys: list
+        :type auth: SSHAuth
+        """
+        self.__lock = threading.RLock()
+
+        self.__hostname = host
+        self.__port = port
+
+        self.sudo_mode = False
+        self.__ssh = paramiko.SSHClient()
+        self.__ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+        self.__sftp = None
+
+        self.__auth = auth if auth is None else auth.copy()
+
+        if auth is None:
+            msg = (
+                'SSHClient(host={host}, port={port}, username={username}): '
+                'initialization by username/password/private_keys '
+                'is deprecated in favor of SSHAuth usage. '
+                'Please update your code'.format(
+                    host=host, port=port, username=username
+                ))
+            warnings.warn(msg, DeprecationWarning)
+            logger.debug(msg)
+
+            self.__auth = SSHAuth(
+                username=username,
+                password=password,
+                keys=private_keys
+            )
+
+        self.__connect()
+        _MemorizedSSH.record(ssh=self)
+        if auth is None:
+            logger.info(
+                '{0}:{1}> SSHAuth was made from old style creds: '
+                '{2}'.format(self.hostname, self.port, self.auth))
+
+    @property
+    def lock(self):
+        """Connection lock
+
+        :rtype: threading.RLock
+        """
+        return self.__lock
+
+    @property
+    def auth(self):
+        """Internal authorisation object
+
+        Attention: this public property is mainly for inheritance,
+        debug and information purposes.
+        Calls from outside SSHClient and child classes are a sign of
+        incorrect design.
+        Change is completely disallowed.
+
+        :rtype: SSHAuth
+        """
+        return self.__auth
+
+    @property
+    def hostname(self):
+        """Connected remote host name
+
+        :rtype: str
+        """
+        return self.__hostname
+
+    @property
+    def host(self):
+        """Hostname access for backward compatibility
+
+        :rtype: str
+        """
+        warnings.warn(
+            'host has been deprecated in favor of hostname',
+            DeprecationWarning
+        )
+        return self.hostname
+
+    @property
+    def port(self):
+        """Connected remote port number
+
+        :rtype: int
+        """
+        return self.__port
+
+    @property
+    def is_alive(self):
+        """Paramiko status: ready to use|reconnect required
+
+        :rtype: bool
+        """
+        return self.__ssh.get_transport() is not None
+
+    def __repr__(self):
+        return '{cls}(host={host}, port={port}, auth={auth!r})'.format(
+            cls=self.__class__.__name__, host=self.hostname, port=self.port,
+            auth=self.auth
+        )
+
+    def __str__(self):
+        return '{cls}(host={host}, port={port}) for user {user}'.format(
+            cls=self.__class__.__name__, host=self.hostname, port=self.port,
+            user=self.auth.username
+        )
+
+    @property
+    def _ssh(self):
+        """ssh client object getter for inheritance support only
+
+        Attention: ssh client object creation and change
+        is allowed only by __init__ and reconnect call.
+
+        :rtype: paramiko.SSHClient
+        """
+        return self.__ssh
+
+    @decorators.retry(paramiko.SSHException, count=3, delay=3)
+    def __connect(self):
+        """Main method for connection open"""
+        with self.lock:
+            self.auth.connect(
+                client=self.__ssh,
+                hostname=self.hostname, port=self.port,
+                log=True)
+
+    def __connect_sftp(self):
+        """SFTP connection opener"""
+        with self.lock:
+            try:
+                self.__sftp = self.__ssh.open_sftp()
+            except paramiko.SSHException:
+                logger.warning('SFTP connect failed! Only SSH is accessible.')
+
+    @property
+    def _sftp(self):
+        """SFTP channel access for inheritance
+
+        :rtype: paramiko.sftp_client.SFTPClient
+        :raises: paramiko.SSHException
+        """
+        if self.__sftp is not None:
+            return self.__sftp
+        logger.debug('SFTP is not connected, try to connect...')
+        self.__connect_sftp()
+        if self.__sftp is not None:
+            return self.__sftp
+        raise paramiko.SSHException('SFTP connection failed')
+
+    def close(self):
+        """Close SSH and SFTP sessions"""
+        with self.lock:
+            # noinspection PyBroadException
+            try:
+                self.__ssh.close()
+                self.__sftp = None
+            except Exception:
+                logger.exception("Could not close ssh connection")
+                if self.__sftp is not None:
+                    # noinspection PyBroadException
+                    try:
+                        self.__sftp.close()
+                    except Exception:
+                        logger.exception("Could not close sftp connection")
+
+    @staticmethod
+    def clear():
+        warnings.warn(
+            "clear is removed: use close() only if it mandatory: "
+            "it's automatically called on revert|shutdown|suspend|destroy",
+            DeprecationWarning
+        )
+
+    @classmethod
+    def _clear_cache(cls):
+        """Enforce clear memorized records"""
+        warnings.warn(
+            '_clear_cache() is dangerous and not recommended for normal use!',
+            Warning
+        )
+        _MemorizedSSH.clear_cache()
+
+    @classmethod
+    def close_connections(cls, hostname=None):
+        """Close cached connections: if hostname is not set, then close all
+
+        :type hostname: str
+        """
+        _MemorizedSSH.close_connections(hostname=hostname)
+
+    def __del__(self):
+        """Destructor helper: close channel and threads BEFORE closing others
+
+        Due to threading in paramiko, the default destructor could generate
+        asserts on close, so we call channel close before closing the main
+        ssh object.
+        """
+        self.__ssh.close()
+        self.__sftp = None
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        pass
+
+    def reconnect(self):
+        """Reconnect SSH session"""
+        with self.lock:
+            self.close()
+
+            self.__ssh = paramiko.SSHClient()
+            self.__ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+
+            self.__connect()
+
+    def sudo(self, enforce=None):
+        """Call contextmanager for sudo mode change
+
+        :type enforce: bool
+        :param enforce: Enforce sudo enabled or disabled. By default: None
+        """
+        return self.__get_sudo(ssh=self, enforce=enforce)
+
+    def check_call(
+            self,
+            command, verbose=False, timeout=None,
+            error_info=None,
+            expected=None, raise_on_err=True, **kwargs):
+        """Execute command and check for return code
+
+        :type command: str
+        :type verbose: bool
+        :type timeout: int
+        :type error_info: str
+        :type expected: list
+        :type raise_on_err: bool
+        :rtype: ExecResult
+        :raises: SSHCalledProcessError
+        """
+        if expected is None:
+            expected = [proc_enums.ExitCodes.EX_OK]
+        else:
+            expected = [
+                proc_enums.ExitCodes(code)
+                if (
+                    isinstance(code, int) and
+                    code in proc_enums.ExitCodes.__members__.values())
+                else code
+                for code in expected
+                ]
+        ret = self.execute(command, verbose, timeout, **kwargs)
+        if ret['exit_code'] not in expected:
+            message = (
+                "{append}Command '{cmd!r}' returned exit code {code!s} while "
+                "expected {expected!s}\n".format(
+                    append=error_info + '\n' if error_info else '',
+                    cmd=command,
+                    code=ret['exit_code'],
+                    expected=expected,
+                ))
+            logger.error(message)
+            if raise_on_err:
+                raise SSHCalledProcessError(
+                    command, ret['exit_code'],
+                    expected=expected,
+                    stdout=ret['stdout_brief'],
+                    stderr=ret['stderr_brief'])
+        return ret
+
+    def check_stderr(
+            self,
+            command, verbose=False, timeout=None,
+            error_info=None,
+            raise_on_err=True, **kwargs):
+        """Execute command expecting return code 0 and empty STDERR
+
+        :type command: str
+        :type verbose: bool
+        :type timeout: int
+        :type error_info: str
+        :type raise_on_err: bool
+        :rtype: ExecResult
+        :raises: SSHCalledProcessError
+        """
+        ret = self.check_call(
+            command, verbose, timeout=timeout,
+            error_info=error_info, raise_on_err=raise_on_err, **kwargs)
+        if ret['stderr']:
+            message = (
+                "{append}Command '{cmd!r}' STDERR while not expected\n"
+                "\texit code: {code!s}\n".format(
+                    append=error_info + '\n' if error_info else '',
+                    cmd=command,
+                    code=ret['exit_code'],
+                ))
+            logger.error(message)
+            if raise_on_err:
+                raise SSHCalledProcessError(
+                    command,
+                    ret['exit_code'],
+                    stdout=ret['stdout_brief'],
+                    stderr=ret['stderr_brief'])
+        return ret
+
+    @classmethod
+    def execute_together(
+            cls, remotes, command, expected=None, raise_on_err=True, **kwargs):
+        """Execute command on multiple remotes in async mode
+
+        :type remotes: list
+        :type command: str
+        :type expected: list
+        :type raise_on_err: bool
+        :raises: SSHCalledProcessError
+        """
+        if expected is None:
+            expected = [0]
+        futures = {}
+        errors = {}
+        for remote in set(remotes):  # Use distinct remotes
+            chan, _, _, _ = remote.execute_async(command, **kwargs)
+            futures[remote] = chan
+        for remote, chan in futures.items():
+            ret = chan.recv_exit_status()
+            chan.close()
+            if ret not in expected:
+                errors[remote.hostname] = ret
+        if errors and raise_on_err:
+            raise SSHCalledProcessError(command, errors)
+
+    @classmethod
+    def __exec_command(
+            cls, command, channel, stdout, stderr, timeout, verbose=False):
+        """Get exit status from channel with timeout
+
+        :type command: str
+        :type channel: paramiko.channel.Channel
+        :type stdout: paramiko.channel.ChannelFile
+        :type stderr: paramiko.channel.ChannelFile
+        :type timeout: int
+        :type verbose: bool
+        :rtype: ExecResult
+        :raises: SSHTimeoutError
+        """
+        def poll_stream(src, verb_logger=None):
+            dst = []
+            try:
+                for line in src:
+                    dst.append(line)
+                    if verb_logger is not None:
+                        verb_logger(
+                            line.decode('utf-8',
+                                        errors='backslashreplace').rstrip()
+                        )
+            except IOError:
+                pass
+            return dst
+
+        def poll_streams(result, channel, stdout, stderr, verbose):
+            if channel.recv_ready():
+                result.stdout += poll_stream(
+                    src=stdout,
+                    verb_logger=logger.info if verbose else logger.debug)
+            if channel.recv_stderr_ready():
+                result.stderr += poll_stream(
+                    src=stderr,
+                    verb_logger=logger.error if verbose else logger.debug)
+
+        @decorators.threaded(started=True)
+        def poll_pipes(stdout, stderr, result, stop, channel):
+            """Polling task for FIFO buffers
+
+            :type stdout: paramiko.channel.ChannelFile
+            :type stderr: paramiko.channel.ChannelFile
+            :type result: ExecResult
+            :type stop: Event
+            :type channel: paramiko.channel.Channel
+            """
+
+            while not stop.isSet():
+                time.sleep(0.1)
+                poll_streams(
+                    result=result,
+                    channel=channel,
+                    stdout=stdout,
+                    stderr=stderr,
+                    verbose=verbose
+                )
+
+                if channel.status_event.is_set():
+                    result.exit_code = channel.exit_status
+
+                    result.stdout += poll_stream(
+                        src=stdout,
+                        verb_logger=logger.info if verbose else logger.debug)
+                    result.stderr += poll_stream(
+                        src=stderr,
+                        verb_logger=logger.error if verbose else logger.debug)
+
+                    stop.set()
+
+        # channel.status_event.wait(timeout)
+        result = exec_result.ExecResult(cmd=command)
+        stop_event = threading.Event()
+        if verbose:
+            logger.info("\nExecuting command: {!r}".format(command.rstrip()))
+        else:
+            logger.debug("\nExecuting command: {!r}".format(command.rstrip()))
+        poll_pipes(
+            stdout=stdout,
+            stderr=stderr,
+            result=result,
+            stop=stop_event,
+            channel=channel
+        )
+
+        stop_event.wait(timeout)
+
+        # Process closed?
+        if stop_event.isSet():
+            stop_event.clear()
+            channel.close()
+            return result
+
+        stop_event.set()
+        channel.close()
+
+        wait_err_msg = ('Wait for {0!r} during {1}s: no return code!\n'
+                        .format(command, timeout))
+        output_brief_msg = ('\tSTDOUT:\n'
+                            '{0}\n'
+                            '\tSTDERR:\n'
+                            '{1}'.format(result.stdout_brief,
+                                         result.stderr_brief))
+        logger.debug(wait_err_msg)
+        raise SSHTimeoutError(wait_err_msg + output_brief_msg)
+
+    def execute(self, command, verbose=False, timeout=None, **kwargs):
+        """Execute command and wait for return code
+
+        :type command: str
+        :type verbose: bool
+        :type timeout: int
+        :rtype: ExecResult
+        :raises: TimeoutError
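+
+        Usage sketch (assumes ``client`` is an already connected SSHClient
+        instance; the command and timeout values are placeholders)::
+
+            result = client.execute('uname -a', verbose=True, timeout=60)
+            assert result.exit_code == 0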
+        """
+        chan, _, stderr, stdout = self.execute_async(command, **kwargs)
+
+        result = self.__exec_command(
+            command, chan, stdout, stderr, timeout,
+            verbose=verbose
+        )
+
+        message = (
+            '\n{cmd!r} execution results: Exit code: {code!s}'.format(
+                cmd=command,
+                code=result.exit_code
+            ))
+        if verbose:
+            logger.info(message)
+        else:
+            logger.debug(message)
+        return result
+
+    def execute_async(self, command, get_pty=False):
+        """Execute command in async mode and return channel with IO objects
+
+        :type command: str
+        :type get_pty: bool
+        :rtype:
+            tuple(
+                paramiko.Channel,
+                paramiko.ChannelFile,
+                paramiko.ChannelFile,
+                paramiko.ChannelFile
+            )
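+
+        Usage sketch (``client`` is assumed to be a connected SSHClient;
+        the exit status is collected through the paramiko channel)::
+
+            chan, stdin, stderr, stdout = client.execute_async('ls /tmp')
+            exit_code = chan.recv_exit_status()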
+        """
+        logger.debug("Executing command: {!r}".format(command.rstrip()))
+
+        chan = self._ssh.get_transport().open_session()
+
+        if get_pty:
+            # Open PTY
+            chan.get_pty(
+                term='vt100',
+                width=80, height=24,
+                width_pixels=0, height_pixels=0
+            )
+
+        stdin = chan.makefile('wb')
+        stdout = chan.makefile('rb')
+        stderr = chan.makefile_stderr('rb')
+        cmd = "{}\n".format(command)
+        if self.sudo_mode:
+            encoded_cmd = base64.b64encode(cmd.encode('utf-8')).decode('utf-8')
+            cmd = ("sudo -S bash -c 'eval \"$(base64 -d "
+                   "<(echo \"{0}\"))\"'").format(
+                encoded_cmd
+            )
+            chan.exec_command(cmd)
+            if stdout.channel.closed is False:
+                self.auth.enter_password(stdin)
+                stdin.flush()
+        else:
+            chan.exec_command(cmd)
+        return chan, stdin, stderr, stdout
+
+    def execute_through_host(
+            self,
+            hostname,
+            cmd,
+            auth=None,
+            target_port=22,
+            timeout=None,
+            verbose=False
+    ):
+        """Execute command on remote host through currently connected host
+
+        :type hostname: str
+        :type cmd: str
+        :type auth: SSHAuth
+        :type target_port: int
+        :type timeout: int
+        :type verbose: bool
+        :rtype: ExecResult
+        :raises: TimeoutError
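+
+        Usage sketch (the target address, command and timeout below are
+        placeholders)::
+
+            result = client.execute_through_host(
+                '10.0.0.5', 'uptime', timeout=60)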
+        """
+        if auth is None:
+            auth = self.auth
+
+        intermediate_channel = self._ssh.get_transport().open_channel(
+            kind='direct-tcpip',
+            dest_addr=(hostname, target_port),
+            src_addr=(self.hostname, 0))
+        transport = paramiko.Transport(sock=intermediate_channel)
+
+        # start client and authenticate transport
+        auth.connect(transport)
+
+        # open ssh session
+        channel = transport.open_session()
+
+        # Make proxy objects for read
+        stdout = channel.makefile('rb')
+        stderr = channel.makefile_stderr('rb')
+
+        channel.exec_command(cmd)
+
+        # noinspection PyDictCreation
+        result = self.__exec_command(
+            cmd, channel, stdout, stderr, timeout, verbose=verbose)
+
+        intermediate_channel.close()
+
+        return result
+
+    def mkdir(self, path):
+        """run 'mkdir -p path' on remote
+
+        :type path: str
+        """
+        if self.exists(path):
+            return
+        logger.debug("Creating directory: {}".format(path))
+        # noinspection PyTypeChecker
+        self.execute("mkdir -p {}\n".format(path))
+
+    def rm_rf(self, path):
+        """run 'rm -rf path' on remote
+
+        :type path: str
+        """
+        logger.debug("rm -rf {}".format(path))
+        # noinspection PyTypeChecker
+        self.execute("rm -rf {}".format(path))
+
+    def open(self, path, mode='r'):
+        """Open file on remote using SFTP session
+
+        :type path: str
+        :type mode: str
+        :return: file.open() stream
+        """
+        return self._sftp.open(path, mode)
+
+    def upload(self, source, target):
+        """Upload file(s) from source to target using SFTP session
+
+        :type source: str
+        :type target: str
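+
+        Usage sketch (paths are placeholders; a source directory is
+        copied recursively)::
+
+            client.upload('/tmp/settings.yml', '/srv/settings.yml')
+            client.upload('./local_dir', '/srv/remote_dir')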
+        """
+        logger.debug("Copying '%s' -> '%s'", source, target)
+
+        if self.isdir(target):
+            target = posixpath.join(target, os.path.basename(source))
+
+        source = os.path.expanduser(source)
+        if not os.path.isdir(source):
+            self._sftp.put(source, target)
+            return
+
+        for rootdir, _, files in os.walk(source):
+            targetdir = os.path.normpath(
+                os.path.join(
+                    target,
+                    os.path.relpath(rootdir, source))).replace("\\", "/")
+
+            self.mkdir(targetdir)
+
+            for entry in files:
+                local_path = os.path.join(rootdir, entry)
+                remote_path = posixpath.join(targetdir, entry)
+                if self.exists(remote_path):
+                    self._sftp.unlink(remote_path)
+                self._sftp.put(local_path, remote_path)
+
+    def download(self, destination, target):
+        """Download file(s) to target from destination
+
+        :type destination: str
+        :type target: str
+        :rtype: bool
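+
+        Usage sketch (paths are placeholders)::
+
+            client.download('/var/log/syslog', '/tmp/syslog')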
+        """
+        logger.debug(
+            "Copying '%s' -> '%s' from remote to local host",
+            destination, target
+        )
+
+        if os.path.isdir(target):
+            target = os.path.join(target, os.path.basename(destination))
+
+        if not self.isdir(destination):
+            if self.exists(destination):
+                self._sftp.get(destination, target)
+            else:
+                logger.debug(
+                    "Can't download %s because it doesn't exist", destination
+                )
+        else:
+            logger.debug(
+                "Can't download %s because it is a directory", destination
+            )
+        return os.path.exists(target)
+
+    def exists(self, path):
+        """Check for file existence using SFTP session
+
+        :type path: str
+        :rtype: bool
+        """
+        try:
+            self._sftp.lstat(path)
+            return True
+        except IOError:
+            return False
+
+    def stat(self, path):
+        """Get stat info for path with following symlinks
+
+        :type path: str
+        :rtype: paramiko.sftp_attr.SFTPAttributes
+        """
+        return self._sftp.stat(path)
+
+    def isfile(self, path, follow_symlink=False):
+        """Check, that path is file using SFTP session
+
+        :type path: str
+        :type follow_symlink: bool (default=False), resolve symlinks
+        :rtype: bool
+        """
+        try:
+            if follow_symlink:
+                attrs = self._sftp.stat(path)
+            else:
+                attrs = self._sftp.lstat(path)
+            # use stat.S_ISREG() to avoid matching symlinks as regular files
+            return stat.S_ISREG(attrs.st_mode)
+        except IOError:
+            return False
+
+    def isdir(self, path, follow_symlink=False):
+        """Check, that path is directory using SFTP session
+
+        :type path: str
+        :type follow_symlink: bool (default=False), resolve symlinks
+        :rtype: bool
+        """
+        try:
+            if follow_symlink:
+                attrs = self._sftp.stat(path)
+            else:
+                attrs = self._sftp.lstat(path)
+            return stat.S_ISDIR(attrs.st_mode)
+        except IOError:
+            return False
+
+    def walk(self, path):
+        """Emulate os.walk() for a remote path using the SFTP session
+
+        :type path: str
+        :rtype: generator of (path, folders, files) tuples
+        """
+        files = []
+        folders = []
+        try:
+            for item in self._sftp.listdir_iter(path):
+                if stat.S_ISDIR(item.st_mode):
+                    folders.append(item.filename)
+                else:
+                    files.append(item.filename)
+        except IOError as e:
+            logger.debug("Error opening directory {0}: {1}".format(path, e))
+
+        yield path, folders, files
+        for folder in folders:
+            for res in self.walk(posixpath.join(path, folder)):
+                yield res
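+
+    # Usage sketch for walk() (``client`` and the path are placeholders):
+    #
+    #   for curdir, subdirs, files in client.walk('/srv/salt/reclass'):
+    #       print(curdir, subdirs, files)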
+
+
+class SSHClientError(Exception):
+    """Base class for errors"""
+
+
+class SSHCalledProcessError(SSHClientError):
+    @staticmethod
+    def _makestr(data):
+        if isinstance(data, six.binary_type):
+            return data.decode('utf-8', errors='backslashreplace')
+        elif isinstance(data, six.text_type):
+            return data
+        else:
+            return repr(data)
+
+    def __init__(
+            self, command, returncode, expected=0, stdout=None, stderr=None):
+        self.returncode = returncode
+        self.expected = expected
+        self.cmd = command
+        self.stdout = stdout
+        self.stderr = stderr
+        message = (
+            "Command '{cmd}' returned exit code {code} while "
+            "expected {expected}".format(
+                cmd=self._makestr(self.cmd),
+                code=self.returncode,
+                expected=self.expected
+            ))
+        if self.stdout:
+            message += "\n\tSTDOUT:\n{}".format(self._makestr(self.stdout))
+        if self.stderr:
+            message += "\n\tSTDERR:\n{}".format(self._makestr(self.stderr))
+        super(SSHCalledProcessError, self).__init__(message)
+
+    @property
+    def output(self):
+        warnings.warn(
+            'output is deprecated, please use stdout and stderr separately',
+            DeprecationWarning)
+        return self.stdout + self.stderr
+
+
+class SSHTimeoutError(SSHClientError):
+    pass
+
+
+__all__ = ['SSHAuth', 'SSHClient', 'SSHClientError', 'SSHCalledProcessError',
+           'SSHTimeoutError']
diff --git a/reclass_tools/walk_models.py b/reclass_tools/walk_models.py
new file mode 100644
index 0000000..353b0e9
--- /dev/null
+++ b/reclass_tools/walk_models.py
@@ -0,0 +1,218 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+from __future__ import print_function
+
+import os
+
+import yaml
+
+from reclass_tools.helpers import ssh_client
+
+
+def walkfiles(topdir, identity_files=None, verbose=False):
+    if ":" in topdir:
+        host, path = topdir.split(":")
+        private_keys = ssh_client.get_private_keys(
+            os.environ.get("HOME"), identity_files)
+        if "@" in host:
+            username, host = host.split("@")
+        else:
+            username = os.environ.get("USER")
+        remote = ssh_client.SSHClient(
+            host, username=username, private_keys=private_keys)
+
+        walker = remote.walk(path)
+        opener = remote.open
+        prefix = remote.hostname + ":"
+        isdir = remote.isdir(path, follow_symlink=True)
+    else:
+        walker = os.walk(topdir)
+        opener = open
+        prefix = ''
+        isdir = os.path.isdir(topdir)
+
+    if isdir:
+        for dirName, subdirList, fileList in walker:
+            for filename in fileList:
+                filepath = os.path.join(dirName, filename)
+                if verbose:
+                    print(prefix + filepath)
+                with OpenFile(filepath, opener) as log:
+                    yield log
+    else:
+        if verbose:
+            print(topdir)
+        with OpenFile(topdir, opener) as log:
+            yield log
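+
+# Usage sketch (paths and host below are placeholders): walkfiles() accepts
+# either a local directory or a "user@host:/path" location reached over SSH:
+#
+#   for log in walkfiles('/srv/salt/reclass/classes'):
+#       print(log.fname)
+#   for log in walkfiles('root@10.0.0.1:/srv/salt/reclass/classes'):
+#       print(log.fname)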
+
+
+def yaml_read(yaml_file):
+    if os.path.isfile(yaml_file):
+        with open(yaml_file, 'r') as f:
+            return yaml.load(f)
+    else:
+        print("\'{}\' is not a file!".format(yaml_file))
+
+
+class OpenFile(object):
+
+    fname = None
+    opener = None
+    readlines = None
+    fobj = None
+
+    def __init__(self, fname, opener):
+        self.fname = fname
+        self.opener = opener
+
+    def get_parser(self):
+        parsers = {
+            '/lastlog': self.fake_parser,
+            '/wtmp': self.fake_parser,
+            '/btmp': self.fake_parser,
+            '/atop.log': self.fake_parser,
+            '/atop_': self.fake_parser,
+            '/atop_current': self.fake_parser,
+            '/supervisord.log': self.docker_parser,
+            '.gz': self.gz_parser,
+            '.bz2': self.bz2_parser,
+        }
+        for w in parsers.keys():
+            if w in self.fname:
+                self.readlines = parsers[w]
+                return
+        try:
+            self.fobj = self.opener(self.fname, 'r')
+            self.readlines = self.plaintext_parser
+        except IOError as e:
+            print("Error opening file {0}: {1}".format(self.fname, e))
+            if self.fobj:
+                self.fobj.close()
+            self.fobj = None
+            self.readlines = self.fake_parser
+
+    def plaintext_parser(self):
+        try:
+            for s in self.fobj.readlines():
+                yield s
+        except IOError as e:
+            print("Error reading file {0}: {1}".format(self.fname, e))
+
+    def fake_parser(self):
+        yield ''
+
+    def docker_parser(self):
+        yield ''
+
+    def gz_parser(self):
+        yield ''
+
+    def bz2_parser(self):
+        yield ''
+
+    def __enter__(self):
+        self.get_parser()
+        return self
+
+    def __exit__(self, exc_type, exc_value, exc_tb):
+        if self.fobj:
+            self.fobj.close()
+
+
+def get_nested_key(data, path=None):
+    if not isinstance(path, list):
+        raise TypeError("Use 'list' object with key names for 'path'")
+    for key in path:
+        value = data.get(key, None)
+        if value is not None:
+            data = value
+        else:
+            return None
+    return data
+
+
+def remove_nested_key(data, path=None):
+    if not isinstance(path, list):
+        raise TypeError("Use 'list' object with key names for 'path'")
+
+    # Remove the value from the specified key
+    val = get_nested_key(data, path[:-1])
+    if val is None:
+        # Nothing to remove: the parent key does not exist
+        return
+    val[path[-1]] = None
+
+    # Clear parent keys if empty
+    while path:
+        val = get_nested_key(data, path)
+        if val:
+            # Non-empty value, nothing to do
+            return
+        else:
+            get_nested_key(data, path[:-1]).pop(path[-1])
+            path = path[:-1]
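+
+# Sketch of how the two helpers above behave on a hypothetical dict:
+#
+#   data = {'parameters': {'_param': {'cluster_name': 'lab'}}}
+#   get_nested_key(data, ['parameters', '_param', 'cluster_name'])  # 'lab'
+#   remove_nested_key(data, ['parameters', '_param', 'cluster_name'])
+#   # emptied parent keys are popped as well, leaving data == {}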
+
+
+def get_all_reclass_params(paths, identity_files=None, verbose=False):
+    """Return dict with all used values for each param"""
+    #path = '/srv/salt/reclass/classes'
+    _params = dict()
+    for path in paths:
+        for log in walkfiles(
+                path, identity_files=identity_files, verbose=verbose):
+            if log.fname.endswith('.yml'):
+                model = yaml_read(log.fname)
+                if model is not None:
+                    # Collect all params from the models
+                    _param = get_nested_key(model, ['parameters', '_param'])
+                    if _param:
+                        for key, val in _param.items():
+                            if key in _params:
+                                _params[key].append(val)
+                            else:
+                                _params[key] = [val]
+
+    return _params
+    #print(yaml.dump(_params))
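+
+# Usage sketch (the model path below is a placeholder):
+#
+#   params = get_all_reclass_params(['/srv/salt/reclass/classes'])
+#   print(yaml.dump(params))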
+
+
+def remove_reclass_parameter(path, parameter, verbose=False):
+    """Remove a key from 'parameters' in all reclass models
+
+    Note: the 'parameter' argument is accepted but not used yet; for now
+    the hardcoded 'parameters:linux:network:interface' key is removed.
+    """
+    #path = '/srv/salt/reclass/classes'
+    for log in walkfiles(path, verbose=verbose):
+        if log.fname.endswith('.yml'):
+            model = yaml_read(log.fname)
+            if model is not None:
+
+                # Clear linux.network.interfaces
+                interfaces = get_nested_key(
+                    model, ['parameters', 'linux', 'network', 'interface'])
+                if interfaces:
+                    print(log.fname)
+                    print(interfaces.keys())
+
+                    remove_nested_key(
+                        model,
+                        ['parameters', 'linux', 'network', 'interface'])
+
+                    print(model)
+                    with open(log.fname, 'w') as f:
+                        f.write(
+                            yaml.dump(
+                                model, default_flow_style=False
+                            )
+                        )
+
+#                #print(yaml.dump(interfaces, default_flow_style=False))
+
+#                lvm = get_nested_key(model, ['parameters', 'linux', 'storage', 'lvm'])
+#                if lvm:
+#                    print(log.fname)
+#                    print(lvm.keys())
+#                    #print(yaml.dump(lvm, default_flow_style=False))
+
+#                mount = get_nested_key(model, ['parameters', 'linux', 'storage', 'mount'])
+#                if mount:
+#                    print(log.fname)
+#                    print(mount.keys())
+#                    #print(yaml.dump(mount, default_flow_style=False))
+
+#                swap = get_nested_key(model, ['parameters', 'linux', 'storage', 'swap'])
+#                if swap:
+#                    print(log.fname)
+#                    print(swap.keys())
+#                        #print(yaml.dump(swap, default_flow_style=False))