Initial commit

add first helper: reclass-dump-params
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..72364f9
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,89 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other info into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*,cover
+.hypothesis/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# IPython Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# dotenv
+.env
+
+# virtualenv
+venv/
+ENV/
+
+# Spyder project settings
+.spyderproject
+
+# Rope project settings
+.ropeproject
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 0000000..3f676ce
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,484 @@
+[MASTER]
+
+# Specify a configuration file.
+#rcfile=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Add files or directories to the blacklist. They should be base names, not
+# paths.
+ignore=CVS, tox, logs, migrations
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# List of plugins (as comma separated values of python modules names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Use multiple processes to speed up Pylint.
+jobs=1
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loaded into the active Python interpreter and may
+# run arbitrary code.
+extension-pkg-whitelist=
+
+# Allow optimization of some AST trees. This will activate a peephole AST
+# optimizer, which will apply various small optimizations. For instance, it can
+# be used to obtain the result of joining multiple strings with the addition
+# operator. Joining a lot of strings can lead to a maximum recursion error in
+# Pylint and this flag can prevent that. It has one side effect, the resulting
+# AST will be different than the one from reality.
+optimize-ast=no
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
+confidence=
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifiers separated by a comma (,) or put this
+# option multiple times. See also the "--disable" option for examples.
+
+# old-style-class (C1001)
+# return-arg-in-generator (E0106)
+# slots-on-old-class (E1001)
+# super-on-old-class (E1002)
+# missing-super-argument (E1004)
+# print-statement (E1601)
+# parameter-unpacking (E1602)
+# unpacking-in-except (E1603)
+# old-raise-syntax (E1604)
+# backtick (E1605)
+# long-suffix (E1606)
+# old-ne-operator (E1607)
+# old-octal-literal (E1608)
+# import-star-module-level (E1609)
+# lowercase-l-suffix (W0332)
+# deprecated-module (W0402)
+# invalid-encoded-data (W0512)
+# property-on-old-class (W1001)
+# boolean-datetime (W1502)
+# deprecated-method (W1505)
+# apply-builtin (W1601)
+# basestring-builtin (W1602)
+# buffer-builtin (W1603)
+# cmp-builtin (W1604)
+# coerce-builtin (W1605)
+# execfile-builtin (W1606)
+# file-builtin (W1607)
+# long-builtin (W1608)
+# raw_input-builtin (W1609)
+# reduce-builtin (W1610)
+# standarderror-builtin (W1611)
+# unicode-builtin (W1612)
+# xrange-builtin (W1613)
+# coerce-method (W1614)
+# delslice-method (W1615)
+# getslice-method (W1616)
+# setslice-method (W1617)
+# old-division (W1619)
+# dict-iter-method (W1620)
+# dict-view-method (W1621)
+# next-method-called (W1622)
+# metaclass-assignment (W1623)
+# indexing-exception (W1624)
+# raising-string (W1625)
+# reload-builtin (W1626)
+# oct-method (W1627)
+# hex-method (W1628)
+# nonzero-method (W1629)
+# cmp-method (W1630)
+# input-builtin (W1632)
+# round-builtin (W1633)
+# intern-builtin (W1634)
+# unichr-builtin (W1635)
+# map-builtin-not-iterating (W1636)
+# zip-builtin-not-iterating (W1637)
+# range-builtin-not-iterating (W1638)
+# filter-builtin-not-iterating (W1639)
+# filter-builtin-not-iterating (W1639)
+# using-cmp-argument (W1640)
+
+enable = E0106,C1001,E1001,E1002,E1004,E1601,E1602,E1603,E1604,E1605,E1606,E1607,E1608,E1609,W0332,W0402,W0512,W1001,W1502,W1505,W1601,W1602,W1603,W1604,W1605,W1606,W1607,W1608,W1609,W1610,W1611,W1612,W1613,W1614,W1615,W1616,W1617,W1619,W1620,W1621,W1622,W1623,W1624,W1625,W1626,W1627,W1628,W1629,W1630,W1632,W1633,W1634,W1635,W1636,W1637,W1638,W1639,W1640,
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once). You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use "--disable=all --enable=classes
+# --disable=W"
+
+# Disabling pointless reports:
+# RP0401:	External dependencies
+# RP0402:	Modules dependencies graph
+# RP0801:	Duplication
+# R0801: Duplication
+# cyclic-import (R0401) - produces false-negative results
+
+# Disabling messages:
+# no-member (E1101) - false negative on django
+
+# pointless-string-statement (W0105)
+# unnecessary-lambda (W0108)
+# deprecated-lambda (W0110)
+# bad-builtin (W0141)
+# fixme (W0511)
+# unused-argument (W0613)
+# redefined-outer-name (W0621)
+# cell-var-from-loop (W0640)
+# bare-except (W0702)
+# broad-except (W0703)
+# logging-format-interpolation (W1202)
+# anomalous-backslash-in-string (W1401) - DO NOT ENABLE, INCORRECTLY PARSES REGEX
+# no-absolute-import (W1618):
+#  'import missing `from __future__ import absolute_import`' - emitted when an import is not accompanied by `from __future__ import absolute_import` (the default behaviour in Python 3)
+
+# invalid-name (C0103)
+# missing-docstring (C0111)
+# misplaced-comparison-constant (C0122)
+# too-many-lines (C0302)
+# bad-continuation (C0330)
+
+# too-many-ancestors (R0901)
+# too-many-instance-attributes (R0902)
+# too-many-public-methods (R0904)
+# too-few-public-methods (R0903)
+# too-many-return-statements (R0911)
+# too-many-branches (R0912)
+# too-many-arguments (R0913)
+# too-many-locals (R0914)
+# too-many-statements (R0915)
+
+# locally-disabled (I0011)
+# locally-enabled (I0012)
+
+disable=E1101,I0011,I0012,R0902,RP0401,RP0402,RP0801,R0801,W0141,W1618,W0621,W1202,W1401,W0703,W0702,C0111,W0640,C0122,W0511, W0613, C0103, R0903, C0330, C0302, R0915, R0914, R0912, W0105, R0904, R0911, W0108, W0110, R0913, R0901, R0401
+
+[REPORTS]
+
+# Set the output format. Available formats are text, parseable, colorized, msvs
+# (visual studio) and html. You can also give a reporter class, eg
+# mypackage.mymodule.MyReporterClass.
+output-format=colorized
+#output-format=parseable
+
+# Put messages in a separate file for each module / package specified on the
+# command line instead of printing them on stdout. Reports (if any) will be
+# written in a file name "pylint_global.[txt|html]".
+files-output=no
+
+# Tells whether to display a full report or only the messages
+reports=yes
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables errors warning, statement which
+# respectively contain the number of errors / warnings messages and the total
+# number of statements analyzed. This is used by the global evaluation report
+# (RP0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details
+#msg-template=
+
+
+[VARIABLES]
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# A regular expression matching the name of dummy variables (i.e. expectedly
+# not used).
+dummy-variables-rgx=_$|dummy
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid to define new builtins when possible.
+additional-builtins=
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,_cb
+
+
+[TYPECHECK]
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members=yes
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis). It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# List of classes names for which member attributes should not be checked
+# (useful for classes with attributes dynamically set). This can work
+# with qualified names.
+ignored-classes=
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=
+
+
+[SPELLING]
+
+# Spelling dictionary name. Available dictionaries: none. To make it work,
+# install the python-enchant package.
+spelling-dict=
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to indicated private dictionary in
+# --spelling-private-dict-file option instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[SIMILARITIES]
+
+# Minimum lines number of a similarity.
+min-similarity-lines=10
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+ignore-imports=no
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,XXX,TODO
+
+
+[LOGGING]
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format
+logging-modules=logging
+
+
+[FORMAT]
+
+# Maximum number of characters on a single line.
+max-line-length=80
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+# List of optional constructs for which whitespace checking is disabled. `dict-
+# separator` is used to allow tabulation in dicts, etc.: {1  : 1,\n222: 2}.
+# `trailing-comma` allows a space between comma and closing bracket: (a, ).
+# `empty-line` allows space-only lines.
+no-space-check=trailing-comma,dict-separator
+
+# Maximum number of lines in a module
+max-module-lines=1500
+
+# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
+# tab).
+indent-string='    '
+
+# Number of spaces of indent required inside a hanging  or continued line.
+indent-after-paren=4
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+
+[BASIC]
+
+# List of builtins function names that should not be used, separated by a comma
+bad-functions=map,filter,input
+
+# Good variable names which should always be accepted, separated by a comma
+good-names=i,j,k,ex,Run,_,x,e,ip
+
+# Bad variable names which should always be refused, separated by a comma
+bad-names=foo,bar,baz,toto,tutu,tata
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Include a hint for the correct naming format with invalid-name
+include-naming-hint=no
+
+# Regular expression matching correct function names
+function-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for function names
+function-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression matching correct variable names
+variable-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for variable names
+variable-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression matching correct constant names
+const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Naming hint for constant names
+const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Regular expression matching correct attribute names
+attr-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for attribute names
+attr-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression matching correct argument names
+argument-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for argument names
+argument-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression matching correct class attribute names
+class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
+
+# Naming hint for class attribute names
+class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
+
+# Regular expression matching correct inline iteration names
+inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
+
+# Naming hint for inline iteration names
+inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
+
+# Regular expression matching correct class names
+class-rgx=[A-Z_][a-zA-Z0-9]+$
+
+# Naming hint for class names
+class-name-hint=[A-Z_][a-zA-Z0-9]+$
+
+# Regular expression matching correct module names
+module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Naming hint for module names
+module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Regular expression matching correct method names
+method-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for method names
+method-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=^_
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+
+[ELIF]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+
+[IMPORTS]
+
+# Deprecated modules which should not be used, separated by a comma
+deprecated-modules=regsub,TERMIOS,Bastion,rexec
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+import-graph=
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+ext-import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+int-import-graph=
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method
+max-args=10
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore
+ignored-argument-names=_.*
+
+# Maximum number of locals for function / method body
+max-locals=15
+
+# Maximum number of return / yield for function / method body
+max-returns=6
+
+# Maximum number of branch for function / method body
+max-branches=12
+
+# Maximum number of statements in function / method body
+max-statements=50
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=15
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of boolean expressions in a if statement
+max-bool-expr=5
+
+
+[CLASSES]
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,__new__,setUp
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=mcs
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,_fields,_replace,_source,_make
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "Exception"
+overgeneral-exceptions=Exception
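
The rcfile above turns on the Python 3 compatibility and deprecation checks
listed under [MESSAGES CONTROL] and silences the design/style messages named
in the disable option. As a quick illustration (not part of the committed
file), the same configuration can be exercised through pylint's programmatic
entry point; the package name below is the one introduced later in this patch:

    # Sketch only: run pylint with the committed rcfile against the package.
    # pylint.lint.Run prints the report and calls sys.exit() when finished.
    from pylint.lint import Run

    Run(['--rcfile=.pylintrc', 'reclass_tools'])
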
diff --git a/.pylintrc_gerrit b/.pylintrc_gerrit
new file mode 100644
index 0000000..5c45137
--- /dev/null
+++ b/.pylintrc_gerrit
@@ -0,0 +1,485 @@
+[MASTER]
+
+# Specify a configuration file.
+#rcfile=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Add files or directories to the blacklist. They should be base names, not
+# paths.
+ignore=CVS, tox, logs, migrations
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# List of plugins (as comma separated values of python modules names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Use multiple processes to speed up Pylint.
+jobs=1
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loaded into the active Python interpreter and may
+# run arbitrary code.
+extension-pkg-whitelist=
+
+# Allow optimization of some AST trees. This will activate a peephole AST
+# optimizer, which will apply various small optimizations. For instance, it can
+# be used to obtain the result of joining multiple strings with the addition
+# operator. Joining a lot of strings can lead to a maximum recursion error in
+# Pylint and this flag can prevent that. It has one side effect, the resulting
+# AST will be different than the one from reality.
+optimize-ast=no
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
+confidence=
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifiers separated by a comma (,) or put this
+# option multiple times. See also the "--disable" option for examples.
+
+# old-style-class (C1001)
+# return-arg-in-generator (E0106)
+# slots-on-old-class (E1001)
+# super-on-old-class (E1002)
+# missing-super-argument (E1004)
+# print-statement (E1601)
+# parameter-unpacking (E1602)
+# unpacking-in-except (E1603)
+# old-raise-syntax (E1604)
+# backtick (E1605)
+# long-suffix (E1606)
+# old-ne-operator (E1607)
+# old-octal-literal (E1608)
+# import-star-module-level (E1609)
+# lowercase-l-suffix (W0332)
+# deprecated-module (W0402)
+# invalid-encoded-data (W0512)
+# property-on-old-class (W1001)
+# boolean-datetime (W1502)
+# deprecated-method (W1505)
+# apply-builtin (W1601)
+# basestring-builtin (W1602)
+# buffer-builtin (W1603)
+# cmp-builtin (W1604)
+# coerce-builtin (W1605)
+# execfile-builtin (W1606)
+# file-builtin (W1607)
+# long-builtin (W1608)
+# raw_input-builtin (W1609)
+# reduce-builtin (W1610)
+# standarderror-builtin (W1611)
+# unicode-builtin (W1612)
+# xrange-builtin (W1613)
+# coerce-method (W1614)
+# delslice-method (W1615)
+# getslice-method (W1616)
+# setslice-method (W1617)
+# old-division (W1619)
+# dict-iter-method (W1620)
+# dict-view-method (W1621)
+# next-method-called (W1622)
+# metaclass-assignment (W1623)
+# indexing-exception (W1624)
+# raising-string (W1625)
+# reload-builtin (W1626)
+# oct-method (W1627)
+# hex-method (W1628)
+# nonzero-method (W1629)
+# cmp-method (W1630)
+# input-builtin (W1632)
+# round-builtin (W1633)
+# intern-builtin (W1634)
+# unichr-builtin (W1635)
+# map-builtin-not-iterating (W1636)
+# zip-builtin-not-iterating (W1637)
+# range-builtin-not-iterating (W1638)
+# filter-builtin-not-iterating (W1639)
+# filter-builtin-not-iterating (W1639)
+# using-cmp-argument (W1640)
+
+enable = E0106,C1001,E1001,E1002,E1004,E1601,E1602,E1603,E1604,E1605,E1606,E1607,E1608,E1609,W0332,W0402,W0512,W1001,W1502,W1505,W1601,W1602,W1603,W1604,W1605,W1606,W1607,W1608,W1609,W1610,W1611,W1612,W1613,W1614,W1615,W1616,W1617,W1619,W1620,W1621,W1622,W1623,W1624,W1625,W1626,W1627,W1628,W1629,W1630,W1632,W1633,W1634,W1635,W1636,W1637,W1638,W1639,W1640,
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once). You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use "--disable=all --enable=classes
+# --disable=W"
+
+# Disabling pointless reports:
+# RP0401:	External dependencies
+# RP0402:	Modules dependencies graph
+# RP0801:	Duplication
+# R0801: Duplication
+# cyclic-import (R0401) - produces false-negative results
+
+# Disabling messages:
+# no-member (E1101) - false negative on django
+
+# pointless-string-statement (W0105)
+# unnecessary-lambda (W0108)
+# deprecated-lambda (W0110)
+# bad-builtin (W0141)
+# protected-access (W0212)
+# fixme (W0511)
+# unused-argument (W0613)
+# redefined-outer-name (W0621)
+# cell-var-from-loop (W0640)
+# bare-except (W0702)
+# broad-except (W0703)
+# logging-format-interpolation (W1202)
+# anomalous-backslash-in-string (W1401) - DO NOT ENABLE, INCORRECTLY PARSES REGEX
+# no-absolute-import (W1618):
+#  'import missing `from __future__ import absolute_import`' - emitted when an import is not accompanied by `from __future__ import absolute_import` (the default behaviour in Python 3)
+
+# invalid-name (C0103)
+# missing-docstring (C0111)
+# misplaced-comparison-constant (C0122)
+# too-many-lines (C0302)
+# bad-continuation (C0330)
+
+# too-many-ancestors (R0901)
+# too-many-instance-attributes (R0902)
+# too-many-public-methods (R0904)
+# too-few-public-methods (R0903)
+# too-many-return-statements (R0911)
+# too-many-branches (R0912)
+# too-many-arguments (R0913)
+# too-many-locals (R0914)
+# too-many-statements (R0915)
+
+# locally-disabled (I0011)
+# locally-enabled (I0012)
+
+disable=E1101,I0011,I0012,R0902,RP0401,RP0402,RP0801,R0801, W0141,W1618,W0621,W1202,W1401,W0703,W0702,C0111,W0640,C0122,W0511, W0613, C0103, R0903, C0330, W0212, C0302, R0915, R0914, R0912, W0105, R0904, R0911, W0108, W0110, R0913, R0901, R0401
+
+[REPORTS]
+
+# Set the output format. Available formats are text, parseable, colorized, msvs
+# (visual studio) and html. You can also give a reporter class, eg
+# mypackage.mymodule.MyReporterClass.
+output-format=text
+#output-format=parseable
+
+# Put messages in a separate file for each module / package specified on the
+# command line instead of printing them on stdout. Reports (if any) will be
+# written in a file name "pylint_global.[txt|html]".
+files-output=no
+
+# Tells whether to display a full report or only the messages
+reports=yes
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables errors, warning and statement, which
+# respectively contain the number of errors / warnings messages and the total
+# number of statements analyzed. This is used by the global evaluation report
+# (RP0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details
+#msg-template=
+
+
+[VARIABLES]
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# A regular expression matching the name of dummy variables (i.e. expectedly
+# not used).
+dummy-variables-rgx=_$|dummy
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid to define new builtins when possible.
+additional-builtins=
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,_cb
+
+
+[TYPECHECK]
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members=yes
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis). It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# List of classes names for which member attributes should not be checked
+# (useful for classes with attributes dynamically set). This can work
+# with qualified names.
+ignored-classes=
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=
+
+
+[SPELLING]
+
+# Spelling dictionary name. Available dictionaries: none. To make it work,
+# install the python-enchant package.
+spelling-dict=
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to indicated private dictionary in
+# --spelling-private-dict-file option instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[SIMILARITIES]
+
+# Minimum lines number of a similarity.
+min-similarity-lines=10
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+ignore-imports=no
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,XXX,TODO
+
+
+[LOGGING]
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format
+logging-modules=logging
+
+
+[FORMAT]
+
+# Maximum number of characters on a single line.
+max-line-length=80
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+# List of optional constructs for which whitespace checking is disabled. `dict-
+# separator` is used to allow tabulation in dicts, etc.: {1  : 1,\n222: 2}.
+# `trailing-comma` allows a space between comma and closing bracket: (a, ).
+# `empty-line` allows space-only lines.
+no-space-check=trailing-comma,dict-separator
+
+# Maximum number of lines in a module
+max-module-lines=1500
+
+# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
+# tab).
+indent-string='    '
+
+# Number of spaces of indent required inside a hanging  or continued line.
+indent-after-paren=4
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+
+[BASIC]
+
+# List of builtins function names that should not be used, separated by a comma
+bad-functions=map,filter,input
+
+# Good variable names which should always be accepted, separated by a comma
+good-names=i,j,k,ex,Run,_,x,e,ip
+
+# Bad variable names which should always be refused, separated by a comma
+bad-names=foo,bar,baz,toto,tutu,tata
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Include a hint for the correct naming format with invalid-name
+include-naming-hint=no
+
+# Regular expression matching correct function names
+function-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for function names
+function-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression matching correct variable names
+variable-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for variable names
+variable-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression matching correct constant names
+const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Naming hint for constant names
+const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Regular expression matching correct attribute names
+attr-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for attribute names
+attr-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression matching correct argument names
+argument-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for argument names
+argument-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression matching correct class attribute names
+class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
+
+# Naming hint for class attribute names
+class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
+
+# Regular expression matching correct inline iteration names
+inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
+
+# Naming hint for inline iteration names
+inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
+
+# Regular expression matching correct class names
+class-rgx=[A-Z_][a-zA-Z0-9]+$
+
+# Naming hint for class names
+class-name-hint=[A-Z_][a-zA-Z0-9]+$
+
+# Regular expression matching correct module names
+module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Naming hint for module names
+module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Regular expression matching correct method names
+method-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for method names
+method-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=^_
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+
+[ELIF]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+
+[IMPORTS]
+
+# Deprecated modules which should not be used, separated by a comma
+deprecated-modules=regsub,TERMIOS,Bastion,rexec
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+import-graph=
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+ext-import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+int-import-graph=
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method
+max-args=10
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore
+ignored-argument-names=_.*
+
+# Maximum number of locals for function / method body
+max-locals=15
+
+# Maximum number of return / yield for function / method body
+max-returns=6
+
+# Maximum number of branch for function / method body
+max-branches=12
+
+# Maximum number of statements in function / method body
+max-statements=50
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=15
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of boolean expressions in a if statement
+max-bool-expr=5
+
+
+[CLASSES]
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,__new__,setUp
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=mcs
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,_fields,_replace,_source,_make
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "Exception"
+overgeneral-exceptions=Exception
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..8dada3e
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "{}"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright {yyyy} {name of copyright owner}
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/README.md b/README.md
index a3664b5..a01aab7 100644
--- a/README.md
+++ b/README.md
@@ -1 +1,14 @@
-# reclass-tools
\ No newline at end of file
+# reclass-tools
+QA tools for manipulating reclass models
+
+Install:
+--------
+
+    apt-get install python-virtualenv python-pip build-essential python-dev libssl-dev
+    virtualenv venv-reclass-tools
+    source venv-reclass-tools/bin/activate
+    pip install git+https://github.com/dis-xcom/reclass-tools
+
+Usage:
+------
+
diff --git a/bindep.txt b/bindep.txt
new file mode 100644
index 0000000..e0a9927
--- /dev/null
+++ b/bindep.txt
@@ -0,0 +1,7 @@
+# This is a cross-platform list tracking distribution packages needed by tests;
+# see http://docs.openstack.org/infra/bindep/ for additional information.
+
+
+build-essential [platform:dpkg]
+libssl-dev [platform:dpkg]
+python-dev [platform:dpkg]
diff --git a/reclass_tools/__init__.py b/reclass_tools/__init__.py
new file mode 100644
index 0000000..0418604
--- /dev/null
+++ b/reclass_tools/__init__.py
@@ -0,0 +1,38 @@
+import os
+import time
+import logging.config
+
+
+LOGGER_SETTINGS = {
+    'version': 1,
+    'disable_existing_loggers': False,
+    'loggers': {
+        'reclass_tools': {
+            'level': 'DEBUG',
+            'handlers': ['console_output'],
+        },
+        'paramiko': {'level': 'WARNING'},
+        'iso8601': {'level': 'WARNING'},
+        'keystoneauth': {'level': 'WARNING'},
+    },
+    'handlers': {
+        'console_output': {
+            'class': 'logging.StreamHandler',
+            'level': 'INFO',
+            'formatter': 'default',
+            'stream': 'ext://sys.stdout',
+        },
+    },
+    'formatters': {
+        'default': {
+            'format': '%(asctime)s - %(levelname)s - %(filename)s:'
+                      '%(lineno)d -- %(message)s',
+            'datefmt': '%Y-%m-%d %H:%M:%S',
+        },
+    },
+}
+
+logging.config.dictConfig(LOGGER_SETTINGS)
+# set logging timezone to GMT
+logging.Formatter.converter = time.gmtime
+logger = logging.getLogger(__name__)
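
The dictConfig above gives the 'reclass_tools' logger a DEBUG level and a
stdout handler that filters at INFO, so every module in the package inherits
the setup simply by requesting a child logger. A minimal sketch (the module
name is illustrative):

    # Sketch only: child loggers under 'reclass_tools' reuse the config above.
    import logging

    import reclass_tools  # importing the package applies LOGGER_SETTINGS

    log = logging.getLogger('reclass_tools.example')  # hypothetical child
    log.info('shown on stdout via the console_output handler (INFO level)')
    log.debug('accepted by the logger (DEBUG) but dropped by the INFO handler')
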
diff --git a/reclass_tools/cli.py b/reclass_tools/cli.py
new file mode 100644
index 0000000..b9ad985
--- /dev/null
+++ b/reclass_tools/cli.py
@@ -0,0 +1,56 @@
+#    Copyright 2013 - 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+from __future__ import print_function
+
+import argparse
+import os
+import sys
+import yaml
+
+from reclass_tools import walk_models
+
+
+def execute(params):
+
+    results = walk_models.get_all_reclass_params(
+        params.paths,
+        identity_files=params.identity_files,
+        verbose=params.verbose)
+
+    print(yaml.dump(results))
+
+
+def dump_params(args=None):
+    if args is None:
+        args = sys.argv[1:]
+
+    parser = argparse.ArgumentParser(
+        formatter_class=argparse.RawTextHelpFormatter,
+        description="")
+    parser.add_argument('-i', dest='identity_files',
+                        help=('For SSH connections, selects a file from which \n'
+                              'the identity (private key) for public key \n'
+                              'authentication is read. It is possible to have \n'
+                              'multiple -i options.'),
+                        action='append')
+    parser.add_argument('--verbose', dest='verbose', action='store_const', const=True,
+                        help='Show verbose output.', default=False)
+    parser.add_argument('paths', help='Paths to search for *.yml files.', nargs='+')
+
+    if len(args) == 0:
+        args = ['-h']
+
+    params = parser.parse_args(args)
+    execute(params)
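
dump_params() is the argparse entry point behind the reclass-dump-params
helper named in the commit subject (the console_scripts wiring itself is not
part of this patch). It collects reclass parameters from the given paths via
walk_models.get_all_reclass_params() and prints them as YAML. A sketch of
driving it directly from Python, with placeholder paths:

    # Sketch only: the key file and model path below are illustrative values,
    # not taken from this commit.
    from reclass_tools import cli

    cli.dump_params([
        '-i', '/home/user/.ssh/id_rsa',   # -i may be given several times
        '--verbose',
        '/srv/salt/reclass/classes',      # one or more paths to scan for *.yml
    ])
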
diff --git a/reclass_tools/helpers/__init__.py b/reclass_tools/helpers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/reclass_tools/helpers/__init__.py
diff --git a/reclass_tools/helpers/decorators.py b/reclass_tools/helpers/decorators.py
new file mode 100644
index 0000000..be79ec1
--- /dev/null
+++ b/reclass_tools/helpers/decorators.py
@@ -0,0 +1,318 @@
+#    Copyright 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+from __future__ import unicode_literals
+
+import collections
+import functools
+import inspect
+import logging
+import sys
+import threading
+import time
+
+import six
+
+from reclass_tools import logger
+
+
+def threaded(name=None, started=False, daemon=False):
+    """Make function or method threaded with passing arguments
+
+    If decorator added not as function, name is generated from function name.
+
+    :type name: str
+    :type started: bool
+    :type daemon: bool
+    """
+
+    def real_decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            """Thread generator for function
+
+            :rtype: Thread
+            """
+            if name is None:
+                func_name = 'Threaded {}'.format(func.__name__)
+            else:
+                func_name = name
+            thread = threading.Thread(
+                target=func,
+                name=func_name,
+                args=args,
+                kwargs=kwargs)
+            if daemon:
+                thread.daemon = True
+            if started:
+                thread.start()
+            return thread
+        return wrapper
+
+    if name is not None and callable(name):
+        func, name = name, None
+        return real_decorator(func)
+
+    return real_decorator
+
+
+def retry(exception, count=10, delay=1):
+    """Retry decorator
+
+    Re-runs the decorated callable with the same parameters when the given
+    :exception: is raised
+
+    :type exception: class
+    :param exception: exception class
+    :type count: int
+    :param count: retry count
+    :type delay: int
+    :param delay: delay between retries in seconds
+    :rtype: function
+    """
+    def decorator(func):
+        if inspect.ismethod(func):
+            full_name = '{}:{}.{}'.format(
+                inspect.getmodule(func.im_class).__name__,
+                func.im_class.__name__,
+                func.__name__)
+        elif inspect.isfunction(func):
+            full_name = '{}.{}'.format(
+                inspect.getmodule(func).__name__,
+                func.__name__)
+        else:
+            raise Exception(
+                'Wrong func parameter type {!r}'.format(func))
+
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            i = 0
+            while True:
+                try:
+                    return func(*args, **kwargs)
+                except exception as e:
+                    i += 1
+                    if i >= count:
+                        raise
+
+                    logger.debug(
+                        'Exception {!r} while running {!r}. '
+                        'Waiting {} seconds.'.format(e, func.__name__, delay),
+                        exc_info=True)  # logs traceback
+                    time.sleep(delay)
+
+                    arg_str = ', '.join((
+                        ', '.join(map(repr, args)),
+                        ', '.join('{}={!r}'.format(k, v) for k, v in kwargs.items()),
+                    ))
+                    logger.debug('Retrying {}({})'.format(full_name, arg_str))
+
+        return wrapper
+
+    return decorator
+
+
+# pylint: disable=no-member
+def get_arg_names(func):
+    """get argument names for function
+
+    :param func: func
+    :return: list of function argnames
+    :rtype: list
+
+    >>> def tst_1():
+    ...     pass
+
+    >>> get_arg_names(tst_1)
+    []
+
+    >>> def tst_2(arg):
+    ...     pass
+
+    >>> get_arg_names(tst_2)
+    ['arg']
+    """
+    # noinspection PyUnresolvedReferences
+    if six.PY2:
+        spec = inspect.getargspec(func=func)
+        args = spec.args[:]
+        if spec.varargs:
+            args.append(spec.varargs)
+        if spec.keywords:
+            args.append(spec.keywords)
+        return args
+    return list(inspect.signature(obj=func).parameters.keys())
+
+
+def _getcallargs(func, *positional, **named):
+    """get real function call arguments without calling function
+
+    :rtype: dict
+    """
+    # noinspection PyUnresolvedReferences
+    if sys.version_info[0:2] < (3, 5):  # apply_defaults is py35 feature
+        orig_args = inspect.getcallargs(func, *positional, **named)
+        # Construct OrderedDict as Py3
+        arguments = collections.OrderedDict(
+            [(key, orig_args[key]) for key in get_arg_names(func)]
+        )
+        return arguments
+    sig = inspect.signature(func).bind(*positional, **named)
+    sig.apply_defaults()  # bind() does not fill in default values by itself
+    return sig.arguments
+# pylint:enable=no-member
+
+
+def _simple(item):
+    """Check for nested iterations: True, if not"""
+    return not isinstance(item, (list, set, tuple, dict))
+
+
+_formatters = {
+    'simple': "{spc:<{indent}}{val!r}".format,
+    'text': "{spc:<{indent}}{prefix}'''{string}'''".format,
+    'dict': "\n{spc:<{indent}}{key!r:{size}}: {val},".format,
+    }
+
+
+def pretty_repr(src, indent=0, no_indent_start=False, max_indent=20):
+    """Make human readable repr of object
+
+    :param src: object to process
+    :type src: object
+    :param indent: start indentation; each nested level adds 4
+    :type indent: int
+    :param no_indent_start: do not indent open bracket and simple parameters
+    :type no_indent_start: bool
+    :param max_indent: maximal indent before classic repr() call
+    :type max_indent: int
+    :return: formatted string
+    """
+    if _simple(src) or indent >= max_indent:
+        indent = 0 if no_indent_start else indent
+        if isinstance(src, (six.binary_type, six.text_type)):
+            if isinstance(src, six.binary_type):
+                string = src.decode(
+                    encoding='utf-8',
+                    errors='backslashreplace'
+                )
+                prefix = 'b'
+            else:
+                string = src
+                prefix = 'u'
+            return _formatters['text'](
+                spc='',
+                indent=indent,
+                prefix=prefix,
+                string=string
+            )
+        return _formatters['simple'](
+            spc='',
+            indent=indent,
+            val=src
+        )
+    if isinstance(src, dict):
+        prefix, suffix = '{', '}'
+        result = ''
+        max_len = max(len(repr(key)) for key in src) if src else 0
+        for key, val in src.items():
+            result += _formatters['dict'](
+                spc='',
+                indent=indent + 4,
+                size=max_len,
+                key=key,
+                val=pretty_repr(val, indent + 8, no_indent_start=True)
+            )
+        return (
+            '\n{start:>{indent}}'.format(
+                start=prefix,
+                indent=indent + 1
+            ) +
+            result +
+            '\n{end:>{indent}}'.format(end=suffix, indent=indent + 1)
+        )
+    if isinstance(src, list):
+        prefix, suffix = '[', ']'
+    elif isinstance(src, tuple):
+        prefix, suffix = '(', ')'
+    else:
+        prefix, suffix = '{', '}'
+    result = ''
+    for elem in src:
+        if _simple(elem):
+            result += '\n'
+        result += pretty_repr(elem, indent + 4) + ','
+    return (
+        '\n{start:>{indent}}'.format(
+            start=prefix,
+            indent=indent + 1) +
+        result +
+        '\n{end:>{indent}}'.format(end=suffix, indent=indent + 1)
+    )
+
+
+def logwrap(log=logger, log_level=logging.DEBUG, exc_level=logging.ERROR):
+    """Log function calls
+
+    :type log: logging.Logger
+    :type log_level: int
+    :type exc_level: int
+    :rtype: callable
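+
+    Example (illustrative sketch; ``dump_params`` is an arbitrary function)::
+
+        @logwrap
+        def dump_params(path, keys=None):
+            ...
+
+        dump_params('/srv/salt/reclass')  # logs "Calling: ..." / "Done: ..."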
+    """
+    def real_decorator(func):
+        @functools.wraps(func)
+        def wrapped(*args, **kwargs):
+            call_args = _getcallargs(func, *args, **kwargs)
+            args_repr = ""
+            if len(call_args) > 0:
+                args_repr = "\n    " + "\n    ".join((
+                    "{key!r}={val},".format(
+                        key=key,
+                        val=pretty_repr(val, indent=8, no_indent_start=True)
+                    )
+                    for key, val in call_args.items())
+                ) + '\n'
+            log.log(
+                level=log_level,
+                msg="Calling: \n{name!r}({arguments})".format(
+                    name=func.__name__,
+                    arguments=args_repr
+                )
+            )
+            try:
+                result = func(*args, **kwargs)
+                log.log(
+                    level=log_level,
+                    msg="Done: {name!r} with result:\n{result}".format(
+                        name=func.__name__,
+                        result=pretty_repr(result))
+                )
+            except BaseException:
+                log.log(
+                    level=exc_level,
+                    msg="Failed: \n{name!r}({arguments})".format(
+                        name=func.__name__,
+                        arguments=args_repr,
+                    ),
+                    exc_info=True
+                )
+                raise
+            return result
+        return wrapped
+
+    if not isinstance(log, logging.Logger):
+        func, log = log, logger
+        return real_decorator(func)
+
+    return real_decorator
diff --git a/reclass_tools/helpers/exec_result.py b/reclass_tools/helpers/exec_result.py
new file mode 100644
index 0000000..3dc6245
--- /dev/null
+++ b/reclass_tools/helpers/exec_result.py
@@ -0,0 +1,379 @@
+#    Copyright 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+from __future__ import unicode_literals
+
+import json
+import threading
+
+import yaml
+
+from reclass_tools.helpers import proc_enums
+from reclass_tools import logger
+
+
+deprecated_aliases = {
+    'stdout_str',
+    'stderr_str',
+    'stdout_json',
+    'stdout_yaml'
+}
+
+
+class ExecResult(object):
+    __slots__ = [
+        '__cmd', '__stdout', '__stderr', '__exit_code',
+        '__stdout_str', '__stderr_str', '__stdout_brief', '__stderr_brief',
+        '__stdout_json', '__stdout_yaml',
+        '__lock'
+    ]
+
+    def __init__(self, cmd, stdout=None, stderr=None,
+                 exit_code=proc_enums.ExitCodes.EX_INVALID):
+        """Command execution result read from fifo
+
+        :type cmd: str
+        :type stdout: list
+        :type stderr: list
+        :type exit_code: ExitCodes
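+
+        Example (illustrative; the command and output are made up)::
+
+            res = ExecResult(cmd='uname -r', stdout=[b'4.4.0-81-generic'])
+            res.exit_code = 0
+            res.stdout_str   # -> '4.4.0-81-generic'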
+        """
+        self.__lock = threading.RLock()
+
+        self.__cmd = cmd
+        self.__stdout = stdout if stdout is not None else []
+        self.__stderr = stderr if stderr is not None else []
+
+        self.__exit_code = None
+        self.exit_code = exit_code
+
+        # By default is none:
+        self.__stdout_str = None
+        self.__stderr_str = None
+        self.__stdout_brief = None
+        self.__stderr_brief = None
+
+        self.__stdout_json = None
+        self.__stdout_yaml = None
+
+    @property
+    def lock(self):
+        """Lock object for thread-safe operation
+
+        :rtype: RLock
+        """
+        return self.__lock
+
+    @staticmethod
+    def _get_bytearray_from_array(src):
+        """Get bytearray from array of bytes blocks
+
+        :type src: list(bytes)
+        :rtype: bytearray
+        """
+        return bytearray(b''.join(src))
+
+    @staticmethod
+    def _get_str_from_bin(src):
+        """Join data in list to the string, with python 2&3 compatibility.
+
+        :type src: bytearray
+        :rtype: str
+        """
+        return src.strip().decode(
+            encoding='utf-8',
+            errors='backslashreplace'
+        )
+
+    @classmethod
+    def _get_brief(cls, data):
+        """Get brief output: 7 lines maximum (3 first + ... + 3 last)
+
+        :type data: list(bytes)
+        :rtype: str
+        """
+        src = data if len(data) <= 7 else data[:3] + [b'...\n'] + data[-3:]
+        return cls._get_str_from_bin(
+            cls._get_bytearray_from_array(src)
+        )
+
+    @property
+    def cmd(self):
+        """Executed command
+
+        :rtype: str
+        """
+        return self.__cmd
+
+    @property
+    def stdout(self):
+        """Stdout output as list of binaries
+
+        :rtype: list(bytes)
+        """
+        return self.__stdout
+
+    @stdout.setter
+    def stdout(self, new_val):
+        """Stdout output as list of binaries
+
+        :type new_val: list(bytes)
+        :raises: TypeError
+        """
+        if not isinstance(new_val, (list, type(None))):
+            raise TypeError('stdout should be list only!')
+        with self.lock:
+            self.__stdout_str = None
+            self.__stdout_brief = None
+            self.__stdout_json = None
+            self.__stdout_yaml = None
+            self.__stdout = new_val
+
+    @property
+    def stderr(self):
+        """Stderr output as list of binaries
+
+        :rtype: list(bytes)
+        """
+        return self.__stderr
+
+    @stderr.setter
+    def stderr(self, new_val):
+        """Stderr output as list of binaries
+
+        :type new_val: list(bytes)
+        :raises: TypeError
+        """
+        if not isinstance(new_val, (list, type(None))):
+            raise TypeError('stderr should be list only!')
+        with self.lock:
+            self.__stderr_str = None
+            self.__stderr_brief = None
+            self.__stderr = new_val
+
+    @property
+    def stdout_bin(self):
+        """Stdout in binary format
+
+        Sometimes logging is used to log binary objects too (example: Session),
+        and for debug purposes we can use this as data source.
+        :rtype: bytearray
+        """
+        with self.lock:
+            return self._get_bytearray_from_array(self.stdout)
+
+    @property
+    def stderr_bin(self):
+        """Stderr in binary format
+
+        :rtype: bytearray
+        """
+        with self.lock:
+            return self._get_bytearray_from_array(self.stderr)
+
+    @property
+    def stdout_str(self):
+        """Stdout output as string
+
+        :rtype: str
+        """
+        with self.lock:
+            if self.__stdout_str is None:
+                self.__stdout_str = self._get_str_from_bin(self.stdout_bin)
+            return self.__stdout_str
+
+    @property
+    def stderr_str(self):
+        """Stderr output as string
+
+        :rtype: str
+        """
+        with self.lock:
+            if self.__stderr_str is None:
+                self.__stderr_str = self._get_str_from_bin(self.stderr_bin)
+            return self.__stderr_str
+
+    @property
+    def stdout_brief(self):
+        """Brief stdout output (mostly for exceptions)
+
+        :rtype: str
+        """
+        with self.lock:
+            if self.__stdout_brief is None:
+                self.__stdout_brief = self._get_brief(self.stdout)
+            return self.__stdout_brief
+
+    @property
+    def stderr_brief(self):
+        """Brief stderr output (mostly for exceptions)
+
+        :rtype: str
+        """
+        with self.lock:
+            if self.__stderr_brief is None:
+                self.__stderr_brief = self._get_brief(self.stderr)
+            return self.__stderr_brief
+
+    @property
+    def exit_code(self):
+        """Return(exit) code of command
+
+        :rtype: int
+        """
+        return self.__exit_code
+
+    @exit_code.setter
+    def exit_code(self, new_val):
+        """Return(exit) code of command
+
+        :type new_val: int
+        """
+        if not isinstance(new_val, (int, proc_enums.ExitCodes)):
+            raise TypeError('Exit code is strictly int')
+        with self.lock:
+            if isinstance(new_val, int) and \
+                    new_val in proc_enums.ExitCodes.__members__.values():
+                new_val = proc_enums.ExitCodes(new_val)
+            self.__exit_code = new_val
+
+    def __deserialize(self, fmt):
+        """Deserialize stdout as data format
+
+        :type fmt: str
+        :rtype: object
+        :raises: TypeError, NotImplementedError
+        """
+        try:
+            if fmt == 'json':
+                return json.loads(self.stdout_str, encoding='utf-8')
+            elif fmt == 'yaml':
+                return yaml.safe_load(self.stdout_str)
+        except BaseException:
+            tmpl = (
+                " stdout is not valid {fmt}:\n"
+                '{{stdout!r}}\n'.format(
+                    fmt=fmt))
+            logger.exception(self.cmd + tmpl.format(stdout=self.stdout_str))
+            raise TypeError(
+                self.cmd + tmpl.format(stdout=self.stdout_brief))
+        msg = '{fmt} deserialize target is not implemented'.format(fmt=fmt)
+        logger.error(msg)
+        raise NotImplementedError(msg)
+
+    @property
+    def stdout_json(self):
+        """JSON from stdout
+
+        :rtype: object
+        """
+        with self.lock:
+            if self.__stdout_json is None:
+                # noinspection PyTypeChecker
+                self.__stdout_json = self.__deserialize(fmt='json')
+            return self.__stdout_json
+
+    @property
+    def stdout_yaml(self):
+        """YAML from stdout
+
+        :rtype: Union(list, dict, None)
+        """
+        with self.lock:
+            if self.__stdout_yaml is None:
+                # noinspection PyTypeChecker
+                self.__stdout_yaml = self.__deserialize(fmt='yaml')
+            return self.__stdout_yaml
+
+    def __dir__(self):
+        return [
+            'cmd', 'stdout', 'stderr', 'exit_code',
+            'stdout_bin', 'stderr_bin',
+            'stdout_str', 'stderr_str', 'stdout_brief', 'stderr_brief',
+            'stdout_json', 'stdout_yaml',
+            'lock'
+        ]
+
+    def __getitem__(self, item):
+        if item in dir(self):
+            return getattr(self, item)
+        raise IndexError(
+            '"{item}" not found in {dir}'.format(
+                item=item, dir=dir(self)
+            )
+        )
+
+    def __setitem__(self, key, value):
+        rw = ['stdout', 'stderr', 'exit_code']
+        if key in rw:
+            setattr(self, key, value)
+            return
+        if key in deprecated_aliases:
+            logger.warning(
+                '{key} is read-only and calculated automatically'.format(
+                    key=key
+                )
+            )
+            return
+        if key in dir(self):
+            raise RuntimeError(
+                '{key} is read-only!'.format(key=key)
+            )
+        raise IndexError(
+            '{key} not found in {dir}'.format(
+                key=key, dir=rw
+            )
+        )
+
+    def __repr__(self):
+        return (
+            '{cls}(cmd={cmd!r}, stdout={stdout}, stderr={stderr}, '
+            'exit_code={exit_code!s})'.format(
+                cls=self.__class__.__name__,
+                cmd=self.cmd,
+                stdout=self.stdout,
+                stderr=self.stderr,
+                exit_code=self.exit_code
+            ))
+
+    def __str__(self):
+        return (
+            "{cls}(\n\tcmd={cmd!r},"
+            "\n\t stdout=\n'{stdout_brief}',"
+            "\n\tstderr=\n'{stderr_brief}', "
+            '\n\texit_code={exit_code!s}\n)'.format(
+                cls=self.__class__.__name__,
+                cmd=self.cmd,
+                stdout_brief=self.stdout_brief,
+                stderr_brief=self.stderr_brief,
+                exit_code=self.exit_code
+            )
+        )
+
+    def __eq__(self, other):
+        return all(
+            (
+                getattr(self, val) == getattr(other, val)
+                for val in ['cmd', 'stdout', 'stderr', 'exit_code']
+            )
+        )
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __hash__(self):
+        return hash(
+            (
+                self.__class__, self.cmd, self.stdout_str, self.stderr_str,
+                self.exit_code
+            ))
diff --git a/reclass_tools/helpers/proc_enums.py b/reclass_tools/helpers/proc_enums.py
new file mode 100644
index 0000000..73518fc
--- /dev/null
+++ b/reclass_tools/helpers/proc_enums.py
@@ -0,0 +1,124 @@
+#    Copyright 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+import enum
+
+
+@enum.unique
+class SigNum(enum.IntEnum):
+    SIGHUP = 1  # Hangup (POSIX).
+    SIGINT = 2  # Interrupt (ANSI).
+    SIGQUIT = 3  # Quit (POSIX).
+    SIGILL = 4  # Illegal instruction (ANSI).
+    SIGTRAP = 5  # Trace trap (POSIX).
+    SIGABRT = 6  # Abort (ANSI).
+    SIGBUS = 7  # BUS error (4.2 BSD).
+    SIGFPE = 8  # Floating-point exception (ANSI).
+    SIGKILL = 9  # Kill, unblockable (POSIX).
+    SIGUSR1 = 10  # User-defined signal 1 (POSIX).
+    SIGSEGV = 11  # Segmentation violation (ANSI).
+    SIGUSR2 = 12  # User-defined signal 2 (POSIX).
+    SIGPIPE = 13  # Broken pipe (POSIX).
+    SIGALRM = 14  # Alarm clock (POSIX).
+    SIGTERM = 15  # Termination (ANSI).
+    SIGSTKFLT = 16  # Stack fault.
+    SIGCHLD = 17  # Child status has changed (POSIX).
+    SIGCONT = 18  # Continue (POSIX).
+    SIGSTOP = 19  # Stop, unblockable (POSIX).
+    SIGTSTP = 20  # Keyboard stop (POSIX).
+    SIGTTIN = 21  # Background read from tty (POSIX).
+    SIGTTOU = 22  # Background write to tty (POSIX).
+    SIGURG = 23  # Urgent condition on socket (4.2 BSD).
+    SIGXCPU = 24  # CPU limit exceeded (4.2 BSD).
+    SIGXFSZ = 25  # File size limit exceeded (4.2 BSD).
+    SIGVTALRM = 26  # Virtual alarm clock (4.2 BSD).
+    SIGPROF = 27  # Profiling alarm clock (4.2 BSD).
+    SIGWINCH = 28  # Window size change (4.3 BSD, Sun).
+    SIGPOLL = 29  # Pollable event occurred (System V)
+    SIGPWR = 30  # Power failure restart (System V).
+    SIGSYS = 31  # Bad system call.
+
+    def __str__(self):
+        return "{name}<{value:d}(0x{value:02X})>".format(
+            name=self.name,
+            value=self.value
+        )
+
+
+@enum.unique
+class ExitCodes(enum.IntEnum):
+    EX_OK = 0  # successful termination
+
+    EX_INVALID = 0xDEADBEEF  # uint32 debug value. Impossible for POSIX
+
+    EX_ERROR = 1  # general failure
+    EX_BUILTIN = 2  # Misuse of shell builtins (according to Bash)
+
+    EX_USAGE = 64  # command line usage error
+    EX_DATAERR = 65  # data format error
+    EX_NOINPUT = 66  # cannot open input
+    EX_NOUSER = 67  # addressee unknown
+    EX_NOHOST = 68  # host name unknown
+    EX_UNAVAILABLE = 69  # service unavailable
+    EX_SOFTWARE = 70  # internal software error
+    EX_OSERR = 71  # system error (e.g., can't fork)
+    EX_OSFILE = 72  # critical OS file missing
+    EX_CANTCREAT = 73  # can't create (user) output file
+    EX_IOERR = 74  # input/output error
+    EX_TEMPFAIL = 75  # temp failure; user is invited to retry
+    EX_PROTOCOL = 76  # remote error in protocol
+    EX_NOPERM = 77  # permission denied
+    EX_CONFIG = 78  # configuration error
+
+    EX_NOEXEC = 126  # If a command is found but is not executable
+    EX_NOCMD = 127  # If a command is not found
+
+    # Signal exits:
+    EX_SIGHUP = 128 + SigNum.SIGHUP
+    EX_SIGINT = 128 + SigNum.SIGINT
+    EX_SIGQUIT = 128 + SigNum.SIGQUIT
+    EX_SIGILL = 128 + SigNum.SIGILL
+    EX_SIGTRAP = 128 + SigNum.SIGTRAP
+    EX_SIGABRT = 128 + SigNum.SIGABRT
+    EX_SIGBUS = 128 + SigNum.SIGBUS
+    EX_SIGFPE = 128 + SigNum.SIGFPE
+    EX_SIGKILL = 128 + SigNum.SIGKILL
+    EX_SIGUSR1 = 128 + SigNum.SIGUSR1
+    EX_SIGSEGV = 128 + SigNum.SIGSEGV
+    EX_SIGUSR2 = 128 + SigNum.SIGUSR2
+    EX_SIGPIPE = 128 + SigNum.SIGPIPE
+    EX_SIGALRM = 128 + SigNum.SIGALRM
+    EX_SIGTERM = 128 + SigNum.SIGTERM
+    EX_SIGSTKFLT = 128 + SigNum.SIGSTKFLT
+    EX_SIGCHLD = 128 + SigNum.SIGCHLD
+    EX_SIGCONT = 128 + SigNum.SIGCONT
+    EX_SIGSTOP = 128 + SigNum.SIGSTOP
+    EX_SIGTSTP = 128 + SigNum.SIGTSTP
+    EX_SIGTTIN = 128 + SigNum.SIGTTIN
+    EX_SIGTTOU = 128 + SigNum.SIGTTOU
+    EX_SIGURG = 128 + SigNum.SIGURG
+    EX_SIGXCPU = 128 + SigNum.SIGXCPU
+    EX_SIGXFSZ = 128 + SigNum.SIGXFSZ
+    EX_SIGVTALRM = 128 + SigNum.SIGVTALRM
+    EX_SIGPROF = 128 + SigNum.SIGPROF
+    EX_SIGWINCH = 128 + SigNum.SIGWINCH
+    EX_SIGPOLL = 128 + SigNum.SIGPOLL
+    EX_SIGPWR = 128 + SigNum.SIGPWR
+    EX_SIGSYS = 128 + SigNum.SIGSYS
+
+    def __str__(self):
+        return "{name}<{value:d}(0x{value:02X})>".format(
+            name=self.name,
+            value=self.value
+        )
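+
+
+# Example (illustrative): the __str__ format above yields strings such as
+# str(SigNum.SIGTERM) -> 'SIGTERM<15(0x0F)>' and
+# str(ExitCodes.EX_SIGTERM) -> 'EX_SIGTERM<143(0x8F)>'.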
diff --git a/reclass_tools/helpers/ssh_client.py b/reclass_tools/helpers/ssh_client.py
new file mode 100644
index 0000000..da3655d
--- /dev/null
+++ b/reclass_tools/helpers/ssh_client.py
@@ -0,0 +1,1147 @@
+#    Copyright 2013 - 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+from __future__ import unicode_literals
+
+import base64
+import os
+import posixpath
+import stat
+import sys
+import threading
+import time
+import warnings
+
+import paramiko
+import six
+
+from reclass_tools.helpers import decorators
+from reclass_tools.helpers import exec_result
+from reclass_tools.helpers import proc_enums
+from reclass_tools import logger
+
+
+def get_private_keys(home, identity_files=None):
+    if not identity_files:
+        identity_files = ['.ssh/id_rsa']
+    keys = []
+    for i in identity_files:
+        with open(os.path.join(home, i)) as f:
+            keys.append(paramiko.RSAKey.from_private_key(f))
+    return keys
+
+
+class SSHAuth(object):
+    __slots__ = ['__username', '__password', '__key', '__keys']
+
+    def __init__(
+            self,
+            username=None, password=None, key=None, keys=None):
+        """SSH authorisation object
+
+        Used to authorize SSHClient.
+        A single SSHAuth object is associated with a single host:port.
+        Password and keys are private; other data is read-only.
+
+        :type username: str
+        :type password: str
+        :type key: paramiko.RSAKey
+        :type keys: list
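+
+        Example (illustrative; host and credentials are made up)::
+
+            auth = SSHAuth(username='root', password='r00tme')
+            remote = SSHClient(host='172.16.10.100', auth=auth)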
+        """
+        self.__username = username
+        self.__password = password
+        self.__key = key
+        self.__keys = [None]
+        if key is not None:
+            # noinspection PyTypeChecker
+            self.__keys.append(key)
+        if keys is not None:
+            for key in keys:
+                if key not in self.__keys:
+                    self.__keys.append(key)
+
+    @property
+    def username(self):
+        """Username for auth
+
+        :rtype: str
+        """
+        return self.__username
+
+    @staticmethod
+    def __get_public_key(key):
+        """Internal method for get public key from private
+
+        :type key: paramiko.RSAKey
+        """
+        if key is None:
+            return None
+        return '{0} {1}'.format(key.get_name(), key.get_base64())
+
+    @property
+    def public_key(self):
+        """public key for stored private key if presents else None
+
+        :rtype: str
+        """
+        return self.__get_public_key(self.__key)
+
+    def enter_password(self, tgt):
+        """Enter password to STDIN
+
+        Note: required for 'sudo' call
+
+        :type tgt: file
+        :rtype: str
+        """
+        # noinspection PyTypeChecker
+        return tgt.write('{}\n'.format(self.__password))
+
+    def connect(self, client, hostname=None, port=22, log=True):
+        """Connect SSH client object using credentials
+
+        :type client:
+            paramiko.client.SSHClient
+            paramiko.transport.Transport
+        :type log: bool
+        :raises paramiko.AuthenticationException
+        """
+        kwargs = {
+            'username': self.username,
+            'password': self.__password}
+        if hostname is not None:
+            kwargs['hostname'] = hostname
+            kwargs['port'] = port
+
+        keys = [self.__key]
+        keys.extend([k for k in self.__keys if k != self.__key])
+
+        for key in keys:
+            kwargs['pkey'] = key
+            try:
+                client.connect(**kwargs)
+                if self.__key != key:
+                    self.__key = key
+                    logger.debug(
+                        'Main key has been updated, public key is: \n'
+                        '{}'.format(self.public_key))
+                return
+            except paramiko.PasswordRequiredException:
+                if self.__password is None:
+                    logger.exception('No password has been set!')
+                    raise
+                else:
+                    logger.critical(
+                        'Unexpected PasswordRequiredException, '
+                        'when password is set!')
+                    raise
+            except paramiko.AuthenticationException:
+                continue
+        msg = 'Connection using stored authentication info failed!'
+        if log:
+            logger.exception(msg)
+        raise paramiko.AuthenticationException(msg)
+
+    def __hash__(self):
+        return hash((
+            self.__class__,
+            self.username,
+            self.__password,
+            tuple(self.__keys)
+        ))
+
+    def __eq__(self, other):
+        return hash(self) == hash(other)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __deepcopy__(self, memo):
+        return self.__class__(
+            username=self.username,
+            password=self.__password,
+            key=self.__key,
+            keys=self.__keys.copy()
+        )
+
+    def copy(self):
+        return self.__class__(
+            username=self.username,
+            password=self.__password,
+            key=self.__key,
+            keys=self.__keys
+        )
+
+    def __repr__(self):
+        _key = (
+            None if self.__key is None else
+            '<private for pub: {}>'.format(self.public_key)
+        )
+        _keys = []
+        for k in self.__keys:
+            if k == self.__key:
+                continue
+            # noinspection PyTypeChecker
+            _keys.append(
+                '<private for pub: {}>'.format(
+                    self.__get_public_key(key=k)) if k is not None else None)
+
+        return (
+            '{cls}(username={username}, '
+            'password=<*masked*>, key={key}, keys={keys})'.format(
+                cls=self.__class__.__name__,
+                username=self.username,
+                key=_key,
+                keys=_keys)
+        )
+
+    def __str__(self):
+        return (
+            '{cls} for {username}'.format(
+                cls=self.__class__.__name__,
+                username=self.username,
+            )
+        )
+
+
+class _MemorizedSSH(type):
+    """Memorize metaclass for SSHClient
+
+    This class implements caching and managing of SSHClient connections.
+    The class is not in public scope: all required interfaces are accessible
+      through SSHClient classmethods.
+
+    Main flow:
+      SSHClient() -> check for a cached connection:
+        - If the same connection exists: check that it is alive, reconnect
+          if required and return it
+        - If one exists with different credentials: delete it, then create
+          a new connection and cache it on success
+      * Note: each invocation of an SSHClient instance will return the
+        current dir to the root of the current user's home dir ("cd ~").
+        This is necessary to avoid unpredictable behaviour when the same
+        connection is used from different places.
+        If you need to enter a directory and execute a command there, use
+        the following approach:
+        cmd1 = "cd <some dir> && <command1>"
+        cmd2 = "cd <some dir> && <command2>"
+
+    Closing cached connections is allowed per client and for all stored ones:
+      the connection will be closed, but kept in the cache for faster
+      reconnect
+
+    Clearing the cache is strictly not recommended:
+      from that moment all open connections have to be managed manually,
+      and duplicates are possible.
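+
+    Example of the caching behaviour (illustrative; the address is made up)::
+
+        a = SSHClient('172.16.10.100', auth=SSHAuth(username='root'))
+        b = SSHClient('172.16.10.100', auth=SSHAuth(username='root'))
+        assert a is b   # the cached connection is reused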
+    """
+    __cache = {}
+
+    def __call__(
+            cls,
+            host, port=22,
+            username=None, password=None, private_keys=None,
+            auth=None
+    ):
+        """Main memorize method: check for cached instance and return it
+
+        :type host: str
+        :type port: int
+        :type username: str
+        :type password: str
+        :type private_keys: list
+        :type auth: SSHAuth
+        :rtype: SSHClient
+        """
+        if (host, port) in cls.__cache:
+            key = host, port
+            if auth is None:
+                auth = SSHAuth(
+                    username=username, password=password, keys=private_keys)
+            if hash((cls, host, port, auth)) == hash(cls.__cache[key]):
+                ssh = cls.__cache[key]
+                # noinspection PyBroadException
+                try:
+                    ssh.execute('cd ~', timeout=5)
+                except BaseException:  # Note: Do not change to lower level!
+                    logger.debug('Reconnect {}'.format(ssh))
+                    ssh.reconnect()
+                return ssh
+            if sys.getrefcount(cls.__cache[key]) == 2:
+                # If we have only cache reference and temporary getrefcount
+                # reference: close connection before deletion
+                logger.debug('Closing {} as unused'.format(cls.__cache[key]))
+                cls.__cache[key].close()
+            del cls.__cache[key]
+        # noinspection PyArgumentList
+        return super(
+            _MemorizedSSH, cls).__call__(
+            host=host, port=port,
+            username=username, password=password, private_keys=private_keys,
+            auth=auth)
+
+    @classmethod
+    def record(mcs, ssh):
+        """Record SSH client to cache
+
+        :type ssh: SSHClient
+        """
+        mcs.__cache[(ssh.hostname, ssh.port)] = ssh
+
+    @classmethod
+    def clear_cache(mcs):
+        """Clear cached connections for initialize new instance on next call"""
+        n_count = 3 if six.PY3 else 4
+        # PY3: cache, ssh, temporary
+        # PY2: cache, values mapping, ssh, temporary
+        for ssh in mcs.__cache.values():
+            if sys.getrefcount(ssh) == n_count:
+                logger.debug('Closing {} as unused'.format(ssh))
+                ssh.close()
+        mcs.__cache = {}
+
+    @classmethod
+    def close_connections(mcs, hostname=None):
+        """Close connections for selected or all cached records
+
+        :type hostname: str
+        """
+        if hostname is None:
+            keys = [key for key, ssh in mcs.__cache.items() if ssh.is_alive]
+        else:
+            keys = [
+                (host, port)
+                for (host, port), ssh
+                in mcs.__cache.items() if host == hostname and ssh.is_alive]
+        # raise ValueError(keys)
+        for key in keys:
+            mcs.__cache[key].close()
+
+
+class SSHClient(six.with_metaclass(_MemorizedSSH, object)):
+    __slots__ = [
+        '__hostname', '__port', '__auth', '__ssh', '__sftp', 'sudo_mode',
+        '__lock'
+    ]
+
+    class __get_sudo(object):
+        """Context manager for call commands with sudo"""
+        def __init__(self, ssh, enforce=None):
+            """Context manager for call commands with sudo
+
+            :type ssh: SSHClient
+            :type enforce: bool
+            """
+            self.__ssh = ssh
+            self.__sudo_status = ssh.sudo_mode
+            self.__enforce = enforce
+
+        def __enter__(self):
+            self.__sudo_status = self.__ssh.sudo_mode
+            if self.__enforce is not None:
+                self.__ssh.sudo_mode = self.__enforce
+
+        def __exit__(self, exc_type, exc_val, exc_tb):
+            self.__ssh.sudo_mode = self.__sudo_status
+
+    # noinspection PyPep8Naming
+    class get_sudo(__get_sudo):
+        """Context manager for call commands with sudo"""
+
+        def __init__(self, ssh, enforce=True):
+            warnings.warn(
+                'SSHClient.get_sudo(SSHClient()) is deprecated in favor of '
+                'SSHClient().sudo(enforce=...) , which is much more powerful.')
+            super(self.__class__, self).__init__(ssh=ssh, enforce=enforce)
+
+    def __hash__(self):
+        return hash((
+            self.__class__,
+            self.hostname,
+            self.port,
+            self.auth))
+
+    def __init__(
+            self,
+            host, port=22,
+            username=None, password=None, private_keys=None,
+            auth=None
+    ):
+        """SSHClient helper
+
+        :type host: str
+        :type port: int
+        :type username: str
+        :type password: str
+        :type private_keys: list
+        :type auth: SSHAuth
+        """
+        self.__lock = threading.RLock()
+
+        self.__hostname = host
+        self.__port = port
+
+        self.sudo_mode = False
+        self.__ssh = paramiko.SSHClient()
+        self.__ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+        self.__sftp = None
+
+        self.__auth = auth if auth is None else auth.copy()
+
+        if auth is None:
+            msg = (
+                'SSHClient(host={host}, port={port}, username={username}): '
+                'initialization by username/password/private_keys '
+                'is deprecated in favor of SSHAuth usage. '
+                'Please update your code'.format(
+                    host=host, port=port, username=username
+                ))
+            warnings.warn(msg, DeprecationWarning)
+            logger.debug(msg)
+
+            self.__auth = SSHAuth(
+                username=username,
+                password=password,
+                keys=private_keys
+            )
+
+        self.__connect()
+        _MemorizedSSH.record(ssh=self)
+        if auth is None:
+            logger.info(
+                '{0}:{1}> SSHAuth was made from old style creds: '
+                '{2}'.format(self.hostname, self.port, self.auth))
+
+    @property
+    def lock(self):
+        """Connection lock
+
+        :rtype: threading.RLock
+        """
+        return self.__lock
+
+    @property
+    def auth(self):
+        """Internal authorisation object
+
+        Attention: this public property is mainly for inheritance,
+        debug and information purposes.
+        Calls from outside SSHClient and its child classes are a sign of
+        incorrect design. Changing it is completely disallowed.
+
+        :rtype: SSHAuth
+        """
+        return self.__auth
+
+    @property
+    def hostname(self):
+        """Connected remote host name
+
+        :rtype: str
+        """
+        return self.__hostname
+
+    @property
+    def host(self):
+        """Hostname access for backward compatibility
+
+        :rtype: str
+        """
+        warnings.warn(
+            'host has been deprecated in favor of hostname',
+            DeprecationWarning
+        )
+        return self.hostname
+
+    @property
+    def port(self):
+        """Connected remote port number
+
+        :rtype: int
+        """
+        return self.__port
+
+    @property
+    def is_alive(self):
+        """Paramiko status: ready to use|reconnect required
+
+        :rtype: bool
+        """
+        return self.__ssh.get_transport() is not None
+
+    def __repr__(self):
+        return '{cls}(host={host}, port={port}, auth={auth!r})'.format(
+            cls=self.__class__.__name__, host=self.hostname, port=self.port,
+            auth=self.auth
+        )
+
+    def __str__(self):
+        return '{cls}(host={host}, port={port}) for user {user}'.format(
+            cls=self.__class__.__name__, host=self.hostname, port=self.port,
+            user=self.auth.username
+        )
+
+    @property
+    def _ssh(self):
+        """ssh client object getter for inheritance support only
+
+        Attention: creation and change of the ssh client object
+        are allowed only by __init__ and reconnect().
+
+        :rtype: paramiko.SSHClient
+        """
+        return self.__ssh
+
+    @decorators.retry(paramiko.SSHException, count=3, delay=3)
+    def __connect(self):
+        """Main method for connection open"""
+        with self.lock:
+            self.auth.connect(
+                client=self.__ssh,
+                hostname=self.hostname, port=self.port,
+                log=True)
+
+    def __connect_sftp(self):
+        """SFTP connection opener"""
+        with self.lock:
+            try:
+                self.__sftp = self.__ssh.open_sftp()
+            except paramiko.SSHException:
+                logger.warning('SFTP connection failed! Only SSH is accessible.')
+
+    @property
+    def _sftp(self):
+        """SFTP channel access for inheritance
+
+        :rtype: paramiko.sftp_client.SFTPClient
+        :raises: paramiko.SSHException
+        """
+        if self.__sftp is not None:
+            return self.__sftp
+        logger.debug('SFTP is not connected, try to connect...')
+        self.__connect_sftp()
+        if self.__sftp is not None:
+            return self.__sftp
+        raise paramiko.SSHException('SFTP connection failed')
+
+    def close(self):
+        """Close SSH and SFTP sessions"""
+        with self.lock:
+            # noinspection PyBroadException
+            try:
+                self.__ssh.close()
+                self.__sftp = None
+            except Exception:
+                logger.exception("Could not close ssh connection")
+                if self.__sftp is not None:
+                    # noinspection PyBroadException
+                    try:
+                        self.__sftp.close()
+                    except Exception:
+                        logger.exception("Could not close sftp connection")
+
+    @staticmethod
+    def clear():
+        warnings.warn(
+            "clear is removed: use close() only if it mandatory: "
+            "it's automatically called on revert|shutdown|suspend|destroy",
+            DeprecationWarning
+        )
+
+    @classmethod
+    def _clear_cache(cls):
+        """Enforce clear memorized records"""
+        warnings.warn(
+            '_clear_cache() is dangerous and not recommended for normal use!',
+            Warning
+        )
+        _MemorizedSSH.clear_cache()
+
+    @classmethod
+    def close_connections(cls, hostname=None):
+        """Close cached connections: if hostname is not set, then close all
+
+        :type hostname: str
+        """
+        _MemorizedSSH.close_connections(hostname=hostname)
+
+    def __del__(self):
+        """Destructor helper: close channel and threads BEFORE closing others
+
+        Due to threading in paramiko, the default destructor could generate
+        asserts on close, so we close the channel before the main ssh object.
+        """
+        self.__ssh.close()
+        self.__sftp = None
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        pass
+
+    def reconnect(self):
+        """Reconnect SSH session"""
+        with self.lock:
+            self.close()
+
+            self.__ssh = paramiko.SSHClient()
+            self.__ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+
+            self.__connect()
+
+    def sudo(self, enforce=None):
+        """Call contextmanager for sudo mode change
+
+        :type enforce: bool
+        :param enforce: Enforce sudo enabled or disabled. By default: None
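+
+        Example (illustrative; assumes ``remote`` is a connected SSHClient
+        and the command is arbitrary)::
+
+            with remote.sudo(enforce=True):
+                remote.check_call('salt-call state.apply')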
+        """
+        return self.__get_sudo(ssh=self, enforce=enforce)
+
+    def check_call(
+            self,
+            command, verbose=False, timeout=None,
+            error_info=None,
+            expected=None, raise_on_err=True, **kwargs):
+        """Execute command and check for return code
+
+        :type command: str
+        :type verbose: bool
+        :type timeout: int
+        :type error_info: str
+        :type expected: list
+        :type raise_on_err: bool
+        :rtype: ExecResult
+        :raises: SSHCalledProcessError
+        """
+        if expected is None:
+            expected = [proc_enums.ExitCodes.EX_OK]
+        else:
+            expected = [
+                proc_enums.ExitCodes(code)
+                if (
+                    isinstance(code, int) and
+                    code in proc_enums.ExitCodes.__members__.values())
+                else code
+                for code in expected
+                ]
+        ret = self.execute(command, verbose, timeout, **kwargs)
+        if ret['exit_code'] not in expected:
+            message = (
+                "{append}Command '{cmd!r}' returned exit code {code!s} while "
+                "expected {expected!s}\n".format(
+                    append=error_info + '\n' if error_info else '',
+                    cmd=command,
+                    code=ret['exit_code'],
+                    expected=expected,
+                ))
+            logger.error(message)
+            if raise_on_err:
+                raise SSHCalledProcessError(
+                    command, ret['exit_code'],
+                    expected=expected,
+                    stdout=ret['stdout_brief'],
+                    stderr=ret['stderr_brief'])
+        return ret
+
+    def check_stderr(
+            self,
+            command, verbose=False, timeout=None,
+            error_info=None,
+            raise_on_err=True, **kwargs):
+        """Execute command expecting return code 0 and empty STDERR
+
+        :type command: str
+        :type verbose: bool
+        :type timeout: int
+        :type error_info: str
+        :type raise_on_err: bool
+        :rtype: ExecResult
+        :raises: SSHCalledProcessError
+        """
+        ret = self.check_call(
+            command, verbose, timeout=timeout,
+            error_info=error_info, raise_on_err=raise_on_err, **kwargs)
+        if ret['stderr']:
+            message = (
+                "{append}Command '{cmd!r}' STDERR while not expected\n"
+                "\texit code: {code!s}\n".format(
+                    append=error_info + '\n' if error_info else '',
+                    cmd=command,
+                    code=ret['exit_code'],
+                ))
+            logger.error(message)
+            if raise_on_err:
+                raise SSHCalledProcessError(
+                    command,
+                    ret['exit_code'],
+                    stdout=ret['stdout_brief'],
+                    stderr=ret['stderr_brief'])
+        return ret
+
+    @classmethod
+    def execute_together(
+            cls, remotes, command, expected=None, raise_on_err=True, **kwargs):
+        """Execute command on multiple remotes in async mode
+
+        :type remotes: list
+        :type command: str
+        :type expected: list
+        :type raise_on_err: bool
+        :raises: SSHCalledProcessError
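+
+        Example (illustrative; ``auth`` and the host list are assumed to
+        exist, the command is made up)::
+
+            remotes = [SSHClient(ip, auth=auth) for ip in ctl_ips]
+            SSHClient.execute_together(remotes, 'service salt-minion restart')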
+        """
+        if expected is None:
+            expected = [0]
+        futures = {}
+        errors = {}
+        for remote in set(remotes):  # Use distinct remotes
+            chan, _, _, _ = remote.execute_async(command, **kwargs)
+            futures[remote] = chan
+        for remote, chan in futures.items():
+            ret = chan.recv_exit_status()
+            chan.close()
+            if ret not in expected:
+                errors[remote.hostname] = ret
+        if errors and raise_on_err:
+            raise SSHCalledProcessError(command, errors)
+
+    @classmethod
+    def __exec_command(
+            cls, command, channel, stdout, stderr, timeout, verbose=False):
+        """Get exit status from channel with timeout
+
+        :type command: str
+        :type channel: paramiko.channel.Channel
+        :type stdout: paramiko.channel.ChannelFile
+        :type stderr: paramiko.channel.ChannelFile
+        :type timeout: int
+        :type verbose: bool
+        :rtype: ExecResult
+        :raises: SSHTimeoutError
+        """
+        def poll_stream(src, verb_logger=None):
+            dst = []
+            try:
+                for line in src:
+                    dst.append(line)
+                    if verb_logger is not None:
+                        verb_logger(
+                            line.decode('utf-8',
+                                        errors='backslashreplace').rstrip()
+                        )
+            except IOError:
+                pass
+            return dst
+
+        def poll_streams(result, channel, stdout, stderr, verbose):
+            if channel.recv_ready():
+                result.stdout += poll_stream(
+                    src=stdout,
+                    verb_logger=logger.info if verbose else logger.debug)
+            if channel.recv_stderr_ready():
+                result.stderr += poll_stream(
+                    src=stderr,
+                    verb_logger=logger.error if verbose else logger.debug)
+
+        @decorators.threaded(started=True)
+        def poll_pipes(stdout, stderr, result, stop, channel):
+            """Polling task for FIFO buffers
+
+            :type stdout: paramiko.channel.ChannelFile
+            :type stderr: paramiko.channel.ChannelFile
+            :type result: ExecResult
+            :type stop: Event
+            :type channel: paramiko.channel.Channel
+            """
+
+            while not stop.is_set():
+                time.sleep(0.1)
+                poll_streams(
+                    result=result,
+                    channel=channel,
+                    stdout=stdout,
+                    stderr=stderr,
+                    verbose=verbose
+                )
+
+                if channel.status_event.is_set():
+                    result.exit_code = channel.exit_status
+
+                    result.stdout += poll_stream(
+                        src=stdout,
+                        verb_logger=logger.info if verbose else logger.debug)
+                    result.stderr += poll_stream(
+                        src=stderr,
+                        verb_logger=logger.error if verbose else logger.debug)
+
+                    stop.set()
+
+        # channel.status_event.wait(timeout)
+        result = exec_result.ExecResult(cmd=command)
+        stop_event = threading.Event()
+        if verbose:
+            logger.info("\nExecuting command: {!r}".format(command.rstrip()))
+        else:
+            logger.debug("\nExecuting command: {!r}".format(command.rstrip()))
+        poll_pipes(
+            stdout=stdout,
+            stderr=stderr,
+            result=result,
+            stop=stop_event,
+            channel=channel
+        )
+
+        stop_event.wait(timeout)
+
+        # Process closed?
+        if stop_event.is_set():
+            stop_event.clear()
+            channel.close()
+            return result
+
+        stop_event.set()
+        channel.close()
+
+        wait_err_msg = ('Wait for {0!r} during {1}s: no return code!\n'
+                        .format(command, timeout))
+        output_brief_msg = ('\tSTDOUT:\n'
+                            '{0}\n'
+                            '\tSTDERR"\n'
+                            '{1}'.format(result.stdout_brief,
+                                         result.stderr_brief))
+        logger.debug(wait_err_msg)
+        raise SSHTimeoutError(wait_err_msg + output_brief_msg)
+
+    def execute(self, command, verbose=False, timeout=None, **kwargs):
+        """Execute command and wait for return code
+
+        :type command: str
+        :type verbose: bool
+        :type timeout: int
+        :rtype: ExecResult
+        :raises: SSHTimeoutError
+        """
+        chan, _, stderr, stdout = self.execute_async(command, **kwargs)
+
+        result = self.__exec_command(
+            command, chan, stdout, stderr, timeout,
+            verbose=verbose
+        )
+
+        message = (
+            '\n{cmd!r} execution results: Exit code: {code!s}'.format(
+                cmd=command,
+                code=result.exit_code
+            ))
+        if verbose:
+            logger.info(message)
+        else:
+            logger.debug(message)
+        return result
+
+    def execute_async(self, command, get_pty=False):
+        """Execute command in async mode and return channel with IO objects
+
+        :type command: str
+        :type get_pty: bool
+        :rtype:
+            tuple(
+                paramiko.Channel,
+                paramiko.ChannelFile,
+                paramiko.ChannelFile,
+                paramiko.ChannelFile
+            )
+        """
+        logger.debug("Executing command: {!r}".format(command.rstrip()))
+
+        chan = self._ssh.get_transport().open_session()
+
+        if get_pty:
+            # Open PTY
+            chan.get_pty(
+                term='vt100',
+                width=80, height=24,
+                width_pixels=0, height_pixels=0
+            )
+
+        stdin = chan.makefile('wb')
+        stdout = chan.makefile('rb')
+        stderr = chan.makefile_stderr('rb')
+        cmd = "{}\n".format(command)
+        if self.sudo_mode:
+            encoded_cmd = base64.b64encode(cmd.encode('utf-8')).decode('utf-8')
+            cmd = ("sudo -S bash -c 'eval \"$(base64 -d "
+                   "<(echo \"{0}\"))\"'").format(
+                encoded_cmd
+            )
+            chan.exec_command(cmd)
+            if stdout.channel.closed is False:
+                self.auth.enter_password(stdin)
+                stdin.flush()
+        else:
+            chan.exec_command(cmd)
+        return chan, stdin, stderr, stdout
+
+    def execute_through_host(
+            self,
+            hostname,
+            cmd,
+            auth=None,
+            target_port=22,
+            timeout=None,
+            verbose=False
+    ):
+        """Execute command on remote host through currently connected host
+
+        :type hostname: str
+        :type cmd: str
+        :type auth: SSHAuth
+        :type target_port: int
+        :type timeout: int
+        :type verbose: bool
+        :rtype: ExecResult
+        :raises: SSHTimeoutError
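+
+        Example (illustrative; addresses are made up and ``auth`` is an
+        existing SSHAuth object)::
+
+            ctl = SSHClient('172.16.10.100', auth=auth)
+            res = ctl.execute_through_host('192.168.10.101', 'uptime')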
+        """
+        if auth is None:
+            auth = self.auth
+
+        intermediate_channel = self._ssh.get_transport().open_channel(
+            kind='direct-tcpip',
+            dest_addr=(hostname, target_port),
+            src_addr=(self.hostname, 0))
+        transport = paramiko.Transport(sock=intermediate_channel)
+
+        # start client and authenticate transport
+        auth.connect(transport)
+
+        # open ssh session
+        channel = transport.open_session()
+
+        # Make proxy objects for read
+        stdout = channel.makefile('rb')
+        stderr = channel.makefile_stderr('rb')
+
+        channel.exec_command(cmd)
+
+        # noinspection PyDictCreation
+        result = self.__exec_command(
+            cmd, channel, stdout, stderr, timeout, verbose=verbose)
+
+        intermediate_channel.close()
+
+        return result
+
+    def mkdir(self, path):
+        """run 'mkdir -p path' on remote
+
+        :type path: str
+        """
+        if self.exists(path):
+            return
+        logger.debug("Creating directory: {}".format(path))
+        # noinspection PyTypeChecker
+        self.execute("mkdir -p {}\n".format(path))
+
+    def rm_rf(self, path):
+        """run 'rm -rf path' on remote
+
+        :type path: str
+        """
+        logger.debug("rm -rf {}".format(path))
+        # noinspection PyTypeChecker
+        self.execute("rm -rf {}".format(path))
+
+    def open(self, path, mode='r'):
+        """Open file on remote using SFTP session
+
+        :type path: str
+        :type mode: str
+        :return: file.open() stream
+        """
+        return self._sftp.open(path, mode)
+
+    def upload(self, source, target):
+        """Upload file(s) from source to target using SFTP session
+
+        :type source: str
+        :type target: str
+        """
+        logger.debug("Copying '%s' -> '%s'", source, target)
+
+        if self.isdir(target):
+            target = posixpath.join(target, os.path.basename(source))
+
+        source = os.path.expanduser(source)
+        if not os.path.isdir(source):
+            self._sftp.put(source, target)
+            return
+
+        for rootdir, _, files in os.walk(source):
+            targetdir = os.path.normpath(
+                os.path.join(
+                    target,
+                    os.path.relpath(rootdir, source))).replace("\\", "/")
+
+            self.mkdir(targetdir)
+
+            for entry in files:
+                local_path = os.path.join(rootdir, entry)
+                remote_path = posixpath.join(targetdir, entry)
+                if self.exists(remote_path):
+                    self._sftp.unlink(remote_path)
+                self._sftp.put(local_path, remote_path)
+
+    def download(self, destination, target):
+        """Download file(s) to target from destination
+
+        :type destination: str
+        :type target: str
+        :rtype: bool
+        """
+        logger.debug(
+            "Copying '%s' -> '%s' from remote to local host",
+            destination, target
+        )
+
+        if os.path.isdir(target):
+            target = posixpath.join(target, os.path.basename(destination))
+
+        if not self.isdir(destination):
+            if self.exists(destination):
+                self._sftp.get(destination, target)
+            else:
+                logger.debug(
+                    "Can't download %s because it doesn't exist", destination
+                )
+        else:
+            logger.debug(
+                "Can't download %s because it is a directory", destination
+            )
+        return os.path.exists(target)
+
+    def exists(self, path):
+        """Check for file existence using SFTP session
+
+        :type path: str
+        :rtype: bool
+        """
+        try:
+            self._sftp.lstat(path)
+            return True
+        except IOError:
+            return False
+
+    def stat(self, path):
+        """Get stat info for path with following symlinks
+
+        :type path: str
+        :rtype: paramiko.sftp_attr.SFTPAttributes
+        """
+        return self._sftp.stat(path)
+
+    def isfile(self, path, follow_symlink=False):
+        """Check, that path is file using SFTP session
+
+        :type path: str
+        :type follow_symlink: bool (default=False), resolve symlinks
+        :rtype: bool
+        """
+        try:
+            if follow_symlink:
+                attrs = self._sftp.stat(path)
+            else:
+                attrs = self._sftp.lstat(path)
+            return attrs.st_mode & stat.S_IFREG != 0
+        except IOError:
+            return False
+
+    def isdir(self, path, follow_symlink=False):
+        """Check, that path is directory using SFTP session
+
+        :type path: str
+        :type follow_symlink: bool (default=False), resolve symlinks
+        :rtype: bool
+        """
+        try:
+            if follow_symlink:
+                attrs = self._sftp.stat(path)
+            else:
+                attrs = self._sftp.lstat(path)
+            return attrs.st_mode & stat.S_IFDIR != 0
+        except IOError:
+            return False
+
+    def walk(self, path):
+        """os.walk() analog over the SFTP session"""
+        files = []
+        folders = []
+        try:
+            for item in self._sftp.listdir_iter(path):
+                if item.st_mode & stat.S_IFDIR:
+                    folders.append(item.filename)
+                else:
+                    files.append(item.filename)
+        except IOError as e:
+            print("Error opening directory {0}: {1}".format(path, e))
+
+        yield path, folders, files
+        for folder in folders:
+            for res in self.walk(os.path.join(path, folder)):
+                yield res
+
+
+class SSHClientError(Exception):
+    """Base class for errors"""
+
+
+class SSHCalledProcessError(SSHClientError):
+    @staticmethod
+    def _makestr(data):
+        if isinstance(data, six.binary_type):
+            return data.decode('utf-8', errors='backslashreplace')
+        elif isinstance(data, six.text_type):
+            return data
+        else:
+            return repr(data)
+
+    def __init__(
+            self, command, returncode, expected=0, stdout=None, stderr=None):
+        self.returncode = returncode
+        self.expected = expected
+        self.cmd = command
+        self.stdout = stdout
+        self.stderr = stderr
+        message = (
+            "Command '{cmd}' returned exit code {code} while "
+            "expected {expected}".format(
+                cmd=self._makestr(self.cmd),
+                code=self.returncode,
+                expected=self.expected
+            ))
+        if self.stdout:
+            message += "\n\tSTDOUT:\n{}".format(self._makestr(self.stdout))
+        if self.stderr:
+            message += "\n\tSTDERR:\n{}".format(self._makestr(self.stderr))
+        super(SSHCalledProcessError, self).__init__(message)
+
+    @property
+    def output(self):
+        warnings.warn(
+            'output is deprecated, please use stdout and stderr separately',
+            DeprecationWarning)
+        return self.stdout + self.stderr
+
+
+class SSHTimeoutError(SSHClientError):
+    pass
+
+
+__all__ = [
+    'SSHAuth', 'SSHClient',
+    'SSHClientError', 'SSHCalledProcessError', 'SSHTimeoutError',
+]
diff --git a/reclass_tools/walk_models.py b/reclass_tools/walk_models.py
new file mode 100644
index 0000000..353b0e9
--- /dev/null
+++ b/reclass_tools/walk_models.py
@@ -0,0 +1,218 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import hashlib
+import os
+import re
+import tarfile
+
+import urllib2
+import yaml
+
+from reclass_tools.helpers import ssh_client
+
+
+def walkfiles(topdir, identity_files=None, verbose=False):
+    if ":" in topdir:
+        host, path = topdir.split(":")
+        private_keys = ssh_client.get_private_keys(
+            os.environ.get("HOME"), identity_files)
+        if "@" in host:
+            username, host = host.split("@")
+        else:
+            username = os.environ.get("USER")
+        remote = ssh_client.SSHClient(
+            host, username=username, private_keys=private_keys)
+
+        walker = remote.walk(path)
+        opener = remote.open
+        prefix = remote.host + ":"
+        isdir = remote.isdir(path, follow_symlink=True)
+    else:
+        walker = os.walk(topdir)
+        opener = open
+        prefix = ''
+        isdir = os.path.isdir(topdir)
+
+    if isdir:
+        for dirName, subdirList, fileList in walker:
+            for filename in fileList:
+                filepath = os.path.join(dirName, filename)
+                if verbose:
+                    print(prefix + filepath)
+                with OpenFile(filepath, opener) as log:
+                    yield log
+    else:
+        if verbose:
+            print(topdir)
+        with OpenFile(topdir, opener) as log:
+            yield log
+
+
+def yaml_read(yaml_file):
+    if os.path.isfile(yaml_file):
+        with open(yaml_file, 'r') as f:
+            return yaml.safe_load(f)
+    else:
+        print("'{}' is not a file!".format(yaml_file))
+
+
+class OpenFile(object):
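+    """Context manager that picks a line reader based on the file name.
+
+    Known binary or service files (lastlog, wtmp, atop logs, archives)
+    get a no-op parser; everything else is read as plain text via the
+    supplied ``opener`` callable.
+    """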
+
+    fname = None
+    opener = None
+    readlines = None
+    fobj = None
+
+    def __init__(self, fname, opener):
+        self.fname = fname
+        self.opener = opener
+
+    def get_parser(self):
+        parsers = {
+            '/lastlog': self.fake_parser,
+            '/wtmp': self.fake_parser,
+            '/btmp': self.fake_parser,
+            '/atop.log': self.fake_parser,
+            '/atop_': self.fake_parser,
+            '/atop_current': self.fake_parser,
+            '/supervisord.log': self.docker_parser,
+            '.gz': self.gz_parser,
+            '.bz2': self.bz2_parser,
+        }
+        for w in parsers.keys():
+            if w in self.fname:
+                self.readlines = parsers[w]
+                return
+        try:
+            self.fobj = self.opener(self.fname, 'r')
+            self.readlines = self.plaintext_parser
+        except IOError as e:
+            print("Error opening file {0}: {1}".format(self.fname, e))
+            if self.fobj:
+                self.fobj.close()
+            self.fobj = None
+            self.readlines = self.fake_parser
+
+    def plaintext_parser(self):
+        try:
+            for s in self.fobj.readlines():
+                yield s
+        except IOError as e:
+            print("Error reading file {0}: {1}".format(self.fname, e))
+
+    def fake_parser(self):
+        yield ''
+
+    def docker_parser(self):
+        yield ''
+
+    def gz_parser(self):
+        yield ''
+
+    def bz2_parser(self):
+        yield ''
+
+    def __enter__(self):
+        self.get_parser()
+        return self
+
+    def __exit__(self, exc_type, exc_value, exc_traceback):
+        if self.fobj:
+            self.fobj.close()
+
+
+def get_nested_key(data, path=None):
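+    """Return the value at the given key path, or None if it cannot be found.
+
+    Illustrative example:
+        get_nested_key({'a': {'b': 1}}, path=['a', 'b'])  # -> 1
+    """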
+    if not isinstance(path, list):
+        raise TypeError("Use 'list' object with key names for 'path'")
+    for key in path:
+        value = data.get(key, None)
+        if value:
+            data = value
+        else:
+            return None
+    return data
+
+
+def remove_nested_key(data, path=None):
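+    """Remove the value at the given key path and prune emptied parent keys.
+
+    Illustrative example:
+        data = {'a': {'b': {'c': 1}}}
+        remove_nested_key(data, path=['a', 'b', 'c'])  # data becomes {}
+    """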
+    if not isinstance(path, list):
+        raise TypeError("Use 'list' object with key names for 'path'")
+
+    # Remove the value from the specified key
+    val = get_nested_key(data, path[:-1])
+    val[path[-1]] = None
+
+    # Clear parent keys if empty
+    while path:
+        val = get_nested_key(data, path)
+        if val:
+            # Non-empty value, nothing to do
+            return
+        else:
+            get_nested_key(data, path[:-1]).pop(path[-1])
+            path = path[:-1]
+
+
+def get_all_reclass_params(paths, verbose=False):
+    """Return dict with all used values for each param"""
+    #path = '/srv/salt/reclass/classes'
+    _params = dict()
+    for path in paths:
+        for log in walkfiles(path, verbose=verbose):
+            if log.fname.endswith('.yml'):
+                model = yaml_read(log.fname)
+                if model is not None:
+                    # Collect all params from the models
+                    _param = get_nested_key(model, ['parameters', '_param'])
+                    if _param:
+                        for key, val in _param.items():
+                            if key in _params:
+                                _params[key].append(val)
+                            else:
+                                _params[key] = [val]
+
+    return _params
+    #print(yaml.dump(_params))
+
+
+def remove_reclass_parameter(path, parameter, verbose=False):
+    """Remove the specified key from the parameters of all reclass models.
+
+    Note: in this initial version the ``parameter`` argument is not used
+    yet; only ``parameters:linux:network:interface`` is cleared.
+    """
+    #path = '/srv/salt/reclass/classes'
+    for log in walkfiles(path, verbose=verbose):
+        if log.fname.endswith('.yml'):
+            model = yaml_read(log.fname)
+            if model is not None:
+
+                # Clear linux.network.interfaces
+                interfaces = get_nested_key(
+                    model, ['parameters', 'linux', 'network', 'interface'])
+                if interfaces:
+                    print(log.fname)
+                    print(interfaces.keys())
+
+                    remove_nested_key(
+                        model, ['parameters', 'linux', 'network', 'interface'])
+
+                    print(model)
+                    with open(log.fname, 'w') as f:
+                        f.write(
+                            yaml.dump(
+                                model, default_flow_style=False
+                            )
+                        )
+
+#                #print(yaml.dump(interfaces, default_flow_style=False))
+
+#                lvm = get_nested_key(model, ['parameters', 'linux', 'storage', 'lvm'])
+#                if lvm:
+#                    print(log.fname)
+#                    print(lvm.keys())
+#                    #print(yaml.dump(lvm, default_flow_style=False))
+
+#                mount = get_nested_key(model, ['parameters', 'linux', 'storage', 'mount'])
+#                if mount:
+#                    print(log.fname)
+#                    print(mount.keys())
+#                    #print(yaml.dump(mount, default_flow_style=False))
+
+#                swap = get_nested_key(model, ['parameters', 'linux', 'storage', 'swap'])
+#                if swap:
+#                    print(log.fname)
+#                    print(swap.keys())
+#                    #print(yaml.dump(swap, default_flow_style=False))
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..70667b6
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,3 @@
+paramiko>=1.16.0,!=2.0.1
+PyYAML>=3.1.0
+six>=1.9.0
diff --git a/run_tests.sh b/run_tests.sh
new file mode 100755
index 0000000..0460772
--- /dev/null
+++ b/run_tests.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+set -e
+set -x
+
+tox -v
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..3da6344
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,29 @@
+[metadata]
+name = reclass-tools
+version = 0.0.1
+summary = QA tools for manipulating reclass models
+description-file =
+    README.md
+author = Dennis Dmitriev
+author-email = dis-xcom@gmail.com
+home-page = https://github.com/dis-xcom/
+classifier =
+    Intended Audience :: Information Technology
+    Intended Audience :: System Administrators
+    License :: OSI Approved :: Apache Software License
+    Operating System :: POSIX :: Linux
+
+[build_sphinx]
+source-dir = doc/source
+build-dir = doc/build
+all_files = 1
+
+[upload_sphinx]
+upload-dir = doc/build/html
+
+[files]
+packages = reclass_tools
+
+[entry_points]
+console_scripts =
+    reclass-dump-params = reclass_tools.cli:dump_params
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..056c16c
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
+import setuptools
+
+# In python < 2.7.4, a lazy loading of package `pbr` will break
+# setuptools if some other modules registered functions in `atexit`.
+# solution from: http://bugs.python.org/issue15881#msg170215
+try:
+    import multiprocessing  # noqa
+except ImportError:
+    pass
+
+setuptools.setup(
+    setup_requires=['pbr'],
+    pbr=True)
diff --git a/test-requirements.txt b/test-requirements.txt
new file mode 100644
index 0000000..635923d
--- /dev/null
+++ b/test-requirements.txt
@@ -0,0 +1,2 @@
+mock>=1.2
+pytest>=2.7.1
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..ebc74ed
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,77 @@
+# Tox (http://tox.testrun.org/) is a tool for running tests
+# in multiple virtualenvs. This configuration file will run the
+# test suite on all supported python versions. To use it, "pip install tox"
+# and then run "tox" from this directory.
+
+[tox]
+minversion = 2.0
+envlist = pep8, py{27,34,35}, pylint, pylint-py{27,34,35}, cover, docs
+skipsdist = True
+skip_missing_interpreters = True
+
+
+[testenv]
+usedevelop = True
+passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
+deps =
+    sphinx
+    -r{toxinidir}/test-requirements.txt
+commands =
+    py.test -vv {posargs:reclass_tools/tests}
+
+[testenv:venv]
+commands = {posargs:}
+
+[testenv:cover]
+deps =
+    pytest-cov
+    -r{toxinidir}/test-requirements.txt
+commands =
+    py.test --cov-config .coveragerc --cov-report html --cov=reclass_tools reclass_tools/tests
+    coverage html -d {envlogdir}
+    coverage report --fail-under 80
+
+
+[testenv:pep8]
+deps = hacking==0.10.1
+usedevelop = False
+commands =
+    flake8
+
+[testenv:pylint]
+deps =
+    -r{toxinidir}/test-requirements.txt
+    pylint
+commands =
+    pylint --rcfile=.pylintrc_gerrit reclass_tools
+
+[testenv:pylint-py27]
+deps=
+    -r{toxinidir}/test-requirements.txt
+    pylint
+commands=pylint reclass_tools
+
+[testenv:pylint-py34]
+deps=
+    -r{toxinidir}/test-requirements.txt
+    pylint
+commands=pylint reclass_tools
+
+[testenv:pylint-py35]
+deps=
+    -r{toxinidir}/test-requirements.txt
+    pylint
+commands=pylint reclass_tools
+
+[flake8]
+exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build,tools,__init__.py,docs
+show-pep8 = True
+show-source = True
+count = True
+
+[pytest]
+
+[testenv:docs]
+deps =
+    sphinx
+commands = python setup.py build_sphinx