Initial commit with fixtures

- add fixtures for hardware and underlay
- add fuel-devops template tcpcloud-default.yaml

* Migration of fixtures is not finished yet
diff --git a/tcp_tests/helpers/__init__.py b/tcp_tests/helpers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tcp_tests/helpers/__init__.py
diff --git a/tcp_tests/helpers/containers.py b/tcp_tests/helpers/containers.py
new file mode 100644
index 0000000..7d1306a
--- /dev/null
+++ b/tcp_tests/helpers/containers.py
@@ -0,0 +1,162 @@
+#    Copyright 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+from __future__ import division
+
+from tcp_tests import logger
+
+
+LOG = logger.logger
+
+
+def exec_in_container(container, cmd):
+    command = container.create_exec(cmd)
+    stdout = container.start_exec(command)
+    inspect = container.client.exec_inspect(command['Id'])
+    return stdout, inspect['ExitCode']
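+
+# Example usage (illustrative sketch): assumes `container` is a docker-py
+# style wrapper exposing create_exec()/start_exec() and a `client` with
+# exec_inspect(), as used above.
+#
+#     stdout, exit_code = exec_in_container(container, 'ls -la /')
+#     assert exit_code == 0, stdout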
+
+
+class ContainerEngine(object):
+    def __init__(self,
+                 remote=None,
+                 image_name=None,
+                 container_repo=None,
+                 proxy_url=None,
+                 user_id=0,
+                 container_name=None,
+                 dir_for_home='/var/home',
+                 ):
+        self.remote = remote
+        self.container_repo = container_repo
+        self.repository_tag = 'latest'
+        self.proxy_url = proxy_url or ""
+        self.user_id = user_id
+        self.image_name = image_name
+        self.container_name = container_name
+        self.dir_for_home = dir_for_home
+        self.home_bind_path = '{0}/{1}'.format(
+            self.dir_for_home, self.container_name)
+        self.setup()
+
+    def image_exists(self, tag='latest'):
+        cmd = "docker images | grep {0}| awk '{{print $1}}'".format(
+            self.image_name)
+        LOG.info('Checking Docker images...')
+        result = self.remote.execute(cmd)
+        LOG.debug(result)
+        existing_images = [line.strip().split() for line in result['stdout']]
+        return [self.container_repo, tag] in existing_images
+
+    def pull_image(self):
+        # TODO(dtyzhnenko): add possibility to load image from local path or
+        # remote link provided in settings, in order to speed up downloading
+        cmd = 'docker pull {0}'.format(self.container_repo)
+        LOG.debug('Downloading image {0} from the registry...'.format(
+            self.container_repo))
+        result = self.remote.execute(cmd)
+        LOG.debug(result)
+        return self.image_exists()
+
+    def run_container_command(self, command, in_background=False):
+        command = str(command).replace(r"'", r"'\''")
+        options = ''
+        if in_background:
+            options = '{0} -d'.format(options)
+        cmd = ("docker run {options} --user {user_id} --net=\"host\"  -e "
+               "\"http_proxy={proxy_url}\" -e \"https_proxy={proxy_url}\" "
+               "-v {dir_for_home}:{home_bind_path} {container_repo}:{tag} "
+               "/bin/bash -c '{command}'".format(
+                   options=options,
+                   user_id=self.user_id,
+                   proxy_url=self.proxy_url,
+                   dir_for_home=self.dir_for_home,
+                   home_bind_path=self.home_bind_path,
+                   container_repo=self.container_repo,
+                   tag=self.repository_tag,
+                   command=command))
+        LOG.debug(
+            'Executing command "{0}" in container {1}...'.format(
+                cmd, self.container_repo
+            )
+        )
+        result = self.remote.execute(cmd)
+        LOG.debug(result)
+        return result
+
+    def setup_utils(self):
+        utils = ['gawk', 'vim', 'curl']
+        cmd = ('unset http_proxy https_proxy; apt-get update; '
+               'apt-get install -y {0}'.format(' '.join(utils)))
+        LOG.debug('Installing utils "{0}" in the container...'.format(
+            utils))
+        result = self.run_container_command(cmd)
+        assert result['exit_code'] == 0, \
+            "Utils installation failed in container: {0}".format(result)
+
+    def prepare_image(self):
+        self.setup_utils()
+        last_container_cmd = "docker ps -lq"
+        result = self.remote.execute(last_container_cmd)
+        assert result['exit_code'] == 0, \
+            "Unable to get last container ID: {0}!".format(result)
+        last_container = ''.join([line.strip() for line in result['stdout']])
+        commit_cmd = 'docker commit {0} {1}:ready'.format(last_container,
+                                                          self.container_repo)
+        result = self.remote.execute(commit_cmd)
+        assert result['exit_code'] == 0, \
+            "Commit to Docker image '{0}' failed: {1}.".format(
+                self.container_repo, result)
+        return self.image_exists(tag='ready')
+
+    def setup_bash_alias(self):
+        alias_name = '{}_docker'.format(self.image_name)
+        check_alias_cmd = '. /root/.bashrc && alias {0}'.format(alias_name)
+        result = self.remote.execute(check_alias_cmd)
+        if result['exit_code'] == 0:
+            return
+        LOG.debug(
+            'Creating bash alias for running {} from container...'.format(
+                self.image_name
+            )
+        )
+        create_alias_cmd = ("alias {alias_name}='docker run --user {user_id} "
+                            "--net=\"host\"  -e \"http_proxy={proxy_url}\" -t "
+                            "-i -v {dir_for_home}:{home_bind_path}  "
+                            "{container_repo}:{tag} {image_name}'".format(
+                                alias_name=alias_name,
+                                user_id=self.user_id,
+                                proxy_url=self.proxy_url,
+                                dir_for_home=self.dir_for_home,
+                                home_bind_path=self.home_bind_path,
+                                container_repo=self.container_repo,
+                                tag=self.repository_tag,
+                                image_name=self.image_name))
+        result = self.remote.execute('echo "{0}">> /root/.bashrc'.format(
+            create_alias_cmd))
+        assert result['exit_code'] == 0, \
+            ("Alias creation for running {0} from container "
+             "failed: {1}.").format(self.image_name, result)
+        result = self.remote.execute(check_alias_cmd)
+        assert result['exit_code'] == 0, \
+            ("Alias creation for running {} from container "
+             "failed: {1}.").format(self.image_name, result)
+
+    def setup(self):
+        if not self.image_exists():
+            assert self.pull_image(), \
+                "Docker image for {} not found!".format(self.image_name)
+        if not self.image_exists(tag='ready'):
+            assert self.prepare_image(), \
+                "Docker image for {} is not ready!".format(self.image_name)
+        self.repository_tag = 'ready'
+        self.setup_bash_alias()
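+
+# Example (sketch) of driving ContainerEngine, assuming `remote` is a
+# fuel-devops SSH remote whose execute() returns a dict with 'stdout'
+# and 'exit_code'; image and repo names below are placeholders.
+#
+#     engine = ContainerEngine(remote=remote,
+#                              image_name='rally',
+#                              container_repo='example.local/rally',
+#                              container_name='rally-ci')
+#     engine.run_container_command('rally deployment list')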
diff --git a/tcp_tests/helpers/env_config.py b/tcp_tests/helpers/env_config.py
new file mode 100644
index 0000000..3ad9a36
--- /dev/null
+++ b/tcp_tests/helpers/env_config.py
@@ -0,0 +1,318 @@
+#    Copyright 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+# TODO(slebedev): implement unit tests
+
+import copy
+import json
+import re
+
+from devops.helpers import templates
+import yaml
+
+from tcp_tests.helpers import exceptions
+from tcp_tests import logger
+
+
+LOG = logger.logger
+
+
+class DevopsConfigMissingKey(KeyError):
+    def __init__(self, key, keypath):
+        super(DevopsConfigMissingKey, self).__init__()
+        self.key = key
+        self.keypath = keypath
+
+    def __str__(self):
+        return "Key '{0}' by keypath '{1}' is missing".format(
+            self.key,
+            self.keypath
+        )
+
+
+def fail_if_obj(x):
+    if not isinstance(x, int):
+        raise TypeError("Expecting int value!")
+
+
+def fix_devops_config(config):
+    """Function for get correct structure of config
+
+    :param config: dict
+    :returns: config dict
+    """
+    if not isinstance(config, dict):
+        raise exceptions.DevopsConfigTypeError(
+            type_name=type(config).__name__
+        )
+    if 'template' in config:
+        return copy.deepcopy(config)
+    else:
+        return {
+            "template": {
+                "devops_settings": copy.deepcopy(config)
+            }
+        }
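+
+# Example (sketch): a bare devops settings dict is wrapped into the
+# 'template/devops_settings' structure, while an already wrapped config
+# is returned as a deep copy.
+#
+#     fix_devops_config({'env_name': 'tcp-lab'})
+#     # => {'template': {'devops_settings': {'env_name': 'tcp-lab'}}}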
+
+
+def list_update(obj, indexes, value):
+    """Procedure for setting value into list (nested too), need
+    in some functions where we are not able to set value directly.
+
+    e.g.: we want to change element in nested list.
+
+    obj = [12, 34, [3, 5, [0, 4], 3], 85]
+    list_update(obj, [2, 2, 1], 50) => obj[2][2][1] = 50
+    print(obj) => [12, 34, [3, 5, [0, 50], 3], 85]
+
+    :param obj: source list
+    :param indexes: list with indexes for recursive process
+    :param value: some value for setting
+    """
+    def check_obj(obj):
+        if not isinstance(obj, list):
+            raise TypeError("obj must be a list instance!")
+    check_obj(obj)
+    if len(indexes) > 0:
+        cur = obj
+        last_index = indexes[-1]
+        fail_if_obj(last_index)
+        for i in indexes[:-1]:
+            fail_if_obj(i)
+            check_obj(cur[i])
+            cur = cur[i]
+        cur[last_index] = value
+
+
+def return_obj(indexes=[]):
+    """Function returns dict() or list() object given nesting, it needs by
+    set_value_for_dict_by_keypath().
+
+    Examples:
+        return_obj() => {}
+        return_obj([0]) => [{}]
+        return_obj([-1]) => [{}]
+        return_obj([-1, 1, -2]) => [[None, [{}, None]]]
+        return_obj([2]) => [None, None, {}]
+        return_obj([1,3]) => [None, [None, None, None, {}]]
+    """
+    if not isinstance(indexes, list):
+        raise TypeError("indexes must be a list!")
+    if len(indexes) > 0:
+        # Create resulting initial object with 1 element
+        result = [None]
+        # And save its ref
+        cur = result
+        # lambda for extending list elements
+        li = (lambda x: [None] * x)
+        # lambda for nesting of list
+        nesting = (lambda x: x if x >= 0 else abs(x) - 1)
+        # save last index
+        last_index = indexes[-1]
+        fail_if_obj(last_index)
+        # Loop from the first to the penultimate index: create the nested
+        # list and move the current position to the element at the next
+        # index from the indexes list
+        for i in indexes[:-1]:
+            fail_if_obj(i)
+            cur.extend(li(nesting(i)))
+            cur[i] = [None]
+            cur = cur[i]
+        # Process the last index
+        cur.extend(li(nesting(last_index)))
+        cur[last_index] = {}
+        return result
+    else:
+        return dict()
+
+
+def keypath(paths):
+    """Function to make string keypath from list of paths"""
+    return ".".join(list(paths))
+
+
+def disassemble_path(path):
+    """Func for disassembling path into key and indexes list (if needed)
+
+    :param path: string
+    :returns: key string, indexes list
+    """
+    pattern = re.compile("\[([0-9]*)\]")
+    # find all indexes of possible list object in path
+    indexes = (lambda x: [int(r) for r in pattern.findall(x)]
+               if pattern.search(x) else [])
+    # get key
+    base_key = (lambda x: re.sub(pattern, '', x))
+    return base_key(path), indexes(path)
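+
+# Example (sketch) of splitting keypath elements into key and indexes:
+#
+#     disassemble_path('address_pools[0]')  # => ('address_pools', [0])
+#     disassemble_path('nodes[1][2]')       # => ('nodes', [1, 2])
+#     disassemble_path('env_name')          # => ('env_name', [])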
+
+
+def set_value_for_dict_by_keypath(source, paths, value, new_on_missing=True):
+    """Procedure for setting specific value by keypath in dict
+
+    :param source: dict
+    :param paths: string
+    :param value: value to set by keypath
+    """
+    paths = paths.lstrip(".").split(".")
+    walked_paths = []
+    # Store the last path
+    last_path = paths.pop()
+    data = source
+    # loop to go through dict
+    while len(paths) > 0:
+        path = paths.pop(0)
+        key, indexes = disassemble_path(path)
+        walked_paths.append(key)
+        if key not in data:
+            if new_on_missing:
+                # if object is missing, we create new one
+                data[key] = return_obj(indexes)
+            else:
+                raise DevopsConfigMissingKey(key, keypath(walked_paths[:-1]))
+
+        data = data[key]
+
+        # if we can not get element in list, we should
+        # throw an exception with walked path
+        for i in indexes:
+            try:
+                tmp = data[i]
+            except IndexError as err:
+                LOG.error(
+                    "Couldn't access {0} element of '{1}' keypath".format(
+                        i, keypath(walked_paths)
+                    )
+                )
+                LOG.error(
+                    "Dump of '{0}':\n{1}".format(
+                        keypath(walked_paths),
+                        json.dumps(data)
+                    )
+                )
+                raise type(err)(
+                    "Can't access '{0}' element of '{1}' object! "
+                    "'{2}' object found!".format(
+                        i,
+                        keypath(walked_paths),
+                        data
+                    )
+                )
+            data = tmp
+            walked_paths[-1] += "[{0}]".format(i)
+
+    key, indexes = disassemble_path(last_path)
+    i_count = len(indexes)
+    if key not in data:
+        if new_on_missing:
+            data[key] = return_obj(indexes)
+        else:
+            raise DevopsConfigMissingKey(key, keypath(walked_paths))
+    elif i_count > 0 and not isinstance(data[key], list):
+        raise TypeError(
+            ("Key '{0}' by '{1}' keypath expected as list "
+             "but '{3}' obj found").format(
+                 key, keypath(walked_paths), type(data[key]).__name__
+            )
+        )
+    if i_count == 0:
+        data[key] = value
+    else:
+        try:
+            list_update(data[key], indexes, value)
+        except (IndexError, TypeError) as err:
+            LOG.error(
+                "Error while setting by '{0}' key of '{1}' keypath".format(
+                    last_path,
+                    keypath(walked_paths)
+                )
+            )
+            LOG.error(
+                "Dump of object by '{0}' keypath:\n{1}".format(
+                    keypath(walked_paths),
+                    json.dumps(data)
+                )
+            )
+            raise type(err)(
+                "Couldn't set value by '{0}' key of '{1}' keypath'".format(
+                    last_path,
+                    keypath(walked_paths)
+                )
+            )
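+
+# Example (sketch): setting nested values by keypath, creating missing
+# intermediate objects on the way (keys and values are placeholders).
+#
+#     conf = {}
+#     set_value_for_dict_by_keypath(conf, 'env_name', 'tcp-lab')
+#     set_value_for_dict_by_keypath(conf, 'groups[0].driver.name', 'libvirt')
+#     # conf == {'env_name': 'tcp-lab',
+#     #          'groups': [{'driver': {'name': 'libvirt'}}]}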
+
+
+class EnvironmentConfig(object):
+    def __init__(self):
+        super(EnvironmentConfig, self).__init__()
+        self._config = None
+
+    @property
+    def config(self):
+        return self._config
+
+    @config.setter
+    def config(self, config):
+        """Setter for config
+
+        :param config: dict
+        """
+        self._config = fix_devops_config(config)
+
+    def __getitem__(self, key):
+        if self._config is not None:
+            conf = self._config['template']['devops_settings']
+            return copy.deepcopy(conf.get(key, None))
+        else:
+            return None
+
+    @logger.logwrap
+    def set_value_by_keypath(self, keypath, value):
+        """Function for set value of devops settings by keypath.
+
+        It's forbidden to set value of self.config directly, so
+        it's possible simply set value by keypath
+        """
+        if self.config is None:
+            raise exceptions.DevopsConfigIsNone()
+        conf = self._config['template']['devops_settings']
+        set_value_for_dict_by_keypath(conf, keypath, value)
+
+    def save(self, filename):
+        """Dump current config into given file
+
+        :param filename: string
+        """
+        if self._config is None:
+            raise exceptions.DevopsConfigIsNone()
+        with open(filename, 'w') as f:
+            f.write(
+                yaml.dump(
+                    self._config, default_flow_style=False
+                )
+            )
+
+    def load_template(self, filename):
+        """Method for reading file with devops config
+
+        :param filename: string
+        """
+        if filename is not None:
+            LOG.debug(
+                "Preparing to load config from template '{0}'".format(
+                    filename
+                )
+            )
+            self.config = templates.yaml_template_load(filename)
+        else:
+            LOG.error("Template filename is not set, loading config " +
+                      "from template aborted.")
diff --git a/tcp_tests/helpers/exceptions.py b/tcp_tests/helpers/exceptions.py
new file mode 100644
index 0000000..259880e
--- /dev/null
+++ b/tcp_tests/helpers/exceptions.py
@@ -0,0 +1,123 @@
+#    Copyright 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+
+class UnexpectedExitCode(Exception):
+    def __init__(self, command, ec, expected_ec, stdout=None, stderr=None):
+        """Exception for unexpected exit code after executing shell/ssh command
+
+        :param command: str - executed command
+        :param ec: int - actual exit code
+        :param expected_ec: list of integers - expected exit codes
+        :param stdout: str
+        :param stderr: str
+        """
+        self.ec = ec
+        self.expected_ec = expected_ec
+        self.cmd = command
+        self.stdout = stdout
+        self.stderr = stderr
+        super(UnexpectedExitCode, self).__init__()
+
+    def __str__(self):
+        message = "Command '{cmd:s}' returned unexpected exit code {code:d}," \
+                  " while waiting for {exp}".format(cmd=self.cmd,
+                                                    code=self.ec,
+                                                    exp=self.expected_ec)
+        if self.stdout:
+            message += "stdout: {}\n".format(self.stdout)
+        if self.stderr:
+            message += "stderr: {}\n".format(self.stderr)
+        return message
+
+
+class VariableNotSet(Exception):
+    def __init__(self, variable_name, expected_value):
+        self.variable_name = variable_name
+        self.expected_value = expected_value
+        super(VariableNotSet, self).__init__()
+
+    def __str__(self):
+        return "Variable {0} was not set in value {1}".format(
+            self.variable_name, self.expected_value)
+
+
+class DevopsConfigPathIsNotSet(ValueError):
+    def __str__(self):
+        return "Devops config/template path is not set!"
+
+
+class DevopsConfigTypeError(TypeError):
+    def __init__(self, type_name):
+        self.type_name = type_name
+        super(DevopsConfigTypeError, self).__init__()
+
+    def __str__(self):
+        return "Devops config should be dict instead of {0}".format(
+            self.type_name
+        )
+
+
+class DevopsConfigIsNone(ValueError):
+    def __str__(self):
+        return "Devops config is None!"
+
+
+class EnvironmentNameIsNotSet(ValueError):
+    def __str__(self):
+        return "Couldn't get environment name!"
+
+
+class EnvironmentDoesNotExist(BaseException):
+    def __init__(self, env_name):
+        super(EnvironmentDoesNotExist, self).__init__()
+        self.env_name = env_name
+
+    def __str__(self):
+        return "Environment {0} does not exist!".format(
+            self.env_name
+        )
+
+
+class EnvironmentAlreadyExists(BaseException):
+    def __init__(self, env_name):
+        super(EnvironmentAlreadyExists, self).__init__()
+        self.env_name = env_name
+
+    def __str__(self):
+        return "Environment {0} already exists!".format(
+            self.env_name
+        )
+
+
+class EnvironmentSnapshotMissing(BaseException):
+    def __init__(self, env_name, snapshot_name):
+        super(EnvironmentSnapshotMissing, self).__init__()
+        self.env_name = env_name
+        self.snapshot_name = snapshot_name
+
+    def __str__(self):
+        return ("Environment '{0}' doesn't have requested snapshot '{1}'! "
+                "Please create the snapshot manually or erase the environment."
+                .format(self.env_name, self.snapshot_name))
+
+
+class EnvironmentIsNotSet(BaseException):
+    def __str__(self):
+        return "Environment is not set!"
+
+
+class BaseImageIsNotSet(BaseException):
+    def __str__(self):
+        return "Base image for creating VMs is not set!"
diff --git a/tcp_tests/helpers/ext.py b/tcp_tests/helpers/ext.py
new file mode 100644
index 0000000..5771eae
--- /dev/null
+++ b/tcp_tests/helpers/ext.py
@@ -0,0 +1,50 @@
+#    Copyright 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+import collections
+
+from enum import IntEnum
+
+
+def enum(*values, **kwargs):
+    names = kwargs.get('names')
+    if names:
+        return collections.namedtuple('Enum', names)(*values)
+    return collections.namedtuple('Enum', values)(*values)
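+
+# Example (sketch): values double as attribute names by default, or
+# separate attribute names can be passed via the 'names' keyword.
+#
+#     colors = enum('red', 'green')              # colors.red == 'red'
+#     codes = enum(0, 1, names=['OK', 'FAIL'])   # codes.OK == 0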
+
+# namedtuple field names must be valid identifiers, so the hyphenated
+# role values are exposed through underscore attribute names.
+UNDERLAY_NODE_ROLE = enum(
+    'salt-master',
+    'salt-minion',
+    names=['salt_master', 'salt_minion'],
+)
+
+NETWORK_TYPE = enum(
+    'private',
+    'public'
+)
+
+SNAPSHOT = enum(
+    'hardware',
+    'underlay',
+    'tcp_deployed',
+    'os_deployed',
+)
+
+LOG_LEVELS = enum(
+    'INFO',
+    'WARNING',
+    'ERROR',
+    'CRITICAL',
+    'DEBUG',
+    'NOTE'
+)
diff --git a/tcp_tests/helpers/log_step.py b/tcp_tests/helpers/log_step.py
new file mode 100644
index 0000000..64ec7aa
--- /dev/null
+++ b/tcp_tests/helpers/log_step.py
@@ -0,0 +1,70 @@
+#    Copyright 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+import re
+
+from tcp_tests import logger
+
+
+LOG = logger.logger
+
+
+def parse_test_doc(docstring):
+    test_case = {}
+    parse_regex = re.compile(r'(?P<title>^(.*\S.*\n)+)+'
+                             r'(?P<empty_line1>\s*\n)'
+                             r'\s*Scenario:\s*\n(?P<scenario>(.+\n)+)'
+                             r'(?P<empty_line2>\s*(\n|$))?'
+                             r'(\s*Duration:\s+(?P<duration>\d+).*\n)?')
+    doc_match = re.match(parse_regex, docstring)
+
+    if not doc_match:
+        LOG.error("Can't parse test docstring, unknown format!")
+        return test_case
+
+    test_case['title'] = re.sub(r'[\n\s]+',  # replace multiple spaces and
+                                ' ',         # line breaks by single space
+                                doc_match.group('title')
+                                ).strip()
+
+    test_case['steps'] = []
+    for raw_step in re.split(r'\s+\d+\.\s*', doc_match.group('scenario')):
+        if not raw_step:
+            # start or end of the string
+            continue
+        test_case['steps'].append(
+            re.sub(r'[\n\s]+',  # replace multiple spaces and
+                   ' ',         # line breaks by single space
+                   raw_step
+                   ).strip()
+        )
+
+    # TODO(apanchenko): now it works only with 'seconds'
+    duration = doc_match.group('duration') or 1000
+    test_case['duration'] = int(duration)
+    return test_case
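+
+# Example (sketch) of a test docstring that parse_test_doc() understands:
+#
+#     """Check something important
+#
+#     Scenario:
+#         1. Do the first thing
+#         2. Check the result
+#
+#     Duration: 300
+#     """
+#
+#     # => {'title': 'Check something important',
+#     #     'steps': ['Do the first thing', 'Check the result'],
+#     #     'duration': 300}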
+
+
+def log_step(func, step_num):
+    if not func.__doc__:
+        LOG.error("Can't show step #{0}: docstring for method {1} not "
+                  "found!".format(step_num, func.__name__))
+        return
+    test_case_steps = parse_test_doc(func.__doc__)['steps']
+    try:
+        LOG.info(" *** [STEP#{0}] {1} ***".format(
+            step_num,
+            test_case_steps[step_num - 1]))
+    except IndexError:
+        LOG.error("Can't show step #{0}: docstring for method {1} does't "
+                  "contain it!".format(step_num, func.__name__))
diff --git a/tcp_tests/helpers/metaclasses.py b/tcp_tests/helpers/metaclasses.py
new file mode 100644
index 0000000..6e1e79b
--- /dev/null
+++ b/tcp_tests/helpers/metaclasses.py
@@ -0,0 +1,27 @@
+#    Copyright 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+
+class SingletonMeta(type):
+    """Metaclass for Singleton
+
+    Main goal: avoid having to implement __new__ in singleton classes
+    """
+    _instances = {}
+
+    def __call__(cls, *args, **kwargs):
+        if cls not in cls._instances:
+            cls._instances[cls] = super(
+                SingletonMeta, cls).__call__(*args, **kwargs)
+        return cls._instances[cls]
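+
+# Example (sketch), Python 2 syntax as used across this change: classes
+# with this metaclass always return the same instance.
+#
+#     class Config(object):
+#         __metaclass__ = SingletonMeta
+#
+#     assert Config() is Config()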
diff --git a/tcp_tests/helpers/oslo_cfg_types.py b/tcp_tests/helpers/oslo_cfg_types.py
new file mode 100644
index 0000000..7465ad0
--- /dev/null
+++ b/tcp_tests/helpers/oslo_cfg_types.py
@@ -0,0 +1,106 @@
+#    Copyright 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+import json
+import os
+
+from oslo_config import cfg
+from oslo_config import types
+
+
+# See http://docs.openstack.org/developer/oslo.config/types.html
+Boolean = types.Boolean
+Integer = types.Integer
+Float = types.Float
+String = types.String
+MultiString = types.MultiString
+List = types.List
+Dict = types.Dict
+IPAddress = types.IPAddress
+Hostname = types.Hostname
+URI = types.URI
+
+
+# JSON config types inspired by https://review.openstack.org/100521
+class JSONList(types.ConfigType):
+    """JSON list type.
+
+       Decode JSON list from a string value to python list.
+    """
+
+    def __init__(self, type_name='JSONList value'):
+        super(JSONList, self).__init__(type_name=type_name)
+
+    def __call__(self, value):
+        if isinstance(value, list):
+            return value
+
+        try:
+            result = json.loads(value)
+        except ValueError:
+            raise ValueError("No JSON object could be decoded from the value: "
+                             "{0}".format(value))
+        if not isinstance(result, list):
+            raise ValueError("Expected JSONList, but decoded '{0}' from the "
+                             "value: {1}".format(type(result), value))
+        return result
+
+    def __repr__(self):
+        return 'JSONList'
+
+    def __eq__(self, other):
+        return self.__class__ == other.__class__
+
+    def _formatter(self, value):
+        return json.dumps(value)
+
+
+class JSONDict(types.ConfigType):
+    """JSON dictionary type.
+
+       Decode JSON dictionary from a string value to python dict.
+    """
+    def __init__(self, type_name='JSONDict value'):
+        super(JSONDict, self).__init__(type_name=type_name)
+
+    def __call__(self, value):
+        if isinstance(value, dict):
+            return value
+
+        try:
+            result = json.loads(value)
+        except ValueError:
+            raise ValueError("No JSON object could be decoded from the value: "
+                             "{0}".format(value))
+        if not isinstance(result, dict):
+            raise ValueError("Expected JSONDict, but decoded '{0}' from the "
+                             "value: {1}".format(type(result), value))
+        return result
+
+    def __repr__(self):
+        return 'JSONDict'
+
+    def __eq__(self, other):
+        return self.__class__ == other.__class__
+
+    def _formatter(self, value):
+        return json.dumps(value)
+
+
+class Cfg(cfg.Opt):
+    """Wrapper for cfg.Opt class that reads default form evironment variables.
+    """
+    def __init__(self, *args, **kwargs):
+        super(Cfg, self).__init__(*args, **kwargs)
+        env_var_name = self.name.upper()
+        self.default = os.environ.get(env_var_name, self.default)
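+
+# Example (sketch): the default of an option can be overridden by an
+# environment variable with the upper-cased option name (names below
+# are placeholders).
+#
+#     opt = Cfg('image_path', String(), default='/tmp/image.qcow2')
+#     # With IMAGE_PATH=/images/ubuntu.qcow2 exported, opt.default
+#     # becomes '/images/ubuntu.qcow2'.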
diff --git a/tcp_tests/helpers/utils.py b/tcp_tests/helpers/utils.py
new file mode 100644
index 0000000..a5ad2b8
--- /dev/null
+++ b/tcp_tests/helpers/utils.py
@@ -0,0 +1,413 @@
+#    Copyright 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+import copy
+import os
+import shutil
+import tempfile
+import time
+import traceback
+
+import paramiko
+import yaml
+from devops.helpers import helpers
+from devops.helpers import ssh_client
+from elasticsearch import Elasticsearch
+
+from tcp_tests import logger
+from tcp_tests import settings
+from tcp_tests.helpers import ext
+
+LOG = logger.logger
+
+
+def get_test_method_name():
+    raise NotImplementedError
+
+
+def update_yaml(yaml_tree=None, yaml_value='', is_uniq=True,
+                yaml_file=settings.TIMESTAT_PATH_YAML, remote=None):
+    """Store/update a variable in YAML file.
+
+    yaml_tree - path to the variable in YAML file, will be created if absent,
+    yaml_value - value of the variable, will be overwritten if exists,
+    is_uniq - if False, append a unique two-digit suffix to the variable name.
+    """
+    def get_file(path, remote=None, mode="r"):
+        if remote:
+            return remote.open(path, mode)
+        else:
+            return open(path, mode)
+
+    if yaml_tree is None:
+        yaml_tree = []
+    with get_file(yaml_file, remote) as file_obj:
+        yaml_data = yaml.safe_load(file_obj)
+
+    # Walk through the 'yaml_data' dict, find or create a tree using
+    # sub-keys in order provided in 'yaml_tree' list
+    item = yaml_data
+    for n in yaml_tree[:-1]:
+        if n not in item:
+            item[n] = {}
+        item = item[n]
+
+    if is_uniq:
+        last = yaml_tree[-1]
+    else:
+        # Create a unique suffix in the range '_00' to '_99'
+        for n in range(100):
+            last = str(yaml_tree[-1]) + '_' + str(n).zfill(2)
+            if last not in item:
+                break
+
+    item[last] = yaml_value
+    with get_file(yaml_file, remote, mode='w') as file_obj:
+        yaml.dump(yaml_data, file_obj, default_flow_style=False)
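+
+# Example (sketch): storing a value under a nested key, assuming the
+# file at settings.TIMESTAT_PATH_YAML already exists (keys and value
+# are placeholders).
+#
+#     update_yaml(['test_tcp_install', 'underlay_deploy'], '125.00')
+#     # writes:
+#     #   test_tcp_install:
+#     #     underlay_deploy: '125.00'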
+
+
+class TimeStat(object):
+    """Context manager for measuring the execution time of the code.
+
+    Usage:
+    with TimeStat([name],[is_uniq=True]):
+    """
+
+    def __init__(self, name=None, is_uniq=False):
+        if name:
+            self.name = name
+        else:
+            self.name = 'timestat'
+        self.is_uniq = is_uniq
+        self.begin_time = 0
+        self.end_time = 0
+        self.total_time = 0
+
+    def __enter__(self):
+        self.begin_time = time.time()
+        return self
+
+    def __exit__(self, exc_type, exc_value, exc_tb):
+        self.end_time = time.time()
+        self.total_time = self.end_time - self.begin_time
+
+        # Create a path where the 'self.total_time' will be stored.
+        yaml_path = []
+
+        # There will be a list of one or two yaml subkeys:
+        # - first key name is the method name of the test
+        method_name = get_test_method_name()
+        if method_name:
+            yaml_path.append(method_name)
+
+        # - second (subkey) name is provided from the decorator (the name of
+        # the just executed function), or manually.
+        yaml_path.append(self.name)
+
+        try:
+            update_yaml(yaml_path, '{:.2f}'.format(self.total_time),
+                        self.is_uniq)
+        except Exception:
+            LOG.error("Error storing time statistic for {0}"
+                      " {1}".format(yaml_path, traceback.format_exc()))
+            raise
+
+    @property
+    def spent_time(self):
+        return time.time() - self.begin_time
+
+
+def reduce_occurrences(items, text):
+    """ Return string without items(substrings)
+        Args:
+            items: iterable of strings
+            test: string
+        Returns:
+            string
+        Raise:
+            AssertionError if any substing not present in source text
+    """
+    for item in items:
+        LOG.debug(
+            "Verifying string {} is shown in "
+            "\"\"\"\n{}\n\"\"\"".format(item, text))
+        assert text.count(item) != 0
+        text = text.replace(item, "", 1)
+    return text
+
+
+def generate_keys():
+    key = paramiko.RSAKey.generate(1024)
+    public = key.get_base64()
+    dirpath = tempfile.mkdtemp()
+    key.write_private_key_file(os.path.join(dirpath, 'id_rsa'))
+    with open(os.path.join(dirpath, 'id_rsa.pub'), 'w') as pub_file:
+        pub_file.write(public)
+    return dirpath
+
+
+def clean_dir(dirpath):
+    shutil.rmtree(dirpath)
+
+
+def retry(tries_number=3, exception=Exception):
+    def _retry(func):
+        assert tries_number >= 1, 'ERROR! @retry is called with no tries!'
+
+        def wrapper(*args, **kwargs):
+            iter_number = 1
+            while True:
+                try:
+                    LOG.debug('Calling function "{0}" with args "{1}" and '
+                              'kwargs "{2}". Try # {3}.'.format(func.__name__,
+                                                                args,
+                                                                kwargs,
+                                                                iter_number))
+                    return func(*args, **kwargs)
+                except exception as e:
+                    if iter_number >= tries_number:
+                        LOG.debug('Failed to execute function "{0}" with {1} '
+                                  'tries!'.format(func.__name__, tries_number))
+                        raise e
+                iter_number += 1
+        return wrapper
+    return _retry
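+
+# Example (sketch): retry a flaky call a few times before giving up;
+# the decorated function and `remote` below are placeholders.
+#
+#     @retry(tries_number=3, exception=RuntimeError)
+#     def check_service(remote):
+#         return remote.execute('service salt-master status')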
+
+
+class ElasticClient(object):
+    def __init__(self, host='localhost', port=9200):
+        self.es = Elasticsearch([{'host': '{}'.format(host),
+                                  'port': port}])
+        self.host = host
+        self.port = port
+
+    def find(self, key, value):
+        LOG.info('Searching field "{0}" for "{1}"'.format(key, value))
+        search_request_body = '{' +\
+            '  "query": {' +\
+            '   "simple_query_string": {' +\
+            '     "query": "{}",'.format(value) +\
+            '     "analyze_wildcard" : "true",' +\
+            '     "fields" : ["{}"],'.format(key) +\
+            '     "default_operator": "AND"' +\
+            '     }' +\
+            ' },' +\
+            '  "size": 1' +\
+            '}'
+        LOG.info('Search by {}'.format(search_request_body))
+
+        def is_found():
+            def temporary_status():
+                res = self.es.search(index='_all', body=search_request_body)
+                return res['hits']['total'] != 0
+            return temporary_status
+
+        predicate = is_found()
+        helpers.wait(predicate, timeout=300,
+                     timeout_msg='Timeout waiting for Elasticsearch result')
+
+        es_raw = self.es.search(index='_all', body=search_request_body)
+        if es_raw['timed_out']:
+            raise RuntimeError('Elastic search timeout exception')
+
+        return ElasticSearchResult(key, value, es_raw['hits']['total'], es_raw)
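+
+# Example (sketch): waiting for a record to appear in Elasticsearch;
+# the host, field and value below are placeholders.
+#
+#     es = ElasticClient(host='172.16.10.1')
+#     result = es.find('programname', 'haproxy')
+#     LOG.info('Found {0} hits'.format(result.count))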
+
+
+class ElasticSearchResult(object):
+    def __init__(self, key, value, count, raw):
+        self.key = key
+        self.value = value
+        self.count = count
+        self.raw = raw
+        if self.count != 0:
+            self.items = raw['hits']['hits']
+
+    def get(self, index):
+        if self.count != 0:
+            return self.items[index]['_source']
+        else:
+            return None
+
+
+def create_file(node, pod, path, size,
+                namespace=ext.Namespace.BASE_NAMESPACE):
+    node.check_call(
+        'kubectl exec {} --namespace={} {}'.format(
+            pod.name,
+            namespace,
+            'dd -- if=/dev/zero -- of={} bs=1MB count={}'.format(path, size)),
+        expected=[ext.ExitCodes.EX_OK])
+
+
+def run_daily_cron(node, pod, task,
+                   namespace=ext.Namespace.BASE_NAMESPACE):
+    node.check_call(
+        'kubectl exec {} --namespace={} {}'.format(
+            pod.name,
+            namespace,
+            '/etc/cron.daily/{}'.format(task)),
+        expected=[ext.ExitCodes.EX_OK])
+
+
+def list_files(node, pod, path, mask,
+               namespace=ext.Namespace.BASE_NAMESPACE):
+    return "".join(node.check_call(
+        'kubectl exec {} --namespace={} {}'.format(
+            pod.name,
+            namespace,
+            'find {} -- -iname {}'.format(path, mask)),
+        expected=[ext.ExitCodes.EX_OK])['stdout']) \
+        .replace('\n', ' ').strip().split(" ")
+
+
+def rm_files(node, pod, path,
+             namespace=ext.Namespace.BASE_NAMESPACE):
+    node.execute(
+        'kubectl exec {} --namespace={} {}'.format(
+            pod.name,
+            namespace,
+            'rm -- {}'.format(path)))
+
+
+class YamlEditor(object):
+    """Manipulations with local or remote .yaml files.
+
+    Usage:
+
+    with YamlEditor("tasks.yaml") as editor:
+        editor.content[key] = "value"
+
+    with YamlEditor("astute.yaml", ip=self.admin_ip) as editor:
+        editor.content[key] = "value"
+    """
+
+    def __init__(self, file_path, host=None, port=None,
+                 username=None, password=None, private_keys=None,
+                 document_id=0,
+                 default_flow_style=False, default_style=None):
+        self.__file_path = file_path
+        self.host = host
+        self.port = port or 22
+        self.username = username
+        self.__password = password
+        self.__private_keys = private_keys or []
+        self.__content = None
+        self.__documents = [{}, ]
+        self.__document_id = document_id
+        self.__original_content = None
+        self.default_flow_style = default_flow_style
+        self.default_style = default_style
+
+    @property
+    def file_path(self):
+        """Open file path
+
+        :rtype: str
+        """
+        return self.__file_path
+
+    @property
+    def content(self):
+        if self.__content is None:
+            self.__content = self.get_content()
+        return self.__content
+
+    @content.setter
+    def content(self, new_content):
+        self.__content = new_content
+
+    def __get_file(self, mode="r"):
+        if self.host:
+            remote = ssh_client.SSHClient(
+                host=self.host,
+                port=self.port,
+                username=self.username,
+                password=self.__password,
+                private_keys=self.__private_keys)
+
+            return remote.open(self.__file_path, mode=mode)
+        else:
+            return open(self.__file_path, mode=mode)
+
+    def get_content(self):
+        """Return a single document from YAML"""
+        def multi_constructor(loader, tag_suffix, node):
+            """Stores all unknown tags content into a dict
+
+            Original yaml:
+            !unknown_tag
+            - some content
+
+            Python object:
+            {"!unknown_tag": ["some content", ]}
+            """
+            if type(node.value) is list:
+                if type(node.value[0]) is tuple:
+                    return {node.tag: loader.construct_mapping(node)}
+                else:
+                    return {node.tag: loader.construct_sequence(node)}
+            else:
+                return {node.tag: loader.construct_scalar(node)}
+
+        yaml.add_multi_constructor("!", multi_constructor)
+        with self.__get_file() as file_obj:
+            self.__documents = [x for x in yaml.load_all(file_obj)]
+            return self.__documents[self.__document_id]
+
+    def write_content(self, content=None):
+        if content:
+            self.content = content
+        self.__documents[self.__document_id] = self.content
+
+        def representer(dumper, data):
+            """Represents a dict key started with '!' as a YAML tag
+
+            Assumes that there is only one !tag in the dict at the
+            current indent.
+
+            Python object:
+            {"!unknown_tag": ["some content", ]}
+
+            Resulting yaml:
+            !unknown_tag
+            - some content
+            """
+            key = data.keys()[0]
+            if key.startswith("!"):
+                value = data[key]
+                if type(value) is dict:
+                    node = dumper.represent_mapping(key, value)
+                elif type(value) is list:
+                    node = dumper.represent_sequence(key, value)
+                else:
+                    node = dumper.represent_scalar(key, value)
+            else:
+                node = dumper.represent_mapping(u'tag:yaml.org,2002:map', data)
+            return node
+
+        yaml.add_representer(dict, representer)
+        with self.__get_file("w") as file_obj:
+            yaml.dump_all(self.__documents, file_obj,
+                          default_flow_style=self.default_flow_style,
+                          default_style=self.default_style)
+
+    def __enter__(self):
+        self.__content = self.get_content()
+        self.__original_content = copy.deepcopy(self.content)
+        return self
+
+    def __exit__(self, x, y, z):
+        if self.content == self.__original_content:
+            return
+        self.write_content()