Add scenario for automatic Galera upgrade

Change-Id: I080f169292d62f12bb3fa7496e3644a655ffe131
diff --git a/tcp_tests/fixtures/reclass_fixtures.py b/tcp_tests/fixtures/reclass_fixtures.py
new file mode 100644
index 0000000..a2d84cb
--- /dev/null
+++ b/tcp_tests/fixtures/reclass_fixtures.py
@@ -0,0 +1,17 @@
+import pytest
+
+from tcp_tests import logger
+from tcp_tests.managers import reclass_manager
+
+LOG = logger.logger
+
+
+@pytest.fixture(scope='function')
+def reclass_actions(config, underlay_actions):
+    """Fixture that provides various actions for salt
+
+    :param config: fixture provides oslo.config
+    :param underlay_actions: fixture provides underlay manager
+    :rtype: ReclassManager
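+
+    Example (illustrative usage; the test function name is an assumption,
+    the add_class call is taken from the Galera upgrade test in this change):
+
+        def test_example(reclass_actions):
+            reclass_actions.add_class(
+                "system.jenkins.client.job.deploy.update.upgrade_galera",
+                "cluster/*/cicd/control/leader.yml")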
+    """
+    return reclass_manager.ReclassManager(config, underlay_actions)
diff --git a/tcp_tests/managers/reclass_manager.py b/tcp_tests/managers/reclass_manager.py
new file mode 100644
index 0000000..0e1133c
--- /dev/null
+++ b/tcp_tests/managers/reclass_manager.py
@@ -0,0 +1,120 @@
+#    Copyright 2016 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+from tcp_tests import logger
+from tcp_tests.managers.execute_commands import ExecuteCommandsMixin
+
+LOG = logger.logger
+
+
+class ReclassManager(ExecuteCommandsMixin):
+    """docstring for ReclassManager"""
+
+    __config = None
+    __underlay = None
+    # command prefix that activates the virtualenv with reclass-tools
+    reclass_tools_cmd = ". venv-reclass-tools/bin/activate; reclass-tools "
+    tgt = "cfg01"    # node where reclass-tools is installed
+
+    def __init__(self, config, underlay):
+        self.__config = config
+        self.__underlay = underlay
+
+        reclass_node = [node_name
+                        for node_name in self.__underlay.node_names()
+                        if self.tgt in node_name]
+        self.ssh = self.__underlay.remote(node_name=reclass_node[0])
+
+        super(ReclassManager, self).__init__(config=config, underlay=underlay)
+
+    def check_existence(self, key):
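+        """Check whether a key is already defined in the reclass model.
+
+        Logs a warning and returns True if the key is found, False otherwise.
+        """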
+        if key in self.ssh.check_call(
+                "{reclass_tools} get-key {key} /srv/salt/reclass/classes"
+                .format(
+                    reclass_tools=self.reclass_tools_cmd,
+                    key=key
+                )):
+            LOG.warning("({}) key already exists in reclass".format(key))
+            return True
+        return False
+
+    def add_key(self, key, value, short_path):
+        """
+        Add or update a key in a reclass YAML file.
+
+        Logs a warning if the key already exists.
+
+        :param key: string, key which will be added or updated
+        :param value: value of the key
+        :param short_path: path to the reclass YAML file, relative to the
+            default reclass location (/srv/salt/reclass/classes).
+            May look like cluster/*/cicd/control/leader.yml
+        :return: None
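+
+        Example (illustrative values only, not taken from this change):
+
+            reclass_actions.add_key(
+                "parameters._param.jenkins_pipelines_branch",
+                "release/2019.2.0",
+                "cluster/*/cicd/control/leader.yml")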
+        """
+        self.check_existence(key)
+        self.ssh.check_call(
+            "{reclass_tools} add-key {key} {value} \
+            /srv/salt/reclass/classes/{path}".format(
+                reclass_tools=self.reclass_tools_cmd,
+                key=key,
+                value=value,
+                path=short_path
+            ))
+
+    def add_bool_key(self, key, value, short_path):
+        """
+        Add or update a boolean key in a reclass YAML file.
+
+        Logs a warning if the key already exists.
+
+        :param key: string, key which will be added or updated
+        :param value: boolean value of the key, for example "True" or "False"
+        :param short_path: path to the reclass YAML file, relative to the
+            default reclass location (/srv/salt/reclass/classes).
+            May look like cluster/*/cicd/control/leader.yml
+        :return: None
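+
+        Example (as used by the Galera upgrade test in this change):
+
+            reclass_actions.add_bool_key(
+                "parameters._param.openstack_upgrade_enabled",
+                "True",
+                "cluster/*/infra/init.yml")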
+        """
+        self.check_existence(key)
+        self.ssh.check_call(
+            "{reclass_tools} add-bool-key {key} {value} \
+            /srv/salt/reclass/classes/{path}".format(
+                reclass_tools=self.reclass_tools_cmd,
+                key=key,
+                value=value,
+                path=short_path
+            ), raise_on_err=False)
+
+    def add_class(self, value, short_path):
+        """
+        Add a class to the 'classes' list of a reclass YAML file.
+
+        Logs a warning if the class is already present.
+
+        :param value: class (role) to add to the 'classes' list in reclass
+        :param short_path: path to the reclass YAML file, relative to the
+            default reclass location (/srv/salt/reclass/classes).
+            May look like cluster/*/cicd/control/leader.yml
+        :return: None
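+
+        Example (as used by the Galera upgrade test in this change):
+
+            reclass_actions.add_class(
+                "system.jenkins.client.job.deploy.update.upgrade_galera",
+                "cluster/*/cicd/control/leader.yml")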
+        """
+        if value in self.ssh.check_call(
+                "{reclass_tools} get-key classes \
+                /srv/salt/reclass/classes/{path}".format(
+                    reclass_tools=self.reclass_tools_cmd,
+                    path=short_path
+                )):
+            LOG.warning("Class {} already exists in {}".format(
+                value,
+                short_path
+            ))
+
+        self.ssh.check_call(
+            "{reclass_tools} add-key classes {value} \
+            /srv/salt/reclass/classes/{path} --merge".format(
+                reclass_tools=self.reclass_tools_cmd,
+                value=value,
+                path=short_path
+            ))
diff --git a/tcp_tests/tests/system/conftest.py b/tcp_tests/tests/system/conftest.py
index 0bd4232..0627ede 100644
--- a/tcp_tests/tests/system/conftest.py
+++ b/tcp_tests/tests/system/conftest.py
@@ -28,6 +28,7 @@
 from tcp_tests.fixtures.drivetrain_fixtures import *  # noqa
 from tcp_tests.fixtures.day1_fixtures import *  # noqa
 from tcp_tests.fixtures.runtest_fixtures import * # noqa
+from tcp_tests.fixtures.reclass_fixtures import * # noqa
 
 
 __all__ = sorted([  # sort for documentation
diff --git a/tcp_tests/tests/system/test_update.py b/tcp_tests/tests/system/test_update.py
index 3db5a74..4ec04b0 100644
--- a/tcp_tests/tests/system/test_update.py
+++ b/tcp_tests/tests/system/test_update.py
@@ -11,7 +11,7 @@
 class TestUpdateMcpCluster(object):
     """
     Following the steps in
-    https://docs.mirantis.com/mcp/q4-18/mcp-operations-guide/update-upgrade/minor-update.html#minor-update
+    https://docs.mirantis.com/mcp/master/mcp-operations-guide/update-upgrade/minor-update.html#minor-update
     """
 
     @pytest.mark.grab_versions
@@ -134,6 +134,76 @@
         assert update_drivetrain == 'SUCCESS', "{0}\n{1}".format(
             description, '\n'.join(stages))
 
+    @pytest.mark.grab_versions
+    @pytest.mark.parametrize("_", [settings.ENV_NAME])
     @pytest.mark.run_mcp_update
-    def test_update_gluster(self):
-        pass
+    def test_update_galera(self, salt_actions, reclass_actions, show_step, _):
+        """ Upgrade Galera automatically
+
+        Scenario:
+            1. Add the Galera upgrade pipeline job to DriveTrain
+            2. Apply the jenkins.client state on the Jenkins nodes
+            3. Set the openstack_upgrade_enabled parameter to true
+            4. Refresh pillars
+            5. Add repositories with the new Galera packages
+            6. Start the deploy-upgrade-galera job from Jenkins
+        """
+        salt = salt_actions
+        reclass = reclass_actions
+        # ################### Enable pipeline #################################
+        show_step(1)
+        reclass.add_class(
+            "system.jenkins.client.job.deploy.update.upgrade_galera",
+            "cluster/*/cicd/control/leader.yml")
+        show_step(2)
+        salt.enforce_state("I@jenkins:client", "jenkins.client")
+
+        # ############### Enable automatic upgrade ############################
+        show_step(3)
+        reclass.add_bool_key("parameters._param.openstack_upgrade_enabled",
+                             "True",
+                             "cluster/*/infra/init.yml")
+
+        show_step(4)
+        salt.enforce_state("dbs*", "saltutil.refresh_pillar")
+
+        # ############# Add repositories with new Galera packages #######
+        show_step(5)
+        salt.enforce_state("dbs*", "linux.system.repo")
+        salt.enforce_state("cfg*", "salt.master")
+
+        jenkins_creds = salt.get_cluster_jenkins_creds()
+
+        # ############ Start the deploy-upgrade-galera job in Jenkins ########
+        show_step(6)
+
+        jenkins_url = jenkins_creds.get('url')
+        jenkins_user = jenkins_creds.get('user')
+        jenkins_pass = jenkins_creds.get('pass')
+        jenkins_build_timeout = 40*60
+        job_name = 'deploy-upgrade-galera'
+        job_parameters = {
+            'INTERACTIVE': 'false'
+        }
+
+        update_galera = run_jenkins_job.run_job(
+            host=jenkins_url,
+            username=jenkins_user,
+            password=jenkins_pass,
+            build_timeout=jenkins_build_timeout,
+            verbose=False,
+            job_name=job_name,
+            job_parameters=job_parameters)
+
+        (description, stages) = get_jenkins_job_stages.get_deployment_result(
+            host=jenkins_url,
+            username=jenkins_user,
+            password=jenkins_pass,
+            job_name=job_name,
+            build_number='lastBuild')
+
+        LOG.info(description)
+        LOG.info('\n'.join(stages))
+
+        assert update_galera == 'SUCCESS', "{0}\n{1}".format(
+            description, '\n'.join(stages))