Merge "Fix small misconfiguration in E/// templates"
diff --git a/checklist.yaml b/checklist.yaml
index aa35519..107c75f 100644
--- a/checklist.yaml
+++ b/checklist.yaml
@@ -6,6 +6,19 @@
     status: ProdFailed
     defects: PROD-0
 
+  - title: deployment_*
+    errors:
+      - "SaltReqTimeoutError: Message timed out"
+    status: ProdFailed
+    defects: PROD-35053
+
+  - title: deployment_*
+    errors:
+      - "Exception: Step 'Configure salt-minion on osd"
+      - No space left on device
+    status: ProdFailed
+    defects: PROD-36101
+
   - title: 'tempest.api.network.admin.test_routers.RoutersAdminTest.test_update_router_set_gateway[id-6cc285d8-46bf-4f36-9b1a-783e3008ba79]'
     errors:
       - 'testtools.matchers._impl.MismatchError: 0 != 1'
diff --git a/jobs/pipelines/run-test-scenarios.groovy b/jobs/pipelines/run-test-scenarios.groovy
index f62825f..31aff06 100644
--- a/jobs/pipelines/run-test-scenarios.groovy
+++ b/jobs/pipelines/run-test-scenarios.groovy
@@ -48,12 +48,13 @@
                     }
                 } catch (e) {
                     common.printMsg("Tests are failed: " + e.message, "purple")
+                    currentBuild.result = 'FAILURE'
                 }
             } // stage("Run tests")
 
             stage("Archive all xml reports") {
                 dir("${env.slave_workdir }") {
-                    archiveArtifacts artifacts: "**/*.xml,**/*.ini,**/*.log,**/*.tar.gz"
+                    archiveArtifacts artifacts: "**/*.xml,**/*.log"
                     }
             }
 
diff --git a/jobs/project.yaml b/jobs/project.yaml
index fec4708..3e0f38c 100644
--- a/jobs/project.yaml
+++ b/jobs/project.yaml
@@ -53,6 +53,7 @@
      # - diff_params_reclass_system - can't be moved to JJB, unsupported parameters
       - environment-template-ci
       - self-deploy-jobs
+      - self-deploy-virtualenvs
       - tcp-qa-ci
       - test_cleanup
      # - testrail-reporter-ci - can't be moved to JJB, unsupported parameters
diff --git a/jobs/templates/self-deploy-virtualenvs.yaml b/jobs/templates/self-deploy-virtualenvs.yaml
new file mode 100644
index 0000000..eb2c89b
--- /dev/null
+++ b/jobs/templates/self-deploy-virtualenvs.yaml
@@ -0,0 +1,24 @@
+---
+- job-template:
+    concurrent: false
+    disabled: false
+    description: '{job-description}'
+    name: self-deploy-virtualenvs
+    display-name: Update virtualenvs on SRE CI
+    parameters:
+    - string:
+        default: 'master'
+        description: 'tcp-qa review refspec'
+        name: TCP_QA_REFS
+        trim: 'true'
+    scm:
+      - git:
+          branches:
+            - FETCH_HEAD
+          refspec: '${{TCP_QA_REFS}}'
+          url: https://gerrit.mcp.mirantis.com/mcp/tcp-qa
+    builders:
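+      # CHANGE_RIGHTS=false keeps jenkins_virtualenvs.sh from chown'ing /home/jenkins (see the script's guard)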
+      - shell: "chmod +x tcp_tests/templates/_packer/scripts/jenkins_virtualenvs.sh"
+      - shell: "export CHANGE_RIGHTS=false ; ./tcp_tests/templates/_packer/scripts/jenkins_virtualenvs.sh"
diff --git a/jobs/templates/test-scenarios.yml b/jobs/templates/test-scenarios.yml
index 730341e..cffbe58 100644
--- a/jobs/templates/test-scenarios.yml
+++ b/jobs/templates/test-scenarios.yml
@@ -62,17 +62,31 @@
     name: 'backup-tests'
     test_scenario:
 
-      - backup-saltmaster-queens-dvr-sl:
+      - backup-saltmaster:
           run-test-opts: '-k TestBackupRestoreMaster'
           deployment: heat-cicd-queens-dvr-sl
+          display-name: Backup/Restore SaltMaster
 
-      - backup-saltmaster-pike-dvr-sl:
+      - backup-saltmaster-with-maas:
          run-test-opts: '-k TestBackupRestoreMaster'
-         deployment: heat-cicd-pike-dvr-sl
+         deployment: bm-cicd-queens-ovs-maas
+         display-name: Backup/Restore SaltMaster (with MAAS)
 
       - backup-cassandra-queens-contrail-sl:
          run-test-opts: '-k TestBackupRestoreCassandra'
          deployment: heat-cicd-queens-contrail41-sl
+         display-name: Backup/Restore Cassandra
+
+      - backup-galera-queens-sl:
+          run-test-opts: '-k TestBackupRestoreGalera'
+          deployment: heat-cicd-queens-dvr-sl
+          display-name: Backup/Restore Galera
+
+      - backup-zookeeper-queens-sl:
+          run-test-opts: '-k TestBackupRestoreZooKeeper'
+          deployment: heat-cicd-queens-dvr-sl
+          display-name: Backup/Restore ZooKeeper
 
     jobs:
       - '{test_scenario}'
@@ -81,8 +95,17 @@
     name: 'ceph-tests'
     test_scenario:
       - ceph_osd-queens-dvr-sl:
-         run-test-opts: '-k TestCephOsd'
-         deployment: heat-cicd-queens-dvr-sl
+          run-test-opts: '-k TestCephOsd'
+          deployment: heat-cicd-queens-dvr-sl
+          display-name: Add/Remove OSD node
+      - ceph_cmn-queens-dvr-sl:
+          run-test-opts: '-k TestCephMon'
+          deployment: heat-cicd-queens-dvr-sl
+          display-name: Add/Remove CMN node
+      - ceph_mgr-queens-dvr-sl:
+          run-test-opts: '-k TestCephMgr'
+          deployment: heat-cicd-queens-dvr-sl
+          display-name: Add/Remove MGR node
     jobs:
       - '{test_scenario}'
 
@@ -119,30 +142,36 @@
           deployment: released-heat-cicd-pike-dvr-sl
           disabled: true
           run-test-opts: '{test-opt}'
+          display-name: MCP update (pike)
 
       - mcp-update-queens-dvr-sl:
           deployment: released-heat-cicd-queens-dvr-sl
           disabled: true
           run-test-opts: '{test-opt}'
+          display-name: MCP update (queens)
 
       - mcp-update-pike-contrail-sl:
           deployment: released-heat-cicd-pike-contrail41-sl
           disabled: true
           run-test-opts: '{test-opt-with-contrail}'
+          display-name: MCP update (pike + OC)
 
       - mcp-update-queens-contrail-sl:
           deployment: released-heat-cicd-queens-contrail41-sl
           disabled: true
           run-test-opts: '{test-opt-with-contrail}'
+          display-name: MCP update (queens + OC)
 
       - os-update-pike-to-queens:
           deployment: heat-cicd-pike-dvr-sl
           run-test-opts: '-k TestUpdatePikeToQueens'
+          display-name: Update Pike -> Queens
 
       - ceph-update-luminous-to-nautilus:
           deployment: heat-cicd-pike-dvr-sl
           disabled: true
           run-test-opts: '-k TestCephUpdate'
+          display-name: Update Ceph Luminous -> Nautilus
 
     jobs:
       - '{test_scenario}'
@@ -150,6 +179,7 @@
 ###################### JOB TEMPLATE ###################
 - job-template:
     name: '{test_scenario}'
+    display-name: '{display-name}'
     project-type: pipeline
     concurrent: false
     disabled: '{disabled|false}'
@@ -192,12 +222,6 @@
         name: ENV_NAME
         trim: 'false'
     - string:
-        default: ''
-        description: 'Example: refs/changes/89/411189/36
-                       (for now - only one reference allowed)'
-        name: TCP_QA_REFS
-        trim: 'false'
-    - string:
         default: 'openstack_slave_{deployment}'
         description: 'Required: Name of the jenkins slave to create the environment
                       To be set by the parent deployment job.'
@@ -210,6 +234,11 @@
         name: PARENT_WORKSPACE
         trim: 'false'
     - string:
+        default: '2019.2.0'
+        description: 'MCP version'
+        name: MCP_VERSION
+        trim: 'false'
+    - string:
         default: ''
         description: 'Completed steps to install components on the environment.
                       If tests require some additional components, it may be installed in
diff --git a/jobs/view.yaml b/jobs/view.yaml
index fc30261..fdf9f3c 100644
--- a/jobs/view.yaml
+++ b/jobs/view.yaml
@@ -23,12 +23,8 @@
     view-type: list
     description: |
       Managed by JJB <br>
-      <b>!!! Jobs run on manual trigger, deploy 2019.2.4 release with enabled Q4 updates repos from http://mirror.mirantis.com/update/2019.2.0/ !!!</b><br>
+      <b>!!! Jobs run on manual trigger, deploy 2019.2.4 release with enabled Q4 updates repos from http://mirror.mirantis.com/update/2019.2.4/ !!!</b><br>
      See deploy details to enable deploy from tags 2019.2.4<br>
-      SALT_MODELS_SYSTEM_COMMIT: fe72c46d604dcf1d44e4478d60204846b899a77a<br>
-      COOKIECUTTER_TEMPLATE_COMMIT:1deaf78727cb3bad748fbd73bcfb6af451a046c1<br>
-      MK_PIPELINES_REF: refs/tags/2019.2.4<br>
-      PIPELINE_LIBRARY_REF: refs/tags/2019.2.4<br>
     filter-executors: true
     filter-queue: true
     job-name:
@@ -71,11 +67,12 @@
     name: self-deploy
     view-type: list
     description: |
-      MCP-2019.2.x-proposed-updates, managed by JJB
+      Jobs to update SRE CI configuration
     filter-executors: true
     filter-queue: true
     job-name:
       - self-deploy-jobs
+      - self-deploy-virtualenvs
     columns:
       - status
       - weather
diff --git a/tcp_tests/managers/reclass_manager.py b/tcp_tests/managers/reclass_manager.py
index 56116fc..7c75086 100644
--- a/tcp_tests/managers/reclass_manager.py
+++ b/tcp_tests/managers/reclass_manager.py
@@ -155,6 +155,7 @@
                 value,
                 short_path
             ))
+            return
 
         self.ssh.check_call(
             "{reclass_tools} add-key classes {value} \
@@ -164,6 +165,23 @@
                 path=short_path
             ))
 
+    def delete_class(self, value, short_path):
+        """
+        Remove a class from the 'classes' list in the given reclass file.
+        Shows a warning if the class doesn't exist.
+        :param value: class to delete from the 'classes' parameter in reclass
+        :param short_path: path to the reclass yaml file,
+            relative to the default reclass location.
+            May look like cluster/*/cicd/control/leader.yml
+        :return: None
+        """
+        self.ssh.check_call(
+            "{reclass_tools} del-key classes {value} \
+            /srv/salt/reclass/classes/{path}".format(
+                reclass_tools=self.reclass_tools_cmd,
+                value=value,
+                path=short_path
+            ))
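+
+    # Usage sketch (mirrors the call in test_ceph_operations.py from this change):
+    #   reclass_actions.delete_class("system.salt.control.virt",
+    #                                "classes/cluster/*/infra/kvm.yml")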
+
     def delete_key(self, key, short_path):
         """
         Remove key from the provided file
diff --git a/tcp_tests/templates/_packer/scripts/jenkins_virtualenvs.sh b/tcp_tests/templates/_packer/scripts/jenkins_virtualenvs.sh
index b949cfb..1e0d43e 100644
--- a/tcp_tests/templates/_packer/scripts/jenkins_virtualenvs.sh
+++ b/tcp_tests/templates/_packer/scripts/jenkins_virtualenvs.sh
@@ -5,6 +5,10 @@
 REPORT_VENV_PATH=/home/jenkins/venv_testrail_reporter
 TESTMARKER_VENV_PATH=/home/jenkins/venv_testrail_analyzer
 
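+# CHANGE_RIGHTS defaults to true; callers such as the self-deploy-virtualenvs
+# job export CHANGE_RIGHTS=false to skip the final chown step.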
+if [ ! "$CHANGE_RIGHTS" ]; then
+    CHANGE_RIGHTS=true
+fi
+
 if [ ! -d ${DEVOPS_VENV_PATH} ]; then
     virtualenv ${DEVOPS_VENV_PATH}
 fi
@@ -29,7 +33,11 @@
 
 # Install testRail analyzer
 . ${TESTMARKER_VENV_PATH}/bin/activate
-pip install git+https://github.com/ibumarskov/testrail-reporter -U
+#pip install git+https://github.com/ibumarskov/testrail-reporter -U
+# Pull from a review to test changes in testrail-reporter before they are merged
+pip install git+https://review.gerrithub.io/ibumarskov/testrail-reporter@refs/changes/94/514594/3
 deactivate
 
-chown -R jenkins:jenkins /home/jenkins/
+if [ "$CHANGE_RIGHTS" = true ]; then
+  chown -R jenkins:jenkins /home/jenkins/
+fi
diff --git a/tcp_tests/templates/bm-e7-cicd-pike-ovs-maas/salt.yaml b/tcp_tests/templates/bm-e7-cicd-pike-ovs-maas/salt.yaml
index 8b3822d..613c9bf 100644
--- a/tcp_tests/templates/bm-e7-cicd-pike-ovs-maas/salt.yaml
+++ b/tcp_tests/templates/bm-e7-cicd-pike-ovs-maas/salt.yaml
@@ -94,4 +94,15 @@
 
 {{SHARED_TEST_TOOLS.MACRO_INSTALL_RECLASS_TOOLS()}}
 {{ SHARED_WORKAROUNDS.MACRO_CEPH_SET_PGNUM() }}
-{{ SHARED_WORKAROUNDS.CLEAR_CEPH_OSD_DRIVES() }}
\ No newline at end of file
+{{ SHARED_WORKAROUNDS.CLEAR_CEPH_OSD_DRIVES() }}
+
+- description: Disable known_hosts_autopopulation
+  cmd: |
+    set -ex;
+    . /root/venv-reclass-tools/bin/activate;
+    reclass-tools add-bool-key parameters.openssh.client.known_hosts_autopopulation false /srv/salt/reclass/nodes/_generated/cfg01.{{ LAB_CONFIG_NAME }}.local.yml
+    git add /srv/salt/reclass/nodes/_generated
+    git commit -m "[from tcp-qa] known_hosts_autopopulation disabled"
+  node_name: {{ HOSTNAME_CFG01 }}
+  retry: {count: 1, delay: 5}
+  skip_fail: false
diff --git a/tcp_tests/tests/system/test_ceph_operations.py b/tcp_tests/tests/system/test_ceph_operations.py
index 02a6888..6f2fb52 100644
--- a/tcp_tests/tests/system/test_ceph_operations.py
+++ b/tcp_tests/tests/system/test_ceph_operations.py
@@ -66,7 +66,7 @@
 """
 
 
-@pytest.fixture(scope='module')
+@pytest.fixture(scope='session')
 def add_xtra_node_to_salt(salt_actions, underlay_actions,
                           config, reclass_actions):
     """
@@ -80,6 +80,15 @@
     cfg_node = [node['node_name'] for node in config.underlay.ssh
                 if 'salt_master' in node.get('roles')][0]
 
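+    # Describe the xtra node in reclass (classes and network context)
+    # before accepting its salt-key below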
+    salt_actions.enforce_state("*", "reclass")
+    reclass_actions.add_class(
+        "environment.heat-cicd-queens-dvr-sl.linux_network_interface",
+        short_path="../nodes/_generated/xtra.*.yml")
+    reclass_actions.add_class("environment.heat-cicd-queens-dvr-sl.overrides",
+                              short_path="../nodes/_generated/xtra.*.yml")
+    reclass_actions.merge_context(yaml_context=xtra_network_interface,
+                                  short_path="../nodes/_generated/xtra.*.yml")
+
     underlay_actions.check_call(
             "salt-key -a {node} --include-all -y".format(node=xtra_node),
             node_name=cfg_node,
@@ -90,9 +99,8 @@
         node_name=xtra_node,
         raise_on_err=False)
     salt_actions.enforce_state("I@salt:master", "reclass")
-
-    reclass_actions.merge_context(yaml_context=xtra_network_interface,
-                                  short_path="../nodes/_generated/xtra.*.yml")
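+    # Apply base OS and SSH configuration on the new node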
+    salt_actions.enforce_state("xtra*", "linux")
+    salt_actions.enforce_state("xtra*", "openssh")
 
     yield
 
@@ -103,6 +111,12 @@
     #         raise_on_err=False)
 
 
+@pytest.fixture(scope='session')
+def wa_prod36167(reclass_actions):
+    """Workaround for PROD-36167: drop system.salt.control.virt
+    from the KVM cluster class"""
+    reclass_actions.delete_class("system.salt.control.virt",
+                                 "classes/cluster/*/infra/kvm.yml")
+
+
 @pytest.mark.usefixtures("add_xtra_node_to_salt")
 class TestCephOsd(object):
 
@@ -310,6 +324,12 @@
     def test_add_node_process(self,
                               drivetrain_actions,
                               describe_node_in_reclass):
+        """
+        https://docs.mirantis.com/mcp/q4-18/mcp-operations-guide/openstack-operations/ceph-operations/manage-nodes/add-mon-nodes.html
+        :param drivetrain_actions:
+        :param describe_node_in_reclass:
+        :return:
+        """
         dt = drivetrain_actions
 
         job_name = "ceph-add-node"
@@ -340,6 +360,139 @@
         assert job_result == 'SUCCESS', job_description
 
 
+add_rgw_ceph_init_yml = """
+parameters:
+    _param:
+        ceph_rgw_node04_hostname: xtra
+        ceph_rgw_node04_address: 10.6.0.205
+        ceph_rgw_node04_ceph_public_address: 10.166.49.209
+    linux:
+        network:
+            host:
+                rgw04:
+                    address: ${_param:ceph_rgw_node04_address}
+                    names:
+                    - ${_param:ceph_rgw_node04_hostname}
+                    - ${_param:ceph_rgw_node04_hostname}.${_param:cluster_domain}
+"""  # noqa: E501
+
+add_rgw_ceph_rgw_yml = """
+parameters:
+  _param:
+    cluster_node04_hostname: ${_param:ceph_rgw_node04_hostname}
+    cluster_node04_address: ${_param:ceph_rgw_node04_address}
+  ceph:
+    common:
+      keyring:
+        rgw.xtra:
+          caps:
+            mon: "allow rw"
+            osd: "allow rwx"
+  haproxy:
+    proxy:
+      listen:
+        radosgw:
+          servers:
+            - name: ${_param:cluster_node04_hostname}
+              host: ${_param:cluster_node04_address}
+              port: ${_param:haproxy_radosgw_source_port}
+              params: check
+"""
+
+add_rgw_config_init_yml = """
+parameters:
+  reclass:
+    storage:
+      node:
+        ceph_rgw_node04:
+          name: ${_param:ceph_rgw_node04_hostname}
+          domain: ${_param:cluster_domain}
+          classes:
+          - cluster.${_param:cluster_name}.ceph.rgw
+          params:
+            salt_master_host: ${_param:reclass_config_master}
+            linux_system_codename: ${_param:ceph_rgw_system_codename}
+            single_address: ${_param:ceph_rgw_node04_address}
+            deploy_address: ${_param:ceph_rgw_node04_deploy_address}
+            ceph_public_address: ${_param:ceph_rgw_node04_public_address}
+            keepalived_vip_priority: 104
+"""
+
+
+@pytest.mark.usefixtures("add_xtra_node_to_salt")
+class TestCephRgw(object):
+    @pytest.fixture
+    def describe_node_in_reclass(self,
+                                 reclass_actions, salt_actions):
+        LOG.info("Executing pytest SETUP "
+                 "from describe_node_in_reclass fixture")
+        reclass = reclass_actions
+        # ---- cluster/*/ceph/init.yml --------------
+        reclass.merge_context(yaml_context=add_rgw_ceph_init_yml,
+                              short_path="cluster/*/ceph/init.yml")
+
+        reclass.merge_context(yaml_context=add_rgw_ceph_rgw_yml,
+                              short_path="cluster/*/ceph/rgw.yml")
+
+        reclass.merge_context(yaml_context=add_rgw_config_init_yml,
+                              short_path="cluster/*/infra/config/init.yml")
+
+        salt_actions.run_state("*", "saltutil.refresh_pillar")
+
+    @pytest.fixture
+    def remove_node_from_reclass(self,
+                                 reclass_actions, salt_actions):
+        LOG.info("Executing pytest SETUP "
+                 "from remove_node_from_reclass fixture")
+        # reclass = reclass_actions
+        # reclass.delete_key(
+        #     key="parameters.reclass.storage.node.ceph_rgw_node04",
+        #     short_path="cluster/*/infra/config/init.yml")
+        # reclass.delete_key(
+        #     key="parameters.linux.network.host.xtra",
+        #     short_path="cluster/*/ceph/init.yml"
+        # )
+
+    def test_add_node_process(self,
+                              drivetrain_actions,
+                              describe_node_in_reclass):
+        """
+        https://docs.mirantis.com/mcp/q4-18/mcp-operations-guide/openstack-operations/ceph-operations/manage-nodes/add-rgw-nodes.html
+        :param drivetrain_actions:
+        :param describe_node_in_reclass:
+        :return:
+        """
+        dt = drivetrain_actions
+
+        job_name = "ceph-add-node"
+        job_parameters = {
+            'HOST': 'xtra*',
+            'USE_UPMAP': True
+            }
+        job_result, job_description = dt.start_job_on_jenkins(
+            job_name=job_name,
+            job_parameters=job_parameters,
+            verbose=True)
+        assert job_result == 'SUCCESS', job_description
+
+    def test_delete_node_process(self,
+                                 remove_node_from_reclass,
+                                 drivetrain_actions):
+        dt = drivetrain_actions
+
+        job_name = "ceph-remove-node"
+        job_parameters = {
+            'HOST': 'xtra*',
+            'USE_UPMAP': True
+            }
+        job_result, job_description = dt.start_job_on_jenkins(
+            job_name=job_name,
+            job_parameters=job_parameters,
+            verbose=True)
+        assert job_result == 'SUCCESS', job_description
+
+
+@pytest.mark.usefixtures("add_xtra_node_to_salt")
 class TestCephMgr(object):
     def test_add_node(self):
         pass