import pytest
import sys
import os

from tcp_tests import logger
from tcp_tests import settings

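# Allow running this module straight from a tcp-qa checkout; the guarded
# imports below fail fast with a hint if the package cannot be found.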
sys.path.append(os.getcwd())
try:
    from tcp_tests.fixtures import config_fixtures
    from tcp_tests.managers import underlay_ssh_manager
    from tcp_tests.managers import saltmanager as salt_manager
except ImportError:
    print("ImportError: Run the application from the tcp-qa directory or "
          "set the PYTHONPATH environment variable to the directory which "
          "contains ./tcp_tests")
    sys.exit(1)
LOG = logger.logger


def has_only_similar(values_by_nodes):
    """
    :param values_by_nodes: dict
    :return: bool, True if all items in the dict have the same value
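
    Example with hypothetical node names:
        has_only_similar({'ctl01': '5.5', 'ctl02': '5.5'})  -> True
        has_only_similar({'ctl01': '5.5', 'ctl02': '3.8'})  -> False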
    """
    values = list(values_by_nodes.values())
    return all(value == values[0] for value in values)


def get_control_plane_targets():
    config = config_fixtures.config()
    underlay = underlay_ssh_manager.UnderlaySSHManager(config)
    saltmanager = salt_manager.SaltManager(config, underlay)
    targets = list()
    telemetry_exists = False
    barbican_exists = False
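    # The flags above stay False when the pillar data cannot be retrieved,
    # so the optional targets below are simply skipped in that case.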
    try:
        targets += saltmanager.run_state(
            "I@keystone:server", 'test.ping')[0]['return'][0].keys()
        targets += saltmanager.run_state(
            "I@nginx:server and not I@salt:master",
            "test.ping")[0]['return'][0].keys()
        telemetry_exists = saltmanager.get_single_pillar(
            "I@salt:master",
            "_param:openstack_telemetry_hostname")
        barbican_exists = saltmanager.get_single_pillar(
            "I@salt:master",
            "_param:barbican_enabled")
    except BaseException as err:
        LOG.warning("Can't retrieve data from Salt. "
                    "Maybe the cluster is not deployed completely. "
                    "Err: {}".format(err))

    # check for Manila existence
    # if saltmanager.get_single_pillar("I@salt:master",
    #                                  "_param:manila_service_protocol"):
    #     targets.append('share*')

    # check for Tenant Telemetry existence
    if telemetry_exists:
        targets.append('mdb*')

    # check for Barbican existence
    if barbican_exists:
        targets.append('kmn*')
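    # Example of the resulting list (node names are illustrative only):
    #   ['ctl01.cluster.local', 'ctl02.cluster.local', 'prx01.cluster.local',
    #    'mdb*', 'kmn*']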
    return targets


@pytest.fixture(scope='class')
def dont_switch_to_proposed(request):
    return request.config.getoption("--dont-switch-to-proposed")


@pytest.fixture(scope='class')
def switch_to_proposed_pipelines(reclass_actions, salt_actions,
                                 dont_switch_to_proposed):
    if dont_switch_to_proposed:
        return True

    reclass = reclass_actions
    proposed_repo = "http://mirror.mirantis.com/update/proposed/"
    repo_param = "parameters._param.linux_system_repo_update_url"

    proposed_pipeline_branch = "release/proposed/2019.2.0"
    pipeline_branch_param = "parameters._param.jenkins_pipelines_branch"
    infra_yml = "cluster/*/infra/init.yml"

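    # Nothing to do if the model already points at the proposed repo and
    # pipeline branch.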
    LOG.info("Check reclass has release/proposed/2019.2.0 branches")
    if reclass.get_key(pipeline_branch_param,
                       infra_yml) == proposed_pipeline_branch \
            and reclass.get_key(repo_param, infra_yml) == proposed_repo:
        return True

    LOG.info("Switch to release/proposed/2019.2.0 branches")
    reclass.add_key(pipeline_branch_param, proposed_pipeline_branch, infra_yml)

    reclass.add_key(repo_param, proposed_repo, infra_yml)
    reclass.add_key(repo_param, proposed_repo, "cluster/*/openstack/init.yml")
    reclass.add_key(repo_param, proposed_repo, "cluster/*/stacklight/init.yml")
    reclass.add_key(repo_param, proposed_repo, "cluster/*/ceph/init.yml")

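    # Propagate the new parameters to all minions and re-render the Jenkins
    # jobs from the proposed pipeline branch.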
    salt_actions.run_state("*", "saltutil.refresh_pillar")
    salt_actions.enforce_state("*", "salt.minion")
    salt_actions.enforce_state("I@jenkins:client", "jenkins.client")


@pytest.fixture
def wa_for_galera_clustercheck_password_prod35705(reclass_actions,
                                                  salt_actions):
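    """Ensure galera_clustercheck_password is defined (PROD-35705).

    If the pillar is missing, a placeholder 32-character password is added
    to the cluster secrets and the galera and haproxy states are re-applied.
    """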
    tgt = "I@galera:master or I@galera:slave"
    if not salt_actions.get_pillar(tgt,
                                   "_param:galera_clustercheck_password")[0]:
        reclass_actions.add_key(
            "parameters._param.galera_clustercheck_password",
            "a"*32,
            "cluster/*/infra/secrets.yml")
        salt_actions.run_state(tgt, "saltutil.refresh_pillar")
        salt_actions.enforce_state(tgt, "galera")
        salt_actions.enforce_state(tgt, "haproxy")
        reclass_actions.commit(
            "[from TCP-QA] Add galera_clustercheck_password")
    else:
        LOG.info("Skipping WA for Galera Clustercheck Password")


@pytest.fixture
def wa_for_alerta_password_prod35958(reclass_actions,
                                     salt_actions):
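    """Ensure the Alerta admin API key is defined (PROD-35958).

    If the pillar is missing, a placeholder key is added to the cluster
    secrets and the related StackLight states are re-applied.
    """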

    if not salt_actions.get_pillar("I@prometheus:alerta",
                                   "_param:alerta_admin_api_key_generated")[0]:
        reclass_actions.add_key(
            "parameters._param.alerta_admin_api_key_generated",
            "a"*32,
            "cluster/*/infra/secrets.yml")
        reclass_actions.add_key(
            "parameters._param.alerta_admin_key",
            "${_param:alerta_admin_api_key_generated}",
            "cluster/*/stacklight/init.yml")
        reclass_actions.commit("[from TCP-QA] Add alerta_admin_key")
        salt_actions.run_state(
            "I@prometheus:alerta or I@prometheus:alertmanager",
            "saltutil.refresh_pillar")
        salt_actions.enforce_state(
            "I@prometheus:alerta", "prometheus.alerta")
        salt_actions.enforce_state(
            "I@prometheus:alertmanager", "prometheus.alertmanager")
        salt_actions.enforce_state(
            "I@prometheus:alerta or I@prometheus:alertmanager",
            "docker.client")
    else:
        LOG.info("Skipping WA for Alerta API key")


@pytest.fixture(scope='class')
def enable_openstack_update(reclass_actions, salt_actions):
    param = "parameters._param.openstack_upgrade_enabled"
    context_file = "cluster/*/infra/init.yml"

    LOG.info("Enable openstack_upgrade_enabled in reclass")
    reclass_actions.add_bool_key(param, "True", context_file)
    salt_actions.run_state("*", "saltutil.refresh_pillar")
    yield True
    LOG.info("Disable openstack_upgrade_enabled in reclass")
    reclass_actions.add_bool_key(param, "False", context_file)
    salt_actions.run_state("*", "saltutil.refresh_pillar")


@pytest.mark.usefixtures("switch_to_proposed_pipelines",
                         "wa_for_galera_clustercheck_password_prod35705",
                         "wa_for_alerta_password_prod35958")
class TestUpdateMcpCluster(object):
    """
    Following the steps in
    https://docs.mirantis.com/mcp/master/mcp-operations-guide/update-upgrade/minor-update.html#minor-update
    """

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_drivetrain(self, salt_actions, drivetrain_actions,
                               show_step, _):
        """Updating DriveTrain component to release/proposed/2019.2.0 version

        Scenario:
            1. Add workaround for PROD-32751
            2. Run job git-mirror-downstream-mk-pipelines
            3. Run job git-mirror-downstream-pipeline-library
            4. If the jobs pass, start 'Deploy - upgrade MCP Drivetrain'

        Duration: ~70 min
        """
        salt = salt_actions
        dt = drivetrain_actions

        # #################### Add workaround for PROD-32751 #################
        show_step(1)

        # FIXME: workaround for PROD-32751
        salt.cmd_run("cfg01*", "cd /srv/salt/reclass; git add -u && \
                     git commit --allow-empty -m 'Cluster model update'")

        # ################### Downstream mk-pipelines ########################
        show_step(2)
        job_name = 'git-mirror-downstream-mk-pipelines'
        job_parameters = {
            'BRANCHES': 'release/proposed/2019.2.0'
        }
        job_result, job_description = dt.start_job_on_jenkins(
            job_name=job_name,
            job_parameters=job_parameters,
            verbose=True)

        assert job_result == 'SUCCESS', job_description

        # ################### Downstream pipeline-library ####################
        show_step(3)
        job_name = 'git-mirror-downstream-pipeline-library'
        job_parameters = {
            'BRANCHES': 'release/proposed/2019.2.0'
        }
        job_result, job_description = dt.start_job_on_jenkins(
            job_name=job_name,
            job_parameters=job_parameters,
            verbose=True)

        assert job_result == 'SUCCESS', job_description

        # ################### Start 'Deploy - upgrade MCP Drivetrain' job ####
        show_step(4)

        job_name = 'upgrade-mcp-release'
        job_parameters = {
            'GIT_REFSPEC': 'release/proposed/2019.2.0',
            'MK_PIPELINES_REFSPEC': 'release/proposed/2019.2.0',
            'TARGET_MCP_VERSION': '2019.2.0'
        }
        job_result, job_description = dt.start_job_on_jenkins(
            job_name=job_name,
            job_parameters=job_parameters,
            verbose=True,
            build_timeout=4 * 60 * 60)

        assert job_result == 'SUCCESS', job_description

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_glusterfs(self, salt_actions, reclass_actions,
                              drivetrain_actions, show_step, _):
        """ Upgrade GlusterFS
        Scenario:
            1. In infra/init.yml in Reclass, add the glusterfs_version
               parameter
            2. Start linux.system.repo state
            3. Start "update-glusterfs" job
            4. Check version for GlusterFS servers
            5. Check version for GlusterFS clients

        """
        salt = salt_actions
        reclass = reclass_actions
        dt = drivetrain_actions

        # ############## Change reclass ######################################
        show_step(1)
        reclass.add_key(
            "parameters._param.linux_system_repo_mcp_glusterfs_version_number",
            "5",
            "cluster/*/infra/init.yml"
        )
        # ################# Run linux.system state ###########################
        show_step(2)
        salt.enforce_state("*", "linux.system.repo")

        # ############## Start update-glusterfs job ##########################
        show_step(3)
        job_name = 'update-glusterfs'

        job_result, job_description = dt.start_job_on_jenkins(
            job_name=job_name,
            build_timeout=40 * 60)

        assert job_result == 'SUCCESS', job_description

        # ################ Check GlusterFS version for servers ##############
        show_step(4)
        gluster_server_versions_by_nodes = salt.cmd_run(
            "I@glusterfs:server",
            "glusterd --version|head -n1")[0]

        assert has_only_similar(gluster_server_versions_by_nodes), \
            gluster_server_versions_by_nodes

        # ################ Check GlusterFS version for clients ##############
        show_step(5)
        gluster_client_versions_by_nodes = salt.cmd_run(
            "I@glusterfs:client",
            "glusterfs --version|head -n1")[0]

        assert has_only_similar(gluster_client_versions_by_nodes), \
            gluster_client_versions_by_nodes

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_galera(self, salt_actions, reclass_actions,
                           drivetrain_actions, show_step, _):
        """ Upgrade Galera automatically

        Scenario:
            1. Include the Galera upgrade pipeline job to DriveTrain
            2. Apply the jenkins.client state on the Jenkins nodes
            3. Set the openstack_upgrade_enabled parameter to true
            4. Refresh pillars
            5. Add repositories with new Galera packages
            6. Start job from Jenkins
        """
        salt = salt_actions
        reclass = reclass_actions
        dt = drivetrain_actions
        # ################### Enable pipeline ################################
        show_step(1)
        reclass.add_class(
            "system.jenkins.client.job.deploy.update.upgrade_galera",
            "cluster/*/cicd/control/leader.yml")
        show_step(2)
        salt.enforce_state("I@jenkins:client", "jenkins.client")

        # ############### Enable automatic upgrade ###########################
        show_step(3)
        reclass.add_bool_key("parameters._param.openstack_upgrade_enabled",
                             "True",
                             "cluster/*/infra/init.yml")

        show_step(4)
        salt.run_state("dbs*", "saltutil.refresh_pillar")

        # ############# Add repositories with new Galera packages ############
        show_step(5)
        salt.enforce_state("dbs*", "linux.system.repo")
        salt.enforce_state("cfg*", "salt.master")

        # ############ Start deploy-upgrade-galera job from Jenkins ##########
        show_step(6)

        job_name = 'deploy-upgrade-galera'
        job_parameters = {
            'INTERACTIVE': 'false'
        }

        job_result, job_description = dt.start_job_on_jenkins(
            job_name=job_name,
            job_parameters=job_parameters,
            build_timeout=40 * 60)

        assert job_result == 'SUCCESS', job_description

    @pytest.fixture
    def disable_automatic_failover_neutron_for_test(self, salt_actions):
        """
        On each OpenStack controller node, modify the neutron.conf file
        and restart the neutron-server service
        """

        def comment_line(node, file_name, word):
            """
            Adds '#' before the specific line in a specific file

            :param node: string, salt target of the node with the file
            :param file_name: string, full path to the file
            :param word: string, beginning of the line to be commented out
            :return: None
            """
            salt_actions.cmd_run(node,
                                 "sed -i 's/^{word}/#{word}/' {file}".
                                 format(word=word,
                                        file=file_name))

        def add_line(node, file_name, line):
            """
            Appends a line to the end of the file

            :param node: string, salt target of the node with the file
            :param file_name: string, full path to the file
            :param line: string, line that should be added
            :return: None
            """
            salt_actions.cmd_run(node, "echo {line} >> {file}".format(
                line=line,
                file=file_name))

        neutron_conf = '/etc/neutron/neutron.conf'
        neutron_server = "I@neutron:server"
        # ######## Create backup for config file #######################
        salt_actions.cmd_run(
            neutron_server,
            "cp -p {file} {file}.backup".format(file=neutron_conf))

        # ## Change parameters in neutron.conf
        comment_line(neutron_server, neutron_conf,
                     "allow_automatic_l3agent_failover")
        comment_line(neutron_server, neutron_conf,
                     "allow_automatic_dhcp_failover")
        add_line(neutron_server, neutron_conf,
                 "allow_automatic_dhcp_failover = false")
        add_line(neutron_server, neutron_conf,
                 "allow_automatic_l3agent_failover = false")

        # ## Apply the changed config to the neutron-server service
        result = salt_actions.cmd_run(neutron_server,
                                      "service neutron-server restart")
        # TODO: add check that neutron-server is up and running
        yield result
        # ## Revert file changes
        salt_actions.cmd_run(
            neutron_server,
            "cp -p {file}.backup {file}".format(file=neutron_conf))
        salt_actions.cmd_run(neutron_server,
                             "service neutron-server restart")
    @pytest.fixture
    def disable_neutron_agents_for_test(self, salt_actions):
        """
        Disable the neutron agents before the test and
        enable them after the test
        """
        result = salt_actions.cmd_run("I@neutron:server", """
            service neutron-dhcp-agent stop && \
            service neutron-l3-agent stop && \
            service neutron-metadata-agent stop && \
            service neutron-openvswitch-agent stop
            """)
        yield result
        # Re-enable the agents after the test
        salt_actions.cmd_run("I@neutron:server", """
            service neutron-dhcp-agent start && \
            service neutron-l3-agent start && \
            service neutron-metadata-agent start && \
            service neutron-openvswitch-agent start
            """)
        # TODO: add check that all services are UP and running

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_rabbit(self, salt_actions, reclass_actions,
                           drivetrain_actions, show_step, _,
                           disable_automatic_failover_neutron_for_test,
                           disable_neutron_agents_for_test):
        """ Updates RabbitMQ
        Scenario:
            1. Include the RabbitMQ upgrade pipeline job to DriveTrain
            2. Add repositories with new RabbitMQ packages
            3. Start Deploy - upgrade RabbitMQ pipeline

        Updating RabbitMQ should be completed before the OpenStack update
        process starts
        """
        salt = salt_actions
        reclass = reclass_actions
        dt = drivetrain_actions

        # ####### Include the RabbitMQ upgrade pipeline job to DriveTrain ####
        show_step(1)
        reclass.add_class(
            "system.jenkins.client.job.deploy.update.upgrade_rabbitmq",
            "cluster/*/cicd/control/leader.yml")
        salt.enforce_state("I@jenkins:client", "jenkins.client")

        reclass.add_bool_key("parameters._param.openstack_upgrade_enabled",
                             "True",
                             "cluster/*/infra/init.yml")
        salt.run_state("I@rabbitmq:server", "saltutil.refresh_pillar")

        # ########### Add repositories with new RabbitMQ packages ############
        show_step(2)
        salt.enforce_state("I@rabbitmq:server", "linux.system.repo")

        # ########### Start Deploy - upgrade RabbitMQ pipeline ###############
        show_step(3)
        job_parameters = {
            'INTERACTIVE': 'false'
        }

        job_result, job_description = dt.start_job_on_jenkins(
            job_name='deploy-upgrade-rabbitmq',
            job_parameters=job_parameters,
            build_timeout=40 * 60
        )
        assert job_result == 'SUCCESS', job_description
Hanna Arhipova | d35a29b | 2019-09-04 13:24:06 +0300 | [diff] [blame] | 492 | |
| 493 | @pytest.mark.grab_versions |
| 494 | @pytest.mark.parametrize("_", [settings.ENV_NAME]) |
| 495 | @pytest.mark.run_mcp_update |
| 496 | def test_update_ceph(self, salt_actions, drivetrain_actions, show_step, _): |
| 497 | """ Updates Ceph to the latest minor version |
| 498 | |
| 499 | Scenario: |
| 500 | 1. Add workaround for unhealth Ceph |
| 501 | 2. Start ceph-upgrade job with default parameters |
| 502 | 3. Check Ceph version for all nodes |
| 503 | |
| 504 | https://docs.mirantis.com/mcp/master/mcp-operations-guide/update-upgrade/minor-update/ceph-update.html |
| 505 | """ |
| 506 | salt = salt_actions |
| 507 | dt = drivetrain_actions |
| 508 | |
| 509 | # ###################### Add workaround for unhealth Ceph ############ |
| 510 | show_step(1) |
| 511 | salt.cmd_run("I@ceph:radosgw", |
| 512 | "ceph config set 'mon pg warn max object skew' 20") |
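        # The option above relaxes the per-pool object-count skew warning so
        # that a pre-existing imbalance does not keep the cluster in
        # HEALTH_WARN while the update runs.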
        # ###################### Start ceph-upgrade pipeline #################
        show_step(2)
        job_parameters = {}

        job_result, job_description = dt.start_job_on_jenkins(
            job_name='ceph-update',
            job_parameters=job_parameters)

        assert job_result == 'SUCCESS', job_description

        # ########## Verify Ceph version #####################################
        show_step(3)

        ceph_version_by_nodes = salt.cmd_run(
            "I@ceph:* and not I@ceph:monitoring and not I@ceph:backup:server",
            "ceph version")[0]

        assert has_only_similar(ceph_version_by_nodes), ceph_version_by_nodes

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_stacklight(self, _, drivetrain_actions):
        """ Update packages for Stacklight
        Scenario:
            1. Start Deploy - upgrade Stacklight job
        """
        drivetrain = drivetrain_actions

        job_parameters = {
            "STAGE_UPGRADE_DOCKER_COMPONENTS": True,
            "STAGE_UPGRADE_ES_KIBANA": True,
            "STAGE_UPGRADE_SYSTEM_PART": True
        }
        job_result, job_description = drivetrain.start_job_on_jenkins(
            job_name="stacklight-upgrade",
            job_parameters=job_parameters)

        assert job_result == 'SUCCESS', job_description


@pytest.mark.usefixtures("switch_to_proposed_pipelines",
                         "enable_openstack_update")
class TestOpenstackUpdate(object):
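    """Update the OpenStack control plane, gateways and computes via the
    DriveTrain upgrade jobs."""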

    @pytest.mark.grab_versions
    @pytest.mark.run_mcp_update
    def test__pre_update__enable_pipeline_job(self,
                                              reclass_actions, salt_actions,
                                              show_step):
        """ Enable pipeline in the Drivetrain

        Scenario:
            1. Add deploy.update.* classes to the reclass
            2. Start jenkins.client salt state

        """
        salt = salt_actions
        reclass = reclass_actions
        show_step(1)
        reclass.add_class("system.jenkins.client.job.deploy.update.upgrade",
                          "cluster/*/cicd/control/leader.yml")

        reclass.add_class(
            "system.jenkins.client.job.deploy.update.upgrade_ovs_gateway",
            "cluster/*/cicd/control/leader.yml")

        reclass.add_class(
            "system.jenkins.client.job.deploy.update.upgrade_compute",
            "cluster/*/cicd/control/leader.yml")

        show_step(2)
        r, errors = salt.enforce_state("I@jenkins:client", "jenkins.client")
        assert errors is None

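    # The target list below is built once at collection time by
    # get_control_plane_targets(), giving one test instance per target.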
    @pytest.mark.grab_versions
    @pytest.mark.parametrize('target', get_control_plane_targets())
    @pytest.mark.run_mcp_update
    def test__update__control_plane(self, drivetrain_actions, target):
        """Start 'Deploy - upgrade control VMs' for a specific node
        """
        job_parameters = {
            "TARGET_SERVERS": target,
            "OS_DIST_UPGRADE": True,
            "UPGRADE_SALTSTACK": False,
            "OS_UPGRADE": True,
            "INTERACTIVE": False}
        job_result, job_description = drivetrain_actions.start_job_on_jenkins(
            job_name="deploy-upgrade-control",
            job_parameters=job_parameters)

        assert job_result == 'SUCCESS', job_description

    @pytest.mark.grab_versions
    @pytest.mark.run_mcp_update
    def test__update__data_plane(self, drivetrain_actions, salt_actions):
        """Start 'Deploy - upgrade OVS gateway'
        """
        if not salt_actions.cmd_run("gtw*", "test.ping")[0].keys():
            pytest.skip("This deployment doesn't have gtw* nodes")
        job_parameters = {
            "OS_DIST_UPGRADE": True,
            "OS_UPGRADE": True,
            "INTERACTIVE": False}
        job_result, job_description = drivetrain_actions.start_job_on_jenkins(
            job_name="deploy-upgrade-ovs-gateway",
            job_parameters=job_parameters)

        assert job_result == 'SUCCESS', job_description

    @pytest.mark.grab_versions
    @pytest.mark.run_mcp_update
    def test__update__computes(self, drivetrain_actions):
        """Start 'Deploy - upgrade computes'
        """
        job_parameters = {
            "OS_DIST_UPGRADE": True,
            "OS_UPGRADE": True,
            "INTERACTIVE": False}
        job_result, job_description = drivetrain_actions.start_job_on_jenkins(
            job_name="deploy-upgrade-compute",
            job_parameters=job_parameters)

        assert job_result == 'SUCCESS', job_description