import pytest
import sys
import os

from tcp_tests import logger
from tcp_tests import settings

sys.path.append(os.getcwd())
try:
    from tcp_tests.fixtures import config_fixtures
    from tcp_tests.managers import underlay_ssh_manager
    from tcp_tests.managers import saltmanager as salt_manager
except ImportError:
    print("ImportError: Run the application from the tcp-qa directory or "
          "set the PYTHONPATH environment variable to the directory which "
          "contains ./tcp_tests")
    sys.exit(1)
LOG = logger.logger


def has_only_similar(values_by_nodes):
    """
    :param values_by_nodes: dict
    :return: bool, True if all values in the dict are equal
    """
    values = list(values_by_nodes.values())
    return all(value == values[0] for value in values)
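# Illustrative usage (hypothetical node names and versions):
#   has_only_similar({'cmn01': '14.2.2', 'cmn02': '14.2.2'})  -> True
#   has_only_similar({'cmn01': '14.2.2', 'cmn02': '14.2.5'})  -> False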


def get_control_plane_targets():
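    """Collect Salt targets for the OpenStack control plane.

    Queries the Salt master for keystone and nginx proxy nodes and, when
    the model defines them, adds Tenant Telemetry (mdb*) and Barbican
    (kmn*) targets.
    """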
    config = config_fixtures.config()
    underlay = underlay_ssh_manager.UnderlaySSHManager(config)
    saltmanager = salt_manager.SaltManager(config, underlay)
    targets = list()
    telemetry_exists = False
    barbican_exists = False
    try:
        targets += saltmanager.run_state(
            "I@keystone:server", 'test.ping')[0]['return'][0].keys()
        targets += saltmanager.run_state(
            "I@nginx:server and not I@salt:master",
            "test.ping")[0]['return'][0].keys()
        telemetry_exists = saltmanager.get_single_pillar(
            "I@salt:master",
            "_param:openstack_telemetry_hostname")
        barbican_exists = saltmanager.get_single_pillar(
            "I@salt:master",
            "_param:barbican_enabled")
    except BaseException as err:
        LOG.warning("Can't retrieve data from Salt. "
                    "Maybe the cluster is not deployed completely. "
                    "Err: {}".format(err))

    # check for Manila existence
    # if saltmanager.get_single_pillar("I@salt:master",
    #                                  "_param:manila_service_protocol"):
    #     targets.append('share*')

    # check for Tenant Telemetry existence
    if telemetry_exists:
        targets.append('mdb*')

    # check for Barbican existence
    if barbican_exists:
        targets.append('kmn*')
    return targets


@pytest.fixture(scope='class')
def switch_to_proposed_pipelines(reclass_actions, salt_actions):
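    """Point the cluster model at the proposed 2019.2.0 artifacts.

    Sets the update repository URL and the Jenkins pipelines branch to
    release/proposed/2019.2.0 in reclass (skipping the switch if both are
    already set), then refreshes pillars and re-applies the salt.minion
    and jenkins.client states.
    """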
    reclass = reclass_actions
    proposed_repo = "http://mirror.mirantis.com/update/proposed/"
    repo_param = "parameters._param.linux_system_repo_update_url"

    proposed_pipeline_branch = "release/proposed/2019.2.0"
    pipeline_branch_param = "parameters._param.jenkins_pipelines_branch"
    infra_yml = "cluster/*/infra/init.yml"

    LOG.info("Check reclass has release/proposed/2019.2.0 branches")
    if reclass.get_key(pipeline_branch_param,
                       infra_yml) == proposed_pipeline_branch \
            and reclass.get_key(repo_param, infra_yml) == proposed_repo:
        return True

    LOG.info("Switch to release/proposed/2019.2.0 branches")
    reclass.add_key(pipeline_branch_param, proposed_pipeline_branch, infra_yml)

    reclass.add_key(repo_param, proposed_repo, infra_yml)
    reclass.add_key(repo_param, proposed_repo, "cluster/*/openstack/init.yml")
    reclass.add_key(repo_param, proposed_repo, "cluster/*/stacklight/init.yml")
    reclass.add_key(repo_param, proposed_repo, "cluster/*/ceph/init.yml")

    salt_actions.run_state("*", "saltutil.refresh_pillar")
    salt_actions.enforce_state("*", "salt.minion")
    salt_actions.enforce_state("I@jenkins:client", "jenkins.client")


@pytest.fixture
def wa_for_sphinx_prod34406(reclass_actions, salt_actions):
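    """Workaround for PROD-34406: pin sphinx_proxy_password_generated to a
    fixed 32-character value in the cluster secrets, then refresh pillars
    on the nginx nodes.
    """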
    reclass_actions.add_key(
        "parameters._param.sphinx_proxy_password_generated",
        "a" * 32,
        "cluster/*/infra/secrets.yml")
    salt_actions.run_state("I@nginx:server",
                           "saltutil.refresh_pillar")


@pytest.fixture(scope='class')
def enable_openstack_update(reclass_actions, salt_actions):
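    """Temporarily enable the OpenStack upgrade flag.

    Sets parameters._param.openstack_upgrade_enabled to True in reclass and
    refreshes pillars for the duration of the test class, then reverts the
    flag to False afterwards.
    """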
    param = "parameters._param.openstack_upgrade_enabled"
    context_file = "cluster/*/infra/init.yml"

    LOG.info("Enable openstack_upgrade_enabled in reclass")
    reclass_actions.add_bool_key(param, "True", context_file)
    salt_actions.run_state("*", "saltutil.refresh_pillar")
    yield True
    LOG.info("Disable openstack_upgrade_enabled in reclass")
    reclass_actions.add_bool_key(param, "False", context_file)
    salt_actions.run_state("*", "saltutil.refresh_pillar")


@pytest.mark.usefixtures("switch_to_proposed_pipelines",
                         "wa_for_sphinx_prod34406")
class TestUpdateMcpCluster(object):
    """
    Following the steps in
    https://docs.mirantis.com/mcp/master/mcp-operations-guide/update-upgrade/minor-update.html#minor-update
    """

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_drivetrain(self, salt_actions, drivetrain_actions,
                               show_step, _):
        """Updating DriveTrain component to release/proposed/2019.2.0 version

        Scenario:
            1. Add workaround for PROD-32751
            2. Run job git-mirror-downstream-mk-pipelines
            3. Run job git-mirror-downstream-pipeline-library
            4. If the jobs pass, start 'Deploy - upgrade MCP Drivetrain'

        Duration: ~70 min
        """
        salt = salt_actions
        dt = drivetrain_actions

        # #################### Add workaround for PROD-32751 #################
        show_step(1)

        # FIXME: workaround for PROD-32751
        salt.cmd_run("cfg01*", "cd /srv/salt/reclass; git add -u && \
                    git commit --allow-empty -m 'Cluster model update'")

        # ################### Downstream mk-pipelines ########################
        show_step(2)
        job_name = 'git-mirror-downstream-mk-pipelines'
        job_parameters = {
            'BRANCHES': 'release/proposed/2019.2.0'
        }
        update_pipelines = dt.start_job_on_jenkins(
            job_name=job_name,
            job_parameters=job_parameters)

        assert update_pipelines == 'SUCCESS'

        # ################### Downstream pipeline-library ####################
        show_step(3)
        job_name = 'git-mirror-downstream-pipeline-library'
        job_parameters = {
            'BRANCHES': 'release/proposed/2019.2.0'
        }
        update_pipeline_library = dt.start_job_on_jenkins(
            job_name=job_name,
            job_parameters=job_parameters)

        assert update_pipeline_library == 'SUCCESS'

        # ################### Start 'Deploy - upgrade MCP Drivetrain' job ####
        show_step(4)

        job_name = 'upgrade-mcp-release'
        job_parameters = {
            'GIT_REFSPEC': 'release/proposed/2019.2.0',
            'MK_PIPELINES_REFSPEC': 'release/proposed/2019.2.0',
            'TARGET_MCP_VERSION': '2019.2.0'
        }
        update_drivetrain = dt.start_job_on_jenkins(
            job_name=job_name,
            job_parameters=job_parameters,
            build_timeout=3 * 60 * 60)

        assert update_drivetrain == 'SUCCESS'

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_glusterfs(self, salt_actions, reclass_actions,
                              drivetrain_actions, show_step, _):
        """ Upgrade GlusterFS
        Scenario:
            1. In infra/init.yml in Reclass, add the glusterfs_version parameter
            2. Start linux.system.repo state
            3. Start "update-glusterfs" job
            4. Check version for GlusterFS servers
            5. Check version for GlusterFS clients

        """
        salt = salt_actions
        reclass = reclass_actions
        dt = drivetrain_actions

        # ############## Change reclass ######################################
        show_step(1)
        reclass.add_key(
            "parameters._param.linux_system_repo_mcp_glusterfs_version_number",
            "5",
            "cluster/*/infra/init.yml"
        )
        # ################# Run linux.system state ###########################
        show_step(2)
        salt.enforce_state("*", "linux.system.repo")

        # ############## Start update-glusterfs job ##########################
        show_step(3)
        job_name = 'update-glusterfs'

        update_glusterfs = dt.start_job_on_jenkins(
            job_name=job_name,
            build_timeout=40 * 60)

        assert update_glusterfs == 'SUCCESS'

        # ################ Check GlusterFS version for servers ##############
        show_step(4)
        gluster_server_versions_by_nodes = salt.cmd_run(
            "I@glusterfs:server",
            "glusterd --version|head -n1")[0]

        assert has_only_similar(gluster_server_versions_by_nodes), \
            gluster_server_versions_by_nodes

        # ################ Check GlusterFS version for clients ##############
        show_step(5)
        gluster_client_versions_by_nodes = salt.cmd_run(
            "I@glusterfs:client",
            "glusterfs --version|head -n1")[0]

        assert has_only_similar(gluster_client_versions_by_nodes), \
            gluster_client_versions_by_nodes

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_galera(self, salt_actions, reclass_actions,
                           drivetrain_actions, show_step, _):
        """ Upgrade Galera automatically

        Scenario:
            1. Include the Galera upgrade pipeline job in DriveTrain
            2. Apply the jenkins.client state on the Jenkins nodes
            3. Set the openstack_upgrade_enabled parameter to true
            4. Refresh pillars
            5. Add repositories with new Galera packages
            6. Start job from Jenkins
        """
        salt = salt_actions
        reclass = reclass_actions
        dt = drivetrain_actions
        # ################### Enable pipeline ################################
        show_step(1)
        reclass.add_class(
            "system.jenkins.client.job.deploy.update.upgrade_galera",
            "cluster/*/cicd/control/leader.yml")
        show_step(2)
        salt.enforce_state("I@jenkins:client", "jenkins.client")

        # ############### Enable automatic upgrade ###########################
        show_step(3)
        reclass.add_bool_key("parameters._param.openstack_upgrade_enabled",
                             "True",
                             "cluster/*/infra/init.yml")

        show_step(4)
        salt.run_state("dbs*", "saltutil.refresh_pillar")

        # ############# Add repositories with new Galera packages ############
        show_step(5)
        salt.enforce_state("dbs*", "linux.system.repo")
        salt.enforce_state("cfg*", "salt.master")

        # #################### Start deploy-upgrade-galera job ###############
        show_step(6)

        job_name = 'deploy-upgrade-galera'
        job_parameters = {
            'INTERACTIVE': 'false'
        }

        update_galera = dt.start_job_on_jenkins(
            job_name=job_name,
            job_parameters=job_parameters,
            build_timeout=40 * 60)

        assert update_galera == 'SUCCESS'

    @pytest.fixture
    def disable_automatic_failover_neutron_for_test(self, salt_actions):
        """
        On each OpenStack controller node, modify the neutron.conf file
        and restart the neutron-server service
        """

        def comment_line(node, file_name, word):
            """
            Adds '#' before the lines in the specified file that start
            with the given word

            :param node: string, salt target of the node where the file is
                located
            :param file_name: string, full path to the file
            :param word: string, the beginning of the line which should be
                commented
            :return: None
            """
            salt_actions.cmd_run(node,
                                 "sed -i 's/^{word}/#{word}/' {file}".
                                 format(word=word,
                                        file=file_name))
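        # Example: comment_line("I@neutron:server",
        #                       "/etc/neutron/neutron.conf",
        #                       "allow_automatic_l3agent_failover")
        # prefixes every line starting with that option name with '#'.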

        def add_line(node, file_name, line):
            """
            Appends a line to the end of the file

            :param node: string, salt target of the node where the file is
                located
            :param file_name: string, full path to the file
            :param line: string, line that should be added
            :return: None
            """
            salt_actions.cmd_run(node, "echo {line} >> {file}".format(
                line=line,
                file=file_name))

        neutron_conf = '/etc/neutron/neutron.conf'
        neutron_server = "I@neutron:server"
        # ######## Create backup for config file #############################
        salt_actions.cmd_run(
            neutron_server,
            "cp -p {file} {file}.backup".format(file=neutron_conf))

        # ## Change parameters in neutron.conf
        comment_line(neutron_server, neutron_conf,
                     "allow_automatic_l3agent_failover")
        comment_line(neutron_server, neutron_conf,
                     "allow_automatic_dhcp_failover")
        add_line(neutron_server, neutron_conf,
                 "allow_automatic_dhcp_failover = false")
        add_line(neutron_server, neutron_conf,
                 "allow_automatic_l3agent_failover = false")

        # ## Apply changed config to the neutron-server service
        result = salt_actions.cmd_run(neutron_server,
                                      "service neutron-server restart")
        # TODO: add check that neutron-server is up and running
        yield result
        # ## Revert file changes
        salt_actions.cmd_run(
            neutron_server,
            "cp -p {file}.backup {file}".format(file=neutron_conf))
        salt_actions.cmd_run(neutron_server,
                             "service neutron-server restart")

    @pytest.fixture
    def disable_neutron_agents_for_test(self, salt_actions):
        """
        Disable the neutron services before the test and
        enable them after the test
        """
        result = salt_actions.cmd_run("I@neutron:server", """
            service neutron-dhcp-agent stop && \
            service neutron-l3-agent stop && \
            service neutron-metadata-agent stop && \
            service neutron-openvswitch-agent stop
            """)
        yield result
        # Teardown: start the neutron agents again
        salt_actions.cmd_run("I@neutron:server", """
            service neutron-dhcp-agent start && \
            service neutron-l3-agent start && \
            service neutron-metadata-agent start && \
            service neutron-openvswitch-agent start
            """)
        # TODO: add check that all services are UP and running

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_rabbit(self, salt_actions, reclass_actions,
                           drivetrain_actions, show_step, _,
                           disable_automatic_failover_neutron_for_test,
                           disable_neutron_agents_for_test):
        """ Updates RabbitMQ
        Scenario:
            1. Include the RabbitMQ upgrade pipeline job in DriveTrain
            2. Add repositories with new RabbitMQ packages
            3. Start Deploy - upgrade RabbitMQ pipeline

        Updating RabbitMQ should be completed before the OpenStack update
        process starts
        """
        salt = salt_actions
        reclass = reclass_actions
        dt = drivetrain_actions

        # ####### Include the RabbitMQ upgrade pipeline job to DriveTrain ####
        show_step(1)
        reclass.add_class(
            "system.jenkins.client.job.deploy.update.upgrade_rabbitmq",
            "cluster/*/cicd/control/leader.yml")
        salt.enforce_state("I@jenkins:client", "jenkins.client")

        reclass.add_bool_key("parameters._param.openstack_upgrade_enabled",
                             "True",
                             "cluster/*/infra/init.yml")
        salt.run_state("I@rabbitmq:server", "saltutil.refresh_pillar")

        # ########### Add repositories with new RabbitMQ packages ############
        show_step(2)
        salt.enforce_state("I@rabbitmq:server", "linux.system.repo")

        # ########### Start Deploy - upgrade RabbitMQ pipeline ###############
        show_step(3)
        job_parameters = {
            'INTERACTIVE': 'false'
        }

        update_rabbit = dt.start_job_on_jenkins(
            job_name='deploy-upgrade-rabbitmq',
            job_parameters=job_parameters,
            build_timeout=40 * 60
        )
        assert update_rabbit == 'SUCCESS'

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_ceph(self, salt_actions, drivetrain_actions, show_step, _):
        """ Updates Ceph to the latest minor version

        Scenario:
            1. Add workaround for unhealthy Ceph
            2. Start ceph-upgrade job with default parameters
            3. Check Ceph version for all nodes

        https://docs.mirantis.com/mcp/master/mcp-operations-guide/update-upgrade/minor-update/ceph-update.html
        """
        salt = salt_actions
        dt = drivetrain_actions

        # ###################### Add workaround for unhealthy Ceph ###########
        show_step(1)
        salt.cmd_run("I@ceph:radosgw",
                     "ceph config set 'mon pg warn max object skew' 20")
        # ###################### Start ceph-upgrade pipeline #################
        show_step(2)
        job_parameters = {}

        update_ceph = dt.start_job_on_jenkins(
            job_name='ceph-update',
            job_parameters=job_parameters)

        assert update_ceph == 'SUCCESS'

        # ########## Verify Ceph version #####################################
        show_step(3)

        ceph_version_by_nodes = salt.cmd_run(
            "I@ceph:* and not I@ceph:monitoring and not I@ceph:backup:server",
            "ceph version")[0]

        assert has_only_similar(ceph_version_by_nodes), ceph_version_by_nodes

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_stacklight(self, _, drivetrain_actions):
        """ Update packages for Stacklight
        Scenario:
            1. Start Deploy - upgrade Stacklight job
        """
        drivetrain = drivetrain_actions

        job_parameters = {
            "STAGE_UPGRADE_DOCKER_COMPONENTS": True,
            "STAGE_UPGRADE_ES_KIBANA": True,
            "STAGE_UPGRADE_SYSTEM_PART": True
        }
        upgrade_control_pipeline = drivetrain.start_job_on_jenkins(
            job_name="stacklight-upgrade",
            job_parameters=job_parameters)

        assert upgrade_control_pipeline == 'SUCCESS'


@pytest.mark.usefixtures("switch_to_proposed_pipelines",
                         "enable_openstack_update")
class TestOpenstackUpdate(object):

    @pytest.mark.grab_versions
    @pytest.mark.run_mcp_update
    def test__pre_update__enable_pipeline_job(self,
                                              reclass_actions, salt_actions,
                                              show_step):
        """ Enable pipeline in the Drivetrain

        Scenario:
            1. Add deploy.update.* classes to the reclass
            2. Start jenkins.client salt state

        """
        salt = salt_actions
        reclass = reclass_actions
        show_step(1)
        reclass.add_class("system.jenkins.client.job.deploy.update.upgrade",
                          "cluster/*/cicd/control/leader.yml")

        reclass.add_class(
            "system.jenkins.client.job.deploy.update.upgrade_ovs_gateway",
            "cluster/*/cicd/control/leader.yml")

        reclass.add_class(
            "system.jenkins.client.job.deploy.update.upgrade_compute",
            "cluster/*/cicd/control/leader.yml")

        show_step(2)
        r, errors = salt.enforce_state("I@jenkins:client", "jenkins.client")
        assert errors is None

    @pytest.mark.grab_versions
    @pytest.mark.parametrize('target', get_control_plane_targets())
    @pytest.mark.run_mcp_update
    def test__update__control_plane(self, drivetrain_actions, target):
        """Start 'Deploy - upgrade control VMs' for a specific node
        """
        job_parameters = {
            "TARGET_SERVERS": target,
            "OS_DIST_UPGRADE": True,
            "UPGRADE_SALTSTACK": False,
            "OS_UPGRADE": True,
            "INTERACTIVE": False}
        upgrade_control_pipeline = drivetrain_actions.start_job_on_jenkins(
            job_name="deploy-upgrade-control",
            job_parameters=job_parameters)

        assert upgrade_control_pipeline == 'SUCCESS'

    @pytest.mark.grab_versions
    @pytest.mark.run_mcp_update
    def test__update__data_plane(self, drivetrain_actions, salt_actions):
        """Start 'Deploy - upgrade OVS gateway'
        """
        if not salt_actions.cmd_run("gtw*", "test.ping")[0].keys():
            pytest.skip("This deployment doesn't have gtw* nodes")
        job_parameters = {
            "OS_DIST_UPGRADE": True,
            "OS_UPGRADE": True,
            "INTERACTIVE": False}
        upgrade_data_pipeline = drivetrain_actions.start_job_on_jenkins(
            job_name="deploy-upgrade-ovs-gateway",
            job_parameters=job_parameters)

        assert upgrade_data_pipeline == 'SUCCESS'

    @pytest.mark.grab_versions
    @pytest.mark.run_mcp_update
    def test__update__computes(self, drivetrain_actions):
        """Start 'Deploy - upgrade computes'
        """
        job_parameters = {
            "OS_DIST_UPGRADE": True,
            "OS_UPGRADE": True,
            "INTERACTIVE": False}
        upgrade_compute_pipeline = drivetrain_actions.start_job_on_jenkins(
            job_name="deploy-upgrade-compute",
            job_parameters=job_parameters)

        assert upgrade_compute_pipeline == 'SUCCESS'