import pytest
import sys
import os

from tcp_tests import logger
from tcp_tests import settings

sys.path.append(os.getcwd())
try:
    from tcp_tests.fixtures import config_fixtures
    from tcp_tests.managers import underlay_ssh_manager
    from tcp_tests.managers import saltmanager as salt_manager
except ImportError:
    print("ImportError: Run the application from the tcp-qa directory or "
          "set the PYTHONPATH environment variable to a directory which "
          "contains ./tcp_tests")
    sys.exit(1)

LOG = logger.logger

def has_only_similar(values_by_nodes):
    """
    :param values_by_nodes: dict, typically a mapping of a node name to the
        output of a command on that node
    :return: bool, True if all items in the dict have the same value
    """
    values = list(values_by_nodes.values())
    return all(value == values[0] for value in values)


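# A minimal usage sketch for the helper above (node names and values are
# hypothetical, not taken from a real deployment):
#   has_only_similar({"ctl01": "glusterfs 5.5", "ctl02": "glusterfs 5.5"})
#   -> True
#   has_only_similar({"ctl01": "glusterfs 5.5", "ctl02": "glusterfs 3.8"})
#   -> False

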
def get_control_plane_targets():
    config = config_fixtures.config()
    underlay = underlay_ssh_manager.UnderlaySSHManager(config)
    saltmanager = salt_manager.SaltManager(config, underlay)
    targets = list()
    telemetry_exists = False
    barbican_exists = False
    try:
        targets += saltmanager.run_state(
            "I@keystone:server", 'test.ping')[0]['return'][0].keys()
        targets += saltmanager.run_state(
            "I@nginx:server and not I@salt:master",
            "test.ping")[0]['return'][0].keys()
        telemetry_exists = saltmanager.get_single_pillar(
            "I@salt:master",
            "_param:openstack_telemetry_hostname")
        barbican_exists = saltmanager.get_single_pillar(
            "I@salt:master",
            "_param:barbican_enabled")
    except BaseException as err:
        LOG.warning("Can't retrieve data from Salt. "
                    "Maybe the cluster is not deployed completely. "
                    "Err: {}".format(err))

    # check for Manila existence
    # if saltmanager.get_single_pillar("I@salt:master",
    #                                  "_param:manila_service_protocol"):
    #     targets.append('share*')

    # check for Tenant Telemetry existence
    if telemetry_exists:
        targets.append('mdb*')

    # check for Barbican existence
    if barbican_exists:
        targets.append('kmn*')
    return targets


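# For illustration only: on a deployment with Tenant Telemetry and Barbican
# enabled, the helper above is expected to return something like this
# (all node names are hypothetical):
#   ['ctl01.cluster.local', 'ctl02.cluster.local', 'ctl03.cluster.local',
#    'prx01.cluster.local', 'prx02.cluster.local', 'mdb*', 'kmn*']

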
@pytest.fixture(scope='class')
def switch_to_proposed_pipelines(reclass_actions, salt_actions):
    """Point the cluster model to the proposed update repo and the proposed
    Jenkins pipelines branch, then reapply the affected states.
    """
    reclass = reclass_actions
    proposed_repo = "http://mirror.mirantis.com/update/proposed/"
    repo_param = "parameters._param.linux_system_repo_update_url"

    proposed_pipeline_branch = "release/proposed/2019.2.0"
    pipeline_branch_param = "parameters._param.jenkins_pipelines_branch"
    infra_yml = "cluster/*/infra/init.yml"

    LOG.info("Check that reclass has release/proposed/2019.2.0 branches")
    if reclass.get_key(pipeline_branch_param,
                       infra_yml) == proposed_pipeline_branch \
            and reclass.get_key(repo_param, infra_yml) == proposed_repo:
        return True

    LOG.info("Switch to release/proposed/2019.2.0 branches")
    reclass.add_key(pipeline_branch_param, proposed_pipeline_branch, infra_yml)

    reclass.add_key(repo_param, proposed_repo, infra_yml)
    reclass.add_key(repo_param, proposed_repo, "cluster/*/openstack/init.yml")
    reclass.add_key(repo_param, proposed_repo, "cluster/*/stacklight/init.yml")
    reclass.add_key(repo_param, proposed_repo, "cluster/*/ceph/init.yml")

    salt_actions.run_state("*", "saltutil.refresh_pillar")
    salt_actions.enforce_state("*", "salt.minion")
    salt_actions.enforce_state("I@jenkins:client", "jenkins.client")

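# For reference, a sketch (an assumption, not an exact model dump) of how the
# keys set by the fixture above are expected to look in
# cluster/*/infra/init.yml:
#   parameters:
#     _param:
#       jenkins_pipelines_branch: release/proposed/2019.2.0
#       linux_system_repo_update_url: http://mirror.mirantis.com/update/proposed/
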

@pytest.fixture
def wa_for_galera_clustercheck_password_prod35705(reclass_actions,
                                                  salt_actions):
    """Workaround for PROD-35705: pin galera_clustercheck_password and
    reapply the affected states.
    """
    reclass_actions.add_key(
        "parameters._param.galera_clustercheck_password",
        "a"*32,
        "cluster/*/infra/secrets.yml")
    salt_actions.run_state(
        "I@galera:master or I@galera:slave", "saltutil.refresh_pillar")
    salt_actions.enforce_state(
        "I@galera:master or I@galera:slave", "galera")
    salt_actions.enforce_state(
        "I@galera:master or I@galera:slave", "haproxy")


@pytest.fixture
def wa_for_alerta_password_prod35958(reclass_actions,
                                     salt_actions):
    """Workaround for PROD-35958: set a static Alerta admin API key and
    reapply the affected states.
    """
    reclass_actions.add_key(
        "parameters._param.alerta_admin_api_key_generated",
        "a"*32,
        "cluster/*/infra/secrets.yml")
    salt_actions.run_state(
        "I@prometheus:alerta or I@prometheus:alertmanager",
        "saltutil.refresh_pillar")
    salt_actions.enforce_state(
        "I@prometheus:alerta", "prometheus.alerta")
    salt_actions.enforce_state(
        "I@prometheus:alertmanager", "prometheus.alertmanager")
    salt_actions.enforce_state(
        "I@prometheus:alerta or I@prometheus:alertmanager",
        "docker.client")


@pytest.fixture(scope='class')
def enable_openstack_update(reclass_actions, salt_actions):
    param = "parameters._param.openstack_upgrade_enabled"
    context_file = "cluster/*/infra/init.yml"

    LOG.info("Enable openstack_upgrade_enabled in reclass")
    reclass_actions.add_bool_key(param, "True", context_file)
    salt_actions.run_state("*", "saltutil.refresh_pillar")
    yield True
    LOG.info("Disable openstack_upgrade_enabled in reclass")
    reclass_actions.add_bool_key(param, "False", context_file)
    salt_actions.run_state("*", "saltutil.refresh_pillar")


@pytest.mark.usefixtures("switch_to_proposed_pipelines",
                         "wa_for_galera_clustercheck_password_prod35705",
                         "wa_for_alerta_password_prod35958")
class TestUpdateMcpCluster(object):
    """
    Following the steps in
    https://docs.mirantis.com/mcp/master/mcp-operations-guide/update-upgrade/minor-update.html#minor-update
    """

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_drivetrain(self, salt_actions, drivetrain_actions,
                               show_step, _):
        """Updating DriveTrain component to release/proposed/2019.2.0 version

        Scenario:
            1. Add workaround for PROD-32751
            2. Run job git-mirror-downstream-mk-pipelines
            3. Run job git-mirror-downstream-pipeline-library
            4. If the jobs pass, start 'Deploy - upgrade MCP Drivetrain'

        Duration: ~70 min
        """
        salt = salt_actions
        dt = drivetrain_actions

        # #################### Add workaround for PROD-32751 #################
        show_step(1)

        # FIXME: workaround for PROD-32751
        salt.cmd_run("cfg01*",
                     "cd /srv/salt/reclass; git add -u && "
                     "git commit --allow-empty -m 'Cluster model update'")

        # ################### Downstream mk-pipelines ########################
        show_step(2)
        job_name = 'git-mirror-downstream-mk-pipelines'
        job_parameters = {
            'BRANCHES': 'release/proposed/2019.2.0'
        }
        update_pipelines = dt.start_job_on_jenkins(
            job_name=job_name,
            job_parameters=job_parameters,
            verbose=True)

        assert update_pipelines == 'SUCCESS'

        # ################### Downstream pipeline-library ####################
        show_step(3)
        job_name = 'git-mirror-downstream-pipeline-library'
        job_parameters = {
            'BRANCHES': 'release/proposed/2019.2.0'
        }
        update_pipeline_library = dt.start_job_on_jenkins(
            job_name=job_name,
            job_parameters=job_parameters,
            verbose=True)

        assert update_pipeline_library == 'SUCCESS'

        # ############## Start 'Deploy - upgrade MCP Drivetrain' job #########
        show_step(4)

        job_name = 'upgrade-mcp-release'
        job_parameters = {
            'GIT_REFSPEC': 'release/proposed/2019.2.0',
            'MK_PIPELINES_REFSPEC': 'release/proposed/2019.2.0',
            'TARGET_MCP_VERSION': '2019.2.0'
        }
        update_drivetrain = dt.start_job_on_jenkins(
            job_name=job_name,
            job_parameters=job_parameters,
            verbose=True,
            build_timeout=3 * 60 * 60)

        assert update_drivetrain == 'SUCCESS'

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_glusterfs(self, salt_actions, reclass_actions,
                              drivetrain_actions, show_step, _):
        """ Upgrade GlusterFS
        Scenario:
            1. In infra/init.yml in Reclass, add the glusterfs_version
               parameter
            2. Start linux.system.repo state
            3. Start "update-glusterfs" job
            4. Check version for GlusterFS servers
            5. Check version for GlusterFS clients

        """
        salt = salt_actions
        reclass = reclass_actions
        dt = drivetrain_actions

        # ############## Change reclass ######################################
        show_step(1)
        reclass.add_key(
            "parameters._param.linux_system_repo_mcp_glusterfs_version_number",
            "5",
            "cluster/*/infra/init.yml"
        )
        # ################# Run linux.system state ###########################
        show_step(2)
        salt.enforce_state("*", "linux.system.repo")

        # ############## Start the update-glusterfs job ######################
        show_step(3)
        job_name = 'update-glusterfs'

        update_glusterfs = dt.start_job_on_jenkins(
            job_name=job_name,
            build_timeout=40 * 60)

        assert update_glusterfs == 'SUCCESS'

        # ################ Check GlusterFS version for servers ###############
        show_step(4)
        gluster_server_versions_by_nodes = salt.cmd_run(
            "I@glusterfs:server",
            "glusterd --version|head -n1")[0]

        assert has_only_similar(gluster_server_versions_by_nodes), \
            gluster_server_versions_by_nodes

        # ################ Check GlusterFS version for clients ###############
        show_step(5)
        gluster_client_versions_by_nodes = salt.cmd_run(
            "I@glusterfs:client",
            "glusterfs --version|head -n1")[0]

        assert has_only_similar(gluster_client_versions_by_nodes), \
            gluster_client_versions_by_nodes

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_galera(self, salt_actions, reclass_actions,
                           drivetrain_actions, show_step, _):
        """ Upgrade Galera automatically

        Scenario:
            1. Include the Galera upgrade pipeline job to DriveTrain
            2. Apply the jenkins.client state on the Jenkins nodes
            3. Set the openstack_upgrade_enabled parameter to true
            4. Refresh pillars
            5. Add repositories with new Galera packages
            6. Start job from Jenkins
        """
        salt = salt_actions
        reclass = reclass_actions
        dt = drivetrain_actions
        # ################### Enable pipeline ################################
        show_step(1)
        reclass.add_class(
            "system.jenkins.client.job.deploy.update.upgrade_galera",
            "cluster/*/cicd/control/leader.yml")
        show_step(2)
        salt.enforce_state("I@jenkins:client", "jenkins.client")

        # ############### Enable automatic upgrade ###########################
        show_step(3)
        reclass.add_bool_key("parameters._param.openstack_upgrade_enabled",
                             "True",
                             "cluster/*/infra/init.yml")

        show_step(4)
        salt.run_state("dbs*", "saltutil.refresh_pillar")

        # ############# Add repositories with new Galera packages ############
        show_step(5)
        salt.enforce_state("dbs*", "linux.system.repo")
        salt.enforce_state("cfg*", "salt.master")

        # ################ Start the deploy-upgrade-galera job ###############
        show_step(6)

        job_name = 'deploy-upgrade-galera'
        job_parameters = {
            'INTERACTIVE': 'false'
        }

        update_galera = dt.start_job_on_jenkins(
            job_name=job_name,
            job_parameters=job_parameters,
            build_timeout=40 * 60)

        assert update_galera == 'SUCCESS'

    @pytest.fixture
    def disable_automatic_failover_neutron_for_test(self, salt_actions):
        """
        On each OpenStack controller node, modify the neutron.conf file
        and restart the neutron-server service
        """

        def comment_line(node, file_name, word):
            """
            Adds '#' before the specific line in the specific file

            :param node: string, salt target of the node with the file
            :param file_name: string, full path to the file
            :param word: string, the beginning of the line to comment out
            :return: None
            """
            salt_actions.cmd_run(node,
                                 "sed -i 's/^{word}/#{word}/' {file}".
                                 format(word=word,
                                        file=file_name))

        def add_line(node, file_name, line):
            """
            Appends a line to the end of the file

            :param node: string, salt target of the node with the file
            :param file_name: string, full path to the file
            :param line: string, line that should be added
            :return: None
            """
            salt_actions.cmd_run(node, "echo {line} >> {file}".format(
                line=line,
                file=file_name))

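        # Illustrative sketch of the helpers above (the neutron.conf lines
        # are assumptions, not read from a real node): comment_line() turns
        #   allow_automatic_l3agent_failover = True
        # into
        #   #allow_automatic_l3agent_failover = True
        # and add_line() then appends the desired value, e.g.
        #   allow_automatic_l3agent_failover = false
        # to the end of the file.
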
        neutron_conf = '/etc/neutron/neutron.conf'
        neutron_server = "I@neutron:server"
        # ######## Create backup for config file #######################
        salt_actions.cmd_run(
            neutron_server,
            "cp -p {file} {file}.backup".format(file=neutron_conf))

        # ## Change parameters in neutron.conf
        comment_line(neutron_server, neutron_conf,
                     "allow_automatic_l3agent_failover")
        comment_line(neutron_server, neutron_conf,
                     "allow_automatic_dhcp_failover")
        add_line(neutron_server, neutron_conf,
                 "allow_automatic_dhcp_failover = false")
        add_line(neutron_server, neutron_conf,
                 "allow_automatic_l3agent_failover = false")

        # ## Apply the changed config to the neutron-server service
        result = salt_actions.cmd_run(neutron_server,
                                      "service neutron-server restart")
        # TODO: add check that neutron-server is up and running
        yield result
        # ## Revert file changes
        salt_actions.cmd_run(
            neutron_server,
            "cp -p {file}.backup {file}".format(file=neutron_conf))
        salt_actions.cmd_run(neutron_server,
                             "service neutron-server restart")

    @pytest.fixture
    def disable_neutron_agents_for_test(self, salt_actions):
        """
        Disable the neutron services before the test and
        enable them after the test
        """
        result = salt_actions.cmd_run("I@neutron:server", """
                service neutron-dhcp-agent stop && \
                service neutron-l3-agent stop && \
                service neutron-metadata-agent stop && \
                service neutron-openvswitch-agent stop
            """)
        yield result
        # ## Re-enable the services after the test
        salt_actions.cmd_run("I@neutron:server", """
                service neutron-dhcp-agent start && \
                service neutron-l3-agent start && \
                service neutron-metadata-agent start && \
                service neutron-openvswitch-agent start
            """)
        # TODO: add check that all services are UP and running

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_rabbit(self, salt_actions, reclass_actions,
                           drivetrain_actions, show_step, _,
                           disable_automatic_failover_neutron_for_test,
                           disable_neutron_agents_for_test):
        """ Updates RabbitMQ
        Scenario:
            1. Include the RabbitMQ upgrade pipeline job to DriveTrain
            2. Add repositories with new RabbitMQ packages
            3. Start Deploy - upgrade RabbitMQ pipeline

        Updating RabbitMQ should be completed before the OpenStack update
        process starts
        """
        salt = salt_actions
        reclass = reclass_actions
        dt = drivetrain_actions

        # ####### Include the RabbitMQ upgrade pipeline job to DriveTrain ####
        show_step(1)
        reclass.add_class(
            "system.jenkins.client.job.deploy.update.upgrade_rabbitmq",
            "cluster/*/cicd/control/leader.yml")
        salt.enforce_state("I@jenkins:client", "jenkins.client")

        reclass.add_bool_key("parameters._param.openstack_upgrade_enabled",
                             "True",
                             "cluster/*/infra/init.yml")
        salt.run_state("I@rabbitmq:server", "saltutil.refresh_pillar")

        # ########### Add repositories with new RabbitMQ packages ############
        show_step(2)
        salt.enforce_state("I@rabbitmq:server", "linux.system.repo")

        # ########### Start Deploy - upgrade RabbitMQ pipeline ###############
        show_step(3)
        job_parameters = {
            'INTERACTIVE': 'false'
        }

        update_rabbit = dt.start_job_on_jenkins(
            job_name='deploy-upgrade-rabbitmq',
            job_parameters=job_parameters,
            build_timeout=40 * 60
        )
        assert update_rabbit == 'SUCCESS'

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_ceph(self, salt_actions, drivetrain_actions,
                         show_step, _):
        """ Updates Ceph to the latest minor version

        Scenario:
            1. Add workaround for unhealthy Ceph
            2. Start ceph-upgrade job with default parameters
            3. Check Ceph version for all nodes

        https://docs.mirantis.com/mcp/master/mcp-operations-guide/update-upgrade/minor-update/ceph-update.html
        """
        salt = salt_actions
        dt = drivetrain_actions

        # ###################### Add workaround for unhealthy Ceph ###########
        show_step(1)
        salt.cmd_run("I@ceph:radosgw",
                     "ceph config set 'mon pg warn max object skew' 20")
        # ###################### Start ceph-upgrade pipeline #################
        show_step(2)
        job_parameters = {}

        update_ceph = dt.start_job_on_jenkins(
            job_name='ceph-update',
            job_parameters=job_parameters)

        assert update_ceph == 'SUCCESS'

        # ########## Verify Ceph version #####################################
        show_step(3)

        ceph_version_by_nodes = salt.cmd_run(
            "I@ceph:* and not I@ceph:monitoring and not I@ceph:backup:server",
            "ceph version")[0]

        assert has_only_similar(ceph_version_by_nodes), ceph_version_by_nodes

    @pytest.mark.grab_versions
    @pytest.mark.parametrize("_", [settings.ENV_NAME])
    @pytest.mark.run_mcp_update
    def test_update_stacklight(self, _, drivetrain_actions):
        """ Update packages for Stacklight
        Scenario:
            1. Start Deploy - upgrade Stacklight job
        """
        drivetrain = drivetrain_actions

        job_parameters = {
            "STAGE_UPGRADE_DOCKER_COMPONENTS": True,
            "STAGE_UPGRADE_ES_KIBANA": True,
            "STAGE_UPGRADE_SYSTEM_PART": True
        }
        upgrade_control_pipeline = drivetrain.start_job_on_jenkins(
            job_name="stacklight-upgrade",
            job_parameters=job_parameters)

        assert upgrade_control_pipeline == 'SUCCESS'


@pytest.mark.usefixtures("switch_to_proposed_pipelines",
                         "enable_openstack_update")
class TestOpenstackUpdate(object):

    @pytest.mark.grab_versions
    @pytest.mark.run_mcp_update
    def test__pre_update__enable_pipeline_job(self,
                                              reclass_actions, salt_actions,
                                              show_step):
        """ Enable pipeline in the Drivetrain

        Scenario:
            1. Add deploy.update.* classes to the reclass
            2. Start jenkins.client salt state

        """
        salt = salt_actions
        reclass = reclass_actions
        show_step(1)
        reclass.add_class("system.jenkins.client.job.deploy.update.upgrade",
                          "cluster/*/cicd/control/leader.yml")

        reclass.add_class(
            "system.jenkins.client.job.deploy.update.upgrade_ovs_gateway",
            "cluster/*/cicd/control/leader.yml")

        reclass.add_class(
            "system.jenkins.client.job.deploy.update.upgrade_compute",
            "cluster/*/cicd/control/leader.yml")

        show_step(2)
        r, errors = salt.enforce_state("I@jenkins:client", "jenkins.client")
        assert errors is None

    @pytest.mark.grab_versions
    @pytest.mark.parametrize('target', get_control_plane_targets())
    @pytest.mark.run_mcp_update
    def test__update__control_plane(self, drivetrain_actions, target):
        """Start 'Deploy - upgrade control VMs' for a specific node
        """
        job_parameters = {
            "TARGET_SERVERS": target,
            "OS_DIST_UPGRADE": True,
            "UPGRADE_SALTSTACK": False,
            "OS_UPGRADE": True,
            "INTERACTIVE": False}
        upgrade_control_pipeline = drivetrain_actions.start_job_on_jenkins(
            job_name="deploy-upgrade-control",
            job_parameters=job_parameters)

        assert upgrade_control_pipeline == 'SUCCESS'

    @pytest.mark.grab_versions
    @pytest.mark.run_mcp_update
    def test__update__data_plane(self, drivetrain_actions, salt_actions):
        """Start 'Deploy - upgrade OVS gateway'
        """
        if not salt_actions.cmd_run("gtw*", "test.ping")[0].keys():
            pytest.skip("This deployment doesn't have gtw* nodes")
        job_parameters = {
            "OS_DIST_UPGRADE": True,
            "OS_UPGRADE": True,
            "INTERACTIVE": False}
        upgrade_data_pipeline = drivetrain_actions.start_job_on_jenkins(
            job_name="deploy-upgrade-ovs-gateway",
            job_parameters=job_parameters)

        assert upgrade_data_pipeline == 'SUCCESS'

    @pytest.mark.grab_versions
    @pytest.mark.run_mcp_update
    def test__update__computes(self, drivetrain_actions):
        """Start 'Deploy - upgrade computes'
        """
        job_parameters = {
            "OS_DIST_UPGRADE": True,
            "OS_UPGRADE": True,
            "INTERACTIVE": False}
        upgrade_compute_pipeline = drivetrain_actions.start_job_on_jenkins(
            job_name="deploy-upgrade-compute",
            job_parameters=job_parameters)

        assert upgrade_compute_pipeline == 'SUCCESS'