import jenkins
from xml.dom import minidom
import utils
import json
import pytest
import time
import os
from pygerrit2 import GerritRestAPI, HTTPBasicAuth
from requests import HTTPError
import git
import ldap
import ldap.modlist as modlist


def join_to_gerrit(local_salt_client, gerrit_user, gerrit_password):
    gerrit_port = local_salt_client.pillar_get(
        tgt='I@gerrit:client and not I@salt:master',
        param='_param:haproxy_gerrit_bind_port',
        expr_form='compound')
    gerrit_address = local_salt_client.pillar_get(
        tgt='I@gerrit:client and not I@salt:master',
        param='_param:haproxy_gerrit_bind_host',
        expr_form='compound')
    url = 'http://{0}:{1}'.format(gerrit_address, gerrit_port)
    auth = HTTPBasicAuth(gerrit_user, gerrit_password)
    rest = GerritRestAPI(url=url, auth=auth)
    return rest


def join_to_jenkins(local_salt_client, jenkins_user, jenkins_password):
    jenkins_port = local_salt_client.pillar_get(
        tgt='I@jenkins:client and not I@salt:master',
        param='_param:haproxy_jenkins_bind_port',
        expr_form='compound')
    jenkins_address = local_salt_client.pillar_get(
        tgt='I@jenkins:client and not I@salt:master',
        param='_param:haproxy_jenkins_bind_host',
        expr_form='compound')
    jenkins_url = 'http://{0}:{1}'.format(jenkins_address, jenkins_port)
    server = jenkins.Jenkins(jenkins_url, username=jenkins_user, password=jenkins_password)
    return server


def get_password(local_salt_client, service):
    password = local_salt_client.pillar_get(
        tgt=service,
        param='_param:openldap_admin_password')
    return password


def test_drivetrain_gerrit(local_salt_client, check_cicd):
    gerrit_password = get_password(local_salt_client, 'gerrit:client')
    gerrit_error = ''
    current_date = time.strftime("%Y%m%d-%H.%M.%S", time.localtime())
    test_proj_name = "test-dt-{0}".format(current_date)
    gerrit_port = local_salt_client.pillar_get(
        tgt='I@gerrit:client and not I@salt:master',
        param='_param:haproxy_gerrit_bind_port',
        expr_form='compound')
    gerrit_address = local_salt_client.pillar_get(
        tgt='I@gerrit:client and not I@salt:master',
        param='_param:haproxy_gerrit_bind_host',
        expr_form='compound')
    try:
        # Connect to Gerrit and check the connection
        server = join_to_gerrit(local_salt_client, 'admin', gerrit_password)
        gerrit_check = server.get("/changes/?q=owner:self%20status:open")
        # Check the deleteproject plugin and skip the test if it is not installed
        gerrit_plugins = server.get("/plugins/?all")
        if 'deleteproject' not in gerrit_plugins:
            pytest.skip("Delete-project plugin is not installed")
        # Create a test project and add a description
        server.put("/projects/" + test_proj_name)
        server.put("/projects/" + test_proj_name + "/description",
                   json={"description": "Test DriveTrain project",
                         "commit_message": "Update the project description"})
    except HTTPError, e:
        gerrit_error = e
    try:
        # Create a test folder and init a git repo in it
        repo_dir = os.path.join(os.getcwd(), test_proj_name)
        file_name = os.path.join(repo_dir, current_date)
        repo = git.Repo.init(repo_dir)
        # Add a remote url for this git repo
        origin = repo.create_remote('origin', 'http://admin:{1}@{2}:{3}/{0}.git'.format(test_proj_name, gerrit_password, gerrit_address, gerrit_port))
        # Add the commit-msg hook to automatically add a Change-Id to the commit
        os.system("curl -Lo {0}/.git/hooks/commit-msg 'http://admin:{1}@{2}:{3}/tools/hooks/commit-msg' > /dev/null 2>&1".format(repo_dir, gerrit_password, gerrit_address, gerrit_port))
        os.system("chmod u+x {0}/.git/hooks/commit-msg".format(repo_dir))
        # Create a test file
        f = open(file_name, 'w+')
        f.write("This is a test file for DriveTrain test")
        f.close()
        # Add the file to git and commit it to Gerrit for review
        repo.index.add([file_name])
        repo.index.commit("This is a test commit for DriveTrain test")
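        # Pushing to the special ref refs/for/master creates a review (change)
        # in Gerrit for the master branch instead of updating the branch directly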
        repo.git.push("origin", "HEAD:refs/for/master")
        # Get the change id from Gerrit, set Code-Review +2 and submit the change
        changes = server.get("/changes/?q=project:{0}".format(test_proj_name))
        last_change = changes[0].get('change_id')
        server.post("/changes/{0}/revisions/1/review".format(last_change),
                    json={"message": "All is good", "labels": {"Code-Review": "+2"}})
        server.post("/changes/{0}/submit".format(last_change))
    except HTTPError, e:
        gerrit_error = e
    finally:
        # Delete the test project
        server.post("/projects/" + test_proj_name + "/deleteproject~delete")
    assert gerrit_error == '',\
        'Something is wrong with Gerrit: {}'.format(gerrit_error)


def test_drivetrain_openldap(local_salt_client, check_cicd):
    """
    1. Create a test user 'DT_test_user' in openldap
    2. Add the user to the admin group
    3. Log in to Jenkins as this user
    4. Check that no error occurred
    5. Add the user to the devops group in Gerrit and then log in to Gerrit
       using the test user's credentials
    6. Start a job in Jenkins as this user
    7. Get info from Gerrit as this user
    8. Finally, delete the user from the admin group and from openldap
    """

    # TODO: split into several test cases, one check per test method.
    #       Move the login process into a fixture.
    ldap_password = get_password(local_salt_client, 'openldap:client')
    # Check that ldap_password exists, otherwise skip the test
    if not ldap_password:
        pytest.skip("Openldap service or openldap:client pillar "
                    "are not found on this environment.")
    ldap_port = local_salt_client.pillar_get(
        tgt='I@openldap:client and not I@salt:master',
        param='_param:haproxy_openldap_bind_port',
        expr_form='compound')
    ldap_address = local_salt_client.pillar_get(
        tgt='I@openldap:client and not I@salt:master',
        param='_param:haproxy_openldap_bind_host',
        expr_form='compound')
    ldap_dc = local_salt_client.pillar_get(
        tgt='openldap:client',
        param='_param:openldap_dn')
    ldap_con_admin = local_salt_client.pillar_get(
        tgt='openldap:client',
        param='openldap:client:server:auth:user')
    ldap_url = 'ldap://{0}:{1}'.format(ldap_address, ldap_port)
    ldap_error = ''
    ldap_result = ''
    gerrit_result = ''
    gerrit_error = ''
    jenkins_error = ''
    # Test user's CN
    test_user_name = 'DT_test_user'
    test_user = 'cn={0},ou=people,{1}'.format(test_user_name, ldap_dc)
    # Admins group CN
    admin_gr_dn = 'cn=admins,ou=groups,{0}'.format(ldap_dc)
    # List of attributes for the test user
    attrs = {}
    attrs['objectclass'] = ['organizationalRole', 'simpleSecurityObject', 'shadowAccount']
    attrs['cn'] = test_user_name
    attrs['uid'] = test_user_name
    attrs['userPassword'] = 'aSecretPassw'
    attrs['description'] = 'Test user for CVP DT test'
    searchFilter = 'cn={0}'.format(test_user_name)
    # Get a test job name from the config
    config = utils.get_configuration()
    jenkins_cvp_job = config['jenkins_cvp_job']
    # Open a connection to ldap and create the test user in the admins group
    try:
        ldap_server = ldap.initialize(ldap_url)
        ldap_server.simple_bind_s(ldap_con_admin, ldap_password)
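        # addModlist turns the attrs dict into the modlist format that add_s
        # expects; membership in the admins group is granted by adding the
        # user's uid to the group's memberUid attribute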
        ldif = modlist.addModlist(attrs)
        ldap_server.add_s(test_user, ldif)
        ldap_server.modify_s(admin_gr_dn, [(ldap.MOD_ADD, 'memberUid', [test_user_name],)],)
        # Search for the test user in LDAP to make sure it was created
        searchScope = ldap.SCOPE_SUBTREE
        ldap_result = ldap_server.search_s(ldap_dc, searchScope, searchFilter)
    except ldap.LDAPError, e:
        ldap_error = e
    try:
        # Check the connection between Jenkins and LDAP
        jenkins_server = join_to_jenkins(local_salt_client, test_user_name, 'aSecretPassw')
        jenkins_version = jenkins_server.get_job_name(jenkins_cvp_job)
        # Check the connection between Gerrit and LDAP
        gerrit_server = join_to_gerrit(local_salt_client, 'admin', ldap_password)
        gerrit_check = gerrit_server.get("/changes/?q=owner:self%20status:open")
        # Add the test user to the devops-contrib group in Gerrit and check login
        _link = "/groups/devops-contrib/members/{0}".format(test_user_name)
        gerrit_add_user = gerrit_server.put(_link)
        gerrit_server = join_to_gerrit(local_salt_client, test_user_name, 'aSecretPassw')
        gerrit_result = gerrit_server.get("/changes/?q=owner:self%20status:open")
    except HTTPError, e:
        gerrit_error = e
    except jenkins.JenkinsException, e:
        jenkins_error = e
    finally:
        # Clean up: remove the test user from the admins group and delete it from openldap
        ldap_server.modify_s(admin_gr_dn, [(ldap.MOD_DELETE, 'memberUid', [test_user_name],)],)
        ldap_server.delete_s(test_user)
        ldap_server.unbind_s()
    assert ldap_error == '', \
        '''Something is wrong with connection to LDAP:
        {0}'''.format(ldap_error)
    assert jenkins_error == '', \
        '''Connection to Jenkins was not established:
        {0}'''.format(jenkins_error)
    assert gerrit_error == '', \
        '''Connection to Gerrit was not established:
        {0}'''.format(gerrit_error)
    assert ldap_result != [], \
        '''Test user was not found'''


def test_drivetrain_services_replicas(local_salt_client):
    """
    # Execute `salt -C 'I@gerrit:client' cmd.run 'docker service ls'` to get info for each
      docker service, e.g.
      "x5nzktxsdlm6 jenkins_slave02 replicated 0/1 docker-prod-local.artifactory.mirantis.com/mirantis/cicd/jnlp-slave:2019.2.0"
    # Check that each service has all its replicas running
    """
    # TODO: replace with the rerunfailures plugin
    wrong_items = []
    for _ in range(4):
        docker_services_by_nodes = local_salt_client.cmd(
            tgt='I@gerrit:client',
            param='docker service ls',
            expr_form='compound')
        wrong_items = []
        for line in docker_services_by_nodes[docker_services_by_nodes.keys()[0]].split('\n'):
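            # The REPLICAS column looks like "N/M"; the characters just before
            # and after '/' must match for a healthy 'replicated' service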
            if line[line.find('/') - 1] != line[line.find('/') + 1] \
                    and 'replicated' in line:
                wrong_items.append(line)
        if len(wrong_items) == 0:
            break
        else:
            print('''Some DriveTrain services don't have the expected number of replicas:
                  {}\n'''.format(json.dumps(wrong_items, indent=4)))
            time.sleep(5)
    assert len(wrong_items) == 0


def test_drivetrain_components_and_versions(local_salt_client, check_cicd):
    """
    1. Execute `docker service ls --format "{{.Image}}"` on the 'I@gerrit:client' target
    2. Execute `salt -C 'I@gerrit:client' pillar.get docker:client:images`
    3. Check that the list of images from step 1 is the same as the list from step 2
    4. Check that all docker services have an image tag that equals mcp_version
    """
    config = utils.get_configuration()
    if not config['drivetrain_version']:
        expected_version = \
            local_salt_client.pillar_get(param='_param:mcp_version') or \
            local_salt_client.pillar_get(param='_param:apt_mk_version')
        if not expected_version:
            pytest.skip("drivetrain_version is not defined. Skipping")
    else:
        expected_version = config['drivetrain_version']
    table_with_docker_services = local_salt_client.cmd(
        tgt='I@gerrit:client',
        param='docker service ls --format "{{.Image}}"',
        expr_form='compound')
    expected_images = local_salt_client.pillar_get(
        tgt='gerrit:client',
        param='docker:client:images')
    mismatch = {}
    actual_images = {}
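    # Build a {image name: tag} map from the 'docker service ls' output and
    # compare it against the images defined in the docker:client:images pillar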
    for image in set(table_with_docker_services[table_with_docker_services.keys()[0]].split('\n')):
        actual_images[image.split(":")[0]] = image.split(":")[-1]
    for image in set(expected_images):
        im_name = image.split(":")[0]
        if im_name not in actual_images:
            mismatch[im_name] = 'not found on env'
        elif image.split(":")[-1] != actual_images[im_name]:
            mismatch[im_name] = 'has {actual} version instead of {expected}'.format(
                actual=actual_images[im_name], expected=image.split(":")[-1])
    assert len(mismatch) == 0, \
        '''Some DriveTrain components do not have expected versions:
        {}'''.format(json.dumps(mismatch, indent=4))


def test_jenkins_jobs_branch(local_salt_client):
    """ This test compares the branches of Jenkins jobs collected
        from the live Jenkins instance against the branches defined in pillars.
    """
    excludes = ['upgrade-mcp-release', 'deploy-update-salt',
                'git-mirror-downstream-mk-pipelines',
                'git-mirror-downstream-pipeline-library']

    config = utils.get_configuration()
    drivetrain_version = config.get('drivetrain_version', '')
    if not drivetrain_version:
        pytest.skip("drivetrain_version is not defined. Skipping")
    jenkins_password = get_password(local_salt_client, 'jenkins:client')
    version_mismatch = []
    server = join_to_jenkins(local_salt_client, 'admin', jenkins_password)
    for job_instance in server.get_jobs():
        job_name = job_instance.get('name')
        if job_name in excludes:
            continue

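        # The branch a job builds from is stored in its config.xml as a
        # hudson.plugins.git.BranchSpec element; read it and compare with pillar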
        job_config = server.get_job_config(job_name)
        xml_data = minidom.parseString(job_config)
        BranchSpec = xml_data.getElementsByTagName('hudson.plugins.git.BranchSpec')

        # We use the master branch for pipeline-library in case of 'testing', 'stable',
        # 'nightly' versions.
        # Leave the proposed version as is.
        # In other cases we get release/{drivetrain_version} (e.g. release/2019.2.0)
        if drivetrain_version in ['testing', 'nightly', 'stable']:
            expected_version = 'master'
        else:
            expected_version = local_salt_client.pillar_get(
                tgt='gerrit:client',
                param='jenkins:client:job:{}:scm:branch'.format(job_name))

        if not BranchSpec:
            print("No BranchSpec has been found for the {} job".format(job_name))
            continue

        actual_version = BranchSpec[0].getElementsByTagName('name')[0].childNodes[0].data
        if actual_version not in [expected_version, "release/{}".format(drivetrain_version)]:
            version_mismatch.append("Job {0} has the {1} branch. "
                                    "Expected {2}".format(job_name,
                                                          actual_version,
                                                          expected_version))
    assert len(version_mismatch) == 0, \
        '''Some DriveTrain jobs have version/branch mismatch:
        {}'''.format(json.dumps(version_mismatch, indent=4))


def test_drivetrain_jenkins_job(local_salt_client, check_cicd):
    """
    # Log in to Jenkins on jenkins:client
    # Read the name of the job from the 'jenkins_test_job' configuration option
    # Start the job
    # Wait until the job has completed
    # Check that the job has completed with the "SUCCESS" result
    """
    job_result = None

    jenkins_password = get_password(local_salt_client, 'jenkins:client')
    server = join_to_jenkins(local_salt_client, 'admin', jenkins_password)
    # Get the Jenkins test job name from the configuration
    config = utils.get_configuration()
    jenkins_test_job = config['jenkins_test_job']
    if not server.get_job_name(jenkins_test_job):
        server.create_job(jenkins_test_job, jenkins.EMPTY_CONFIG_XML)
    if server.get_job_name(jenkins_test_job):
        next_build_num = server.get_job_info(jenkins_test_job)['nextBuildNumber']
        # If this is the first build, skip the "already running" check
        if next_build_num != 1:
            # Check that the test job is not running at the moment,
            # otherwise skip the test
            last_build_num = server.get_job_info(jenkins_test_job)['lastBuild'].get('number')
            last_build_status = server.get_build_info(jenkins_test_job, last_build_num)['building']
            if last_build_status:
                pytest.skip("Test job {0} is already running".format(jenkins_test_job))
        server.build_job(jenkins_test_job)
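        # Poll the build status every 10 seconds, for up to 180 seconds in total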
        timeout = 0
        # Job status is True by default to cover the delay between
        # triggering the build and the build actually starting
        job_status = True
        while job_status and (timeout < 180):
            time.sleep(10)
            timeout += 10
            job_status = server.get_build_info(jenkins_test_job, next_build_num)['building']
        job_result = server.get_build_info(jenkins_test_job, next_build_num)['result']
    else:
        pytest.skip("The job {0} was not found".format(jenkins_test_job))
    assert job_result == 'SUCCESS', \
        '''Test job '{0}' build was not successful or the timeout is too small
        '''.format(jenkins_test_job)


def test_kdt_all_pods_are_available(local_salt_client, check_kdt):
    """
    # Run `kubectl get pods -n drivetrain` on the kdt nodes to get the status of each pod
    # Check that each pod has all its containers ready (the READY column)
    """
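    # The awk/column pipeline keeps only the NAME and READY columns, turning the
    # kubectl output into lines like "pod-name;  x/y"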
    pods_statuses_output = local_salt_client.cmd_any(
        tgt='L@'+','.join(check_kdt),
        param='kubectl get pods -n drivetrain | awk {\'print $1"; "$2\'} | column -t',
        expr_form='compound')

    assert pods_statuses_output != "/bin/sh: 1: kubectl: not found", \
        "Nodes {} don't have kubectl".format(check_kdt)
    # Convert the string to a list and remove the first row with column names
    pods_statuses = pods_statuses_output.split('\n')
    pods_statuses = pods_statuses[1:]

    report_with_errors = ""
    for pod_status in pods_statuses:
        pod, status = pod_status.split('; ')
        actual_replica, expected_replica = status.split('/')

        if actual_replica.strip() != expected_replica.strip():
            report_with_errors += "Pod [{pod}] doesn't have all containers. Expected {expected} containers, actual {actual}\n".format(
                pod=pod,
                expected=expected_replica,
                actual=actual_replica
            )

    print report_with_errors
    assert report_with_errors == "", \
        "\n{sep}{kubectl_output}{sep} \n\n {report} ".format(
            sep="\n" + "-"*20 + "\n",
            kubectl_output=pods_statuses_output,
            report=report_with_errors
        )


def test_kfg_all_pods_are_available(local_salt_client, check_kfg):
    """
    # Run `kubectl get pods -n drivetrain` on the cfg node to get the status of each pod
    # Check that each pod has all its containers ready (the READY column)
    """
    # TODO: collapse similar tests into one to check pods and add a new fixture
    pods_statuses_output = local_salt_client.cmd_any(
        tgt='L@' + ','.join(check_kfg),
        param='kubectl get pods -n drivetrain | awk {\'print $1"; "$2\'} | column -t',
        expr_form='compound')
    # Convert the string to a list and remove the first row with column names
    pods_statuses = pods_statuses_output.split('\n')
    pods_statuses = pods_statuses[1:]

    report_with_errors = ""
    for pod_status in pods_statuses:
        pod, status = pod_status.split('; ')
        actual_replica, expected_replica = status.split('/')

        if actual_replica.strip() != expected_replica.strip():
            report_with_errors += "Pod [{pod}] doesn't have all containers. Expected {expected} containers, actual {actual}\n".format(
                pod=pod,
                expected=expected_replica,
                actual=actual_replica
            )

    print report_with_errors
    assert report_with_errors == "", \
        "\n{sep}{kubectl_output}{sep} \n\n {report} ".format(
            sep="\n" + "-" * 20 + "\n",
            kubectl_output=pods_statuses_output,
            report=report_with_errors
        )