import jenkins
from xml.dom import minidom
import utils
import json
import pytest
import time
import os
from pygerrit2 import GerritRestAPI, HTTPBasicAuth
from requests import HTTPError
import git
import ldap
import ldap.modlist as modlist
import logging

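# Sanity checks for the MCP DriveTrain stack: Gerrit, Jenkins and OpenLDAP
# behind HAProxy, the Docker Swarm services that back them and, on
# Kubernetes-based deployments, the pods in the 'drivetrain' namespace.
# Endpoints and credentials are resolved from Salt pillar data through the
# local_salt_client fixture.
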
def join_to_gerrit(local_salt_client, gerrit_user, gerrit_password):
    gerrit_port = local_salt_client.pillar_get(
        tgt='I@gerrit:client and not I@salt:master',
        param='_param:haproxy_gerrit_bind_port',
        expr_form='compound')
    gerrit_address = local_salt_client.pillar_get(
        tgt='I@gerrit:client and not I@salt:master',
        param='_param:haproxy_gerrit_bind_host',
        expr_form='compound')
    url = 'http://{0}:{1}'.format(gerrit_address, gerrit_port)
    auth = HTTPBasicAuth(gerrit_user, gerrit_password)
    rest = GerritRestAPI(url=url, auth=auth)
    return rest


def join_to_jenkins(local_salt_client, jenkins_user, jenkins_password):
    jenkins_port = local_salt_client.pillar_get(
        tgt='I@jenkins:client and not I@salt:master',
        param='_param:haproxy_jenkins_bind_port',
        expr_form='compound')
    jenkins_address = local_salt_client.pillar_get(
        tgt='I@jenkins:client and not I@salt:master',
        param='_param:haproxy_jenkins_bind_host',
        expr_form='compound')
    jenkins_url = 'http://{0}:{1}'.format(jenkins_address, jenkins_port)
    server = jenkins.Jenkins(jenkins_url, username=jenkins_user,
                             password=jenkins_password)
    return server


def get_password(local_salt_client, service):
    password = local_salt_client.pillar_get(
        tgt=service,
        param='_param:openldap_admin_password')
    return password


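# A minimal usage sketch of the helpers above (the 'admin' account and the
# pillar paths are the ones the tests below rely on):
#   gerrit_password = get_password(local_salt_client, 'gerrit:client')
#   rest = join_to_gerrit(local_salt_client, 'admin', gerrit_password)
#   open_changes = rest.get("/changes/?q=status:open")
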
@pytest.mark.full
def test_drivetrain_gerrit(local_salt_client, check_cicd):
    gerrit_password = get_password(local_salt_client, 'gerrit:client')
    gerrit_error = ''
    current_date = time.strftime("%Y%m%d-%H.%M.%S", time.localtime())
    test_proj_name = "test-dt-{0}".format(current_date)
    gerrit_port = local_salt_client.pillar_get(
        tgt='I@gerrit:client and not I@salt:master',
        param='_param:haproxy_gerrit_bind_port',
        expr_form='compound')
    gerrit_address = local_salt_client.pillar_get(
        tgt='I@gerrit:client and not I@salt:master',
        param='_param:haproxy_gerrit_bind_host',
        expr_form='compound')
    try:
        # Connect to Gerrit and check the connection
        server = join_to_gerrit(local_salt_client, 'admin', gerrit_password)
        gerrit_check = server.get("/changes/?q=owner:self%20status:open")
        # Check for the deleteproject plugin; skip the test if it is missing
        gerrit_plugins = server.get("/plugins/?all")
        if 'deleteproject' not in gerrit_plugins:
            pytest.skip("Delete-project plugin is not installed")
        # Create a test project and add a description
        server.put("/projects/" + test_proj_name)
        server.put(
            "/projects/" + test_proj_name + "/description",
            json={"description": "Test DriveTrain project",
                  "commit_message": "Update the project description"})
    except HTTPError as e:
        gerrit_error = e
    try:
        # Create a test folder and initialize a git repo in it
        repo_dir = os.path.join(os.getcwd(), test_proj_name)
        file_name = os.path.join(repo_dir, current_date)
        repo = git.Repo.init(repo_dir)
        # Add a remote url for this git repo
        origin = repo.create_remote(
            'origin', 'http://admin:{1}@{2}:{3}/{0}.git'.format(
                test_proj_name, gerrit_password, gerrit_address, gerrit_port))
        # Add the commit-msg hook to automatically add a Change-Id to commits
        os.system(
            "curl -Lo {0}/.git/hooks/commit-msg "
            "'http://admin:{1}@{2}:{3}/tools/hooks/commit-msg' "
            "> /dev/null 2>&1".format(
                repo_dir, gerrit_password, gerrit_address, gerrit_port))
        os.system("chmod u+x {0}/.git/hooks/commit-msg".format(repo_dir))
        # Create a test file
        f = open(file_name, 'w+')
        f.write("This is a test file for DriveTrain test")
        f.close()
        # Add the file to git and push it to Gerrit for review
        repo.index.add([file_name])
        repo.index.commit("This is a test commit for DriveTrain test")
        repo.git.push("origin", "HEAD:refs/for/master")
        # Get the change id from Gerrit, set Code-Review +2 and submit it
        changes = server.get("/changes/?q=project:{0}".format(test_proj_name))
        last_change = changes[0].get('change_id')
        server.post(
            "/changes/{0}/revisions/1/review".format(last_change),
            json={"message": "All is good",
                  "labels": {"Code-Review": "+2"}})
        server.post("/changes/{0}/submit".format(last_change))
    except HTTPError as e:
        gerrit_error = e
    finally:
        # Delete the test project
        server.post("/projects/" + test_proj_name + "/deleteproject~delete")
    assert gerrit_error == '', \
        'Something is wrong with Gerrit: {}'.format(gerrit_error)


@pytest.mark.full
def test_drivetrain_openldap(local_salt_client, check_cicd):
    """
    1. Create a test user 'DT_test_user' in OpenLDAP
    2. Add the user to the admin group
    3. Log in to Jenkins as this user
    4. Check that no error occurred
    5. Add the user to the devops group in Gerrit and then log in to Gerrit
       using the test user's credentials
    6. Start a job in Jenkins as this user
    7. Get info from Gerrit as this user
    8. Finally, delete the user from the admin group and from OpenLDAP
    """

    # TODO: split into several test cases, one check per test method;
    # move the login process into a fixture
    ldap_password = get_password(local_salt_client, 'openldap:client')
    # Check that ldap_password exists, otherwise skip the test
    if not ldap_password:
        pytest.skip("Openldap service or openldap:client pillar "
                    "are not found on this environment.")
    ldap_port = local_salt_client.pillar_get(
        tgt='I@openldap:client and not I@salt:master',
        param='_param:haproxy_openldap_bind_port',
        expr_form='compound')
    ldap_address = local_salt_client.pillar_get(
        tgt='I@openldap:client and not I@salt:master',
        param='_param:haproxy_openldap_bind_host',
        expr_form='compound')
    ldap_dc = local_salt_client.pillar_get(
        tgt='openldap:client',
        param='_param:openldap_dn')
    ldap_con_admin = local_salt_client.pillar_get(
        tgt='openldap:client',
        param='openldap:client:server:auth:user')
    ldap_url = 'ldap://{0}:{1}'.format(ldap_address, ldap_port)
    ldap_error = ''
    ldap_result = ''
    gerrit_result = ''
    gerrit_error = ''
    jenkins_error = ''
    # Test user's CN
    test_user_name = 'DT_test_user'
    test_user = 'cn={0},ou=people,{1}'.format(test_user_name, ldap_dc)
    # Admins group CN
    admin_gr_dn = 'cn=admins,ou=groups,{0}'.format(ldap_dc)
    # List of attributes for the test user
    attrs = {}
    attrs['objectclass'] = ['organizationalRole', 'simpleSecurityObject',
                            'shadowAccount']
    attrs['cn'] = test_user_name
    attrs['uid'] = test_user_name
    attrs['userPassword'] = 'aSecretPassw'
    attrs['description'] = 'Test user for CVP DT test'
    searchFilter = 'cn={0}'.format(test_user_name)
    # Get the test job name from the config
    config = utils.get_configuration()
    jenkins_cvp_job = config['jenkins_cvp_job']
    # Open a connection to LDAP and create the test user in the admins group
    try:
        ldap_server = ldap.initialize(ldap_url)
        ldap_server.simple_bind_s(ldap_con_admin, ldap_password)
        # addModlist() turns the attribute dict into the modlist add_s() needs
        ldif = modlist.addModlist(attrs)
        ldap_server.add_s(test_user, ldif)
        ldap_server.modify_s(
            admin_gr_dn, [(ldap.MOD_ADD, 'memberUid', [test_user_name],)],)
        # Search for the test user in LDAP
        searchScope = ldap.SCOPE_SUBTREE
        ldap_result = ldap_server.search_s(ldap_dc, searchScope, searchFilter)
    except ldap.LDAPError as e:
        ldap_error = e
    try:
        # Check the connection between Jenkins and LDAP
        jenkins_server = join_to_jenkins(local_salt_client, test_user_name,
                                         'aSecretPassw')
        jenkins_version = jenkins_server.get_job_name(jenkins_cvp_job)
        # Check the connection between Gerrit and LDAP
        gerrit_server = join_to_gerrit(local_salt_client, 'admin',
                                       ldap_password)
        gerrit_check = gerrit_server.get("/changes/?q=owner:self%20status:open")
        # Add the test user to the devops-contrib group in Gerrit and log in
        _link = "/groups/devops-contrib/members/{0}".format(test_user_name)
        gerrit_add_user = gerrit_server.put(_link)
        gerrit_server = join_to_gerrit(local_salt_client, test_user_name,
                                       'aSecretPassw')
        gerrit_result = gerrit_server.get("/changes/?q=owner:self%20status:open")
    except HTTPError as e:
        gerrit_error = e
    except jenkins.JenkinsException as e:
        jenkins_error = e
    finally:
        ldap_server.modify_s(
            admin_gr_dn, [(ldap.MOD_DELETE, 'memberUid', [test_user_name],)],)
        ldap_server.delete_s(test_user)
        ldap_server.unbind_s()
    assert ldap_error == '', \
        '''Something is wrong with connection to LDAP:
        {0}'''.format(ldap_error)
    assert jenkins_error == '', \
        '''Connection to Jenkins was not established:
        {0}'''.format(jenkins_error)
    assert gerrit_error == '', \
        '''Connection to Gerrit was not established:
        {0}'''.format(gerrit_error)
    assert ldap_result != [], \
        '''Test user was not found'''


@pytest.mark.sl_dup
# DockerService***Outage
@pytest.mark.full
def test_drivetrain_services_replicas(local_salt_client, check_cicd):
    """
    # Execute `salt -C 'I@gerrit:client' cmd.run 'docker service ls'` to get
      info for each docker service, e.g.:
      "x5nzktxsdlm6 jenkins_slave02 replicated 0/1 docker-prod-local.artifactory.mirantis.com/mirantis/cicd/jnlp-slave:2019.2.0"
    # Check that each service has all its replicas
    """
    # TODO: replace with the rerunfailures plugin
    wrong_items = []
    for _ in range(4):
        docker_services_by_nodes = local_salt_client.cmd(
            tgt='I@gerrit:client',
            param='docker service ls',
            expr_form='compound')
        wrong_items = []
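        # The REPLICAS column of `docker service ls` looks like "0/1"
        # (running/expected), so comparing the characters just before and
        # after '/' checks actual vs. expected replicas. Note this simple
        # check assumes single-digit counts; a sturdier variant (sketch)
        # would be `docker service ls --format "{{.Name}} {{.Replicas}}"`
        # and splitting the second field on '/'.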
        for line in docker_services_by_nodes[
                list(docker_services_by_nodes.keys())[0]].split('\n'):
            if line[line.find('/') - 1] != line[line.find('/') + 1] \
                    and 'replicated' in line:
                wrong_items.append(line)
        if len(wrong_items) == 0:
            break
        else:
            logging.error('''Some DriveTrain services don't have the expected number of replicas:
            {}\n'''.format(json.dumps(wrong_items, indent=4)))
            time.sleep(5)
    assert len(wrong_items) == 0


@pytest.mark.full
def test_drivetrain_components_and_versions(local_salt_client, check_cicd):
    """
    1. Execute `docker service ls --format "{{.Image}}"` on the
       'I@gerrit:client' target
    2. Execute `salt -C 'I@gerrit:client' pillar.get docker:client:images`
    3. Check that the list of images from step 1 is the same as the list
       from step 2
    4. Check that all docker services have a label that equals mcp_version
    """
    config = utils.get_configuration()
    if not config['drivetrain_version']:
        expected_version = \
            local_salt_client.pillar_get(param='_param:mcp_version') or \
            local_salt_client.pillar_get(param='_param:apt_mk_version')
        if not expected_version:
            pytest.skip("drivetrain_version is not defined. Skipping")
    else:
        expected_version = config['drivetrain_version']
    table_with_docker_services = local_salt_client.cmd(
        tgt='I@gerrit:client',
        param='docker service ls --format "{{.Image}}"',
        expr_form='compound')
    expected_images = local_salt_client.pillar_get(
        tgt='gerrit:client',
        param='docker:client:images')
    mismatch = {}
    actual_images = {}
    # Build an {image name: tag} mapping from the live docker services
    for image in set(table_with_docker_services[
            list(table_with_docker_services.keys())[0]].split('\n')):
        actual_images[image.split(":")[0]] = image.split(":")[-1]
    for image in set(expected_images):
        im_name = image.split(":")[0]
        if im_name not in actual_images:
            mismatch[im_name] = 'not found on env'
        elif image.split(":")[-1] != actual_images[im_name]:
            mismatch[im_name] = 'has {actual} version instead of {expected}'.format(
                actual=actual_images[im_name], expected=image.split(":")[-1])
    assert len(mismatch) == 0, \
        '''Some DriveTrain components do not have expected versions:
        {}'''.format(json.dumps(mismatch, indent=4))


@pytest.mark.full
def test_jenkins_jobs_branch(local_salt_client, check_cicd):
    """ This test compares Jenkins job versions
        collected from the cloud with those defined in pillars.
    """
    excludes = ['upgrade-mcp-release', 'deploy-update-salt',
                'git-mirror-downstream-mk-pipelines',
                'git-mirror-downstream-pipeline-library']

    config = utils.get_configuration()
    drivetrain_version = config.get('drivetrain_version', '')
    jenkins_password = get_password(local_salt_client, 'jenkins:client')
    version_mismatch = []
    server = join_to_jenkins(local_salt_client, 'admin', jenkins_password)
    for job_instance in server.get_jobs():
        job_name = job_instance.get('name')
        if job_name in excludes:
            continue

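        # For reference, the job's config.xml carries the branch in a
        # git-plugin fragment roughly like this (the exact nesting can
        # vary with the plugin version):
        #   <hudson.plugins.git.BranchSpec>
        #     <name>release/2019.2.0</name>
        #   </hudson.plugins.git.BranchSpec>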
        job_config = server.get_job_config(job_name)
        xml_data = minidom.parseString(job_config)
        BranchSpec = xml_data.getElementsByTagName(
            'hudson.plugins.git.BranchSpec')

        # Use the master branch of pipeline-library for the 'testing',
        # 'stable' and 'nightly' versions; leave the 'proposed' version
        # as is; in other cases expect release/{drivetrain_version}
        # (e.g. release/2019.2.0)
        if drivetrain_version in ['testing', 'nightly', 'stable']:
            expected_version = 'master'
        else:
            expected_version = local_salt_client.pillar_get(
                tgt='gerrit:client',
                param='jenkins:client:job:{}:scm:branch'.format(job_name))

        if not BranchSpec:
            logging.debug("No BranchSpec was found for the {} job".format(job_name))
            continue

        actual_version = BranchSpec[0].getElementsByTagName(
            'name')[0].childNodes[0].data
        if expected_version and actual_version not in expected_version:
            version_mismatch.append("Job {0} has branch {1}, "
                                    "expected {2}".format(job_name,
                                                          actual_version,
                                                          expected_version))
    assert len(version_mismatch) == 0, \
        '''Some DriveTrain jobs have version/branch mismatch:
        {}'''.format(json.dumps(version_mismatch, indent=4))


@pytest.mark.full
def test_drivetrain_jenkins_job(local_salt_client, check_cicd):
    """
    # Log in to Jenkins on jenkins:client
    # Read the test job name from the 'jenkins_test_job' configuration option
    # Start the job
    # Wait until the job completes
    # Check that the job finished with the "SUCCESS" result
    """
    job_result = None

    jenkins_password = get_password(local_salt_client, 'jenkins:client')
    server = join_to_jenkins(local_salt_client, 'admin', jenkins_password)
    # Get the Jenkins test job name from the configuration
    config = utils.get_configuration()
    jenkins_test_job = config['jenkins_test_job']
    if not server.get_job_name(jenkins_test_job):
        server.create_job(jenkins_test_job, jenkins.EMPTY_CONFIG_XML)
    if server.get_job_name(jenkins_test_job):
        next_build_num = server.get_job_info(jenkins_test_job)['nextBuildNumber']
        # If this is the first build number, skip the running-build check
        if next_build_num != 1:
            # Check that the test job is not running at the moment,
            # otherwise skip the test
            last_build_num = server.get_job_info(
                jenkins_test_job)['lastBuild'].get('number')
            last_build_status = server.get_build_info(
                jenkins_test_job, last_build_num)['building']
            if last_build_status:
                pytest.skip("Test job {0} is already running".format(
                    jenkins_test_job))
        server.build_job(jenkins_test_job)
        timeout = 0
        # Job status is True by default to bridge the gap between
        # triggering the build and the build actually starting
        job_status = True
        while job_status and (timeout < 180):
            time.sleep(10)
            timeout += 10
            job_status = server.get_build_info(jenkins_test_job,
                                               next_build_num)['building']
        job_result = server.get_build_info(jenkins_test_job,
                                           next_build_num)['result']
    else:
        pytest.skip("The job {0} was not found".format(jenkins_test_job))
    assert job_result == 'SUCCESS', \
        '''Test job '{0}' build was not successful or timeout is too small
        '''.format(jenkins_test_job)


@pytest.mark.smoke
# ??
def test_kdt_all_pods_are_available(local_salt_client, check_kdt):
    """
    # Run `kubectl get pods -n drivetrain` on the kdt nodes to get the
      status of each pod
    # Check that each pod reports all of its containers ready in the
      READY column
    """
    # 'L@' builds a Salt list matcher from the kdt node names
    pods_statuses_output = local_salt_client.cmd_any(
        tgt='L@' + ','.join(check_kdt),
        param='kubectl get pods -n drivetrain | awk {\'print $1"; "$2\'} | column -t',
        expr_form='compound')

    assert pods_statuses_output != "/bin/sh: 1: kubectl: not found", \
        "Nodes {} don't have kubectl".format(check_kdt)
    # Convert the string to a list and drop the header row with column names
    pods_statuses = pods_statuses_output.split('\n')
    pods_statuses = pods_statuses[1:]
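    # Each remaining line looks like "pod-name; 1/1"; READY is
    # "<ready containers>/<total containers>", so a pod is healthy only
    # when the two numbers match.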

    report_with_errors = ""
    for pod_status in pods_statuses:
        pod, status = pod_status.split('; ')
        actual_replica, expected_replica = status.split('/')

        if actual_replica.strip() != expected_replica.strip():
            report_with_errors += ("Pod [{pod}] doesn't have all containers. "
                                   "Expected {expected} containers, "
                                   "actual {actual}\n").format(
                                       pod=pod,
                                       expected=expected_replica,
                                       actual=actual_replica)
    assert report_with_errors == "", \
        "\n{sep}{kubectl_output}{sep} \n\n {report} ".format(
            sep="\n" + "-" * 20 + "\n",
            kubectl_output=pods_statuses_output,
            report=report_with_errors
        )

@pytest.mark.smoke
# ??
def test_kfg_all_pods_are_available(local_salt_client, check_kfg):
    """
    # Run `kubectl get pods -n drivetrain` on the cfg node to get the
      status of each pod
    # Check that each pod reports all of its containers ready in the
      READY column
    """
    # TODO: collapse the similar pod checks into one test and add a new fixture
    pods_statuses_output = local_salt_client.cmd_any(
        tgt='L@' + ','.join(check_kfg),
        param='kubectl get pods -n drivetrain | awk {\'print $1"; "$2\'} | column -t',
        expr_form='compound')
    # Convert the string to a list and drop the header row with column names
    pods_statuses = pods_statuses_output.split('\n')
    pods_statuses = pods_statuses[1:]

    report_with_errors = ""
    for pod_status in pods_statuses:
        pod, status = pod_status.split('; ')
        actual_replica, expected_replica = status.split('/')

        if actual_replica.strip() != expected_replica.strip():
            report_with_errors += ("Pod [{pod}] doesn't have all containers. "
                                   "Expected {expected} containers, "
                                   "actual {actual}\n").format(
                                       pod=pod,
                                       expected=expected_replica,
                                       actual=actual_replica)
    assert report_with_errors == "", \
        "\n{sep}{kubectl_output}{sep} \n\n {report} ".format(
            sep="\n" + "-" * 20 + "\n",
            kubectl_output=pods_statuses_output,
            report=report_with_errors
        )