import jenkins
from xml.dom import minidom
import utils
import json
import pytest
import time
import os
from pygerrit2 import GerritRestAPI, HTTPBasicAuth
from requests import HTTPError
import git
import ldap
import ldap.modlist as modlist
import logging

def join_to_gerrit(local_salt_client, gerrit_user, gerrit_password):
    gerrit_port = local_salt_client.pillar_get(
        tgt='I@gerrit:client and not I@salt:master',
        param='_param:haproxy_gerrit_bind_port',
        expr_form='compound')
    gerrit_address = local_salt_client.pillar_get(
        tgt='I@gerrit:client and not I@salt:master',
        param='_param:haproxy_gerrit_bind_host',
        expr_form='compound')
    url = 'http://{0}:{1}'.format(gerrit_address, gerrit_port)
    auth = HTTPBasicAuth(gerrit_user, gerrit_password)
    rest = GerritRestAPI(url=url, auth=auth)
    return rest


def join_to_jenkins(local_salt_client, jenkins_user, jenkins_password):
    jenkins_port = local_salt_client.pillar_get(
        tgt='I@jenkins:client and not I@salt:master',
        param='_param:haproxy_jenkins_bind_port',
        expr_form='compound')
    jenkins_address = local_salt_client.pillar_get(
        tgt='I@jenkins:client and not I@salt:master',
        param='_param:haproxy_jenkins_bind_host',
        expr_form='compound')
    jenkins_url = 'http://{0}:{1}'.format(jenkins_address, jenkins_port)
    server = jenkins.Jenkins(jenkins_url, username=jenkins_user, password=jenkins_password)
    return server


def get_password(local_salt_client, service):
    password = local_salt_client.pillar_get(
        tgt=service,
        param='_param:openldap_admin_password')
    return password


@pytest.mark.full
def test_drivetrain_gerrit(local_salt_client, check_cicd):
    gerrit_password = get_password(local_salt_client, 'gerrit:client')
    gerrit_error = ''
    current_date = time.strftime("%Y%m%d-%H.%M.%S", time.localtime())
    test_proj_name = "test-dt-{0}".format(current_date)
    gerrit_port = local_salt_client.pillar_get(
        tgt='I@gerrit:client and not I@salt:master',
        param='_param:haproxy_gerrit_bind_port',
        expr_form='compound')
    gerrit_address = local_salt_client.pillar_get(
        tgt='I@gerrit:client and not I@salt:master',
        param='_param:haproxy_gerrit_bind_host',
        expr_form='compound')
    try:
        # Connect to Gerrit and check the connection
        server = join_to_gerrit(local_salt_client, 'admin', gerrit_password)
        gerrit_check = server.get("/changes/?q=owner:self%20status:open")
        # Check for the deleteproject plugin and skip the test if it is not installed
        gerrit_plugins = server.get("/plugins/?all")
        if 'deleteproject' not in gerrit_plugins:
            pytest.skip("Delete-project plugin is not installed")
        # Create a test project and add a description
        server.put("/projects/" + test_proj_name)
        server.put("/projects/" + test_proj_name + "/description",
                   json={"description": "Test DriveTrain project",
                         "commit_message": "Update the project description"})
    except HTTPError as e:
        gerrit_error = e
    try:
        # Create a test folder and init a git repo in it
        repo_dir = os.path.join(os.getcwd(), test_proj_name)
        file_name = os.path.join(repo_dir, current_date)
        repo = git.Repo.init(repo_dir)
        # Add a remote url for this git repo
        origin = repo.create_remote('origin', 'http://admin:{1}@{2}:{3}/{0}.git'.format(
            test_proj_name, gerrit_password, gerrit_address, gerrit_port))
        # Add the commit-msg hook to automatically add a Change-Id to the commit
        os.system("curl -Lo {0}/.git/hooks/commit-msg 'http://admin:{1}@{2}:{3}/tools/hooks/commit-msg' > /dev/null 2>&1".format(
            repo_dir, gerrit_password, gerrit_address, gerrit_port))
        os.system("chmod u+x {0}/.git/hooks/commit-msg".format(repo_dir))
        # Create a test file
        f = open(file_name, 'w+')
        f.write("This is a test file for DriveTrain test")
        f.close()
        # Add the file to git and commit it to Gerrit for review
        repo.index.add([file_name])
        repo.index.commit("This is a test commit for DriveTrain test")
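        # Pushing to Gerrit's magic ref 'refs/for/master' creates an open
        # review (a change) instead of updating the branch directly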
        repo.git.push("origin", "HEAD:refs/for/master")
        # Get the change id from Gerrit, set Code-Review +2 and submit the change
        changes = server.get("/changes/?q=project:{0}".format(test_proj_name))
        last_change = changes[0].get('change_id')
        server.post("/changes/{0}/revisions/1/review".format(last_change),
                    json={"message": "All is good", "labels": {"Code-Review": "+2"}})
        server.post("/changes/{0}/submit".format(last_change))
    except HTTPError as e:
        gerrit_error = e
    finally:
        # Delete the test project
        server.post("/projects/" + test_proj_name + "/deleteproject~delete")
    assert gerrit_error == '', \
        'Something is wrong with Gerrit: {0}'.format(gerrit_error)


@pytest.mark.full
def test_drivetrain_openldap(local_salt_client, check_cicd):
    """
    1. Create a test user 'DT_test_user' in OpenLDAP
    2. Add the user to the admin group
    3. Log in to Jenkins with this user
    4. Check that no error occurred
    5. Add the user to the devops group in Gerrit and then log in to Gerrit
       using the test_user credentials
    6. Start a job in Jenkins as this user
    7. Get info from Gerrit as this user
    8. Finally, delete the user from the admin
       group and from OpenLDAP
    """

    # TODO: split into several test cases, one check per test method; make the login process a fixture
    ldap_password = get_password(local_salt_client, 'openldap:client')
    # Check that ldap_password exists, otherwise skip the test
    if not ldap_password:
        pytest.skip("Openldap service or openldap:client pillar "
                    "is not found on this environment.")
    ldap_port = local_salt_client.pillar_get(
        tgt='I@openldap:client and not I@salt:master',
        param='_param:haproxy_openldap_bind_port',
        expr_form='compound')
    ldap_address = local_salt_client.pillar_get(
        tgt='I@openldap:client and not I@salt:master',
        param='_param:haproxy_openldap_bind_host',
        expr_form='compound')
    ldap_dc = local_salt_client.pillar_get(
        tgt='openldap:client',
        param='_param:openldap_dn')
    ldap_con_admin = local_salt_client.pillar_get(
        tgt='openldap:client',
        param='openldap:client:server:auth:user')
    ldap_url = 'ldap://{0}:{1}'.format(ldap_address, ldap_port)
    ldap_error = ''
    ldap_result = ''
    gerrit_result = ''
    gerrit_error = ''
    jenkins_error = ''
    # Test user's CN
    test_user_name = 'DT_test_user'
    test_user = 'cn={0},ou=people,{1}'.format(test_user_name, ldap_dc)
    # Admins group CN
    admin_gr_dn = 'cn=admins,ou=groups,{0}'.format(ldap_dc)
    # List of attributes for the test user
    attrs = {}
    attrs['objectclass'] = ['organizationalRole', 'simpleSecurityObject', 'shadowAccount']
    attrs['cn'] = test_user_name
    attrs['uid'] = test_user_name
    attrs['userPassword'] = 'aSecretPassw'
    attrs['description'] = 'Test user for CVP DT test'
    searchFilter = 'cn={0}'.format(test_user_name)
    # Get a test job name from the config
    config = utils.get_configuration()
    jenkins_cvp_job = config['jenkins_cvp_job']
    # Open a connection to LDAP and create the test user in the admins group
    try:
        ldap_server = ldap.initialize(ldap_url)
        ldap_server.simple_bind_s(ldap_con_admin, ldap_password)
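        # modlist.addModlist() converts the attrs dict into the list of
        # (attr, values) tuples that python-ldap's add_s() expects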
        ldif = modlist.addModlist(attrs)
        ldap_server.add_s(test_user, ldif)
        ldap_server.modify_s(admin_gr_dn, [(ldap.MOD_ADD, 'memberUid', [test_user_name],)],)
        # Search for the test user in LDAP
        searchScope = ldap.SCOPE_SUBTREE
        ldap_result = ldap_server.search_s(ldap_dc, searchScope, searchFilter)
    except ldap.LDAPError as e:
        ldap_error = e
    try:
        # Check the connection between Jenkins and LDAP
        jenkins_server = join_to_jenkins(local_salt_client, test_user_name, 'aSecretPassw')
        jenkins_version = jenkins_server.get_job_name(jenkins_cvp_job)
        # Check the connection between Gerrit and LDAP
        gerrit_server = join_to_gerrit(local_salt_client, 'admin', ldap_password)
        gerrit_check = gerrit_server.get("/changes/?q=owner:self%20status:open")
        # Add the test user to the devops-contrib group in Gerrit and check the login
        _link = "/groups/devops-contrib/members/{0}".format(test_user_name)
        gerrit_add_user = gerrit_server.put(_link)
        gerrit_server = join_to_gerrit(local_salt_client, test_user_name, 'aSecretPassw')
        gerrit_result = gerrit_server.get("/changes/?q=owner:self%20status:open")
    except HTTPError as e:
        gerrit_error = e
    except jenkins.JenkinsException as e:
        jenkins_error = e
    finally:
        ldap_server.modify_s(admin_gr_dn, [(ldap.MOD_DELETE, 'memberUid', [test_user_name],)],)
        ldap_server.delete_s(test_user)
        ldap_server.unbind_s()
    assert ldap_error == '', \
        '''Something is wrong with connection to LDAP:
        {0}'''.format(ldap_error)
    assert jenkins_error == '', \
        '''Connection to Jenkins was not established:
        {0}'''.format(jenkins_error)
    assert gerrit_error == '', \
        '''Connection to Gerrit was not established:
        {0}'''.format(gerrit_error)
    assert ldap_result != [], \
        '''Test user was not found'''


@pytest.mark.sl_dup
# DockerService***Outage
@pytest.mark.full
def test_drivetrain_services_replicas(local_salt_client, check_cicd):
    """
    # Execute `salt -C 'I@gerrit:client' cmd.run 'docker service ls'` to get info for each docker service, e.g.:
    "x5nzktxsdlm6 jenkins_slave02 replicated 0/1 docker-prod-local.artifactory.mirantis.com/mirantis/cicd/jnlp-slave:2019.2.0 "
    # Check that each service has all of its replicas
    """
    # TODO: replace with the rerunfailures plugin
    wrong_items = []
    for _ in range(4):
        docker_services_by_nodes = local_salt_client.cmd(
            tgt='I@gerrit:client',
            param='docker service ls',
            expr_form='compound')
        wrong_items = []
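        # The REPLICAS column looks like '1/1'; comparing the characters right
        # before and after '/' checks the actual vs. the desired replica count
        # (this assumes single-digit replica counts)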
        for line in docker_services_by_nodes[docker_services_by_nodes.keys()[0]].split('\n'):
            if line[line.find('/') - 1] != line[line.find('/') + 1] \
                    and 'replicated' in line:
                wrong_items.append(line)
        if len(wrong_items) == 0:
            break
        else:
            logging.error('''Some DriveTrain services don't have the expected number of replicas:
                          {}\n'''.format(json.dumps(wrong_items, indent=4)))
            time.sleep(5)
    assert len(wrong_items) == 0


@pytest.mark.full
def test_drivetrain_components_and_versions(local_salt_client, check_cicd):
    """
    1. Execute `docker service ls --format "{{.Image}}"` on the 'I@gerrit:client' target
    2. Execute `salt -C 'I@gerrit:client' pillar.get docker:client:images`
    3. Check that the list of images from step 1 is the same as the list from step 2
    4. Check that all docker services have a label that equals mcp_version
    """
    def get_name(long_name):
        return long_name.rsplit(':', 1)[0]

    def get_tag(long_name):
        return long_name.rsplit(':', 1)[-1]

    table_with_docker_services = local_salt_client.cmd(
        tgt='I@gerrit:client',
        param='docker service ls --format "{{.Image}}"',
        expr_form='compound')
    expected_images = local_salt_client.pillar_get(
        tgt='gerrit:client',
        param='docker:client:images')
    mismatch = {}
    actual_images = {}
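    # Build a {image name: tag} map from the services that are actually running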
    for image in set(table_with_docker_services[table_with_docker_services.keys()[0]].split('\n')):
        actual_images[get_name(image)] = get_tag(image)
    for image in set(expected_images):
        im_name = get_name(image)
        if im_name not in actual_images:
            mismatch[im_name] = 'not found on env'
        elif get_tag(image) != actual_images[im_name]:
            mismatch[im_name] = 'has {actual} version instead of {expected}'.format(
                actual=actual_images[im_name], expected=get_tag(image))
    assert len(mismatch) == 0, \
        '''Some DriveTrain components do not have expected versions:
        {}'''.format(json.dumps(mismatch, indent=4))


@pytest.mark.full
def test_jenkins_jobs_branch(local_salt_client, check_cicd):
    """ This test compares the Jenkins job versions
    collected from the cloud vs collected from pillars.
    """
    excludes = ['upgrade-mcp-release', 'deploy-update-salt',
                'git-mirror-downstream-mk-pipelines',
                'git-mirror-downstream-pipeline-library']

    config = utils.get_configuration()
    drivetrain_version = config.get('drivetrain_version', '')
    jenkins_password = get_password(local_salt_client, 'jenkins:client')
    version_mismatch = []
    server = join_to_jenkins(local_salt_client, 'admin', jenkins_password)
    for job_instance in server.get_jobs():
        job_name = job_instance.get('name')
        if job_name in excludes:
            continue

        job_config = server.get_job_config(job_name)
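        # The job's config.xml is parsed to read the git plugin's BranchSpec
        # element, which holds the branch the job is configured to build from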
        xml_data = minidom.parseString(job_config)
        BranchSpec = xml_data.getElementsByTagName('hudson.plugins.git.BranchSpec')

        # Use the master branch for pipeline-library for the 'testing', 'stable' and 'nightly' versions.
        # Leave the proposed version as is.
        # In other cases use release/{drivetrain_version} (e.g. release/2019.2.0)
        if drivetrain_version in ['testing', 'nightly', 'stable']:
            expected_version = 'master'
        else:
            expected_version = local_salt_client.pillar_get(
                tgt='gerrit:client',
                param='jenkins:client:job:{}:scm:branch'.format(job_name))

        if not BranchSpec:
            logging.debug("No BranchSpec was found for the {} job".format(job_name))
            continue

        actual_version = BranchSpec[0].getElementsByTagName('name')[0].childNodes[0].data
        if expected_version and actual_version not in expected_version:
            version_mismatch.append("Job {0} has the {1} branch. "
                                    "Expected {2}".format(job_name,
                                                          actual_version,
                                                          expected_version))
    assert len(version_mismatch) == 0, \
        '''Some DriveTrain jobs have version/branch mismatch:
        {}'''.format(json.dumps(version_mismatch, indent=4))


@pytest.mark.full
def test_drivetrain_jenkins_job(local_salt_client, check_cicd):
    """
    # Log in to Jenkins on jenkins:client
    # Read the job name from the 'jenkins_test_job' configuration option
    # Start the job
    # Wait until the job has completed
    # Check that the job has completed with the "SUCCESS" result
    """
    job_result = None

    jenkins_password = get_password(local_salt_client, 'jenkins:client')
    server = join_to_jenkins(local_salt_client, 'admin', jenkins_password)
    # Get the Jenkins test job name from the configuration
    config = utils.get_configuration()
    jenkins_test_job = config['jenkins_test_job']
    if not server.get_job_name(jenkins_test_job):
        server.create_job(jenkins_test_job, jenkins.EMPTY_CONFIG_XML)
    if server.get_job_name(jenkins_test_job):
        next_build_num = server.get_job_info(jenkins_test_job)['nextBuildNumber']
        # If this is the first build number, skip the 'already running' check
        if next_build_num != 1:
            # Check that the test job is not running at the moment,
            # otherwise skip the test
            last_build_num = server.get_job_info(jenkins_test_job)['lastBuild'].get('number')
            last_build_status = server.get_build_info(jenkins_test_job, last_build_num)['building']
            if last_build_status:
                pytest.skip("Test job {0} is already running".format(jenkins_test_job))
        server.build_job(jenkins_test_job)
        timeout = 0
        # Start with job_status = True to cover the gap between triggering
        # the build and Jenkins actually starting it
        job_status = True
        while job_status and (timeout < 180):
            time.sleep(10)
            timeout += 10
            job_status = server.get_build_info(jenkins_test_job, next_build_num)['building']
        job_result = server.get_build_info(jenkins_test_job, next_build_num)['result']
    else:
        pytest.skip("The job {0} was not found".format(jenkins_test_job))
    assert job_result == 'SUCCESS', \
        '''Test job '{0}' build was not successful or the timeout is too small
        '''.format(jenkins_test_job)


@pytest.mark.smoke
# ??
def test_kdt_all_pods_are_available(local_salt_client, check_kdt):
    """
    # Run `kubectl get pods -n drivetrain` on the kdt nodes to get the status of each pod
    # Check that each pod has a fulfilled status in the READY column
    """
    pods_statuses_output = local_salt_client.cmd_any(
        tgt='L@' + ','.join(check_kdt),
        param='kubectl get pods -n drivetrain | awk {\'print $1"; "$2\'} | column -t',
        expr_form='compound')

    assert pods_statuses_output != "/bin/sh: 1: kubectl: not found", \
        "Nodes {} don't have kubectl".format(check_kdt)
    # Convert the string to a list and remove the first row with column names
    pods_statuses = pods_statuses_output.split('\n')
    pods_statuses = pods_statuses[1:]

    report_with_errors = ""
    for pod_status in pods_statuses:
        pod, status = pod_status.split('; ')
        actual_replica, expected_replica = status.split('/')

        if actual_replica.strip() != expected_replica.strip():
            report_with_errors += "Pod [{pod}] doesn't have all containers. Expected {expected} containers, actual {actual}\n".format(
                pod=pod,
                expected=expected_replica,
                actual=actual_replica
            )
    assert report_with_errors == "", \
        "\n{sep}{kubectl_output}{sep} \n\n {report} ".format(
            sep="\n" + "-" * 20 + "\n",
            kubectl_output=pods_statuses_output,
            report=report_with_errors
        )

@pytest.mark.smoke
# ??
def test_kfg_all_pods_are_available(local_salt_client, check_kfg):
    """
    # Run `kubectl get pods -n drivetrain` on the cfg node to get the status of each pod
    # Check that each pod has a fulfilled status in the READY column
    """
    # TODO: collapse similar pod-check tests into one and add a new fixture
    pods_statuses_output = local_salt_client.cmd_any(
        tgt='L@' + ','.join(check_kfg),
        param='kubectl get pods -n drivetrain | awk {\'print $1"; "$2\'} | column -t',
        expr_form='compound')
    # Convert the string to a list and remove the first row with column names
    pods_statuses = pods_statuses_output.split('\n')
    pods_statuses = pods_statuses[1:]

    report_with_errors = ""
    for pod_status in pods_statuses:
        pod, status = pod_status.split('; ')
        actual_replica, expected_replica = status.split('/')

        # A pod is reported only when the actual container count differs from the expected one
        if actual_replica.strip() != expected_replica.strip():
            report_with_errors += "Pod [{pod}] doesn't have all containers. Expected {expected} containers, actual {actual}\n".format(
                pod=pod,
                expected=expected_replica,
                actual=actual_replica
            )
    assert report_with_errors == "", \
        "\n{sep}{kubectl_output}{sep} \n\n {report} ".format(
            sep="\n" + "-" * 20 + "\n",
            kubectl_output=pods_statuses_output,
            report=report_with_errors
        )