Require existing cicd nodes in drivetrain tests
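
Drivetrain tests now request session-scoped fixtures that skip them when
no matching cicd (or kdt) nodes answer test.ping, so runs on clusters
without Drivetrain are reported as skipped rather than failed.
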
Related-Task: #PROD-28514(PROD:28514)
Change-Id: I95268fae93cb1fe0eed5276468d0e8e1512c92d2
diff --git a/test_set/cvp-sanity/fixtures/base.py b/test_set/cvp-sanity/fixtures/base.py
index 0b83260..cb90bc5 100644
--- a/test_set/cvp-sanity/fixtures/base.py
+++ b/test_set/cvp-sanity/fixtures/base.py
@@ -122,6 +122,28 @@
pytest.contrail = str(versions.pop())[:1]
+@pytest.fixture(scope='session')
+def check_kdt(local_salt_client):
+ kdt_nodes_available = local_salt_client.cmd(
+ "I@gerrit:client and I@kubernetes:pool",
+ "test.ping",
+ expr_form='compound'
+ )
+ if not kdt_nodes_available:
+ pytest.skip("No 'kdt' nodes found. Skipping this test...")
+
+
+@pytest.fixture(scope='session')
+def check_cicd(local_salt_client):
+ cicd_nodes_available = local_salt_client.cmd(
+ "I@gerrit:client and I@docker:swarm",
+ "test.ping",
+ expr_form='compound'
+ )
+ if not cicd_nodes_available:
+ pytest.skip("No 'cid' nodes found. Skipping this test...")
+
+
@pytest.fixture(autouse=True, scope='session')
def print_node_version(local_salt_client):
"""
diff --git a/test_set/cvp-sanity/pytest.ini b/test_set/cvp-sanity/pytest.ini
index 32f15a2..121300d 100644
--- a/test_set/cvp-sanity/pytest.ini
+++ b/test_set/cvp-sanity/pytest.ini
@@ -1,2 +1,3 @@
[pytest]
-norecursedirs = venv
\ No newline at end of file
+norecursedirs = venv
+addopts = -vv
\ No newline at end of file
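With addopts = -vv, pytest appends the flag to every invocation, so a bare
`pytest` run behaves like `pytest -vv` and prints one PASSED/FAILED/SKIPPED
line per test; the skips introduced by the new fixtures are therefore visible
in the default output.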
diff --git a/test_set/cvp-sanity/tests/test_drivetrain.py b/test_set/cvp-sanity/tests/test_drivetrain.py
index 640d6a7..94f45ff 100644
--- a/test_set/cvp-sanity/tests/test_drivetrain.py
+++ b/test_set/cvp-sanity/tests/test_drivetrain.py
@@ -54,7 +54,7 @@
return password
-def test_drivetrain_gerrit(local_salt_client):
+def test_drivetrain_gerrit(local_salt_client, check_cicd):
gerrit_password = get_password(local_salt_client,'gerrit:client')
gerrit_error = ''
current_date = time.strftime("%Y%m%d-%H.%M.%S", time.localtime())
@@ -103,7 +103,7 @@
# Get change id from Gerrit. Set Code-Review +2 and submit this change
changes = server.get("/changes/?q=project:{0}".format(test_proj_name))
last_change = changes[0].get('change_id')
- server.post("/changes/{0}/revisions/1/review".format(last_change),json={"message":"All is good","labels":{"Code-Review":"+2"}})
+ server.post("/changes/{0}/revisions/1/review".format(last_change),json={"message": "All is good","labels":{"Code-Review":"+2"}})
server.post("/changes/{0}/submit".format(last_change))
except HTTPError, e:
gerrit_error = e
@@ -114,7 +114,7 @@
'Something is wrong with Gerrit'.format(gerrit_error)
-def test_drivetrain_openldap(local_salt_client):
+def test_drivetrain_openldap(local_salt_client, check_cicd):
"""
1. Create a test user 'DT_test_user' in openldap
2. Add the user to admin group
@@ -249,7 +249,7 @@
assert len(wrong_items) == 0
-def test_drivetrain_components_and_versions(local_salt_client):
+def test_drivetrain_components_and_versions(local_salt_client, check_cicd):
"""
1. Execute the command `docker service ls --format "{{.Image}}"` on the 'I@gerrit:client' target
2. Execute `salt -C 'I@gerrit:client' pillar.get docker:client:images`
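The two docstring steps map onto salt calls; a sketch using the
local_salt_client API seen elsewhere in this suite (variable names are
illustrative):

    # Images actually deployed in the swarm vs. images expected by pillar.
    actual_images = local_salt_client.cmd(
        'I@gerrit:client',
        'cmd.run',
        ['docker service ls --format "{{.Image}}"'],
        expr_form='compound')
    expected_images = local_salt_client.cmd(
        'I@gerrit:client',
        'pillar.get',
        ['docker:client:images'],
        expr_form='compound')
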
@@ -348,7 +348,7 @@
{}'''.format(json.dumps(version_mismatch, indent=4))
-def test_drivetrain_jenkins_job(local_salt_client):
+def test_drivetrain_jenkins_job(local_salt_client, check_cicd):
"""
# Log in to Jenkins on jenkins:client
# Read the job names from the 'jenkins_test_job' configuration
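The login step can be sketched with the python-jenkins client, reusing
get_password() from this module; the Jenkins URL and the configuration lookup
below are assumptions, not taken from this diff:

    import jenkins

    jenkins_password = get_password(local_salt_client, 'jenkins:client')
    # The URL and username are hypothetical placeholders.
    server = jenkins.Jenkins('http://cid-host:8081',
                             username='admin',
                             password=jenkins_password)
    job_name = config.get('jenkins_test_job')  # hypothetical config lookup
    server.build_job(job_name)
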
diff --git a/test_set/cvp-sanity/tests/test_k8s.py b/test_set/cvp-sanity/tests/test_k8s.py
index 2212025..5b905c9 100644
--- a/test_set/cvp-sanity/tests/test_k8s.py
+++ b/test_set/cvp-sanity/tests/test_k8s.py
@@ -143,6 +143,13 @@
def test_k8s_dashboard_available(local_salt_client):
+ """
+ # Check whether Kubernetes is enabled on the cluster with the command `salt -C 'etcd:server' cmd.run 'kubectl get svc -n kube-system'`
+ # If yes, check the Dashboard addon with the command: `salt -C 'etcd:server' pillar.get kubernetes:common:addons:dashboard:enabled`
+ # If the dashboard is enabled, get its IP from the pillar `salt -C 'etcd:server' pillar.get kubernetes:common:addons:dashboard:public_ip`
+ # Check that public_ip exists
+ # Check that public_ip:8443 is accessible with curl
+ """
result = local_salt_client.cmd(
'etcd:server', 'cmd.run',
['kubectl get svc -n kube-system'],
@@ -166,6 +173,7 @@
expr_form='pillar'
).values()[0]
+ assert len(external_ip) > 0, "Kubernetes dashboard is enabled, but its public_ip is not defined in pillars"
# dashboard port 8443 is hardcoded in kubernetes formula
url = "https://{}:8443".format(external_ip)
check = local_salt_client.cmd(
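The hunk ends mid-call; the curl step from the docstring plausibly continues
along these lines (the curl flags and assertion message are assumptions,
mirroring the cmd.run pattern above):

    check = local_salt_client.cmd(
        'etcd:server',
        'cmd.run',
        ['curl -k {} 2>&1'.format(url)],
        expr_form='pillar')
    assert check.values()[0], \
        'Kubernetes dashboard at {} is not reachable'.format(url)
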
diff --git a/test_set/cvp-sanity/tests/test_ui_addresses.py b/test_set/cvp-sanity/tests/test_ui_addresses.py
index a2f0f2d..53bb03b 100644
--- a/test_set/cvp-sanity/tests/test_ui_addresses.py
+++ b/test_set/cvp-sanity/tests/test_ui_addresses.py
@@ -204,7 +204,7 @@
@pytest.mark.usefixtures('check_drivetrain')
-def test_public_ui_jenkins(local_salt_client, ctl_nodes_pillar):
+def test_public_ui_jenkins(local_salt_client, ctl_nodes_pillar, check_cicd):
IP = utils.get_monitoring_ip('cluster_public_host')
protocol = 'https'
port = '8081'
@@ -220,7 +220,7 @@
@pytest.mark.usefixtures('check_drivetrain')
-def test_public_ui_gerrit(local_salt_client, ctl_nodes_pillar):
+def test_public_ui_gerrit(local_salt_client, ctl_nodes_pillar, check_cicd):
IP = utils.get_monitoring_ip('cluster_public_host')
protocol = 'https'
port = '8070'
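Both UI tests presumably finish with a reachability check over the assembled
URL; a sketch following the curl-via-salt pattern used in this suite (the
assertion text is an assumption, since the hunk ends before it):

    url = '{}://{}:{}'.format(protocol, IP, port)
    result = local_salt_client.cmd(
        ctl_nodes_pillar,
        'cmd.run',
        ['curl -k {} 2>&1'.format(url)],
        expr_form='pillar')
    assert result.values()[0], \
        'Public Gerrit UI at {} is not reachable'.format(url)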