Add logging for salt-api requests/responses
Change-Id: I0dd73c60cc4364ce5962ddb2520390ad3ee32f06
Closes-Task:#PROD-29994(PROD:29994)
diff --git a/test_set/cvp-sanity/README.md b/test_set/cvp-sanity/README.md
index 3b36f6f..7f0b4ec 100644
--- a/test_set/cvp-sanity/README.md
+++ b/test_set/cvp-sanity/README.md
@@ -4,7 +4,8 @@
This is salt-based set of tests for basic verification of MCP deployments
How to start
-=======================
+---
+*Target: test engineers*
1) Clone repo to any node (node must have an access via http to salt master):
```bash
@@ -22,7 +23,7 @@
# sudo python setup.py install
```
or
-1```bash
+```bash
# apt-get install python-virtualenv
```
@@ -63,6 +64,7 @@
```bash
# pytest -sv cvp-sanity/tests/ --ignore cvp-sanity/tests/test_mtu.py
```
+
CVP-sanity-checks supports tags (labels/marks/sets). As of now we have smoke,
full and sl_dup sets. Smoke will run essential tests only, full set runs all
tests and sl_dup is a special set that collects the tests that already exist in
@@ -71,3 +73,44 @@
```bash
# pytest -v -m smoke
```
+
+Logging
+---
+*Target: test developers*
+
+To keep logging in the cvp-sanity-tests module consistent, follow these recommendations:
++ Do not use **print** statements
+
++ Log every message that must appear in stdout with level logging.warning or higher
+```python
+import logging
+
+def test_t1():
+ logging.warning("Alert: Houston we have a problem!")
+ logging.error("We've lost a booster!")
+ logging.critical("Everything is broken!")
+```
+These messages will then be displayed in the pytest output for failed tests.
+
++ Use logging.info and logging.debug for routine messages, e.g. request/response dumps or the state of some variables
+```python
+import logging
+
+def test_t2():
+    node = "cmp001"  # hypothetical node name, used only for illustration
+    logging.info("Skip {node} in this test".format(node=node))
+    logging.debug("Some function was called")
+```
+These messages are written to the log file only. You can force them into stdout by calling pytest with the following parameter:
+```bash
+pytest --log-level=DEBUG
+```
+
+Logging to file
+---
+
+By default, full.log is created in the current working directory.
+You can choose the folder where the log is stored with the PYTEST_REPORT_DIR environment variable.
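+For example (the path below is only an illustration; the folder must exist and be writable):
+```bash
+# PYTEST_REPORT_DIR=/var/log/cvp-sanity pytest -m smoke
+```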
+
+List of available environment variables
+===
+todo
\ No newline at end of file
diff --git a/test_set/cvp-sanity/conftest.py b/test_set/cvp-sanity/conftest.py
index 7c85d62..72e0359 100644
--- a/test_set/cvp-sanity/conftest.py
+++ b/test_set/cvp-sanity/conftest.py
@@ -1,4 +1,5 @@
from fixtures.base import *
+import logging
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
@@ -20,10 +21,10 @@
return
try:
if request.node.rep_setup.failed:
- print("setup failed. The following steps were attempted: \n {steps}".format(steps=request.node.description))
+ logging.warning("setup failed. The following steps were attempted: \n {steps}".format(steps=request.node.description))
elif request.node.rep_setup.passed:
if request.node.rep_call.failed:
- print("test execution failed! The following steps were attempted: \n {steps}".format(steps=request.node.description))
+ logging.warning("test execution failed! The following steps were attempted: \n {steps}".format(steps=request.node.description))
except BaseException as e:
- print("Error in show_test_steps fixture: {}".format(e))
+ logging.info("Error in show_test_steps fixture: {}".format(e))
pass
diff --git a/test_set/cvp-sanity/fixtures/base.py b/test_set/cvp-sanity/fixtures/base.py
index 8e3b130..d1dbfe5 100644
--- a/test_set/cvp-sanity/fixtures/base.py
+++ b/test_set/cvp-sanity/fixtures/base.py
@@ -1,12 +1,31 @@
+import os
import pytest
import atexit
import utils
+import logging
+
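+# Write every record of level DEBUG and above to full.log; PYTEST_REPORT_DIR (if set) defines the target folder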
+logging.basicConfig(
+ filename="{dir}/full.log".format(
+        dir=os.environ.get("PYTEST_REPORT_DIR") or '.'
+ ),
+ level=logging.DEBUG,
+ format='[%(asctime)-15s] [%(funcName)s:%(lineno)s] %(message)s'
+)
+
+
+@pytest.fixture(autouse=True)
+def add_testname_to_saltapi_logs(request):
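+    """Log a separator with the current test name so salt-api requests/responses in full.log can be matched to a test."""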
+ logging.info("\n{sep}\n {testname} \n{sep}\n".format(
+ sep="*"*100,
+ testname=request.node.name
+ ))
@pytest.fixture(scope='session')
def local_salt_client():
return utils.init_salt_client()
+
nodes = utils.calculate_groups()
@@ -174,9 +193,9 @@
report_text += template.format(node, *[item.split("=")[1] for item in data])
def write_report():
- print(report_text)
+ logging.info(report_text)
atexit.register(write_report)
yield
except Exception as e:
- print("print_node_version:: some error occurred: {}".format(e))
+ logging.info("print_node_version:: some error occurred: {}".format(e))
yield
diff --git a/test_set/cvp-sanity/pytest.ini b/test_set/cvp-sanity/pytest.ini
index 7d6dde9..3e970a4 100644
--- a/test_set/cvp-sanity/pytest.ini
+++ b/test_set/cvp-sanity/pytest.ini
@@ -1,3 +1,6 @@
[pytest]
norecursedirs = venv
-addopts = -vv --tb=short
\ No newline at end of file
+addopts = -vv --tb=short
+
+log_level = WARNING
+log_format = %(message)s
diff --git a/test_set/cvp-sanity/requirements.txt b/test_set/cvp-sanity/requirements.txt
index ad0b58a..b45a29e 100644
--- a/test_set/cvp-sanity/requirements.txt
+++ b/test_set/cvp-sanity/requirements.txt
@@ -1,4 +1,4 @@
-pytest==3.0.6
+pytest==3.7.1
requests>=2.20.0
flake8
PyYAML
@@ -6,3 +6,4 @@
pygerrit2==2.0.6
gitpython
python-ldap
+
diff --git a/test_set/cvp-sanity/tests/test_cinder_services.py b/test_set/cvp-sanity/tests/test_cinder_services.py
index 4a6c45e..a612bca 100644
--- a/test_set/cvp-sanity/tests/test_cinder_services.py
+++ b/test_set/cvp-sanity/tests/test_cinder_services.py
@@ -1,4 +1,5 @@
import pytest
+import logging
@pytest.mark.sl_dup
@@ -28,8 +29,6 @@
cinder_volume = local_salt_client.cmd_any(
tgt='keystone:server',
param='. /root/keystonercv3; cinder service-list | grep "volume" |grep -c -v -e "lvm"')
- print(backends_cinder)
- print(cinder_volume)
backends_num = len(backends_cinder.keys())
assert cinder_volume == str(backends_num), \
'Number of cinder-volume services ({0}) does not match ' \
diff --git a/test_set/cvp-sanity/tests/test_drivetrain.py b/test_set/cvp-sanity/tests/test_drivetrain.py
index 05ea831..1b71ad0 100644
--- a/test_set/cvp-sanity/tests/test_drivetrain.py
+++ b/test_set/cvp-sanity/tests/test_drivetrain.py
@@ -10,7 +10,7 @@
import git
import ldap
import ldap.modlist as modlist
-
+import logging
def join_to_gerrit(local_salt_client, gerrit_user, gerrit_password):
gerrit_port = local_salt_client.pillar_get(
@@ -73,7 +73,7 @@
#Create test project and add description
server.put("/projects/"+test_proj_name)
server.put("/projects/"+test_proj_name+"/description",json={"description":"Test DriveTrain project","commit_message": "Update the project description"})
- except HTTPError, e:
+ except HTTPError as e:
gerrit_error = e
try:
#Create test folder and init git
@@ -98,7 +98,7 @@
last_change = changes[0].get('change_id')
server.post("/changes/{0}/revisions/1/review".format(last_change),json={"message": "All is good","labels":{"Code-Review":"+2"}})
server.post("/changes/{0}/submit".format(last_change))
- except HTTPError, e:
+ except HTTPError as e:
gerrit_error = e
finally:
#Delete test project
@@ -174,7 +174,7 @@
#Check search test user in LDAP
searchScope = ldap.SCOPE_SUBTREE
ldap_result = ldap_server.search_s(ldap_dc, searchScope, searchFilter)
- except ldap.LDAPError, e:
+ except ldap.LDAPError as e:
ldap_error = e
try:
#Check connection between Jenkins and LDAP
@@ -188,9 +188,9 @@
gerrit_add_user = gerrit_server.put(_link)
gerrit_server = join_to_gerrit(local_salt_client,test_user_name,'aSecretPassw')
gerrit_result = gerrit_server.get("/changes/?q=owner:self%20status:open")
- except HTTPError, e:
+ except HTTPError as e:
gerrit_error = e
- except jenkins.JenkinsException, e:
+ except jenkins.JenkinsException as e:
jenkins_error = e
finally:
ldap_server.modify_s(admin_gr_dn,[(ldap.MOD_DELETE, 'memberUid', [test_user_name],)],)
@@ -233,7 +233,7 @@
if len(wrong_items) == 0:
break
else:
- print('''Some DriveTrain services doesn't have expected number of replicas:
+                logging.error('''Some DriveTrain services don't have the expected number of replicas:
{}\n'''.format(json.dumps(wrong_items, indent=4)))
time.sleep(5)
assert len(wrong_items) == 0
@@ -312,7 +312,7 @@
param='jenkins:client:job:{}:scm:branch'.format(job_name))
if not BranchSpec:
- print("No BranchSpec has found for {} job".format(job_name))
+ logging.debug("No BranchSpec has found for {} job".format(job_name))
continue
actual_version = BranchSpec[0].getElementsByTagName('name')[0].childNodes[0].data
@@ -400,8 +400,6 @@
expected=expected_replica,
actual=actual_replica
)
-
- print report_with_errors
assert report_with_errors == "", \
"\n{sep}{kubectl_output}{sep} \n\n {report} ".format(
sep="\n" + "-"*20 + "\n",
@@ -437,8 +435,6 @@
expected=expected_replica,
actual=actual_replica
)
-
- print report_with_errors
assert report_with_errors != "", \
"\n{sep}{kubectl_output}{sep} \n\n {report} ".format(
sep="\n" + "-" * 20 + "\n",
diff --git a/test_set/cvp-sanity/tests/test_duplicate_ips.py b/test_set/cvp-sanity/tests/test_duplicate_ips.py
index bbd532e..392838f 100644
--- a/test_set/cvp-sanity/tests/test_duplicate_ips.py
+++ b/test_set/cvp-sanity/tests/test_duplicate_ips.py
@@ -2,6 +2,7 @@
from pprint import pformat
import os
import pytest
+import logging
import utils
@@ -32,7 +33,7 @@
for node in nodes:
if isinstance(nodes[node], bool):
# TODO: do not skip node
- print ("{} node is skipped".format(node))
+ logging.info("{} node is skipped".format(node))
continue
for iface in nodes[node]['ip4_interfaces']:
# Omit 'ip-less' ifaces
diff --git a/test_set/cvp-sanity/tests/test_k8s.py b/test_set/cvp-sanity/tests/test_k8s.py
index f04066e..d3455c3 100644
--- a/test_set/cvp-sanity/tests/test_k8s.py
+++ b/test_set/cvp-sanity/tests/test_k8s.py
@@ -1,6 +1,7 @@
import pytest
import json
import os
+import logging
def test_k8s_get_cs_status(local_salt_client):
@@ -138,9 +139,9 @@
hostname = 'https://docker-dev-virtual.docker.mirantis.net/artifactory/webapp/'
response = os.system('curl -s --insecure {} > /dev/null'.format(hostname))
if response == 0:
- print '{} is AVAILABLE'.format(hostname)
+ logging.info('{} is AVAILABLE'.format(hostname))
else:
- print '{} IS NOT AVAILABLE'.format(hostname)
+ logging.error('{} IS NOT AVAILABLE'.format(hostname))
def test_k8s_dashboard_available(local_salt_client):
diff --git a/test_set/cvp-sanity/tests/test_mtu.py b/test_set/cvp-sanity/tests/test_mtu.py
index b215c1d..01d6642 100644
--- a/test_set/cvp-sanity/tests/test_mtu.py
+++ b/test_set/cvp-sanity/tests/test_mtu.py
@@ -2,6 +2,7 @@
import json
import utils
import os
+import logging
@pytest.mark.full
@@ -23,7 +24,7 @@
for node, ifaces_info in network_info.iteritems():
if isinstance(ifaces_info, bool):
- print ("{} node is skipped".format(node))
+ logging.info("{} node is skipped".format(node))
continue
if node in kvm_nodes:
kvm_info = local_salt_client.cmd(tgt=node,
diff --git a/test_set/cvp-sanity/tests/test_ntp_sync.py b/test_set/cvp-sanity/tests/test_ntp_sync.py
index bf51290..6751f1a 100644
--- a/test_set/cvp-sanity/tests/test_ntp_sync.py
+++ b/test_set/cvp-sanity/tests/test_ntp_sync.py
@@ -1,10 +1,11 @@
import json
import utils
import pytest
+import logging
@pytest.mark.smoke
-#move to sl?
+# move to sl?
def test_ntp_sync(local_salt_client):
"""Test checks that system time is the same across all nodes"""
@@ -40,7 +41,7 @@
sys_peer_declared = False
if not state[node]:
# TODO: do not skip
- print ("Node {} is skipped".format(node))
+ logging.warning("Node {} is skipped".format(node))
continue
ntpq_output = state[node].split('\n')
# if output has no 'remote' in the head of ntpq output
diff --git a/test_set/cvp-sanity/tests/test_packet_checker.py b/test_set/cvp-sanity/tests/test_packet_checker.py
index 786fba9..382576a 100644
--- a/test_set/cvp-sanity/tests/test_packet_checker.py
+++ b/test_set/cvp-sanity/tests/test_packet_checker.py
@@ -1,6 +1,7 @@
import pytest
import json
import utils
+import logging
@pytest.mark.full
@@ -23,7 +24,7 @@
for node in total_nodes:
if not packages_versions[node]:
# TODO: do not skip node
- print "Node {} is skipped".format (node)
+ logging.warning("Node {} is skipped".format(node))
continue
nodes.append(node)
packages_names.update(packages_versions[node].keys())
diff --git a/test_set/cvp-sanity/tests/test_repo_list.py b/test_set/cvp-sanity/tests/test_repo_list.py
index 1ba255c..a864b2c 100644
--- a/test_set/cvp-sanity/tests/test_repo_list.py
+++ b/test_set/cvp-sanity/tests/test_repo_list.py
@@ -1,4 +1,5 @@
import pytest
+import logging
@pytest.mark.full
@@ -15,7 +16,7 @@
repos = info_salt[node]
if not info_salt[node]:
# TODO: do not skip node
- print "Node {} is skipped".format (node)
+ logging.warning("Node {} is skipped".format(node))
continue
for repo in repos.keys():
repository = repos[repo]
@@ -60,4 +61,4 @@
"Several problems found: {0}".format(
json.dumps(diff, indent=4))
if fail_counter == 0 and len(diff) > 0:
- print "\nWarning: nodes contain more repos than reclass"
+ logging.warning("\nWarning: nodes contain more repos than reclass")
diff --git a/test_set/cvp-sanity/tests/test_salt_master.py b/test_set/cvp-sanity/tests/test_salt_master.py
index f0d6499..f2d7f1a 100644
--- a/test_set/cvp-sanity/tests/test_salt_master.py
+++ b/test_set/cvp-sanity/tests/test_salt_master.py
@@ -1,5 +1,6 @@
import json
import pytest
+import logging
@pytest.mark.full
@@ -36,7 +37,7 @@
expr_form='pillar', check_status=True).values()[0]
reclass_warnings = reclass[reclass.keys()[0]].split('{\n "base":')[0]
if reclass_warnings:
- print "\nReclass-salt output has warnings"
+ logging.warning("\nReclass-salt output has warnings: {}".format(reclass_warnings))
reclass_nodes = reclass[reclass.keys()[0]].split('{\n "base":')[1]
assert reclass_nodes != '', 'No nodes were found in' \
' reclass-salt --top output'
diff --git a/test_set/cvp-sanity/tests/test_services.py b/test_set/cvp-sanity/tests/test_services.py
index 224c207..fac31e5 100644
--- a/test_set/cvp-sanity/tests/test_services.py
+++ b/test_set/cvp-sanity/tests/test_services.py
@@ -1,6 +1,7 @@
import pytest
import json
import utils
+import logging
# Some nodes can have services that are not applicable for other nodes in similar group.
# For example , there are 3 node in kvm group, but just kvm03 node has srv-volumes-backup.mount service
@@ -30,7 +31,7 @@
for node in services_by_nodes:
if not services_by_nodes[node]:
# TODO: do not skip node
- print "Node {} is skipped".format (node)
+ logging.info("Node {} is skipped".format (node))
continue
nodes.append(node)
all_services.update(services_by_nodes[node])
diff --git a/test_set/cvp-sanity/utils/__init__.py b/test_set/cvp-sanity/utils/__init__.py
index 62ccae7..06329fa 100644
--- a/test_set/cvp-sanity/utils/__init__.py
+++ b/test_set/cvp-sanity/utils/__init__.py
@@ -4,6 +4,9 @@
import re
import sys, traceback
import time
+import json
+import pytest
+import logging
class AuthenticationError(Exception):
@@ -17,7 +20,7 @@
self.url = self.config['SALT_URL'].strip()
if not re.match("^(http|https)://", self.url):
raise AuthenticationError("Salt URL should start \
- with http or https, given - {}".format(url))
+ with http or https, given - {}".format(self.url))
self.login_payload = {'username': self.config['SALT_USERNAME'],
'password': self.config['SALT_PASSWORD'], 'eauth': 'pam'}
# TODO: proxies
@@ -36,11 +39,11 @@
if not login_request.ok:
raise AuthenticationError("Authentication to SaltMaster failed")
except Exception as e:
- print ("\033[91m\nConnection to SaltMaster "
- "was not established.\n"
- "Please make sure that you "
- "provided correct credentials.\n"
- "Error message: {}\033[0m\n".format(e.message or e))
+ logging.warning("\033[91m\nConnection to SaltMaster "
+ "was not established.\n"
+ "Please make sure that you "
+ "provided correct credentials.\n"
+ "Error message: {}\033[0m\n".format(e.message or e))
traceback.print_exc(file=sys.stdout)
sys.exit()
self.expire = login_request.json()['return'][0]['expire']
@@ -57,20 +60,28 @@
accept_key_payload['arg'] = param
for i in range(retries):
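+            # Log the outgoing salt-api request payload; the response is logged right after the call below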
+ logging.info("="*100)
+ logging.info("Send Request: {}".format(json.dumps(
+ accept_key_payload,
+ indent=4,
+ sort_keys=True)))
request = requests.post(self.url, headers=self.headers,
data=accept_key_payload,
cookies=self.cookies,
proxies=self.proxies)
+ logging.info("-"*100)
+ logging.info("Response: {}".format(json.dumps(
+ request.json(),
+ indent=4
+ )))
if not request.ok or not isinstance(request.json()['return'][0], dict):
- print("Salt master is not responding or response is incorrect. Output: {}".format(request))
+ logging.warning("Salt master is not responding or response is incorrect. Output: {}".format(request))
continue
response = request.json()['return'][0]
result = {key: response[key] for key in response if key not in self.skipped_nodes}
-            if check_status:
-                if False in result.values():
-                    print(
-                        "One or several nodes are not responding. Output {}".format(json.dumps(result, indent=4)))
-                    continue
+            if check_status and False in result.values():
+                logging.warning("One or several nodes are not responding. Output {}".format(json.dumps(result, indent=4)))
+                continue
break
else:
raise Exception("Error with Salt Master response")
@@ -124,8 +135,7 @@
node_groups = {}
nodes_names = set ()
expr_form = ''
- all_nodes = set(local_salt_client.test_ping(tgt='*',expr_form=None))
- print all_nodes
+ all_nodes = set(local_salt_client.test_ping(tgt='*', expr_form=None))
if 'groups' in config.keys() and 'PB_GROUPS' in os.environ.keys() and \
os.environ['PB_GROUPS'].lower() != 'false':
nodes_names.update(config['groups'].keys())
@@ -140,11 +150,11 @@
expr_form = 'pcre'
gluster_nodes = local_salt_client.test_ping(tgt='I@salt:control and '
- 'I@glusterfs:server',
- expr_form='compound')
+ 'I@glusterfs:server',
+ expr_form='compound')
kvm_nodes = local_salt_client.test_ping(tgt='I@salt:control and not '
- 'I@glusterfs:server',
- expr_form='compound')
+ 'I@glusterfs:server',
+ expr_form='compound')
for node_name in nodes_names:
skipped_groups = config.get('skipped_groups') or []
@@ -152,17 +162,17 @@
continue
if expr_form == 'pcre':
nodes = local_salt_client.test_ping(tgt='{}[0-9]{{1,3}}'.format(node_name),
- expr_form=expr_form)
+ expr_form=expr_form)
else:
nodes = local_salt_client.test_ping(tgt=config['groups'][node_name],
- expr_form=expr_form)
+ expr_form=expr_form)
if nodes == {}:
continue
- node_groups[node_name]=[x for x in nodes
- if x not in config['skipped_nodes']
- if x not in gluster_nodes.keys()
- if x not in kvm_nodes.keys()]
+ node_groups[node_name] = [x for x in nodes
+ if x not in config['skipped_nodes']
+ if x not in gluster_nodes.keys()
+ if x not in kvm_nodes.keys()]
all_nodes = set(all_nodes - set(node_groups[node_name]))
if node_groups[node_name] == []:
del node_groups[node_name]
@@ -172,7 +182,7 @@
all_nodes = set(all_nodes - set(kvm_nodes.keys()))
all_nodes = set(all_nodes - set(gluster_nodes.keys()))
if all_nodes:
- print ("These nodes were not collected {0}. Check config (groups section)".format(all_nodes))
+ logging.info("These nodes were not collected {0}. Check config (groups section)".format(all_nodes))
return node_groups