Fix watchdog restart behaviour
* run_wd_keepalive option is required, so that it is cleanly caught by
  salt service reload/restart.
  It is set to 1 by default, at the service level (see the example below).
* Add the ability to configure ping/load parameters
* Remove kernel module manipulations:
  - Passing parameters to the module config without a reboot makes no sense.
  - Parameters should also be passed to sysfs, if needed.
  These functions should be handled via linux-formula.
* README updated.
* Misc: fix schema, update to the latest run_tests, fix .travis tests
* Remove the workaround for LP:1448924, since the fix has already been released.
Closes-Bug: PROD-19627
Change-Id: Ib8c380f178f7efd07c50557d0a81009f63992671
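
Example of the expected service-level result (a sketch; the
run_wd_keepalive variable comes from the Debian watchdog packaging,
and the exact rendering depends on the formula templates):

    # /etc/default/watchdog
    run_watchdog=1
    run_wd_keepalive=1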
diff --git a/tests/pillar/server.sls b/tests/pillar/server.sls
index 62b28c7..9cdce64 100644
--- a/tests/pillar/server.sls
+++ b/tests/pillar/server.sls
@@ -1,8 +1,10 @@
watchdog:
server:
enabled: true
- timeout: 60
- # These parametrs should create file in /etc/modprobe.d/kernel_module.conf with content "option module soft_panic=1 parameter=second value_only"
- kernel:
- parameter:
- nowayout: 0
+ timeout: 360
+ ping:
+ 1: 127.0.0.1
+ 2: 0.0.0.0
+ ping_nic:
+ 1: lo
+ max_load_1: 100
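
A sketch of what the pillar above is expected to render into
/etc/watchdog.conf (the directives are standard watchdog.conf options;
the exact pillar-to-directive mapping is an assumption about the
formula templates):

    # /etc/watchdog.conf
    watchdog-timeout = 360
    ping = 127.0.0.1
    ping = 0.0.0.0
    interface = lo
    max-load-1 = 100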
diff --git a/tests/pillar/server_kernel_module.sls b/tests/pillar/server_kernel_module.sls
index e7ff6cf..576a03d 100644
--- a/tests/pillar/server_kernel_module.sls
+++ b/tests/pillar/server_kernel_module.sls
@@ -1,10 +1,12 @@
watchdog:
server:
enabled: true
- timeout: 60
+ timeout: 360
+ module: softdog
+linux:
+ system:
kernel:
- parameter:
- soft_panic: 1
- parameter: second
- value_only: none
- module:
+ module:
+ softdog:
+ option:
+ soft_panic: 1
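
With module handling delegated to linux-formula, the pillar above is
expected to produce a standard modprobe.d entry rather than a
formula-managed file (the file name is an assumption):

    # /etc/modprobe.d/softdog.conf
    options softdog soft_panic=1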
diff --git a/tests/run_tests.sh b/tests/run_tests.sh
index d2d1221..a348912 100755
--- a/tests/run_tests.sh
+++ b/tests/run_tests.sh
@@ -28,6 +28,8 @@
SALT_OPTS="${SALT_OPTS} --retcode-passthrough --local -c ${SALT_CONFIG_DIR} --log-file=/dev/null"
+IGNORE_MODELVALIDATE_MASK=${IGNORE_MODELVALIDATE_MASK:-"novalidate"}
+
if [ "x${SALT_VERSION}" != "x" ]; then
PIP_SALT_VERSION="==${SALT_VERSION}"
fi
@@ -47,9 +49,8 @@
virtualenv $VENV_DIR
source ${VENV_DIR}/bin/activate
python -m pip install salt${PIP_SALT_VERSION}
- python -m pip install jsonschema
- if [[ -f ${CURDIR}/pip_requirements.txt ]]; then
- python -m pip install -r ${CURDIR}/pip_requirements.txt
+ if [[ -f ${CURDIR}/test-requirements.txt ]]; then
+ python -m pip install -r ${CURDIR}/test-requirements.txt
fi
}
@@ -155,19 +156,24 @@
}
prepare() {
- [ -d ${BUILDDIR} ] && mkdir -p ${BUILDDIR}
+ if [[ -f ${BUILDDIR}/.prepare_done ]]; then
+ log_info "${BUILDDIR}/.prepare_done exists, not rebuilding BUILDDIR"
+ return
+ fi
+ [[ ! -d ${BUILDDIR} ]] && mkdir -p ${BUILDDIR}
[[ ! -f "${VENV_DIR}/bin/activate" ]] && setup_virtualenv
setup_mock_bin
setup_pillar
setup_salt
install_dependencies
+ link_modules
+ touch ${BUILDDIR}/.prepare_done
}
lint_releasenotes() {
[[ ! -f "${VENV_DIR}/bin/activate" ]] && setup_virtualenv
source ${VENV_DIR}/bin/activate
- python -m pip install reno
reno lint ${CURDIR}/../
}
@@ -204,22 +210,30 @@
}
run_model_validate(){
- if [ -d ${SCHEMARDIR} ]; then
- # model validator require py modules
- fetch_dependency "salt:https://github.com/salt-formulas/salt-formula-salt"
- link_modules
- # Rendered Example:
- # python $(which salt-call) --local -c /test1/maas/tests/build/salt --id=maas_cluster modelschema.model_validate maas cluster
- for role in ${SCHEMARDIR}/*.yaml; do
- state_name=$(basename "${role%*.yaml}")
- minion_id="${state_name}"
- # in case debug-reruns, usefull to make cleanup
- [ -n "$DEBUG" ] && { salt_run saltutil.clear_cache; salt_run saltutil.refresh_pillar; salt_run saltutil.sync_all; }
- salt_run -m ${DEPSDIR}/salt-formula-salt --id=${minion_id} modelschema.model_validate ${FORMULA_NAME} ${state_name} || { log_err "Execution of ${FORMULA_NAME}.${state_name} failed"; exit 1 ; }
+ # Run modelschema.model_validate validation.
+ # The test is iterable: each formula ROLE is run against each ROLE_PILLARNAME.
+ # Pillars should be named following the convention ROLE_XXX.sls or ROLE.sls.
+ # Example:
+ # client.sls client_auth.sls server.sls server_auth.sls
+ if [ -d ${SCHEMARDIR} ]; then
+ # the model validator requires py modules
+ fetch_dependency "salt:https://github.com/salt-formulas/salt-formula-salt"
+ link_modules
+ salt_run saltutil.clear_cache; salt_run saltutil.refresh_pillar; salt_run saltutil.sync_all;
+ for role in ${SCHEMARDIR}/*.yaml; do
+ role_name=$(basename "${role%*.yaml}")
+ for pillar in $(ls pillar/${role_name}*.sls | grep -v ${IGNORE_MODELVALIDATE_MASK} ); do
+ pillar_name=$(basename "${pillar%*.sls}")
+ local _message="FORMULA:${FORMULA_NAME} ROLE:${role_name} against PILLAR:${pillar_name}"
+ log_info "model_validate ${_message}"
+ # Rendered Example:
+ # python $(which salt-call) --local -c /test1/maas/tests/build/salt --id=maas_cluster modelschema.model_validate maas cluster
+ salt_run -m ${DEPSDIR}/salt-formula-salt --id=${pillar_name} modelschema.model_validate ${FORMULA_NAME} ${role_name} || { log_err "Execution of model_validate ${_message} failed"; exit 1 ; }
done
- else
- log_err "${SCHEMARDIR} not found!";
- fi
+ done
+ else
+ log_info "${SCHEMARDIR} not found!";
+ fi
}
dependency_check() {
diff --git a/tests/test-requirements.txt b/tests/test-requirements.txt
new file mode 100644
index 0000000..a0f561a
--- /dev/null
+++ b/tests/test-requirements.txt
@@ -0,0 +1,2 @@
+jsonschema
+reno
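
During model validation, pillars whose names match
IGNORE_MODELVALIDATE_MASK are skipped; e.g. a hypothetical
pillar/server_novalidate.sls would not be validated. The mask can be
overridden from the environment (invocation sketch):

    IGNORE_MODELVALIDATE_MASK="skipme" tests/run_tests.sh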