Merge "Remove spans from jenkins jobs output PROD-37154"
diff --git a/jobs/global.yaml b/jobs/global.yaml
index bd0c060..5281a99 100755
--- a/jobs/global.yaml
+++ b/jobs/global.yaml
@@ -10,8 +10,8 @@
<!--- INSERT ALERT HERE ^^^^^^^ --->
</font></h1>
- current-version: 2019.2.24
- previous-version: 2019.2.23
- disabled-proposed: true
- disabled-2019-2-0: false
- upgrade-to-tag: true
+ current-version: 2019.2.25
+ previous-version: 2019.2.24
+ disabled-proposed: false
+ disabled-2019-2-0: true
+ upgrade-to-tag: false
diff --git a/jobs/pipelines/cookied-model-generator.sh b/jobs/pipelines/cookied-model-generator.sh
index bd998b0..8774552 100644
--- a/jobs/pipelines/cookied-model-generator.sh
+++ b/jobs/pipelines/cookied-model-generator.sh
@@ -1,7 +1,8 @@
#!/bin/bash
# export REMOTE_JENKINS='172.19.112.216'
-set -eo pipefail;
-export REMOTE_JENKINS='sre-ci.scc.mirantis.net'
+set -eox pipefail;
+# Temporary fallback to the global SCRIPT_JENKINS_IP while migrating jenkins
+export REMOTE_JENKINS="${SCRIPT_JENKINS_IP:-sre-ci.scc.mirantis.net}"
if [ ! -n "$LAB_CONTEXT_NAME" ]; then
echo 'LAB_CONTEXT_NAME is not set!
diff --git a/jobs/pipelines/self-deploy-jobs.groovy b/jobs/pipelines/self-deploy-jobs.groovy
index 2d048be..3d73b5a 100644
--- a/jobs/pipelines/self-deploy-jobs.groovy
+++ b/jobs/pipelines/self-deploy-jobs.groovy
@@ -23,6 +23,7 @@
cp jobs/config/sre-jenkins-job-builder-example.ini ${tmp_jenkins_config}
sed -i "s/user=some_user/user=\${juser}/g" ${tmp_jenkins_config}
sed -i "s/password=some_password/password=\${jpass}/g" ${tmp_jenkins_config}
+ sed -i "s|url=.*|url=${env.JENKINS_URL}|" ${tmp_jenkins_config}
"""
}
}
diff --git a/jobs/pipelines/swarm-bootstrap-salt-cluster-heat.groovy b/jobs/pipelines/swarm-bootstrap-salt-cluster-heat.groovy
index db375e6..7086240 100644
--- a/jobs/pipelines/swarm-bootstrap-salt-cluster-heat.groovy
+++ b/jobs/pipelines/swarm-bootstrap-salt-cluster-heat.groovy
@@ -51,7 +51,7 @@
def ubuntu_vcp_image_name = "ubuntu-vcp-2019.2.0"
def ubuntu_foundation_image_name = "ubuntu-16.04-foundation-2019.2.0"
-timeout(time: 2, unit: 'HOURS') {
+timeout(time: 3, unit: 'HOURS') {
timestamps {
node ("${PARENT_NODE_NAME}") {
if (! fileExists("${PARENT_WORKSPACE}")) {
diff --git a/jobs/pipelines/swarm-create-cfg-config-drive.groovy b/jobs/pipelines/swarm-create-cfg-config-drive.groovy
index aff36df..b64e867 100644
--- a/jobs/pipelines/swarm-create-cfg-config-drive.groovy
+++ b/jobs/pipelines/swarm-create-cfg-config-drive.groovy
@@ -42,54 +42,52 @@
withCredentials([[$class: 'SSHUserPrivateKeyBinding',
keyFileVariable: "GERRIT_KEY",
credentialsId: env.GERRIT_MCP_CREDENTIALS_ID,
- usernameVariable: "GERRIT_USERNAME",
- passwordVariable: "GERRIT_PASSWORD"]]) {
+ usernameVariable: "GERRIT_USERNAME"]]) {
def gerrit_user = env.GERRIT_USERNAME
- def gerrit_password = env.GERRIT_PASSWORD
String gerrit_host = "gerrit.mcp.mirantis.com"
String gerrit_port = "29418"
- ssh.prepareSshAgentKey(env.GERRIT_MCP_CREDENTIALS_ID)
-
- ssh.agentSh(
- "set -ex; " +
- "git clone ssh://${gerrit_user}@${gerrit_host}:${gerrit_port}/mcp/mcp-common-scripts mcp-common-scripts-git; " +
- "git clone --mirror ssh://${gerrit_user}@${gerrit_host}:${gerrit_port}/mk/mk-pipelines mk-pipelines; " +
- "git clone --mirror ssh://${gerrit_user}@${gerrit_host}:${gerrit_port}/mcp-ci/pipeline-library pipeline-library")
-
- if (COMMON_SCRIPTS_COMMIT != '') {
- sh ("""\
- set -ex
- cd mcp-common-scripts-git
- git checkout ${COMMON_SCRIPTS_COMMIT}
- git log -1
- """)
- }
-
- if (MCP_COMMON_SCRIPTS_REFS != '') {
- ssh.agentSh(
+ sshagent([env.GERRIT_MCP_CREDENTIALS_ID]) {
+ sh (
"set -ex; " +
- "cd mcp-common-scripts-git; " +
- "git fetch https://${gerrit_host}/mcp/mcp-common-scripts ${MCP_COMMON_SCRIPTS_REFS} && git checkout FETCH_HEAD; " +
- "git log -1")
- }
+ "git clone ssh://${gerrit_user}@${gerrit_host}:${gerrit_port}/mcp/mcp-common-scripts mcp-common-scripts-git; " +
+ "git clone --mirror ssh://${gerrit_user}@${gerrit_host}:${gerrit_port}/mk/mk-pipelines mk-pipelines; " +
+ "git clone --mirror ssh://${gerrit_user}@${gerrit_host}:${gerrit_port}/mcp-ci/pipeline-library pipeline-library")
- if (PIPELINE_LIBRARY_REF != '') {
- ssh.agentSh(
- "set -ex; " +
- "cd pipeline-library; " +
- "git fetch https://${gerrit_host}/mcp-ci/pipeline-library ${PIPELINE_LIBRARY_REF}; " +
- "git tag ${MCP_VERSION} FETCH_HEAD -f; " +
- "git branch -f release/${MCP_VERSION} FETCH_HEAD; " +
- "git log -1")
- }
- if (MK_PIPELINES_REF != '') {
- ssh.agentSh(
- "set -ex; " +
- "cd mk-pipelines; " +
- "git fetch https://${gerrit_host}/mk/mk-pipelines ${MK_PIPELINES_REF}; " +
- "git tag ${MCP_VERSION} FETCH_HEAD -f; " +
- "git branch -f release/${MCP_VERSION} FETCH_HEAD; " +
- "git log -1")
+ if (COMMON_SCRIPTS_COMMIT != '') {
+ sh ("""\
+ set -ex
+ cd mcp-common-scripts-git
+ git checkout ${COMMON_SCRIPTS_COMMIT}
+ git log -1
+ """)
+ }
+
+ if (MCP_COMMON_SCRIPTS_REFS != '') {
+ sh (
+ "set -ex; " +
+ "cd mcp-common-scripts-git; " +
+ "git fetch https://${gerrit_host}/mcp/mcp-common-scripts ${MCP_COMMON_SCRIPTS_REFS} && git checkout FETCH_HEAD; " +
+ "git log -1")
+ }
+
+ if (PIPELINE_LIBRARY_REF != '') {
+ sh (
+ "set -ex; " +
+ "cd pipeline-library; " +
+ "git fetch https://${gerrit_host}/mcp-ci/pipeline-library ${PIPELINE_LIBRARY_REF}; " +
+ "git tag ${MCP_VERSION} FETCH_HEAD -f; " +
+ "git branch -f release/${MCP_VERSION} FETCH_HEAD; " +
+ "git log -1")
+ }
+ if (MK_PIPELINES_REF != '') {
+ sh (
+ "set -ex; " +
+ "cd mk-pipelines; " +
+ "git fetch https://${gerrit_host}/mk/mk-pipelines ${MK_PIPELINES_REF}; " +
+ "git tag ${MCP_VERSION} FETCH_HEAD -f; " +
+ "git branch -f release/${MCP_VERSION} FETCH_HEAD; " +
+ "git log -1")
+ }
}
}
}
diff --git a/jobs/project.yaml b/jobs/project.yaml
index d47c7c3..f40d31b 100644
--- a/jobs/project.yaml
+++ b/jobs/project.yaml
@@ -69,4 +69,6 @@
# - testrail-reporter-ci - can't moved to JJB, unsupported parameters
# - testrail-reporter-gerrit-mcp-ci - can't moved to JJB, unsupported parameters
- release-artifact-checker
+ - cc_table_jjb
+ - packer-image-system.foundation
...
\ No newline at end of file
diff --git a/jobs/scripts/cc_table.sh b/jobs/scripts/cc_table.sh
new file mode 100644
index 0000000..4c5564f
--- /dev/null
+++ b/jobs/scripts/cc_table.sh
@@ -0,0 +1,197 @@
+#!/bin/bash
+set -x
+
+TITLE="Comparison of tcp-qa deployments"
+RIGHT_NOW=$(date +"%x %r %Z")
+TIME_STAMP="Updated on $RIGHT_NOW by $USER"
+
+list_of_envs=(
+ heat-cicd-pike-dvr-sl
+ heat-cicd-queens-contrail41-sl
+ heat-cicd-queens-dvr-sl
+ heat-cicd-pike-contrail-stb-sl
+ released-heat-cicd-pike-contrail41-sl
+ released-heat-cicd-pike-dvr-sl
+ released-heat-cicd-queens-contrail41-sl
+ released-heat-cicd-queens-dvr-sl
+ bm-cicd-pike-ovs-maas
+ bm-cicd-queens-ovs-maas
+ heat-bm-cicd-pike-contrail-sl
+ heat-bm-cicd-queens-contrail-sl
+ bm-e7-cicd-pike-ovs-maas
+ bm-e7-cicd-pike-odl-maas
+ bm-b300-cicd-queens-ovs-maas
+ bm-b300-e7-cicd-pike-ovs-maas
+)
+
+all_parameters=(
+ auditd_enabled
+ barbican_enabled
+ barbican_backend
+ barbican_integration_enabled
+ jenkins_slave_type
+ updates_mirantis_version
+ bmk_enabled
+ ceph_enabled
+ ceph_version
+ cicd_enabled
+ ceph_osd_backend
+ ceph_osd_mode
+ ceph_osd_data_disks
+ ceph_osd_journal_or_block_db_disks
+ ceph_osds_per_device
+ ceph_osd_data_size
+ ceph_osd_journal_size
+ ceph_osd_block_db_size
+ ceph_osd_dmcrypt
+ ceph_osd_node_count
+ ceph_osd_bond_mode
+ ceph_hyper_converged
+ rbd_monitoring_enabled
+ cinder_backup_engine
+ compute_padding_with_zeros
+ designate_enabled
+ designate_backend
+ fluentd_enabled
+ rsync_fernet_rotation
+ gainsight_service_enabled
+ galera_ssl_enabled
+ internal_proxy_enabled
+ ironic_enabled
+ kqueen_custom_mail_enabled
+ kqueen_enabled
+ kubernetes_enabled
+ maas_dhcp_enabled
+ maas_enabled
+ manila_enabled
+ mcp_version
+ nova_vnc_tls_enabled
+ octavia_manager_cluster
+ octavia_amphora_topology
+ octavia_spare_amphora_pool_size
+ opencontrail_api_ssl_enabled
+ opencontrail_enabled
+ opencontrail_version
+ openldap_enabled
+ openscap_enabled
+ openstack_enabled
+ openstack_cluster_size
+ openstack_mysql_x509_enabled
+ openstack_nfv_dpdk_enabled
+ openstack_nfv_sriov_enabled
+ openstack_network_engine
+ openstack_nova_compute_nfv_req_enabled
+ openstack_octavia_enabled
+ openstack_ovs_dvr_enabled
+ openstack_rabbitmq_x509_enabled
+ openstack_version
+ platform
+ rabbitmq_ssl_enabled
+ openstack_rabbitmq_standalone_mode
+ secrets_encryption_enabled
+ stacklight_enabled
+ stacklight_ssl_enabled
+ stacklight_version
+ static_ips_on_deploy_network_enabled
+ tenant_telemetry_enabled
+ upstream_proxy_enabled
+ version
+
+)
+
+function split_string
+{
+ str=$1
+
+ IFS=':' # set as delimiter
+    read -r id value <<< "$str" # split str on IFS: first field into id, remainder into value
+ unset IFS
+ echo ${value}
+}
+
+
+function collect_parameter
+{
+ env=$1
+ parameter_key=$2
+
+ key_value=$(grep -w $parameter_key: tcp_tests/templates/$env/salt-context-cookiecutter*.yaml);
+ echo $(split_string "$key_value")
+}
+
+function clean
+{
+ str=$1
+    # strip all characters except alphanumerics, underscores, and . / { } :
+ CLEAN=${str//[^a-zA-Z0-9_.\/\{\}:]/}
+ CLEAN=`echo -n $CLEAN | tr A-Z a-z`
+ echo $CLEAN
+}
+
+function html_color
+{
+ str=$(clean $1)
+ if [ "$str" == "False" -o "$str" == "false" ]; then
+ echo "bgcolor=\"#ff9999\"";
+ fi
+ if [ "$str" == "True" -o "$str" == "true" ]; then
+ echo "bgcolor=\"#99cc99\"";
+ fi
+
+}
+
+function unpack_parameters
+{
+ for param in ${all_parameters[@]}; do
+ echo "<tr>";
+ echo "<td>";
+ echo "$param"
+ echo "</td>";
+ for env in ${list_of_envs[@]}; do
+ value=$(collect_parameter "$env" "$param")
+ echo "<td $(html_color $value)>";
+ echo $(clean $value)
+ echo "</td>";
+ done;
+ echo "</tr>";
+ done;
+}
+
+function unpack_envs
+{
+
+ echo "<tr>";
+ echo "<th>"
+ echo "</th>"
+ for env in ${list_of_envs[@]}; do
+ echo "<th>";
+ echo "$env"
+ echo "</th>";
+ done;
+ echo "</tr>";
+
+}
+
+function write_page
+{
+ cat <<- _EOF_
+ <html>
+ <head>
+ <title>$TITLE</title>
+ </head>
+ <body>
+ <h1>$TITLE</h1>
+ <p>$TIME_STAMP</p>
+ <table border=1 style="border-collapse:collapse;border-spacing:0">
+ $(unpack_envs)
+ $(unpack_parameters)
+ </table>
+ </body>
+ </html>
+_EOF_
+}
+
+
+filename=report.html
+
+write_page > $filename
\ No newline at end of file
diff --git a/jobs/templates/cc_table.yml b/jobs/templates/cc_table.yml
new file mode 100644
index 0000000..d385fa5
--- /dev/null
+++ b/jobs/templates/cc_table.yml
@@ -0,0 +1,22 @@
+- job-template:
+ project-type: freestyle
+ description: '{job-description}'
+ concurrent: false
+ disabled: false
+ name: cc_table_jjb
+
+ publishers:
+ - archive:
+ artifacts: 'report.html'
+ allow-empty: false
+ scm:
+ - git:
+ branches:
+ - '*/master'
+ url: https://gerrit.mcp.mirantis.com/mcp/tcp-qa
+ triggers: []
+ builders:
+ - shell:
+ !include-raw-escape: '../scripts/cc_table.sh'
+
+
diff --git a/jobs/templates/packer-image-system.foundation.yml b/jobs/templates/packer-image-system.foundation.yml
new file mode 100644
index 0000000..8cd2af0
--- /dev/null
+++ b/jobs/templates/packer-image-system.foundation.yml
@@ -0,0 +1,86 @@
+- job-template:
+ project-type: pipeline
+ description: '{job-description}'
+ concurrent: false
+ disabled: false
+ name: packer-image-system.foundation
+ parameters:
+ - string:
+ name: IMAGE_NAME
+ default: system.foundation
+ description: 'Name of the resulting qcow2 image'
+ trim: 'false'
+ - string:
+ name: BUILD_CONFIG_DRIVE_PATH
+ default: tcp_tests/templates/_packer/foundation/config-drive
+ description: 'Relative path in tcp-qa to the directory with meta-data and user-data files'
+ trim: 'false'
+ - string:
+ name: BUILD_PACKER_CONFIG_PATH
+ default: tcp_tests/templates/_packer/foundation/packer.json
+ description: 'Relative path in tcp-qa to the file with packer config (JSON)'
+ trim: 'false'
+ - string:
+ name: BASE_IMAGE_URL
+ default: http://cloud-images.ubuntu.com/releases/xenial/release-20180306/ubuntu-16.04-server-cloudimg-amd64-disk1.img
+ description: 'Base image to build a new image'
+ trim: 'false'
+ - string:
+ name: BASE_IMAGE_MD5
+ default: 566efef1d6f12e7d3a994c2405bdb642
+ description: 'Base image MD5 checksum'
+ trim: 'false'
+ - string:
+ name: PACKER_URL
+ default: https://releases.hashicorp.com/packer/1.4.1/packer_1.4.1_linux_amd64.zip
+ description: 'URL to the zip archive with packer binary, see https://releases.hashicorp.com/packer/'
+ trim: 'false'
+ - string:
+ name: PACKER_ZIP_MD5
+ default: 35cc6dd2a2b2e50e76090197d7c12a90
+ description: 'MD5 of the zip archive with packer binary'
+ trim: 'false'
+ - string:
+ name: OS_AUTH_URL
+ default: https://keystone.ic-eu.ssl.mirantis.net/v3
+ description: 'Openstack keystone catalog endpoint, for example https://10.90.0.80:5000/v3'
+ trim: 'false'
+ - string:
+ name: OS_PROJECT_NAME
+ default: sre-team
+ description: ''
+ trim: 'false'
+ - string:
+ name: OS_USER_DOMAIN_NAME
+ default: ldap_mirantis
+ description: 'OpenStack user domain name'
+ trim: 'false'
+ - string:
+ name: OS_CREDENTIALS
+ default: sre-qa-ci-eu
+ description: 'Jenkins credentials ID with username and password to create a heat stack in OpenStack'
+ trim: 'false'
+ - bool:
+ name: UPLOAD_IMAGE_TO_GLANCE
+ default: True
+ description: 'If True: upload image to glance; if False: store as an artifact'
+ trim: 'false'
+ - string:
+ name: TCP_QA_REFS
+ default:
+ description: |
+ Example: refs/changes/89/411189/36
+ (for now - only one reference allowed)
+ trim: 'false'
+
+ pipeline-scm:
+ lightweight-checkout: false
+ scm:
+ - git:
+ branches:
+ - FETCH_HEAD
+ refspec: ${{TCP_QA_REFS}}
+ url: https://gerrit.mcp.mirantis.com/mcp/tcp-qa
+ script-path: jobs/pipelines/packer-image-create.groovy
+ logrotate:
+ daysToKeep: 30
\ No newline at end of file
diff --git a/jobs/templates/self-deploy-jobs.yaml b/jobs/templates/self-deploy-jobs.yaml
index 5660557..74d3310 100644
--- a/jobs/templates/self-deploy-jobs.yaml
+++ b/jobs/templates/self-deploy-jobs.yaml
@@ -9,7 +9,7 @@
- string:
default: 'master'
description: 'tcp-qa review refspec'
- name: TCP_QA_REFS
+ name: GERRIT_REFSPEC
trim: 'true'
- bool:
name: DRY_RUN
@@ -34,7 +34,7 @@
- git:
branches:
- FETCH_HEAD
- refspec: '${{TCP_QA_REFS}}'
+ refspec: '${{GERRIT_REFSPEC}}'
url: https://gerrit.mcp.mirantis.com/mcp/tcp-qa
script-path: jobs/pipelines/self-deploy-jobs.groovy
logrotate:
diff --git a/jobs/templates/swarm-bootstrap-salt-cluster-heat-jjb.yaml b/jobs/templates/swarm-bootstrap-salt-cluster-heat-jjb.yaml
index 2dbc28b..c53a1f4 100755
--- a/jobs/templates/swarm-bootstrap-salt-cluster-heat-jjb.yaml
+++ b/jobs/templates/swarm-bootstrap-salt-cluster-heat-jjb.yaml
@@ -189,7 +189,7 @@
trim: 'false'
- string:
name: CREATE_JENKINS_NODE_CREDENTIALS
- default: 'create_jenkins_node_cz8133'
+ default: 'maintenance-team'
description: |-
Credentials with username and password with rights to create a Jenkins slave node
trim: 'false'
diff --git a/jobs/templates/swarm-create-cfg-config-drive-jjb.yaml b/jobs/templates/swarm-create-cfg-config-drive-jjb.yaml
index 979ce74..3012008 100644
--- a/jobs/templates/swarm-create-cfg-config-drive-jjb.yaml
+++ b/jobs/templates/swarm-create-cfg-config-drive-jjb.yaml
@@ -121,7 +121,7 @@
trim: 'false'
- string:
name: GERRIT_MCP_CREDENTIALS_ID
- default: 'f4fb9dd6-ba63-4085-82f7-3fa601334d95'
+ default: 'maintenance-team-ssh'
description: 'Credentials ID to access gerrit.mcp.mirantis.net'
trim: 'false'
- string:
diff --git a/src/com/mirantis/system_qa/SharedPipeline.groovy b/src/com/mirantis/system_qa/SharedPipeline.groovy
index 110d406..66c24fb 100644
--- a/src/com/mirantis/system_qa/SharedPipeline.groovy
+++ b/src/com/mirantis/system_qa/SharedPipeline.groovy
@@ -488,10 +488,11 @@
def mcp_salt_repo_url = env.MCP_SALT_REPO_URL ?: ''
def mcp_salt_repo_key = env.MCP_SALT_REPO_KEY ?: ''
def deploy_network_mask = env.DEPLOY_NETWORK_NETMASK ?: ''
+ def jenkins_host = env.SCRIPT_JENKINS_IP ?: 'sre-ci.scc.mirantis.net'
def parameters = [
string(name: 'CLUSTER_NAME', value: "${LAB_CONFIG_NAME}"),
- string(name: 'MODEL_URL', value: "http://sre-ci.scc.mirantis.net:8098/${LAB_CONFIG_NAME}.git"),
+ string(name: 'MODEL_URL', value: "http://${jenkins_host}:8098/${LAB_CONFIG_NAME}.git"),
string(name: 'MODEL_URL_OBJECT_TYPE', value: "git"),
booleanParam(name: 'DOWNLOAD_CONFIG_DRIVE', value: true),
string(name: 'MCP_VERSION', value: "${MCP_VERSION}"),
diff --git a/tcp_tests/helpers/utils.py b/tcp_tests/helpers/utils.py
index e0ef723..46fc90d 100644
--- a/tcp_tests/helpers/utils.py
+++ b/tcp_tests/helpers/utils.py
@@ -23,8 +23,6 @@
import jinja2
import paramiko
import yaml
-import logging
-from multiprocessing import Process, BoundedSemaphore
from devops.helpers import ssh_client
from tcp_tests import logger
@@ -487,73 +485,3 @@
class TimeoutException(Exception):
pass
-
-
-pool = list()
-LOG_FORMAT = '%(asctime)s - %(levelname)s %(filename)s:%(lineno)d ' \
- '/%(processName)s/ -- %(message)s'
-
-
-class Worker:
- def __init__(self, limit=4, timeout=None):
- """
- limit of parallel thread to execute
- timeout of waiting threads in seconds
- """
- LOG.debug("Created multithreading Worker limited by {} "
- "threads".format(limit))
- self._sema = BoundedSemaphore(limit)
- self.timeout = timeout
- pass
-
- @property
- def pool(self):
- global pool
- return pool
-
- def _worker(self, func, args):
- try:
- # FIXME: logging doesn't work
- memory_handler = logging.handlers.MemoryHandler(
- 50,
- target=logger.console)
- formatter = logging.Formatter(fmt=LOG_FORMAT)
-
- LOG = logging.getLogger("{}{}".format(func, args))
- LOG.setLevel(logging.DEBUG)
- memory_handler.setFormatter(formatter)
- LOG.addHandler(memory_handler)
- # #######
- func(*args)
- # #######
- memory_handler.close()
- finally:
- # allow a new process to be started now that this one is exiting
- self._sema.release()
-
- def start(self, func, args, name=None):
- self._sema.acquire() # wait to start until another process is finished
- p = Process(target=self._worker,
- args=(func, args),
- name=name
- )
- self.pool.append(p)
- p.start()
-
- def are_completed(self):
- for t in self.pool:
- LOG.info("Joining {}....".format(t))
- t.join(timeout=self.timeout)
- return all([not (task.is_alive()) for task in self.pool])
-
- def clean_pool(self):
- for i in range(self.pool.__len__()):
- del self.pool[0]
-
- def all_tasks_successfully_completed(self):
- return all([task.exitcode == 0 for task in self.pool])
-
- def print_failed_tasks(self):
- return "\n".join([str(task)
- for task in self.pool
- if task.exitcode != 0])
diff --git a/tcp_tests/managers/execute_commands.py b/tcp_tests/managers/execute_commands.py
index 314c641..6dcf615 100644
--- a/tcp_tests/managers/execute_commands.py
+++ b/tcp_tests/managers/execute_commands.py
@@ -1,9 +1,8 @@
import time
-from tcp_tests import logger, settings
+from tcp_tests import logger
from tcp_tests.helpers.log_helpers import pretty_repr
-from tcp_tests.helpers.utils import Worker
LOG = logger.logger
@@ -37,8 +36,6 @@
'node_name': 'name of the node to run the command(s)',
# Optional:
'description': 'string with a readable command description',
- 'parallel': 'bool (True of False) to enable executing these
- type of command in multithreading'
'retry': {
'count': int, # How many times should be run the command
# until success
@@ -52,7 +49,6 @@
...
]
"""
- worker = Worker(limit=settings.THREADS, timeout=3*60)
for n, step in enumerate(commands):
# Required fields
action_cmd = step.get('cmd')
@@ -71,19 +67,7 @@
log_msg = "\n\n{0}\n{1}".format(msg, '=' * len(msg))
if action_cmd:
- if step.get('parallel'):
- name = description + " on " + step.get("node_name")
- worker.start(func=self.execute_command,
- args=(step, msg),
- name=name
- )
- else:
- while not worker.are_completed():
- LOG.info("Waiting {}".format(worker.pool))
- if worker.all_tasks_successfully_completed():
- worker.clean_pool()
- self.execute_command(step, msg)
-
+ self.execute_command(step, msg)
elif action_do:
self.command2(step, msg)
elif action_upload:
@@ -93,12 +77,6 @@
LOG.info(log_msg)
self.action_download(step)
- while not worker.are_completed():
- LOG.info("Waiting {}".format(worker.pool))
-
- assert worker.all_tasks_successfully_completed(), \
- worker.print_failed_tasks()
-
def execute_command(self, step, msg, return_res=None):
# Required fields
cmd = step.get('cmd')
@@ -112,6 +90,7 @@
timeout = step.get('timeout', None)
with self.__underlay.remote(node_name=node_name) as remote:
+
for x in range(retry_count, 0, -1):
time.sleep(3)
diff --git a/tcp_tests/managers/saltmanager.py b/tcp_tests/managers/saltmanager.py
index 0b4b698..7f2df1d 100644
--- a/tcp_tests/managers/saltmanager.py
+++ b/tcp_tests/managers/saltmanager.py
@@ -451,7 +451,7 @@
'export JENKINS_USER={user}\n'
'export JENKINS_PASS={password}\n'
'export JENKINS_START_TIMEOUT=60\n'
- 'export JENKINS_BUILD_TIMEOUT=1800\n'
+ 'export JENKINS_BUILD_TIMEOUT=2400\n'
'echo "export JENKINS_URL=${{JENKINS_URL}}'
' # Jenkins API URL"\n'
'echo "export JENKINS_USER=${{JENKINS_USER}}'
diff --git a/tcp_tests/requirements.txt b/tcp_tests/requirements.txt
index c3352df..92855d2 100644
--- a/tcp_tests/requirements.txt
+++ b/tcp_tests/requirements.txt
@@ -13,6 +13,7 @@
junit-xml
jinja2>=2.9
jira
+jsonfield<3.0.0
testrail<=0.3.8
functools32
kubernetes<9.0.0
@@ -24,6 +25,7 @@
PyYAML!=5.1
polling==0.3.2
retrying
+virtualbmc<2.0
# For Queens: https://github.com/openstack/requirements/blob/stable/queens/global-requirements.txt
diff --git a/tcp_tests/settings.py b/tcp_tests/settings.py
index f4ff7e0..18548fb 100644
--- a/tcp_tests/settings.py
+++ b/tcp_tests/settings.py
@@ -72,7 +72,6 @@
DOCKER_NAME = os.environ.get('DOCKER_NAME',
'mirantis/oscore/rally-tempest:latest')
DOCKER_IMAGES_SL_TAG = os.environ.get('DOCKER_IMAGES_SL_TAG', 'latest')
-THREADS = os.environ.get("THREADS", 10)
PATTERN = os.environ.get('PATTERN', None)
RUN_TEMPEST = get_var_as_bool('RUN_TEMPEST', False)
diff --git a/tcp_tests/templates/_packer/scripts/jenkins_virtualenvs.sh b/tcp_tests/templates/_packer/scripts/jenkins_virtualenvs.sh
index d3b2f4a..4e3f088 100644
--- a/tcp_tests/templates/_packer/scripts/jenkins_virtualenvs.sh
+++ b/tcp_tests/templates/_packer/scripts/jenkins_virtualenvs.sh
@@ -8,7 +8,7 @@
fi
if [ ! -d ${DEVOPS_VENV_PATH} ]; then
- virtualenv ${DEVOPS_VENV_PATH}
+ virtualenv --python=python2 ${DEVOPS_VENV_PATH}
fi
if [ ! -d ${REPORT_VENV_PATH} ]; then
virtualenv --python=python3.7 ${REPORT_VENV_PATH}
@@ -19,7 +19,7 @@
# Install tcp-qa requirements
. ${DEVOPS_VENV_PATH}/bin/activate
-pip install -r https://raw.githubusercontent.com/Mirantis/tcp-qa/master/tcp_tests/requirements.txt
+pip install -r tcp_tests/requirements.txt
pip install psycopg2 # workaround for setup with PostgreSQL , to keep requirements.txt for Sqlite3 only
deactivate
diff --git a/tcp_tests/templates/bm-e7-cicd-pike-ovs-maas/env_add.yml b/tcp_tests/templates/bm-e7-cicd-pike-ovs-maas/env_add.yml
index 5408fb4..77c518a 100644
--- a/tcp_tests/templates/bm-e7-cicd-pike-ovs-maas/env_add.yml
+++ b/tcp_tests/templates/bm-e7-cicd-pike-ovs-maas/env_add.yml
@@ -68,3 +68,25 @@
- bond0
require_interfaces:
- bond0
+ openstack_compute_rack01:
+ params:
+ linux_network_interfaces:
+ br_mesh:
+ address: ${_param:_esc}{_param:tenant_address}
+ enabled: true
+ netmask: ${_param:_esc}{_param:tenant_network_netmask}
+ proto: static
+ type: bridge
+ use_interfaces:
+ - bond0.${_param:_esc}{_param:tenant_vlan}
+ require_interfaces:
+ - bond0.${_param:_esc}{_param:tenant_vlan}
+ bond0.tenant_vlan:
+ name: bond0.${_param:_esc}{_param:tenant_vlan}
+ enabled: true
+ proto: manual
+ type: vlan
+ use_interfaces:
+ - bond0
+ require_interfaces:
+ - bond0
diff --git a/tcp_tests/templates/cookied-model-generator/salt_bm-e7-cicd-pike-ovs-maas.yaml b/tcp_tests/templates/cookied-model-generator/salt_bm-e7-cicd-pike-ovs-maas.yaml
index 4cefd30..e8dec92 100644
--- a/tcp_tests/templates/cookied-model-generator/salt_bm-e7-cicd-pike-ovs-maas.yaml
+++ b/tcp_tests/templates/cookied-model-generator/salt_bm-e7-cicd-pike-ovs-maas.yaml
@@ -77,3 +77,32 @@
node_name: {{ HOSTNAME_CFG01 }}
retry: {count: 1, delay: 5}
skip_fail: false
+
+- description: "Upload env_add.yml to cfg01.{{ DOMAIN_NAME }}"
+ upload:
+ local_path: {{ config.salt_deploy.templates_dir }}{{ LAB_CONFIG_NAME }}/
+ local_filename: env_add.yml
+ remote_path: /root/environment/
+ node_name: {{ HOSTNAME_CFG01 }}
+ retry: {count: 1, delay: 10}
+ skip_fail: false
+
+- description: "Upload infra_config_nodes_add.yml to cfg01.{{ DOMAIN_NAME }}"
+ upload:
+ local_path: {{ config.salt_deploy.templates_dir }}{{ LAB_CONFIG_NAME }}/
+ local_filename: infra_config_nodes_add.yml
+ remote_path: /root/environment/
+ node_name: {{ HOSTNAME_CFG01 }}
+ retry: {count: 1, delay: 10}
+ skip_fail: false
+
+- description: "Modify model to add interfaces to kvm nodes"
+ cmd: |
+ set -e;
+ set -x;
+ . /root/venv-reclass-tools/bin/activate;
+ reclass-tools merge-context /root/environment/env_add.yml /srv/salt/reclass/classes/environment/{{ LAB_CONFIG_NAME }}/init.yml;
+ reclass-tools merge-context /root/environment/infra_config_nodes_add.yml /srv/salt/reclass/classes/cluster/{{ LAB_CONFIG_NAME }}/infra/config/nodes.yml;
+ node_name: {{ HOSTNAME_CFG01 }}
+ retry: {count: 1, delay: 5}
+ skip_fail: false
diff --git a/tcp_tests/templates/shared-salt.yaml b/tcp_tests/templates/shared-salt.yaml
index cc38df3..1f43386 100644
--- a/tcp_tests/templates/shared-salt.yaml
+++ b/tcp_tests/templates/shared-salt.yaml
@@ -808,7 +808,6 @@
{%- if salt_roles %}
- description: Configure salt-minion on {{ ssh['node_name'] }}
- parallel: True
cmd: |
set -ex;
[ ! -d /etc/salt/minion.d ] && mkdir -p /etc/salt/minion.d;