Merge "Adjust OpenStack upgrade pipelines"
diff --git a/branch-git-repos.groovy b/branch-git-repos.groovy
index 383dba0..47c143a 100644
--- a/branch-git-repos.groovy
+++ b/branch-git-repos.groovy
@@ -78,7 +78,7 @@
if (gitSrcObj.contains('SUBS_SOURCE_REF')) {
echo ("Replacing 'SUBS_SOURCE_REF' => ${SOURCE_REVISION}")
- gitSrcObj.replace('SUBS_SOURCE_REF', srcObj)
+ gitSrcObj = gitSrcObj.replace('SUBS_SOURCE_REF', srcObj)
}
// Remove prefix `origin/` from gitSrcObj
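The hunk above fixes a classic Groovy pitfall: strings are immutable, so String.replace() returns a new value and leaves the receiver unchanged. A minimal standalone sketch of the behaviour:

    // Groovy strings are immutable: replace() returns a new string.
    def gitSrcObj = 'refs/SUBS_SOURCE_REF/head'
    gitSrcObj.replace('SUBS_SOURCE_REF', 'master')              // result discarded - no-op
    assert gitSrcObj == 'refs/SUBS_SOURCE_REF/head'
    gitSrcObj = gitSrcObj.replace('SUBS_SOURCE_REF', 'master')  // reassignment keeps the result
    assert gitSrcObj == 'refs/master/head'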
diff --git a/cloud-deploy-pipeline.groovy b/cloud-deploy-pipeline.groovy
index bf7e238..25892df 100644
--- a/cloud-deploy-pipeline.groovy
+++ b/cloud-deploy-pipeline.groovy
@@ -374,8 +374,8 @@
}
// ensure certificates are generated properly
- salt.runSaltProcessStep(venvPepper, "* ${extra_tgt}", 'saltutil.refresh_pillar', [], null, true)
- salt.enforceState(venvPepper, "* ${extra_tgt}", ['salt.minion.cert'], true)
+ salt.runSaltProcessStep(venvPepper, "I@kubernetes:* ${extra_tgt}", 'saltutil.refresh_pillar', [], null, true)
+ salt.enforceState(venvPepper, "I@kubernetes:* ${extra_tgt}", ['salt.minion.cert'], true)
}
if (common.checkContains('STACK_INSTALL', 'contrail')) {
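The hunk above narrows certificate regeneration from every minion ('*') to Kubernetes nodes only. A minimal sketch of the Salt compound targeting involved, assuming the same com.mirantis.mk.Salt helper signatures used in this pipeline (venvPepper is the pipeline's existing Pepper env; the extra clause is hypothetical):

    def salt = new com.mirantis.mk.Salt()
    // 'I@kubernetes:*' selects only minions whose pillar has a top-level
    // 'kubernetes' key; 'and G@os:Ubuntu' is a hypothetical extra grain clause.
    def extraTgt = 'and G@os:Ubuntu'
    salt.runSaltProcessStep(venvPepper, "I@kubernetes:* ${extraTgt}", 'saltutil.refresh_pillar', [], null, true)
    salt.enforceState(venvPepper, "I@kubernetes:* ${extraTgt}", ['salt.minion.cert'], true)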
diff --git a/cloud-update.groovy b/cloud-update.groovy
index 19d563f..2729d98 100644
--- a/cloud-update.groovy
+++ b/cloud-update.groovy
@@ -38,6 +38,7 @@
* RESTORE_GALERA Restore Galera DB (bool)
* RESTORE_CONTRAIL_DB Restore Cassandra and Zookeeper DBs for OpenContrail (bool)
* RUN_CVP_TESTS Run cloud validation pipelines before and after upgrade
+ * MINIONS_TEST_TIMEOUT Time in seconds to wait for a Salt minion to respond when calling the minionsReachable method.
*
**/
def common = new com.mirantis.mk.Common()
@@ -57,6 +58,11 @@
def command
def commandKwargs
+def wait = 10
+if (common.validInputParam('MINIONS_TEST_TIMEOUT') && MINIONS_TEST_TIMEOUT.isInteger()) {
+ wait = "${MINIONS_TEST_TIMEOUT}".toInteger()
+}
+
def updatePkgs(pepperEnv, target, targetType="", targetPackages="") {
def salt = new com.mirantis.mk.Salt()
def common = new com.mirantis.mk.Common()
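The new MINIONS_TEST_TIMEOUT handling above follows a guard-then-convert pattern: only override the default when the parameter exists and is numeric. A sketch of the same idea as a reusable helper (intParam is hypothetical, not part of com.mirantis.mk):

    // Return an integer job parameter, or a default when absent/non-numeric.
    def intParam(String name, int defaultValue) {
        def raw = env."${name}"
        return (raw && raw.isInteger()) ? raw.toInteger() : defaultValue
    }
    def wait = intParam('MINIONS_TEST_TIMEOUT', 10)   // seconds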
@@ -153,11 +159,11 @@
if (targetType == 'cfg') {
common.warningMsg('salt-master pkg upgrade, rerun the pipeline if disconnected')
salt.runSaltProcessStep(pepperEnv, target, 'pkg.install', ['salt-master'], null, true, 5)
- salt.minionsReachable(pepperEnv, 'I@salt:master', '*')
+ salt.minionsReachable(pepperEnv, 'I@salt:master', '*', null, wait)
}
// salt minion pkg
salt.runSaltProcessStep(pepperEnv, target, 'pkg.install', ['salt-minion'], null, true, 5)
- salt.minionsReachable(pepperEnv, 'I@salt:master', target)
+ salt.minionsReachable(pepperEnv, 'I@salt:master', target, null, wait)
common.infoMsg('Performing pkg upgrades ... ')
common.retry(3){
out = salt.runSaltCommand(pepperEnv, 'local', ['expression': target, 'type': 'compound'], command, true, packages, commandKwargs)
@@ -317,7 +323,7 @@
common.retry(3){
out = salt.runSaltProcessStep(pepperEnv, target, 'cmd.run', [args + ' install salt-minion'], null, true, 5)
}
- salt.minionsReachable(pepperEnv, 'I@salt:master', target)
+ salt.minionsReachable(pepperEnv, 'I@salt:master', target, null, wait)
common.retry(3){
out = salt.runSaltProcessStep(pepperEnv, target, 'cmd.run', [args + ' install ' + packages])
}
@@ -433,7 +439,7 @@
while(count < maxRetries) {
try {
sleep(10)
- salt.minionsReachable(pepperEnv, 'I@salt:master', target)
+ salt.minionsReachable(pepperEnv, 'I@salt:master', target, null, wait)
break
} catch (Exception e) {
common.warningMsg("${target} not ready yet. Waiting ...")
@@ -483,7 +489,7 @@
} else {
salt.runSaltProcessStep(pepperEnv, target, 'system.reboot', null, null, true, 5)
sleep 10
- salt.minionsReachable(pepperEnv, 'I@salt:master', target)
+ salt.minionsReachable(pepperEnv, 'I@salt:master', target, null, wait)
}
}
}
@@ -541,7 +547,7 @@
virsh.liveSnapshotMerge(pepperEnv, nodeProvider, target, SNAPSHOT_NAME)
}
}
- salt.minionsReachable(pepperEnv, 'I@salt:master', tgt)
+ salt.minionsReachable(pepperEnv, 'I@salt:master', tgt, null, wait)
}
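All the minionsReachable calls above now pass the configurable wait. A sketch of the poll-until-reachable pattern they feed into, modelled on the retry loop earlier in this file (waitForMinions is a hypothetical wrapper, not an existing helper):

    def waitForMinions(pepperEnv, salt, common, target, wait, maxRetries = 10) {
        for (int count = 0; count < maxRetries; count++) {
            try {
                sleep(10)
                salt.minionsReachable(pepperEnv, 'I@salt:master', target, null, wait)
                return
            } catch (Exception e) {
                common.warningMsg("${target} not ready yet. Waiting ...")
            }
        }
        error("${target} still unreachable after ${maxRetries} attempts")
    }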
diff --git a/deploy-aws-k8s-kqueen-pipeline.groovy b/deploy-aws-k8s-kqueen-pipeline.groovy
index 0a5903e..8fd92bf 100644
--- a/deploy-aws-k8s-kqueen-pipeline.groovy
+++ b/deploy-aws-k8s-kqueen-pipeline.groovy
@@ -124,8 +124,8 @@
}
// ensure certificates are generated properly
- salt.runSaltProcessStep(venvPepper, '*', 'saltutil.refresh_pillar', [], null, true)
- salt.enforceState(venvPepper, '*', ['salt.minion.cert'], true)
+ salt.runSaltProcessStep(venvPepper, 'I@kubernetes:*', 'saltutil.refresh_pillar', [], null, true)
+ salt.enforceState(venvPepper, 'I@kubernetes:*', ['salt.minion.cert'], true)
orchestrate.installKubernetesInfra(venvPepper)
}
diff --git a/deploy-heat-k8s-kqueen-pipeline.groovy b/deploy-heat-k8s-kqueen-pipeline.groovy
index 7071b96..6e5705e 100644
--- a/deploy-heat-k8s-kqueen-pipeline.groovy
+++ b/deploy-heat-k8s-kqueen-pipeline.groovy
@@ -122,8 +122,8 @@
}
// ensure certificates are generated properly
- salt.runSaltProcessStep(venvPepper, '*', 'saltutil.refresh_pillar', [], null, true)
- salt.enforceState(venvPepper, '*', ['salt.minion.cert'], true)
+ salt.runSaltProcessStep(venvPepper, 'I@kubernetes:*', 'saltutil.refresh_pillar', [], null, true)
+ salt.enforceState(venvPepper, 'I@kubernetes:*', ['salt.minion.cert'], true)
orchestrate.installKubernetesInfra(venvPepper)
}
diff --git a/docker-build-image-pipeline.groovy b/docker-build-image-pipeline.groovy
index b94928e..1fbd9f0 100644
--- a/docker-build-image-pipeline.groovy
+++ b/docker-build-image-pipeline.groovy
@@ -46,6 +46,8 @@
}
if (!imageTagsList.contains("latest")) {
imageTagsList << "latest"
+ // Workaround: additionally tag all of our docker images as "nightly"
+ imageTagsList << "nightly"
}
} catch (Exception e) {
common.infoMsg("Impossible to find any tag")
@@ -98,4 +100,3 @@
}
}
}
-
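Note that with the change above, "nightly" is only appended when "latest" was missing as well. A sketch of a variant that guarantees both fallback tags regardless (an assumption about the intent, not the pipeline's current logic; IMAGE_TAGS is the job parameter):

    def imageTagsList = IMAGE_TAGS.tokenize(' ')
    for (extraTag in ['latest', 'nightly']) {
        if (!imageTagsList.contains(extraTag)) {
            imageTagsList << extraTag
        }
    }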
diff --git a/generate-cookiecutter-products.groovy b/generate-cookiecutter-products.groovy
index 553029e..aaba422 100644
--- a/generate-cookiecutter-products.groovy
+++ b/generate-cookiecutter-products.groovy
@@ -25,7 +25,7 @@
// options from CC contexts
// currently, just mix them together in one set
def testCfg01ExtraFormulas = 'glusterfs jenkins logrotate maas ntp rsyslog fluentd telegraf prometheus ' +
- 'grafana backupninja auditd'
+ 'grafana backupninja'
timeout(time: 2, unit: 'HOURS') {
@@ -58,6 +58,10 @@
user = env.BUILD_USER_ID
}
+ if (mcpVersion != '2018.4.0') {
+ testCfg01ExtraFormulas += ' auditd'
+ }
+
currentBuild.description = clusterName
print("Using context:\n" + COOKIECUTTER_TEMPLATE_CONTEXT)
@@ -243,6 +247,9 @@
smc['SALT_MASTER_DEPLOY_IP'] = templateContext['default_context']['salt_master_management_address']
smc['DEPLOY_NETWORK_GW'] = templateContext['default_context']['deploy_network_gateway']
smc['DEPLOY_NETWORK_NETMASK'] = templateContext['default_context']['deploy_network_netmask']
+ if (templateContext['default_context'].get('deploy_network_mtu')) {
+ smc['DEPLOY_NETWORK_MTU'] = templateContext['default_context']['deploy_network_mtu']
+ }
smc['DNS_SERVERS'] = templateContext['default_context']['dns_server01']
smc['MCP_VERSION'] = "${mcpVersion}"
if (templateContext['default_context']['local_repositories'] == 'True') {
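The DEPLOY_NETWORK_MTU hunk above relies on Map.get() returning null (falsy in Groovy truth) for absent keys, so the value is only propagated when the cookiecutter context actually defines it. A standalone sketch:

    def ctx = [deploy_network_gateway: '10.0.0.1']   // no deploy_network_mtu key
    def smc = [:]
    if (ctx.get('deploy_network_mtu')) {
        smc['DEPLOY_NETWORK_MTU'] = ctx['deploy_network_mtu']
    }
    assert !smc.containsKey('DEPLOY_NETWORK_MTU')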
diff --git a/git-merge-branches-pipeline.groovy b/git-merge-branches-pipeline.groovy
deleted file mode 100644
index d1c3ee2..0000000
--- a/git-merge-branches-pipeline.groovy
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Git merge branches pipeline
- * REPO_URL - Repository URL
- * TARGET_BRANCH - Target branch for merging
- * SOURCE_BRANCH - The branch will be merged to TARGET_BRANCH
- * CREDENTIALS_ID - Used credentails ID
- *
-**/
-
-def common = new com.mirantis.mk.Common()
-def git = new com.mirantis.mk.Git()
-timeout(time: 12, unit: 'HOURS') {
- node {
- try{
- stage("checkout") {
- git.checkoutGitRepository('repo', REPO_URL, TARGET_BRANCH, IMAGE_CREDENTIALS_ID)
- }
- stage("merge") {
- dir("repo"){
- sh("git fetch origin/${SOURCE_BRANCH} && git merge ${SOURCE_BRANCH} && git push origin ${TARGET_BRANCH}")
- }
- }
- } catch (Throwable e) {
- // If there was an error or exception thrown, the build failed
- currentBuild.result = "FAILURE"
- currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
- throw e
- }
- }
-}
-
diff --git a/promote-mirror-ubuntu-related.groovy b/promote-mirror-ubuntu-related.groovy
new file mode 100644
index 0000000..cd663cf
--- /dev/null
+++ b/promote-mirror-ubuntu-related.groovy
@@ -0,0 +1,27 @@
+/**
+ *
+ * Promote Ubuntu-related mirrors at the same time.
+ * ubuntu|maas|maas-ephemeral should always be promoted together.
+ *
+ * Expected parameters:
+ * MCP_VERSION
+ * SNAPSHOT_NAME - Snapshot name to set
+ * SNAPSHOT_ID - Set name for specified snapshot ID
+ */
+
+common = new com.mirantis.mk.Common()
+
+timeout(time: 1, unit: 'HOURS') {
+ node() {
+ stage("Promote") {
+ catchError {
+ for (String jobname : ['mirror-snapshot-name-maas-xenial', 'mirror-snapshot-name-ubuntu', 'mirror-snapshot-name-maas-ephemeral-v3']) {
+ build job: jobname, parameters: [
+ [$class: 'StringParameterValue', name: 'SNAPSHOT_NAME', value: SNAPSHOT_NAME],
+ [$class: 'StringParameterValue', name: 'SNAPSHOT_ID', value: SNAPSHOT_ID],
+ ]
+ }
+ }
+ }
+ }
+}
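In the new job above, a single catchError wraps the whole loop, so a failing mirror job still aborts the remaining iterations. If per-mirror isolation were wanted instead, each downstream build could be wrapped individually (a variant sketch, not the pipeline's current behaviour):

    for (String jobname : ['mirror-snapshot-name-maas-xenial', 'mirror-snapshot-name-ubuntu', 'mirror-snapshot-name-maas-ephemeral-v3']) {
        catchError {
            build job: jobname, parameters: [
                [$class: 'StringParameterValue', name: 'SNAPSHOT_NAME', value: SNAPSHOT_NAME],
                [$class: 'StringParameterValue', name: 'SNAPSHOT_ID', value: SNAPSHOT_ID],
            ]
        }
    }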
diff --git a/promote-vcp-images.groovy b/promote-vcp-images.groovy
new file mode 100644
index 0000000..181eafa
--- /dev/null
+++ b/promote-vcp-images.groovy
@@ -0,0 +1,134 @@
+/**
+ *
+ * Promote VCP(qcow2) images
+ *
+ * Expected parameters:
+ * VCP_IMAGE_LIST - multiline with qcow2 file names
+ * TAG - Target tag of the image. Possible values: "nightly|testing|proposed|201X.X.X"
+ * SOURCE_TAG - Initial tag to be re-tagged as TAG. Will replace SUBS_SOURCE_VCP_IMAGE_TAG in VCP_IMAGE_LIST
+ * UPLOAD_URL - WebDAV URL with credentials, used both to download and to upload images
+ *
+ */
+
+def common = new com.mirantis.mk.Common()
+def jenkinsUtils = new com.mirantis.mk.JenkinsUtils()
+
+// Better to choose a slave with an SSD and a fast network path to the WebDAV host
+slaveNode = env.SLAVE_NODE ?: 'jsl23.mcp.mirantis.net'
+def job_env = env.getEnvironment().findAll { k, v -> v }
+def verify = job_env.VERIFY_DOWNLOAD ?: true
+
+
+timeout(time: 6, unit: 'HOURS') {
+ node(slaveNode) {
+
+ String description = ''
+ insufficientPermissions = false
+ try {
+ // Pre-run verify
+ // promote is restricted to users in aptly-promote-users LDAP group
+ if (!jenkinsUtils.currentUserInGroups(["mcp-cicd-admins", "aptly-promote-users"])) {
+ insufficientPermissions = true
+ error(String.format("You don't have permissions to make promote from source:%s to target:%s! Only CI/CD and QA team can perform promote.", job_env.SOURCE_TAG, job_env.TAG))
+ }
+ // Check for required opts
+ for (opt in ['UPLOAD_URL', 'SOURCE_TAG', 'TAG', 'VCP_IMAGE_LIST']) {
+ if (!job_env.get(opt, null)) {
+ error("Invalid input params, at least ${opt} param missing")
+ }
+ }
+ def images = job_env.VCP_IMAGE_LIST.trim().tokenize()
+ for (image in images) {
+ if (image.startsWith('#')) {
+ common.warningMsg("Skipping image ${image}")
+ continue
+ }
+ common.infoMsg("Replacing SUBS_SOURCE_VCP_IMAGE_TAG => ${job_env.SOURCE_TAG}")
+ sourceImage = image.replace('SUBS_SOURCE_VCP_IMAGE_TAG', job_env.SOURCE_TAG)
+ targetImage = image.replace('SUBS_SOURCE_VCP_IMAGE_TAG', job_env.TAG)
+
+ // TODO: normalize url's?
+ sourceImageUrl = job_env.UPLOAD_URL + '/' + sourceImage
+ sourceImageMd5Url = job_env.UPLOAD_URL + '/' + sourceImage + '.md5'
+ targetImageUrl = job_env.UPLOAD_URL + '/' + targetImage
+ targetImageMd5Url = job_env.UPLOAD_URL + '/' + targetImage + '.md5'
+
+ common.infoMsg("Attempt to download: ${sourceImage} => ${targetImage}")
+ common.retry(3, 5) {
+ sh(script: "wget --progress=dot:giga --auth-no-challenge -O ${targetImage} ${sourceImageUrl}")
+ }
+ def targetImageMd5 = common.cutOrDie("md5sum ${targetImage} | tee ${targetImage}.md5", 0)
+ if (verify.toBoolean()) {
+ common.infoMsg("Checking md5's ")
+ sh(script: "wget --progress=dot:giga --auth-no-challenge -O ${targetImage}_source_md5 ${sourceImageMd5Url}")
+ def sourceImageMd5 = readFile(file: "${targetImage}_source_md5").tokenize(' ')[0]
+ // Compare downloaded and remote files
+ if (sourceImageMd5 != targetImageMd5) {
+ error("Image ${targetImage} md5sum verify failed!")
+ } else {
+ common.infoMsg("sourceImageMd5: ${sourceImageMd5} == ImageMd5 of the file to upload: ${targetImageMd5}")
+ }
+ // Compare downloaded file, and remote file-to-be-promoted. If same - no sense to promote same file
+ remoteImageMd5Status = sh(script: "wget --progress=dot:giga --auth-no-challenge -O ${targetImage}_expected_target_md5 ${targetImageMd5Url}", returnStatus: true)
+ if (remoteImageMd5Status == 8) {
+ common.infoMsg("Remote md5 file for the target image does not exist yet. Continuing..")
+ } else {
+ def remoteImageMd5 = readFile(file: "${targetImage}_expected_target_md5").tokenize(' ')[0]
+ if (sourceImageMd5 == remoteImageMd5) {
+ common.infoMsg("sourceImageMd5: ${sourceImageMd5} and the remote target ImageMd5: ${targetImageMd5} are the same")
+ common.warningMsg("Skipping upload of ${targetImage}: the remote file is already identical")
+ description += "Skipping upload of ${targetImage}: the remote file is already identical\n"
+ continue
+ }
+ }
+ common.infoMsg("Checking that we are not going to overwrite a released file..")
+ if (['proposed', 'testing', 'nightly'].contains(job_env.TAG)) {
+ common.infoMsg("Uploading to ${job_env.TAG} looks safe..")
+ } else if (['stable'].contains(job_env.TAG)) {
+ common.warningMsg("Uploading to ${job_env.TAG} is not safe! But still possible")
+ } else {
+ common.warningMsg("Looks like uploading to a new release: ${job_env.TAG}. Checking that it does not exist yet..")
+ remoteImageStatus = ''
+ remoteImageStatus = sh(script: "wget --auth-no-challenge --spider ${targetImageUrl} 2>/dev/null", returnStatus: true)
+ // wget exits with code 8 if the file does not exist; returnStatus yields an Integer
+ if (remoteImageStatus != 8) {
+ error("Attempt to overwrite existing release! Target: ${targetImage} already exist!")
+ }
+ }
+ }
+
+ common.infoMsg("Attempt to UPLOAD: ${targetImage} => ${targetImageUrl}")
+ //
+ def uploadImageStatus = ''
+ def uploadImageMd5Status = ''
+ common.retry(3, 5) {
+ uploadImageStatus = sh(script: "curl -f -T ${targetImage} ${job_env.UPLOAD_URL}", returnStatus: true)
+ if (uploadImageStatus != 0) {
+ error("Uploading file: ${targetImage} failed!")
+ }
+ }
+ uploadImageMd5Status = sh(script: "curl -f -T ${targetImage}.md5 ${job_env.UPLOAD_URL}", returnStatus: true)
+ if (uploadImageMd5Status != 0) {
+ error("Uploading file: ${targetImage}.md5 failed!")
+ }
+
+ description += "<a href='http://apt.mirantis.net:8085/images/${targetImage}'>${job_env.SOURCE_TAG}=>${targetImage}</a>"
+ }
+ currentBuild.description = description
+ } catch (Throwable e) {
+ // If there was an error or exception thrown, the build failed
+ if (insufficientPermissions) {
+ currentBuild.result = "ABORTED"
+ currentBuild.description = "Promote aborted due to insufficient permissions"
+ } else {
+ currentBuild.result = "FAILURE"
+ currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
+ }
+ throw e
+ }
+ finally {
+ common.infoMsg("Cleanup..")
+ sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
+ }
+ }
+}
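The existence probes in this new job hinge on wget exit codes: sh(returnStatus: true) yields an Integer, and wget exits with 8 when the server returns an error response (for example HTTP 404) - hence the integer comparisons above. A minimal sketch of the probe, using targetImageUrl as defined in the job:

    int status = sh(script: "wget --auth-no-challenge --spider ${targetImageUrl} 2>/dev/null",
                    returnStatus: true)
    if (status == 8) {
        echo 'Remote file is absent - safe to upload'
    }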
diff --git a/release-mcp-version.groovy b/release-mcp-version.groovy
index b1b3d77..1972465 100644
--- a/release-mcp-version.groovy
+++ b/release-mcp-version.groovy
@@ -16,92 +16,104 @@
* DOCKER_IMAGES
* GIT_CREDENTIALS
* GIT_REPO_LIST
+ * VCP_IMAGE_LIST - list of images
+ * RELEASE_VCP_IMAGES - boolean
* EMAIL_NOTIFY
* NOTIFY_RECIPIENTS
* NOTIFY_TEXT
*
-*/
+ */
common = new com.mirantis.mk.Common()
git = new com.mirantis.mk.Git()
-def triggerAptlyPromoteJob(aptlyUrl, components, diffOnly, dumpPublish, packages, recreate, source, storages, target){
- build job: "aptly-promote-all-testing-stable", parameters: [
- [$class: 'StringParameterValue', name: 'APTLY_URL', value: aptlyUrl],
- [$class: 'StringParameterValue', name: 'COMPONENTS', value: components],
- [$class: 'BooleanParameterValue', name: 'DIFF_ONLY', value: diffOnly],
- [$class: 'BooleanParameterValue', name: 'DUMP_PUBLISH', value: dumpPublish],
- [$class: 'StringParameterValue', name: 'PACKAGES', value: packages],
- [$class: 'BooleanParameterValue', name: 'RECREATE', value: recreate],
- [$class: 'StringParameterValue', name: 'SOURCE', value: source],
- [$class: 'StringParameterValue', name: 'STORAGES', value: storages],
- [$class: 'StringParameterValue', name: 'TARGET', value: target],
- ]
+def triggerAptlyPromoteJob(aptlyUrl, components, diffOnly, dumpPublish, packages, recreate, source, storages, target) {
+ build job: "aptly-promote-all-testing-stable", parameters: [
+ [$class: 'StringParameterValue', name: 'APTLY_URL', value: aptlyUrl],
+ [$class: 'StringParameterValue', name: 'COMPONENTS', value: components],
+ [$class: 'BooleanParameterValue', name: 'DIFF_ONLY', value: diffOnly],
+ [$class: 'BooleanParameterValue', name: 'DUMP_PUBLISH', value: dumpPublish],
+ [$class: 'StringParameterValue', name: 'PACKAGES', value: packages],
+ [$class: 'BooleanParameterValue', name: 'RECREATE', value: recreate],
+ [$class: 'StringParameterValue', name: 'SOURCE', value: source],
+ [$class: 'StringParameterValue', name: 'STORAGES', value: storages],
+ [$class: 'StringParameterValue', name: 'TARGET', value: target],
+ ]
}
def triggerDockerMirrorJob(dockerCredentials, dockerRegistryUrl, targetTag, imageList, sourceImageTag) {
- build job: "docker-images-mirror", parameters: [
- [$class: 'StringParameterValue', name: 'TARGET_REGISTRY_CREDENTIALS_ID', value: dockerCredentials],
- [$class: 'StringParameterValue', name: 'REGISTRY_URL', value: dockerRegistryUrl],
- [$class: 'StringParameterValue', name: 'IMAGE_TAG', value: targetTag],
- [$class: 'StringParameterValue', name: 'IMAGE_LIST', value: imageList],
- [$class: 'StringParameterValue', name: 'SOURCE_IMAGE_TAG', value: sourceImageTag],
- ]
+ build job: "docker-images-mirror", parameters: [
+ [$class: 'StringParameterValue', name: 'TARGET_REGISTRY_CREDENTIALS_ID', value: dockerCredentials],
+ [$class: 'StringParameterValue', name: 'REGISTRY_URL', value: dockerRegistryUrl],
+ [$class: 'StringParameterValue', name: 'IMAGE_TAG', value: targetTag],
+ [$class: 'StringParameterValue', name: 'IMAGE_LIST', value: imageList],
+ [$class: 'StringParameterValue', name: 'SOURCE_IMAGE_TAG', value: sourceImageTag],
+ ]
}
def triggerMirrorRepoJob(snapshotId, snapshotName) {
- build job: "mirror-snapshot-name-all", parameters: [
- [$class: 'StringParameterValue', name: 'SNAPSHOT_NAME', value: snapshotName],
- [$class: 'StringParameterValue', name: 'SNAPSHOT_ID', value: snapshotId],
- ]
+ build job: "mirror-snapshot-name-all", parameters: [
+ [$class: 'StringParameterValue', name: 'SNAPSHOT_NAME', value: snapshotName],
+ [$class: 'StringParameterValue', name: 'SNAPSHOT_ID', value: snapshotId],
+ ]
}
def triggerGitTagJob(gitRepoList, gitCredentials, tag, sourceTag) {
- build job: "tag-git-repos-stable", parameters: [
- [$class: 'StringParameterValue', name: 'GIT_REPO_LIST', value: gitRepoList],
- [$class: 'StringParameterValue', name: 'GIT_CREDENTIALS', value: gitCredentials],
- [$class: 'StringParameterValue', name: 'TAG', value: tag],
- [$class: 'StringParameterValue', name: 'SOURCE_TAG', value: sourceTag],
- ]
+ build job: "tag-git-repos-all", parameters: [
+ [$class: 'StringParameterValue', name: 'GIT_REPO_LIST', value: gitRepoList],
+ [$class: 'StringParameterValue', name: 'GIT_CREDENTIALS', value: gitCredentials],
+ [$class: 'StringParameterValue', name: 'TAG', value: tag],
+ [$class: 'StringParameterValue', name: 'SOURCE_TAG', value: sourceTag],
+ ]
+}
+
+def triggerPromoteVCPJob(VcpImageList, tag, sourceTag) {
+ build job: "promote-vcp-images-all", parameters: [
+ [$class: 'StringParameterValue', name: 'VCP_IMAGE_LIST', value: VcpImageList],
+ [$class: 'StringParameterValue', name: 'TAG', value: tag],
+ [$class: 'StringParameterValue', name: 'SOURCE_TAG', value: sourceTag]
+ ]
}
timeout(time: 12, unit: 'HOURS') {
- node() {
- try {
- stage("Promote"){
- if(RELEASE_APTLY.toBoolean())
- {
- common.infoMsg("Promoting Aptly")
- triggerAptlyPromoteJob(APTLY_URL, 'all', false, true, 'all', false, "(.*)/${SOURCE_REVISION}", APTLY_STORAGES, "{0}/${TARGET_REVISION}")
- }
+ node() {
+ try {
+ stage("Promote") {
+ if (RELEASE_APTLY.toBoolean()) {
+ common.infoMsg("Promoting Aptly")
+ triggerAptlyPromoteJob(APTLY_URL, 'all', false, true, 'all', false, "(.*)/${SOURCE_REVISION}", APTLY_STORAGES, "{0}/${TARGET_REVISION}")
+ }
- if(RELEASE_DEB_MIRRORS.toBoolean()){
- common.infoMsg("Promoting Debmirrors")
- triggerMirrorRepoJob(SOURCE_REVISION, TARGET_REVISION)
- }
+ if (RELEASE_DEB_MIRRORS.toBoolean()) {
+ common.infoMsg("Promoting Debmirrors")
+ triggerMirrorRepoJob(SOURCE_REVISION, TARGET_REVISION)
+ }
- if(RELEASE_DOCKER.toBoolean())
- {
- common.infoMsg("Promoting Docker images")
- triggerDockerMirrorJob(DOCKER_CREDENTIALS, DOCKER_URL, TARGET_REVISION, DOCKER_IMAGES, SOURCE_REVISION)
- }
+ if (RELEASE_DOCKER.toBoolean()) {
+ common.infoMsg("Promoting Docker images")
+ triggerDockerMirrorJob(DOCKER_CREDENTIALS, DOCKER_URL, TARGET_REVISION, DOCKER_IMAGES, SOURCE_REVISION)
+ }
- if(RELEASE_GIT.toBoolean())
- {
- common.infoMsg("Promoting Git repositories")
- triggerGitTagJob(GIT_REPO_LIST, GIT_CREDENTIALS, TARGET_REVISION, SOURCE_REVISION)
+ if (RELEASE_GIT.toBoolean()) {
+ common.infoMsg("Promoting Git repositories")
+ triggerGitTagJob(GIT_REPO_LIST, GIT_CREDENTIALS, TARGET_REVISION, SOURCE_REVISION)
- }
- if (EMAIL_NOTIFY.toBoolean()) {
- emailext(to: NOTIFY_RECIPIENTS,
- body: NOTIFY_TEXT,
- subject: "MCP Promotion has been done")
- }
- }
- } catch (Throwable e) {
+ }
+ if (RELEASE_VCP_IMAGES.toBoolean()) {
+ common.infoMsg("Promoting VCP images")
+ triggerPromoteVCPJob(VCP_IMAGE_LIST, TARGET_REVISION, SOURCE_REVISION)
+
+ }
+ if (EMAIL_NOTIFY.toBoolean()) {
+ emailext(to: NOTIFY_RECIPIENTS,
+ body: NOTIFY_TEXT,
+ subject: "MCP Promotion has been done")
+ }
+ }
+ } catch (Throwable e) {
// If there was an error or exception thrown, the build failed
currentBuild.result = "FAILURE"
throw e
- }
}
- }
+ }
+}
diff --git a/rollout-config-change.groovy b/rollout-config-change.groovy
deleted file mode 100644
index dcb9034..0000000
--- a/rollout-config-change.groovy
+++ /dev/null
@@ -1,96 +0,0 @@
-
-/**
- * Rollout changes to the node(s) configuration
- *
- * Expected parameters:
- * TST_SALT_MASTER_CREDENTIALS Credentials to the Salt API (QA environment).
- * TST_SALT_MASTER_URL Full Salt API address [https://10.10.10.1:8000].
- * PRD_SALT_MASTER_CREDENTIALS Credentials to the Salt API (PRD environment).
- * PRD_SALT_MASTER_URL Full Salt API address [https://10.10.10.1:8000].
- * Model parameters:
- * MODEL_REPO_CREDENTIALS Credentials to the Model.
- * MODEL_REPO_URL Full model repo address.
- * MODEL_REPO_SOURCE_BRANCH Source branch to merge from.
- * MODEL_REPO_TARGET_BRANCH Target branch to merge fo.
- * Change settings:
- * TARGET_SERVERS Salt compound target to match nodes to be updated [*, G@osfamily:debian].
- * TARGET_STATES States to be applied, empty string means running highstate [linux, linux,openssh, salt.minion.grains].
- * TARGET_SUBSET_TEST Number of nodes to test config changes, empty string means all targetted nodes.
- * TARGET_SUBSET_LIVE Number of selected noded to live apply selected config changes.
- * TARGET_BATCH_LIVE Batch size for the complete live config changes on all nodes, empty string means apply to all targetted nodes.
- * Test settings:
- * TEST_SERVICE Comma separated list of services to test
- * TEST_K8S_API_SERVER Kubernetes API address
- * TEST_K8S_CONFORMANCE_IMAGE Path to docker image with conformance e2e tests
- * TEST_DOCKER_INSTALL Install docker on the target if true
- * TEST_TEMPEST_IMAGE Tempest image link
- * TEST_TEMPEST_PATTERN If not false, run tests matched to pattern only
- * TEST_TEMPEST_TARGET Salt target for tempest node
- *
-**/
-
-def common = new com.mirantis.mk.Common()
-def salt = new com.mirantis.mk.Salt()
-timeout(time: 12, unit: 'HOURS') {
- node() {
- try {
-
- stage('Run config change on test env') {
- build job: "deploy-update-service-config", parameters: [
- [$class: 'StringParameterValue', name: 'SALT_MASTER_URL', value: TST_SALT_MASTER_URL],
- [$class: 'StringParameterValue', name: 'SALT_MASTER_CREDENTIALS', value: TST_SALT_MASTER_CREDENTIALS],
- [$class: 'StringParameterValue', name: 'TARGET_BATCH_LIVE', value: TARGET_BATCH_LIVE],
- [$class: 'StringParameterValue', name: 'TARGET_SERVERS', value: TARGET_SERVERS],
- [$class: 'StringParameterValue', name: 'TARGET_STATES', value: TARGET_STATES],
- [$class: 'StringParameterValue', name: 'TARGET_SUBSET_LIVE', value: TARGET_SUBSET_LIVE],
- [$class: 'StringParameterValue', name: 'TARGET_SUBSET_TEST', value: TARGET_SUBSET_TEST],
- ]
- }
-
- stage('Test config change on test env') {
- build job: "deploy-test-service", parameters: [
- [$class: 'StringParameterValue', name: 'SALT_MASTER_URL', value: TST_SALT_MASTER_URL],
- [$class: 'StringParameterValue', name: 'SALT_MASTER_CREDENTIALS', value: TST_SALT_MASTER_CREDENTIALS],
- [$class: 'StringParameterValue', name: 'TEST_SERVICE', value: TEST_SERVICE],
- [$class: 'StringParameterValue', name: 'TEST_K8S_API_SERVER', value: TEST_K8S_API_SERVER],
- [$class: 'StringParameterValue', name: 'TEST_K8S_CONFORMANCE_IMAGE', value: TEST_K8S_CONFORMANCE_IMAGE],
- ]
- }
-
- stage('Promote config change in repo') {
- build job: "git-merge-branches", parameters: [
- [$class: 'StringParameterValue', name: 'REPO_URL', value: MODEL_REPO_URL],
- [$class: 'StringParameterValue', name: 'CREDENTIALS_ID', value: MODEL_REPO_CREDENTIALS],
- [$class: 'StringParameterValue', name: 'SOURCE_BRANCH', value: MODEL_REPO_SOURCE_BRANCH],
- [$class: 'StringParameterValue', name: 'TARGET_BRANCH', value: MODEL_REPO_TARGET_BRANCH],
- ]
- }
-
- stage('Run config change on production env') {
- build job: "deploy-update-service-config", parameters: [
- [$class: 'StringParameterValue', name: 'SALT_MASTER_URL', value: PRD_SALT_MASTER_URL],
- [$class: 'StringParameterValue', name: 'SALT_MASTER_CREDENTIALS', value: PRD_SALT_MASTER_CREDENTIALS],
- [$class: 'StringParameterValue', name: 'TARGET_BATCH_LIVE', value: TARGET_BATCH_LIVE],
- [$class: 'StringParameterValue', name: 'TARGET_SERVERS', value: TARGET_SERVERS],
- [$class: 'StringParameterValue', name: 'TARGET_STATES', value: TARGET_STATES],
- [$class: 'StringParameterValue', name: 'TARGET_SUBSET_LIVE', value: TARGET_SUBSET_LIVE],
- [$class: 'StringParameterValue', name: 'TARGET_SUBSET_TEST', value: TARGET_SUBSET_TEST],
- ]
- }
-
- stage('Test config change on prod env') {
- def result = build job: "deploy-test-service", parameters: [
- [$class: 'StringParameterValue', name: 'SALT_MASTER_URL', value: PRD_SALT_MASTER_URL],
- [$class: 'StringParameterValue', name: 'SALT_MASTER_CREDENTIALS', value: PRD_SALT_MASTER_CREDENTIALS],
- [$class: 'StringParameterValue', name: 'TEST_SERVICE', value: TEST_SERVICE],
- [$class: 'StringParameterValue', name: 'TEST_K8S_API_SERVER', value: TEST_K8S_API_SERVER],
- [$class: 'StringParameterValue', name: 'TEST_K8S_CONFORMANCE_IMAGE', value: TEST_K8S_CONFORMANCE_IMAGE],
- ]
- }
-
- } catch (Throwable e) {
- currentBuild.result = 'FAILURE'
- throw e
- }
- }
-}
diff --git a/test-cookiecutter-reclass-chunk.groovy b/test-cookiecutter-reclass-chunk.groovy
index 9e34cea..58355c1 100644
--- a/test-cookiecutter-reclass-chunk.groovy
+++ b/test-cookiecutter-reclass-chunk.groovy
@@ -12,16 +12,18 @@
slaveNode = env.SLAVE_NODE ?: 'python&&docker'
timeout(time: 1, unit: 'HOURS') {
- node(slaveNode) {
- try {
- extraVars = readYaml text: EXTRA_VARIABLES_YAML
- currentBuild.description = extraVars.modelFile
- saltModelTesting.testCCModel(extraVars)
- } catch (Throwable e) {
- // If there was an error or exception thrown, the build failed
- currentBuild.result = "FAILURE"
- currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
- throw e
+ node(slaveNode) {
+ stage("RunTest") {
+ try {
+ extraVars = readYaml text: EXTRA_VARIABLES_YAML
+ currentBuild.description = extraVars.modelFile
+ saltModelTesting.testCCModel(extraVars)
+ } catch (Throwable e) {
+ // If there was an error or exception thrown, the build failed
+ currentBuild.result = "FAILURE"
+ currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
+ throw e
+ }
+ }
}
- }
}
diff --git a/test-cookiecutter-reclass.groovy b/test-cookiecutter-reclass.groovy
index e6d3070..daf9efe 100644
--- a/test-cookiecutter-reclass.groovy
+++ b/test-cookiecutter-reclass.groovy
@@ -1,16 +1,58 @@
+/*
+Can be triggered from Gerrit, or run manually.
+Modes:
+1) manual run via job-build; it is possible to pass a refspec
+   TODO: currently it is impossible to use a custom COOKIECUTTER_TEMPLATE_URL|RECLASS_SYSTEM_URL; the Gerrit one is always used.
+ - for CC
+ - Reclass
+
+2) gerrit trigger
+ Automatically switches if the GERRIT_PROJECT variable is detected.
+ Always tests GERRIT_REFSPEC against the GERRIT_BRANCH-master version of the opposite project.
+ */
+
common = new com.mirantis.mk.Common()
gerrit = new com.mirantis.mk.Gerrit()
git = new com.mirantis.mk.Git()
python = new com.mirantis.mk.Python()
-gerritRef = env.GERRIT_REFSPEC ?: null
-slaveNode = (env.SLAVE_NODE ?: 'python&&docker')
-def alreadyMerged = false
+slaveNode = env.SLAVE_NODE ?: 'python&&docker'
-def reclassVersion = 'v1.5.4'
-if (common.validInputParam('RECLASS_VERSION')) {
- reclassVersion = RECLASS_VERSION
+// Global vars
+alreadyMerged = false
+gerritConData = [credentialsId : env.CREDENTIALS_ID,
+ gerritName : env.GERRIT_NAME ?: 'mcp-jenkins',
+ gerritHost : env.GERRIT_HOST ?: 'gerrit.mcp.mirantis.net',
+ gerritScheme : env.GERRIT_SCHEME ?: 'ssh',
+ gerritPort : env.GERRIT_PORT ?: '29418',
+ gerritRefSpec : null,
+ gerritProject : null,
+ withWipeOut : true,
+ GERRIT_CHANGE_NUMBER: null]
+//
+//ccTemplatesRepo = env.COOKIECUTTER_TEMPLATE_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.net:29418/mk/cookiecutter-templates'
+gerritDataCC = [:]
+gerritDataCC << gerritConData
+gerritDataCC['gerritBranch'] = env.COOKIECUTTER_TEMPLATE_BRANCH ?: 'master'
+gerritDataCC['gerritProject'] = 'mk/cookiecutter-templates'
+//
+//reclassSystemRepo = env.RECLASS_SYSTEM_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.net:29418/salt-models/reclass-system'
+gerritDataRS = [:]
+gerritDataRS << gerritConData
+gerritDataRS['gerritBranch'] = env.RECLASS_MODEL_BRANCH ?: 'master'
+gerritDataRS['gerritProject'] = 'salt-models/reclass-system'
+
+// version of deb repos, aka formulas/reclass
+testDistribRevision = env.DISTRIB_REVISION ?: 'nightly'
+reclassVersion = 'v1.5.4'
+if (env.RECLASS_VERSION) {
+ reclassVersion = env.RECLASS_VERSION
}
+// Name of sub-test chunk job
+chunkJobName = "test-mk-cookiecutter-templates-chunk"
+testModelBuildsData = [:]
+
def generateSaltMaster(modEnv, clusterDomain, clusterName) {
def nodeFile = "${modEnv}/nodes/cfg01.${clusterDomain}.yml"
@@ -33,7 +75,7 @@
/**
*
* @param contextFile - path to `contexts/XXX.yaml file`
- * @param virtualenv - pyvenv with CC and dep's
+ * @param virtualenv - pyvenv with CC and deps
* @param templateEnvDir - root of CookieCutter
* @return
*/
@@ -89,28 +131,41 @@
}
}
+def getAndUnpackNodesInfoArtifact(jobName, copyTo, build) {
+ return {
+ dir(copyTo) {
+ copyArtifacts(projectName: jobName, selector: specific(build), filter: "nodesinfo.tar.gz")
+ sh "tar -xvf nodesinfo.tar.gz"
+ sh "rm -v nodesinfo.tar.gz"
+ }
+ }
+}
-def testModel(modelFile, reclassVersion = 'v1.5.4') {
+def testModel(modelFile, reclassArtifactName, artifactCopyPath) {
// modelFile - `modelfilename` from model/modelfilename/modelfilename.yaml
// Grab all models and send them to be checked in parallel - one per thread.
-
- _values_string = """
+ def _uuid = "${env.JOB_NAME.toLowerCase()}_${env.BUILD_TAG.toLowerCase()}_${modelFile.toLowerCase()}_" + UUID.randomUUID().toString().take(8)
+ def _values_string = """
---
- MODELS_TARGZ: "${env.BUILD_URL}/artifact/patched_reclass.tar.gz"
- DockerCName: "${env.JOB_NAME.toLowerCase()}_${env.BUILD_TAG.toLowerCase()}_${modelFile.toLowerCase()}"
+ MODELS_TARGZ: "${env.BUILD_URL}/artifact/${reclassArtifactName}"
+ DockerCName: "${_uuid}"
testReclassEnv: "model/${modelFile}/"
modelFile: "contexts/${modelFile}.yml"
- DISTRIB_REVISION: "${DISTRIB_REVISION}"
+ DISTRIB_REVISION: "${testDistribRevision}"
EXTRA_FORMULAS: "${env.EXTRA_FORMULAS}"
reclassVersion: "${reclassVersion}"
"""
- build job: "test-mk-cookiecutter-templates-chunk", parameters: [
+ def chunkJob = build job: chunkJobName, parameters: [
[$class: 'StringParameterValue', name: 'EXTRA_VARIABLES_YAML',
value : _values_string.stripIndent()],
]
+ // Put sub-job info into global map.
+ testModelBuildsData.put(_uuid, ['jobname' : chunkJob.fullProjectName,
+ 'copyToDir': "${artifactCopyPath}/${modelFile}",
+ 'buildId' : "${chunkJob.number}"])
}
-def StepTestModel(basename) {
+def StepTestModel(basename, reclassArtifactName, artifactCopyPath) {
// We need to wrap what we return in a Groovy closure, or else it's invoked
// when this method is called, not when we pass it to parallel.
// To do this, you need to wrap the code below in { }, and either return
@@ -118,32 +173,38 @@
// return node object
return {
node(slaveNode) {
- testModel(basename)
+ testModel(basename, reclassArtifactName, artifactCopyPath)
}
}
}
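The comment in StepTestModel above is the key to the whole parallel-test layout: parallel expects a map of Closures, and any work not wrapped in { } would execute serially while the map is being built. A self-contained sketch:

    // stepFor returns a Closure; nothing runs until parallel invokes it.
    def stepFor(name) {
        return {
            echo "testing ${name}"
        }
    }
    def branches = [:]
    for (n in ['ceph', 'ovs']) {
        branches["test:${n}"] = stepFor(n)
    }
    parallel branches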
-def StepPrepareCCenv(refchange, templateEnvFolder) {
+def StepPrepareGit(templateEnvFolder, gerrit_data) {
// return git clone object
return {
+ def checkouted = false
+ common.infoMsg("StepPrepareGit: ${gerrit_data}")
// fetch needed sources
dir(templateEnvFolder) {
- if (refchange) {
- def gerritChange = gerrit.getGerritChange(GERRIT_NAME, GERRIT_HOST, GERRIT_CHANGE_NUMBER, CREDENTIALS_ID)
+ if (gerrit_data['gerritRefSpec']) {
+ // This part might not work when variables are passed manually
+ def gerritChange = gerrit.getGerritChange(gerrit_data['gerritName'], gerrit_data['gerritHost'],
+ gerrit_data['GERRIT_CHANGE_NUMBER'], gerrit_data['credentialsId'])
merged = gerritChange.status == "MERGED"
if (!merged) {
- checkouted = gerrit.gerritPatchsetCheckout([
- credentialsId: CREDENTIALS_ID
- ])
+ checkouted = gerrit.gerritPatchsetCheckout(gerrit_data)
} else {
- // update global variable for success return from pipeline
- //alreadyMerged = true
- common.successMsg("Change ${GERRIT_CHANGE_NUMBER} is already merged, no need to gate them")
- currentBuild.result = 'ABORTED'
- throw new hudson.AbortException('change already merged')
+ // update global variable for a clean return from the pipeline
+ alreadyMerged = true
+ common.successMsg("Change ${gerrit_data['GERRIT_CHANGE_NUMBER']} is already merged, no need to gate it")
+ error('change already merged')
}
} else {
- git.checkoutGitRepository(templateEnvFolder, COOKIECUTTER_TEMPLATE_URL, COOKIECUTTER_TEMPLATE_BRANCH, CREDENTIALS_ID)
+ // Get clean HEAD
+ gerrit_data['useGerritTriggerBuildChooser'] = false
+ checkouted = gerrit.gerritPatchsetCheckout(gerrit_data)
+ if (!checkouted) {
+ error("Failed to get repo:${gerrit_data}")
+ }
}
}
}
@@ -157,22 +218,110 @@
}
}
+def globalVariatorsUpdate() {
+ // Simple function to check and define branch-related variables.
+ // In general, it simply makes transitional updates for non-master branches,
+ // based on branch-name magic.
+ def message = ''
+ if (env.GERRIT_PROJECT) {
+ if (!['nightly', 'testing', 'stable', 'proposed', 'master'].contains(env.GERRIT_BRANCH)) {
+ gerritDataCC['gerritBranch'] = env.GERRIT_BRANCH
+ gerritDataRS['gerritBranch'] = env.GERRIT_BRANCH
+ // 'binary' branch logic w\o 'release/' prefix
+ testDistribRevision = env.GERRIT_BRANCH.split('/')[-1]
+ // Check if we are going to test bleeding-edge release, which doesn't have binary release yet
+ if (!common.checkRemoteBinary([apt_mk_version: testDistribRevision]).linux_system_repo_url) {
+ common.errorMsg("Binary release: ${testDistribRevision} does not exist. Falling back to 'proposed'!")
+ testDistribRevision = 'proposed'
+ }
+ }
+ // Identify who triggered the build, and to whom we should pass the refspec
+ if (env.GERRIT_PROJECT == 'salt-models/reclass-system') {
+ gerritDataRS['gerritRefSpec'] = env.GERRIT_REFSPEC
+ gerritDataRS['GERRIT_CHANGE_NUMBER'] = env.GERRIT_CHANGE_NUMBER
+ message = "<br/>RECLASS_SYSTEM_GIT_REF =>${gerritDataRS['gerritRefSpec']}"
+ } else if (env.GERRIT_PROJECT == 'mk/cookiecutter-templates') {
+ gerritDataCC['gerritRefSpec'] = env.GERRIT_REFSPEC
+ gerritDataCC['GERRIT_CHANGE_NUMBER'] = env.GERRIT_CHANGE_NUMBER
+ message = "<br/>COOKIECUTTER_TEMPLATE_REF =>${gerritDataCC['gerritRefSpec']}"
+ } else {
+ error("Unsupported gerrit-project triggered: ${env.GERRIT_PROJECT}")
+ }
+
+ message = "<font color='red'>GerritTrigger detected! We are in auto-mode:</font>" +
+ "<br/>Test env variables have been changed:" +
+ "<br/>COOKIECUTTER_TEMPLATE_BRANCH => ${gerritDataCC['gerritBranch']}" +
+ "<br/>DISTRIB_REVISION =>${testDistribRevision}" +
+ "<br/>RECLASS_MODEL_BRANCH=> ${gerritDataRS['gerritBranch']}" + message
+ common.warningMsg(message)
+ currentBuild.description = currentBuild.description ? message + "<br/>" + currentBuild.description : message
+ } else {
+ // Check for passed variables:
+ if (env.RECLASS_SYSTEM_GIT_REF) {
+ gerritDataRS['gerritRefSpec'] = RECLASS_SYSTEM_GIT_REF
+ }
+ if (env.COOKIECUTTER_TEMPLATE_REF) {
+ gerritDataCC['gerritRefSpec'] = COOKIECUTTER_TEMPLATE_REF
+ }
+ message = "<font color='red'>Manual run detected!</font>" + "<br/>"
+ currentBuild.description = currentBuild.description ? message + "<br/>" + currentBuild.description : message
+ }
+}
+
+def linkReclassModels(contextList, envPath, archiveName) {
+ // To be able to share reclass across all sub-envs.
+ // Also makes the artifact test more solid - one reclass is used for all sub-models.
+ // Archive Structure will be:
+ // tar.gz
+ // ├── contexts
+ // │ └── ceph.yml
+ // ├── global_reclass <<< reclass system
+ // ├── model
+ // │ └── ceph <<< from `context basename`
+ // │ ├── classes
+ // │ │ ├── cluster
+ // │ │ └── system -> ../../../global_reclass
+ // │ └── nodes
+ // │ └── cfg01.ceph-cluster-domain.local.yml
+ dir(envPath) {
+ for (String context : contextList) {
+ def basename = common.GetBaseName(context, '.yml')
+ dir("${envPath}/model/${basename}") {
+ sh(script: 'mkdir -p classes/; ln -sfv ../../../../global_reclass classes/system ')
+ }
+ }
+ // Save all models and all contexts. Warning! `h` flag must be used.
+ sh(script: "set -ex; tar -chzf ${archiveName} --exclude='*@tmp' model contexts", returnStatus: true)
+ archiveArtifacts artifacts: archiveName
+ // move for "Compare Pillars" stage
+ sh(script: "mv -v ${archiveName} ${env.WORKSPACE}")
+ }
+}
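The warning about the tar `h` flag in linkReclassModels matters because classes/system is a symlink pointing outside each model tree; without -h the archive would carry a dangling link instead of the reclass system content. A toy reproduction of the layout (paths are illustrative only):

    sh(script: '''
        set -ex
        mkdir -p global_reclass model/ceph/classes
        ln -sfv ../../../global_reclass model/ceph/classes/system
        tar -chzf model.tar.gz model   # -h stores the link target's content
    ''')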
+
timeout(time: 1, unit: 'HOURS') {
node(slaveNode) {
+ globalVariatorsUpdate()
+ def gerritDataCCHEAD = [:]
def templateEnvHead = "${env.WORKSPACE}/EnvHead/"
def templateEnvPatched = "${env.WORKSPACE}/EnvPatched/"
def contextFileListHead = []
def contextFileListPatched = []
def vEnv = "${env.WORKSPACE}/venv"
-
+ def headReclassArtifactName = "head_reclass.tar.gz"
+ def patchedReclassArtifactName = "patched_reclass.tar.gz"
+ def reclassNodeInfoDir = "${env.WORKSPACE}/reclassNodeInfo_compare/"
+ def reclassInfoHeadPath = "${reclassNodeInfoDir}/old"
+ def reclassInfoPatchedPath = "${reclassNodeInfoDir}/new"
try {
sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
stage('Download and prepare CC env') {
// Prepare 2 env - for patchset, and for HEAD
- paralellEnvs = [:]
+ def paralellEnvs = [:]
paralellEnvs.failFast = true
- paralellEnvs['downloadEnvHead'] = StepPrepareCCenv('', templateEnvHead)
- paralellEnvs['downloadEnvPatched'] = StepPrepareCCenv(gerritRef, templateEnvPatched)
+ paralellEnvs['downloadEnvPatched'] = StepPrepareGit(templateEnvPatched, gerritDataCC)
+ gerritDataCCHEAD << gerritDataCC
+ gerritDataCCHEAD['gerritRefSpec'] = null; gerritDataCCHEAD['GERRIT_CHANGE_NUMBER'] = null
+ paralellEnvs['downloadEnvHead'] = StepPrepareGit(templateEnvHead, gerritDataCCHEAD)
parallel paralellEnvs
}
stage("Check workflow_definition") {
@@ -193,7 +342,7 @@
}
}
// Generate over 2env's - for patchset, and for HEAD
- paralellEnvs = [:]
+ def paralellEnvs = [:]
paralellEnvs.failFast = true
paralellEnvs['GenerateEnvPatched'] = StepGenerateModels(contextFileListPatched, vEnv, templateEnvPatched)
paralellEnvs['GenerateEnvHead'] = StepGenerateModels(contextFileListHead, vEnv, templateEnvHead)
@@ -206,95 +355,69 @@
archiveArtifacts artifacts: "model.tar.gz"
}
- // to be able share reclass for all subenvs
- // Also, makes artifact test more solid - use one reclass for all of sub-models.
- // Archive Structure will be:
- // tar.gz
- // ├── contexts
- // │ └── ceph.yml
- // ├── global_reclass <<< reclass system
- // ├── model
- // │ └── ceph <<< from `context basename`
- // │ ├── classes
- // │ │ ├── cluster
- // │ │ └── system -> ../../../global_reclass
- // │ └── nodes
- // │ └── cfg01.ceph-cluster-domain.local.yml
-
- if (SYSTEM_GIT_URL == "") {
- git.checkoutGitRepository("${env.WORKSPACE}/global_reclass/", RECLASS_MODEL_URL, RECLASS_MODEL_BRANCH, CREDENTIALS_ID)
- } else {
- dir("${env.WORKSPACE}/global_reclass/") {
- if (!gerrit.gerritPatchsetCheckout(SYSTEM_GIT_URL, SYSTEM_GIT_REF, "HEAD", CREDENTIALS_ID)) {
- common.errorMsg("Failed to obtain system reclass with url: ${SYSTEM_GIT_URL} and ${SYSTEM_GIT_REF}")
- throw new RuntimeException("Failed to obtain system reclass")
- }
- }
- }
+ StepPrepareGit("${env.WORKSPACE}/global_reclass/", gerritDataRS).call()
// link all models, to use one global reclass
// For HEAD
- dir(templateEnvHead) {
- for (String context : contextFileListHead) {
- def basename = common.GetBaseName(context, '.yml')
- dir("${templateEnvHead}/model/${basename}") {
- sh(script: 'mkdir -p classes/; ln -sfv ../../../../global_reclass classes/system ')
- }
- }
- // Save all models and all contexts. Warning! `h` flag must be used.
- sh(script: "tar -chzf head_reclass.tar.gz --exclude='*@tmp' model contexts global_reclass", returnStatus: true)
- archiveArtifacts artifacts: "head_reclass.tar.gz"
- // move for "Compare Pillars" stage
- sh(script: "mv -v head_reclass.tar.gz ${env.WORKSPACE}")
- }
+ linkReclassModels(contextFileListHead, templateEnvHead, headReclassArtifactName)
// For patched
- dir(templateEnvPatched) {
- for (String context : contextFileListPatched) {
- def basename = common.GetBaseName(context, '.yml')
- dir("${templateEnvPatched}/model/${basename}") {
- sh(script: 'mkdir -p classes/; ln -sfv ../../../../global_reclass classes/system ')
- }
- }
- // Save all models and all contexts. Warning! `h` flag must be used.
- sh(script: "tar -chzf patched_reclass.tar.gz --exclude='*@tmp' model contexts global_reclass", returnStatus: true)
- archiveArtifacts artifacts: "patched_reclass.tar.gz"
- // move for "Compare Pillars" stage
- sh(script: "mv -v patched_reclass.tar.gz ${env.WORKSPACE}")
- }
+ linkReclassModels(contextFileListPatched, templateEnvPatched, patchedReclassArtifactName)
}
- stage("Compare Pillars") {
+ stage("Compare cluster lvl Head/Patched") {
// Compare patched and HEAD reclass pillars
- compareRoot = "${env.WORKSPACE}/test_compare/"
+ compareRoot = "${env.WORKSPACE}/cluster_compare/"
sh(script: """
mkdir -pv ${compareRoot}/new ${compareRoot}/old
- tar -xzf patched_reclass.tar.gz --directory ${compareRoot}/new
- tar -xzf head_reclass.tar.gz --directory ${compareRoot}/old
+ tar -xzf ${patchedReclassArtifactName} --directory ${compareRoot}/new
+ tar -xzf ${headReclassArtifactName} --directory ${compareRoot}/old
""")
common.warningMsg('infra/secrets.yml has been skipped from compare!')
- rezult = common.comparePillars(compareRoot, env.BUILD_URL, "-Ev \'infra/secrets.yml\'")
- currentBuild.description = rezult
+ result = '\n' + common.comparePillars(compareRoot, env.BUILD_URL, "-Ev \'infra/secrets.yml\'")
+ currentBuild.description = currentBuild.description ? currentBuild.description + result : result
}
- stage("test-contexts") {
- // Test contexts for patched only
- stepsForParallel = [:]
+ stage("TestContexts Head/Patched") {
+ def stepsForParallel = [:]
+ stepsForParallel.failFast = true
+ common.infoMsg("Found: ${contextFileListHead.size()} HEAD contexts to test.")
+ for (String context : contextFileListHead) {
+ def basename = common.GetBaseName(context, '.yml')
+ stepsForParallel.put("ContextHeadTest:${basename}", StepTestModel(basename, headReclassArtifactName, reclassInfoHeadPath))
+ }
common.infoMsg("Found: ${contextFileListPatched.size()} patched contexts to test.")
for (String context : contextFileListPatched) {
def basename = common.GetBaseName(context, '.yml')
- stepsForParallel.put("ContextPatchTest:${basename}", StepTestModel(basename))
+ stepsForParallel.put("ContextPatchedTest:${basename}", StepTestModel(basename, patchedReclassArtifactName, reclassInfoPatchedPath))
}
parallel stepsForParallel
- common.infoMsg('All tests done')
+ common.infoMsg('All TestContexts tests done')
}
-
+ stage("Compare NodesInfo Head/Patched") {
+ // Download all artifacts
+ def stepsForParallel = [:]
+ stepsForParallel.failFast = true
+ common.infoMsg("Found: ${testModelBuildsData.size()} nodeinfo artifacts to download.")
+ testModelBuildsData.each { bname, bdata ->
+ stepsForParallel.put("FetchData:${bname}",
+ getAndUnpackNodesInfoArtifact(bdata.jobname, bdata.copyToDir, bdata.buildId))
+ }
+ parallel stepsForParallel
+ // Compare patched and HEAD reclass pillars
+ result = '\n' + common.comparePillars(reclassNodeInfoDir, env.BUILD_URL, '')
+ currentBuild.description = currentBuild.description ? currentBuild.description + result : result
+ }
sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
} catch (Throwable e) {
+ if (alreadyMerged) {
+ currentBuild.result = 'ABORTED'
+ currentBuild.description = "Change ${GERRIT_CHANGE_NUMBER} is already merged, no need to gate it"
+ return
+ }
currentBuild.result = "FAILURE"
currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
throw e
} finally {
def dummy = "dummy"
- //FAILING common.sendNotification(currentBuild.result,"",["slack"])
}
}
}
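Tying the new stages together: testModel registers every chunk build in the global testModelBuildsData map, and the "Compare NodesInfo Head/Patched" stage walks that map to pull each sub-job's nodesinfo.tar.gz via getAndUnpackNodesInfoArtifact. A condensed sketch of the round trip, assuming the helpers defined in this file (the uuid and build id are placeholders):

    testModelBuildsData.put('some_uuid', ['jobname'  : chunkJobName,
                                          'copyToDir': "${reclassInfoPatchedPath}/ceph",
                                          'buildId'  : '42'])
    testModelBuildsData.each { bname, bdata ->
        getAndUnpackNodesInfoArtifact(bdata.jobname, bdata.copyToDir, bdata.buildId).call()
    }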
diff --git a/test-service.groovy b/test-service.groovy
deleted file mode 100644
index f9c34e3..0000000
--- a/test-service.groovy
+++ /dev/null
@@ -1,96 +0,0 @@
-/**
- *
- * Service test pipeline
- *
- * Expected parameters:
- * SALT_MASTER_URL URL of Salt master
- * SALT_MASTER_CREDENTIALS Credentials to the Salt API
- * Test settings:
- * TEST_SERVICE Comma separated list of services to test
- * TEST_K8S_API_SERVER Kubernetes API address
- * TEST_K8S_CONFORMANCE_IMAGE Path to docker image with conformance e2e tests
- * TEST_DOCKER_INSTALL Install docker on the target if true
- * TEST_TEMPEST_IMAGE Tempest image link
- * TEST_TEMPEST_PATTERN If not false, run tests matched to pattern only
- * TEST_TEMPEST_TARGET Salt target for tempest node
- *
- */
-
-common = new com.mirantis.mk.Common()
-git = new com.mirantis.mk.Git()
-salt = new com.mirantis.mk.Salt()
-test = new com.mirantis.mk.Test()
-def python = new com.mirantis.mk.Python()
-
-def pepperEnv = "pepperEnv"
-timeout(time: 12, unit: 'HOURS') {
- node("python") {
- try {
-
- stage('Setup virtualenv for Pepper') {
- python.setupPepperVirtualenv(pepperEnv, SALT_MASTER_URL, SALT_MASTER_CREDENTIALS)
- }
-
- //
- // Test
- //
- def artifacts_dir = '_artifacts/'
-
- if (common.checkContains('TEST_SERVICE', 'k8s')) {
- stage('Run k8s bootstrap tests') {
- def image = 'tomkukral/k8s-scripts'
- def output_file = image.replaceAll('/', '-') + '.output'
-
- // run image
- test.runConformanceTests(pepperEnv, 'ctl01*', TEST_K8S_API_SERVER, image)
-
- // collect output
- sh "mkdir -p ${artifacts_dir}"
- file_content = salt.getFileContent(pepperEnv, 'ctl01*', '/tmp/' + output_file)
- writeFile file: "${artifacts_dir}${output_file}", text: file_content
- sh "cat ${artifacts_dir}${output_file}"
-
- // collect artifacts
- archiveArtifacts artifacts: "${artifacts_dir}${output_file}"
- }
-
- stage('Run k8s conformance e2e tests') {
- def image = K8S_CONFORMANCE_IMAGE
- def output_file = image.replaceAll('/', '-') + '.output'
-
- // run image
- test.runConformanceTests(pepperEnv, 'ctl01*', TEST_K8S_API_SERVER, image)
-
- // collect output
- sh "mkdir -p ${artifacts_dir}"
- file_content = salt.getFileContent(pepperEnv, 'ctl01*', '/tmp/' + output_file)
- writeFile file: "${artifacts_dir}${output_file}", text: file_content
- sh "cat ${artifacts_dir}${output_file}"
-
- // collect artifacts
- archiveArtifacts artifacts: "${artifacts_dir}${output_file}"
- }
- }
-
- if (common.checkContains('TEST_SERVICE', 'openstack')) {
- if (common.checkContains('TEST_DOCKER_INSTALL', 'true')) {
- test.install_docker(pepperEnv, TEST_TEMPEST_TARGET)
- }
-
- stage('Run OpenStack tests') {
- test.runTempestTests(pepperEnv, TEST_TEMPEST_IMAGE, TEST_TEMPEST_TARGET, TEST_TEMPEST_PATTERN)
- }
-
- writeFile(file: 'report.xml', text: salt.getFileContent(pepperEnv, TEST_TEMPEST_TARGET, '/root/report.xml'))
- junit(keepLongStdio: true, testResults: 'report.xml', healthScaleFactor: Double.parseDouble(TEST_JUNIT_RATIO))
- def testResults = test.collectJUnitResults(currentBuild.rawBuild.getAction(hudson.tasks.test.AbstractTestResultAction.class))
- if(testResults){
- currentBuild.desc = String.format("result: %s", testResults["failed"] / testResults["total"])
- }
- }
- } catch (Throwable e) {
- currentBuild.result = 'FAILURE'
- throw e
- }
- }
-}
diff --git a/test-system-reclass-pipeline.groovy b/test-system-reclass-pipeline.groovy
index fa16739..afd2857 100644
--- a/test-system-reclass-pipeline.groovy
+++ b/test-system-reclass-pipeline.groovy
@@ -1,6 +1,9 @@
def gerrit = new com.mirantis.mk.Gerrit()
def common = new com.mirantis.mk.Common()
+
+slaveNode = env.SLAVE_NODE ?: 'python&&docker'
+
def gerritCredentials
try {
gerritCredentials = CREDENTIALS_ID
@@ -10,9 +13,9 @@
def gerritRef
try {
- gerritRef = GERRIT_REFSPEC
+ gerritRef = GERRIT_REFSPEC
} catch (MissingPropertyException e) {
- gerritRef = null
+ gerritRef = null
}
def defaultGitRef, defaultGitUrl
@@ -28,76 +31,76 @@
def systemRefspec = "HEAD"
def formulasRevision = 'testing'
timeout(time: 12, unit: 'HOURS') {
- node() {
- try {
- stage("Checkout") {
- if (gerritRef) {
- // job is triggered by Gerrit
- // test if change aren't already merged
- def gerritChange = gerrit.getGerritChange(GERRIT_NAME, GERRIT_HOST, GERRIT_CHANGE_NUMBER, gerritCredentials)
- merged = gerritChange.status == "MERGED"
- if(!merged){
- checkouted = gerrit.gerritPatchsetCheckout ([
- credentialsId : gerritCredentials
- ])
- systemRefspec = GERRIT_REFSPEC
- }
- // change defaultGit variables if job triggered from Gerrit
- defaultGitUrl = "${GERRIT_SCHEME}://${GERRIT_NAME}@${GERRIT_HOST}:${GERRIT_PORT}/${GERRIT_PROJECT}"
- } else if(defaultGitRef && defaultGitUrl) {
- checkouted = gerrit.gerritPatchsetCheckout(defaultGitUrl, defaultGitRef, "HEAD", gerritCredentials)
+ node(slaveNode) {
+ try {
+ stage("Checkout") {
+ if (gerritRef) {
+ // job is triggered by Gerrit
+ // test if change aren't already merged
+ def gerritChange = gerrit.getGerritChange(GERRIT_NAME, GERRIT_HOST, GERRIT_CHANGE_NUMBER, gerritCredentials)
+ merged = gerritChange.status == "MERGED"
+ if (!merged) {
+ checkouted = gerrit.gerritPatchsetCheckout([
+ credentialsId: gerritCredentials
+ ])
+ systemRefspec = GERRIT_REFSPEC
+ }
+ // change defaultGit variables if job triggered from Gerrit
+ defaultGitUrl = "${GERRIT_SCHEME}://${GERRIT_NAME}@${GERRIT_HOST}:${GERRIT_PORT}/${GERRIT_PROJECT}"
+ } else if (defaultGitRef && defaultGitUrl) {
+ checkouted = gerrit.gerritPatchsetCheckout(defaultGitUrl, defaultGitRef, "HEAD", gerritCredentials)
+ }
+ }
+
+ stage("Test") {
+ if (merged) {
+ common.successMsg("Gerrit change is already merged, no need to test it")
+ } else {
+ if (checkouted) {
+
+ def documentationOnly = false
+ if (gerritRef) {
+ documentationOnly = sh(script: "git diff-tree --no-commit-id --name-only -r HEAD | grep -v .releasenotes", returnStatus: true) == 1
+ }
+
+ sh("git diff-tree --no-commit-id --diff-filter=d --name-only -r HEAD | grep .yml | xargs -I {} python -c \"import yaml; yaml.load(open('{}', 'r'))\" \\;")
+
+ def branches = [:]
+ def testModels = documentationOnly ? [] : TEST_MODELS.split(',')
+ for (int i = 0; i < testModels.size(); i++) {
+ def cluster = testModels[i]
+ def clusterGitUrl = defaultGitUrl.substring(0, defaultGitUrl.lastIndexOf("/") + 1) + cluster
+ branches["${cluster}"] = {
+ build job: "test-salt-model-${cluster}", parameters: [
+ [$class: 'StringParameterValue', name: 'DEFAULT_GIT_URL', value: clusterGitUrl],
+ [$class: 'StringParameterValue', name: 'DEFAULT_GIT_REF', value: "HEAD"],
+ [$class: 'StringParameterValue', name: 'SYSTEM_GIT_URL', value: defaultGitUrl],
+ [$class: 'StringParameterValue', name: 'SYSTEM_GIT_REF', value: systemRefspec],
+ [$class: 'StringParameterValue', name: 'FORMULAS_REVISION', value: formulasRevision],
+ ]
+ }
+ }
+ branches["cookiecutter"] = {
+ build job: "test-mk-cookiecutter-templates", parameters: [
+ [$class: 'StringParameterValue', name: 'RECLASS_SYSTEM_URL', value: defaultGitUrl],
+ [$class: 'StringParameterValue', name: 'RECLASS_SYSTEM_GIT_REF', value: systemRefspec],
+ [$class: 'StringParameterValue', name: 'DISTRIB_REVISION', value: formulasRevision]
+
+ ]
+ }
+ parallel branches
+ } else {
+ throw new Exception("Cannot checkout gerrit patchset, GERRIT_REFSPEC and DEFAULT_GIT_REF is null")
+ }
+ }
+ }
+ } catch (Throwable e) {
+ // If there was an error or exception thrown, the build failed
+ currentBuild.result = "FAILURE"
+ currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
+ throw e
+ } finally {
+ common.sendNotification(currentBuild.result, "", ["slack"])
}
- }
-
- stage("Test") {
- if(merged){
- common.successMsg("Gerrit change is already merged, no need to test them")
- }else{
- if(checkouted){
-
- def documentationOnly = false
- if (gerritRef) {
- documentationOnly = sh(script: "git diff-tree --no-commit-id --name-only -r HEAD | grep -v .releasenotes", returnStatus: true) == 1
- }
-
- sh("git diff-tree --no-commit-id --diff-filter=d --name-only -r HEAD | grep .yml | xargs -I {} python -c \"import yaml; yaml.load(open('{}', 'r'))\" \\;")
-
- def branches = [:]
- def testModels = documentationOnly ? [] : TEST_MODELS.split(',')
- for (int i = 0; i < testModels.size(); i++) {
- def cluster = testModels[i]
- def clusterGitUrl = defaultGitUrl.substring(0, defaultGitUrl.lastIndexOf("/") + 1) + cluster
- branches["${cluster}"] = {
- build job: "test-salt-model-${cluster}", parameters: [
- [$class: 'StringParameterValue', name: 'DEFAULT_GIT_URL', value: clusterGitUrl],
- [$class: 'StringParameterValue', name: 'DEFAULT_GIT_REF', value: "HEAD"],
- [$class: 'StringParameterValue', name: 'SYSTEM_GIT_URL', value: defaultGitUrl],
- [$class: 'StringParameterValue', name: 'SYSTEM_GIT_REF', value: systemRefspec],
- [$class: 'StringParameterValue', name: 'FORMULAS_REVISION', value: formulasRevision],
- ]
- }
- }
- branches["cookiecutter"] = {
- build job: "test-mk-cookiecutter-templates", parameters: [
- [$class: 'StringParameterValue', name: 'SYSTEM_GIT_URL', value: defaultGitUrl],
- [$class: 'StringParameterValue', name: 'SYSTEM_GIT_REF', value: systemRefspec],
- [$class: 'StringParameterValue', name: 'DISTRIB_REVISION', value: formulasRevision]
-
- ]
- }
- parallel branches
- }else{
- throw new Exception("Cannot checkout gerrit patchset, GERRIT_REFSPEC and DEFAULT_GIT_REF is null")
- }
- }
- }
- } catch (Throwable e) {
- // If there was an error or exception thrown, the build failed
- currentBuild.result = "FAILURE"
- currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
- throw e
- } finally {
- common.sendNotification(currentBuild.result,"",["slack"])
}
- }
}
diff --git a/validate-cloud.groovy b/validate-cloud.groovy
index 3c27dce..fa9a7a6 100644
--- a/validate-cloud.groovy
+++ b/validate-cloud.groovy
@@ -16,6 +16,7 @@
* RUN_TEMPEST_TESTS If not false, run Tempest tests
* RUN_RALLY_TESTS If not false, run Rally tests
* K8S_RALLY If not false, run Kubernetes Rally tests
+ * STACKLIGHT_RALLY If not false, run additional Stacklight tests
* RUN_K8S_TESTS If not false, run Kubernetes e2e/conformance tests
* RUN_SPT_TESTS If not false, run SPT tests
* SPT_SSH_USER The name of the user which should be used for ssh to nodes
@@ -31,6 +32,7 @@
* RALLY_CONFIG_REPO Git repository with files for Rally
* RALLY_CONFIG_BRANCH Git branch which will be used during the checkout
* RALLY_SCENARIOS Path to file or directory with rally scenarios
+ * RALLY_SL_SCENARIOS Path to file or directory with Stacklight Rally scenarios
* RALLY_TASK_ARGS_FILE Path to file with rally tests arguments
* REPORT_DIR Path for reports outside docker image
* TEST_K8S_API_SERVER Kubernetes API address
@@ -81,20 +83,21 @@
stage('Run Rally tests') {
if (RUN_RALLY_TESTS.toBoolean() == true) {
def report_dir = env.REPORT_DIR ?: '/root/qa_results'
- def platform
- def rally_variables
+ def platform = ["type":"unknown", "stacklight_enabled":false]
+ def rally_variables = []
if (K8S_RALLY.toBoolean() == false) {
- platform = 'openstack'
+ platform['type'] = 'openstack'
rally_variables = ["floating_network=${FLOATING_NETWORK}",
"rally_image=${RALLY_IMAGE}",
"rally_flavor=${RALLY_FLAVOR}",
"availability_zone=${AVAILABILITY_ZONE}"]
} else {
- platform = 'k8s'
- rally_variables = ["plugins_repo":"${RALLY_PLUGINS_REPO}",
- "plugins_branch":"${RALLY_PLUGINS_BRANCH}"]
+ platform['type'] = 'k8s'
}
- validate.runRallyTests(pepperEnv, TARGET_NODE, TEST_IMAGE, platform, artifacts_dir, RALLY_CONFIG_REPO, RALLY_CONFIG_BRANCH, RALLY_SCENARIOS, RALLY_TASK_ARGS_FILE, rally_variables, report_dir, SKIP_LIST)
+ if (STACKLIGHT_RALLY.toBoolean() == true) {
+ platform['stacklight_enabled'] = true
+ }
+ validate.runRallyTests(pepperEnv, TARGET_NODE, TEST_IMAGE, platform, artifacts_dir, RALLY_CONFIG_REPO, RALLY_CONFIG_BRANCH, RALLY_PLUGINS_REPO, RALLY_PLUGINS_BRANCH, RALLY_SCENARIOS, RALLY_SL_SCENARIOS, RALLY_TASK_ARGS_FILE, rally_variables, report_dir, SKIP_LIST)
} else {
common.infoMsg("Skipping Rally tests")
}
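The hunk above replaces the bare platform string with a small descriptor map, so the Stacklight flag travels together with the platform type into validate.runRallyTests. A condensed sketch of how the descriptor is built:

    def platform = ['type': 'unknown', 'stacklight_enabled': false]
    platform['type'] = K8S_RALLY.toBoolean() ? 'k8s' : 'openstack'
    platform['stacklight_enabled'] = STACKLIGHT_RALLY.toBoolean()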
diff --git a/xtrabackup-restore-mysql-db.groovy b/xtrabackup-restore-mysql-db.groovy
index da3d463..b1d4a4e 100644
--- a/xtrabackup-restore-mysql-db.groovy
+++ b/xtrabackup-restore-mysql-db.groovy
@@ -67,7 +67,7 @@
if(backup_dir == null || backup_dir.isEmpty()) { backup_dir='/var/backups/mysql/xtrabackup' }
print(backup_dir)
salt.runSaltProcessStep(pepperEnv, 'I@galera:master', 'file.remove', ["${backup_dir}/dbrestored"], null, true)
- salt.cmdRun(pepperEnv, 'I@xtrabackup:client', "su root -c 'salt-call state.sls xtrabackup'")
+ salt.runSaltProcessStep(pepperEnv, 'I@galera:master', 'state.apply', ["xtrabackup.client.restore"], null, true)
salt.runSaltProcessStep(pepperEnv, 'I@galera:master', 'service.start', ['mysql'], null, true)
// wait until mysql service on galera master is up