Merge "fix partition removal for ceph nautilus"
diff --git a/src/com/mirantis/mcp/Validate.groovy b/src/com/mirantis/mcp/Validate.groovy
index 9c7a5b6..308ff14 100644
--- a/src/com/mirantis/mcp/Validate.groovy
+++ b/src/com/mirantis/mcp/Validate.groovy
@@ -57,7 +57,6 @@
default_mounts = ["/etc/ssl/certs/": "/etc/ssl/certs/",
"/srv/salt/pki/${cluster_name}/": "/etc/certs",
"/root/test/": "/root/tempest/",
- "/tmp/": "/tmp/",
"/etc/hosts": "/etc/hosts"]
params.mounts = default_mounts + params.mounts
if ( salt.cmdRun(params.master, params.target, "docker ps -f name=^${params.name}\$ -q", false, null, false)['return'][0].values()[0] ) {
diff --git a/src/com/mirantis/mk/Common.groovy b/src/com/mirantis/mk/Common.groovy
index 031bf6f..031cd03 100644
--- a/src/com/mirantis/mk/Common.groovy
+++ b/src/com/mirantis/mk/Common.groovy
@@ -6,6 +6,8 @@
import com.cloudbees.groovy.cps.NonCPS
import groovy.json.JsonSlurperClassic
+import org.jenkinsci.plugins.workflow.cps.EnvActionImpl
+
/**
*
* Common functions
@@ -933,15 +935,60 @@
* @param extraVars - Multiline YAML text with extra vars
*/
def mergeEnv(envVar, extraVars) {
- def common = new com.mirantis.mk.Common()
try {
def extraParams = readYaml text: extraVars
for(String key in extraParams.keySet()) {
envVar[key] = extraParams[key]
- common.warningMsg("Parameter ${key} is updated from EXTRA vars.")
+ println("INFO: Parameter ${key} is updated from EXTRA vars.")
}
} catch (Exception e) {
- common.errorMsg("Can't update env parameteres, because: ${e.toString()}")
+ println("ERR: Can't update env parameteres, because: ${e.toString()}")
+ }
+}
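+// Example (illustrative): mergeEnv(env, extraVarsYaml), where extraVarsYaml is multiline YAML text such as "FOO: bar",
+// sets env.FOO from the extra vars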
+
+def setMapDefaults(Object base, Object defaults, Boolean recursive = false) {
+/**
+ * Function to update a dict with default params if they are not set yet.
+ * Will not fail the entire job in case of any issues.
+ * This function will not overwrite options that are already set.
+ * @param base - dict
+ * @param defaults - dict
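+ * @param recursive - internal flag set on nested (recursive) calls
+ * Example (illustrative): setMapDefaults([retries: 3], [retries: 5, timeout: 60])
+ * keeps retries=3 and adds timeout=60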
+ */
+ if (base instanceof Map && defaults instanceof Map) {
+ defaults.inject(base) { result, key, value ->
+ if (result.containsKey(key)) {
+ setMapDefaults(result[key], value, true)
+ } else {
+ result.put(key, value)
+ }
+ return result
+ }
+ } else if (!recursive) {
+ echo("Can't update map parameters, wrong input data, skipping")
+ }
+}
+
+def setEnvDefaults(Object envVar, Object defaults) {
+ /**
+ * Function to set default values on an environment variables object
+ * at runtime (patches the existing 'env' instance).
+ * @param envVar - instance of EnvActionImpl (the pipeline 'env' object)
+ * @param defaults - Map with default values
+ * Example: setEnvDefaults(env, ['ENV_NAME': 'newENV_NAME', 'newvar': 'newval'])
+ * */
+
+ if (!(envVar instanceof EnvActionImpl)) {
+ error("setEnvDefaults 'env' is not an instance of EnvActionImpl")
+ } else if (!(defaults instanceof Map)) {
+ error("setEnvDefaults 'defaults' is not a Map")
+ }
+ defaults.each { key, value ->
+ if (envVar.getEnvironment().containsKey(key)) {
+ println("INFO:setEnvDefaults env variable ${key} already exist, not overwriting")
+ } else {
+ envVar[key] = value
+ println("INFO:setEnvDefaults env variable ${key} has been added")
+ }
}
}
@@ -1073,4 +1120,4 @@
// Official regex for Semver2 (https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string)
String semVerRegex = /^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/
return version ==~ semVerRegex
-}
\ No newline at end of file
+}
diff --git a/src/com/mirantis/mk/Debian.groovy b/src/com/mirantis/mk/Debian.groovy
index 6e74379..47cd772 100644
--- a/src/com/mirantis/mk/Debian.groovy
+++ b/src/com/mirantis/mk/Debian.groovy
@@ -294,8 +294,20 @@
common.retry(3, 5) {
salt.cmdRun(env, target, 'salt-call pkg.refresh_db failhard=true', true, batch)
}
+
+ /* First try to upgrade the salt components, since they require an asynchronous upgrade */
+ upgradeSaltPackages(env, target)
def cmd = "export DEBIAN_FRONTEND=noninteractive; apt-get -y -q --allow-downgrades -o Dpkg::Options::=\"--force-confdef\" -o Dpkg::Options::=\"--force-confold\" ${mode}"
- salt.cmdRun(env, target, cmd, true, batch)
+
+ /*
+ * This is a long running batch operation that may return empty response
+ * which is a pretty typical salt behavior. This does not represent an error
+ * but might hide the error if it's ignored. If there is no persistent error
+ * with the procedure itself, the subsequent run will succeed.
+ */
+ common.retry(2, 120) {
+ salt.cmdRun(env, target, cmd, true, batch)
+ }
rebootRequired = salt.runSaltProcessStep(env, target, 'file.file_exists', ['/var/run/reboot-required'], batch, true, 5)['return'][0].values()[0].toBoolean()
if (rebootRequired) {
@@ -310,3 +322,36 @@
common.errorMsg("Invalid upgrade mode specified: ${mode}. Has to be 'upgrade' or 'dist-upgrade'")
}
}
+
+/**
+* Upgrade salt packages on the target asynchronously and wait for the minions' availability.
+*
+* @param env Salt Connection object or env Salt command map
+* @param target Salt target to upgrade packages on.
+* @param timeout Sleep timeout when doing retries.
+* @param attempts Number of attempts to wait for.
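+* Example (illustrative): upgradeSaltPackages(env, 'I@salt:control') upgrades the salt packages on all salt:control minions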
+*/
+def upgradeSaltPackages(env, target, timeout=60, attempts=20) {
+ def common = new com.mirantis.mk.Common()
+ def salt = new com.mirantis.mk.Salt()
+ def saltUpgradeCmd =
+ 'export DEBIAN_FRONTEND=noninteractive; apt-get -y -q ' +
+ '-o Dpkg::Options::=\"--force-confdef\" -o Dpkg::Options::=\"--force-confold\" ' +
+ 'install --only-upgrade salt-master salt-common salt-api salt-minion'
+
+ common.infoMsg("Upgrading SaltStack on ${target}")
+ salt.cmdRun(env, target, saltUpgradeCmd, false, null, true, [], [], true)
+ /* wait for 2 mins before checking the availability of minions to give
+ apt some time to finish updating so that dpkg releases its locks */
+ sleep(120)
+ /* taken from upgrade-mcp-release */
+ common.retry(attempts, timeout) {
+ salt.minionsReachable(env, 'I@salt:master', target)
+ def running = salt.runSaltProcessStep(env, target, 'saltutil.running', [], null, true, 5)
+ for (value in running.get("return")[0].values()) {
+ if (value != []) {
+ throw new Exception("Not all salt-minions are ready for execution")
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/com/mirantis/mk/Git.groovy b/src/com/mirantis/mk/Git.groovy
index f699b3e..d724b8f 100644
--- a/src/com/mirantis/mk/Git.groovy
+++ b/src/com/mirantis/mk/Git.groovy
@@ -129,6 +129,12 @@
} else {
gitOpts = ''
}
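+ // Assumption: exporting the author/committer identity via the environment as well, so that
+ // 'git commit' picks it up even if the repository/global config is not honored in this context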
+ def gitEnv = [
+ "GIT_AUTHOR_NAME=${gitName}",
+ "GIT_AUTHOR_EMAIL=${gitEmail}",
+ "GIT_COMMITTER_NAME=${gitName}",
+ "GIT_COMMITTER_EMAIL=${gitEmail}",
+ ]
dir(path) {
sh "git config ${global_arg} user.email '${gitEmail}'"
sh "git config ${global_arg} user.name '${gitName}'"
@@ -137,10 +143,12 @@
script: 'git add -A',
returnStdout: true
).trim()
- git_cmd = sh(
- script: "git commit ${gitOpts} -m '${message}'",
- returnStdout: true
- ).trim()
+ withEnv(gitEnv) {
+ git_cmd = sh(
+ script: "git commit ${gitOpts} -m '${message}'",
+ returnStdout: true
+ ).trim()
+ }
}
return git_cmd
}
diff --git a/src/com/mirantis/mk/KaasUtils.groovy b/src/com/mirantis/mk/KaasUtils.groovy
index c28e00c..b6dc191 100644
--- a/src/com/mirantis/mk/KaasUtils.groovy
+++ b/src/com/mirantis/mk/KaasUtils.groovy
@@ -8,33 +8,33 @@
/**
-* Determine scope of test suite against per-commit KaaS deployment based on keywords
-* Keyword list: https://gerrit.mcp.mirantis.com/plugins/gitiles/kaas/core/+/refs/heads/master/.git-message-template#50
-*
-* Used for components team to combine test-suites and forward desired parameters to kaas/core deployment jobs
-* Example scheme:
-* New CR pushed in kubernetes/lcm-ansible -> parsing it's commit body and combine test-suite -> trigger deployment jobs from kaas/core
-* manage test-suite through Jenkins Job Parameters
-*
-* @return (map)[
-* deployChildEnabled: (bool) True if need to deploy child cluster during demo-run
-* runUie2eEnabled: (bool) True if need to run ui-e2e cluster during demo-run
-* ]
-*/
+ * Determine scope of test suite against per-commit KaaS deployment based on keywords
+ * Keyword list: https://gerrit.mcp.mirantis.com/plugins/gitiles/kaas/core/+/refs/heads/master/.git-message-template#50
+ *
+ * Used by component teams to combine test suites and forward the desired parameters to kaas/core deployment jobs
+ * Example scheme:
+ * New CR pushed in kubernetes/lcm-ansible -> its commit body is parsed and the test suite combined -> deployment jobs are triggered from kaas/core
+ * manage the test suite through Jenkins Job Parameters
+ *
+ * @return (map)[
+ * deployChildEnabled: (bool) True if a child cluster needs to be deployed during the demo run
+ * runUie2eEnabled: (bool) True if the ui-e2e suite needs to be run during the demo run
+ * ]
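+ * Example (illustrative): a commit message containing '[child-deploy]' sets deployChildEnabled=true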
+ */
def checkDeploymentTestSuite() {
def common = new com.mirantis.mk.Common()
// Available triggers and its sane defaults
- def deployChild = (env.DEPLOY_CHILD_CLUSTER != null ) ? env.DEPLOY_CHILD_CLUSTER.toBoolean() : false
- def upgradeChild = (env.UPGRADE_CHILD_CLUSTER != null ) ? env.UPGRADE_CHILD_CLUSTER.toBoolean() : false
- def upgradeMgmt = (env.UPGRADE_MGMT_CLUSTER != null ) ? env.UPGRADE_MGMT_CLUSTER.toBoolean() : false
- def runUie2e = (env.RUN_UI_E2E != null ) ? env.RUN_UI_E2E.toBoolean() : false
- def runMgmtConformance = (env.RUN_MGMT_CFM != null ) ? env.RUN_MGMT_CFM.toBoolean() : false
- def runChildConformance = (env.RUN_CHILD_CFM != null ) ? env.RUN_CHILD_CFM.toBoolean() : false
- def fetchServiceBinaries = (env.FETCH_BINARIES_FROM_UPSTREAM != null) ? env.FETCH_BINARIES_FROM_UPSTREAM.toBoolean() : false
- def awsOnDemandDemo = (env.RUN_AWS_ON_DEMAND_DEMO != null) ? env.RUN_AWS_ON_DEMAND_DEMO.toBoolean() : false
+ def deployChild = env.DEPLOY_CHILD_CLUSTER ? env.DEPLOY_CHILD_CLUSTER.toBoolean() : false
+ def upgradeChild = env.UPGRADE_CHILD_CLUSTER ? env.UPGRADE_CHILD_CLUSTER.toBoolean() : false
+ def upgradeMgmt = env.UPGRADE_MGMT_CLUSTER ? env.UPGRADE_MGMT_CLUSTER.toBoolean() : false
+ def runUie2e = env.RUN_UI_E2E ? env.RUN_UI_E2E.toBoolean() : false
+ def runMgmtConformance = env.RUN_MGMT_CFM ? env.RUN_MGMT_CFM.toBoolean() : false
+ def runChildConformance = env.RUN_CHILD_CFM ? env.RUN_CHILD_CFM.toBoolean() : false
+ def fetchServiceBinaries = env.FETCH_BINARIES_FROM_UPSTREAM ? env.FETCH_BINARIES_FROM_UPSTREAM.toBoolean() : false
+ def awsOnDemandDemo = env.ALLOW_AWS_ON_DEMAND ? env.ALLOW_AWS_ON_DEMAND.toBoolean() : false
- def commitMsg = (env.GERRIT_CHANGE_COMMIT_MESSAGE != null) ? new String(env.GERRIT_CHANGE_COMMIT_MESSAGE.decodeBase64()) : ''
+ def commitMsg = env.GERRIT_CHANGE_COMMIT_MESSAGE ? new String(env.GERRIT_CHANGE_COMMIT_MESSAGE.decodeBase64()) : ''
if (commitMsg ==~ /(?s).*\[child-deploy\].*/ || env.GERRIT_EVENT_COMMENT_TEXT ==~ /(?s).*child-deploy.*/ || upgradeChild || runChildConformance) {
deployChild = true
}
@@ -85,14 +85,14 @@
Service binaries fetching scheduled: ${fetchServiceBinaries}
Triggers: https://gerrit.mcp.mirantis.com/plugins/gitiles/kaas/core/+/refs/heads/master/.git-message-template#50""")
return [
- deployChildEnabled: deployChild,
- upgradeChildEnabled: upgradeChild,
- runChildConformanceEnabled: runChildConformance,
- upgradeMgmtEnabled: upgradeMgmt,
- runUie2eEnabled: runUie2e,
- runMgmtConformanceEnabled: runMgmtConformance,
+ deployChildEnabled : deployChild,
+ upgradeChildEnabled : upgradeChild,
+ runChildConformanceEnabled : runChildConformance,
+ upgradeMgmtEnabled : upgradeMgmt,
+ runUie2eEnabled : runUie2e,
+ runMgmtConformanceEnabled : runMgmtConformance,
fetchServiceBinariesEnabled: fetchServiceBinaries,
- awsOnDemandDemoEnabled: awsOnDemandDemo]
+ awsOnDemandDemoEnabled : awsOnDemandDemo]
}
/**
@@ -104,8 +104,7 @@
* New CR pushed in kubernetes/lcm-ansible -> parsing it's commit body and get custom test refspecs -> trigger deployment jobs from kaas/core
* manage refspecs through Jenkins Job Parameters
*
- * @return (map)[
- * siTests: (string) final refspec for si-tests
+ * @return (map)[* siTests: (string) final refspec for si-tests
* siPipelines: (string) final refspec for si-pipelines
* ]
*/
@@ -113,15 +112,23 @@
def common = new com.mirantis.mk.Common()
// Available triggers and its sane defaults
- def siTestsRefspec = (env.SI_TESTS_REFSPEC != null ) ? env.SI_TESTS_REFSPEC : 'master'
- def siPipelinesRefspec = (env.SI_PIPELINES_REFSPEC != null ) ? env.SI_PIPELINES_REFSPEC : 'master'
- def commitMsg = (env.GERRIT_CHANGE_COMMIT_MESSAGE != null) ? new String(env.GERRIT_CHANGE_COMMIT_MESSAGE.decodeBase64()) : ''
+ def siTestsRefspec = env.SI_TESTS_REFSPEC ?: 'master'
+ def siTestsFeatureFlags = env.SI_TESTS_FEATURE_FLAGS ?: ''
+ def siPipelinesRefspec = env.SI_PIPELINES_REFSPEC ?: 'master'
+ def siTestsDockerImage = env.SI_TESTS_DOCKER_IMAGE ?: 'docker-dev-kaas-local.docker.mirantis.net/mirantis/kaas/si-test:master'
+ def commitMsg = env.GERRIT_CHANGE_COMMIT_MESSAGE ? new String(env.GERRIT_CHANGE_COMMIT_MESSAGE.decodeBase64()) : ''
def siTestMatches = (commitMsg =~ /(\[si-tests-ref\s*refs\/changes\/.*?\])/)
+ def siFeatureFlagsMatches = (commitMsg =~ /(\[si-feature-flags\s.*?\])/)
def siPipelinesMatches = (commitMsg =~ /(\[si-pipelines-ref\s*refs\/changes\/.*?\])/)
if (siTestMatches.size() > 0) {
siTestsRefspec = siTestMatches[0][0].split('si-tests-ref')[1].replaceAll('[\\[\\]]', '').trim()
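+ // e.g. (illustrative) a refspec 'refs/changes/10/123410/1' yields the image tag 'kaas-si-test-123410:1'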
+ siTestsDockerImage = "docker-dev-local.docker.mirantis.net/review/" +
+ "kaas-si-test-${siTestsRefspec.split('/')[-2]}:${siTestsRefspec.split('/')[-1]}"
+ }
+ if (siFeatureFlagsMatches.size() > 0) {
+ siTestsFeatureFlags = siFeatureFlagsMatches[0][0].split('si-feature-flags')[1].replaceAll('[\\[\\]]', '').trim()
}
if (siPipelinesMatches.size() > 0) {
siPipelinesRefspec = siPipelinesMatches[0][0].split('si-pipelines-ref')[1].replaceAll('[\\[\\]]', '').trim()
@@ -130,6 +137,124 @@
common.infoMsg("""
kaas/si-pipelines will be fetched from: ${siPipelinesRefspec}
kaas/si-tests will be fetched from: ${siTestsRefspec}
+ kaas/si-tests docker image will be fetched from: ${siTestsDockerImage}
+ kaas/si-tests additional feature flags applied: [${siTestsFeatureFlags}]
Keywords: https://gerrit.mcp.mirantis.com/plugins/gitiles/kaas/core/+/refs/heads/master/.git-message-template#59""")
- return [siTests: siTestsRefspec, siPipelines: siPipelinesRefspec]
+ return [siTests: siTestsRefspec, siFeatureFlags: siTestsFeatureFlags, siPipelines: siPipelinesRefspec, siTestsDockerImage: siTestsDockerImage]
+}
+
+/**
+ * Determine if a custom kaas core/pipelines refspec was forwarded from the gerrit change request
+ *
+ * Keyword list: https://gerrit.mcp.mirantis.com/plugins/gitiles/kaas/core/+/refs/heads/master/.git-message-template#59
+ * Used by component teams to test component changes w/ custom Core refspecs using kaas/core deployment jobs
+ * Example scheme:
+ * New CR pushed in kubernetes/lcm-ansible -> its commit body is parsed to get custom test refspecs -> deployment jobs are triggered from kaas/core
+ * manage refspecs through Jenkins Job Parameters
+ *
+ * @return (map)[ core: (string) final refspec for kaas/core
+ * corePipelines: (string) final refspec for pipelines in kaas/core
+ * ]
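+ * Example (illustrative): '[core-ref refs/changes/20/987620/3]' in the commit message overrides the kaas/core refspec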
+ */
+def checkCustomCoreRefspec() {
+ def common = new com.mirantis.mk.Common()
+
+ // Available triggers and its sane defaults
+ def coreRefspec = env.KAAS_CORE_REFSPEC ?: 'master'
+ // by default using value of GERRIT_REFSPEC parameter in *kaas/core jobs*
+ def corePipelinesRefspec = env.KAAS_PIPELINE_REFSPEC ?: '\$GERRIT_REFSPEC'
+ def commitMsg = env.GERRIT_CHANGE_COMMIT_MESSAGE ? new String(env.GERRIT_CHANGE_COMMIT_MESSAGE.decodeBase64()) : ''
+
+ def coreMatches = (commitMsg =~ /(\[core-ref\s*refs\/changes\/.*?\])/)
+ def corePipelinesMatches = (commitMsg =~ /(\[core-pipelines-ref\s*refs\/changes\/.*?\])/)
+
+ if (coreMatches.size() > 0) {
+ coreRefspec = coreMatches[0][0].split('core-ref')[1].replaceAll('[\\[\\]]', '').trim()
+ }
+ if (corePipelinesMatches.size() > 0) {
+ corePipelinesRefspec = corePipelinesMatches[0][0].split('core-pipelines-ref')[1].replaceAll('[\\[\\]]', '').trim()
+ }
+
+ common.infoMsg("""
+ kaas/core will be fetched from: ${coreRefspec}
+ kaas/core pipelines will be fetched from: ${corePipelinesRefspec}
+ Keywords: https://gerrit.mcp.mirantis.com/plugins/gitiles/kaas/core/+/refs/heads/master/.git-message-template#59""")
+ return [core: coreRefspec, corePipelines: corePipelinesRefspec]
+}
+
+
+/**
+ * Trigger KaaS demo jobs based on AWS/OS providers with a customized test suite, parsed from external sources (gerrit commit/jj vars)
+ * Keyword list: https://gerrit.mcp.mirantis.com/plugins/gitiles/kaas/core/+/refs/heads/master/.git-message-template
+ * Used by component teams to test component changes w/ customized SI tests/refspecs using kaas/core deployment jobs
+ *
+ * @param component (string) component name [iam, lcm, stacklight]
+ * @param patchSpec (string) Patch for kaas/cluster releases in json format
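+ * Example (illustrative, hypothetical patch spec): triggerPatchedComponentDemo('lcm', '{"lcm": {"version": "0.3.0"}}')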
+ */
+def triggerPatchedComponentDemo(component, patchSpec) {
+ def common = new com.mirantis.mk.Common()
+ // Determine if custom trigger keywords forwarded from gerrit
+ def triggers = checkDeploymentTestSuite()
+ // Determine SI refspecs
+ def siRefspec = checkCustomSIRefspec()
+ // Determine Core refspecs
+ def coreRefspec = checkCustomCoreRefspec()
+
+ def jobs = [:]
+ def parameters = [
+ string(name: 'GERRIT_REFSPEC', value: coreRefspec.core),
+ string(name: 'KAAS_PIPELINE_REFSPEC', value: coreRefspec.corePipelines),
+ string(name: 'SI_TESTS_REFSPEC', value: siRefspec.siTests),
+ string(name: 'SI_TESTS_FEATURE_FLAGS', value: siRefspec.siFeatureFlags),
+ string(name: 'SI_PIPELINES_REFSPEC', value: siRefspec.siPipelines),
+ string(name: 'CUSTOM_RELEASE_PATCH_SPEC', value: patchSpec),
+ booleanParam(name: 'UPGRADE_MGMT', value: triggers.upgradeMgmtEnabled),
+ booleanParam(name: 'RUN_UI_E2E', value: triggers.runUie2eEnabled),
+ booleanParam(name: 'RUN_MGMT_CONFORMANCE', value: triggers.runMgmtConformanceEnabled),
+ booleanParam(name: 'DEPLOY_CHILD', value: triggers.deployChildEnabled),
+ booleanParam(name: 'UPGRADE_CHILD', value: triggers.upgradeChildEnabled),
+ booleanParam(name: 'RUN_CHILD_CONFORMANCE', value: triggers.runChildConformanceEnabled),
+ booleanParam(name: 'ALLOW_AWS_ON_DEMAND', value: triggers.awsOnDemandDemoEnabled),
+ ]
+
+ def jobResults = []
+ jobs["kaas-core-openstack-patched-${component}"] = {
+ // declare outside the try block so it is still visible in 'finally'
+ def job_info
+ try {
+ common.infoMsg('Deploy: patched KaaS demo with Openstack provider')
+ job_info = build job: "kaas-testing-core-openstack-workflow-${component}", parameters: parameters, wait: true
+ def build_description = job_info.getDescription()
+ if (build_description) {
+ currentBuild.description += build_description
+ }
+ } finally {
+ def build_result = job_info ? job_info.getResult() : 'FAILURE'
+ common.infoMsg("Patched KaaS demo with Openstack provider finished with status: ${build_result}")
+ jobResults.add(build_result)
+ }
+ }
+ if (triggers.awsOnDemandDemoEnabled) {
+ jobs["kaas-core-aws-patched-${component}"] = {
+ // declare outside the try block so it is still visible in 'finally'
+ def job_info
+ try {
+ common.infoMsg('Deploy: patched KaaS demo with AWS provider')
+ job_info = build job: "kaas-testing-core-aws-workflow-${component}", parameters: parameters, wait: true
+ def build_description = job_info.getDescription()
+ if (build_description) {
+ currentBuild.description += build_description
+ }
+ } finally {
+ def build_result = job_info ? job_info.getResult() : 'FAILURE'
+ common.infoMsg("Patched KaaS demo with AWS provider finished with status: ${build_result}")
+ jobResults.add(build_result)
+ }
+ }
+ }
+
+ common.infoMsg('Trigger KaaS demo deployments according to defined provider set')
+ // Limit build concurrency workaround examples: https://issues.jenkins-ci.org/browse/JENKINS-44085
+ parallel jobs
+
+ if (jobResults.contains('FAILURE')) {
+ common.infoMsg('One of the parallel downstream jobs failed, marking the executor job as failed')
+ currentBuild.result = 'FAILURE'
+ }
}
diff --git a/src/com/mirantis/mk/ReleaseWorkflow.groovy b/src/com/mirantis/mk/ReleaseWorkflow.groovy
index 1302bbe..66e52d6 100644
--- a/src/com/mirantis/mk/ReleaseWorkflow.groovy
+++ b/src/com/mirantis/mk/ReleaseWorkflow.groovy
@@ -85,6 +85,8 @@
* - crTopic
* - crAuthorName
* - crAuthorEmail
+ * - valuesFromFile (allows passing JSON strings; they are written to temp files and the files are
+ * passed to the app)
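+ * Example (illustrative): pass [valuesFromFile: true] with a JSON string value such as '{"tag": "1.2.3"}';
+ * the value is written to a temp YAML file and passed to the metadata app via --file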
* @param dirdepth level of creation dirs from key param
*/
@@ -103,6 +105,7 @@
String crTopic = params.get('crTopic', '')
String changeAuthorName = params.get('crAuthorName', 'MCP-CI')
String changeAuthorEmail = params.get('crAuthorEmail', 'mcp-ci-jenkins@ci.mcp.mirantis.net')
+ Boolean valuesFromFile = params.get('valuesFromFile', false)
def cred = common.getCredentials(gitCredentialsId, 'key')
String gerritUser = cred.username
@@ -147,7 +150,23 @@
if (keyArr.size() == valueArr.size()) {
docker.image(toxDockerImage).inside("--volume ${repoDir}:/workspace") {
for (i in 0..keyArr.size()-1) {
- sh "cd /workspace && tox -qq -e metadata -- update --create --key '${keyArr[i]}' --value '${valueArr[i]}'"
+ def valueExpression = "--value '${valueArr[i]}'"
+ def tmpFile
+ if (valuesFromFile){
+ def data = readJSON text: valueArr[i]
+ // only generate the temp file name (-u), so writeYaml can create the file itself
+ tmpFile = sh(script: "mktemp -u -p ${workspace} meta_key_file.XXXXXX", returnStdout: true).trim()
+ // yaml is native format for meta app for loading values
+ writeYaml data: data, file: tmpFile
+ valueExpression = "--file ${tmpFile}"
+ }
+ try {
+ sh "cd /workspace && tox -qq -e metadata -- update --create --key '${keyArr[i]}' ${valueExpression}"
+ } finally {
+ if (valuesFromFile){
+ sh "rm ${tmpFile}"
+ }
+ }
}
}
}
diff --git a/src/com/mirantis/mk/Ruby.groovy b/src/com/mirantis/mk/Ruby.groovy
index 8fac324..4681d07 100644
--- a/src/com/mirantis/mk/Ruby.groovy
+++ b/src/com/mirantis/mk/Ruby.groovy
@@ -11,7 +11,7 @@
def ensureRubyEnv(rubyVersion="2.4.1"){
if (!fileExists("/var/lib/jenkins/.rbenv/versions/${rubyVersion}/bin/ruby")){
//XXX: patch ruby-build because debian package is quite old
- sh "git clone https://github.com/rbenv/ruby-build.git ~/.rbenv/plugins/ruby-build || git -C ~/.rbenv/plugins/ruby-build pull"
+ sh "git clone https://github.com/rbenv/ruby-build.git ~/.rbenv/plugins/ruby-build || git -C ~/.rbenv/plugins/ruby-build pull origin master"
sh "rbenv install ${rubyVersion} -sv";
}
sh "rbenv local ${rubyVersion};rbenv exec gem update --system"
diff --git a/src/com/mirantis/mk/Salt.groovy b/src/com/mirantis/mk/Salt.groovy
index 30957e5..48309ff 100644
--- a/src/com/mirantis/mk/Salt.groovy
+++ b/src/com/mirantis/mk/Salt.groovy
@@ -332,9 +332,10 @@
* @param output do you want to print output
* @param saltArgs additional salt args eq. ["runas=aptly"]
* @param replacing list with maps for deletion in info message (passwords, logins, etc)
+ * @param async run commands with async client (default false)
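+ * Example (illustrative): cmdRun(saltId, target, cmd, false, null, true, [], [], true) fires cmd via the 'local_async' client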
* @return output of salt command
*/
-def cmdRun(saltId, target, cmd, checkResponse = true, batch=null, output = true, saltArgs = [], replacing = []) {
+def cmdRun(saltId, target, cmd, checkResponse = true, batch=null, output = true, saltArgs = [], replacing = [], async = false) {
def common = new com.mirantis.mk.Common()
def originalCmd = cmd
common.infoSensitivityMsg("Running command ${cmd} on ${target}", true, replacing)
@@ -345,7 +346,8 @@
// add cmd name to salt args list
saltArgs << cmd
- def out = runSaltCommand(saltId, 'local', ['expression': target, 'type': 'compound'], 'cmd.run', batch, saltArgs.reverse())
+ def client = async ? 'local_async' : 'local'
+ def out = runSaltCommand(saltId, client, ['expression': target, 'type': 'compound'], 'cmd.run', batch, saltArgs.reverse())
if (checkResponse) {
// iterate over all affected nodes and check success return code
if (out["return"]){
@@ -355,14 +357,14 @@
def nodeKey = node.keySet()[j]
if (node[nodeKey] instanceof String) {
if (!node[nodeKey].contains("Salt command execution success")) {
- throw new Exception("Execution of cmd ${originalCmd} failed. Server returns: ${node[nodeKey]}")
+ throw new Exception("Execution of cmd ${originalCmd} failed. ${nodeKey} returns: ${node[nodeKey]}")
}
} else if (node[nodeKey] instanceof Boolean) {
if (!node[nodeKey]) {
- throw new Exception("Execution of cmd ${originalCmd} failed. Server returns: ${node[nodeKey]}")
+ throw new Exception("Execution of cmd ${originalCmd} failed. ${nodeKey} returns: ${node[nodeKey]}")
}
} else {
- throw new Exception("Execution of cmd ${originalCmd} failed. Server returns unexpected data type: ${node[nodeKey]}")
+ throw new Exception("Execution of cmd ${originalCmd} failed. ${nodeKey} returns unexpected data type: ${node[nodeKey]}")
}
}
}
diff --git a/src/com/mirantis/mk/Workflow.groovy b/src/com/mirantis/mk/Workflow.groovy
index aa38c0c..694a33a 100644
--- a/src/com/mirantis/mk/Workflow.groovy
+++ b/src/com/mirantis/mk/Workflow.groovy
@@ -152,6 +152,71 @@
}
}
+/**
+ * Update workflow job build description
+ *
+ * @param jobs_data List of maps with all job names and result statuses, shown in the build description
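+ * Example entry (illustrative): [list_id: '0', type: 'workflow', name: 'deploy-kaas', build_url: '0', status: '-', desc: '', child_desc: '']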
+ */
+def updateDescription(jobs_data) {
+ table = ''
+ child_jobs_description = '<strong>Descriptions from jobs:</strong><br>'
+ table_template_start = "<div><table style='border: solid 1px;'><tr><th>Job:</th><th>Status:</th></tr>"
+ table_template_end = "</table></div>"
+
+ for (jobdata in jobs_data) {
+ // Grey background for 'finally' jobs in list
+ if (jobdata['type'] == 'finally') {
+ trstyle = "<tr style='background: #DDDDDD;'>"
+ } else {
+ trstyle = "<tr>"
+ }
+
+ // 'description' instead of job name if it exists
+ if(jobdata['desc'].toString() != "") {
+ display_name = jobdata['desc']
+ } else {
+ display_name = jobdata['name']
+ }
+
+ // Attach url for already built jobs
+ if(jobdata['build_url'] != "0") {
+ build_url = "<a href=${jobdata['build_url']}>$display_name</a>"
+ } else {
+ build_url = display_name
+ }
+
+ // Styling the status of job result
+ switch(jobdata['status'].toString()) {
+ case "SUCCESS":
+ status_style = "<td style='color: green;'><img src='/images/16x16/blue.png' alt='SUCCESS'>"
+ break
+ case "UNSTABLE":
+ status_style = "<td style='color: #FF5733;'><img src='/images/16x16/yellow.png' alt='UNSTABLE'>"
+ break
+ case "ABORTED":
+ status_style = "<td style='color: red;'><img src='/images/16x16/aborted.png' alt='ABORTED'>"
+ break
+ case "NOT_BUILT":
+ status_style = "<td style='color: red;'><img src='/images/16x16/aborted.png' alt='NOT_BUILT'>"
+ break
+ case "FAILURE":
+ status_style = "<td style='color: red;'><img src='/images/16x16/red.png' alt='FAILURE'>"
+ break
+ default:
+ status_style = "<td>-"
+ }
+
+ // Collect table
+ table += "$trstyle<td>$build_url</td>$status_style</td></tr>"
+
+ // Collecting descriptions of built child jobs
+ if (jobdata['child_desc'] != "") {
+ child_jobs_description += "<b><small><a href=${jobdata['build_url']}>- ${jobdata['name']} (${jobdata['status']}):</a></small></b><br>"
+ child_jobs_description += "<small>${jobdata['child_desc']}</small><br>"
+ }
+ }
+ currentBuild.description = table_template_start + table + table_template_end + child_jobs_description
+}
/**
* Run the workflow or final steps one by one
@@ -159,13 +224,19 @@
* @param steps List of steps (Jenkins jobs) to execute
* @param global_variables Map where the collected artifact URLs and 'env' objects are stored
* @param failed_jobs Map with failed job names and result statuses, to report it later
+ * @param jobs_data List of maps with all job names and result statuses, shown in the build description
+ * @param step_id Counter for matching step ID with cell ID in description table
* @param propagate Boolean. If false: allows to collect artifacts after job is finished, even with FAILURE status
* If true: immediatelly fails the pipeline. DO NOT USE 'true' with runScenario().
*/
-def runSteps(steps, global_variables, failed_jobs, Boolean propagate = false) {
+def runSteps(steps, global_variables, failed_jobs, jobs_data, step_id, Boolean propagate = false) {
+ // Show expected jobs list in description
+ updateDescription(jobs_data)
+
for (step in steps) {
stage("Running job ${step['job']}") {
-
+ def engine = new groovy.text.GStringTemplateEngine()
+ def desc = step['description'] ?: ''
def job_name = step['job']
def job_parameters = [:]
def step_parameters = step['parameters'] ?: [:]
@@ -183,12 +254,16 @@
def build_description = job_info.getDescription()
def build_id = job_info.getId()
- currentBuild.description += "<a href=${build_url}>${job_name}</a>: ${job_result}<br>"
- // Import the remote build description into the current build
- if (build_description) { // TODO - add also the job status
- currentBuild.description += build_description
+ // Update jobs_data for updating description
+ jobs_data[step_id]['build_url'] = build_url
+ jobs_data[step_id]['status'] = job_result
+ jobs_data[step_id]['desc'] = engine.createTemplate(desc).make(global_variables)
+ if (build_description) {
+ jobs_data[step_id]['child_desc'] = build_description
}
+ updateDescription(jobs_data)
+
// Store links to the resulting artifacts into 'global_variables'
storeArtifacts(build_url, step['artifacts'], global_variables, job_name, build_id)
@@ -214,6 +289,8 @@
} // if (job_result != 'SUCCESS')
println "Job ${build_url} finished with result: ${job_result}"
} // stage ("Running job ${step['job']}")
+ // Jump to next ID for updating next job data in description table
+ step_id++
} // for (step in scenario['workflow'])
}
@@ -232,6 +309,7 @@
* workflow:
* - job: deploy-kaas
* ignore_failed: false
+ * description: "Management cluster ${KAAS_VERSION}"
* parameters:
* KAAS_VERSION:
* type: StringParameterValue
@@ -295,10 +373,36 @@
global_variables = [:]
// List of failed jobs to show at the end
failed_jobs = [:]
+ // Jobs data to use for wf job build description
+ def jobs_data = []
+ // Counter for matching step ID with cell ID in description table
+ step_id = 0
+
+ // Generate the expected list of jobs for the description
+ list_id = 0
+ for (step in scenario['workflow']) {
+ if(step['description'] != null && step['description'].toString() != "") {
+ display_name = step['description']
+ } else {
+ display_name = step['job']
+ }
+ jobs_data.add([list_id: "$list_id", type: "workflow", name: "$display_name", build_url: "0", status: "-", desc: "", child_desc: ""])
+ list_id += 1
+ }
+ finally_step_id = list_id
+ for (step in scenario['finally']) {
+ if(step['description'] != null && step['description'].toString() != "") {
+ display_name = step['description']
+ } else {
+ display_name = step['job']
+ }
+ jobs_data.add([list_id: "$list_id", type: "finally", name: "$display_name", build_url: "0", status: "-", desc: "", child_desc: ""])
+ list_id += 1
+ }
try {
// Run the 'workflow' jobs
- runSteps(scenario['workflow'], global_variables, failed_jobs)
+ runSteps(scenario['workflow'], global_variables, failed_jobs, jobs_data, step_id)
} catch (InterruptedException x) {
error "The job was aborted"
@@ -307,8 +411,10 @@
error("Build failed: " + e.toString())
} finally {
+ // Switching to 'finally' step index
+ step_id = finally_step_id
// Run the 'finally' jobs
- runSteps(scenario['finally'], global_variables, failed_jobs)
+ runSteps(scenario['finally'], global_variables, failed_jobs, jobs_data, step_id)
if (failed_jobs) {
statuses = []
@@ -325,6 +431,8 @@
currentBuild.result = 'FAILURE'
}
println "Failed jobs: ${failed_jobs}"
+ } else {
+ currentBuild.result = 'SUCCESS'
}
if (slackReportChannel) {