Merge "[fix][core] remove var definitions from finally block"
diff --git a/src/com/mirantis/mcp/Validate.groovy b/src/com/mirantis/mcp/Validate.groovy
index 9c7a5b6..308ff14 100644
--- a/src/com/mirantis/mcp/Validate.groovy
+++ b/src/com/mirantis/mcp/Validate.groovy
@@ -57,7 +57,6 @@
default_mounts = ["/etc/ssl/certs/": "/etc/ssl/certs/",
"/srv/salt/pki/${cluster_name}/": "/etc/certs",
"/root/test/": "/root/tempest/",
- "/tmp/": "/tmp/",
"/etc/hosts": "/etc/hosts"]
params.mounts = default_mounts + params.mounts
if ( salt.cmdRun(params.master, params.target, "docker ps -f name=^${params.name}\$ -q", false, null, false)['return'][0].values()[0] ) {
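Since /tmp/ is no longer mounted by default, callers that still need it must pass it through params.mounts; the merge keeps caller-supplied entries and lets them override the defaults. A minimal sketch of the merge semantics only (map values here are illustrative, not taken from Validate.groovy):

    // Illustrative only: how 'default_mounts + params.mounts' behaves in Groovy
    def default_mounts = ["/etc/hosts": "/etc/hosts"]
    def params = [mounts: ["/tmp/": "/tmp/", "/etc/hosts": "/opt/hosts"]]
    def merged = default_mounts + params.mounts
    assert merged["/tmp/"] == "/tmp/"            // caller re-adds the dropped /tmp/ mount explicitly
    assert merged["/etc/hosts"] == "/opt/hosts"  // caller-supplied keys win on collision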
diff --git a/src/com/mirantis/mk/Ceph.groovy b/src/com/mirantis/mk/Ceph.groovy
index c959810..8660233 100644
--- a/src/com/mirantis/mk/Ceph.groovy
+++ b/src/com/mirantis/mk/Ceph.groovy
@@ -93,7 +93,7 @@
}
}
if (lvm_enabled && type != 'lockbox') {
- salt.cmdRun(master, target, "ceph-volume lvm zap ${partition_uuid} --destroy")
+ salt.cmdRun(master, target, "ceph-volume lvm zap /dev/disk/by-partuuid/${partition_uuid} --destroy")
} else if (dev != '') {
salt.cmdRun(master, target, "parted ${dev} rm ${part_id}")
} else {
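ceph-volume lvm zap expects a device path rather than a bare partition UUID, hence the switch to the /dev/disk/by-partuuid/ symlink. A hypothetical helper sketching the same call (the helper name and argument list are assumptions, not part of Ceph.groovy):

    // Hypothetical helper: zap an LVM-backed OSD partition addressed by its partition UUID.
    // /dev/disk/by-partuuid/<uuid> is a udev-maintained symlink to the real partition device.
    def zapByPartUuid(salt, master, target, partitionUuid) {
        salt.cmdRun(master, target, "ceph-volume lvm zap /dev/disk/by-partuuid/${partitionUuid} --destroy")
    }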
diff --git a/src/com/mirantis/mk/Gerrit.groovy b/src/com/mirantis/mk/Gerrit.groovy
index c33dc5e..88da957 100644
--- a/src/com/mirantis/mk/Gerrit.groovy
+++ b/src/com/mirantis/mk/Gerrit.groovy
@@ -173,21 +173,22 @@
* Return gerrit change object from gerrit API
* @param gerritName gerrit user name (usually GERRIT_NAME property)
* @param gerritHost gerrit host (usually GERRIT_HOST property)
+ * @param gerritPort gerrit port (usually GERRIT_PORT property, default 29418)
* @param gerritChangeNumber gerrit change number (usually GERRIT_CHANGE_NUMBER property)
* @param credentialsId jenkins credentials id for gerrit
* @param includeCurrentPatchset do you want to include current (last) patchset
* @return gerrit change object
*/
-def getGerritChange(gerritName, gerritHost, gerritChangeNumber, credentialsId, includeCurrentPatchset = false){
+def getGerritChange(gerritName, gerritHost, gerritChangeNumber, credentialsId, includeCurrentPatchset = false, gerritPort = '29418'){
def common = new com.mirantis.mk.Common()
def ssh = new com.mirantis.mk.Ssh()
ssh.prepareSshAgentKey(credentialsId)
- ssh.ensureKnownHosts(gerritHost)
+ ssh.ensureKnownHosts("${gerritHost}:${gerritPort}")
def curPatchset = "";
if(includeCurrentPatchset){
curPatchset = "--current-patch-set"
}
- return common.parseJSON(ssh.agentSh(String.format("ssh -p 29418 %s@%s gerrit query ${curPatchset} --format=JSON change:%s", gerritName, gerritHost, gerritChangeNumber)))
+ return common.parseJSON(ssh.agentSh(String.format("ssh -p %s %s@%s gerrit query ${curPatchset} --format=JSON change:%s", gerritPort, gerritName, gerritHost, gerritChangeNumber)))
}
/**
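Existing five-argument callers keep working because gerritPort defaults to '29418'; the extra argument only matters for non-default Gerrit deployments. A usage sketch (the credentials id is an assumption, the GERRIT_* properties are the ones named in the javadoc):

    def gerrit = new com.mirantis.mk.Gerrit()
    // gerritPort defaults to '29418', so the last argument can be omitted for standard setups
    def change = gerrit.getGerritChange(GERRIT_NAME, GERRIT_HOST, GERRIT_CHANGE_NUMBER,
                                        'gerrit-ssh-credentials', true, GERRIT_PORT)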
diff --git a/src/com/mirantis/mk/Git.groovy b/src/com/mirantis/mk/Git.groovy
index f699b3e..d724b8f 100644
--- a/src/com/mirantis/mk/Git.groovy
+++ b/src/com/mirantis/mk/Git.groovy
@@ -129,6 +129,12 @@
} else {
gitOpts = ''
}
+ def gitEnv = [
+ "GIT_AUTHOR_NAME=${gitName}",
+ "GIT_AUTHOR_EMAIL=${gitEmail}",
+ "GIT_COMMITTER_NAME=${gitName}",
+ "GIT_COMMITTER_EMAIL=${gitEmail}",
+ ]
dir(path) {
sh "git config ${global_arg} user.email '${gitEmail}'"
sh "git config ${global_arg} user.name '${gitName}'"
@@ -137,10 +143,12 @@
script: 'git add -A',
returnStdout: true
).trim()
- git_cmd = sh(
- script: "git commit ${gitOpts} -m '${message}'",
- returnStdout: true
- ).trim()
+ withEnv(gitEnv) {
+ git_cmd = sh(
+ script: "git commit ${gitOpts} -m '${message}'",
+ returnStdout: true
+ ).trim()
+ }
}
return git_cmd
}
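Exporting GIT_AUTHOR_*/GIT_COMMITTER_* around the commit makes the identity explicit even when the repository-level git config is not picked up (for example with an unusual HOME on the agent). A minimal standalone sketch of the same pattern (names, email and message are placeholders):

    def gitEnv = [
        "GIT_AUTHOR_NAME=Jenkins",
        "GIT_AUTHOR_EMAIL=jenkins@example.com",
        "GIT_COMMITTER_NAME=Jenkins",
        "GIT_COMMITTER_EMAIL=jenkins@example.com",
    ]
    withEnv(gitEnv) {
        // commit identity comes from the environment, independent of 'git config user.*'
        sh "git commit --allow-empty -m 'placeholder commit'"
    }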
diff --git a/src/com/mirantis/mk/Ruby.groovy b/src/com/mirantis/mk/Ruby.groovy
index 8fac324..4681d07 100644
--- a/src/com/mirantis/mk/Ruby.groovy
+++ b/src/com/mirantis/mk/Ruby.groovy
@@ -11,7 +11,7 @@
def ensureRubyEnv(rubyVersion="2.4.1"){
if (!fileExists("/var/lib/jenkins/.rbenv/versions/${rubyVersion}/bin/ruby")){
//XXX: patch ruby-build because debian package is quite old
- sh "git clone https://github.com/rbenv/ruby-build.git ~/.rbenv/plugins/ruby-build || git -C ~/.rbenv/plugins/ruby-build pull"
+ sh "git clone https://github.com/rbenv/ruby-build.git ~/.rbenv/plugins/ruby-build || git -C ~/.rbenv/plugins/ruby-build pull origin master"
sh "rbenv install ${rubyVersion} -sv";
}
sh "rbenv local ${rubyVersion};rbenv exec gem update --system"
diff --git a/src/com/mirantis/mk/Workflow.groovy b/src/com/mirantis/mk/Workflow.groovy
index 7706859..694a33a 100644
--- a/src/com/mirantis/mk/Workflow.groovy
+++ b/src/com/mirantis/mk/Workflow.groovy
@@ -152,6 +152,71 @@
}
}
+/**
+ * Update workflow job build description
+ *
+ * @param jobs_data List of maps with all job names and result statuses, to show them in the description
+ */
+def updateDescription(jobs_data) {
+ table = ''
+ child_jobs_description = '<strong>Descriptions from jobs:</strong><br>'
+ table_template_start = "<div><table style='border: solid 1px;'><tr><th>Job:</th><th>Status:</th></tr>"
+ table_template_end = "</table></div>"
+
+ for (jobdata in jobs_data) {
+ // Grey background for 'finally' jobs in list
+ if (jobdata['type'] == 'finally') {
+ trstyle = "<tr style='background: #DDDDDD;'>"
+ } else {
+ trstyle = "<tr>"
+ }
+
+ // 'description' instead of job name if it exists
+ if(jobdata['desc'].toString() != "") {
+ display_name = jobdata['desc']
+ } else {
+ display_name = jobdata['name']
+ }
+
+ // Attach the build URL for jobs that have already been built
+ if(jobdata['build_url'] != "0") {
+ build_url = "<a href=${jobdata['build_url']}>$display_name</a>"
+ } else {
+ build_url = display_name
+ }
+
+ // Styling the status of job result
+ switch(jobdata['status'].toString()) {
+ case "SUCCESS":
+ status_style = "<td style='color: green;'><img src='/images/16x16/blue.png' alt='SUCCESS'>"
+ break
+ case "UNSTABLE":
+ status_style = "<td style='color: #FF5733;'><img src='/images/16x16/yellow.png' alt='UNSTABLE'>"
+ break
+ case "ABORTED":
+ status_style = "<td style='color: red;'><img src='/images/16x16/aborted.png' alt='ABORTED'>"
+ break
+ case "NOT_BUILT":
+ status_style = "<td style='color: red;'><img src='/images/16x16/aborted.png' alt='NOT_BUILT'>"
+ break
+ case "FAILURE":
+ status_style = "<td style='color: red;'><img src='/images/16x16/red.png' alt='FAILURE'>"
+ break
+ default:
+ status_style = "<td>-"
+ }
+
+ // Collect table
+ table += "$trstyle<td>$build_url</td>$status_style</td></tr>"
+
+ // Collect descriptions of already built child jobs
+ if (jobdata['child_desc'] != "") {
+ child_jobs_description += "<b><small><a href=${jobdata['build_url']}>- ${jobdata['name']} (${jobdata['status']}):</a></small></b><br>"
+ child_jobs_description += "<small>${jobdata['child_desc']}</small><br>"
+ }
+ }
+ currentBuild.description = table_template_start + table + table_template_end + child_jobs_description
+}
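For reference, a sketch of the jobs_data structure this function consumes, matching the entries built in runScenario() below (the second job name and the URL are illustrative only):

    def jobs_data = [
        [list_id: '0', type: 'workflow', name: 'deploy-kaas',
         build_url: 'https://jenkins.example.com/job/deploy-kaas/42/',
         status: 'SUCCESS', desc: '', child_desc: 'Management cluster deployed'],
        [list_id: '1', type: 'finally', name: 'collect-artifacts',
         build_url: '0', status: '-', desc: '', child_desc: ''],
    ]
    updateDescription(jobs_data)   // renders the two-column HTML table into currentBuild.description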
/**
* Run the workflow or final steps one by one
@@ -159,13 +224,19 @@
* @param steps List of steps (Jenkins jobs) to execute
* @param global_variables Map where the collected artifact URLs and 'env' objects are stored
* @param failed_jobs Map with failed job names and result statuses, to report it later
+ * @param jobs_data List of maps with all job names and result statuses, to show them in the description
+ * @param step_id Counter for matching the step ID with its row in the description table
* @param propagate Boolean. If false: allows to collect artifacts after job is finished, even with FAILURE status
* If true: immediatelly fails the pipeline. DO NOT USE 'true' with runScenario().
*/
-def runSteps(steps, global_variables, failed_jobs, Boolean propagate = false) {
+def runSteps(steps, global_variables, failed_jobs, jobs_data, step_id, Boolean propagate = false) {
+ // Show expected jobs list in description
+ updateDescription(jobs_data)
+
for (step in steps) {
stage("Running job ${step['job']}") {
-
+ def engine = new groovy.text.GStringTemplateEngine()
+ def desc = step['description'] ?: ''
def job_name = step['job']
def job_parameters = [:]
def step_parameters = step['parameters'] ?: [:]
@@ -183,12 +254,16 @@
def build_description = job_info.getDescription()
def build_id = job_info.getId()
- currentBuild.description += "<a href=${build_url}>${job_name}</a>: ${job_result}<br>"
- // Import the remote build description into the current build
- if (build_description) { // TODO - add also the job status
- currentBuild.description += build_description
+ // Update jobs_data used for the build description
+ jobs_data[step_id]['build_url'] = build_url
+ jobs_data[step_id]['status'] = job_result
+ jobs_data[step_id]['desc'] = engine.createTemplate(desc).make(global_variables)
+ if (build_description) {
+ jobs_data[step_id]['child_desc'] = build_description
}
+ updateDescription(jobs_data)
+
// Store links to the resulting artifacts into 'global_variables'
storeArtifacts(build_url, step['artifacts'], global_variables, job_name, build_id)
@@ -214,6 +289,8 @@
} // if (job_result != 'SUCCESS')
println "Job ${build_url} finished with result: ${job_result}"
} // stage ("Running job ${step['job']}")
+ // Advance to the next ID so the next job fills its own row in the description table
+ step_id++
} // for (step in scenario['workflow'])
}
@@ -232,6 +309,7 @@
* workflow:
* - job: deploy-kaas
* ignore_failed: false
+ * description: "Management cluster ${KAAS_VERSION}"
* parameters:
* KAAS_VERSION:
* type: StringParameterValue
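The step description is treated as a template and rendered against global_variables through GStringTemplateEngine, which is why a placeholder such as ${KAAS_VERSION} resolves to a value collected from earlier jobs. A small sketch of that rendering (the version value is an assumption):

    def engine = new groovy.text.GStringTemplateEngine()
    def global_variables = [KAAS_VERSION: '2.19.0']      // assumed value collected from upstream artifacts
    def desc = 'Management cluster ${KAAS_VERSION}'      // single quotes keep the placeholder literal
    assert engine.createTemplate(desc).make(global_variables).toString() == 'Management cluster 2.19.0'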
@@ -295,10 +373,36 @@
global_variables = [:]
// List of failed jobs to show at the end
failed_jobs = [:]
+ // Jobs data used for the workflow job build description
+ def jobs_data = []
+ // Counter for matching the step ID with its row in the description table
+ step_id = 0
+
+ // Generate the expected list of jobs for the description
+ list_id = 0
+ for (step in scenario['workflow']) {
+ if(step['description'] != null && step['description'].toString() != "") {
+ display_name = step['description']
+ } else {
+ display_name = step['job']
+ }
+ jobs_data.add([list_id: "$list_id", type: "workflow", name: "$display_name", build_url: "0", status: "-", desc: "", child_desc: ""])
+ list_id += 1
+ }
+ finally_step_id = list_id
+ for (step in scenario['finally']) {
+ if(step['description'] != null && step['description'].toString() != "") {
+ display_name = step['description']
+ } else {
+ display_name = step['job']
+ }
+ jobs_data.add([list_id: "$list_id", type: "finally", name: "$display_name", build_url: "0", status: "-", desc: "", child_desc: ""])
+ list_id += 1
+ }
try {
// Run the 'workflow' jobs
- runSteps(scenario['workflow'], global_variables, failed_jobs)
+ runSteps(scenario['workflow'], global_variables, failed_jobs, jobs_data, step_id)
} catch (InterruptedException x) {
error "The job was aborted"
@@ -307,8 +411,10 @@
error("Build failed: " + e.toString())
} finally {
+ // Switch to the 'finally' steps index
+ step_id = finally_step_id
// Run the 'finally' jobs
- runSteps(scenario['finally'], global_variables, failed_jobs)
+ runSteps(scenario['finally'], global_variables, failed_jobs, jobs_data, step_id)
if (failed_jobs) {
statuses = []