Merge "[MCPArtifactory] Add function uploadImagesToArtifactory()"
diff --git a/src/com/mirantis/mk/KaasUtils.groovy b/src/com/mirantis/mk/KaasUtils.groovy
index 7f34eb7..d7f7fdb 100644
--- a/src/com/mirantis/mk/KaasUtils.groovy
+++ b/src/com/mirantis/mk/KaasUtils.groovy
@@ -127,9 +127,9 @@
if (commitMsg ==~ /(?s).*\[fetch.*binaries\].*/ || env.GERRIT_EVENT_COMMENT_TEXT ==~ /(?s).*fetch.*binaries.*/) {
fetchServiceBinaries = true
}
- if (commitMsg ==~ /(?s).*\[equinix-demo\].*/ || env.GERRIT_EVENT_COMMENT_TEXT ==~ /(?s).*equinix-demo.*/) {
+ if (commitMsg ==~ /(?s).*\[equinix-on-aws\].*/ || env.GERRIT_EVENT_COMMENT_TEXT ==~ /(?s).*equinix-on-aws.*/) {
equinixOnAwsDemo = true
- common.warningMsg('Forced running child cluster deployment on EQUINIX METAL provider based on AWS management cluster, triggered on patchset using custom keyword: \'[equinix-demo]\' ')
+ common.warningMsg('Forced running child cluster deployment on EQUINIX METAL provider based on AWS management cluster, triggered on patchset using custom keyword: \'[equinix-on-aws]\' ')
}
if (commitMsg ==~ /(?s).*\[aws-demo\].*/ || env.GERRIT_EVENT_COMMENT_TEXT ==~ /(?s).*aws-demo.*/ || attachBYO || upgradeBYO || seedMacOs || equinixOnAwsDemo) {
awsOnDemandDemo = true
@@ -137,7 +137,7 @@
common.warningMsg('Forced running additional kaas deployment with AWS provider, due applied trigger cross dependencies, follow docs to clarify info')
}
}
- if (commitMsg ==~ /(?s).*\[equinix-mgmt\].*/ || env.GERRIT_EVENT_COMMENT_TEXT ==~ /(?s).*equinix-mgmt\.*/) {
+ if (commitMsg ==~ /(?s).*\[equinix-demo\].*/ || env.GERRIT_EVENT_COMMENT_TEXT ==~ /(?s).*equinix-demo\.*/) {
equinixOnDemandDemo = true
}
if (commitMsg ==~ /(?s).*\[disable-os-demo\].*/ || env.GERRIT_EVENT_COMMENT_TEXT ==~ /(?s).*disable-os-demo\.*/) {
@@ -185,6 +185,15 @@
error('incompatible triggers: [disable-os-demo] and multiregional deployment based on OSt management region cannot be applied simultaneously')
}
break
+ case 'vsphere':
+ if (enableVsphereDemo == false) {
+ error('incompatible triggers: [disable-vsphere-demo] and multiregional deployment based on Vsphere management region cannot be applied simultaneously')
+ }
+ break
+ case 'equinix':
+ common.warningMsg('Forced running additional kaas deployment with Equinix provider according multiregional demo request')
+ equinixOnDemandDemo = true
+ break
}
// CDN configuration
@@ -214,8 +223,8 @@
Mgmt UI e2e testing scheduled: ${runUie2e}
AWS provider deployment scheduled: ${awsOnDemandDemo}
Equinix provider deployment scheduled: ${equinixOnDemandDemo}
+ Equinix@AWS child cluster deployment scheduled: ${equinixOnAwsDemo}
VSPHERE provider deployment scheduled: ${enableVsphereDemo}
- EQUINIX child cluster deployment scheduled: ${equinixOnAwsDemo}
OS provider deployment scheduled: ${enableOSDemo}
BM provider deployment scheduled: ${enableBMDemo}
Multiregional configuration: ${multiregionalMappings}
@@ -237,9 +246,9 @@
fetchServiceBinariesEnabled: fetchServiceBinaries,
awsOnDemandDemoEnabled : awsOnDemandDemo,
equinixOnDemandDemoEnabled : equinixOnDemandDemo,
+ equinixOnAwsDemoEnabled : equinixOnAwsDemo,
vsphereDemoEnabled : enableVsphereDemo,
vsphereOnDemandDemoEnabled : enableVsphereDemo, // TODO: remove after MCC 2.7 is out
- equinixOnAwsDemoEnabled : equinixOnAwsDemo,
bmDemoEnabled : enableBMDemo,
osDemoEnabled : enableOSDemo,
multiregionalConfiguration : multiregionalMappings,
@@ -260,8 +269,8 @@
*/
def multiregionWorkflowParser(keyword) {
def common = new com.mirantis.mk.Common()
- def supportedManagementProviders = ['os', 'aws', 'vsphere']
- def supportedRegionalProviders = ['os', 'vsphere']
+ def supportedManagementProviders = ['os', 'aws', 'vsphere', 'equinix']
+ def supportedRegionalProviders = ['os', 'vsphere', 'equinix']
def clusterTypes = ''
if (keyword.toString().contains('multiregion')) {
diff --git a/src/com/mirantis/mk/Workflow.groovy b/src/com/mirantis/mk/Workflow.groovy
index 81c7687..8e5d192 100644
--- a/src/com/mirantis/mk/Workflow.groovy
+++ b/src/com/mirantis/mk/Workflow.groovy
@@ -16,7 +16,6 @@
*
*/
-
/**
* Get Jenkins parameter names, values and types from jobName
* @param jobName job name
@@ -99,6 +98,32 @@
return job_info
}
+def runOrGetJob(job_name, job_parameters, global_variables, propagate, String fullTaskName = '') {
+ /**
+ * Run the job directly, or reuse an already-executed build when an
+ * override is configured via the CI_JOBS_OVERRIDES env variable.
+ *
+ * CI_JOBS_OVERRIDES = text in YAML|JSON format, mapping full task
+ * names to the build numbers to reuse, e.g.:
+ * 'kaas-testing-core-release-artifact' : 3505
+ * 'reindex-testing-core-release-index-with-rc' : 2822
+ * 'si-test-release-sanity-check-prepare-configuration': 1877
+ */
+ common = new com.mirantis.mk.Common()
+ def jobsOverrides = readYaml(text: env.CI_JOBS_OVERRIDES ?: '---') ?: [:]
+ // get id of overriding job
+ def jobOverrideID = jobsOverrides.getOrDefault(fullTaskName, '')
+
+ if (fullTaskName in jobsOverrides.keySet()) {
+ common.warningMsg("Overriding: ${fullTaskName}/${job_name} <<< ${jobOverrideID}")
+ common.infoMsg("For debug pin use:\n'${fullTaskName}' : ${jobOverrideID}")
+ return Jenkins.instance.getItemByFullName(job_name,
+ hudson.model.Job.class).getBuildByNumber(jobOverrideID.toInteger())
+ } else {
+ return runJob(job_name, job_parameters, global_variables, propagate)
+ }
+}
+
/**
* Store URLs of the specified artifacts to the global_variables
*
@@ -113,15 +138,16 @@
* will be empty.
*
*/
-def storeArtifacts(build_url, step_artifacts, global_variables, job_name, build_num) {
+def storeArtifacts(build_url, step_artifacts, global_variables, job_name, build_num, artifactory_url = '') {
def common = new com.mirantis.mk.Common()
def http = new com.mirantis.mk.Http()
+ if (!artifactory_url) {
+ artifactory_url = 'https://artifactory.mcp.mirantis.net/api/storage/si-local/jenkins-job-artifacts'
+ }
def baseJenkins = [:]
def baseArtifactory = [:]
build_url = build_url.replaceAll(~/\/+$/, "")
- artifactory_url = "https://artifactory.mcp.mirantis.net/api/storage/si-local/jenkins-job-artifacts"
baseArtifactory["url"] = artifactory_url + "/${job_name}/${build_num}"
-
baseJenkins["url"] = build_url
def job_config = http.restGet(baseJenkins, "/api/json/")
def job_artifacts = job_config['artifacts']
@@ -173,9 +199,9 @@
// 'description' instead of job name if it exists
if (jobdata['desc'].toString() != "") {
- display_name = jobdata['desc']
+ display_name = "'${jobdata['desc']}': ${jobdata['build_id']}"
} else {
- display_name = jobdata['name']
+ display_name = "'${jobdata['name']}': ${jobdata['build_id']}"
}
// Attach url for already builded jobs
@@ -229,14 +255,14 @@
* @param propagate Boolean. If false: allows to collect artifacts after job is finished, even with FAILURE status
* If true: immediately fails the pipeline. DO NOT USE 'true' with runScenario().
*/
-def runSteps(steps, global_variables, failed_jobs, jobs_data, step_id, Boolean propagate = false) {
+def runSteps(steps, global_variables, failed_jobs, jobs_data, step_id, Boolean propagate = false, artifactoryBaseUrl = '') {
// Show expected jobs list in description
updateDescription(jobs_data)
for (step in steps) {
stage("Running job ${step['job']}") {
def engine = new groovy.text.GStringTemplateEngine()
- def desc = step['description'] ?: ''
+ String desc = step['description'] ?: ''
def job_name = step['job']
def job_parameters = [:]
def step_parameters = step['parameters'] ?: [:]
@@ -248,14 +274,16 @@
job_parameters << step_parameters
// Collect job parameters and run the job
- def job_info = runJob(job_name, job_parameters, global_variables, propagate)
- def job_result = job_info.getResult()
- def build_url = job_info.getAbsoluteUrl()
- def build_description = job_info.getDescription()
- def build_id = job_info.getId()
+ // WARN(alexz): desc must not contain characters that are invalid in YAML
+ def job_info = runOrGetJob(job_name, job_parameters, global_variables, propagate, desc)
+ def job_result = job_info.getResult().toString()
+ def build_url = job_info.getAbsoluteUrl().toString()
+ def build_description = job_info.getDescription().toString()
+ def build_id = job_info.getId().toString()
// Update jobs_data for updating description
jobs_data[step_id]['build_url'] = build_url
+ jobs_data[step_id]['build_id'] = build_id
jobs_data[step_id]['status'] = job_result
jobs_data[step_id]['desc'] = engine.createTemplate(desc).make(global_variables)
if (build_description) {
@@ -265,7 +293,7 @@
updateDescription(jobs_data)
// Store links to the resulting artifacts into 'global_variables'
- storeArtifacts(build_url, step['artifacts'], global_variables, job_name, build_id)
+ storeArtifacts(build_url, step['artifacts'], global_variables, job_name, build_id, artifactoryBaseUrl)
// Check job result, in case of SUCCESS, move to next step.
// In case job has status NOT_BUILT, fail the build or keep going depending on 'ignore_not_built' flag
@@ -369,8 +397,7 @@
* parameters: dict. parameters name and type to inherit from parent to child job, or from artifact to child job
*/
-def runScenario(scenario, slackReportChannel = '') {
-
+def runScenario(scenario, slackReportChannel = '', artifactoryBaseUrl = '') {
// Clear description before adding new messages
currentBuild.description = ''
// Collect the parameters for the jobs here
@@ -390,7 +417,14 @@
} else {
display_name = step['job']
}
- jobs_data.add([list_id: "$list_id", type: "workflow", name: "$display_name", build_url: "0", status: "-", desc: "", child_desc: ""])
+ jobs_data.add([list_id : "$list_id",
+ type : "workflow",
+ name : "$display_name",
+ build_url : "0",
+ build_id : "-",
+ status : "-",
+ desc : "",
+ child_desc: ""])
list_id += 1
}
finally_step_id = list_id
@@ -400,25 +434,29 @@
} else {
display_name = step['job']
}
- jobs_data.add([list_id: "$list_id", type: "finally", name: "$display_name", build_url: "0", status: "-", desc: "", child_desc: ""])
+ jobs_data.add([list_id : "$list_id",
+ type : "finally",
+ name : "$display_name",
+ build_url : "0",
+ build_id : "-",
+ status : "-",
+ desc : "",
+ child_desc: ""])
list_id += 1
}
try {
// Run the 'workflow' jobs
- runSteps(scenario['workflow'], global_variables, failed_jobs, jobs_data, step_id)
-
+ runSteps(scenario['workflow'], global_variables, failed_jobs, jobs_data, step_id, false, artifactoryBaseUrl)
} catch (InterruptedException x) {
error "The job was aborted"
-
} catch (e) {
error("Build failed: " + e.toString())
-
} finally {
// Switching to 'finally' step index
step_id = finally_step_id
// Run the 'finally' jobs
- runSteps(scenario['finally'], global_variables, failed_jobs, jobs_data, step_id)
+ runSteps(scenario['finally'], global_variables, failed_jobs, jobs_data, step_id, false, artifactoryBaseUrl)
if (failed_jobs) {
statuses = []