Merge "Update pipeline for package updates"
diff --git a/test-cookiecutter-reclass-chunk.groovy b/test-cookiecutter-reclass-chunk.groovy
index 8c804a1..de30e65 100644
--- a/test-cookiecutter-reclass-chunk.groovy
+++ b/test-cookiecutter-reclass-chunk.groovy
@@ -15,7 +15,7 @@
extraVars = readYaml text: EXTRA_VARIABLES_YAML
try {
currentBuild.description = extraVars.modelFile
- sh(script: 'find . -mindepth 1 -delete || true', returnStatus: true)
+ sh(script: 'find . -mindepth 1 -delete || true', returnStatus: true)
sh(script: """
wget --progress=dot:mega --auth-no-challenge -O models.tar.gz ${extraVars.MODELS_TARGZ}
tar -xzf models.tar.gz
@@ -25,14 +25,14 @@
def content = readFile(file: extraVars.modelFile)
def templateContext = readYaml text: content
def config = [
- 'dockerHostname': "cfg01",
- 'domain': "${templateContext.default_context.cluster_domain}",
- 'clusterName': templateContext.default_context.cluster_name,
- 'reclassEnv': extraVars.testReclassEnv,
- 'distribRevision': extraVars.DISTRIB_REVISION,
+ 'dockerHostname' : "cfg01",
+ 'domain' : "${templateContext.default_context.cluster_domain}",
+ 'clusterName' : templateContext.default_context.cluster_name,
+ 'reclassEnv' : extraVars.testReclassEnv,
+ 'distribRevision' : extraVars.DISTRIB_REVISION,
'dockerContainerName': extraVars.DockerCName,
- 'testContext': extraVars.modelFile,
- 'dockerExtraOpts': [ '--memory=3g' ]
+ 'testContext' : extraVars.modelFile,
+ 'dockerExtraOpts' : ['--memory=3g']
]
if (extraVars.DISTRIB_REVISION == 'nightly') {
config['nodegenerator'] = true
@@ -50,9 +50,9 @@
} finally {
stage('Save artifacts to Artifactory') {
def artifactory = new com.mirantis.mcp.MCPArtifactory()
- def envGerritVars = [ "GERRIT_PROJECT=${extraVars.get('GERRIT_PROJECT', '')}", "GERRIT_CHANGE_NUMBER=${extraVars.get('GERRIT_CHANGE_NUMBER', '')}",
- "GERRIT_PATCHSET_NUMBER=${extraVars.get('GERRIT_PATCHSET_NUMBER', '')}", "GERRIT_CHANGE_ID=${extraVars.get('GERRIT_CHANGE_ID', '')}",
- "GERRIT_PATCHSET_REVISION=${extraVars.get('GERRIT_PATCHSET_REVISION', '')}" ]
+ def envGerritVars = ["GERRIT_PROJECT=${extraVars.get('GERRIT_PROJECT', '')}", "GERRIT_CHANGE_NUMBER=${extraVars.get('GERRIT_CHANGE_NUMBER', '')}",
+ "GERRIT_PATCHSET_NUMBER=${extraVars.get('GERRIT_PATCHSET_NUMBER', '')}", "GERRIT_CHANGE_ID=${extraVars.get('GERRIT_CHANGE_ID', '')}",
+ "GERRIT_PATCHSET_REVISION=${extraVars.get('GERRIT_PATCHSET_REVISION', '')}"]
withEnv(envGerritVars) {
def artifactoryLink = artifactory.uploadJobArtifactsToArtifactory(['artifactory': 'mcp-ci', 'artifactoryRepo': "drivetrain-local/${JOB_NAME}/${BUILD_NUMBER}"])
currentBuild.description += "<br/>${artifactoryLink}"
diff --git a/test-cookiecutter-reclass.groovy b/test-cookiecutter-reclass.groovy
index aa695f2..b7004f6 100644
--- a/test-cookiecutter-reclass.groovy
+++ b/test-cookiecutter-reclass.groovy
@@ -209,6 +209,12 @@
gerritDataRSHEAD << gerritDataRS
gerritDataRSHEAD['gerritRefSpec'] = null
gerritDataRSHEAD['GERRIT_CHANGE_NUMBER'] = null
+ // check whether the branch under test (CC or reclass-system) is a release branch, to pick the correct formulas
+ if (gerritDataCC['gerritBranch'].contains('release/')) {
+ testDistribRevision = gerritDataCC['gerritBranch']
+ } else if (gerritDataRS['gerritBranch'].contains('release')) {
+ testDistribRevision = gerritDataRS['gerritBranch']
+ }
// 'binary' branch logic w\o 'release/' prefix
if (testDistribRevision.contains('/')) {
testDistribRevision = testDistribRevision.split('/')[-1]
@@ -219,8 +225,8 @@
if (!binTest.linux_system_repo_url || !binTest.linux_system_repo_ubuntu_url) {
common.errorMsg("Binary release: ${testDistribRevision} not exist or not full. Fallback to 'proposed'! ")
testDistribRevision = 'proposed'
- messages.add("DISTRIB_REVISION => ${testDistribRevision}")
}
+ messages.add("DISTRIB_REVISION => ${testDistribRevision}")
def message = messages.join(newline) + newline
currentBuild.description = currentBuild.description ? message + currentBuild.description : message
}
@@ -390,7 +396,7 @@
result = '\n' + common.comparePillars(compareRoot, env.BUILD_URL, "-Ev \'infra/secrets.yml|\\.git\'")
currentBuild.description = currentBuild.description ? currentBuild.description + result : result
}
- stage("TestContexts Head/Patched") {
+ stage('TestContexts Head/Patched') {
def stepsForParallel = [:]
stepsForParallel.failFast = true
common.infoMsg("Found: ${contextFileListHead.size()} HEAD contexts to test.")
@@ -406,7 +412,7 @@
parallel stepsForParallel
common.infoMsg('All TestContexts tests done')
}
- stage("Compare NodesInfo Head/Patched") {
+ stage('Compare NodesInfo Head/Patched') {
// Download all artifacts
def stepsForParallel = [:]
stepsForParallel.failFast = true
@@ -480,3 +486,4 @@
}
}
}
+
diff --git a/test-model-generator.groovy b/test-model-generator.groovy
index 39723c6..aff8b8a 100644
--- a/test-model-generator.groovy
+++ b/test-model-generator.groovy
@@ -158,6 +158,7 @@
export TEST_PASSWORD=default
export TEST_MODELD_URL=127.0.0.1
export TEST_MODELD_PORT=3000
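+ # timeout value exported for the tests (how the value is interpreted is assumed to be up to the test suite)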
+ export TEST_TIMEOUT=30
cd /var/lib/trymcp-tests
pytest -m 'not trymcp' ${component}
"""
@@ -188,11 +189,18 @@
}
sh "rm -rf ${env.WORKSPACE}/venv/"
}
- if (apiImage && apiImage.id) {
- sh "docker rmi ${apiImage.id}"
- }
- if (uiImage && uiImage.id) {
- sh "docker rmi ${uiImage.id}"
+ try {
+ // to avoid issue PROD-29393 "pipeline freezes in `docker rmi` action"
+ timeout(time: 4, unit: 'MINUTES') {
+ if (apiImage && apiImage.id) {
+ sh "docker rmi ${apiImage.id}"
+ }
+ if (uiImage && uiImage.id) {
+ sh "docker rmi ${uiImage.id}"
+ }
+ }
+ } catch (Exception e) {
+ echo "Failed while cleaning docker images: ${e.toString()}"
}
// Remove everything what is owned by root
testImage.inside(testImageOptions) {
diff --git a/upgrade-mcp-release.groovy b/upgrade-mcp-release.groovy
index 02e9270..08d4783 100644
--- a/upgrade-mcp-release.groovy
+++ b/upgrade-mcp-release.groovy
@@ -88,9 +88,9 @@
sh "diff -u ${workspace}/${oldSuffix}/${minion} ${workspace}/${newSuffix}/${minion} > ${fileName} || true"
}
}
- archiveArtifacts artifacts: "${workspace}/${oldSuffix}"
- archiveArtifacts artifacts: "${workspace}/${newSuffix}"
- archiveArtifacts artifacts: "${workspace}/${diffDir}"
+ archiveArtifacts artifacts: "${oldSuffix}/*"
+ archiveArtifacts artifacts: "${newSuffix}/*"
+ archiveArtifacts artifacts: "${diffDir}/*"
}
if (common.validInputParam('PIPELINE_TIMEOUT')) {
@@ -206,12 +206,31 @@
salt.cmdRun(venvPepper, 'I@salt:master', "cd /srv/salt/reclass/classes/system && git checkout ${reclassSystemBranch}")
// Add kubernetes-extra repo
if (salt.testTarget(venvPepper, "I@kubernetes:master")) {
+ // docker-engine conflicts with recent containerd versions and is removed during the upgrade, so switch the install source engine to 'archive'
+ salt.cmdRun(venvPepper, 'I@salt:master', "cd /srv/salt/reclass/classes/cluster/$cluster_name && " +
+ "grep -r -l 'engine: docker_hybrid' kubernetes | xargs --no-run-if-empty sed -i 's/engine: docker_hybrid/engine: archive/g'")
common.infoMsg("Add kubernetes-extra repo")
salt.cmdRun(venvPepper, 'I@salt:master', "cd /srv/salt/reclass/classes/cluster/$cluster_name && " +
"grep -q system.linux.system.repo.mcp.apt_mirantis.update.kubernetes_extra kubernetes/common.yml || sed -i '/classes:/ a - system.linux.system.repo.mcp.apt_mirantis.update.kubernetes_extra' kubernetes/common.yml")
salt.cmdRun(venvPepper, 'I@salt:master', "cd /srv/salt/reclass/classes/cluster/$cluster_name && " +
"grep -q system.linux.system.repo.mcp.apt_mirantis.kubernetes_extra kubernetes/common.yml || sed -i '/classes:/ a - system.linux.system.repo.mcp.apt_mirantis.kubernetes_extra' kubernetes/common.yml")
}
+ // Add all update repositories
+ def repoIncludeBase = '- system.linux.system.repo.mcp.apt_mirantis.'
+ def updateRepoList = [ 'cassandra', 'ceph', 'contrail', 'docker', 'elastic', 'extra', 'openstack', 'percona', 'salt-formulas', 'saltstack', 'ubuntu' ]
+ updateRepoList.each { repo ->
+ def repoNameUpdateInclude = "${repoIncludeBase}update.${repo}"
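+ // find all cluster model files that include the base repo class for this repo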
+ def filesWithInclude = salt.cmdRun(venvPepper, 'I@salt:master', "cd /srv/salt/reclass/classes/cluster/$cluster_name && grep -Plr '\\${repoIncludeBase}${repo}\$' . || true", false).get('return')[0].values()[0].trim().tokenize('\n')
+ filesWithInclude.each { file ->
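+ // check whether the matching 'update' repo include is already present in the file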
+ def updateRepoIncludeExist = salt.cmdRun(venvPepper, 'I@salt:master', "cd /srv/salt/reclass/classes/cluster/$cluster_name && grep -P '\\${repoNameUpdateInclude}\$' ${file} || echo not_found", false, null, true).get('return')[0].values()[0].trim()
+ if (updateRepoIncludeExist == 'not_found') {
+ // the update include is missing, so insert it right after the base include line (preserving its indentation)
+ salt.cmdRun(venvPepper, 'I@salt:master', "cd /srv/salt/reclass/classes/cluster/$cluster_name && " +
+ "sed -i 's/\\( *\\)${repoIncludeBase}${repo}\$/&\\n\\1${repoNameUpdateInclude}/g' ${file}")
+ common.infoMsg("Update repo for ${repo} is added to ${file}")
+ }
+ }
+ }
// Add new defaults
common.infoMsg("Add new defaults")
salt.cmdRun(venvPepper, 'I@salt:master', "grep '^ mcp_version: ' /srv/salt/reclass/classes/cluster/$cluster_name/infra/init.yml || " +