Merge "Prevent start contrail services with old version after upgrade"
diff --git a/cloud-update.groovy b/cloud-update.groovy
index 2729d98..8802c1b 100644
--- a/cloud-update.groovy
+++ b/cloud-update.groovy
@@ -388,7 +388,7 @@
} else {
def salt = new com.mirantis.mk.Salt()
for (s in services) {
- def outputServicesStr = salt.getReturnValues(salt.cmdRun(pepperEnv, "${probe}*", "service --status-all | grep ${s} | awk \'{print \$4}\'"))
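+ // NOTE (assumption): 'probe' already names the target minion exactly; the old "${probe}*" glob could also match unrelated minions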
+ def outputServicesStr = salt.getReturnValues(salt.cmdRun(pepperEnv, probe, "service --status-all | grep ${s} | awk \'{print \$4}\'"))
def servicesList = outputServicesStr.tokenize("\n").init() //init() returns the items from the Iterable excluding the last item
if (servicesList) {
for (name in servicesList) {
diff --git a/docker-build-image-pipeline.groovy b/docker-build-image-pipeline.groovy
index 1fbd9f0..a39051f 100644
--- a/docker-build-image-pipeline.groovy
+++ b/docker-build-image-pipeline.groovy
@@ -9,94 +9,101 @@
* REGISTRY_URL - Docker registry URL (can be empty)
* ARTIFACTORY_URL - URL to artifactory
* ARTIFACTORY_NAMESPACE - Artifactory namespace (oss, cicd,...)
+ * UPLOAD_TO_DOCKER_HUB - true/false
* REGISTRY_CREDENTIALS_ID - Docker hub credentials id
*
-**/
+ **/
def common = new com.mirantis.mk.Common()
def gerrit = new com.mirantis.mk.Gerrit()
def git = new com.mirantis.mk.Git()
def dockerLib = new com.mirantis.mk.Docker()
def artifactory = new com.mirantis.mcp.MCPArtifactory()
+
+slaveNode = env.SLAVE_NODE ?: 'docker'
+// env values are strings: a literal 'false' would be truthy, so convert explicitly
+uploadToDockerHub = (env.UPLOAD_TO_DOCKER_HUB ?: 'false').toBoolean()
+
timeout(time: 12, unit: 'HOURS') {
- node("docker") {
- def workspace = common.getWorkspace()
- def imageTagsList = IMAGE_TAGS.tokenize(" ")
- try{
+ node(slaveNode) {
+ def workspace = common.getWorkspace()
+ def imageTagsList = env.IMAGE_TAGS.tokenize(" ")
+ try {
- def buildArgs = []
- try {
- buildArgs = IMAGE_BUILD_PARAMS.tokenize(' ')
- } catch (Throwable e) {
- buildArgs = []
- }
- def dockerApp
- stage("checkout") {
- git.checkoutGitRepository('.', IMAGE_GIT_URL, IMAGE_BRANCH, IMAGE_CREDENTIALS_ID)
- }
+ def buildArgs = []
+ try {
+ buildArgs = IMAGE_BUILD_PARAMS.tokenize(' ')
+ } catch (Throwable e) {
+ buildArgs = []
+ }
+ def dockerApp
+ stage("checkout") {
+ git.checkoutGitRepository('.', IMAGE_GIT_URL, IMAGE_BRANCH, IMAGE_CREDENTIALS_ID)
+ }
- if (IMAGE_BRANCH == "master") {
- try {
- def tag = sh(script: "git describe --tags --abbrev=0", returnStdout: true).trim()
- def revision = sh(script: "git describe --tags --abbrev=4 | grep -oP \"^${tag}-\\K.*\" | awk -F\\- '{print \$1}'", returnStdout: true).trim()
- imageTagsList << tag
- revision = revision ? revision : "0"
- if(Integer.valueOf(revision) > 0){
- imageTagsList << "${tag}-${revision}"
+ if (IMAGE_BRANCH == "master") {
+ try {
+ def tag = sh(script: "git describe --tags --abbrev=0", returnStdout: true).trim()
+ def revision = sh(script: "git describe --tags --abbrev=4 | grep -oP \"^${tag}-\\K.*\" | awk -F\\- '{print \$1}'", returnStdout: true).trim()
+ imageTagsList << tag
+ revision = revision ? revision : "0"
+ if (Integer.valueOf(revision) > 0) {
+ imageTagsList << "${tag}-${revision}"
+ }
+ if (!imageTagsList.contains("latest")) {
+ imageTagsList << "latest"
+ }
+ } catch (Exception e) {
+ common.infoMsg("Impossible to find any tag")
+ }
}
- if (!imageTagsList.contains("latest")) {
- imageTagsList << "latest"
- //workaround for all of our docker images
- imageTagsList << "nightly"
- }
- } catch (Exception e) {
- common.infoMsg("Impossible to find any tag")
- }
- }
- stage("build") {
- common.infoMsg("Building docker image ${IMAGE_NAME}")
- dockerApp = dockerLib.buildDockerImage(IMAGE_NAME, "", "${workspace}/${DOCKERFILE_PATH}", imageTagsList[0], buildArgs)
- if(!dockerApp){
- throw new Exception("Docker build image failed")
- }
- }
- stage("upload to docker hub"){
- docker.withRegistry(REGISTRY_URL, REGISTRY_CREDENTIALS_ID) {
- for(int i=0;i<imageTagsList.size();i++){
- common.infoMsg("Uploading image ${IMAGE_NAME} with tag ${imageTagsList[i]} to dockerhub")
- dockerApp.push(imageTagsList[i])
+ stage("build") {
+ common.infoMsg("Building docker image ${IMAGE_NAME}")
+ dockerApp = dockerLib.buildDockerImage(IMAGE_NAME, "", "${workspace}/${DOCKERFILE_PATH}", imageTagsList[0], buildArgs)
+ if (!dockerApp) {
+ throw new Exception("Docker build image failed")
+ }
}
- }
+ stage("upload to docker hub") {
+ if (uploadToDockerHub) {
+ docker.withRegistry(REGISTRY_URL, REGISTRY_CREDENTIALS_ID) {
+ for (int i = 0; i < imageTagsList.size(); i++) {
+ common.infoMsg("Uploading image ${IMAGE_NAME} with tag ${imageTagsList[i]} to dockerhub")
+ dockerApp.push(imageTagsList[i])
+ }
+ }
+ } else {
+ common.infoMsg('Upload to Docker Hub skipped')
+ }
+ }
+ stage("upload to artifactory") {
+ if (common.validInputParam("ARTIFACTORY_URL") && common.validInputParam("ARTIFACTORY_NAMESPACE")) {
+ def artifactoryName = "mcp-ci";
+ def artifactoryServer = Artifactory.server(artifactoryName)
+ def shortImageName = IMAGE_NAME
+ if (IMAGE_NAME.contains("/")) {
+ shortImageName = IMAGE_NAME.tokenize("/")[1]
+ }
+ for (imageTag in imageTagsList) {
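+ // retag from the primary tag we just built; the bare ${IMAGE_NAME} used before implied :latest, which may not exist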
+ sh "docker tag ${IMAGE_NAME}:${imageTagsList[0]} ${ARTIFACTORY_URL}/mirantis/${ARTIFACTORY_NAMESPACE}/${shortImageName}:${imageTag}"
+ for (artifactoryRepo in ["docker-dev-local", "docker-prod-local"]) {
+ common.infoMsg("Uploading image ${IMAGE_NAME} with tag ${imageTag} to artifactory ${artifactoryName} using repo ${artifactoryRepo}")
+ artifactory.uploadImageToArtifactory(artifactoryServer, ARTIFACTORY_URL,
+ "mirantis/${ARTIFACTORY_NAMESPACE}/${shortImageName}",
+ imageTag, artifactoryRepo)
+ }
+ }
+ } else {
+ common.warningMsg("ARTIFACTORY_URL not given, upload to artifactory skipped")
+ }
+ }
+ } catch (Throwable e) {
+ // If there was an error or exception thrown, the build failed
+ currentBuild.result = "FAILURE"
+ currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
+ throw e
+ } finally {
+ common.sendNotification(currentBuild.result, "", ["slack"])
}
- stage("upload to artifactory"){
- if(common.validInputParam("ARTIFACTORY_URL") && common.validInputParam("ARTIFACTORY_NAMESPACE")) {
- def artifactoryName = "mcp-ci";
- def artifactoryServer = Artifactory.server(artifactoryName)
- def shortImageName = IMAGE_NAME
- if (IMAGE_NAME.contains("/")) {
- shortImageName = IMAGE_NAME.tokenize("/")[1]
- }
- for (imageTag in imageTagsList) {
- sh "docker tag ${IMAGE_NAME} ${ARTIFACTORY_URL}/mirantis/${ARTIFACTORY_NAMESPACE}/${shortImageName}:${imageTag}"
- for(artifactoryRepo in ["docker-dev-local", "docker-prod-local"]){
- common.infoMsg("Uploading image ${IMAGE_NAME} with tag ${imageTag} to artifactory ${artifactoryName} using repo ${artifactoryRepo}")
- artifactory.uploadImageToArtifactory(artifactoryServer, ARTIFACTORY_URL,
- "mirantis/${ARTIFACTORY_NAMESPACE}/${shortImageName}",
- imageTag, artifactoryRepo)
- }
- }
- }else{
- common.warningMsg("ARTIFACTORY_URL not given, upload to artifactory skipped")
- }
- }
- } catch (Throwable e) {
- // If there was an error or exception thrown, the build failed
- currentBuild.result = "FAILURE"
- currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
- throw e
- } finally {
- common.sendNotification(currentBuild.result,"",["slack"])
}
- }
}
diff --git a/gating-pipeline.groovy b/gating-pipeline.groovy
index e42524b..99f487d 100644
--- a/gating-pipeline.groovy
+++ b/gating-pipeline.groovy
@@ -3,79 +3,83 @@
* CREDENTIALS_ID - Gerrit credentials ID
* JOBS_NAMESPACE - Gerrit gating jobs namespace (mk, contrail, ...)
*
-**/
+ **/
def common = new com.mirantis.mk.Common()
def gerrit = new com.mirantis.mk.Gerrit()
def ssh = new com.mirantis.mk.Ssh()
-timeout(time: 12, unit: 'HOURS') {
- node("python") {
- try{
- // test if change is not already merged
- ssh.prepareSshAgentKey(CREDENTIALS_ID)
- ssh.ensureKnownHosts(GERRIT_HOST)
- def gerritChange = gerrit.getGerritChange(GERRIT_NAME, GERRIT_HOST, GERRIT_CHANGE_NUMBER, CREDENTIALS_ID, true)
- def doSubmit = false
- def giveVerify = false
- stage("test") {
- if (gerritChange.status != "MERGED" && !SKIP_TEST.equals("true")){
- // test max CodeReview
- if(gerrit.patchsetHasApproval(gerritChange.currentPatchSet,"Code-Review", "+")){
- doSubmit = true
- def gerritProjectArray = GERRIT_PROJECT.tokenize("/")
- def gerritProject = gerritProjectArray[gerritProjectArray.size() - 1]
- def jobsNamespace = JOBS_NAMESPACE
- def plural_namespaces = ['salt-formulas', 'salt-models']
- // remove plural s on the end of job namespace
- if (JOBS_NAMESPACE in plural_namespaces){
- jobsNamespace = JOBS_NAMESPACE.substring(0, JOBS_NAMESPACE.length() - 1)
- }
- // salt-formulas tests have -latest on end of the name
- if(JOBS_NAMESPACE.equals("salt-formulas")){
- gerritProject=gerritProject+"-latest"
- }
- def testJob = String.format("test-%s-%s", jobsNamespace, gerritProject)
- if (_jobExists(testJob)) {
- common.infoMsg("Test job ${testJob} found, running")
- def patchsetVerified = gerrit.patchsetHasApproval(gerritChange.currentPatchSet,"Verified", "+")
- build job: testJob, parameters: [
- [$class: 'StringParameterValue', name: 'DEFAULT_GIT_URL', value: "${GERRIT_SCHEME}://${GERRIT_NAME}@${GERRIT_HOST}:${GERRIT_PORT}/${GERRIT_PROJECT}"],
- [$class: 'StringParameterValue', name: 'DEFAULT_GIT_REF', value: GERRIT_REFSPEC]
- ]
- giveVerify = true
- } else {
- common.infoMsg("Test job ${testJob} not found")
- }
- } else {
- common.errorMsg("Change don't have a CodeReview, skipping gate")
- }
- } else {
- common.infoMsg("Test job skipped")
- }
- }
- stage("submit review"){
- if(gerritChange.status == "MERGED"){
- common.successMsg("Change ${GERRIT_CHANGE_NUMBER} is already merged, no need to gate them")
- }else if(doSubmit){
- if(giveVerify){
- common.warningMsg("Change ${GERRIT_CHANGE_NUMBER} don't have a Verified, but tests were successful, so adding Verified and submitting")
- ssh.agentSh(String.format("ssh -p 29418 %s@%s gerrit review --verified +1 --submit %s,%s", GERRIT_NAME, GERRIT_HOST, GERRIT_CHANGE_NUMBER, GERRIT_PATCHSET_NUMBER))
- }else{
- ssh.agentSh(String.format("ssh -p 29418 %s@%s gerrit review --submit %s,%s", GERRIT_NAME, GERRIT_HOST, GERRIT_CHANGE_NUMBER, GERRIT_PATCHSET_NUMBER))
- }
- common.infoMsg(String.format("Gerrit review %s,%s submitted", GERRIT_CHANGE_NUMBER, GERRIT_PATCHSET_NUMBER))
- }
- }
- } catch (Throwable e) {
- // If there was an error or exception thrown, the build failed
- currentBuild.result = "FAILURE"
- currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
- throw e
- }
- }
-}
+
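+// NOTE: Jenkins.instance.items only covers top-level items, so jobs nested inside folders are not found (assumed to be the intended scope)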
@NonCPS
-def _jobExists(jobName){
- return Jenkins.instance.items.find{it -> it.name.equals(jobName)}
+def jobExists(jobName) {
+ return Jenkins.instance.items.find { it -> it.name.equals(jobName) }
}
+
+slaveNode = env.SLAVE_NODE ?: 'docker'
+
+timeout(time: 12, unit: 'HOURS') {
+ node(slaveNode) {
+ try {
+ // test if change is not already merged
+ ssh.prepareSshAgentKey(CREDENTIALS_ID)
+ ssh.ensureKnownHosts(GERRIT_HOST)
+ def gerritChange = gerrit.getGerritChange(GERRIT_NAME, GERRIT_HOST, GERRIT_CHANGE_NUMBER, CREDENTIALS_ID, true)
+ def doSubmit = false
+ def giveVerify = false
+ stage("test") {
+ if (gerritChange.status != "MERGED" && !SKIP_TEST.equals("true")) {
+ // test max CodeReview
+ if (gerrit.patchsetHasApproval(gerritChange.currentPatchSet, "Code-Review", "+")) {
+ doSubmit = true
+ def gerritProjectArray = GERRIT_PROJECT.tokenize("/")
+ def gerritProject = gerritProjectArray[gerritProjectArray.size() - 1]
+ def jobsNamespace = JOBS_NAMESPACE
+ def plural_namespaces = ['salt-formulas', 'salt-models']
+ // remove the trailing plural 's' from the job namespace
+ if (JOBS_NAMESPACE in plural_namespaces) {
+ jobsNamespace = JOBS_NAMESPACE.substring(0, JOBS_NAMESPACE.length() - 1)
+ }
+ // salt-formulas tests have -latest at the end of the name
+ if (JOBS_NAMESPACE.equals("salt-formulas")) {
+ gerritProject = gerritProject + "-latest"
+ }
+ def testJob = String.format("test-%s-%s", jobsNamespace, gerritProject)
+ if (jobExists(testJob)) {
+ common.infoMsg("Test job ${testJob} found, running")
+ def patchsetVerified = gerrit.patchsetHasApproval(gerritChange.currentPatchSet, "Verified", "+")
+ build job: testJob, parameters: [
+ [$class: 'StringParameterValue', name: 'DEFAULT_GIT_URL', value: "${GERRIT_SCHEME}://${GERRIT_NAME}@${GERRIT_HOST}:${GERRIT_PORT}/${GERRIT_PROJECT}"],
+ [$class: 'StringParameterValue', name: 'DEFAULT_GIT_REF', value: GERRIT_REFSPEC]
+ ]
+ giveVerify = true
+ } else {
+ common.infoMsg("Test job ${testJob} not found")
+ }
+ } else {
+ common.errorMsg("Change don't have a CodeReview, skipping gate")
+ }
+ } else {
+ common.infoMsg("Test job skipped")
+ }
+ }
+ stage("submit review") {
+ if (gerritChange.status == "MERGED") {
+ common.successMsg("Change ${GERRIT_CHANGE_NUMBER} is already merged, no need to gate them")
+ } else if (doSubmit) {
+ if (giveVerify) {
+ common.warningMsg("Change ${GERRIT_CHANGE_NUMBER} don't have a Verified, but tests were successful, so adding Verified and submitting")
+ ssh.agentSh(String.format("ssh -p 29418 %s@%s gerrit review --verified +1 --submit %s,%s", GERRIT_NAME, GERRIT_HOST, GERRIT_CHANGE_NUMBER, GERRIT_PATCHSET_NUMBER))
+ } else {
+ ssh.agentSh(String.format("ssh -p 29418 %s@%s gerrit review --submit %s,%s", GERRIT_NAME, GERRIT_HOST, GERRIT_CHANGE_NUMBER, GERRIT_PATCHSET_NUMBER))
+ }
+ common.infoMsg(String.format("Gerrit review %s,%s submitted", GERRIT_CHANGE_NUMBER, GERRIT_PATCHSET_NUMBER))
+ }
+ }
+ } catch (Throwable e) {
+ // If there was an error or exception thrown, the build failed
+ currentBuild.result = "FAILURE"
+ currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
+ throw e
+ }
+ }
+}
\ No newline at end of file
diff --git a/generate-cookiecutter-products.groovy b/generate-cookiecutter-products.groovy
index c93b5c7..6e65f5e 100644
--- a/generate-cookiecutter-products.groovy
+++ b/generate-cookiecutter-products.groovy
@@ -220,11 +220,11 @@
])
sh "cp mcp-common-scripts/config-drive/create_config_drive.sh create-config-drive && chmod +x create-config-drive"
- sh "cp mcp-common-scripts/config-drive/master_config.sh user_data.sh"
+ sh "[ -f mcp-common-scripts/config-drive/master_config.sh ] && cp mcp-common-scripts/config-drive/master_config.sh user_data || cp mcp-common-scripts/config-drive/master_config.yaml user_data"
sh "git clone --mirror https://github.com/Mirantis/mk-pipelines.git ${pipelineEnv}/mk-pipelines"
sh "git clone --mirror https://github.com/Mirantis/pipeline-library.git ${pipelineEnv}/pipeline-library"
- args = "--user-data user_data.sh --hostname ${saltMaster} --model ${modelEnv} --mk-pipelines ${pipelineEnv}/mk-pipelines/ --pipeline-library ${pipelineEnv}/pipeline-library/ ${saltMaster}.${clusterDomain}-config.iso"
+ args = "--user-data user_data --hostname ${saltMaster} --model ${modelEnv} --mk-pipelines ${pipelineEnv}/mk-pipelines/ --pipeline-library ${pipelineEnv}/pipeline-library/ ${saltMaster}.${clusterDomain}-config.iso"
// load data from model
def smc = [:]
@@ -256,7 +256,7 @@
}
for (i in common.entries(smc)) {
- sh "sed -i 's,export ${i[0]}=.*,export ${i[0]}=${i[1]},' user_data.sh"
+ sh "sed -i 's,${i[0]}=.*,${i[0]}=${i[1]},' user_data"
}
// create cfg config-drive
@@ -268,8 +268,7 @@
if (templateContext['default_context']['local_repositories'] == 'True') {
def aptlyServerHostname = templateContext.default_context.aptly_server_hostname
- def user_data_script_apt_url = "https://raw.githubusercontent.com/Mirantis/mcp-common-scripts/master/config-drive/mirror_config.sh"
- sh "wget -O mirror_config.sh ${user_data_script_apt_url}"
+ sh "cp mcp-common-scripts/config-drive/mirror_config.sh mirror_config.sh"
def smc_apt = [:]
smc_apt['SALT_MASTER_DEPLOY_IP'] = templateContext['default_context']['salt_master_management_address']
@@ -294,7 +293,6 @@
sh(returnStatus: true, script: "tar -czf output-${clusterName}/${clusterName}.tar.gz --exclude='*@tmp' -C ${modelEnv} .")
archiveArtifacts artifacts: "output-${clusterName}/${clusterName}.tar.gz"
-
if (EMAIL_ADDRESS != null && EMAIL_ADDRESS != "") {
emailext(to: EMAIL_ADDRESS,
attachmentsPattern: "output-${clusterName}/*",
diff --git a/test-cookiecutter-reclass.groovy b/test-cookiecutter-reclass.groovy
index b380bfd..413e72e 100644
--- a/test-cookiecutter-reclass.groovy
+++ b/test-cookiecutter-reclass.groovy
@@ -17,7 +17,8 @@
git = new com.mirantis.mk.Git()
python = new com.mirantis.mk.Python()
-slaveNode = env.SLAVE_NODE ?: 'python&&docker'
+slaveNode = env.SLAVE_NODE ?: 'docker'
+// env values are strings: a literal 'false' would be truthy, so convert explicitly
+checkIncludeOrder = (env.CHECK_INCLUDE_ORDER ?: 'false').toBoolean()
// Global var's
alreadyMerged = false
@@ -32,12 +33,14 @@
GERRIT_CHANGE_NUMBER: null]
//
//ccTemplatesRepo = env.COOKIECUTTER_TEMPLATE_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.net:29418/mk/cookiecutter-templates'
+gerritDataCCHEAD = [:]
gerritDataCC = [:]
gerritDataCC << gerritConData
gerritDataCC['gerritBranch'] = env.COOKIECUTTER_TEMPLATE_BRANCH ?: 'master'
gerritDataCC['gerritProject'] = 'mk/cookiecutter-templates'
//
//reclassSystemRepo = env.RECLASS_SYSTEM_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.net:29418/salt-models/reclass-system'
+gerritDataRSHEAD = [:]
gerritDataRS = [:]
gerritDataRS << gerritConData
gerritDataRS['gerritBranch'] = env.RECLASS_MODEL_BRANCH ?: 'master'
@@ -134,7 +137,7 @@
return {
dir(copyTo) {
copyArtifacts(projectName: jobName, selector: specific(build), filter: "nodesinfo.tar.gz")
- sh "tar -xvf nodesinfo.tar.gz"
+ sh "tar -xf nodesinfo.tar.gz"
sh "rm -v nodesinfo.tar.gz"
}
}
@@ -251,20 +254,23 @@
"<br/>Test env variables has been changed:" +
"<br/>COOKIECUTTER_TEMPLATE_BRANCH => ${gerritDataCC['gerritBranch']}" +
"<br/>DISTRIB_REVISION =>${testDistribRevision}" +
- "<br/>RECLASS_MODEL_BRANCH=> ${gerritDataRS['gerritBranch']}" + message
+ "<br/>RECLASS_MODEL_BRANCH=> ${gerritDataRS['gerritBranch']}" + message + "<br/>"
common.warningMsg(message)
currentBuild.description = currentBuild.description ? message + "<br/>" + currentBuild.description : message
} else {
// Check for passed variables:
- if (env.RECLASS_SYSTEM_GIT_REF) {
- gerritDataRS['gerritRefSpec'] = RECLASS_SYSTEM_GIT_REF
- }
- if (env.COOKIECUTTER_TEMPLATE_REF) {
- gerritDataCC['gerritRefSpec'] = COOKIECUTTER_TEMPLATE_REF
- }
- message = "<font color='red'>Manual run detected!</font>" + "<br/>"
+ gerritDataRS['gerritRefSpec'] = env.RECLASS_SYSTEM_GIT_REF ?: null
+ gerritDataCC['gerritRefSpec'] = env.COOKIECUTTER_TEMPLATE_REF ?: null
+ message = "<font color='red'>Non-gerrit trigger run detected!</font>" + "<br/>"
currentBuild.description = currentBuild.description ? message + "<br/>" + currentBuild.description : message
}
+ gerritDataCCHEAD << gerritDataCC
+ gerritDataCCHEAD['gerritRefSpec'] = null
+ gerritDataCCHEAD['GERRIT_CHANGE_NUMBER'] = null
+ gerritDataRSHEAD << gerritDataRS
+ gerritDataRSHEAD['gerritRefSpec'] = null
+ gerritDataRSHEAD['GERRIT_CHANGE_NUMBER'] = null
+
}
def replaceGeneratedValues(path) {
@@ -293,35 +299,32 @@
// tar.gz
// ├── contexts
// │ └── ceph.yml
- // ├── global_reclass <<< reclass system
+ // ├── ${reclassDirName} <<< reclass system
// ├── model
// │ └── ceph <<< from `context basename`
// │ ├── classes
// │ │ ├── cluster
- // │ │ └── system -> ../../../global_reclass
+ // │ │ └── system -> ../../../${reclassDirName}
// │ └── nodes
// │ └── cfg01.ceph-cluster-domain.local.yml
dir(envPath) {
for (String context : contextList) {
def basename = common.GetBaseName(context, '.yml')
dir("${envPath}/model/${basename}") {
- sh(script: 'mkdir -p classes/; ln -sfv ../../../../global_reclass classes/system ')
+ sh(script: "mkdir -p classes/; ln -sfv ../../../../${common.GetBaseName(archiveName, '.tar.gz')} classes/system ")
}
}
// replace all generated passwords/secrets/keys with hardcode value for infra/secrets.yaml
replaceGeneratedValues("${envPath}/model")
- // Save all models and all contexts. Warning! `h` flag must be used.
- sh(script: "set -ex; tar -chzf ${archiveName} --exclude='*@tmp' model contexts", returnStatus: true)
- archiveArtifacts artifacts: archiveName
- // move for "Compare Pillars" stage
- sh(script: "mv -v ${archiveName} ${env.WORKSPACE}")
+ // Save all models and all contexts. Warning! `h` flag must be used!
+ sh(script: "set -ex; tar -czhf ${env.WORKSPACE}/${archiveName} --exclude='*@tmp' model contexts", returnStatus: true)
}
+ archiveArtifacts artifacts: archiveName
}
timeout(time: 1, unit: 'HOURS') {
node(slaveNode) {
globalVariatorsUpdate()
- def gerritDataCCHEAD = [:]
def templateEnvHead = "${env.WORKSPACE}/EnvHead/"
def templateEnvPatched = "${env.WORKSPACE}/EnvPatched/"
def contextFileListHead = []
@@ -338,16 +341,24 @@
// Prepare 2 env - for patchset, and for HEAD
def paralellEnvs = [:]
paralellEnvs.failFast = true
- paralellEnvs['downloadEnvPatched'] = StepPrepareGit(templateEnvPatched, gerritDataCC)
- gerritDataCCHEAD << gerritDataCC
- gerritDataCCHEAD['gerritRefSpec'] = null; gerritDataCCHEAD['GERRIT_CHANGE_NUMBER'] = null
paralellEnvs['downloadEnvHead'] = StepPrepareGit(templateEnvHead, gerritDataCCHEAD)
- parallel paralellEnvs
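+ // when no CC patchset is under test, only the HEAD env is prepared and then mirrored into the 'patched' env via rsync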
+ if (gerritDataCC.get('gerritRefSpec', null)) {
+ paralellEnvs['downloadEnvPatched'] = StepPrepareGit(templateEnvPatched, gerritDataCC)
+ parallel paralellEnvs
+ } else {
+ paralellEnvs['downloadEnvPatched'] = { common.warningMsg('No need to process: downloadEnvPatched') }
+ parallel paralellEnvs
+ sh("rsync -a --exclude '*@tmp' ${templateEnvHead} ${templateEnvPatched}")
+ }
}
stage("Check workflow_definition") {
// Check only for patchset
python.setupVirtualenv(vEnv, 'python2', [], "${templateEnvPatched}/requirements.txt")
- common.infoMsg(python.runVirtualenvCommand(vEnv, "python ${templateEnvPatched}/workflow_definition_test.py"))
+ if (gerritDataCC.get('gerritRefSpec', null)) {
+ common.infoMsg(python.runVirtualenvCommand(vEnv, "python ${templateEnvPatched}/workflow_definition_test.py"))
+ } else {
+ common.infoMsg('No need to process: workflow_definition')
+ }
}
stage("generate models") {
@@ -364,18 +375,29 @@
// Generate over 2env's - for patchset, and for HEAD
def paralellEnvs = [:]
paralellEnvs.failFast = true
- paralellEnvs['GenerateEnvPatched'] = StepGenerateModels(contextFileListPatched, vEnv, templateEnvPatched)
paralellEnvs['GenerateEnvHead'] = StepGenerateModels(contextFileListHead, vEnv, templateEnvHead)
- parallel paralellEnvs
-
- // Collect artifacts
- dir(templateEnvPatched) {
- // Collect only models. For backward comparability - who know, probably someone use it..
- sh(script: "tar -czf model.tar.gz -C model ../contexts .", returnStatus: true)
- archiveArtifacts artifacts: "model.tar.gz"
+ if (gerritDataCC.get('gerritRefSpec', null)) {
+ paralellEnvs['GenerateEnvPatched'] = StepGenerateModels(contextFileListPatched, vEnv, templateEnvPatched)
+ parallel paralellEnvs
+ } else {
+ paralellEnvs['GenerateEnvPatched'] = { common.warningMsg('No need to process: GenerateEnvPatched') }
+ parallel paralellEnvs
+ sh("rsync -a --exclude '*@tmp' ${templateEnvHead} ${templateEnvPatched}")
}
- StepPrepareGit("${env.WORKSPACE}/global_reclass/", gerritDataRS).call()
+ // We need 2 git's, one for HEAD, one for PATCHed.
+ // if no patch, use head for both
+ RSHeadDir = common.GetBaseName(headReclassArtifactName, '.tar.gz')
+ RSPatchedDir = common.GetBaseName(patchedReclassArtifactName, '.tar.gz')
+ common.infoMsg("gerritDataRS= ${gerritDataRS}")
+ common.infoMsg("gerritDataRSHEAD= ${gerritDataRSHEAD}")
+ if (gerritDataRS.get('gerritRefSpec', null)) {
+ StepPrepareGit("${env.WORKSPACE}/${RSPatchedDir}/", gerritDataRS).call()
+ StepPrepareGit("${env.WORKSPACE}/${RSHeadDir}/", gerritDataRSHEAD).call()
+ } else {
+ StepPrepareGit("${env.WORKSPACE}/${RSHeadDir}/", gerritDataRS).call()
+ sh("cd ${env.WORKSPACE} ; ln -svf ${RSHeadDir} ${RSPatchedDir}")
+ }
// link all models, to use one global reclass
// For HEAD
linkReclassModels(contextFileListHead, templateEnvHead, headReclassArtifactName)
@@ -392,7 +414,7 @@
tar -xzf ${headReclassArtifactName} --directory ${compareRoot}/old
""")
common.warningMsg('infra/secrets.yml has been skipped from compare!')
- result = '\n' + common.comparePillars(compareRoot, env.BUILD_URL, "-Ev \'infra/secrets.yml\'")
+ result = '\n' + common.comparePillars(compareRoot, env.BUILD_URL, "-Ev \'infra/secrets.yml|\\.git\'")
currentBuild.description = currentBuild.description ? currentBuild.description + result : result
}
stage("TestContexts Head/Patched") {
@@ -427,6 +449,49 @@
result = '\n' + common.comparePillars(reclassNodeInfoDir, env.BUILD_URL, '')
currentBuild.description = currentBuild.description ? currentBuild.description + result : result
}
+ stage('Check include order') {
+ if (!checkIncludeOrder) {
+ common.infoMsg('Include order check requires too much time and is currently disabled!')
+ } else {
+ def correctIncludeOrder = ["service", "system", "cluster"]
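+ // classes must appear grouped as service.*, then system.*, then cluster.*; indexOf() ranks each class by its top-level token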
+ dir(reclassInfoPatchedPath) {
+ def nodeInfoFiles = findFiles(glob: "**/*.reclass.nodeinfo")
+ def messages = ["<b>Wrong include ordering found</b><ul>"]
+ def stepsForParallel = [:]
+ nodeInfoFiles.each { nodeInfo ->
+ stepsForParallel.put("Checking ${nodeInfo.path}:", {
+ def node = readYaml file: nodeInfo.path
+ def classes = node['classes']
+ def curClassID = 0
+ def prevClassID = 0
+ def wrongOrder = false
+ for (String className in classes) {
+ def currentClass = className.tokenize('.')[0]
+ curClassID = correctIncludeOrder.indexOf(currentClass)
+ if (currentClass != correctIncludeOrder[prevClassID]) {
+ if (prevClassID > curClassID) {
+ wrongOrder = true
+ common.warningMsg("File ${nodeInfo.path} contains wrong order of classes including: Includes for ${className} should be declared before ${correctIncludeOrder[prevClassID]} includes")
+ } else {
+ prevClassID = curClassID
+ }
+ }
+ }
+ if (wrongOrder) {
+ messages.add("<li>${nodeInfo.path} contains wrong class include order</li>")
+ }
+ })
+ }
+ parallel stepsForParallel
+ def includeOrder = '<b>No wrong include order</b>'
+ if (messages.size() != 1) {
+ includeOrder = messages.join('')
+ }
+ currentBuild.description = currentBuild.description ? currentBuild.description + includeOrder : includeOrder
+ }
+ }
+ }
sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
} catch (Throwable e) {
diff --git a/test-system-reclass-pipeline.groovy b/test-system-reclass-pipeline.groovy
index afd2857..47dde97 100644
--- a/test-system-reclass-pipeline.groovy
+++ b/test-system-reclass-pipeline.groovy
@@ -2,34 +2,17 @@
def common = new com.mirantis.mk.Common()
-slaveNode = env.SLAVE_NODE ?: 'python&&docker'
+def slaveNode = env.SLAVE_NODE ?: 'python&&docker'
+def gerritCredentials = env.CREDENTIALS_ID ?: 'gerrit'
-def gerritCredentials
-try {
- gerritCredentials = CREDENTIALS_ID
-} catch (MissingPropertyException e) {
- gerritCredentials = "gerrit"
-}
+def gerritRef = env.GERRIT_REFSPEC ?: null
+def defaultGitRef = env.DEFAULT_GIT_REF ?: null
+def defaultGitUrl = env.DEFAULT_GIT_URL ?: null
-def gerritRef
-try {
- gerritRef = GERRIT_REFSPEC
-} catch (MissingPropertyException e) {
- gerritRef = null
-}
-
-def defaultGitRef, defaultGitUrl
-try {
- defaultGitRef = DEFAULT_GIT_REF
- defaultGitUrl = DEFAULT_GIT_URL
-} catch (MissingPropertyException e) {
- defaultGitRef = null
- defaultGitUrl = null
-}
def checkouted = false
def merged = false
def systemRefspec = "HEAD"
-def formulasRevision = 'testing'
+
timeout(time: 12, unit: 'HOURS') {
node(slaveNode) {
try {
@@ -67,35 +50,35 @@
def branches = [:]
def testModels = documentationOnly ? [] : TEST_MODELS.split(',')
- for (int i = 0; i < testModels.size(); i++) {
- def cluster = testModels[i]
- def clusterGitUrl = defaultGitUrl.substring(0, defaultGitUrl.lastIndexOf("/") + 1) + cluster
- branches["${cluster}"] = {
- build job: "test-salt-model-${cluster}", parameters: [
- [$class: 'StringParameterValue', name: 'DEFAULT_GIT_URL', value: clusterGitUrl],
- [$class: 'StringParameterValue', name: 'DEFAULT_GIT_REF', value: "HEAD"],
- [$class: 'StringParameterValue', name: 'SYSTEM_GIT_URL', value: defaultGitUrl],
- [$class: 'StringParameterValue', name: 'SYSTEM_GIT_REF', value: systemRefspec],
- [$class: 'StringParameterValue', name: 'FORMULAS_REVISION', value: formulasRevision],
- ]
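+ // assumption: per-cluster salt-model tests gate only the master branch; other branches rely on the cookiecutter job below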
+ if (['master'].contains(env.GERRIT_BRANCH)) {
+ for (int i = 0; i < testModels.size(); i++) {
+ def cluster = testModels[i]
+ def clusterGitUrl = defaultGitUrl.substring(0, defaultGitUrl.lastIndexOf("/") + 1) + cluster
+ branches["${cluster}"] = {
+ build job: "test-salt-model-${cluster}", parameters: [
+ [$class: 'StringParameterValue', name: 'DEFAULT_GIT_URL', value: clusterGitUrl],
+ [$class: 'StringParameterValue', name: 'DEFAULT_GIT_REF', value: "HEAD"],
+ [$class: 'StringParameterValue', name: 'SYSTEM_GIT_URL', value: defaultGitUrl],
+ [$class: 'StringParameterValue', name: 'SYSTEM_GIT_REF', value: systemRefspec]
+ ]
+ }
}
+ } else {
+ common.warningMsg("Tests for ${testModels} skipped!")
}
branches["cookiecutter"] = {
build job: "test-mk-cookiecutter-templates", parameters: [
[$class: 'StringParameterValue', name: 'RECLASS_SYSTEM_URL', value: defaultGitUrl],
- [$class: 'StringParameterValue', name: 'RECLASS_SYSTEM_GIT_REF', value: systemRefspec],
- [$class: 'StringParameterValue', name: 'DISTRIB_REVISION', value: formulasRevision]
-
+ [$class: 'StringParameterValue', name: 'RECLASS_SYSTEM_GIT_REF', value: systemRefspec]
]
}
parallel branches
} else {
- throw new Exception("Cannot checkout gerrit patchset, GERRIT_REFSPEC and DEFAULT_GIT_REF is null")
+ error("Cannot checkout gerrit patchset, GERRIT_REFSPEC and DEFAULT_GIT_REF is null")
}
}
}
} catch (Throwable e) {
- // If there was an error or exception thrown, the build failed
currentBuild.result = "FAILURE"
currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
throw e