Merge "getFirstMinion returns minion id"
diff --git a/docker-build-image-pipeline.groovy b/docker-build-image-pipeline.groovy
index 1fbd9f0..a39051f 100644
--- a/docker-build-image-pipeline.groovy
+++ b/docker-build-image-pipeline.groovy
@@ -9,94 +9,101 @@
* REGISTRY_URL - Docker registry URL (can be empty)
* ARTIFACTORY_URL - URL to artifactory
* ARTIFACTORY_NAMESPACE - Artifactory namespace (oss, cicd,...)
+ * UPLOAD_TO_DOCKER_HUB - True/False
* REGISTRY_CREDENTIALS_ID - Docker hub credentials id
*
-**/
+ **/
def common = new com.mirantis.mk.Common()
def gerrit = new com.mirantis.mk.Gerrit()
def git = new com.mirantis.mk.Git()
def dockerLib = new com.mirantis.mk.Docker()
def artifactory = new com.mirantis.mcp.MCPArtifactory()
+
+slaveNode = env.SLAVE_NODE ?: 'docker'
+uploadToDockerHub = (env.UPLOAD_TO_DOCKER_HUB ?: 'false').toBoolean()
+
timeout(time: 12, unit: 'HOURS') {
- node("docker") {
- def workspace = common.getWorkspace()
- def imageTagsList = IMAGE_TAGS.tokenize(" ")
- try{
+ node(slaveNode) {
+ def workspace = common.getWorkspace()
+ def imageTagsList = env.IMAGE_TAGS.tokenize(" ")
+ try {
- def buildArgs = []
- try {
- buildArgs = IMAGE_BUILD_PARAMS.tokenize(' ')
- } catch (Throwable e) {
- buildArgs = []
- }
- def dockerApp
- stage("checkout") {
- git.checkoutGitRepository('.', IMAGE_GIT_URL, IMAGE_BRANCH, IMAGE_CREDENTIALS_ID)
- }
+ def buildArgs = []
+ try {
+ buildArgs = IMAGE_BUILD_PARAMS.tokenize(' ')
+ } catch (Throwable e) {
+ buildArgs = []
+ }
+ def dockerApp
+ stage("checkout") {
+ git.checkoutGitRepository('.', IMAGE_GIT_URL, IMAGE_BRANCH, IMAGE_CREDENTIALS_ID)
+ }
- if (IMAGE_BRANCH == "master") {
- try {
- def tag = sh(script: "git describe --tags --abbrev=0", returnStdout: true).trim()
- def revision = sh(script: "git describe --tags --abbrev=4 | grep -oP \"^${tag}-\\K.*\" | awk -F\\- '{print \$1}'", returnStdout: true).trim()
- imageTagsList << tag
- revision = revision ? revision : "0"
- if(Integer.valueOf(revision) > 0){
- imageTagsList << "${tag}-${revision}"
+ if (IMAGE_BRANCH == "master") {
+ try {
+ def tag = sh(script: "git describe --tags --abbrev=0", returnStdout: true).trim()
+ def revision = sh(script: "git describe --tags --abbrev=4 | grep -oP \"^${tag}-\\K.*\" | awk -F\\- '{print \$1}'", returnStdout: true).trim()
+ imageTagsList << tag
+ revision = revision ? revision : "0"
+ if (Integer.valueOf(revision) > 0) {
+ imageTagsList << "${tag}-${revision}"
+ }
+ if (!imageTagsList.contains("latest")) {
+ imageTagsList << "latest"
+ }
+ } catch (Exception e) {
+ common.infoMsg("Impossible to find any tag")
+ }
}
- if (!imageTagsList.contains("latest")) {
- imageTagsList << "latest"
- //workaround for all of our docker images
- imageTagsList << "nightly"
- }
- } catch (Exception e) {
- common.infoMsg("Impossible to find any tag")
- }
- }
- stage("build") {
- common.infoMsg("Building docker image ${IMAGE_NAME}")
- dockerApp = dockerLib.buildDockerImage(IMAGE_NAME, "", "${workspace}/${DOCKERFILE_PATH}", imageTagsList[0], buildArgs)
- if(!dockerApp){
- throw new Exception("Docker build image failed")
- }
- }
- stage("upload to docker hub"){
- docker.withRegistry(REGISTRY_URL, REGISTRY_CREDENTIALS_ID) {
- for(int i=0;i<imageTagsList.size();i++){
- common.infoMsg("Uploading image ${IMAGE_NAME} with tag ${imageTagsList[i]} to dockerhub")
- dockerApp.push(imageTagsList[i])
+ stage("build") {
+ common.infoMsg("Building docker image ${IMAGE_NAME}")
+ dockerApp = dockerLib.buildDockerImage(IMAGE_NAME, "", "${workspace}/${DOCKERFILE_PATH}", imageTagsList[0], buildArgs)
+ if (!dockerApp) {
+ throw new Exception("Docker build image failed")
+ }
}
- }
+ stage("upload to docker hub") {
+ if (uploadToDockerHub) {
+ docker.withRegistry(REGISTRY_URL, REGISTRY_CREDENTIALS_ID) {
+ for (int i = 0; i < imageTagsList.size(); i++) {
+ common.infoMsg("Uploading image ${IMAGE_NAME} with tag ${imageTagsList[i]} to dockerhub")
+ dockerApp.push(imageTagsList[i])
+ }
+ }
+ } else {
+ common.infoMsg('Upload to Docker Hub skipped')
+ }
+ }
+ stage("upload to artifactory") {
+ if (common.validInputParam("ARTIFACTORY_URL") && common.validInputParam("ARTIFACTORY_NAMESPACE")) {
+ def artifactoryName = "mcp-ci";
+ def artifactoryServer = Artifactory.server(artifactoryName)
+ def shortImageName = IMAGE_NAME
+ if (IMAGE_NAME.contains("/")) {
+ shortImageName = IMAGE_NAME.tokenize("/")[1]
+ }
+ for (imageTag in imageTagsList) {
+ sh "docker tag ${IMAGE_NAME}:${imageTagsList[0]} ${ARTIFACTORY_URL}/mirantis/${ARTIFACTORY_NAMESPACE}/${shortImageName}:${imageTag}"
+ for (artifactoryRepo in ["docker-dev-local", "docker-prod-local"]) {
+ common.infoMsg("Uploading image ${IMAGE_NAME} with tag ${imageTag} to artifactory ${artifactoryName} using repo ${artifactoryRepo}")
+ artifactory.uploadImageToArtifactory(artifactoryServer, ARTIFACTORY_URL,
+ "mirantis/${ARTIFACTORY_NAMESPACE}/${shortImageName}",
+ imageTag, artifactoryRepo)
+ }
+ }
+ } else {
+ common.warningMsg("ARTIFACTORY_URL not given, upload to artifactory skipped")
+ }
+ }
+ } catch (Throwable e) {
+ // If there was an error or exception thrown, the build failed
+ currentBuild.result = "FAILURE"
+ currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
+ throw e
+ } finally {
+ common.sendNotification(currentBuild.result, "", ["slack"])
}
- stage("upload to artifactory"){
- if(common.validInputParam("ARTIFACTORY_URL") && common.validInputParam("ARTIFACTORY_NAMESPACE")) {
- def artifactoryName = "mcp-ci";
- def artifactoryServer = Artifactory.server(artifactoryName)
- def shortImageName = IMAGE_NAME
- if (IMAGE_NAME.contains("/")) {
- shortImageName = IMAGE_NAME.tokenize("/")[1]
- }
- for (imageTag in imageTagsList) {
- sh "docker tag ${IMAGE_NAME} ${ARTIFACTORY_URL}/mirantis/${ARTIFACTORY_NAMESPACE}/${shortImageName}:${imageTag}"
- for(artifactoryRepo in ["docker-dev-local", "docker-prod-local"]){
- common.infoMsg("Uploading image ${IMAGE_NAME} with tag ${imageTag} to artifactory ${artifactoryName} using repo ${artifactoryRepo}")
- artifactory.uploadImageToArtifactory(artifactoryServer, ARTIFACTORY_URL,
- "mirantis/${ARTIFACTORY_NAMESPACE}/${shortImageName}",
- imageTag, artifactoryRepo)
- }
- }
- }else{
- common.warningMsg("ARTIFACTORY_URL not given, upload to artifactory skipped")
- }
- }
- } catch (Throwable e) {
- // If there was an error or exception thrown, the build failed
- currentBuild.result = "FAILURE"
- currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
- throw e
- } finally {
- common.sendNotification(currentBuild.result,"",["slack"])
}
- }
}
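
The substance of this hunk is the env-backed defaults (SLAVE_NODE, UPLOAD_TO_DOCKER_HUB) and the push stage now being gated on that flag. A minimal standalone sketch of the same pattern, assuming only the stock Jenkins Docker Pipeline steps (docker.build, docker.withRegistry) rather than the Mirantis dockerLib helper; IMAGE_NAME, IMAGE_TAGS, REGISTRY_URL and REGISTRY_CREDENTIALS_ID are the job parameters referenced above:

    // Sketch: read parameters with defaults, convert the flag explicitly,
    // and only enter the registry block when the flag is set.
    def slaveLabel = env.SLAVE_NODE ?: 'docker'
    def pushToHub = (env.UPLOAD_TO_DOCKER_HUB ?: 'false').toBoolean()

    node(slaveLabel) {
        def tags = (env.IMAGE_TAGS ?: 'latest').tokenize(' ')
        def app = docker.build("${env.IMAGE_NAME}:${tags[0]}")
        if (pushToHub) {
            docker.withRegistry(env.REGISTRY_URL, env.REGISTRY_CREDENTIALS_ID) {
                for (t in tags) {
                    app.push(t)   // one push per requested tag
                }
            }
        } else {
            echo 'Upload to Docker Hub skipped'
        }
    }
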
diff --git a/generate-cookiecutter-products.groovy b/generate-cookiecutter-products.groovy
index 549a4d3..6e65f5e 100644
--- a/generate-cookiecutter-products.groovy
+++ b/generate-cookiecutter-products.groovy
@@ -201,7 +201,7 @@
// download create-config-drive
// FIXME: that should be refactored, to use git clone - to be able download it from custom repo.
- def mcpCommonScriptsBranch = templateContext.default_context.mcp_common_scripts_branch
+ def mcpCommonScriptsBranch = templateContext['default_context']['mcp_common_scripts_branch']
if (mcpCommonScriptsBranch == '') {
mcpCommonScriptsBranch = mcpVersion
// Don't have n/t/s for mcp-common-scripts repo, therefore use master
@@ -210,16 +210,21 @@
mcpCommonScriptsBranch = 'master'
}
}
- def config_drive_script_url = "https://raw.githubusercontent.com/Mirantis/mcp-common-scripts/${mcpCommonScriptsBranch}/config-drive/create_config_drive.sh"
- def user_data_script_url = "https://raw.githubusercontent.com/Mirantis/mcp-common-scripts/${mcpCommonScriptsBranch}/config-drive/master_config.sh"
- common.retry(3, 5) {
- sh "wget -O create-config-drive ${config_drive_script_url} && chmod +x create-config-drive"
- sh "wget -O user_data.sh ${user_data_script_url}"
- }
+
+ def commonScriptsRepoUrl = 'https://gerrit.mcp.mirantis.net/mcp/mcp-common-scripts'
+ checkout([
+ $class: 'GitSCM',
+ branches: [[name: 'FETCH_HEAD'],],
+ extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'mcp-common-scripts']],
+ userRemoteConfigs: [[url: commonScriptsRepoUrl, refspec: mcpCommonScriptsBranch],],
+ ])
+
+ sh "cp mcp-common-scripts/config-drive/create_config_drive.sh create-config-drive && chmod +x create-config-drive"
+ sh "[ -f mcp-common-scripts/config-drive/master_config.sh ] && cp mcp-common-scripts/config-drive/master_config.sh user_data || cp mcp-common-scripts/config-drive/master_config.yaml user_data"
sh "git clone --mirror https://github.com/Mirantis/mk-pipelines.git ${pipelineEnv}/mk-pipelines"
sh "git clone --mirror https://github.com/Mirantis/pipeline-library.git ${pipelineEnv}/pipeline-library"
- args = "--user-data user_data.sh --hostname ${saltMaster} --model ${modelEnv} --mk-pipelines ${pipelineEnv}/mk-pipelines/ --pipeline-library ${pipelineEnv}/pipeline-library/ ${saltMaster}.${clusterDomain}-config.iso"
+ args = "--user-data user_data --hostname ${saltMaster} --model ${modelEnv} --mk-pipelines ${pipelineEnv}/mk-pipelines/ --pipeline-library ${pipelineEnv}/pipeline-library/ ${saltMaster}.${clusterDomain}-config.iso"
// load data from model
def smc = [:]
@@ -251,7 +256,7 @@
}
for (i in common.entries(smc)) {
- sh "sed -i 's,export ${i[0]}=.*,export ${i[0]}=${i[1]},' user_data.sh"
+ sh "sed -i 's,${i[0]}=.*,${i[0]}=${i[1]},' user_data"
}
// create cfg config-drive
@@ -263,8 +268,7 @@
if (templateContext['default_context']['local_repositories'] == 'True') {
def aptlyServerHostname = templateContext.default_context.aptly_server_hostname
- def user_data_script_apt_url = "https://raw.githubusercontent.com/Mirantis/mcp-common-scripts/master/config-drive/mirror_config.sh"
- sh "wget -O mirror_config.sh ${user_data_script_apt_url}"
+ sh "cp mcp-common-scripts/config-drive/mirror_config.sh mirror_config.sh"
def smc_apt = [:]
smc_apt['SALT_MASTER_DEPLOY_IP'] = templateContext['default_context']['salt_master_management_address']
@@ -289,7 +293,6 @@
sh(returnStatus: true, script: "tar -czf output-${clusterName}/${clusterName}.tar.gz --exclude='*@tmp' -C ${modelEnv} .")
archiveArtifacts artifacts: "output-${clusterName}/${clusterName}.tar.gz"
-
if (EMAIL_ADDRESS != null && EMAIL_ADDRESS != "") {
emailext(to: EMAIL_ADDRESS,
attachmentsPattern: "output-${clusterName}/*",
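
Here the wget of individual raw files is replaced with a checkout of mcp-common-scripts, so mcpCommonScriptsBranch can point at a branch or a Gerrit refspec. A minimal sketch of that scripted checkout pattern, assuming the Jenkins Git plugin; MCP_COMMON_SCRIPTS_REF is a hypothetical parameter standing in for the branch resolution logic above:

    // Sketch: fetch a single refspec into a named subdirectory,
    // then copy files out of the working copy instead of wget-ing raw URLs.
    def repoUrl = 'https://gerrit.mcp.mirantis.net/mcp/mcp-common-scripts'
    def ref = env.MCP_COMMON_SCRIPTS_REF ?: 'master'   // hypothetical parameter

    checkout([
        $class: 'GitSCM',
        branches: [[name: 'FETCH_HEAD']],               // build exactly what the refspec fetched
        extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'mcp-common-scripts']],
        userRemoteConfigs: [[url: repoUrl, refspec: ref]],
    ])
    sh 'cp mcp-common-scripts/config-drive/create_config_drive.sh create-config-drive && chmod +x create-config-drive'
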
diff --git a/test-system-reclass-pipeline.groovy b/test-system-reclass-pipeline.groovy
index afd2857..d6e38e4 100644
--- a/test-system-reclass-pipeline.groovy
+++ b/test-system-reclass-pipeline.groovy
@@ -2,34 +2,18 @@
def common = new com.mirantis.mk.Common()
-slaveNode = env.SLAVE_NODE ?: 'python&&docker'
+def slaveNode = env.SLAVE_NODE ?: 'python&&docker'
+def gerritCredentials = env.CREDENTIALS_ID ?: 'gerrit'
-def gerritCredentials
-try {
- gerritCredentials = CREDENTIALS_ID
-} catch (MissingPropertyException e) {
- gerritCredentials = "gerrit"
-}
+def gerritRef = env.GERRIT_REFSPEC ?: null
+def defaultGitRef = env.DEFAULT_GIT_REF ?: null
+def defaultGitUrl = env.DEFAULT_GIT_URL ?: null
-def gerritRef
-try {
- gerritRef = GERRIT_REFSPEC
-} catch (MissingPropertyException e) {
- gerritRef = null
-}
-
-def defaultGitRef, defaultGitUrl
-try {
- defaultGitRef = DEFAULT_GIT_REF
- defaultGitUrl = DEFAULT_GIT_URL
-} catch (MissingPropertyException e) {
- defaultGitRef = null
- defaultGitUrl = null
-}
def checkouted = false
def merged = false
def systemRefspec = "HEAD"
def formulasRevision = 'testing'
+
timeout(time: 12, unit: 'HOURS') {
node(slaveNode) {
try {
@@ -67,18 +51,22 @@
def branches = [:]
def testModels = documentationOnly ? [] : TEST_MODELS.split(',')
- for (int i = 0; i < testModels.size(); i++) {
- def cluster = testModels[i]
- def clusterGitUrl = defaultGitUrl.substring(0, defaultGitUrl.lastIndexOf("/") + 1) + cluster
- branches["${cluster}"] = {
- build job: "test-salt-model-${cluster}", parameters: [
- [$class: 'StringParameterValue', name: 'DEFAULT_GIT_URL', value: clusterGitUrl],
- [$class: 'StringParameterValue', name: 'DEFAULT_GIT_REF', value: "HEAD"],
- [$class: 'StringParameterValue', name: 'SYSTEM_GIT_URL', value: defaultGitUrl],
- [$class: 'StringParameterValue', name: 'SYSTEM_GIT_REF', value: systemRefspec],
- [$class: 'StringParameterValue', name: 'FORMULAS_REVISION', value: formulasRevision],
- ]
+ if (['master'].contains(env.GERRIT_BRANCH)) {
+ for (int i = 0; i < testModels.size(); i++) {
+ def cluster = testModels[i]
+ def clusterGitUrl = defaultGitUrl.substring(0, defaultGitUrl.lastIndexOf("/") + 1) + cluster
+ branches["${cluster}"] = {
+ build job: "test-salt-model-${cluster}", parameters: [
+ [$class: 'StringParameterValue', name: 'DEFAULT_GIT_URL', value: clusterGitUrl],
+ [$class: 'StringParameterValue', name: 'DEFAULT_GIT_REF', value: "HEAD"],
+ [$class: 'StringParameterValue', name: 'SYSTEM_GIT_URL', value: defaultGitUrl],
+ [$class: 'StringParameterValue', name: 'SYSTEM_GIT_REF', value: systemRefspec],
+ [$class: 'StringParameterValue', name: 'FORMULAS_REVISION', value: formulasRevision],
+ ]
+ }
}
+ } else {
+ common.warningMsg("Tests for ${testModels} skipped!")
}
branches["cookiecutter"] = {
build job: "test-mk-cookiecutter-templates", parameters: [
@@ -90,12 +78,11 @@
}
parallel branches
} else {
- throw new Exception("Cannot checkout gerrit patchset, GERRIT_REFSPEC and DEFAULT_GIT_REF is null")
+ error("Cannot checkout gerrit patchset, GERRIT_REFSPEC and DEFAULT_GIT_REF is null")
}
}
}
} catch (Throwable e) {
- // If there was an error or exception thrown, the build failed
currentBuild.result = "FAILURE"
currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
throw e
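
The per-parameter try/catch of MissingPropertyException is collapsed into the env.X ?: default idiom, and the per-cluster test jobs are now scheduled only for master. A minimal sketch of both patterns, assuming a Gerrit-triggered job; the cluster and downstream job names are illustrative:

    // Sketch: env-backed defaults instead of try/catch around bare parameter names,
    // plus a branch guard before fanning out the heavy per-cluster jobs.
    def gerritRef = env.GERRIT_REFSPEC ?: null
    def defaultGitRef = env.DEFAULT_GIT_REF ?: null
    def defaultGitUrl = env.DEFAULT_GIT_URL ?: null

    def branches = [:]
    if (env.GERRIT_BRANCH == 'master') {
        branches['example-cluster'] = {    // illustrative cluster/job name
            build job: 'test-salt-model-example-cluster', parameters: [
                string(name: 'DEFAULT_GIT_URL', value: defaultGitUrl),
                string(name: 'DEFAULT_GIT_REF', value: 'HEAD'),
            ]
        }
    } else {
        echo "Tests skipped for branch ${env.GERRIT_BRANCH}"
    }
    if (branches) {
        parallel branches
    }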