Merge "Pull the model at start of change-config pipeline"
diff --git a/build-debian-packages-pipeline.groovy b/build-debian-packages-pipeline.groovy
index 9e40944..0d9839f 100644
--- a/build-debian-packages-pipeline.groovy
+++ b/build-debian-packages-pipeline.groovy
@@ -28,6 +28,13 @@
uploadPpa = null
}
+def lintianCheck
+try {
+ lintianCheck = LINTIAN_CHECK.toBoolean()
+} catch (MissingPropertyException e) {
+ lintianCheck = true
+}
+
def uploadAptly
try {
uploadAptly = UPLOAD_APTLY.toBoolean()
@@ -73,13 +80,16 @@
)
archiveArtifacts artifacts: "build-area/*.deb"
}
- stage("lintian") {
- changes = sh script: "ls build-area/*_"+ARCH+".changes", returnStdout: true
- try {
- debian.runLintian(changes.trim(), OS, OS+":"+DIST)
- } catch (Exception e) {
- println "[WARN] Lintian returned non-zero exit status"
- currentBuild.result = 'UNSTABLE'
+
+ if (lintianCheck) {
+ stage("lintian") {
+ changes = sh script: "ls build-area/*_"+ARCH+".changes", returnStdout: true
+ try {
+ debian.runLintian(changes.trim(), OS, OS+":"+DIST)
+ } catch (Exception e) {
+ println "[WARN] Lintian returned non-zero exit status"
+ currentBuild.result = 'UNSTABLE'
+ }
}
}
diff --git a/cicd-lab-pipeline.groovy b/cicd-lab-pipeline.groovy
index 8902e1f..782a051 100644
--- a/cicd-lab-pipeline.groovy
+++ b/cicd-lab-pipeline.groovy
@@ -225,7 +225,7 @@
// Postgres client - initialize OSS services databases
timeout(300){
println "Waiting for postgresql database to come up.."
- salt.cmdRun(saltMaster, 'I@postgresql:client', 'while true; do if docker service logs postgresql_db | grep "ready to accept"; then break; else sleep 5; fi; done')
+ salt.cmdRun(saltMaster, 'I@postgresql:client', 'while true; do if docker service logs postgresql_postgresql-db | grep "ready to accept"; then break; else sleep 5; fi; done')
}
// XXX: first run usually fails on some inserts, but we need to create databases at first
salt.enforceState(saltMaster, 'I@postgresql:client', 'postgresql.client', true, false)
diff --git a/cloud-deploy-pipeline.groovy b/cloud-deploy-pipeline.groovy
index b7e22a8..3c8846c 100644
--- a/cloud-deploy-pipeline.groovy
+++ b/cloud-deploy-pipeline.groovy
@@ -133,7 +133,7 @@
//
wrap([$class: 'BuildUser']) {
if (env.BUILD_USER_ID && !env.BUILD_USER_ID.equals("jenkins") && !STACK_REUSE.toBoolean()) {
- def existingStacks = openstack.getStacksForNameContains(openstackCloud, "${env.BUILD_USER_ID}-${JOB_NAME}", venv)
+ def existingStacks = openstack.getStacksForNameContains(openstackCloud, "${env.BUILD_USER_ID}-${JOB_NAME}".replaceAll('_', '-'), venv)
if (existingStacks.size() >= _MAX_PERMITTED_STACKS) {
STACK_DELETE = "false"
throw new Exception("You cannot create new stack, you already have ${_MAX_PERMITTED_STACKS} stacks of this type (${JOB_NAME}). \nStack names: ${existingStacks}")
diff --git a/generate-cookiecutter-products.groovy b/generate-cookiecutter-products.groovy
index 288a4a5..4326e7e 100644
--- a/generate-cookiecutter-products.groovy
+++ b/generate-cookiecutter-products.groovy
@@ -28,6 +28,7 @@
def templateContext = readYaml text: COOKIECUTTER_TEMPLATE_CONTEXT
def clusterDomain = templateContext.default_context.cluster_domain
def clusterName = templateContext.default_context.cluster_name
+ def saltMaster = templateContext.default_context.salt_master_hostname
def cutterEnv = "${env.WORKSPACE}/cutter"
def jinjaEnv = "${env.WORKSPACE}/jinja"
def outputDestination = "${modelEnv}/classes/cluster/${clusterName}"
@@ -112,7 +113,7 @@
}
stage('Generate new SaltMaster node') {
- def nodeFile = "${modelEnv}/nodes/cfg01.${clusterDomain}.yml"
+ def nodeFile = "${modelEnv}/nodes/${saltMaster}.${clusterDomain}.yml"
def nodeString = """classes:
- cluster.${clusterName}.infra.config
parameters:
@@ -121,7 +122,7 @@
reclass_data_revision: master
linux:
system:
- name: cfg01
+ name: ${saltMaster}
domain: ${clusterDomain}
"""
sh "mkdir -p ${modelEnv}/nodes/"
@@ -133,7 +134,7 @@
stage("Test") {
if (SHARED_RECLASS_URL != "" && TEST_MODEL && TEST_MODEL.toBoolean()) {
sh("cp -r ${modelEnv} ${testEnv}")
- saltModelTesting.setupAndTestNode("cfg01.${clusterDomain}", "", testEnv)
+ saltModelTesting.setupAndTestNode("${saltMaster}.${clusterDomain}", "", testEnv)
}
}
@@ -148,11 +149,11 @@
sh "git clone https://github.com/Mirantis/mk-pipelines.git ${pipelineEnv}/mk-pipelines"
sh "git clone https://github.com/Mirantis/pipeline-library.git ${pipelineEnv}/pipeline-library"
- args = "--user-data user_data.sh --hostname cfg01 --model ${modelEnv} --mk-pipelines ${pipelineEnv}/mk-pipelines/ --pipeline-library ${pipelineEnv}/pipeline-library/ cfg01.${clusterDomain}-config.iso"
+ args = "--user-data user_data.sh --hostname ${saltMaster} --model ${modelEnv} --mk-pipelines ${pipelineEnv}/mk-pipelines/ --pipeline-library ${pipelineEnv}/pipeline-library/ ${saltMaster}.${clusterDomain}-config.iso"
// load data from model
def smc = [:]
- smc['SALT_MASTER_MINION_ID'] = "cfg01.${clusterDomain}"
+ smc['SALT_MASTER_MINION_ID'] = "${saltMaster}.${clusterDomain}"
smc['SALT_MASTER_DEPLOY_IP'] = templateContext['default_context']['salt_master_management_address']
smc['DEPLOY_NETWORK_GW'] = templateContext['default_context']['deploy_network_gateway']
smc['DEPLOY_NETWORK_NETMASK'] = templateContext['default_context']['deploy_network_netmask']
@@ -164,9 +165,9 @@
// create config-drive
sh "./create-config-drive ${args}"
- sh("mkdir output-${clusterName} && mv cfg01.${clusterDomain}-config.iso output-${clusterName}/")
+ sh("mkdir output-${clusterName} && mv ${saltMaster}.${clusterDomain}-config.iso output-${clusterName}/")
// save iso to artifacts
- archiveArtifacts artifacts: "output-${clusterName}/cfg01.${clusterDomain}-config.iso"
+ archiveArtifacts artifacts: "output-${clusterName}/${saltMaster}.${clusterDomain}-config.iso"
}
stage ('Save changes reclass model') {
diff --git a/test-salt-formulas-env.groovy b/test-salt-formulas-env.groovy
index 1f646b9..4edcdb7 100644
--- a/test-salt-formulas-env.groovy
+++ b/test-salt-formulas-env.groovy
@@ -19,74 +19,76 @@
def checkouted = false
-node("python") {
- try {
- stage("checkout") {
- if (defaultGitRef && defaultGitUrl) {
- checkouted = gerrit.gerritPatchsetCheckout(defaultGitUrl, defaultGitRef, "HEAD", CREDENTIALS_ID)
- } else {
- throw new Exception("Cannot checkout gerrit patchset, DEFAULT_GIT_REF is null")
- }
- }
- stage("cleanup") {
- if (checkouted) {
- sh("make clean")
- }
- }
- stage("kitchen") {
- if (checkouted) {
- if (fileExists(".kitchen.yml")) {
- common.infoMsg(".kitchen.yml found, running kitchen tests")
- ruby.ensureRubyEnv()
- if (fileExists(".travis.yml")) {
- common.infoMsg(".travis.yml found, running custom kitchen init")
- def kitchenConfigYML = readYaml(file: ".travis.yml")
- def kitchenInit = kitchenConfigYML["install"]
- def kitchenInstalled = false
- if (kitchenInit && !kitchenInit.isEmpty()) {
- for (int i = 0; i < kitchenInit.size(); i++) {
- if (kitchenInit[i].trim().startsWith("test -e Gemfile")) { //found Gemfile config
- common.infoMsg("Custom Gemfile configuration found, using them")
- ruby.installKitchen(kitchenInit[i].trim())
- kitchenInstalled = true
- }
- }
- }
- if (!kitchenInstalled) {
- ruby.installKitchen()
- }
- } else {
- common.infoMsg(".travis.yml not found, running default kitchen init")
- ruby.installKitchen()
- }
- common.infoMsg("Running part of kitchen test")
- if (KITCHEN_ENV != null && !KITCHEN_ENV.isEmpty() && KITCHEN_ENV != "") {
- def cleanEnv = KITCHEN_ENV.replaceAll("\\s?SUITE=[^\\s]*", "")
- def suite = ruby.getSuiteName(KITCHEN_ENV)
- if (suite && suite != "") {
- common.infoMsg("Running kitchen test with environment:" + KITCHEN_ENV.trim())
- ruby.runKitchenTests(cleanEnv, suite)
- } else {
- common.warningMsg("No SUITE was found. Running with all suites.")
- ruby.runKitchenTests(cleanEnv, "")
- }
- } else {
- throw new Exception("KITCHEN_ENV parameter is empty or invalid. This may indicate wrong env settings of initial test job or .travis.yml file.")
- }
+throttle(['test-formula']) {
+ node("python") {
+ try {
+ stage("checkout") {
+ if (defaultGitRef && defaultGitUrl) {
+ checkouted = gerrit.gerritPatchsetCheckout(defaultGitUrl, defaultGitRef, "HEAD", CREDENTIALS_ID)
} else {
- throw new Exception(".kitchen.yml file not found, no kitchen tests triggered.")
+ throw new Exception("Cannot checkout gerrit patchset, DEFAULT_GIT_REF is null")
}
}
- }
- } catch (Throwable e) {
- // If there was an error or exception thrown, the build failed
- currentBuild.result = "FAILURE"
- ruby.runKitchenCommand("destroy")
- throw e
- } finally {
- if (currentBuild.result == "FAILURE" && fileExists(".kitchen/logs/kitchen.log")) {
- common.errorMsg("----------------KITCHEN LOG:---------------")
- println readFile(".kitchen/logs/kitchen.log")
+ stage("cleanup") {
+ if (checkouted) {
+ sh("make clean")
+ }
+ }
+ stage("kitchen") {
+ if (checkouted) {
+ if (fileExists(".kitchen.yml")) {
+ common.infoMsg(".kitchen.yml found, running kitchen tests")
+ ruby.ensureRubyEnv()
+ if (fileExists(".travis.yml")) {
+ common.infoMsg(".travis.yml found, running custom kitchen init")
+ def kitchenConfigYML = readYaml(file: ".travis.yml")
+ def kitchenInit = kitchenConfigYML["install"]
+ def kitchenInstalled = false
+ if (kitchenInit && !kitchenInit.isEmpty()) {
+ for (int i = 0; i < kitchenInit.size(); i++) {
+ if (kitchenInit[i].trim().startsWith("test -e Gemfile")) { //found Gemfile config
+ common.infoMsg("Custom Gemfile configuration found, using them")
+ ruby.installKitchen(kitchenInit[i].trim())
+ kitchenInstalled = true
+ }
+ }
+ }
+ if (!kitchenInstalled) {
+ ruby.installKitchen()
+ }
+ } else {
+ common.infoMsg(".travis.yml not found, running default kitchen init")
+ ruby.installKitchen()
+ }
+ common.infoMsg("Running part of kitchen test")
+ if (KITCHEN_ENV != null && !KITCHEN_ENV.isEmpty() && KITCHEN_ENV != "") {
+ def cleanEnv = KITCHEN_ENV.replaceAll("\\s?SUITE=[^\\s]*", "")
+ def suite = ruby.getSuiteName(KITCHEN_ENV)
+ if (suite && suite != "") {
+ common.infoMsg("Running kitchen test with environment:" + KITCHEN_ENV.trim())
+ ruby.runKitchenTests(cleanEnv, suite)
+ } else {
+ common.warningMsg("No SUITE was found. Running with all suites.")
+ ruby.runKitchenTests(cleanEnv, "")
+ }
+ } else {
+ throw new Exception("KITCHEN_ENV parameter is empty or invalid. This may indicate wrong env settings of initial test job or .travis.yml file.")
+ }
+ } else {
+ throw new Exception(".kitchen.yml file not found, no kitchen tests triggered.")
+ }
+ }
+ }
+ } catch (Throwable e) {
+ // If there was an error or exception thrown, the build failed
+ currentBuild.result = "FAILURE"
+ ruby.runKitchenCommand("destroy")
+ throw e
+ } finally {
+ if (currentBuild.result == "FAILURE" && fileExists(".kitchen/logs/kitchen.log")) {
+ common.errorMsg("----------------KITCHEN LOG:---------------")
+ println readFile(".kitchen/logs/kitchen.log")
+ }
}
}
}
diff --git a/test-salt-formulas-pipeline.groovy b/test-salt-formulas-pipeline.groovy
index 4422ca6..1494aca 100644
--- a/test-salt-formulas-pipeline.groovy
+++ b/test-salt-formulas-pipeline.groovy
@@ -3,7 +3,6 @@
* DEFAULT_GIT_REF
* DEFAULT_GIT_URL
* CREDENTIALS_ID
- * KITCHEN_TESTS_PARALLEL
*/
def common = new com.mirantis.mk.Common()
def gerrit = new com.mirantis.mk.Gerrit()
@@ -16,13 +15,6 @@
gerritRef = null
}
-def parallelGroupSize
-try {
- parallelGroupSize = Integer.valueOf(PARALLEL_GROUP_SIZE)
-} catch (MissingPropertyException e) {
- parallelGroupSize = 8
-}
-
def defaultGitRef, defaultGitUrl
try {
defaultGitRef = DEFAULT_GIT_REF
@@ -92,15 +84,9 @@
common.infoMsg("CUSTOM_KITCHEN_ENVS not empty. Running with custom enviroments: ${kitchenEnvs}")
}
if (kitchenEnvs != null && kitchenEnvs != '') {
- def acc = 0
def kitchenTestRuns = [:]
common.infoMsg("Found " + kitchenEnvs.size() + " environment(s)")
for (int i = 0; i < kitchenEnvs.size(); i++) {
- if (acc >= parallelGroupSize) {
- parallel kitchenTestRuns
- kitchenTestRuns = [:]
- acc = 0
- }
def testEnv = kitchenEnvs[i]
kitchenTestRuns[testEnv] = {
build job: "test-salt-formulas-env", parameters: [
@@ -112,11 +98,8 @@
[$class: 'StringParameterValue', name: 'SALT_VERSION', value: SALT_VERSION]
]
}
- acc++;
}
- if (acc != 0) {
- parallel kitchenTestRuns
- }
+ parallel kitchenTestRuns
} else {
common.warningMsg(".kitchen.yml file not found, no kitchen tests triggered.")
}
diff --git a/test-salt-model-node.groovy b/test-salt-model-node.groovy
index bcc0c8b..eaf0104 100644
--- a/test-salt-model-node.groovy
+++ b/test-salt-model-node.groovy
@@ -23,44 +23,47 @@
def defaultGitUrl = DEFAULT_GIT_URL
def checkouted = false
-node("python") {
- try{
- stage("checkout") {
- if(defaultGitRef != "" && defaultGitUrl != "") {
- checkouted = gerrit.gerritPatchsetCheckout(defaultGitUrl, defaultGitRef, "HEAD", CREDENTIALS_ID)
- } else {
- throw new Exception("Cannot checkout gerrit patchset, DEFAULT_GIT_URL or DEFAULT_GIT_REF is null")
- }
- if(checkouted) {
- if (fileExists('classes/system')) {
- if (SYSTEM_GIT_URL == "") {
- ssh.prepareSshAgentKey(CREDENTIALS_ID)
- dir('classes/system') {
- remoteUrl = git.getGitRemote()
- ssh.ensureKnownHosts(remoteUrl)
- }
- ssh.agentSh("git submodule init; git submodule sync; git submodule update --recursive")
- } else {
- dir('classes/system') {
- if (!gerrit.gerritPatchsetCheckout(SYSTEM_GIT_URL, SYSTEM_GIT_REF, "HEAD", CREDENTIALS_ID)) {
- common.errorMsg("Failed to obtain system reclass with url: ${SYSTEM_GIT_URL} and ${SYSTEM_GIT_REF}")
+
+throttle(['test-model']) {
+ node("python") {
+ try{
+ stage("checkout") {
+ if(defaultGitRef != "" && defaultGitUrl != "") {
+ checkouted = gerrit.gerritPatchsetCheckout(defaultGitUrl, defaultGitRef, "HEAD", CREDENTIALS_ID)
+ } else {
+ throw new Exception("Cannot checkout gerrit patchset, DEFAULT_GIT_URL or DEFAULT_GIT_REF is null")
+ }
+ if(checkouted) {
+ if (fileExists('classes/system')) {
+ if (SYSTEM_GIT_URL == "") {
+ ssh.prepareSshAgentKey(CREDENTIALS_ID)
+ dir('classes/system') {
+ remoteUrl = git.getGitRemote()
+ ssh.ensureKnownHosts(remoteUrl)
+ }
+ ssh.agentSh("git submodule init; git submodule sync; git submodule update --recursive")
+ } else {
+ dir('classes/system') {
+ if (!gerrit.gerritPatchsetCheckout(SYSTEM_GIT_URL, SYSTEM_GIT_REF, "HEAD", CREDENTIALS_ID)) {
+ common.errorMsg("Failed to obtain system reclass with url: ${SYSTEM_GIT_URL} and ${SYSTEM_GIT_REF}")
+ }
}
}
}
}
}
- }
- stage("test node") {
- if (checkouted) {
- def workspace = common.getWorkspace()
- saltModelTesting.setupAndTestNode(NODE_TARGET, CLUSTER_NAME, EXTRA_FORMULAS, workspace, FORMULAS_SOURCE, FORMULAS_REVISION, MAX_CPU_PER_JOB.toInteger())
+ stage("test node") {
+ if (checkouted) {
+ def workspace = common.getWorkspace()
+ saltModelTesting.setupAndTestNode(NODE_TARGET, CLUSTER_NAME, EXTRA_FORMULAS, workspace, FORMULAS_SOURCE, FORMULAS_REVISION, MAX_CPU_PER_JOB.toInteger())
+ }
}
+ } catch (Throwable e) {
+ // If there was an error or exception thrown, the build failed
+ currentBuild.result = "FAILURE"
+ currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
+ throw e
}
- } catch (Throwable e) {
- // If there was an error or exception thrown, the build failed
- currentBuild.result = "FAILURE"
- currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
- throw e
}
}
diff --git a/test-salt-models-pipeline.groovy b/test-salt-models-pipeline.groovy
index 32ef43c..7099954 100644
--- a/test-salt-models-pipeline.groovy
+++ b/test-salt-models-pipeline.groovy
@@ -80,17 +80,23 @@
stage("test-nodes") {
if(checkouted) {
- def nodes = sh(script: "find ./nodes -type f -name '${config_node_name_pattern}*.yml'", returnStdout: true).tokenize()
+ def infraYMLs = sh(script: "find ./classes/ -regex '.*cluster/[-_a-zA-Z0-9]*/[infra/]*init\\.yml' -exec grep -il 'cluster_name' {} \\;", returnStdout: true).tokenize()
def branches = [:]
- def acc = 0
- for (int i = 0; i < nodes.size(); i++) {
- def testTarget = sh(script: "basename ${nodes[i]} .yml", returnStdout: true).trim()
- def clusterName = testTarget.substring(testTarget.indexOf(".") + 1, testTarget.lastIndexOf("."))
- if (acc >= PARALLEL_NODE_GROUP_SIZE.toInteger()) {
- parallel branches
- branches = [:]
- acc = 0
+ for (int i = 0; i < infraYMLs.size(); i++) {
+ def infraYMLConfig = readYaml(file: infraYMLs[i])
+ if(!infraYMLConfig["parameters"].containsKey("_param")){
+ common.warningMsg("ERROR: Cannot find soft params (_param) in file " + infraYMLs[i] + " to obtain cluster info. Skipping test.")
+ continue
}
+ def infraParams = infraYMLConfig["parameters"]["_param"];
+ if(!infraParams.containsKey("infra_config_hostname") || !infraParams.containsKey("cluster_name") || !infraParams.containsKey("cluster_domain")){
+ common.warningMsg("ERROR: Cannot find _param:infra_config_hostname or _param:cluster_name or _param:cluster_domain in file " + infraYMLs[i] + " to obtain cluster info. Skipping test.")
+ continue
+ }
+ def clusterName = infraParams["cluster_name"]
+ def clusterDomain = infraParams["cluster_domain"]
+ def configHostname = infraParams["infra_config_hostname"]
+ def testTarget = String.format("%s.%s", configHostname, clusterDomain)
branches[testTarget] = {
build job: "test-salt-model-node", parameters: [
@@ -106,11 +112,8 @@
[$class: 'StringParameterValue', name: 'MAX_CPU_PER_JOB', value: MAX_CPU_PER_JOB],
[$class: 'StringParameterValue', name: 'SYSTEM_GIT_REF', value: SYSTEM_GIT_REF]
]}
- acc++;
}
- if (acc != 0) {
- parallel branches
- }
+ parallel branches
}
}
} catch (Throwable e) {