Merge "Remove extra step for k8s state."
diff --git a/aptly-promote-pipeline.groovy b/aptly-promote-pipeline.groovy
index 00d41b8..eb10f40 100644
--- a/aptly-promote-pipeline.groovy
+++ b/aptly-promote-pipeline.groovy
@@ -26,8 +26,9 @@
   } catch (Throwable e) {
      // If there was an error or exception thrown, the build failed
      currentBuild.result = "FAILURE"
+     currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
      throw e
   } finally {
      common.sendNotification(currentBuild.result,"",["slack"])
   }
-}
\ No newline at end of file
+}
diff --git a/build-debian-packages-influxdb-relay.groovy b/build-debian-packages-influxdb-relay.groovy
new file mode 100644
index 0000000..6040849
--- /dev/null
+++ b/build-debian-packages-influxdb-relay.groovy
@@ -0,0 +1,106 @@
+def common = new com.mirantis.mk.Common()
+def git = new com.mirantis.mk.Git()
+def artifactory = new com.mirantis.mk.Artifactory()
+def aptly = new com.mirantis.mk.Aptly()
+
+def timestamp = common.getDatetime()
+def version = "1.0~${timestamp}"
+
+node('docker') {
+    try{
+
+        stage("cleanup") {
+            sh("rm -rf * || true")
+        }
+
+        def workingDir = "src/github.com/influxdata"
+        stage("checkout") {
+            git.checkoutGitRepository(
+                "${workingDir}/influxdb-relay",
+                "${SOURCE_URL}",
+                SOURCE_BRANCH,
+                SOURCE_CREDENTIALS,
+                true,
+                30,
+                1
+            )
+        }
+
+        try {
+
+            def jenkinsUID = sh (
+                script: 'id -u',
+                returnStdout: true
+            ).trim()
+            def imgName = "${OS}-${DIST}-${ARCH}"
+            def img
+
+            stage("build image") {
+                img = docker.build(
+                    "${imgName}:${timestamp}",
+                    [
+                        "--build-arg uid=${jenkinsUID}",
+                        "--build-arg timestamp=${timestamp}",
+                        "-f ${workingDir}/influxdb-relay/docker/${OS}-${DIST}-${ARCH}.Dockerfile",
+                        "."
+                    ].join(' ')
+                )
+            }
+            stage("build package") {
+                img.inside{
+                    sh("""wget https://storage.googleapis.com/golang/go1.9.linux-amd64.tar.gz &&
+                        tar xf go1.9.linux-amd64.tar.gz &&
+                        export GOROOT=\$PWD/go &&
+                        export PATH=\$PATH:\$GOROOT/bin &&
+                        export GOPATH=\$PWD &&
+                        cd src/github.com/influxdata/influxdb-relay &&
+                        ./build.py --package --version=\"${version}\" --platform=linux --arch=amd64""")
+                }
+                archiveArtifacts artifacts: "${workingDir}/influxdb-relay/build/*.deb"
+            }
+            if (UPLOAD_APTLY.toBoolean()) {
+                lock("aptly-api") {
+                    stage("upload") {
+                        def buildSteps = [:]
+                        def debFiles = sh script: "ls ${workingDir}/influxdb-relay/build/*.deb", returnStdout: true
+                        def debFilesArray = debFiles.trim().tokenize()
+                        def workspace = common.getWorkspace()
+                        for (int i = 0; i < debFilesArray.size(); i++) {
+
+                            def debFile = debFilesArray[i];
+                            buildSteps[debFiles[i]] = aptly.uploadPackageStep(
+                                "${workspace}/"+debFile,
+                                APTLY_URL,
+                                APTLY_REPO,
+                                true
+                            )
+                        }
+                        parallel buildSteps
+                    }
+                    stage("publish") {
+                        aptly.snapshotRepo(APTLY_URL, APTLY_REPO, timestamp)
+                        aptly.publish(APTLY_URL)
+                    }
+                }
+            }
+
+        } catch (Exception e) {
+            currentBuild.result = 'FAILURE'
+            println "Cleaning up docker images"
+            sh("docker images | grep -E '[-:\\ ]+${timestamp}[\\.\\ /\$]+' | awk '{print \$3}' | xargs docker rmi -f || true")
+            throw e
+        }
+
+    } catch (Throwable e) {
+       // If there was an exception thrown, the build failed
+       currentBuild.result = "FAILURE"
+       currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
+       throw e
+    } finally {
+       common.sendNotification(currentBuild.result,"",["slack"])
+
+       if (currentBuild.result != 'FAILURE') {
+          sh("rm -rf *")
+       }
+    }
+}
diff --git a/build-debian-packages-jmx-exporter.groovy b/build-debian-packages-jmx-exporter.groovy
index d356f69..71f626e 100644
--- a/build-debian-packages-jmx-exporter.groovy
+++ b/build-debian-packages-jmx-exporter.groovy
@@ -69,6 +69,7 @@
     } catch (Throwable e) {
        // If there was an exception thrown, the build failed
        currentBuild.result = "FAILURE"
+       currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
        throw e
     } finally {
        common.sendNotification(currentBuild.result,"",["slack"])
diff --git a/build-debian-packages-libvirt-exporter.groovy b/build-debian-packages-libvirt-exporter.groovy
index eb109cb..d373961 100644
--- a/build-debian-packages-libvirt-exporter.groovy
+++ b/build-debian-packages-libvirt-exporter.groovy
@@ -31,7 +31,7 @@
             }
         }
 
-        def img = dockerLib.getImage("tcpcloud/debian-build-ubuntu-xenial")
+        def img = dockerLib.getImage("tcpcloud/debian-build-ubuntu-${DIST}")
         stage("build package") {
             img.inside("-u root:root") {
                 sh("apt-get update && apt-get install ruby ruby-dev && gem install fpm")
@@ -72,6 +72,7 @@
     } catch (Throwable e) {
        // If there was an exception thrown, the build failed
        currentBuild.result = "FAILURE"
+       currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
        throw e
     } finally {
        common.sendNotification(currentBuild.result,"",["slack"])
diff --git a/build-debian-packages-pipeline.groovy b/build-debian-packages-pipeline.groovy
index bc4ed38..9e40944 100644
--- a/build-debian-packages-pipeline.groovy
+++ b/build-debian-packages-pipeline.groovy
@@ -116,8 +116,9 @@
   } catch (Throwable e) {
      // If there was an error or exception thrown, the build failed
      currentBuild.result = "FAILURE"
+     currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
      throw e
   } finally {
      common.sendNotification(currentBuild.result,"",["slack"])
   }
-}
\ No newline at end of file
+}
diff --git a/build-debian-packages-telegraf.groovy b/build-debian-packages-telegraf.groovy
index b946688..dde098e 100644
--- a/build-debian-packages-telegraf.groovy
+++ b/build-debian-packages-telegraf.groovy
@@ -81,6 +81,10 @@
                         aptly.snapshotRepo(APTLY_URL, APTLY_REPO, timestamp)
                         aptly.publish(APTLY_URL)
                     }
+
+                    stage("rebuild docker images") {
+                        build job: "docker-build-images-prometheus", parameters: []
+                    }
                 }
             }
 
@@ -94,6 +98,7 @@
     } catch (Throwable e) {
        // If there was an exception thrown, the build failed
        currentBuild.result = "FAILURE"
+       currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
        throw e
     } finally {
        common.sendNotification(currentBuild.result,"",["slack"])
diff --git a/build-extra-dpdk-pipeline.groovy b/build-extra-dpdk-pipeline.groovy
index 39928dc..357a9ad 100644
--- a/build-extra-dpdk-pipeline.groovy
+++ b/build-extra-dpdk-pipeline.groovy
@@ -59,8 +59,9 @@
      } catch (Throwable e) {
        // If there was an error or exception thrown, the build failed
        currentBuild.result = "FAILURE"
+       currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
        throw e
     } finally {
        common.sendNotification(currentBuild.result,"",["slack"])
     }
-}
\ No newline at end of file
+}
diff --git a/ceph-enforce-weights.groovy b/ceph-enforce-weights.groovy
new file mode 100644
index 0000000..4e06322
--- /dev/null
+++ b/ceph-enforce-weights.groovy
@@ -0,0 +1,60 @@
+/**
+ *
+ * Enforce OSD weights from model
+ *
+ * Required parameters:
+ *  SALT_MASTER_URL             URL of Salt master
+ *  SALT_MASTER_CREDENTIALS     Credentials to the Salt API
+ *
+ *  ADMIN_HOST                  Host (minion id) with admin keyring
+ *
+ */
+
+common = new com.mirantis.mk.Common()
+salt = new com.mirantis.mk.Salt()
+
+// configure global variables
+def saltMaster
+
+def runCephCommand(master, cmd) {
+    return salt.cmdRun(master, ADMIN_HOST, cmd)
+}
+
+def grains
+
+node("python") {
+
+    stage('Load cluster information') {
+        // create connection to salt master
+        saltMaster = salt.connection(SALT_MASTER_URL, SALT_MASTER_CREDENTIALS)
+
+        // get list of disk from grains
+        grains = salt.getGrain(saltMaster, 'I@ceph:osd')['return'][0]
+        common.prettyPrint(grains)
+
+    }
+
+    stage('Enforce weights on OSDs') {
+
+        for (host in grains) {
+            // parse grains
+            def hostGrains = host.value
+            common.prettyPrint(hostGrains)
+
+            def hostname = hostGrains.host
+            def salt_id = hostGrains.id
+            def ceph_host_id = hostGrains.ceph_osd_host_id
+
+            common.infoMsg("Setting weights on host ${hostname} (${salt_id}), ceph_id ${ceph_host_id}")
+            for (disk in hostGrains.ceph_osd_disk) {
+                def osd_id = ceph_host_id + disk.key
+                print(osd_id)
+                print(disk.value)
+                print(disk.key)
+                def cmd = "ceph osd crush set ${osd_id} ${disk.value.weight} host=${hostname}"
+                print(runCephCommand(saltMaster, cmd))
+            }
+        }
+
+    }
+}
diff --git a/ceph-remove-osd.groovy b/ceph-remove-osd.groovy
index b171855..ac102eb 100644
--- a/ceph-remove-osd.groovy
+++ b/ceph-remove-osd.groovy
@@ -19,6 +19,7 @@
 // configure global variables
 def saltMaster
 def flags = CLUSTER_FLAGS.tokenize(',')
+def osds = OSD.tokenize(',')
 
 def runCephCommand(master, cmd) {
     return salt.cmdRun(master, ADMIN_HOST, cmd)
@@ -39,12 +40,22 @@
 
     // get list of disk at the osd
     def pillar_disks = salt.getPillar(saltMaster, HOST, 'ceph:osd:disk')['return'][0].values()[0]
-    def hostname = salt.getPillar(saltMaster, HOST, 'linux:system:name')['return'][0].values()[0]
-    def hostname_id = hostname.replaceAll('osd', '')
+    def hostname_id = salt.getPillar(saltMaster, HOST, 'ceph:osd:host_id')['return'][0].values()[0]
     def osd_ids = []
 
-    for (i in pillar_disks.keySet()) {
-        osd_ids.add('osd.' + (hostname_id + i).toInteger())
+    print("host_id is ${hostname_id}")
+    print("osds:")
+    print(osds)
+
+    for (i in pillar_disks) {
+        def osd_id = (hostname_id + i.key).toInteger().toString()
+        print("Evaluating ${osd_id}")
+        if (osd_id in osds || OSD == '*') {
+            osd_ids.add('osd.' + osd_id)
+            print("Will delete " + osd_id)
+        } else {
+            print("Skipping " + osd_id)
+        }
     }
 
     // `ceph osd out <id> <id>`
diff --git a/cicd-lab-pipeline.groovy b/cicd-lab-pipeline.groovy
index 6f228fe..8902e1f 100644
--- a/cicd-lab-pipeline.groovy
+++ b/cicd-lab-pipeline.groovy
@@ -227,6 +227,7 @@
                 println "Waiting for postgresql database to come up.."
                 salt.cmdRun(saltMaster, 'I@postgresql:client', 'while true; do if docker service logs postgresql_db | grep "ready to accept"; then break; else sleep 5; fi; done')
             }
+            // XXX: first run usually fails on some inserts, but we need to create the databases first
             salt.enforceState(saltMaster, 'I@postgresql:client', 'postgresql.client', true, false)
 
             // Setup postgres database with integration between
@@ -249,7 +250,11 @@
                 println 'Waiting for Elasticsearch to come up..'
                 salt.cmdRun(saltMaster, 'I@elasticsearch:client', 'while true; do curl -sf 172.16.10.254:9200 >/dev/null && break; done')
             }
-            salt.enforceState(saltMaster, 'I@elasticsearch:client', 'elasticsearch.client', true)
+            retry(3){
+              sleep(10)
+              // XXX: first run sometimes fails on update indexes, so we need to wait
+              salt.enforceState(saltMaster, 'I@elasticsearch:client', 'elasticsearch.client', true)
+            }
         }
 
         stage("Finalize") {
@@ -300,7 +305,6 @@
             salt.enforceState(saltMaster, 'I@nginx:server', 'nginx')
 
             def failedSvc = salt.cmdRun(saltMaster, '*', """systemctl --failed | grep -E 'loaded[ \t]+failed' && echo 'Command execution failed' || true""")
-            print common.prettyPrint(failedSvc)
             if (failedSvc =~ /Command execution failed/) {
                 common.errorMsg("Some services are not running. Environment may not be fully functional!")
             }
@@ -333,6 +337,7 @@
     } catch (Throwable e) {
         // If there was an error or exception thrown, the build failed
         currentBuild.result = "FAILURE"
+        currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
         throw e
     } finally {
         // Cleanup
diff --git a/cloud-deploy-pipeline.groovy b/cloud-deploy-pipeline.groovy
index 2380ccf..291f799 100644
--- a/cloud-deploy-pipeline.groovy
+++ b/cloud-deploy-pipeline.groovy
@@ -65,6 +65,8 @@
 def saltMaster
 def venv
 
+def ipRegex = "\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}"
+
 if (STACK_TYPE == 'aws') {
     def aws_env_vars
 }
@@ -127,32 +129,30 @@
                 }
                 // launch stack
                 if (STACK_REUSE.toBoolean() == false) {
-                    stage('Launch new Heat stack') {
-                        envParams = [
-                            'cluster_zone': HEAT_STACK_ZONE,
-                            'cluster_public_net': HEAT_STACK_PUBLIC_NET
-                        ]
+                    envParams = [
+                        'cluster_zone': HEAT_STACK_ZONE,
+                        'cluster_public_net': HEAT_STACK_PUBLIC_NET
+                    ]
 
-                        // set reclass repo in heat env
-                        try {
-                            envParams.put('cfg_reclass_branch', STACK_RECLASS_BRANCH)
-                            envParams.put('cfg_reclass_address', STACK_RECLASS_ADDRESS)
-                        } catch (MissingPropertyException e) {
-                            common.infoMsg("Property STACK_RECLASS_BRANCH or STACK_RECLASS_ADDRESS not found! Using default values from template.")
-                        }
-
-                        def legacy_env = false;
-                        //FIXME:
-                        if (false && STACK_TEMPLATE.startsWith('virtual_') && !STACK_TEMPLATE.contains('aio')) {
-                            legacy_env = true;
-                        }
-
-                        openstack.createHeatStack(openstackCloud, STACK_NAME, STACK_TEMPLATE, envParams, HEAT_STACK_ENVIRONMENT, venv, legacy_env)
+                    // set reclass repo in heat env
+                    try {
+                        envParams.put('cfg_reclass_branch', STACK_RECLASS_BRANCH)
+                        envParams.put('cfg_reclass_address', STACK_RECLASS_ADDRESS)
+                    } catch (MissingPropertyException e) {
+                        common.infoMsg("Property STACK_RECLASS_BRANCH or STACK_RECLASS_ADDRESS not found! Using default values from template.")
                     }
+
+                    openstack.createHeatStack(openstackCloud, STACK_NAME, STACK_TEMPLATE, envParams, HEAT_STACK_ENVIRONMENT, venv)
                 }
 
                 // get SALT_MASTER_URL
                 saltMasterHost = openstack.getHeatStackOutputParam(openstackCloud, STACK_NAME, 'salt_master_ip', venv)
+                // check that saltMasterHost is valid
+                if (!saltMasterHost || !saltMasterHost.matches(ipRegex)) {
+                    common.errorMsg("saltMasterHost is not a valid ip, value is: ${saltMasterHost}")
+                    throw new Exception("saltMasterHost is not a valid ip")
+                }
+
                 currentBuild.description = "${STACK_NAME} ${saltMasterHost}"
 
                 SALT_MASTER_URL = "http://${saltMasterHost}:6969"
@@ -204,6 +204,12 @@
 
                 // get outputs
                 saltMasterHost = aws.getOutputs(venv, aws_env_vars, STACK_NAME, 'SaltMasterIP')
+                // check that saltMasterHost is valid
+                if (!saltMasterHost || !saltMasterHost.matches(ipRegex)) {
+                    common.errorMsg("saltMasterHost is not a valid ip, value is: ${saltMasterHost}")
+                    throw new Exception("saltMasterHost is not a valid ip")
+                }
+
                 currentBuild.description = "${STACK_NAME} ${saltMasterHost}"
                 SALT_MASTER_URL = "http://${saltMasterHost}:6969"
 
@@ -242,8 +248,18 @@
 
         // install ceph
         if (common.checkContains('STACK_INSTALL', 'ceph')) {
-            orchestrate.installCephMon(saltMaster)
-            orchestrate.installCephOsd(saltMaster)
+            stage('Install Ceph MONs') {
+                orchestrate.installCephMon(saltMaster)
+            }
+
+            stage('Install Ceph OSDs') {
+                orchestrate.installCephOsd(saltMaster)
+            }
+
+
+            stage('Install Ceph clients') {
+                orchestrate.installCephClient(saltMaster)
+            }
         }
 
         // install k8s
@@ -357,25 +373,8 @@
         def artifacts_dir = '_artifacts/'
 
         if (common.checkContains('STACK_TEST', 'k8s')) {
-            stage('Run k8s bootstrap tests') {
-                def image = 'tomkukral/k8s-scripts'
-                def output_file = image.replaceAll('/', '-') + '.output'
-
-                // run image
-                test.runConformanceTests(saltMaster, TEST_K8S_API_SERVER, image)
-
-                // collect output
-                sh "mkdir -p ${artifacts_dir}"
-                file_content = salt.getFileContent(saltMaster, 'ctl01*', '/tmp/' + output_file)
-                writeFile file: "${artifacts_dir}${output_file}", text: file_content
-                sh "cat ${artifacts_dir}${output_file}"
-
-                // collect artifacts
-                archiveArtifacts artifacts: "${artifacts_dir}${output_file}"
-            }
-
             stage('Run k8s conformance e2e tests') {
-                def image = K8S_CONFORMANCE_IMAGE
+                def image = TEST_K8S_CONFORMANCE_IMAGE
                 def output_file = image.replaceAll('/', '-') + '.output'
 
                 // run image
@@ -409,6 +408,17 @@
             }
         }
 
+
+        if (common.checkContains('STACK_TEST', 'ceph')) {
+            stage('Run infra tests') {
+                def cmd = "apt-get install -y python-pip && pip install -r /usr/share/salt-formulas/env/ceph/files/testinfra/requirements.txt && python -m pytest --junitxml=/root/report.xml /usr/share/salt-formulas/env/ceph/files/testinfra/"
+                salt.cmdRun(saltMaster, 'I@salt:master', cmd, false)
+                writeFile(file: 'report.xml', text: salt.getFileContent(saltMaster, 'I@salt:master', '/root/report.xml'))
+                junit(keepLongStdio: true, testResults: 'report.xml')
+            }
+        }
+
+
         if (common.checkContains('STACK_INSTALL', 'finalize')) {
             stage('Finalize') {
                 salt.runSaltProcessStep(saltMaster, '*', 'state.apply', [], null, true)
diff --git a/docker-build-image-pipeline.groovy b/docker-build-image-pipeline.groovy
index a22439a..c23f1c3 100644
--- a/docker-build-image-pipeline.groovy
+++ b/docker-build-image-pipeline.groovy
@@ -48,6 +48,7 @@
   } catch (Throwable e) {
      // If there was an error or exception thrown, the build failed
      currentBuild.result = "FAILURE"
+     currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
      throw e
   } finally {
      common.sendNotification(currentBuild.result,"",["slack"])
diff --git a/gating-pipeline.groovy b/gating-pipeline.groovy
index c113d28..1b62b53 100644
--- a/gating-pipeline.groovy
+++ b/gating-pipeline.groovy
@@ -68,6 +68,7 @@
   } catch (Throwable e) {
      // If there was an error or exception thrown, the build failed
      currentBuild.result = "FAILURE"
+     currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
      throw e
   }
 }
diff --git a/generate-cookiecutter-products.groovy b/generate-cookiecutter-products.groovy
index f6090c0..87b2c8e 100644
--- a/generate-cookiecutter-products.groovy
+++ b/generate-cookiecutter-products.groovy
@@ -153,8 +153,8 @@
             smc['DEPLOY_NETWORK_GW'] = templateContext['default_context']['deploy_network_gateway']
             smc['DEPLOY_NETWORK_NETMASK'] = templateContext['default_context']['deploy_network_netmask']
             smc['DNS_SERVERS'] = templateContext['default_context']['dns_server01']
-            smc['CICD_CONTROL_ADDRESS'] = templateContext['default_context']['cicd_control_address']
-            smc['INFRA_CONFIG_ADDRESS'] = templateContext['default_context']['infra_config_address']
+            smc['CICD_CONTROL_ADDRESS'] = templateContext['default_context']['cicd_control_vip_address']
+            smc['INFRA_CONFIG_ADDRESS'] = templateContext['default_context']['salt_master_address']
 
             for (i in common.entries(smc)) {
                 sh "sed -i \"s,export ${i[0]}=.*,export ${i[0]}=${i[1]},\" user_data.sh"
@@ -184,6 +184,7 @@
     } catch (Throwable e) {
          // If there was an error or exception thrown, the build failed
          currentBuild.result = "FAILURE"
+         currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
          throw e
     } finally {
         stage ('Clean workspace directories') {
diff --git a/git-merge-branch.groovy b/git-merge-branch.groovy
deleted file mode 100644
index e69de29..0000000
--- a/git-merge-branch.groovy
+++ /dev/null
diff --git a/git-merge-branches-pipeline.groovy b/git-merge-branches-pipeline.groovy
new file mode 100644
index 0000000..8293f87
--- /dev/null
+++ b/git-merge-branches-pipeline.groovy
@@ -0,0 +1,29 @@
+/**
+ * Git merge branches pipeline
+ * REPO_URL - Repository URL
+ * TARGET_BRANCH - Target branch for merging
+ * SOURCE_BRANCH - The branch will be merged to TARGET_BRANCH
+ * CREDENTIALS_ID - ID of the credentials to use
+ *
+**/
+
+def common = new com.mirantis.mk.Common()
+def git = new com.mirantis.mk.Git()
+node {
+  try{
+    stage("checkout") {
+      git.checkoutGitRepository('repo', REPO_URL, TARGET_BRANCH, IMAGE_CREDENTIALS_ID)
+    }
+    stage("merge") {
+      dir("repo"){
+        sh("git fetch origin/${SOURCE_BRANCH} && git merge ${SOURCE_BRANCH} && git push origin ${TARGET_BRANCH}")
+      }
+    }
+  } catch (Throwable e) {
+     // If there was an error or exception thrown, the build failed
+     currentBuild.result = "FAILURE"
+     currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
+     throw e
+  }
+}
+
diff --git a/git-mirror-2way-pipeline.groovy b/git-mirror-2way-pipeline.groovy
index c1c808c..c20af8f 100644
--- a/git-mirror-2way-pipeline.groovy
+++ b/git-mirror-2way-pipeline.groovy
@@ -38,6 +38,7 @@
     } catch (Throwable e) {
        // If there was an error or exception thrown, the build failed
        currentBuild.result = "FAILURE"
+       currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
        throw e
     } finally {
        common.sendNotification(currentBuild.result,"",["slack"])
diff --git a/git-mirror-pipeline.groovy b/git-mirror-pipeline.groovy
index 10fa9a0..5035fe6 100644
--- a/git-mirror-pipeline.groovy
+++ b/git-mirror-pipeline.groovy
@@ -17,6 +17,7 @@
     } catch (Throwable e) {
        // If there was an error or exception thrown, the build failed
        currentBuild.result = "FAILURE"
+       currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
        throw e
     } finally {
        common.sendNotification(currentBuild.result,"",["slack"])
diff --git a/opencontrail-upgrade.groovy b/opencontrail-upgrade.groovy
index 6bcb788..00b0e7f 100644
--- a/opencontrail-upgrade.groovy
+++ b/opencontrail-upgrade.groovy
@@ -273,6 +273,7 @@
         } catch (Throwable e) {
             // If there was an error or exception thrown, the build failed
             currentBuild.result = "FAILURE"
+            currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
             throw e
         }
     }
@@ -484,6 +485,7 @@
         } catch (Throwable e) {
             // If there was an error or exception thrown, the build failed
             currentBuild.result = "FAILURE"
+            currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
             throw e
         }
     }
diff --git a/openstack-compute-install.groovy b/openstack-compute-install.groovy
index 8e53396..535cde0 100644
--- a/openstack-compute-install.groovy
+++ b/openstack-compute-install.groovy
@@ -86,6 +86,7 @@
     } catch (Throwable e) {
         // If there was an error or exception thrown, the build failed
         currentBuild.result = "FAILURE"
+        currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
         throw e
     }
 }
diff --git a/openstack-compute-upgrade.groovy b/openstack-compute-upgrade.groovy
index 4a04531..095697d 100644
--- a/openstack-compute-upgrade.groovy
+++ b/openstack-compute-upgrade.groovy
@@ -199,6 +199,7 @@
     } catch (Throwable e) {
         // If there was an error or exception thrown, the build failed
         currentBuild.result = "FAILURE"
+        currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
         throw e
     }
 }
diff --git a/ovs-gateway-upgrade.groovy b/ovs-gateway-upgrade.groovy
index 70037a4..9cfa215 100644
--- a/ovs-gateway-upgrade.groovy
+++ b/ovs-gateway-upgrade.groovy
@@ -148,6 +148,7 @@
     } catch (Throwable e) {
         // If there was an error or exception thrown, the build failed
         currentBuild.result = "FAILURE"
+        currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
         throw e
     }
 }
diff --git a/release-salt-formulas-pipeline.groovy b/release-salt-formulas-pipeline.groovy
index 7660636..4aaaec9 100644
--- a/release-salt-formulas-pipeline.groovy
+++ b/release-salt-formulas-pipeline.groovy
@@ -24,6 +24,7 @@
   } catch (Throwable e) {
      // If there was an error or exception thrown, the build failed
      currentBuild.result = "FAILURE"
+     currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
      throw e
   } finally {
      common.sendNotification(currentBuild.result,"",["slack"])
diff --git a/rollout-config-change.groovy b/rollout-config-change.groovy
index 06219a3..5c83eee 100644
--- a/rollout-config-change.groovy
+++ b/rollout-config-change.groovy
@@ -58,11 +58,11 @@
         }
 
         stage('Promote config change in repo') {
-            build job: "gerrit-merge-branch", parameters: [
-              [$class: 'StringParameterValue', name: 'MODEL_REPO_URL', value: MODEL_REPO_URL],
-              [$class: 'StringParameterValue', name: 'MODEL_REPO_CREDENTIALS', value: MODEL_REPO_CREDENTIALS],
-              [$class: 'StringParameterValue', name: 'MODEL_REPO_SOURCE_BRANCH', value: MODEL_REPO_SOURCE_BRANCH],
-              [$class: 'StringParameterValue', name: 'MODEL_REPO_TARGET_BRANCH', value: MODEL_REPO_TARGET_BRANCH],
+            build job: "git-merge-branches", parameters: [
+              [$class: 'StringParameterValue', name: 'REPO_URL', value: MODEL_REPO_URL],
+              [$class: 'StringParameterValue', name: 'CREDENTIALS_ID', value: MODEL_REPO_CREDENTIALS],
+              [$class: 'StringParameterValue', name: 'SOURCE_BRANCH', value: MODEL_REPO_SOURCE_BRANCH],
+              [$class: 'StringParameterValue', name: 'TARGET_BRANCH', value: MODEL_REPO_TARGET_BRANCH],
             ]
         }
 
@@ -79,7 +79,7 @@
         }
 
         stage('Test config change on prod env') {
-            build job: "deploy-test-service", parameters: [
+            def result = build job: "deploy-test-service", parameters: [
               [$class: 'StringParameterValue', name: 'SALT_MASTER_URL', value: PRD_SALT_MASTER_URL],
               [$class: 'StringParameterValue', name: 'SALT_MASTER_CREDENTIALS', value: PRD_SALT_MASTER_CREDENTIALS],
               [$class: 'StringParameterValue', name: 'TEST_SERVICE', value: TEST_SERVICE],
diff --git a/test-cookiecutter-reclass.groovy b/test-cookiecutter-reclass.groovy
index 4b93751..e59f0ce 100644
--- a/test-cookiecutter-reclass.groovy
+++ b/test-cookiecutter-reclass.groovy
@@ -80,9 +80,10 @@
     def templateEnv = "${env.WORKSPACE}"
     def content = readFile(file: "${templateEnv}/contexts/${modelFile}.yml")
     def templateContext = readYaml text: content
+    def clusterName = templateContext.default_context.cluster_name
     def clusterDomain = templateContext.default_context.cluster_domain
     git.checkoutGitRepository("${testEnv}/classes/system", RECLASS_MODEL_URL, RECLASS_MODEL_BRANCH, CREDENTIALS_ID)
-    saltModelTesting.setupAndTestNode("cfg01.${clusterDomain}", EXTRA_FORMULAS, testEnv)
+    saltModelTesting.setupAndTestNode("cfg01.${clusterDomain}", clusterName, EXTRA_FORMULAS, testEnv)
 }
 
 def gerritRef
@@ -122,6 +123,10 @@
             python.setupCookiecutterVirtualenv(cutterEnv)
         }
 
+        stage("Check workflow_definition") {
+            sh "python ${env.WORKSPACE}/workflow_definition_test.py"
+        }
+
         def contextFiles
         dir("${templateEnv}/contexts") {
             contextFiles = findFiles(glob: "*.yml")
@@ -164,6 +169,7 @@
 
     } catch (Throwable e) {
          currentBuild.result = "FAILURE"
+         currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
          throw e
     } finally {
          def dummy = "dummy"
diff --git a/test-salt-formulas-env.groovy b/test-salt-formulas-env.groovy
new file mode 100644
index 0000000..1a1f27c
--- /dev/null
+++ b/test-salt-formulas-env.groovy
@@ -0,0 +1,97 @@
+/**
+ * Test salt formulas pipeline
+ *  DEFAULT_GIT_REF
+ *  DEFAULT_GIT_URL
+ *  CREDENTIALS_ID
+ */
+def common = new com.mirantis.mk.Common()
+def ruby = new com.mirantis.mk.Ruby()
+def gerrit = new com.mirantis.mk.Gerrit()
+
+def defaultGitRef, defaultGitUrl
+try {
+  defaultGitRef = DEFAULT_GIT_REF
+  defaultGitUrl = DEFAULT_GIT_URL
+} catch (MissingPropertyException e) {
+  defaultGitRef = null
+  defaultGitUrl = null
+}
+
+def checkouted = false
+
+node("python") {
+  try {
+    stage("checkout") {
+      if (defaultGitRef && defaultGitUrl) {
+        checkouted = gerrit.gerritPatchsetCheckout(defaultGitUrl, defaultGitRef, "HEAD", CREDENTIALS_ID)
+      } else {
+        throw new Exception("Cannot checkout gerrit patchset, DEFAULT_GIT_REF is null")
+      }
+    }
+    stage("test") {
+      if (checkouted) {
+        sh("make clean")
+        sh("[ $SALT_VERSION != 'latest' ] || export SALT_VERSION=''; make test")
+      }
+    }
+    stage("kitchen") {
+      if (checkouted) {
+        if (fileExists(".kitchen.yml")) {
+          common.infoMsg(".kitchen.yml found, running kitchen tests")
+          ruby.ensureRubyEnv()
+          if (fileExists(".travis.yml")) {
+            common.infoMsg(".travis.yml found, running custom kitchen init")
+            def kitchenConfigYML = readYaml(file: ".travis.yml")
+            def kitchenInit = kitchenConfigYML["install"]
+            def kitchenInstalled = false
+            if (kitchenInit && !kitchenInit.isEmpty()) {
+              for (int i = 0; i < kitchenInit.size(); i++) {
+                if (kitchenInit[i].trim().startsWith("test -e Gemfile")) { //found Gemfile config
+                  common.infoMsg("Custom Gemfile configuration found, using it")
+                  ruby.installKitchen(kitchenInit[i].trim())
+                  kitchenInstalled = true
+                }
+              }
+            }
+            if (!kitchenInstalled) {
+              ruby.installKitchen()
+            }
+          } else {
+            common.infoMsg(".travis.yml not found, running default kitchen init")
+            ruby.installKitchen()
+          }
+          common.infoMsg("Running part of kitchen test")
+          if (KITCHEN_ENV != null && !KITCHEN_ENV.isEmpty() && KITCHEN_ENV != "") {
+            def cleanEnv = KITCHEN_ENV.replaceAll("\\s?SUITE=[^\\s]*", "")
+            def suitePattern = java.util.regex.Pattern.compile("\\s?SUITE=([^\\s]*)")
+            def suiteMatcher = suitePattern.matcher(KITCHEN_ENV)
+            if (suiteMatcher.find()) {
+              def suite = suiteMatcher.group(1)
+              suiteMatcher = null
+              def cleanSuite = suite.replaceAll("_", "-")
+              common.infoMsg("Running kitchen test with environment:" + KITCHEN_ENV.trim())
+              ruby.runKitchenTests(cleanEnv, cleanSuite)
+            } else {
+              common.warningMsg("No SUITE was found. Running with all suites.")
+              ruby.runKitchenTests(cleanEnv, "")
+            }
+          } else {
+            throw new Exception("KITCHEN_ENV parameter is empty or invalid. This may indicate wrong env settings of initial test job or .travis.yml file.")
+          }
+        } else {
+          throw new Exception(".kitchen.yml file not found, no kitchen tests triggered.")
+        }
+      }
+    }
+  } catch (Throwable e) {
+    // If there was an error or exception thrown, the build failed
+    currentBuild.result = "FAILURE"
+    ruby.runKitchenCommand("destroy")
+    throw e
+  } finally {
+    if (currentBuild.result == "FAILURE" && fileExists(".kitchen/logs/kitchen.log")) {
+      common.errorMsg("----------------KITCHEN LOG:---------------")
+      println readFile(".kitchen/logs/kitchen.log")
+    }
+  }
+}
\ No newline at end of file
diff --git a/test-salt-formulas-pipeline.groovy b/test-salt-formulas-pipeline.groovy
index 8ad5f1c..a475e3a 100644
--- a/test-salt-formulas-pipeline.groovy
+++ b/test-salt-formulas-pipeline.groovy
@@ -16,115 +16,123 @@
   gerritRef = null
 }
 
+def parallelGroupSize
+try {
+  parallelGroupSize = Integer.valueOf(PARALLEL_GROUP_SIZE)
+} catch (MissingPropertyException e) {
+  parallelGroupSize = 4
+}
+
 def defaultGitRef, defaultGitUrl
 try {
-    defaultGitRef = DEFAULT_GIT_REF
-    defaultGitUrl = DEFAULT_GIT_URL
+  defaultGitRef = DEFAULT_GIT_REF
+  defaultGitUrl = DEFAULT_GIT_URL
 } catch (MissingPropertyException e) {
-    defaultGitRef = null
-    defaultGitUrl = null
+  defaultGitRef = null
+  defaultGitUrl = null
 }
 
 def checkouted = false
 
 node("python") {
-  try{
+  try {
     stage("checkout") {
       if (gerritRef) {
         // job is triggered by Gerrit
         def gerritChange = gerrit.getGerritChange(GERRIT_NAME, GERRIT_HOST, GERRIT_CHANGE_NUMBER, CREDENTIALS_ID, true)
         // test if gerrit change is already Verified
-        if(gerrit.patchsetHasApproval(gerritChange.currentPatchSet,"Verified","+")){
+        if (gerrit.patchsetHasApproval(gerritChange.currentPatchSet, "Verified", "+")) {
           common.successMsg("Gerrit change ${GERRIT_CHANGE_NUMBER} patchset ${GERRIT_PATCHSET_NUMBER} already has Verified, skipping tests") // do nothing
-        // test WIP contains in commit message
-        }else if(gerritChange.commitMessage.contains("WIP")){
+          // test WIP contains in commit message
+        } else if (gerritChange.commitMessage.contains("WIP")) {
           common.successMsg("Commit message contains WIP, skipping tests") // do nothing
-        }else{
+        } else {
           // test if change aren't already merged
           def merged = gerritChange.status == "MERGED"
-          if(!merged){
-            checkouted = gerrit.gerritPatchsetCheckout ([
-              credentialsId : CREDENTIALS_ID
+          if (!merged) {
+            checkouted = gerrit.gerritPatchsetCheckout([
+              credentialsId: CREDENTIALS_ID
             ])
-          } else{
+          } else {
             common.successMsg("Change ${GERRIT_CHANGE_NUMBER} is already merged, no need to test them")
           }
         }
-      } else if(defaultGitRef && defaultGitUrl) {
-          checkouted = gerrit.gerritPatchsetCheckout(defaultGitUrl, defaultGitRef, "HEAD", CREDENTIALS_ID)
+        defaultGitUrl = "${GERRIT_SCHEME}://${GERRIT_NAME}@${GERRIT_HOST}:${GERRIT_PORT}/${GERRIT_PROJECT}"
+        defaultGitRef = GERRIT_REFSPEC
+      } else if (defaultGitRef && defaultGitUrl) {
+        checkouted = gerrit.gerritPatchsetCheckout(defaultGitUrl, defaultGitRef, "HEAD", CREDENTIALS_ID)
       } else {
         throw new Exception("Cannot checkout gerrit patchset, GERRIT_REFSPEC and DEFAULT_GIT_REF is null")
       }
     }
     stage("test") {
-      if(checkouted){
+      if (checkouted) {
         sh("make clean")
         sh("[ $SALT_VERSION != 'latest' ] || export SALT_VERSION=''; make test")
       }
     }
     stage("kitchen") {
-      if(checkouted){
+      if (checkouted) {
         if (fileExists(".kitchen.yml")) {
           common.infoMsg(".kitchen.yml found, running kitchen tests")
-          ruby.ensureRubyEnv()
           def kitchenEnvs = []
           def filteredEnvs = []
-          if(fileExists(".travis.yml")){
-            common.infoMsg(".travis.yml found, running custom kitchen init")
+          if (fileExists(".travis.yml")) {
+            common.infoMsg(".travis.yml file found.")
             def kitchenConfigYML = readYaml(file: ".travis.yml")
-            if(kitchenConfigYML.containsKey("env")){
-              kitchenEnvs=kitchenConfigYML["env"]
-            }
-            def kitchenInit = kitchenConfigYML["install"]
-            def kitchenInstalled = false
-            if(kitchenInit && !kitchenInit.isEmpty()){
-              for(int i=0; i<kitchenInit.size(); i++){
-                if(kitchenInit[i].trim().startsWith("test -e Gemfile")){ //found Gemfile config
-                  common.infoMsg("Custom Gemfile configuration found, using them")
-                  ruby.installKitchen(kitchenInit[i].trim())
-                  kitchenInstalled = true
-                }
-              }
-            }
-            if(!kitchenInstalled){
-              ruby.installKitchen()
-            }
-          }else{
-            common.infoMsg(".travis.yml not found, running default kitchen init")
-            ruby.installKitchen()
-          }
-          common.infoMsg("Running kitchen testing, parallel mode: " + KITCHEN_TESTS_PARALLEL.toBoolean())
-
-          if(CUSTOM_KITCHEN_ENVS != null && CUSTOM_KITCHEN_ENVS != ''){
-              filteredEnvs = CUSTOM_KITCHEN_ENVS.tokenize('\n')
-            } else {
-              filteredEnvs = ruby.filterKitchenEnvs(kitchenEnvs).unique()
-            }
-            // Allow custom filteredEnvs in case of empty kitchenEnvs
-          if((kitchenEnvs && !kitchenEnvs.isEmpty() && !filteredEnvs.isEmpty()) || ((kitchenEnvs==null || kitchenEnvs=='') && !filteredEnvs.isEmpty())){
-            for(int i=0; i<filteredEnvs.size(); i++){
-              common.infoMsg("Found " + filteredEnvs.size() + " environment, kitchen running with env number " + (i+1) + ": " + filteredEnvs[i].trim())
-              ruby.runKitchenTests(filteredEnvs[i].trim(), KITCHEN_TESTS_PARALLEL.toBoolean())
+            if (kitchenConfigYML.containsKey("env")) {
+              kitchenEnvs = kitchenConfigYML["env"]
             }
           } else {
-            ruby.runKitchenTests("", KITCHEN_TESTS_PARALLEL.toBoolean())
+            common.warningMsg(".travis.yml file not found, suites must be passed via CUSTOM_KITCHEN_ENVS parameter.")
           }
-        } else {
-          common.infoMsg(".kitchen.yml not found")
+          common.infoMsg("Running kitchen testing in parallel mode")
+          if (CUSTOM_KITCHEN_ENVS != null && CUSTOM_KITCHEN_ENVS != '') {
+            kitchenEnvs = CUSTOM_KITCHEN_ENVS.tokenize('\n')
+            common.infoMsg("CUSTOM_KITCHEN_ENVS not empty. Running with custom environments: ${kitchenEnvs}")
+          }
+          if (kitchenEnvs != null && kitchenEnvs != '') {
+            def acc = 0
+            def kitchenTestRuns = [:]
+            common.infoMsg("Found " + kitchenEnvs.size() + " environment(s)")
+            for (int i = 0; i < kitchenEnvs.size(); i++) {
+              if (acc >= parallelGroupSize) {
+                parallel kitchenTestRuns
+                kitchenTestRuns = [:]
+                acc = 0
+              }
+              def testEnv = kitchenEnvs[i]
+              kitchenTestRuns[testEnv] = {
+                build job: "test-salt-formulas-env", parameters: [
+                  [$class: 'StringParameterValue', name: 'CREDENTIALS_ID', value: CREDENTIALS_ID],
+                  [$class: 'StringParameterValue', name: 'KITCHEN_ENV', value: testEnv],
+                  [$class: 'StringParameterValue', name: 'DEFAULT_GIT_REF', value: defaultGitRef],
+                  [$class: 'StringParameterValue', name: 'DEFAULT_GIT_URL', value: defaultGitUrl],
+                  [$class: 'StringParameterValue', name: 'SALT_OPTS', value: SALT_OPTS],
+                  [$class: 'StringParameterValue', name: 'SALT_VERSION', value: SALT_VERSION]
+                ]
+              }
+              acc++;
+            }
+            if (acc != 0) {
+              parallel kitchenTestRuns
+            }
+          } else {
+            common.warningMsg("No kitchen environments found, no kitchen tests triggered.")
+          }
         }
       }
     }
   } catch (Throwable e) {
-     // If there was an error or exception thrown, the build failed
-     currentBuild.result = "FAILURE"
-     ruby.runKitchenCommand("destroy")
-     throw e
+    // If there was an error or exception thrown, the build failed
+    currentBuild.result = "FAILURE"
+    currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
+    throw e
   } finally {
-     if(currentBuild.result == "FAILURE" && fileExists(".kitchen/logs/kitchen.log")){
-        common.errorMsg("----------------KITCHEN LOG:---------------")
-        println readFile(".kitchen/logs/kitchen.log")
-     }
-     common.sendNotification(currentBuild.result,"",["slack"])
+    if (currentBuild.result == "FAILURE" && fileExists(".kitchen/logs/kitchen.log")) {
+      common.errorMsg("----------------KITCHEN LOG:---------------")
+      println readFile(".kitchen/logs/kitchen.log")
+    }
+    common.sendNotification(currentBuild.result, "", ["slack"])
   }
-}
-
+}
\ No newline at end of file
diff --git a/test-salt-model-node.groovy b/test-salt-model-node.groovy
index a2e6923..bcc0c8b 100644
--- a/test-salt-model-node.groovy
+++ b/test-salt-model-node.groovy
@@ -5,6 +5,7 @@
  *  DEFAULT_GIT_URL
  *  CREDENTIALS_ID
  *  EXTRA_FORMULAS
+ *  CLUSTER_NAME
  *  NODE_TARGET
  *  SYSTEM_GIT_URL
  *  SYSTEM_GIT_REF
@@ -53,12 +54,13 @@
     stage("test node") {
       if (checkouted) {
         def workspace = common.getWorkspace()
-        saltModelTesting.setupAndTestNode(NODE_TARGET, EXTRA_FORMULAS, workspace, FORMULAS_SOURCE, FORMULAS_REVISION, MAX_CPU_PER_JOB.toInteger())
+        saltModelTesting.setupAndTestNode(NODE_TARGET, CLUSTER_NAME, EXTRA_FORMULAS, workspace, FORMULAS_SOURCE, FORMULAS_REVISION, MAX_CPU_PER_JOB.toInteger())
       }
     }
   } catch (Throwable e) {
      // If there was an error or exception thrown, the build failed
      currentBuild.result = "FAILURE"
+     currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
      throw e
   }
 }
diff --git a/test-salt-models-pipeline.groovy b/test-salt-models-pipeline.groovy
index 5b97b5d..32ef43c 100644
--- a/test-salt-models-pipeline.groovy
+++ b/test-salt-models-pipeline.groovy
@@ -85,6 +85,7 @@
         def acc = 0
         for (int i = 0; i < nodes.size(); i++) {
           def testTarget = sh(script: "basename ${nodes[i]} .yml", returnStdout: true).trim()
+          def clusterName = testTarget.substring(testTarget.indexOf(".") + 1, testTarget.lastIndexOf("."))
           if (acc >= PARALLEL_NODE_GROUP_SIZE.toInteger()) {
             parallel branches
             branches = [:]
@@ -95,6 +96,7 @@
             build job: "test-salt-model-node", parameters: [
               [$class: 'StringParameterValue', name: 'DEFAULT_GIT_URL', value: defaultGitUrl],
               [$class: 'StringParameterValue', name: 'DEFAULT_GIT_REF', value: defaultGitRef],
+              [$class: 'StringParameterValue', name: 'CLUSTER_NAME', value: clusterName],
               [$class: 'StringParameterValue', name: 'NODE_TARGET', value: testTarget],
               [$class: 'StringParameterValue', name: 'FORMULAS_SOURCE', value: formulasSource],
               [$class: 'StringParameterValue', name: 'EXTRA_FORMULAS', value: EXTRA_FORMULAS],
@@ -113,6 +115,7 @@
     }
   } catch (Throwable e) {
      currentBuild.result = "FAILURE"
+     currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
      throw e
   } finally {
      common.sendNotification(currentBuild.result,"",["slack"])
diff --git a/test-service.groovy b/test-service.groovy
index cf1dcc6..f7cdd64 100644
--- a/test-service.groovy
+++ b/test-service.groovy
@@ -87,8 +87,11 @@
 
             writeFile(file: 'report.xml', text: salt.getFileContent(saltMaster, TEST_TEMPEST_TARGET, '/root/report.xml'))
             junit(keepLongStdio: true, testResults: 'report.xml', healthScaleFactor:  Double.parseDouble(TEST_JUNIT_RATIO))
+            def testResults = test.collectJUnitResults(currentBuild.rawBuild.getAction(hudson.tasks.test.AbstractTestResultAction.class))
+            if(testResults){
+                currentBuild.description = String.format("result: %s", testResults["failed"] / testResults["total"])
+            }
         }
-
     } catch (Throwable e) {
         currentBuild.result = 'FAILURE'
         throw e
diff --git a/test-system-reclass-pipeline.groovy b/test-system-reclass-pipeline.groovy
index 8a8fbac..411edfc 100644
--- a/test-system-reclass-pipeline.groovy
+++ b/test-system-reclass-pipeline.groovy
@@ -76,6 +76,7 @@
 } catch (Throwable e) {
     // If there was an error or exception thrown, the build failed
     currentBuild.result = "FAILURE"
+    currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
     throw e
 } finally {
     common.sendNotification(currentBuild.result,"",["slack"])
diff --git a/update-jenkins-master-jobs.groovy b/update-jenkins-master-jobs.groovy
new file mode 100644
index 0000000..56edb10
--- /dev/null
+++ b/update-jenkins-master-jobs.groovy
@@ -0,0 +1,34 @@
+/**
+ * Update Jenkins master jobs
+ *
+ * Expected parameters:
+ *   SALT_MASTER_CREDENTIALS    Credentials to the Salt API.
+ *   SALT_MASTER_URL            Full Salt API address [https://10.10.10.1:8000].
+ *   TARGET_SERVERS             Server to update
+ *
+**/
+
+def common = new com.mirantis.mk.Common()
+def salt = new com.mirantis.mk.Salt()
+
+def saltMaster
+def target = ['expression': TARGET_SERVERS, 'type': 'compound']
+def result
+
+node("python") {
+    try {
+
+        stage('Connect to Salt master') {
+            saltMaster = salt.connection(SALT_MASTER_URL, SALT_MASTER_CREDENTIALS)
+        }
+
+        stage('Update Jenkins jobs') {
+            result = salt.runSaltCommand(saltMaster, 'local', target, 'state.apply', null, 'jenkins.client')
+            salt.checkResult(result)
+        }
+
+    } catch (Throwable e) {
+        currentBuild.result = 'FAILURE'
+        throw e
+    }
+}
diff --git a/update-package.groovy b/update-package.groovy
index ea2259c..c946123 100644
--- a/update-package.groovy
+++ b/update-package.groovy
@@ -53,7 +53,11 @@
         }
 
         stage("List package upgrades") {
+            common.infoMsg("Listing all the packages that have a new update available on test nodes: ${targetTestSubset}")
             salt.runSaltProcessStep(saltMaster, targetTestSubset, 'pkg.list_upgrades', [], null, true)
+            if(TARGET_PACKAGES != "" && TARGET_PACKAGES != "*"){
+                common.infoMsg("Note that only the ${TARGET_PACKAGES} would be installed from the above list of available updates on the ${targetTestSubset}")
+            }
         }
 
         stage('Confirm live package upgrades on sample') {
@@ -102,6 +106,7 @@
     } catch (Throwable e) {
         // If there was an error or exception thrown, the build failed
         currentBuild.result = "FAILURE"
+        currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
         throw e
     }
 }
diff --git a/update-reclass-metadata.groovy b/update-reclass-metadata.groovy
new file mode 100644
index 0000000..6fb539a
--- /dev/null
+++ b/update-reclass-metadata.groovy
@@ -0,0 +1,35 @@
+/**
+ * Update reclass model on salt master
+ *
+ * Expected parameters:
+ *   SALT_MASTER_CREDENTIALS    Credentials to the Salt API.
+ *   SALT_MASTER_URL            Full Salt API address [https://10.10.10.1:8000].
+ *   TARGET_SERVERS             Server to update
+ *
+**/
+
+def common = new com.mirantis.mk.Common()
+def salt = new com.mirantis.mk.Salt()
+
+def saltMaster
+def target = ['expression': TARGET_SERVERS, 'type': 'compound']
+def result
+
+node("python") {
+    try {
+
+        stage('Connect to Salt master') {
+            saltMaster = salt.connection(SALT_MASTER_URL, SALT_MASTER_CREDENTIALS)
+        }
+
+        stage('Update Reclass model') {
+            result = salt.runSaltCommand(saltMaster, 'local', target, 'state.apply', null, 'reclass.storage')
+            result = salt.runSaltCommand(saltMaster, 'local', target, 'state.apply', null, 'reclass.storage.node')
+            salt.checkResult(result)
+        }
+
+    } catch (Throwable e) {
+        currentBuild.result = 'FAILURE'
+        throw e
+    }
+}
diff --git a/update-salt-master-formulas.groovy b/update-salt-master-formulas.groovy
new file mode 100644
index 0000000..f3e7d1c
--- /dev/null
+++ b/update-salt-master-formulas.groovy
@@ -0,0 +1,34 @@
+/**
+ * Update formulas on salt master
+ *
+ * Expected parameters:
+ *   SALT_MASTER_CREDENTIALS    Credentials to the Salt API.
+ *   SALT_MASTER_URL            Full Salt API address [https://10.10.10.1:8000].
+ *   TARGET_SERVERS             Server to update
+ *
+**/
+
+def common = new com.mirantis.mk.Common()
+def salt = new com.mirantis.mk.Salt()
+
+def saltMaster
+def target = ['expression': TARGET_SERVERS, 'type': 'compound']
+def result
+
+node("python") {
+    try {
+
+        stage('Connect to Salt master') {
+            saltMaster = salt.connection(SALT_MASTER_URL, SALT_MASTER_CREDENTIALS)
+        }
+
+        stage('Update Salt formulas') {
+            result = salt.runSaltCommand(saltMaster, 'local', target, 'state.apply', null, 'salt.master.env')
+            salt.checkResult(result)
+        }
+
+    } catch (Throwable e) {
+        currentBuild.result = 'FAILURE'
+        throw e
+    }
+}
diff --git a/validate-cloud.groovy b/validate-cloud.groovy
index 6c25071..f2720c1 100644
--- a/validate-cloud.groovy
+++ b/validate-cloud.groovy
@@ -11,11 +11,15 @@
  *   TEMPEST_TEST_SET            If not false, run tests matched to pattern only
  *   RUN_TEMPEST_TESTS           If not false, run Tempest tests
  *   RUN_RALLY_TESTS             If not false, run Rally tests
+ *   RUN_K8S_TESTS               If not false, run Kubernetes tests
+ *   TEST_K8S_API_SERVER         Kubernetes API address
+ *   TEST_K8S_CONFORMANCE_IMAGE  Path to docker image with conformance e2e tests
  *
  */
 
 common = new com.mirantis.mk.Common()
 salt = new com.mirantis.mk.Salt()
+test = new com.mirantis.mk.Test()
 validate = new com.mirantis.mcp.Validate()
 
 def saltMaster
@@ -48,12 +52,46 @@
                 common.infoMsg("Skipping Rally tests")
             }
         }
+
+        stage('Run k8s bootstrap tests') {
+            if (RUN_K8S_TESTS.toBoolean() == true) {
+                def image = 'tomkukral/k8s-scripts'
+                def output_file = image.replaceAll('/', '-') + '.output'
+
+                // run image
+                test.runConformanceTests(saltMaster, TEST_K8S_API_SERVER, image)
+
+                // collect output
+                def file_content = salt.getFileContent(saltMaster, 'ctl01*', '/tmp/' + output_file)
+                writeFile file: "${artifacts_dir}${output_file}", text: file_content
+            } else {
+                common.infoMsg("Skipping k8s bootstrap tests")
+            }
+        }
+
+        stage('Run k8s conformance e2e tests') {
+            if (RUN_K8S_TESTS.toBoolean() == true) {
+                def image = TEST_K8S_CONFORMANCE_IMAGE
+                def output_file = image.replaceAll('/', '-') + '.output'
+
+                // run image
+                test.runConformanceTests(saltMaster, TEST_K8S_API_SERVER, image)
+
+                // collect output
+                def file_content = salt.getFileContent(saltMaster, 'ctl01*', '/tmp/' + output_file)
+                writeFile file: "${artifacts_dir}${output_file}", text: file_content
+            } else {
+                common.infoMsg("Skipping k8s conformance e2e tests")
+            }
+        }
+
         stage('Collect results') {
             archiveArtifacts artifacts: "${artifacts_dir}/*"
         }
     } catch (Throwable e) {
         // If there was an error or exception thrown, the build failed
         currentBuild.result = "FAILURE"
+        currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
         throw e
     } finally {
         validate.runCleanup(saltMaster, TARGET_NODE, artifacts_dir)