Merge "Add groovy script to (re-)create and rename branches"
diff --git a/build-debian-packages-pipeline.groovy b/build-debian-packages-pipeline.groovy
index 5d16339..864f0bc 100644
--- a/build-debian-packages-pipeline.groovy
+++ b/build-debian-packages-pipeline.groovy
@@ -64,12 +64,16 @@
           checkout changelog: true, poll: false,
             scm: [$class: 'GitSCM', branches: pollBranches, doGenerateSubmoduleConfigurations: false,
             extensions: extensions,  submoduleCfg: [], userRemoteConfigs: userRemoteConfigs]
+
+          /* There are 2 schemas of build spec keeping:
+                 1. Separate directory with specs.
+                 2. Separate branch with build specs. I.e. debian/xenial
+             Logic below makes package build compatible with both schemas.
+          */
+          if (fileExists('debian/changelog')) {
+              debian_branch = null
+          }
           if (debian_branch){
-            /* There are 2 schemas of build spec keeping:
-                   1. Separate branch with build specs. I.e. debian/xenial
-                   2. Separate directory with specs.
-               Logic below makes package build compatible with both schemas.
-            */
             def retStatus = sh(script: 'git checkout ' + DEBIAN_BRANCH, returnStatus: true)
             if (retStatus != 0) {
               common.warningMsg("Cannot checkout ${DEBIAN_BRANCH} branch. Going to build package by ${SOURCE_BRANCH} branch.")
diff --git a/build-mirror-image.groovy b/build-mirror-image.groovy
deleted file mode 100644
index 4c42b3e..0000000
--- a/build-mirror-image.groovy
+++ /dev/null
@@ -1,139 +0,0 @@
-/**
- *
- * Build mirror image pipeline
- *
- * Expected parameters:
- * IMAGE_NAME - Name of the result image.
- * OS_CREDENTIALS_ID - ID of credentials for OpenStack API stored in Jenkins.
- * OS_PROJECT - Project in OpenStack under the VM will be spawned.
- * OS_URL - Keystone auth endpoint of the OpenStack.
- * OS_VERSION - OpenStack version
- * UPLOAD_URL - URL of an WebDAV used to upload the image after creating.
- * VM_AVAILABILITY_ZONE - Availability zone in OpenStack in the VM will be spawned.
- * VM_FLAVOR - Flavor to be used for VM in OpenStack.
- * VM_FLOATING_IP_POOL - Floating IP pool to be used to assign floating IP to the VM.
- * VM_IMAGE - Name of the image to be used for VM in OpenStack.
- * VM_IP - Static IP that is assigned to the VM which belongs to the network used.
- * VM_NETWORK_ID - ID of the network that VM connects to.
- * EXTRA_VARIABLES - list of key:value variables required by template.json
- *
- */
-
-// Load shared libs
-def common = new com.mirantis.mk.Common()
-def openstack = new com.mirantis.mk.Openstack()
-def git = new com.mirantis.mk.Git()
-def date = new Date()
-def dateTime = date.format("ddMMyyyy-HHmmss")
-def rcFile = ""
-def openstackEnv = ""
-def uploadImageStatus = ""
-def uploadMd5Status = ""
-def creds
-ArrayList extra_vars = EXTRA_VARIABLES.readLines()
-IMAGE_NAME = IMAGE_NAME + "-" + dateTime
-
-timeout(time: 8, unit: 'HOURS') {
-  node("python&&disk-xl") {
-    try {
-      def workspace = common.getWorkspace()
-      openstackEnv = "${workspace}/venv"
-
-      stage("Prepare env") {
-        if (!fileExists("${workspace}/tmp")) {
-          sh "mkdir -p ${workspace}/tmp"
-        }
-        if (!fileExists("${workspace}/images")) {
-          sh "mkdir ${workspace}/images"
-        }
-        if (!fileExists("bin")) {
-          common.infoMsg("Downloading packer")
-          sh "mkdir -p bin"
-          dir("bin") {
-            sh "wget --quiet -O ${PACKER_ZIP} ${PACKER_URL}"
-            sh "echo \"${PACKER_ZIP_MD5} ${PACKER_ZIP}\" >> md5sum"
-            sh "md5sum -c --status md5sum"
-            sh "unzip ${PACKER_ZIP}"
-          }
-        }
-        // clean images dir before building
-        sh(script: "rm -rf ${BUILD_OS}/images/*", returnStatus: true)
-        // clean virtualenv is exists
-        sh(script: "rm -rf ${workspace}/venv", returnStatus: true)
-
-        openstack.setupOpenstackVirtualenv(openstackEnv, OS_VERSION)
-        git.checkoutGitRepository(PACKER_TEMPLATES_REPO_NAME, PACKER_TEMPLATES_REPO_URL, PACKER_TEMPLATES_BRANCH)
-        creds = common.getPasswordCredentials(OS_CREDENTIALS_ID)
-      }
-
-      stage("Build Instance") {
-        dir("${workspace}/${PACKER_TEMPLATES_REPO_NAME}/${BUILD_OS}/") {
-          withEnv(extra_vars + ["PATH=${env.PATH}:${workspace}/bin",
-                                "PACKER_LOG_PATH=${workspace}/packer.log",
-                                "PACKER_LOG=1",
-                                "TMPDIR=${workspace}/tmp",
-                                "IMAGE_NAME=${IMAGE_NAME}",
-                                "OS_USERNAME=${creds.username}",
-                                "OS_PASSWORD=${creds.password.toString()}"]) {
-            if (PACKER_DEBUG.toBoolean()) {
-              PACKER_ARGS = "${PACKER_ARGS} -debug"
-            }
-
-            sh "packer build -only=${BUILD_ONLY} ${PACKER_ARGS} -parallel=false template.json"
-
-            def packerStatus = sh(script: "grep \"Some builds didn't complete successfully and had errors\" ${PACKER_LOG_PATH}", returnStatus: true)
-            // grep returns 0 if find something
-            if (packerStatus != 0) {
-              common.infoMsg("Openstack instance complete")
-            } else {
-              throw new Exception("Packer build failed")
-            }
-          }
-        }
-      }
-
-      stage("Publish image") {
-        common.infoMsg("Saving image ${IMAGE_NAME}")
-        rcFile = openstack.createOpenstackEnv(workspace, OS_URL, OS_CREDENTIALS_ID, OS_PROJECT, "default", "", "default", "2", "")
-
-        common.retry(3, 5) {
-          openstack.runOpenstackCommand("openstack image save --file ${IMAGE_NAME}.qcow2 ${IMAGE_NAME}", rcFile, openstackEnv)
-        }
-        sh "md5sum ${IMAGE_NAME}.qcow2 > ${IMAGE_NAME}.qcow2.md5"
-
-        common.infoMsg("Uploading image ${IMAGE_NAME}")
-        common.retry(3, 5) {
-          uploadImageStatus = sh(script: "curl -f -T ${IMAGE_NAME}.qcow2 ${UPLOAD_URL}", returnStatus: true)
-          if (uploadImageStatus != 0) {
-            throw new Exception("Image upload failed")
-          }
-        }
-
-        common.retry(3, 5) {
-          uploadMd5Status = sh(script: "curl -f -T ${IMAGE_NAME}.qcow2.md5 ${UPLOAD_URL}", returnStatus: true)
-          if (uploadMd5Status != 0) {
-            throw new Exception("MD5 sum upload failed")
-          }
-        }
-        currentBuild.description = "<a href='http://ci.mcp.mirantis.net:8085/images/${IMAGE_NAME}.qcow2'>${IMAGE_NAME}.qcow2</a>"
-      }
-
-    } catch (Throwable e) {
-      // If there was an error or exception thrown, the build failed
-      currentBuild.result = "FAILURE"
-      throw e
-    } finally {
-      if (CLEANUP_AFTER) {
-          dir(workspace) {
-            sh "rm -rf ./*"
-          }
-      } else {
-        common.infoMsg("Env has not been cleanup!")
-        common.infoMsg("Packer private key:")
-        dir("${workspace}/${PACKER_TEMPLATES_REPO_NAME}/${BUILD_OS}/") {
-          sh "cat os_${BUILD_OS}.pem"
-        }
-      }
-    }
-  }
-}
diff --git a/cvp-spt.groovy b/cvp-spt.groovy
index ea4680f..b9d53d5 100644
--- a/cvp-spt.groovy
+++ b/cvp-spt.groovy
@@ -44,7 +44,7 @@
                  file: "report.xml",
                  nodeType: 'NODESET',
                  url: '',
-                 xpath: '/testsuite/testcase[@classname="cvp-spt.cvp_spt.tests.test_hw2hw"]/properties/property']]
+                 xpath: '/testsuite/testcase[@classname="cvp_spt.tests.test_hw2hw"]/properties/property']]
             plot csvFileName: 'plot-8634d2fe-dc48-4713-99f9-b69a381483bc.csv',
                  group: 'SPT',
                  style: 'line',
@@ -53,7 +53,7 @@
                  file: "report.xml",
                  nodeType: 'NODESET',
                  url: '',
-                 xpath: '/testsuite/testcase[@classname="cvp-spt.cvp_spt.tests.test_vm2vm"]/properties/property']]
+                 xpath: '/testsuite/testcase[@classname="cvp_spt.tests.test_vm2vm"]/properties/property']]
         }
     } catch (Throwable e) {
         // If there was an error or exception thrown, the build failed
diff --git a/docker-mirror-images.groovy b/docker-mirror-images.groovy
index ebbfc86..07a80e7 100644
--- a/docker-mirror-images.groovy
+++ b/docker-mirror-images.groovy
@@ -42,7 +42,7 @@
                     imagePath = imageArray[0]
                     if (imagePath.contains('SUBS_SOURCE_IMAGE_TAG')) {
                         common.warningMsg("Replacing SUBS_SOURCE_IMAGE_TAG => ${SOURCE_IMAGE_TAG}")
-                        imagePath.replace('SUBS_SOURCE_IMAGE_TAG', SOURCE_IMAGE_TAG)
+                        imagePath = imagePath.replace('SUBS_SOURCE_IMAGE_TAG', SOURCE_IMAGE_TAG)
                     }
                     targetRegistry = imageArray[1]
                     imageName = getImageName(imagePath)
diff --git a/generate-cookiecutter-products.groovy b/generate-cookiecutter-products.groovy
index 12dc88d..553029e 100644
--- a/generate-cookiecutter-products.groovy
+++ b/generate-cookiecutter-products.groovy
@@ -17,9 +17,19 @@
 if (common.validInputParam('RECLASS_VERSION')) {
   reclassVersion = RECLASS_VERSION
 }
+slaveNode = (env.SLAVE_NODE ?: 'python&&docker')
 
-timeout(time: 12, unit: 'HOURS') {
-  node("python&&docker") {
+// Install extra formulas required only for rendering cfg01. All others should be fetched
+// automatically via the salt.master.env state during salt-master bootstrap.
+// TODO: Ideally this data should be fetched from CC itself, per env/context - e.g. by processing
+// the _enabled options from the CC contexts
+// currently, just mix them together in one set
+def testCfg01ExtraFormulas = 'glusterfs jenkins logrotate maas ntp rsyslog fluentd telegraf prometheus ' +
+                             'grafana backupninja auditd'
+
+
+timeout(time: 2, unit: 'HOURS') {
+  node(slaveNode) {
     def templateEnv = "${env.WORKSPACE}/template"
     def modelEnv = "${env.WORKSPACE}/model"
     def testEnv = "${env.WORKSPACE}/test"
@@ -43,6 +53,7 @@
       def templateDir = "${templateEnv}/template/dir"
       def templateOutputDir = templateBaseDir
       def user
+      def testResult = false
       wrap([$class: 'BuildUser']) {
         user = env.BUILD_USER_ID
       }
@@ -50,7 +61,8 @@
       currentBuild.description = clusterName
       print("Using context:\n" + COOKIECUTTER_TEMPLATE_CONTEXT)
 
-      stage ('Download Cookiecutter template') {
+      stage('Download Cookiecutter template') {
+        sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
         def cookiecutterTemplateUrl = templateContext.default_context.cookiecutter_template_url
         def cookiecutterTemplateBranch = templateContext.default_context.cookiecutter_template_branch
         git.checkoutGitRepository(templateEnv, cookiecutterTemplateUrl, 'master')
@@ -64,7 +76,7 @@
           if (cookiecutterTemplateBranch == '') {
             cookiecutterTemplateBranch = mcpVersion
             // Don't have nightly/testing/stable for cookiecutter-templates repo, therefore use master
-            if(mcpVersion == "nightly" || mcpVersion == "testing" || mcpVersion == "stable"){
+            if ([ "nightly" , "testing", "stable" ].contains(mcpVersion)) {
               cookiecutterTemplateBranch = 'master'
             }
           }
@@ -72,7 +84,7 @@
         }
       }
 
-      stage ('Create empty reclass model') {
+      stage('Create empty reclass model') {
         dir(path: modelEnv) {
           sh "rm -rfv .git"
           sh "git init"
@@ -89,8 +101,9 @@
           // Use mcpVersion git tag if not specified branch for reclass-system
           if (sharedReclassBranch == '') {
             sharedReclassBranch = mcpVersion
-            // Don't have nightly/testing/stable for reclass-system repo, therefore use master
-            if(mcpVersion == "nightly" || mcpVersion == "testing" || mcpVersion == "stable"){
+            // Don't have nightly/testing/stable for reclass-system repo, therefore use master
+            if ([ "nightly" , "testing", "stable" ].contains(mcpVersion)) {
+              common.warningMsg("Fetching reclass-system from master!")
               sharedReclassBranch = 'master'
             }
           }
@@ -142,11 +155,11 @@
         }
       }
 
-      if(localRepositories && !offlineDeployment){
+      if (localRepositories && !offlineDeployment) {
         def aptlyModelUrl = templateContext.default_context.local_model_url
         dir(path: modelEnv) {
           ssh.agentSh "git submodule add \"${aptlyModelUrl}\" \"classes/cluster/${clusterName}/cicd/aptly\""
-          if(!(mcpVersion in ["nightly", "testing", "stable"])){
+          if (!(mcpVersion in ["nightly", "testing", "stable"])) {
             ssh.agentSh "cd \"classes/cluster/${clusterName}/cicd/aptly\";git fetch --tags;git checkout ${mcpVersion}"
           }
         }
@@ -173,16 +186,16 @@
 
       stage("Test") {
         if (TEST_MODEL.toBoolean() && sharedReclassUrl != '') {
-          def testResult = false
           sh("cp -r ${modelEnv} ${testEnv}")
           def DockerCName = "${env.JOB_NAME.toLowerCase()}_${env.BUILD_TAG.toLowerCase()}"
+          common.infoMsg("Attempt to run test against formula-version: ${mcpVersion}")
           testResult = saltModelTesting.setupAndTestNode(
               "${saltMaster}.${clusterDomain}",
               "",
-              "",
+              testCfg01ExtraFormulas,
               testEnv,
               'pkg',
-              'stable',
+              mcpVersion,
               reclassVersion,
               0,
               false,
@@ -193,8 +206,7 @@
           if (testResult) {
             common.infoMsg("Test finished: SUCCESS")
           } else {
-            common.infoMsg('Test finished: FAILURE')
-            throw new RuntimeException('Test stage finished: FAILURE')
+            common.warningMsg('Test finished: FAILURE')
           }
         } else {
           common.warningMsg("Test stage has been skipped!")
@@ -208,16 +220,18 @@
         def mcpCommonScriptsBranch = templateContext.default_context.mcp_common_scripts_branch
         if (mcpCommonScriptsBranch == '') {
           mcpCommonScriptsBranch = mcpVersion
-          // Don't have nightly for mcp-common-scripts repo, therefore use master
-          if(mcpVersion == "nightly"){
+          // Don't have nightly/testing/stable for mcp-common-scripts repo, therefore use master
+          if ([ "nightly" , "testing", "stable" ].contains(mcpVersion)) {
+            common.warningMsg("Fetching mcp-common-scripts from master!")
             mcpCommonScriptsBranch = 'master'
           }
         }
         def config_drive_script_url = "https://raw.githubusercontent.com/Mirantis/mcp-common-scripts/${mcpCommonScriptsBranch}/config-drive/create_config_drive.sh"
         def user_data_script_url = "https://raw.githubusercontent.com/Mirantis/mcp-common-scripts/${mcpCommonScriptsBranch}/config-drive/master_config.sh"
-
-        sh "wget -O create-config-drive ${config_drive_script_url} && chmod +x create-config-drive"
-        sh "wget -O user_data.sh ${user_data_script_url}"
+        common.retry(3, 5) {
+          sh "wget -O create-config-drive ${config_drive_script_url} && chmod +x create-config-drive"
+          sh "wget -O user_data.sh ${user_data_script_url}"
+        }
 
         sh "git clone --mirror https://github.com/Mirantis/mk-pipelines.git ${pipelineEnv}/mk-pipelines"
         sh "git clone --mirror https://github.com/Mirantis/pipeline-library.git ${pipelineEnv}/pipeline-library"
@@ -231,7 +245,7 @@
         smc['DEPLOY_NETWORK_NETMASK'] = templateContext['default_context']['deploy_network_netmask']
         smc['DNS_SERVERS'] = templateContext['default_context']['dns_server01']
         smc['MCP_VERSION'] = "${mcpVersion}"
-        if (templateContext['default_context']['local_repositories'] == 'True'){
+        if (templateContext['default_context']['local_repositories'] == 'True') {
           def localRepoIP = templateContext['default_context']['local_repo_url']
           smc['MCP_SALT_REPO_KEY'] = "http://${localRepoIP}/public.gpg"
           smc['MCP_SALT_REPO_URL'] = "http://${localRepoIP}/ubuntu-xenial"
@@ -239,8 +253,8 @@
           smc['PIPELINE_REPO_URL'] = "http://${localRepoIP}:8088"
           smc['LOCAL_REPOS'] = 'true'
         }
-        if (templateContext['default_context']['upstream_proxy_enabled'] == 'True'){
-          if (templateContext['default_context']['upstream_proxy_auth_enabled'] == 'True'){
+        if (templateContext['default_context']['upstream_proxy_enabled'] == 'True') {
+          if (templateContext['default_context']['upstream_proxy_auth_enabled'] == 'True') {
             smc['http_proxy'] = 'http://' + templateContext['default_context']['upstream_proxy_user'] + ':' + templateContext['default_context']['upstream_proxy_password'] + '@' + templateContext['default_context']['upstream_proxy_address'] + ':' + templateContext['default_context']['upstream_proxy_port']
             smc['https_proxy'] = 'http://' + templateContext['default_context']['upstream_proxy_user'] + ':' + templateContext['default_context']['upstream_proxy_password'] + '@' + templateContext['default_context']['upstream_proxy_address'] + ':' + templateContext['default_context']['upstream_proxy_port']
           } else {
@@ -260,7 +274,7 @@
         // save cfg iso to artifacts
         archiveArtifacts artifacts: "output-${clusterName}/${saltMaster}.${clusterDomain}-config.iso"
 
-        if (templateContext['default_context']['local_repositories'] == 'True'){
+        if (templateContext['default_context']['local_repositories'] == 'True') {
           def aptlyServerHostname = templateContext.default_context.aptly_server_hostname
           def user_data_script_apt_url = "https://raw.githubusercontent.com/Mirantis/mcp-common-scripts/master/config-drive/mirror_config.sh"
           sh "wget -O mirror_config.sh ${user_data_script_apt_url}"
@@ -284,8 +298,8 @@
         }
       }
 
-      stage ('Save changes reclass model') {
-        sh(returnStatus: true, script: "tar -zcf output-${clusterName}/${clusterName}.tar.gz -C ${modelEnv} .")
+      stage('Save changes reclass model') {
+        sh(returnStatus: true, script: "tar -czf output-${clusterName}/${clusterName}.tar.gz --exclude='*@tmp' -C ${modelEnv} .")
         archiveArtifacts artifacts: "output-${clusterName}/${clusterName}.tar.gz"
 
 
@@ -295,21 +309,24 @@
               body: "Mirantis Jenkins\n\nRequested reclass model ${clusterName} has been created and attached to this email.\nEnjoy!\n\nMirantis",
               subject: "Your Salt model ${clusterName}")
         }
-        dir("output-${clusterName}"){
+        dir("output-${clusterName}") {
           deleteDir()
         }
       }
 
+      // Fail, but leave possibility to get failed artifacts
+      if (!testResult && TEST_MODEL.toBoolean()) {
+        common.warningMsg('Test finished: FAILURE. Please check logs and/or debug failed model manually!')
+        error('Test stage finished: FAILURE')
+      }
+
     } catch (Throwable e) {
-      // If there was an error or exception thrown, the build failed
       currentBuild.result = "FAILURE"
       currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
       throw e
     } finally {
-      stage ('Clean workspace directories') {
-        sh(returnStatus: true, script: "rm -rf ${templateEnv}")
-        sh(returnStatus: true, script: "rm -rf ${modelEnv}")
-        sh(returnStatus: true, script: "rm -rf ${pipelineEnv}")
+      stage('Clean workspace directories') {
+        sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
       }
       // common.sendNotification(currentBuild.result,"",["slack"])
     }
diff --git a/tag-git-repos.groovy b/tag-git-repos.groovy
index dabbb7f..312ec9e 100644
--- a/tag-git-repos.groovy
+++ b/tag-git-repos.groovy
@@ -44,7 +44,7 @@
         repoCommit = repoArray[2]
         if (repoCommit.contains('SUBS_SOURCE_REF')) {
           common.warningMsg("Replacing SUBS_SOURCE_REF => ${SOURCE_TAG}")
-          repoCommit.replace('SUBS_SOURCE_REF', SOURCE_TAG
+          repoCommit = repoCommit.replace('SUBS_SOURCE_REF', SOURCE_TAG
             )
         }
         gitRepoAddTag(repoUrl, repoName, TAG, GIT_CREDENTIALS, repoCommit)
diff --git a/test-cookiecutter-reclass-chunk.groovy b/test-cookiecutter-reclass-chunk.groovy
index 12428ba..9e34cea 100644
--- a/test-cookiecutter-reclass-chunk.groovy
+++ b/test-cookiecutter-reclass-chunk.groovy
@@ -1,23 +1,27 @@
 package com.mirantis.mk
+
 def common = new com.mirantis.mk.Common()
 def saltModelTesting = new com.mirantis.mk.SaltModelTesting()
 
 /**
  * Test CC model wrapper
  *  EXTRA_VARIABLES_YAML: yaml based string, to be directly passed into testCCModel
+ *  SLAVE_NODE: label of the Jenkins node to run on (defaults to 'python&&docker')
  */
 
+slaveNode = env.SLAVE_NODE ?: 'python&&docker'
+
 timeout(time: 1, unit: 'HOURS') {
-node() {
-  try {
-    extra_vars = readYaml text: EXTRA_VARIABLES_YAML
-    currentBuild.description = extra_vars.modelFile
-    saltModelTesting.testCCModel(extra_vars)
+  node(slaveNode) {
+    try {
+      extraVars = readYaml text: EXTRA_VARIABLES_YAML
+      currentBuild.description = extraVars.modelFile
+      saltModelTesting.testCCModel(extraVars)
     } catch (Throwable e) {
-          // If there was an error or exception thrown, the build failed
-          currentBuild.result = "FAILURE"
-          currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
-          throw e
-        }
-      }
+      // If there was an error or exception thrown, the build failed
+      currentBuild.result = "FAILURE"
+      currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
+      throw e
     }
+  }
+}
diff --git a/test-cookiecutter-reclass.groovy b/test-cookiecutter-reclass.groovy
index 2a64990..e6d3070 100644
--- a/test-cookiecutter-reclass.groovy
+++ b/test-cookiecutter-reclass.groovy
@@ -2,17 +2,19 @@
 gerrit = new com.mirantis.mk.Gerrit()
 git = new com.mirantis.mk.Git()
 python = new com.mirantis.mk.Python()
-saltModelTesting = new com.mirantis.mk.SaltModelTesting()
 
-slave_node = 'python&&docker'
+gerritRef = env.GERRIT_REFSPEC ?: null
+slaveNode = (env.SLAVE_NODE ?: 'python&&docker')
+def alreadyMerged = false
+
 def reclassVersion = 'v1.5.4'
 if (common.validInputParam('RECLASS_VERSION')) {
-  reclassVersion = RECLASS_VERSION
+    reclassVersion = RECLASS_VERSION
 }
 
 def generateSaltMaster(modEnv, clusterDomain, clusterName) {
-  def nodeFile = "${modEnv}/nodes/cfg01.${clusterDomain}.yml"
-  def nodeString = """classes:
+    def nodeFile = "${modEnv}/nodes/cfg01.${clusterDomain}.yml"
+    def nodeString = """classes:
 - cluster.${clusterName}.infra.config
 parameters:
     _param:
@@ -23,82 +25,78 @@
             name: cfg01
             domain: ${clusterDomain}
 """
-  sh "mkdir -p ${modEnv}/nodes/"
-  println "Create file ${nodeFile}"
-  writeFile(file: nodeFile, text: nodeString)
+    sh "mkdir -p ${modEnv}/nodes/"
+    println "Create file ${nodeFile}"
+    writeFile(file: nodeFile, text: nodeString)
 }
 
-def GetBaseName(line, remove_ext) {
- filename = line.toString().split('/').last()
- if (remove_ext && filename.endsWith(remove_ext.toString())) {
-   filename = filename.take(filename.lastIndexOf(remove_ext.toString()))
- }
- return filename
-}
+/**
+ *
+ * @param contextFile - path to `contexts/XXX.yaml file`
+ * @param virtualenv  - pyvenv with CC and dep's
+ * @param templateEnvDir - root of CookieCutter
+ * @return
+ */
 
-def generateModel(modelFile, cutterEnv) {
-  def templateEnv = "${env.WORKSPACE}"
-  def modelEnv = "${env.WORKSPACE}/model"
-  def basename = GetBaseName(modelFile, '.yml')
-  def generatedModel = "${modelEnv}/${basename}"
-  def testEnv = "${env.WORKSPACE}/test"
-  def content = readFile(file: "${templateEnv}/contexts/${modelFile}")
-  def templateContext = readYaml text: content
-  def clusterDomain = templateContext.default_context.cluster_domain
-  def clusterName = templateContext.default_context.cluster_name
-  def outputDestination = "${generatedModel}/classes/cluster/${clusterName}"
-  def targetBranch = "feature/${clusterName}"
-  def templateBaseDir = "${env.WORKSPACE}"
-  def templateDir = "${templateEnv}/dir"
-  def templateOutputDir = templateBaseDir
-  sh(script: "rm -rf ${generatedModel} || true")
+def generateModel(contextFile, virtualenv, templateEnvDir) {
+    def modelEnv = "${templateEnvDir}/model"
+    def basename = common.GetBaseName(contextFile, '.yml')
+    def generatedModel = "${modelEnv}/${basename}"
+    def content = readFile(file: "${templateEnvDir}/contexts/${contextFile}")
+    def templateContext = readYaml text: content
+    def clusterDomain = templateContext.default_context.cluster_domain
+    def clusterName = templateContext.default_context.cluster_name
+    def outputDestination = "${generatedModel}/classes/cluster/${clusterName}"
+    def templateBaseDir = templateEnvDir
+    def templateDir = "${templateEnvDir}/dir"
+    def templateOutputDir = templateBaseDir
+    dir(templateEnvDir) {
+        sh(script: "rm -rf ${generatedModel} || true")
+        common.infoMsg("Generating model from context ${contextFile}")
+        def productList = ["infra", "cicd", "opencontrail", "kubernetes", "openstack", "oss", "stacklight", "ceph"]
+        for (product in productList) {
 
-  common.infoMsg("Generating model from context ${modelFile}")
+            // get templateOutputDir and productDir
+            if (product.startsWith("stacklight")) {
+                templateOutputDir = "${templateEnvDir}/output/stacklight"
+                try {
+                    productDir = "stacklight" + templateContext.default_context['stacklight_version']
+                } catch (Throwable e) {
+                    productDir = "stacklight1"
+                }
+            } else {
+                templateOutputDir = "${templateEnvDir}/output/${product}"
+                productDir = product
+            }
 
-  def productList = ["infra", "cicd", "opencontrail", "kubernetes", "openstack", "oss", "stacklight", "ceph"]
-  for (product in productList) {
+            if (product == "infra" || (templateContext.default_context["${product}_enabled"]
+                && templateContext.default_context["${product}_enabled"].toBoolean())) {
 
-    // get templateOutputDir and productDir
-    if (product.startsWith("stacklight")) {
-      templateOutputDir = "${env.WORKSPACE}/output/stacklight"
-      try {
-        productDir = "stacklight" + templateContext.default_context['stacklight_version']
-      } catch (Throwable e) {
-        productDir = "stacklight1"
-      }
-    } else {
-      templateOutputDir = "${env.WORKSPACE}/output/${product}"
-      productDir = product
+                templateDir = "${templateEnvDir}/cluster_product/${productDir}"
+                common.infoMsg("Generating product " + product + " from " + templateDir + " to " + templateOutputDir)
+
+                sh "rm -rf ${templateOutputDir} || true"
+                sh "mkdir -p ${templateOutputDir}"
+                sh "mkdir -p ${outputDestination}"
+
+                python.buildCookiecutterTemplate(templateDir, content, templateOutputDir, virtualenv, templateBaseDir)
+                sh "mv -v ${templateOutputDir}/${clusterName}/* ${outputDestination}"
+            } else {
+                common.warningMsg("Product " + product + " is disabled")
+            }
+        }
+        generateSaltMaster(generatedModel, clusterDomain, clusterName)
     }
-
-    if (product == "infra" || (templateContext.default_context["${product}_enabled"]
-        && templateContext.default_context["${product}_enabled"].toBoolean())) {
-
-      templateDir = "${templateEnv}/cluster_product/${productDir}"
-      common.infoMsg("Generating product " + product + " from " + templateDir + " to " + templateOutputDir)
-
-      sh "rm -rf ${templateOutputDir} || true"
-      sh "mkdir -p ${templateOutputDir}"
-      sh "mkdir -p ${outputDestination}"
-
-      python.buildCookiecutterTemplate(templateDir, content, templateOutputDir, cutterEnv, templateBaseDir)
-      sh "mv -v ${templateOutputDir}/${clusterName}/* ${outputDestination}"
-    } else {
-      common.warningMsg("Product " + product + " is disabled")
-    }
-  }
-  generateSaltMaster(generatedModel, clusterDomain, clusterName)
 }
 
 
-def testModel(modelFile, testEnv, reclassVersion='v1.5.4') {
-  // modelFile - `modelfiname` from model/modelfiname/modelfiname.yaml
-  // testEnv - path for model (model/modelfilename/)
-  //* Grub all models and send it to check in paralell - by one in thread.
+def testModel(modelFile, reclassVersion = 'v1.5.4') {
+    // modelFile - `modelfilename` from model/modelfilename/modelfilename.yaml
+    //* Grab all models and send them to check in parallel - one per thread.
 
-  _values_string =  """
+    _values_string = """
   ---
-  MODELS_TARGZ: "${env.BUILD_URL}/artifact/reclass.tar.gz"
+  MODELS_TARGZ: "${env.BUILD_URL}/artifact/patched_reclass.tar.gz"
   DockerCName: "${env.JOB_NAME.toLowerCase()}_${env.BUILD_TAG.toLowerCase()}_${modelFile.toLowerCase()}"
   testReclassEnv: "model/${modelFile}/"
   modelFile: "contexts/${modelFile}.yml"
@@ -106,143 +104,197 @@
   EXTRA_FORMULAS: "${env.EXTRA_FORMULAS}"
   reclassVersion: "${reclassVersion}"
   """
-  build job: "test-mk-cookiecutter-templates-chunk", parameters: [
-  [$class: 'StringParameterValue', name: 'EXTRA_VARIABLES_YAML', value: _values_string.stripIndent() ],
-  ]
+    build job: "test-mk-cookiecutter-templates-chunk", parameters: [
+        [$class: 'StringParameterValue', name: 'EXTRA_VARIABLES_YAML',
+         value : _values_string.stripIndent()],
+    ]
 }
 
-def gerritRef
-try {
-  gerritRef = GERRIT_REFSPEC
-  } catch (MissingPropertyException e) {
-    gerritRef = null
-  }
-
-def testModelStep(basename,testEnv) {
-  // We need to wrap what we return in a Groovy closure, or else it's invoked
-  // when this method is called, not when we pass it to parallel.
-  // To do this, you need to wrap the code below in { }, and either return
-  // that explicitly, or use { -> } syntax.
-  return {
-    node(slave_node) {
-      testModel(basename, testEnv)
+def StepTestModel(basename) {
+    // We need to wrap what we return in a Groovy closure, or else it's invoked
+    // when this method is called, not when we pass it to parallel.
+    // To do this, you need to wrap the code below in { }, and either return
+    // that explicitly, or use { -> } syntax.
+    // return node object
+    return {
+        node(slaveNode) {
+            testModel(basename)
+        }
     }
-  }
 }
 
-timeout(time: 2, unit: 'HOURS') {
-  node(slave_node) {
-    def templateEnv = "${env.WORKSPACE}"
-    def cutterEnv = "${env.WORKSPACE}/cutter"
-    def jinjaEnv = "${env.WORKSPACE}/jinja"
-
-    try {
-      // Fixme. Just use 'cleanup workspace' option.
-      stage("Cleanup") {
-        sh(script:  'find . -mindepth 1 -delete > /dev/null || true')
-      }
-
-      stage('Download Cookiecutter template') {
-        if (gerritRef) {
-          def gerritChange = gerrit.getGerritChange(GERRIT_NAME, GERRIT_HOST, GERRIT_CHANGE_NUMBER, CREDENTIALS_ID)
-          merged = gerritChange.status == "MERGED"
-          if (!merged) {
-            checkouted = gerrit.gerritPatchsetCheckout([
-              credentialsId: CREDENTIALS_ID
-              ])
-            } else {
-              common.successMsg("Change ${GERRIT_CHANGE_NUMBER} is already merged, no need to gate them")
-            }
-            } else {
-              git.checkoutGitRepository(templateEnv, COOKIECUTTER_TEMPLATE_URL, COOKIECUTTER_TEMPLATE_BRANCH, CREDENTIALS_ID)
-            }
-          }
-
-          stage("Setup") {
-            python.setupCookiecutterVirtualenv(cutterEnv)
-          }
-
-          stage("Check workflow_definition") {
-            sh(script: "python ${env.WORKSPACE}/workflow_definition_test.py")
-          }
-
-          def contextFileList = []
-          dir("${templateEnv}/contexts") {
-            for (String x : findFiles(glob: "*.yml")) {
-              contextFileList.add(x)
-            }
-          }
-
-          stage("generate-model") {
-            for (contextFile in contextFileList) {
-              generateModel(contextFile, cutterEnv)
-            }
-          }
-
-          dir("${env.WORKSPACE}") {
-          // Collect only models. For backward compatability - who know, probably someone use it..
-          sh(script: "tar -czf model.tar.gz -C model ../contexts .", returnStatus: true)
-          archiveArtifacts artifacts: "model.tar.gz"
-          // to be able share reclass for all subenvs
-          // Also, makes artifact test more solid - use one reclass for all of sub-models.
-          // Archive Structure will be:
-          // tar.gz
-          // ├── contexts
-          // │   └── ceph.yml
-          // ├── global_reclass <<< reclass system
-          // ├── model
-          // │   └── ceph       <<< from `context basename`
-          // │       ├── classes
-          // │       │   ├── cluster
-          // │       │   └── system -> ../../../global_reclass
-          // │       └── nodes
-          // │           └── cfg01.ceph-cluster-domain.local.yml
-
-          if (SYSTEM_GIT_URL == "") {
-            git.checkoutGitRepository("${env.WORKSPACE}/global_reclass/", RECLASS_MODEL_URL, RECLASS_MODEL_BRANCH, CREDENTIALS_ID)
-            } else {
-              dir("${env.WORKSPACE}/global_reclass/") {
-                if (!gerrit.gerritPatchsetCheckout(SYSTEM_GIT_URL, SYSTEM_GIT_REF, "HEAD", CREDENTIALS_ID)) {
-                  common.errorMsg("Failed to obtain system reclass with url: ${SYSTEM_GIT_URL} and ${SYSTEM_GIT_REF}")
-                  throw new RuntimeException("Failed to obtain system reclass")
+def StepPrepareCCenv(refchange, templateEnvFolder) {
+    // return a closure that performs the git clone/checkout
+    return {
+        // fetch needed sources
+        dir(templateEnvFolder) {
+            if (refchange) {
+                def gerritChange = gerrit.getGerritChange(GERRIT_NAME, GERRIT_HOST, GERRIT_CHANGE_NUMBER, CREDENTIALS_ID)
+                merged = gerritChange.status == "MERGED"
+                if (!merged) {
+                    checkouted = gerrit.gerritPatchsetCheckout([
+                        credentialsId: CREDENTIALS_ID
+                    ])
+                } else {
+                    // update global variable for success return from pipeline
+                    //alreadyMerged = true
+                    common.successMsg("Change ${GERRIT_CHANGE_NUMBER} is already merged, no need to gate them")
+                    currentBuild.result = 'ABORTED'
+                    throw new hudson.AbortException('change already merged')
                 }
-              }
+            } else {
+                git.checkoutGitRepository(templateEnvFolder, COOKIECUTTER_TEMPLATE_URL, COOKIECUTTER_TEMPLATE_BRANCH, CREDENTIALS_ID)
             }
-            // link all models, to use one global reclass
-            for (String context : contextFileList) {
-              def basename = GetBaseName(context, '.yml')
-              dir("${env.WORKSPACE}/model/${basename}"){
-                sh(script: 'mkdir -p classes/; ln -sfv ../../../global_reclass classes/system ')
-              }
-            }
-            // Save all models and all contexts. Warning! `h` flag has been used.
-            sh(script: "tar -chzf reclass.tar.gz --exclude='*@tmp' model contexts global_reclass", returnStatus: true)
-            archiveArtifacts artifacts: "reclass.tar.gz"
-          }
-
-          stage("test-contexts") {
-            stepsForParallel = [:]
-            common.infoMsg("Found: ${contextFileList.size()} contexts to test.")
-            for (String context : contextFileList) {
-              def basename = GetBaseName(context, '.yml')
-              def testEnv = "${env.WORKSPACE}/model/${basename}"
-              stepsForParallel.put("Test:${basename}", testModelStep(basename, testEnv))
-            }
-            parallel stepsForParallel
-            common.infoMsg('All tests done')
-          }
-
-          stage('Clean workspace directories') {
-            sh(script:  'find . -mindepth 1 -delete > /dev/null || true')
-          }
-
-} catch (Throwable e) {
-  currentBuild.result = "FAILURE"
-  currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
-  throw e
-  } finally {
-    def dummy = "dummy"
-      //FAILING common.sendNotification(currentBuild.result,"",["slack"])
+        }
     }
-  }
+}
+
+def StepGenerateModels(_contextFileList, _virtualenv, _templateEnvDir) {
+    return {
+        for (contextFile in _contextFileList) {
+            generateModel(contextFile, _virtualenv, _templateEnvDir)
+        }
+    }
+}
+
+timeout(time: 1, unit: 'HOURS') {
+    node(slaveNode) {
+        def templateEnvHead = "${env.WORKSPACE}/EnvHead/"
+        def templateEnvPatched = "${env.WORKSPACE}/EnvPatched/"
+        def contextFileListHead = []
+        def contextFileListPatched = []
+        def vEnv = "${env.WORKSPACE}/venv"
+
+        try {
+            sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
+            stage('Download and prepare CC env') {
+                // Prepare 2 envs - one for the patchset, and one for HEAD
+                paralellEnvs = [:]
+                paralellEnvs.failFast = true
+                paralellEnvs['downloadEnvHead'] = StepPrepareCCenv('', templateEnvHead)
+                paralellEnvs['downloadEnvPatched'] = StepPrepareCCenv(gerritRef, templateEnvPatched)
+                parallel paralellEnvs
+            }
+            stage("Check workflow_definition") {
+                // Check only for patchset
+                python.setupVirtualenv(vEnv, 'python2', [], "${templateEnvPatched}/requirements.txt")
+                common.infoMsg(python.runVirtualenvCommand(vEnv, "python ${templateEnvPatched}/workflow_definition_test.py"))
+            }
+
+            stage("generate models") {
+                dir("${templateEnvHead}/contexts") {
+                    for (String x : findFiles(glob: "*.yml")) {
+                        contextFileListHead.add(x)
+                    }
+                }
+                dir("${templateEnvPatched}/contexts") {
+                    for (String x : findFiles(glob: "*.yml")) {
+                        contextFileListPatched.add(x)
+                    }
+                }
+                // Generate over 2 envs - for the patchset, and for HEAD
+                paralellEnvs = [:]
+                paralellEnvs.failFast = true
+                paralellEnvs['GenerateEnvPatched'] = StepGenerateModels(contextFileListPatched, vEnv, templateEnvPatched)
+                paralellEnvs['GenerateEnvHead'] = StepGenerateModels(contextFileListHead, vEnv, templateEnvHead)
+                parallel paralellEnvs
+
+                // Collect artifacts
+                dir(templateEnvPatched) {
+                    // Collect only models. For backward compatibility - who knows, probably someone uses it.
+                    sh(script: "tar -czf model.tar.gz -C model ../contexts .", returnStatus: true)
+                    archiveArtifacts artifacts: "model.tar.gz"
+                }
+
+                // to be able to share reclass for all subenvs
+                // Also, makes artifact test more solid - use one reclass for all of sub-models.
+                // Archive Structure will be:
+                // tar.gz
+                // ├── contexts
+                // │   └── ceph.yml
+                // ├── global_reclass <<< reclass system
+                // ├── model
+                // │   └── ceph       <<< from `context basename`
+                // │       ├── classes
+                // │       │   ├── cluster
+                // │       │   └── system -> ../../../global_reclass
+                // │       └── nodes
+                // │           └── cfg01.ceph-cluster-domain.local.yml
+
+                if (SYSTEM_GIT_URL == "") {
+                    git.checkoutGitRepository("${env.WORKSPACE}/global_reclass/", RECLASS_MODEL_URL, RECLASS_MODEL_BRANCH, CREDENTIALS_ID)
+                } else {
+                    dir("${env.WORKSPACE}/global_reclass/") {
+                        if (!gerrit.gerritPatchsetCheckout(SYSTEM_GIT_URL, SYSTEM_GIT_REF, "HEAD", CREDENTIALS_ID)) {
+                            common.errorMsg("Failed to obtain system reclass with url: ${SYSTEM_GIT_URL} and ${SYSTEM_GIT_REF}")
+                            throw new RuntimeException("Failed to obtain system reclass")
+                        }
+                    }
+                }
+                // link all models, to use one global reclass
+                // For HEAD
+                dir(templateEnvHead) {
+                    for (String context : contextFileListHead) {
+                        def basename = common.GetBaseName(context, '.yml')
+                        dir("${templateEnvHead}/model/${basename}") {
+                            sh(script: 'mkdir -p classes/; ln -sfv ../../../../global_reclass classes/system ')
+                        }
+                    }
+                    // Save all models and all contexts. Warning! `h` flag must be used.
+                    sh(script: "tar -chzf head_reclass.tar.gz --exclude='*@tmp' model contexts global_reclass", returnStatus: true)
+                    archiveArtifacts artifacts: "head_reclass.tar.gz"
+                    // move for "Compare Pillars" stage
+                    sh(script: "mv -v head_reclass.tar.gz ${env.WORKSPACE}")
+                }
+                // For patched
+                dir(templateEnvPatched) {
+                    for (String context : contextFileListPatched) {
+                        def basename = common.GetBaseName(context, '.yml')
+                        dir("${templateEnvPatched}/model/${basename}") {
+                            sh(script: 'mkdir -p classes/; ln -sfv ../../../../global_reclass classes/system ')
+                        }
+                    }
+                    // Save all models and all contexts. Warning! `h` flag must be used.
+                    sh(script: "tar -chzf patched_reclass.tar.gz --exclude='*@tmp' model contexts global_reclass", returnStatus: true)
+                    archiveArtifacts artifacts: "patched_reclass.tar.gz"
+                    // move for "Compare Pillars" stage
+                    sh(script: "mv -v patched_reclass.tar.gz ${env.WORKSPACE}")
+                }
+            }
+
+            stage("Compare Pillars") {
+                // Compare patched and HEAD reclass pillars
+                compareRoot = "${env.WORKSPACE}/test_compare/"
+                sh(script: """
+                   mkdir -pv ${compareRoot}/new ${compareRoot}/old
+                   tar -xzf patched_reclass.tar.gz  --directory ${compareRoot}/new
+                   tar -xzf head_reclass.tar.gz  --directory ${compareRoot}/old
+                   """)
+                common.warningMsg('infra/secrets.yml has been skipped from compare!')
+                rezult = common.comparePillars(compareRoot, env.BUILD_URL, "-Ev \'infra/secrets.yml\'")
+                currentBuild.description = rezult
+            }
+            stage("test-contexts") {
+                // Test contexts for patched only
+                stepsForParallel = [:]
+                common.infoMsg("Found: ${contextFileListPatched.size()} patched contexts to test.")
+                for (String context : contextFileListPatched) {
+                    def basename = common.GetBaseName(context, '.yml')
+                    stepsForParallel.put("ContextPatchTest:${basename}", StepTestModel(basename))
+                }
+                parallel stepsForParallel
+                common.infoMsg('All tests done')
+            }
+
+            sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
+
+        } catch (Throwable e) {
+            currentBuild.result = "FAILURE"
+            currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
+            throw e
+        } finally {
+            def dummy = "dummy"
+            //FAILING common.sendNotification(currentBuild.result,"",["slack"])
+        }
+    }
 }
diff --git a/test-salt-formulas-pipeline.groovy b/test-salt-formulas-pipeline.groovy
index ca4eb67..0caef9c 100644
--- a/test-salt-formulas-pipeline.groovy
+++ b/test-salt-formulas-pipeline.groovy
@@ -4,28 +4,19 @@
  *  DEFAULT_GIT_URL
  *  CREDENTIALS_ID
  *  KITCHEN_TESTS_PARALLEL
- *  RUN_TEST_IN_DOCKER     If true, run test stage in docker
  *  SMOKE_TEST_DOCKER_IMG  Docker image for run test (default "ubuntu:16.04")
  */
 common = new com.mirantis.mk.Common()
 def gerrit = new com.mirantis.mk.Gerrit()
 def ruby = new com.mirantis.mk.Ruby()
 
-def gerritRef
-try {
-  gerritRef = GERRIT_REFSPEC
-} catch (MissingPropertyException e) {
-  gerritRef = null
-}
-
-def defaultGitRef, defaultGitUrl
-try {
-  defaultGitRef = DEFAULT_GIT_REF
-  defaultGitUrl = DEFAULT_GIT_URL
-} catch (MissingPropertyException e) {
-  defaultGitRef = null
-  defaultGitUrl = null
-}
+def gerritRef = env.GERRIT_REFSPEC ?: null
+def defaultGitRef = env.DEFAULT_GIT_REF ?: null
+def defaultGitUrl = env.DEFAULT_GIT_URL ?: null
+def slaveNode = env.SLAVE_NODE ?: 'python&&docker'
+def saltVersion = env.SALT_VERSION ?: ""
+def dockerLib = new com.mirantis.mk.Docker()
+def img = dockerLib.getImage(env.SMOKE_TEST_DOCKER_IMG, "ubuntu:16.04")
 
 def checkouted = false
 
@@ -70,9 +61,13 @@
     [$class: 'StringParameterValue', name: 'SALT_VERSION', value: SALT_VERSION]
   ]
 }
-timeout(time: 12, unit: 'HOURS') {
-  node("python") {
+timeout(time: 2, unit: 'HOURS') {
+  node(slaveNode) {
     try {
+      if (fileExists("tests/build")) {
+        common.infoMsg('Cleaning test env')
+        sh ("sudo rm -rf tests/build")
+      }
       stage("checkout") {
         if (gerritRef) {
           // job is triggered by Gerrit
@@ -102,39 +97,39 @@
           throw new Exception("Cannot checkout gerrit patchset, GERRIT_REFSPEC and DEFAULT_GIT_REF is null")
         }
     }
-    stage("test") {
-      if (checkouted) {
-        try {
-          saltVersion = SALT_VERSION
-            } catch (MissingPropertyException e) {
-          saltVersion = "" // default value is empty string, means latest
-        }
-        withEnv(["SALT_VERSION=${saltVersion}"]) {
-          boolean run_test_in_docker = (env.RUN_TEST_IN_DOCKER ?: false).toBoolean()
-          if (run_test_in_docker) {
-            def dockerLib = new com.mirantis.mk.Docker()
-            def img = dockerLib.getImage(env.SMOKE_TEST_DOCKER_IMG, "ubuntu:16.04")
-            def workspace = common.getWorkspace()
-            img.inside("-u root:root -v ${workspace}/:/formula/") {
-              sh("""cd /etc/apt/ && echo > sources.list \
-              && echo "deb [arch=amd64] http://cz.archive.ubuntu.com/ubuntu xenial main restricted universe multiverse" >> sources.list \
-              && echo "deb [arch=amd64] http://cz.archive.ubuntu.com/ubuntu xenial-updates main restricted universe multiverse" >> sources.list \
-              && echo "deb [arch=amd64] http://cz.archive.ubuntu.com/ubuntu xenial-backports main restricted universe multiverse" >> sources.list \
-              && echo 'Acquire::Languages "none";' > apt.conf.d/docker-no-languages \
-              && echo 'Acquire::GzipIndexes "true"; Acquire::CompressionTypes::Order:: "gz";' > apt.conf.d/docker-gzip-indexes \
-              && echo 'APT::Get::Install-Recommends "false"; APT::Get::Install-Suggests "false";' > apt.conf.d/docker-recommends \
-              && apt-get update \
-              && apt-get install -y git-core wget curl apt-transport-https \
-              && apt-get install -y python-pip python3-pip python-virtualenv python3-virtualenv python-yaml autoconf build-essential""")
-              sh("cd /formula/ && make clean && make test")
+      stage("test") {
+        if (checkouted) {
+          try {
+            // TODO add try/finally for image-stuck case. (copy-paste from SaltModelTesting)
+            withEnv(["SALT_VERSION=${saltVersion}"]) {
+              img.inside("-v ${env.WORKSPACE}/:/formula/ -u root:root --cpus=4 --ulimit nofile=4096:8192") {
+                sh('''#!/bin/bash -xe
+                      cd /etc/apt/
+                      echo "deb [arch=amd64] http://cz.archive.ubuntu.com/ubuntu xenial main restricted universe" > sources.list
+                      echo "deb [arch=amd64] http://cz.archive.ubuntu.com/ubuntu xenial-updates main restricted universe" >> sources.list
+                      echo 'Acquire::Languages "none";' > apt.conf.d/docker-no-languages
+                      echo 'Acquire::GzipIndexes "true"; Acquire::CompressionTypes::Order:: "gz";' > apt.conf.d/docker-gzip-indexes
+                      echo 'APT::Get::Install-Recommends "false"; APT::Get::Install-Suggests "false";' > apt.conf.d/docker-recommends
+                      apt-get update
+                      apt-get install -y git-core wget curl apt-transport-https
+                      apt-get install -y python-pip python3-pip python-virtualenv python3-virtualenv python-yaml autoconf build-essential
+                      cd /formula/
+                      make clean
+                      make test
+                      make clean
+                      ''')
+              }
             }
-          } else {
-            common.warningMsg("Those tests should be always be run in clean env! Recommends to use docker env!")
-            sh("make clean && make test")
+          }
+          finally {
+            if (fileExists("tests/build")) {
+              common.infoMsg('Cleaning test env')
+              sh ("sudo rm -rf tests/build")
+            }
           }
         }
+
       }
-    }
     stage("kitchen") {
         if (checkouted) {
           if (fileExists(".kitchen.yml")) {
diff --git a/update-package.groovy b/update-package.groovy
index 790e2ac..10f3a85 100644
--- a/update-package.groovy
+++ b/update-package.groovy
@@ -11,12 +11,10 @@
  *   TARGET_BATCH_LIVE          Batch size for the complete live package update on all nodes, empty string means apply to all targetted nodes.
  *
 **/
-
+pepperEnv = "pepperEnv"
+salt = new com.mirantis.mk.Salt()
 def common = new com.mirantis.mk.Common()
-def salt = new com.mirantis.mk.Salt()
 def python = new com.mirantis.mk.Python()
-
-def pepperEnv = "pepperEnv"
 def targetTestSubset
 def targetLiveSubset
 def targetLiveAll
@@ -25,6 +23,10 @@
 def packages
 def command
 def commandKwargs
+def installSaltStack(target, pkgs){
+    salt.runSaltProcessStep(pepperEnv, target, 'pkg.install', ["force_yes=True", "pkgs='$pkgs'"], null, true, 30)
+}
+
 timeout(time: 12, unit: 'HOURS') {
     node() {
         try {
@@ -89,8 +91,27 @@
             }
 
             stage('Apply package upgrades on sample') {
+                if(packages == null || packages.contains("salt-master") || packages.contains("salt-common") || packages.contains("salt-minion") || packages.contains("salt-api")){
+                    def saltTargets = (targetLiveSubset.split(' or ').collect{it as String})
+                    for(int i = 0; i < saltTargets.size(); i++ ){
+                        common.infoMsg("During salt-minion upgrade on cfg node, pipeline lose connectivy to salt-master for 2 min. If pipeline ended with error rerun pipeline again.")
+                        common.retry(10, 5) {
+                            if(salt.minionsReachable(pepperEnv, 'I@salt:master', "I@salt:master and ${saltTargets[i]}")){
+                                installSaltStack("I@salt:master and ${saltTargets[i]}", '["salt-master", "salt-common", "salt-api", "salt-minion"]')
+                            }
+                            if(salt.minionsReachable(pepperEnv, 'I@salt:master', "I@salt:minion and not I@salt:master and ${saltTargets[i]}")){
+                                installSaltStack("I@salt:minion and not I@salt:master and ${saltTargets[i]}", '["salt-minion"]')
+                            }
+                        }
+                    }
+                }
                 out = salt.runSaltCommand(pepperEnv, 'local', ['expression': targetLiveSubset, 'type': 'compound'], command, null, packages, commandKwargs)
                 salt.printSaltCommandResult(out)
+                for(value in out.get("return")[0].values()){
+                    if (value.containsKey('result') && value.result == false) {
+                        throw new Exception("The package upgrade on sample node has failed. Please check the Salt run result above for more information.")
+                    }
+                }
             }
 
             stage('Confirm package upgrades on all nodes') {
@@ -100,8 +121,30 @@
             }
 
             stage('Apply package upgrades on all nodes') {
+
+                if(packages == null || packages.contains("salt-master") || packages.contains("salt-common") || packages.contains("salt-minion") || packages.contains("salt-api")){
+                    def saltTargets = (targetLiveAll.split(' or ').collect{it as String})
+                    for(int i = 0; i < saltTargets.size(); i++ ){
+                        common.infoMsg("During salt-minion upgrade on cfg node, pipeline lose connectivy to salt-master for 2 min. If pipeline ended with error rerun pipeline again.")
+                        common.retry(10, 5) {
+                            if(salt.minionsReachable(pepperEnv, 'I@salt:master', "I@salt:master and ${saltTargets[i]}")){
+                                installSaltStack("I@salt:master and ${saltTargets[i]}", '["salt-master", "salt-common", "salt-api", "salt-minion"]')
+                            }
+                            if(salt.minionsReachable(pepperEnv, 'I@salt:master', "I@salt:minion and not I@salt:master and ${saltTargets[i]}")){
+                                installSaltStack("I@salt:minion and not I@salt:master and ${saltTargets[i]}", '["salt-minion"]')
+                            }
+                        }
+                    }
+                }
+
                 out = salt.runSaltCommand(pepperEnv, 'local', ['expression': targetLiveAll, 'type': 'compound'], command, null, packages, commandKwargs)
                 salt.printSaltCommandResult(out)
+                for(value in out.get("return")[0].values()){
+                    if (value.containsKey('result') && value.result == false) {
+                        throw new Exception("The package upgrade on sample node has failed. Please check the Salt run result above for more information.")
+                    }
+                }
+                common.warningMsg("Pipeline has finished successfully, but please, check if any packages have been kept back.")
             }
 
         } catch (Throwable e) {
diff --git a/upgrade-mcp-release.groovy b/upgrade-mcp-release.groovy
index fb291ab..d5c0e77 100644
--- a/upgrade-mcp-release.groovy
+++ b/upgrade-mcp-release.groovy
@@ -64,7 +64,12 @@
     archiveArtifacts artifacts: "$filename"
 }
 
-timeout(time: 12, unit: 'HOURS') {
+def pipelineTimeout = 12
+if (common.validInputParam('PIPELINE_TIMEOUT') && PIPELINE_TIMEOUT.isInteger()) {
+    pipelineTimeout = "${PIPELINE_TIMEOUT}".toInteger()
+}
+
+timeout(time: pipelineTimeout, unit: 'HOURS') {
     node("python") {
         try {
             def gitMcpVersion = MCP_VERSION