Merge "Add addons upgrade ability to kubernetes upgrade pipeline"
diff --git a/.gitreview b/.gitreview
index 9075ea3..ce0aa41 100644
--- a/.gitreview
+++ b/.gitreview
@@ -1,4 +1,4 @@
 [gerrit]
-host=gerrit.mcp.mirantis.net
+host=gerrit.mcp.mirantis.com
 port=29418
 project=mk/mk-pipelines.git
diff --git a/build-debian-packages-prometheus-relay.groovy b/build-debian-packages-prometheus-relay.groovy
index f101f57..ea19c9d 100644
--- a/build-debian-packages-prometheus-relay.groovy
+++ b/build-debian-packages-prometheus-relay.groovy
@@ -13,7 +13,7 @@
                 sh("rm -rf * || true")
             }
 
-            def workingDir = "src/gerrit.mcp.mirantis.net/debian"
+            def workingDir = "src/gerrit.mcp.mirantis.com/debian"
             stage("checkout") {
                 git.checkoutGitRepository(
                     "${workingDir}/prometheus-relay",
@@ -53,7 +53,7 @@
                             export GOROOT=\$PWD/go &&
                             export GOPATH=\$PWD &&
                             export PATH=\$PATH:\$GOPATH/bin:\$GOROOT/bin &&
-                            cd src/gerrit.mcp.mirantis.net/debian/prometheus-relay &&
+                            cd src/gerrit.mcp.mirantis.com/debian/prometheus-relay &&
                             make""")
                     }
                     archiveArtifacts artifacts: "${workingDir}/prometheus-relay/build/*.deb"
diff --git a/docker-mirror-images.groovy b/docker-mirror-images.groovy
index d88c9d1..92fea8e 100644
--- a/docker-mirror-images.groovy
+++ b/docker-mirror-images.groovy
@@ -64,7 +64,9 @@
                     // https://github.com/jenkinsci/docker-workflow-plugin/blob/docker-workflow-1.17/src/main/resources/org/jenkinsci/plugins/docker/workflow/Docker.groovy#L168-L170
                     sh("docker tag ${srcImage.id} ${targetImageFull}")
                     common.infoMsg("Attempt to push docker image into remote registry: ${env.REGISTRY_URL}")
-                    sh("docker push ${targetImageFull}")
+                    docker.withRegistry(env.REGISTRY_URL, env.TARGET_REGISTRY_CREDENTIALS_ID) {
+                        sh("docker push ${targetImageFull}")
+                    }
                     if (targetImageFull.contains(externalMarker)) {
                         external = true
                     }
diff --git a/gating-pipeline.groovy b/gating-pipeline.groovy
index 8db6806..aeaee9a 100644
--- a/gating-pipeline.groovy
+++ b/gating-pipeline.groovy
@@ -9,6 +9,8 @@
 def gerrit = new com.mirantis.mk.Gerrit()
 def ssh = new com.mirantis.mk.Ssh()
 
+slaveNode = env.SLAVE_NODE ?: 'docker'
+giveVerify = false
 
 @NonCPS
 def isJobExists(jobName) {
@@ -22,12 +24,15 @@
             gerritVars += "\n${envVar.key}: '${envVar.value}'"
         }
     }
-    build job: jobName, parameters: [
-        [$class: 'TextParameterValue', name: 'EXTRA_VARIABLES_YAML', value: gerritVars ]
+    testJob = build job: jobName, parameters: [
+        [$class: 'TextParameterValue', name: 'EXTRA_VARIABLES_YAML', value: gerritVars]
     ]
+    if (testJob.getResult() != 'SUCCESS') {
+        error("Gate job ${testJob.getBuildUrl().toString()}  finished with ${testJob.getResult()} !")
+    }
+    giveVerify = true
 }
 
-slaveNode = env.SLAVE_NODE ?: 'docker'
 
 timeout(time: 12, unit: 'HOURS') {
     node(slaveNode) {
@@ -37,7 +42,6 @@
             ssh.ensureKnownHosts(GERRIT_HOST)
             def gerritChange = gerrit.getGerritChange(GERRIT_NAME, GERRIT_HOST, GERRIT_CHANGE_NUMBER, CREDENTIALS_ID, true)
             def doSubmit = false
-            def giveVerify = false
             stage("test") {
                 if (gerritChange.status != "MERGED" && !SKIP_TEST.equals("true")) {
                     // test max CodeReview
@@ -56,10 +60,10 @@
                             gerritProject = gerritProject + "-latest"
                         }
                         def testJob = String.format("test-%s-%s", jobsNamespace, gerritProject)
-                        if (gerritProject == "cookiecutter-templates") {
-                            callJobWithExtraVars("test-mk-cookiecutter-templates")
-                        } else if (gerritProject == "reclass-system") {
-                            callJobWithExtraVars("test-salt-model-reclass-system")
+                        if (env.GERRIT_PROJECT == 'mk/cookiecutter-templates') {
+                            callJobWithExtraVars('test-mk-cookiecutter-templates')
+                        } else if (env.GERRIT_PROJECT == 'salt-models/reclass-system') {
+                            callJobWithExtraVars('test-salt-model-reclass-system')
                         } else {
                             if (isJobExists(testJob)) {
                                 common.infoMsg("Test job ${testJob} found, running")
@@ -100,4 +104,4 @@
             throw e
         }
     }
-}
\ No newline at end of file
+}
diff --git a/generate-cookiecutter-products.groovy b/generate-cookiecutter-products.groovy
index e1cd638..5e31d36 100644
--- a/generate-cookiecutter-products.groovy
+++ b/generate-cookiecutter-products.groovy
@@ -199,7 +199,7 @@
                     }
                 }
 
-                def commonScriptsRepoUrl = 'https://gerrit.mcp.mirantis.net/mcp/mcp-common-scripts'
+                def commonScriptsRepoUrl = 'https://gerrit.mcp.mirantis.com/mcp/mcp-common-scripts'
                 checkout([
                     $class           : 'GitSCM',
                     branches         : [[name: 'FETCH_HEAD'],],
diff --git a/opencontrail-upgrade.groovy b/opencontrail-upgrade.groovy
index af96600..4d9d498 100644
--- a/opencontrail-upgrade.groovy
+++ b/opencontrail-upgrade.groovy
@@ -33,7 +33,7 @@
 def CONTROL_PKGS = 'contrail-config contrail-config-openstack contrail-control contrail-dns contrail-lib contrail-nodemgr contrail-utils contrail-web-controller contrail-web-core neutron-plugin-contrail python-contrail'
 def ANALYTIC_PKGS = 'contrail-analytics contrail-lib contrail-nodemgr contrail-utils python-contrail'
 def CMP_PKGS = 'contrail-lib contrail-nodemgr contrail-utils contrail-vrouter-agent contrail-vrouter-utils python-contrail python-contrail-vrouter-api python-opencontrail-vrouter-netns contrail-vrouter-dkms contrail-nova-driver'
-def KERNEL_MODULE_RELOAD = 'service supervisor-vrouter stop;ifdown vhost0;rmmod vrouter;modprobe vrouter;ifup vhost0;service supervisor-vrouter start;'
+def KERNEL_MODULE_RELOAD = 'service supervisor-vrouter stop; rmmod vrouter; sync && echo 3 > /proc/sys/vm/drop_caches && echo 1 > /proc/sys/vm/compact_memory; service supervisor-vrouter start'
 
 def void runCommonCommands(target, command, args, check, salt, pepperEnv, common) {
 
diff --git a/opencontrail40-upgrade.groovy b/opencontrail40-upgrade.groovy
index bf35d97..52a0d23 100644
--- a/opencontrail40-upgrade.groovy
+++ b/opencontrail40-upgrade.groovy
@@ -31,7 +31,7 @@
 def thirdPartyAnalyticsPkgsToRemove = 'redis-server,supervisor'
 //def cmpPkgs = ['contrail-lib', 'contrail-nodemgr', 'contrail-utils', 'contrail-vrouter-agent', 'contrail-vrouter-utils', 'python-contrail', 'python-contrail-vrouter-api', 'python-opencontrail-vrouter-netns', 'contrail-vrouter-dkms']
 def CMP_PKGS = 'contrail-lib contrail-nodemgr contrail-utils contrail-vrouter-agent contrail-vrouter-utils python-contrail python-contrail-vrouter-api python-opencontrail-vrouter-netns contrail-vrouter-dkms'
-def KERNEL_MODULE_RELOAD = 'service supervisor-vrouter stop;ifdown vhost0;rmmod vrouter;modprobe vrouter;ifup vhost0;service supervisor-vrouter start;'
+def KERNEL_MODULE_RELOAD = 'service supervisor-vrouter stop; rmmod vrouter; sync && echo 3 > /proc/sys/vm/drop_caches && echo 1 > /proc/sys/vm/compact_memory; service contrail-vrouter-agent start; service contrail-vrouter-nodemgr start'
 def analyticsServices = ['supervisor-analytics', 'supervisor-database', 'zookeeper', 'redis-server']
 def configServices = ['contrail-webui-jobserver', 'contrail-webui-webserver', 'supervisor-config', 'supervisor-database', 'zookeeper']
 def controlServices = ['ifmap-server', 'supervisor-control', 'redis-server']
diff --git a/openstack-control-upgrade.groovy b/openstack-control-upgrade.groovy
index 6a6eea2..5febb3c 100644
--- a/openstack-control-upgrade.groovy
+++ b/openstack-control-upgrade.groovy
@@ -159,6 +159,7 @@
     for (target in upgradeTargets){
       common.stageWrapper(upgradeStageMap, "Pre upgrade", target, interactive) {
         openstack.runOpenStackUpgradePhase(env, target, 'pre')
+        openstack.runOpenStackUpgradePhase(env, target, 'verify')
       }
     }
 
diff --git a/promote-vcp-images.groovy b/promote-vcp-images.groovy
index 181eafa..7b4f80e 100644
--- a/promote-vcp-images.groovy
+++ b/promote-vcp-images.groovy
@@ -17,6 +17,8 @@
 slaveNode = env.SLAVE_NODE ?: 'jsl23.mcp.mirantis.net'
 def job_env = env.getEnvironment().findAll { k, v -> v }
 def verify = job_env.VERIFY_DOWNLOAD ?: true
+def overwrite = job_env.FORCE_OVERWRITE.toBoolean() ?: false
+
 
 
 timeout(time: 6, unit: 'HOURS') {
@@ -91,7 +93,7 @@
                         remoteImageStatus = ''
                         remoteImageStatus = sh(script: "wget  --auth-no-challenge --spider ${targetImageUrl} 2>/dev/null", returnStatus: true)
                         // wget return code 8 ,if file not exist
-                        if (remoteImageStatus != '8') {
+                        if (remoteImageStatus != 8 && !overwrite) {
                             error("Attempt to overwrite existing release! Target: ${targetImage} already exist!")
                         }
                     }
diff --git a/release-mcp-version.groovy b/release-mcp-version.groovy
index 4cae93c..470f338 100644
--- a/release-mcp-version.groovy
+++ b/release-mcp-version.groovy
@@ -46,7 +46,7 @@
         [$class: 'StringParameterValue', name: 'TARGET_REGISTRY_CREDENTIALS_ID', value: dockerCredentials],
         [$class: 'StringParameterValue', name: 'REGISTRY_URL', value: dockerRegistryUrl],
         [$class: 'StringParameterValue', name: 'IMAGE_TAG', value: targetTag],
-        [$class: 'StringParameterValue', name: 'IMAGE_LIST', value: imageList],
+        [$class: 'TextParameterValue', name: 'IMAGE_LIST', value: imageList],
         [$class: 'StringParameterValue', name: 'SOURCE_IMAGE_TAG', value: sourceImageTag],
     ]
 }
@@ -67,7 +67,7 @@
 
 def triggerGitTagJob(gitRepoList, gitCredentials, tag, sourceTag) {
     build job: "tag-git-repos-all", parameters: [
-        [$class: 'StringParameterValue', name: 'GIT_REPO_LIST', value: gitRepoList],
+        [$class: 'TextParameterValue', name: 'GIT_REPO_LIST', value: gitRepoList],
         [$class: 'StringParameterValue', name: 'GIT_CREDENTIALS', value: gitCredentials],
         [$class: 'StringParameterValue', name: 'TAG', value: tag],
         [$class: 'StringParameterValue', name: 'SOURCE_TAG', value: sourceTag],
@@ -76,9 +76,10 @@
 
 def triggerPromoteVCPJob(VcpImageList, tag, sourceTag) {
     build job: "promote-vcp-images-all", parameters: [
-        [$class: 'StringParameterValue', name: 'VCP_IMAGE_LIST', value: VcpImageList],
+        [$class: 'TextParameterValue', name: 'VCP_IMAGE_LIST', value: VcpImageList],
         [$class: 'StringParameterValue', name: 'TAG', value: tag],
-        [$class: 'StringParameterValue', name: 'SOURCE_TAG', value: sourceTag]
+        [$class: 'StringParameterValue', name: 'SOURCE_TAG', value: sourceTag],
+        [$class: 'BooleanParameterValue', name: 'FORCE_OVERWRITE', value: true],
     ]
 }
 
diff --git a/sync-http-to-s3.groovy b/sync-http-to-s3.groovy
new file mode 100644
index 0000000..108a394
--- /dev/null
+++ b/sync-http-to-s3.groovy
@@ -0,0 +1,29 @@
+def common = new com.mirantis.mk.Common()
+
+
+node("docker") {
+    stage('Prepare') {
+        img = docker.image(IMAGE)
+        img.pull()
+    }
+    stage('Upload') {
+        FILENAMES.split().each { filename ->
+            url = "${SOURCE}/${filename}"
+            img.withRun("--entrypoint='/bin/bash'") { c ->
+                withCredentials([[$class          : 'UsernamePasswordMultiBinding', credentialsId: 'aws-s3',
+                                  usernameVariable: 'S3_ACCESS_KEY', passwordVariable: 'S3_SECRET_KEY']]) {
+                    img.inside("-e S3_ACCESS_KEY=${S3_ACCESS_KEY} -e S3_SECRET_KEY=${S3_SECRET_KEY}") {
+                        common.retry(3, 5) {
+                            sh(script: "wget --progress=dot:giga -O ${filename} ${url}", returnStdout: true)
+                            sh(script: "/usr/local/bin/s4cmd put ${filename} ${DEST}/${filename}", returnStdout: true)
+                        }
+                    }
+                }
+
+
+            }
+            sh("rm ${filename}")
+        }
+    }
+    deleteDir()
+}
diff --git a/test-cookiecutter-reclass.groovy b/test-cookiecutter-reclass.groovy
index 17590c4..6f73570 100644
--- a/test-cookiecutter-reclass.groovy
+++ b/test-cookiecutter-reclass.groovy
@@ -29,7 +29,7 @@
 alreadyMerged = false
 gerritConData = [credentialsId       : env.CREDENTIALS_ID,
                  gerritName          : env.GERRIT_NAME ?: 'mcp-jenkins',
-                 gerritHost          : env.GERRIT_HOST ?: 'gerrit.mcp.mirantis.net',
+                 gerritHost          : env.GERRIT_HOST ?: 'gerrit.mcp.mirantis.com',
                  gerritScheme        : env.GERRIT_SCHEME ?: 'ssh',
                  gerritPort          : env.GERRIT_PORT ?: '29418',
                  gerritRefSpec       : null,
@@ -37,7 +37,7 @@
                  withWipeOut         : true,
                  GERRIT_CHANGE_NUMBER: null]
 //
-//ccTemplatesRepo = env.COOKIECUTTER_TEMPLATE_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.net:29418/mk/cookiecutter-templates'
+//ccTemplatesRepo = env.COOKIECUTTER_TEMPLATE_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.com:29418/mk/cookiecutter-templates'
 gerritDataCCHEAD = [:]
 gerritDataCC = [:]
 gerritDataCC << gerritConData
@@ -45,7 +45,7 @@
 gerritDataCC['gerritRefSpec'] = env.COOKIECUTTER_TEMPLATE_REF ?: null
 gerritDataCC['gerritProject'] = 'mk/cookiecutter-templates'
 //
-//reclassSystemRepo = env.RECLASS_SYSTEM_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.net:29418/salt-models/reclass-system'
+//reclassSystemRepo = env.RECLASS_SYSTEM_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.com:29418/salt-models/reclass-system'
 gerritDataRSHEAD = [:]
 gerritDataRS = [:]
 gerritDataRS << gerritConData
diff --git a/test-openscap-pipeline.groovy b/test-openscap-pipeline.groovy
index 9984b20..244126b 100644
--- a/test-openscap-pipeline.groovy
+++ b/test-openscap-pipeline.groovy
@@ -34,6 +34,9 @@
   * @param results              The scanning results
   */
 def uploadResultToDashboard(apiUrl, cloudName, nodeName, results) {
+    def common = new com.mirantis.mk.Common()
+    def http = new com.mirantis.mk.Http()
+
     // Yes, we do not care of performance and will create at least 4 requests per each result
     def requestData = [:]
 
@@ -110,11 +113,12 @@
     def scanUUID = UUID.randomUUID().toString()
 
     def artifactsArchiveName = "openscap-${scanUUID}.zip"
-    def resultsBaseDir = "/tmp/openscap/${scanUUID}"
-    def artifactsDir = "${env.WORKSPACE}/openscap/${scanUUID}/artifacts"
+    def resultsBaseDir = "/var/log/openscap/${scanUUID}"
+    def artifactsDir = "openscap"
 
     def liveMinions
 
+
     stage ('Setup virtualenv for Pepper') {
         python.setupPepperVirtualenv(pepperEnv, SALT_MASTER_URL, SALT_MASTER_CREDENTIALS)
     }
@@ -128,6 +132,11 @@
 
         common.infoMsg("Scan UUID: ${scanUUID}")
 
+        // Clean all results before proceeding with results from every minion
+        dir(artifactsDir) {
+            deleteDir()
+        }
+
         for (minion in liveMinions) {
 
             // Iterate oscap evaluation over the benchmarks
@@ -136,11 +145,19 @@
 
                 // Remove extension from the benchmark name
                 def benchmarkPathWithoutExtension = benchmarkFilePath.replaceFirst('[.][^.]+$', '')
+
+                // Get benchmark name
+                def benchmarkName = benchmarkPathWithoutExtension.tokenize('/')[-1]
+
                 // And build resultsDir based on this path
                 def resultsDir = "${resultsBaseDir}/${benchmarkPathWithoutExtension}"
 
                 def benchmarkFile = "${benchmarksDir}${benchmarkFilePath}"
 
+                def nodeShortName = minion.tokenize('.')[0]
+
+                def archiveName = "${scanUUID}_${nodeShortName}_${benchmarkName}.tar"
+
                 // Evaluate the benchmark
                 salt.runSaltProcessStep(pepperEnv, minion, 'oscap.eval', [
                     'xccdf', benchmarkFile, "results_dir=${resultsDir}",
@@ -148,6 +165,16 @@
                     "tailoring_id=${xccdfTailoringId}"
                 ])
 
+                salt.cmdRun(pepperEnv, minion, "tar -cf /tmp/${archiveName} -C ${resultsBaseDir} .")
+                fileContents = salt.cmdRun(pepperEnv, minion, "cat /tmp/${archiveName}", true, null, false)['return'][0].values()[0].replaceAll('Salt command execution success', '')
+
+                sh "mkdir -p ${artifactsDir}/${scanUUID}/${nodeShortName}"
+                writeFile file: "${archiveName}", text: fileContents
+                sh "tar --strip-components 1 -xf ${archiveName} --directory ${artifactsDir}/${scanUUID}/${nodeShortName}; rm -f ${archiveName}"
+
+                // Remove archive which is not needed anymore
+                salt.runSaltProcessStep(pepperEnv, minion, 'file.remove', "/tmp/${archiveName}")
+
                 // Attempt to upload the scanning results to the dashboard
                 if (UPLOAD_TO_DASHBOARD.toBoolean()) {
                     if (common.validInputParam('DASHBOARD_API_URL')) {
@@ -159,6 +186,12 @@
                 }
             }
         }
+
+        // Prepare archive
+        sh "tar -cJf ${artifactsDir}.tar.xz ${artifactsDir}"
+
+        // Archive the build output artifacts
+        archiveArtifacts artifacts: "*.xz"
     }
 
 /*  // Will be implemented later
diff --git a/test-salt-models-pipeline.groovy b/test-salt-models-pipeline.groovy
index f4467c1..729fdb4 100644
--- a/test-salt-models-pipeline.groovy
+++ b/test-salt-models-pipeline.groovy
@@ -61,36 +61,26 @@
 common = new com.mirantis.mk.Common()
 
 def setupRunner() {
-
-  def branches = [:]
-  for (int i = 0; i < PARALLEL_NODE_GROUP_SIZE.toInteger() && i < futureNodes.size(); i++) {
-    branches["Runner ${i}"] = {
-      while (futureNodes && !failedNodes) {
-        def currentNode = futureNodes[0] ? futureNodes[0] : null
+    def branches = [:]
+    branches.failFast = true
+    for(int i = 0; i < futureNodes.size(); i++) {
+        def currentNode = futureNodes[i] ? futureNodes[i] : null
         if (!currentNode) {
-          continue
+            continue
         }
-
-        def clusterName = currentNode[2]
-        futureNodes.remove(currentNode)
-        try {
-            triggerTestNodeJob(currentNode[0], currentNode[1], currentNode[2], currentNode[3], currentNode[4])
-        } catch (Exception e) {
-          if (e.getMessage().contains("completed with status ABORTED")) {
-            common.warningMsg("Test of ${clusterName} failed because the test was aborted :  ${e}")
-            futureNodes << currentNode
-          } else {
-            common.warningMsg("Test of ${clusterName} failed :  ${e}")
-            failedNodes = true
-          }
+        branches["Runner ${i}"] = {
+            try {
+                triggerTestNodeJob(currentNode[0], currentNode[1], currentNode[2], currentNode[3], currentNode[4])
+            } catch (Exception e) {
+                  common.warningMsg("Test of ${currentNode[2]} failed :  ${e}")
+                  throw e
+            }
         }
-      }
     }
-  }
 
-  if (branches) {
-    parallel branches
-  }
+    if (branches) {
+        common.runParallel(branches, PARALLEL_NODE_GROUP_SIZE.toInteger())
+    }
 }
 
 def triggerTestNodeJob(defaultGitUrl, defaultGitRef, clusterName, testTarget, formulasSource) {