Merge "Add missed definitions in stacklight upgrade pipeline"
diff --git a/cloud-update.groovy b/cloud-update.groovy
index d58d1e0..9fdeaad 100644
--- a/cloud-update.groovy
+++ b/cloud-update.groovy
@@ -573,14 +573,12 @@
     }
     try {
         salt.minionsReachable(pepperEnv, 'I@salt:master', tgt)
-        // purge and setup previous repos
-        salt.enforceState(pepperEnv, tgt, 'linux.system.repo')
     } catch (Exception e) {
         common.errorMsg(e)
         if (INTERACTIVE.toBoolean()) {
-            input message: "Salt state linux.system.repo on ${tgt} failed. Do you want to PROCEED?."
+            input message: "Not all minions ${tgt} returned after snapshot revert. Do you want to PROCEED?"
         } else {
-            throw new Exception("Salt state linux.system.repo on ${tgt} failed")
+            throw new Exception("Not all minions ${tgt} returned after snapshot revert")
         }
     }
 }
diff --git a/cvp-func.groovy b/cvp-func.groovy
index 120bb9d..0c657a5 100644
--- a/cvp-func.groovy
+++ b/cvp-func.groovy
@@ -26,7 +26,6 @@
 def saltMaster
 def artifacts_dir = 'validation_artifacts/'
 def remote_artifacts_dir = '/root/qa_results/'
-def container_name = "${env.JOB_NAME}"
 
 node() {
     try{
@@ -39,7 +38,7 @@
             if (!keystone_creds) {
                 keystone_creds = validate._get_keystone_creds_v2(saltMaster)
             }
-            validate.runContainer(saltMaster, TARGET_NODE, TEST_IMAGE, container_name, keystone_creds)
+            validate.runContainer(saltMaster, TARGET_NODE, TEST_IMAGE, 'cvp', keystone_creds)
             validate.configureContainer(saltMaster, TARGET_NODE, PROXY, TOOLS_REPO, TEMPEST_REPO, TEMPEST_ENDPOINT_TYPE)
         }
 
diff --git a/cvp-ha.groovy b/cvp-ha.groovy
index 649ac6a..414ab46 100644
--- a/cvp-ha.groovy
+++ b/cvp-ha.groovy
@@ -28,7 +28,6 @@
 def saltMaster
 def artifacts_dir = 'validation_artifacts/'
 def remote_artifacts_dir = '/root/qa_results/'
-def container_name = "${env.JOB_NAME}"
 def current_target_node = null
 def first_node = null
 def tempest_result = ''
@@ -45,7 +44,7 @@
                 if (!keystone_creds) {
                     keystone_creds = validate._get_keystone_creds_v2(saltMaster)
                 }
-                validate.runContainer(saltMaster, TARGET_NODE, TEST_IMAGE, container_name, keystone_creds)
+                validate.runContainer(saltMaster, TARGET_NODE, TEST_IMAGE, 'cvp', keystone_creds)
                 validate.configureContainer(saltMaster, TEMPEST_TARGET_NODE, PROXY, TOOLS_REPO, TEMPEST_REPO)
             }
 
diff --git a/cvp-perf.groovy b/cvp-perf.groovy
index 7938572..74c9a63 100644
--- a/cvp-perf.groovy
+++ b/cvp-perf.groovy
@@ -21,7 +21,6 @@
 
 def artifacts_dir = 'validation_artifacts/'
 def remote_artifacts_dir = '/root/qa_results/'
-def container_name = "${env.JOB_NAME}"
 def saltMaster
 
 node() {
@@ -35,7 +34,7 @@
             if (!keystone_creds) {
                 keystone_creds = validate._get_keystone_creds_v2(saltMaster)
             }
-            validate.runContainer(saltMaster, TARGET_NODE, TEST_IMAGE, container_name, keystone_creds)
+            validate.runContainer(saltMaster, TARGET_NODE, TEST_IMAGE, 'cvp', keystone_creds)
             validate.configureContainer(saltMaster, TARGET_NODE, PROXY, TOOLS_REPO, "")
         }
 
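The three CVP pipelines above (cvp-func, cvp-ha, cvp-perf) drop the per-job container name derived from env.JOB_NAME and start the test container under the fixed name 'cvp'. A minimal sketch of the resulting pattern, reusing the validate and salt helpers these pipelines already instantiate and the argument order shown in cvp-func; the teardown line is a hypothetical illustration of why a constant name is convenient, not part of this change:

    // container name is now identical across all CVP jobs
    def containerName = 'cvp'
    validate.runContainer(saltMaster, TARGET_NODE, TEST_IMAGE, containerName, keystone_creds)
    validate.configureContainer(saltMaster, TARGET_NODE, PROXY, TOOLS_REPO, TEMPEST_REPO, TEMPEST_ENDPOINT_TYPE)
    // hypothetical cleanup: a fixed name lets any later step remove the container without knowing env.JOB_NAME
    salt.cmdRun(saltMaster, TARGET_NODE, "docker rm -f ${containerName} || true")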
diff --git a/generate-cookiecutter-products.groovy b/generate-cookiecutter-products.groovy
index 7609103..25473fb 100644
--- a/generate-cookiecutter-products.groovy
+++ b/generate-cookiecutter-products.groovy
@@ -29,8 +29,6 @@
             def clusterDomain = templateContext.default_context.cluster_domain
             def clusterName = templateContext.default_context.cluster_name
             def saltMaster = templateContext.default_context.salt_master_hostname
-            def localRepositories = templateContext.default_context.local_repositories.toBoolean()
-            def offlineDeployment = templateContext.default_context.offline_deployment.toBoolean()
             def cutterEnv = "${env.WORKSPACE}/cutter"
             def jinjaEnv = "${env.WORKSPACE}/jinja"
             def outputDestination = "${modelEnv}/classes/cluster/${clusterName}"
@@ -99,63 +97,9 @@
                 git.commitGitChanges(modelEnv, "Added new shared reclass submodule", "${user}@localhost", "${user}")
             }
 
-            def productList = ["infra", "cicd", "opencontrail", "kubernetes", "openstack", "oss", "stacklight", "ceph"]
-            for (product in productList) {
-
-                // get templateOutputDir and productDir
-                templateOutputDir = "${env.WORKSPACE}/output/${product}"
-                productDir = product
-                templateDir = "${templateEnv}/cluster_product/${productDir}"
-                // Bw for 2018.8.1 and older releases
-                if (product.startsWith("stacklight") && (!fileExists(templateDir))) {
-                    common.warningMsg("Old release detected! productDir => 'stacklight2' ")
-                    productDir = "stacklight2"
-                    templateDir = "${templateEnv}/cluster_product/${productDir}"
-                }
-
-                if (product == "infra" || (templateContext.default_context["${product}_enabled"]
-                    && templateContext.default_context["${product}_enabled"].toBoolean())) {
-
-                    common.infoMsg("Generating product " + product + " from " + templateDir + " to " + templateOutputDir)
-
-                    sh "rm -rf ${templateOutputDir} || true"
-                    sh "mkdir -p ${templateOutputDir}"
-                    sh "mkdir -p ${outputDestination}"
-
-                    python.setupCookiecutterVirtualenv(cutterEnv)
-                    python.buildCookiecutterTemplate(templateDir, COOKIECUTTER_TEMPLATE_CONTEXT, templateOutputDir, cutterEnv, templateBaseDir)
-                    sh "mv -v ${templateOutputDir}/${clusterName}/* ${outputDestination}"
-                } else {
-                    common.warningMsg("Product " + product + " is disabled")
-                }
-            }
-
-            if (localRepositories && !offlineDeployment) {
-                def aptlyModelUrl = templateContext.default_context.local_model_url
-                dir(path: modelEnv) {
-                    ssh.agentSh "git submodule add \"${aptlyModelUrl}\" \"classes/cluster/${clusterName}/cicd/aptly\""
-                    if (!(mcpVersion in ["nightly", "testing", "stable"])) {
-                        ssh.agentSh "cd \"classes/cluster/${clusterName}/cicd/aptly\";git fetch --tags;git checkout ${mcpVersion}"
-                    }
-                }
-            }
-
-            stage('Generate new SaltMaster node') {
-                def nodeFile = "${modelEnv}/nodes/${saltMaster}.${clusterDomain}.yml"
-                def nodeString = """classes:
-- cluster.${clusterName}.infra.config
-parameters:
-  _param:
-    linux_system_codename: xenial
-    reclass_data_revision: master
-  linux:
-    system:
-      name: ${saltMaster}
-      domain: ${clusterDomain}
-    """
-                sh "mkdir -p ${modelEnv}/nodes/"
-                writeFile(file: nodeFile, text: nodeString)
-
+            stage('Generate model') {
+                python.setupCookiecutterVirtualenv(cutterEnv)
+                python.generateModel(COOKIECUTTER_TEMPLATE_CONTEXT, 'default_context', saltMaster, cutterEnv, modelEnv, templateEnv, false)
                 git.commitGitChanges(modelEnv, "Create model ${clusterName}", "${user}@localhost", "${user}")
             }
 
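The per-product cookiecutter loop and the hand-written SaltMaster node file are replaced here by a single call into the shared pipeline library. Based only on the code removed above, a hedged reading of the arguments at this call site (an interpretation of the call shown in the diff, not the library's documented signature; the meaning of the trailing false flag is not visible in this change):

    python.setupCookiecutterVirtualenv(cutterEnv)
    python.generateModel(
        COOKIECUTTER_TEMPLATE_CONTEXT, // YAML context previously fed to buildCookiecutterTemplate per product
        'default_context',             // top-level key of that context
        saltMaster,                    // hostname for the generated nodes/<saltMaster>.<clusterDomain>.yml
        cutterEnv,                     // cookiecutter virtualenv prepared just above
        modelEnv,                      // destination model directory
        templateEnv,                   // cookiecutter-templates checkout
        false)                         // trailing flag; purpose not shown in this diff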
diff --git a/k8s-upgrade-pipeline.groovy b/k8s-upgrade-pipeline.groovy
index 530a256..98a4338 100644
--- a/k8s-upgrade-pipeline.groovy
+++ b/k8s-upgrade-pipeline.groovy
@@ -95,6 +95,13 @@
 
     stage("Upgrading Addons at ${target}") {
         salt.enforceState(pepperEnv, target, "kubernetes.master.kube-addons")
+    }
+}
+
+def updateAddonManager(pepperEnv, target) {
+    def salt = new com.mirantis.mk.Salt()
+
+    stage("Upgrading AddonManager at ${target}") {
         salt.enforceState(pepperEnv, target, "kubernetes.master.setup")
     }
 }
@@ -139,13 +146,19 @@
                                 upgradeDocker(pepperEnv, t)
                             }
                             performKubernetesControlUpdate(pepperEnv, t)
-                            updateAddons(pepperEnv, t)
+                            updateAddonManager(pepperEnv, t)
                             uncordonNode(pepperEnv, t)
                         }
                     }
                 } else {
                     performKubernetesControlUpdate(pepperEnv, target)
                 }
+                if (!SIMPLE_UPGRADE.toBoolean()) {
+                    // Addons upgrade should be performed after all nodes have been upgraded
+                    updateAddons(pepperEnv, target)
+                    // Wait 90 seconds for the addons to reconcile
+                    sleep(90)
+                }
             }
 
             if (updates.contains("cmp")) {
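After this change the addons state no longer runs per master node; only kubernetes.master.setup does. A condensed sketch of the resulting order for a non-SIMPLE_UPGRADE run (the per-node loop header and drain/cordon details are not shown in this hunk, so targetHosts stands in for whatever the loop iterates over):

    // per-node phase: each master is updated and returned to service individually
    for (t in targetHosts) {
        performKubernetesControlUpdate(pepperEnv, t)
        updateAddonManager(pepperEnv, t)   // enforces kubernetes.master.setup only
        uncordonNode(pepperEnv, t)
    }
    // cluster-wide phase: addons are upgraded once, after every master is done
    updateAddons(pepperEnv, target)        // enforces kubernetes.master.kube-addons
    sleep(90)                              // give the addons time to reconcile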
diff --git a/test-cookiecutter-reclass.groovy b/test-cookiecutter-reclass.groovy
index c09c572..0bab394 100644
--- a/test-cookiecutter-reclass.groovy
+++ b/test-cookiecutter-reclass.groovy
@@ -17,7 +17,7 @@
 git = new com.mirantis.mk.Git()
 python = new com.mirantis.mk.Python()
 
-def extraVarsYAML = env.EXTRA_VARIABLES_YAML ?: false
+extraVarsYAML = env.EXTRA_VARIABLES_YAML?.trim() ?: ''
 if (extraVarsYAML) {
     common.mergeEnv(env, extraVarsYAML)
 }
@@ -59,79 +59,6 @@
 chunkJobName = "test-mk-cookiecutter-templates-chunk"
 testModelBuildsData = [:]
 
-def generateSaltMaster(modEnv, clusterDomain, clusterName) {
-    def nodeFile = "${modEnv}/nodes/cfg01.${clusterDomain}.yml"
-    def nodeString = """classes:
-- cluster.${clusterName}.infra.config
-parameters:
-    _param:
-        linux_system_codename: xenial
-        reclass_data_revision: master
-    linux:
-        system:
-            name: cfg01
-            domain: ${clusterDomain}
-"""
-    sh "mkdir -p ${modEnv}/nodes/"
-    println "Create file ${nodeFile}"
-    writeFile(file: nodeFile, text: nodeString)
-}
-
-/**
- *
- * @param contextFile - path to `contexts/XXX.yaml file`
- * @param virtualenv - pyvenv with CC and dep's
- * @param templateEnvDir - root of CookieCutter
- * @return
- */
-
-def generateModel(contextFile, virtualenv, templateEnvDir) {
-    def modelEnv = "${templateEnvDir}/model"
-    def basename = common.GetBaseName(contextFile, '.yml')
-    def generatedModel = "${modelEnv}/${basename}"
-    def content = readFile(file: "${templateEnvDir}/contexts/${contextFile}")
-    def templateContext = readYaml text: content
-    def clusterDomain = templateContext.default_context.cluster_domain
-    def clusterName = templateContext.default_context.cluster_name
-    def outputDestination = "${generatedModel}/classes/cluster/${clusterName}"
-    def templateBaseDir = templateEnvDir
-    def templateDir = "${templateEnvDir}/dir"
-    def templateOutputDir = templateBaseDir
-    dir(templateEnvDir) {
-        sh(script: "rm -rf ${generatedModel} || true")
-        common.infoMsg("Generating model from context ${contextFile}")
-        def productList = ["infra", "cicd", "opencontrail", "kubernetes", "openstack", "oss", "stacklight", "ceph"]
-        for (product in productList) {
-
-            // get templateOutputDir and productDir
-            templateOutputDir = "${templateEnvDir}/output/${product}"
-            productDir = product
-            templateDir = "${templateEnvDir}/cluster_product/${productDir}"
-            // Bw for 2018.8.1 and older releases
-            if (product.startsWith("stacklight") && (!fileExists(templateDir))) {
-                common.warningMsg("Old release detected! productDir => 'stacklight2' ")
-                productDir = "stacklight2"
-                templateDir = "${templateEnvDir}/cluster_product/${productDir}"
-            }
-            if (product == "infra" || (templateContext.default_context["${product}_enabled"]
-                && templateContext.default_context["${product}_enabled"].toBoolean())) {
-
-                common.infoMsg("Generating product " + product + " from " + templateDir + " to " + templateOutputDir)
-
-                sh "rm -rf ${templateOutputDir} || true"
-                sh "mkdir -p ${templateOutputDir}"
-                sh "mkdir -p ${outputDestination}"
-
-                python.buildCookiecutterTemplate(templateDir, content, templateOutputDir, virtualenv, templateBaseDir)
-                sh "mv -v ${templateOutputDir}/${clusterName}/* ${outputDestination}"
-            } else {
-                common.warningMsg("Product " + product + " is disabled")
-            }
-        }
-        generateSaltMaster(generatedModel, clusterDomain, clusterName)
-    }
-}
-
 def getAndUnpackNodesInfoArtifact(jobName, copyTo, build) {
     return {
         dir(copyTo) {
@@ -212,7 +139,9 @@
 def StepGenerateModels(_contextFileList, _virtualenv, _templateEnvDir) {
     return {
         for (contextFile in _contextFileList) {
-            generateModel(contextFile, _virtualenv, _templateEnvDir)
+            def basename = common.GetBaseName(contextFile, '.yml')
+            def context = readFile(file: "${_templateEnvDir}/contexts/${contextFile}")
+            python.generateModel(context, basename, 'cfg01', _virtualenv, "${_templateEnvDir}/model", _templateEnvDir)
         }
     }
 }
@@ -221,8 +150,13 @@
     // Simple function, to check and define branch-around variables
     // In general, simply make transition updates for non-master branch
     // based on magic logic
-    def message = '<br/>'
+    def newline = '<br/>'
+    def messages = []
     if (env.GERRIT_PROJECT) {
+        messages.add("<font color='red'>GerritTrigger detected! We are in auto-mode:</font>")
+        messages.add("Test env variables have been changed:")
+        messages.add("COOKIECUTTER_TEMPLATE_BRANCH => ${gerritDataCC['gerritBranch']}")
+        messages.add("RECLASS_MODEL_BRANCH => ${gerritDataRS['gerritBranch']}")
         // TODO are we going to have such branches?
         if (!['nightly', 'testing', 'stable', 'proposed', 'master'].contains(env.GERRIT_BRANCH)) {
             gerritDataCC['gerritBranch'] = env.GERRIT_BRANCH
@@ -233,20 +167,16 @@
         if (env.GERRIT_PROJECT == 'salt-models/reclass-system') {
             gerritDataRS['gerritRefSpec'] = env.GERRIT_REFSPEC
             gerritDataRS['GERRIT_CHANGE_NUMBER'] = env.GERRIT_CHANGE_NUMBER
-            message = message + "<br/>RECLASS_SYSTEM_GIT_REF =>${gerritDataRS['gerritRefSpec']}"
+            messages.add("RECLASS_SYSTEM_GIT_REF => ${gerritDataRS['gerritRefSpec']}")
         } else if (env.GERRIT_PROJECT == 'mk/cookiecutter-templates') {
             gerritDataCC['gerritRefSpec'] = env.GERRIT_REFSPEC
             gerritDataCC['GERRIT_CHANGE_NUMBER'] = env.GERRIT_CHANGE_NUMBER
-            message = message + "<br/>COOKIECUTTER_TEMPLATE_REF =>${gerritDataCC['gerritRefSpec']}"
+            messages.add("COOKIECUTTER_TEMPLATE_REF => ${gerritDataCC['gerritRefSpec']}")
         } else {
             error("Unsuported gerrit-project triggered:${env.GERRIT_PROJECT}")
         }
-        message = "<font color='red'>GerritTrigger detected! We are in auto-mode:</font>" +
-            "<br/>Test env variables has been changed:" +
-            "<br/>COOKIECUTTER_TEMPLATE_BRANCH => ${gerritDataCC['gerritBranch']}" +
-            "<br/>RECLASS_MODEL_BRANCH=> ${gerritDataRS['gerritBranch']}" + message
     } else {
-        message = "<font color='red'>Non-gerrit trigger run detected!</font>" + message
+        messages.add("<font color='red'>Non-gerrit trigger run detected!</font>")
     }
     gerritDataCCHEAD << gerritDataCC
     gerritDataCCHEAD['gerritRefSpec'] = null
@@ -262,8 +192,9 @@
     if (!common.checkRemoteBinary([apt_mk_version: testDistribRevision]).linux_system_repo_url) {
         common.errorMsg("Binary release: ${testDistribRevision} not exist. Fallback to 'proposed'! ")
         testDistribRevision = 'proposed'
-        message = "<br/>DISTRIB_REVISION =>${testDistribRevision}" + message
+        messages.add("DISTRIB_REVISION => ${testDistribRevision}")
     }
+    def message = messages.join(newline) + newline
     currentBuild.description = currentBuild.description ? message + currentBuild.description : message
 }
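Build-description lines are now collected in a list and joined once, so they render in the order they were added instead of being prepended piecemeal. A short usage sketch of the resulting pattern (the refspec value is illustrative):

    def newline = '<br/>'
    def messages = []
    messages.add("<font color='red'>GerritTrigger detected! We are in auto-mode:</font>")
    messages.add("Test env variables have been changed:")
    messages.add("COOKIECUTTER_TEMPLATE_REF => refs/changes/01/12345/1")
    def message = messages.join(newline) + newline
    // the banner stays first, and later additions (e.g. the DISTRIB_REVISION fallback) land at the end
    currentBuild.description = currentBuild.description ? message + currentBuild.description : message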
 
@@ -293,25 +224,29 @@
     // tar.gz
     // ├── contexts
     // │   └── ceph.yml
-    // ├── ${reclassDirName} <<< reclass system
+    // ├── classes-system <<< reclass system
     // ├── model
     // │   └── ceph       <<< from `context basename`
     // │       ├── classes
     // │       │   ├── cluster
-    // │       │   └── system -> ../../../${reclassDirName}
+    // │       │   └── system -> ../../../classes-system
     // │       └── nodes
     // │           └── cfg01.ceph-cluster-domain.local.yml
+    def archiveBaseName = common.GetBaseName(archiveName, '.tar.gz')
+    def classesSystemDir = 'classes-system'
+    // copy the reclass system under envPath; -R with a trailing / copies the checkout with its symlinks kept as-is
+    sh("cp -R ${archiveBaseName}/ ${envPath}/${classesSystemDir}")
     dir(envPath) {
         for (String context : contextList) {
             def basename = common.GetBaseName(context, '.yml')
-            dir("${envPath}/model/${basename}") {
-                sh(script: "mkdir -p classes/; ln -sfv ../../../../${common.GetBaseName(archiveName, '.tar.gz')} classes/system ")
+            dir("${envPath}/model/${basename}/classes") {
+                sh(script: "ln -sfv ../../../${classesSystemDir} system ")
             }
         }
         // replace all generated passwords/secrets/keys with hardcode value for infra/secrets.yaml
         replaceGeneratedValues("${envPath}/model")
         // Save all models and all contexts. Warning! `h` flag must be used!
-        sh(script: "set -ex; tar -czhf ${env.WORKSPACE}/${archiveName} --exclude='*@tmp' model contexts", returnStatus: true)
+        sh(script: "set -ex; tar -czhf ${env.WORKSPACE}/${archiveName} --exclude='*@tmp' contexts model ${classesSystemDir}", returnStatus: true)
     }
     archiveArtifacts artifacts: archiveName
 }
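The reclass system checkout is now copied into the test environment as classes-system and shipped inside the tarball, so each model's classes/system symlink resolves within the archive itself; the old link (../../../../<archive basename>) reached a checkout outside envPath, while the new one (../../../classes-system) stays inside it. A condensed sketch of the layout operations, with ceph as an illustrative context basename:

    def classesSystemDir = 'classes-system'
    // self-contained copy of the reclass system inside the environment
    sh("cp -R ${archiveBaseName}/ ${envPath}/${classesSystemDir}")
    dir("${envPath}/model/ceph/classes") {
        // model/ceph/classes/system -> ../../../classes-system
        sh("ln -sfv ../../../${classesSystemDir} system")
    }
    // the h flag makes tar follow the symlink, so the archive carries real content under classes/system
    sh("tar -czhf ${env.WORKSPACE}/${archiveName} --exclude='*@tmp' contexts model ${classesSystemDir}")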
@@ -402,10 +337,12 @@
             stage("Compare cluster lvl Head/Patched") {
                 // Compare patched and HEAD reclass pillars
                 compareRoot = "${env.WORKSPACE}/cluster_compare/"
+                // extract both archives and drop the copied classes/system dirs before comparing
                 sh(script: """
                    mkdir -pv ${compareRoot}/new ${compareRoot}/old
                    tar -xzf ${patchedReclassArtifactName}  --directory ${compareRoot}/new
                    tar -xzf ${headReclassArtifactName}  --directory ${compareRoot}/old
+                   find ${compareRoot} -name classes -type d -exec rm -rf '{}/system' \\;
                    """)
                 common.warningMsg('infra/secrets.yml has been skipped from compare!')
                 result = '\n' + common.comparePillars(compareRoot, env.BUILD_URL, "-Ev \'infra/secrets.yml|\\.git\'")