Add EXTRA_VARS param for proposed jobs

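The new multi-line text parameter is split on newlines by
jobs/pipelines/deploy-cicd-and-run-tests.groovy and wrapped around the
deploy and test stages with withEnv(), so extra environment variables
can be injected into a run without changing the job definition. A
minimal standalone Groovy sketch of the same split logic (the variable
names in the example value are illustrative only):

    // Hypothetical EXTRA_VARS parameter value, one NAME=value per line:
    def EXTRA_VARS = 'TEMPEST_EXTRA_ARGS=--concurrency 2\nSALT_LOG_LEVEL=debug'
    // Same parsing as added to the pipeline below:
    def extra_vars = EXTRA_VARS ? EXTRA_VARS.split('\n').collect { it as String } : []
    assert extra_vars == ['TEMPEST_EXTRA_ARGS=--concurrency 2', 'SALT_LOG_LEVEL=debug']
    // In the pipeline the resulting list is passed to withEnv(extra_vars) { ... }
    // around the deploy and test stages.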

Related-Prod: PROD-36997

Change-Id: I4fcadf614e37f3f81c89e10a802635d0f832a145
diff --git a/jobs/pipelines/deploy-cicd-and-run-tests.groovy b/jobs/pipelines/deploy-cicd-and-run-tests.groovy
index 769d576..ff8a9f8 100644
--- a/jobs/pipelines/deploy-cicd-and-run-tests.groovy
+++ b/jobs/pipelines/deploy-cicd-and-run-tests.groovy
@@ -143,117 +143,126 @@
 //throttle(['fuel_devops_environment']) {
   node ("${NODE_NAME}") {
     env.slave_workdir = pwd()
-    try {
-        // run deploy stages
-        deploy(shared, common, steps, env_manager, batch_size, dist_upgrade_nodes, upgrade_saltstack)
-        // run test stages
-        test(shared, common, steps, env_manager)
-    } catch (e) {
-        common.printMsg("Job is failed: " + e.message, "purple")
-        throw e
-    } finally {
-        if (make_snapshot_stages) {
-            // shutdown the environment if required
-            if ("${env.SHUTDOWN_ENV_ON_TEARDOWN}" == "true") {
+    // Optional multi-line EXTRA_VARS parameter: one NAME=value per line for withEnv()
+    def extra_vars = env.EXTRA_VARS ?: ''
+    if (extra_vars != '') {
+        extra_vars = extra_vars.split('\n').collect { it as String }
+    } else {
+        extra_vars = []
+    }
+    withEnv(extra_vars) {
+        try {
+            // run deploy stages
+            deploy(shared, common, steps, env_manager, batch_size, dist_upgrade_nodes, upgrade_saltstack)
+            // run test stages
+            test(shared, common, steps, env_manager)
+        } catch (e) {
+            common.printMsg("Job is failed: " + e.message, "purple")
+            throw e
+        } finally {
+            if (make_snapshot_stages) {
+                // shutdown the environment if required
+                if ("${env.SHUTDOWN_ENV_ON_TEARDOWN}" == "true") {
+                    shared.run_cmd("""\
+                        dos.py destroy ${ENV_NAME} || true
+                    """)
+                }
+            }
+
+            stage("Archive all xml reports") {
+                dir("${env.slave_workdir }") {
+                    archiveArtifacts artifacts: "**/*.xml,**/*.ini,**/*.log,**/*.tar.gz"
+                    }
+            }
+            // delete directory with artifacts from swarm-run-pytest pipeline
+            // for excluding xml duplicates
+            if (fileExists("tmp")) {
                 shared.run_cmd("""\
-                    dos.py destroy ${ENV_NAME} || true
+                        rm -rf tmp/
                 """)
             }
-        }
 
-        stage("Archive all xml reports") {
-            dir("${env.slave_workdir }") {
-                archiveArtifacts artifacts: "**/*.xml,**/*.ini,**/*.log,**/*.tar.gz"
+            if ("${env.REPORT_TO_TESTRAIL}" != "false") {
+                stage("report results to testrail from jenkins master") {
+                    common.printMsg("Running on: " + node_with_reports, "blue")
+                    common.printMsg("Running on: " + env.NODE_NAME, "blue")
+                    shared.swarm_testrail_report(steps, env.NODE_NAME)
                 }
-        }
-        // delete directory with artifacts from swarm-run-pytest pipeline
-        // for excluding xml duplicates
-        if (fileExists("tmp")) {
-            shared.run_cmd("""\
-                    rm -rf tmp/
-            """)
-        }
-
-        if ("${env.REPORT_TO_TESTRAIL}" != "false") {
-            stage("report results to testrail from jenkins master") {
-                common.printMsg("Running on: " + node_with_reports, "blue")
-                common.printMsg("Running on: " + env.NODE_NAME, "blue")
-                shared.swarm_testrail_report(steps, env.NODE_NAME)
-            }
-            stage("Store TestRail reports to job description") {
-                if (fileExists("description.txt")) {
-                    def String description  = readFile("description.txt")
-                    currentBuild.description += "${description}"
+                stage("Store TestRail reports to job description") {
+                    if (fileExists("description.txt")) {
+                        def description = readFile("description.txt")
+                        currentBuild.description += "${description}"
+                    }
                 }
             }
-        }
 
-        if (fileExists("jenkins_agent_description.txt")) {
-            // if there is a separated foundation node on $jenkins_slave_node_name,
-            // then archive artifacts also on that node
-            if (jenkins_slave_node_name != env.NODE_NAME) {
-                node ("${jenkins_slave_node_name}") {
-                    dir("${env.slave_workdir }") {
-                        stage("Archive all xml reports from node ${jenkins_slave_node_name}") {
-                            archiveArtifacts artifacts: "**/*.xml,**/*.ini,**/*.log,**/*.tar.gz,*.xml,*.ini,*.log,*.tar.gz", allowEmptyArchive: true
-                        }
-                        if ("${env.REPORT_TO_TESTRAIL}" != "false") {
-                            stage("report results to testrail") {
-                                common.printMsg("Running on: " + node_with_reports, "blue")
-                                shared.swarm_testrail_report(steps, node_with_reports)
+            if (fileExists("jenkins_agent_description.txt")) {
+                // if there is a separated foundation node on $jenkins_slave_node_name,
+                // then archive artifacts also on that node
+                if (jenkins_slave_node_name != env.NODE_NAME) {
+                    node ("${jenkins_slave_node_name}") {
+                        dir("${env.slave_workdir }") {
+                            stage("Archive all xml reports from node ${jenkins_slave_node_name}") {
+                                archiveArtifacts artifacts: "**/*.xml,**/*.ini,**/*.log,**/*.tar.gz,*.xml,*.ini,*.log,*.tar.gz", allowEmptyArchive: true
                             }
-                            stage("Store TestRail reports to job description from ${jenkins_slave_node_name}") {
-                                if (fileExists("description.txt")) {
-                                    def String description  = readFile("description.txt")
-                                    currentBuild.description += "${description}"
+                            if ("${env.REPORT_TO_TESTRAIL}" != "false") {
+                                stage("report results to testrail") {
+                                    common.printMsg("Running on: " + node_with_reports, "blue")
+                                    shared.swarm_testrail_report(steps, node_with_reports)
+                                }
+                                stage("Store TestRail reports to job description from ${jenkins_slave_node_name}") {
+                                    if (fileExists("description.txt")) {
+                                        def description = readFile("description.txt")
+                                        currentBuild.description += "${description}"
+                                    }
                                 }
                             }
-                        }
-                    } //dir
-                } // node
+                        } //dir
+                    } // node
+                }
             }
-        }
-        //run upgrade env to proposed
-        if (env.RUN_UPGRADE_AFTER_JOB == "true" && currentBuild.result == 'SUCCESS') {
-            network_backend = env.PLATFORM_STACK_INSTALL.contains("contrail") ? 'contrail' : 'dvr'
-            upgrade_job = "mcp-update-${env.TEMPEST_IMAGE_VERSION}-${network_backend}-sl"
-            upgrade_to_tag = ''
-            contrail_upgrade_line = ''
-            if(env.UPGRADE_TO_TAG == "true") {
-                upgrade_to_tag = '--update-to-tag ${env.UPGRADE_VERSION_TAG} '
+            //run upgrade env to proposed
+            if (env.RUN_UPGRADE_AFTER_JOB == "true" && currentBuild.result == 'SUCCESS') {
+                network_backend = env.PLATFORM_STACK_INSTALL.contains("contrail") ? 'contrail' : 'dvr'
+                upgrade_job = "mcp-update-${env.TEMPEST_IMAGE_VERSION}-${network_backend}-sl"
+                upgrade_to_tag = ''
+                contrail_upgrade_line = ''
+                if(env.UPGRADE_TO_TAG == "true") {
+                    upgrade_to_tag = '--update-to-tag ${env.UPGRADE_VERSION_TAG} '
+                }
+                if(env.PLATFORM_STACK_INSTALL.contains("contrail")) {
+                    contrail_upgrade_line = "tcp_tests/tests/system/test_upgrade_contrail.py::TestUpdateContrail "
+                }
+                run_test_opts = """--keep-duplicates --maxfail=1 \
+                    tcp_tests/tests/system/test_mcp_update.py::TestUpdateMcpCluster::test_update_drivetrain ${upgrade_to_tag} \
+                    ${contrail_upgrade_line}\
+                    tcp_tests/tests/system/test_mcp_update.py::TestOpenstackUpdate \
+                    tcp_tests/tests/system/test_mcp_update.py::TestUpdateMcpCluster::test_update_galera --update-mysql-version 5.7 \
+                    tcp_tests/tests/system/test_mcp_update.py::TestUpdateMcpCluster::test_update_rabbit \
+                    tcp_tests/tests/system/test_mcp_update.py::TestUpdateMcpCluster::test_update_stacklight \
+                    tcp_tests/tests/system/test_mcp_update.py::TestUpdateMcpCluster::test_update_ceph \
+                    \
+                    tcp_tests/tests/system/test_cvp_pipelines.py::TestCvpPipelines::test_run_cvp_tempest \
+                    tcp_tests/tests/system/test_cvp_pipelines.py::TestCvpPipelines::test_run_cvp_func_sanity \
+                    tcp_tests/tests/system/test_cvp_pipelines.py::TestCvpPipelines::test_run_cvp_stacklight
+                """
+                if (env.IPMI_CREDS) {
+                    upgrade_job = "mcp-update-bm-b300-queens-ovs-maas"
+                }
+                def deploy = build job: "${upgrade_job}",
+                    parameters: [
+                        string(name: 'PARENT_NODE_NAME', value: "openstack_slave_${env.ENV_NAME}"),
+                        string(name: 'TCP_QA_REFS', value: env.TCP_QA_REFS),
+                        string(name: 'PASSED_STEPS', value: steps),
+                        string(name: 'TEMPEST_TEST_SUITE_NAME', value: env.TEMPEST_TEST_SUITE_NAME),
+                        string(name: 'NODE', value: "openstack_slave_${env.ENV_NAME}"),
+                        string(name: 'RUN_TEST_OPTS', value: run_test_opts)
+                    ],
+                    wait: false,
+                    propagate: false
             }
-            if(env.PLATFORM_STACK_INSTALL.contains("contrail")) {
-                contrail_upgrade_line = "tcp_tests/tests/system/test_upgrade_contrail.py::TestUpdateContrail "
-            }
-            run_test_opts = """--keep-duplicates --maxfail=1 \
-                tcp_tests/tests/system/test_mcp_update.py::TestUpdateMcpCluster::test_update_drivetrain ${upgrade_to_tag} \
-                ${contrail_upgrade_line}\
-                tcp_tests/tests/system/test_mcp_update.py::TestOpenstackUpdate \
-                tcp_tests/tests/system/test_mcp_update.py::TestUpdateMcpCluster::test_update_galera --update-mysql-version 5.7 \
-                tcp_tests/tests/system/test_mcp_update.py::TestUpdateMcpCluster::test_update_rabbit \
-                tcp_tests/tests/system/test_mcp_update.py::TestUpdateMcpCluster::test_update_stacklight \
-                tcp_tests/tests/system/test_mcp_update.py::TestUpdateMcpCluster::test_update_ceph \
-                \
-                tcp_tests/tests/system/test_cvp_pipelines.py::TestCvpPipelines::test_run_cvp_tempest \
-                tcp_tests/tests/system/test_cvp_pipelines.py::TestCvpPipelines::test_run_cvp_func_sanity \
-                tcp_tests/tests/system/test_cvp_pipelines.py::TestCvpPipelines::test_run_cvp_stacklight
-            """
-            if (env.IPMI_CREDS) {
-                upgrade_job = "mcp-update-bm-b300-queens-ovs-maas"
-            }
-            def deploy = build job: "${upgrade_job}",
-                parameters: [
-                    string(name: 'PARENT_NODE_NAME', value: "openstack_slave_${env.ENV_NAME}"),
-                    string(name: 'TCP_QA_REFS', value: env.TCP_QA_REFS),
-                    string(name: 'PASSED_STEPS', value: steps),
-                    string(name: 'TEMPEST_TEST_SUITE_NAME', value: env.TEMPEST_TEST_SUITE_NAME),
-                    string(name: 'NODE', value: "openstack_slave_${env.ENV_NAME}"),
-                    string(name: 'RUN_TEST_OPTS', value: run_test_opts)
-                ],
-                wait: false,
-                propagate: false
-        }
-    } // try
+        } // try
+    }
   } // node
 
 
diff --git a/jobs/templates/heat-cicd-pike-contrail-stb-sl.yml b/jobs/templates/heat-cicd-pike-contrail-stb-sl.yml
index 0a4ddf0..f0f62ec 100755
--- a/jobs/templates/heat-cicd-pike-contrail-stb-sl.yml
+++ b/jobs/templates/heat-cicd-pike-contrail-stb-sl.yml
@@ -216,6 +216,12 @@
         default: true
         description: 'Works starting from MCP 2019.2.10 or master. Whether to apply saltstack updates on all nodes in cluster before deployment'
         name: UPGRADE_SALTSTACK
+    - text:
+        default: ''
+        description: |-
+          Extra environment variables for the deploy and test stages, one NAME=value per line
+        name: EXTRA_VARS
+        trim: 'false'
     pipeline-scm:
       lightweight-checkout: false
       scm:
diff --git a/jobs/templates/heat-cicd-pike-dvr-sl.yml b/jobs/templates/heat-cicd-pike-dvr-sl.yml
index 2ade702..04a28b7 100644
--- a/jobs/templates/heat-cicd-pike-dvr-sl.yml
+++ b/jobs/templates/heat-cicd-pike-dvr-sl.yml
@@ -210,6 +210,12 @@
         default: true
         description: 'Works starting from MCP 2019.2.10 or master. Whether to apply saltstack updates on all nodes in cluster before deployment'
         name: UPGRADE_SALTSTACK
+    - text:
+        default: ''
+        description: |-
+          Extra environment variables for the deploy and test stages, one NAME=value per line
+        name: EXTRA_VARS
+        trim: 'false'
     pipeline-scm:
       lightweight-checkout: false
       scm:
diff --git a/jobs/templates/heat-cicd-queens-contrail41-sl.yml b/jobs/templates/heat-cicd-queens-contrail41-sl.yml
index 2eca95b..ea055de 100644
--- a/jobs/templates/heat-cicd-queens-contrail41-sl.yml
+++ b/jobs/templates/heat-cicd-queens-contrail41-sl.yml
@@ -208,6 +208,12 @@
         default: true
         description: 'Works starting from MCP 2019.2.10 or master. Whether to apply saltstack updates on all nodes in cluster before deployment'
         name: UPGRADE_SALTSTACK
+    - text:
+        default: ''
+        description: |-
+          Extra environment variables for the deploy and test stages, one NAME=value per line
+        name: EXTRA_VARS
+        trim: 'false'
     pipeline-scm:
       lightweight-checkout: false
       scm:
diff --git a/jobs/templates/heat-cicd-queens-dvr-sl.yml b/jobs/templates/heat-cicd-queens-dvr-sl.yml
index bcaaa84..053851e 100644
--- a/jobs/templates/heat-cicd-queens-dvr-sl.yml
+++ b/jobs/templates/heat-cicd-queens-dvr-sl.yml
@@ -209,6 +209,12 @@
         default: true
         description: 'Works starting from MCP 2019.2.10 or master. Whether to apply saltstack updates on all nodes in cluster before deployment'
         name: UPGRADE_SALTSTACK
+    - text:
+        default: ''
+        description: |-
+          Extra environment variables for the deploy and test stages, one NAME=value per line
+        name: EXTRA_VARS
+        trim: 'false'
     pipeline-scm:
       lightweight-checkout: false
       scm: