Merge "Add Dns and mon nodes"
diff --git a/.gitignore b/.gitignore
index ea68183..71462bb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -45,6 +45,7 @@
 
 # Local settings
 local_settings.py
+id_rsa
 
 # Documentation
 doc/_build/
diff --git a/jobs/pipelines/deploy-cicd-and-run-tests.groovy b/jobs/pipelines/deploy-cicd-and-run-tests.groovy
index 2de7995..4e0dab3 100644
--- a/jobs/pipelines/deploy-cicd-and-run-tests.groovy
+++ b/jobs/pipelines/deploy-cicd-and-run-tests.groovy
@@ -4,54 +4,56 @@
 def shared = new com.mirantis.system_qa.SharedPipeline()
 def steps = "hardware,create_model,salt," + env.DRIVETRAIN_STACK_INSTALL + "," + env.PLATFORM_STACK_INSTALL
 
-node ("${NODE_NAME}") {
-  try {
+throttle(['fuel_devops_environment']) {
+  node ("${NODE_NAME}") {
+    try {
 
-    stage("Clean the environment and clone tcp-qa") {
-        shared.prepare_working_dir()
-    }
+        stage("Clean the environment and clone tcp-qa") {
+            shared.prepare_working_dir()
+        }
 
-    stage("Create environment, generate model, bootstrap the salt-cluster") {
-        // steps: "hardware,create_model,salt"
-        shared.swarm_bootstrap_salt_cluster_devops()
-    }
+        stage("Create environment, generate model, bootstrap the salt-cluster") {
+            // steps: "hardware,create_model,salt"
+            shared.swarm_bootstrap_salt_cluster_devops()
+        }
 
-    stage("Install core infrastructure and deploy CICD nodes") {
-        // steps: env.DRIVETRAIN_STACK_INSTALL
-        shared.swarm_deploy_cicd(env.DRIVETRAIN_STACK_INSTALL)
-    }
+        stage("Install core infrastructure and deploy CICD nodes") {
+            // steps: env.DRIVETRAIN_STACK_INSTALL
+            shared.swarm_deploy_cicd(env.DRIVETRAIN_STACK_INSTALL)
+        }
 
-    stage("Install core infrastructure and deploy CICD nodes") {
-        // steps: env.PLATFORM_STACK_INSTALL
-        shared.swarm_deploy_platform(env.PLATFORM_STACK_INSTALL)
-    }
+        stage("Deploy platform components") {
+            // steps: env.PLATFORM_STACK_INSTALL
+            shared.swarm_deploy_platform(env.PLATFORM_STACK_INSTALL)
+        }
 
-    stage("Run tests") {
-        shared.swarm_run_pytest(steps)
-    }
+        stage("Run tests") {
+            shared.swarm_run_pytest(steps)
+        }
 
-  } catch (e) {
-      common.printMsg("Job failed", "red")
-      shared.run_cmd("""\
-          dos.py suspend ${ENV_NAME} || true
-          dos.py snapshot ${ENV_NAME} test_failed || true
-          """)
-      throw e
-  } finally {
-    // TODO(ddmitriev): analyze the "def currentResult = currentBuild.result ?: 'SUCCESS'"
-    // and report appropriate data to TestRail
-    if ("${env.SHUTDOWN_ENV_ON_TEARDOWN}" == "false") {
+    } catch (e) {
+        common.printMsg("Job failed", "red")
         shared.run_cmd("""\
-            dos.py resume ${ENV_NAME} || true
-            sleep 20    # Wait for I/O on the host calms down
-            dos.py time-sync ${ENV_NAME} || true
+            dos.py suspend ${ENV_NAME} || true
+            dos.py snapshot ${ENV_NAME} test_failed || true
         """)
-    } else {
-        shared.run_cmd("""\
-            dos.py destroy ${ENV_NAME} || true
-        """)
+        throw e
+    } finally {
+        // TODO(ddmitriev): analyze the "def currentResult = currentBuild.result ?: 'SUCCESS'"
+        // and report appropriate data to TestRail
+        if ("${env.SHUTDOWN_ENV_ON_TEARDOWN}" == "false") {
+            shared.run_cmd("""\
+                dos.py resume ${ENV_NAME} || true
+                sleep 20    # Wait until I/O on the host calms down
+                dos.py time-sync ${ENV_NAME} || true
+            """)
+        } else {
+            shared.run_cmd("""\
+                dos.py destroy ${ENV_NAME} || true
+            """)
+        }
+        shared.report_deploy_result(steps)
+        shared.report_test_result()
     }
-    shared.report_deploy_result(steps)
-    shared.report_test_result()
   }
-}
+}
\ No newline at end of file
diff --git a/src/com/mirantis/system_qa/SharedPipeline.groovy b/src/com/mirantis/system_qa/SharedPipeline.groovy
index 9a5ff36..1a6d22e 100644
--- a/src/com/mirantis/system_qa/SharedPipeline.groovy
+++ b/src/com/mirantis/system_qa/SharedPipeline.groovy
@@ -1,45 +1,115 @@
 package com.mirantis.system_qa
 
+import groovy.xml.XmlUtil
 
-def run_cmd(cmd, returnStdout=false) {
+def run_cmd(String cmd, Boolean returnStdout=false) {
     def common = new com.mirantis.mk.Common()
     common.printMsg("Run shell command:\n" + cmd, "blue")
     def VENV_PATH='/home/jenkins/fuel-devops30'
+    def stderr_path = "/tmp/${JOB_NAME}_${BUILD_NUMBER}_stderr.log"
     script = """\
         set +x;
         echo 'activate python virtualenv ${VENV_PATH}';
         . ${VENV_PATH}/bin/activate;
-        bash -c 'set -ex; set -ex; ${cmd.stripIndent()}'
+        bash -c 'set -ex; ${cmd.stripIndent()}' 2>${stderr_path}
     """
-    return sh(script: script, returnStdout: returnStdout)
+    def result
+    try {
+        return sh(script: script, returnStdout: returnStdout)
+    } catch (e) {
+        def stderr = readFile("${stderr_path}")
+        def error_message = e.message + "\n<<<<<< STDERR: >>>>>>\n" + stderr
+        throw new Exception(error_message)
+    } finally {
+        sh(script: "rm ${stderr_path} || true")
+    }
 }
 
 def run_cmd_stdout(cmd) {
     return run_cmd(cmd, true)
 }
 
+def build_pipeline_job(job_name, parameters) {
+    //Build a job, grab the results if failed and use the results in exception
+    def common = new com.mirantis.mk.Common()
+    common.printMsg("Start building job '${job_name}' with parameters:", "purple")
+    common.prettyPrint(parameters)
+
+    def job_info = build job: "${job_name}",
+        parameters: parameters,
+        propagate: false
+
+    if (job_info.getResult() != "SUCCESS") {
+        currentBuild.result = job_info.getResult()
+        def build_number = job_info.getNumber()
+        common.printMsg("Job '${job_name}' failed, getting details", "red")
+        def workflow_details=run_cmd_stdout("""\
+            export JOB_NAME=${job_name}
+            export BUILD_NUMBER=${build_number}
+            python ./tcp_tests/utils/get_jenkins_job_stages.py
+            """)
+        throw new Exception(workflow_details)
+    }
+}
+
+def build_shell_job(job_name, parameters, junit_report_filename=null, junit_report_source_dir='**/') {
+    //Build a job, grab the results if failed and use the results in exception
+    //junit_report_filename: if not null, try to copy this JUnit report first from remote job
+    def common = new com.mirantis.mk.Common()
+    common.printMsg("Start building job '${job_name}' with parameters:", "purple")
+    common.prettyPrint(parameters)
+
+    def job_info = build job: "${job_name}",
+        parameters: parameters,
+        propagate: false
+
+    if (job_info.getResult() != "SUCCESS") {
+        def build_status = job_info.getResult()
+        def build_number = job_info.getNumber()
+        def build_url = job_info.getAbsoluteUrl()
+        def job_url = "${build_url}"
+        currentBuild.result = build_status
+        if (junit_report_filename) {
+            common.printMsg("Job '${job_url}' failed with status ${build_status}, getting details", "red")
+            step($class: 'hudson.plugins.copyartifact.CopyArtifact',
+                 projectName: job_name,
+                 selector: specific("${build_number}"),
+                 filter: "${junit_report_source_dir}/${junit_report_filename}",
+                 target: '.',
+                 flatten: true,
+                 fingerprintArtifacts: true)
+
+            def String junit_report_xml = readFile("${junit_report_filename}")
+            def String junit_report_xml_pretty = new XmlUtil().serialize(junit_report_xml)
+            def String msg = "Job '${job_url}' failed with status ${build_status}, JUnit report:\n"
+            throw new Exception(msg + junit_report_xml_pretty)
+        } else {
+            throw new Exception("Job '${job_url}' failed with status ${build_status}, please check the console output.")
+        }
+    }
+}
 
 def prepare_working_dir() {
         println "Clean the working directory ${env.WORKSPACE}"
         deleteDir()
 
-        //// do not fail if environment doesn't exists
-        // println "Remove environment ${ENV_NAME}"
-        // run_cmd("""\
-        //     dos.py erase ${ENV_NAME} || true
-        // """)
-        // println "Remove config drive ISO"
-        // run_cmd("""\
-        //    rm /home/jenkins/images/${CFG01_CONFIG_IMAGE_NAME} || true
-        // """)
+        // do not fail if environment doesn't exists
+        println "Remove environment ${ENV_NAME}"
+        run_cmd("""\
+            dos.py erase ${ENV_NAME} || true
+        """)
+        println "Remove config drive ISO"
+        run_cmd("""\
+            rm /home/jenkins/images/${CFG01_CONFIG_IMAGE_NAME} || true
+        """)
 
         run_cmd("""\
-        git clone https://github.com/Mirantis/tcp-qa.git ${env.WORKSPACE}
-        if [ -n "$TCP_QA_REFS" ]; then
-            set -e
-            git fetch https://review.gerrithub.io/Mirantis/tcp-qa $TCP_QA_REFS && git checkout FETCH_HEAD || exit \$?
-        fi
-        pip install --upgrade --upgrade-strategy=only-if-needed -r tcp_tests/requirements.txt
+            git clone https://github.com/Mirantis/tcp-qa.git ${env.WORKSPACE}
+            if [ -n "$TCP_QA_REFS" ]; then
+                set -e
+                git fetch https://review.gerrithub.io/Mirantis/tcp-qa $TCP_QA_REFS && git checkout FETCH_HEAD || exit \$?
+            fi
+            pip install --upgrade --upgrade-strategy=only-if-needed -r tcp_tests/requirements.txt
         """)
 }
 
@@ -61,10 +131,8 @@
                 string(name: 'SALT_MODELS_SYSTEM_COMMIT', value: "${SALT_MODELS_SYSTEM_COMMIT}"),
                 booleanParam(name: 'SHUTDOWN_ENV_ON_TEARDOWN', value: false),
             ]
-        common.printMsg("Start building job 'swarm-bootstrap-salt-cluster-devops' with parameters:", "purple")
-        common.prettyPrint(parameters)
-        build job: 'swarm-bootstrap-salt-cluster-devops',
-            parameters: parameters
+
+        build_pipeline_job('swarm-bootstrap-salt-cluster-devops', parameters)
 }
 
 def swarm_deploy_cicd(String stack_to_install='core,cicd') {
@@ -78,10 +146,7 @@
                 string(name: 'TCP_QA_REFS', value: "${TCP_QA_REFS}"),
                 booleanParam(name: 'SHUTDOWN_ENV_ON_TEARDOWN', value: false),
             ]
-        common.printMsg("Start building job 'swarm-deploy-cicd' with parameters:", "purple")
-        common.prettyPrint(parameters)
-        build job: 'swarm-deploy-cicd',
-            parameters: parameters
+        build_pipeline_job('swarm-deploy-cicd', parameters)
 }
 
 def swarm_deploy_platform(String stack_to_install) {
@@ -95,10 +160,7 @@
                 string(name: 'TCP_QA_REFS', value: "${TCP_QA_REFS}"),
                 booleanParam(name: 'SHUTDOWN_ENV_ON_TEARDOWN', value: false),
             ]
-        common.printMsg("Start building job 'swarm-deploy-platform' with parameters:", "purple")
-        common.prettyPrint(parameters)
-        build job: 'swarm-deploy-platform',
-            parameters: parameters
+        build_pipeline_job('swarm-deploy-platform', parameters)
 }
 
 def swarm_run_pytest(String passed_steps) {
@@ -151,10 +213,8 @@
                 string(name: 'IPV4_NET_TENANT', value: IPV4_NET_TENANT),
                 string(name: 'IPV4_NET_EXTERNAL', value: IPV4_NET_EXTERNAL),
             ]
-        common.printMsg("Start building job 'swarm-cookied-model-generator' with parameters:", "purple")
-        common.prettyPrint(parameters)
-        build job: 'swarm-cookied-model-generator',
-            parameters: parameters
+
+        build_shell_job('swarm-cookied-model-generator', parameters, "deploy_generate_model.xml")
 }
 
 def generate_configdrive_iso() {
@@ -182,53 +242,83 @@
                 string(name: 'PIPELINE_LIBRARY_REF', value: "${PIPELINE_LIBRARY_REF}"),
                 string(name: 'MK_PIPELINES_REF', value: "${MK_PIPELINES_REF}"),
             ]
-        common.printMsg("Start building job 'create-cfg-config-drive' with parameters:", "purple")
-        common.prettyPrint(parameters)
-        build job: 'create-cfg-config-drive',
-            parameters: parameters
+        build_pipeline_job('create-cfg-config-drive', parameters)
 }
 
 def run_job_on_day01_node(stack_to_install, timeout=1800) {
     // stack_to_install="core,cicd"
     def stack = "${stack_to_install}"
-    run_cmd("""\
-        export ENV_NAME=${ENV_NAME}
-        . ./tcp_tests/utils/env_salt
-        . ./tcp_tests/utils/env_jenkins_day01
-        export JENKINS_BUILD_TIMEOUT=${timeout}
-        JOB_PARAMETERS=\"{
-            \\\"SALT_MASTER_URL\\\": \\\"\${SALTAPI_URL}\\\",
-            \\\"STACK_INSTALL\\\": \\\"${stack}\\\"
-        }\"
-        JOB_PREFIX="[ {job_name}/{build_number}:${stack} {time} ] "
-        python ./tcp_tests/utils/run_jenkins_job.py --verbose --job-name=deploy_openstack --job-parameters="\$JOB_PARAMETERS" --job-output-prefix="\$JOB_PREFIX"
-    """)
+    try {
+        run_cmd("""\
+            export ENV_NAME=${ENV_NAME}
+            . ./tcp_tests/utils/env_salt
+            . ./tcp_tests/utils/env_jenkins_day01
+            export JENKINS_BUILD_TIMEOUT=${timeout}
+            JOB_PARAMETERS=\"{
+                \\\"SALT_MASTER_URL\\\": \\\"\${SALTAPI_URL}\\\",
+                \\\"STACK_INSTALL\\\": \\\"${stack}\\\"
+            }\"
+            JOB_PREFIX="[ ${ENV_NAME}/{build_number}:${stack} {time} ] "
+            python ./tcp_tests/utils/run_jenkins_job.py --verbose --job-name=deploy_openstack --job-parameters="\$JOB_PARAMETERS" --job-output-prefix="\$JOB_PREFIX"
+        """)
+    } catch (e) {
+        def common = new com.mirantis.mk.Common()
+        common.printMsg("Product job 'deploy_openstack' failed, getting details", "red")
+        def workflow_details=run_cmd_stdout("""\
+            . ./tcp_tests/utils/env_salt
+            . ./tcp_tests/utils/env_jenkins_day01
+            export JOB_NAME=deploy_openstack
+            export BUILD_NUMBER=lastBuild
+            python ./tcp_tests/utils/get_jenkins_job_stages.py
+            """)
+        throw new Exception(workflow_details)
+    }
 }
 
 def run_job_on_cicd_nodes(stack_to_install, timeout=1800) {
     // stack_to_install="k8s,calico,stacklight"
     def stack = "${stack_to_install}"
-    run_cmd("""\
-        export ENV_NAME=${ENV_NAME}
-        . ./tcp_tests/utils/env_salt
-        . ./tcp_tests/utils/env_jenkins_cicd
-        export JENKINS_BUILD_TIMEOUT=${timeout}
-        JOB_PARAMETERS=\"{
-            \\\"SALT_MASTER_URL\\\": \\\"\${SALTAPI_URL}\\\",
-            \\\"STACK_INSTALL\\\": \\\"${stack}\\\"
-        }\"
-        JOB_PREFIX="[ {job_name}/{build_number}:${stack} {time} ] "
-        python ./tcp_tests/utils/run_jenkins_job.py --verbose --job-name=deploy_openstack --job-parameters="\$JOB_PARAMETERS" --job-output-prefix="\$JOB_PREFIX"
-        sleep 60  # Wait for IO calm down on cluster nodes
-    """)
+    try {
+        run_cmd("""\
+            export ENV_NAME=${ENV_NAME}
+            . ./tcp_tests/utils/env_salt
+            . ./tcp_tests/utils/env_jenkins_cicd
+            export JENKINS_BUILD_TIMEOUT=${timeout}
+            JOB_PARAMETERS=\"{
+                \\\"SALT_MASTER_URL\\\": \\\"\${SALTAPI_URL}\\\",
+                \\\"STACK_INSTALL\\\": \\\"${stack}\\\"
+            }\"
+            JOB_PREFIX="[ ${ENV_NAME}/{build_number}:${stack} {time} ] "
+            python ./tcp_tests/utils/run_jenkins_job.py --verbose --job-name=deploy_openstack --job-parameters="\$JOB_PARAMETERS" --job-output-prefix="\$JOB_PREFIX"
+            sleep 60  # Wait for I/O to calm down on cluster nodes
+        """)
+    } catch (e) {
+        def common = new com.mirantis.mk.Common()
+        common.printMsg("Product job 'deploy_openstack' failed, getting details", "red")
+        def workflow_details=run_cmd_stdout("""\
+            . ./tcp_tests/utils/env_salt
+            . ./tcp_tests/utils/env_jenkins_cicd
+            export JOB_NAME=deploy_openstack
+            export BUILD_NUMBER=lastBuild
+            python ./tcp_tests/utils/get_jenkins_job_stages.py
+            """)
+        throw new Exception(workflow_details)
+    }
 }
 
 def sanity_check_component(stack) {
     // Run sanity check for the component ${stack}.
     // Result will be stored in JUnit XML file deploy_${stack}.xml
-    run_cmd("""\
-        py.test --junit-xml=deploy_${stack}.xml -m check_${stack}
-    """)
+    try {
+        run_cmd("""\
+            py.test --junit-xml=deploy_${stack}.xml -m check_${stack}
+        """)
+    } catch (e) {
+        def String junit_report_xml = readFile("deploy_${stack}.xml")
+        def String junit_report_xml_pretty = new XmlUtil().serialize(junit_report_xml)
+        def String msg = "Sanity check for '${stack}' failed, JUnit report:\n"
+        throw new Exception(msg + junit_report_xml_pretty)
+    }
 }
 
 def devops_snapshot(stack) {
@@ -258,4 +348,4 @@
 }
 
 def report_test_result() {
-}
\ No newline at end of file
+}
diff --git a/tcp_tests/managers/jenkins/client.py b/tcp_tests/managers/jenkins/client.py
index 2628e7a..fbd5c43 100644
--- a/tcp_tests/managers/jenkins/client.py
+++ b/tcp_tests/managers/jenkins/client.py
@@ -2,6 +2,7 @@
 import time
 
 import jenkins
+import json
 import requests
 
 from devops.helpers import helpers
@@ -35,10 +36,8 @@
 
 class JenkinsClient(object):
 
-    def __init__(self, host=None, username=None, password=None):
+    def __init__(self, host=None, username='admin', password='r00tme'):
         host = host or 'http://172.16.44.33:8081'
-        username = username or 'admin'
-        password = password or 'r00tme'
         # self.__client = jenkins.Jenkins(
         self.__client = JenkinsWrapper(
             host,
@@ -167,3 +166,27 @@
                 'GET',
                 self.__client._build_url(PROGRESSIVE_CONSOLE_OUTPUT, locals()))
         return(self.__client.jenkins_request(req))
+
+    def get_workflow(self, name, build_id, enode=None, mode='describe'):
+        '''Get workflow results from pipeline job
+
+        :param name: job name
+        :param build_id: str, build number or 'lastBuild'
+        :param enode: int, execution node in the workflow
+        :param mode: the stage or execution node description if 'describe',
+                     the execution node log if 'log'
+        '''
+        folder_url, short_name = self.__client._get_job_folder(name)
+
+        if enode:
+            WORKFLOW_DESCRIPTION = (
+                '%(folder_url)sjob/%(short_name)s/%(build_id)s/'
+                'execution/node/%(enode)d/wfapi/%(mode)s')
+        else:
+            WORKFLOW_DESCRIPTION = (
+                '%(folder_url)sjob/%(short_name)s/%(build_id)s/wfapi/%(mode)s')
+        req = requests.Request(
+                'GET',
+                self.__client._build_url(WORKFLOW_DESCRIPTION, locals()))
+        response = self.__client.jenkins_open(req)
+        return json.loads(response)
diff --git a/tcp_tests/templates/cookied-bm-contrail40/sl.yaml b/tcp_tests/templates/cookied-bm-contrail40/sl.yaml
index 87651f8..cdb0daf 100644
--- a/tcp_tests/templates/cookied-bm-contrail40/sl.yaml
+++ b/tcp_tests/templates/cookied-bm-contrail40/sl.yaml
@@ -10,7 +10,7 @@
 - description: Install docker swarm on master node
   cmd: salt --hard-crash --state-output=mixed --state-verbose=False -C 'I@docker:swarm:role:master' state.sls docker.swarm
   node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 10}
+  retry: {count: 3, delay: 10}
   skip_fail: false
 
 - description: Send grains to the swarm slave nodes
@@ -34,13 +34,13 @@
 - description:  Rerun swarm on slaves to proper token population
   cmd: salt --hard-crash --state-output=mixed --state-verbose=False -C 'I@docker:swarm:role:master' state.sls docker.swarm
   node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 10}
+  retry: {count: 2, delay: 10}
   skip_fail: false
 
 - description:  Configure slave nodes
   cmd: salt --hard-crash --state-output=mixed --state-verbose=False -C 'I@docker:swarm:role:manager' state.sls docker.swarm -b 1
   node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 10}
+  retry: {count: 3, delay: 10}
   skip_fail: false
 
 - description:  List registered Docker swarm nodes
@@ -73,7 +73,7 @@
       salt --hard-crash --state-output=mixed --state-verbose=False -C 'I@mongodb:server' state.sls mongodb
     fi
   node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 2, delay: 30}
+  retry: {count: 4, delay: 40}
   skip_fail: false
 
 - description: Configure Alerta if it is exists
@@ -82,7 +82,7 @@
       salt --hard-crash --state-output=mixed --state-verbose=False -C 'I@docker:swarm and I@prometheus:alerta' state.sls prometheus.alerta
     fi
   node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 10}
+  retry: {count: 4, delay: 10}
   skip_fail: false
 
 - description: launch prometheus containers
diff --git a/tcp_tests/templates/cookied-model-generator/salt_cookied-bm-mcp-ocata-contrail.yaml b/tcp_tests/templates/cookied-model-generator/salt_cookied-bm-mcp-ocata-contrail.yaml
index c5681be..0915547 100644
--- a/tcp_tests/templates/cookied-model-generator/salt_cookied-bm-mcp-ocata-contrail.yaml
+++ b/tcp_tests/templates/cookied-model-generator/salt_cookied-bm-mcp-ocata-contrail.yaml
@@ -13,30 +13,8 @@
 {% import 'shared-salt.yaml' as SHARED with context %}
 
 {{ SHARED.MACRO_INSTALL_PACKAGES_ON_NODES(HOSTNAME_CFG01) }}
-- description: Re-install all the fromulas
-  cmd: |
-    set -e;
-    apt-get install -y salt-formula-*
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
 
-- description: Sync formulas to service
-  cmd: |
-    set -e;
-    RECLASS_ROOT=${RECLASS_ROOT:-/srv/salt/reclass/};
-    FORMULAS_PATH=${FORMULAS_PATH:-/usr/share/salt-formulas};
-    [ ! -d ${RECLASS_ROOT}/classes/service ] && mkdir -p ${RECLASS_ROOT}/classes/service;
-    for formula_service in $(ls /usr/share/salt-formulas/reclass/service/); do
-        #Since some salt formula names contain "-" and in symlinks they should contain "_" adding replacement;
-        formula_service=${formula_service//-/$'_'};
-        if [ ! -L "${RECLASS_ROOT}/classes/service/${formula_service}" ]; then
-            ln -sf ${FORMULAS_PATH}/reclass/service/${formula_service} ${RECLASS_ROOT}/classes/service/${formula_service};
-        fi;
-    done
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
+{{ SHARED.MACRO_INSTALL_FORMULAS('\*') }}
 
 {{ SHARED.MACRO_GENERATE_COOKIECUTTER_MODEL(CONTROL_VLAN=CONTROL_VLAN, TENANT_VLAN=TENANT_VLAN) }}
 
diff --git a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-mitaka-dvr.yaml b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-mitaka-dvr.yaml
index 20d39b0..22297a6 100644
--- a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-mitaka-dvr.yaml
+++ b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-mitaka-dvr.yaml
@@ -24,30 +24,8 @@
 {% import 'shared-salt.yaml' as SHARED with context %}
 
 {{ SHARED.MACRO_INSTALL_PACKAGES_ON_NODES(HOSTNAME_CFG01) }}
-- description: Re-install all the fromulas
-  cmd: |
-    set -e;
-    apt-get install -y salt-formula-*
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
 
-- description: Sync formulas to service
-  cmd: |
-    set -e;
-    RECLASS_ROOT=${RECLASS_ROOT:-/srv/salt/reclass/};
-    FORMULAS_PATH=${FORMULAS_PATH:-/usr/share/salt-formulas};
-    [ ! -d ${RECLASS_ROOT}/classes/service ] && mkdir -p ${RECLASS_ROOT}/classes/service;
-    for formula_service in $(ls /usr/share/salt-formulas/reclass/service/); do
-        #Since some salt formula names contain "-" and in symlinks they should contain "_" adding replacement;
-        formula_service=${formula_service//-/$'_'};
-        if [ ! -L "${RECLASS_ROOT}/classes/service/${formula_service}" ]; then
-            ln -sf ${FORMULAS_PATH}/reclass/service/${formula_service} ${RECLASS_ROOT}/classes/service/${formula_service};
-        fi;
-    done
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
+{{ SHARED.MACRO_INSTALL_FORMULAS('\*') }}
 
 {{ SHARED.MACRO_GENERATE_COOKIECUTTER_MODEL() }}
 
diff --git a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-mitaka-ovs.yaml b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-mitaka-ovs.yaml
index d89f577..48e019a 100644
--- a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-mitaka-ovs.yaml
+++ b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-mitaka-ovs.yaml
@@ -22,30 +22,8 @@
 {% import 'shared-salt.yaml' as SHARED with context %}
 
 {{ SHARED.MACRO_INSTALL_PACKAGES_ON_NODES(HOSTNAME_CFG01) }}
-- description: Re-install all the fromulas
-  cmd: |
-    set -e;
-    apt-get install -y salt-formula-*
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
 
-- description: Sync formulas to service
-  cmd: |
-    set -e;
-    RECLASS_ROOT=${RECLASS_ROOT:-/srv/salt/reclass/};
-    FORMULAS_PATH=${FORMULAS_PATH:-/usr/share/salt-formulas};
-    [ ! -d ${RECLASS_ROOT}/classes/service ] && mkdir -p ${RECLASS_ROOT}/classes/service;
-    for formula_service in $(ls /usr/share/salt-formulas/reclass/service/); do
-        #Since some salt formula names contain "-" and in symlinks they should contain "_" adding replacement;
-        formula_service=${formula_service//-/$'_'};
-        if [ ! -L "${RECLASS_ROOT}/classes/service/${formula_service}" ]; then
-            ln -sf ${FORMULAS_PATH}/reclass/service/${formula_service} ${RECLASS_ROOT}/classes/service/${formula_service};
-        fi;
-    done
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
+{{ SHARED.MACRO_INSTALL_FORMULAS('\*') }}
 
 {{ SHARED.MACRO_GENERATE_COOKIECUTTER_MODEL() }}
 
diff --git a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-ocata-dop-sl2.yaml b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-ocata-dop-sl2.yaml
index 6193a47..0178514 100644
--- a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-ocata-dop-sl2.yaml
+++ b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-ocata-dop-sl2.yaml
@@ -14,30 +14,8 @@
 {% import 'shared-salt.yaml' as SHARED with context %}
 
 {{ SHARED.MACRO_INSTALL_PACKAGES_ON_NODES(HOSTNAME_CFG01) }}
-- description: Re-install all the fromulas
-  cmd: |
-    set -e;
-    apt-get install -y salt-formula-*
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
 
-- description: Sync formulas to service
-  cmd: |
-    set -e;
-    RECLASS_ROOT=${RECLASS_ROOT:-/srv/salt/reclass/};
-    FORMULAS_PATH=${FORMULAS_PATH:-/usr/share/salt-formulas};
-    [ ! -d ${RECLASS_ROOT}/classes/service ] && mkdir -p ${RECLASS_ROOT}/classes/service;
-    for formula_service in $(ls /usr/share/salt-formulas/reclass/service/); do
-        #Since some salt formula names contain "-" and in symlinks they should contain "_" adding replacement;
-        formula_service=${formula_service//-/$'_'};
-        if [ ! -L "${RECLASS_ROOT}/classes/service/${formula_service}" ]; then
-            ln -sf ${FORMULAS_PATH}/reclass/service/${formula_service} ${RECLASS_ROOT}/classes/service/${formula_service};
-        fi;
-    done
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
+{{ SHARED.MACRO_INSTALL_FORMULAS('\*') }}
 
 {{ SHARED.MACRO_GENERATE_COOKIECUTTER_MODEL() }}
 
diff --git a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-ocata-dvr-vxlan.yaml b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-ocata-dvr-vxlan.yaml
index 10835d9..38a1d10 100644
--- a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-ocata-dvr-vxlan.yaml
+++ b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-ocata-dvr-vxlan.yaml
@@ -11,30 +11,8 @@
 {% import 'shared-salt.yaml' as SHARED with context %}
 
 {{ SHARED.MACRO_INSTALL_PACKAGES_ON_NODES(HOSTNAME_CFG01) }}
-- description: Re-install all the fromulas
-  cmd: |
-    set -e;
-    apt-get install -y salt-formula-*
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
 
-- description: Sync formulas to service
-  cmd: |
-    set -e;
-    RECLASS_ROOT=${RECLASS_ROOT:-/srv/salt/reclass/};
-    FORMULAS_PATH=${FORMULAS_PATH:-/usr/share/salt-formulas};
-    [ ! -d ${RECLASS_ROOT}/classes/service ] && mkdir -p ${RECLASS_ROOT}/classes/service;
-    for formula_service in $(ls /usr/share/salt-formulas/reclass/service/); do
-        #Since some salt formula names contain "-" and in symlinks they should contain "_" adding replacement;
-        formula_service=${formula_service//-/$'_'};
-        if [ ! -L "${RECLASS_ROOT}/classes/service/${formula_service}" ]; then
-            ln -sf ${FORMULAS_PATH}/reclass/service/${formula_service} ${RECLASS_ROOT}/classes/service/${formula_service};
-        fi;
-    done
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
+{{ SHARED.MACRO_INSTALL_FORMULAS('\*') }}
 
 {{ SHARED.MACRO_GENERATE_COOKIECUTTER_MODEL() }}
 
diff --git a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-ocata-dvr.yaml b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-ocata-dvr.yaml
index 29423f9..55e8c2a 100644
--- a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-ocata-dvr.yaml
+++ b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-ocata-dvr.yaml
@@ -14,30 +14,8 @@
 {% import 'shared-salt.yaml' as SHARED with context %}
 
 {{ SHARED.MACRO_INSTALL_PACKAGES_ON_NODES(HOSTNAME_CFG01) }}
-- description: Re-install all the fromulas
-  cmd: |
-    set -e;
-    apt-get install -y salt-formula-*
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
 
-- description: Sync formulas to service
-  cmd: |
-    set -e;
-    RECLASS_ROOT=${RECLASS_ROOT:-/srv/salt/reclass/};
-    FORMULAS_PATH=${FORMULAS_PATH:-/usr/share/salt-formulas};
-    [ ! -d ${RECLASS_ROOT}/classes/service ] && mkdir -p ${RECLASS_ROOT}/classes/service;
-    for formula_service in $(ls /usr/share/salt-formulas/reclass/service/); do
-        #Since some salt formula names contain "-" and in symlinks they should contain "_" adding replacement;
-        formula_service=${formula_service//-/$'_'};
-        if [ ! -L "${RECLASS_ROOT}/classes/service/${formula_service}" ]; then
-            ln -sf ${FORMULAS_PATH}/reclass/service/${formula_service} ${RECLASS_ROOT}/classes/service/${formula_service};
-        fi;
-    done
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
+{{ SHARED.MACRO_INSTALL_FORMULAS('\*') }}
 
 {{ SHARED.MACRO_GENERATE_COOKIECUTTER_MODEL() }}
 
diff --git a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-ocata-ovs.yaml b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-ocata-ovs.yaml
index 7a5f849..911b935 100644
--- a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-ocata-ovs.yaml
+++ b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-ocata-ovs.yaml
@@ -14,30 +14,8 @@
 {% import 'shared-salt.yaml' as SHARED with context %}
 
 {{ SHARED.MACRO_INSTALL_PACKAGES_ON_NODES(HOSTNAME_CFG01) }}
-- description: Re-install all the fromulas
-  cmd: |
-    set -e;
-    apt-get install -y salt-formula-*
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
 
-- description: Sync formulas to service
-  cmd: |
-    set -e;
-    RECLASS_ROOT=${RECLASS_ROOT:-/srv/salt/reclass/};
-    FORMULAS_PATH=${FORMULAS_PATH:-/usr/share/salt-formulas};
-    [ ! -d ${RECLASS_ROOT}/classes/service ] && mkdir -p ${RECLASS_ROOT}/classes/service;
-    for formula_service in $(ls /usr/share/salt-formulas/reclass/service/); do
-        #Since some salt formula names contain "-" and in symlinks they should contain "_" adding replacement;
-        formula_service=${formula_service//-/$'_'};
-        if [ ! -L "${RECLASS_ROOT}/classes/service/${formula_service}" ]; then
-            ln -sf ${FORMULAS_PATH}/reclass/service/${formula_service} ${RECLASS_ROOT}/classes/service/${formula_service};
-        fi;
-    done
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
+{{ SHARED.MACRO_INSTALL_FORMULAS('\*') }}
 
 {{ SHARED.MACRO_GENERATE_COOKIECUTTER_MODEL() }}
 
diff --git a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-pike-dpdk.yaml b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-pike-dpdk.yaml
index 02a0fd8..97313de 100644
--- a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-pike-dpdk.yaml
+++ b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-pike-dpdk.yaml
@@ -11,30 +11,8 @@
 {% import 'shared-salt.yaml' as SHARED with context %}
 
 {{ SHARED.MACRO_INSTALL_PACKAGES_ON_NODES(HOSTNAME_CFG01) }}
-- description: Re-install all the fromulas
-  cmd: |
-    set -e;
-    apt-get install -y salt-formula-*
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
 
-- description: Sync formulas to service
-  cmd: |
-    set -e;
-    RECLASS_ROOT=${RECLASS_ROOT:-/srv/salt/reclass/};
-    FORMULAS_PATH=${FORMULAS_PATH:-/usr/share/salt-formulas};
-    [ ! -d ${RECLASS_ROOT}/classes/service ] && mkdir -p ${RECLASS_ROOT}/classes/service;
-    for formula_service in $(ls /usr/share/salt-formulas/reclass/service/); do
-        #Since some salt formula names contain "-" and in symlinks they should contain "_" adding replacement;
-        formula_service=${formula_service//-/$'_'};
-        if [ ! -L "${RECLASS_ROOT}/classes/service/${formula_service}" ]; then
-            ln -sf ${FORMULAS_PATH}/reclass/service/${formula_service} ${RECLASS_ROOT}/classes/service/${formula_service};
-        fi;
-    done
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
+{{ SHARED.MACRO_INSTALL_FORMULAS('\*') }}
 
 {{ SHARED.MACRO_GENERATE_COOKIECUTTER_MODEL() }}
 
diff --git a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-pike-dvr.yaml b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-pike-dvr.yaml
index f069181..838bc31 100644
--- a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-pike-dvr.yaml
+++ b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-pike-dvr.yaml
@@ -11,30 +11,8 @@
 {% import 'shared-salt.yaml' as SHARED with context %}
 
 {{ SHARED.MACRO_INSTALL_PACKAGES_ON_NODES(HOSTNAME_CFG01) }}
-- description: Re-install all the fromulas
-  cmd: |
-    set -e;
-    apt-get install -y salt-formula-*
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
 
-- description: Sync formulas to service
-  cmd: |
-    set -e;
-    RECLASS_ROOT=${RECLASS_ROOT:-/srv/salt/reclass/};
-    FORMULAS_PATH=${FORMULAS_PATH:-/usr/share/salt-formulas};
-    [ ! -d ${RECLASS_ROOT}/classes/service ] && mkdir -p ${RECLASS_ROOT}/classes/service;
-    for formula_service in $(ls /usr/share/salt-formulas/reclass/service/); do
-        #Since some salt formula names contain "-" and in symlinks they should contain "_" adding replacement;
-        formula_service=${formula_service//-/$'_'};
-        if [ ! -L "${RECLASS_ROOT}/classes/service/${formula_service}" ]; then
-            ln -sf ${FORMULAS_PATH}/reclass/service/${formula_service} ${RECLASS_ROOT}/classes/service/${formula_service};
-        fi;
-    done
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
+{{ SHARED.MACRO_INSTALL_FORMULAS('\*') }}
 
 {{ SHARED.MACRO_GENERATE_COOKIECUTTER_MODEL() }}
 
diff --git a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-pike-ovs.yaml b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-pike-ovs.yaml
index 0bad6a2..dd6c711 100644
--- a/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-pike-ovs.yaml
+++ b/tcp_tests/templates/cookied-model-generator/salt_cookied-mcp-pike-ovs.yaml
@@ -11,30 +11,8 @@
 {% import 'shared-salt.yaml' as SHARED with context %}
 
 {{ SHARED.MACRO_INSTALL_PACKAGES_ON_NODES(HOSTNAME_CFG01) }}
-- description: Re-install all the fromulas
-  cmd: |
-    set -e;
-    apt-get install -y salt-formula-*
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
 
-- description: Sync formulas to service
-  cmd: |
-    set -e;
-    RECLASS_ROOT=${RECLASS_ROOT:-/srv/salt/reclass/};
-    FORMULAS_PATH=${FORMULAS_PATH:-/usr/share/salt-formulas};
-    [ ! -d ${RECLASS_ROOT}/classes/service ] && mkdir -p ${RECLASS_ROOT}/classes/service;
-    for formula_service in $(ls /usr/share/salt-formulas/reclass/service/); do
-        #Since some salt formula names contain "-" and in symlinks they should contain "_" adding replacement;
-        formula_service=${formula_service//-/$'_'};
-        if [ ! -L "${RECLASS_ROOT}/classes/service/${formula_service}" ]; then
-            ln -sf ${FORMULAS_PATH}/reclass/service/${formula_service} ${RECLASS_ROOT}/classes/service/${formula_service};
-        fi;
-    done
-  node_name: {{ HOSTNAME_CFG01 }}
-  retry: {count: 1, delay: 1}
-  skip_fail: false
+{{ SHARED.MACRO_INSTALL_FORMULAS('\*') }}
 
 {{ SHARED.MACRO_GENERATE_COOKIECUTTER_MODEL() }}
 
diff --git a/tcp_tests/templates/shared-backup-restore.yaml b/tcp_tests/templates/shared-backup-restore.yaml
index 02fdeec..2cc3787 100644
--- a/tcp_tests/templates/shared-backup-restore.yaml
+++ b/tcp_tests/templates/shared-backup-restore.yaml
@@ -50,7 +50,7 @@
 
 - description: Apply the salt.minion state
   cmd: |
-    salt -C 'I@xtrabackup:client or I@xtrabackup:server' state.sls salt.minion
+    salt -C 'I@xtrabackup:client or I@xtrabackup:server' state.sls salt.minion && sleep 10
   node_name: {{ HOSTNAME_CFG01 }}
   retry: {count: 1, delay: 1}
   skip_fail: false
diff --git a/tcp_tests/utils/get_jenkins_job_stages.py b/tcp_tests/utils/get_jenkins_job_stages.py
new file mode 100755
index 0000000..143e1a2
--- /dev/null
+++ b/tcp_tests/utils/get_jenkins_job_stages.py
@@ -0,0 +1,136 @@
+#    Copyright 2017 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+import argparse
+import os
+import sys
+
+sys.path.append(os.getcwd())
+try:
+    from tcp_tests.managers.jenkins import client
+except ImportError:
+    print("ImportError: Run the application from the tcp-qa directory or "
+          "set the PYTHONPATH environment variable to directory which contains"
+          " ./tcp_tests")
+    sys.exit(1)
+
+
+def load_params():
+    """
+    Parse CLI arguments and environment variables
+
+    Returns: ArgumentParser instance
+    """
+    env_host = os.environ.get('JENKINS_URL', None)
+    env_username = os.environ.get('JENKINS_USER', None)
+    env_password = os.environ.get('JENKINS_PASS', None)
+    env_job_name = os.environ.get('JOB_NAME', 'deploy_openstack')
+    env_build_number = os.environ.get('BUILD_NUMBER', 'lastBuild')
+
+    parser = argparse.ArgumentParser(description=(
+        'Host, username and password may be specified either by the command '
+        'line arguments or using environment variables: JENKINS_URL, '
+        'JENKINS_USER, JENKINS_PASS, JENKINS_START_TIMEOUT, '
+        'JENKINS_BUILD_TIMEOUT. \nCommand line arguments have the highest '
+        'priority, after that the environment variables are used as defaults.'
+    ))
+    parser.add_argument('--host',
+                        metavar='JENKINS_URL',
+                        help='Jenkins Host',
+                        default=env_host)
+    parser.add_argument('--username',
+                        metavar='JENKINS_USER',
+                        help='Jenkins Username',
+                        default=env_username)
+    parser.add_argument('--password',
+                        metavar='JENKINS_PASS',
+                        help='Jenkins Password or API token',
+                        default=env_password)
+    parser.add_argument('--job-name',
+                        metavar='JOB_NAME',
+                        help='Jenkins job name',
+                        default=env_job_name)
+    parser.add_argument('--build-number',
+                        metavar='BUILD_NUMBER',
+                        help='Jenkins job build number',
+                        default=env_build_number)
+    return parser
+
+
+def get_deployment_result(opts):
+    """Get the pipeline job result from Jenkins
+
+    Get all the stages results from the specified job,
+    show error message if present.
+    """
+    jenkins = client.JenkinsClient(host=opts.host,
+                                   username=opts.username,
+                                   password=opts.password)
+
+    def get_stages(nodes, indent=0, show_status=True):
+        res = []
+        for node in nodes:
+            if show_status:
+                msg = " " * indent + "{}: {}".format(node['name'],
+                                                     node['status'])
+                if 'error' in node and 'message' in node['error']:
+                    msg += ", " + node['error']['message']
+                res.append(msg)
+
+            if node['status'] != 'SUCCESS':
+                wf = jenkins.get_workflow(opts.job_name, opts.build_number,
+                                          int(node['id']))
+                if wf is not None:
+                    if 'stageFlowNodes' in wf:
+                        res += get_stages(wf['stageFlowNodes'], indent + 2,
+                                          show_status=False)
+                    elif '_links' in wf and 'log' in wf['_links']:
+                        log = jenkins.get_workflow(opts.job_name,
+                                                   opts.build_number,
+                                                   int(node['id']),
+                                                   mode='log')
+                        if "text" in log:
+                            prefix = " " * (indent + 2)
+                            res.append("\n".join(
+                                prefix + line
+                                for line in log["text"].splitlines()))
+        return res
+
+    wf = jenkins.get_workflow(opts.job_name, opts.build_number)
+    info = jenkins.build_info(opts.job_name, int(wf['id']))
+    build_description = ("[" + info['fullDisplayName'] + "] " +
+                         info['url'] + " : " + info['result'])
+    stages = get_stages(wf['stages'], 0)
+    if not stages:
+        msg = wf['status'] + ":\n\n"
+        stages = [msg + jenkins.get_build_output(opts.job_name, int(wf['id']))]
+    return (build_description, stages)
+
+
+def main(args=None):
+    parser = load_params()
+    opts = parser.parse_args()
+
+    if opts.host is None:
+        print("JENKINS_URL is required!")
+        parser.print_help()
+        return 10
+    else:
+        (build_description, stages) = get_deployment_result(opts)
+        print(build_description)
+        print('\n'.join(stages))
+
+
+if __name__ == "__main__":
+    sys.exit(main())