Grab failure details from pipeline jobs

- if a job fails, try to get the stages from that job
- if stages are present, collect from each non-SUCCESS stage all
  the workflow 'nodes' (the commands performed in the stage, such
  as 'running shell script' or 'echo') and fetch the log of the
  failed node only
- if no stages are found (the job is still assumed to be a
  pipeline job), fetch the complete console output of the job

When a pipeline job fails, the parent job is failed by throwing
an exception that contains the collected details.
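
For illustration only, a rough standalone sketch of this traversal.
The host and credentials are the JenkinsClient defaults from this
change, 'deploy_openstack'/'lastBuild' are the script defaults, and
consoleText stands in for the progressive console output the script
actually reads; the real logic lives in
tcp_tests/utils/get_jenkins_job_stages.py and also handles nodes
that expose their log directly via '_links':

    import requests

    JENKINS = 'http://172.16.44.33:8081'  # JenkinsClient default host
    AUTH = ('admin', 'r00tme')            # JenkinsClient default creds
    JOB, BUILD = 'deploy_openstack', 'lastBuild'

    def wfapi(path):
        # the Jenkins workflow API answers with JSON about stages/nodes
        url = '{}/job/{}/{}/{}'.format(JENKINS, JOB, BUILD, path)
        return requests.get(url, auth=AUTH).json()

    run = wfapi('wfapi/describe')
    if not run.get('stages'):
        # no stages: fall back to the complete job console output
        print(requests.get('{}/job/{}/{}/consoleText'.format(
            JENKINS, JOB, BUILD), auth=AUTH).text)
    for stage in run.get('stages', []):
        if stage['status'] == 'SUCCESS':
            continue
        # list the workflow nodes of the non-SUCCESS stage
        desc = wfapi('execution/node/{}/wfapi/describe'.format(stage['id']))
        for node in desc.get('stageFlowNodes', []):
            if node['status'] == 'SUCCESS':
                continue
            # fetch the log of the failed node only
            log = wfapi('execution/node/{}/wfapi/log'.format(node['id']))
            print(log.get('text', ''))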

Change-Id: Ie5c2171e5373345b1951de55ba604b5d484340d3
diff --git a/.gitignore b/.gitignore
index ea68183..71462bb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -45,6 +45,7 @@
 
 # Local settings
 local_settings.py
+id_rsa
 
 # Documentation
 doc/_build/
diff --git a/src/com/mirantis/system_qa/SharedPipeline.groovy b/src/com/mirantis/system_qa/SharedPipeline.groovy
index a64b950..460ed88 100644
--- a/src/com/mirantis/system_qa/SharedPipeline.groovy
+++ b/src/com/mirantis/system_qa/SharedPipeline.groovy
@@ -1,46 +1,128 @@
 package com.mirantis.system_qa
 
+import groovy.xml.XmlUtil
 
-def run_cmd(cmd, returnStdout=false) {
+def run_cmd(String cmd, Boolean returnStdout=false, Boolean exception_with_logs=false) {
     def common = new com.mirantis.mk.Common()
     common.printMsg("Run shell command:\n" + cmd, "blue")
     def VENV_PATH='/home/jenkins/fuel-devops30'
+    def stdout_path = "/tmp/${JOB_NAME}_${BUILD_NUMBER}_stdout.log"
+    def stderr_path = "/tmp/${JOB_NAME}_${BUILD_NUMBER}_stderr.log"
     script = """\
         set +x;
         echo 'activate python virtualenv ${VENV_PATH}';
         . ${VENV_PATH}/bin/activate;
-        bash -c 'set -ex; set -ex; ${cmd.stripIndent()}'
+        bash -c 'set -ex; ${cmd.stripIndent()}' 1>${stdout_path} 2>${stderr_path}
     """
-    return sh(script: script, returnStdout: returnStdout)
+    def result
+    try {
+        result = sh(script: script)
+        if (returnStdout) {
+            def stdout = readFile("${stdout_path}")
+            return stdout
+        } else {
+            return result
+        }
+    } catch (e) {
+        if (exception_with_logs) {
+            def stdout = readFile("${stdout_path}")
+            def stderr = readFile("${stderr_path}")
+            def error_message = e.message + "\n<<<<<< STDOUT: >>>>>>\n" + stdout + "\n<<<<<< STDERR: >>>>>>\n" + stderr
+            throw new Exception(error_message)
+        } else {
+            throw e
+        }
+    } finally {
+        sh(script: "rm ${stdout_path} ${stderr_path} || true")
+    }
 }
 
 def run_cmd_stdout(cmd) {
-    return run_cmd(cmd, true)
+    return run_cmd(cmd, true, true)
 }
 
+def build_pipeline_job(job_name, parameters) {
+    // Build a job; if it fails, grab the failure details and use them in the exception
+    def common = new com.mirantis.mk.Common()
+    common.printMsg("Start building job '${job_name}' with parameters:", "purple")
+    common.prettyPrint(parameters)
+
+    def job_info = build job: "${job_name}",
+        parameters: parameters,
+        propagate: false
+
+    if (job_info.getResult() != "SUCCESS") {
+        currentBuild.result = job_info.getResult()
+        def build_number = job_info.getNumber()
+        common.printMsg("Job '${job_name}' failed, getting details", "red")
+        def workflow_details=run_cmd_stdout("""\
+            export JOB_NAME=${job_name}
+            export BUILD_NUMBER=${build_number}
+            python ./tcp_tests/utils/get_jenkins_job_stages.py
+            """)
+        throw new Exception(workflow_details)
+    }
+}
+
+def build_shell_job(job_name, parameters, junit_report_filename=null, junit_report_source_dir='**/') {
+    // Build a job; if it fails, grab the failure details and use them in the exception
+    // junit_report_filename: if not null, first try to copy this JUnit report from the remote job
+    def common = new com.mirantis.mk.Common()
+    common.printMsg("Start building job '${job_name}' with parameters:", "purple")
+    common.prettyPrint(parameters)
+
+    def job_info = build job: "${job_name}",
+        parameters: parameters,
+        propagate: false
+
+    if (job_info.getResult() != "SUCCESS") {
+        def build_status = job_info.getResult()
+        def build_number = job_info.getNumber()
+        def build_url = job_info.getAbsoluteUrl()
+        def job_url = "${build_url}"
+        currentBuild.result = build_status
+        if (junit_report_filename) {
+            common.printMsg("Job '${job_url}' failed with status ${build_status}, getting details", "red")
+            step($class: 'hudson.plugins.copyartifact.CopyArtifact',
+                 projectName: job_name,
+                 selector: specific("${build_number}"),
+                 filter: "${junit_report_source_dir}/${junit_report_filename}",
+                 target: '.',
+                 flatten: true,
+                 fingerprintArtifacts: true)
+
+            def String junit_report_xml = readFile("${junit_report_filename}")
+            def String junit_report_xml_pretty = new XmlUtil().serialize(junit_report_xml)
+            def String msg = "Job '${job_url}' failed with status ${build_status}, JUnit report:\n"
+            throw new Exception(msg + junit_report_xml_pretty)
+        } else {
+            throw new Exception("Job '${job_url}' failed with status ${build_status}, please check the console output.")
+        }
+    }
+}
 
 def prepare_working_dir() {
         println "Clean the working directory ${env.WORKSPACE}"
         deleteDir()
 
-        //// do not fail if environment doesn't exists
-        // println "Remove environment ${ENV_NAME}"
-        // run_cmd("""\
-        //     dos.py erase ${ENV_NAME} || true
-        // """)
-        // println "Remove config drive ISO"
-        // run_cmd("""\
-        //    rm /home/jenkins/images/${CFG01_CONFIG_IMAGE_NAME} || true
-        // """)
+        // do not fail if environment doesn't exists
+        println "Remove environment ${ENV_NAME}"
+        run_cmd("""\
+            dos.py erase ${ENV_NAME} || true
+        """)
+        println "Remove config drive ISO"
+        run_cmd("""\
+            rm /home/jenkins/images/${CFG01_CONFIG_IMAGE_NAME} || true
+        """)
 
         run_cmd("""\
-        git clone https://github.com/Mirantis/tcp-qa.git ${env.WORKSPACE}
-        if [ -n "$TCP_QA_REFS" ]; then
-            set -e
-            git fetch https://review.gerrithub.io/Mirantis/tcp-qa $TCP_QA_REFS && git checkout FETCH_HEAD || exit \$?
-        fi
-        pip install --upgrade --upgrade-strategy=only-if-needed -r tcp_tests/requirements.txt
-        """)
+            git clone https://github.com/Mirantis/tcp-qa.git ${env.WORKSPACE}
+            if [ -n "$TCP_QA_REFS" ]; then
+                set -e
+                git fetch https://review.gerrithub.io/Mirantis/tcp-qa $TCP_QA_REFS && git checkout FETCH_HEAD || exit \$?
+            fi
+            pip install --upgrade --upgrade-strategy=only-if-needed -r tcp_tests/requirements.txt
+        """, false, true)
 }
 
 def swarm_bootstrap_salt_cluster_devops() {
@@ -61,10 +143,8 @@
                 string(name: 'SALT_MODELS_SYSTEM_COMMIT', value: "${SALT_MODELS_SYSTEM_COMMIT}"),
                 booleanParam(name: 'SHUTDOWN_ENV_ON_TEARDOWN', value: false),
             ]
-        common.printMsg("Start building job 'swarm-bootstrap-salt-cluster-devops' with parameters:", "purple")
-        common.prettyPrint(parameters)
-        build job: 'swarm-bootstrap-salt-cluster-devops',
-            parameters: parameters
+
+        build_pipeline_job('swarm-bootstrap-salt-cluster-devops', parameters)
 }
 
 def swarm_deploy_cicd(String stack_to_install='core,cicd') {
@@ -78,10 +158,7 @@
                 string(name: 'TCP_QA_REFS', value: "${TCP_QA_REFS}"),
                 booleanParam(name: 'SHUTDOWN_ENV_ON_TEARDOWN', value: false),
             ]
-        common.printMsg("Start building job 'swarm-deploy-cicd' with parameters:", "purple")
-        common.prettyPrint(parameters)
-        build job: 'swarm-deploy-cicd',
-            parameters: parameters
+        build_pipeline_job('swarm-deploy-cicd', parameters)
 }
 
 def swarm_deploy_platform(String stack_to_install) {
@@ -95,10 +172,7 @@
                 string(name: 'TCP_QA_REFS', value: "${TCP_QA_REFS}"),
                 booleanParam(name: 'SHUTDOWN_ENV_ON_TEARDOWN', value: false),
             ]
-        common.printMsg("Start building job 'swarm-deploy-platform' with parameters:", "purple")
-        common.prettyPrint(parameters)
-        build job: 'swarm-deploy-platform',
-            parameters: parameters
+        build_pipeline_job('swarm-deploy-platform', parameters)
 }
 
 def swarm_run_pytest(String passed_steps) {
@@ -151,10 +225,8 @@
                 string(name: 'IPV4_NET_TENANT', value: IPV4_NET_TENANT),
                 string(name: 'IPV4_NET_EXTERNAL', value: IPV4_NET_EXTERNAL),
             ]
-        common.printMsg("Start building job 'swarm-cookied-model-generator' with parameters:", "purple")
-        common.prettyPrint(parameters)
-        build job: 'swarm-cookied-model-generator',
-            parameters: parameters
+
+        build_shell_job('swarm-cookied-model-generator', parameters, "deploy_generate_model.xml")
 }
 
 def generate_configdrive_iso() {
@@ -182,53 +254,81 @@
                 string(name: 'PIPELINE_LIBRARY_REF', value: "${PIPELINE_LIBRARY_REF}"),
                 string(name: 'MK_PIPELINES_REF', value: "${MK_PIPELINES_REF}"),
             ]
-        common.printMsg("Start building job 'create-cfg-config-drive' with parameters:", "purple")
-        common.prettyPrint(parameters)
-        build job: 'create-cfg-config-drive',
-            parameters: parameters
+        build_pipeline_job('create-cfg-config-drive', parameters)
 }
 
 def run_job_on_day01_node(stack_to_install, timeout=1800) {
     // stack_to_install="core,cicd"
     def stack = "${stack_to_install}"
-    run_cmd("""\
-        export ENV_NAME=${ENV_NAME}
-        . ./tcp_tests/utils/env_salt
-        . ./tcp_tests/utils/env_jenkins_day01
-        export JENKINS_BUILD_TIMEOUT=${timeout}
-        JOB_PARAMETERS=\"{
-            \\\"SALT_MASTER_URL\\\": \\\"\${SALTAPI_URL}\\\",
-            \\\"STACK_INSTALL\\\": \\\"${stack}\\\"
-        }\"
-        JOB_PREFIX="[ {job_name}/{build_number}:${stack} {time} ] "
-        python ./tcp_tests/utils/run_jenkins_job.py --verbose --job-name=deploy_openstack --job-parameters="\$JOB_PARAMETERS" --job-output-prefix="\$JOB_PREFIX"
-    """)
+    try {
+        run_cmd("""\
+            export ENV_NAME=${ENV_NAME}
+            . ./tcp_tests/utils/env_salt
+            . ./tcp_tests/utils/env_jenkins_day01
+            export JENKINS_BUILD_TIMEOUT=${timeout}
+            JOB_PARAMETERS=\"{
+                \\\"SALT_MASTER_URL\\\": \\\"\${SALTAPI_URL}\\\",
+                \\\"STACK_INSTALL\\\": \\\"${stack}\\\"
+            }\"
+            JOB_PREFIX="[ ${ENV_NAME}/{build_number}:${stack} {time} ] "
+            python ./tcp_tests/utils/run_jenkins_job.py --verbose --job-name=deploy_openstack --job-parameters="\$JOB_PARAMETERS" --job-output-prefix="\$JOB_PREFIX"
+        """)
+    } catch (e) {
+        common.printMsg("Product job 'deploy_openstack' failed, getting details", "red")
+        def workflow_details=run_cmd_stdout("""\
+            . ./tcp_tests/utils/env_salt
+            . ./tcp_tests/utils/env_jenkins_day01
+            export JOB_NAME=deploy_openstack
+            export BUILD_NUMBER=lastBuild
+            python ./tcp_tests/utils/get_jenkins_job_stages.py
+            """)
+        throw new Exception(workflow_details)
+    }
 }
 
 def run_job_on_cicd_nodes(stack_to_install, timeout=1800) {
     // stack_to_install="k8s,calico,stacklight"
     def stack = "${stack_to_install}"
-    run_cmd("""\
-        export ENV_NAME=${ENV_NAME}
-        . ./tcp_tests/utils/env_salt
-        . ./tcp_tests/utils/env_jenkins_cicd
-        export JENKINS_BUILD_TIMEOUT=${timeout}
-        JOB_PARAMETERS=\"{
-            \\\"SALT_MASTER_URL\\\": \\\"\${SALTAPI_URL}\\\",
-            \\\"STACK_INSTALL\\\": \\\"${stack}\\\"
-        }\"
-        JOB_PREFIX="[ {job_name}/{build_number}:${stack} {time} ] "
-        python ./tcp_tests/utils/run_jenkins_job.py --verbose --job-name=deploy_openstack --job-parameters="\$JOB_PARAMETERS" --job-output-prefix="\$JOB_PREFIX"
-        sleep 60  # Wait for IO calm down on cluster nodes
-    """)
+    try {
+        run_cmd("""\
+            export ENV_NAME=${ENV_NAME}
+            . ./tcp_tests/utils/env_salt
+            . ./tcp_tests/utils/env_jenkins_cicd
+            export JENKINS_BUILD_TIMEOUT=${timeout}
+            JOB_PARAMETERS=\"{
+                \\\"SALT_MASTER_URL\\\": \\\"\${SALTAPI_URL}\\\",
+                \\\"STACK_INSTALL\\\": \\\"${stack}\\\"
+            }\"
+            JOB_PREFIX="[ ${ENV_NAME}/{build_number}:${stack} {time} ] "
+            python ./tcp_tests/utils/run_jenkins_job.py --verbose --job-name=deploy_openstack --job-parameters="\$JOB_PARAMETERS" --job-output-prefix="\$JOB_PREFIX"
+            sleep 60  # Wait for IO calm down on cluster nodes
+        """)
+    } catch (e) {
+        common.printMsg("Product job 'deploy_openstack' failed, getting details", "red")
+        def workflow_details=run_cmd_stdout("""\
+            . ./tcp_tests/utils/env_salt
+            . ./tcp_tests/utils/env_jenkins_cicd
+            export JOB_NAME=deploy_openstack
+            export BUILD_NUMBER=lastBuild
+            python ./tcp_tests/utils/get_jenkins_job_stages.py
+            """)
+        throw new Exception(workflow_details)
+    }
 }
 
 def sanity_check_component(stack) {
     // Run sanity check for the component ${stack}.
     // Result will be stored in JUnit XML file deploy_${stack}.xml
-    run_cmd("""\
-        py.test --junit-xml=deploy_${stack}.xml -m check_${stack}
-    """)
+    try {
+        run_cmd("""\
+            py.test --junit-xml=deploy_${stack}.xml -m check_${stack}
+        """)
+    } catch (e) {
+        def String junit_report_xml = readFile("deploy_${stack}.xml")
+        def String junit_report_xml_pretty = new XmlUtil().serialize(junit_report_xml)
+        def String msg = "Sanity check for '${stack}' failed, JUnit report:\n"
+        throw new Exception(msg + junit_report_xml_pretty)
+    }
 }
 
 def devops_snapshot(stack) {
@@ -245,7 +345,7 @@
         if [ -f \$(pwd)/${ENV_NAME}_salt_deployed.ini ]; then
             cp \$(pwd)/${ENV_NAME}_salt_deployed.ini \$(pwd)/${ENV_NAME}_${stack}_deployed.ini
         fi
-    """)
+    """, exeption_with_logs: true)
 }
 
 def get_steps_list(steps) {
@@ -257,4 +357,4 @@
 }
 
 def report_test_result() {
-}
\ No newline at end of file
+}
diff --git a/tcp_tests/managers/jenkins/client.py b/tcp_tests/managers/jenkins/client.py
index 2628e7a..fbd5c43 100644
--- a/tcp_tests/managers/jenkins/client.py
+++ b/tcp_tests/managers/jenkins/client.py
@@ -2,6 +2,7 @@
 import time
 
 import jenkins
+import json
 import requests
 
 from devops.helpers import helpers
@@ -35,10 +36,8 @@
 
 class JenkinsClient(object):
 
-    def __init__(self, host=None, username=None, password=None):
+    def __init__(self, host=None, username='admin', password='r00tme'):
         host = host or 'http://172.16.44.33:8081'
-        username = username or 'admin'
-        password = password or 'r00tme'
         # self.__client = jenkins.Jenkins(
         self.__client = JenkinsWrapper(
             host,
@@ -167,3 +166,27 @@
                 'GET',
                 self.__client._build_url(PROGRESSIVE_CONSOLE_OUTPUT, locals()))
         return(self.__client.jenkins_request(req))
+
+    def get_workflow(self, name, build_id, enode=None, mode='describe'):
+        '''Get workflow results from a pipeline job
+
+        :param name: job name
+        :param build_id: str, build number or 'lastBuild'
+        :param enode: int, execution node in the workflow
+        :param mode: 'describe' to get the stage or execution node
+                     description, 'log' to get the execution node log
+        '''
+        folder_url, short_name = self.__client._get_job_folder(name)
+
+        if enode:
+            WORKFLOW_DESCRIPTION = (
+                '%(folder_url)sjob/%(short_name)s/%(build_id)s/'
+                'execution/node/%(enode)d/wfapi/%(mode)s')
+        else:
+            WORKFLOW_DESCRIPTION = (
+                '%(folder_url)sjob/%(short_name)s/%(build_id)s/wfapi/%(mode)s')
+        req = requests.Request(
+                'GET',
+                self.__client._build_url(WORKFLOW_DESCRIPTION, locals()))
+        response = self.__client.jenkins_open(req)
+        return json.loads(response)
diff --git a/tcp_tests/utils/get_jenkins_job_stages.py b/tcp_tests/utils/get_jenkins_job_stages.py
new file mode 100755
index 0000000..143e1a2
--- /dev/null
+++ b/tcp_tests/utils/get_jenkins_job_stages.py
@@ -0,0 +1,136 @@
+#    Copyright 2017 Mirantis, Inc.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+import argparse
+import os
+import sys
+
+sys.path.append(os.getcwd())
+try:
+    from tcp_tests.managers.jenkins import client
+except ImportError:
+    print("ImportError: Run the application from the tcp-qa directory or "
+          "set the PYTHONPATH environment variable to directory which contains"
+          " ./tcp_tests")
+    sys.exit(1)
+
+
+def load_params():
+    """
+    Parse CLI arguments and environment variables
+
+    Returns: ArgumentParser instance
+    """
+    env_host = os.environ.get('JENKINS_URL', None)
+    env_username = os.environ.get('JENKINS_USER', None)
+    env_password = os.environ.get('JENKINS_PASS', None)
+    env_job_name = os.environ.get('JOB_NAME', 'deploy_openstack')
+    env_build_number = os.environ.get('BUILD_NUMBER', 'lastBuild')
+
+    parser = argparse.ArgumentParser(description=(
+        'Host, username and password may be specified either by the command '
+        'line arguments or using environment variables: JENKINS_URL, '
+        'JENKINS_USER, JENKINS_PASS, JOB_NAME, BUILD_NUMBER. \nCommand line '
+        'arguments have the highest priority, after that the environment '
+        'variables are used as defaults.'
+    ))
+    parser.add_argument('--host',
+                        metavar='JENKINS_URL',
+                        help='Jenkins Host',
+                        default=env_host)
+    parser.add_argument('--username',
+                        metavar='JENKINS_USER',
+                        help='Jenkins Username',
+                        default=env_username)
+    parser.add_argument('--password',
+                        metavar='JENKINS_PASS',
+                        help='Jenkins Password or API token',
+                        default=env_password)
+    parser.add_argument('--job-name',
+                        metavar='JOB_NAME',
+                        help='Jenkins job name',
+                        default=env_job_name)
+    parser.add_argument('--build-number',
+                        metavar='BUILD_NUMBER',
+                        help='Jenkins job build number',
+                        default=env_build_number)
+    return parser
+
+
+def get_deployment_result(opts):
+    """Get the pipeline job result from Jenkins
+
+    Get all the stage results from the specified job and
+    show the error message if present.
+    """
+    jenkins = client.JenkinsClient(host=opts.host,
+                                   username=opts.username,
+                                   password=opts.password)
+
+    def get_stages(nodes, indent=0, show_status=True):
+        res = []
+        for node in nodes:
+            if show_status:
+                msg = " " * indent + "{}: {}".format(node['name'],
+                                                     node['status'])
+                if 'error' in node and 'message' in node['error']:
+                    msg += ", " + node['error']['message']
+                res.append(msg)
+
+            if node['status'] != 'SUCCESS':
+                wf = jenkins.get_workflow(opts.job_name, opts.build_number,
+                                          int(node['id']))
+                if wf is not None:
+                    if 'stageFlowNodes' in wf:
+                        res += get_stages(wf['stageFlowNodes'], indent + 2,
+                                          show_status=False)
+                    elif '_links' in wf and 'log' in wf['_links']:
+                        log = jenkins.get_workflow(opts.job_name,
+                                                   opts.build_number,
+                                                   int(node['id']),
+                                                   mode='log')
+                        if "text" in log:
+                            prefix = " " * (indent + 2)
+                            res.append("\n".join(
+                                prefix + line
+                                for line in log["text"].splitlines()))
+        return res
+
+    wf = jenkins.get_workflow(opts.job_name, opts.build_number)
+    info = jenkins.build_info(opts.job_name, int(wf['id']))
+    build_description = ("[" + info['fullDisplayName'] + "] " +
+                         info['url'] + " : " + info['result'])
+    stages = get_stages(wf['stages'], 0)
+    if not stages:
+        msg = wf['status'] + ":\n\n"
+        stages = [msg + jenkins.get_build_output(opts.job_name, int(wf['id']))]
+    return (build_description, stages)
+
+
+def main(args=None):
+    parser = load_params()
+    opts = parser.parse_args()
+
+    if opts.host is None:
+        print("JENKINS_URL is required!")
+        parser.print_help()
+        return 10
+    else:
+        (build_description, stages) = get_deployment_result(opts)
+        print(build_description)
+        print('\n'.join(stages))
+
+
+if __name__ == "__main__":
+    sys.exit(main())