Add tool get_logs.py to download node logs

Call shared.download_logs() from the swarm pipelines when a deploy
stage fails and after the test run completes, so that node logs are
collected for later analysis. Also move the preparation stages out of
the outer try blocks, wrap each stack installation in its own
try/catch in the deploy pipelines, and put each TestRail report link
on its own line in the build description.

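The tool itself is not shown in the hunks below. For illustration only
(this is a rough sketch, not the actual implementation added by this
change), a log-collection script of this kind could tar /var/log on
each node and fetch the archive over SSH; the node list, user and
paths here are assumptions:

    #!/usr/bin/env python
    """Hypothetical sketch only: fetch /var/log from a list of nodes over SSH."""
    import subprocess
    import sys

    def download_logs(nodes, user="root", dest_dir="."):
        for node in nodes:
            archive = "/tmp/logs_{0}.tar.gz".format(node)
            # Pack the logs on the remote node; tolerate files changing under tar
            subprocess.call(
                ["ssh", "{0}@{1}".format(user, node),
                 "tar -czf {0} /var/log 2>/dev/null || true".format(archive)])
            # Copy the archive into the local destination directory
            subprocess.call(
                ["scp", "{0}@{1}:{2}".format(user, node, archive), dest_dir])

    if __name__ == "__main__":
        # Usage: get_logs.py <node1> [<node2> ...]
        download_logs(sys.argv[1:])
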
Change-Id: I834b699ed8968d70b20bf343e4f46df465fc4f5e
diff --git a/jobs/pipelines/swarm-bootstrap-salt-cluster-devops.groovy b/jobs/pipelines/swarm-bootstrap-salt-cluster-devops.groovy
index 64c8783..570f47f 100644
--- a/jobs/pipelines/swarm-bootstrap-salt-cluster-devops.groovy
+++ b/jobs/pipelines/swarm-bootstrap-salt-cluster-devops.groovy
@@ -38,54 +38,54 @@
         error "'PARENT_WORKSPACE' contains path to non-existing directory ${PARENT_WORKSPACE} on the node '${PARENT_NODE_NAME}'."
     }
     dir("${PARENT_WORKSPACE}") {
+        stage("Cleanup: erase ${ENV_NAME} and remove config drive") {
+            println "Remove environment ${ENV_NAME}"
+            shared.run_cmd("""\
+                dos.py erase ${ENV_NAME} || true
+            """)
+            println "Remove config drive ISO"
+            shared.run_cmd("""\
+                rm /home/jenkins/images/${CFG01_CONFIG_IMAGE_NAME} || true
+            """)
+        }
+
+        if (env.TCP_QA_REFS) {
+            stage("Update working dir to patch ${TCP_QA_REFS}") {
+                shared.update_working_dir()
+            }
+        }
+
+        stage("Create an environment ${ENV_NAME} in disabled state") {
+            // deploy_hardware.xml
+            shared.run_cmd("""\
+                export ENV_NAME=${ENV_NAME}
+                export LAB_CONFIG_NAME=${LAB_CONFIG_NAME}
+                export MANAGER=devops
+                export PYTHONIOENCODING=UTF-8
+                export REPOSITORY_SUITE=${MCP_VERSION}
+                export TEST_GROUP=test_create_environment
+                py.test -vvv -s -p no:django -p no:ipdb --junit-xml=deploy_hardware.xml -k \${TEST_GROUP}
+            """)
+        }
+
+        stage("Generate the model") {
+            shared.generate_cookied_model()
+        }
+
+        stage("Generate config drive ISO") {
+            shared.generate_configdrive_iso()
+        }
+
+        stage("Upload generated config drive ISO into volume on cfg01 node") {
+            shared.run_cmd("""\
+                # Get SALT_MASTER_HOSTNAME to determine the volume name
+                . ./tcp_tests/utils/env_salt
+                virsh vol-upload ${ENV_NAME}_\${SALT_MASTER_HOSTNAME}_config /home/jenkins/images/${CFG01_CONFIG_IMAGE_NAME} --pool default
+                virsh pool-refresh --pool default
+            """)
+        }
+
         try {
-            stage("Cleanup: erase ${ENV_NAME} and remove config drive") {
-                println "Remove environment ${ENV_NAME}"
-                shared.run_cmd("""\
-                    dos.py erase ${ENV_NAME} || true
-                """)
-                println "Remove config drive ISO"
-                shared.run_cmd("""\
-                    rm /home/jenkins/images/${CFG01_CONFIG_IMAGE_NAME} || true
-                """)
-            }
-
-            if (env.TCP_QA_REFS) {
-                stage("Update working dir to patch ${TCP_QA_REFS}") {
-                    shared.update_working_dir()
-                }
-            }
-
-            stage("Create an environment ${ENV_NAME} in disabled state") {
-                // deploy_hardware.xml
-                shared.run_cmd("""\
-                    export ENV_NAME=${ENV_NAME}
-                    export LAB_CONFIG_NAME=${LAB_CONFIG_NAME}
-                    export MANAGER=devops
-                    export PYTHONIOENCODING=UTF-8
-                    export REPOSITORY_SUITE=${MCP_VERSION}
-                    export TEST_GROUP=test_create_environment
-                    py.test -vvv -s -p no:django -p no:ipdb --junit-xml=deploy_hardware.xml -k \${TEST_GROUP}
-                """)
-            }
-
-            stage("Generate the model") {
-                shared.generate_cookied_model()
-            }
-
-            stage("Generate config drive ISO") {
-                shared.generate_configdrive_iso()
-            }
-
-            stage("Upload generated config drive ISO into volume on cfg01 node") {
-                shared.run_cmd("""\
-                    # Get SALT_MASTER_HOSTNAME to determine the volume name
-                    . ./tcp_tests/utils/env_salt
-                    virsh vol-upload ${ENV_NAME}_\${SALT_MASTER_HOSTNAME}_config /home/jenkins/images/${CFG01_CONFIG_IMAGE_NAME} --pool default
-                    virsh pool-refresh --pool default
-                """)
-            }
-
             stage("Run the 'underlay' and 'salt-deployed' fixtures to bootstrap salt cluster") {
                 // deploy_salt.xml
                 shared.run_cmd("""\
@@ -103,7 +103,8 @@
             }
 
           } catch (e) {
-              common.printMsg("Job is failed", "purple")
+              common.printMsg("SaltStack cluster deploy has failed", "purple")
+              shared.download_logs("deploy_salt")
               throw e
           } finally {
             // TODO(ddmitriev): analyze the "def currentResult = currentBuild.result ?: 'SUCCESS'"
diff --git a/jobs/pipelines/swarm-deploy-cicd.groovy b/jobs/pipelines/swarm-deploy-cicd.groovy
index 5ace2ca..7d7fd63 100644
--- a/jobs/pipelines/swarm-deploy-cicd.groovy
+++ b/jobs/pipelines/swarm-deploy-cicd.groovy
@@ -29,29 +29,30 @@
         error "'PARENT_WORKSPACE' contains path to non-existing directory ${PARENT_WORKSPACE} on the node '${PARENT_NODE_NAME}'."
     }
     dir("${PARENT_WORKSPACE}") {
-        try {
 
-            if (! env.STACK_INSTALL) {
-                error "'STACK_INSTALL' must contain one or more comma separated stack names for [deploy_openstack] pipeline"
+        if (! env.STACK_INSTALL) {
+            error "'STACK_INSTALL' must contain one or more comma-separated stack names for [deploy_openstack] pipeline"
+        }
+
+        if (env.TCP_QA_REFS) {
+            stage("Update working dir to patch ${TCP_QA_REFS}") {
+                shared.update_working_dir()
+            }
+        }
+
+        // Install core and cicd
+        def stack
+        def timeout
+
+        for (element in "${env.STACK_INSTALL}".split(",")) {
+            if (element.contains(':')) {
+                (stack, timeout) = element.split(':')
+            } else {
+                stack = element
+                timeout = '1800'
             }
 
-            if (env.TCP_QA_REFS) {
-                stage("Update working dir to patch ${TCP_QA_REFS}") {
-                    shared.update_working_dir()
-                }
-            }
-
-            // Install core and cicd
-            def stack
-            def timeout
-
-            for (element in "${env.STACK_INSTALL}".split(",")) {
-                if (element.contains(':')) {
-                    (stack, timeout) = element.split(':')
-                } else {
-                    stack = element
-                    timeout = '1800'
-                }
+            try {
                 stage("Run Jenkins job on salt-master [deploy_openstack:${stack}]") {
                     shared.run_job_on_day01_node(stack, timeout)
                 }
@@ -60,23 +61,25 @@
                     shared.sanity_check_component(stack)
                 }
 
-                stage("Make environment snapshot [${stack}_deployed]") {
-                    shared.devops_snapshot(stack)
+            } catch (e) {
+                common.printMsg("Job has failed", "purple")
+                shared.download_logs("deploy_${stack}")
+                throw e
+            } finally {
+                // TODO(ddmitriev): analyze the "def currentResult = currentBuild.result ?: 'SUCCESS'"
+                // and report appropriate data to TestRail
+                // TODO(ddmitriev): add checks for cicd cluster
+                if ("${env.SHUTDOWN_ENV_ON_TEARDOWN}" == "true") {
+                    shared.run_cmd("""\
+                        dos.py destroy ${ENV_NAME}
+                    """)
                 }
             }
 
-        } catch (e) {
-            common.printMsg("Job is failed", "purple")
-            throw e
-        } finally {
-            // TODO(ddmitriev): analyze the "def currentResult = currentBuild.result ?: 'SUCCESS'"
-            // and report appropriate data to TestRail
-            // TODO(ddmitriev): add checks for cicd cluster
-            if ("${env.SHUTDOWN_ENV_ON_TEARDOWN}" == "true") {
-                shared.run_cmd("""\
-                    dos.py destroy ${ENV_NAME}
-                """)
+            stage("Make environment snapshot [${stack}_deployed]") {
+                shared.devops_snapshot(stack)
             }
-        }
-    }
-}
+
+        } // for
+    } // dir
+} // node
diff --git a/jobs/pipelines/swarm-deploy-platform.groovy b/jobs/pipelines/swarm-deploy-platform.groovy
index 9a6b1d1..42ebc7e 100644
--- a/jobs/pipelines/swarm-deploy-platform.groovy
+++ b/jobs/pipelines/swarm-deploy-platform.groovy
@@ -29,29 +29,31 @@
         error "'PARENT_WORKSPACE' contains path to non-existing directory ${PARENT_WORKSPACE} on the node '${PARENT_NODE_NAME}'."
     }
     dir("${PARENT_WORKSPACE}") {
-        try {
 
-            if (! env.STACK_INSTALL) {
-                error "'STACK_INSTALL' must contain one or more comma separated stack names for [deploy_openstack] pipeline"
+        if (! env.STACK_INSTALL) {
+            error "'STACK_INSTALL' must contain one or more comma-separated stack names for [deploy_openstack] pipeline"
+        }
+
+        if (env.TCP_QA_REFS) {
+            stage("Update working dir to patch ${TCP_QA_REFS}") {
+                shared.update_working_dir()
+            }
+        }
+
+        // Install the cluster
+        def stack
+        def timeout
+
+        for (element in "${STACK_INSTALL}".split(",")) {
+            if (element.contains(':')) {
+                (stack, timeout) = element.split(':')
+            } else {
+                stack = element
+                timeout = '1800'
             }
 
-            if (env.TCP_QA_REFS) {
-                stage("Update working dir to patch ${TCP_QA_REFS}") {
-                    shared.update_working_dir()
-                }
-            }
+            try {
 
-            // Install the cluster
-            def stack
-            def timeout
-
-            for (element in "${STACK_INSTALL}".split(",")) {
-                if (element.contains(':')) {
-                    (stack, timeout) = element.split(':')
-                } else {
-                    stack = element
-                    timeout = '1800'
-                }
                 stage("Run Jenkins job on CICD [deploy_openstack:${stack}]") {
                     shared.run_job_on_cicd_nodes(stack, timeout)
                 }
@@ -60,23 +62,25 @@
                     shared.sanity_check_component(stack)
                 }
 
-                stage("Make environment snapshot [${stack}_deployed]") {
-                    shared.devops_snapshot(stack)
+            } catch (e) {
+                common.printMsg("Job has failed", "purple")
+                shared.download_logs("deploy_${stack}")
+                throw e
+            } finally {
+                // TODO(ddmitriev): analyze the "def currentResult = currentBuild.result ?: 'SUCCESS'"
+                // and report appropriate data to TestRail
+                // TODO(ddmitriev): add checks for the installed stacks
+                if ("${env.SHUTDOWN_ENV_ON_TEARDOWN}" == "true") {
+                    shared.run_cmd("""\
+                        dos.py destroy ${ENV_NAME}
+                    """)
                 }
             }
 
-        } catch (e) {
-            common.printMsg("Job is failed", "purple")
-            throw e
-        } finally {
-            // TODO(ddmitriev): analyze the "def currentResult = currentBuild.result ?: 'SUCCESS'"
-            // and report appropriate data to TestRail
-            // TODO(ddmitriev): add checks for the installed stacks
-            if ("${env.SHUTDOWN_ENV_ON_TEARDOWN}" == "true") {
-                shared.run_cmd("""\
-                    dos.py destroy ${ENV_NAME}
-                """)
+            stage("Make environment snapshot [${stack}_deployed]") {
+                shared.devops_snapshot(stack)
             }
-        }
-    }
-}
+
+        } // for
+    } // dir
+} // node
diff --git a/jobs/pipelines/swarm-run-pytest.groovy b/jobs/pipelines/swarm-run-pytest.groovy
index 0dd2d7a..bc411f7 100644
--- a/jobs/pipelines/swarm-run-pytest.groovy
+++ b/jobs/pipelines/swarm-run-pytest.groovy
@@ -72,6 +72,7 @@
                     """)
 
                 def snapshot_name = "test_completed"
+                shared.download_logs("test_completed")
                 shared.run_cmd("""\
                     dos.py suspend ${ENV_NAME}
                     dos.py snapshot ${ENV_NAME} ${snapshot_name}
@@ -86,6 +87,9 @@
 
         } catch (e) {
             common.printMsg("Job is failed", "purple")
+            // Downloading logs is usually not needed here because the tests
+            // are expected to use the @pytest.mark.grab_versions decorator.
+            // shared.download_logs("test_failed")
             throw e
         } finally {
             // TODO(ddmitriev): analyze the "def currentResult = currentBuild.result ?: 'SUCCESS'"
diff --git a/jobs/pipelines/swarm-testrail-report.groovy b/jobs/pipelines/swarm-testrail-report.groovy
index e1fadc7..42027f0 100644
--- a/jobs/pipelines/swarm-testrail-report.groovy
+++ b/jobs/pipelines/swarm-testrail-report.groovy
@@ -76,7 +76,7 @@
                     report_url = report_result.split("\n").each {
                         if (it.contains("[TestRun URL]")) {
                             common.printMsg("Found report URL: " + it.trim().split().last(), "blue")
-                            description += "<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
+                            description += "\n<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
                         }
                     }
                 }
@@ -98,7 +98,7 @@
                     report_url = report_result.split("\n").each {
                         if (it.contains("[TestRun URL]")) {
                             common.printMsg("Found report URL: " + it.trim().split().last(), "blue")
-                            description += "<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
+                            description += "\n<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
                         }
                     }
                 }
@@ -115,7 +115,7 @@
                     report_url = report_result.split("\n").each {
                         if (it.contains("[TestRun URL]")) {
                             common.printMsg("Found report URL: " + it.trim().split().last(), "blue")
-                            description += "<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
+                            description += "\n<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
                         }
                     }
                 }
@@ -144,7 +144,7 @@
                     report_url = report_result.split("\n").each {
                         if (it.contains("[TestRun URL]")) {
                             common.printMsg("Found report URL: " + it.trim().split().last(), "blue")
-                            description += "<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
+                            description += "\n<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
                         }
                     }
                 }
@@ -161,7 +161,7 @@
                     report_url = report_result.split("\n").each {
                         if (it.contains("[TestRun URL]")) {
                             common.printMsg("Found report URL: " + it.trim().split().last(), "blue")
-                            description += "<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
+                            description += "\n<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
                         }
                     }
                 }