Add timestamps to job console output

PROD-35295
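
Wrap the pipeline body in the 'timestamps {}' step (provided by the
Jenkins Timestamper plugin) so that every line of the console log is
prefixed with the time it was emitted, which makes it easy to see
where time goes in long deployment and test runs.

A minimal sketch of the resulting structure (stage bodies elided):

    timeout(time: 10, unit: 'HOURS') {
        timestamps {
            node ("${PARENT_NODE_NAME}") {
                // every console line printed inside is time-stamped
            }
        }
    }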

Change-Id: Ib308e07a7720154b157b70ea4b028628605df7aa
diff --git a/jobs/pipelines/swarm-run-pytest.groovy b/jobs/pipelines/swarm-run-pytest.groovy
index f7ad7c3..8ef7122 100644
--- a/jobs/pipelines/swarm-run-pytest.groovy
+++ b/jobs/pipelines/swarm-run-pytest.groovy
@@ -34,85 +34,93 @@
 currentBuild.description = "${PARENT_NODE_NAME}:${ENV_NAME}"
 
 timeout(time: 10, unit: 'HOURS') {
-node ("${PARENT_NODE_NAME}") {
-    if (! fileExists("${PARENT_WORKSPACE}")) {
-        error "'PARENT_WORKSPACE' contains path to non-existing directory ${PARENT_WORKSPACE} on the node '${PARENT_NODE_NAME}'."
-    }
-    dir("${PARENT_WORKSPACE}") {
-        try {
-
-            if (env.TCP_QA_REFS) {
-                stage("Update working dir to patch ${TCP_QA_REFS}") {
-                    shared.update_working_dir()
-                }
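+    // Prefix every line of the console output with a timestamp (Timestamper plugin)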
+    timestamps {
+        node ("${PARENT_NODE_NAME}") {
+            if (! fileExists("${PARENT_WORKSPACE}")) {
+                error "'PARENT_WORKSPACE' contains path to non-existing directory ${PARENT_WORKSPACE} on the node '${PARENT_NODE_NAME}'."
             }
+            dir("${PARENT_WORKSPACE}") {
+                try {
 
-            stage("Run tests") {
-                def steps = shared.get_steps_list(PASSED_STEPS)
-                def sources = """\
-                    cd ${PARENT_WORKSPACE}
-                    export ENV_NAME=${ENV_NAME}
-                    . ./tcp_tests/utils/env_salt"""
-                if (steps.contains('k8s')) {
-                    sources += """
-                    . ./tcp_tests/utils/env_k8s\n"""
-                }
-                if (steps.contains('openstack')) {
-                    sources += """
-                    export TEMPEST_IMAGE_VERSION=${TEMPEST_IMAGE_VERSION}
-                    export TEMPEST_TARGET=${TEMPEST_TARGET}
-                    # TODO: . ./tcp_tests/utils/env_keystonercv3\n"""
-                }
-                def installed = steps.collect {"""\
-                    export ${it}_installed=true"""}.join("\n")
-
-                shared.run_sh(sources + installed + """
-                    export TESTS_CONFIGS=${ENV_NAME}_salt_deployed.ini
-                    export ENV_MANAGER=$ENV_MANAGER  # use 'hardware' fixture to manage fuel-devops environment
-                    export salt_master_host=\$SALT_MASTER_IP  # skip salt_deployed fixture
-                    export salt_master_port=6969
-                    export SALT_USER=\$SALTAPI_USER
-                    export SALT_PASSWORD=\$SALTAPI_PASS
-
-                    export LOG_NAME=swarm_run_pytest.log
-                    py.test --junit-xml=nosetests.xml ${RUN_TEST_OPTS}
-
-                    """)
-
-                def snapshot_name = "test_completed"
-                shared.download_logs("test_completed_${ENV_NAME}")
-
-                if (make_snapshot_stages) {
-                    shared.run_cmd("""\
-                        dos.py suspend ${ENV_NAME}
-                        dos.py snapshot ${ENV_NAME} ${snapshot_name}
-                    """)
-                    if ("${env.SHUTDOWN_ENV_ON_TEARDOWN}" == "false") {
-                        shared.run_cmd("""\
-                            dos.py resume ${ENV_NAME}
-                        """)
+                    if (env.TCP_QA_REFS) {
+                        stage("Update working dir to patch ${TCP_QA_REFS}") {
+                            shared.update_working_dir()
+                        }
                     }
-                    shared.devops_snapshot_info(snapshot_name)
-                }
-            }
 
-        } catch (e) {
-            common.printMsg("Job is failed", "purple")
-            // Downloading logs usually not needed here
-            // because tests should use the decorator @pytest.mark.grab_versions
-            // shared.download_logs("test_failed_${ENV_NAME}")
-            throw e
-        } finally {
-            // TODO(ddmitriev): analyze the "def currentResult = currentBuild.result ?: 'SUCCESS'"
-            // and report appropriate data to TestRail
-            if (make_snapshot_stages) {
-                if ("${env.SHUTDOWN_ENV_ON_TEARDOWN}" == "true") {
-                    shared.run_cmd("""\
-                        dos.py destroy ${ENV_NAME}
-                    """)
+                    stage("Run tests") {
+                        def steps = shared.get_steps_list(PASSED_STEPS)
+                        def sources = """\
+                            cd ${PARENT_WORKSPACE}
+                            export ENV_NAME=${ENV_NAME}
+                            . ./tcp_tests/utils/env_salt"""
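+                        // Source component-specific env files depending on which deploy steps were passed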
+                        if (steps.contains('k8s')) {
+                            sources += """
+                            . ./tcp_tests/utils/env_k8s\n"""
+                        }
+                        if (steps.contains('openstack')) {
+                            sources += """
+                            export TEMPEST_IMAGE_VERSION=${TEMPEST_IMAGE_VERSION}
+                            export TEMPEST_TARGET=${TEMPEST_TARGET}
+                            # TODO: . ./tcp_tests/utils/env_keystonercv3\n"""
+                        }
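+                        // Export <step>_installed=true for each deploy step already passed by the parent job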
+                        def installed = steps.collect {"""\
+                            export ${it}_installed=true"""}.join("\n")
+
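+                        // Run py.test in the prepared environment, writing JUnit XML results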
+                        shared.run_sh(sources + installed + """
+                            export TESTS_CONFIGS=${ENV_NAME}_salt_deployed.ini
+                            export ENV_MANAGER=$ENV_MANAGER  # use 'hardware' fixture to manage fuel-devops environment
+                            export salt_master_host=\$SALT_MASTER_IP  # skip salt_deployed fixture
+                            export salt_master_port=6969
+                            export SALT_USER=\$SALTAPI_USER
+                            export SALT_PASSWORD=\$SALTAPI_PASS
+
+                            export LOG_NAME=swarm_run_pytest.log
+                            py.test --junit-xml=nosetests.xml ${RUN_TEST_OPTS}
+
+                            """)
+
+                        def snapshot_name = "test_completed"
+                        shared.download_logs("test_completed_${ENV_NAME}")
+
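+                        // Suspend the environment and snapshot the 'test_completed' state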
+                        if (make_snapshot_stages) {
+                            shared.run_cmd("""\
+                                dos.py suspend ${ENV_NAME}
+                                dos.py snapshot ${ENV_NAME} ${snapshot_name}
+                            """)
+                            if ("${env.SHUTDOWN_ENV_ON_TEARDOWN}" == "false") {
+                                shared.run_cmd("""\
+                                    dos.py resume ${ENV_NAME}
+                                """)
+                            }
+                            shared.devops_snapshot_info(snapshot_name)
+                        }
+                    }
+
+                } catch (e) {
+                    common.printMsg("Job failed", "purple")
+                    // Downloading logs usually not needed here
+                    // because tests should use the decorator @pytest.mark.grab_versions
+                    // shared.download_logs("test_failed_${ENV_NAME}")
+                    throw e
+                } finally {
+                    // TODO(ddmitriev): analyze the "def currentResult = currentBuild.result ?: 'SUCCESS'"
+                    // and report appropriate data to TestRail
+                    if (make_snapshot_stages) {
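+                        // Destroy the fuel-devops environment when teardown shutdown is requested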
+                        if ("${env.SHUTDOWN_ENV_ON_TEARDOWN}" == "true") {
+                            shared.run_cmd("""\
+                                dos.py destroy ${ENV_NAME}
+                            """)
+                        }
+                    }
                 }
             }
-        }
-    }
-}
+        } // node
+    } // timestamps
 } // timeout