Update image builds to check out repos from Gerrit
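
Check out the cluster model and salt-formulas scripts from Gerrit into
the config-drive and build the cidata ISO with mkisofs instead of
cloud-localds; read the checkout refspec from REFSPEC instead of
DEFAULT_GIT_REF.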

Change-Id: I27d19e248a1ded6b2a6a2a30b2955da57f23bc3f
diff --git a/packer-image-buid.groovy b/packer-image-buid.groovy
index 2e82c44..a87918f 100644
--- a/packer-image-buid.groovy
+++ b/packer-image-buid.groovy
@@ -21,9 +21,9 @@
  *            ---
  *            IMAGE_NAME: 'ubuntu-16.04-proposed'
  *
- *  CREDENTIALS_ID  = Global jenkins cred. for clone DEFAULT_GIT_URL
+ *  CREDENTIALS_ID  = Global Jenkins credentials ID used to clone DEFAULT_GIT_URL
  *  DEFAULT_GIT_URL
- *  DEFAULT_GIT_REF
+ *  REFSPEC
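+ *
+ *  Optional keys read from EXTRA_VARIABLES_YAML:
+ *    CLUSTER_MODEL / CLUSTER_MODEL_REF       = repo and refspec of the cluster model checked out into config-drive/model
+ *    GIT_SALT_FORMULAS_SCRIPTS / SCRIPTS_REF = repo and refspec of salt-formulas scripts checked out into config-drive/salt_scripts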
  *
  *  OS_VERSION         = OpenStack version
  *  OS_CREDENTIALS_ID  = ID of credentials for OpenStack API stored in Jenkins.
@@ -42,15 +42,16 @@
 def dateTime = date.format("ddMMyyyy-HHmmss")
 //
 def job_env = env.getEnvironment().findAll { k, v -> v }
+gerritCredentials = env.CREDENTIALS_ID ?: 'gerrit'
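+// Credentials used to check out the cluster model and salt-formulas scripts from gerrit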
 
 /////
 extra_vars = readYaml text: job_env.get('EXTRA_VARIABLES_YAML','').trim()
 // FIXME: os_openrc should be refactored.
 os_openrc = readYaml text: job_env.get('OPENSTACK_OPENRC_YAML','').trim()
 if (job_env.get('TIMESTAMP_INAME', false).toBoolean()) {
-  imageName = job_env.IMAGE_NAME + "-" + dateTime
-}else {
-  imageName = job_env.IMAGE_NAME
+    imageName = job_env.IMAGE_NAME + "-" + dateTime
+} else {
+    imageName = job_env.IMAGE_NAME
 }
 // Overwrite IMAGE_NAME in template.json with expected
 extra_vars['IMAGE_NAME'] = imageName
@@ -61,246 +62,275 @@
 job_env['BUILD_ONLY'] = job_env.BUILD_ONLY.toLowerCase()
 job_env['PUBLISH_BACKEND'] = job_env.PUBLISH_BACKEND.toLowerCase()
 //
-defaultGitRef = job_env.get('DEFAULT_GIT_REF', 'HEAD')
+defaultGitRef = job_env.get('REFSPEC', 'HEAD')
 defaultGitUrl = job_env.get('DEFAULT_GIT_URL', null)
 slaveNode = (env.SLAVE_NODE ?: 'jsl23.mcp.mirantis.net')
 
 // Self-check
 for (String req_v : ['BUILD_OS', 'BUILD_ONLY','IMAGE_NAME'] ) {
-  if (!job_env.get(req_v, false)) {
-    throw new Exception("${req_v} not set!")
-  }
+    if (!job_env.get(req_v, false)) {
+        throw new Exception("${req_v} not set!")
+    }
 }
 
 def MapToList(input_map) {
 /**
  * Convert dict in bash-like list
  */
-  def data_list = []
-  for (i = 0; i < input_map.size(); i++) {
-    data = ''
-    data = input_map.keySet()[i] + "=" + input_map.values()[i]
-    data_list.add(data)
-  }
-  return data_list
+    def data_list = []
+    for (i = 0; i < input_map.size(); i++) {
+        data = ''
+        data = input_map.keySet()[i] + "=" + input_map.values()[i]
+        data_list.add(data)
+    }
+    return data_list
 }
 
 timeout(time: 6, unit: 'HOURS') {
-  node(slaveNode) {
-    def checkouted = false
-    def workspace = common.getWorkspace()
-    creds = common.getPasswordCredentials(job_env.CREDENTIALS_ID)
-    if (job_env.BUILD_ONLY == 'openstack' || job_env.PUBLISH_BACKEND == 'glance') {
-      rcFile = openstack.createOpenstackEnv(workspace, os_openrc.OS_AUTH_URL, job_env.OS_TENANT_ID, job_env.OS_TENANT_NAME, "default", "", "default", "2", "")
-      def openstackEnv = "${workspace}/venv"
-    }
-
-    try {
-      def _artifact_dir = "${workspace}/artifacts"
-      def _artifact_list = []
-      def ImagesCacheFolder = "${workspace}/../${env.JOB_NAME}_cache/"
-      stage("checkout") {
-        if (defaultGitRef && defaultGitUrl) {
-          checkouted = gerrit.gerritPatchsetCheckout(defaultGitUrl, defaultGitRef, "HEAD", job_env.CREDENTIALS_ID)
-        } else {
-          throw new Exception("Cannot checkout gerrit patchset: DEFAULT_GIT_URL is null")
-        }
-      }
-      stage("Prepare env") {
-        if (!fileExists("${workspace}/tmp")) {
-          sh "mkdir -p ${workspace}/tmp"
-        }
-        if (!fileExists(ImagesCacheFolder)) {
-          sh "mkdir -p ${ImagesCacheFolder}"
-        }
-        if (!fileExists(_artifact_dir)) {
-          sh "mkdir -p ${_artifact_dir}"
-        }
-        if (!fileExists("bin")) {
-          common.infoMsg("Downloading packer")
-          sh "mkdir -p bin"
-          dir("bin") {
-            def zipname = sh(script: "basename ${job_env.PACKER_URL}", returnStdout: true).trim()
-            sh(script: "wget --quiet ${job_env.PACKER_URL}", returnStdout: true)
-            sh "echo \"${job_env.PACKER_ZIP_MD5} ${zipname}\" >> md5sum"
-            sh(script: "md5sum -c --status md5sum", returnStdout: true)
-            sh "unzip ${zipname}"
-          }
-        }
-        if (!fileExists("${job_env.BUILD_OS}/images")) {
-          // clean images dir before building
-          sh(script: "rm -rf ${job_env.BUILD_OS}/images/*", returnStatus: true)
-        }
-      }
-
-      stage("Build Instance") {
-        def _packer_args = "${job_env.get(PACKER_ARGS, '')}"
-        def _packer_log = "${workspace}/packer.log"
-        // clean old log, for correct status grepping
-        if (fileExists(_packer_log)) {
-          sh "rm -v ${_packer_log}"
+    node(slaveNode) {
+        def workspace = common.getWorkspace()
+        creds = common.getPasswordCredentials(job_env.CREDENTIALS_ID)
+        if (job_env.BUILD_ONLY == 'openstack' || job_env.PUBLISH_BACKEND == 'glance') {
+            rcFile = openstack.createOpenstackEnv(workspace, os_openrc.OS_AUTH_URL, job_env.OS_TENANT_ID, job_env.OS_TENANT_NAME, "default", "", "default", "2", "")
+            // no 'def' here: openstackEnv is used later in the build and publish stages
+            openstackEnv = "${workspace}/venv"
         }
 
-        dir("${workspace}/${job_env.BUILD_OS}/") {
-          if (fileExists("config-drive/user-data.yaml")) {
-            common.infoMsg("Creating cloud-config drive")
-            if (fileExists("config-drive/cloudata.iso")) {
-              sh "rm -v config-drive/cloudata.iso"
+        stage("checkout") {
+            if (defaultGitRef && defaultGitUrl) {
+                checkouted = gerrit.gerritPatchsetCheckout(defaultGitUrl, defaultGitRef, "HEAD", job_env.CREDENTIALS_ID)
+            } else {
+                throw new Exception("Cannot checkout gerrit patchset: DEFAULT_GIT_URL is null")
             }
-            sh "cloud-localds config-drive/cloudata.iso  config-drive/user-data.yaml"
-          }
         }
 
-        if (job_env.BUILD_ONLY == "openstack") {
-          dir("${workspace}/${job_env.BUILD_OS}/") {
-            extra_vars_list = MapToList(extra_vars)
-            withEnv(["PATH=${env.PATH}:${workspace}/bin",
-                     "PACKER_LOG_PATH=${_packer_log}",
-                     "PACKER_LOG=1",
-                     "TMPDIR=${workspace}/tmp",
-                     "OS_USERNAME=${creds.username.toString()}",
-                     "OS_PASSWORD=${creds.password.toString()}"] + extra_vars_list) {
-
-              common.infoMsg("Run build with:")
-              sh(script: 'printenv|sort')
-              sh(script: "set -xe; packer build -only='openstack' ${_packer_args} -parallel=false template.json" )
-              _os_private = "${workspace}/${job_env.BUILD_OS}/os_${job_env.BUILD_OS}.pem"
-              if (fileExists(_os_private)) {
-                common.infoMsg("Packer private key:")
-                sh "cat ${_os_private}"
-              }
-              def packerStatus = sh(script: "grep \"Some builds didn't complete successfully and had errors\" ${_packer_log}", returnStatus: true)
-              // grep returns 0 if find something
-              if (packerStatus != 0) {
-                common.infoMsg("Openstack instance build complete")
-              } else {
-                throw new Exception("Openstack Packer build failed")
-              }
-
-              common.retry(3, 5) {
-                common.infoMsg("Attempt download openstack image..")
-                openstack.runOpenstackCommand("openstack image save --file ${_artifact_dir}/${imageName}.qcow2 ${imageName}", rcFile, openstackEnv)
-              }
-            }
-          }
-
-        } else if (job_env.BUILD_ONLY == 'qemu') {
-
-          dir("${workspace}/${job_env.BUILD_OS}/") {
-            extra_vars_list = MapToList(extra_vars)
-            withEnv(["PATH=${env.PATH}:${workspace}/bin",
-                     "PACKER_LOG_PATH=${_packer_log}",
-                     "PACKER_LOG=1",
-                     "TMPDIR=${workspace}/tmp",
-                     "PACKER_IMAGES_CACHE=${ImagesCacheFolder}"] + extra_vars_list) {
-              common.infoMsg("Run build with:")
-              sh(script: 'printenv|sort')
-              sh(script: "set -xe ; packer build -on-error=ask -only='qemu' ${_packer_args} -parallel=false template.json".toString())
-
-              def packerStatus = sh(script: "grep \"Some builds didn't complete successfully and had errors\" ${PACKER_LOG_PATH}", returnStatus: true)
-              // grep returns 0 if find something
-              if (packerStatus != 0) {
-                common.infoMsg("qemu instance build completed successfully")
-              } else {
-                throw new Exception("qemu instance build failed")
-              }
-              // collect artifacts
-              // TODO make it possible, process multiply artifacts by one run.
-              dir('images/') {
-                sh(script: 'find .', returnStdout: true)
-                def _files = findFiles(glob: "*qemu*/${imageName}*")
-                if (_files.size() > 1) {
-                  common.warningMsg("Multiply artifacts detected!Only first one will be processed!")
-                } else if (_files.size() == 0) {
-                  throw new Exception("No artifacts detected!BUILD_ONLY=${env.BUILD_ONLY} failed!")
+        try {
+            def _artifact_dir = "${workspace}/artifacts"
+            def _artifact_list = []
+            def ImagesCacheFolder = "${workspace}/../${env.JOB_NAME}_cache/"
+            stage("Prepare env") {
+                if (!fileExists("${workspace}/tmp")) {
+                    sh "mkdir -p ${workspace}/tmp"
                 }
-                for (String x : _files) {
-                  _file = sh(script: "set -x ; readlink -f ${x}", returnStdout: true)
-                  sh(script: "mv -v ${x} ${_artifact_dir}/${imageName}.qcow2")
-                  // Save filename to list
-                  _artifact_list.add("${imageName}.qcow2")
+                if (!fileExists(ImagesCacheFolder)) {
+                    sh "mkdir -p ${ImagesCacheFolder}"
                 }
-              }
+                if (!fileExists(_artifact_dir)) {
+                    sh "mkdir -p ${_artifact_dir}"
+                }
+                if (!fileExists("bin")) {
+                    common.infoMsg("Downloading packer")
+                    sh "mkdir -p bin"
+                    dir("bin") {
+                        def zipname = sh(script: "basename ${job_env.PACKER_URL}", returnStdout: true).trim()
+                        sh(script: "wget --quiet ${job_env.PACKER_URL}", returnStdout: true)
+                        sh "echo \"${job_env.PACKER_ZIP_MD5} ${zipname}\" >> md5sum"
+                        sh(script: "md5sum -c --status md5sum", returnStdout: true)
+                        sh "unzip ${zipname}"
+                    }
+                }
+                if (fileExists("${job_env.BUILD_OS}/images")) {
+                    // clean images dir before building
+                    sh(script: "rm -rf ${job_env.BUILD_OS}/images/*", returnStatus: true)
+                }
             }
-          }
 
-        } else {
-          throw new Exception("Unexpected BUILD_ONLY=${env.BUILD_ONLY} target!")
-        }
-      }
+            stage("Build Instance") {
+                def _packer_args = job_env.get('PACKER_ARGS', '')
+                def _packer_log = "${workspace}/packer.log"
+                // clean old log, for correct status grepping
+                if (fileExists(_packer_log)) {
+                    sh "rm -v ${_packer_log}"
+                }
 
-      stage("Publish artifacts") {
-        dir(_artifact_dir) {
-          common.infoMsg("Processing md5 for artifacts")
-          for (String x : _artifact_list) {
-            _md5 = sh(script: "md5sum ${x} > ${x}.md5; cat ${x}.md5", returnStdout: true).trim()
-            _size = sh(script: "ls -alh ${x}", returnStdout: true).trim()
-            common.infoMsg("Artifact file: ${_size}\n${_md5}")
-          }
-          if (job_env.PUBLISH_BACKEND == 'local') {
-            common.infoMsg("Uploading to: local")
-            common.infoMsg("For local publish target - nothing to do, all files in: ${_artifact_dir}")
+                dir("${workspace}/${job_env.BUILD_OS}/") {
+                    if (fileExists("config-drive")) {
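+                        // Check out the cluster model from gerrit into the config drive, when provided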
+                        def model = extra_vars.get('CLUSTER_MODEL', '')
+                        if (model != "") {
+                            checkout([
+                                    $class             : 'GitSCM',
+                                    branches           : [[name: 'FETCH_HEAD']],
+                                    recursiveSubmodules: true,
+                                    extensions         : [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'config-drive/model']],
+                                    userRemoteConfigs  : [[url: model, refspec: extra_vars.get('CLUSTER_MODEL_REF', 'master'), credentialsId: gerritCredentials]]
+                            ])
+                        }
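+                        // Check out salt-formulas scripts into the config drive, when provided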
+                        def scripts = extra_vars.get('GIT_SALT_FORMULAS_SCRIPTS', '')
+                        if (scripts != "") {
+                            checkout([
+                                    $class           : 'GitSCM',
+                                    branches         : [[name: 'FETCH_HEAD']],
+                                    extensions       : [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'config-drive/salt_scripts']],
+                                    userRemoteConfigs: [[url: scripts, refspec: extra_vars.get('SCRIPTS_REF', 'master'), credentialsId: gerritCredentials]]
+                            ])
+                        }
+
+                        common.infoMsg("Creating cloud-config drive")
+                        def isoFile = "config-drive/cloudata.iso"
+                        if (fileExists(isoFile)) {
+                            sh "rm -v ${isoFile}"
+                        }
+                        // This is left for backward-compatibility
+                        if (fileExists("config-drive/user-data.yaml")) {
+                            sh "mv config-drive/user-data.yaml config-drive/user-data"
+                            if (!fileExists("config-drive/meta-data")) {
+                                sh "echo 'hostname: ubuntu' > config-drive/meta-data"
+                            }
+                        }
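+                        // Build the config-drive ISO (cidata volume) from the config-drive/ contents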
+                        sh "mkisofs -o ${isoFile} -V cidata -r -J --quiet config-drive"
+                        archiveArtifacts artifacts: "${isoFile}"
+                    }
+                }
+
+                if (job_env.BUILD_ONLY == "openstack") {
+                    dir("${workspace}/${job_env.BUILD_OS}/") {
+                        extra_vars_list = MapToList(extra_vars)
+                        withEnv(["PATH=${env.PATH}:${workspace}/bin",
+                                 "PACKER_LOG_PATH=${_packer_log}",
+                                 "PACKER_LOG=1",
+                                 "TMPDIR=${workspace}/tmp",
+                                 "OS_USERNAME=${creds.username.toString()}",
+                                 "OS_PASSWORD=${creds.password.toString()}"] + extra_vars_list) {
+
+                            common.infoMsg("Run build with:")
+                            sh(script: 'printenv|sort')
+                            sh(script: "set -xe; packer build -only='openstack' ${_packer_args} -parallel=false template.json")
+                            _os_private = "${workspace}/${job_env.BUILD_OS}/os_${job_env.BUILD_OS}.pem"
+                            if (fileExists(_os_private)) {
+                                common.infoMsg("Packer private key:")
+                                sh "cat ${_os_private}"
+                            }
+                            def packerStatus = sh(script: "grep \"Some builds didn't complete successfully and had errors\" ${_packer_log}", returnStatus: true)
+                            // grep returns 0 if it finds something
+                            if (packerStatus != 0) {
+                                common.infoMsg("Openstack instance build complete")
+                            } else {
+                                throw new Exception("Openstack Packer build failed")
+                            }
+
+                            common.retry(3, 5) {
+                                common.infoMsg("Attempting to download the OpenStack image ...")
+                                openstack.runOpenstackCommand("openstack image save --file ${_artifact_dir}/${imageName}.qcow2 ${imageName}", rcFile, openstackEnv)
+                            }
+                        }
+                    }
+
+                } else if (job_env.BUILD_ONLY == 'qemu') {
+
+                    dir("${workspace}/${job_env.BUILD_OS}/") {
+                        extra_vars_list = MapToList(extra_vars)
+                        withEnv(["PATH=${env.PATH}:${workspace}/bin",
+                                 "PACKER_LOG_PATH=${_packer_log}",
+                                 "PACKER_LOG=1",
+                                 "TMPDIR=${workspace}/tmp",
+                                 "PACKER_IMAGES_CACHE=${ImagesCacheFolder}"] + extra_vars_list) {
+                            common.infoMsg("Run build with:")
+                            sh(script: 'printenv|sort')
+                            sh(script: "set -xe ; packer build -on-error=ask -only='qemu' ${_packer_args} -parallel=false template.json".toString())
+
+                            def packerStatus = sh(script: "grep \"Some builds didn't complete successfully and had errors\" ${_packer_log}", returnStatus: true)
+                            // grep returns 0 if it finds something
+                            if (packerStatus != 0) {
+                                common.infoMsg("qemu instance build completed successfully")
+                            } else {
+                                throw new Exception("qemu instance build failed")
+                            }
+                            // collect artifacts
+                            // TODO: make it possible to process multiple artifacts in one run.
+                            dir('images/') {
+                                sh(script: 'find .', returnStdout: true)
+                                def _files = findFiles(glob: "*qemu*/${imageName}*")
+                                if (_files.size() > 1) {
+                                    common.warningMsg("Multiple artifacts detected! Only the first one will be processed!")
+                                } else if (_files.size() == 0) {
+                                    throw new Exception("No artifacts detected! BUILD_ONLY=${env.BUILD_ONLY} failed!")
+                                }
+                                for (String x : _files) {
+                                    _file = sh(script: "set -x ; readlink -f ${x}", returnStdout: true)
+                                    sh(script: "mv -v ${x} ${_artifact_dir}/${imageName}.qcow2")
+                                    // Save filename to list
+                                    _artifact_list.add("${imageName}.qcow2")
+                                }
+                            }
+                        }
+                    }
+
+                } else {
+                    throw new Exception("Unexpected BUILD_ONLY=${env.BUILD_ONLY} target!")
+                }
+            }
+
+            stage("Publish artifacts") {
+                dir(_artifact_dir) {
+                    common.infoMsg("Processing md5 for artifacts")
+                    for (String x : _artifact_list) {
+                        _md5 = sh(script: "md5sum ${x} > ${x}.md5; cat ${x}.md5", returnStdout: true).trim()
+                        _size = sh(script: "ls -alh ${x}", returnStdout: true).trim()
+                        common.infoMsg("Artifact file: ${_size}\n${_md5}")
+                    }
+                    if (job_env.PUBLISH_BACKEND == 'local') {
+                        common.infoMsg("Uploading to: local")
+                        common.infoMsg("For local publish target - nothing to do, all files in: ${_artifact_dir}")
+                        if (job_env.get('CLEANUP_AFTER', false)) {
+                            common.warningMsg("You are trying to use the 'local' publish method with CLEANUP_AFTER enabled!")
+                            common.warningMsg("Disabling the CLEANUP_AFTER option to preserve your data ;)")
+                            job_env.CLEANUP_AFTER = false
+                        }
+                    } else if (job_env.PUBLISH_BACKEND == 'glance') {
+                        common.infoMsg("Uploading to: glance-openstack")
+                        if (fileExists("${workspace}/venv")) {
+                            common.infoMsg("cleaning virtualenv at:${workspace}/venv")
+                            sh(script: "rm -rf ${workspace}/venv", returnStatus: true)
+                        }
+                        openstack.setupOpenstackVirtualenv(openstackEnv, job_env.OS_VERSION)
+                        for (String x : findFiles(glob: "*.*")) {
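+                            // .md5 companions are not uploaded; their content is attached to the image as the md5sum property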
+                            if (x.endsWith('.md5')) {
+                                common.warningMsg("Skipping:${x} from openstack upload!")
+                                _md5sum = sh(script: "cat ${x}", returnStdout: true).trim().split()[0]
+                                continue
+                            }
+                            _property = "--property data=${dateTime} --property md5sum=${_md5sum}"
+                            _cmdline = String.format("glance image-create --visibility " +
+                                    "public %s --name '%s' %s --file %s", _property, imageShortName, glanceRunArgs, imageName)
+                            openstack.runOpenstackCommand(_cmdline, rcFile, openstackEnv)
+                        }
+                        // TODO
+                        currentBuild.description = "${imageName}.qcow2 uploaded tenant: "
+
+                    } else if (job_env.PUBLISH_BACKEND == 'http') {
+                        for (String u_file : findFiles(glob: '*.*')) {
+                            common.infoMsg("Uploading image ${imageName}")
+                            def uploadImageStatus = ''
+                            common.retry(3, 5) {
+                                uploadImageStatus = sh(script: "curl -f -T ${u_file} ${job_env.UPLOAD_URL}", returnStatus: true)
+                                if (uploadImageStatus != 0) {
+                                    throw new Exception("Uploading file: ${u_file} failed!")
+                                }
+                            }
+                            // FIXME: use the correct path?
+                            currentBuild.description = "<a href='http://images.mcp.mirantis.net/${imageName}.qcow2'>${imageName}.qcow2</a>"
+                        }
+
+                    } else {
+                        throw new Exception("Unsupported publish backend:${job_env.PUBLISH_BACKEND}")
+                    }
+
+                }
+            }
+        } catch (Throwable e) {
+            // If there was an error or exception thrown, the build failed
+            currentBuild.result = "FAILURE"
+            throw e
+        } finally {
             if (job_env.get('CLEANUP_AFTER', false)) {
-              common.warningMsg("You are trying to use 'local' publish method, along with enabled CLEANUP_AFTER! ")
-              common.warningMsg("Disabling CLEANUP_AFTER option, to save you'r data ;) ")
-              job_env.CLEANUP_AFTER = false
-            }
-          } else if (job_env.PUBLISH_BACKEND == 'glance') {
-            common.infoMsg("Uploading to: glance-openstack")
-            if (fileExists("${workspace}/venv")) {
-              common.infoMsg("cleaning virtualenv at:${workspace}/venv")
-              sh(script: "rm -rf ${workspace}/venv", returnStatus: true)
-            }
-            openstack.setupOpenstackVirtualenv(openstackEnv, job_env.OS_VERSION)
-              for (String x : findFiles(glob: "*.*")) {
-                if (x.endsWith('.md5')) {
-                  common.warningMsg("Skipping:${x} from openstack upload!")
-                  _md5sum = sh(script: "cat ${x}", returnStdout: true).trim().split()[0]
-                  continue
+                dir(workspace) {
+                    sh "find . -mindepth 1 -delete || true"
                 }
-                _property = "--property data=${dateTime} --property md5sum=${_md5sum}"
-                _cmdline = String.format("glance image-create --visibility " +
-                    "public %s --name '%s' %s --file %s", _property, imageShortName, glanceRunArgs, imageName)
-                openstack.runOpenstackCommand(_cmdline, rcFile, openstackEnv)
-              }
-            // TODO
-            currentBuild.description = "${imageName}.qcow2 uploaded tenant: "
-
-          } else if (job_env.PUBLISH_BACKEND == 'http') {
-            for (String u_file : findFiles(glob: '*.*')) {
-              common.infoMsg("Uploading image ${imageName}")
-              def uploadImageStatus = ''
-              common.retry(3, 5) {
-                uploadImageStatus = sh(script: "curl -f -T ${u_file} ${job_env.UPLOAD_URL}", returnStatus: true)
-                if (uploadImageStatus != 0) {
-                  throw new Exception("Uploading file: ${u_file} failed!")
+                if (job_env.BUILD_ONLY == 'openstack') {
+                    common.warningMsg("OpenStack env cleanup is not implemented yet!")
                 }
-              }
-              // Fixme for correct path ?
-              currentBuild.description = "<a href='http://images.mcp.mirantis.net/${imageName}.qcow2'>${imageName}.qcow2</a>"
+            } else {
+                common.warningMsg("Env has not been cleaned! Please clean it up manually!")
             }
-
-          } else {
-            throw new Exception("Unsupported publish backend:${job_env.PUBLISH_BACKEND}")
-          }
-
         }
-      }
-    } catch (Throwable e) {
-      // If there was an error or exception thrown, the build failed
-      currentBuild.result = "FAILURE"
-      throw e
-    } finally {
-      if (job_env.get('CLEANUP_AFTER', false)) {
-        dir(workspace) {
-          sh "find . -mindepth 1 -delete || true"
-        }
-        if (job_env.BUILD_ONLY == 'openstack') {
-          common.warningMsg("openstack Env cleanup not implemented yet!")
-        }
-      } else {
-        common.warningMsg("Env has not been cleaned!Please cleanup it manualy!")
-      }
     }
-  }
 }