Merge "Adding cleaning of salt-keys to the cleaning script"
diff --git a/packer-image-buid.groovy b/packer-image-buid.groovy
new file mode 100644
index 0000000..3f05f44
--- /dev/null
+++ b/packer-image-buid.groovy
@@ -0,0 +1,311 @@
+/**
+ *
+ * Build mirror/aptly/apt01 image pipeline
+ *
+ *  PACKER_URL       = 'https://releases.hashicorp.com/packer/1.2.4/packer_1.2.4_linux_amd64.zip'
+ *  PACKER_ZIP_MD5   = '997274e80ae41102eecf9df2e5b67860'
+ *  PACKER_ARGS      = '-debug'
+ *  BUILD_ONLY       = 'openstack|qemu'
+ *  CLEANUP_AFTER    = wipe workspace AFTER build
+ *  IMAGE_NAME       = Name of the result image.
+ *  TIMESTAMP_INAME  = True|false. If true - image will be uploaded to backend with IMAGE_NAME + timestamp
+ *
+ * OPENSTACK_OPENRC_YAML: Yaml of key:value variables required for openstack venv
+ * example:
+ *            ---
+ *             OS_TENANT_ID: 'xxxx'
+ *             OS_TENANT_NAME: "mcp-oscore-ci"
+ *
+ * EXTRA_VARIABLES_YAML - Yaml of key:value variables required for template.json
+ * example:
+ *            ---
+ *            IMAGE_NAME: 'ubuntu-16.04-proposed'
+ *
+ *  CREDENTIALS_ID  = Global jenkins cred. for clone DEFAULT_GIT_URL
+ *  DEFAULT_GIT_URL
+ *  DEFAULT_GIT_REF
+ *
+ *  OS_VERSION         = OpenStack version
+ *  OS_CREDENTIALS_ID  = ID of credentials for OpenStack API stored in Jenkins.
+ *  OS_URL             = Keystone auth endpoint of the OpenStack.
+ *  OS_PROJECT         =
+ *
+ *  PUBLISH_BACKEND = local|glance|http
+ *  UPLOAD_URL      = URL of a WebDAV used to upload the image after creation. (Only in case PUBLISH_BACKEND == http)
+ */
+
+// Load shared libs
+def common = new com.mirantis.mk.Common()
+// NOTE(review): McpCommon appears unused in this pipeline - confirm before removing
+def McpCommon = new com.mirantis.mcp.Common()
+def openstack = new com.mirantis.mk.Openstack()
+def date = new Date()
+def dateTime = date.format("ddMMyyyy-HHmmss")
+// Snapshot of all non-empty job/env parameters (findAll drops unset/null values)
+def job_env = env.getEnvironment().findAll { k, v -> v }
+
+////
+// NOTE(review): removed the commented-out debug overrides that were explicitly
+// marked "To be removed". In particular the one live line
+//   job_env['TIMESTAMP_INAME'] = true
+// unconditionally forced a timestamped image name, silently ignoring the
+// TIMESTAMP_INAME job parameter documented in the header.
+/////
+// Fail early if required parameters are missing - this must run BEFORE the
+// IMAGE_NAME manipulation below (the original checked only after using it).
+for (String req_v : ['BUILD_OS', 'BUILD_ONLY', 'IMAGE_NAME']) {
+  if (!job_env.get(req_v, false)) {
+    throw new Exception("${req_v} not set!")
+  }
+}
+// readYaml returns null for empty text - fall back to an empty map so the
+// IMAGE_NAME assignment below cannot NPE when EXTRA_VARIABLES_YAML is unset.
+extra_vars = readYaml(text: job_env.get('EXTRA_VARIABLES_YAML', '').trim()) ?: [:]
+// Env-provided values are strings, and the non-empty string 'false' is truthy
+// in Groovy - normalize via toBoolean() so TIMESTAMP_INAME=false is honored.
+if (job_env.get('TIMESTAMP_INAME', 'false').toString().toBoolean()) {
+  IMAGE_NAME = IMAGE_NAME + "-" + dateTime
+}
+// Overwrite IMAGE_NAME in template.json with expected
+extra_vars['IMAGE_NAME'] = IMAGE_NAME
+def gerrit = new com.mirantis.mk.Gerrit()
+defaultGitRef = job_env.get('DEFAULT_GIT_REF', 'HEAD')
+defaultGitUrl = job_env.get('DEFAULT_GIT_URL', null)
+
+def MapToList(input_map) {
+/**
+ * Convert a map into a bash-like list of "KEY=value" strings,
+ * preserving the map's iteration order.
+ */
+  return input_map.collect { k, v -> k + "=" + v }
+}
+
+timeout(time: 6, unit: 'HOURS') {
+  node("jsl15.mcp.mirantis.net") {
+    def checkouted = false
+    // NOTE(review): resolve the workspace up-front - the original called
+    // createOpenstackEnv(workspace, ...) BEFORE 'workspace' was defined, and
+    // declared it inside the try block so the finally cleanup could not see it.
+    def workspace = common.getWorkspace()
+    def creds = common.getPasswordCredentials(job_env.OS_CREDENTIALS_ID)
+    // rcFile deliberately has no 'def' so it stays in the script binding
+    rcFile = openstack.createOpenstackEnv(workspace, job_env.OS_URL, job_env.OS_CREDENTIALS_ID, job_env.OS_PROJECT, "default", "", "default", "2", "")
+
+    try {
+      def openstackEnv = "${workspace}/venv"
+      def _artifact_dir = "${workspace}/artifacts"
+      def ImagesCacheFolder = "${workspace}/../${env.JOB_NAME}_cache/"
+      stage("checkout") {
+        // Checkout the packer templates from gerrit; both knobs are required.
+        if (defaultGitRef && defaultGitUrl) {
+          checkouted = gerrit.gerritPatchsetCheckout(defaultGitUrl, defaultGitRef, "HEAD", job_env.CREDENTIALS_ID)
+        } else {
+          // The guard checks the ref as well as the URL, so report both.
+          throw new Exception("Cannot checkout gerrit patchset: DEFAULT_GIT_URL=${defaultGitUrl}, DEFAULT_GIT_REF=${defaultGitRef}")
+        }
+      }
+      stage("Prepare env") {
+        // Create scratch/cache/artifact directories if they do not exist yet
+        if (!fileExists("${workspace}/tmp")) {
+          sh "mkdir -p ${workspace}/tmp"
+        }
+        if (!fileExists(ImagesCacheFolder)) {
+          sh "mkdir -p ${ImagesCacheFolder}"
+        }
+        if (!fileExists(_artifact_dir)) {
+          sh "mkdir -p ${_artifact_dir}"
+        }
+        // Fetch the packer binary once per workspace and verify its checksum
+        if (!fileExists("bin")) {
+          common.infoMsg("Downloading packer")
+          sh "mkdir -p bin"
+          dir("bin") {
+            def zipname = sh(script: "basename ${job_env.PACKER_URL}", returnStdout: true).trim()
+            sh(script: "wget --quiet ${job_env.PACKER_URL}", returnStdout: true)
+            // '>' (not '>>') so a re-run does not leave stale checksum lines behind
+            sh "echo \"${job_env.PACKER_ZIP_MD5} ${zipname}\" > md5sum"
+            sh(script: "md5sum -c --status md5sum", returnStdout: true)
+            sh "unzip ${zipname}"
+          }
+        }
+        // Clean images dir before building. NOTE(review): the original condition
+        // was inverted - it only tried to clean when the directory did NOT exist.
+        if (fileExists("${job_env.BUILD_OS}/images")) {
+          sh(script: "rm -rf ${job_env.BUILD_OS}/images/*", returnStatus: true)
+        }
+      }
+
+      stage("Build Instance") {
+        // NOTE(review): the key must be a quoted string - the original passed the
+        // bare identifier PACKER_ARGS, so the map lookup used its value as the key.
+        def _packer_args = job_env.get('PACKER_ARGS', '')
+        def _packer_log = "${workspace}/packer.log"
+        // No 'def' on purpose: keep the list in the script binding so the
+        // "Publish artifacts" stage (a different closure) can read it.
+        _artifact_list = []
+        // clean old log, for correct status grepping
+        if (fileExists(_packer_log)) {
+          sh "rm -v ${_packer_log}"
+        }
+
+        // Build the cloud-config ISO when the template ships user-data
+        dir("${workspace}/${job_env.BUILD_OS}/") {
+          if (fileExists("config-drive/user-data.yaml")) {
+            common.infoMsg("Creating cloud-config drive")
+            if (fileExists("config-drive/cloudata.iso")) {
+              sh "rm -v config-drive/cloudata.iso"
+            }
+            sh "cloud-localds config-drive/cloudata.iso  config-drive/user-data.yaml"
+          }
+        }
+
+        if (job_env.BUILD_ONLY.toLowerCase() == "openstack") {
+
+          dir("${workspace}/${job_env.BUILD_OS}/") {
+            extra_vars_list = MapToList(extra_vars)
+            withEnv(["PATH=${env.PATH}:${workspace}/bin",
+                     "PACKER_LOG_PATH=${_packer_log}",
+                     "PACKER_LOG=1",
+                     "TMPDIR=${workspace}/tmp",
+                     "OS_USERNAME=${creds.username.toString()}",
+                     "OS_PASSWORD=${creds.password.toString()}"] + extra_vars_list) {
+
+              common.infoMsg("Run build with:")
+              sh(script: 'printenv|sort')
+              sh(script: "set -xe; packer build -only='openstack' ${_packer_args} -parallel=false template.json" )
+              _os_private = "${workspace}/${job_env.BUILD_OS}/os_${job_env.BUILD_OS}.pem"
+              if (fileExists(_os_private)) {
+                common.infoMsg("Packer private key:")
+                sh "cat ${_os_private}"
+              }
+              // grep exits 0 when the failure marker IS present in the log
+              def packerStatus = sh(script: "grep \"Some builds didn't complete successfully and had errors\" ${_packer_log}", returnStatus: true)
+              if (packerStatus != 0) {
+                common.infoMsg("Openstack instance build complete")
+              } else {
+                throw new Exception("Openstack Packer build failed")
+              }
+
+              common.retry(3, 5) {
+                common.infoMsg("Attempt download openstack image..")
+                openstack.runOpenstackCommand("openstack image save --file ${_artifact_dir}/${job_env.IMAGE_NAME}.qcow2 ${IMAGE_NAME}", rcFile, openstackEnv)
+              }
+            }
+          }
+
+        } else if (job_env.BUILD_ONLY.toLowerCase() == 'qemu') {
+
+          dir("${workspace}/${job_env.BUILD_OS}/") {
+            extra_vars_list = MapToList(extra_vars)
+            withEnv(["PATH=${env.PATH}:${workspace}/bin",
+                     "PACKER_LOG_PATH=${_packer_log}",
+                     "PACKER_LOG=1",
+                     "TMPDIR=${workspace}/tmp",
+                     "PACKER_IMAGES_CACHE=${ImagesCacheFolder}"] + extra_vars_list) {
+              common.infoMsg("Run build with:")
+              sh(script: 'printenv|sort')
+              sh(script: "set -xe ; packer build -on-error=ask -only='qemu' ${_packer_args} -parallel=false template.json".toString())
+
+              // Use the Groovy local (_packer_log), matching the openstack branch -
+              // the original interpolated the env var PACKER_LOG_PATH into a GString.
+              def packerStatus = sh(script: "grep \"Some builds didn't complete successfully and had errors\" ${_packer_log}", returnStatus: true)
+              if (packerStatus != 0) {
+                common.infoMsg("qemu instance build completed successfully")
+              } else {
+                throw new Exception("qemu instance build failed")
+              }
+              // collect artifacts
+              // TODO: support processing multiple artifacts in one run.
+              dir('images/') {
+                def _files = findFiles(glob: "**/${job_env.IMAGE_NAME}*")
+                if (_files.size() > 1) {
+                  common.warningMsg("Multiple artifacts detected! Only the first one will be processed!")
+                } else if (_files.size() == 0) {
+                  common.warningMsg("No artifacts detected!")
+                }
+                for (String x : _files) {
+                  // trim(): returnStdout keeps the trailing newline
+                  _file = sh(script: "set -x ; readlink -f ${x}", returnStdout: true).trim()
+                  _artifact_list.add(_file.split('/').last())
+                  // We are already inside images/, so move ${x} itself - the
+                  // original prefixed 'images/' a second time and could not find it.
+                  sh(script: "mv -v ${x} ${_artifact_dir}/${job_env.IMAGE_NAME}.qcow2")
+                }
+              }
+            }
+          }
+
+        } else {
+          // job_env, not env, for consistency with the branch conditions above
+          throw new Exception("Unexpected BUILD_ONLY=${job_env.BUILD_ONLY} target!")
+        }
+      }
+
+      stage("Publish artifacts") {
+        dir(_artifact_dir) {
+          common.infoMsg("Processing md5 for artifacts")
+          for (String x : _artifact_list) {
+            _md5 = sh(script: "md5sum ${x} > ${x}.md5", returnStdout: true).trim()
+            _size = sh(script: "ls -alh ${x}", returnStdout: true).trim()
+            common.infoMsg("Artifact file: ${_size}\n${_md5}")
+          }
+          if (job_env.PUBLISH_BACKEND.toLowerCase() == 'local') {
+            common.infoMsg("Uploading to: local")
+            common.infoMsg("For local publish target - nothing to do, all files in: ${_artifact_dir}")
+            // toBoolean(): env values are strings, so 'false' must not count as truthy
+            if (job_env.get('CLEANUP_AFTER', false).toString().toBoolean()) {
+              common.warningMsg("You are trying to use 'local' publish method, along with enabled CLEANUP_AFTER! ")
+              common.warningMsg("Disabling CLEANUP_AFTER option, to save you'r data ;) ")
+              job_env.CLEANUP_AFTER = false
+            }
+          } else if (job_env.PUBLISH_BACKEND.toLowerCase() == 'glance') {
+            common.infoMsg("Uploading to: openstack")
+            if (fileExists("${workspace}/venv")) {
+              common.infoMsg("cleaning virtualenv at:${workspace}/venv")
+              sh(script: "rm -rf ${workspace}/venv", returnStatus: true)
+            }
+            openstack.setupOpenstackVirtualenv(openstackEnv, job_env.OS_VERSION)
+            // We are already inside _artifact_dir, so the glob must be relative -
+            // the original absolute "${_artifact_dir}/*" glob matched nothing.
+            for (def artifact : findFiles(glob: "*")) {
+              if (artifact.name.endsWith('.md5')) {
+                common.warningMsg("Skipping:${artifact.name} from openstack upload!")
+                continue
+              }
+              // FIXME(review): the original referenced undefined variables
+              // (imageShortName/glanceRunArgs/imageName); upload each artifact
+              // under the job's image name - confirm extra glance args if needed.
+              openstack.runOpenstackCommand(String.format("glance image-create --name '%s' --file %s", job_env.IMAGE_NAME, artifact.name), rcFile, openstackEnv)
+            }
+
+          } else if (job_env.PUBLISH_BACKEND.toLowerCase() == 'http') {
+            // Relative glob for the same reason as the glance branch above
+            for (def u_file : findFiles(glob: "*")) {
+              common.infoMsg("Uploading image ${IMAGE_NAME}")
+              def uploadImageStatus = ''
+              common.retry(3, 5) {
+                // NOTE(review): dropped the leftover 'echo' that only printed the
+                // curl command instead of executing the upload.
+                uploadImageStatus = sh(script: "curl -f -T ${u_file.name} ${job_env.UPLOAD_URL}", returnStatus: true)
+                if (uploadImageStatus != 0) {
+                  throw new Exception("Uploading file: ${u_file.name} failed!")
+                }
+              }
+            }
+            // Fixme for correct path ?
+            currentBuild.description = "<a href='http://ci.mcp.mirantis.net:8085/images/${IMAGE_NAME}.qcow2'>${IMAGE_NAME}.qcow2</a>"
+          } else {
+            throw new Exception("Unsupported publish backend:${job_env.PUBLISH_BACKEND}")
+          }
+        }
+      }
+    } catch (Throwable e) {
+      // If there was an error or exception thrown, the build failed
+      currentBuild.result = "FAILURE"
+      throw e
+    } finally {
+      // toBoolean(): env-provided values are strings, so 'false' must not count as truthy
+      if (job_env.get('CLEANUP_AFTER', false).toString().toBoolean()) {
+        // env.WORKSPACE: the original referenced 'workspace', which was declared
+        // inside the try block and is not visible from this finally block.
+        dir(env.WORKSPACE) {
+          sh "find . -mindepth 1 -delete"
+        }
+      } else {
+        common.infoMsg("Env has not been cleaned!")
+      }
+    }
+  }
+}