/**
 *
 * Build mirror/aptly/apt01 image pipeline
 *
 * PACKER_URL = 'https://releases.hashicorp.com/packer/1.2.4/packer_1.2.4_linux_amd64.zip'
 * PACKER_ZIP_MD5 = '997274e80ae41102eecf9df2e5b67860'
 * PACKER_ARGS = '-debug'
 * BUILD_ONLY = 'openstack|qemu'
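 * BUILD_OS = Name of the directory with the packer template to build from (the pipeline expects ${BUILD_OS}/template.json), e.g. 'ubuntu-16.04' (illustrative value)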
 * CLEANUP_AFTER = If true, wipe the workspace AFTER the build.
 * IMAGE_NAME = Name of the resulting image.
 * TIMESTAMP_INAME = True|false. If true, the image will be uploaded to the backend as IMAGE_NAME + timestamp.
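 *                    e.g. 'ubuntu-16.04-proposed-31122018-235959' (illustrative; the timestamp format is ddMMyyyy-HHmmss)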
 *
 * OPENSTACK_OPENRC_YAML = YAML of key:value variables required for the openstack venv
 * example:
 * ---
 * OS_TENANT_ID: 'xxxx'
 * OS_TENANT_NAME: "mcp-oscore-ci"
 *
 * EXTRA_VARIABLES_YAML = YAML of key:value variables required for template.json
 * example:
 * ---
 * IMAGE_NAME: 'ubuntu-16.04-proposed'
 *
 * CREDENTIALS_ID = Global Jenkins credentials used to clone DEFAULT_GIT_URL
 * DEFAULT_GIT_URL = Git repository to clone
 * DEFAULT_GIT_REF = Git ref to check out (default: HEAD)
 *
 * OS_VERSION = OpenStack version
 * OS_CREDENTIALS_ID = ID of credentials for the OpenStack API stored in Jenkins.
 * OS_URL = Keystone auth endpoint of the OpenStack cloud.
 * OS_PROJECT = OpenStack project (tenant) name.
 *
 * PUBLISH_BACKEND = local|glance|http
 * UPLOAD_URL = URL of a WebDAV endpoint used to upload the image after it is built (only when PUBLISH_BACKEND == http)
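 *              example: 'http://ci.mcp.mirantis.net:8085/images/' (illustrative; taken from the build-description link used below)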
 */

// Load shared libs
def common = new com.mirantis.mk.Common()
def openstack = new com.mirantis.mk.Openstack()
def gerrit = new com.mirantis.mk.Gerrit()
def date = new Date()
def dateTime = date.format("ddMMyyyy-HHmmss")
//
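// Collect all non-empty environment variables (job parameters included) into a plain map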
def job_env = env.getEnvironment().findAll { k, v -> v }

/////
extra_vars = readYaml text: job_env.get('EXTRA_VARIABLES_YAML', '').trim()
// FIXME: os_openrc should be refactored.
os_openrc = readYaml text: job_env.get('OPENSTACK_OPENRC_YAML', '').trim()
if (job_env.get('TIMESTAMP_INAME', false).toBoolean()) {
    imageName = job_env.IMAGE_NAME + "-" + dateTime
} else {
    imageName = job_env.IMAGE_NAME
}
// Overwrite IMAGE_NAME in template.json with the expected one
extra_vars['IMAGE_NAME'] = imageName
// Normalize some variables
job_env['CLEANUP_AFTER'] = job_env.CLEANUP_AFTER.toBoolean()
job_env['SKIP_UPLOAD'] = job_env.SKIP_UPLOAD.toBoolean()
job_env['BUILD_ONLY'] = job_env.BUILD_ONLY.toLowerCase()
job_env['PUBLISH_BACKEND'] = job_env.PUBLISH_BACKEND.toLowerCase()
//
defaultGitRef = job_env.get('DEFAULT_GIT_REF', 'HEAD')
defaultGitUrl = job_env.get('DEFAULT_GIT_URL', null)
slaveNode = (env.SLAVE_NODE ?: 'jsl23.mcp.mirantis.net')

// Self-check
for (String req_v : ['BUILD_OS', 'BUILD_ONLY', 'IMAGE_NAME']) {
    if (!job_env.get(req_v, false)) {
        throw new Exception("${req_v} not set!")
    }
}

def MapToList(input_map) {
    /**
     * Convert a map into a bash-like list of KEY=VALUE strings
     */
    def data_list = []
    for (def i = 0; i < input_map.size(); i++) {
        data_list.add(input_map.keySet()[i] + "=" + input_map.values()[i])
    }
    return data_list
}
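// Example (illustrative): MapToList(['IMAGE_NAME': 'ubuntu-16.04-proposed']) returns
// ['IMAGE_NAME=ubuntu-16.04-proposed'], ready to be appended to a withEnv() list.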

timeout(time: 6, unit: 'HOURS') {
    node(slaveNode) {
        def checkouted = false
        def workspace = common.getWorkspace()
        creds = common.getPasswordCredentials(job_env.CREDENTIALS_ID)
        if (job_env.BUILD_ONLY == 'openstack' || job_env.PUBLISH_BACKEND == 'glance') {
            rcFile = openstack.createOpenstackEnv(workspace, os_openrc.OS_AUTH_URL, job_env.OS_TENANT_ID, job_env.OS_TENANT_NAME, "default", "", "default", "2", "")
            // Keep openstackEnv in the script binding (no 'def'), it is used later for the image download/upload
            openstackEnv = "${workspace}/venv"
        }

        try {
            def _artifact_dir = "${workspace}/artifacts"
            def _artifact_list = []
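            // Cache of base/source images shared between builds of this job; passed to packer as PACKER_IMAGES_CACHE for the qemu build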
            def ImagesCacheFolder = "${workspace}/../${env.JOB_NAME}_cache/"
            stage("checkout") {
                if (defaultGitRef && defaultGitUrl) {
                    checkouted = gerrit.gerritPatchsetCheckout(defaultGitUrl, defaultGitRef, "HEAD", job_env.CREDENTIALS_ID)
                } else {
                    throw new Exception("Cannot checkout gerrit patchset: DEFAULT_GIT_URL is null")
                }
            }
            stage("Prepare env") {
                if (!fileExists("${workspace}/tmp")) {
                    sh "mkdir -p ${workspace}/tmp"
                }
                if (!fileExists(ImagesCacheFolder)) {
                    sh "mkdir -p ${ImagesCacheFolder}"
                }
                if (!fileExists(_artifact_dir)) {
                    sh "mkdir -p ${_artifact_dir}"
                }
                if (!fileExists("bin")) {
                    common.infoMsg("Downloading packer")
                    sh "mkdir -p bin"
                    dir("bin") {
                        def zipname = sh(script: "basename ${job_env.PACKER_URL}", returnStdout: true).trim()
                        sh(script: "wget --quiet ${job_env.PACKER_URL}", returnStdout: true)
| sh "echo \"${job_env.PACKER_ZIP_MD5} ${zipname}\" >> md5sum" |
| sh(script: "md5sum -c --status md5sum", returnStdout: true) |
| sh "unzip ${zipname}" |
| } |
| } |
| if (!fileExists("${job_env.BUILD_OS}/images")) { |
| // clean images dir before building |
| sh(script: "rm -rf ${job_env.BUILD_OS}/images/*", returnStatus: true) |
| } |
| } |
| |
| stage("Build Instance") { |
| def _packer_args = "${job_env.get(PACKER_ARGS, '')}" |
| def _packer_log = "${workspace}/packer.log" |
| // clean old log, for correct status grepping |
| if (fileExists(_packer_log)) { |
| sh "rm -v ${_packer_log}" |
| } |
| |
| dir("${workspace}/${job_env.BUILD_OS}/") { |
| if (fileExists("config-drive/user-data.yaml")) { |
| common.infoMsg("Creating cloud-config drive") |
| if (fileExists("config-drive/cloudata.iso")) { |
| sh "rm -v config-drive/cloudata.iso" |
| } |
| sh "cloud-localds config-drive/cloudata.iso config-drive/user-data.yaml" |
| } |
| } |

                if (job_env.BUILD_ONLY == "openstack") {
                    dir("${workspace}/${job_env.BUILD_OS}/") {
                        extra_vars_list = MapToList(extra_vars)
                        withEnv(["PATH=${env.PATH}:${workspace}/bin",
                                 "PACKER_LOG_PATH=${_packer_log}",
                                 "PACKER_LOG=1",
                                 "TMPDIR=${workspace}/tmp",
                                 "OS_USERNAME=${creds.username.toString()}",
                                 "OS_PASSWORD=${creds.password.toString()}"] + extra_vars_list) {

                            common.infoMsg("Run build with:")
                            sh(script: 'printenv|sort')
                            sh(script: "set -xe; packer build -only='openstack' ${_packer_args} -parallel=false template.json")
                            _os_private = "${workspace}/${job_env.BUILD_OS}/os_${job_env.BUILD_OS}.pem"
                            if (fileExists(_os_private)) {
                                common.infoMsg("Packer private key:")
                                sh "cat ${_os_private}"
                            }
                            def packerStatus = sh(script: "grep \"Some builds didn't complete successfully and had errors\" ${_packer_log}", returnStatus: true)
                            // grep returns 0 if it finds the error marker
                            if (packerStatus != 0) {
                                common.infoMsg("Openstack instance build completed")
                            } else {
                                throw new Exception("Openstack Packer build failed")
                            }

                            common.retry(3, 5) {
                                common.infoMsg("Attempting to download the openstack image..")
                                openstack.runOpenstackCommand("openstack image save --file ${_artifact_dir}/${imageName}.qcow2 ${imageName}", rcFile, openstackEnv)
                            }
                            // Save filename to list, as in the qemu branch, so the publish stage can process it
                            _artifact_list.add("${imageName}.qcow2")
                        }
                    }

                } else if (job_env.BUILD_ONLY == 'qemu') {

                    dir("${workspace}/${job_env.BUILD_OS}/") {
                        extra_vars_list = MapToList(extra_vars)
                        withEnv(["PATH=${env.PATH}:${workspace}/bin",
                                 "PACKER_LOG_PATH=${_packer_log}",
                                 "PACKER_LOG=1",
                                 "TMPDIR=${workspace}/tmp",
                                 "PACKER_IMAGES_CACHE=${ImagesCacheFolder}"] + extra_vars_list) {
                            common.infoMsg("Run build with:")
                            sh(script: 'printenv|sort')
                            sh(script: "set -xe ; packer build -on-error=ask -only='qemu' ${_packer_args} -parallel=false template.json".toString())

                            def packerStatus = sh(script: "grep \"Some builds didn't complete successfully and had errors\" ${_packer_log}", returnStatus: true)
                            // grep returns 0 if it finds the error marker
                            if (packerStatus != 0) {
                                common.infoMsg("qemu instance build completed successfully")
                            } else {
                                throw new Exception("qemu instance build failed")
                            }
                            // collect artifacts
                            // TODO: make it possible to process multiple artifacts in one run.
                            dir('images/') {
                                sh(script: 'find .', returnStdout: true)
                                def _files = findFiles(glob: "*qemu*/${imageName}*")
                                if (_files.size() > 1) {
                                    common.warningMsg("Multiple artifacts detected! Only the first one will be processed!")
                                } else if (_files.size() == 0) {
                                    throw new Exception("No artifacts detected! BUILD_ONLY=${env.BUILD_ONLY} failed!")
                                }
                                for (String x : _files) {
                                    _file = sh(script: "set -x ; readlink -f ${x}", returnStdout: true)
                                    sh(script: "mv -v ${x} ${_artifact_dir}/${imageName}.qcow2")
                                    // Save filename to list
                                    _artifact_list.add("${imageName}.qcow2")
                                }
                            }
                        }
                    }

                } else {
                    throw new Exception("Unexpected BUILD_ONLY=${env.BUILD_ONLY} target!")
                }
            }

            stage("Publish artifacts") {
                dir(_artifact_dir) {
                    common.infoMsg("Processing md5 for artifacts")
                    for (String x : _artifact_list) {
                        _md5 = sh(script: "md5sum ${x} > ${x}.md5; cat ${x}.md5", returnStdout: true).trim()
                        _size = sh(script: "ls -alh ${x}", returnStdout: true).trim()
                        common.infoMsg("Artifact file: ${_size}\n${_md5}")
                    }
                    if (job_env.PUBLISH_BACKEND == 'local') {
                        common.infoMsg("Uploading to: local")
                        common.infoMsg("For the local publish target there is nothing to do, all files are in: ${_artifact_dir}")
                        if (job_env.get('CLEANUP_AFTER', false)) {
                            common.warningMsg("The 'local' publish method is used together with an enabled CLEANUP_AFTER!")
                            common.warningMsg("Disabling CLEANUP_AFTER to preserve your data.")
                            job_env.CLEANUP_AFTER = false
                        }
                    } else if (job_env.PUBLISH_BACKEND == 'glance') {
                        common.infoMsg("Uploading to: glance-openstack")
                        if (fileExists("${workspace}/venv")) {
                            common.infoMsg("Cleaning virtualenv at: ${workspace}/venv")
                            sh(script: "rm -rf ${workspace}/venv", returnStatus: true)
                        }
                        openstack.setupOpenstackVirtualenv(openstackEnv, job_env.OS_VERSION)
                        for (String x : findFiles(glob: "*.*")) {
                            if (x.endsWith('.md5')) {
                                common.warningMsg("Skipping ${x} from openstack upload!")
                                continue
                            }
                            // The checksum file was generated above as <artifact>.md5
                            _md5sum = sh(script: "cat ${x}.md5", returnStdout: true).trim().split()[0]
                            _property = "--property data=${dateTime} --property md5sum=${_md5sum}"
                            _cmdline = String.format("glance image-create --visibility " +
                                "public %s --name '%s' --file %s", _property, imageName, x)
                            openstack.runOpenstackCommand(_cmdline, rcFile, openstackEnv)
                        }
                        // TODO: append the target tenant name
                        currentBuild.description = "${imageName}.qcow2 uploaded tenant: "

                    } else if (job_env.PUBLISH_BACKEND == 'http') {
                        for (String u_file : findFiles(glob: '*.*')) {
                            common.infoMsg("Uploading file ${u_file}")
                            def uploadImageStatus = ''
                            common.retry(3, 5) {
                                uploadImageStatus = sh(script: "curl -f -T ${u_file} ${job_env.UPLOAD_URL}", returnStatus: true)
                                if (uploadImageStatus != 0) {
                                    throw new Exception("Uploading file: ${u_file} failed!")
                                }
                            }
                            // FIXME: use the correct path here?
                            currentBuild.description = "<a href='http://ci.mcp.mirantis.net:8085/images/${imageName}.qcow2'>${imageName}.qcow2</a>"
                        }

                    } else {
                        throw new Exception("Unsupported publish backend: ${job_env.PUBLISH_BACKEND}")
                    }

                }
            }
        } catch (Throwable e) {
            // If an error or exception was thrown, the build failed
            currentBuild.result = "FAILURE"
            throw e
        } finally {
            if (job_env.get('CLEANUP_AFTER', false)) {
                dir(workspace) {
                    sh "find . -mindepth 1 -delete || true"
                }
                if (job_env.BUILD_ONLY == 'openstack') {
                    common.warningMsg("Openstack env cleanup is not implemented yet!")
                }
            } else {
                common.warningMsg("The env has not been cleaned! Please clean it up manually!")
            }
        }
    }
}