blob: 32c499d0fcd0ca6011386469423cb8d002a85869 [file] [log] [blame]
azvyagintsev68786a12018-07-10 14:42:29 +03001/**
2 *
3 * Build mirror/aptly/apt01 image pipeline
4 *
5 * PACKER_URL = 'https://releases.hashicorp.com/packer/1.2.4/packer_1.2.4_linux_amd64.zip'
6 * PACKER_ZIP_MD5 = '997274e80ae41102eecf9df2e5b67860'
7 * PACKER_ARGS = '-debug'
8 * BUILD_ONLY = 'openstack|qemu'
9 * CLEANUP_AFTER = wipe workspace AFTER build
10 * IMAGE_NAME = Name of the result image.
11 * TIMESTAMP_INAME = True|false. If true - image will be uploaded to backend with IMAGE_NAME + timestamp
12 *
 *  OPENSTACK_OPENRC_YAML: Yaml of key:value variables required for the OpenStack venv
14 * example:
15 * ---
16 * OS_TENANT_ID: 'xxxx'
17 * OS_TENANT_NAME: "mcp-oscore-ci"
18 *
19 * EXTRA_VARIABLES_YAML - Yaml of key:value variables required for template.json
20 * example:
21 * ---
22 * IMAGE_NAME: 'ubuntu-16.04-proposed'
23 *
24 * CREDENTIALS_ID = Global jenkins cred. for clone DEFAULT_GIT_URL
25 * DEFAULT_GIT_URL
26 * DEFAULT_GIT_REF
27 *
28 * OS_VERSION = OpenStack version
29 * OS_CREDENTIALS_ID = ID of credentials for OpenStack API stored in Jenkins.
30 * OS_URL = Keystone auth endpoint of the OpenStack.
31 * OS_PROJECT =
32 *
33 * PUBLISH_BACKEND = local|glance|http
 *  UPLOAD_URL = URL of a WebDAV endpoint used to upload the image after it is created. (Only in case PUBLISH_BACKEND == http)
35 */
36
// Load shared libs
def common = new com.mirantis.mk.Common()
def openstack = new com.mirantis.mk.Openstack()
def gerrit = new com.mirantis.mk.Gerrit()
def date = new Date()
def dateTime = date.format("ddMMyyyy-HHmmss")
// Snapshot of job parameters with empty/null values dropped.
def job_env = env.getEnvironment().findAll { k, v -> v }

// Self-check: fail fast with a clear message BEFORE any of these
// parameters is dereferenced below (otherwise we'd NPE on toLowerCase()).
for (String req_v : ['BUILD_OS', 'BUILD_ONLY', 'IMAGE_NAME']) {
    if (!job_env.get(req_v, false)) {
        throw new Exception("${req_v} not set!")
    }
}

/////
extra_vars = readYaml text: job_env.get('EXTRA_VARIABLES_YAML', '').trim()
// FIXME: os_openrc should be refactored.
os_openrc = readYaml text: job_env.get('OPENSTACK_OPENRC_YAML', '').trim()
// Jenkins passes booleans as strings ('True'|'false'); plain Groovy truth
// would treat the non-empty string 'false' as true, so parse explicitly.
if (job_env.get('TIMESTAMP_INAME', 'false').toBoolean()) {
    imageName = job_env.IMAGE_NAME + "-" + dateTime
} else {
    imageName = job_env.IMAGE_NAME
}
// Overwrite IMAGE_NAME in template.json with expected
extra_vars['IMAGE_NAME'] = imageName
// Normalize case-insensitive selectors once, up front.
job_env['BUILD_ONLY'] = job_env.BUILD_ONLY.toLowerCase()
job_env['PUBLISH_BACKEND'] = job_env.PUBLISH_BACKEND.toLowerCase()
//
defaultGitRef = job_env.get('DEFAULT_GIT_REF', 'HEAD')
defaultGitUrl = job_env.get('DEFAULT_GIT_URL', null)
69
def MapToList(input_map) {
    /**
     * Convert a map into a bash-like list of 'key=value' strings.
     *
     * Replaces the previous O(n^2) indexed keySet()[i]/values()[i] loop,
     * which also leaked undeclared script-global bindings (i, data).
     *
     * @param input_map Map to convert, e.g. [A: '1', B: '2']
     * @return List of 'key=value' strings in the map's iteration order,
     *         e.g. ['A=1', 'B=2']; empty map yields an empty list.
     */
    return input_map.collect { k, v -> "${k}=${v}".toString() }
}
82
// Main pipeline: checkout -> prepare env -> packer build (openstack|qemu) ->
// publish artifacts (local|glance|http) -> optional workspace cleanup.
timeout(time: 6, unit: 'HOURS') {
    // Hardcoded builder node; packer, qemu and cloud-localds are expected there.
    node("jsl15.mcp.mirantis.net") {
        def checkouted = false
        def workspace = common.getWorkspace()
        creds = common.getPasswordCredentials(job_env.CREDENTIALS_ID)
        // openrc file + venv path are only needed when talking to OpenStack.
        if (job_env.BUILD_ONLY == 'openstack' || job_env.PUBLISH_BACKEND == 'glance') {
            rcFile = openstack.createOpenstackEnv(workspace, os_openrc.OS_AUTH_URL, job_env.OS_TENANT_ID, job_env.OS_TENANT_NAME, "default", "", "default", "2", "")
            // NOTE(review): declared with 'def' inside this if-block, but referenced
            // further below (image download, glance upload) — verify it is actually
            // in scope there under Jenkins CPS.
            def openstackEnv = "${workspace}/venv"
        }

        try {
            def _artifact_dir = "${workspace}/artifacts"
            def _artifact_list = []
            // Per-job image cache dir, sibling of the workspace so it survives wipes.
            def ImagesCacheFolder = "${workspace}/../${env.JOB_NAME}_cache/"
            stage("checkout") {
                if (defaultGitRef && defaultGitUrl) {
                    checkouted = gerrit.gerritPatchsetCheckout(defaultGitUrl, defaultGitRef, "HEAD", job_env.CREDENTIALS_ID)
                } else {
                    throw new Exception("Cannot checkout gerrit patchset: DEFAULT_GIT_URL is null")
                }
            }
            stage("Prepare env") {
                // Scratch dirs for packer (TMPDIR), image cache and artifacts.
                if (!fileExists("${workspace}/tmp")) {
                    sh "mkdir -p ${workspace}/tmp"
                }
                if (!fileExists(ImagesCacheFolder)) {
                    sh "mkdir -p ${ImagesCacheFolder}"
                }
                if (!fileExists(_artifact_dir)) {
                    sh "mkdir -p ${_artifact_dir}"
                }
                // Fetch the packer binary once; zip is verified against PACKER_ZIP_MD5.
                if (!fileExists("bin")) {
                    common.infoMsg("Downloading packer")
                    sh "mkdir -p bin"
                    dir("bin") {
                        def zipname = sh(script: "basename ${job_env.PACKER_URL}", returnStdout: true).trim()
                        sh(script: "wget --quiet ${job_env.PACKER_URL}", returnStdout: true)
                        sh "echo \"${job_env.PACKER_ZIP_MD5} ${zipname}\" >> md5sum"
                        sh(script: "md5sum -c --status md5sum", returnStdout: true)
                        sh "unzip ${zipname}"
                    }
                }
                // NOTE(review): condition looks inverted — it removes the images dir
                // content only when the dir does NOT exist; confirm intent.
                if (!fileExists("${job_env.BUILD_OS}/images")) {
                    // clean images dir before building
                    sh(script: "rm -rf ${job_env.BUILD_OS}/images/*", returnStatus: true)
                }
            }

            stage("Build Instance") {
                // NOTE(review): PACKER_ARGS is an unquoted map key here — this only
                // works if a global PACKER_ARGS binding exists; the string key
                // 'PACKER_ARGS' was probably intended.
                def _packer_args = "${job_env.get(PACKER_ARGS, '')}"
                def _packer_log = "${workspace}/packer.log"
                // clean old log, for correct status grepping
                if (fileExists(_packer_log)) {
                    sh "rm -v ${_packer_log}"
                }

                // Build a cloud-init config drive ISO when user-data is provided.
                dir("${workspace}/${job_env.BUILD_OS}/") {
                    if (fileExists("config-drive/user-data.yaml")) {
                        common.infoMsg("Creating cloud-config drive")
                        if (fileExists("config-drive/cloudata.iso")) {
                            sh "rm -v config-drive/cloudata.iso"
                        }
                        sh "cloud-localds config-drive/cloudata.iso config-drive/user-data.yaml"
                    }
                }

                if (job_env.BUILD_ONLY == "openstack") {
                    dir("${workspace}/${job_env.BUILD_OS}/") {
                        // Template variables are passed to packer via the environment.
                        extra_vars_list = MapToList(extra_vars)
                        withEnv(["PATH=${env.PATH}:${workspace}/bin",
                                 "PACKER_LOG_PATH=${_packer_log}",
                                 "PACKER_LOG=1",
                                 "TMPDIR=${workspace}/tmp",
                                 "OS_USERNAME=${creds.username.toString()}",
                                 "OS_PASSWORD=${creds.password.toString()}"] + extra_vars_list) {

                            common.infoMsg("Run build with:")
                            sh(script: 'printenv|sort')
                            sh(script: "set -xe; packer build -only='openstack' ${_packer_args} -parallel=false template.json" )
                            // Dump packer's generated ssh key for post-mortem debugging.
                            _os_private = "${workspace}/${job_env.BUILD_OS}/os_${job_env.BUILD_OS}.pem"
                            if (fileExists(_os_private)) {
                                common.infoMsg("Packer private key:")
                                sh "cat ${_os_private}"
                            }
                            def packerStatus = sh(script: "grep \"Some builds didn't complete successfully and had errors\" ${_packer_log}", returnStatus: true)
                            // grep returns 0 if find something
                            if (packerStatus != 0) {
                                common.infoMsg("Openstack instance build complete")
                            } else {
                                throw new Exception("Openstack Packer build failed")
                            }

                            // Pull the built image out of glance into the artifacts dir.
                            common.retry(3, 5) {
                                common.infoMsg("Attempt download openstack image..")
                                openstack.runOpenstackCommand("openstack image save --file ${_artifact_dir}/${imageName}.qcow2 ${imageName}", rcFile, openstackEnv)
                            }
                        }
                    }

                } else if (job_env.BUILD_ONLY == 'qemu') {

                    dir("${workspace}/${job_env.BUILD_OS}/") {
                        extra_vars_list = MapToList(extra_vars)
                        withEnv(["PATH=${env.PATH}:${workspace}/bin",
                                 "PACKER_LOG_PATH=${_packer_log}",
                                 "PACKER_LOG=1",
                                 "TMPDIR=${workspace}/tmp",
                                 "PACKER_IMAGES_CACHE=${ImagesCacheFolder}"] + extra_vars_list) {
                            common.infoMsg("Run build with:")
                            sh(script: 'printenv|sort')
                            sh(script: "set -xe ; packer build -on-error=ask -only='qemu' ${_packer_args} -parallel=false template.json".toString())

                            // NOTE(review): the openstack branch greps ${_packer_log};
                            // here the PACKER_LOG_PATH binding is interpolated instead —
                            // same path, but inconsistent.
                            def packerStatus = sh(script: "grep \"Some builds didn't complete successfully and had errors\" ${PACKER_LOG_PATH}", returnStatus: true)
                            // grep returns 0 if find something
                            if (packerStatus != 0) {
                                common.infoMsg("qemu instance build completed successfully")
                            } else {
                                throw new Exception("qemu instance build failed")
                            }
                            // collect artifacts
                            // TODO make it possible, process multiply artifacts by one run.
                            dir('images/') {
                                def _files = findFiles(glob: "*qemu*/${imageName}*")
                                if (_files.size() > 1) {
                                    common.warningMsg("Multiply artifacts detected!Only first one will be processed!")
                                } else if (_files.size() == 0) {
                                    throw new Exception("No artifacts detected!BUILD_ONLY=${env.BUILD_ONLY} failed!")
                                }
                                for (String x : _files) {
                                    // NOTE(review): _file is computed but never used.
                                    _file = sh(script: "set -x ; readlink -f ${x}", returnStdout: true)
                                    sh(script: "mv -v ${x} ${_artifact_dir}/${imageName}.qcow2")
                                    // Save filename to list
                                    _artifact_list.add("${imageName}.qcow2")
                                }
                            }
                        }
                    }

                } else {
                    throw new Exception("Unexpected BUILD_ONLY=${env.BUILD_ONLY} target!")
                }
            }

            stage("Publish artifacts") {
                dir(_artifact_dir) {
                    // Generate a .md5 sidecar next to each collected artifact.
                    common.infoMsg("Processing md5 for artifacts")
                    for (String x : _artifact_list) {
                        _md5 = sh(script: "md5sum ${x} > ${x}.md5; cat ${x}.md5", returnStdout: true).trim()
                        _size = sh(script: "ls -alh ${x}", returnStdout: true).trim()
                        common.infoMsg("Artifact file: ${_size}\n${_md5}")
                    }
                    if (job_env.PUBLISH_BACKEND == 'local') {
                        common.infoMsg("Uploading to: local")
                        common.infoMsg("For local publish target - nothing to do, all files in: ${_artifact_dir}")
                        // Local publish keeps files in the workspace, so a wipe would
                        // destroy the only copy — force-disable CLEANUP_AFTER.
                        if (job_env.get('CLEANUP_AFTER', false)) {
                            common.warningMsg("You are trying to use 'local' publish method, along with enabled CLEANUP_AFTER! ")
                            common.warningMsg("Disabling CLEANUP_AFTER option, to save you'r data ;) ")
                            job_env.CLEANUP_AFTER = false
                        }
                    } else if (job_env.PUBLISH_BACKEND == 'glance') {
                        common.infoMsg("Uploading to: glance-openstack")
                        // Recreate the venv from scratch for a clean glance client.
                        if (fileExists("${workspace}/venv")) {
                            common.infoMsg("cleaning virtualenv at:${workspace}/venv")
                            sh(script: "rm -rf ${workspace}/venv", returnStatus: true)
                        }
                        openstack.setupOpenstackVirtualenv(openstackEnv, job_env.OS_VERSION)
                        // NOTE(review): this loop relies on the .md5 file being visited
                        // before the image that uses _md5sum, but 'name.qcow2' sorts
                        // before 'name.qcow2.md5', so _md5sum may be unset on first use.
                        // Also imageShortName and glanceRunArgs are not defined anywhere
                        // in this file, and '--file %s' is given imageName rather than
                        // the artifact path — confirm against the shared library/seed.
                        for (String x : findFiles(glob: "*.*")) {
                            if (x.endsWith('.md5')) {
                                common.warningMsg("Skipping:${x} from openstack upload!")
                                _md5sum = sh(script: "cat ${x}", returnStdout: true).trim().split()[0]
                                continue
                            }
                            _property = "--property data=${dateTime} --property md5sum=${_md5sum}"
                            _cmdline = String.format("glance image-create --visibility " +
                                "public %s --name '%s' %s --file %s", _property, imageShortName, glanceRunArgs, imageName)
                            openstack.runOpenstackCommand(_cmdline, rcFile, openstackEnv)
                        }
                        // TODO
                        currentBuild.description = "${imageName}.qcow2 uploaded tenant: "

                    } else if (job_env.PUBLISH_BACKEND == 'http') {
                        // Push every artifact (and its .md5) to the WebDAV endpoint.
                        for (String u_file : findFiles(glob: '*.*')) {
                            common.infoMsg("Uploading image ${imageName}")
                            def uploadImageStatus = ''
                            common.retry(3, 5) {
                                uploadImageStatus = sh(script: "curl -f -T ${u_file} ${job_env.UPLOAD_URL}", returnStatus: true)
                                if (uploadImageStatus != 0) {
                                    throw new Exception("Uploading file: ${u_file} failed!")
                                }
                            }
                            // Fixme for correct path ?
                            currentBuild.description = "<a href='http://ci.mcp.mirantis.net:8085/images/${imageName}.qcow2'>${imageName}.qcow2</a>"
                        }

                    } else {
                        throw new Exception("Unsupported publish backend:${job_env.PUBLISH_BACKEND}")
                    }

                }
            }
        } catch (Throwable e) {
            // If there was an error or exception thrown, the build failed
            currentBuild.result = "FAILURE"
            throw e
        } finally {
            // Optional workspace wipe. NOTE(review): any non-empty string value for
            // CLEANUP_AFTER (including 'false') is truthy here under Groovy truth.
            if (job_env.get('CLEANUP_AFTER', false)) {
                dir(workspace) {
                    sh "find . -mindepth 1 -delete || true"
                }
                if (job_env.BUILD_ONLY == 'openstack') {
                    common.warningMsg("openstack Env cleanup not implemented yet!")
                }
            } else {
                common.warningMsg("Env has not been cleaned!Please cleanup it manualy!")
            }
        }
    }
}