/**
 *
 * Build mirror/aptly/apt01 image pipeline
 *
 * PACKER_URL = 'https://releases.hashicorp.com/packer/1.2.4/packer_1.2.4_linux_amd64.zip'
 * PACKER_ZIP_MD5 = '997274e80ae41102eecf9df2e5b67860'
 * PACKER_ARGS = '-debug'
 * BUILD_ONLY = 'openstack|qemu'
 * CLEANUP_AFTER = wipe workspace AFTER the build
 * IMAGE_NAME = name of the resulting image.
 * TIMESTAMP_INAME = True|false. If true, the image will be uploaded to the backend as IMAGE_NAME + timestamp.
 *
 * OPENSTACK_OPENRC_YAML = YAML of key:value variables required for the OpenStack venv
 * example:
 * ---
 * OS_TENANT_ID: 'xxxx'
 * OS_TENANT_NAME: "mcp-oscore-ci"
 *
 * EXTRA_VARIABLES_YAML = YAML of key:value variables required for template.json
 * example:
 * ---
 * IMAGE_NAME: 'ubuntu-16.04-proposed'
 *
 * CREDENTIALS_ID = global Jenkins credentials used to clone DEFAULT_GIT_URL
 * DEFAULT_GIT_URL
 * REFSPEC
 *
 * OS_VERSION = OpenStack version
 * OS_CREDENTIALS_ID = ID of the credentials for the OpenStack API stored in Jenkins.
 * OS_URL = Keystone auth endpoint of the OpenStack cloud.
 * OS_PROJECT =
 *
 * PUBLISH_BACKEND = local|glance|http
 * UPLOAD_URL = URL of a WebDAV server used to upload the image after creation (only when PUBLISH_BACKEND == http)
 */
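// Illustrative only: a minimal parameter set for a qemu-only build published locally
// (values are examples, not defaults enforced by this pipeline):
//   BUILD_OS        = 'ubuntu-16.04'
//   BUILD_ONLY      = 'qemu'
//   IMAGE_NAME      = 'ubuntu-16.04-proposed'
//   PUBLISH_BACKEND = 'local'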

// Load shared libs
def common = new com.mirantis.mk.Common()
def openstack = new com.mirantis.mk.Openstack()
def gerrit = new com.mirantis.mk.Gerrit()
def date = new Date()
def dateTime = date.format("ddMMyyyy-HHmmss")
//
def job_env = env.getEnvironment().findAll { k, v -> v }
gerritCredentials = env.CREDENTIALS_ID ?: 'gerrit'

/////
extra_vars = readYaml text: job_env.get('EXTRA_VARIABLES_YAML', '').trim()
// FIXME: os_openrc should be refactored.
os_openrc = readYaml text: job_env.get('OPENSTACK_OPENRC_YAML', '').trim()
if (job_env.get('TIMESTAMP_INAME', false).toBoolean()) {
    imageName = job_env.IMAGE_NAME + "-" + dateTime
} else {
    imageName = job_env.IMAGE_NAME
}
// Overwrite IMAGE_NAME in template.json with the expected name
extra_vars['IMAGE_NAME'] = imageName
// Normalize some variables
job_env['CLEANUP_AFTER'] = job_env.CLEANUP_AFTER.toBoolean()
job_env['SKIP_UPLOAD'] = job_env.SKIP_UPLOAD.toBoolean()
job_env['BUILD_ONLY'] = job_env.BUILD_ONLY.toLowerCase()
job_env['PUBLISH_BACKEND'] = job_env.PUBLISH_BACKEND.toLowerCase()
//
defaultGitRef = job_env.get('REFSPEC', 'HEAD')
defaultGitUrl = job_env.get('DEFAULT_GIT_URL', null)
slaveNode = (env.SLAVE_NODE ?: 'jsl23.mcp.mirantis.net')

// Self-check
for (String req_v : ['BUILD_OS', 'BUILD_ONLY', 'IMAGE_NAME']) {
    if (!job_env.get(req_v, false)) {
        throw new Exception("${req_v} not set!")
    }
}

def MapToList(input_map) {
    /**
     * Convert a map into a bash-like list of KEY=VALUE strings
     */
    def data_list = []
    for (i = 0; i < input_map.size(); i++) {
        data = input_map.keySet()[i] + "=" + input_map.values()[i]
        data_list.add(data)
    }
    return data_list
}
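// Usage example (illustrative): MapToList(['IMAGE_NAME': 'ubuntu-16.04-proposed'])
// returns ['IMAGE_NAME=ubuntu-16.04-proposed'], ready to be appended to a withEnv() list.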

timeout(time: 6, unit: 'HOURS') {
    node(slaveNode) {
        def workspace = common.getWorkspace()
        creds = common.getPasswordCredentials(job_env.CREDENTIALS_ID)
        if (job_env.BUILD_ONLY == 'openstack' || job_env.PUBLISH_BACKEND == 'glance') {
            rcFile = openstack.createOpenstackEnv(workspace, os_openrc.OS_AUTH_URL, job_env.OS_TENANT_ID, job_env.OS_TENANT_NAME, "default", "", "default", "2", "")
            openstackEnv = "${workspace}/venv"
        }

        stage("checkout") {
            if (defaultGitRef && defaultGitUrl) {
                checkouted = gerrit.gerritPatchsetCheckout(defaultGitUrl, defaultGitRef, "HEAD", job_env.CREDENTIALS_ID)
            } else {
                throw new Exception("Cannot checkout gerrit patchset: DEFAULT_GIT_URL is null")
            }
        }

        try {
            def _artifact_dir = "${workspace}/artifacts"
            def _artifact_list = []
            def ImagesCacheFolder = "${workspace}/../${env.JOB_NAME}_cache/"
            stage("Prepare env") {
                if (!fileExists("${workspace}/tmp")) {
                    sh "mkdir -p ${workspace}/tmp"
                }
                if (!fileExists(ImagesCacheFolder)) {
                    sh "mkdir -p ${ImagesCacheFolder}"
                }
                if (!fileExists(_artifact_dir)) {
                    sh "mkdir -p ${_artifact_dir}"
                }
                if (!fileExists("bin")) {
                    common.infoMsg("Downloading packer")
                    sh "mkdir -p bin"
                    dir("bin") {
                        def zipname = sh(script: "basename ${job_env.PACKER_URL}", returnStdout: true).trim()
                        sh(script: "wget --quiet ${job_env.PACKER_URL}", returnStdout: true)
                        sh "echo \"${job_env.PACKER_ZIP_MD5} ${zipname}\" >> md5sum"
                        sh(script: "md5sum -c --status md5sum", returnStdout: true)
                        sh "unzip ${zipname}"
                    }
                }
                if (fileExists("${job_env.BUILD_OS}/images")) {
                    // clean the images dir before building
                    sh(script: "rm -rf ${job_env.BUILD_OS}/images/*", returnStatus: true)
                }
            }
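            // The packer bootstrap above is roughly equivalent to this shell sketch
            // (PACKER_URL and PACKER_ZIP_MD5 come from the job parameters):
            //   wget --quiet "$PACKER_URL"
            //   echo "$PACKER_ZIP_MD5 $(basename "$PACKER_URL")" | md5sum -c --status -
            //   unzip "$(basename "$PACKER_URL")"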

            stage("Build Instance") {
                def _packer_args = job_env.get('PACKER_ARGS', '')
                def _packer_log = "${workspace}/packer.log"
                // remove the old log, so the status grep below only sees the current run
                if (fileExists(_packer_log)) {
                    sh "rm -v ${_packer_log}"
                }

                dir("${workspace}/${job_env.BUILD_OS}/") {
                    if (fileExists("config-drive")) {
                        def model = extra_vars.get('CLUSTER_MODEL', '')
                        if (model != "") {
                            checkout([
                                $class             : 'GitSCM',
                                branches           : [[name: 'FETCH_HEAD']],
                                recursiveSubmodules: true,
                                extensions         : [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'config-drive/model']],
                                userRemoteConfigs  : [[url: model, refspec: extra_vars.get('CLUSTER_MODEL_REF', 'master'), credentialsId: gerritCredentials]]
                            ])
                        }
                        def scripts = extra_vars.get('GIT_SALT_FORMULAS_SCRIPTS', '')
                        if (scripts != "") {
                            checkout([
                                $class           : 'GitSCM',
                                branches         : [[name: 'FETCH_HEAD']],
                                extensions       : [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'config-drive/salt_scripts']],
                                userRemoteConfigs: [[url: scripts, refspec: extra_vars.get('SCRIPTS_REF', 'master'), credentialsId: gerritCredentials]]
                            ])
                        }

                        common.infoMsg("Creating cloud-config drive")
                        def isoFile = "config-drive/cloudata.iso"
                        if (fileExists(isoFile)) {
                            sh "rm -v ${isoFile}"
                        }
                        // This is left for backward-compatibility
                        if (fileExists("config-drive/user-data.yaml")) {
                            sh "mv config-drive/user-data.yaml config-drive/user-data"
                            if (!fileExists("config-drive/meta-data")) {
                                sh "echo 'hostname: ubuntu' > config-drive/meta-data"
                            }
                        }
                        sh "mkisofs -o ${isoFile} -V cidata -r -J --quiet config-drive"
                        archiveArtifacts artifacts: "${isoFile}"
                    }
                }
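                // Note: cloudata.iso built above is a cloud-init (NoCloud) seed ISO: volume
                // label "cidata" with user-data/meta-data at its root, consumed by the image
                // being built on first boot.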

                if (job_env.BUILD_ONLY == "openstack") {
                    dir("${workspace}/${job_env.BUILD_OS}/") {
                        extra_vars_list = MapToList(extra_vars)
                        withEnv(["PATH=${env.PATH}:${workspace}/bin",
                                 "PACKER_LOG_PATH=${_packer_log}",
                                 "PACKER_LOG=1",
                                 "TMPDIR=${workspace}/tmp",
                                 "OS_USERNAME=${creds.username.toString()}",
                                 "OS_PASSWORD=${creds.password.toString()}"] + extra_vars_list) {

                            common.infoMsg("Run build with:")
                            sh(script: 'printenv|sort')
                            sh(script: "set -xe; packer build -only='openstack' ${_packer_args} -parallel=false template.json")
                            _os_private = "${workspace}/${job_env.BUILD_OS}/os_${job_env.BUILD_OS}.pem"
                            if (fileExists(_os_private)) {
                                common.infoMsg("Packer private key:")
                                sh "cat ${_os_private}"
                            }
                            def packerStatus = sh(script: "grep \"Some builds didn't complete successfully and had errors\" ${_packer_log}", returnStatus: true)
                            // grep returns 0 if it finds the error marker in the log
                            if (packerStatus != 0) {
                                common.infoMsg("OpenStack instance build completed successfully")
                            } else {
                                throw new Exception("OpenStack Packer build failed")
                            }

                            common.retry(3, 5) {
                                common.infoMsg("Attempting to download the OpenStack image..")
                                openstack.runOpenstackCommand("openstack image save --file ${_artifact_dir}/${imageName}.qcow2 ${imageName}", rcFile, openstackEnv)
                            }
                        }
                    }

                } else if (job_env.BUILD_ONLY == 'qemu') {

                    dir("${workspace}/${job_env.BUILD_OS}/") {
                        extra_vars_list = MapToList(extra_vars)
                        withEnv(["PATH=${env.PATH}:${workspace}/bin",
                                 "PACKER_LOG_PATH=${_packer_log}",
                                 "PACKER_LOG=1",
                                 "TMPDIR=${workspace}/tmp",
                                 "PACKER_IMAGES_CACHE=${ImagesCacheFolder}"] + extra_vars_list) {
                            common.infoMsg("Run build with:")
                            sh(script: 'printenv|sort')
                            sh(script: "set -xe ; packer build -on-error=ask -only='qemu' ${_packer_args} -parallel=false template.json".toString())

                            def packerStatus = sh(script: "grep \"Some builds didn't complete successfully and had errors\" ${_packer_log}", returnStatus: true)
                            // grep returns 0 if it finds the error marker in the log
                            if (packerStatus != 0) {
                                common.infoMsg("qemu instance build completed successfully")
                            } else {
                                throw new Exception("qemu instance build failed")
                            }
                            // collect artifacts
                            // TODO: make it possible to process multiple artifacts in one run.
                            dir('images/') {
                                sh(script: 'find .')
                                def _files = findFiles(glob: "*qemu*/*${imageName}*.qcow2")
                                if (_files.size() > 1) {
                                    common.warningMsg("Multiple artifacts detected! Only the first one will be processed!")
                                } else if (_files.size() == 0) {
                                    throw new Exception("No artifacts detected! BUILD_ONLY=${env.BUILD_ONLY} build failed!")
                                }
                                for (String x : _files) {
                                    _file = sh(script: "set -x ; readlink -f ${x}", returnStdout: true)
                                    sh(script: "mv -v ${x} ${_artifact_dir}/${imageName}.qcow2")
                                    // Save the filename to the artifact list
                                    _artifact_list.add("${imageName}.qcow2")
                                }
                            }
                        }
                    }

                } else {
                    throw new Exception("Unexpected BUILD_ONLY=${env.BUILD_ONLY} target!")
                }
            }
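
            // Both build branches detect failure by grepping the packer log for the
            // "Some builds didn't complete successfully and had errors" marker; a plain
            // shell equivalent of that check would be (sketch):
            //   ! grep -q "Some builds didn't complete successfully and had errors" packer.log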

            stage("Publish artifacts") {
                dir(_artifact_dir) {
                    common.infoMsg("Processing md5 for artifacts")
                    for (String x : _artifact_list) {
                        _md5 = sh(script: "md5sum ${x} > ${x}.md5; cat ${x}.md5", returnStdout: true).trim()
                        _size = sh(script: "ls -alh ${x}", returnStdout: true).trim()
                        common.infoMsg("Artifact file: ${_size}\n${_md5}")
                    }
                    if (job_env.PUBLISH_BACKEND == 'local') {
                        common.infoMsg("Uploading to: local")
                        common.infoMsg("For the local publish target there is nothing to do, all files are in: ${_artifact_dir}")
                        if (job_env.get('CLEANUP_AFTER', false)) {
                            common.warningMsg("You are trying to use the 'local' publish method with CLEANUP_AFTER enabled!")
                            common.warningMsg("Disabling the CLEANUP_AFTER option to preserve your data ;)")
                            job_env.CLEANUP_AFTER = false
                        }
                    } else if (job_env.PUBLISH_BACKEND == 'glance') {
                        common.infoMsg("Uploading to: glance-openstack")
                        if (fileExists("${workspace}/venv")) {
                            common.infoMsg("Cleaning virtualenv at: ${workspace}/venv")
                            sh(script: "rm -rf ${workspace}/venv", returnStatus: true)
                        }
                        openstack.setupOpenstackVirtualenv(openstackEnv, job_env.OS_VERSION)
                        for (String x : findFiles(glob: "*.*")) {
                            if (x.endsWith('.md5')) {
                                common.warningMsg("Skipping ${x} from the OpenStack upload!")
                                _md5sum = sh(script: "cat ${x}", returnStdout: true).trim().split()[0]
                                continue
                            }
                            _property = "--property data=${dateTime} --property md5sum=${_md5sum}"
                            _cmdline = String.format("glance image-create --visibility " +
                                "public %s --name '%s' %s --file %s", _property, imageShortName, glanceRunArgs, imageName)
                            openstack.runOpenstackCommand(_cmdline, rcFile, openstackEnv)
                        }
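                        // For reference, the assembled command has the form (sketch; imageShortName
                        // and glanceRunArgs are assumed to be defined elsewhere, not in this file):
                        //   glance image-create --visibility public \
                        //     --property data=<dateTime> --property md5sum=<md5sum> \
                        //     --name '<imageShortName>' <glanceRunArgs> --file <imageName>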
                        // TODO
                        currentBuild.description = "${imageName}.qcow2 uploaded tenant: "

                    } else if (job_env.PUBLISH_BACKEND == 'http') {
                        for (String u_file : findFiles(glob: '*.*')) {
                            common.infoMsg("Uploading image ${imageName}")
                            def uploadImageStatus = ''
                            common.retry(3, 5) {
                                uploadImageStatus = sh(script: "curl -f -T ${u_file} ${job_env.UPLOAD_URL}", returnStatus: true)
                                if (uploadImageStatus != 0) {
                                    throw new Exception("Uploading file ${u_file} failed!")
                                }
                            }
                            // FIXME: use the correct upload path here?
                            currentBuild.description = "<a href='http://images.mcp.mirantis.net/${imageName}.qcow2'>${imageName}.qcow2</a>"
                        }
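                        // Note: 'curl -T' performs an HTTP PUT of the file to UPLOAD_URL (WebDAV),
                        // and '-f' makes curl exit non-zero on HTTP errors, which is what triggers
                        // the retry above.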

                    } else {
                        throw new Exception("Unsupported publish backend: ${job_env.PUBLISH_BACKEND}")
                    }

                }
            }
        } catch (Throwable e) {
            // If there was an error or exception thrown, the build failed
            currentBuild.result = "FAILURE"
            throw e
        } finally {
            if (job_env.get('CLEANUP_AFTER', false)) {
                dir(workspace) {
                    sh "find . -mindepth 1 -delete || true"
                }
                if (job_env.BUILD_ONLY == 'openstack') {
                    common.warningMsg("OpenStack env cleanup is not implemented yet!")
                }
            } else {
                common.warningMsg("Env has not been cleaned! Please clean it up manually!")
            }
        }
    }
}