/**
 *
 * Build mirror/aptly/apt01 image pipeline
 *
 * PACKER_URL = 'https://releases.hashicorp.com/packer/1.2.4/packer_1.2.4_linux_amd64.zip'
 * PACKER_ZIP_MD5 = '997274e80ae41102eecf9df2e5b67860'
 * PACKER_ARGS = '-debug'
 * BUILD_ONLY = 'openstack|qemu'
 * CLEANUP_AFTER = wipe workspace AFTER the build
 * IMAGE_NAME = Name of the resulting image.
 * TIMESTAMP_INAME = True|false. If true, the image will be uploaded to the backend as IMAGE_NAME + timestamp.
 *
 * OPENSTACK_OPENRC_YAML: YAML of key:value variables required for the OpenStack venv
 * example:
 * ---
 * OS_TENANT_ID: 'xxxx'
 * OS_TENANT_NAME: "mcp-oscore-ci"
 *
 * EXTRA_VARIABLES_YAML - YAML of key:value variables required for template.json
 * example:
 * ---
 * IMAGE_NAME: 'ubuntu-16.04-proposed'
 *
 * CREDENTIALS_ID = Global Jenkins credentials used to clone DEFAULT_GIT_URL
 * DEFAULT_GIT_URL
 * DEFAULT_GIT_REF
 *
 * OS_VERSION = OpenStack version
 * OS_CREDENTIALS_ID = ID of the credentials for the OpenStack API stored in Jenkins.
 * OS_URL = Keystone auth endpoint of the OpenStack cloud.
 * OS_PROJECT =
 *
 * PUBLISH_BACKEND = local|glance|http
 * UPLOAD_URL = URL of a WebDAV share used to upload the image after creation (only if PUBLISH_BACKEND == http)
 */
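// Note (illustrative, an assumption about template.json rather than part of the job
// contract): every key from EXTRA_VARIABLES_YAML is exported as an environment
// variable for 'packer build' via MapToList()/withEnv() below, so template.json is
// expected to pick the values up itself, e.g. through packer's '{{env ...}}'
// user-variable lookups. A minimal value could look like:
// ---
// IMAGE_NAME: 'ubuntu-16-04-x64'
// UBUNTU_BASEURL: 'http://mirror.mirantis.com/proposed/ubuntu/'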

// Load shared libs
def common = new com.mirantis.mk.Common()
def McpCommon = new com.mirantis.mcp.Common()
def openstack = new com.mirantis.mk.Openstack()
def date = new Date()
def dateTime = date.format("ddMMyyyy-HHmmss")
//
def job_env = env.getEnvironment().findAll { k, v -> v }

////
// To be removed
//job_env['BUILD_ONLY'] = 'qemu'
job_env['TIMESTAMP_INAME'] = true
//job_env['CLEANUP_AFTER'] = false
//job_env['PACKER_ARGS'] = ' -debug'
//job_env['BUILD_OS'] = 'ubuntu-16.04'
//job_env['IMAGE_NAME'] = 'ubuntu-16-04-x64-test'
//job_env['DEFAULT_GIT_REF'] = 'refs/changes/06/22106/5'
//job_env['DEFAULT_GIT_URL'] = 'ssh://mcp-jenkins@gerrit.mcp.mirantis.net:29418/mk/packer-templates'
//job_env['CREDENTIALS_ID'] = 'gerrit'
//job_env['PUBLISH_BACKEND'] = 'http'
//job_env['PACKER_URL'] = "https://releases.hashicorp.com/packer/1.2.4/packer_1.2.4_linux_amd64.zip"
//job_env['PACKER_ZIP_MD5'] = '997274e80ae41102eecf9df2e5b67860'
//job_env['EXTRA_VARIABLES_YAML'] = "\n" +
//    "IMAGE_NAME : \"ubuntu-16-04-x64\"\n" +
//    "UBUNTU_BASEURL: \"http://mirror.mirantis.com/proposed/ubuntu/\"\n" +
//    "SALTSTACK_REPO: \"deb [arch=amd64] http://mirror.mirantis.com/proposed/saltstack-2017.7/xenial xenial main\"\n" +
//    "SALTSTACK_GPG: \"https://repo.saltstack.com/apt/ubuntu/16.04/amd64/2017.7/SALTSTACK-GPG-KEY.pub\""
/////
extra_vars = readYaml(text: job_env.get('EXTRA_VARIABLES_YAML', '').trim()) ?: [:]
if (job_env.get('TIMESTAMP_INAME', false).toString().toBoolean()) {
    IMAGE_NAME = IMAGE_NAME + "-" + dateTime
}
// Overwrite IMAGE_NAME in template.json with the expected one
extra_vars['IMAGE_NAME'] = IMAGE_NAME
def gerrit = new com.mirantis.mk.Gerrit()
defaultGitRef = job_env.get('DEFAULT_GIT_REF', 'HEAD')
defaultGitUrl = job_env.get('DEFAULT_GIT_URL', null)

// Self-check: fail early if a mandatory parameter is missing
for (String req_v : ['BUILD_OS', 'BUILD_ONLY', 'IMAGE_NAME']) {
    if (!job_env.get(req_v, false)) {
        throw new Exception("${req_v} not set!")
    }
}

def MapToList(input_map) {
    /**
     * Convert a map into a bash-like list of 'KEY=value' strings
     */
    def data_list = []
    for (def key : input_map.keySet()) {
        data_list.add("${key}=${input_map[key]}".toString())
    }
    return data_list
}
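// Usage sketch (illustrative values only):
//   MapToList(['IMAGE_NAME': 'ubuntu-16-04-x64', 'UBUNTU_BASEURL': 'http://mirror.mirantis.com/proposed/ubuntu/'])
//   => ['IMAGE_NAME=ubuntu-16-04-x64', 'UBUNTU_BASEURL=http://mirror.mirantis.com/proposed/ubuntu/']
// The result is appended to the withEnv() list for the packer build steps below.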

timeout(time: 6, unit: 'HOURS') {
    node("jsl15.mcp.mirantis.net") {
        def checkouted = false
        def workspace = common.getWorkspace()
        def openstackEnv = "${workspace}/venv"
        def _artifact_dir = "${workspace}/artifacts"
        def _artifact_list = []
        def ImagesCacheFolder = "${workspace}/../${env.JOB_NAME}_cache/"
        def creds = common.getPasswordCredentials(job_env.OS_CREDENTIALS_ID)
        def rcFile = openstack.createOpenstackEnv(workspace, job_env.OS_URL, job_env.OS_CREDENTIALS_ID, job_env.OS_PROJECT, "default", "", "default", "2", "")

        try {
            stage("checkout") {
                if (defaultGitRef && defaultGitUrl) {
                    checkouted = gerrit.gerritPatchsetCheckout(defaultGitUrl, defaultGitRef, "HEAD", job_env.CREDENTIALS_ID)
                } else {
                    throw new Exception("Cannot checkout gerrit patchset: DEFAULT_GIT_URL is null")
                }
            }
            stage("Prepare env") {
                if (!fileExists("${workspace}/tmp")) {
                    sh "mkdir -p ${workspace}/tmp"
                }
                if (!fileExists(ImagesCacheFolder)) {
                    sh "mkdir -p ${ImagesCacheFolder}"
                }
                if (!fileExists(_artifact_dir)) {
                    sh "mkdir -p ${_artifact_dir}"
                }
                if (!fileExists("bin")) {
                    common.infoMsg("Downloading packer")
                    sh "mkdir -p bin"
                    dir("bin") {
                        def zipname = sh(script: "basename ${job_env.PACKER_URL}", returnStdout: true).trim()
                        sh(script: "wget --quiet ${job_env.PACKER_URL}", returnStdout: true)
                        sh "echo \"${job_env.PACKER_ZIP_MD5} ${zipname}\" >> md5sum"
                        sh(script: "md5sum -c --status md5sum", returnStdout: true)
                        sh "unzip ${zipname}"
                    }
                }
                if (fileExists("${job_env.BUILD_OS}/images")) {
                    // clean the images dir before building
                    sh(script: "rm -rf ${job_env.BUILD_OS}/images/*", returnStatus: true)
                }
            }

            stage("Build Instance") {
                def _packer_args = job_env.get('PACKER_ARGS', '')
                def _packer_log = "${workspace}/packer.log"
                _artifact_list = []
                // clean the old log, for correct status grepping
                if (fileExists(_packer_log)) {
                    sh "rm -v ${_packer_log}"
                }

                dir("${workspace}/${job_env.BUILD_OS}/") {
                    if (fileExists("config-drive/user-data.yaml")) {
                        common.infoMsg("Creating cloud-config drive")
                        if (fileExists("config-drive/cloudata.iso")) {
                            sh "rm -v config-drive/cloudata.iso"
                        }
                        sh "cloud-localds config-drive/cloudata.iso config-drive/user-data.yaml"
                    }
                }

                if (job_env.BUILD_ONLY.toLowerCase() == "openstack") {

                    dir("${workspace}/${job_env.BUILD_OS}/") {
                        extra_vars_list = MapToList(extra_vars)
                        withEnv(["PATH=${env.PATH}:${workspace}/bin",
                                 "PACKER_LOG_PATH=${_packer_log}",
                                 "PACKER_LOG=1",
                                 "TMPDIR=${workspace}/tmp",
                                 "OS_USERNAME=${creds.username.toString()}",
                                 "OS_PASSWORD=${creds.password.toString()}"] + extra_vars_list) {

                            common.infoMsg("Run build with:")
                            sh(script: 'printenv|sort')
                            sh(script: "set -xe; packer build -only='openstack' ${_packer_args} -parallel=false template.json")
                            _os_private = "${workspace}/${job_env.BUILD_OS}/os_${job_env.BUILD_OS}.pem"
                            if (fileExists(_os_private)) {
                                common.infoMsg("Packer private key:")
                                sh "cat ${_os_private}"
                            }
                            def packerStatus = sh(script: "grep \"Some builds didn't complete successfully and had errors\" ${_packer_log}", returnStatus: true)
                            // grep returns 0 if it finds a match
                            if (packerStatus != 0) {
                                common.infoMsg("OpenStack instance build completed")
                            } else {
                                throw new Exception("OpenStack packer build failed")
                            }

                            common.retry(3, 5) {
                                common.infoMsg("Attempting to download the OpenStack image..")
                                openstack.runOpenstackCommand("openstack image save --file ${_artifact_dir}/${job_env.IMAGE_NAME}.qcow2 ${IMAGE_NAME}", rcFile, openstackEnv)
                            }
                        }
                    }

                } else if (job_env.BUILD_ONLY.toLowerCase() == 'qemu') {

                    dir("${workspace}/${job_env.BUILD_OS}/") {
                        extra_vars_list = MapToList(extra_vars)
                        withEnv(["PATH=${env.PATH}:${workspace}/bin",
                                 "PACKER_LOG_PATH=${_packer_log}",
                                 "PACKER_LOG=1",
                                 "TMPDIR=${workspace}/tmp",
                                 "PACKER_IMAGES_CACHE=${ImagesCacheFolder}"] + extra_vars_list) {
                            common.infoMsg("Run build with:")
                            sh(script: 'printenv|sort')
                            sh(script: "set -xe ; packer build -on-error=ask -only='qemu' ${_packer_args} -parallel=false template.json".toString())

                            def packerStatus = sh(script: "grep \"Some builds didn't complete successfully and had errors\" ${_packer_log}", returnStatus: true)
                            // grep returns 0 if it finds a match
                            if (packerStatus != 0) {
                                common.infoMsg("qemu instance build completed successfully")
                            } else {
                                throw new Exception("qemu instance build failed")
                            }
                            // collect artifacts
                            // TODO: make it possible to process multiple artifacts in one run.
                            dir('images/') {
                                def _files = findFiles(glob: "**/${job_env.IMAGE_NAME}*")
                                if (_files.size() > 1) {
                                    common.warningMsg("Multiple artifacts detected! Only the first one will be processed!")
                                } else if (_files.size() == 0) {
                                    common.warningMsg("No artifacts detected!")
                                }
                                for (String x : _files) {
                                    // rename the artifact to the expected name and register it for the publish stage
                                    sh(script: "mv -v ${x} ${_artifact_dir}/${job_env.IMAGE_NAME}.qcow2")
                                    _artifact_list.add("${job_env.IMAGE_NAME}.qcow2".toString())
                                }
                            }
                        }
                    }

                } else {
                    throw new Exception("Unexpected BUILD_ONLY=${job_env.BUILD_ONLY} target!")
                }


            }

            stage("Publish artifacts") {
                dir(_artifact_dir) {
                    common.infoMsg("Processing md5 for artifacts")
                    for (String x : _artifact_list) {
                        _md5 = sh(script: "md5sum ${x} | tee ${x}.md5", returnStdout: true).trim()
                        _size = sh(script: "ls -alh ${x}", returnStdout: true).trim()
                        common.infoMsg("Artifact file: ${_size}\n${_md5}")
                    }
                    if (job_env.PUBLISH_BACKEND.toLowerCase() == 'local') {
                        common.infoMsg("Uploading to: local")
                        common.infoMsg("For the local publish target there is nothing to do, all files are already in: ${_artifact_dir}")
                        if (job_env.get('CLEANUP_AFTER', false).toString().toBoolean()) {
                            common.warningMsg("You are trying to use the 'local' publish method along with an enabled CLEANUP_AFTER!")
                            common.warningMsg("Disabling the CLEANUP_AFTER option to save your data ;)")
                            job_env.CLEANUP_AFTER = false
                        }
                    } else if (job_env.PUBLISH_BACKEND.toLowerCase() == 'glance') {
                        common.infoMsg("Uploading to: openstack")
                        if (fileExists("${workspace}/venv")) {
                            common.infoMsg("Cleaning virtualenv at: ${workspace}/venv")
                            sh(script: "rm -rf ${workspace}/venv", returnStatus: true)
                        }
                        openstack.setupOpenstackVirtualenv(openstackEnv, job_env.OS_VERSION)
                        for (String x : findFiles(glob: "*")) {
                            if (x.endsWith('.md5')) {
                                common.warningMsg("Skipping ${x} from the OpenStack upload!")
                                continue
                            }
                            // do upload: a minimal glance call using the (possibly timestamped) IMAGE_NAME;
                            // extra arguments (visibility, properties, ...) can be appended if the backend needs them
                            openstack.runOpenstackCommand("glance image-create --name '${IMAGE_NAME}' --disk-format qcow2 --container-format bare --file ${x}", rcFile, openstackEnv)
                        }


                    } else if (job_env.PUBLISH_BACKEND.toLowerCase() == 'http') {
                        for (String u_file : findFiles(glob: "*")) {
                            common.infoMsg("Uploading ${u_file} to ${job_env.UPLOAD_URL}")
                            def uploadImageStatus = ''
                            common.retry(3, 5) {
                                uploadImageStatus = sh(script: "curl -f -T ${u_file} ${job_env.UPLOAD_URL}", returnStatus: true)
                                if (uploadImageStatus != 0) {
                                    throw new Exception("Uploading file: ${u_file} failed!")
                                }
                            }

                        }
                        // Fixme for correct path ?
                        currentBuild.description = "<a href='http://ci.mcp.mirantis.net:8085/images/${IMAGE_NAME}.qcow2'>${IMAGE_NAME}.qcow2</a>"
                    } else {
                        throw new Exception("Unsupported publish backend: ${job_env.PUBLISH_BACKEND}")
                    }

                }
            }
        } catch (Throwable e) {
            // If there was an error or exception thrown, the build failed
            currentBuild.result = "FAILURE"
            throw e
        } finally {
            if (job_env.get('CLEANUP_AFTER', false).toString().toBoolean()) {
                dir(workspace) {
                    sh "find . -mindepth 1 -delete"
                }
            } else {
                common.infoMsg("Env has not been cleaned!")
            }
        }
    }
}