/**
 * Generate cookiecutter cluster by individual products
 *
 * Expected parameters:
 *   COOKIECUTTER_TEMPLATE_CONTEXT      Context parameters for the template generation.
 *   EMAIL_ADDRESS                      Email address to send the generated model tarball to
 *   CREDENTIALS_ID                     Credentials id for git
 **/
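// An illustrative COOKIECUTTER_TEMPLATE_CONTEXT sketch (YAML). Only keys referenced by this pipeline
// are shown and every value below is an assumption - a real context carries many more keys:
//
//   default_context:
//     cluster_name: deployment-one
//     cluster_domain: deployment-one.local
//     mcp_version: 2019.2.0
//     salt_master_hostname: cfg01
//     salt_master_management_address: 10.0.0.15
//     deploy_network_gateway: 10.0.0.1
//     deploy_network_netmask: 255.255.255.0
//     dns_server01: 10.0.0.1
//     cookiecutter_template_url: <git URL of the cookiecutter-templates repo>
//     cookiecutter_template_branch: ''
//     shared_reclass_url: <git URL of the reclass-system repo>
//     shared_reclass_branch: ''
//     local_repositories: 'False'
//     upstream_proxy_enabled: 'False'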
import static groovy.json.JsonOutput.toJson
import static groovy.json.JsonOutput.prettyPrint

common = new com.mirantis.mk.Common()
common2 = new com.mirantis.mcp.Common()
git = new com.mirantis.mk.Git()
python = new com.mirantis.mk.Python()
saltModelTesting = new com.mirantis.mk.SaltModelTesting()

slaveNode = env.SLAVE_NODE ?: 'python&&docker'
gerritCredentials = env.CREDENTIALS_ID ?: 'gerrit'
distribRevision = 'proposed'
gitGuessedVersion = false

def globalVariatorsUpdate() {
    def templateContext = readYaml text: env.COOKIECUTTER_TEMPLATE_CONTEXT
    def context = templateContext['default_context']
    // TODO: add more checks for critical vars
    // We cannot pin to any '_branch' variable from the context to identify the 'default git revision',
    // because each of them may hold a 'refs/' value. Instead, guess the version from the first branch
    // that uses the 'release/XXX' naming. This is purely a guess - if it misses, the correct
    // variable still has to be passed explicitly.
    [context.get('cookiecutter_template_branch'), context.get('shared_reclass_branch'), context.get('mcp_common_scripts_branch')].any { branch ->
        if (branch.toString().startsWith('release/')) {
            gitGuessedVersion = branch
            return true
        }
    }

    // Use the mcp_version git tag if no branch is specified for cookiecutter-templates
    if (!context.get('cookiecutter_template_branch')) {
        context['cookiecutter_template_branch'] = gitGuessedVersion ?: context['mcp_version']
    }
    // There are no nightly/testing/stable branches for the cookiecutter-templates repo, therefore use master
    if (["nightly", "testing", "stable"].contains(context['cookiecutter_template_branch'])) {
        context['cookiecutter_template_branch'] = 'master'
    }
    if (!context.get('shared_reclass_branch')) {
        context['shared_reclass_branch'] = gitGuessedVersion ?: context['mcp_version']
    }
    // There are no nightly/testing branches for the reclass-system repo, therefore use master
    if (["nightly", "testing", "stable"].contains(context['shared_reclass_branch'])) {
        context['shared_reclass_branch'] = 'master'
    }
    if (!context.get('mcp_common_scripts_branch')) {
        // Pin exactly to the CC branch, since it might use the 'release/XXX' format
        context['mcp_common_scripts_branch'] = gitGuessedVersion ?: context['mcp_version']
    }
    // There are no nightly/testing/stable branches for the mcp-common-scripts repo, therefore use master
    if (["nightly", "testing", "stable"].contains(context['mcp_common_scripts_branch'])) {
        context['mcp_common_scripts_branch'] = 'master'
    }
    // Resolve the binary distribution revision from mcp_version
    distribRevision = context['mcp_version']
    if (['master'].contains(context['mcp_version'])) {
        distribRevision = 'nightly'
    }
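    // A 'release/XXX' branch name maps to its plain version part, e.g. (illustrative) 'release/2019.2' -> '2019.2'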
    if (distribRevision.contains('/')) {
        distribRevision = distribRevision.split('/')[-1]
    }
    // Check whether we are going to test a bleeding-edge release that has no binary release yet
    if (!common.checkRemoteBinary([mcp_version: distribRevision]).linux_system_repo_url) {
        common.warningMsg("Binary release: ${distribRevision} does not exist. Fallback to 'proposed'!")
        distribRevision = 'proposed'
    }
    common.warningMsg("Using DISTRIB_REVISION: ${distribRevision}")
    common.infoMsg("Using context:\n" + context)
    print prettyPrint(toJson(context))
    return context
}

timeout(time: 1, unit: 'HOURS') {
    node(slaveNode) {
        def context = globalVariatorsUpdate()
        def templateEnv = "${env.WORKSPACE}/template"
        def modelEnv = "${env.WORKSPACE}/model"
        def testEnv = "${env.WORKSPACE}/test"
        def pipelineEnv = "${env.WORKSPACE}/pipelines"

        try {
            def cutterEnv = "${env.WORKSPACE}/cutter"
            def systemEnv = "${modelEnv}/classes/system"
            def testResult = false
            def user
            wrap([$class: 'BuildUser']) {
                user = env.BUILD_USER_ID
            }
            currentBuild.description = context['cluster_name']

            stage('Download Cookiecutter template') {
                sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
                checkout([
                    $class           : 'GitSCM',
                    branches         : [[name: 'FETCH_HEAD'],],
                    extensions       : [[$class: 'RelativeTargetDirectory', relativeTargetDir: templateEnv]],
                    userRemoteConfigs: [[url: context['cookiecutter_template_url'], refspec: context['cookiecutter_template_branch'], credentialsId: gerritCredentials],],
                ])
            }
            stage('Create empty reclass model') {
                dir(path: modelEnv) {
                    sh "rm -rfv .git; git init"
                    sshagent(credentials: [gerritCredentials]) {
                        sh "git submodule add ${context['shared_reclass_url']} 'classes/system'"
                    }
                }
                checkout([
                    $class           : 'GitSCM',
                    branches         : [[name: 'FETCH_HEAD'],],
                    extensions       : [[$class: 'RelativeTargetDirectory', relativeTargetDir: systemEnv]],
                    userRemoteConfigs: [[url: context['shared_reclass_url'], refspec: context['shared_reclass_branch'], credentialsId: gerritCredentials],],
                ])
                git.commitGitChanges(modelEnv, "Added new shared reclass submodule", "${user}@localhost", "${user}")
            }

            stage('Generate model') {
                python.setupCookiecutterVirtualenv(cutterEnv)
                // FIXME: refactor generateModel
                python.generateModel(common2.dumpYAML(['default_context': context]), 'default_context', context['salt_master_hostname'], cutterEnv, modelEnv, templateEnv, false)
                git.commitGitChanges(modelEnv, "Create model ${context['cluster_name']}", "${user}@localhost", "${user}")
            }

            stage("Test") {
                if (env.TEST_MODEL.toBoolean()) {
                    // Check whether we are going to test a bleeding-edge release that has no binary release yet
                    if (!common.checkRemoteBinary([mcp_version: distribRevision]).linux_system_repo_url) {
                        common.errorMsg("Binary release: ${distribRevision} does not exist. Fallback to 'proposed'!")
                        distribRevision = 'proposed'
                    }
                    sh("cp -r ${modelEnv} ${testEnv}")
                    def DockerCName = "${env.JOB_NAME.toLowerCase()}_${env.BUILD_TAG.toLowerCase()}"
                    common.infoMsg("Attempt to run test against distribRevision: ${distribRevision}")
                    try {
                        def config = [
                            'dockerHostname'     : "${context['salt_master_hostname']}.${context['cluster_domain']}",
                            'reclassEnv'         : testEnv,
                            'distribRevision'    : distribRevision,
                            'dockerContainerName': DockerCName,
                            'testContext'        : 'salt-model-node'
                        ]
                        testResult = saltModelTesting.testNode(config)
                        common.infoMsg("Test finished: SUCCESS")
                    } catch (Exception ex) {
                        common.warningMsg("Test finished: FAILED")
                        testResult = false
                    }
                } else {
                    common.warningMsg("Test stage has been skipped!")
                }
            }
            stage("Generate config drives") {
                // the genisoimage apt package is required for this stage

                // download create-config-drive
                // FIXME: this should be refactored to use git clone, so the script can be fetched from a custom repo
                def commonScriptsRepoUrl = context['mcp_common_scripts_repo'] ?: 'ssh://gerrit.mcp.mirantis.com:29418/mcp/mcp-common-scripts'
                checkout([
                    $class           : 'GitSCM',
                    branches         : [[name: 'FETCH_HEAD'],],
                    extensions       : [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'mcp-common-scripts']],
                    userRemoteConfigs: [[url: commonScriptsRepoUrl, refspec: context['mcp_common_scripts_branch'], credentialsId: gerritCredentials],],
                ])

                sh 'cp mcp-common-scripts/config-drive/create_config_drive.sh create-config-drive && chmod +x create-config-drive'
                sh '[ -f mcp-common-scripts/config-drive/master_config.sh ] && cp mcp-common-scripts/config-drive/master_config.sh user_data || cp mcp-common-scripts/config-drive/master_config.yaml user_data'

                sh "git clone --mirror https://github.com/Mirantis/mk-pipelines.git ${pipelineEnv}/mk-pipelines"
                sh "git clone --mirror https://github.com/Mirantis/pipeline-library.git ${pipelineEnv}/pipeline-library"
                args = "--user-data user_data --hostname ${context['salt_master_hostname']} --model ${modelEnv} --mk-pipelines ${pipelineEnv}/mk-pipelines/ --pipeline-library ${pipelineEnv}/pipeline-library/ ${context['salt_master_hostname']}.${context['cluster_domain']}-config.iso"

                // load data from model
                def smc = [:]
                smc['SALT_MASTER_MINION_ID'] = "${context['salt_master_hostname']}.${context['cluster_domain']}"
                smc['SALT_MASTER_DEPLOY_IP'] = context['salt_master_management_address']
                smc['DEPLOY_NETWORK_GW'] = context['deploy_network_gateway']
                smc['DEPLOY_NETWORK_NETMASK'] = context['deploy_network_netmask']
                if (context.get('deploy_network_mtu')) {
                    smc['DEPLOY_NETWORK_MTU'] = context['deploy_network_mtu']
                }
                smc['DNS_SERVERS'] = context['dns_server01']
                smc['MCP_VERSION'] = "${context['mcp_version']}"
                if (context['local_repositories'] == 'True') {
                    def localRepoIP = context['local_repo_url']
                    smc['MCP_SALT_REPO_KEY'] = "http://${localRepoIP}/public.gpg"
                    smc['MCP_SALT_REPO_URL'] = "http://${localRepoIP}/ubuntu-xenial"
                    smc['PIPELINES_FROM_ISO'] = 'false'
                    smc['PIPELINE_REPO_URL'] = "http://${localRepoIP}:8088"
                    smc['LOCAL_REPOS'] = 'true'
                }
                if (context['upstream_proxy_enabled'] == 'True') {
                    if (context['upstream_proxy_auth_enabled'] == 'True') {
                        smc['http_proxy'] = 'http://' + context['upstream_proxy_user'] + ':' + context['upstream_proxy_password'] + '@' + context['upstream_proxy_address'] + ':' + context['upstream_proxy_port']
                        smc['https_proxy'] = 'http://' + context['upstream_proxy_user'] + ':' + context['upstream_proxy_password'] + '@' + context['upstream_proxy_address'] + ':' + context['upstream_proxy_port']
                    } else {
                        smc['http_proxy'] = 'http://' + context['upstream_proxy_address'] + ':' + context['upstream_proxy_port']
                        smc['https_proxy'] = 'http://' + context['upstream_proxy_address'] + ':' + context['upstream_proxy_port']
                    }
                }

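                // Substitute the defaults in the user_data template with values from the model context,
                // e.g. (illustrative) 'SALT_MASTER_DEPLOY_IP=10.0.0.15' -> 'SALT_MASTER_DEPLOY_IP=<salt_master_management_address>'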
                for (i in common.entries(smc)) {
                    sh "sed -i 's,${i[0]}=.*,${i[0]}=${i[1]},' user_data"
                }

                // create cfg config-drive
                sh "./create-config-drive ${args}"
                sh("mkdir output-${context['cluster_name']} && mv ${context['salt_master_hostname']}.${context['cluster_domain']}-config.iso output-${context['cluster_name']}/")

                // save cfg iso to artifacts
                archiveArtifacts artifacts: "output-${context['cluster_name']}/${context['salt_master_hostname']}.${context['cluster_domain']}-config.iso"

                if (context['local_repositories'] == 'True') {
                    def aptlyServerHostname = context.aptly_server_hostname
                    sh "[ -f mcp-common-scripts/config-drive/mirror_config.yaml ] && cp mcp-common-scripts/config-drive/mirror_config.yaml mirror_config || cp mcp-common-scripts/config-drive/mirror_config.sh mirror_config"

                    def smc_apt = [:]
                    smc_apt['SALT_MASTER_DEPLOY_IP'] = context['salt_master_management_address']
                    smc_apt['APTLY_DEPLOY_IP'] = context['aptly_server_deploy_address']
                    smc_apt['APTLY_DEPLOY_NETMASK'] = context['deploy_network_netmask']
                    smc_apt['APTLY_MINION_ID'] = "${aptlyServerHostname}.${context['cluster_domain']}"

                    for (i in common.entries(smc_apt)) {
                        sh "sed -i \"s,export ${i[0]}=.*,export ${i[0]}=${i[1]},\" mirror_config"
                    }

                    // create apt config-drive
                    sh "./create-config-drive --user-data mirror_config --hostname ${aptlyServerHostname} ${aptlyServerHostname}.${context['cluster_domain']}-config.iso"
                    sh("mv ${aptlyServerHostname}.${context['cluster_domain']}-config.iso output-${context['cluster_name']}/")

                    // save apt iso to artifacts
                    archiveArtifacts artifacts: "output-${context['cluster_name']}/${aptlyServerHostname}.${context['cluster_domain']}-config.iso"
                }
            }

            stage('Save changes reclass model') {
                sh(returnStatus: true, script: "tar -czf output-${context['cluster_name']}/${context['cluster_name']}.tar.gz --exclude='*@tmp' -C ${modelEnv} .")
                archiveArtifacts artifacts: "output-${context['cluster_name']}/${context['cluster_name']}.tar.gz"

                if (EMAIL_ADDRESS != null && EMAIL_ADDRESS != "") {
                    emailext(to: EMAIL_ADDRESS,
                        attachmentsPattern: "output-${context['cluster_name']}/*",
                        body: "Mirantis Jenkins\n\nRequested reclass model ${context['cluster_name']} has been created and attached to this email.\nEnjoy!\n\nMirantis",
                        subject: "Your Salt model ${context['cluster_name']}")
                }
                dir("output-${context['cluster_name']}") {
                    deleteDir()
                }
            }

            // Fail, but leave the possibility to fetch the failed artifacts
            if (!testResult && env.TEST_MODEL.toBoolean()) {
                common.warningMsg('Test finished: FAILURE. Please check the logs and/or debug the failed model manually!')
                error('Test stage finished: FAILURE')
            }

        } catch (Throwable e) {
            currentBuild.result = "FAILURE"
            currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
            throw e
        } finally {
            stage('Clean workspace directories') {
                sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
            }
            // common.sendNotification(currentBuild.result, "", ["slack"])
        }
    }
}