common = new com.mirantis.mk.Common()
gerrit = new com.mirantis.mk.Gerrit()
git = new com.mirantis.mk.Git()
python = new com.mirantis.mk.Python()

gerritRef = env.GERRIT_REFSPEC ?: null
slaveNode = (env.SLAVE_NODE ?: 'python&&docker')
def alreadyMerged = false

def reclassVersion = 'v1.5.4'
if (common.validInputParam('RECLASS_VERSION')) {
    reclassVersion = RECLASS_VERSION
}

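// Write a minimal cfg01 Salt Master node definition (nodes/cfg01.<cluster_domain>.yml)
// into a freshly generated model, so the model can be used on its own.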
def generateSaltMaster(modEnv, clusterDomain, clusterName) {
    def nodeFile = "${modEnv}/nodes/cfg01.${clusterDomain}.yml"
    def nodeString = """classes:
- cluster.${clusterName}.infra.config
parameters:
  _param:
    linux_system_codename: xenial
    reclass_data_revision: master
  linux:
    system:
      name: cfg01
      domain: ${clusterDomain}
"""
    sh "mkdir -p ${modEnv}/nodes/"
    println "Create file ${nodeFile}"
    writeFile(file: nodeFile, text: nodeString)
}

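// Render a full cluster model from a single cookiecutter context file: every enabled
// product template is built into output/<product> inside the template env and then
// moved under model/<context basename>/classes/cluster/<cluster_name>.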
def generateModel(contextFile, virtualenv, templateEnvDir) {
    def modelEnv = "${templateEnvDir}/model"
    def basename = common.GetBaseName(contextFile, '.yml')
    def generatedModel = "${modelEnv}/${basename}"
    def content = readFile(file: "${templateEnvDir}/contexts/${contextFile}")
    def templateContext = readYaml text: content
    def clusterDomain = templateContext.default_context.cluster_domain
    def clusterName = templateContext.default_context.cluster_name
    def outputDestination = "${generatedModel}/classes/cluster/${clusterName}"
    def templateBaseDir = templateEnvDir
    def templateDir = "${templateEnvDir}/dir"
    def templateOutputDir = templateBaseDir
    sh(script: "rm -rf ${generatedModel} || true")

    common.infoMsg("Generating model from context ${contextFile}")

    def productList = ["infra", "cicd", "opencontrail", "kubernetes", "openstack", "oss", "stacklight", "ceph"]
    for (product in productList) {

        // get templateOutputDir and productDir
        if (product.startsWith("stacklight")) {
            templateOutputDir = "${templateEnvDir}/output/stacklight"
            try {
                productDir = "stacklight" + templateContext.default_context['stacklight_version']
            } catch (Throwable e) {
                productDir = "stacklight1"
            }
        } else {
            templateOutputDir = "${templateEnvDir}/output/${product}"
            productDir = product
        }

        if (product == "infra" || (templateContext.default_context["${product}_enabled"]
            && templateContext.default_context["${product}_enabled"].toBoolean())) {

            templateDir = "${templateEnvDir}/cluster_product/${productDir}"
            common.infoMsg("Generating product " + product + " from " + templateDir + " to " + templateOutputDir)

            sh "rm -rf ${templateOutputDir} || true"
            sh "mkdir -p ${templateOutputDir}"
            sh "mkdir -p ${outputDestination}"

            python.buildCookiecutterTemplate(templateDir, content, templateOutputDir, virtualenv, templateBaseDir)
            sh "mv -v ${templateOutputDir}/${clusterName}/* ${outputDestination}"
        } else {
            common.warningMsg("Product " + product + " is disabled")
        }
    }
    generateSaltMaster(generatedModel, clusterDomain, clusterName)
}

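// Trigger the downstream test-mk-cookiecutter-templates-chunk job for one generated
// model, pointing it at the patched_reclass.tar.gz artifact produced by this build.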
def testModel(modelFile, reclassVersion = 'v1.5.4') {
    // modelFile - `modelfilename` from model/modelfilename/modelfilename.yaml
    // Grab all models and send them to be checked in parallel - one per thread.

    _values_string = """
  ---
  MODELS_TARGZ: "${env.BUILD_URL}/artifact/patched_reclass.tar.gz"
  DockerCName: "${env.JOB_NAME.toLowerCase()}_${env.BUILD_TAG.toLowerCase()}_${modelFile.toLowerCase()}"
  testReclassEnv: "model/${modelFile}/"
  modelFile: "contexts/${modelFile}.yml"
  DISTRIB_REVISION: "${DISTRIB_REVISION}"
  EXTRA_FORMULAS: "${env.EXTRA_FORMULAS}"
  reclassVersion: "${reclassVersion}"
  """
    build job: "test-mk-cookiecutter-templates-chunk", parameters: [
        [$class: 'StringParameterValue', name: 'EXTRA_VARIABLES_YAML',
         value: _values_string.stripIndent()],
    ]
}

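// Return a closure that runs testModel() for one context basename on a slave node,
// so the caller can schedule it as a branch of parallel().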
def StepTestModel(basename, reclassVersion = 'v1.5.4') {
    // We need to wrap what we return in a Groovy closure, or else it's invoked
    // when this method is called, not when we pass it to parallel.
    // To do this, you need to wrap the code below in { }, and either return
    // that explicitly, or use { -> } syntax.
    // return node object
    return {
        node(slaveNode) {
            testModel(basename, reclassVersion)
        }
    }
}

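// Return a closure that checks the cookiecutter templates out into templateEnvFolder:
// either the Gerrit patchset under review (when refchange is set) or the plain
// COOKIECUTTER_TEMPLATE_URL / COOKIECUTTER_TEMPLATE_BRANCH tip.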
def StepPrepareCCenv(refchange, templateEnvFolder) {
    // return git clone closure
    return {
        // fetch needed sources
        dir(templateEnvFolder) {
            if (refchange) {
                def gerritChange = gerrit.getGerritChange(GERRIT_NAME, GERRIT_HOST, GERRIT_CHANGE_NUMBER, CREDENTIALS_ID)
                merged = gerritChange.status == "MERGED"
                if (!merged) {
                    checkouted = gerrit.gerritPatchsetCheckout([
                        credentialsId: CREDENTIALS_ID
                    ])
                } else {
                    // update global variable for success return from pipeline
                    //alreadyMerged = true
                    common.successMsg("Change ${GERRIT_CHANGE_NUMBER} is already merged, no need to gate it")
                    currentBuild.result = 'ABORTED'
                    throw new hudson.AbortException('change already merged')
                }
            } else {
                git.checkoutGitRepository(templateEnvFolder, COOKIECUTTER_TEMPLATE_URL, COOKIECUTTER_TEMPLATE_BRANCH, CREDENTIALS_ID)
            }
        }
    }
}

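// Return a closure that generates a model for every context file in the list,
// so the HEAD and patched template environments can be rendered in parallel.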
def StepGenerateModels(_contextFileList, _virtualenv, _templateEnvDir) {
    return {
        for (contextFile in _contextFileList) {
            generateModel(contextFile, _virtualenv, _templateEnvDir)
        }
    }
}

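// Main flow: prepare HEAD and patched template environments, generate models from
// both, archive them together with a shared reclass system, diff the rendered
// pillars, and finally test every patched context in the downstream chunk job.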
timeout(time: 1, unit: 'HOURS') {
    node(slaveNode) {
        def templateEnvHead = "${env.WORKSPACE}/env_head/"
        def templateEnvPatched = "${env.WORKSPACE}/env_patched/"
        def contextFileListHead = []
        def contextFileListPatched = []
        def vEnv = "${env.WORKSPACE}/venv"

        try {
            sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
            stage('Download and prepare CC env') {
                // Prepare 2 envs - for the patchset and for HEAD
                paralellEnvs = [:]
                paralellEnvs.failFast = true
                paralellEnvs['downloadEnvHead'] = StepPrepareCCenv('', templateEnvHead)
                paralellEnvs['downloadEnvPatched'] = StepPrepareCCenv(gerritRef, templateEnvPatched)
                parallel paralellEnvs
            }
            stage("Check workflow_definition") {
                // Check only for patchset
                python.setupVirtualenv(vEnv, 'python2', [], "${templateEnvPatched}/requirements.txt")
                common.infoMsg(python.runVirtualenvCommand(vEnv, "python ${templateEnvPatched}/workflow_definition_test.py"))
            }

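            // Render models from every context file in both environments, then archive
            // them together with a single shared reclass system for the chunk jobs.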
            stage("generate models") {
                dir("${templateEnvHead}/contexts") {
                    for (String x : findFiles(glob: "*.yml")) {
                        contextFileListHead.add(x)
                    }
                }
                dir("${templateEnvPatched}/contexts") {
                    for (String x : findFiles(glob: "*.yml")) {
                        contextFileListPatched.add(x)
                    }
                }
                // Generate over both envs - for the patchset and for HEAD
                paralellEnvs = [:]
                paralellEnvs.failFast = true
                paralellEnvs['GenerateEnvHead'] = StepGenerateModels(contextFileListHead, vEnv, templateEnvHead)
                paralellEnvs['GenerateEnvPatched'] = StepGenerateModels(contextFileListPatched, vEnv, templateEnvPatched)
                parallel paralellEnvs

                // Collect artifacts
                dir(templateEnvPatched) {
                    // Collect only models. For backward compatibility - who knows, probably someone still uses it..
                    sh(script: "tar -czf model.tar.gz -C model ../contexts .", returnStatus: true)
                    archiveArtifacts artifacts: "model.tar.gz"
                }

                // To be able to share reclass for all sub-envs.
                // Also makes the artifact test more solid - use one reclass for all sub-models.
                // Archive structure will be:
                // tar.gz
                // ├── contexts
                // │   └── ceph.yml
                // ├── global_reclass <<< reclass system
                // ├── model
                // │   └── ceph <<< from `context basename`
                // │       ├── classes
                // │       │   ├── cluster
                // │       │   └── system -> ../../../global_reclass
                // │       └── nodes
                // │           └── cfg01.ceph-cluster-domain.local.yml

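                // Fetch the reclass system model once: from RECLASS_MODEL_URL by default,
                // or from a Gerrit ref when SYSTEM_GIT_URL/SYSTEM_GIT_REF are set.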
                if (SYSTEM_GIT_URL == "") {
                    git.checkoutGitRepository("${env.WORKSPACE}/global_reclass/", RECLASS_MODEL_URL, RECLASS_MODEL_BRANCH, CREDENTIALS_ID)
                } else {
                    dir("${env.WORKSPACE}/global_reclass/") {
                        if (!gerrit.gerritPatchsetCheckout(SYSTEM_GIT_URL, SYSTEM_GIT_REF, "HEAD", CREDENTIALS_ID)) {
                            common.errorMsg("Failed to obtain system reclass with url: ${SYSTEM_GIT_URL} and ${SYSTEM_GIT_REF}")
                            throw new RuntimeException("Failed to obtain system reclass")
                        }
                    }
                }
                // link all models, to use one global reclass
                // For HEAD
                dir(templateEnvHead) {
                    for (String context : contextFileListHead) {
                        def basename = common.GetBaseName(context, '.yml')
                        dir("${templateEnvHead}/model/${basename}") {
                            sh(script: 'mkdir -p classes/; ln -sfv ../../../../global_reclass classes/system ')
                        }
                    }
                    // Save all models and all contexts. Warning! `h` flag must be used.
                    sh(script: "tar -chzf head_reclass.tar.gz --exclude='*@tmp' model contexts global_reclass", returnStatus: true)
                    archiveArtifacts artifacts: "head_reclass.tar.gz"
                    // move for "Compare Pillars" stage
                    sh(script: "mv -v head_reclass.tar.gz ${env.WORKSPACE}")
                }
                // For patched
                dir(templateEnvPatched) {
                    for (String context : contextFileListPatched) {
                        def basename = common.GetBaseName(context, '.yml')
                        dir("${templateEnvPatched}/model/${basename}") {
                            sh(script: 'mkdir -p classes/; ln -sfv ../../../../global_reclass classes/system ')
                        }
                    }
                    // Save all models and all contexts. Warning! `h` flag must be used.
                    sh(script: "tar -chzf patched_reclass.tar.gz --exclude='*@tmp' model contexts global_reclass", returnStatus: true)
                    archiveArtifacts artifacts: "patched_reclass.tar.gz"
                    // move for "Compare Pillars" stage
                    sh(script: "mv -v patched_reclass.tar.gz ${env.WORKSPACE}")
                }
            }

            stage("Compare Pillars") {
                // Compare patched and HEAD reclass pillars
                compareRoot = "${env.WORKSPACE}/test_compare/"
                sh(script: """
                    mkdir -pv ${compareRoot}/new ${compareRoot}/old
                    tar -xzf patched_reclass.tar.gz --directory ${compareRoot}/new
                    tar -xzf head_reclass.tar.gz --directory ${compareRoot}/old
                    """)
                common.warningMsg('infra/secrets.yml has been skipped from compare!')
                rezult = common.comparePillars(compareRoot, env.BUILD_URL, "-Ev 'infra/secrets.yml'")
                currentBuild.description = rezult
            }
            stage("test-contexts") {
                // Test contexts for patched only
                stepsForParallel = [:]
                common.infoMsg("Found: ${contextFileListPatched.size()} patched contexts to test.")
                for (String context : contextFileListPatched) {
                    def basename = common.GetBaseName(context, '.yml')
                    // Pass the resolved reclassVersion (from RECLASS_VERSION, when set) down to the chunk job
                    stepsForParallel.put("ContextPatchTest:${basename}", StepTestModel(basename, reclassVersion))
                }
                parallel stepsForParallel
                common.infoMsg('All tests done')
            }

            sh(script: 'find . -mindepth 1 -delete > /dev/null || true')

        } catch (Throwable e) {
            currentBuild.result = "FAILURE"
            currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
            throw e
        } finally {
            def dummy = "dummy"
            //FAILING common.sendNotification(currentBuild.result,"",["slack"])
        }
    }
}