/*
Can be triggered from Gerrit.
Modes:
1) Manual run via job-build; it is possible to pass a refspec
   TODO: currently it is impossible to use a custom COOKIECUTTER_TEMPLATE_URL | RECLASS_SYSTEM_URL; the Gerrit one is always used
   - for CC
   - for Reclass

2) Gerrit trigger
   Automatically selected if the GERRIT_PROJECT variable is detected.
   Always tests GERRIT_REFSPEC against the GERRIT_BRANCH (master) version of the opposite project.
*/
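// Illustrative example of the two modes (all values are hypothetical):
//   1) Manual run:  COOKIECUTTER_TEMPLATE_REF='refs/changes/10/12345/1' and/or RECLASS_SYSTEM_GIT_REF='refs/changes/20/54321/2'
//   2) Gerrit run:  GERRIT_PROJECT='mk/cookiecutter-templates', GERRIT_REFSPEC='refs/changes/10/12345/1', GERRIT_BRANCH='master'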
common = new com.mirantis.mk.Common()
gerrit = new com.mirantis.mk.Gerrit()
git = new com.mirantis.mk.Git()
python = new com.mirantis.mk.Python()

slaveNode = env.SLAVE_NODE ?: 'python&&docker'

// Global variables
alreadyMerged = false
gerritConData = [credentialsId       : env.CREDENTIALS_ID,
                 gerritName          : env.GERRIT_NAME ?: 'mcp-jenkins',
                 gerritHost          : env.GERRIT_HOST ?: 'gerrit.mcp.mirantis.net',
                 gerritRefSpec       : null,
                 gerritProject       : null,
                 withWipeOut         : true,
                 GERRIT_CHANGE_NUMBER: null]
//
//ccTemplatesRepo = env.COOKIECUTTER_TEMPLATE_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.net:29418/mk/cookiecutter-templates'
gerritDataCC = [:]
gerritDataCC << gerritConData
gerritDataCC['gerritBranch'] = env.COOKIECUTTER_TEMPLATE_BRANCH ?: 'master'
gerritDataCC['gerritProject'] = 'mk/cookiecutter-templates'
//
//reclassSystemRepo = env.RECLASS_SYSTEM_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.net:29418/salt-models/reclass-system'
gerritDataRS = [:]
gerritDataRS << gerritConData
gerritDataRS['gerritBranch'] = env.RECLASS_MODEL_BRANCH ?: 'master'
gerritDataRS['gerritProject'] = 'salt-models/reclass-system'

// Version of debRepos, i.e. formulas/reclass
testDistribRevision = env.DISTRIB_REVISION ?: 'nightly'
reclassVersion = 'v1.5.4'
if (common.validInputParam(env.RECLASS_VERSION)) {
    reclassVersion = env.RECLASS_VERSION
}
// Name of the sub-test chunk job
chunkJobName = "test-mk-cookiecutter-templates-chunk"
testModelBuildsData = [:]

def generateSaltMaster(modEnv, clusterDomain, clusterName) {
    def nodeFile = "${modEnv}/nodes/cfg01.${clusterDomain}.yml"
    def nodeString = """classes:
- cluster.${clusterName}.infra.config
parameters:
    _param:
        linux_system_codename: xenial
        reclass_data_revision: master
    linux:
        system:
            name: cfg01
            domain: ${clusterDomain}
"""
    sh "mkdir -p ${modEnv}/nodes/"
    println "Create file ${nodeFile}"
    writeFile(file: nodeFile, text: nodeString)
}

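// For reference (hypothetical cluster values), generateSaltMaster('model/ceph', 'ceph-cluster-domain.local', 'ceph-cluster')
// would write nodes/cfg01.ceph-cluster-domain.local.yml along these lines:
//   classes:
//   - cluster.ceph-cluster.infra.config
//   parameters:
//       _param:
//           linux_system_codename: xenial
//           reclass_data_revision: master
//       linux:
//           system:
//               name: cfg01
//               domain: ceph-cluster-domain.local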
/**
 *
 * @param contextFile - path to the `contexts/XXX.yaml` file
 * @param virtualenv - Python virtualenv with cookiecutter and its dependencies
 * @param templateEnvDir - root of the CookieCutter template env
 * @return
 */

def generateModel(contextFile, virtualenv, templateEnvDir) {
    def modelEnv = "${templateEnvDir}/model"
    def basename = common.GetBaseName(contextFile, '.yml')
    def generatedModel = "${modelEnv}/${basename}"
    def content = readFile(file: "${templateEnvDir}/contexts/${contextFile}")
    def templateContext = readYaml text: content
    def clusterDomain = templateContext.default_context.cluster_domain
    def clusterName = templateContext.default_context.cluster_name
    def outputDestination = "${generatedModel}/classes/cluster/${clusterName}"
    def templateBaseDir = templateEnvDir
    def templateDir = "${templateEnvDir}/dir"
    def templateOutputDir = templateBaseDir
    dir(templateEnvDir) {
        sh(script: "rm -rf ${generatedModel} || true")
        common.infoMsg("Generating model from context ${contextFile}")
        def productList = ["infra", "cicd", "opencontrail", "kubernetes", "openstack", "oss", "stacklight", "ceph"]
        for (product in productList) {

            // get templateOutputDir and productDir
            if (product.startsWith("stacklight")) {
                templateOutputDir = "${templateEnvDir}/output/stacklight"
                try {
                    productDir = "stacklight" + templateContext.default_context['stacklight_version']
                } catch (Throwable e) {
                    productDir = "stacklight1"
                }
            } else {
                templateOutputDir = "${templateEnvDir}/output/${product}"
                productDir = product
            }

            if (product == "infra" || (templateContext.default_context["${product}_enabled"]
                && templateContext.default_context["${product}_enabled"].toBoolean())) {

                templateDir = "${templateEnvDir}/cluster_product/${productDir}"
                common.infoMsg("Generating product " + product + " from " + templateDir + " to " + templateOutputDir)

                sh "rm -rf ${templateOutputDir} || true"
                sh "mkdir -p ${templateOutputDir}"
                sh "mkdir -p ${outputDestination}"

                python.buildCookiecutterTemplate(templateDir, content, templateOutputDir, virtualenv, templateBaseDir)
                sh "mv -v ${templateOutputDir}/${clusterName}/* ${outputDestination}"
            } else {
                common.warningMsg("Product " + product + " is disabled")
            }
        }
        generateSaltMaster(generatedModel, clusterDomain, clusterName)
    }
}

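// Note: apart from 'infra' (always generated), each product is gated by a '<product>_enabled' flag in the
// context's default_context, e.g. a hypothetical context line `openstack_enabled: 'True'` enables 'openstack'.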
def getAndUnpackNodesInfoArtifact(jobName, copyTo, build) {
    return {
        dir(copyTo) {
            copyArtifacts(projectName: jobName, selector: specific(build), filter: "nodesinfo.tar.gz")
            sh "tar -xvf nodesinfo.tar.gz"
            sh "rm -v nodesinfo.tar.gz"
        }
    }
}

def testModel(modelFile, reclassArtifactName, artifactCopyPath) {
    // modelFile - 'modelFileName' from model/modelFileName/modelFileName.yaml
    // Grab all models and send them for checking in parallel - one per thread.
    def _uuid = "${env.JOB_NAME.toLowerCase()}_${env.BUILD_TAG.toLowerCase()}_${modelFile.toLowerCase()}_" + UUID.randomUUID().toString().take(8)
    def _values_string = """
        ---
        MODELS_TARGZ: "${env.BUILD_URL}/artifact/${reclassArtifactName}"
        DockerCName: "${_uuid}"
        testReclassEnv: "model/${modelFile}/"
        modelFile: "contexts/${modelFile}.yml"
        DISTRIB_REVISION: "${testDistribRevision}"
        EXTRA_FORMULAS: "${env.EXTRA_FORMULAS}"
        reclassVersion: "${reclassVersion}"
        """
    def chunkJob = build job: chunkJobName, parameters: [
        [$class: 'StringParameterValue', name: 'EXTRA_VARIABLES_YAML',
         value : _values_string.stripIndent()],
    ]
    // Put sub-job info into the global map.
    testModelBuildsData.put(_uuid, ['jobname'  : chunkJob.fullProjectName,
                                    'copyToDir': "${artifactCopyPath}/${modelFile}",
                                    'buildId'  : "${chunkJob.number}"])
}

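// For example, a hypothetical 'ceph' model would spawn the chunk job with EXTRA_VARIABLES_YAML roughly like:
// ---
// MODELS_TARGZ: "<BUILD_URL>/artifact/patched_reclass.tar.gz"
// DockerCName: "<job-name>_<build-tag>_ceph_<8-char-uuid>"
// testReclassEnv: "model/ceph/"
// modelFile: "contexts/ceph.yml"
// DISTRIB_REVISION: "nightly"
// EXTRA_FORMULAS: "<EXTRA_FORMULAS>"
// reclassVersion: "v1.5.4"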
def StepTestModel(basename, reclassArtifactName, artifactCopyPath) {
    // We need to wrap what we return in a Groovy closure, or else it's invoked
    // when this method is called, not when we pass it to parallel.
    // To do this, you need to wrap the code below in { }, and either return
    // that explicitly, or use { -> } syntax.
    // return node object
    return {
        node(slaveNode) {
            testModel(basename, reclassArtifactName, artifactCopyPath)
        }
    }
}

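// Usage sketch (matches the 'TestContexts Head/Patched' stage below): each returned closure allocates
// its own node when run via parallel, e.g.:
//   stepsForParallel.put("ContextHeadTest:ceph", StepTestModel('ceph', headReclassArtifactName, reclassInfoHeadPath))
//   parallel stepsForParallel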
def StepPrepareGit(templateEnvFolder, gerrit_data) {
    // return git clone object
    return {
        def checkedOut = false
        common.infoMsg("StepPrepareGit: ${gerrit_data}")
        // fetch needed sources
        dir(templateEnvFolder) {
            if (gerrit_data['gerritRefSpec']) {
                // This part might not work when variables are passed manually
                def gerritChange = gerrit.getGerritChange(gerrit_data['gerritName'], gerrit_data['gerritHost'],
                    gerrit_data['GERRIT_CHANGE_NUMBER'], gerrit_data['credentialsId'])
                def merged = gerritChange.status == "MERGED"
                if (!merged) {
                    checkedOut = gerrit.gerritPatchsetCheckout(gerrit_data)
                } else {
                    // update the global variable for a pretty return from the pipeline
                    alreadyMerged = true
                    common.successMsg("Change ${gerrit_data['GERRIT_CHANGE_NUMBER']} is already merged, no need to gate it")
                    error('change already merged')
                }
            } else {
                // Get clean HEAD
                gerrit_data['useGerritTriggerBuildChooser'] = false
                checkedOut = gerrit.gerritPatchsetCheckout(gerrit_data)
                if (!checkedOut) {
                    error("Failed to get repo: ${gerrit_data}")
                }
            }
        }
    }
}

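// Note: the closure returned by StepPrepareGit is used both as a parallel branch (see the
// 'Download and prepare CC env' stage) and invoked directly via .call() for the reclass-system checkout.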
def StepGenerateModels(_contextFileList, _virtualenv, _templateEnvDir) {
    return {
        for (contextFile in _contextFileList) {
            generateModel(contextFile, _virtualenv, _templateEnvDir)
        }
    }
}

def globalVariatorsUpdate() {
    // Simple function to check and set branch-dependent variables.
    // In general, it applies transitional overrides for non-master branches,
    // based on the logic below.
    def message = ''
    if (common.validInputParam(env.GERRIT_PROJECT)) {
        if (!['nightly', 'testing', 'stable', 'proposed', 'master'].contains(env.GERRIT_BRANCH)) {
            gerritDataCC['gerritBranch'] = env.GERRIT_BRANCH
            gerritDataRS['gerritBranch'] = env.GERRIT_BRANCH
            // 'binary' branch logic, without the 'release/' prefix
            testDistribRevision = env.GERRIT_BRANCH.split('/')[-1]
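            // e.g. a hypothetical GERRIT_BRANCH 'release/2019.2.0' yields testDistribRevision '2019.2.0'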
            // Check if we are going to test a bleeding-edge release, which doesn't have a binary release yet
            if (!common.checkRemoteBinary([apt_mk_version: testDistribRevision]).linux_system_repo_url) {
                common.errorMsg("Binary release ${testDistribRevision} does not exist. Falling back to 'proposed'!")
                testDistribRevision = 'proposed'
            }
        }
        // Identify who triggered the build, i.e. to whom we should pass the refspec
        if (env.GERRIT_PROJECT == 'salt-models/reclass-system') {
            gerritDataRS['gerritRefSpec'] = env.GERRIT_REFSPEC
            gerritDataRS['GERRIT_CHANGE_NUMBER'] = env.GERRIT_CHANGE_NUMBER
            message = "<br/>RECLASS_SYSTEM_GIT_REF => ${gerritDataRS['gerritRefSpec']}"
        } else if (env.GERRIT_PROJECT == 'mk/cookiecutter-templates') {
            gerritDataCC['gerritRefSpec'] = env.GERRIT_REFSPEC
            gerritDataCC['GERRIT_CHANGE_NUMBER'] = env.GERRIT_CHANGE_NUMBER
            message = "<br/>COOKIECUTTER_TEMPLATE_REF => ${gerritDataCC['gerritRefSpec']}"
        } else {
            error("Unsupported gerrit-project triggered: ${env.GERRIT_PROJECT}")
        }

        message = "<font color='red'>GerritTrigger detected! We are in auto-mode:</font>" +
            "<br/>Test env variables have been changed:" +
            "<br/>COOKIECUTTER_TEMPLATE_BRANCH => ${gerritDataCC['gerritBranch']}" +
            "<br/>DISTRIB_REVISION => ${testDistribRevision}" +
            "<br/>RECLASS_MODEL_BRANCH => ${gerritDataRS['gerritBranch']}" + message
        common.warningMsg(message)
        currentBuild.description = currentBuild.description ? message + "<br/>" + currentBuild.description : message
    } else {
        // Check for passed variables:
        if (common.validInputParam(env.RECLASS_SYSTEM_GIT_REF)) {
            gerritDataRS['gerritRefSpec'] = RECLASS_SYSTEM_GIT_REF
        }
        if (common.validInputParam(env.COOKIECUTTER_TEMPLATE_REF)) {
            gerritDataCC['gerritRefSpec'] = COOKIECUTTER_TEMPLATE_REF
        }
        message = "<font color='red'>Manual run detected!</font>" + "<br/>"
        currentBuild.description = currentBuild.description ? message + "<br/>" + currentBuild.description : message
    }
}

def linkReclassModels(contextList, envPath, archiveName) {
    // To be able to share reclass across all sub-envs.
    // This also makes the artifact test more solid - one reclass is used for all sub-models.
    // Archive structure will be:
    // tar.gz
    // ├── contexts
    // │   └── ceph.yml
    // ├── global_reclass   <<< reclass system
    // ├── model
    // │   └── ceph         <<< from `context basename`
    // │       ├── classes
    // │       │   ├── cluster
    // │       │   └── system -> ../../../global_reclass
    // │       └── nodes
    // │           └── cfg01.ceph-cluster-domain.local.yml
    dir(envPath) {
        for (String context : contextList) {
            def basename = common.GetBaseName(context, '.yml')
            dir("${envPath}/model/${basename}") {
                sh(script: 'mkdir -p classes/; ln -sfv ../../../../global_reclass classes/system')
            }
        }
        // Save all models and all contexts. Warning: the `h` flag must be used,
        // so tar dereferences the classes/system symlinks into real content.
        sh(script: "set -ex; tar -chzf ${archiveName} --exclude='*@tmp' model contexts", returnStatus: true)
        archiveArtifacts artifacts: archiveName
        // Move for the "Compare Pillars" stage
        sh(script: "mv -v ${archiveName} ${env.WORKSPACE}")
    }
}

timeout(time: 1, unit: 'HOURS') {
    node(slaveNode) {
        globalVariatorsUpdate()
        def gerritDataCCHEAD = [:]
        def templateEnvHead = "${env.WORKSPACE}/EnvHead/"
        def templateEnvPatched = "${env.WORKSPACE}/EnvPatched/"
        def contextFileListHead = []
        def contextFileListPatched = []
        def vEnv = "${env.WORKSPACE}/venv"
        def headReclassArtifactName = "head_reclass.tar.gz"
        def patchedReclassArtifactName = "patched_reclass.tar.gz"
        def reclassNodeInfoDir = "${env.WORKSPACE}/reclassNodeInfo_compare/"
        def reclassInfoHeadPath = "${reclassNodeInfoDir}/old"
        def reclassInfoPatchedPath = "${reclassNodeInfoDir}/new"

        try {
            sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
            stage('Download and prepare CC env') {
                // Prepare 2 envs - one for the patchset, one for HEAD
                def parallelEnvs = [:]
                parallelEnvs.failFast = true
                parallelEnvs['downloadEnvPatched'] = StepPrepareGit(templateEnvPatched, gerritDataCC)
                gerritDataCCHEAD << gerritDataCC
                gerritDataCCHEAD['gerritRefSpec'] = null; gerritDataCCHEAD['GERRIT_CHANGE_NUMBER'] = null
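                // With gerritRefSpec/GERRIT_CHANGE_NUMBER nulled, StepPrepareGit takes its clean-HEAD path,
                // so 'downloadEnvHead' checks out the target branch without the patchset applied.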
                parallelEnvs['downloadEnvHead'] = StepPrepareGit(templateEnvHead, gerritDataCCHEAD)
                parallel parallelEnvs
            }
            stage("Check workflow_definition") {
                // Check only for patchset
                python.setupVirtualenv(vEnv, 'python2', [], "${templateEnvPatched}/requirements.txt")
                common.infoMsg(python.runVirtualenvCommand(vEnv, "python ${templateEnvPatched}/workflow_definition_test.py"))
            }

            stage("generate models") {
                dir("${templateEnvHead}/contexts") {
                    for (String x : findFiles(glob: "*.yml")) {
                        contextFileListHead.add(x)
                    }
                }
                dir("${templateEnvPatched}/contexts") {
                    for (String x : findFiles(glob: "*.yml")) {
                        contextFileListPatched.add(x)
                    }
                }
                // Generate over both envs - for the patchset and for HEAD
                def parallelEnvs = [:]
                parallelEnvs.failFast = true
                parallelEnvs['GenerateEnvPatched'] = StepGenerateModels(contextFileListPatched, vEnv, templateEnvPatched)
                parallelEnvs['GenerateEnvHead'] = StepGenerateModels(contextFileListHead, vEnv, templateEnvHead)
                parallel parallelEnvs

                // Collect artifacts
                dir(templateEnvPatched) {
                    // Collect only models. Kept for backward compatibility - someone may still use it.
                    sh(script: "tar -czf model.tar.gz -C model ../contexts .", returnStatus: true)
                    archiveArtifacts artifacts: "model.tar.gz"
                }

                StepPrepareGit("${env.WORKSPACE}/global_reclass/", gerritDataRS).call()
                // Link all models to use one global reclass
                // For HEAD
                linkReclassModels(contextFileListHead, templateEnvHead, headReclassArtifactName)
                // For patched
                linkReclassModels(contextFileListPatched, templateEnvPatched, patchedReclassArtifactName)
            }

            stage("Compare cluster lvl Head/Patched") {
                // Compare patched and HEAD reclass pillars
                compareRoot = "${env.WORKSPACE}/cluster_compare/"
                sh(script: """
                    mkdir -pv ${compareRoot}/new ${compareRoot}/old
                    tar -xzf ${patchedReclassArtifactName} --directory ${compareRoot}/new
                    tar -xzf ${headReclassArtifactName} --directory ${compareRoot}/old
                    """)
                common.warningMsg('infra/secrets.yml has been excluded from the compare!')
                result = '\n' + common.comparePillars(compareRoot, env.BUILD_URL, "-Ev \'infra/secrets.yml\'")
                currentBuild.description = currentBuild.description ? currentBuild.description + result : result
            }
            stage("TestContexts Head/Patched") {
                def stepsForParallel = [:]
                stepsForParallel.failFast = true
                common.infoMsg("Found: ${contextFileListHead.size()} HEAD contexts to test.")
                for (String context : contextFileListHead) {
                    def basename = common.GetBaseName(context, '.yml')
                    stepsForParallel.put("ContextHeadTest:${basename}", StepTestModel(basename, headReclassArtifactName, reclassInfoHeadPath))
                }
                common.infoMsg("Found: ${contextFileListPatched.size()} patched contexts to test.")
                for (String context : contextFileListPatched) {
                    def basename = common.GetBaseName(context, '.yml')
                    stepsForParallel.put("ContextPatchedTest:${basename}", StepTestModel(basename, patchedReclassArtifactName, reclassInfoPatchedPath))
                }
                parallel stepsForParallel
                common.infoMsg('All TestContexts tests done')
            }
            stage("Compare NodesInfo Head/Patched") {
                // Download all artifacts
                def stepsForParallel = [:]
                stepsForParallel.failFast = true
                common.infoMsg("Found: ${testModelBuildsData.size()} nodeinfo artifacts to download.")
                testModelBuildsData.each { bname, bdata ->
                    stepsForParallel.put("FetchData:${bname}",
                        getAndUnpackNodesInfoArtifact(bdata.jobname, bdata.copyToDir, bdata.buildId))
                }
                parallel stepsForParallel
                // Compare patched and HEAD reclass pillars
                result = '\n' + common.comparePillars(reclassNodeInfoDir, env.BUILD_URL, '')
                currentBuild.description = currentBuild.description ? currentBuild.description + result : result
            }
            sh(script: 'find . -mindepth 1 -delete > /dev/null || true')

        } catch (Throwable e) {
            if (alreadyMerged) {
                currentBuild.result = 'ABORTED'
                currentBuild.description = "Change ${GERRIT_CHANGE_NUMBER} is already merged, no need to gate it"
                return
            }
            currentBuild.result = "FAILURE"
            currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
            throw e
        } finally {
            def dummy = "dummy"
        }
    }
}