/*
Can be triggered from Gerrit.
Variators:
Modes:
1) manual run via job-build; it is possible to pass a refspec
   TODO: currently it is impossible to use a custom COOKIECUTTER_TEMPLATE_URL | RECLASS_SYSTEM_URL; the Gerrit one is always used
   - for CC
   - for Reclass

2) gerrit trigger
   Automatically switches to this mode if the GERRIT_PROJECT variable is detected.
   Always tests GERRIT_REFSPEC vs the GERRIT_BRANCH-master version of the opposite project.
 */

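// Illustrative example only (refspec values below are hypothetical): on a manual run the job
// parameters handled further down can be set e.g. as
//   COOKIECUTTER_TEMPLATE_REF = refs/changes/12/12345/3
//   RECLASS_SYSTEM_GIT_REF    = refs/changes/67/67890/1
//   DISTRIB_REVISION          = nightly
// When GERRIT_PROJECT is set (trigger mode), these manual refs are ignored and the refspec is
// taken from the Gerrit trigger variables instead.
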
common = new com.mirantis.mk.Common()
gerrit = new com.mirantis.mk.Gerrit()
git = new com.mirantis.mk.Git()
python = new com.mirantis.mk.Python()

slaveNode = env.SLAVE_NODE ?: 'python&&docker'

// Global vars
alreadyMerged = false
gerritConData = [credentialsId       : env.CREDENTIALS_ID,
                 gerritName          : env.GERRIT_NAME ?: 'mcp-jenkins',
                 gerritHost          : env.GERRIT_HOST ?: 'gerrit.mcp.mirantis.net',
                 gerritScheme        : env.GERRIT_SCHEME ?: 'ssh',
                 gerritPort          : env.GERRIT_PORT ?: '29418',
                 gerritRefSpec       : null,
                 gerritProject       : null,
                 withWipeOut         : true,
                 GERRIT_CHANGE_NUMBER: null]
//
//ccTemplatesRepo = env.COOKIECUTTER_TEMPLATE_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.net:29418/mk/cookiecutter-templates'
gerritDataCC = [:]
gerritDataCC << gerritConData
gerritDataCC['gerritBranch'] = env.COOKIECUTTER_TEMPLATE_BRANCH ?: 'master'
gerritDataCC['gerritProject'] = 'mk/cookiecutter-templates'
//
//reclassSystemRepo = env.RECLASS_SYSTEM_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.net:29418/salt-models/reclass-system'
gerritDataRS = [:]
gerritDataRS << gerritConData
gerritDataRS['gerritBranch'] = env.RECLASS_MODEL_BRANCH ?: 'master'
gerritDataRS['gerritProject'] = 'salt-models/reclass-system'

// version of debRepos, aka formulas/reclass
testDistribRevision = env.DISTRIB_REVISION ?: 'nightly'
reclassVersion = 'v1.5.4'
if (env.RECLASS_VERSION) {
    reclassVersion = env.RECLASS_VERSION
}
// Name of the sub-test chunk job
chunkJobName = "test-mk-cookiecutter-templates-chunk"
testModelBuildsData = [:]
// EXTRA_FORMULAS may be unset on a manual run; use a null-safe call to avoid an NPE
extraFormulasList = env.EXTRA_FORMULAS?.tokenize() ?: ['linux', 'openssh']

def generateSaltMaster(modEnv, clusterDomain, clusterName) {
    def nodeFile = "${modEnv}/nodes/cfg01.${clusterDomain}.yml"
    def nodeString = """classes:
- cluster.${clusterName}.infra.config
parameters:
  _param:
    linux_system_codename: xenial
    reclass_data_revision: master
  linux:
    system:
      name: cfg01
      domain: ${clusterDomain}
"""
    sh "mkdir -p ${modEnv}/nodes/"
    println "Create file ${nodeFile}"
    writeFile(file: nodeFile, text: nodeString)
}
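
// Illustrative only (cluster values are hypothetical): for clusterDomain = 'ceph-cluster-domain.local'
// and clusterName = 'ceph-cluster', generateSaltMaster() writes nodes/cfg01.ceph-cluster-domain.local.yml
// containing roughly:
//   classes:
//   - cluster.ceph-cluster.infra.config
//   parameters:
//     _param:
//       linux_system_codename: xenial
//       reclass_data_revision: master
//     linux:
//       system:
//         name: cfg01
//         domain: ceph-cluster-domain.local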

/**
 *
 * @param contextFile    - path to `contexts/XXX.yaml file`
 * @param virtualenv     - pyvenv with CC and dep's
 * @param templateEnvDir - root of CookieCutter
 * @return
 */

def generateModel(contextFile, virtualenv, templateEnvDir) {
    def modelEnv = "${templateEnvDir}/model"
    def basename = common.GetBaseName(contextFile, '.yml')
    def generatedModel = "${modelEnv}/${basename}"
    def content = readFile(file: "${templateEnvDir}/contexts/${contextFile}")
    def templateContext = readYaml text: content
    def clusterDomain = templateContext.default_context.cluster_domain
    def clusterName = templateContext.default_context.cluster_name
    def outputDestination = "${generatedModel}/classes/cluster/${clusterName}"
    def templateBaseDir = templateEnvDir
    def templateDir = "${templateEnvDir}/dir"
    def templateOutputDir = templateBaseDir
    dir(templateEnvDir) {
        sh(script: "rm -rf ${generatedModel} || true")
        common.infoMsg("Generating model from context ${contextFile}")
        def productList = ["infra", "cicd", "opencontrail", "kubernetes", "openstack", "oss", "stacklight", "ceph"]
        for (product in productList) {

            // get templateOutputDir and productDir
            if (product.startsWith("stacklight")) {
                templateOutputDir = "${templateEnvDir}/output/stacklight"
                try {
                    productDir = "stacklight" + templateContext.default_context['stacklight_version']
                } catch (Throwable e) {
                    productDir = "stacklight1"
                }
            } else {
                templateOutputDir = "${templateEnvDir}/output/${product}"
                productDir = product
            }

            if (product == "infra" || (templateContext.default_context["${product}_enabled"]
                && templateContext.default_context["${product}_enabled"].toBoolean())) {

                templateDir = "${templateEnvDir}/cluster_product/${productDir}"
                common.infoMsg("Generating product " + product + " from " + templateDir + " to " + templateOutputDir)

                sh "rm -rf ${templateOutputDir} || true"
                sh "mkdir -p ${templateOutputDir}"
                sh "mkdir -p ${outputDestination}"

                python.buildCookiecutterTemplate(templateDir, content, templateOutputDir, virtualenv, templateBaseDir)
                sh "mv -v ${templateOutputDir}/${clusterName}/* ${outputDestination}"
            } else {
                common.warningMsg("Product " + product + " is disabled")
            }
        }
        generateSaltMaster(generatedModel, clusterDomain, clusterName)
    }
}
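
// Sketch of the per-context output layout produced above (illustrative, assuming a hypothetical
// `ceph.yml` context with cluster_name `ceph-cluster`):
//   model/ceph/
//   ├── classes/cluster/ceph-cluster/      <<< rendered cluster_product templates
//   └── nodes/cfg01.<cluster_domain>.yml   <<< written by generateSaltMaster()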

def getAndUnpackNodesInfoArtifact(jobName, copyTo, build) {
    return {
        dir(copyTo) {
            copyArtifacts(projectName: jobName, selector: specific(build), filter: "nodesinfo.tar.gz")
            sh "tar -xvf nodesinfo.tar.gz"
            sh "rm -v nodesinfo.tar.gz"
        }
    }
}

def testModel(modelFile, reclassArtifactName, artifactCopyPath) {
    // modelFile - `modelFileName` from model/modelFileName/modelFileName.yaml
    // Grab all models and send them for checking in parallel - one per thread.
    def _uuid = "${env.JOB_NAME.toLowerCase()}_${env.BUILD_TAG.toLowerCase()}_${modelFile.toLowerCase()}_" + UUID.randomUUID().toString().take(8)
    def _values_string = """
    ---
    MODELS_TARGZ: "${env.BUILD_URL}/artifact/${reclassArtifactName}"
    DockerCName: "${_uuid}"
    testReclassEnv: "model/${modelFile}/"
    modelFile: "contexts/${modelFile}.yml"
    DISTRIB_REVISION: "${testDistribRevision}"
    EXTRA_FORMULAS: "${extraFormulasList.join(' ')}"
    reclassVersion: "${reclassVersion}"
    """
    def chunkJob = build job: chunkJobName, parameters: [
        [$class: 'TextParameterValue', name: 'EXTRA_VARIABLES_YAML',
         value : _values_string.stripIndent()],
    ]
    // Put sub-job info into the global map.
    testModelBuildsData.put(_uuid, ['jobname'  : chunkJob.fullProjectName,
                                    'copyToDir': "${artifactCopyPath}/${modelFile}",
                                    'buildId'  : "${chunkJob.number}"])
}
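
// Illustrative only (values are hypothetical): for a `ceph` context the EXTRA_VARIABLES_YAML
// passed to the chunk job would resemble:
//   ---
//   MODELS_TARGZ: "<BUILD_URL>/artifact/patched_reclass.tar.gz"
//   DockerCName: "<job>_<build-tag>_ceph_1a2b3c4d"
//   testReclassEnv: "model/ceph/"
//   modelFile: "contexts/ceph.yml"
//   DISTRIB_REVISION: "nightly"
//   EXTRA_FORMULAS: "linux openssh"
//   reclassVersion: "v1.5.4"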

def StepTestModel(basename, reclassArtifactName, artifactCopyPath) {
    // We need to wrap what we return in a Groovy closure, or else it's invoked
    // when this method is called, not when we pass it to parallel.
    // To do this, you need to wrap the code below in { }, and either return
    // that explicitly, or use { -> } syntax.
    // return node object
    return {
        node(slaveNode) {
            testModel(basename, reclassArtifactName, artifactCopyPath)
        }
    }
}

def StepPrepareGit(templateEnvFolder, gerrit_data) {
    // return git clone object
    return {
        def checkouted = false
        common.infoMsg("StepPrepareGit: ${gerrit_data}")
        // fetch needed sources
        dir(templateEnvFolder) {
            if (gerrit_data['gerritRefSpec']) {
                // This part might not work when variables are passed manually
                def gerritChange = gerrit.getGerritChange(gerrit_data['gerritName'], gerrit_data['gerritHost'],
                    gerrit_data['GERRIT_CHANGE_NUMBER'], gerrit_data['credentialsId'])
                merged = gerritChange.status == "MERGED"
                if (!merged) {
                    checkouted = gerrit.gerritPatchsetCheckout(gerrit_data)
                } else {
                    // update the global variable for a pretty return from the pipeline
                    alreadyMerged = true
                    common.successMsg("Change ${gerrit_data['GERRIT_CHANGE_NUMBER']} is already merged, no need to gate it")
                    error('change already merged')
                }
            } else {
                // Get clean HEAD
                gerrit_data['useGerritTriggerBuildChooser'] = false
                checkouted = gerrit.gerritPatchsetCheckout(gerrit_data)
                if (!checkouted) {
                    error("Failed to get repo:${gerrit_data}")
                }
            }
        }
    }
}

def StepGenerateModels(_contextFileList, _virtualenv, _templateEnvDir) {
    return {
        for (contextFile in _contextFileList) {
            generateModel(contextFile, _virtualenv, _templateEnvDir)
        }
    }
}

def globalVariatorsUpdate() {
    // Simple function to check and define branch-related variables.
    // In general, simply make transition updates for a non-master branch
    // based on magic logic.
    def message = ''
    if (env.GERRIT_PROJECT) {
        // TODO are we going to have such branches?
        if (!['nightly', 'testing', 'stable', 'proposed', 'master'].contains(env.GERRIT_BRANCH)) {
            gerritDataCC['gerritBranch'] = env.GERRIT_BRANCH
            gerritDataRS['gerritBranch'] = env.GERRIT_BRANCH
            // 'binary' branch logic w\o 'release/' prefix
            testDistribRevision = env.GERRIT_BRANCH.split('/')[-1]
            // FIXME: ugly hack for versioning tests.
            // Leave it here, so it doesn't fail after release.
            if (testDistribRevision == '2018.8.1') {
                if (extraFormulasList.remove('openscap')) {
                    common.infoMsg('Removing openscap from tests extraFormulasList !')
                }
            }
            // Check if we are going to test a bleeding-edge release, which doesn't have a binary release yet
            if (!common.checkRemoteBinary([apt_mk_version: testDistribRevision]).linux_system_repo_url) {
                common.errorMsg("Binary release: ${testDistribRevision} does not exist. Falling back to 'proposed'!")
                testDistribRevision = 'proposed'
            }
        }
        // Identify who triggered the build, and decide which project should receive the refspec
        if (env.GERRIT_PROJECT == 'salt-models/reclass-system') {
            gerritDataRS['gerritRefSpec'] = env.GERRIT_REFSPEC
            gerritDataRS['GERRIT_CHANGE_NUMBER'] = env.GERRIT_CHANGE_NUMBER
            message = "<br/>RECLASS_SYSTEM_GIT_REF => ${gerritDataRS['gerritRefSpec']}"
        } else if (env.GERRIT_PROJECT == 'mk/cookiecutter-templates') {
            gerritDataCC['gerritRefSpec'] = env.GERRIT_REFSPEC
            gerritDataCC['GERRIT_CHANGE_NUMBER'] = env.GERRIT_CHANGE_NUMBER
            message = "<br/>COOKIECUTTER_TEMPLATE_REF => ${gerritDataCC['gerritRefSpec']}"
        } else {
            error("Unsupported gerrit-project triggered:${env.GERRIT_PROJECT}")
        }

        message = "<font color='red'>GerritTrigger detected! We are in auto-mode:</font>" +
            "<br/>Test env variables have been changed:" +
            "<br/>COOKIECUTTER_TEMPLATE_BRANCH => ${gerritDataCC['gerritBranch']}" +
            "<br/>DISTRIB_REVISION => ${testDistribRevision}" +
            "<br/>RECLASS_MODEL_BRANCH => ${gerritDataRS['gerritBranch']}" + message
        common.warningMsg(message)
        currentBuild.description = currentBuild.description ? message + "<br/>" + currentBuild.description : message
    } else {
        // Check for passed variables:
        if (env.RECLASS_SYSTEM_GIT_REF) {
            gerritDataRS['gerritRefSpec'] = RECLASS_SYSTEM_GIT_REF
        }
        if (env.COOKIECUTTER_TEMPLATE_REF) {
            gerritDataCC['gerritRefSpec'] = COOKIECUTTER_TEMPLATE_REF
        }
        message = "<font color='red'>Manual run detected!</font>" + "<br/>"
        currentBuild.description = currentBuild.description ? message + "<br/>" + currentBuild.description : message
    }
}

def replaceGeneratedValues(path) {
    def files = sh(script: "find ${path} -name 'secrets.yml'", returnStdout: true)
    def stepsForParallel = [:]
    stepsForParallel.failFast = true
    files.tokenize().each {
        stepsForParallel.put("Removing generated passwords/secrets from ${it}",
            {
                def secrets = readYaml file: it
                for (String key in secrets['parameters']['_param'].keySet()) {
                    secrets['parameters']['_param'][key] = 'generated'
                }
                // writeYaml can't write to an already existing file
                writeYaml file: "${it}.tmp", data: secrets
                sh "mv ${it}.tmp ${it}"
            })
    }
    parallel stepsForParallel
}
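
// Illustrative only (the key name is hypothetical): a secrets.yml fragment such as
//   parameters:
//     _param:
//       keepalived_vip_password: "<randomly generated value>"
// becomes
//   parameters:
//     _param:
//       keepalived_vip_password: generated
// so freshly generated passwords/keys do not end up in the archived reclass artifacts.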

def linkReclassModels(contextList, envPath, archiveName) {
    // to be able to share reclass between all sub-envs
    // Also makes the artifact test more solid - use one reclass for all sub-models.
    // Archive structure will be:
    // tar.gz
    // ├── contexts
    // │   └── ceph.yml
    // ├── global_reclass <<< reclass system
    // ├── model
    // │   └── ceph <<< from `context basename`
    // │       ├── classes
    // │       │   ├── cluster
    // │       │   └── system -> ../../../global_reclass
    // │       └── nodes
    // │           └── cfg01.ceph-cluster-domain.local.yml
    dir(envPath) {
        for (String context : contextList) {
            def basename = common.GetBaseName(context, '.yml')
            dir("${envPath}/model/${basename}") {
                sh(script: 'mkdir -p classes/; ln -sfv ../../../../global_reclass classes/system ')
            }
        }
        // replace all generated passwords/secrets/keys with a hardcoded value in infra/secrets.yml
        replaceGeneratedValues("${envPath}/model")
        // Save all models and all contexts. Warning! The `h` flag must be used to dereference symlinks.
        sh(script: "set -ex; tar -chzf ${archiveName} --exclude='*@tmp' model contexts", returnStatus: true)
        archiveArtifacts artifacts: archiveName
        // move for the "Compare Pillars" stage
        sh(script: "mv -v ${archiveName} ${env.WORKSPACE}")
    }
}

timeout(time: 1, unit: 'HOURS') {
    node(slaveNode) {
        globalVariatorsUpdate()
        def gerritDataCCHEAD = [:]
        def templateEnvHead = "${env.WORKSPACE}/EnvHead/"
        def templateEnvPatched = "${env.WORKSPACE}/EnvPatched/"
        def contextFileListHead = []
        def contextFileListPatched = []
        def vEnv = "${env.WORKSPACE}/venv"
        def headReclassArtifactName = "head_reclass.tar.gz"
        def patchedReclassArtifactName = "patched_reclass.tar.gz"
        def reclassNodeInfoDir = "${env.WORKSPACE}/reclassNodeInfo_compare/"
        def reclassInfoHeadPath = "${reclassNodeInfoDir}/old"
        def reclassInfoPatchedPath = "${reclassNodeInfoDir}/new"
        try {
            sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
            stage('Download and prepare CC env') {
                // Prepare 2 envs - one for the patchset and one for HEAD
                def paralellEnvs = [:]
                paralellEnvs.failFast = true
                paralellEnvs['downloadEnvPatched'] = StepPrepareGit(templateEnvPatched, gerritDataCC)
                gerritDataCCHEAD << gerritDataCC
                gerritDataCCHEAD['gerritRefSpec'] = null; gerritDataCCHEAD['GERRIT_CHANGE_NUMBER'] = null
                paralellEnvs['downloadEnvHead'] = StepPrepareGit(templateEnvHead, gerritDataCCHEAD)
                parallel paralellEnvs
            }
            stage("Check workflow_definition") {
                // Check only for the patchset
                python.setupVirtualenv(vEnv, 'python2', [], "${templateEnvPatched}/requirements.txt")
                common.infoMsg(python.runVirtualenvCommand(vEnv, "python ${templateEnvPatched}/workflow_definition_test.py"))
            }

            stage("generate models") {
                dir("${templateEnvHead}/contexts") {
                    for (String x : findFiles(glob: "*.yml")) {
                        contextFileListHead.add(x)
                    }
                }
                dir("${templateEnvPatched}/contexts") {
                    for (String x : findFiles(glob: "*.yml")) {
                        contextFileListPatched.add(x)
                    }
                }
                // Generate for both envs - patchset and HEAD
                def paralellEnvs = [:]
                paralellEnvs.failFast = true
                paralellEnvs['GenerateEnvPatched'] = StepGenerateModels(contextFileListPatched, vEnv, templateEnvPatched)
                paralellEnvs['GenerateEnvHead'] = StepGenerateModels(contextFileListHead, vEnv, templateEnvHead)
                parallel paralellEnvs

                // Collect artifacts
                dir(templateEnvPatched) {
                    // Collect only models. For backward compatibility - who knows, probably someone still uses it..
                    sh(script: "tar -czf model.tar.gz -C model ../contexts .", returnStatus: true)
                    archiveArtifacts artifacts: "model.tar.gz"
                }

                StepPrepareGit("${env.WORKSPACE}/global_reclass/", gerritDataRS).call()
                // link all models to use one global reclass
                // For HEAD
                linkReclassModels(contextFileListHead, templateEnvHead, headReclassArtifactName)
                // For patched
                linkReclassModels(contextFileListPatched, templateEnvPatched, patchedReclassArtifactName)
            }

            stage("Compare cluster lvl Head/Patched") {
                // Compare patched and HEAD reclass pillars
                compareRoot = "${env.WORKSPACE}/cluster_compare/"
                sh(script: """
                    mkdir -pv ${compareRoot}/new ${compareRoot}/old
                    tar -xzf ${patchedReclassArtifactName} --directory ${compareRoot}/new
                    tar -xzf ${headReclassArtifactName} --directory ${compareRoot}/old
                    """)
                common.warningMsg('infra/secrets.yml has been excluded from the compare!')
                result = '\n' + common.comparePillars(compareRoot, env.BUILD_URL, "-Ev \'infra/secrets.yml\'")
                currentBuild.description = currentBuild.description ? currentBuild.description + result : result
            }
            stage("TestContexts Head/Patched") {
                def stepsForParallel = [:]
                stepsForParallel.failFast = true
                common.infoMsg("Found: ${contextFileListHead.size()} HEAD contexts to test.")
                for (String context : contextFileListHead) {
                    def basename = common.GetBaseName(context, '.yml')
                    stepsForParallel.put("ContextHeadTest:${basename}", StepTestModel(basename, headReclassArtifactName, reclassInfoHeadPath))
                }
                common.infoMsg("Found: ${contextFileListPatched.size()} patched contexts to test.")
                for (String context : contextFileListPatched) {
                    def basename = common.GetBaseName(context, '.yml')
                    stepsForParallel.put("ContextPatchedTest:${basename}", StepTestModel(basename, patchedReclassArtifactName, reclassInfoPatchedPath))
                }
                parallel stepsForParallel
                common.infoMsg('All TestContexts tests done')
            }
            stage("Compare NodesInfo Head/Patched") {
                // Download all artifacts
                def stepsForParallel = [:]
                stepsForParallel.failFast = true
                common.infoMsg("Found: ${testModelBuildsData.size()} nodeinfo artifacts to download.")
                testModelBuildsData.each { bname, bdata ->
                    stepsForParallel.put("FetchData:${bname}",
                        getAndUnpackNodesInfoArtifact(bdata.jobname, bdata.copyToDir, bdata.buildId))
                }
                parallel stepsForParallel
                // remove timestamp field from rendered files
                sh("find ${reclassNodeInfoDir} -type f -exec sed -i '/ timestamp: .*/d' {} \\;")
                // Compare patched and HEAD reclass pillars
                result = '\n' + common.comparePillars(reclassNodeInfoDir, env.BUILD_URL, '')
                currentBuild.description = currentBuild.description ? currentBuild.description + result : result
            }
            sh(script: 'find . -mindepth 1 -delete > /dev/null || true')

        } catch (Throwable e) {
            if (alreadyMerged) {
                currentBuild.result = 'ABORTED'
                currentBuild.description = "Change ${GERRIT_CHANGE_NUMBER} is already merged, no need to gate it"
                return
            }
            currentBuild.result = "FAILURE"
            currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
            throw e
        } finally {
            def dummy = "dummy"
        }
    }
}