/*
Can be triggered from Gerrit in the following modes:

1) Manual run via job-build; it is possible to pass a refspec.
   TODO: currently impossible to use custom COOKIECUTTER_TEMPLATE_URL | RECLASS_SYSTEM_URL; the Gerrit one is always used.
   - for CC
   - for Reclass

2) Gerrit trigger.
   Automatically switches to this mode if the GERRIT_PROJECT variable is detected.
   Always tests GERRIT_REFSPEC VS the GERRIT_BRANCH-master version of the opposite project.
 */

import groovy.json.JsonOutput

// Shared pipeline libraries
common = new com.mirantis.mk.Common()
mcpCommon = new com.mirantis.mcp.Common()
gerrit = new com.mirantis.mk.Gerrit()
python = new com.mirantis.mk.Python()

// Optional job-level YAML overrides. Guard with ?: '' first: env.X is null
// when the variable is not set, and calling .trim() on null would throw an NPE.
extraVarsYAML = (env.EXTRA_VARIABLES_YAML ?: '').trim()
if (extraVarsYAML) {
    common.mergeEnv(env, extraVarsYAML)
    extraVars = readYaml text: extraVarsYAML
} else {
    extraVars = [:]
}

slaveNode = env.SLAVE_NODE ?: 'virtual'
checkIncludeOrder = env.CHECK_INCLUDE_ORDER ?: false

// Global var's
// Common Gerrit connection data, used as a base for both repos below.
gerritConData = [credentialsId       : env.CREDENTIALS_ID,
                 gerritName          : env.GERRIT_NAME ?: 'mcp-jenkins',
                 gerritHost          : env.GERRIT_HOST ?: 'gerrit.mcp.mirantis.com',
                 gerritScheme        : env.GERRIT_SCHEME ?: 'ssh',
                 gerritPort          : env.GERRIT_PORT ?: '29418',
                 gerritRefSpec       : null,
                 gerritProject       : null,
                 withWipeOut         : true,
                 GERRIT_CHANGE_NUMBER: null]
//
//ccTemplatesRepo = env.COOKIECUTTER_TEMPLATE_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.com:29418/mk/cookiecutter-templates'
// Checkout data for cookiecutter-templates: HEAD and (optionally) patched refspec.
gerritDataCCHEAD = [:]
gerritDataCC = [:]
gerritDataCC << gerritConData
gerritDataCC['gerritBranch'] = env.COOKIECUTTER_TEMPLATE_BRANCH ?: 'master'
gerritDataCC['gerritRefSpec'] = env.COOKIECUTTER_TEMPLATE_REF ?: null
gerritDataCC['gerritProject'] = 'mk/cookiecutter-templates'
//
//reclassSystemRepo = env.RECLASS_SYSTEM_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.com:29418/salt-models/reclass-system'
// Checkout data for reclass-system: HEAD and (optionally) patched refspec.
gerritDataRSHEAD = [:]
gerritDataRS = [:]
gerritDataRS << gerritConData
gerritDataRS['gerritBranch'] = env.RECLASS_SYSTEM_BRANCH ?: 'master'
gerritDataRS['gerritRefSpec'] = env.RECLASS_SYSTEM_GIT_REF ?: null
gerritDataRS['gerritProject'] = 'salt-models/reclass-system'

// version of debRepos, aka formulas|reclass|ubuntu
testDistribRevision = env.DISTRIB_REVISION ?: 'nightly'
updatesVersion = ''

// Name of sub-test chunk job
chunkJobName = "test-mk-cookiecutter-templates-chunk"
// uuid -> [jobname, copyToDir, buildId] of spawned chunk builds; filled by testModel().
testModelBuildsData = [:]
Vasyl Saienko772e1232018-07-23 14:42:24 +030067
def getAndUnpackNodesInfoArtifact(jobName, copyTo, build) {
    // Build a closure (for `parallel`) that downloads the 'nodesinfo.tar.gz'
    // artifact of the given sub-job build into `copyTo` and unpacks it there.
    return {
        dir(copyTo) {
            def archive = 'nodesinfo.tar.gz'
            copyArtifacts(projectName: jobName, selector: specific(build), filter: archive)
            sh("tar -xf ${archive}")
            sh("rm -v ${archive}")
        }
    }
}
azvyagintsev87985532018-07-10 20:49:38 +030077
def testModel(modelFile, reclassArtifactName, artifactCopyPath, useExtraRepos = false) {
    // modelFile is `modelfiname` from model/modelfiname/modelfiname.yaml.
    // Spawns one chunk sub-job that renders and checks this single model;
    // chunk jobs are launched in parallel, one model per thread.
    def uniqueId = "${env.JOB_NAME.toLowerCase()}_${env.BUILD_TAG.toLowerCase()}_${modelFile.toLowerCase()}_" + UUID.randomUUID().toString().take(8)
    def chunkParams = [
        MODELS_TARGZ    : "${env.BUILD_URL}/artifact/${reclassArtifactName}",
        DockerCName     : uniqueId,
        testReclassEnv  : "model/${modelFile}/",
        modelFile       : "contexts/${modelFile}.yml",
        DISTRIB_REVISION: testDistribRevision,
        useExtraRepos   : useExtraRepos,
        updatesVersion  : updatesVersion,
    ]
    // Job-level extraVars overrides win over the computed defaults.
    def payload = JsonOutput.toJson(chunkParams << extraVars)
    def chunkJob = build(job: chunkJobName, parameters: [
        [$class: 'TextParameterValue', name: 'EXTRA_VARIABLES_YAML',
         value : payload.stripIndent()],
    ])
    // Record where the compare stage should later fetch artifacts from.
    testModelBuildsData[uniqueId] = ['jobname'  : chunkJob.fullProjectName,
                                     'copyToDir': "${artifactCopyPath}/${modelFile}",
                                     'buildId'  : "${chunkJob.number}"]
}
101
def StepTestModel(_basename, _reclassArtifactName, _artifactCopyPath, _useExtraRepos = false) {
    // Deferred-execution wrapper: returns a closure suitable for `parallel`.
    // Wrapping in { } is required — otherwise the work would run when this
    // method is called, not when the closure is invoked by `parallel`.
    // The closure allocates a slave node and runs testModel() on it.
    def runner = {
        node(slaveNode) {
            testModel(_basename, _reclassArtifactName, _artifactCopyPath, _useExtraRepos)
        }
    }
    return runner
}
114
def StepPrepareGit(templateEnvFolder, gerrit_data) {
    // Returns a closure that checks the requested Gerrit repo out
    // into `templateEnvFolder`, either a patchset or a clean branch HEAD.
    return {
        common.infoMsg("StepPrepareGit: ${gerrit_data}")
        dir(templateEnvFolder) {
            if (!gerrit_data['gerritRefSpec']) {
                // No patchset requested -> fetch a clean HEAD of the branch.
                gerrit_data['useGerritTriggerBuildChooser'] = false
            }
            if (!gerrit.gerritPatchsetCheckout(gerrit_data)) {
                error("Failed to get repo:${gerrit_data}")
            }
        }
    }
}
132
def StepGenerateModels(_contextFileList, _virtualenv, _templateEnvDir) {
    // Returns a closure that renders a reclass model for every context file
    // in `_contextFileList` from the cookiecutter templates in `_templateEnvDir`.
    return {
        // tox.ini presence distinguishes the new tox-based cookiecutter env
        // from the legacy virtualenv-based one. It cannot change while the
        // loop runs, so evaluate it once instead of once per context.
        def toxBased = fileExists(new File(_templateEnvDir, 'tox.ini').toString())
        if (toxBased) {
            // Merge contexts for nice base.yml based diff
            dir(_templateEnvDir) {
                sh('tox -ve merge_contexts')
            }
        }
        for (contextFile in _contextFileList) {
            def basename = common.GetBaseName(contextFile, '.yml')
            def contextYaml = readYaml text: readFile(file: "${_templateEnvDir}/contexts/${contextFile}")
            // secrets_encryption overcomplicated for expected 'fast syntax tests'
            // So, lets disable it. It would be tested only in generate-cookiecutter-products.groovy pipeline
            if (contextYaml['default_context'].get('secrets_encryption_enabled')) {
                common.warningMsg('Disabling secrets_encryption_enabled for tests!')
                contextYaml['default_context']['secrets_encryption_enabled'] = 'False'
            }

            // disabling strong_usernames for tests to reduce diff between head and patched model
            common.warningMsg('Disabling strong_usernames for tests!')
            contextYaml['default_context']['strong_usernames'] = 'False'

            def context = mcpCommon.dumpYAML(contextYaml)
            if (!toxBased) {
                // Legacy env: generate straight into the final model dir.
                common.warningMsg('Forming NEW reclass-root structure...')
                python.generateModel(context, basename, 'cfg01', _virtualenv, "${_templateEnvDir}/model", _templateEnvDir)
            } else {
                // tox-based CC generated structure of reclass,from the root. Otherwise for bw compat, modelEnv
                // still expect only lower lvl of project, aka model/classes/cluster/XXX/. So,lets dump result into
                // temp dir, and then copy it over initial structure.
                def reclassTempRootDir = sh(script: "mktemp -d -p ${env.WORKSPACE}", returnStdout: true).trim()
                python.generateModel(context, basename, 'cfg01', _virtualenv, reclassTempRootDir, _templateEnvDir)
                dir("${_templateEnvDir}/model/${basename}/") {
                    if (fileExists(new File(reclassTempRootDir, 'reclass').toString())) {
                        common.warningMsg('Forming NEW reclass-root structure...')
                        sh("cp -ra ${reclassTempRootDir}/reclass/* .")
                    } else {
                        // those hack needed only for period release/2019.2.0 => current patch.
                        common.warningMsg('Forming OLD reclass-root structure...')
                        sh("mkdir -p classes/cluster/ ; cd classes/cluster/; cp -ra ${reclassTempRootDir}/* .")
                    }
                }
            }
        }
    }
}
179
def globalVariatorsUpdate() {
    // Simple function, to check and define branch-around variables.
    // In general, simply make transition updates for non-master branch
    // based on magic logic, and record every decision in the build description.
    def newline = '<br/>'
    def messages = []
    if (env.GERRIT_PROJECT) {
        messages.add("<font color='red'>GerritTrigger detected! We are in auto-mode:</font>")
        messages.add("Test env variables has been changed:")
        // TODO are we going to have such branches?
        if (!['nightly', 'testing', 'stable', 'proposed', 'master'].contains(env.GERRIT_BRANCH)) {
            // Custom (e.g. release/*) branch: test both repos and binaries from it.
            gerritDataCC['gerritBranch'] = env.GERRIT_BRANCH
            gerritDataRS['gerritBranch'] = env.GERRIT_BRANCH
            testDistribRevision = env.GERRIT_BRANCH
        }
        messages.add("COOKIECUTTER_TEMPLATE_BRANCH => ${gerritDataCC['gerritBranch']}")
        messages.add("RECLASS_SYSTEM_BRANCH => ${gerritDataRS['gerritBranch']}")
        // Identify, who triggered. To whom we should pass refspec
        if (env.GERRIT_PROJECT == 'salt-models/reclass-system') {
            gerritDataRS['gerritRefSpec'] = env.GERRIT_REFSPEC
            gerritDataRS['GERRIT_CHANGE_NUMBER'] = env.GERRIT_CHANGE_NUMBER
            messages.add("RECLASS_SYSTEM_GIT_REF => ${gerritDataRS['gerritRefSpec']}")
        } else if (env.GERRIT_PROJECT == 'mk/cookiecutter-templates') {
            gerritDataCC['gerritRefSpec'] = env.GERRIT_REFSPEC
            gerritDataCC['GERRIT_CHANGE_NUMBER'] = env.GERRIT_CHANGE_NUMBER
            messages.add("COOKIECUTTER_TEMPLATE_REF => ${gerritDataCC['gerritRefSpec']}")
        } else {
            error("Unsuported gerrit-project triggered:${env.GERRIT_PROJECT}")
        }
    } else {
        messages.add("<font color='red'>Non-gerrit trigger run detected!</font>")
    }
    // HEAD checkout data mirrors the patched data, minus the refspec.
    gerritDataCCHEAD << gerritDataCC
    gerritDataCCHEAD['gerritRefSpec'] = null
    gerritDataCCHEAD['GERRIT_CHANGE_NUMBER'] = null
    gerritDataRSHEAD << gerritDataRS
    gerritDataRSHEAD['gerritRefSpec'] = null
    gerritDataRSHEAD['GERRIT_CHANGE_NUMBER'] = null
    // check for test XXX vs RELEASE branch, to get correct formulas
    // Both checks use the 'release/' prefix: the bare substring 'release'
    // previously used for RS would also match any branch merely containing
    // the word, which contradicts the prefix-split logic below.
    if (gerritDataCC['gerritBranch'].contains('release/')) {
        testDistribRevision = gerritDataCC['gerritBranch']
    } else if (gerritDataRS['gerritBranch'].contains('release/')) {
        testDistribRevision = gerritDataRS['gerritBranch']
    }
    // 'binary' branch logic w\o 'release/' prefix
    if (testDistribRevision.contains('/')) {
        if (testDistribRevision.contains('proposed')) {
            updatesVersion = 'proposed'
        }
        testDistribRevision = testDistribRevision.split('/')[-1]
    }
    // Check if we are going to test bleeding-edge release, which doesn't have binary release yet
    // After 2018q4 releases, need to also check 'static' repo, for example ubuntu.
    binTest = common.checkRemoteBinary(['mcp_version': testDistribRevision])
    if (!binTest.linux_system_repo_url || !binTest.linux_system_repo_ubuntu_url) {
        common.errorMsg("Binary release: ${testDistribRevision} not exist or not full. Fallback to 'proposed'! ")
        testDistribRevision = 'proposed'
    }
    messages.add("DISTRIB_REVISION => ${testDistribRevision}")
    def message = messages.join(newline) + newline
    currentBuild.description = currentBuild.description ? message + currentBuild.description : message
}
azvyagintsev9df82e52018-09-06 19:17:18 +0300242
def replaceGeneratedValues(path) {
    // Overwrite every value in parameters._param of each secrets.yml under
    // `path` with the literal 'generated', so artifacts diff cleanly.
    def secretFiles = sh(script: "find ${path} -name 'secrets.yml'", returnStdout: true)
    def tasks = [:]
    tasks.failFast = true
    secretFiles.tokenize().each { fname ->
        tasks["Removing generated passwords/secrets from ${fname}"] = {
            def data = readYaml file: fname
            data['parameters']['_param'].keySet().each { paramKey ->
                data['parameters']['_param'][paramKey] = 'generated'
            }
            // writeYaml can't write to already existing file
            writeYaml file: "${fname}.tmp", data: data
            sh "mv ${fname}.tmp ${fname}"
        }
    }
    parallel tasks
}
261
def linkReclassModels(contextList, envPath, archiveName) {
    // to be able share reclass for all subenvs
    // Also, makes artifact test more solid - use one reclass for all of sub-models.
    // Archive Structure will be:
    // tar.gz
    // ├── contexts
    // │   └── ceph.yml
    // ├── classes-system <<< reclass system
    // ├── model
    // │   └── ceph <<< from `context basename`
    // │       ├── classes
    // │       │   ├── cluster
    // │       │   └── system -> ../../../classes-system
    // │       └── nodes
    // │           └── cfg01.ceph-cluster-domain.local.yml
    def archiveBaseName = common.GetBaseName(archiveName, '.tar.gz')
    def classesSystemDir = 'classes-system'
    // copy reclass system under envPath with -R and trailing / to support symlinks direct copy
    sh("cp -R ${archiveBaseName}/ ${envPath}/${classesSystemDir}")
    dir(envPath) {
        // Point every model's classes/system at the single shared classes-system copy.
        for (String _context : contextList) {
            def basename = common.GetBaseName(_context, '.yml')
            dir("${envPath}/model/${basename}/classes") {
                sh(script: "ln -sfv ../../../${classesSystemDir} system ")
            }
        }
        // replace all generated passwords/secrets/keys with hardcode value for infra/secrets.yaml
        replaceGeneratedValues("${envPath}/model")
        // Save all models and all contexts. Warning! `h` flag must be used!
        // NOTE(review): returnStatus: true discards the tar exit code, so a
        // failed archive creation would go unnoticed here — confirm intentional.
        sh(script: "set -ex; tar -czhf ${env.WORKSPACE}/${archiveName} --exclude='*@tmp' contexts model ${classesSystemDir}", returnStatus: true)
    }
    archiveArtifacts artifacts: archiveName
}
295
// Main pipeline body: prepare HEAD and patched cookiecutter envs, generate
// models from both, archive them with a shared reclass-system, then compare
// cluster-level pillars and rendered node info between HEAD and patched.
timeout(time: 1, unit: 'HOURS') {
    node(slaveNode) {
        globalVariatorsUpdate()
        def templateEnvHead = "${env.WORKSPACE}/EnvHead/"
        def templateEnvPatched = "${env.WORKSPACE}/EnvPatched/"
        def contextFileListHead = []
        def contextFileListPatched = []
        def vEnv = "${env.WORKSPACE}/venv"
        def headReclassArtifactName = "head_reclass.tar.gz"
        def patchedReclassArtifactName = "patched_reclass.tar.gz"
        def reclassNodeInfoDir = "${env.WORKSPACE}/reclassNodeInfo_compare/"
        def reclassInfoHeadPath = "${reclassNodeInfoDir}/old"
        def reclassInfoPatchedPath = "${reclassNodeInfoDir}/new"
        try {
            // Start from a clean workspace.
            sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
            stage('Download and prepare CC env') {
                // Prepare 2 env - for patchset, and for HEAD
                def paralellEnvs = [:]
                paralellEnvs.failFast = true
                paralellEnvs['downloadEnvHead'] = StepPrepareGit(templateEnvHead, gerritDataCCHEAD)
                if (gerritDataCC.get('gerritRefSpec', null)) {
                    paralellEnvs['downloadEnvPatched'] = StepPrepareGit(templateEnvPatched, gerritDataCC)
                    parallel paralellEnvs
                } else {
                    // No CC patchset: reuse HEAD checkout for the patched env.
                    paralellEnvs['downloadEnvPatched'] = { common.warningMsg('No need to process: downloadEnvPatched') }
                    parallel paralellEnvs
                    sh("rsync -a --exclude '*@tmp' ${templateEnvHead} ${templateEnvPatched}")
                }
                if (env.CUSTOM_COOKIECUTTER_CONTEXT) {
                    // readYaml to check custom context structure
                    def customContext = readYaml text: env.CUSTOM_COOKIECUTTER_CONTEXT
                    writeYaml file: "${templateEnvHead}/contexts/custom_context.yml", data: customContext
                    writeYaml file: "${templateEnvPatched}/contexts/custom_context.yml", data: customContext
                    common.infoMsg("Using custom context provided from job parameter 'CUSTOM_COOKIECUTTER_CONTEXT'")
                }
            }
            stage('Check workflow_definition') {
                // Prepare venv for old env's, aka non-tox based
                if (!fileExists(new File(templateEnvPatched, 'tox.ini').toString()) || !fileExists(new File(templateEnvHead, 'tox.ini').toString())) {
                    python.setupVirtualenv(vEnv, 'python2', [], "${templateEnvPatched}/requirements.txt")
                }
                // Check only for patchset
                if (fileExists(new File(templateEnvPatched, 'tox.ini').toString())) {
                    dir(templateEnvPatched) {
                        output = sh(returnStdout: true, script: "tox -ve test")
                        common.infoMsg("[Cookiecutter test] Result: ${output}")
                    }

                } else {
                    common.warningMsg('Old Cookiecutter env detected!')
                    common.infoMsg(python.runVirtualenvCommand(vEnv, "python ${templateEnvPatched}/workflow_definition_test.py"))
                }
            }

            stage('generate models') {
                // Collect the context file names for each env.
                dir("${templateEnvHead}/contexts") {
                    for (String x : findFiles(glob: "*.yml")) {
                        contextFileListHead.add(x)
                    }
                }
                dir("${templateEnvPatched}/contexts") {
                    for (String x : findFiles(glob: "*.yml")) {
                        contextFileListPatched.add(x)
                    }
                }
                // Generate over 2env's - for patchset, and for HEAD
                def paralellEnvs = [:]
                paralellEnvs.failFast = true
                paralellEnvs['GenerateEnvHead'] = StepGenerateModels(contextFileListHead, vEnv, templateEnvHead)
                if (gerritDataCC.get('gerritRefSpec', null)) {
                    paralellEnvs['GenerateEnvPatched'] = StepGenerateModels(contextFileListPatched, vEnv, templateEnvPatched)
                    parallel paralellEnvs
                } else {
                    // No CC patchset: reuse HEAD generation output for the patched env.
                    paralellEnvs['GenerateEnvPatched'] = { common.warningMsg('No need to process: GenerateEnvPatched') }
                    parallel paralellEnvs
                    sh("rsync -a --exclude '*@tmp' ${templateEnvHead} ${templateEnvPatched}")
                }

                // We need 2 git's, one for HEAD, one for PATCHed.
                // if no patch, use head for both
                RSHeadDir = common.GetBaseName(headReclassArtifactName, '.tar.gz')
                RSPatchedDir = common.GetBaseName(patchedReclassArtifactName, '.tar.gz')
                common.infoMsg("gerritDataRS= ${gerritDataRS}")
                common.infoMsg("gerritDataRSHEAD= ${gerritDataRSHEAD}")
                if (gerritDataRS.get('gerritRefSpec', null)) {
                    StepPrepareGit("${env.WORKSPACE}/${RSPatchedDir}/", gerritDataRS).call()
                    StepPrepareGit("${env.WORKSPACE}/${RSHeadDir}/", gerritDataRSHEAD).call()
                } else {
                    StepPrepareGit("${env.WORKSPACE}/${RSHeadDir}/", gerritDataRS).call()
                    sh("cd ${env.WORKSPACE} ; ln -svf ${RSHeadDir} ${RSPatchedDir}")
                }
                // link all models, to use one global reclass
                // For HEAD
                linkReclassModels(contextFileListHead, templateEnvHead, headReclassArtifactName)
                // For patched
                linkReclassModels(contextFileListPatched, templateEnvPatched, patchedReclassArtifactName)
            }

            stage("Compare cluster lvl Head/Patched") {
                // Compare patched and HEAD reclass pillars
                compareRoot = "${env.WORKSPACE}/cluster_compare/"
                // extract archive and drop all copied classes/system before comparing
                sh(script: """
                    mkdir -pv ${compareRoot}/new ${compareRoot}/old
                    tar -xzf ${patchedReclassArtifactName} --directory ${compareRoot}/new
                    tar -xzf ${headReclassArtifactName} --directory ${compareRoot}/old
                    find ${compareRoot} -name classes -type d -exec rm -rf '{}/system' \\;
                    """)
                common.warningMsg('infra/secrets.yml has been skipped from compare!')
                result = '\n' + common.comparePillars(compareRoot, env.BUILD_URL, "-Ev \'infra/secrets.yml|\\.git\'")
                currentBuild.description = currentBuild.description ? currentBuild.description + result : result
            }
            stage('TestContexts Head/Patched') {
                // Spawn one chunk sub-job per context, for both HEAD and patched sets.
                def stepsForParallel = [:]
                stepsForParallel.failFast = true
                common.infoMsg("Found: ${contextFileListHead.size()} HEAD contexts to test.")
                for (String context : contextFileListHead) {
                    def basename = common.GetBaseName(context, '.yml')
                    stepsForParallel.put("ContextHeadTest:${basename}", StepTestModel(basename, headReclassArtifactName, reclassInfoHeadPath))
                }
                common.infoMsg("Found: ${contextFileListPatched.size()} patched contexts to test.")
                for (String context : contextFileListPatched) {
                    def basename = common.GetBaseName(context, '.yml')
                    stepsForParallel.put("ContextPatchedTest:${basename}", StepTestModel(basename, patchedReclassArtifactName, reclassInfoPatchedPath, true))
                }
                parallel stepsForParallel
                common.infoMsg('All TestContexts tests done')
            }
            stage('Compare NodesInfo Head/Patched') {
                // Download all artifacts
                def stepsForParallel = [:]
                stepsForParallel.failFast = true
                common.infoMsg("Found: ${testModelBuildsData.size()} nodeinfo artifacts to download.")
                testModelBuildsData.each { bname, bdata ->
                    stepsForParallel.put("FetchData:${bname}",
                        getAndUnpackNodesInfoArtifact(bdata.jobname, bdata.copyToDir, bdata.buildId))
                }
                parallel stepsForParallel
                // remove timestamp field from rendered files
                sh("find ${reclassNodeInfoDir} -type f -exec sed -i '/ timestamp: .*/d' {} \\;")
                // Compare patched and HEAD reclass pillars
                result = '\n' + common.comparePillars(reclassNodeInfoDir, env.BUILD_URL, '')
                currentBuild.description = currentBuild.description ? currentBuild.description + result : result
            }
            stage('Check include order') {
                if (!checkIncludeOrder) {
                    common.infoMsg('Check include order require to much time, and currently disabled!')

                } else {
                    // Classes in each rendered nodeinfo must be included in
                    // service -> system -> cluster order.
                    def correctIncludeOrder = ["service", "system", "cluster"]
                    dir(reclassInfoPatchedPath) {
                        def nodeInfoFiles = findFiles(glob: "**/*.reclass.nodeinfo")
                        def messages = ["<b>Wrong include ordering found</b><ul>"]
                        def stepsForParallel = [:]
                        nodeInfoFiles.each { nodeInfo ->
                            stepsForParallel.put("Checking ${nodeInfo.path}:", {
                                def node = readYaml file: nodeInfo.path
                                def classes = node['classes']
                                def curClassID = 0
                                def prevClassID = 0
                                def wrongOrder = false
                                for (String className in classes) {
                                    def currentClass = className.tokenize('.')[0]
                                    curClassID = correctIncludeOrder.indexOf(currentClass)
                                    if (currentClass != correctIncludeOrder[prevClassID]) {
                                        if (prevClassID > curClassID) {
                                            wrongOrder = true
                                            common.warningMsg("File ${nodeInfo.path} contains wrong order of classes including: Includes for ${className} should be declared before ${correctIncludeOrder[prevClassID]} includes")
                                        } else {
                                            prevClassID = curClassID
                                        }
                                    }
                                }
                                if (wrongOrder) {
                                    messages.add("<li>${nodeInfo.path} contains wrong order of classes including</li>")
                                }
                            })
                        }
                        parallel stepsForParallel
                        def includerOrder = '<b>No wrong include order</b>'
                        if (messages.size() != 1) {
                            includerOrder = messages.join('')
                        }
                        currentBuild.description = currentBuild.description ? currentBuild.description + includerOrder : includerOrder
                    }
                }
            }
            // Clean the workspace again on success.
            sh(script: 'find . -mindepth 1 -delete > /dev/null || true')

        } catch (Throwable e) {
            currentBuild.result = "FAILURE"
            currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
            throw e
        } finally {
            // Runs on both success and failure, so artifacts are preserved either way.
            stage('Save artifacts to Artifactory') {
                def artifactory = new com.mirantis.mcp.MCPArtifactory()
                def artifactoryLink = artifactory.uploadJobArtifactsToArtifactory(['artifactory': 'mcp-ci', 'artifactoryRepo': "drivetrain-local/${JOB_NAME}/${BUILD_NUMBER}"])
                currentBuild.description += "<br/>${artifactoryLink}"
            }
        }
    }
}
azvyagintsev7d982812019-04-15 23:21:35 +0300498