blob: 0bab39483d9e86cb9fbcf79b60c0b3d923d52d20 [file] [log] [blame]
azvyagintsev9df82e52018-09-06 19:17:18 +03001/*
2Able to be triggered from Gerrit if :
3Variators:
4Modes:
51) manual run via job-build , possible to pass refspec
Denis Egorenkod8748942018-09-07 12:26:20 +04006 TODO: currently impossible to use custom COOKIECUTTER_TEMPLATE_URL| RECLASS_SYSTEM_URL Gerrit-one always used.
azvyagintsev9df82e52018-09-06 19:17:18 +03007 - for CC
8 - Reclass
Denis Egorenkod8748942018-09-07 12:26:20 +04009
102) gerrit trigger
azvyagintsev9df82e52018-09-06 19:17:18 +030011 Automatically switches if GERRIT_PROJECT variable detected
12 Always test GERRIT_REFSPEC VS GERRIT_BRANCH-master version of opposite project
13 */
14
// Shared pipeline library helpers (com.mirantis.mk.*) used throughout this job.
common = new com.mirantis.mk.Common()
gerrit = new com.mirantis.mk.Gerrit()
git = new com.mirantis.mk.Git()
python = new com.mirantis.mk.Python()

// Optional free-form YAML with extra variable overrides; merged into `env`
// before anything else so later `env.*` reads pick the overridden values up.
// NOTE(review): `.trim()` will throw if EXTRA_VARIABLES_YAML is not declared
// on the job at all — assumes the parameter always exists; verify job config.
extraVarsYAML = env.EXTRA_VARIABLES_YAML.trim() ?: ''
if (extraVarsYAML) {
    common.mergeEnv(env, extraVarsYAML)
}

// Jenkins agent label to run on; defaults to 'docker'.
slaveNode = env.SLAVE_NODE ?: 'docker'
// The include-order check stage is slow, so it is opt-in via CHECK_INCLUDE_ORDER.
checkIncludeOrder = env.CHECK_INCLUDE_ORDER ?: false
azvyagintsev5c0313d2018-08-13 17:13:35 +030027
// Global var's
// Set to true by StepPrepareGit when the gated change is already merged;
// checked in the top-level catch to turn the failure into ABORTED.
alreadyMerged = false
// Common gerrit connection template; per-repo maps below copy and extend it.
gerritConData = [credentialsId       : env.CREDENTIALS_ID,
                 gerritName          : env.GERRIT_NAME ?: 'mcp-jenkins',
                 gerritHost          : env.GERRIT_HOST ?: 'gerrit.mcp.mirantis.com',
                 gerritScheme        : env.GERRIT_SCHEME ?: 'ssh',
                 gerritPort          : env.GERRIT_PORT ?: '29418',
                 gerritRefSpec       : null,
                 gerritProject       : null,
                 withWipeOut         : true,
                 GERRIT_CHANGE_NUMBER: null]
//
//ccTemplatesRepo = env.COOKIECUTTER_TEMPLATE_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.com:29418/mk/cookiecutter-templates'
// Checkout descriptors for the cookiecutter-templates repo:
// gerritDataCC - the change under test, gerritDataCCHEAD - clean branch HEAD
// (HEAD variant is filled in by globalVariatorsUpdate()).
gerritDataCCHEAD = [:]
gerritDataCC = [:]
gerritDataCC << gerritConData
gerritDataCC['gerritBranch'] = env.COOKIECUTTER_TEMPLATE_BRANCH ?: 'master'
gerritDataCC['gerritRefSpec'] = env.COOKIECUTTER_TEMPLATE_REF ?: null
gerritDataCC['gerritProject'] = 'mk/cookiecutter-templates'
//
//reclassSystemRepo = env.RECLASS_SYSTEM_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.com:29418/salt-models/reclass-system'
// Same pair of checkout descriptors for the reclass-system repo.
gerritDataRSHEAD = [:]
gerritDataRS = [:]
gerritDataRS << gerritConData
gerritDataRS['gerritBranch'] = env.RECLASS_MODEL_BRANCH ?: 'master'
gerritDataRS['gerritRefSpec'] = env.RECLASS_SYSTEM_GIT_REF ?: null
gerritDataRS['gerritProject'] = 'salt-models/reclass-system'

// version of debRepos, aka formulas|reclass|ubuntu
testDistribRevision = env.DISTRIB_REVISION ?: 'nightly'
// Name of sub-test chunk job
chunkJobName = "test-mk-cookiecutter-templates-chunk"
// Map uuid -> [jobname, copyToDir, buildId]; filled by testModel(), consumed
// by the "Compare NodesInfo" stage to fetch each chunk job's artifacts.
testModelBuildsData = [:]
Vasyl Saienko772e1232018-07-23 14:42:24 +030061
def getAndUnpackNodesInfoArtifact(jobName, copyTo, build) {
    // Closure factory: fetch the 'nodesinfo.tar.gz' artifact from the given
    // build of jobName, unpack it inside copyTo, then drop the archive.
    // The real work happens only when `parallel` invokes the returned closure.
    return { ->
        dir(copyTo) {
            copyArtifacts(projectName: jobName, selector: specific(build), filter: "nodesinfo.tar.gz")
            sh('tar -xf nodesinfo.tar.gz')
            sh('rm -v nodesinfo.tar.gz')
        }
    }
}
azvyagintsev87985532018-07-10 20:49:38 +030071
def testModel(modelFile, reclassArtifactName, artifactCopyPath) {
    // modelFile - `modelfiname` from model/modelfiname/modelfiname.yaml
    // Trigger one chunk sub-job that renders and tests a single model against
    // the published reclass archive, then register the sub-build coordinates
    // in the global testModelBuildsData map for later artifact collection.
    def dockerCName = "${env.JOB_NAME.toLowerCase()}_${env.BUILD_TAG.toLowerCase()}_${modelFile.toLowerCase()}_" + UUID.randomUUID().toString().take(8)
    // Parameters for the chunk job, passed as one YAML text parameter.
    def chunkParamsYaml = """
        ---
        MODELS_TARGZ: "${env.BUILD_URL}/artifact/${reclassArtifactName}"
        DockerCName: "${dockerCName}"
        testReclassEnv: "model/${modelFile}/"
        modelFile: "contexts/${modelFile}.yml"
        DISTRIB_REVISION: "${testDistribRevision}"
        """
    def chunkJob = build(job: chunkJobName, parameters: [
        [$class: 'TextParameterValue', name: 'EXTRA_VARIABLES_YAML',
         value : chunkParamsYaml.stripIndent()],
    ])
    // Put sub-job info into global map.
    testModelBuildsData.put(dockerCName, [
        'jobname'  : chunkJob.fullProjectName,
        'copyToDir': "${artifactCopyPath}/${modelFile}",
        'buildId'  : "${chunkJob.number}",
    ])
}
93
def StepTestModel(basename, reclassArtifactName, artifactCopyPath) {
    // Closure factory for `parallel`: wrapping in { -> } defers execution, so
    // testModel() runs (and a node is allocated) only when the branch fires,
    // not at the moment this method is called.
    return { ->
        node(slaveNode) {
            testModel(basename, reclassArtifactName, artifactCopyPath)
        }
    }
}
106
def StepPrepareGit(templateEnvFolder, gerrit_data) {
    // Closure factory: clone/checkout the repo described by gerrit_data into
    // templateEnvFolder.
    // If gerrit_data['gerritRefSpec'] is set, the concrete patchset is checked
    // out (failing early with error() when the change is already MERGED, after
    // flagging the global `alreadyMerged` so the top-level handler can abort
    // gracefully); otherwise the clean branch HEAD is fetched.
    return {
        def checkouted = false
        common.infoMsg("StepPrepareGit: ${gerrit_data}")
        // fetch needed sources
        dir(templateEnvFolder) {
            if (gerrit_data['gerritRefSpec']) {
                // Those part might be not work,in case manual var's pass
                def gerritChange = gerrit.getGerritChange(gerrit_data['gerritName'], gerrit_data['gerritHost'],
                    gerrit_data['GERRIT_CHANGE_NUMBER'], gerrit_data['credentialsId'])
                // FIX: declare `merged` with `def`. Closures produced by this
                // factory run concurrently under `parallel`; an undeclared
                // assignment would leak into the shared script binding and
                // race between branches.
                def merged = gerritChange.status == "MERGED"
                if (!merged) {
                    checkouted = gerrit.gerritPatchsetCheckout(gerrit_data)
                } else {
                    // update global variable for pretty return from pipeline
                    alreadyMerged = true
                    common.successMsg("Change ${gerrit_data['GERRIT_CHANGE_NUMBER']} is already merged, no need to gate them")
                    error('change already merged')
                }
            } else {
                // Get clean HEAD
                gerrit_data['useGerritTriggerBuildChooser'] = false
                checkouted = gerrit.gerritPatchsetCheckout(gerrit_data)
                if (!checkouted) {
                    error("Failed to get repo:${gerrit_data}")
                }
            }
        }
    }
}
138
def StepGenerateModels(_contextFileList, _virtualenv, _templateEnvDir) {
    // Closure factory: render a cfg01 model for every context file in the
    // list, using the given virtualenv and cookiecutter template checkout.
    return {
        for (ctxFile in _contextFileList) {
            def modelName = common.GetBaseName(ctxFile, '.yml')
            def ctxContent = readFile(file: "${_templateEnvDir}/contexts/${ctxFile}")
            python.generateModel(ctxContent, modelName, 'cfg01', _virtualenv, "${_templateEnvDir}/model", _templateEnvDir)
        }
    }
}
148
def globalVariatorsUpdate() {
    // Simple function, to check and define branch-around variables
    // In general, simply make transition updates for non-master branch
    // based on magic logic
    //
    // Side effects (globals mutated): gerritDataCC, gerritDataRS,
    // gerritDataCCHEAD, gerritDataRSHEAD, testDistribRevision, and
    // currentBuild.description (prepends a human-readable summary).
    def newline = '<br/>'
    def messages = []
    if (env.GERRIT_PROJECT) {
        // Gerrit-trigger mode: derive refspecs/branches from GERRIT_* vars.
        messages.add("<font color='red'>GerritTrigger detected! We are in auto-mode:</font>")
        messages.add("Test env variables has been changed:")
        messages.add("COOKIECUTTER_TEMPLATE_BRANCH => ${gerritDataCC['gerritBranch']}")
        messages.add("RECLASS_MODEL_BRANCH => ${gerritDataRS['gerritBranch']}")
        // TODO are we going to have such branches?
        if (!['nightly', 'testing', 'stable', 'proposed', 'master'].contains(env.GERRIT_BRANCH)) {
            // Non-system branch: test both repos (and binaries) on that branch.
            gerritDataCC['gerritBranch'] = env.GERRIT_BRANCH
            gerritDataRS['gerritBranch'] = env.GERRIT_BRANCH
            testDistribRevision = env.GERRIT_BRANCH
        }
        // Identify, who triggered. To whom we should pass refspec
        if (env.GERRIT_PROJECT == 'salt-models/reclass-system') {
            gerritDataRS['gerritRefSpec'] = env.GERRIT_REFSPEC
            gerritDataRS['GERRIT_CHANGE_NUMBER'] = env.GERRIT_CHANGE_NUMBER
            messages.add("RECLASS_SYSTEM_GIT_REF => ${gerritDataRS['gerritRefSpec']}")
        } else if (env.GERRIT_PROJECT == 'mk/cookiecutter-templates') {
            gerritDataCC['gerritRefSpec'] = env.GERRIT_REFSPEC
            gerritDataCC['GERRIT_CHANGE_NUMBER'] = env.GERRIT_CHANGE_NUMBER
            messages.add("COOKIECUTTER_TEMPLATE_REF => ${gerritDataCC['gerritRefSpec']}")
        } else {
            error("Unsuported gerrit-project triggered:${env.GERRIT_PROJECT}")
        }
    } else {
        messages.add("<font color='red'>Non-gerrit trigger run detected!</font>")
    }
    // HEAD descriptors are copies of the patched ones with refspec/change
    // cleared, so they always check out the clean branch HEAD.
    gerritDataCCHEAD << gerritDataCC
    gerritDataCCHEAD['gerritRefSpec'] = null
    gerritDataCCHEAD['GERRIT_CHANGE_NUMBER'] = null
    gerritDataRSHEAD << gerritDataRS
    gerritDataRSHEAD['gerritRefSpec'] = null
    gerritDataRSHEAD['GERRIT_CHANGE_NUMBER'] = null
    // 'binary' branch logic w\o 'release/' prefix
    if (testDistribRevision.contains('/')) {
        testDistribRevision = testDistribRevision.split('/')[-1]
    }
    // Check if we are going to test bleeding-edge release, which doesn't have binary release yet
    if (!common.checkRemoteBinary([apt_mk_version: testDistribRevision]).linux_system_repo_url) {
        common.errorMsg("Binary release: ${testDistribRevision} not exist. Fallback to 'proposed'! ")
        testDistribRevision = 'proposed'
        messages.add("DISTRIB_REVISION => ${testDistribRevision}")
    }
    def message = messages.join(newline) + newline
    currentBuild.description = currentBuild.description ? message + currentBuild.description : message
}
azvyagintsev9df82e52018-09-06 19:17:18 +0300200
def replaceGeneratedValues(path) {
    // Overwrite every value under parameters._param in all 'secrets.yml'
    // files below `path` with the literal string 'generated', so that the
    // HEAD-vs-patched pillar diff is not polluted by freshly generated
    // passwords/keys. Files are rewritten in parallel.
    def files = sh(script: "find ${path} -name 'secrets.yml'", returnStdout: true)
    def stepsForParallel = [:]
    stepsForParallel.failFast = true
    files.tokenize().each {
        // FIX: capture the filename in a named local. The branch closure
        // below declares no parameters, so a bare `it` inside it resolves to
        // that closure's own implicit argument — which is null when `parallel`
        // invokes the branch with zero args — instead of this iteration value.
        def secretsFile = it
        stepsForParallel.put("Removing generated passwords/secrets from ${secretsFile}",
            {
                def secrets = readYaml file: secretsFile
                for (String key in secrets['parameters']['_param'].keySet()) {
                    secrets['parameters']['_param'][key] = 'generated'
                }
                // writeYaml can't write to already existing file
                writeYaml file: "${secretsFile}.tmp", data: secrets
                sh "mv ${secretsFile}.tmp ${secretsFile}"
            })
    }
    parallel stepsForParallel
}
219
def linkReclassModels(contextList, envPath, archiveName) {
    // to be able share reclass for all subenvs
    // Also, makes artifact test more solid - use one reclass for all of sub-models.
    // Archive Structure will be:
    // tar.gz
    // ├── contexts
    // │   └── ceph.yml
    // ├── classes-system <<< reclass system
    // ├── model
    // │   └── ceph <<< from `context basename`
    // │   ├── classes
    // │   │   ├── cluster
    // │   │   └── system -> ../../../classes-system
    // │   └── nodes
    // │   └── cfg01.ceph-cluster-domain.local.yml
    //
    // Params:
    //   contextList - context filenames ('<basename>.yml') whose models get linked
    //   envPath     - cookiecutter env dir containing 'contexts' and 'model'
    //   archiveName - artifact name; its basename is also the workspace dir
    //                 holding the reclass-system checkout to copy in
    def archiveBaseName = common.GetBaseName(archiveName, '.tar.gz')
    def classesSystemDir = 'classes-system'
    // copy reclass system under envPath with -R and trailing / to support symlinks direct copy
    sh("cp -R ${archiveBaseName}/ ${envPath}/${classesSystemDir}")
    dir(envPath) {
        // Point every model's classes/system at the single shared copy.
        for (String context : contextList) {
            def basename = common.GetBaseName(context, '.yml')
            dir("${envPath}/model/${basename}/classes") {
                sh(script: "ln -sfv ../../../${classesSystemDir} system ")
            }
        }
        // replace all generated passwords/secrets/keys with hardcode value for infra/secrets.yaml
        replaceGeneratedValues("${envPath}/model")
        // Save all models and all contexts. Warning! `h` flag must be used!
        // NOTE(review): returnStatus:true deliberately(?) ignores tar's exit
        // code — a failed archive would surface only later; confirm intent.
        sh(script: "set -ex; tar -czhf ${env.WORKSPACE}/${archiveName} --exclude='*@tmp' contexts model ${classesSystemDir}", returnStatus: true)
    }
    archiveArtifacts artifacts: archiveName
}
253
// Main flow: prepare HEAD + patched checkouts, generate models from every
// context for both, archive them over one shared reclass-system, then compare
// cluster-level pillars and rendered node info between HEAD and patched.
timeout(time: 1, unit: 'HOURS') {
    node(slaveNode) {
        globalVariatorsUpdate()
        def templateEnvHead = "${env.WORKSPACE}/EnvHead/"
        def templateEnvPatched = "${env.WORKSPACE}/EnvPatched/"
        def contextFileListHead = []
        def contextFileListPatched = []
        def vEnv = "${env.WORKSPACE}/venv"
        def headReclassArtifactName = "head_reclass.tar.gz"
        def patchedReclassArtifactName = "patched_reclass.tar.gz"
        def reclassNodeInfoDir = "${env.WORKSPACE}/reclassNodeInfo_compare/"
        def reclassInfoHeadPath = "${reclassNodeInfoDir}/old"
        def reclassInfoPatchedPath = "${reclassNodeInfoDir}/new"
        try {
            // Clean workspace from previous run leftovers.
            sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
            stage('Download and prepare CC env') {
                // Prepare 2 env - for patchset, and for HEAD
                def paralellEnvs = [:]
                paralellEnvs.failFast = true
                paralellEnvs['downloadEnvHead'] = StepPrepareGit(templateEnvHead, gerritDataCCHEAD)
                if (gerritDataCC.get('gerritRefSpec', null)) {
                    paralellEnvs['downloadEnvPatched'] = StepPrepareGit(templateEnvPatched, gerritDataCC)
                    parallel paralellEnvs
                } else {
                    // No CC patch under test: patched env is a copy of HEAD.
                    paralellEnvs['downloadEnvPatched'] = { common.warningMsg('No need to process: downloadEnvPatched') }
                    parallel paralellEnvs
                    sh("rsync -a --exclude '*@tmp' ${templateEnvHead} ${templateEnvPatched}")
                }
            }
            stage("Check workflow_definition") {
                // Check only for patchset
                python.setupVirtualenv(vEnv, 'python2', [], "${templateEnvPatched}/requirements.txt")
                if (gerritDataCC.get('gerritRefSpec', null)) {
                    common.infoMsg(python.runVirtualenvCommand(vEnv, "python ${templateEnvPatched}/workflow_definition_test.py"))
                } else {
                    common.infoMsg('No need to process: workflow_definition')
                }
            }

            stage("generate models") {
                // Collect the context file lists for both envs.
                dir("${templateEnvHead}/contexts") {
                    for (String x : findFiles(glob: "*.yml")) {
                        contextFileListHead.add(x)
                    }
                }
                dir("${templateEnvPatched}/contexts") {
                    for (String x : findFiles(glob: "*.yml")) {
                        contextFileListPatched.add(x)
                    }
                }
                // Generate over 2env's - for patchset, and for HEAD
                def paralellEnvs = [:]
                paralellEnvs.failFast = true
                paralellEnvs['GenerateEnvHead'] = StepGenerateModels(contextFileListHead, vEnv, templateEnvHead)
                if (gerritDataCC.get('gerritRefSpec', null)) {
                    paralellEnvs['GenerateEnvPatched'] = StepGenerateModels(contextFileListPatched, vEnv, templateEnvPatched)
                    parallel paralellEnvs
                } else {
                    paralellEnvs['GenerateEnvPatched'] = { common.warningMsg('No need to process: GenerateEnvPatched') }
                    parallel paralellEnvs
                    sh("rsync -a --exclude '*@tmp' ${templateEnvHead} ${templateEnvPatched}")
                }

                // We need 2 git's, one for HEAD, one for PATCHed.
                // if no patch, use head for both
                RSHeadDir = common.GetBaseName(headReclassArtifactName, '.tar.gz')
                RSPatchedDir = common.GetBaseName(patchedReclassArtifactName, '.tar.gz')
                common.infoMsg("gerritDataRS= ${gerritDataRS}")
                common.infoMsg("gerritDataRSHEAD= ${gerritDataRSHEAD}")
                if (gerritDataRS.get('gerritRefSpec', null)) {
                    StepPrepareGit("${env.WORKSPACE}/${RSPatchedDir}/", gerritDataRS).call()
                    StepPrepareGit("${env.WORKSPACE}/${RSHeadDir}/", gerritDataRSHEAD).call()
                } else {
                    // No reclass-system patch: symlink patched dir onto HEAD.
                    StepPrepareGit("${env.WORKSPACE}/${RSHeadDir}/", gerritDataRS).call()
                    sh("cd ${env.WORKSPACE} ; ln -svf ${RSHeadDir} ${RSPatchedDir}")
                }
                // link all models, to use one global reclass
                // For HEAD
                linkReclassModels(contextFileListHead, templateEnvHead, headReclassArtifactName)
                // For patched
                linkReclassModels(contextFileListPatched, templateEnvPatched, patchedReclassArtifactName)
            }

            stage("Compare cluster lvl Head/Patched") {
                // Compare patched and HEAD reclass pillars
                compareRoot = "${env.WORKSPACE}/cluster_compare/"
                // extract archive and drop all copied classes/system before comparing
                sh(script: """
                    mkdir -pv ${compareRoot}/new ${compareRoot}/old
                    tar -xzf ${patchedReclassArtifactName} --directory ${compareRoot}/new
                    tar -xzf ${headReclassArtifactName} --directory ${compareRoot}/old
                    find ${compareRoot} -name classes -type d -exec rm -rf '{}/system' \\;
                    """)
                common.warningMsg('infra/secrets.yml has been skipped from compare!')
                result = '\n' + common.comparePillars(compareRoot, env.BUILD_URL, "-Ev \'infra/secrets.yml|\\.git\'")
                currentBuild.description = currentBuild.description ? currentBuild.description + result : result
            }
            stage("TestContexts Head/Patched") {
                // Fan out one chunk sub-job per context, for HEAD and patched.
                def stepsForParallel = [:]
                stepsForParallel.failFast = true
                common.infoMsg("Found: ${contextFileListHead.size()} HEAD contexts to test.")
                for (String context : contextFileListHead) {
                    def basename = common.GetBaseName(context, '.yml')
                    stepsForParallel.put("ContextHeadTest:${basename}", StepTestModel(basename, headReclassArtifactName, reclassInfoHeadPath))
                }
                common.infoMsg("Found: ${contextFileListPatched.size()} patched contexts to test.")
                for (String context : contextFileListPatched) {
                    def basename = common.GetBaseName(context, '.yml')
                    stepsForParallel.put("ContextPatchedTest:${basename}", StepTestModel(basename, patchedReclassArtifactName, reclassInfoPatchedPath))
                }
                parallel stepsForParallel
                common.infoMsg('All TestContexts tests done')
            }
            stage("Compare NodesInfo Head/Patched") {
                // Download all artifacts
                def stepsForParallel = [:]
                stepsForParallel.failFast = true
                common.infoMsg("Found: ${testModelBuildsData.size()} nodeinfo artifacts to download.")
                testModelBuildsData.each { bname, bdata ->
                    stepsForParallel.put("FetchData:${bname}",
                        getAndUnpackNodesInfoArtifact(bdata.jobname, bdata.copyToDir, bdata.buildId))
                }
                parallel stepsForParallel
                // remove timestamp field from rendered files
                sh("find ${reclassNodeInfoDir} -type f -exec sed -i '/ timestamp: .*/d' {} \\;")
                // Compare patched and HEAD reclass pillars
                result = '\n' + common.comparePillars(reclassNodeInfoDir, env.BUILD_URL, '')
                currentBuild.description = currentBuild.description ? currentBuild.description + result : result
            }
            stage('Check include order') {
                if (!checkIncludeOrder) {
                    common.infoMsg('Check include order require to much time, and currently disabled!')

                } else {
                    // Validate that every node's class list follows the
                    // service -> system -> cluster include ordering.
                    def correctIncludeOrder = ["service", "system", "cluster"]
                    dir(reclassInfoPatchedPath) {
                        def nodeInfoFiles = findFiles(glob: "**/*.reclass.nodeinfo")
                        def messages = ["<b>Wrong include ordering found</b><ul>"]
                        def stepsForParallel = [:]
                        nodeInfoFiles.each { nodeInfo ->
                            stepsForParallel.put("Checking ${nodeInfo.path}:", {
                                def node = readYaml file: nodeInfo.path
                                def classes = node['classes']
                                def curClassID = 0
                                def prevClassID = 0
                                def wrongOrder = false
                                for (String className in classes) {
                                    // First dotted segment names the include tier.
                                    def currentClass = className.tokenize('.')[0]
                                    curClassID = correctIncludeOrder.indexOf(currentClass)
                                    if (currentClass != correctIncludeOrder[prevClassID]) {
                                        if (prevClassID > curClassID) {
                                            wrongOrder = true
                                            common.warningMsg("File ${nodeInfo.path} contains wrong order of classes including: Includes for ${className} should be declared before ${correctIncludeOrder[prevClassID]} includes")
                                        } else {
                                            prevClassID = curClassID
                                        }
                                    }
                                }
                                if (wrongOrder) {
                                    messages.add("<li>${nodeInfo.path} contains wrong order of classes including</li>")
                                }
                            })
                        }
                        parallel stepsForParallel
                        def includerOrder = '<b>No wrong include order</b>'
                        if (messages.size() != 1) {
                            includerOrder = messages.join('')
                        }
                        currentBuild.description = currentBuild.description ? currentBuild.description + includerOrder : includerOrder
                    }
                }
            }
            // Final workspace cleanup on success.
            sh(script: 'find . -mindepth 1 -delete > /dev/null || true')

        } catch (Throwable e) {
            if (alreadyMerged) {
                // Gated change already merged: abort quietly instead of failing.
                currentBuild.result = 'ABORTED'
                currentBuild.description = "Change ${GERRIT_CHANGE_NUMBER} is already merged, no need to gate them"
                return
            }
            currentBuild.result = "FAILURE"
            currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
            throw e
        } finally {
            def dummy = "dummy"
        }
    }
}