blob: 598ab01428015da032800ddb19ad7e0deb297add [file] [log] [blame]
/*
Able to be triggered from Gerrit if:
Variators:
Modes:
1) manual run via job-build, it is possible to pass a refspec
   TODO: currently impossible to use custom COOKIECUTTER_TEMPLATE_URL | RECLASS_SYSTEM_URL; the Gerrit one is always used.
   - for CC
   - for Reclass
2) gerrit trigger
   Automatically switches if the GERRIT_PROJECT variable is detected.
   Always tests GERRIT_REFSPEC VS the GERRIT_BRANCH/master version of the opposite project.
 */
14
// Shared MCP pipeline library objects (provided by the Jenkins global library).
common = new com.mirantis.mk.Common()
gerrit = new com.mirantis.mk.Gerrit()
git = new com.mirantis.mk.Git()
python = new com.mirantis.mk.Python()

// Merge extra variables into the job environment, if passed.
if (env.EXTRA) {
    common.mergeEnv(env, env.EXTRA)
}

// Label of the Jenkins agent to run on; 'docker' by default.
slaveNode = env.SLAVE_NODE ?: 'docker'
// The include-order check takes a long time, so it is opt-in (see last stage).
checkIncludeOrder = env.CHECK_INCLUDE_ORDER ?: false

// Global var's
// Set to true by StepPrepareGit() when the triggering change is already merged;
// the top-level catch then aborts the build instead of failing it.
alreadyMerged = false
// Common Gerrit connection parameters, shared by the CC and RS checkout maps below.
gerritConData = [credentialsId : env.CREDENTIALS_ID,
                 gerritName : env.GERRIT_NAME ?: 'mcp-jenkins',
                 gerritHost : env.GERRIT_HOST ?: 'gerrit.mcp.mirantis.net',
                 gerritScheme : env.GERRIT_SCHEME ?: 'ssh',
                 gerritPort : env.GERRIT_PORT ?: '29418',
                 gerritRefSpec : null,
                 gerritProject : null,
                 withWipeOut : true,
                 GERRIT_CHANGE_NUMBER: null]
//
//ccTemplatesRepo = env.COOKIECUTTER_TEMPLATE_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.net:29418/mk/cookiecutter-templates'
// Checkout descriptors for mk/cookiecutter-templates: clean HEAD and (possibly) patched.
gerritDataCCHEAD = [:]
gerritDataCC = [:]
gerritDataCC << gerritConData
gerritDataCC['gerritBranch'] = env.COOKIECUTTER_TEMPLATE_BRANCH ?: 'master'
gerritDataCC['gerritRefSpec'] = env.COOKIECUTTER_TEMPLATE_REF ?: null
gerritDataCC['gerritProject'] = 'mk/cookiecutter-templates'
//
//reclassSystemRepo = env.RECLASS_SYSTEM_URL ?: 'ssh://mcp-jenkins@gerrit.mcp.mirantis.net:29418/salt-models/reclass-system'
// Checkout descriptors for salt-models/reclass-system: clean HEAD and (possibly) patched.
gerritDataRSHEAD = [:]
gerritDataRS = [:]
gerritDataRS << gerritConData
gerritDataRS['gerritBranch'] = env.RECLASS_MODEL_BRANCH ?: 'master'
gerritDataRS['gerritRefSpec'] = env.RECLASS_SYSTEM_GIT_REF ?: null
gerritDataRS['gerritProject'] = 'salt-models/reclass-system'

// version of debRepos, aka formulas\reclass
testDistribRevision = env.DISTRIB_REVISION ?: 'nightly'
reclassVersion = 'v1.5.4'
if (env.RECLASS_VERSION) {
    reclassVersion = env.RECLASS_VERSION
}
// Name of sub-test chunk job
chunkJobName = "test-mk-cookiecutter-templates-chunk"
// Map: sub-build uuid -> [jobname, copyToDir, buildId]; filled by testModel(),
// consumed by the "Compare NodesInfo" stage to fetch the nodesinfo artifacts.
testModelBuildsData = [:]
/**
 * Write a minimal cfg01 (Salt master) node definition into a generated model,
 * so the model can be rendered and validated by reclass.
 *
 * @param modEnv        root directory of the generated model
 * @param clusterDomain cluster domain, used in the node FQDN/file name
 * @param clusterName   cluster name, used in the 'classes' include
 */
def generateSaltMaster(modEnv, clusterDomain, clusterName) {
    def nodeFile = "${modEnv}/nodes/cfg01.${clusterDomain}.yml"
    // YAML payload written as-is; indentation inside the heredoc is part of
    // the generated file content.
    def nodeString = """classes:
- cluster.${clusterName}.infra.config
parameters:
  _param:
    linux_system_codename: xenial
    reclass_data_revision: master
  linux:
    system:
      name: cfg01
      domain: ${clusterDomain}
"""
    sh "mkdir -p ${modEnv}/nodes/"
    println "Create file ${nodeFile}"
    writeFile(file: nodeFile, text: nodeString)
}
82
/**
 * Render one cluster model from a cookiecutter context file.
 *
 * For every product in the fixed product list the corresponding
 * `cluster_product/<product>` template is rendered (when the product is
 * 'infra', or enabled via '<product>_enabled' in the context) and the result
 * is moved under `model/<context basename>/classes/cluster/<cluster_name>`.
 * Finally a cfg01 node definition is added via generateSaltMaster().
 *
 * @param contextFile - file name under `${templateEnvDir}/contexts/`
 * @param virtualenv - pyvenv with CC and dep's
 * @param templateEnvDir - root of CookieCutter checkout
 * @return
 */

def generateModel(contextFile, virtualenv, templateEnvDir) {
    def modelEnv = "${templateEnvDir}/model"
    def basename = common.GetBaseName(contextFile, '.yml')
    def generatedModel = "${modelEnv}/${basename}"
    def content = readFile(file: "${templateEnvDir}/contexts/${contextFile}")
    def templateContext = readYaml text: content
    def clusterDomain = templateContext.default_context.cluster_domain
    def clusterName = templateContext.default_context.cluster_name
    def outputDestination = "${generatedModel}/classes/cluster/${clusterName}"
    def templateBaseDir = templateEnvDir
    def templateDir = "${templateEnvDir}/dir"
    def templateOutputDir = templateBaseDir
    dir(templateEnvDir) {
        // Drop any stale result from a previous run of the same context.
        sh(script: "rm -rf ${generatedModel} || true")
        common.infoMsg("Generating model from context ${contextFile}")
        def productList = ["infra", "cicd", "opencontrail", "kubernetes", "openstack", "oss", "stacklight", "ceph"]
        for (product in productList) {

            // get templateOutputDir and productDir
            templateOutputDir = "${templateEnvDir}/output/${product}"
            productDir = product
            templateDir = "${templateEnvDir}/cluster_product/${productDir}"
            // Bw for 2018.8.1 and older releases: they ship the stacklight
            // template under 'stacklight2' instead of 'stacklight'.
            if (product.startsWith("stacklight") && (!fileExists(templateDir))) {
                common.warningMsg("Old release detected! productDir => 'stacklight2' ")
                productDir = "stacklight2"
                templateDir = "${templateEnvDir}/cluster_product/${productDir}"
            }
            // 'infra' is always rendered; other products only when the context
            // enables them via '<product>_enabled'.
            if (product == "infra" || (templateContext.default_context["${product}_enabled"]
                && templateContext.default_context["${product}_enabled"].toBoolean())) {

                common.infoMsg("Generating product " + product + " from " + templateDir + " to " + templateOutputDir)

                sh "rm -rf ${templateOutputDir} || true"
                sh "mkdir -p ${templateOutputDir}"
                sh "mkdir -p ${outputDestination}"

                python.buildCookiecutterTemplate(templateDir, content, templateOutputDir, virtualenv, templateBaseDir)
                sh "mv -v ${templateOutputDir}/${clusterName}/* ${outputDestination}"
            } else {
                common.warningMsg("Product " + product + " is disabled")
            }
        }
        generateSaltMaster(generatedModel, clusterDomain, clusterName)
    }
}
137
/**
 * Build a closure which fetches the 'nodesinfo.tar.gz' artifact of a finished
 * chunk sub-build into `copyTo`, unpacks it there and removes the tarball.
 *
 * @param jobName job the artifact comes from
 * @param copyTo  directory to fetch and unpack into
 * @param build   build id to copy the artifact from
 * @return closure suitable for use with `parallel`
 */
def getAndUnpackNodesInfoArtifact(jobName, copyTo, build) {
    def archive = "nodesinfo.tar.gz"
    return {
        dir(copyTo) {
            copyArtifacts(projectName: jobName, selector: specific(build), filter: archive)
            sh "tar -xf ${archive}"
            sh "rm -v ${archive}"
        }
    }
}
azvyagintsev87985532018-07-10 20:49:38 +0300147
/**
 * Trigger one chunk sub-job (`chunkJobName`) to test a single generated model,
 * and register the sub-build in the global `testModelBuildsData` map so that
 * its nodesinfo artifact can be fetched and compared later.
 *
 * @param modelFile           model basename, i.e. `modelname` from model/modelname/modelname.yaml
 * @param reclassArtifactName reclass tarball (archived on this build) to pass to the sub-job
 * @param artifactCopyPath    local dir the sub-job artifact will be copied into later
 */
def testModel(modelFile, reclassArtifactName, artifactCopyPath) {
    // Unique, human-readable id; also used as the Docker container name in the sub-job.
    def _uuid = "${env.JOB_NAME.toLowerCase()}_${env.BUILD_TAG.toLowerCase()}_${modelFile.toLowerCase()}_" + UUID.randomUUID().toString().take(8)
    // YAML payload for the sub-job's EXTRA_VARIABLES_YAML text parameter.
    def _values_string = """
  ---
  MODELS_TARGZ: "${env.BUILD_URL}/artifact/${reclassArtifactName}"
  DockerCName: "${_uuid}"
  testReclassEnv: "model/${modelFile}/"
  modelFile: "contexts/${modelFile}.yml"
  DISTRIB_REVISION: "${testDistribRevision}"
  reclassVersion: "${reclassVersion}"
  """
    def chunkJob = build job: chunkJobName, parameters: [
        [$class: 'TextParameterValue', name: 'EXTRA_VARIABLES_YAML',
         value : _values_string.stripIndent()],
    ]
    // Put sub-job info into global map.
    testModelBuildsData.put(_uuid, ['jobname' : chunkJob.fullProjectName,
                                    'copyToDir': "${artifactCopyPath}/${modelFile}",
                                    'buildId' : "${chunkJob.number}"])
}
170
/**
 * Wrap testModel() in a closure so it can be handed to `parallel` without
 * being invoked at map-construction time; each closure allocates its own
 * `slaveNode` agent.
 *
 * @return closure running testModel() on a dedicated node
 */
def StepTestModel(basename, reclassArtifactName, artifactCopyPath) {
    // Explicit `{ -> }` form: the body must only run when parallel calls it.
    return { ->
        node(slaveNode) {
            testModel(basename, reclassArtifactName, artifactCopyPath)
        }
    }
}
183
/**
 * Build a closure that checks out the repo described by `gerrit_data` into
 * `templateEnvFolder`.
 *
 * If `gerritRefSpec` is set, the corresponding Gerrit change is checked out —
 * unless it is already merged, in which case the global `alreadyMerged` flag
 * is raised and the build is aborted via error(). Otherwise a clean HEAD of
 * the configured branch is fetched.
 *
 * @param templateEnvFolder target directory for the checkout
 * @param gerrit_data       map with Gerrit connection/change parameters
 * @return closure suitable for use with `parallel` or a direct `.call()`
 */
def StepPrepareGit(templateEnvFolder, gerrit_data) {
    // return git clone object
    return {
        def checkouted = false
        common.infoMsg("StepPrepareGit: ${gerrit_data}")
        // fetch needed sources
        dir(templateEnvFolder) {
            if (gerrit_data['gerritRefSpec']) {
                // NOTE: this part might not work when var's are passed manually
                // (GERRIT_CHANGE_NUMBER may be unset in that case).
                def gerritChange = gerrit.getGerritChange(gerrit_data['gerritName'], gerrit_data['gerritHost'],
                    gerrit_data['GERRIT_CHANGE_NUMBER'], gerrit_data['credentialsId'])
                merged = gerritChange.status == "MERGED"
                if (!merged) {
                    checkouted = gerrit.gerritPatchsetCheckout(gerrit_data)
                } else {
                    // update global variable for pretty return from pipeline
                    alreadyMerged = true
                    common.successMsg("Change ${gerrit_data['GERRIT_CHANGE_NUMBER']} is already merged, no need to gate them")
                    error('change already merged')
                }
            } else {
                // Get clean HEAD
                gerrit_data['useGerritTriggerBuildChooser'] = false
                checkouted = gerrit.gerritPatchsetCheckout(gerrit_data)
                if (!checkouted) {
                    error("Failed to get repo:${gerrit_data}")
                }
            }
        }
    }
}
215
/**
 * Build a closure that renders every context from `_contextFileList`
 * sequentially via generateModel().
 *
 * @param _contextFileList list of context file names
 * @param _virtualenv      pyvenv with cookiecutter and dependencies
 * @param _templateEnvDir  root of the cookiecutter-templates checkout
 * @return closure suitable for use with `parallel`
 */
def StepGenerateModels(_contextFileList, _virtualenv, _templateEnvDir) {
    return {
        // Plain index loop: CPS-safe and runs only when parallel invokes the closure.
        for (int idx = 0; idx < _contextFileList.size(); idx++) {
            generateModel(_contextFileList[idx], _virtualenv, _templateEnvDir)
        }
    }
}
223
/**
 * Check the trigger context and adjust the global variables accordingly.
 *
 * In Gerrit-trigger mode (GERRIT_PROJECT set): picks up the refspec for the
 * triggering project, aligns both branches to GERRIT_BRANCH for non-release
 * branches, and composes an HTML build-description message. Always: derives
 * the HEAD checkout descriptors from the (possibly patched) ones, and
 * validates that a binary release exists for `testDistribRevision`, falling
 * back to 'proposed' otherwise.
 */
def globalVariatorsUpdate() {
    // Simple function, to check and define branch-around variables
    // In general, simply make transition updates for non-master branch
    // based on magic logic
    def message = '<br/>'
    if (env.GERRIT_PROJECT) {
        // TODO are we going to have such branches?
        if (!['nightly', 'testing', 'stable', 'proposed', 'master'].contains(env.GERRIT_BRANCH)) {
            gerritDataCC['gerritBranch'] = env.GERRIT_BRANCH
            gerritDataRS['gerritBranch'] = env.GERRIT_BRANCH
            testDistribRevision = env.GERRIT_BRANCH
        }
        // Identify, who triggered. To whom we should pass refspec
        if (env.GERRIT_PROJECT == 'salt-models/reclass-system') {
            gerritDataRS['gerritRefSpec'] = env.GERRIT_REFSPEC
            gerritDataRS['GERRIT_CHANGE_NUMBER'] = env.GERRIT_CHANGE_NUMBER
            message = message + "<br/>RECLASS_SYSTEM_GIT_REF =>${gerritDataRS['gerritRefSpec']}"
        } else if (env.GERRIT_PROJECT == 'mk/cookiecutter-templates') {
            gerritDataCC['gerritRefSpec'] = env.GERRIT_REFSPEC
            gerritDataCC['GERRIT_CHANGE_NUMBER'] = env.GERRIT_CHANGE_NUMBER
            message = message + "<br/>COOKIECUTTER_TEMPLATE_REF =>${gerritDataCC['gerritRefSpec']}"
        } else {
            error("Unsuported gerrit-project triggered:${env.GERRIT_PROJECT}")
        }
        message = "<font color='red'>GerritTrigger detected! We are in auto-mode:</font>" +
            "<br/>Test env variables has been changed:" +
            "<br/>COOKIECUTTER_TEMPLATE_BRANCH => ${gerritDataCC['gerritBranch']}" +
            "<br/>RECLASS_MODEL_BRANCH=> ${gerritDataRS['gerritBranch']}" + message
    } else {
        message = "<font color='red'>Non-gerrit trigger run detected!</font>" + message
    }
    // HEAD descriptors = copies of the (possibly patched) ones with refspec dropped,
    // so HEAD checkouts always fetch a clean branch tip.
    gerritDataCCHEAD << gerritDataCC
    gerritDataCCHEAD['gerritRefSpec'] = null
    gerritDataCCHEAD['GERRIT_CHANGE_NUMBER'] = null
    gerritDataRSHEAD << gerritDataRS
    gerritDataRSHEAD['gerritRefSpec'] = null
    gerritDataRSHEAD['GERRIT_CHANGE_NUMBER'] = null
    // 'binary' branch logic w\o 'release/' prefix
    if (testDistribRevision.contains('/')) {
        testDistribRevision = testDistribRevision.split('/')[-1]
    }
    // Check if we are going to test bleeding-edge release, which doesn't have binary release yet
    if (!common.checkRemoteBinary([apt_mk_version: testDistribRevision]).linux_system_repo_url) {
        common.errorMsg("Binary release: ${testDistribRevision} not exist. Fallback to 'proposed'! ")
        testDistribRevision = 'proposed'
        message = "<br/>DISTRIB_REVISION =>${testDistribRevision}" + message
    }
    currentBuild.description = currentBuild.description ? message + currentBuild.description : message
}
azvyagintsev9df82e52018-09-06 19:17:18 +0300273
/**
 * Replace every value under parameters._param in each 'secrets.yml' found
 * below `path` with the literal string 'generated', so the HEAD vs patched
 * comparison is not polluted by randomly generated passwords/keys.
 *
 * @param path directory tree to scan for secrets.yml files
 */
def replaceGeneratedValues(path) {
    def files = sh(script: "find ${path} -name 'secrets.yml'", returnStdout: true)
    def stepsForParallel = [:]
    stepsForParallel.failFast = true
    // One parallel branch per secrets.yml file.
    files.tokenize().each {
        stepsForParallel.put("Removing generated passwords/secrets from ${it}",
            {
                def secrets = readYaml file: it
                for (String key in secrets['parameters']['_param'].keySet()) {
                    secrets['parameters']['_param'][key] = 'generated'
                }
                // writeYaml can't write to already existing file
                writeYaml file: "${it}.tmp", data: secrets
                sh "mv ${it}.tmp ${it}"
            })
    }
    parallel stepsForParallel
}
292
/**
 * Link every generated sub-model to one shared reclass-system checkout,
 * scrub generated secrets, and archive models + contexts as `archiveName`.
 *
 * @param contextList list of context file names (one sub-model each)
 * @param envPath     cookiecutter env root holding model/ and contexts/
 * @param archiveName artifact file name, e.g. head_reclass.tar.gz; its basename
 *                    is also the workspace dir of the reclass-system checkout
 */
def linkReclassModels(contextList, envPath, archiveName) {
    // to be able share reclass for all subenvs
    // Also, makes artifact test more solid - use one reclass for all of sub-models.
    // Archive Structure will be:
    // tar.gz
    // ├── contexts
    // │   └── ceph.yml
    // ├── ${reclassDirName} <<< reclass system
    // ├── model
    // │   └── ceph <<< from `context basename`
    // │       ├── classes
    // │       │   ├── cluster
    // │       │   └── system -> ../../../${reclassDirName}
    // │       └── nodes
    // │           └── cfg01.ceph-cluster-domain.local.yml
    dir(envPath) {
        for (String context : contextList) {
            def basename = common.GetBaseName(context, '.yml')
            dir("${envPath}/model/${basename}") {
                // Point classes/system of each sub-model at the shared reclass checkout.
                sh(script: "mkdir -p classes/; ln -sfv ../../../../${common.GetBaseName(archiveName, '.tar.gz')} classes/system ")
            }
        }
        // replace all generated passwords/secrets/keys with hardcode value for infra/secrets.yaml
        replaceGeneratedValues("${envPath}/model")
        // Save all models and all contexts. Warning! `h` flag must be used
        // so tar follows the classes/system symlinks!
        sh(script: "set -ex; tar -czhf ${env.WORKSPACE}/${archiveName} --exclude='*@tmp' model contexts", returnStatus: true)
    }
    archiveArtifacts artifacts: archiveName
}
322
// Main flow: prepare HEAD + patched checkouts, generate models for both,
// compare cluster-level pillars, run per-model chunk sub-jobs, compare the
// rendered nodeinfo, and optionally check class include ordering.
timeout(time: 1, unit: 'HOURS') {
    node(slaveNode) {
        globalVariatorsUpdate()
        def templateEnvHead = "${env.WORKSPACE}/EnvHead/"
        def templateEnvPatched = "${env.WORKSPACE}/EnvPatched/"
        def contextFileListHead = []
        def contextFileListPatched = []
        def vEnv = "${env.WORKSPACE}/venv"
        def headReclassArtifactName = "head_reclass.tar.gz"
        def patchedReclassArtifactName = "patched_reclass.tar.gz"
        def reclassNodeInfoDir = "${env.WORKSPACE}/reclassNodeInfo_compare/"
        def reclassInfoHeadPath = "${reclassNodeInfoDir}/old"
        def reclassInfoPatchedPath = "${reclassNodeInfoDir}/new"
        try {
            // Start from an empty workspace.
            sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
            stage('Download and prepare CC env') {
                // Prepare 2 env - for patchset, and for HEAD
                def paralellEnvs = [:]
                paralellEnvs.failFast = true
                paralellEnvs['downloadEnvHead'] = StepPrepareGit(templateEnvHead, gerritDataCCHEAD)
                if (gerritDataCC.get('gerritRefSpec', null)) {
                    paralellEnvs['downloadEnvPatched'] = StepPrepareGit(templateEnvPatched, gerritDataCC)
                    parallel paralellEnvs
                } else {
                    // No CC patch: reuse the HEAD checkout for the "patched" env.
                    paralellEnvs['downloadEnvPatched'] = { common.warningMsg('No need to process: downloadEnvPatched') }
                    parallel paralellEnvs
                    sh("rsync -a --exclude '*@tmp' ${templateEnvHead} ${templateEnvPatched}")
                }
            }
            stage("Check workflow_definition") {
                // Check only for patchset
                python.setupVirtualenv(vEnv, 'python2', [], "${templateEnvPatched}/requirements.txt")
                if (gerritDataCC.get('gerritRefSpec', null)) {
                    common.infoMsg(python.runVirtualenvCommand(vEnv, "python ${templateEnvPatched}/workflow_definition_test.py"))
                } else {
                    common.infoMsg('No need to process: workflow_definition')
                }
            }

            stage("generate models") {
                // Collect context file names for both envs.
                dir("${templateEnvHead}/contexts") {
                    for (String x : findFiles(glob: "*.yml")) {
                        contextFileListHead.add(x)
                    }
                }
                dir("${templateEnvPatched}/contexts") {
                    for (String x : findFiles(glob: "*.yml")) {
                        contextFileListPatched.add(x)
                    }
                }
                // Generate over 2env's - for patchset, and for HEAD
                def paralellEnvs = [:]
                paralellEnvs.failFast = true
                paralellEnvs['GenerateEnvHead'] = StepGenerateModels(contextFileListHead, vEnv, templateEnvHead)
                if (gerritDataCC.get('gerritRefSpec', null)) {
                    paralellEnvs['GenerateEnvPatched'] = StepGenerateModels(contextFileListPatched, vEnv, templateEnvPatched)
                    parallel paralellEnvs
                } else {
                    paralellEnvs['GenerateEnvPatched'] = { common.warningMsg('No need to process: GenerateEnvPatched') }
                    parallel paralellEnvs
                    sh("rsync -a --exclude '*@tmp' ${templateEnvHead} ${templateEnvPatched}")
                }

                // We need 2 git's, one for HEAD, one for PATCHed.
                // if no patch, use head for both
                RSHeadDir = common.GetBaseName(headReclassArtifactName, '.tar.gz')
                RSPatchedDir = common.GetBaseName(patchedReclassArtifactName, '.tar.gz')
                common.infoMsg("gerritDataRS= ${gerritDataRS}")
                common.infoMsg("gerritDataRSHEAD= ${gerritDataRSHEAD}")
                if (gerritDataRS.get('gerritRefSpec', null)) {
                    StepPrepareGit("${env.WORKSPACE}/${RSPatchedDir}/", gerritDataRS).call()
                    StepPrepareGit("${env.WORKSPACE}/${RSHeadDir}/", gerritDataRSHEAD).call()
                } else {
                    StepPrepareGit("${env.WORKSPACE}/${RSHeadDir}/", gerritDataRS).call()
                    sh("cd ${env.WORKSPACE} ; ln -svf ${RSHeadDir} ${RSPatchedDir}")
                }
                // link all models, to use one global reclass
                // For HEAD
                linkReclassModels(contextFileListHead, templateEnvHead, headReclassArtifactName)
                // For patched
                linkReclassModels(contextFileListPatched, templateEnvPatched, patchedReclassArtifactName)
            }

            stage("Compare cluster lvl Head/Patched") {
                // Compare patched and HEAD reclass pillars
                compareRoot = "${env.WORKSPACE}/cluster_compare/"
                sh(script: """
                    mkdir -pv ${compareRoot}/new ${compareRoot}/old
                    tar -xzf ${patchedReclassArtifactName} --directory ${compareRoot}/new
                    tar -xzf ${headReclassArtifactName} --directory ${compareRoot}/old
                    """)
                common.warningMsg('infra/secrets.yml has been skipped from compare!')
                result = '\n' + common.comparePillars(compareRoot, env.BUILD_URL, "-Ev \'infra/secrets.yml|\\.git\'")
                currentBuild.description = currentBuild.description ? currentBuild.description + result : result
            }
            stage("TestContexts Head/Patched") {
                // One chunk sub-job per context, HEAD and patched, all in parallel.
                def stepsForParallel = [:]
                stepsForParallel.failFast = true
                common.infoMsg("Found: ${contextFileListHead.size()} HEAD contexts to test.")
                for (String context : contextFileListHead) {
                    def basename = common.GetBaseName(context, '.yml')
                    stepsForParallel.put("ContextHeadTest:${basename}", StepTestModel(basename, headReclassArtifactName, reclassInfoHeadPath))
                }
                common.infoMsg("Found: ${contextFileListPatched.size()} patched contexts to test.")
                for (String context : contextFileListPatched) {
                    def basename = common.GetBaseName(context, '.yml')
                    stepsForParallel.put("ContextPatchedTest:${basename}", StepTestModel(basename, patchedReclassArtifactName, reclassInfoPatchedPath))
                }
                parallel stepsForParallel
                common.infoMsg('All TestContexts tests done')
            }
            stage("Compare NodesInfo Head/Patched") {
                // Download all artifacts
                def stepsForParallel = [:]
                stepsForParallel.failFast = true
                common.infoMsg("Found: ${testModelBuildsData.size()} nodeinfo artifacts to download.")
                testModelBuildsData.each { bname, bdata ->
                    stepsForParallel.put("FetchData:${bname}",
                        getAndUnpackNodesInfoArtifact(bdata.jobname, bdata.copyToDir, bdata.buildId))
                }
                parallel stepsForParallel
                // remove timestamp field from rendered files
                sh("find ${reclassNodeInfoDir} -type f -exec sed -i '/ timestamp: .*/d' {} \\;")
                // Compare patched and HEAD reclass pillars
                result = '\n' + common.comparePillars(reclassNodeInfoDir, env.BUILD_URL, '')
                currentBuild.description = currentBuild.description ? currentBuild.description + result : result
            }
            stage('Check include order') {
                if (!checkIncludeOrder) {
                    common.infoMsg('Check include order require to much time, and currently disabled!')

                } else {
                    // Classes must be included in service -> system -> cluster order.
                    def correctIncludeOrder = ["service", "system", "cluster"]
                    dir(reclassInfoPatchedPath) {
                        def nodeInfoFiles = findFiles(glob: "**/*.reclass.nodeinfo")
                        def messages = ["<b>Wrong include ordering found</b><ul>"]
                        def stepsForParallel = [:]
                        nodeInfoFiles.each { nodeInfo ->
                            stepsForParallel.put("Checking ${nodeInfo.path}:", {
                                def node = readYaml file: nodeInfo.path
                                def classes = node['classes']
                                def curClassID = 0
                                def prevClassID = 0
                                def wrongOrder = false
                                for (String className in classes) {
                                    // Class kind is the first dotted component (service/system/cluster).
                                    def currentClass = className.tokenize('.')[0]
                                    curClassID = correctIncludeOrder.indexOf(currentClass)
                                    if (currentClass != correctIncludeOrder[prevClassID]) {
                                        if (prevClassID > curClassID) {
                                            wrongOrder = true
                                            common.warningMsg("File ${nodeInfo.path} contains wrong order of classes including: Includes for ${className} should be declared before ${correctIncludeOrder[prevClassID]} includes")
                                        } else {
                                            prevClassID = curClassID
                                        }
                                    }
                                }
                                if (wrongOrder) {
                                    messages.add("<li>${nodeInfo.path} contains wrong order of classes including</li>")
                                }
                            })
                        }
                        parallel stepsForParallel
                        def includerOrder = '<b>No wrong include order</b>'
                        // messages holds only the header when no violations were recorded.
                        if (messages.size() != 1) {
                            includerOrder = messages.join('')
                        }
                        currentBuild.description = currentBuild.description ? currentBuild.description + includerOrder : includerOrder
                    }
                }
            }
            // Clean workspace on success.
            sh(script: 'find . -mindepth 1 -delete > /dev/null || true')

        } catch (Throwable e) {
            // Already-merged change: abort gracefully instead of failing.
            if (alreadyMerged) {
                currentBuild.result = 'ABORTED'
                currentBuild.description = "Change ${GERRIT_CHANGE_NUMBER} is already merged, no need to gate them"
                return
            }
            currentBuild.result = "FAILURE"
            currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
            throw e
        } finally {
            def dummy = "dummy"
        }
    }
}