// Shared Mirantis pipeline libraries. Declared WITHOUT `def` so they live in
// the script binding and are visible inside the methods defined below.
common = new com.mirantis.mk.Common()
gerrit = new com.mirantis.mk.Gerrit()
git = new com.mirantis.mk.Git()
python = new com.mirantis.mk.Python()

// Gerrit refspec of the change under test; null for a plain branch build.
gerritRef = env.GERRIT_REFSPEC ?: null
// Label expression selecting the build agent.
slaveNode = (env.SLAVE_NODE ?: 'python&&docker')
// NOTE(review): script-local, so methods/closures below cannot see it; the
// only reference in StepPrepareCCenv is commented out.
def alreadyMerged = false

// Reclass version to test against; overridable via the RECLASS_VERSION job
// parameter.
// FIX: declared without `def` so it lands in the script binding like the
// globals above - a script-level `def` variable is invisible inside this
// script's methods (e.g. testModel), so the override could never be consumed
// there.
reclassVersion = 'v1.5.4'
if (common.validInputParam('RECLASS_VERSION')) {
    reclassVersion = RECLASS_VERSION
}
| 14 | |
/**
 * Write the salt-master (cfg01) node definition into a generated model.
 *
 * modEnv        - root directory of the generated model
 * clusterDomain - cluster domain, used to build the node FQDN
 * clusterName   - cluster name referenced from the node's class list
 */
def generateSaltMaster(modEnv, clusterDomain, clusterName) {
    def masterNodePath = "${modEnv}/nodes/cfg01.${clusterDomain}.yml"
    def masterNodeYaml = """classes:
- cluster.${clusterName}.infra.config
parameters:
  _param:
    linux_system_codename: xenial
    reclass_data_revision: master
  linux:
    system:
      name: cfg01
      domain: ${clusterDomain}
"""
    sh "mkdir -p ${modEnv}/nodes/"
    println "Create file ${masterNodePath}"
    writeFile(file: masterNodePath, text: masterNodeYaml)
}
| 32 | |
/**
 * Render one cookiecutter context file into a reclass cluster model.
 *
 * contextFile    - context filename (e.g. "ceph.yml") under
 *                  ${templateEnvDir}/contexts/
 * virtualenv     - path to the python virtualenv running cookiecutter
 * templateEnvDir - checkout of the cookiecutter templates repository; the
 *                  model is generated under ${templateEnvDir}/model/<basename>
 *
 * Iterates over the known product list, renders each enabled product's
 * templates into the model's classes/cluster/<clusterName> tree, then writes
 * the salt-master node definition via generateSaltMaster().
 */
def generateModel(contextFile, virtualenv, templateEnvDir) {
    def modelEnv = "${templateEnvDir}/model"
    def basename = common.GetBaseName(contextFile, '.yml')
    def generatedModel = "${modelEnv}/${basename}"
    def content = readFile(file: "${templateEnvDir}/contexts/${contextFile}")
    def templateContext = readYaml text: content
    def clusterDomain = templateContext.default_context.cluster_domain
    def clusterName = templateContext.default_context.cluster_name
    def outputDestination = "${generatedModel}/classes/cluster/${clusterName}"
    def templateBaseDir = templateEnvDir
    def templateDir = "${templateEnvDir}/dir" // placeholder; re-assigned per product below
    def templateOutputDir = templateBaseDir
    // Drop any model left over from a previous context with the same basename.
    sh(script: "rm -rf ${generatedModel} || true")

    common.infoMsg("Generating model from context ${contextFile}")

    def productList = ["infra", "cicd", "opencontrail", "kubernetes", "openstack", "oss", "stacklight", "ceph"]
    for (product in productList) {

        // get templateOutputDir and productDir
        if (product.startsWith("stacklight")) {
            templateOutputDir = "${templateEnvDir}/output/stacklight"
            // stacklight templates are versioned (stacklight1/stacklight2);
            // fall back to version 1 when the context carries no
            // stacklight_version key.
            try {
                productDir = "stacklight" + templateContext.default_context['stacklight_version']
            } catch (Throwable e) {
                productDir = "stacklight1"
            }
        } else {
            templateOutputDir = "${templateEnvDir}/output/${product}"
            productDir = product
        }

        // "infra" is always rendered; any other product must be switched on
        // via a truthy `<product>_enabled` flag in the context's default_context.
        if (product == "infra" || (templateContext.default_context["${product}_enabled"]
            && templateContext.default_context["${product}_enabled"].toBoolean())) {

            templateDir = "${templateEnvDir}/cluster_product/${productDir}"
            common.infoMsg("Generating product " + product + " from " + templateDir + " to " + templateOutputDir)

            sh "rm -rf ${templateOutputDir} || true"
            sh "mkdir -p ${templateOutputDir}"
            sh "mkdir -p ${outputDestination}"

            // Render the product templates, then merge them into the shared
            // classes/cluster/<clusterName> tree of the model.
            python.buildCookiecutterTemplate(templateDir, content, templateOutputDir, virtualenv, templateBaseDir)
            sh "mv -v ${templateOutputDir}/${clusterName}/* ${outputDestination}"
        } else {
            common.warningMsg("Product " + product + " is disabled")
        }
    }
    // Finally write the salt-master node definition for the generated model.
    generateSaltMaster(generatedModel, clusterDomain, clusterName)
}
| 83 | |
azvyagintsev | 8798553 | 2018-07-10 20:49:38 +0300 | [diff] [blame] | 84 | |
/**
 * Trigger the "test-mk-cookiecutter-templates-chunk" job for one generated model.
 *
 * modelFile      - context basename, i.e. `modelname` from model/modelname/...
 * reclassVersion - reclass release passed to the chunk job.
 *                  NOTE(review): the only caller in this file (StepTestModel)
 *                  invokes testModel(basename) without this argument, so a
 *                  RECLASS_VERSION override never reaches the chunk job -
 *                  confirm whether that is intended.
 */
def testModel(modelFile, reclassVersion = 'v1.5.4') {
    // Parameters for the chunk job, rendered as YAML and passed via the
    // single EXTRA_VARIABLES_YAML string parameter. MODELS_TARGZ points at
    // the "patched_reclass.tar.gz" artifact archived by this very build.
    _values_string = """
---
MODELS_TARGZ: "${env.BUILD_URL}/artifact/patched_reclass.tar.gz"
DockerCName: "${env.JOB_NAME.toLowerCase()}_${env.BUILD_TAG.toLowerCase()}_${modelFile.toLowerCase()}"
testReclassEnv: "model/${modelFile}/"
modelFile: "contexts/${modelFile}.yml"
DISTRIB_REVISION: "${DISTRIB_REVISION}"
EXTRA_FORMULAS: "${env.EXTRA_FORMULAS}"
reclassVersion: "${reclassVersion}"
"""
    // Synchronous downstream build: a chunk-job failure fails this pipeline.
    build job: "test-mk-cookiecutter-templates-chunk", parameters: [
        [$class: 'StringParameterValue', name: 'EXTRA_VARIABLES_YAML',
         value: _values_string.stripIndent()],
    ]
}
| 104 | |
/**
 * Build a deferred unit of work for `parallel`: test one generated model on a
 * fresh agent.
 *
 * The body must be wrapped in a closure ({ -> ... }); otherwise it would run
 * immediately when this method is called instead of when `parallel` invokes it.
 */
def StepTestModel(basename) {
    def runOnNode = { ->
        node(slaveNode) {
            testModel(basename)
        }
    }
    return runOnNode
}
| 117 | |
/**
 * Build a deferred unit of work for `parallel`: fetch the cookiecutter
 * templates source into templateEnvFolder.
 *
 * refchange - Gerrit refspec of the change to check out; when empty/falsy the
 *             plain COOKIECUTTER_TEMPLATE_BRANCH is cloned instead.
 *
 * Aborts the build (hudson.AbortException) when the Gerrit change is already
 * merged - there is nothing left to gate.
 */
def StepPrepareCCenv(refchange, templateEnvFolder) {
    // return git clone object
    return {
        // fetch needed sources
        dir(templateEnvFolder) {
            if (refchange) {
                def gerritChange = gerrit.getGerritChange(GERRIT_NAME, GERRIT_HOST, GERRIT_CHANGE_NUMBER, CREDENTIALS_ID)
                // FIX: `merged`/`checkouted` were assigned without `def`,
                // leaking into the script binding from a closure that runs
                // under `parallel`; keep them local to this closure.
                def merged = gerritChange.status == "MERGED"
                if (!merged) {
                    def checkouted = gerrit.gerritPatchsetCheckout([
                        credentialsId: CREDENTIALS_ID
                    ])
                } else {
                    common.successMsg("Change ${GERRIT_CHANGE_NUMBER} is already merged, no need to gate them")
                    currentBuild.result = 'ABORTED'
                    throw new hudson.AbortException('change already merged')
                }
            } else {
                git.checkoutGitRepository(templateEnvFolder, COOKIECUTTER_TEMPLATE_URL, COOKIECUTTER_TEMPLATE_BRANCH, CREDENTIALS_ID)
            }
        }
    }
}
| 143 | |
/**
 * Build a deferred unit of work for `parallel`: render every context file
 * from _contextFileList into the _templateEnvDir checkout, in list order.
 */
def StepGenerateModels(_contextFileList, _virtualenv, _templateEnvDir) {
    return {
        for (int idx = 0; idx < _contextFileList.size(); idx++) {
            generateModel(_contextFileList[idx], _virtualenv, _templateEnvDir)
        }
    }
}
| 151 | |
// Main gate flow: prepare HEAD and patched template envs, generate models
// from both, archive them, diff the resulting pillars, then test each patched
// context in a downstream chunk job.
timeout(time: 1, unit: 'HOURS') {
    node(slaveNode) {
        def templateEnvHead = "${env.WORKSPACE}/env_head/"
        def templateEnvPatched = "${env.WORKSPACE}/env_patched/"
        def contextFileListHead = []
        def contextFileListPatched = []
        def vEnv = "${env.WORKSPACE}/venv"

        try {
            // Start from a clean workspace.
            sh(script: 'find . -mindepth 1 -delete > /dev/null || true')
            stage('Download and prepare CC env') {
                // Prepare 2 envs - one for the patchset, one for HEAD.
                // FIX: `def` added - these maps were leaking into the script binding.
                def paralellEnvs = [:]
                paralellEnvs.failFast = true
                paralellEnvs['downloadEnvHead'] = StepPrepareCCenv('', templateEnvHead)
                paralellEnvs['downloadEnvPatched'] = StepPrepareCCenv(gerritRef, templateEnvPatched)
                parallel paralellEnvs
            }
            stage("Check workflow_definition") {
                // Check only for the patchset.
                python.setupVirtualenv(vEnv, 'python2', [], "${templateEnvPatched}/requirements.txt")
                common.infoMsg(python.runVirtualenvCommand(vEnv, "python ${templateEnvPatched}/workflow_definition_test.py"))
            }

            stage("generate models") {
                // Collect the context files present in each env.
                dir("${templateEnvHead}/contexts") {
                    for (String x : findFiles(glob: "*.yml")) {
                        contextFileListHead.add(x)
                    }
                }
                dir("${templateEnvPatched}/contexts") {
                    for (String x : findFiles(glob: "*.yml")) {
                        contextFileListPatched.add(x)
                    }
                }
                // Generate over both envs - for patchset and for HEAD.
                // FIX: branch names and inputs were swapped here before -
                // 'GenerateEnvHead' was fed the patched env and vice versa,
                // which made the parallel-branch logs misleading.
                def paralellEnvs = [:]
                paralellEnvs.failFast = true
                paralellEnvs['GenerateEnvHead'] = StepGenerateModels(contextFileListHead, vEnv, templateEnvHead)
                paralellEnvs['GenerateEnvPatched'] = StepGenerateModels(contextFileListPatched, vEnv, templateEnvPatched)
                parallel paralellEnvs

                // Collect artifacts
                dir(templateEnvPatched) {
                    // Collect only models. Kept for backward compatibility -
                    // someone may still consume this artifact.
                    sh(script: "tar -czf model.tar.gz -C model ../contexts .", returnStatus: true)
                    archiveArtifacts artifacts: "model.tar.gz"
                }

                // to be able to share reclass for all subenvs
                // Also makes the artifact test more solid - use one reclass for all sub-models.
                // Archive Structure will be:
                // tar.gz
                // ├── contexts
                // │   └── ceph.yml
                // ├── global_reclass <<< reclass system
                // ├── model
                // │   └── ceph <<< from `context basename`
                // │       ├── classes
                // │       │   ├── cluster
                // │       │   └── system -> ../../../global_reclass
                // │       └── nodes
                // │           └── cfg01.ceph-cluster-domain.local.yml

                if (SYSTEM_GIT_URL == "") {
                    git.checkoutGitRepository("${env.WORKSPACE}/global_reclass/", RECLASS_MODEL_URL, RECLASS_MODEL_BRANCH, CREDENTIALS_ID)
                } else {
                    dir("${env.WORKSPACE}/global_reclass/") {
                        if (!gerrit.gerritPatchsetCheckout(SYSTEM_GIT_URL, SYSTEM_GIT_REF, "HEAD", CREDENTIALS_ID)) {
                            common.errorMsg("Failed to obtain system reclass with url: ${SYSTEM_GIT_URL} and ${SYSTEM_GIT_REF}")
                            throw new RuntimeException("Failed to obtain system reclass")
                        }
                    }
                }
                // link all models, to use one global reclass
                // For HEAD
                dir(templateEnvHead) {
                    for (String context : contextFileListHead) {
                        def basename = common.GetBaseName(context, '.yml')
                        dir("${templateEnvHead}/model/${basename}") {
                            sh(script: 'mkdir -p classes/; ln -sfv ../../../../global_reclass classes/system ')
                        }
                    }
                    // Save all models and all contexts. Warning! The `h` flag
                    // must be used so tar archives symlink TARGETS, not links.
                    sh(script: "tar -chzf head_reclass.tar.gz --exclude='*@tmp' model contexts global_reclass", returnStatus: true)
                    archiveArtifacts artifacts: "head_reclass.tar.gz"
                    // move for "Compare Pillars" stage
                    sh(script: "mv -v head_reclass.tar.gz ${env.WORKSPACE}")
                }
                // For patched
                dir(templateEnvPatched) {
                    for (String context : contextFileListPatched) {
                        def basename = common.GetBaseName(context, '.yml')
                        dir("${templateEnvPatched}/model/${basename}") {
                            sh(script: 'mkdir -p classes/; ln -sfv ../../../../global_reclass classes/system ')
                        }
                    }
                    // Save all models and all contexts. Warning! The `h` flag
                    // must be used so tar archives symlink TARGETS, not links.
                    sh(script: "tar -chzf patched_reclass.tar.gz --exclude='*@tmp' model contexts global_reclass", returnStatus: true)
                    archiveArtifacts artifacts: "patched_reclass.tar.gz"
                    // move for "Compare Pillars" stage
                    sh(script: "mv -v patched_reclass.tar.gz ${env.WORKSPACE}")
                }
            }

            stage("Compare Pillars") {
                // Compare patched and HEAD reclass pillars
                def compareRoot = "${env.WORKSPACE}/test_compare/"
                sh(script: """
                    mkdir -pv ${compareRoot}/new ${compareRoot}/old
                    tar -xzf patched_reclass.tar.gz --directory ${compareRoot}/new
                    tar -xzf head_reclass.tar.gz --directory ${compareRoot}/old
                    """)
                common.warningMsg('infra/secrets.yml has been skipped from compare!')
                def rezult = common.comparePillars(compareRoot, env.BUILD_URL, "-Ev 'infra/secrets.yml'")
                currentBuild.description = rezult
            }
            stage("test-contexts") {
                // Test contexts for patched only
                def stepsForParallel = [:]
                common.infoMsg("Found: ${contextFileListPatched.size()} patched contexts to test.")
                for (String context : contextFileListPatched) {
                    def basename = common.GetBaseName(context, '.yml')
                    stepsForParallel.put("ContextPatchTest:${basename}", StepTestModel(basename))
                }
                parallel stepsForParallel
                common.infoMsg('All tests done')
            }

            // Clean up the workspace on success.
            sh(script: 'find . -mindepth 1 -delete > /dev/null || true')

        } catch (Throwable e) {
            // Prepend the failure message to any description set so far.
            currentBuild.result = "FAILURE"
            currentBuild.description = currentBuild.description ? e.message + " " + currentBuild.description : e.message
            throw e
        } finally {
            def dummy = "dummy"
            //FAILING common.sendNotification(currentBuild.result,"",["slack"])
        }
    }
}