// blob: 46d50108a9cc5ce73c2f9e5a7f857eba444d6b2d [file] [log] [blame]
// Jenkins pipeline: builds a cloud-init config-drive ISO for the Salt master
// (cfg01) node of an MCP cluster. Top-level setup: date-stamped ISO name,
// Gerrit helper, and the map of values substituted into master_config.yaml.
import java.text.SimpleDateFormat

def gerrit = new com.mirantis.mk.Gerrit()
def dateFormat = new SimpleDateFormat("yyyyMMddHHmm")
def date = new Date()
def common_scripts_commit = "${COMMON_SCRIPTS_COMMIT}"
// Fall back to a timestamped name when CONFIG_DRIVE_ISO_NAME is empty.
def iso_name = "${CONFIG_DRIVE_ISO_NAME}" ?: "cfg01.${CLUSTER_NAME}-config-${dateFormat.format(date)}.iso"
def node_name = "${NODE_NAME}"
// smc: key/value pairs sed-ed into master_config.yaml in the "Set data" stage.
// Empty values are skipped there, leaving the file's defaults in place.
def smc = [:]
smc['SALT_MASTER_MINION_ID'] = "cfg01.${CLUSTER_NAME}.local"
smc['SALT_MASTER_DEPLOY_IP'] = "${SALT_MASTER_DEPLOY_IP}"
smc['DEPLOY_NETWORK_GW'] = "${DEPLOY_NETWORK_GW}"
smc['DEPLOY_NETWORK_NETMASK'] = "${DEPLOY_NETWORK_NETMASK}"
smc['DNS_SERVERS'] = "${DNS_SERVERS}"
// FIX: these two entries previously used single-quoted strings, which Groovy
// does not interpolate — the literal text '${PIPELINES_FROM_ISO}' and
// '${PIPELINE_REPO_URL}' was written into the config instead of the job
// parameter values. Double quotes interpolate, matching every other entry.
smc['PIPELINES_FROM_ISO'] = "${PIPELINES_FROM_ISO}"
smc['PIPELINE_REPO_URL'] = "${PIPELINE_REPO_URL}"
smc['MCP_VERSION'] = "${MCP_VERSION}"
// smc['LOCAL_REPOS'] = 'true'
smc['MCP_SALT_REPO_KEY'] = "${MCP_SALT_REPO_KEY}"
smc['MCP_SALT_REPO_URL'] = "${MCP_SALT_REPO_URL}"
// Flatten a map into an ordered list of [key, value] pairs, so callers can
// iterate it inside CPS-transformed pipeline code (plain for-in over a list).
def entries(m) {
    m.entrySet().collect { e -> [e.key, e.value] }
}
// Main build: runs on the requested agent, clones the required repositories,
// renders the master node's cloud-init data, and produces the config-drive ISO.
node (node_name) {
timestamps(){
// Start from an empty workspace so stale repos or old ISOs cannot leak in.
stage("Clean Workspace") {
step([$class: 'WsCleanup'])
}
stage("Get mk-pipelines, pipeline-library and mcp-common-scripts repositories") {
def cloned = true
// NOTE(review): usernameVariable/passwordVariable are bound on an SSH private
// key credential — presumably GERRIT_USERNAME is the SSH login for Gerrit;
// confirm the credential type configured under GERRIT_MCP_CREDENTIALS_ID.
withCredentials([[$class: 'SSHUserPrivateKeyBinding',
keyFileVariable: "GERRIT_KEY",
credentialsId: env.GERRIT_MCP_CREDENTIALS_ID,
usernameVariable: "GERRIT_USERNAME",
passwordVariable: "GERRIT_PASSWORD"]]) {
// Check out mcp-common-scripts at branch MCP_VERSION (optionally on the
// Gerrit refspec MCP_COMMON_SCRIPTS_REFS); returns falsy on failure.
dir("mcp-common-scripts-git") {
cloned = gerrit.gerritPatchsetCheckout([
credentialsId : "${GERRIT_MCP_CREDENTIALS_ID}",
gerritBranch: "${MCP_VERSION}",
gerritRefSpec: "${MCP_COMMON_SCRIPTS_REFS}",
gerritScheme: "ssh",
gerritName: "${GERRIT_USERNAME}",
gerritHost: "gerrit.mcp.mirantis.net",
gerritPort: "29418",
gerritProject: "mcp/mcp-common-scripts"
])
}
if (!cloned) {
error("Failed to clone the repository mcp/mcp-common-scripts")
}
// Bare mirror clones of mk-pipelines and pipeline-library, which are packed
// into the ISO. NOTE(review): the ssh-agent started here is never killed;
// it lingers until the agent process exits — consider `ssh-agent -k`.
sh ("""\
set -ex
eval \$(ssh-agent)
ssh-add ${GERRIT_KEY}
git clone --mirror ssh://${GERRIT_USERNAME}@gerrit.mcp.mirantis.net:29418/mk/mk-pipelines mk-pipelines
git clone --mirror ssh://${GERRIT_USERNAME}@gerrit.mcp.mirantis.net:29418/mcp-ci/pipeline-library pipeline-library
""")
// When a specific ref is requested, fetch it into the mirror and force-move
// the MCP_VERSION tag onto it so consumers of the ISO pick up that revision.
if (PIPELINE_LIBRARY_REF != '') {
sh ("""\
set -ex
eval \$(ssh-agent)
ssh-add ${GERRIT_KEY}
cd pipeline-library
git fetch https://gerrit.mcp.mirantis.net/mcp-ci/pipeline-library ${PIPELINE_LIBRARY_REF}
git tag ${MCP_VERSION} FETCH_HEAD -f
""")
}
// Same force-tagging scheme for mk-pipelines.
if (MK_PIPELINES_REF != '') {
sh ("""\
set -ex
eval \$(ssh-agent)
ssh-add ${GERRIT_KEY}
cd mk-pipelines
git fetch https://gerrit.mcp.mirantis.net/mcp-ci/mk-pipelines ${MK_PIPELINES_REF}
git tag ${MCP_VERSION} FETCH_HEAD -f
""")
}
// dir("mk-pipelines-git") {
// cloned = gerrit.gerritPatchsetCheckout([
// credentialsId : "${GERRIT_MCP_CREDENTIALS_ID}",
// gerritRefSpec: "${MK_PIPELINES_REF}",
// gerritScheme: "ssh",
// gerritName: "${GERRIT_USERNAME}",
// gerritHost: "gerrit.mcp.mirantis.net",
// gerritPort: "29418",
// gerritProject: "mk/mk-pipelines"
// ])
// }
// if (!cloned) {
// error("Failed to clone the repository mk/mk-pipelines")
// }
// dir("pipeline-library-git") {
// cloned = gerrit.gerritPatchsetCheckout([
// credentialsId : "${GERRIT_MCP_CREDENTIALS_ID}",
// gerritRefSpec: "${PIPELINE_LIBRARY_REF}",
// gerritScheme: "ssh",
// gerritName: "${GERRIT_USERNAME}",
// gerritHost: "gerrit.mcp.mirantis.net",
// gerritPort: "29418",
// gerritProject: "mcp-ci/pipeline-library"
// ])
// }
// if (!cloned) {
// error("Failed to clone the repository mcp-ci/pipeline-library")
// }
}
//if (PIPELINE_LIBRARY_REF != '') {
// sh "cd pipeline-library; git tag ${MCP_VERSION} FETCH_HEAD -f ; cd .."
//}
//if (MK_PIPELINES_REF != '') {
// sh "cd mk-pipelines; git tag ${MCP_VERSION} FETCH_HEAD -f; cd .."
//}
// gerrit.gerritPatchsetCheckout() doesn't support clonning bare repository
// sh "git clone --mirror mk-pipelines-git mk-pipelines"
// sh "git clone --mirror pipeline-library-git pipeline-library"
}
// Locate the generator script and its YAML template inside the checked-out
// mcp-common-scripts repo, and assemble the CLI arguments for the ISO build.
stage("Prepare arguments for generation config drive") {
config_drive_script_path = "mcp-common-scripts-git/config-drive/create_config_drive.sh"
user_data_script_path = "mcp-common-scripts-git/config-drive/master_config.yaml"
sh "chmod +x ${config_drive_script_path}"
//args = "--user-data user_data --vendor-data user_data2 --hostname cfg01 --model model --mk-pipelines mk-pipelines/ --pipeline-library pipeline-library/ ${iso_name}"
args = "--user-data user_data2 --vendor-data ${user_data_script_path} --hostname cfg01 --model model --mk-pipelines mk-pipelines/ --pipeline-library pipeline-library/ ${iso_name}"
}
// Fetch the cluster model into ./model, either from a tarball URL or by
// cloning a git repo at branch MCP_VERSION (with submodules).
stage("Get cluster model") {
def model_url = "${MODEL_URL}"
sh "rm -rf model"
if (MODEL_URL_OBJECT_TYPE == 'tar.gz') {
sh "wget -O model.tar.gz '${model_url}'"
sh "mkdir model && cd model && tar zxfv ../model.tar.gz"
} else {
sh "git clone --recursive $model_url -b ${MCP_VERSION} model"
// remove .git file with hardcoded path
sh "rm model/classes/system/.git"
}
}
// Substitute the smc values into the `export KEY=...` lines of
// master_config.yaml. NOTE(review): sed uses ',' as its delimiter — a value
// containing a comma (e.g. a multi-entry DNS_SERVERS list) would break the
// expression; confirm values are comma-free or switch to another delimiter.
stage("Set data"){
for (i in entries(smc)) {
// Replace only if the variable is non-empty, leave default otherwise
if (i[1]) {
sh "sed -i \"s,export ${i[0]}=.*,export ${i[0]}=${i[1]},\" ${user_data_script_path}"
}
}
}
// Render the user-data half of the cloud-config pair (the vendor-data half is
// master_config.yaml): root access, password, and merge strategy for the node.
stage("Create user_data2"){
//http://jen20.com/2015/10/04/cloudconfig-merging.html
//TODO(ddmitriev): allow to read such file from
// ./tcp_tests/templates/${LAB_CONFIG_NAME}/ directory for each lab
def user_data2 = """\
#cloud-config, see http://cloudinit.readthedocs.io/en/latest/topics/examples.html
#write_files: # write_files don't work as expected because overwrites this key from mcp-common-scripts YAML, losing data
# - path: /etc/default/grub.d/97-enable-grub-menu.cfg
# content: |
# GRUB_RECORDFAIL_TIMEOUT=30
# GRUB_TIMEOUT=10
# GRUB_TIMEOUT_STYLE=menu
#
# - path: /root/interfaces
# content: |
# auto lo
# iface lo inet loopback
#
# auto ens3
# iface ens3 inet dhcp
#
# - path: /root/.ssh/config
# owner: root:root
# permissions: '0600'
# content: |
# Host *
# ServerAliveInterval 60
# ServerAliveCountMax 0
# StrictHostKeyChecking no
# UserKnownHostsFile /dev/null
#
# - path: /etc/cloud/master_environment_override
# owner: root:root
# permissions: '0600'
# content: |
# export SALT_MASTER_MINION_ID="cfg01.${CLUSTER_NAME}.local"
# export SALT_MASTER_DEPLOY_IP="${SALT_MASTER_DEPLOY_IP}"
# export DEPLOY_NETWORK_GW="${DEPLOY_NETWORK_GW}"
# export DEPLOY_NETWORK_NETMASK="${DEPLOY_NETWORK_NETMASK}"
# export DNS_SERVERS="${DNS_SERVERS}"
# export PIPELINES_FROM_ISO="${PIPELINES_FROM_ISO}"
# export PIPELINE_REPO_URL="${PIPELINE_REPO_URL}"
# export MCP_VERSION="${MCP_VERSION}"
# export LOCAL_REPOS="true"
# export MCP_SALT_REPO_KEY="${MCP_SALT_REPO_KEY}"
# export MCP_SALT_REPO_URL="${MCP_SALT_REPO_URL}"
output:
all: '| tee -a /var/log/cloud-init-output.log /dev/tty0'
ssh_pwauth: True
users:
- name: root
sudo: ALL=(ALL) NOPASSWD:ALL
shell: /bin/bash
disable_root: false
chpasswd:
list: |
root:r00tme
expire: False
bootcmd:
# Block access to SSH while node is preparing
- cloud-init-per once sudo touch /is_cloud_init_started
# Enable root access
- sed -i -e '/^PermitRootLogin/s/.*/PermitRootLogin yes/' /etc/ssh/sshd_config
- service sshd restart
merge_how: "dict(recurse_array)+list(append)"
"""
writeFile(file: "user_data2", text: user_data2, encoding: "UTF-8")
}
stage("Create config-drive"){
// create cfg config-drive
// apt package genisoimage is required for this stage
sh "./${config_drive_script_path} ${args}"
}
// Attach the generated ISO to this build so downstream jobs can fetch it.
stage("Save artifacts") {
archiveArtifacts allowEmptyArchive: false,
artifacts: "${iso_name}"
}
// Optionally trigger the downloader job, pointing it at this build's artifact.
stage("Download config drive to slave") {
if (DOWNLOAD_CONFIG_DRIVE == 'true') {
def b_res = build job: 'download-config-drive',
parameters: [
string(name: 'IMAGE_URL', value: "${BUILD_URL}/artifact/${iso_name}"),
string(name: 'NODE_NAME', value: "${NODE_NAME}")
]
} else {
echo "Drive only generated. But didn't download"
}
}
}
}