Merge "Add ceph cluster nodes, roles"
diff --git a/jobs/pipelines/deploy-cicd-and-run-tests.groovy b/jobs/pipelines/deploy-cicd-and-run-tests.groovy
index f9273e1..de92a25 100644
--- a/jobs/pipelines/deploy-cicd-and-run-tests.groovy
+++ b/jobs/pipelines/deploy-cicd-and-run-tests.groovy
@@ -13,7 +13,7 @@
make_snapshot_stages = false
}
-currentBuild.description = "${NODE_NAME}:${ENV_NAME}"
+currentBuild.description = "${NODE_NAME}:${ENV_NAME}<br>"
def deploy(shared, common, steps, env_manager) {
def report_text = ''
@@ -131,6 +131,21 @@
}
}
+ if (fileExists("jenkins_agent_description.txt")) {
+ def String jenkins_agent_description = readFile("jenkins_agent_description.txt")
+ currentBuild.description += "${jenkins_agent_description}"
+
+ // if there is a separate foundation node on $jenkins_slave_node_name,
+ // then also archive artifacts on that node
+ if (jenkins_slave_node_name != env.NODE_NAME) {
+ node ("${jenkins_slave_node_name}") {
+ stage("Archive all xml reports from node ${}") {
+ archiveArtifacts artifacts: "**/*.xml,**/*.ini,**/*.log,**/*.tar.gz"
+ }
+ }
+ }
+ }
+
stage("Archive all xml reports") {
archiveArtifacts artifacts: "**/*.xml,**/*.ini,**/*.log,**/*.tar.gz"
}
@@ -140,9 +155,11 @@
}
stage("Store TestRail reports to job description") {
def String description = readFile("description.txt")
- currentBuild.description += "\n${description}"
+ currentBuild.description += "${description}"
}
}
- }
- }
+ } // try
+ } // node
+
+
//}
\ No newline at end of file
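
The new block in deploy-cicd-and-run-tests.groovy appends the contents of jenkins_agent_description.txt (written by the bootstrap job) to the HTML build description and, when the foundation node differs from the current node, also archives artifacts there. A minimal Python sketch of the read-and-append step; the helper name and the workspace argument are illustrative, not part of the pipeline:

    import os

    def append_agent_description(description, workspace="."):
        """If the bootstrap job left jenkins_agent_description.txt in the
        workspace, append its HTML snippet to the build description
        (mirrors the fileExists/readFile block above)."""
        path = os.path.join(workspace, "jenkins_agent_description.txt")
        if os.path.isfile(path):
            with open(path) as agent_file:
                description += agent_file.read()
        return description
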
diff --git a/jobs/pipelines/swarm-bootstrap-salt-cluster-devops.groovy b/jobs/pipelines/swarm-bootstrap-salt-cluster-devops.groovy
index 392be7c..802a0a5 100644
--- a/jobs/pipelines/swarm-bootstrap-salt-cluster-devops.groovy
+++ b/jobs/pipelines/swarm-bootstrap-salt-cluster-devops.groovy
@@ -70,6 +70,7 @@
export PYTHONIOENCODING=UTF-8
export REPOSITORY_SUITE=${MCP_VERSION}
export TEST_GROUP=test_create_environment
+ export LOG_NAME=swarm_test_create_environment.log
py.test -vvv -s -p no:django -p no:ipdb --junit-xml=deploy_hardware.xml -k \${TEST_GROUP}
""")
}
@@ -122,6 +123,7 @@
export PYTHONIOENCODING=UTF-8
export REPOSITORY_SUITE=${MCP_VERSION}
export TEST_GROUP=test_bootstrap_salt
+ export LOG_NAME=swarm_test_bootstrap_salt.log
py.test -vvv -s -p no:django -p no:ipdb --junit-xml=${xml_report_name} -k \${TEST_GROUP}
""")
// Wait for jenkins to start and IO calm down
diff --git a/jobs/pipelines/swarm-bootstrap-salt-cluster-heat.groovy b/jobs/pipelines/swarm-bootstrap-salt-cluster-heat.groovy
index cfb080f..8c10291 100644
--- a/jobs/pipelines/swarm-bootstrap-salt-cluster-heat.groovy
+++ b/jobs/pipelines/swarm-bootstrap-salt-cluster-heat.groovy
@@ -97,7 +97,7 @@
}
stage("Generate config drive ISO") {
- def SALT_MASTER_IP=shared.run_cmd_stdout("./tcp_tests/utils/get_param_heat_template.py management_subnet_cfg01_ip").trim().split().last()
+ SALT_MASTER_IP=shared.run_cmd_stdout("./tcp_tests/utils/get_param_heat_template.py management_subnet_cfg01_ip").trim().split().last()
def ADMIN_NETWORK_GW=shared.run_cmd_stdout("./tcp_tests/utils/get_param_heat_template.py management_subnet_gateway_ip").trim().split().last()
shared.generate_configdrive_iso(SALT_MASTER_IP, ADMIN_NETWORK_GW)
}
@@ -157,6 +157,7 @@
export ENV_NAME=${ENV_NAME}
export LAB_CONFIG_NAME=${LAB_CONFIG_NAME}
export LAB_PARAM_DEFAULTS=${LAB_PARAM_DEFAULTS}
+ export LOG_NAME=swarm_test_create_environment.log
py.test --cache-clear -vvv -s -p no:django -p no:ipdb --junit-xml=deploy_hardware.xml -k \${TEST_GROUP}
""")
}
@@ -233,11 +234,16 @@
shared.verbose_sh(script_delete_agent, true, false, true)
shared.verbose_sh(script_create_agent, true, false, true)
+ // Store access details for the Jenkins agent (foundation node) and cfg01
+ jenkins_agent_description = "ssh jenkins@${jenkins_slave_ip} # foundation node with Jenkins agent <a href=${JENKINS_URL}/computer/${JENKINS_SLAVE_NODE_NAME}>${JENKINS_SLAVE_NODE_NAME}</a><br>ssh root@${SALT_MASTER_IP} # cfg01 node<br>"
+ writeFile(file: "jenkins_agent_description.txt", text: jenkins_agent_description, encoding: "UTF-8")
+
} // withCredentials
}// stage
} // withCredentials
+
} // dir
} // node
@@ -278,6 +284,7 @@
export PYTHONIOENCODING=UTF-8
export REPOSITORY_SUITE=${MCP_VERSION}
export TEST_GROUP=test_bootstrap_salt
+ export LOG_NAME=swarm_test_bootstrap_salt.log
py.test -vvv -s -p no:django -p no:ipdb --junit-xml=${xml_report_name} -k \${TEST_GROUP}
""")
// Wait for jenkins to start and IO calm down
diff --git a/jobs/pipelines/swarm-deploy-cicd.groovy b/jobs/pipelines/swarm-deploy-cicd.groovy
index b5e1ff7..0183016 100644
--- a/jobs/pipelines/swarm-deploy-cicd.groovy
+++ b/jobs/pipelines/swarm-deploy-cicd.groovy
@@ -56,15 +56,24 @@
for (stack in "${env.STACK_INSTALL}".split(",")) {
stage("Sanity check the deployed component [${stack}]") {
shared.sanity_check_component(stack)
- }
- if (make_snapshot_stages) {
- stage("Make environment snapshot [${stack}_deployed]") {
- shared.devops_snapshot(stack)
- }
+ // If oslo_config INI file ${ENV_NAME}_salt_deployed.ini exists,
+ // then make a copy for the created snapshot to allow the system
+ // tests to revert this snapshot along with the metadata from the INI file.
+ shared.run_cmd("""\
+ if [ -f \$(pwd)/${ENV_NAME}_salt_deployed.ini ]; then
+ cp \$(pwd)/${ENV_NAME}_salt_deployed.ini \$(pwd)/${ENV_NAME}_${stack}_deployed.ini
+ fi
+ """)
}
} // for
+ if (make_snapshot_stages) {
+ stage("Make environment snapshots for [${env.STACK_INSTALL}]") {
+ shared.devops_snapshot(env.STACK_INSTALL)
+ }
+ }
+
} catch (e) {
common.printMsg("Job is failed", "purple")
shared.download_logs("deploy_drivetrain_${ENV_NAME}")
diff --git a/jobs/pipelines/swarm-deploy-platform-without-cicd.groovy b/jobs/pipelines/swarm-deploy-platform-without-cicd.groovy
index 8d3eb22..ae46b22 100644
--- a/jobs/pipelines/swarm-deploy-platform-without-cicd.groovy
+++ b/jobs/pipelines/swarm-deploy-platform-without-cicd.groovy
@@ -54,12 +54,18 @@
for (stack in "${env.STACK_INSTALL}".split(",")) {
stage("Sanity check the deployed component [${stack}]") {
shared.sanity_check_component(stack)
- }
- stage("Make environment snapshot [${stack}_deployed]") {
- shared.devops_snapshot(stack)
+ shared.run_cmd("""\
+ if [ -f \$(pwd)/${ENV_NAME}_salt_deployed.ini ]; then
+ cp \$(pwd)/${ENV_NAME}_salt_deployed.ini \$(pwd)/${ENV_NAME}_${stack}_deployed.ini
+ fi
+ """)
}
} // for
+ stage("Make environment snapshots for [${env.STACK_INSTALL}]") {
+ shared.devops_snapshot(env.STACK_INSTALL)
+ }
+
} catch (e) {
common.printMsg("Job is failed", "purple")
shared.download_logs("deploy_platform_${ENV_NAME}")
diff --git a/jobs/pipelines/swarm-deploy-platform.groovy b/jobs/pipelines/swarm-deploy-platform.groovy
index 061e555..277ed0a 100644
--- a/jobs/pipelines/swarm-deploy-platform.groovy
+++ b/jobs/pipelines/swarm-deploy-platform.groovy
@@ -56,14 +56,24 @@
for (stack in "${env.STACK_INSTALL}".split(",")) {
stage("Sanity check the deployed component [${stack}]") {
shared.sanity_check_component(stack)
- }
- if (make_snapshot_stages) {
- stage("Make environment snapshot [${stack}_deployed]") {
- shared.devops_snapshot(stack)
- }
+
+ // If oslo_config INI file ${ENV_NAME}_salt_deployed.ini exists,
+ // then make a copy for the created snapshot to allow the system
+ // tests to revert this snapshot along with the metadata from the INI file.
+ shared.run_cmd("""\
+ if [ -f \$(pwd)/${ENV_NAME}_salt_deployed.ini ]; then
+ cp \$(pwd)/${ENV_NAME}_salt_deployed.ini \$(pwd)/${ENV_NAME}_${stack}_deployed.ini
+ fi
+ """)
}
} // for
+ if (make_snapshot_stages) {
+ stage("Make environment snapshots for [${env.STACK_INSTALL}]") {
+ shared.devops_snapshot(env.STACK_INSTALL)
+ }
+ }
+
} catch (e) {
common.printMsg("Job is failed", "purple")
shared.download_logs("deploy_platform_${ENV_NAME}")
diff --git a/jobs/pipelines/swarm-run-pytest.groovy b/jobs/pipelines/swarm-run-pytest.groovy
index 1e4c849..d403861 100644
--- a/jobs/pipelines/swarm-run-pytest.groovy
+++ b/jobs/pipelines/swarm-run-pytest.groovy
@@ -73,6 +73,7 @@
export SALT_USER=\$SALTAPI_USER
export SALT_PASSWORD=\$SALTAPI_PASS
+ export LOG_NAME=swarm_run_pytest.log
py.test --junit-xml=nosetests.xml ${RUN_TEST_OPTS}
""")
diff --git a/jobs/pipelines/swarm-testrail-report.groovy b/jobs/pipelines/swarm-testrail-report.groovy
index 42be763..2d2f993 100644
--- a/jobs/pipelines/swarm-testrail-report.groovy
+++ b/jobs/pipelines/swarm-testrail-report.groovy
@@ -87,7 +87,7 @@
report_url = report_result.split("\n").each {
if (it.contains("[TestRun URL]")) {
common.printMsg("Found report URL: " + it.trim().split().last(), "blue")
- description += "\n<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
+ description += "<a href=" + it.trim().split().last() + ">${testSuiteName}</a><br>"
}
}
}
@@ -108,7 +108,7 @@
report_url = report_result.split("\n").each {
if (it.contains("[TestRun URL]")) {
common.printMsg("Found report URL: " + it.trim().split().last(), "blue")
- description += "\n<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
+ description += "<a href=" + it.trim().split().last() + ">${testSuiteName}</a><br>"
}
}
}
@@ -124,7 +124,7 @@
report_url = report_result.split("\n").each {
if (it.contains("[TestRun URL]")) {
common.printMsg("Found report URL: " + it.trim().split().last(), "blue")
- description += "\n<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
+ description += "<a href=" + it.trim().split().last() + ">${testSuiteName}</a><br>"
}
}
}
@@ -150,7 +150,7 @@
report_url = report_result.split("\n").each {
if (it.contains("[TestRun URL]")) {
common.printMsg("Found report URL: " + it.trim().split().last(), "blue")
- description += "\n<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
+ description += "<a href=" + it.trim().split().last() + ">${testSuiteName}</a><br>"
}
}
}
@@ -172,7 +172,7 @@
report_url = report_result.split("\n").each {
if (it.contains("[TestRun URL]")) {
common.printMsg("Found report URL: " + it.trim().split().last(), "blue")
- description += "\n<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
+ description += "<a href=" + it.trim().split().last() + ">${testSuiteName}</a><br>"
}
}
}
@@ -188,7 +188,7 @@
report_url = report_result.split("\n").each {
if (it.contains("[TestRun URL]")) {
common.printMsg("Found report URL: " + it.trim().split().last(), "blue")
- description += "\n<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
+ description += "<a href=" + it.trim().split().last() + ">${testSuiteName}</a><br>"
}
}
}
@@ -210,7 +210,7 @@
report_url = report_result.split("\n").each {
if (it.contains("[TestRun URL]")) {
common.printMsg("Found report URL: " + it.trim().split().last(), "blue")
- description += "\n<a href=" + it.trim().split().last() + ">${testSuiteName}</a>"
+ description += "<a href=" + it.trim().split().last() + ">${testSuiteName}</a><br>"
}
}
}
diff --git a/src/com/mirantis/system_qa/SharedPipeline.groovy b/src/com/mirantis/system_qa/SharedPipeline.groovy
index 8c438fb..d74b600 100644
--- a/src/com/mirantis/system_qa/SharedPipeline.groovy
+++ b/src/com/mirantis/system_qa/SharedPipeline.groovy
@@ -541,6 +541,7 @@
// Result will be stored in JUnit XML file deploy_${stack}.xml
try {
run_cmd("""\
+ export LOG_NAME=deploy_${stack}_test.log
py.test --junit-xml=deploy_${stack}.xml -m check_${stack}
""")
} catch (e) {
@@ -587,27 +588,28 @@
""", "cyan")
}
-def devops_snapshot(stack) {
- // Make the snapshot with name "${stack}_deployed"
+def devops_snapshot(stacks) {
+ // Make snapshots with names "${stack}_deployed" for each stack
// for all VMs in the environment.
- // If oslo_config INI file ${ENV_NAME}_salt_deployed.ini exists,
- // then make a copy for the created snapshot to allow the system
- // tests to revert this snapshot along with the metadata from the INI file.
+
run_cmd("""\
- set -ex
dos.py suspend ${ENV_NAME}
- dos.py snapshot ${ENV_NAME} ${stack}_deployed
+ """)
+
+ for (stack in "${stacks}".split(",")) {
+ run_cmd("""\
+ dos.py snapshot ${ENV_NAME} ${stack}_deployed
+ """)
+ devops_snapshot_info("${stack}_deployed")
+ }
+
+ run_cmd("""\
dos.py resume ${ENV_NAME}
sleep 20 # Wait for I/O on the host calms down
CFG01_NAME=\$(dos.py show-resources ${ENV_NAME} | grep ^cfg01 | cut -d" " -f1)
dos.py time-sync ${ENV_NAME} --skip-sync \${CFG01_NAME}
-
- if [ -f \$(pwd)/${ENV_NAME}_salt_deployed.ini ]; then
- cp \$(pwd)/${ENV_NAME}_salt_deployed.ini \$(pwd)/${ENV_NAME}_${stack}_deployed.ini
- fi
""")
- devops_snapshot_info("${stack}_deployed")
}
def get_steps_list(steps) {
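
devops_snapshot() now takes a comma-separated list of stacks (the value of STACK_INSTALL), suspends the environment once, takes one <stack>_deployed snapshot per entry, and resumes once, instead of suspending and resuming per stack. A minimal Python sketch of the same flow, assuming dos.py is executable on PATH; it is not the pipeline's own helper:

    import subprocess

    def devops_snapshot(env_name, stacks):
        """Suspend once, snapshot every stack, resume once
        (sketch of the refactored SharedPipeline.devops_snapshot)."""
        subprocess.check_call(["dos.py", "suspend", env_name])
        for stack in stacks.split(","):
            subprocess.check_call(
                ["dos.py", "snapshot", env_name, "{}_deployed".format(stack)])
        subprocess.check_call(["dos.py", "resume", env_name])
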
diff --git a/tcp_tests/logger.py b/tcp_tests/logger.py
index 2c73364..1d1c1f1 100644
--- a/tcp_tests/logger.py
+++ b/tcp_tests/logger.py
@@ -24,7 +24,8 @@
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s - %(levelname)s %(filename)s:'
'%(lineno)d -- %(message)s',
- filename=os.path.join(settings.LOGS_DIR, 'tests.log'),
+ filename=os.path.join(settings.LOGS_DIR,
+ settings.LOG_NAME),
filemode='w')
console = logging.StreamHandler()
diff --git a/tcp_tests/settings.py b/tcp_tests/settings.py
index f98981d..e8968be 100644
--- a/tcp_tests/settings.py
+++ b/tcp_tests/settings.py
@@ -24,6 +24,7 @@
LOGS_DIR = os.environ.get('LOGS_DIR', os.getcwd())
+LOG_NAME = os.environ.get('LOG_NAME', 'tests.log')
TIMESTAT_PATH_YAML = os.environ.get(
'TIMESTAT_PATH_YAML', os.path.join(
LOGS_DIR, 'timestat_{}.yaml'.format(time.strftime("%Y%m%d"))))
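
logger.py now builds the log file name from the new LOG_NAME setting, so each pipeline step that exports LOG_NAME (swarm_test_create_environment.log, swarm_test_bootstrap_salt.log, swarm_run_pytest.log, deploy_${stack}_test.log) writes to its own file under LOGS_DIR instead of sharing tests.log. A minimal Python sketch of the resulting behaviour, using the same defaults as tcp_tests/settings.py:

    import logging
    import os

    # Same defaults as tcp_tests/settings.py
    LOGS_DIR = os.environ.get('LOGS_DIR', os.getcwd())
    LOG_NAME = os.environ.get('LOG_NAME', 'tests.log')

    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s - %(levelname)s %(filename)s:'
                               '%(lineno)d -- %(message)s',
                        filename=os.path.join(LOGS_DIR, LOG_NAME),
                        filemode='w')
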
diff --git a/tcp_tests/templates/bm-cicd-pike-ovs-maas/salt-context-cookiecutter-openstack_ovs.yaml b/tcp_tests/templates/bm-cicd-pike-ovs-maas/salt-context-cookiecutter-openstack_ovs.yaml
index dc5a486..09690cf 100644
--- a/tcp_tests/templates/bm-cicd-pike-ovs-maas/salt-context-cookiecutter-openstack_ovs.yaml
+++ b/tcp_tests/templates/bm-cicd-pike-ovs-maas/salt-context-cookiecutter-openstack_ovs.yaml
@@ -928,4 +928,6 @@
hBP6Fk8iNWuOWQD+ohM/vMMnvIhk5jwlcwn+kF0ra04gi5KBFWSh/ddWMJxUtPC1
2htvlEc6zQAR6QfqXHmwhg1hP81JcpqpicQzCMhkzLoR1DC6stXdLg==
-----END RSA PRIVATE KEY-----
- octavia_public_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC2Oc8MmxBOgcG2ioijXmZ1Jil+LzPoMUyLwZujQoI3fc5Sfm45y1t22NR966G8jqnHVIKe/JaLT0W3x5bCr4rAsIYptTEu+oqW24nsrcsisZeS36apk3g71cp5Up9kf6ZBaSTFFaBfavxEo1XcaR0213vhWOE/5HpdIolDVnxvt4czXS/oiNNj+M9zOMr57IJ4SPiptKdXx4qWouGGq65JBGZQ7YNFKMtV2l1/YEHj8F1YWwNg6ZfuZvySkSv29D5zUkoxcPAPp6HPJTyQT7WRWbnM54TLgd1ggym9R83j0/VqdFXYhJDVkT6vbYgAwqXS16SsYfR7/U0/UMXmsg0z
+ octavia_public_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC2Oc8MmxBOgcG2ioijXmZ1Jil+LzPoMUyLwZujQoI3fc5Sfm45y1t22NR966G8jqnHVIKe/JaLT0W3x5bCr4rAsIYptTEu+oqW24nsrcsisZeS36apk3g71cp5Up9kf6ZBaSTFFaBfavxEo1XcaR0213vhWOE/5HpdIolDVnxvt4czXS/oiNNj+M9zOMr57IJ4SPiptKdXx4qWouGGq65JBGZQ7YNFKMtV2l1/YEHj8F1YWwNg6ZfuZvySkSv29D5zUkoxcPAPp6HPJTyQT7WRWbnM54TLgd1ggym9R83j0/VqdFXYhJDVkT6vbYgAwqXS16SsYfR7/U0/UMXmsg0z
+ cinder_backup_engine: 'ceph'
+ cinder_ceph_backup_pool_name: 'backups'
diff --git a/tcp_tests/templates/bm-cicd-queens-ovs-maas/salt-context-cookiecutter-openstack_ovs.yaml b/tcp_tests/templates/bm-cicd-queens-ovs-maas/salt-context-cookiecutter-openstack_ovs.yaml
index f0c5cda..1d269c3 100644
--- a/tcp_tests/templates/bm-cicd-queens-ovs-maas/salt-context-cookiecutter-openstack_ovs.yaml
+++ b/tcp_tests/templates/bm-cicd-queens-ovs-maas/salt-context-cookiecutter-openstack_ovs.yaml
@@ -846,3 +846,5 @@
secrets_encryption_key_id: 'F5CB2ADC36159B03'
# Used on CI only.
secrets_encryption_private_key: ''
+ cinder_backup_engine: 'ceph'
+ cinder_ceph_backup_pool_name: 'backups'
diff --git a/tcp_tests/templates/cookied-cicd-k8s-calico-sl/underlay.yaml b/tcp_tests/templates/cookied-cicd-k8s-calico-sl/underlay.yaml
index dbb578a..81d4cb8 100644
--- a/tcp_tests/templates/cookied-cicd-k8s-calico-sl/underlay.yaml
+++ b/tcp_tests/templates/cookied-cicd-k8s-calico-sl/underlay.yaml
@@ -203,7 +203,7 @@
- name: {{ HOSTNAME_CFG01 }}
role: salt_master
params:
- vcpu: {{ os_env('CFG_NODE_CPU', 3) }}
+ vcpu: {{ os_env('CFG_NODE_CPU', 4) }}
memory: {{ os_env('CFG_NODE_MEMORY', 12288) }}
boot:
- hd
diff --git a/tcp_tests/templates/cookied-cicd-pike-dvr-ceph/cookiecutter-context-dvr-ceph.yaml b/tcp_tests/templates/cookied-cicd-pike-dvr-ceph/cookiecutter-context-dvr-ceph.yaml
index db7be75..2c94f11 100644
--- a/tcp_tests/templates/cookied-cicd-pike-dvr-ceph/cookiecutter-context-dvr-ceph.yaml
+++ b/tcp_tests/templates/cookied-cicd-pike-dvr-ceph/cookiecutter-context-dvr-ceph.yaml
@@ -355,3 +355,5 @@
octavia_lb_mgmt_cidr: 192.168.1.0/24
octavia_lb_mgmt_allocation_pool_start: 192.168.1.2
octavia_lb_mgmt_allocation_pool_end: 192.168.1.200
+ cinder_backup_engine: 'ceph'
+ cinder_ceph_backup_pool_name: 'backups'
\ No newline at end of file
diff --git a/tcp_tests/templates/cookied-cicd-queens-dvr-sl/cookiecutter-context-queens-dvr-sl.yaml b/tcp_tests/templates/cookied-cicd-queens-dvr-sl/cookiecutter-context-queens-dvr-sl.yaml
index 4ac36a8..863fd81 100644
--- a/tcp_tests/templates/cookied-cicd-queens-dvr-sl/cookiecutter-context-queens-dvr-sl.yaml
+++ b/tcp_tests/templates/cookied-cicd-queens-dvr-sl/cookiecutter-context-queens-dvr-sl.yaml
@@ -250,3 +250,5 @@
openstack_dns_node02_address: 10.167.4.114
secrets_encryption_enabled: 'False'
+ cinder_backup_engine: 'ceph'
+ cinder_ceph_backup_pool_name: 'backups'
\ No newline at end of file
diff --git a/tcp_tests/templates/heat-cicd-pike-contrail41-sl/underlay--user-data-foundation.yaml b/tcp_tests/templates/heat-cicd-pike-contrail41-sl/underlay--user-data-foundation.yaml
index cb551ef..1677dcd 100644
--- a/tcp_tests/templates/heat-cicd-pike-contrail41-sl/underlay--user-data-foundation.yaml
+++ b/tcp_tests/templates/heat-cicd-pike-contrail41-sl/underlay--user-data-foundation.yaml
@@ -15,6 +15,7 @@
chpasswd:
list: |
root:r00tme
+ jenkins:qalab
expire: False
packages:
diff --git a/tcp_tests/tests/system/test_cvp_pipelines.py b/tcp_tests/tests/system/test_cvp_pipelines.py
index ce33806..001341d 100644
--- a/tcp_tests/tests/system/test_cvp_pipelines.py
+++ b/tcp_tests/tests/system/test_cvp_pipelines.py
@@ -134,7 +134,9 @@
'TEST_SET': '/var/lib/cvp-sanity/cvp_checks/tests/',
'TESTS_SETTINGS': (
"skipped_packages='python-setuptools,"
- "python-pkg-resources,xunitmerge', "
+ "python-pkg-resources,xunitmerge,python-gnocchiclient,"
+ "python-ujson,python-octaviaclient', "
+ "skipped_modules='xunitmerge,setuptools', "
"skipped_services='docker, "
"containerd'; drivetrain_version={0};{1}"
.format(settings.MCP_VERSION, ntp_skipped_nodes)),