Merge "Add testing jobs for Rocky Linux"
diff --git a/.gitignore b/.gitignore
index 78c2e67..59ed598 100644
--- a/.gitignore
+++ b/.gitignore
@@ -67,3 +67,6 @@
# Others
.stestr
tempest.log
+
+# Git
+*.orig
diff --git a/README.rst b/README.rst
index 8b5bd3c..beb0f5a 100644
--- a/README.rst
+++ b/README.rst
@@ -30,20 +30,26 @@
Installing
----------
-Clone this repository to the destination machine, and call from the repo::
+From the tempest directory, set up the tempest virtual environment for the
+Octavia tempest plugin::
- $ pip install -e .
+ $ tox -e venv-tempest -- pip3 install -e <path to octavia-tempest-plugin>
+
+For example, when using a typical devstack setup::
+
+ $ cd /opt/stack/tempest
+ $ tox -e venv-tempest -- pip3 install -e /opt/stack/octavia-tempest-plugin
Running the tests
-----------------
To run all the tests from this plugin, call from the tempest repo::
- $ tox -e all-plugin -- octavia_tempest_plugin
+ $ tox -e all -- octavia_tempest_plugin
To run a single test case, call with full path, for example::
- $ tox -e all-plugin -- octavia_tempest_plugin.tests.scenario.v2.test_traffic_ops.TrafficOperationsScenarioTest.test_basic_traffic
+ $ tox -e all -- octavia_tempest_plugin.tests.scenario.v2.test_traffic_ops.TrafficOperationsScenarioTest.test_basic_traffic
To retrieve a list of all tempest tests, run::
diff --git a/octavia_tempest_plugin/common/cert_utils.py b/octavia_tempest_plugin/common/cert_utils.py
index 753da6b..34d0d7d 100644
--- a/octavia_tempest_plugin/common/cert_utils.py
+++ b/octavia_tempest_plugin/common/cert_utils.py
@@ -21,7 +21,6 @@
from cryptography.hazmat.primitives.serialization import pkcs12
from cryptography import x509
from cryptography.x509.oid import NameOID
-import OpenSSL
def generate_ca_cert_and_key():
@@ -176,38 +175,13 @@
def generate_pkcs12_bundle(server_cert, server_key):
"""Creates a pkcs12 formated bundle.
- Note: This uses pyOpenSSL as the cryptography package does not yet
- support creating pkcs12 bundles. The currently un-released
- 2.5 version of cryptography supports reading pkcs12, but not
- creation. This method should be updated to only use
- cryptography once it supports creating pkcs12 bundles.
-
:param server_cert: A cryptography certificate (x509) object.
    :param server_key: A cryptography private key object.
:returns: A pkcs12 bundle.
"""
- # Use the PKCS12 serialization function from cryptography if it exists
- # (>=3.0), otherwise use the pyOpenSSL module.
- #
- # The PKCS12 class of the pyOpenSSL module is not compliant with FIPS.
- # It uses the SHA1 function [0] which is not allowed when generating
- # digital signatures [1]
- #
- # [0] https://github.com/pyca/pyopenssl/blob/
- # 65ca53a7a06a7c78c1749200a6b3a007e47d3214/src/OpenSSL/
- # crypto.py#L2748-L2749
- # [1] https://nvlpubs.nist.gov/nistpubs/SpecialPublications/
- # NIST.SP.800-131Ar1.pdf
- if hasattr(pkcs12, 'serialize_key_and_certificates'):
- p12 = pkcs12.serialize_key_and_certificates(
- b'', server_key, server_cert,
- cas=None, encryption_algorithm=NoEncryption())
- else:
- p12 = OpenSSL.crypto.PKCS12()
- p12.set_privatekey(
- OpenSSL.crypto.PKey.from_cryptography_key(server_key))
- p12.set_certificate(OpenSSL.crypto.X509.from_cryptography(server_cert))
- p12 = p12.export()
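+    # cryptography>=3.0 serializes PKCS12 natively: no friendly name,
+    # no CA chain and no passphrase protection on the bundle.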
+ p12 = pkcs12.serialize_key_and_certificates(
+ b'', server_key, server_cert,
+ cas=None, encryption_algorithm=NoEncryption())
return p12
diff --git a/octavia_tempest_plugin/common/constants.py b/octavia_tempest_plugin/common/constants.py
index 8ef8d94..48a83ac 100644
--- a/octavia_tempest_plugin/common/constants.py
+++ b/octavia_tempest_plugin/common/constants.py
@@ -72,6 +72,9 @@
DEFAULT_POOL_ID = 'default_pool_id'
L7_POLICIES = 'l7_policies'
ALPN_PROTOCOLS = 'alpn_protocols'
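+# HTTP Strict Transport Security (HSTS) fields, added in Octavia API 2.27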
+HSTS_MAX_AGE = 'hsts_max_age'
+HSTS_INCLUDE_SUBDOMAINS = 'hsts_include_subdomains'
+HSTS_PRELOAD = 'hsts_preload'
LB_ALGORITHM = 'lb_algorithm'
LB_ALGORITHM_ROUND_ROBIN = 'ROUND_ROBIN'
@@ -112,6 +115,7 @@
# Other constants
ACTIVE = 'ACTIVE'
+PAUSED = 'PAUSED'
PENDING_UPDATE = 'PENDING_UPDATE'
ADMIN_STATE_UP_TRUE = 'true'
ASC = 'asc'
@@ -126,12 +130,14 @@
SINGLE = 'SINGLE'
ACTIVE_STANDBY = 'ACTIVE_STANDBY'
SUPPORTED_LB_TOPOLOGIES = (SINGLE, ACTIVE_STANDBY)
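+# String value used when filtering members on the backup flag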
+BACKUP_TRUE = 'true'
# Protocols
HTTP = 'HTTP'
HTTPS = 'HTTPS'
PROXY = 'PROXY'
PROMETHEUS = 'PROMETHEUS'
+SCTP = 'SCTP'
TCP = 'TCP'
TERMINATED_HTTPS = 'TERMINATED_HTTPS'
UDP = 'UDP'
@@ -152,6 +158,7 @@
HEALTH_MONITOR_TCP = 'TCP'
HEALTH_MONITOR_HTTP = 'HTTP'
HEALTH_MONITOR_HTTPS = 'HTTPS'
+HEALTH_MONITOR_SCTP = 'SCTP'
HEALTH_MONITOR_TLS_HELLO = 'TLS-HELLO'
HEALTH_MONITOR_UDP_CONNECT = 'UDP-CONNECT'
diff --git a/octavia_tempest_plugin/services/load_balancer/v2/listener_client.py b/octavia_tempest_plugin/services/load_balancer/v2/listener_client.py
index 1ee70f7..1c8e6e5 100644
--- a/octavia_tempest_plugin/services/load_balancer/v2/listener_client.py
+++ b/octavia_tempest_plugin/services/load_balancer/v2/listener_client.py
@@ -41,7 +41,8 @@
sni_container_refs=Unset, client_authentication=Unset,
client_ca_tls_container_ref=Unset,
client_crl_container_ref=Unset, allowed_cidrs=Unset,
- alpn_protocols=Unset,
+ alpn_protocols=Unset, hsts_max_age=Unset,
+ hsts_include_subdomains=Unset, hsts_preload=Unset,
return_object_only=True):
"""Create a listener.
@@ -92,6 +93,12 @@
:param allowed_cidrs: A list of IPv4 or IPv6 CIDRs.
:param alpn_protocols: A list of ALPN protocols for TERMINATED_HTTPS
listeners.
+        :param hsts_include_subdomains: Defines whether the
+            `include_subdomains` directive is used for HSTS or not.
+        :param hsts_max_age: Enables HTTP Strict Transport Security (HSTS)
+            and sets the `max_age` directive to the given value.
+        :param hsts_preload: Defines whether the `hsts_preload` directive
+            is used for HSTS or not.
:param return_object_only: If True, the response returns the object
inside the root tag. False returns the full
response from the API.
@@ -218,7 +225,8 @@
sni_container_refs=Unset, client_authentication=Unset,
client_ca_tls_container_ref=Unset,
client_crl_container_ref=Unset, allowed_cidrs=Unset,
- alpn_protocols=Unset,
+ alpn_protocols=Unset, hsts_max_age=Unset,
+ hsts_include_subdomains=Unset, hsts_preload=Unset,
return_object_only=True):
"""Update a listener.
@@ -267,6 +275,12 @@
:param allowed_cidrs: A list of IPv4 or IPv6 CIDRs.
:param alpn_protocols: A list of ALPN protocols for TERMINATED_HTTPS
listeners.
+        :param hsts_include_subdomains: Defines whether the
+            `include_subdomains` directive is used for HSTS or not.
+        :param hsts_max_age: Enables HTTP Strict Transport Security (HSTS)
+            and sets the `max_age` directive to the given value.
+        :param hsts_preload: Defines whether the `hsts_preload` directive
+            is used for HSTS or not.
:param return_object_only: If True, the response returns the object
inside the root tag. False returns the full
response from the API.
diff --git a/octavia_tempest_plugin/tests/act_stdby_scenario/v2/test_active_standby.py b/octavia_tempest_plugin/tests/act_stdby_scenario/v2/test_active_standby.py
index 11b0cec..e3f6338 100644
--- a/octavia_tempest_plugin/tests/act_stdby_scenario/v2/test_active_standby.py
+++ b/octavia_tempest_plugin/tests/act_stdby_scenario/v2/test_active_standby.py
@@ -24,6 +24,7 @@
from tempest.lib import exceptions
from octavia_tempest_plugin.common import constants as const
+from octavia_tempest_plugin.services.load_balancer import v2
from octavia_tempest_plugin.tests import test_base
from octavia_tempest_plugin.tests import waiters
@@ -35,6 +36,7 @@
CONF.validation.run_validation,
'Active-Standby tests will not work without run_validation enabled.')
class ActiveStandbyScenarioTest(test_base.LoadBalancerBaseTestWithCompute):
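+    # Annotation for type checkers; the client itself is instantiated by
+    # the base test class during setup.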
+ mem_listener_client: v2.ListenerClient
@classmethod
def resource_setup(cls):
diff --git a/octavia_tempest_plugin/tests/api/v2/test_l7policy.py b/octavia_tempest_plugin/tests/api/v2/test_l7policy.py
index 5a19def..47b2faa 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_l7policy.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_l7policy.py
@@ -62,6 +62,11 @@
listener = cls.mem_listener_client.create_listener(**listener_kwargs)
cls.listener_id = listener[const.ID]
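+        # Register cleanup so the listener is removed even if later setup
+        # steps fail.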
+ cls.addClassResourceCleanup(
+ cls.mem_listener_client.cleanup_listener,
+ cls.listener_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
cls.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
@@ -79,6 +84,11 @@
pool = cls.mem_pool_client.create_pool(**pool_kwargs)
cls.pool_id = pool[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_pool_client.cleanup_pool,
+ cls.pool_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
cls.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
@@ -153,6 +163,11 @@
l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
+ self.addClassResourceCleanup(
+ self.mem_l7policy_client.cleanup_l7policy,
+ l7policy[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer, self.lb_id,
const.PROVISIONING_STATUS, const.ACTIVE,
@@ -610,6 +625,11 @@
l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
+ self.addClassResourceCleanup(
+ self.mem_l7policy_client.cleanup_l7policy,
+ l7policy[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer, self.lb_id,
const.PROVISIONING_STATUS, const.ACTIVE,
@@ -721,6 +741,11 @@
l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
+ self.addClassResourceCleanup(
+ self.mem_l7policy_client.cleanup_l7policy,
+ l7policy[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer, self.lb_id,
const.PROVISIONING_STATUS, const.ACTIVE,
@@ -860,6 +885,11 @@
}
l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
+ self.addClassResourceCleanup(
+ self.mem_l7policy_client.cleanup_l7policy,
+ l7policy[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer,
self.lb_id, const.PROVISIONING_STATUS,
diff --git a/octavia_tempest_plugin/tests/api/v2/test_listener.py b/octavia_tempest_plugin/tests/api/v2/test_listener.py
index 7382bf5..cd320f4 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_listener.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_listener.py
@@ -12,32 +12,205 @@
# License for the specific language governing permissions and limitations
# under the License.
+import base64
import time
from uuid import UUID
+from cryptography.hazmat.primitives import serialization
+
from dateutil import parser
+from oslo_log import log as logging
from oslo_utils import strutils
+from oslo_utils import uuidutils
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest.lib import exceptions
import testtools
+from octavia_tempest_plugin.common import barbican_client_mgr
+from octavia_tempest_plugin.common import cert_utils
from octavia_tempest_plugin.common import constants as const
from octavia_tempest_plugin.tests import test_base
from octavia_tempest_plugin.tests import waiters
CONF = config.CONF
+LOG = logging.getLogger(__name__)
class ListenerAPITest(test_base.LoadBalancerBaseTest):
"""Test the listener object API."""
@classmethod
+ def _store_secret(cls, barbican_mgr, secret):
+ new_secret_ref = barbican_mgr.store_secret(secret)
+ cls.addClassResourceCleanup(barbican_mgr.delete_secret,
+ new_secret_ref)
+
+ # Set the barbican ACL if the Octavia API version doesn't do it
+ # automatically.
+ if not cls.mem_lb_client.is_version_supported(
+ cls.api_version, '2.1'):
+ user_list = cls.os_admin.users_v3_client.list_users(
+ name=CONF.load_balancer.octavia_svc_username)
+ msg = 'Only one user named "{0}" should exist, {1} found.'.format(
+ CONF.load_balancer.octavia_svc_username,
+ len(user_list['users']))
+            assert 1 == len(user_list['users']), msg
+ barbican_mgr.add_acl(new_secret_ref, user_list['users'][0]['id'])
+ return new_secret_ref
+
+ @classmethod
+ def _generate_load_certificate(cls, barbican_mgr, ca_cert, ca_key, name):
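+        # Issue a server certificate from the test CA, bundle the cert and
+        # key as PKCS12, and store the bundle as a barbican secret.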
+ new_cert, new_key = cert_utils.generate_server_cert_and_key(
+ ca_cert, ca_key, name)
+
+ LOG.debug('%s Cert: %s', name, new_cert.public_bytes(
+ serialization.Encoding.PEM))
+ LOG.debug('%s private Key: %s', name, new_key.private_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PrivateFormat.TraditionalOpenSSL,
+ encryption_algorithm=serialization.NoEncryption()))
+ new_public_key = new_key.public_key()
+ LOG.debug('%s public Key: %s', name, new_public_key.public_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PublicFormat.SubjectPublicKeyInfo))
+
+ # Create the pkcs12 bundle
+ pkcs12 = cert_utils.generate_pkcs12_bundle(new_cert, new_key)
+ LOG.debug('%s PKCS12 bundle: %s', name, base64.b64encode(pkcs12))
+
+ new_secret_ref = cls._store_secret(barbican_mgr, pkcs12)
+
+ return new_cert, new_key, new_secret_ref
+
+ @classmethod
+ def _load_pool_pki(cls):
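+        # Build the PKI material used by the pool TLS tests: a client
+        # authentication CA and client cert, plus a member CA, a member
+        # server cert and a CRL, all stored as barbican secrets.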
+ # Create the member client authentication CA
+ cls.member_client_ca_cert, member_client_ca_key = (
+ cert_utils.generate_ca_cert_and_key())
+
+ # Create client cert and key
+ cls.member_client_cn = uuidutils.generate_uuid()
+ cls.member_client_cert, cls.member_client_key = (
+ cert_utils.generate_client_cert_and_key(
+ cls.member_client_ca_cert, member_client_ca_key,
+ cls.member_client_cn))
+
+ # Create the pkcs12 bundle
+ pkcs12 = cert_utils.generate_pkcs12_bundle(cls.member_client_cert,
+ cls.member_client_key)
+ LOG.debug('Pool client PKCS12 bundle: %s', base64.b64encode(pkcs12))
+
+ cls.pool_client_ref = cls._store_secret(cls.barbican_mgr, pkcs12)
+
+ cls.member_ca_cert, cls.member_ca_key = (
+ cert_utils.generate_ca_cert_and_key())
+
+ cert, key = cert_utils.generate_server_cert_and_key(
+ cls.member_ca_cert, cls.member_ca_key, cls.server_uuid)
+
+ cls.pool_CA_ref = cls._store_secret(
+ cls.barbican_mgr,
+ cls.member_ca_cert.public_bytes(serialization.Encoding.PEM))
+
+ cls.member_crl = cert_utils.generate_certificate_revocation_list(
+ cls.member_ca_cert, cls.member_ca_key, cert)
+
+ cls.pool_CRL_ref = cls._store_secret(
+ cls.barbican_mgr,
+ cls.member_crl.public_bytes(serialization.Encoding.PEM))
+
+ @classmethod
+ def should_apply_terminated_https(cls, protocol=None):
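+        # TERMINATED_HTTPS tests need certificates, so they only run when
+        # barbican is available or when the noop drivers are in use.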
+ if protocol and protocol != const.TERMINATED_HTTPS:
+ return False
+ return CONF.load_balancer.test_with_noop or getattr(
+ CONF.service_available, 'barbican', False)
+
+ @classmethod
def resource_setup(cls):
"""Setup resources needed by the tests."""
super(ListenerAPITest, cls).resource_setup()
+ if CONF.load_balancer.test_with_noop:
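+            # The noop drivers never dereference the secret refs, so
+            # random UUIDs can stand in for real barbican references.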
+ cls.server_secret_ref = uuidutils.generate_uuid()
+ cls.SNI1_secret_ref = uuidutils.generate_uuid()
+ cls.SNI2_secret_ref = uuidutils.generate_uuid()
+ elif getattr(CONF.service_available, 'barbican', False):
+ # Create a CA self-signed cert and key
+ cls.ca_cert, ca_key = cert_utils.generate_ca_cert_and_key()
+
+ LOG.debug('CA Cert: %s', cls.ca_cert.public_bytes(
+ serialization.Encoding.PEM))
+ LOG.debug('CA private Key: %s', ca_key.private_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PrivateFormat.TraditionalOpenSSL,
+ encryption_algorithm=serialization.NoEncryption()))
+ LOG.debug('CA public Key: %s', ca_key.public_key().public_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PublicFormat.SubjectPublicKeyInfo))
+
+ # Load the secret into the barbican service under the
+ # os_roles_lb_member tenant
+ cls.barbican_mgr = barbican_client_mgr.BarbicanClientManager(
+ cls.os_roles_lb_member)
+
+ # Create a server cert and key
+ # This will be used as the "default certificate" in SNI tests.
+ cls.server_uuid = uuidutils.generate_uuid()
+ LOG.debug('Server (default) UUID: %s', cls.server_uuid)
+
+ server_cert, server_key, cls.server_secret_ref = (
+ cls._generate_load_certificate(cls.barbican_mgr, cls.ca_cert,
+ ca_key, cls.server_uuid))
+
+ # Create the SNI1 cert and key
+ cls.SNI1_uuid = uuidutils.generate_uuid()
+ LOG.debug('SNI1 UUID: %s', cls.SNI1_uuid)
+
+ SNI1_cert, SNI1_key, cls.SNI1_secret_ref = (
+ cls._generate_load_certificate(cls.barbican_mgr, cls.ca_cert,
+ ca_key, cls.SNI1_uuid))
+
+ # Create the SNI2 cert and key
+ cls.SNI2_uuid = uuidutils.generate_uuid()
+ LOG.debug('SNI2 UUID: %s', cls.SNI2_uuid)
+
+ SNI2_cert, SNI2_key, cls.SNI2_secret_ref = (
+ cls._generate_load_certificate(cls.barbican_mgr, cls.ca_cert,
+ ca_key, cls.SNI2_uuid))
+
+ # Create the client authentication CA
+ cls.client_ca_cert, client_ca_key = (
+ cert_utils.generate_ca_cert_and_key())
+
+ cls.client_ca_cert_ref = cls._store_secret(
+ cls.barbican_mgr,
+ cls.client_ca_cert.public_bytes(serialization.Encoding.PEM))
+
+ # Create client cert and key
+ cls.client_cn = uuidutils.generate_uuid()
+ cls.client_cert, cls.client_key = (
+ cert_utils.generate_client_cert_and_key(
+ cls.client_ca_cert, client_ca_key, cls.client_cn))
+
+ # Create revoked client cert and key
+ cls.revoked_client_cn = uuidutils.generate_uuid()
+ cls.revoked_client_cert, cls.revoked_client_key = (
+ cert_utils.generate_client_cert_and_key(
+ cls.client_ca_cert, client_ca_key, cls.revoked_client_cn))
+
+ # Create certificate revocation list and revoke cert
+ cls.client_crl = cert_utils.generate_certificate_revocation_list(
+ cls.client_ca_cert, client_ca_key, cls.revoked_client_cert)
+
+ cls.client_crl_ref = cls._store_secret(
+ cls.barbican_mgr,
+ cls.client_crl.public_bytes(serialization.Encoding.PEM))
+
+ cls._load_pool_pki()
+
lb_name = data_utils.rand_name("lb_member_lb1_listener")
lb_kwargs = {const.PROVIDER: CONF.load_balancer.provider,
const.NAME: lb_name}
@@ -91,10 +264,26 @@
'on Octavia API version 2.25 or newer.')
self._test_listener_create(const.PROMETHEUS, 8090)
+ @decorators.idempotent_id('df9861c5-4a2a-4122-8d8f-5556156e343e')
+ @testtools.skipUnless(
+ CONF.loadbalancer_feature_enabled.terminated_tls_enabled,
+ '[loadbalancer-feature-enabled] "terminated_tls_enabled" is '
+ 'False in the tempest configuration. TLS tests will be skipped.')
+ def test_terminated_https_listener_create(self):
+ if not self.should_apply_terminated_https():
+ raise self.skipException(
+ f'Listener API tests with {const.TERMINATED_HTTPS} protocol'
+                ' require either the barbican service or running in noop.')
+ self._test_listener_create(const.TERMINATED_HTTPS, 8095)
+
@decorators.idempotent_id('7b53f336-47bc-45ae-bbd7-4342ef0673fc')
def test_udp_listener_create(self):
self._test_listener_create(const.UDP, 8003)
+ @decorators.idempotent_id('d6d36c32-27ff-4977-9d21-fd71a14e3b20')
+ def test_sctp_listener_create(self):
+ self._test_listener_create(const.SCTP, 8004)
+
def _test_listener_create(self, protocol, protocol_port):
"""Tests listener create and basic show APIs.
@@ -104,8 +293,12 @@
* Show listener details.
* Validate the show reflects the requested values.
"""
+ self._validate_listener_protocol(protocol)
+
listener_name = data_utils.rand_name("lb_member_listener1-create")
listener_description = data_utils.arbitrary_string(size=255)
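+        # HSTS options were introduced in API 2.27 and only apply to
+        # TERMINATED_HTTPS listeners.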
+ hsts_supported = self.mem_listener_client.is_version_supported(
+ self.api_version, '2.27') and protocol == const.TERMINATED_HTTPS
listener_kwargs = {
const.NAME: listener_name,
@@ -119,10 +312,6 @@
# but this will allow us to test that the field isn't mandatory,
# as well as not conflate pool failures with listener test failures
# const.DEFAULT_POOL_ID: self.pool_id,
-
- # TODO(rm_work): need to add TLS related stuff
- # const.DEFAULT_TLS_CONTAINER_REF: '',
- # const.SNI_CONTAINER_REFS: [],
}
if protocol == const.HTTP:
listener_kwargs[const.INSERT_HEADERS] = {
@@ -130,6 +319,15 @@
const.X_FORWARDED_PORT: "true",
const.X_FORWARDED_PROTO: "true",
}
+
+ # Add terminated_https args
+ if self.should_apply_terminated_https(protocol=protocol):
+ listener_kwargs.update({
+ const.DEFAULT_TLS_CONTAINER_REF: self.server_secret_ref,
+ const.SNI_CONTAINER_REFS: [self.SNI1_secret_ref,
+ self.SNI2_secret_ref],
+ })
+
if self.mem_listener_client.is_version_supported(
self.api_version, '2.1'):
listener_kwargs.update({
@@ -155,9 +353,13 @@
exceptions.BadRequest,
self.mem_listener_client.create_listener,
**listener_kwargs)
-
listener_kwargs.update({const.ALLOWED_CIDRS: self.allowed_cidrs})
+ if hsts_supported:
+ listener_kwargs[const.HSTS_PRELOAD] = True
+ listener_kwargs[const.HSTS_MAX_AGE] = 10000
+ listener_kwargs[const.HSTS_INCLUDE_SUBDOMAINS] = True
+
# Test that a user without the loadbalancer role cannot
# create a listener.
expected_allowed = []
@@ -179,6 +381,11 @@
listener = self.mem_listener_client.create_listener(**listener_kwargs)
+ self.addCleanup(
+ self.mem_listener_client.cleanup_listener,
+ listener[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer, self.lb_id,
const.PROVISIONING_STATUS, const.ACTIVE,
@@ -210,6 +417,11 @@
equal_items.append(const.TIMEOUT_MEMBER_DATA)
equal_items.append(const.TIMEOUT_TCP_INSPECT)
+ if hsts_supported:
+ equal_items.append(const.HSTS_PRELOAD)
+ equal_items.append(const.HSTS_MAX_AGE)
+ equal_items.append(const.HSTS_INCLUDE_SUBDOMAINS)
+
for item in equal_items:
self.assertEqual(listener_kwargs[item], listener[item])
@@ -231,6 +443,14 @@
self.assertTrue(strutils.bool_from_string(
insert_headers[const.X_FORWARDED_PROTO]))
+ # Add terminated_https args
+ if self.should_apply_terminated_https(protocol=protocol):
+ self.assertEqual(self.server_secret_ref,
+ listener[const.DEFAULT_TLS_CONTAINER_REF])
+ self.assertEqual(sorted([self.SNI1_secret_ref,
+ self.SNI2_secret_ref]),
+ sorted(listener[const.SNI_CONTAINER_REFS]))
+
if self.mem_listener_client.is_version_supported(
self.api_version, '2.5'):
self.assertCountEqual(listener_kwargs[const.TAGS],
@@ -241,27 +461,60 @@
self.assertEqual(self.allowed_cidrs, listener[const.ALLOWED_CIDRS])
@decorators.idempotent_id('cceac303-4db5-4d5a-9f6e-ff33780a5f29')
- def test_http_udp_tcp_listener_create_on_same_port(self):
+ def test_http_udp_sctp_tcp_listener_create_on_same_port(self):
self._test_listener_create_on_same_port(const.HTTP, const.UDP,
+ const.SCTP,
const.TCP, 8010)
@decorators.idempotent_id('930338b8-3029-48a6-89b2-8b062060fe61')
- def test_http_udp_https_listener_create_on_same_port(self):
+ def test_http_udp_sctp_https_listener_create_on_same_port(self):
self._test_listener_create_on_same_port(const.HTTP, const.UDP,
+ const.SCTP,
const.HTTPS, 8011)
@decorators.idempotent_id('01a21892-008a-4327-b4fd-fbf194ecb1a5')
- def test_tcp_udp_http_listener_create_on_same_port(self):
+ def test_tcp_udp_sctp_http_listener_create_on_same_port(self):
self._test_listener_create_on_same_port(const.TCP, const.UDP,
+ const.SCTP,
const.HTTP, 8012)
@decorators.idempotent_id('5da764a4-c03a-46ed-848b-98b9d9fa9089')
- def test_tcp_udp_https_listener_create_on_same_port(self):
+ def test_tcp_udp_sctp_https_listener_create_on_same_port(self):
self._test_listener_create_on_same_port(const.TCP, const.UDP,
+ const.SCTP,
const.HTTPS, 8013)
+ @decorators.idempotent_id('128dabd0-3a9b-4c11-9ef5-8d189a290f17')
+ @testtools.skipUnless(
+ CONF.loadbalancer_feature_enabled.terminated_tls_enabled,
+ '[loadbalancer-feature-enabled] "terminated_tls_enabled" is '
+ 'False in the tempest configuration. TLS tests will be skipped.')
+ def test_http_udp_sctp_terminated_https_listener_create_on_same_port(self):
+ if not self.should_apply_terminated_https():
+ raise self.skipException(
+ f'Listener API tests with {const.TERMINATED_HTTPS} protocol'
+                ' require either the barbican service or running in noop.')
+ self._test_listener_create_on_same_port(const.HTTP, const.UDP,
+ const.SCTP,
+ const.TERMINATED_HTTPS, 8014)
+
+ @decorators.idempotent_id('21da2598-c79e-4548-8fe0-b47749027010')
+ @testtools.skipUnless(
+ CONF.loadbalancer_feature_enabled.terminated_tls_enabled,
+ '[loadbalancer-feature-enabled] "terminated_tls_enabled" is '
+ 'False in the tempest configuration. TLS tests will be skipped.')
+ def test_tcp_udp_sctp_terminated_https_listener_create_on_same_port(self):
+ if not self.should_apply_terminated_https():
+ raise self.skipException(
+ f'Listener API tests with {const.TERMINATED_HTTPS} protocol'
+                ' require either the barbican service or running in noop.')
+ self._test_listener_create_on_same_port(const.TCP, const.UDP,
+ const.SCTP,
+ const.TERMINATED_HTTPS, 8015)
+
def _test_listener_create_on_same_port(self, protocol1, protocol2,
- protocol3, protocol_port):
+ protocol3, protocol4,
+ protocol_port):
"""Tests listener creation on same port number.
* Create a first listener.
@@ -269,10 +522,25 @@
protocol.
* Create a second listener with the same parameters and ensure that
an error is triggered.
- * Create a third listener with another protocol over TCP, and ensure
+ * Create a third listener on an existing port, but with a different
+ protocol.
+ * Create a fourth listener with another protocol over TCP, and ensure
that it fails.
"""
+ skip_protocol1 = (
+ not self._validate_listener_protocol(protocol1,
+ raise_if_unsupported=False))
+ skip_protocol2 = (
+ not self._validate_listener_protocol(protocol2,
+ raise_if_unsupported=False))
+ skip_protocol3 = (
+ not self._validate_listener_protocol(protocol3,
+ raise_if_unsupported=False))
+ skip_protocol4 = (
+ not self._validate_listener_protocol(protocol4,
+ raise_if_unsupported=False))
+
# Using listeners on the same port for TCP and UDP was not supported
# before Train. Use 2.11 API version as reference to detect previous
# releases and skip the test.
@@ -282,92 +550,139 @@
'is only available on Octavia API '
'version 2.11 or newer.')
- listener_name = data_utils.rand_name("lb_member_listener1-create")
+ if not skip_protocol1:
+ listener_name = data_utils.rand_name("lb_member_listener1-create")
- listener_kwargs = {
- const.NAME: listener_name,
- const.ADMIN_STATE_UP: True,
- const.PROTOCOL: protocol1,
- const.PROTOCOL_PORT: protocol_port,
- const.LOADBALANCER_ID: self.lb_id,
- const.CONNECTION_LIMIT: 200
- }
+ listener_kwargs = {
+ const.NAME: listener_name,
+ const.ADMIN_STATE_UP: True,
+ const.PROTOCOL: protocol1,
+ const.PROTOCOL_PORT: protocol_port,
+ const.LOADBALANCER_ID: self.lb_id,
+ const.CONNECTION_LIMIT: 200
+ }
- try:
- self.mem_listener_client.create_listener(**listener_kwargs)
- except exceptions.BadRequest as e:
- faultstring = e.resp_body.get('faultstring', '')
- if ("Invalid input for field/attribute protocol." in faultstring
- and "Value should be one of:" in faultstring):
- raise self.skipException("Skipping unsupported protocol")
- raise e
+ try:
+ self.mem_listener_client.create_listener(**listener_kwargs)
+ except exceptions.BadRequest as e:
+ fs = e.resp_body.get('faultstring', '')
+ if ("Invalid input for field/attribute protocol." in fs
+ and "Value should be one of:" in fs):
+ LOG.info("Skipping unsupported protocol: {}".format(
+ listener_kwargs[const.PROTOCOL]))
+ else:
+ raise e
+ else:
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
- waiters.wait_for_status(
- self.mem_lb_client.show_loadbalancer, self.lb_id,
- const.PROVISIONING_STATUS, const.ACTIVE,
- CONF.load_balancer.build_interval,
- CONF.load_balancer.build_timeout)
+ if not skip_protocol2:
+ # Create a listener on the same port, but with a different protocol
+ listener2_name = data_utils.rand_name("lb_member_listener2-create")
- # Create a listener on the same port, but with a different protocol
- listener2_name = data_utils.rand_name("lb_member_listener2-create")
+ listener2_kwargs = {
+ const.NAME: listener2_name,
+ const.ADMIN_STATE_UP: True,
+ const.PROTOCOL: protocol2,
+ const.PROTOCOL_PORT: protocol_port,
+ const.LOADBALANCER_ID: self.lb_id,
+ const.CONNECTION_LIMIT: 200,
+ }
- listener2_kwargs = {
- const.NAME: listener2_name,
- const.ADMIN_STATE_UP: True,
- const.PROTOCOL: protocol2,
- const.PROTOCOL_PORT: protocol_port,
- const.LOADBALANCER_ID: self.lb_id,
- const.CONNECTION_LIMIT: 200,
- }
+ try:
+ self.mem_listener_client.create_listener(**listener2_kwargs)
+ except exceptions.BadRequest as e:
+ fs = e.resp_body.get('faultstring', '')
+ if ("Invalid input for field/attribute protocol." in fs
+ and "Value should be one of:" in fs):
+ LOG.info("Skipping unsupported protocol: {}".format(
+                        listener2_kwargs[const.PROTOCOL]))
+ else:
+ raise e
+ else:
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
- try:
- self.mem_listener_client.create_listener(**listener2_kwargs)
- except exceptions.BadRequest as e:
- faultstring = e.resp_body.get('faultstring', '')
- if ("Invalid input for field/attribute protocol." in faultstring
- and "Value should be one of:" in faultstring):
- raise self.skipException("Skipping unsupported protocol")
- raise e
+ if not skip_protocol1:
+ # Create a listener on the same port, with an already used protocol
+ listener3_name = data_utils.rand_name("lb_member_listener3-create")
- waiters.wait_for_status(
- self.mem_lb_client.show_loadbalancer, self.lb_id,
- const.PROVISIONING_STATUS, const.ACTIVE,
- CONF.load_balancer.build_interval,
- CONF.load_balancer.build_timeout)
+ listener3_kwargs = {
+ const.NAME: listener3_name,
+ const.ADMIN_STATE_UP: True,
+ const.PROTOCOL: protocol1,
+ const.PROTOCOL_PORT: protocol_port,
+ const.LOADBALANCER_ID: self.lb_id,
+ const.CONNECTION_LIMIT: 200,
+ }
- # Create a listener on the same port, with an already used protocol
- listener3_name = data_utils.rand_name("lb_member_listener3-create")
+ self.assertRaises(
+ exceptions.Conflict,
+ self.mem_listener_client.create_listener,
+ **listener3_kwargs)
- listener3_kwargs = {
- const.NAME: listener3_name,
- const.ADMIN_STATE_UP: True,
- const.PROTOCOL: protocol1,
- const.PROTOCOL_PORT: protocol_port,
- const.LOADBALANCER_ID: self.lb_id,
- const.CONNECTION_LIMIT: 200,
- }
+ if not skip_protocol3:
+ # Create a listener on the same port, with a different protocol
+ listener4_name = data_utils.rand_name("lb_member_listener4-create")
- self.assertRaises(
- exceptions.Conflict,
- self.mem_listener_client.create_listener,
- **listener3_kwargs)
+ listener4_kwargs = {
+ const.NAME: listener4_name,
+ const.ADMIN_STATE_UP: True,
+ const.PROTOCOL: protocol3,
+ const.PROTOCOL_PORT: protocol_port,
+ const.LOADBALANCER_ID: self.lb_id,
+ const.CONNECTION_LIMIT: 200,
+ }
- # Create a listener on the same port, with another protocol over TCP
- listener4_name = data_utils.rand_name("lb_member_listener4-create")
+ try:
+ self.mem_listener_client.create_listener(**listener4_kwargs)
+ except exceptions.BadRequest as e:
+ fs = e.resp_body.get('faultstring', '')
+ if ("Invalid input for field/attribute protocol." in fs
+ and "Value should be one of:" in fs):
+ LOG.info("Skipping unsupported protocol: {}".format(
+                        listener4_kwargs[const.PROTOCOL]))
+ else:
+ raise e
+ else:
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
- listener4_kwargs = {
- const.NAME: listener4_name,
- const.ADMIN_STATE_UP: True,
- const.PROTOCOL: protocol3,
- const.PROTOCOL_PORT: protocol_port,
- const.LOADBALANCER_ID: self.lb_id,
- const.CONNECTION_LIMIT: 200,
- }
+ if not skip_protocol4:
+ # Create a listener on the same port, with another protocol over
+ # TCP
+ listener5_name = data_utils.rand_name("lb_member_listener5-create")
- self.assertRaises(
- exceptions.Conflict,
- self.mem_listener_client.create_listener,
- **listener4_kwargs)
+ listener5_kwargs = {
+ const.NAME: listener5_name,
+ const.ADMIN_STATE_UP: True,
+ const.PROTOCOL: protocol4,
+ const.PROTOCOL_PORT: protocol_port,
+ const.LOADBALANCER_ID: self.lb_id,
+ const.CONNECTION_LIMIT: 200,
+ }
+
+ # Add terminated_https args
+ if self.should_apply_terminated_https(protocol=protocol4):
+ listener5_kwargs.update({
+ const.DEFAULT_TLS_CONTAINER_REF: self.server_secret_ref,
+ const.SNI_CONTAINER_REFS: [self.SNI1_secret_ref,
+ self.SNI2_secret_ref],
+ })
+
+ self.assertRaises(
+ exceptions.Conflict,
+ self.mem_listener_client.create_listener,
+ **listener5_kwargs)
@decorators.idempotent_id('78ba6eb0-178c-477e-9156-b6775ca7b271')
def test_http_listener_list(self):
@@ -396,6 +711,22 @@
def test_udp_listener_list(self):
self._test_listener_list(const.UDP, 8040)
+ @decorators.idempotent_id('0abc3998-aacd-4edd-88f5-c5c35557646f')
+ def test_sctp_listener_list(self):
+ self._test_listener_list(const.SCTP, 8041)
+
+ @decorators.idempotent_id('aed69f58-fe69-401d-bf07-37b0d6d8437f')
+ @testtools.skipUnless(
+ CONF.loadbalancer_feature_enabled.terminated_tls_enabled,
+ '[loadbalancer-feature-enabled] "terminated_tls_enabled" is '
+ 'False in the tempest configuration. TLS tests will be skipped.')
+ def test_terminated_https_listener_list(self):
+ if not self.should_apply_terminated_https():
+ raise self.skipException(
+ f'Listener API tests with {const.TERMINATED_HTTPS} protocol'
+                ' require either the barbican service or running in noop.')
+ self._test_listener_list(const.TERMINATED_HTTPS, 8042)
+
def _test_listener_list(self, protocol, protocol_port_base):
"""Tests listener list API and field filtering.
@@ -413,6 +744,8 @@
# IDs of listeners created in the test
test_ids = []
+ self._validate_listener_protocol(protocol)
+
lb_name = data_utils.rand_name("lb_member_lb2_listener-list")
lb = self.mem_lb_client.create_loadbalancer(
name=lb_name, provider=CONF.load_balancer.provider,
@@ -445,6 +778,14 @@
"Marketing", "Creativity"]
listener1_kwargs.update({const.TAGS: listener1_tags})
+ # Add terminated_https args
+ if self.should_apply_terminated_https(protocol=protocol):
+ listener1_kwargs.update({
+ const.DEFAULT_TLS_CONTAINER_REF: self.server_secret_ref,
+ const.SNI_CONTAINER_REFS: [self.SNI1_secret_ref,
+ self.SNI2_secret_ref],
+ })
+
listener1 = self.mem_listener_client.create_listener(
**listener1_kwargs)
self.addCleanup(
@@ -484,6 +825,14 @@
"Soft_skills", "Creativity"]
listener2_kwargs.update({const.TAGS: listener2_tags})
+ # Add terminated_https args
+ if self.should_apply_terminated_https(protocol=protocol):
+ listener2_kwargs.update({
+ const.DEFAULT_TLS_CONTAINER_REF: self.server_secret_ref,
+ const.SNI_CONTAINER_REFS: [self.SNI1_secret_ref,
+ self.SNI2_secret_ref],
+ })
+
listener2 = self.mem_listener_client.create_listener(
**listener2_kwargs)
self.addCleanup(
@@ -523,6 +872,14 @@
"Communication", "Creativity"]
listener3_kwargs.update({const.TAGS: listener3_tags})
+ # Add terminated_https args
+ if self.should_apply_terminated_https(protocol=protocol):
+ listener3_kwargs.update({
+ const.DEFAULT_TLS_CONTAINER_REF: self.server_secret_ref,
+ const.SNI_CONTAINER_REFS: [self.SNI1_secret_ref,
+ self.SNI2_secret_ref],
+ })
+
listener3 = self.mem_listener_client.create_listener(
**listener3_kwargs)
self.addCleanup(
@@ -664,6 +1021,11 @@
if self.mem_listener_client.is_version_supported(
self.api_version, '2.12'):
show_listener_response_fields.append('allowed_cidrs')
+ if self.mem_listener_client.is_version_supported(
+ self.api_version, '2.27'):
+ show_listener_response_fields.append(const.HSTS_PRELOAD)
+ show_listener_response_fields.append(const.HSTS_MAX_AGE)
+ show_listener_response_fields.append(const.HSTS_INCLUDE_SUBDOMAINS)
for field in show_listener_response_fields:
if field in (const.DEFAULT_POOL_ID, const.L7_POLICIES):
continue
@@ -768,6 +1130,22 @@
def test_udp_listener_show(self):
self._test_listener_show(const.UDP, 8053)
+ @decorators.idempotent_id('10992529-1d0a-47a3-855c-3dbcd868db4e')
+ def test_sctp_listener_show(self):
+ self._test_listener_show(const.SCTP, 8054)
+
+ @decorators.idempotent_id('2c2e7146-0efc-44b6-8401-f1c69c2422fe')
+ @testtools.skipUnless(
+ CONF.loadbalancer_feature_enabled.terminated_tls_enabled,
+ '[loadbalancer-feature-enabled] "terminated_tls_enabled" is '
+ 'False in the tempest configuration. TLS tests will be skipped.')
+ def test_terminated_https_listener_show(self):
+ if not self.should_apply_terminated_https():
+ raise self.skipException(
+ f'Listener API tests with {const.TERMINATED_HTTPS} protocol'
+                ' require either the barbican service or running in noop.')
+ self._test_listener_show(const.TERMINATED_HTTPS, 8055)
+
def _test_listener_show(self, protocol, protocol_port):
"""Tests listener show API.
@@ -776,8 +1154,12 @@
* Validate the show reflects the requested values.
* Validates that other accounts cannot see the listener.
"""
+ self._validate_listener_protocol(protocol)
+
listener_name = data_utils.rand_name("lb_member_listener1-show")
listener_description = data_utils.arbitrary_string(size=255)
+ hsts_supported = self.mem_listener_client.is_version_supported(
+ self.api_version, '2.27') and protocol == const.TERMINATED_HTTPS
listener_kwargs = {
const.NAME: listener_name,
@@ -787,10 +1169,7 @@
const.PROTOCOL_PORT: protocol_port,
const.LOADBALANCER_ID: self.lb_id,
const.CONNECTION_LIMIT: 200,
- # TODO(rm_work): need to finish the rest of this stuff
# const.DEFAULT_POOL_ID: '',
- # const.DEFAULT_TLS_CONTAINER_REF: '',
- # const.SNI_CONTAINER_REFS: [],
}
if protocol == const.HTTP:
listener_kwargs[const.INSERT_HEADERS] = {
@@ -799,6 +1178,19 @@
const.X_FORWARDED_PROTO: "true",
}
+ # Add terminated_https args
+ if self.should_apply_terminated_https(protocol=protocol):
+ listener_kwargs.update({
+ const.DEFAULT_TLS_CONTAINER_REF: self.server_secret_ref,
+ const.SNI_CONTAINER_REFS: [self.SNI1_secret_ref,
+ self.SNI2_secret_ref],
+ })
+
+ if hsts_supported:
+ listener_kwargs[const.HSTS_PRELOAD] = True
+ listener_kwargs[const.HSTS_MAX_AGE] = 10000
+ listener_kwargs[const.HSTS_INCLUDE_SUBDOMAINS] = True
+
if self.mem_listener_client.is_version_supported(
self.api_version, '2.1'):
listener_kwargs.update({
@@ -821,6 +1213,11 @@
listener = self.mem_listener_client.create_listener(**listener_kwargs)
+ self.addCleanup(
+ self.mem_listener_client.cleanup_listener,
+ listener[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer, self.lb_id,
const.PROVISIONING_STATUS, const.ACTIVE,
@@ -868,6 +1265,14 @@
self.assertTrue(strutils.bool_from_string(
insert_headers[const.X_FORWARDED_PROTO]))
+ # Add terminated_https args
+ if self.should_apply_terminated_https(protocol=protocol):
+ self.assertEqual(self.server_secret_ref,
+ listener[const.DEFAULT_TLS_CONTAINER_REF])
+ self.assertEqual(sorted([self.SNI1_secret_ref,
+ self.SNI2_secret_ref]),
+ sorted(listener[const.SNI_CONTAINER_REFS]))
+
parser.parse(listener[const.CREATED_AT])
parser.parse(listener[const.UPDATED_AT])
UUID(listener[const.ID])
@@ -881,6 +1286,11 @@
self.api_version, '2.12'):
self.assertEqual(self.allowed_cidrs, listener[const.ALLOWED_CIDRS])
+ if hsts_supported:
+ self.assertTrue(listener[const.HSTS_PRELOAD])
+ self.assertEqual(10000, listener[const.HSTS_MAX_AGE])
+ self.assertTrue(listener[const.HSTS_INCLUDE_SUBDOMAINS])
+
# Test that the appropriate users can see or not see the listener
# based on the API RBAC.
expected_allowed = []
@@ -927,6 +1337,22 @@
def test_udp_listener_update(self):
self._test_listener_update(const.UDP, 8063)
+ @decorators.idempotent_id('c590b485-4e08-4e49-b384-2282b3f6f1b9')
+ def test_sctp_listener_update(self):
+ self._test_listener_update(const.SCTP, 8064)
+
+ @decorators.idempotent_id('2ae08e10-fbf8-46d8-a073-15f90454d718')
+ @testtools.skipUnless(
+ CONF.loadbalancer_feature_enabled.terminated_tls_enabled,
+ '[loadbalancer-feature-enabled] "terminated_tls_enabled" is '
+ 'False in the tempest configuration. TLS tests will be skipped.')
+ def test_terminated_https_listener_update(self):
+ if not self.should_apply_terminated_https():
+ raise self.skipException(
+ f'Listener API tests with {const.TERMINATED_HTTPS} protocol'
+                ' require either the barbican service or running in noop.')
+ self._test_listener_update(const.TERMINATED_HTTPS, 8065)
+
def _test_listener_update(self, protocol, protocol_port):
"""Tests listener update and show APIs.
@@ -938,8 +1364,12 @@
* Show listener details.
* Validate the show reflects the updated values.
"""
+ self._validate_listener_protocol(protocol)
+
listener_name = data_utils.rand_name("lb_member_listener1-update")
listener_description = data_utils.arbitrary_string(size=255)
+ hsts_supported = self.mem_listener_client.is_version_supported(
+ self.api_version, '2.27') and protocol == const.TERMINATED_HTTPS
listener_kwargs = {
const.NAME: listener_name,
@@ -949,10 +1379,7 @@
const.PROTOCOL_PORT: protocol_port,
const.LOADBALANCER_ID: self.lb_id,
const.CONNECTION_LIMIT: 200,
- # TODO(rm_work): need to finish the rest of this stuff
# const.DEFAULT_POOL_ID: '',
- # const.DEFAULT_TLS_CONTAINER_REF: '',
- # const.SNI_CONTAINER_REFS: [],
}
if protocol == const.HTTP:
listener_kwargs[const.INSERT_HEADERS] = {
@@ -961,6 +1388,14 @@
const.X_FORWARDED_PROTO: "true"
}
+ # Add terminated_https args
+ if self.should_apply_terminated_https(protocol=protocol):
+ listener_kwargs.update({
+ const.DEFAULT_TLS_CONTAINER_REF: self.server_secret_ref,
+ const.SNI_CONTAINER_REFS: [self.SNI1_secret_ref,
+ self.SNI2_secret_ref],
+ })
+
if self.mem_listener_client.is_version_supported(
self.api_version, '2.1'):
listener_kwargs.update({
@@ -983,6 +1418,11 @@
listener = self.mem_listener_client.create_listener(**listener_kwargs)
+ self.addCleanup(
+ self.mem_listener_client.cleanup_listener,
+ listener[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer, self.lb_id,
const.PROVISIONING_STATUS, const.ACTIVE,
@@ -1014,6 +1454,13 @@
insert_headers[const.X_FORWARDED_PORT]))
self.assertTrue(strutils.bool_from_string(
insert_headers[const.X_FORWARDED_PROTO]))
+ # Add terminated_https args
+ if self.should_apply_terminated_https(protocol=protocol):
+ self.assertEqual(self.server_secret_ref,
+ listener[const.DEFAULT_TLS_CONTAINER_REF])
+ self.assertEqual(sorted([self.SNI1_secret_ref,
+ self.SNI2_secret_ref]),
+ sorted(listener[const.SNI_CONTAINER_REFS]))
if self.mem_listener_client.is_version_supported(
self.api_version, '2.1'):
self.assertEqual(1000, listener[const.TIMEOUT_CLIENT_DATA])
@@ -1060,8 +1507,6 @@
const.CONNECTION_LIMIT: 400,
# TODO(rm_work): need to finish the rest of this stuff
# const.DEFAULT_POOL_ID: '',
- # const.DEFAULT_TLS_CONTAINER_REF: '',
- # const.SNI_CONTAINER_REFS: [],
}
if protocol == const.HTTP:
listener_update_kwargs[const.INSERT_HEADERS] = {
@@ -1069,6 +1514,13 @@
const.X_FORWARDED_PORT: "false",
const.X_FORWARDED_PROTO: "false"
}
+ # Add terminated_https args
+ if self.should_apply_terminated_https(protocol=protocol):
+ listener_update_kwargs.update({
+ const.DEFAULT_TLS_CONTAINER_REF: self.SNI2_secret_ref,
+ const.SNI_CONTAINER_REFS: [self.SNI1_secret_ref,
+ self.server_secret_ref],
+ })
if self.mem_listener_client.is_version_supported(
self.api_version, '2.1'):
listener_update_kwargs.update({
@@ -1100,6 +1552,11 @@
new_cidrs = ['2001:db8::/64']
listener_update_kwargs.update({const.ALLOWED_CIDRS: new_cidrs})
+ if hsts_supported:
+ listener_update_kwargs[const.HSTS_PRELOAD] = False
+ listener_update_kwargs[const.HSTS_MAX_AGE] = 0
+ listener_update_kwargs[const.HSTS_INCLUDE_SUBDOMAINS] = False
+
listener = self.mem_listener_client.update_listener(
listener[const.ID], **listener_update_kwargs)
@@ -1139,6 +1596,13 @@
insert_headers[const.X_FORWARDED_PORT]))
self.assertFalse(strutils.bool_from_string(
insert_headers[const.X_FORWARDED_PROTO]))
+ # Add terminated_https args
+ if self.should_apply_terminated_https(protocol=protocol):
+ self.assertEqual(self.SNI2_secret_ref,
+ listener[const.DEFAULT_TLS_CONTAINER_REF])
+ self.assertEqual(sorted([self.SNI1_secret_ref,
+ self.server_secret_ref]),
+ sorted(listener[const.SNI_CONTAINER_REFS]))
if self.mem_listener_client.is_version_supported(
self.api_version, '2.1'):
self.assertEqual(2000, listener[const.TIMEOUT_CLIENT_DATA])
@@ -1158,6 +1622,11 @@
expected_cidrs = ['2001:db8::/64']
self.assertEqual(expected_cidrs, listener[const.ALLOWED_CIDRS])
+ if hsts_supported:
+ self.assertFalse(listener[const.HSTS_PRELOAD])
+ self.assertEqual(0, listener[const.HSTS_MAX_AGE])
+ self.assertFalse(listener[const.HSTS_INCLUDE_SUBDOMAINS])
+
@decorators.idempotent_id('16f11c82-f069-4592-8954-81b35a98e3b7')
def test_http_listener_delete(self):
self._test_listener_delete(const.HTTP, 8070)
@@ -1185,6 +1654,22 @@
def test_udp_listener_delete(self):
self._test_listener_delete(const.UDP, 8073)
+ @decorators.idempotent_id('0de6f1ad-58ae-4b31-86b6-b440fce70244')
+ def test_sctp_listener_delete(self):
+ self._test_listener_delete(const.SCTP, 8074)
+
+ @decorators.idempotent_id('ef357dcc-c9a0-40fe-a15c-b368f15d7187')
+ @testtools.skipUnless(
+ CONF.loadbalancer_feature_enabled.terminated_tls_enabled,
+ '[loadbalancer-feature-enabled] "terminated_tls_enabled" is '
+ 'False in the tempest configuration. TLS tests will be skipped.')
+ def test_terminated_https_listener_delete(self):
+ if not self.should_apply_terminated_https():
+ raise self.skipException(
+ f'Listener API tests with {const.TERMINATED_HTTPS} protocol'
+                ' require either the barbican service or running in noop.')
+ self._test_listener_delete(const.TERMINATED_HTTPS, 8075)
+
def _test_listener_delete(self, protocol, protocol_port):
"""Tests listener create and delete APIs.
@@ -1193,6 +1678,8 @@
* Deletes the listener.
* Validates the listener is in the DELETED state.
"""
+ self._validate_listener_protocol(protocol)
+
listener_name = data_utils.rand_name("lb_member_listener1-delete")
listener_kwargs = {
@@ -1201,8 +1688,22 @@
const.PROTOCOL_PORT: protocol_port,
const.LOADBALANCER_ID: self.lb_id,
}
+
+ # Add terminated_https args
+ if self.should_apply_terminated_https(protocol=protocol):
+ listener_kwargs.update({
+ const.DEFAULT_TLS_CONTAINER_REF: self.server_secret_ref,
+ const.SNI_CONTAINER_REFS: [self.SNI1_secret_ref,
+ self.SNI2_secret_ref],
+ })
+
listener = self.mem_listener_client.create_listener(**listener_kwargs)
+ self.addCleanup(
+ self.mem_listener_client.cleanup_listener,
+ listener[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer,
self.lb_id, const.PROVISIONING_STATUS,
@@ -1260,6 +1761,22 @@
def test_udp_listener_show_stats(self):
self._test_listener_show_stats(const.UDP, 8083)
+ @decorators.idempotent_id('7f6d3906-529c-4b99-8376-b836059df220')
+ def test_sctp_listener_show_stats(self):
+ self._test_listener_show_stats(const.SCTP, 8084)
+
+ @decorators.idempotent_id('c39c996f-9633-4d81-a5f1-e94643f0c650')
+ @testtools.skipUnless(
+ CONF.loadbalancer_feature_enabled.terminated_tls_enabled,
+ '[loadbalancer-feature-enabled] "terminated_tls_enabled" is '
+ 'False in the tempest configuration. TLS tests will be skipped.')
+ def test_terminated_https_listener_show_stats(self):
+ if not self.should_apply_terminated_https():
+ raise self.skipException(
+ f'Listener API tests with {const.TERMINATED_HTTPS} protocol'
+                ' require either the barbican service or running in noop.')
+ self._test_listener_show_stats(const.TERMINATED_HTTPS, 8085)
+
def _test_listener_show_stats(self, protocol, protocol_port):
"""Tests listener show statistics API.
@@ -1269,6 +1786,8 @@
* Show listener statistics.
* Validate the show reflects the expected values.
"""
+ self._validate_listener_protocol(protocol)
+
listener_name = data_utils.rand_name("lb_member_listener1-stats")
listener_description = data_utils.arbitrary_string(size=255)
@@ -1282,6 +1801,14 @@
const.CONNECTION_LIMIT: 200,
}
+ # Add terminated_https args
+ if self.should_apply_terminated_https(protocol=protocol):
+ listener_kwargs.update({
+ const.DEFAULT_TLS_CONTAINER_REF: self.server_secret_ref,
+ const.SNI_CONTAINER_REFS: [self.SNI1_secret_ref,
+ self.SNI2_secret_ref],
+ })
+
listener = self.mem_listener_client.create_listener(**listener_kwargs)
self.addCleanup(
self.mem_listener_client.cleanup_listener,
diff --git a/octavia_tempest_plugin/tests/api/v2/test_member.py b/octavia_tempest_plugin/tests/api/v2/test_member.py
index 305aa39..1e18af3 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_member.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_member.py
@@ -104,6 +104,11 @@
cls.current_listener_port += 1
listener = cls.mem_listener_client.create_listener(**listener_kwargs)
+ cls.addClassResourceCleanup(
+ cls.mem_listener_client.cleanup_listener,
+ listener[const.ID],
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
cls.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
@@ -133,6 +138,11 @@
message = e.resp_body.get('faultstring', message)
raise testtools.TestCase.skipException(message)
+ cls.addClassResourceCleanup(
+ cls.mem_pool_client.cleanup_pool,
+ pool[const.ID],
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
cls.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
@@ -143,6 +153,17 @@
class MemberAPITest1(MemberAPITest):
+ @decorators.idempotent_id('c1e029b0-b6d6-4fa6-8ccb-5c3f3aa293b0')
+ def test_ipv4_HTTP_LC_backup_member_create(self):
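+        # A backup member only receives traffic once all non-backup members
+        # in the pool are down.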
+ if not self.mem_member_client.is_version_supported(
+ self.api_version, '2.1'):
+ raise self.skipException('Backup member support is only available '
+ 'in Octavia API version 2.1 or newer')
+ pool_id = self._listener_pool_create(
+ listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+ algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+ self._test_member_create(4, pool_id, backup_member=True)
+
@decorators.idempotent_id('0684575a-0970-4fa8-8006-10c2b39c5f2b')
def test_ipv4_HTTP_LC_alt_monitor_member_create(self):
pool_id = self._listener_pool_create(
@@ -501,6 +522,17 @@
algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
self._test_member_create(6, pool_id)
+ @decorators.idempotent_id('b1994c5d-74b8-44be-b9e5-5e18e9219b61')
+ def test_ipv6_HTTP_LC_backup_member_create(self):
+ if not self.mem_member_client.is_version_supported(
+ self.api_version, '2.1'):
+ raise self.skipException('Backup member support is only available '
+ 'in Octavia API version 2.1 or newer')
+ pool_id = self._listener_pool_create(
+ listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+ algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+ self._test_member_create(6, pool_id, backup_member=True)
+
@decorators.idempotent_id('6056724b-d046-497a-ae31-c02af67d4fbb')
def test_ipv6_HTTPS_LC_alt_monitor_member_create(self):
pool_id = self._listener_pool_create(
@@ -832,12 +864,12 @@
self._test_member_create(6, pool_id)
def _test_member_create(self, ip_version, pool_id,
- alternate_monitor=False):
+ alternate_monitor=False, backup_member=False):
"""Tests member create and basic show APIs.
* Tests that users without the loadbalancer member role cannot
create members.
- * Create a fully populated member.
+ * Create a fully populated member or backup member.
        * If the driver doesn't support monitors, allow creating without one.
* Show member details.
* Validate the show reflects the requested values.
@@ -872,7 +904,7 @@
if self.mem_member_client.is_version_supported(
self.api_version, '2.1'):
member_kwargs.update({
- const.BACKUP: False,
+ const.BACKUP: backup_member,
})
if self.mem_member_client.is_version_supported(
@@ -916,6 +948,11 @@
member = self.mem_member_client.create_member(**member_kwargs)
+ self.addCleanup(
+ self.mem_member_client.cleanup_member,
+ member[const.ID], pool_id=pool_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer, self.lb_id,
const.PROVISIONING_STATUS, const.ACTIVE,
@@ -956,6 +993,17 @@
for item in equal_items:
self.assertEqual(member_kwargs[item], member[item])
+ @decorators.skip_because(bug='2045803')
+ @decorators.idempotent_id('b982188a-d55f-438a-a1b2-224f0ec8ff12')
+ def test_HTTP_LC_backup_member_list(self):
+ if not self.mem_member_client.is_version_supported(
+ self.api_version, '2.1'):
+ raise self.skipException('Backup member support is only available '
+ 'in Octavia API version 2.1 or newer')
+ self._test_member_list(const.HTTP,
+ const.LB_ALGORITHM_LEAST_CONNECTIONS,
+ backup_member=True)
+
@decorators.idempotent_id('fcc5c6cd-d1c2-4a49-8d26-2268608e59a6')
def test_HTTP_LC_member_list(self):
self._test_member_list(const.HTTP,
@@ -1056,11 +1104,11 @@
self._test_member_list(const.UDP,
const.LB_ALGORITHM_SOURCE_IP_PORT)
- def _test_member_list(self, pool_protocol, algorithm):
+    def _test_member_list(self, pool_protocol, algorithm,
+                          backup_member=False):
"""Tests member list API and field filtering.
* Create a clean pool.
- * Create three members.
+ * Create three members (one backup member if backup_member is True).
* Validates that other accounts cannot list the members.
* List the members using the default sort order.
* List the members using descending sort order.
@@ -1125,6 +1173,9 @@
const.PROTOCOL_PORT: 101,
}
+ if backup_member:
+ member1_kwargs[const.BACKUP] = True
+
if self.mem_member_client.is_version_supported(
self.api_version, '2.5'):
member1_tags = ["English", "Mathematics",
@@ -1336,6 +1387,17 @@
self.assertEqual(member2[const.PROTOCOL_PORT],
members[0][const.PROTOCOL_PORT])
+ # Test filtering using the backup flag
+ if backup_member:
+ members = self.mem_member_client.list_members(
+ pool_id,
+ query_params='{backup}={backup_value}'.format(
+ backup=const.BACKUP,
+ backup_value=const.BACKUP_TRUE))
+ self.assertEqual(1, len(members))
+ self.assertEqual(member1_name, members[0][const.NAME])
+ self.assertTrue(members[0][const.BACKUP])
+
# Test combined params
members = self.mem_member_client.list_members(
pool_id,
@@ -1380,6 +1442,17 @@
class MemberAPITest2(MemberAPITest):
+ @decorators.idempotent_id('048f4b15-1cb4-49ac-82d6-b2ac7fe9d03b')
+ def test_HTTP_LC_backup_member_show(self):
+ if not self.mem_member_client.is_version_supported(
+ self.api_version, '2.1'):
+ raise self.skipException('Backup member support is only available '
+ 'in Octavia API version 2.1 or newer')
+ pool_id = self._listener_pool_create(
+ listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+ algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+ self._test_member_show(pool_id, backup_member=True)
+
@decorators.idempotent_id('2674b363-7922-494a-b121-cf415dbbb716')
def test_HTTP_LC_alt_monitor_member_show(self):
pool_id = self._listener_pool_create(
@@ -1724,7 +1797,8 @@
algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
self._test_member_show(pool_id)
- def _test_member_show(self, pool_id, alternate_monitor=False):
+ def _test_member_show(self, pool_id, alternate_monitor=False,
+ backup_member=False):
"""Tests member show API.
* Create a fully populated member.
@@ -1748,7 +1822,7 @@
if self.mem_member_client.is_version_supported(
self.api_version, '2.1'):
member_kwargs.update({
- const.BACKUP: False,
+ const.BACKUP: backup_member,
})
if self.lb_member_vip_subnet:
member_kwargs[const.SUBNET_ID] = self.lb_member_vip_subnet[
@@ -1756,6 +1830,11 @@
member = self.mem_member_client.create_member(**member_kwargs)
+ self.addCleanup(
+ self.mem_member_client.cleanup_member,
+ member[const.ID], pool_id=pool_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer, self.lb_id,
const.PROVISIONING_STATUS, const.ACTIVE,
@@ -1812,6 +1891,17 @@
expected_allowed, member[const.ID],
pool_id=pool_id)
+ @decorators.idempotent_id('592c19c3-1e0d-4d6d-b2ff-0d39d8654c99')
+ def test_HTTP_LC_backup_member_update(self):
+ if not self.mem_member_client.is_version_supported(
+ self.api_version, '2.1'):
+ raise self.skipException('Backup member support is only available '
+ 'in Octavia API version 2.1 or newer')
+ pool_id = self._listener_pool_create(
+ listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+ algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+ self._test_member_update(pool_id, backup_member=True)
+
@decorators.idempotent_id('65680d48-1d49-4959-a7d1-677797e54f6b')
def test_HTTP_LC_alt_monitor_member_update(self):
pool_id = self._listener_pool_create(
@@ -2156,7 +2246,8 @@
algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
self._test_member_update(pool_id)
- def _test_member_update(self, pool_id, alternate_monitor=False):
+ def _test_member_update(self, pool_id, alternate_monitor=False,
+ backup_member=False):
"""Tests member show API and field filtering.
* Create a fully populated member.
@@ -2183,7 +2274,7 @@
if self.mem_member_client.is_version_supported(
self.api_version, '2.1'):
member_kwargs.update({
- const.BACKUP: False,
+ const.BACKUP: backup_member,
})
if self.mem_member_client.is_version_supported(
@@ -2199,6 +2290,11 @@
member = self.mem_member_client.create_member(**member_kwargs)
+ self.addCleanup(
+ self.mem_member_client.cleanup_member,
+ member[const.ID], pool_id=pool_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer, self.lb_id,
const.PROVISIONING_STATUS, const.ACTIVE,
@@ -2642,6 +2738,11 @@
const.ID]
member1 = self.mem_member_client.create_member(**member1_kwargs)
+ self.addCleanup(
+ self.mem_member_client.cleanup_member,
+ member1[const.ID], pool_id=pool_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
self.lb_id,
const.PROVISIONING_STATUS,
@@ -2674,6 +2775,11 @@
member2 = self.mem_member_client.create_member(**member2_kwargs)
+ self.addCleanup(
+ self.mem_member_client.cleanup_member,
+ member2[const.ID], pool_id=pool_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
self.lb_id,
const.PROVISIONING_STATUS,
@@ -2758,6 +2864,17 @@
self.assertEqual(member2_name_update, members[0][const.NAME])
self.assertEqual(member3_name, members[1][const.NAME])
+ @decorators.idempotent_id('eab8f0dc-0959-4b50-aea2-2f2319305d15')
+ def test_HTTP_LC_backup_member_delete(self):
+ if not self.mem_member_client.is_version_supported(
+ self.api_version, '2.1'):
+ raise self.skipException('Backup member support is only available '
+ 'in Octavia API version 2.1 or newer')
+ pool_id = self._listener_pool_create(
+ listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+ algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+ self._test_member_delete(pool_id, backup_member=True)
+
@decorators.idempotent_id('8b6574a3-17e8-4950-b24e-66d0c28960d3')
def test_HTTP_LC_member_delete(self):
pool_id = self._listener_pool_create(
@@ -2930,7 +3047,7 @@
algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
self._test_member_delete(pool_id)
- def _test_member_delete(self, pool_id):
+ def _test_member_delete(self, pool_id, backup_member=False):
"""Tests member create and delete APIs.
* Creates a member.
@@ -2945,8 +3062,20 @@
const.ADDRESS: '192.0.2.1',
const.PROTOCOL_PORT: self.member_port.increment(),
}
+
+ if self.mem_member_client.is_version_supported(
+ self.api_version, '2.1'):
+ member_kwargs.update({
+ const.BACKUP: backup_member,
+ })
+
member = self.mem_member_client.create_member(**member_kwargs)
+ self.addCleanup(
+ self.mem_member_client.cleanup_member,
+ member[const.ID], pool_id=pool_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer,
self.lb_id, const.PROVISIONING_STATUS,
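The ``waiters.wait_for_status`` calls threaded through these tests poll a
show function until a field reaches the expected value or a timeout expires.
A minimal sketch of that idea, assuming only that ``show_func`` returns a
dict containing ``field``::

    import time

    def wait_for_field(show_func, obj_id, field, value,
                       interval=1, timeout=60, **kwargs):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            obj = show_func(obj_id, **kwargs)
            if obj[field] == value:
                return obj
            if obj[field] == 'ERROR':
                # Fail fast instead of waiting out the full timeout.
                raise RuntimeError('{} went to ERROR'.format(obj_id))
            time.sleep(interval)
        raise TimeoutError(
            '{} never reached {}={}'.format(obj_id, field, value))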
diff --git a/octavia_tempest_plugin/tests/api/v2/test_pool.py b/octavia_tempest_plugin/tests/api/v2/test_pool.py
index db86ebe..4e3e667 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_pool.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_pool.py
@@ -359,6 +359,11 @@
listener = self.mem_listener_client.create_listener(
**listener_kwargs)
+ self.addClassResourceCleanup(
+ self.mem_listener_client.cleanup_listener,
+ listener[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
self.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/barbican_scenario/v2/test_tls_barbican.py b/octavia_tempest_plugin/tests/barbican_scenario/v2/test_tls_barbican.py
index 0c23cd7..2e1464c 100644
--- a/octavia_tempest_plugin/tests/barbican_scenario/v2/test_tls_barbican.py
+++ b/octavia_tempest_plugin/tests/barbican_scenario/v2/test_tls_barbican.py
@@ -240,6 +240,11 @@
pool = cls.mem_pool_client.create_pool(**pool_kwargs)
cls.pool_id = pool[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_pool_client.cleanup_pool,
+ cls.pool_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
cls.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
@@ -258,7 +263,13 @@
if cls.lb_member_1_subnet:
member1_kwargs[const.SUBNET_ID] = cls.lb_member_1_subnet[const.ID]
- cls.mem_member_client.create_member(**member1_kwargs)
+ member1 = cls.mem_member_client.create_member(**member1_kwargs)
+
+ cls.addClassResourceCleanup(
+ cls.mem_member_client.cleanup_member,
+ member1[const.ID], cls.pool_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(
cls.mem_lb_client.show_loadbalancer, cls.lb_id,
const.PROVISIONING_STATUS, const.ACTIVE,
@@ -277,7 +288,13 @@
if cls.lb_member_2_subnet:
member2_kwargs[const.SUBNET_ID] = cls.lb_member_2_subnet[const.ID]
- cls.mem_member_client.create_member(**member2_kwargs)
+ member2 = cls.mem_member_client.create_member(**member2_kwargs)
+
+ cls.addClassResourceCleanup(
+ cls.mem_member_client.cleanup_member,
+ member2[const.ID], cls.pool_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(
cls.mem_lb_client.show_loadbalancer, cls.lb_id,
const.PROVISIONING_STATUS, const.ACTIVE,
@@ -1205,7 +1222,8 @@
self.assertEqual(expected_proto, selected_proto)
- def _test_http_versions_tls_traffic(self, http_version, alpn_protos):
+ def _test_http_versions_tls_traffic(self, http_version, alpn_protos,
+ hsts: bool = False):
if not self.mem_listener_client.is_version_supported(
self.api_version, '2.20'):
raise self.skipException('ALPN protocols are only available on '
@@ -1220,6 +1238,12 @@
const.DEFAULT_TLS_CONTAINER_REF: self.server_secret_ref,
const.ALPN_PROTOCOLS: alpn_protos,
}
+ if self.mem_listener_client.is_version_supported(
+ self.api_version, '2.27'):
+ listener_kwargs[const.HSTS_MAX_AGE] = 100 if hsts else None
+ listener_kwargs[const.HSTS_INCLUDE_SUBDOMAINS] = hsts
+ listener_kwargs[const.HSTS_PRELOAD] = hsts
+
listener = self.mem_listener_client.create_listener(**listener_kwargs)
self.listener_id = listener[const.ID]
self.addCleanup(
@@ -1241,6 +1265,12 @@
client = httpx.Client(http2=(http_version == 'HTTP/2'), verify=context)
r = client.get(url)
self.assertEqual(http_version, r.http_version)
+ if hsts:
+ self.assertIn('strict-transport-security', r.headers)
+ self.assertEqual('max-age=100; includeSubDomains; preload;',
+ r.headers['strict-transport-security'])
+ else:
+ self.assertNotIn('strict-transport-security', r.headers)
@decorators.idempotent_id('9965828d-24af-4fa0-91ae-21c6bc47ab4c')
def test_http_2_tls_traffic(self):
@@ -1251,6 +1281,15 @@
self._test_http_versions_tls_traffic(
'HTTP/1.1', ['http/1.1', 'http/1.0'])
+ @decorators.idempotent_id('7436c6b7-44be-4544-a40b-31d2b7b2ad0b')
+ def test_http_1_1_tls_hsts_traffic(self):
+ if not self.mem_listener_client.is_version_supported(
+ self.api_version, '2.27'):
+ raise self.skipException('HSTS is only available on '
+ 'Octavia API version 2.27 or newer.')
+ self._test_http_versions_tls_traffic(
+ 'HTTP/1.1', ['http/1.1', 'http/1.0'], hsts=True)
+
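The HSTS assertions above can be reproduced against any TLS endpoint. A
minimal standalone sketch using the same ``httpx`` client the scenario test
relies on (``verify=False`` is an assumption for a self-signed test
certificate)::

    import httpx

    def check_hsts(url, expect_hsts):
        r = httpx.Client(verify=False).get(url)
        header = r.headers.get('strict-transport-security')
        if expect_hsts:
            # Octavia derives the value from the listener's hsts_max_age,
            # hsts_include_subdomains and hsts_preload fields.
            assert header is not None, 'HSTS header missing'
            assert 'max-age=' in header
        else:
            assert header is None, 'unexpected HSTS header: %s' % header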
@decorators.idempotent_id('ee0faf71-d11e-4323-8673-e5e15779749b')
def test_pool_reencryption(self):
if not self.mem_listener_client.is_version_supported(
@@ -1268,6 +1307,11 @@
pool = self.mem_pool_client.create_pool(**pool_kwargs)
pool_id = pool[const.ID]
+ self.addClassResourceCleanup(
+ self.mem_pool_client.cleanup_pool,
+ pool_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
self.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
@@ -1286,7 +1330,13 @@
if self.lb_member_1_subnet:
member1_kwargs[const.SUBNET_ID] = self.lb_member_1_subnet[const.ID]
- self.mem_member_client.create_member(**member1_kwargs)
+ member1 = self.mem_member_client.create_member(**member1_kwargs)
+
+ self.addClassResourceCleanup(
+ self.mem_member_client.cleanup_member,
+ member1[const.ID], pool_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer, self.lb_id,
const.PROVISIONING_STATUS, const.ACTIVE,
@@ -1305,7 +1355,13 @@
if self.lb_member_2_subnet:
member2_kwargs[const.SUBNET_ID] = self.lb_member_2_subnet[const.ID]
- self.mem_member_client.create_member(**member2_kwargs)
+ member2 = self.mem_member_client.create_member(**member2_kwargs)
+
+ self.addClassResourceCleanup(
+ self.mem_member_client.cleanup_member,
+ member2[const.ID], pool_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer, self.lb_id,
const.PROVISIONING_STATUS, const.ACTIVE,
@@ -1324,6 +1380,11 @@
listener = self.mem_listener_client.create_listener(**listener_kwargs)
self.listener_id = listener[const.ID]
+ self.addCleanup(
+ self.mem_listener_client.cleanup_listener,
+ self.listener_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
self.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
@@ -1420,6 +1481,11 @@
pool = self.mem_pool_client.create_pool(**pool_kwargs)
pool_id = pool[const.ID]
+ self.addClassResourceCleanup(
+ self.mem_pool_client.cleanup_pool,
+ pool_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
self.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
@@ -1472,6 +1538,12 @@
member1_kwargs[const.SUBNET_ID] = self.lb_member_1_subnet[const.ID]
member1 = self.mem_member_client.create_member(**member1_kwargs)
+
+ self.addCleanup(
+ self.mem_member_client.cleanup_member,
+ member1[const.ID], pool_id=pool_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer, self.lb_id,
const.PROVISIONING_STATUS, const.ACTIVE,
@@ -1492,6 +1564,12 @@
member2_kwargs[const.SUBNET_ID] = self.lb_member_2_subnet[const.ID]
member2 = self.mem_member_client.create_member(**member2_kwargs)
+
+ self.addCleanup(
+ self.mem_member_client.cleanup_member,
+ member2[const.ID], pool_id=pool_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer, self.lb_id,
const.PROVISIONING_STATUS, const.ACTIVE,
@@ -1510,6 +1588,11 @@
listener = self.mem_listener_client.create_listener(**listener_kwargs)
self.listener_id = listener[const.ID]
+ self.addCleanup(
+ self.mem_listener_client.cleanup_listener,
+ self.listener_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
self.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_healthmonitor.py b/octavia_tempest_plugin/tests/scenario/v2/test_healthmonitor.py
index 8075a35..01548c2 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_healthmonitor.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_healthmonitor.py
@@ -269,6 +269,11 @@
}
pool = self.mem_pool_client.create_pool(**pool_kwargs)
+ self.addClassResourceCleanup(
+ self.mem_pool_client.cleanup_pool,
+ pool[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
self.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
@@ -295,6 +300,11 @@
hm = self.mem_healthmonitor_client.create_healthmonitor(**hm_kwargs)
+ self.addClassResourceCleanup(
+ self.mem_healthmonitor_client.cleanup_healthmonitor,
+ hm[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(
self.mem_lb_client.show_loadbalancer, self.lb_id,
const.PROVISIONING_STATUS, const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_ipv6_traffic_ops.py b/octavia_tempest_plugin/tests/scenario/v2/test_ipv6_traffic_ops.py
index 69c1f2b..d4eaa86 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_ipv6_traffic_ops.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_ipv6_traffic_ops.py
@@ -104,6 +104,11 @@
listener = cls.mem_listener_client.create_listener(**listener_kwargs)
cls.listener_ids[protocol] = listener[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_listener_client.cleanup_listener,
+ listener[const.ID],
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
cls.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
@@ -120,6 +125,11 @@
pool = cls.mem_pool_client.create_pool(**pool_kwargs)
cls.pool_ids[protocol] = pool[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_pool_client.cleanup_pool,
+ pool[const.ID],
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
cls.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_l7policy.py b/octavia_tempest_plugin/tests/scenario/v2/test_l7policy.py
index acbd094..4c84ba3 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_l7policy.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_l7policy.py
@@ -61,6 +61,11 @@
listener = cls.mem_listener_client.create_listener(**listener_kwargs)
cls.listener_id = listener[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_listener_client.cleanup_listener,
+ cls.listener_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
cls.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
@@ -77,6 +82,11 @@
pool = cls.mem_pool_client.create_pool(**pool_kwargs)
cls.pool_id = pool[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_pool_client.cleanup_pool,
+ cls.pool_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
cls.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_l7rule.py b/octavia_tempest_plugin/tests/scenario/v2/test_l7rule.py
index d5683ef..edbdc33 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_l7rule.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_l7rule.py
@@ -61,6 +61,11 @@
listener = cls.mem_listener_client.create_listener(**listener_kwargs)
cls.listener_id = listener[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_listener_client.cleanup_listener,
+ cls.listener_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
cls.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
@@ -76,6 +81,11 @@
l7policy = cls.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
cls.l7policy_id = l7policy[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_l7policy_client.cleanup_l7policy,
+ cls.l7policy_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
cls.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_listener.py b/octavia_tempest_plugin/tests/scenario/v2/test_listener.py
index 2850ec0..be997aa 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_listener.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_listener.py
@@ -76,6 +76,11 @@
pool1 = cls.mem_pool_client.create_pool(**pool1_kwargs)
pool1_id = pool1[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_pool_client.cleanup_pool,
+ pool1_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
cls.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
@@ -92,6 +97,11 @@
pool2 = cls.mem_pool_client.create_pool(**pool2_kwargs)
pool2_id = pool2[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_pool_client.cleanup_pool,
+ pool2_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
cls.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_pool.py b/octavia_tempest_plugin/tests/scenario/v2/test_pool.py
index 6df4c9c..c53e1c2 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_pool.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_pool.py
@@ -359,6 +359,11 @@
**listener_kwargs)
listener_id = listener[const.ID]
+ self.addCleanup(
+ self.mem_listener_client.cleanup_listener,
+ listener_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
self.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_traffic_ops.py b/octavia_tempest_plugin/tests/scenario/v2/test_traffic_ops.py
index e8221fe..0083887 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_traffic_ops.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_traffic_ops.py
@@ -120,6 +120,11 @@
listener = cls.mem_listener_client.create_listener(**listener_kwargs)
+ cls.addClassResourceCleanup(
+ cls.mem_listener_client.cleanup_listener,
+ listener[const.ID],
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
cls.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
@@ -135,6 +140,11 @@
}
pool = cls.mem_pool_client.create_pool(**pool_kwargs)
+ cls.addClassResourceCleanup(
+ cls.mem_pool_client.cleanup_pool,
+ pool[const.ID],
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
cls.lb_id, const.PROVISIONING_STATUS,
const.ACTIVE,
@@ -1619,3 +1629,171 @@
# Make a request to the stats page
URL = 'http://{0}:{1}/metrics'.format(self.lb_vip_address, '8080')
self.validate_URL_response(URL, expected_status_code=200)
+
+ @decorators.idempotent_id('b2d5cefe-eac0-4eb3-b7c2-54f22578def9')
+ def test_backup_member(self):
+ if not self.mem_member_client.is_version_supported(
+ self.api_version, '2.1'):
+ raise self.skipException('Backup member support is only available '
+ 'in Octavia API version 2.1 or newer')
+
+ _LISTENER_PORT = 106
+ # Create a unique listener and pool for this test
+ pool_id = self._listener_pool_create(const.HTTP, _LISTENER_PORT)[1]
+
+ # Create a health monitor on the pool
+ hm_name = data_utils.rand_name("lb_member_hm1-backup-not-active")
+ hm_kwargs = {
+ const.POOL_ID: pool_id,
+ const.NAME: hm_name,
+ const.TYPE: const.HEALTH_MONITOR_HTTP,
+ const.DELAY: 1,
+ const.TIMEOUT: 1,
+ const.MAX_RETRIES: 1,
+ const.MAX_RETRIES_DOWN: 1,
+ const.HTTP_METHOD: const.GET,
+ const.URL_PATH: '/',
+ const.EXPECTED_CODES: '200',
+ const.ADMIN_STATE_UP: True,
+ }
+ hm = self.mem_healthmonitor_client.create_healthmonitor(**hm_kwargs)
+ self.addCleanup(
+ self.mem_healthmonitor_client.cleanup_healthmonitor,
+ hm[const.ID], lb_client=self.mem_lb_client, lb_id=self.lb_id)
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.check_interval,
+ CONF.load_balancer.check_timeout)
+ hm = waiters.wait_for_status(
+ self.mem_healthmonitor_client.show_healthmonitor,
+ hm[const.ID], const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.check_interval,
+ CONF.load_balancer.check_timeout)
+
+ # Set up Member 1 for Webserver 1
+ member1_name = data_utils.rand_name("lb_member_member1-not-backup")
+ member1_kwargs = {
+ const.POOL_ID: pool_id,
+ const.NAME: member1_name,
+ const.ADMIN_STATE_UP: True,
+ const.ADDRESS: self.webserver1_ip,
+ const.PROTOCOL_PORT: 80,
+ }
+ if self.lb_member_1_subnet:
+ member1_kwargs[const.SUBNET_ID] = self.lb_member_1_subnet[const.ID]
+
+ member1 = self.mem_member_client.create_member(**member1_kwargs)
+ self.addCleanup(
+ self.mem_member_client.cleanup_member,
+ member1[const.ID], pool_id=pool_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.check_interval,
+ CONF.load_balancer.check_timeout)
+
+ # Set up Member 2 for Webserver 2 (Backup)
+ member2_name = data_utils.rand_name("lb_member_member2-backup")
+ member2_kwargs = {
+ const.POOL_ID: pool_id,
+ const.NAME: member2_name,
+ const.ADMIN_STATE_UP: True,
+ const.ADDRESS: self.webserver2_ip,
+ const.PROTOCOL_PORT: 80,
+ const.BACKUP: True,
+ }
+ if self.lb_member_2_subnet:
+ member2_kwargs[const.SUBNET_ID] = self.lb_member_2_subnet[const.ID]
+
+ member2 = self.mem_member_client.create_member(**member2_kwargs)
+ self.addCleanup(
+ self.mem_member_client.cleanup_member,
+ member2[const.ID], pool_id=pool_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.check_interval,
+ CONF.load_balancer.check_timeout)
+
+ url_for_tests = f'http://{self.lb_vip_address}:{_LISTENER_PORT}/'
+
+ # Send some requests and check that only member 1 is responding
+ self.assertConsistentResponse((200, self.webserver1_response),
+ url_for_tests)
+
+ # Disable member 1 and check that the backup member takes over
+ member_update_kwargs = {
+ const.POOL_ID: pool_id,
+ const.ADMIN_STATE_UP: False}
+
+ self.mem_member_client.update_member(
+ member1[const.ID], **member_update_kwargs)
+
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.check_interval,
+ CONF.load_balancer.check_timeout)
+ waiters.wait_for_status(
+ self.mem_member_client.show_member,
+ member1[const.ID], const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.check_interval,
+ CONF.load_balancer.check_timeout,
+ pool_id=pool_id)
+
+ # Send some requests and check that only backup member 2 is responding
+ self.assertConsistentResponse((200, self.webserver2_response),
+ url_for_tests)
+
+ # Enable member 1 and check that member 1 traffic resumes
+ member_update_kwargs = {
+ const.POOL_ID: pool_id,
+ const.ADMIN_STATE_UP: True}
+
+ self.mem_member_client.update_member(
+ member1[const.ID], **member_update_kwargs)
+
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.check_interval,
+ CONF.load_balancer.check_timeout)
+ waiters.wait_for_status(
+ self.mem_member_client.show_member,
+ member1[const.ID], const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.check_interval,
+ CONF.load_balancer.check_timeout,
+ pool_id=pool_id)
+
+ # Send some requests and check that only member 1 is responding
+ self.assertConsistentResponse((200, self.webserver1_response),
+ url_for_tests)
+
+ # Delete member 1 and check that backup member 2 is responding
+ self.mem_member_client.delete_member(
+ member1[const.ID],
+ pool_id=pool_id)
+
+ waiters.wait_for_deleted_status_or_not_found(
+ self.mem_member_client.show_member, member1[const.ID],
+ const.PROVISIONING_STATUS,
+ CONF.load_balancer.check_interval,
+ CONF.load_balancer.check_timeout,
+ pool_id=pool_id)
+
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer,
+ self.lb_id, const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.check_interval,
+ CONF.load_balancer.check_timeout)
+
+ # Send some requests and check that only backup member 2 is responding
+ self.assertConsistentResponse((200, self.webserver2_response),
+ url_for_tests)
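``assertConsistentResponse`` above sends repeated requests and requires the
same status and body every time, which is how the test proves that exactly
one member is serving traffic. A minimal sketch of that check, assuming the
backends answer with stable plain-text bodies::

    import urllib.request

    def assert_consistent_response(url, expected_body, attempts=10):
        for _ in range(attempts):
            with urllib.request.urlopen(url, timeout=5) as resp:
                assert resp.status == 200
                body = resp.read().decode().strip()
                # With member 1 disabled or deleted, only the backup
                # member may answer, so every response must come from it.
                assert body == expected_body, 'unexpected backend: ' + body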
diff --git a/octavia_tempest_plugin/tests/test_base.py b/octavia_tempest_plugin/tests/test_base.py
index e1daec1..51834fe 100644
--- a/octavia_tempest_plugin/tests/test_base.py
+++ b/octavia_tempest_plugin/tests/test_base.py
@@ -34,6 +34,7 @@
from octavia_tempest_plugin.common import cert_utils
from octavia_tempest_plugin.common import constants as const
+import octavia_tempest_plugin.services.load_balancer.v2 as lbv2
from octavia_tempest_plugin.tests import RBAC_tests
from octavia_tempest_plugin.tests import validators
from octavia_tempest_plugin.tests import waiters
@@ -182,27 +183,29 @@
cls.os_roles_lb_member.security_group_rules_client)
cls.lb_mem_servers_client = cls.os_roles_lb_member.servers_client
cls.lb_mem_subnet_client = cls.os_roles_lb_member.subnets_client
- cls.mem_lb_client = (
+ cls.mem_lb_client: lbv2.LoadbalancerClient = (
cls.os_roles_lb_member.load_balancer_v2.LoadbalancerClient())
- cls.mem_listener_client = (
+ cls.mem_listener_client: lbv2.ListenerClient = (
cls.os_roles_lb_member.load_balancer_v2.ListenerClient())
- cls.mem_pool_client = (
+ cls.mem_pool_client: lbv2.PoolClient = (
cls.os_roles_lb_member.load_balancer_v2.PoolClient())
- cls.mem_member_client = (
+ cls.mem_member_client: lbv2.MemberClient = (
cls.os_roles_lb_member.load_balancer_v2.MemberClient())
- cls.mem_healthmonitor_client = (
+ cls.mem_healthmonitor_client: lbv2.HealthMonitorClient = (
cls.os_roles_lb_member.load_balancer_v2.HealthMonitorClient())
- cls.mem_l7policy_client = (
+ cls.mem_l7policy_client: lbv2.L7PolicyClient = (
cls.os_roles_lb_member.load_balancer_v2.L7PolicyClient())
- cls.mem_l7rule_client = (
+ cls.mem_l7rule_client: lbv2.L7RuleClient = (
cls.os_roles_lb_member.load_balancer_v2.L7RuleClient())
- cls.lb_admin_amphora_client = lb_admin_prefix.AmphoraClient()
- cls.lb_admin_flavor_profile_client = (
+ cls.lb_admin_amphora_client: lbv2.AmphoraClient = (
+ lb_admin_prefix.AmphoraClient())
+ cls.lb_admin_flavor_profile_client: lbv2.FlavorProfileClient = (
lb_admin_prefix.FlavorProfileClient())
- cls.lb_admin_flavor_client = lb_admin_prefix.FlavorClient()
- cls.mem_flavor_client = (
+ cls.lb_admin_flavor_client: lbv2.FlavorClient = (
+ lb_admin_prefix.FlavorClient())
+ cls.mem_flavor_client: lbv2.FlavorClient = (
cls.os_roles_lb_member.load_balancer_v2.FlavorClient())
- cls.mem_provider_client = (
+ cls.mem_provider_client: lbv2.ProviderClient = (
cls.os_roles_lb_member.load_balancer_v2.ProviderClient())
cls.os_admin_servers_client = cls.os_admin.servers_client
cls.os_admin_routers_client = cls.os_admin.routers_client
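The annotations added above do not change runtime behavior; they give IDEs
and type checkers a concrete client type for attributes that are only bound
inside ``setUpClass()``. A minimal sketch of the same pattern with a
stand-in client class::

    class FakeMemberClient:
        def list_members(self, pool_id):
            return []

    class BaseTest:
        # The class-scope annotation lets static tooling resolve
        # mem_member_client even though the value is assigned later.
        mem_member_client: FakeMemberClient

        @classmethod
        def setUpClass(cls):
            cls.mem_member_client = FakeMemberClient()

    BaseTest.setUpClass()
    assert BaseTest.mem_member_client.list_members('pool-1') == []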
@@ -587,6 +590,17 @@
lb_kwargs[const.VIP_NETWORK_ID] = cls.lb_member_vip_net[const.ID]
lb_kwargs[const.VIP_SUBNET_ID] = None
+ def _validate_listener_protocol(self, protocol, raise_if_unsupported=True):
+ if (protocol == const.SCTP and
+ not self.mem_listener_client.is_version_supported(
+ self.api_version, '2.23')):
+ if raise_if_unsupported:
+ raise self.skipException('SCTP listener protocol '
+ 'is only available on Octavia '
+ 'API version 2.23 or newer.')
+ return False
+ return True
+
class LoadBalancerBaseTestWithCompute(LoadBalancerBaseTest):
@classmethod
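``_validate_listener_protocol`` above gates SCTP tests on API microversion
support. The comparison behind ``is_version_supported`` has to be numeric,
not lexicographic; a minimal sketch of such a check (this tuple-based
implementation is an assumption, not the plugin's actual code)::

    def _to_tuple(version):
        return tuple(int(part) for part in version.split('.'))

    def is_version_supported(current, required):
        return _to_tuple(current) >= _to_tuple(required)

    assert is_version_supported('2.23', '2.23')
    assert not is_version_supported('2.1', '2.23')
    # String comparison would get this wrong ('2.9' < '2.23' as text).
    assert is_version_supported('2.9', '2.3')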
diff --git a/releasenotes/notes/add-tls-terminated-listener-api-tests-2c4de76fe04b0409.yaml b/releasenotes/notes/add-tls-terminated-listener-api-tests-2c4de76fe04b0409.yaml
new file mode 100644
index 0000000..83ea11e
--- /dev/null
+++ b/releasenotes/notes/add-tls-terminated-listener-api-tests-2c4de76fe04b0409.yaml
@@ -0,0 +1,4 @@
+---
+features:
+ - |
+    Added HTTPS-terminated listener API tests. Note: when running with the noop provider driver, the new tests require the noop_cert_manager.
diff --git a/requirements.txt b/requirements.txt
index 7b2f0a2..afa0565 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,7 +2,7 @@
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
-cryptography>=2.1 # BSD/Apache-2.0
+cryptography>=3.0 # BSD/Apache-2.0
python-dateutil>=2.5.3 # BSD
pbr!=2.1.0,>=2.0.0 # Apache-2.0
oslo.config>=5.2.0 # Apache-2.0
diff --git a/zuul.d/jobs.yaml b/zuul.d/jobs.yaml
index c7bcaa0..b431af6 100644
--- a/zuul.d/jobs.yaml
+++ b/zuul.d/jobs.yaml
@@ -514,6 +514,8 @@
enabled: True
audit_middleware_notifications:
driver: log
+ certificates:
+ cert_manager: noop_cert_manager
tempest_concurrency: 4
tempest_test_regex: ^octavia_tempest_plugin.tests.api.v2
tox_envlist: all
@@ -552,22 +554,16 @@
USE_PYTHON3: False
- job:
- name: octavia-v2-dsvm-noop-api-stable-yoga
+ name: octavia-v2-dsvm-noop-api-stable-2023-2
parent: octavia-v2-dsvm-noop-api
- nodeset: octavia-single-node-ubuntu-focal
- override-checkout: stable/yoga
+ nodeset: octavia-single-node-ubuntu-jammy
+ override-checkout: stable/2023.2
- job:
- name: octavia-v2-dsvm-noop-api-stable-xena
+ name: octavia-v2-dsvm-noop-api-stable-2023-1
parent: octavia-v2-dsvm-noop-api
- nodeset: octavia-single-node-ubuntu-focal
- override-checkout: stable/xena
-
-- job:
- name: octavia-v2-dsvm-noop-api-stable-wallaby
- parent: octavia-v2-dsvm-noop-api
- nodeset: octavia-single-node-ubuntu-focal
- override-checkout: stable/wallaby
+ nodeset: octavia-single-node-ubuntu-jammy
+ override-checkout: stable/2023.1
- job:
name: octavia-v2-dsvm-scenario-base
@@ -596,7 +592,9 @@
- job:
name: octavia-v2-dsvm-scenario
parent: octavia-v2-dsvm-scenario-base
- branches: ^(?!stable/(train|ussuri|victoria|wallaby|xena|yoga|zed))
+ branches:
+ regex: ^(stable/(train|ussuri|victoria|wallaby|xena|yoga|zed))
+ negate: true
nodeset: octavia-single-node-ubuntu-jammy
- job:
@@ -612,6 +610,18 @@
nodeset: octavia-single-node-ubuntu-bionic
- job:
+ name: octavia-v2-dsvm-scenario-traffic-ops
+ parent: octavia-v2-dsvm-scenario
+ vars:
+ tempest_test_regex: ^octavia_tempest_plugin.tests.scenario.v2.*traffic_ops
+
+- job:
+ name: octavia-v2-dsvm-scenario-non-traffic-ops
+ parent: octavia-v2-dsvm-scenario
+ vars:
+ tempest_test_regex: ^octavia_tempest_plugin.tests.scenario.v2.(?!.*traffic_ops)
+
+- job:
name: octavia-v2-dsvm-scenario-ipv6-only
parent: octavia-dsvm-live-base-ipv6-only
vars:
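The two new jobs above split the scenario suite with complementary
``tempest_test_regex`` values: a plain match for traffic-ops modules and a
negative lookahead for everything else. The partition can be sanity-checked
with Python's ``re`` module (the test names below are illustrative)::

    import re

    TRAFFIC = r'^octavia_tempest_plugin.tests.scenario.v2.*traffic_ops'
    NON_TRAFFIC = r'^octavia_tempest_plugin.tests.scenario.v2.(?!.*traffic_ops)'

    names = [
        'octavia_tempest_plugin.tests.scenario.v2.test_traffic_ops.X.test_a',
        'octavia_tempest_plugin.tests.scenario.v2.test_pool.Y.test_b',
    ]
    for name in names:
        in_traffic = bool(re.match(TRAFFIC, name))
        in_non_traffic = bool(re.match(NON_TRAFFIC, name))
        # Every scenario test lands in exactly one of the two jobs.
        assert in_traffic != in_non_traffic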
@@ -642,22 +652,40 @@
override-checkout: 2.30.0
- job:
- name: octavia-v2-dsvm-scenario-stable-yoga
+ name: octavia-v2-dsvm-scenario-stable-2023-2
parent: octavia-v2-dsvm-scenario
- nodeset: octavia-single-node-ubuntu-focal
- override-checkout: stable/yoga
+ nodeset: octavia-single-node-ubuntu-jammy
+    override-checkout: stable/2023.2
- job:
- name: octavia-v2-dsvm-scenario-stable-xena
- parent: octavia-v2-dsvm-scenario
- nodeset: octavia-single-node-ubuntu-focal
- override-checkout: stable/xena
+ name: octavia-v2-dsvm-scenario-traffic-ops-stable-2023-2
+ parent: octavia-v2-dsvm-scenario-stable-2023-2
+ vars:
+ tempest_test_regex: ^octavia_tempest_plugin.tests.scenario.v2.*traffic_ops
- job:
- name: octavia-v2-dsvm-scenario-stable-wallaby
+ name: octavia-v2-dsvm-scenario-non-traffic-ops-stable-2023-2
+ parent: octavia-v2-dsvm-scenario-stable-2023-2
+ vars:
+ tempest_test_regex: ^octavia_tempest_plugin.tests.scenario.v2.(?!.*traffic_ops)
+
+- job:
+ name: octavia-v2-dsvm-scenario-stable-2023-1
parent: octavia-v2-dsvm-scenario
- nodeset: octavia-single-node-ubuntu-focal
- override-checkout: stable/wallaby
+ nodeset: octavia-single-node-ubuntu-jammy
+ override-checkout: stable/2023.1
+
+- job:
+ name: octavia-v2-dsvm-scenario-traffic-ops-stable-2023-1
+ parent: octavia-v2-dsvm-scenario-stable-2023-1
+ vars:
+ tempest_test_regex: ^octavia_tempest_plugin.tests.scenario.v2.*traffic_ops
+
+- job:
+ name: octavia-v2-dsvm-scenario-non-traffic-ops-stable-2023-1
+ parent: octavia-v2-dsvm-scenario-stable-2023-1
+ vars:
+ tempest_test_regex: ^octavia_tempest_plugin.tests.scenario.v2.(?!.*traffic_ops)
# Legacy jobs for the transition to the act-stdby two node jobs
- job:
@@ -848,7 +876,9 @@
- job:
name: octavia-v2-dsvm-tls-barbican
parent: octavia-v2-dsvm-tls-barbican-base
- branches: ^(?!stable/(train|ussuri|victoria|wallaby|xena|yoga|zed))
+ branches:
+ regex: ^(stable/(train|ussuri|victoria|wallaby|xena|yoga|zed))
+ negate: true
nodeset: octavia-single-node-ubuntu-jammy
- job:
@@ -870,35 +900,18 @@
override-checkout: 1.6.0
- job:
- name: octavia-v2-dsvm-tls-barbican-stable-yoga
+ name: octavia-v2-dsvm-tls-barbican-stable-2023-2
parent: octavia-v2-dsvm-tls-barbican
- nodeset: octavia-single-node-ubuntu-focal
- override-checkout: stable/yoga
+ nodeset: octavia-single-node-ubuntu-jammy
+    override-checkout: stable/2023.2
- job:
- name: octavia-v2-dsvm-tls-barbican-stable-xena
+ name: octavia-v2-dsvm-tls-barbican-stable-2023-1
parent: octavia-v2-dsvm-tls-barbican
- nodeset: octavia-single-node-ubuntu-focal
- override-checkout: stable/xena
+ nodeset: octavia-single-node-ubuntu-jammy
+ override-checkout: stable/2023.1
-- job:
- name: octavia-v2-dsvm-tls-barbican-stable-wallaby
- parent: octavia-v2-dsvm-tls-barbican
- nodeset: octavia-single-node-ubuntu-focal
- override-checkout: stable/wallaby
-
-- job:
- name: octavia-v2-dsvm-tls-barbican-stable-victoria
- parent: octavia-v2-dsvm-tls-barbican
- nodeset: octavia-single-node-ubuntu-focal
- override-checkout: stable/victoria
-
-- job:
- name: octavia-v2-dsvm-tls-barbican-stable-ussuri
- parent: octavia-v2-dsvm-tls-barbican
- nodeset: octavia-single-node-ubuntu-bionic
- override-checkout: stable/ussuri
-
+# Still used by barbican
- job:
name: octavia-v2-dsvm-tls-barbican-stable-train
parent: octavia-v2-dsvm-tls-barbican
@@ -1068,7 +1081,9 @@
- job:
name: octavia-v2-act-stdby-dsvm-scenario
parent: octavia-v2-act-stdby-dsvm-scenario-base
- branches: ^(?!stable/(train|ussuri|victoria|wallaby|xena|yoga|zed))
+ branches:
+ regex: ^(stable/(train|ussuri|victoria|wallaby|xena|yoga|zed))
+ negate: true
nodeset: octavia-single-node-ubuntu-jammy
- job:
@@ -1084,22 +1099,16 @@
nodeset: octavia-single-node-ubuntu-bionic
- job:
- name: octavia-v2-act-stdby-dsvm-scenario-stable-yoga
+ name: octavia-v2-act-stdby-dsvm-scenario-stable-2023-2
parent: octavia-v2-act-stdby-dsvm-scenario
- nodeset: octavia-single-node-ubuntu-focal
- override-checkout: stable/yoga
+ nodeset: octavia-single-node-ubuntu-jammy
+ override-checkout: stable/2023.2
- job:
- name: octavia-v2-act-stdby-dsvm-scenario-stable-xena
+ name: octavia-v2-act-stdby-dsvm-scenario-stable-2023-1
parent: octavia-v2-act-stdby-dsvm-scenario
- nodeset: octavia-single-node-ubuntu-focal
- override-checkout: stable/xena
-
-- job:
- name: octavia-v2-act-stdby-dsvm-scenario-stable-wallaby
- parent: octavia-v2-act-stdby-dsvm-scenario
- nodeset: octavia-single-node-ubuntu-focal
- override-checkout: stable/wallaby
+ nodeset: octavia-single-node-ubuntu-jammy
+ override-checkout: stable/2023.1
######### Third party jobs ##########
diff --git a/zuul.d/projects.yaml b/zuul.d/projects.yaml
index 50b83cf..bddea7a 100644
--- a/zuul.d/projects.yaml
+++ b/zuul.d/projects.yaml
@@ -9,22 +9,20 @@
check:
jobs:
- octavia-v2-dsvm-noop-api
- - octavia-v2-dsvm-noop-api-stable-yoga
- - octavia-v2-dsvm-noop-api-stable-xena
- - octavia-v2-dsvm-noop-api-stable-wallaby
+ - octavia-v2-dsvm-noop-api-stable-2023-2
+ - octavia-v2-dsvm-noop-api-stable-2023-1
- octavia-v2-dsvm-noop-api-keystone-default-roles
- - octavia-v2-dsvm-scenario
- - octavia-v2-dsvm-scenario-stable-yoga
- - octavia-v2-dsvm-scenario-stable-xena
- - octavia-v2-dsvm-scenario-stable-wallaby
+ - octavia-v2-dsvm-scenario-traffic-ops
+ - octavia-v2-dsvm-scenario-non-traffic-ops
+ - octavia-v2-dsvm-scenario-traffic-ops-stable-2023-2
+ - octavia-v2-dsvm-scenario-non-traffic-ops-stable-2023-2
+ - octavia-v2-dsvm-scenario-traffic-ops-stable-2023-1
+ - octavia-v2-dsvm-scenario-non-traffic-ops-stable-2023-1
- octavia-v2-dsvm-tls-barbican
- - octavia-v2-dsvm-tls-barbican-stable-yoga
- - octavia-v2-dsvm-tls-barbican-stable-xena
- - octavia-v2-dsvm-tls-barbican-stable-wallaby
+ - octavia-v2-dsvm-tls-barbican-stable-2023-2
+ - octavia-v2-dsvm-tls-barbican-stable-2023-1
- octavia-v2-dsvm-scenario-ipv6-only:
voting: false
- - octavia-v2-dsvm-scenario-centos-8-stream:
- voting: false
- octavia-v2-dsvm-scenario-centos-9-stream-traffic-ops:
voting: false
- octavia-v2-dsvm-scenario-centos-9-stream-non-traffic-ops:
@@ -37,13 +35,9 @@
voting: false
- octavia-v2-act-stdby-dsvm-scenario:
voting: false
- - octavia-v2-act-stdby-dsvm-scenario-stable-yoga:
+ - octavia-v2-act-stdby-dsvm-scenario-stable-2023-2:
voting: false
- - octavia-v2-act-stdby-dsvm-scenario-stable-xena:
- voting: false
- - octavia-v2-act-stdby-dsvm-scenario-stable-wallaby:
- voting: false
- - octavia-v2-dsvm-spare-pool-stable-wallaby:
+ - octavia-v2-act-stdby-dsvm-scenario-stable-2023-1:
voting: false
- octavia-v2-dsvm-cinder-amphora:
voting: false
@@ -57,15 +51,15 @@
fail-fast: true
jobs:
- octavia-v2-dsvm-noop-api
- - octavia-v2-dsvm-noop-api-stable-yoga
- - octavia-v2-dsvm-noop-api-stable-xena
- - octavia-v2-dsvm-noop-api-stable-wallaby
+ - octavia-v2-dsvm-noop-api-stable-2023-2
+ - octavia-v2-dsvm-noop-api-stable-2023-1
- octavia-v2-dsvm-noop-api-keystone-default-roles
- - octavia-v2-dsvm-scenario
- - octavia-v2-dsvm-scenario-stable-yoga
- - octavia-v2-dsvm-scenario-stable-xena
- - octavia-v2-dsvm-scenario-stable-wallaby
+ - octavia-v2-dsvm-scenario-traffic-ops
+ - octavia-v2-dsvm-scenario-non-traffic-ops
+ - octavia-v2-dsvm-scenario-traffic-ops-stable-2023-2
+ - octavia-v2-dsvm-scenario-non-traffic-ops-stable-2023-2
+ - octavia-v2-dsvm-scenario-traffic-ops-stable-2023-1
+ - octavia-v2-dsvm-scenario-non-traffic-ops-stable-2023-1
- octavia-v2-dsvm-tls-barbican
- - octavia-v2-dsvm-tls-barbican-stable-yoga
- - octavia-v2-dsvm-tls-barbican-stable-xena
- - octavia-v2-dsvm-tls-barbican-stable-wallaby
+ - octavia-v2-dsvm-tls-barbican-stable-2023-2
+ - octavia-v2-dsvm-tls-barbican-stable-2023-1