Create api+scenario tests for l7policies
This patch implements l7policy API and scenario tests for the
Octavia Tempest Plugin, along with the L7PolicyClient service
client and the related constants.
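
A minimal way to run only these tests (illustrative command; assumes
a standard tempest environment with this plugin installed):

    tempest run --regex octavia_tempest_plugin.tests.*.v2.test_l7policy
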
Depends-On: https://review.openstack.org/#/c/571997/
Change-Id: I58061cd69704da80e48c0963b6639ff8d6051c01
Story: 2001387
Task: 5976
diff --git a/octavia_tempest_plugin/clients.py b/octavia_tempest_plugin/clients.py
index 1bf1ebf..bb08c3b 100644
--- a/octavia_tempest_plugin/clients.py
+++ b/octavia_tempest_plugin/clients.py
@@ -18,6 +18,8 @@
from octavia_tempest_plugin.services.load_balancer.v2 import (
healthmonitor_client)
from octavia_tempest_plugin.services.load_balancer.v2 import (
+ l7policy_client)
+from octavia_tempest_plugin.services.load_balancer.v2 import (
listener_client)
from octavia_tempest_plugin.services.load_balancer.v2 import (
loadbalancer_client)
@@ -45,3 +47,5 @@
self.auth_provider, SERVICE_TYPE, CONF.identity.region)
self.healthmonitor_client = healthmonitor_client.HealthMonitorClient(
self.auth_provider, SERVICE_TYPE, CONF.identity.region)
+ self.l7policy_client = l7policy_client.L7PolicyClient(
+ self.auth_provider, SERVICE_TYPE, CONF.identity.region)
diff --git a/octavia_tempest_plugin/common/constants.py b/octavia_tempest_plugin/common/constants.py
index 0b51eb1..cddc04b 100644
--- a/octavia_tempest_plugin/common/constants.py
+++ b/octavia_tempest_plugin/common/constants.py
@@ -115,6 +115,16 @@
SESSION_PERSISTENCE_HTTP_COOKIE = 'HTTP_COOKIE'
SESSION_PERSISTENCE_APP_COOKIE = 'APP_COOKIE'
+# L7Policy options
+POSITION = 'position'
+REDIRECT_URL = 'redirect_url'
+REDIRECT_POOL_ID = 'redirect_pool_id'
+
+ACTION = 'action'
+REDIRECT_TO_POOL = 'REDIRECT_TO_POOL'
+REDIRECT_TO_URL = 'REDIRECT_TO_URL'
+REJECT = 'REJECT'
+
# RBAC options
ADVANCED = 'advanced'
OWNERADMIN = 'owner_or_admin'
@@ -152,3 +162,9 @@
TYPE, DELAY, TIMEOUT, MAX_RETRIES, MAX_RETRIES_DOWN, HTTP_METHOD,
URL_PATH, EXPECTED_CODES, CREATED_AT, UPDATED_AT
)
+
+SHOW_L7POLICY_RESPONSE_FIELDS = (
+ ID, NAME, DESCRIPTION, PROVISIONING_STATUS, OPERATING_STATUS,
+ ADMIN_STATE_UP, LISTENER_ID, POSITION, ACTION, REDIRECT_URL,
+ REDIRECT_POOL_ID, CREATED_AT, UPDATED_AT
+)
diff --git a/octavia_tempest_plugin/services/load_balancer/v2/l7policy_client.py b/octavia_tempest_plugin/services/load_balancer/v2/l7policy_client.py
new file mode 100644
index 0000000..674ec02
--- /dev/null
+++ b/octavia_tempest_plugin/services/load_balancer/v2/l7policy_client.py
@@ -0,0 +1,245 @@
+# Copyright 2018 GoDaddy
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from tempest import config
+
+from octavia_tempest_plugin.services.load_balancer.v2 import base_client
+
+CONF = config.CONF
+Unset = base_client.Unset
+
+
+class L7PolicyClient(base_client.BaseLBaaSClient):
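+    """Service client for the load balancer v2 l7policy API."""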
+
+ root_tag = 'l7policy'
+ list_root_tag = 'l7policies'
+ resource_name = 'l7policy'
+
+ def create_l7policy(self, listener_id, action, name=Unset,
+ description=Unset, admin_state_up=Unset,
+ position=Unset, redirect_pool_id=Unset,
+ redirect_url=Unset, return_object_only=True):
+        """Create an l7policy.
+
+ :param listener_id: The ID of the listener for the l7policy.
+ :param action: The l7policy action.
+ :param name: Human-readable name of the resource.
+ :param description: A human-readable description for the resource.
+ :param admin_state_up: The administrative state of the resource, which
+ is up (true) or down (false).
+ :param position: The position of this policy on the listener.
+ :param redirect_pool_id: Requests matching this policy will be
+ redirected to the pool with this ID.
+ :param redirect_url: Requests matching this policy will be redirected
+ to this URL.
+ :param return_object_only: If True, the response returns the object
+ inside the root tag. False returns the full
+ response from the API.
+ :raises AssertionError: if the expected_code isn't a valid http success
+ response code
+ :raises BadRequest: If a 400 response code is received
+ :raises Conflict: If a 409 response code is received
+ :raises Forbidden: If a 403 response code is received
+ :raises Gone: If a 410 response code is received
+ :raises InvalidContentType: If a 415 response code is received
+ :raises InvalidHTTPResponseBody: The response body wasn't valid JSON
+ :raises InvalidHttpSuccessCode: if the read code isn't an expected
+ http success code
+ :raises NotFound: If a 404 response code is received
+ :raises NotImplemented: If a 501 response code is received
+ :raises OverLimit: If a 413 response code is received and over_limit is
+ not in the response body
+ :raises RateLimitExceeded: If a 413 response code is received and
+ over_limit is in the response body
+ :raises ServerFault: If a 500 response code is received
+ :raises Unauthorized: If a 401 response code is received
+ :raises UnexpectedContentType: If the content-type of the response
+                                        isn't an expected type
+ :raises UnexpectedResponseCode: If a response code above 400 is
+ received and it doesn't fall into any
+ of the handled checks
+ :raises UnprocessableEntity: If a 422 response code is received and
+ couldn't be parsed
+        :returns: An l7policy object.
+ """
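+        # Build the request from only the explicitly provided parameters;
+        # Unset sentinel values are filtered out so that optional fields
+        # are omitted from the API request entirely.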
+ kwargs = {arg: value for arg, value in locals().items()
+ if arg != 'self' and value is not Unset}
+ return self._create_object(**kwargs)
+
+ def show_l7policy(self, l7policy_id, query_params=None,
+ return_object_only=True):
+ """Get l7policy details.
+
+ :param l7policy_id: The l7policy ID to query.
+ :param query_params: The optional query parameters to append to the
+ request. Ex. fields=id&fields=name
+ :param return_object_only: If True, the response returns the object
+ inside the root tag. False returns the full
+ response from the API.
+ :raises AssertionError: if the expected_code isn't a valid http success
+ response code
+ :raises BadRequest: If a 400 response code is received
+ :raises Conflict: If a 409 response code is received
+ :raises Forbidden: If a 403 response code is received
+ :raises Gone: If a 410 response code is received
+ :raises InvalidContentType: If a 415 response code is received
+ :raises InvalidHTTPResponseBody: The response body wasn't valid JSON
+ :raises InvalidHttpSuccessCode: if the read code isn't an expected
+ http success code
+ :raises NotFound: If a 404 response code is received
+ :raises NotImplemented: If a 501 response code is received
+ :raises OverLimit: If a 413 response code is received and over_limit is
+ not in the response body
+ :raises RateLimitExceeded: If a 413 response code is received and
+ over_limit is in the response body
+ :raises ServerFault: If a 500 response code is received
+ :raises Unauthorized: If a 401 response code is received
+ :raises UnexpectedContentType: If the content-type of the response
+                                        isn't an expected type
+ :raises UnexpectedResponseCode: If a response code above 400 is
+ received and it doesn't fall into any
+ of the handled checks
+ :raises UnprocessableEntity: If a 422 response code is received and
+ couldn't be parsed
+        :returns: An l7policy object.
+ """
+ return self._show_object(obj_id=l7policy_id,
+ query_params=query_params,
+ return_object_only=return_object_only)
+
+ def list_l7policies(self, query_params=None, return_object_only=True):
+ """Get a list of l7policy objects.
+
+ :param query_params: The optional query parameters to append to the
+ request. Ex. fields=id&fields=name
+ :param return_object_only: If True, the response returns the object
+ inside the root tag. False returns the full
+ response from the API.
+ :raises AssertionError: if the expected_code isn't a valid http success
+ response code
+ :raises BadRequest: If a 400 response code is received
+ :raises Conflict: If a 409 response code is received
+ :raises Forbidden: If a 403 response code is received
+ :raises Gone: If a 410 response code is received
+ :raises InvalidContentType: If a 415 response code is received
+ :raises InvalidHTTPResponseBody: The response body wasn't valid JSON
+ :raises InvalidHttpSuccessCode: if the read code isn't an expected
+ http success code
+ :raises NotFound: If a 404 response code is received
+ :raises NotImplemented: If a 501 response code is received
+ :raises OverLimit: If a 413 response code is received and over_limit is
+ not in the response body
+ :raises RateLimitExceeded: If a 413 response code is received and
+ over_limit is in the response body
+ :raises ServerFault: If a 500 response code is received
+ :raises Unauthorized: If a 401 response code is received
+ :raises UnexpectedContentType: If the content-type of the response
+                                        isn't an expected type
+ :raises UnexpectedResponseCode: If a response code above 400 is
+ received and it doesn't fall into any
+ of the handled checks
+ :raises UnprocessableEntity: If a 422 response code is received and
+ couldn't be parsed
+ :returns: A list of l7policy objects.
+ """
+ return self._list_objects(query_params=query_params,
+ return_object_only=return_object_only)
+
+ def update_l7policy(self, l7policy_id, action=Unset, name=Unset,
+ description=Unset, admin_state_up=Unset,
+ position=Unset, redirect_pool_id=Unset,
+ redirect_url=Unset, return_object_only=True):
+        """Update an l7policy.
+
+ :param l7policy_id: The l7policy ID to update.
+ :param action: The l7policy action.
+ :param name: Human-readable name of the resource.
+ :param description: A human-readable description for the resource.
+ :param admin_state_up: The administrative state of the resource, which
+ is up (true) or down (false).
+ :param position: The position of this policy on the listener.
+ :param redirect_pool_id: Requests matching this policy will be
+ redirected to the pool with this ID.
+ :param redirect_url: Requests matching this policy will be redirected
+ to this URL.
+ :param return_object_only: If True, the response returns the object
+ inside the root tag. False returns the full
+ response from the API.
+ :raises AssertionError: if the expected_code isn't a valid http success
+ response code
+ :raises BadRequest: If a 400 response code is received
+ :raises Conflict: If a 409 response code is received
+ :raises Forbidden: If a 403 response code is received
+ :raises Gone: If a 410 response code is received
+ :raises InvalidContentType: If a 415 response code is received
+ :raises InvalidHTTPResponseBody: The response body wasn't valid JSON
+ :raises InvalidHttpSuccessCode: if the read code isn't an expected
+ http success code
+ :raises NotFound: If a 404 response code is received
+ :raises NotImplemented: If a 501 response code is received
+ :raises OverLimit: If a 413 response code is received and over_limit is
+ not in the response body
+ :raises RateLimitExceeded: If a 413 response code is received and
+ over_limit is in the response body
+ :raises ServerFault: If a 500 response code is received
+ :raises Unauthorized: If a 401 response code is received
+ :raises UnexpectedContentType: If the content-type of the response
+                                        isn't an expected type
+ :raises UnexpectedResponseCode: If a response code above 400 is
+ received and it doesn't fall into any
+ of the handled checks
+ :raises UnprocessableEntity: If a 422 response code is received and
+ couldn't be parsed
+        :returns: An l7policy object.
+ """
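+        # Filter out Unset values, then rename l7policy_id to the generic
+        # 'obj_id' key that the base client expects before delegating.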
+ kwargs = {arg: value for arg, value in locals().items()
+ if arg != 'self' and value is not Unset}
+ kwargs['obj_id'] = kwargs.pop('l7policy_id')
+ return self._update_object(**kwargs)
+
+ def delete_l7policy(self, l7policy_id, ignore_errors=False):
+        """Delete an l7policy.
+
+ :param l7policy_id: The l7policy ID to delete.
+ :param ignore_errors: True if errors should be ignored.
+ :raises AssertionError: if the expected_code isn't a valid http success
+ response code
+ :raises BadRequest: If a 400 response code is received
+ :raises Conflict: If a 409 response code is received
+ :raises Forbidden: If a 403 response code is received
+ :raises Gone: If a 410 response code is received
+ :raises InvalidContentType: If a 415 response code is received
+ :raises InvalidHTTPResponseBody: The response body wasn't valid JSON
+ :raises InvalidHttpSuccessCode: if the read code isn't an expected
+ http success code
+ :raises NotFound: If a 404 response code is received
+ :raises NotImplemented: If a 501 response code is received
+ :raises OverLimit: If a 413 response code is received and over_limit is
+ not in the response body
+ :raises RateLimitExceeded: If a 413 response code is received and
+ over_limit is in the response body
+ :raises ServerFault: If a 500 response code is received
+ :raises Unauthorized: If a 401 response code is received
+ :raises UnexpectedContentType: If the content-type of the response
+                                        isn't an expected type
+ :raises UnexpectedResponseCode: If a response code above 400 is
+ received and it doesn't fall into any
+ of the handled checks
+ :raises UnprocessableEntity: If a 422 response code is received and
+ couldn't be parsed
+ :returns: None if ignore_errors is True, the response status code
+ if not.
+ """
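+        # ignore_errors=True is intended for cleanup paths where the
+        # l7policy may already have been deleted.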
+ return self._delete_obj(obj_id=l7policy_id,
+ ignore_errors=ignore_errors)
diff --git a/octavia_tempest_plugin/tests/api/v2/test_l7policy.py b/octavia_tempest_plugin/tests/api/v2/test_l7policy.py
new file mode 100644
index 0000000..90c6db4
--- /dev/null
+++ b/octavia_tempest_plugin/tests/api/v2/test_l7policy.py
@@ -0,0 +1,789 @@
+# Copyright 2018 GoDaddy
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import time
+from uuid import UUID
+
+from dateutil import parser
+from tempest import config
+from tempest.lib.common.utils import data_utils
+from tempest.lib import decorators
+from tempest.lib import exceptions
+
+from octavia_tempest_plugin.common import constants as const
+from octavia_tempest_plugin.tests import test_base
+from octavia_tempest_plugin.tests import waiters
+
+CONF = config.CONF
+
+
+class L7PolicyAPITest(test_base.LoadBalancerBaseTest):
+ """Test the l7policy object API."""
+
+ @classmethod
+ def resource_setup(cls):
+        """Set up resources needed by the tests."""
+ super(L7PolicyAPITest, cls).resource_setup()
+
+ lb_name = data_utils.rand_name("lb_member_lb1_l7policy")
+ lb_kwargs = {const.PROVIDER: CONF.load_balancer.provider,
+ const.NAME: lb_name}
+
+ cls._setup_lb_network_kwargs(lb_kwargs)
+
+ lb = cls.mem_lb_client.create_loadbalancer(**lb_kwargs)
+ cls.lb_id = lb[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_lb_client.cleanup_loadbalancer,
+ cls.lb_id)
+
+ waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
+ cls.lb_id, const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.lb_build_interval,
+ CONF.load_balancer.lb_build_timeout)
+
+ listener_name = data_utils.rand_name("lb_member_listener1_l7policy")
+ listener_kwargs = {
+ const.NAME: listener_name,
+ const.PROTOCOL: const.HTTP,
+ const.PROTOCOL_PORT: '80',
+ const.LOADBALANCER_ID: cls.lb_id,
+ }
+ listener = cls.mem_listener_client.create_listener(**listener_kwargs)
+ cls.listener_id = listener[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_listener_client.cleanup_listener,
+ cls.listener_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
+ waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
+ cls.lb_id, const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+
+ pool_name = data_utils.rand_name("lb_member_pool1_l7policy")
+ pool_kwargs = {
+ const.NAME: pool_name,
+ const.PROTOCOL: const.HTTP,
+ const.LB_ALGORITHM: const.LB_ALGORITHM_ROUND_ROBIN,
+ const.LOADBALANCER_ID: cls.lb_id,
+ }
+
+ pool = cls.mem_pool_client.create_pool(**pool_kwargs)
+ cls.pool_id = pool[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_pool_client.cleanup_pool,
+ cls.pool_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
+ waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
+ cls.lb_id, const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+
+ @decorators.idempotent_id('eba4ddc2-758b-4dd5-bd28-c1b41d6575ca')
+ def test_l7policy_create_redirect_pool(self):
+ self._test_l7policy_create(pool_id=self.pool_id)
+
+ @decorators.idempotent_id('2b529135-71bc-46f7-912f-74d238d67190')
+ def test_l7policy_create_redirect_url(self):
+ self._test_l7policy_create(url='http://localhost')
+
+ @decorators.idempotent_id('aa9b0d50-0d16-4365-85eb-846b17eb8398')
+ def test_l7policy_create_reject(self):
+ self._test_l7policy_create()
+
+ def _test_l7policy_create(self, url=None, pool_id=None):
+ """Tests l7policy create and basic show APIs.
+
+ * Tests that users without the loadbalancer member role cannot
+ create l7policies.
+ * Create a fully populated l7policy.
+ * Show l7policy details.
+ * Validate the show reflects the requested values.
+ """
+ l7policy_name = data_utils.rand_name("lb_member_l7policy1-create")
+ l7policy_description = data_utils.arbitrary_string(size=255)
+ l7policy_kwargs = {
+ const.LISTENER_ID: self.listener_id,
+ const.NAME: l7policy_name,
+ const.DESCRIPTION: l7policy_description,
+ const.ADMIN_STATE_UP: True,
+ const.POSITION: 1,
+ }
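+        # Exactly one action applies per policy: REDIRECT_TO_URL requires
+        # redirect_url, REDIRECT_TO_POOL requires redirect_pool_id, and
+        # REJECT takes neither.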
+ if url:
+ l7policy_kwargs[const.ACTION] = const.REDIRECT_TO_URL
+ l7policy_kwargs[const.REDIRECT_URL] = url
+ elif pool_id:
+ l7policy_kwargs[const.ACTION] = const.REDIRECT_TO_POOL
+ l7policy_kwargs[const.REDIRECT_POOL_ID] = pool_id
+ else:
+ l7policy_kwargs[const.ACTION] = const.REJECT
+
+        # Test that a user without the load balancer member role cannot
+        # create an l7policy
+ if CONF.load_balancer.RBAC_test_type == const.ADVANCED:
+ self.assertRaises(
+ exceptions.Forbidden,
+ self.os_primary.l7policy_client.create_l7policy,
+ **l7policy_kwargs)
+
+ l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
+ self.addClassResourceCleanup(
+ self.mem_l7policy_client.cleanup_l7policy,
+ l7policy[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+ l7policy = waiters.wait_for_status(
+ self.mem_l7policy_client.show_l7policy,
+ l7policy[const.ID], const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+ if not CONF.load_balancer.test_with_noop:
+ l7policy = waiters.wait_for_status(
+ self.mem_l7policy_client.show_l7policy,
+ l7policy[const.ID], const.OPERATING_STATUS,
+ const.ONLINE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+
+ self.assertEqual(l7policy_name, l7policy[const.NAME])
+ self.assertEqual(l7policy_description, l7policy[const.DESCRIPTION])
+ self.assertTrue(l7policy[const.ADMIN_STATE_UP])
+ parser.parse(l7policy[const.CREATED_AT])
+ parser.parse(l7policy[const.UPDATED_AT])
+ UUID(l7policy[const.ID])
+        # Operating status for an l7policy will be ONLINE if it is enabled:
+ if l7policy[const.ADMIN_STATE_UP]:
+ self.assertEqual(const.ONLINE, l7policy[const.OPERATING_STATUS])
+ else:
+ self.assertEqual(const.OFFLINE, l7policy[const.OPERATING_STATUS])
+ self.assertEqual(self.listener_id, l7policy[const.LISTENER_ID])
+ self.assertEqual(1, l7policy[const.POSITION])
+ if url:
+ self.assertEqual(const.REDIRECT_TO_URL, l7policy[const.ACTION])
+ self.assertEqual(url, l7policy[const.REDIRECT_URL])
+ self.assertIsNone(l7policy.pop(const.REDIRECT_POOL_ID, None))
+ elif pool_id:
+ self.assertEqual(const.REDIRECT_TO_POOL, l7policy[const.ACTION])
+ self.assertEqual(pool_id, l7policy[const.REDIRECT_POOL_ID])
+ self.assertIsNone(l7policy.pop(const.REDIRECT_URL, None))
+ else:
+ self.assertEqual(const.REJECT, l7policy[const.ACTION])
+ self.assertIsNone(l7policy.pop(const.REDIRECT_URL, None))
+ self.assertIsNone(l7policy.pop(const.REDIRECT_POOL_ID, None))
+
+ @decorators.idempotent_id('42fa14ba-caf1-465e-ab36-27e7501f95ef')
+ def test_l7policy_list(self):
+ """Tests l7policy list API and field filtering.
+
+ * Create a clean listener.
+ * Create three l7policies.
+ * Validates that other accounts cannot list the l7policies.
+ * List the l7policies using the default sort order.
+ * List the l7policies using descending sort order.
+ * List the l7policies using ascending sort order.
+ * List the l7policies returning one field at a time.
+ * List the l7policies returning two fields.
+ * List the l7policies filtering to one of the three.
+ * List the l7policies filtered, one field, and sorted.
+ """
+ listener_name = data_utils.rand_name(
+ "lb_member_listener2_l7policy-list")
+ listener_kwargs = {
+ const.NAME: listener_name,
+ const.PROTOCOL: const.HTTP,
+ const.PROTOCOL_PORT: '81',
+ const.LOADBALANCER_ID: self.lb_id,
+ }
+ listener = self.mem_listener_client.create_listener(**listener_kwargs)
+ listener_id = listener[const.ID]
+ self.addCleanup(
+ self.mem_listener_client.cleanup_listener,
+ listener_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
+ waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
+ self.lb_id, const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+
+ l7policy1_name = data_utils.rand_name("lb_member_l7policy2-list")
+ l7policy1_desc = 'B'
+ l7policy1_kwargs = {
+ const.LISTENER_ID: listener_id,
+ const.NAME: l7policy1_name,
+ const.DESCRIPTION: l7policy1_desc,
+ const.ADMIN_STATE_UP: True,
+ const.POSITION: 1,
+ const.ACTION: const.REJECT
+ }
+ l7policy1 = self.mem_l7policy_client.create_l7policy(
+ **l7policy1_kwargs)
+ self.addCleanup(
+ self.mem_l7policy_client.cleanup_l7policy,
+ l7policy1[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+ l7policy1 = waiters.wait_for_status(
+ self.mem_l7policy_client.show_l7policy, l7policy1[const.ID],
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+ waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
+ self.lb_id,
+ const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+ # Time resolution for created_at is only to the second, and we need to
+ # ensure that each object has a distinct creation time. Delaying one
+ # second is both a simple and a reliable way to accomplish this.
+ time.sleep(1)
+
+ l7policy2_name = data_utils.rand_name("lb_member_l7policy1-list")
+ l7policy2_desc = 'A'
+ l7policy2_kwargs = {
+ const.LISTENER_ID: listener_id,
+ const.NAME: l7policy2_name,
+ const.DESCRIPTION: l7policy2_desc,
+ const.ADMIN_STATE_UP: True,
+ const.POSITION: 1,
+ const.ACTION: const.REDIRECT_TO_POOL,
+ const.REDIRECT_POOL_ID: self.pool_id
+ }
+ l7policy2 = self.mem_l7policy_client.create_l7policy(
+ **l7policy2_kwargs)
+ self.addCleanup(
+ self.mem_l7policy_client.cleanup_l7policy,
+ l7policy2[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+ l7policy2 = waiters.wait_for_status(
+ self.mem_l7policy_client.show_l7policy, l7policy2[const.ID],
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+ waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
+ self.lb_id,
+ const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+ # Time resolution for created_at is only to the second, and we need to
+ # ensure that each object has a distinct creation time. Delaying one
+ # second is both a simple and a reliable way to accomplish this.
+ time.sleep(1)
+
+ l7policy3_name = data_utils.rand_name("lb_member_l7policy3-list")
+ l7policy3_desc = 'C'
+ l7_redirect_url = 'http://localhost'
+ l7policy3_kwargs = {
+ const.LISTENER_ID: listener_id,
+ const.NAME: l7policy3_name,
+ const.DESCRIPTION: l7policy3_desc,
+ const.ADMIN_STATE_UP: False,
+ const.POSITION: 1,
+ const.ACTION: const.REDIRECT_TO_URL,
+ const.REDIRECT_URL: l7_redirect_url
+ }
+ l7policy3 = self.mem_l7policy_client.create_l7policy(
+ **l7policy3_kwargs)
+ self.addCleanup(
+ self.mem_l7policy_client.cleanup_l7policy,
+ l7policy3[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+ l7policy3 = waiters.wait_for_status(
+ self.mem_l7policy_client.show_l7policy, l7policy3[const.ID],
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+ waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
+ self.lb_id,
+ const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+
+ # Test that a different user cannot list l7policies
+ if not CONF.load_balancer.RBAC_test_type == const.NONE:
+ member2_client = self.os_roles_lb_member2.l7policy_client
+            l7policies = member2_client.list_l7policies(
+ query_params='listener_id={listener_id}'.format(
+ listener_id=listener_id))
+            self.assertEqual(0, len(l7policies))
+
+        # Test that a user without the lb member role cannot list
+        # l7policies
+ if CONF.load_balancer.RBAC_test_type == const.ADVANCED:
+ self.assertRaises(
+ exceptions.Forbidden,
+ self.os_primary.l7policy_client.list_l7policies)
+
+ # Check the default sort order, created_at
+ l7policies = self.mem_l7policy_client.list_l7policies(
+ query_params='listener_id={listener_id}'.format(
+ listener_id=listener_id))
+ self.assertEqual(l7policy1[const.DESCRIPTION],
+ l7policies[0][const.DESCRIPTION])
+ self.assertEqual(l7policy2[const.DESCRIPTION],
+ l7policies[1][const.DESCRIPTION])
+ self.assertEqual(l7policy3[const.DESCRIPTION],
+ l7policies[2][const.DESCRIPTION])
+
+ # Test sort descending by description
+ l7policies = self.mem_l7policy_client.list_l7policies(
+ query_params='listener_id={listener_id}&{sort}={descr}:{desc}'
+ .format(listener_id=listener_id, sort=const.SORT,
+ descr=const.DESCRIPTION, desc=const.DESC))
+ self.assertEqual(l7policy1[const.DESCRIPTION],
+ l7policies[1][const.DESCRIPTION])
+ self.assertEqual(l7policy2[const.DESCRIPTION],
+ l7policies[2][const.DESCRIPTION])
+ self.assertEqual(l7policy3[const.DESCRIPTION],
+ l7policies[0][const.DESCRIPTION])
+
+ # Test sort ascending by description
+ l7policies = self.mem_l7policy_client.list_l7policies(
+ query_params='listener_id={listener_id}&{sort}={descr}:{asc}'
+ .format(listener_id=listener_id, sort=const.SORT,
+ descr=const.DESCRIPTION, asc=const.ASC))
+ self.assertEqual(l7policy1[const.DESCRIPTION],
+ l7policies[1][const.DESCRIPTION])
+ self.assertEqual(l7policy2[const.DESCRIPTION],
+ l7policies[0][const.DESCRIPTION])
+ self.assertEqual(l7policy3[const.DESCRIPTION],
+ l7policies[2][const.DESCRIPTION])
+
+ # Use this opportunity to verify the position insertion is working
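+        # (each policy was created at position 1, so every new policy
+        # pushed the earlier ones down; position order is the reverse of
+        # creation order)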
+ l7policies = self.mem_l7policy_client.list_l7policies(
+ query_params='listener_id={listener_id}&{sort}={position}:{asc}'
+ .format(listener_id=listener_id, sort=const.SORT,
+ position=const.POSITION, asc=const.ASC))
+ self.assertEqual(1, l7policies[0][const.POSITION])
+ self.assertEqual(2, l7policies[1][const.POSITION])
+ self.assertEqual(3, l7policies[2][const.POSITION])
+ self.assertEqual(l7policy1[const.NAME],
+ l7policies[2][const.NAME])
+ self.assertEqual(l7policy2[const.NAME],
+ l7policies[1][const.NAME])
+ self.assertEqual(l7policy3[const.NAME],
+ l7policies[0][const.NAME])
+
+ # Test fields
+ for field in const.SHOW_L7POLICY_RESPONSE_FIELDS:
+            # Test position and updated_at separately: position won't match
+            # the requested value (insertion reorders policies), and
+            # updated_at is not predictable
+ if field not in (const.POSITION, const.UPDATED_AT):
+ l7policies = self.mem_l7policy_client.list_l7policies(
+ query_params='listener_id={listener_id}&{fields}={field}'
+ .format(listener_id=listener_id,
+ fields=const.FIELDS, field=field))
+ self.assertEqual(1, len(l7policies[0]))
+ self.assertEqual(l7policy1[field], l7policies[0][field])
+ self.assertEqual(l7policy2[field], l7policies[1][field])
+ self.assertEqual(l7policy3[field], l7policies[2][field])
+ elif field == const.POSITION:
+ l7policies = self.mem_l7policy_client.list_l7policies(
+ query_params='listener_id={listener_id}&{fields}={field}'
+ .format(listener_id=listener_id,
+ fields=const.FIELDS, field=field))
+ self.assertEqual(1, len(l7policies[0]))
+ # Positions won't match the request due to insertion reordering
+ self.assertEqual(3, l7policies[0][field])
+ self.assertEqual(2, l7policies[1][field])
+ self.assertEqual(1, l7policies[2][field])
+ elif field == const.UPDATED_AT:
+ l7policies = self.mem_l7policy_client.list_l7policies(
+ query_params='listener_id={listener_id}&{fields}={field}'
+ .format(listener_id=listener_id,
+ fields=const.FIELDS, field=field))
+ # Just test that we get it -- the actual value is unpredictable
+ self.assertEqual(1, len(l7policies[0]))
+
+ # Test multiple fields at the same time
+ l7policies = self.mem_l7policy_client.list_l7policies(
+ query_params='listener_id={listener_id}&{fields}={admin}&'
+ '{fields}={created}'.format(
+ listener_id=listener_id, fields=const.FIELDS,
+ admin=const.ADMIN_STATE_UP,
+ created=const.CREATED_AT))
+ self.assertEqual(2, len(l7policies[0]))
+ self.assertTrue(l7policies[0][const.ADMIN_STATE_UP])
+ parser.parse(l7policies[0][const.CREATED_AT])
+ self.assertTrue(l7policies[1][const.ADMIN_STATE_UP])
+ parser.parse(l7policies[1][const.CREATED_AT])
+ self.assertFalse(l7policies[2][const.ADMIN_STATE_UP])
+ parser.parse(l7policies[2][const.CREATED_AT])
+
+ # Test filtering
+ l7policies = self.mem_l7policy_client.list_l7policies(
+ query_params='listener_id={listener_id}&'
+ '{desc}={l7policy_desc}'.format(
+ listener_id=listener_id, desc=const.DESCRIPTION,
+ l7policy_desc=l7policy2[const.DESCRIPTION]))
+ self.assertEqual(1, len(l7policies))
+ self.assertEqual(l7policy2[const.DESCRIPTION],
+ l7policies[0][const.DESCRIPTION])
+
+ # Test combined params
+ l7policies = self.mem_l7policy_client.list_l7policies(
+ query_params='listener_id={listener_id}&{admin}={true}&'
+ '{fields}={descr}&{fields}={id}&'
+ '{sort}={descr}:{desc}'.format(
+ listener_id=listener_id,
+ admin=const.ADMIN_STATE_UP,
+ true=const.ADMIN_STATE_UP_TRUE,
+ fields=const.FIELDS, descr=const.DESCRIPTION,
+ id=const.ID, sort=const.SORT, desc=const.DESC))
+ # Should get two l7policies
+ self.assertEqual(2, len(l7policies))
+ # l7policies should have two fields
+ self.assertEqual(2, len(l7policies[0]))
+ # Should be in descending order
+ self.assertEqual(l7policy2[const.DESCRIPTION],
+ l7policies[1][const.DESCRIPTION])
+ self.assertEqual(l7policy1[const.DESCRIPTION],
+ l7policies[0][const.DESCRIPTION])
+
+ @decorators.idempotent_id('baaa8104-a037-4976-b908-82a0b3e08129')
+ def test_l7policy_show(self):
+ """Tests l7policy show API.
+
+ * Create a fully populated l7policy.
+ * Show l7policy details.
+ * Validate the show reflects the requested values.
+ * Validates that other accounts cannot see the l7policy.
+ """
+ listener_name = data_utils.rand_name(
+ "lb_member_listener4_l7policy-show")
+ listener_kwargs = {
+ const.NAME: listener_name,
+ const.PROTOCOL: const.HTTP,
+ const.PROTOCOL_PORT: '81',
+ const.LOADBALANCER_ID: self.lb_id,
+ }
+ listener = self.mem_listener_client.create_listener(**listener_kwargs)
+ listener_id = listener[const.ID]
+ self.addCleanup(
+ self.mem_listener_client.cleanup_listener,
+ listener_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
+ waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
+ self.lb_id, const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+ l7policy_name = data_utils.rand_name("lb_member_l7policy1-show")
+ l7policy_description = data_utils.arbitrary_string(size=255)
+ l7policy_kwargs = {
+ const.LISTENER_ID: listener_id,
+ const.NAME: l7policy_name,
+ const.DESCRIPTION: l7policy_description,
+ const.ADMIN_STATE_UP: True,
+ const.POSITION: 1,
+ const.ACTION: const.REJECT,
+ }
+
+ l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
+ self.addClassResourceCleanup(
+ self.mem_l7policy_client.cleanup_l7policy,
+ l7policy[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+ l7policy = waiters.wait_for_status(
+ self.mem_l7policy_client.show_l7policy,
+ l7policy[const.ID], const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+ if not CONF.load_balancer.test_with_noop:
+ l7policy = waiters.wait_for_status(
+ self.mem_l7policy_client.show_l7policy,
+ l7policy[const.ID], const.OPERATING_STATUS,
+ const.ONLINE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+
+ self.assertEqual(l7policy_name, l7policy[const.NAME])
+ self.assertEqual(l7policy_description, l7policy[const.DESCRIPTION])
+ self.assertTrue(l7policy[const.ADMIN_STATE_UP])
+ parser.parse(l7policy[const.CREATED_AT])
+ parser.parse(l7policy[const.UPDATED_AT])
+ UUID(l7policy[const.ID])
+        # Operating status for an l7policy will be ONLINE if it is enabled:
+ if l7policy[const.ADMIN_STATE_UP]:
+ self.assertEqual(const.ONLINE, l7policy[const.OPERATING_STATUS])
+ else:
+ self.assertEqual(const.OFFLINE, l7policy[const.OPERATING_STATUS])
+ self.assertEqual(listener_id, l7policy[const.LISTENER_ID])
+ self.assertEqual(1, l7policy[const.POSITION])
+ self.assertEqual(const.REJECT, l7policy[const.ACTION])
+ self.assertIsNone(l7policy.pop(const.REDIRECT_URL, None))
+ self.assertIsNone(l7policy.pop(const.REDIRECT_POOL_ID, None))
+
+ # Test that a user with lb_admin role can see the l7policy
+ if CONF.load_balancer.RBAC_test_type == const.ADVANCED:
+ l7policy_client = self.os_roles_lb_admin.l7policy_client
+ l7policy_adm = l7policy_client.show_l7policy(l7policy[const.ID])
+ self.assertEqual(l7policy_name, l7policy_adm[const.NAME])
+
+ # Test that a user with cloud admin role can see the l7policy
+ if not CONF.load_balancer.RBAC_test_type == const.NONE:
+ adm = self.os_admin.l7policy_client.show_l7policy(
+ l7policy[const.ID])
+ self.assertEqual(l7policy_name, adm[const.NAME])
+
+ # Test that a different user, with load balancer member role, cannot
+ # see this l7policy
+ if not CONF.load_balancer.RBAC_test_type == const.NONE:
+ member2_client = self.os_roles_lb_member2.l7policy_client
+ self.assertRaises(exceptions.Forbidden,
+ member2_client.show_l7policy,
+ l7policy[const.ID])
+
+ # Test that a user, without the load balancer member role, cannot
+ # show l7policies
+ if CONF.load_balancer.RBAC_test_type == const.ADVANCED:
+ self.assertRaises(
+ exceptions.Forbidden,
+ self.os_primary.l7policy_client.show_l7policy,
+ l7policy[const.ID])
+
+ @decorators.idempotent_id('08f73b22-550b-4e5a-b3d6-2ec03251ca13')
+ def test_l7policy_update(self):
+ """Tests l7policy update and show APIs.
+
+ * Create a clean listener.
+ * Create a fully populated l7policy.
+ * Show l7policy details.
+ * Validate the show reflects the initial values.
+ * Validates that other accounts cannot update the l7policy.
+ * Update the l7policy details.
+ * Show l7policy details.
+ * Validate the show reflects the updated values.
+ """
+ listener_name = data_utils.rand_name(
+ "lb_member_listener3_l7policy-update")
+ listener_kwargs = {
+ const.NAME: listener_name,
+ const.PROTOCOL: const.HTTP,
+ const.PROTOCOL_PORT: '81',
+ const.LOADBALANCER_ID: self.lb_id,
+ }
+ listener = self.mem_listener_client.create_listener(**listener_kwargs)
+ listener_id = listener[const.ID]
+ self.addCleanup(
+ self.mem_listener_client.cleanup_listener,
+ listener_id,
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
+ waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
+ self.lb_id, const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+
+ l7policy_name = data_utils.rand_name("lb_member_l7policy1-update")
+ l7policy_description = data_utils.arbitrary_string(size=255)
+ l7_redirect_url = 'http://localhost'
+ l7policy_kwargs = {
+ const.LISTENER_ID: listener_id,
+ const.NAME: l7policy_name,
+ const.DESCRIPTION: l7policy_description,
+ const.ADMIN_STATE_UP: False,
+ const.POSITION: 1,
+ const.ACTION: const.REDIRECT_TO_URL,
+ const.REDIRECT_URL: l7_redirect_url,
+ }
+
+ l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
+ self.addClassResourceCleanup(
+ self.mem_l7policy_client.cleanup_l7policy,
+ l7policy[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+ l7policy = waiters.wait_for_status(
+ self.mem_l7policy_client.show_l7policy,
+ l7policy[const.ID], const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+
+ self.assertEqual(l7policy_name, l7policy[const.NAME])
+ self.assertEqual(l7policy_description, l7policy[const.DESCRIPTION])
+ self.assertFalse(l7policy[const.ADMIN_STATE_UP])
+ parser.parse(l7policy[const.CREATED_AT])
+ parser.parse(l7policy[const.UPDATED_AT])
+ UUID(l7policy[const.ID])
+        # Operating status for an l7policy will be ONLINE if it is enabled:
+ if l7policy[const.ADMIN_STATE_UP]:
+ self.assertEqual(const.ONLINE, l7policy[const.OPERATING_STATUS])
+ else:
+ self.assertEqual(const.OFFLINE, l7policy[const.OPERATING_STATUS])
+ self.assertEqual(listener_id, l7policy[const.LISTENER_ID])
+ self.assertEqual(1, l7policy[const.POSITION])
+ self.assertEqual(const.REDIRECT_TO_URL, l7policy[const.ACTION])
+ self.assertEqual(l7_redirect_url, l7policy[const.REDIRECT_URL])
+ self.assertIsNone(l7policy.pop(const.REDIRECT_POOL_ID, None))
+
+ # Test that a user, without the load balancer member role, cannot
+ # use this command
+ if CONF.load_balancer.RBAC_test_type == const.ADVANCED:
+ self.assertRaises(
+ exceptions.Forbidden,
+ self.os_primary.l7policy_client.update_l7policy,
+ l7policy[const.ID], admin_state_up=True)
+
+ # Assert we didn't go into PENDING_*
+ l7policy_check = self.mem_l7policy_client.show_l7policy(
+ l7policy[const.ID])
+ self.assertEqual(const.ACTIVE,
+ l7policy_check[const.PROVISIONING_STATUS])
+ self.assertFalse(l7policy_check[const.ADMIN_STATE_UP])
+
+        # Test that a different user, with the load balancer member role,
+        # cannot update this l7policy
+ if not CONF.load_balancer.RBAC_test_type == const.NONE:
+ member2_client = self.os_roles_lb_member2.l7policy_client
+ self.assertRaises(exceptions.Forbidden,
+ member2_client.update_l7policy,
+ l7policy[const.ID], admin_state_up=True)
+
+ # Assert we didn't go into PENDING_*
+ l7policy_check = self.mem_l7policy_client.show_l7policy(
+ l7policy[const.ID])
+ self.assertEqual(const.ACTIVE,
+ l7policy_check[const.PROVISIONING_STATUS])
+ self.assertFalse(l7policy_check[const.ADMIN_STATE_UP])
+
+ new_name = data_utils.rand_name("lb_member_l7policy1-UPDATED")
+ new_description = data_utils.arbitrary_string(size=255,
+ base_text='new')
+ l7policy_update_kwargs = {
+ const.NAME: new_name,
+ const.DESCRIPTION: new_description,
+ const.ADMIN_STATE_UP: True,
+ const.POSITION: 2,
+ const.ACTION: const.REDIRECT_TO_POOL,
+ const.REDIRECT_POOL_ID: self.pool_id,
+ }
+ l7policy = self.mem_l7policy_client.update_l7policy(
+ l7policy[const.ID], **l7policy_update_kwargs)
+
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+ l7policy = waiters.wait_for_status(
+ self.mem_l7policy_client.show_l7policy,
+ l7policy[const.ID], const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+ if not CONF.load_balancer.test_with_noop:
+ l7policy = waiters.wait_for_status(
+ self.mem_l7policy_client.show_l7policy,
+ l7policy[const.ID], const.OPERATING_STATUS,
+ const.ONLINE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+
+ self.assertEqual(new_name, l7policy[const.NAME])
+ self.assertEqual(new_description, l7policy[const.DESCRIPTION])
+ self.assertTrue(l7policy[const.ADMIN_STATE_UP])
+ parser.parse(l7policy[const.CREATED_AT])
+ parser.parse(l7policy[const.UPDATED_AT])
+ UUID(l7policy[const.ID])
+        # Operating status for an l7policy will be ONLINE if it is enabled:
+ if l7policy[const.ADMIN_STATE_UP]:
+ self.assertEqual(const.ONLINE, l7policy[const.OPERATING_STATUS])
+ else:
+ self.assertEqual(const.OFFLINE, l7policy[const.OPERATING_STATUS])
+ self.assertEqual(listener_id, l7policy[const.LISTENER_ID])
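+        # Position will have recalculated to 1, as this is the only
+        # policy on the listener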
+ self.assertEqual(1, l7policy[const.POSITION])
+ self.assertEqual(const.REDIRECT_TO_POOL, l7policy[const.ACTION])
+ self.assertEqual(self.pool_id, l7policy[const.REDIRECT_POOL_ID])
+ self.assertIsNone(l7policy.pop(const.REDIRECT_URL, None))
+
+ @decorators.idempotent_id('7925eb4b-94b6-4c28-98c2-fd0b4f0976cc')
+ def test_l7policy_delete(self):
+ """Tests l7policy create and delete APIs.
+
+        * Creates an l7policy.
+ * Validates that other accounts cannot delete the l7policy
+ * Deletes the l7policy.
+ * Validates the l7policy is in the DELETED state.
+ """
+ l7policy_name = data_utils.rand_name("lb_member_l7policy1-delete")
+ l7policy_kwargs = {
+ const.LISTENER_ID: self.listener_id,
+ const.NAME: l7policy_name,
+ const.ACTION: const.REJECT,
+ }
+ l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
+ self.addClassResourceCleanup(
+ self.mem_l7policy_client.cleanup_l7policy,
+ l7policy[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer,
+ self.lb_id, const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+
+        # Test that a user without the load balancer member role cannot
+        # delete this l7policy
+ if CONF.load_balancer.RBAC_test_type == const.ADVANCED:
+ self.assertRaises(
+ exceptions.Forbidden,
+ self.os_primary.l7policy_client.delete_l7policy,
+ l7policy[const.ID])
+
+        # Test that a different user, with the load balancer member role,
+        # cannot delete this l7policy
+ if not CONF.load_balancer.RBAC_test_type == const.NONE:
+ member2_client = self.os_roles_lb_member2.l7policy_client
+ self.assertRaises(exceptions.Forbidden,
+ member2_client.delete_l7policy,
+ l7policy[const.ID])
+
+ self.mem_l7policy_client.delete_l7policy(l7policy[const.ID])
+
+ waiters.wait_for_deleted_status_or_not_found(
+ self.mem_l7policy_client.show_l7policy, l7policy[const.ID],
+ const.PROVISIONING_STATUS,
+ CONF.load_balancer.check_interval,
+ CONF.load_balancer.check_timeout)
diff --git a/octavia_tempest_plugin/tests/api/v2/test_listener.py b/octavia_tempest_plugin/tests/api/v2/test_listener.py
index baae6f3..de412c6 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_listener.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_listener.py
@@ -84,8 +84,12 @@
const.X_FORWARDED_FOR: "true",
const.X_FORWARDED_PORT: "true"
},
- # TODO(rm_work): need to finish the rest of this stuff
- # const.DEFAULT_POOL_ID: '',
+        # Don't test with a default pool here -- the scenario test covers
+        # that. Omitting it verifies the field isn't mandatory and avoids
+        # conflating pool failures with listener test failures.
+ # const.DEFAULT_POOL_ID: self.pool_id,
+
+ # TODO(rm_work): need to add TLS related stuff
# const.DEFAULT_TLS_CONTAINER_REF: '',
# const.SNI_CONTAINER_REFS: [],
}
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_l7policy.py b/octavia_tempest_plugin/tests/scenario/v2/test_l7policy.py
new file mode 100644
index 0000000..98d3bc6
--- /dev/null
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_l7policy.py
@@ -0,0 +1,202 @@
+# Copyright 2018 GoDaddy
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from uuid import UUID
+
+from dateutil import parser
+from tempest import config
+from tempest.lib.common.utils import data_utils
+from tempest.lib import decorators
+
+from octavia_tempest_plugin.common import constants as const
+from octavia_tempest_plugin.tests import test_base
+from octavia_tempest_plugin.tests import waiters
+
+CONF = config.CONF
+
+
+class L7PolicyScenarioTest(test_base.LoadBalancerBaseTest):
+
+ @classmethod
+ def resource_setup(cls):
+        """Set up resources needed by the tests."""
+ super(L7PolicyScenarioTest, cls).resource_setup()
+
+ lb_name = data_utils.rand_name("lb_member_lb1_l7policy")
+ lb_kwargs = {const.PROVIDER: CONF.load_balancer.provider,
+ const.NAME: lb_name}
+
+ cls._setup_lb_network_kwargs(lb_kwargs)
+
+ lb = cls.mem_lb_client.create_loadbalancer(**lb_kwargs)
+ cls.lb_id = lb[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_lb_client.cleanup_loadbalancer,
+ cls.lb_id)
+
+ waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
+ cls.lb_id, const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.lb_build_interval,
+ CONF.load_balancer.lb_build_timeout)
+
+ listener_name = data_utils.rand_name("lb_member_listener1_l7policy")
+ listener_kwargs = {
+ const.NAME: listener_name,
+ const.PROTOCOL: const.HTTP,
+ const.PROTOCOL_PORT: '80',
+ const.LOADBALANCER_ID: cls.lb_id,
+ }
+ listener = cls.mem_listener_client.create_listener(**listener_kwargs)
+ cls.listener_id = listener[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_listener_client.cleanup_listener,
+ cls.listener_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
+ waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
+ cls.lb_id, const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+
+ pool_name = data_utils.rand_name("lb_member_pool1_l7policy")
+ pool_kwargs = {
+ const.NAME: pool_name,
+ const.PROTOCOL: const.HTTP,
+ const.LB_ALGORITHM: const.LB_ALGORITHM_ROUND_ROBIN,
+ const.LOADBALANCER_ID: cls.lb_id,
+ }
+ pool = cls.mem_pool_client.create_pool(**pool_kwargs)
+ cls.pool_id = pool[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_pool_client.cleanup_pool,
+ cls.pool_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
+ waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
+ cls.lb_id, const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+
+ @decorators.idempotent_id('ffd598d9-d8cd-4586-a749-cde4897e64dd')
+ def test_l7policy_CRUD(self):
+ """Tests l7policy create, read, update, delete
+
+ * Create a fully populated l7policy.
+ * Show l7policy details.
+ * Update the l7policy.
+ * Delete the l7policy.
+ """
+
+ # L7Policy create
+ l7policy_name = data_utils.rand_name("lb_member_l7policy1-CRUD")
+ l7policy_description = data_utils.arbitrary_string(size=255)
+ l7policy_kwargs = {
+ const.LISTENER_ID: self.listener_id,
+ const.NAME: l7policy_name,
+ const.DESCRIPTION: l7policy_description,
+ const.ADMIN_STATE_UP: False,
+ const.POSITION: 1,
+ const.ACTION: const.REDIRECT_TO_POOL,
+ const.REDIRECT_POOL_ID: self.pool_id,
+ }
+
+ l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
+ self.addClassResourceCleanup(
+ self.mem_l7policy_client.cleanup_l7policy,
+ l7policy[const.ID],
+ lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+ l7policy = waiters.wait_for_status(
+ self.mem_l7policy_client.show_l7policy,
+ l7policy[const.ID], const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+
+ self.assertEqual(l7policy_name, l7policy[const.NAME])
+ self.assertEqual(l7policy_description, l7policy[const.DESCRIPTION])
+ self.assertFalse(l7policy[const.ADMIN_STATE_UP])
+ parser.parse(l7policy[const.CREATED_AT])
+ parser.parse(l7policy[const.UPDATED_AT])
+ UUID(l7policy[const.ID])
+ # Operating status will be OFFLINE while admin_state_up = False
+ self.assertEqual(const.OFFLINE, l7policy[const.OPERATING_STATUS])
+ self.assertEqual(self.listener_id, l7policy[const.LISTENER_ID])
+ self.assertEqual(1, l7policy[const.POSITION])
+ self.assertEqual(const.REDIRECT_TO_POOL, l7policy[const.ACTION])
+ self.assertEqual(self.pool_id, l7policy[const.REDIRECT_POOL_ID])
+ self.assertIsNone(l7policy.pop(const.REDIRECT_URL, None))
+
+ # L7Policy update
+ new_name = data_utils.rand_name("lb_member_l7policy1-update")
+ new_description = data_utils.arbitrary_string(size=255,
+ base_text='new')
+ redirect_url = 'http://localhost'
+ l7policy_update_kwargs = {
+ const.NAME: new_name,
+ const.DESCRIPTION: new_description,
+ const.ADMIN_STATE_UP: True,
+ const.POSITION: 2,
+ const.ACTION: const.REDIRECT_TO_URL,
+ const.REDIRECT_URL: redirect_url,
+ }
+ l7policy = self.mem_l7policy_client.update_l7policy(
+ l7policy[const.ID], **l7policy_update_kwargs)
+
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+ l7policy = waiters.wait_for_status(
+ self.mem_l7policy_client.show_l7policy,
+ l7policy[const.ID], const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+
+ self.assertEqual(new_name, l7policy[const.NAME])
+ self.assertEqual(new_description, l7policy[const.DESCRIPTION])
+ self.assertTrue(l7policy[const.ADMIN_STATE_UP])
+        # Operating status for an l7policy will be ONLINE if it is enabled:
+ self.assertEqual(const.ONLINE, l7policy[const.OPERATING_STATUS])
+ self.assertEqual(self.listener_id, l7policy[const.LISTENER_ID])
+ # Position will have recalculated to 1
+ self.assertEqual(1, l7policy[const.POSITION])
+ self.assertEqual(const.REDIRECT_TO_URL, l7policy[const.ACTION])
+ self.assertEqual(redirect_url, l7policy[const.REDIRECT_URL])
+ self.assertIsNone(l7policy.pop(const.REDIRECT_POOL_ID, None))
+
+ # L7Policy delete
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer,
+ self.lb_id, const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.check_interval,
+ CONF.load_balancer.check_timeout)
+ self.mem_l7policy_client.delete_l7policy(l7policy[const.ID])
+
+ waiters.wait_for_deleted_status_or_not_found(
+ self.mem_l7policy_client.show_l7policy, l7policy[const.ID],
+ const.PROVISIONING_STATUS,
+ CONF.load_balancer.check_interval,
+ CONF.load_balancer.check_timeout)
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_listener.py b/octavia_tempest_plugin/tests/scenario/v2/test_listener.py
index 1b6578a..c07bb4a 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_listener.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_listener.py
@@ -52,6 +52,46 @@
CONF.load_balancer.lb_build_interval,
CONF.load_balancer.lb_build_timeout)
+ pool1_name = data_utils.rand_name("lb_member_pool1_listener")
+ pool1_kwargs = {
+ const.NAME: pool1_name,
+ const.PROTOCOL: const.HTTP,
+ const.LB_ALGORITHM: const.LB_ALGORITHM_ROUND_ROBIN,
+ const.LOADBALANCER_ID: cls.lb_id,
+ }
+ pool1 = cls.mem_pool_client.create_pool(**pool1_kwargs)
+ cls.pool1_id = pool1[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_pool_client.cleanup_pool,
+ cls.pool1_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
+ waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
+ cls.lb_id, const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+
+ pool2_name = data_utils.rand_name("lb_member_pool2_listener")
+ pool2_kwargs = {
+ const.NAME: pool2_name,
+ const.PROTOCOL: const.HTTP,
+ const.LB_ALGORITHM: const.LB_ALGORITHM_ROUND_ROBIN,
+ const.LOADBALANCER_ID: cls.lb_id,
+ }
+ pool2 = cls.mem_pool_client.create_pool(**pool2_kwargs)
+ cls.pool2_id = pool2[const.ID]
+ cls.addClassResourceCleanup(
+ cls.mem_pool_client.cleanup_pool,
+ cls.pool2_id,
+ lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
+ waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
+ cls.lb_id, const.PROVISIONING_STATUS,
+ const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
+
@decorators.idempotent_id('4a874014-b7d1-49a4-ac9a-2400b3434700')
def test_listener_CRUD(self):
"""Tests listener create, read, update, delete
@@ -81,8 +121,8 @@
const.X_FORWARDED_FOR: "true",
const.X_FORWARDED_PORT: "true"
},
+ const.DEFAULT_POOL_ID: self.pool1_id,
# TODO(rm_work): need to finish the rest of this stuff
- # const.DEFAULT_POOL_ID: '',
# const.DEFAULT_TLS_CONTAINER_REF: '',
# const.SNI_CONTAINER_REFS: [],
}
@@ -92,6 +132,11 @@
listener[const.ID],
lb_client=self.mem_lb_client, lb_id=self.lb_id)
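+        # Wait for the parent load balancer to return to ACTIVE before
+        # polling the listener; checking only the child object can race
+        # the load balancer's provisioning status.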
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
listener = waiters.wait_for_status(
self.mem_listener_client.show_listener,
listener[const.ID], const.PROVISIONING_STATUS,
@@ -119,6 +164,7 @@
self.assertEqual(1000, listener[const.TIMEOUT_MEMBER_CONNECT])
self.assertEqual(1000, listener[const.TIMEOUT_MEMBER_DATA])
self.assertEqual(50, listener[const.TIMEOUT_TCP_INSPECT])
+ self.assertEqual(self.pool1_id, listener[const.DEFAULT_POOL_ID])
# Listener update
new_name = data_utils.rand_name("lb_member_listener1-update")
@@ -137,14 +183,19 @@
const.X_FORWARDED_FOR: "false",
const.X_FORWARDED_PORT: "false"
},
+ const.DEFAULT_POOL_ID: self.pool2_id,
# TODO(rm_work): need to finish the rest of this stuff
- # const.DEFAULT_POOL_ID: '',
# const.DEFAULT_TLS_CONTAINER_REF: '',
# const.SNI_CONTAINER_REFS: [],
}
listener = self.mem_listener_client.update_listener(
listener[const.ID], **listener_update_kwargs)
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
listener = waiters.wait_for_status(
self.mem_listener_client.show_listener,
listener[const.ID], const.PROVISIONING_STATUS,
@@ -179,6 +230,7 @@
self.assertEqual(2000, listener[const.TIMEOUT_MEMBER_CONNECT])
self.assertEqual(2000, listener[const.TIMEOUT_MEMBER_DATA])
self.assertEqual(100, listener[const.TIMEOUT_TCP_INSPECT])
+ self.assertEqual(self.pool2_id, listener[const.DEFAULT_POOL_ID])
# Listener delete
waiters.wait_for_status(
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_member.py b/octavia_tempest_plugin/tests/scenario/v2/test_member.py
index 6fb53f2..2a778e9 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_member.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_member.py
@@ -123,6 +123,11 @@
member[const.ID], pool_id=self.pool_id,
lb_client=self.mem_lb_client, lb_id=self.lb_id)
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
member = waiters.wait_for_status(
self.mem_member_client.show_member,
member[const.ID], const.PROVISIONING_STATUS,
@@ -161,6 +166,11 @@
member = self.mem_member_client.update_member(
member[const.ID], **member_update_kwargs)
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
member = waiters.wait_for_status(
self.mem_member_client.show_member,
member[const.ID], const.PROVISIONING_STATUS,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_pool.py b/octavia_tempest_plugin/tests/scenario/v2/test_pool.py
index eef80d5..1ddc8b2 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_pool.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_pool.py
@@ -113,6 +113,11 @@
pool[const.ID],
lb_client=self.mem_lb_client, lb_id=self.lb_id)
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
pool = waiters.wait_for_status(
self.mem_pool_client.show_pool,
pool[const.ID], const.PROVISIONING_STATUS,
@@ -160,6 +165,11 @@
pool = self.mem_pool_client.update_pool(
pool[const.ID], **pool_update_kwargs)
+ waiters.wait_for_status(
+ self.mem_lb_client.show_loadbalancer, self.lb_id,
+ const.PROVISIONING_STATUS, const.ACTIVE,
+ CONF.load_balancer.build_interval,
+ CONF.load_balancer.build_timeout)
pool = waiters.wait_for_status(
self.mem_pool_client.show_pool,
pool[const.ID], const.PROVISIONING_STATUS,
diff --git a/octavia_tempest_plugin/tests/test_base.py b/octavia_tempest_plugin/tests/test_base.py
index 596e7fb..47ea4ad 100644
--- a/octavia_tempest_plugin/tests/test_base.py
+++ b/octavia_tempest_plugin/tests/test_base.py
@@ -110,6 +110,7 @@
cls.mem_member_client = cls.os_roles_lb_member.member_client
cls.mem_healthmonitor_client = (
cls.os_roles_lb_member.healthmonitor_client)
+ cls.mem_l7policy_client = cls.os_roles_lb_member.l7policy_client
@classmethod
def resource_setup(cls):